#!/usr/bin/env perl
# Copyright (C) 2006, Eric Wong <normalperson@yhbt.net>
# License: GPL v2 or later
use 5.008;
use warnings;
use strict;
use vars qw/	$AUTHOR $VERSION
		$sha1 $sha1_short $_revision $_repository
		$_q $_authors $_authors_prog %users/;
$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
$VERSION = '@@GIT_VERSION@@';

# From which subdir have we been invoked?
my $cmd_dir_prefix = eval {
	command_oneline([qw/rev-parse --show-prefix/], STDERR => 0)
} || '';

my $git_dir_user_set = 1 if defined $ENV{GIT_DIR};
$ENV{GIT_DIR} ||= '.git';
$Git::SVN::default_repo_id = 'svn';
$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
$Git::SVN::Ra::_log_window_size = 100;
$Git::SVN::_minimize_url = 'unset';

if (! exists $ENV{SVN_SSH} && exists $ENV{GIT_SSH}) {
	$ENV{SVN_SSH} = $ENV{GIT_SSH};
}

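# On msys (Git for Windows), apparently because the SVN libraries
# re-parse SVN_SSH as a command line, backslashes are doubled and the
# whole value is quoted so paths like C:\Program Files\...\plink.exe
# survive intact.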
if (exists $ENV{SVN_SSH} && $^O eq 'msys') {
	$ENV{SVN_SSH} =~ s/\\/\\\\/g;
	$ENV{SVN_SSH} =~ s/(.*)/"$1"/;
}

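# Remember the user's timezone for log display (it is handed to
# Git::SVN::Log below), then run with TZ=UTC so timestamps are
# converted consistently.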
$Git::SVN::Log::TZ = $ENV{TZ};
$ENV{TZ} = 'UTC';
$| = 1; # unbuffer STDOUT

sub fatal (@) { print STDERR "@_\n"; exit 1 }

# All SVN commands do it. Otherwise we may die on SIGPIPE when the remote
# repository decides to close the connection which we expect to be kept alive.
$SIG{PIPE} = 'IGNORE';

sub _req_svn {
	require SVN::Core; # use()-ing this causes segfaults for me... *shrug*
	require SVN::Ra;
	require SVN::Delta;
	if ($SVN::Core::VERSION lt '1.1.0') {
		fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)";
	}
}
my $can_compress = eval { require Compress::Zlib; 1};
push @Git::SVN::Ra::ISA, 'SVN::Ra';
push @SVN::Git::Editor::ISA, 'SVN::Delta::Editor';
push @SVN::Git::Fetcher::ISA, 'SVN::Delta::Editor';
use Carp qw/croak/;
use Digest::MD5;
use IO::File qw//;
use File::Basename qw/dirname basename/;
use File::Path qw/mkpath/;
use File::Spec;
use File::Find;
use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
use IPC::Open3;
use Git;
use Memoize; # core since 5.8.0, Jul 2002

BEGIN {
	# import functions from Git into our packages, en masse
	no strict 'refs';
	foreach (qw/command command_oneline command_noisy command_output_pipe
	            command_input_pipe command_close_pipe
	            command_bidi_pipe command_close_bidi_pipe/) {
		for my $package ( qw(SVN::Git::Editor SVN::Git::Fetcher
			Git::SVN::Migration Git::SVN::Log Git::SVN),
			__PACKAGE__) {
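			# e.g. *SVN::Git::Editor::command = \&Git::command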
*{"${package}::$_"} = \&{"Git::$_"};
|
|
|
|
}
|
2007-01-11 10:14:43 +00:00
|
|
|
}
|
2011-04-04 19:09:08 +00:00
|
|
|
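	# Memoizing these caches repeated Git config lookups for the
	# life of the process.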
	Memoize::memoize 'Git::config';
	Memoize::memoize 'Git::config_bool';
}

my ($SVN);

$sha1 = qr/[a-f\d]{40}/;
$sha1_short = qr/[a-f\d]{4,40}/;
my ($_stdin, $_help, $_edit,
	$_message, $_file, $_branch_dest,
	$_template, $_shared,
	$_version, $_fetch_all, $_no_rebase, $_fetch_parent,
	$_merge, $_strategy, $_dry_run, $_local,
	$_prefix, $_no_checkout, $_url, $_verbose,
	$_git_format, $_commit_url, $_tag, $_merge_info, $_interactive);
$Git::SVN::_follow_parent = 1;
$SVN::Git::Fetcher::_placeholder_filename = ".gitignore";
$_q ||= 0;
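# Getopt::Long specs used in the option tables below: "=s" takes a
# required string, ":i" an optional integer, "!" also accepts a
# --no- prefixed form, and "+" counts repetitions (so -q -q bumps
# $_q to 2).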
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
		    'config-dir=s' => \$Git::SVN::Ra::config_dir,
		    'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache,
		    'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex,
		    'ignore-refs=s' => \$Git::SVN::Ra::_ignore_refs_regex );
my %fc_opts = ( 'follow-parent|follow!' => \$Git::SVN::_follow_parent,
		'authors-file|A=s' => \$_authors,
		'authors-prog=s' => \$_authors_prog,
		'repack:i' => \$Git::SVN::_repack,
		'noMetadata' => \$Git::SVN::_no_metadata,
		'useSvmProps' => \$Git::SVN::_use_svm_props,
		'useSvnsyncProps' => \$Git::SVN::_use_svnsync_props,
		'log-window-size=i' => \$Git::SVN::Ra::_log_window_size,
		'no-checkout' => \$_no_checkout,
		'quiet|q+' => \$_q,
		'repack-flags|repack-args|repack-opts=s' =>
			\$Git::SVN::_repack_flags,
		'use-log-author' => \$Git::SVN::_use_log_author,
		'add-author-from' => \$Git::SVN::_add_author_from,
		'localtime' => \$Git::SVN::_localtime,
		%remote_opts );

my ($_trunk, @_tags, @_branches, $_stdlayout);
my %icv;
my %init_opts = ( 'template=s' => \$_template, 'shared:s' => \$_shared,
		  'trunk|T=s' => \$_trunk, 'tags|t=s@' => \@_tags,
		  'branches|b=s@' => \@_branches, 'prefix=s' => \$_prefix,
		  'stdlayout|s' => \$_stdlayout,
		  'minimize-url|m!' => \$Git::SVN::_minimize_url,
		  'no-metadata' => sub { $icv{noMetadata} = 1 },
		  'use-svm-props' => sub { $icv{useSvmProps} = 1 },
		  'use-svnsync-props' => sub { $icv{useSvnsyncProps} = 1 },
		  'rewrite-root=s' => sub { $icv{rewriteRoot} = $_[1] },
		  'rewrite-uuid=s' => sub { $icv{rewriteUUID} = $_[1] },
		  %remote_opts );
my %cmt_opts = ( 'edit|e' => \$_edit,
		'rmdir' => \$SVN::Git::Editor::_rmdir,
		'find-copies-harder' => \$SVN::Git::Editor::_find_copies_harder,
		'l=i' => \$SVN::Git::Editor::_rename_limit,
		'copy-similarity|C=i'=> \$SVN::Git::Editor::_cp_similarity
);

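# Command dispatch table: each entry maps a subcommand name to
# [handler coderef, help text, option spec hashref].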
my %cmd = (
	fetch => [ \&cmd_fetch, "Download new revisions from SVN",
			{ 'revision|r=s' => \$_revision,
			  'fetch-all|all' => \$_fetch_all,
			  'parent|p' => \$_fetch_parent,
			   %fc_opts } ],
	clone => [ \&cmd_clone, "Initialize and fetch revisions",
			{ 'revision|r=s' => \$_revision,
			  'preserve-empty-dirs' =>
				\$SVN::Git::Fetcher::_preserve_empty_dirs,
			  'placeholder-filename=s' =>
				\$SVN::Git::Fetcher::_placeholder_filename,
			   %fc_opts, %init_opts } ],
	init => [ \&cmd_init, "Initialize a repo for tracking" .
			  " (requires URL argument)",
			  \%init_opts ],
	'multi-init' => [ \&cmd_multi_init,
			  "Deprecated alias for ".
			  "'$0 init -T<trunk> -b<branches> -t<tags>'",
			  \%init_opts ],
	dcommit => [ \&cmd_dcommit,
		     'Commit several diffs to merge with upstream',
			{ 'merge|m|M' => \$_merge,
			  'strategy|s=s' => \$_strategy,
			  'verbose|v' => \$_verbose,
			  'dry-run|n' => \$_dry_run,
			  'fetch-all|all' => \$_fetch_all,
			  'commit-url=s' => \$_commit_url,
			  'revision|r=i' => \$_revision,
			  'no-rebase' => \$_no_rebase,
			  'mergeinfo=s' => \$_merge_info,
			  'interactive|i' => \$_interactive,
			%cmt_opts, %fc_opts } ],
	branch => [ \&cmd_branch,
		    'Create a branch in the SVN repository',
		    { 'message|m=s' => \$_message,
		      'destination|d=s' => \$_branch_dest,
		      'dry-run|n' => \$_dry_run,
		      'tag|t' => \$_tag,
		      'username=s' => \$Git::SVN::Prompt::_username,
		      'commit-url=s' => \$_commit_url } ],
	tag => [ sub { $_tag = 1; cmd_branch(@_) },
		 'Create a tag in the SVN repository',
		 { 'message|m=s' => \$_message,
		   'destination|d=s' => \$_branch_dest,
		   'dry-run|n' => \$_dry_run,
		   'username=s' => \$Git::SVN::Prompt::_username,
		   'commit-url=s' => \$_commit_url } ],
	'set-tree' => [ \&cmd_set_tree,
			"Set an SVN repository to a git tree-ish",
			{ 'stdin' => \$_stdin, %cmt_opts, %fc_opts, } ],
	'create-ignore' => [ \&cmd_create_ignore,
			     'Create a .gitignore per svn:ignore',
			     { 'revision|r=i' => \$_revision
			     } ],
	'mkdirs' => [ \&cmd_mkdirs ,
		      "recreate empty directories after a checkout",
		      { 'revision|r=i' => \$_revision } ],
	'propget' => [ \&cmd_propget,
		       'Print the value of a property on a file or directory',
		       { 'revision|r=i' => \$_revision } ],
	'proplist' => [ \&cmd_proplist,
		       'List all properties of a file or directory',
		       { 'revision|r=i' => \$_revision } ],
	'show-ignore' => [ \&cmd_show_ignore, "Show svn:ignore listings",
			{ 'revision|r=i' => \$_revision
			} ],
	'show-externals' => [ \&cmd_show_externals, "Show svn:externals listings",
			{ 'revision|r=i' => \$_revision
			} ],
	'multi-fetch' => [ \&cmd_multi_fetch,
			   "Deprecated alias for $0 fetch --all",
			   { 'revision|r=s' => \$_revision, %fc_opts } ],
	'migrate' => [ sub { },
		       # no-op, we automatically run this anyways,
		       'Migrate configuration/metadata/layout from
		        previous versions of git-svn',
		       { 'minimize' => \$Git::SVN::Migration::_minimize,
			 %remote_opts } ],
	'log' => [ \&Git::SVN::Log::cmd_show_log, 'Show commit logs',
			{ 'limit=i' => \$Git::SVN::Log::limit,
			  'revision|r=s' => \$_revision,
			  'verbose|v' => \$Git::SVN::Log::verbose,
			  'incremental' => \$Git::SVN::Log::incremental,
			  'oneline' => \$Git::SVN::Log::oneline,
			  'show-commit' => \$Git::SVN::Log::show_commit,
			  'non-recursive' => \$Git::SVN::Log::non_recursive,
			  'authors-file|A=s' => \$_authors,
			  'color' => \$Git::SVN::Log::color,
			  'pager=s' => \$Git::SVN::Log::pager
			} ],
	'find-rev' => [ \&cmd_find_rev,
			"Translate between SVN revision numbers and tree-ish",
			{} ],
	'rebase' => [ \&cmd_rebase, "Fetch and rebase your working directory",
			{ 'merge|m|M' => \$_merge,
			  'verbose|v' => \$_verbose,
			  'strategy|s=s' => \$_strategy,
			  'local|l' => \$_local,
			  'fetch-all|all' => \$_fetch_all,
			  'dry-run|n' => \$_dry_run,
			  %fc_opts } ],
	'commit-diff' => [ \&cmd_commit_diff,
			   'Commit a diff between two trees',
			{ 'message|m=s' => \$_message,
			  'file|F=s' => \$_file,
			  'revision|r=s' => \$_revision,
			%cmt_opts } ],
	'info' => [ \&cmd_info,
		    "Show info about the latest SVN revision
		     on the current branch",
		    { 'url' => \$_url, } ],
	'blame' => [ \&Git::SVN::Log::cmd_blame,
		    "Show what revision and author last modified each line of a file",
		    { 'git-format' => \$_git_format } ],
	'reset' => [ \&cmd_reset,
		     "Undo fetches back to the specified SVN revision",
		     { 'revision|r=s' => \$_revision,
		       'parent|p' => \$_fetch_parent } ],
	'gc' => [ \&cmd_gc,
		  "Compress unhandled.log files in .git/svn and remove " .
		  "index files in .git/svn",
		{} ],
);

use Term::ReadLine;
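# FakeTerm stands in for Term::ReadLine when no terminal can be
# initialized; any attempt to read from it dies with the saved error.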
package FakeTerm;
sub new {
	my ($class, $reason) = @_;
	return bless \$reason, shift;
}
sub readline {
	my $self = shift;
	die "Cannot use readline on FakeTerm: $$self";
}
package main;

my $term = eval {
	$ENV{"GIT_SVN_NOTTY"}
	? new Term::ReadLine 'git-svn', \*STDIN, \*STDOUT
	: new Term::ReadLine 'git-svn';
};
if ($@) {
	$term = new FakeTerm "$@: going non-interactive";
}

my $cmd;
for (my $i = 0; $i < @ARGV; $i++) {
	if (defined $cmd{$ARGV[$i]}) {
		$cmd = $ARGV[$i];
		splice @ARGV, $i, 1;
		last;
	} elsif ($ARGV[$i] eq 'help') {
		$cmd = $ARGV[$i+1];
		usage(0);
	}
};

# make sure we're always running at the top-level working directory
unless ($cmd && $cmd =~ /(?:clone|init|multi-init)$/) {
	unless (-d $ENV{GIT_DIR}) {
		if ($git_dir_user_set) {
			die "GIT_DIR=$ENV{GIT_DIR} explicitly set, ",
			    "but it is not a directory\n";
		}
		my $git_dir = delete $ENV{GIT_DIR};
		my $cdup = undef;
		git_cmd_try {
			$cdup = command_oneline(qw/rev-parse --show-cdup/);
			$git_dir = '.' unless ($cdup);
			chomp $cdup if ($cdup);
			$cdup = "." unless ($cdup && length $cdup);
		} "Already at toplevel, but $git_dir not found\n";
		chdir $cdup or die "Unable to chdir up to '$cdup'\n";
		unless (-d $git_dir) {
			die "$git_dir still not found after going to ",
			    "'$cdup'\n";
		}
		$ENV{GIT_DIR} = $git_dir;
	}
	$_repository = Git->repository(Repository => $ENV{GIT_DIR});
}

my %opts = %{$cmd{$cmd}->[2]} if (defined $cmd);

read_git_config(\%opts);
if ($cmd && ($cmd eq 'log' || $cmd eq 'blame')) {
	Getopt::Long::Configure('pass_through');
}
my $rv = GetOptions(%opts, 'h|H' => \$_help, 'version|V' => \$_version,
		    'minimize-connections' => \$Git::SVN::Migration::_minimize,
		    'id|i=s' => \$Git::SVN::default_ref_id,
		    'svn-remote|remote|R=s' => sub {
			$Git::SVN::no_reuse_existing = 1;
			$Git::SVN::default_repo_id = $_[1] });
exit 1 if (!$rv && $cmd && $cmd ne 'log');

usage(0) if $_help;
version() if $_version;
usage(1) unless defined $cmd;
load_authors() if $_authors;
if (defined $_authors_prog) {
	$_authors_prog = "'" . File::Spec->rel2abs($_authors_prog) . "'";
}

unless ($cmd =~ /^(?:clone|init|multi-init|commit-diff)$/) {
	Git::SVN::Migration::migration_check();
}
Git::SVN::init_vars();
eval {
	Git::SVN::verify_remotes_sanity();
	$cmd{$cmd}->[0]->(@ARGV);
};
fatal $@ if $@;
post_fetch_checkout();
exit 0;

####################### primary functions ######################
sub usage {
	my $exit = shift || 0;
	my $fd = $exit ? \*STDERR : \*STDOUT;
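	# <<"" is a here-document terminated by the next empty line, so
	# each block below prints everything up to the following blank line.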
print $fd <<"";
|
|
|
|
git-svn - bidirectional operations between a single Subversion tree and git
|
2008-07-13 13:36:15 +00:00
|
|
|
Usage: git svn <command> [options] [arguments]\n
|
2006-03-03 09:20:09 +00:00
|
|
|
|
|
|
|
print $fd "Available commands:\n" unless $cmd;
|
2006-02-16 09:24:16 +00:00
|
|
|
|
|
|
|
foreach (sort keys %cmd) {
|
2006-03-03 09:20:09 +00:00
|
|
|
next if $cmd && $cmd ne $_;
|
2007-02-15 03:34:56 +00:00
|
|
|
next if /^multi-/; # don't show deprecated commands
|
2006-10-11 21:53:36 +00:00
|
|
|
print $fd ' ',pack('A17',$_),$cmd{$_}->[1],"\n";
|
2007-11-03 18:53:34 +00:00
|
|
|
foreach (sort keys %{$cmd{$_}->[2]}) {
|
2007-04-03 08:57:08 +00:00
|
|
|
# mixed-case options are for .git/config only
|
|
|
|
next if /[A-Z]/ && /^[a-z]+$/i;
|
2006-03-03 09:20:09 +00:00
|
|
|
# prints out arguments as they should be passed:
|
2006-05-24 08:40:37 +00:00
|
|
|
my $x = s#[:=]s$## ? '<arg>' : s#[:=]i$## ? '<num>' : '';
|
2006-10-11 21:53:36 +00:00
|
|
|
print $fd ' ' x 21, join(', ', map { length $_ > 1 ?
|
2006-03-03 09:20:09 +00:00
|
|
|
"--$_" : "-$_" }
|
|
|
|
split /\|/,$_)," $x\n";
|
|
|
|
}
|
2006-02-16 09:24:16 +00:00
|
|
|
}
|
|
|
|
print $fd <<"";
|
2006-03-03 09:20:09 +00:00
|
|
|
\nGIT_SVN_ID may be set in the environment or via the --id/-i switch to an
|
|
|
|
arbitrary identifier if you're tracking multiple SVN branches/repositories in
|
|
|
|
one git repository and want to keep them separate. See git-svn(1) for more
|
|
|
|
information.
|
2006-02-16 09:24:16 +00:00
|
|
|
|
|
|
|
exit $exit;
|
|
|
|
}
|
|
|
|
|
2006-02-20 18:57:29 +00:00
|
|
|
sub version {
|
2010-03-04 10:23:53 +00:00
|
|
|
::_req_svn();
|
2006-12-28 09:16:20 +00:00
|
|
|
print "git-svn version $VERSION (svn $SVN::Core::VERSION)\n";
|
2006-02-20 18:57:29 +00:00
|
|
|
exit 0;
|
|
|
|
}
|
|
|
|
|
2011-09-16 21:02:01 +00:00
|
|
|
sub ask {
|
|
|
|
my ($prompt, %arg) = @_;
|
|
|
|
my $valid_re = $arg{valid_re};
|
|
|
|
my $default = $arg{default};
|
|
|
|
my $resp;
|
|
|
|
my $i = 0;
|
|
|
|
|
|
|
|
if ( !( defined($term->IN)
|
|
|
|
&& defined( fileno($term->IN) )
|
|
|
|
&& defined( $term->OUT )
|
|
|
|
&& defined( fileno($term->OUT) ) ) ){
|
|
|
|
return defined($default) ? $default : undef;
|
|
|
|
}
|
|
|
|
|
|
|
|
while ($i++ < 10) {
|
|
|
|
$resp = $term->readline($prompt);
|
|
|
|
if (!defined $resp) { # EOF
|
|
|
|
print "\n";
|
|
|
|
return defined $default ? $default : undef;
|
|
|
|
}
|
|
|
|
if ($resp eq '' and defined $default) {
|
|
|
|
return $default;
|
|
|
|
}
|
|
|
|
if (!defined $valid_re or $resp =~ /$valid_re/) {
|
|
|
|
return $resp;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return undef;
|
|
|
|
}
|
|
|
|
|
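# Example: ask("Commit? ", valid_re => qr/^(?:y|n)/i, default => "y")
# returns the default on a bare enter (or when no terminal is
# available) and undef after ten invalid answers.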
sub do_git_init_db {
	unless (-d $ENV{GIT_DIR}) {
		my @init_db = ('init');
		push @init_db, "--template=$_template" if defined $_template;
		if (defined $_shared) {
			if ($_shared =~ /[a-z]/) {
				push @init_db, "--shared=$_shared";
			} else {
				push @init_db, "--shared";
			}
		}
		command_noisy(@init_db);
		$_repository = Git->repository(Repository => ".git");
	}
	my $set;
	my $pfx = "svn-remote.$Git::SVN::default_repo_id";
	foreach my $i (keys %icv) {
		die "'$set' and '$i' cannot both be set\n" if $set;
		next unless defined $icv{$i};
		command_noisy('config', "$pfx.$i", $icv{$i});
		$set = $i;
	}
	my $ignore_paths_regex = \$SVN::Git::Fetcher::_ignore_regex;
	command_noisy('config', "$pfx.ignore-paths", $$ignore_paths_regex)
		if defined $$ignore_paths_regex;
	my $ignore_refs_regex = \$Git::SVN::Ra::_ignore_refs_regex;
	command_noisy('config', "$pfx.ignore-refs", $$ignore_refs_regex)
		if defined $$ignore_refs_regex;

	if (defined $SVN::Git::Fetcher::_preserve_empty_dirs) {
		my $fname = \$SVN::Git::Fetcher::_placeholder_filename;
		command_noisy('config', "$pfx.preserve-empty-dirs", 'true');
		command_noisy('config', "$pfx.placeholder-filename", $$fname);
	}
}

sub init_subdir {
	my $repo_path = shift or return;
	mkpath([$repo_path]) unless -d $repo_path;
	chdir $repo_path or die "Couldn't chdir to $repo_path: $!\n";
	$ENV{GIT_DIR} = '.git';
	$_repository = Git->repository(Repository => $ENV{GIT_DIR});
}

sub cmd_clone {
	my ($url, $path) = @_;
	if (!defined $path &&
	    (defined $_trunk || @_branches || @_tags ||
	     defined $_stdlayout) &&
	    $url !~ m#^[a-z\+]+://#) {
		$path = $url;
	}
	$path = basename($url) if !defined $path || !length $path;
	my $authors_absolute = $_authors ? File::Spec->rel2abs($_authors) : "";
	cmd_init($url, $path);
	command_oneline('config', 'svn.authorsfile', $authors_absolute)
		if $_authors;
	Git::SVN::fetch_all($Git::SVN::default_repo_id);
}

sub cmd_init {
	if (defined $_stdlayout) {
		$_trunk = 'trunk' if (!defined $_trunk);
		@_tags = 'tags' if (! @_tags);
		@_branches = 'branches' if (! @_branches);
	}
	if (defined $_trunk || @_branches || @_tags) {
		return cmd_multi_init(@_);
	}
	my $url = shift or die "SVN repository location required ",
			       "as a command-line argument\n";
	$url = canonicalize_url($url);
	init_subdir(@_);
	do_git_init_db();

	if ($Git::SVN::_minimize_url eq 'unset') {
		$Git::SVN::_minimize_url = 0;
	}

	Git::SVN->init($url);
}

sub cmd_fetch {
	if (grep /^\d+=./, @_) {
		die "'<rev>=<commit>' fetch arguments are ",
		    "no longer supported.\n";
	}
	my ($remote) = @_;
	if (@_ > 1) {
		die "Usage: $0 fetch [--all] [--parent] [svn-remote]\n";
	}
	$Git::SVN::no_reuse_existing = undef;
	if ($_fetch_parent) {
		my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
		unless ($gs) {
			die "Unable to determine upstream SVN information from ",
			    "working tree history\n";
		}
		# just fetch, don't checkout.
		$_no_checkout = 'true';
		$_fetch_all ? $gs->fetch_all : $gs->fetch;
	} elsif ($_fetch_all) {
		cmd_multi_fetch();
	} else {
		$remote ||= $Git::SVN::default_repo_id;
		Git::SVN::fetch_all($remote, Git::SVN::read_all_remotes());
	}
}

sub cmd_set_tree {
	my (@commits) = @_;
	if ($_stdin || !@commits) {
		print "Reading from stdin...\n";
		@commits = ();
		while (<STDIN>) {
			if (/\b($sha1_short)\b/o) {
				unshift @commits, $1;
			}
		}
	}
	my @revs;
	foreach my $c (@commits) {
		my @tmp = command('rev-parse',$c);
		if (scalar @tmp == 1) {
			push @revs, $tmp[0];
		} elsif (scalar @tmp > 1) {
			push @revs, reverse(command('rev-list',@tmp));
		} else {
			fatal "Failed to rev-parse $c";
		}
	}
	my $gs = Git::SVN->new;
	my ($r_last, $cmt_last) = $gs->last_rev_commit;
	$gs->fetch;
	if (defined $gs->{last_rev} && $r_last != $gs->{last_rev}) {
		fatal "There are new revisions that were fetched ",
		      "and need to be merged (or acknowledged) ",
		      "before committing.\nlast rev: $r_last\n",
		      " current: $gs->{last_rev}";
	}
	$gs->set_tree($_) foreach @revs;
	print "Done committing ",scalar @revs," revisions to SVN\n";
	unlink $gs->{index};
}

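# split_merge_info_range("1234-1236") returns (1234, 1236); a single
# revision like "1234" yields (1234, 1234).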
sub split_merge_info_range {
	my ($range) = @_;
	if ($range =~ /(\d+)-(\d+)/) {
		return (int($1), int($2));
	} else {
		return (int($range), int($range));
	}
}

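# Collapse a comma-separated svn:mergeinfo revision list into a
# minimal sorted form, e.g. combine_ranges("1-5,3-7,9") => "1-7,9";
# adjacent ranges such as "1-3,4-6" also coalesce (into "1-6").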
sub combine_ranges {
	my ($in) = @_;

	my @fnums = ();
	my @arr = split(/,/, $in);
	for my $element (@arr) {
		my ($start, $end) = split_merge_info_range($element);
		push @fnums, $start;
	}

	my @sorted = @arr [ sort {
		$fnums[$a] <=> $fnums[$b]
	} 0..$#arr ];

	my @return = ();
	my $last = -1;
	my $first = -1;
	for my $element (@sorted) {
		my ($start, $end) = split_merge_info_range($element);

		if ($last == -1) {
			$first = $start;
			$last = $end;
			next;
		}
		if ($start <= $last+1) {
			if ($end > $last) {
				$last = $end;
			}
			next;
		}
		if ($first == $last) {
			push @return, "$first";
		} else {
			push @return, "$first-$last";
		}
		$first = $start;
		$last = $end;
	}

	if ($first != -1) {
		if ($first == $last) {
			push @return, "$first";
		} else {
			push @return, "$first-$last";
		}
	}

	return join(',', @return);
}

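# $minfo is whitespace-separated svn:mergeinfo entries of the form
# "/branches/foo:1-10,12"; each entry's revisions are merged into
# %$hash, keyed by branch path.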
sub merge_revs_into_hash {
	my ($hash, $minfo) = @_;
	my @lines = split(' ', $minfo);

	for my $line (@lines) {
		my ($branchpath, $revs) = split(/:/, $line);

		if (exists($hash->{$branchpath})) {
			# Merge the two revision sets
			my $combined = "$hash->{$branchpath},$revs";
			$hash->{$branchpath} = combine_ranges($combined);
		} else {
			# Just do range combining for consolidation
			$hash->{$branchpath} = combine_ranges($revs);
		}
	}
}

sub merge_merge_info {
	my ($mergeinfo_one, $mergeinfo_two) = @_;
	my %result_hash = ();

	merge_revs_into_hash(\%result_hash, $mergeinfo_one);
	merge_revs_into_hash(\%result_hash, $mergeinfo_two);

	my $result = '';
	# Sort below is for consistency's sake
	for my $branchname (sort keys(%result_hash)) {
		my $revlist = $result_hash{$branchname};
		$result .= "$branchname:$revlist\n"
	}
	return $result;
}

sub populate_merge_info {
	my ($d, $gs, $uuid, $linear_refs, $rewritten_parent) = @_;

	my %parentshash;
	read_commit_parents(\%parentshash, $d);
	my @parents = @{$parentshash{$d}};
	if ($#parents > 0) {
		# Merge commit
		my $all_parents_ok = 1;
		my $aggregate_mergeinfo = '';
		my $rooturl = $gs->repos_root;

		if (defined($rewritten_parent)) {
			# Replace first parent with newly-rewritten version
			shift @parents;
			unshift @parents, $rewritten_parent;
		}

		foreach my $parent (@parents) {
			my ($branchurl, $svnrev, $paruuid) =
				cmt_metadata($parent);

			unless (defined($svnrev)) {
				# Should have been caught by the preflight check
				fatal "merge commit $d has ancestor $parent, but that change "
				      ."does not have git-svn metadata!";
			}
			unless ($branchurl =~ /^\Q$rooturl\E(.*)/) {
				fatal "commit $parent git-svn metadata changed mid-run!";
			}
			my $branchpath = $1;

			my $ra = Git::SVN::Ra->new($branchurl);
			my (undef, undef, $props) =
				$ra->get_dir(canonicalize_path("."), $svnrev);
			my $par_mergeinfo = $props->{'svn:mergeinfo'};
			unless (defined $par_mergeinfo) {
				$par_mergeinfo = '';
			}
			# Merge previous mergeinfo values
			$aggregate_mergeinfo =
				merge_merge_info($aggregate_mergeinfo,
						 $par_mergeinfo, 0);

			next if $parent eq $parents[0]; # Skip first parent
			# Add new changes being placed in tree by merge
			my @cmd = (qw/rev-list --reverse/,
				   $parent, qw/--not/);
			foreach my $par (@parents) {
				unless ($par eq $parent) {
					push @cmd, $par;
				}
			}
			my @revsin = ();
			my ($revlist, $ctx) = command_output_pipe(@cmd);
			while (<$revlist>) {
				my $irev = $_;
				chomp $irev;
				my (undef, $csvnrev, undef) =
					cmt_metadata($irev);
				unless (defined $csvnrev) {
					# A child is missing SVN annotations...
					# this might be OK, or might not be.
					warn "W:child $irev is merged into revision "
					     ."$d but does not have git-svn metadata. "
					     ."This means git-svn cannot determine the "
					     ."svn revision numbers to place into the "
					     ."svn:mergeinfo property. You must ensure "
					     ."a branch is entirely committed to "
					     ."SVN before merging it in order for "
					     ."svn:mergeinfo population to function "
					     ."properly";
				}
				push @revsin, $csvnrev;
			}
			command_close_pipe($revlist, $ctx);

			last unless $all_parents_ok;

			# We now have a list of all SVN revnos which are
			# merged by this particular parent. Integrate them.
			next if $#revsin == -1;
			my $newmergeinfo = "$branchpath:" . join(',', @revsin);
			$aggregate_mergeinfo =
				merge_merge_info($aggregate_mergeinfo,
						 $newmergeinfo, 1);
		}
		if ($all_parents_ok and $aggregate_mergeinfo) {
			return $aggregate_mergeinfo;
		}
	}

	return undef;
}

sub cmd_dcommit {
	my $head = shift;
	command_noisy(qw/update-index --refresh/);
	git_cmd_try { command_oneline(qw/diff-index --quiet HEAD/) }
		'Cannot dcommit with a dirty index. Commit your changes first, '
		. "or stash them with `git stash'.\n";
	$head ||= 'HEAD';

	my $old_head;
	if ($head ne 'HEAD') {
		$old_head = eval {
			command_oneline([qw/symbolic-ref -q HEAD/])
		};
		if ($old_head) {
			$old_head =~ s{^refs/heads/}{};
		} else {
			$old_head = eval { command_oneline(qw/rev-parse HEAD/) };
		}
		command(['checkout', $head], STDERR => 0);
	}

	my @refs;
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD', \@refs);
	unless ($gs) {
		die "Unable to determine upstream SVN information from ",
		    "$head history.\nPerhaps the repository is empty.";
	}

	if (defined $_commit_url) {
		$url = $_commit_url;
	} else {
		$url = eval { command_oneline('config', '--get',
			      "svn-remote.$gs->{repo_id}.commiturl") };
		if (!$url) {
			$url = $gs->full_pushurl;
		}
	}

	my $last_rev = $_revision if defined $_revision;
	if ($url) {
		print "Committing to $url ...\n";
	}
	my ($linear_refs, $parents) = linearize_history($gs, \@refs);
	if ($_no_rebase && scalar(@$linear_refs) > 1) {
		warn "Attempting to commit more than one change while ",
		     "--no-rebase is enabled.\n",
		     "If these changes depend on each other, re-running ",
		     "without --no-rebase may be required.";
	}

	if (defined $_interactive){
		my $ask_default = "y";
		foreach my $d (@$linear_refs){
			my ($fh, $ctx) = command_output_pipe(qw(show --summary), "$d");
			while (<$fh>){
				print $_;
			}
			command_close_pipe($fh, $ctx);
			$_ = ask("Commit this patch to SVN? ([y]es (default)|[n]o|[q]uit|[a]ll): ",
			         valid_re => qr/^(?:yes|y|no|n|quit|q|all|a)/i,
			         default => $ask_default);
			die "Commit this patch reply required" unless defined $_;
			if (/^[nq]/i) {
				exit(0);
			} elsif (/^a/i) {
				last;
			}
		}
	}

	my $expect_url = $url;

	my $push_merge_info = eval {
		command_oneline(qw/config --get svn.pushmergeinfo/)
		};
	if (not defined($push_merge_info)
			or $push_merge_info eq "false"
			or $push_merge_info eq "no"
			or $push_merge_info eq "never") {
		$push_merge_info = 0;
	}

	unless (defined($_merge_info) || ! $push_merge_info) {
		# Preflight check of changes to ensure no issues with mergeinfo
		# This includes check for uncommitted-to-SVN parents
		# (other than the first parent, which we will handle),
		# information from different SVN repos, and paths
		# which are not underneath this repository root.
		my $rooturl = $gs->repos_root;
		foreach my $d (@$linear_refs) {
			my %parentshash;
			read_commit_parents(\%parentshash, $d);
			my @realparents = @{$parentshash{$d}};
			if ($#realparents > 0) {
				# Merge commit
				shift @realparents; # Remove/ignore first parent
				foreach my $parent (@realparents) {
					my ($branchurl, $svnrev, $paruuid) = cmt_metadata($parent);
					unless (defined $paruuid) {
						# A parent is missing SVN annotations...
						# abort the whole operation.
						fatal "$parent is merged into revision $d, "
						      ."but does not have git-svn metadata. "
						      ."Either dcommit the branch or use a "
						      ."local cherry-pick, FF merge, or rebase "
						      ."instead of an explicit merge commit.";
					}

					unless ($paruuid eq $uuid) {
						# Parent has SVN metadata from different repository
						fatal "merge parent $parent for change $d has "
						      ."git-svn uuid $paruuid, while current change "
						      ."has uuid $uuid!";
					}

					unless ($branchurl =~ /^\Q$rooturl\E(.*)/) {
						# This branch is very strange indeed.
						fatal "merge parent $parent for $d is on branch "
						      ."$branchurl, which is not under the "
						      ."git-svn root $rooturl!";
					}
				}
			}
		}
	}

	my $rewritten_parent;
	Git::SVN::remove_username($expect_url);
	if (defined($_merge_info)) {
		$_merge_info =~ tr{ }{\n};
	}
	while (1) {
		my $d = shift @$linear_refs or last;
		unless (defined $last_rev) {
			(undef, $last_rev, undef) = cmt_metadata("$d~1");
			unless (defined $last_rev) {
				fatal "Unable to extract revision information ",
				      "from commit $d~1";
			}
		}
		if ($_dry_run) {
			print "diff-tree $d~1 $d\n";
		} else {
			my $cmt_rev;

			unless (defined($_merge_info) || ! $push_merge_info) {
				$_merge_info = populate_merge_info($d, $gs,
				                                   $uuid,
				                                   $linear_refs,
				                                   $rewritten_parent);
			}

			my %ed_opts = ( r => $last_rev,
			                log => get_commit_entry($d)->{log},
			                ra => Git::SVN::Ra->new($url),
			                config => SVN::Core::config_get_config(
			                        $Git::SVN::Ra::config_dir
			                ),
			                tree_a => "$d~1",
			                tree_b => $d,
			                editor_cb => sub {
			                        print "Committed r$_[0]\n";
			                        $cmt_rev = $_[0];
			                },
			                mergeinfo => $_merge_info,
			                svn_path => '');
			if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) {
				print "No changes\n$d~1 == $d\n";
			} elsif ($parents->{$d} && @{$parents->{$d}}) {
				$gs->{inject_parents_dcommit}->{$cmt_rev} =
				                               $parents->{$d};
			}
			$_fetch_all ? $gs->fetch_all : $gs->fetch;
			$last_rev = $cmt_rev;
			next if $_no_rebase;

			# we always want to rebase against the current HEAD,
			# not any head that was passed to us
			my @diff = command('diff-tree', $d,
			                   $gs->refname, '--');
			my @finish;
			if (@diff) {
				@finish = rebase_cmd();
				print STDERR "W: $d and ", $gs->refname,
				             " differ, using @finish:\n",
				             join("\n", @diff), "\n";
			} else {
				print "No changes between current HEAD and ",
				      $gs->refname,
				      "\nResetting to the latest ",
				      $gs->refname, "\n";
				@finish = qw/reset --mixed/;
			}
			command_noisy(@finish, $gs->refname);

			$rewritten_parent = command_oneline(qw/rev-parse HEAD/);

			if (@diff) {
				@refs = ();
				my ($url_, $rev_, $uuid_, $gs_) =
				              working_head_info('HEAD', \@refs);
				my ($linear_refs_, $parents_) =
				              linearize_history($gs_, \@refs);
				if (scalar(@$linear_refs) !=
				    scalar(@$linear_refs_)) {
					fatal "# of revisions changed ",
					  "\nbefore:\n",
					  join("\n", @$linear_refs),
					  "\n\nafter:\n",
					  join("\n", @$linear_refs_), "\n",
					  'If you are attempting to commit ',
					  "merges, try running:\n\t",
					  'git rebase --interactive ',
					  '--preserve-merges ',
					  $gs->refname,
					  "\nBefore dcommitting";
				}
				if ($url_ ne $expect_url) {
					if ($url_ eq $gs->metadata_url) {
						print
						  "Accepting rewritten URL:",
						  " $url_\n";
					} else {
						fatal
						  "URL mismatch after rebase:",
						  " $url_ != $expect_url";
					}
				}
				if ($uuid_ ne $uuid) {
					fatal "uuid mismatch after rebase: ",
					      "$uuid_ != $uuid";
				}
				# remap parents
				my (%p, @l, $i);
				for ($i = 0; $i < scalar @$linear_refs; $i++) {
					my $new = $linear_refs_->[$i] or next;
					$p{$new} =
						$parents->{$linear_refs->[$i]};
					push @l, $new;
				}
				$parents = \%p;
				$linear_refs = \@l;
			}
		}
	}

	if ($old_head) {
		my $new_head = command_oneline(qw/rev-parse HEAD/);
		my $new_is_symbolic = eval {
			command_oneline(qw/symbolic-ref -q HEAD/);
		};
		if ($new_is_symbolic) {
			print "dcommitted the branch ", $head, "\n";
		} else {
			print "dcommitted on a detached HEAD because you gave ",
			      "a revision argument.\n",
			      "The rewritten commit is: ", $new_head, "\n";
		}
		command(['checkout', $old_head], STDERR => 0);
	}

	unlink $gs->{index};
}

sub cmd_branch {
	my ($branch_name, $head) = @_;

	unless (defined $branch_name && length $branch_name) {
		die(($_tag ? "tag" : "branch") . " name required\n");
	}
	$head ||= 'HEAD';

	my (undef, $rev, undef, $gs) = working_head_info($head);
	my $src = $gs->full_pushurl;

	my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
	my $allglobs = $remote->{ $_tag ? 'tags' : 'branches' };
	my $glob;
	if ($#{$allglobs} == 0) {
		$glob = $allglobs->[0];
	} else {
		unless(defined $_branch_dest) {
			die "Multiple ",
			    $_tag ? "tag" : "branch",
			    " paths defined for Subversion repository.\n",
			    "You must specify where you want to create the ",
			    $_tag ? "tag" : "branch",
			    " with the --destination argument.\n";
		}
		foreach my $g (@{$allglobs}) {
			# SVN::Git::Editor could probably be moved to Git.pm..
			my $re = SVN::Git::Editor::glob2pat($g->{path}->{left});
			if ($_branch_dest =~ /$re/) {
				$glob = $g;
				last;
			}
		}
		unless (defined $glob) {
			my $dest_re = qr/\b\Q$_branch_dest\E\b/;
			foreach my $g (@{$allglobs}) {
				$g->{path}->{left} =~ /$dest_re/ or next;
				if (defined $glob) {
					die "Ambiguous destination: ",
					    $_branch_dest, "\nmatches both '",
					    $glob->{path}->{left}, "' and '",
					    $g->{path}->{left}, "'\n";
				}
				$glob = $g;
			}
			unless (defined $glob) {
				die "Unknown ",
				    $_tag ? "tag" : "branch",
				    " destination $_branch_dest\n";
			}
		}
	}
	my ($lft, $rgt) = @{ $glob->{path} }{qw/left right/};
	my $url;
	if (defined $_commit_url) {
		$url = $_commit_url;
	} else {
		$url = eval { command_oneline('config', '--get',
			      "svn-remote.$gs->{repo_id}.commiturl") };
		if (!$url) {
			$url = $remote->{pushurl} || $remote->{url};
		}
	}
	my $dst = join '/', $url, $lft, $branch_name, ($rgt || ());

	if ($dst =~ /^https:/ && $src =~ /^http:/) {
		$src=~s/^http:/https:/;
	}

	::_req_svn();

	my $ctx = SVN::Client->new(
		auth => Git::SVN::Ra::_auth_providers(),
		log_msg => sub {
			${ $_[0] } = defined $_message
				? $_message
				: 'Create ' . ($_tag ? 'tag ' : 'branch ' )
				. $branch_name;
		},
	);

	eval {
		$ctx->ls($dst, 'HEAD', 0);
	} and die "branch ${branch_name} already exists\n";

	print "Copying ${src} at r${rev} to ${dst}...\n";
	$ctx->copy($src, $rev, $dst)
		unless $_dry_run;

	$gs->fetch_all;
}

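# Usage: "git svn find-rev r1234" prints the git commit for SVN
# r1234; "git svn find-rev <tree-ish>" prints the SVN revision that
# produced it (nothing is printed when no mapping exists).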
sub cmd_find_rev {
|
2008-03-11 08:00:45 +00:00
|
|
|
my $revision_or_hash = shift or die "SVN or git revision required ",
|
|
|
|
"as a command-line argument\n";
|
2007-04-27 18:57:53 +00:00
|
|
|
my $result;
|
|
|
|
if ($revision_or_hash =~ /^r\d+$/) {
|
2007-04-29 08:35:27 +00:00
|
|
|
my $head = shift;
|
|
|
|
$head ||= 'HEAD';
|
|
|
|
my @refs;
|
2008-07-14 15:28:04 +00:00
|
|
|
my (undef, undef, $uuid, $gs) = working_head_info($head, \@refs);
|
2007-04-29 08:35:27 +00:00
|
|
|
unless ($gs) {
|
|
|
|
die "Unable to determine upstream SVN information from ",
|
|
|
|
"$head history\n";
|
2007-04-27 18:57:53 +00:00
|
|
|
}
|
2007-04-29 08:35:27 +00:00
|
|
|
my $desired_revision = substr($revision_or_hash, 1);
|
2008-07-14 15:28:04 +00:00
|
|
|
$result = $gs->rev_map_get($desired_revision, $uuid);
|
2007-04-27 18:57:53 +00:00
|
|
|
} else {
|
|
|
|
my (undef, $rev, undef) = cmt_metadata($revision_or_hash);
|
|
|
|
$result = $rev;
|
|
|
|
}
|
|
|
|
print "$result\n" if $result;
|
|
|
|
}
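
# Illustrative usage, derived from the two branches above:
#   git svn find-rev r1234          # SVN revision -> git commit, via rev_map
#   git svn find-rev <commit-ish>   # git commit -> SVN revision, via the
#                                   # commit's git-svn-id metadata
# An "rNNN" argument may be followed by a head to search (default HEAD).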

sub auto_create_empty_directories {
	my ($gs) = @_;
	my $var = eval { command_oneline('config', '--get', '--bool',
					 "svn-remote.$gs->{repo_id}.automkdirs") };
	# By default, create empty directories by consulting the unhandled log,
	# but allow setting it to 'false' to skip it.
	return !($var && $var eq 'false');
}
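
# i.e. empty-directory creation can be turned off per-remote with:
#   git config svn-remote.<repo_id>.automkdirs false
# any other value (or no value at all) leaves it enabled.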

sub cmd_rebase {
	command_noisy(qw/update-index --refresh/);
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	unless ($gs) {
		die "Unable to determine upstream SVN information from ",
		    "working tree history\n";
	}
	if ($_dry_run) {
		print "Remote Branch: " . $gs->refname . "\n";
		print "SVN URL: " . $url . "\n";
		return;
	}
	if (command(qw/diff-index HEAD --/)) {
		print STDERR "Cannot rebase with uncommitted changes:\n";
		command_noisy('status');
		exit 1;
	}
	unless ($_local) {
		# rebase will checkout for us, so no need to do it explicitly
		$_no_checkout = 'true';
		$_fetch_all ? $gs->fetch_all : $gs->fetch;
	}
	command_noisy(rebase_cmd(), $gs->refname);
	if (auto_create_empty_directories($gs)) {
		$gs->mkemptydirs;
	}
}

sub cmd_show_ignore {
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	$gs ||= Git::SVN->new;
	my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum);
	$gs->prop_walk($gs->{path}, $r, sub {
		my ($gs, $path, $props) = @_;
		print STDOUT "\n# $path\n";
		my $s = $props->{'svn:ignore'} or return;
		$s =~ s/[\r\n]+/\n/g;
		$s =~ s/^\n+//;
		chomp $s;
		$s =~ s#^#$path#gm;
		print STDOUT "$s\n";
	});
}

sub cmd_show_externals {
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	$gs ||= Git::SVN->new;
	my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum);
	$gs->prop_walk($gs->{path}, $r, sub {
		my ($gs, $path, $props) = @_;
		print STDOUT "\n# $path\n";
		my $s = $props->{'svn:externals'} or return;
		$s =~ s/[\r\n]+/\n/g;
		chomp $s;
		$s =~ s#^#$path#gm;
		print STDOUT "$s\n";
	});
}

sub cmd_create_ignore {
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	$gs ||= Git::SVN->new;
	my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum);
	$gs->prop_walk($gs->{path}, $r, sub {
		my ($gs, $path, $props) = @_;
		# $path is of the form /path/to/dir/
		$path = '.' . $path;
		# SVN can have attributes on empty directories,
		# which git won't track
		mkpath([$path]) unless -d $path;
		my $ignore = $path . '.gitignore';
		my $s = $props->{'svn:ignore'} or return;
		open(GITIGNORE, '>', $ignore)
		  or fatal("Failed to open `$ignore' for writing: $!");
		$s =~ s/[\r\n]+/\n/g;
		$s =~ s/^\n+//;
		chomp $s;
		# Prefix all patterns so that the ignore doesn't apply
		# to sub-directories.
		$s =~ s#^#/#gm;
		print GITIGNORE "$s\n";
		close(GITIGNORE)
		  or fatal("Failed to close `$ignore': $!");
		command_noisy('add', '-f', $ignore);
	});
}
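
# e.g. an svn:ignore value of "*.o\ntmp" on the directory ./src/ becomes
# a ./src/.gitignore containing "/*.o" and "/tmp" (illustrative), so the
# patterns stay anchored to that directory instead of leaking into
# sub-directories.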

sub cmd_mkdirs {
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	$gs ||= Git::SVN->new;
	$gs->mkemptydirs($_revision);
}

sub canonicalize_path {
	my ($path) = @_;
	my $dot_slash_added = 0;
	if (substr($path, 0, 1) ne "/") {
		$path = "./" . $path;
		$dot_slash_added = 1;
	}
	# File::Spec->canonpath doesn't collapse x/../y into y (for a
	# good reason), so let's do this manually.
	$path =~ s#/+#/#g;
	$path =~ s#/\.(?:/|$)#/#g;
	$path =~ s#/[^/]+/\.\.##g;
	$path =~ s#/$##g;
	$path =~ s#^\./## if $dot_slash_added;
	$path =~ s#^/##;
	$path =~ s#^\.$##;
	return $path;
}
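
# A few illustrative cases, derived from the substitutions above:
#   canonicalize_path("a//b/./c/../d/") => "a/b/d"
#   canonicalize_path("/a/b/")          => "a/b"  (leading "/" is stripped)
#   canonicalize_path(".")              => ""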

sub canonicalize_url {
	my ($url) = @_;
	$url =~ s#^([^:]+://[^/]*/)(.*)$#$1 . canonicalize_path($2)#e;
	return $url;
}
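
# e.g. canonicalize_url("http://host//svn/./repo/") => "http://host/svn/repo"
# (illustrative); a URL with no path after the host is returned unchanged.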

# get_svnprops(PATH)
# ------------------
# Helper for cmd_propget and cmd_proplist below.
sub get_svnprops {
	my $path = shift;
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	$gs ||= Git::SVN->new;

	# prefix THE PATH by the sub-directory from which the user
	# invoked us.
	$path = $cmd_dir_prefix . $path;
	fatal("No such file or directory: $path") unless -e $path;
	my $is_dir = -d $path ? 1 : 0;
	$path = $gs->{path} . '/' . $path;

	# canonicalize the path (otherwise libsvn will abort or fail to
	# find the file)
	$path = canonicalize_path($path);

	my $r = (defined $_revision ? $_revision : $gs->ra->get_latest_revnum);
	my $props;
	if ($is_dir) {
		(undef, undef, $props) = $gs->ra->get_dir($path, $r);
	}
	else {
		(undef, $props) = $gs->ra->get_file($path, $r, undef);
	}
	return $props;
}

# cmd_propget (PROP, PATH)
# ------------------------
# Print the SVN property PROP for PATH.
sub cmd_propget {
	my ($prop, $path) = @_;
	$path = '.' if not defined $path;
	usage(1) if not defined $prop;
	my $props = get_svnprops($path);
	if (not defined $props->{$prop}) {
		fatal("`$path' does not have a `$prop' SVN property.");
	}
	print $props->{$prop} . "\n";
}

# cmd_proplist (PATH)
# -------------------
# Print the list of SVN properties for PATH.
sub cmd_proplist {
	my $path = shift;
	$path = '.' if not defined $path;
	my $props = get_svnprops($path);
	print "Properties on '$path':\n";
	foreach (sort keys %{$props}) {
		print "  $_\n";
	}
}

sub cmd_multi_init {
	my $url = shift;
	unless (defined $_trunk || @_branches || @_tags) {
		usage(1);
	}

	$_prefix = '' unless defined $_prefix;
	if (defined $url) {
		$url = canonicalize_url($url);
		init_subdir(@_);
	}
	do_git_init_db();
	if (defined $_trunk) {
		$_trunk =~ s#^/+##;
		my $trunk_ref = 'refs/remotes/' . $_prefix . 'trunk';
		# try both old-style and new-style lookups:
		my $gs_trunk = eval { Git::SVN->new($trunk_ref) };
		unless ($gs_trunk) {
			my ($trunk_url, $trunk_path) =
			                      complete_svn_url($url, $_trunk);
			$gs_trunk = Git::SVN->init($trunk_url, $trunk_path,
						   undef, $trunk_ref);
		}
	}
	return unless @_branches || @_tags;
	my $ra = $url ? Git::SVN::Ra->new($url) : undef;
	foreach my $path (@_branches) {
		complete_url_ls_init($ra, $path, '--branches/-b', $_prefix);
	}
	foreach my $path (@_tags) {
		complete_url_ls_init($ra, $path, '--tags/-t', $_prefix.'tags/');
	}
}

sub cmd_multi_fetch {
	$Git::SVN::no_reuse_existing = undef;
	my $remotes = Git::SVN::read_all_remotes();
	foreach my $repo_id (sort keys %$remotes) {
		if ($remotes->{$repo_id}->{url}) {
			Git::SVN::fetch_all($repo_id, $remotes);
		}
	}
}

# this command is special because it requires no metadata
sub cmd_commit_diff {
	my ($ta, $tb, $url) = @_;
	my $usage = "Usage: $0 commit-diff -r<revision> ".
		    "<tree-ish> <tree-ish> [<URL>]";
	fatal($usage) if (!defined $ta || !defined $tb);
	my $svn_path = '';
	if (!defined $url) {
		my $gs = eval { Git::SVN->new };
		if (!$gs) {
			fatal("Needed URL or usable git-svn --id in ",
			      "the command-line\n", $usage);
		}
		$url = $gs->{url};
		$svn_path = $gs->{path};
	}
	unless (defined $_revision) {
		fatal("-r|--revision is a required argument\n", $usage);
	}
	if (defined $_message && defined $_file) {
		fatal("Both --message/-m and --file/-F specified ",
		      "for the commit message.\n",
		      "I have no idea what you mean");
	}
	if (defined $_file) {
		$_message = file_to_s($_file);
	} else {
		$_message ||= get_commit_entry($tb)->{log};
	}
	my $ra ||= Git::SVN::Ra->new($url);
	my $r = $_revision;
	if ($r eq 'HEAD') {
		$r = $ra->get_latest_revnum;
	} elsif ($r !~ /^\d+$/) {
		die "revision argument: $r not understood by git-svn\n";
	}
	my %ed_opts = ( r => $r,
			log => $_message,
			ra => $ra,
			tree_a => $ta,
			tree_b => $tb,
			editor_cb => sub { print "Committed r$_[0]\n" },
			svn_path => $svn_path );
	if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) {
		print "No changes\n$ta == $tb\n";
	}
}

sub escape_uri_only {
	my ($uri) = @_;
	my @tmp;
	foreach (split m{/}, $uri) {
		s/([^~\w.%+-]|%(?![a-fA-F0-9]{2}))/sprintf("%%%02X",ord($1))/eg;
		push @tmp, $_;
	}
	join('/', @tmp);
}

sub escape_url {
	my ($url) = @_;
	if ($url =~ m#^([^:]+)://([^/]*)(.*)$#) {
		my ($scheme, $domain, $uri) = ($1, $2, escape_uri_only($3));
		$url = "$scheme://$domain$uri";
	}
	$url;
}
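
# e.g. escape_url("svn+ssh://host/dir with spaces/%2F") =>
#      "svn+ssh://host/dir%20with%20spaces/%2F" (illustrative).
# Only the path component is escaped, and existing %XX sequences are
# preserved rather than double-escaped.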

sub cmd_info {
	my $path = canonicalize_path(defined($_[0]) ? $_[0] : ".");
	my $fullpath = canonicalize_path($cmd_dir_prefix . $path);
	if (exists $_[1]) {
		die "Too many arguments specified\n";
	}

	my ($file_type, $diff_status) = find_file_type_and_diff_status($path);

	if (!$file_type && !$diff_status) {
		print STDERR "svn: '$path' is not under version control\n";
		exit 1;
	}

	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	unless ($gs) {
		die "Unable to determine upstream SVN information from ",
		    "working tree history\n";
	}

	# canonicalize_path() will return "" to make libsvn 1.5.x happy,
	$path = "." if $path eq "";

	my $full_url = $url . ($fullpath eq "" ? "" : "/$fullpath");

	if ($_url) {
		print escape_url($full_url), "\n";
		return;
	}

	my $result = "Path: $path\n";
	$result .= "Name: " . basename($path) . "\n" if $file_type ne "dir";
	$result .= "URL: " . escape_url($full_url) . "\n";

	eval {
		my $repos_root = $gs->repos_root;
		Git::SVN::remove_username($repos_root);
		$result .= "Repository Root: " . escape_url($repos_root) . "\n";
	};
	if ($@) {
		$result .= "Repository Root: (offline)\n";
	}
	::_req_svn();
	$result .= "Repository UUID: $uuid\n" unless $diff_status eq "A" &&
		($SVN::Core::VERSION le '1.5.4' || $file_type ne "dir");
	$result .= "Revision: " . ($diff_status eq "A" ? 0 : $rev) . "\n";

	$result .= "Node Kind: " .
		   ($file_type eq "dir" ? "directory" : "file") . "\n";

	my $schedule = $diff_status eq "A"
		       ? "add"
		       : ($diff_status eq "D" ? "delete" : "normal");
	$result .= "Schedule: $schedule\n";

	if ($diff_status eq "A") {
		print $result, "\n";
		return;
	}

	my ($lc_author, $lc_rev, $lc_date_utc);
	my @args = Git::SVN::Log::git_svn_log_cmd($rev, $rev, "--", $fullpath);
	my $log = command_output_pipe(@args);
	my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
	while (<$log>) {
		if (/^${esc_color}author (.+) <[^>]+> (\d+) ([\-\+]?\d+)$/o) {
			$lc_author = $1;
			$lc_date_utc = Git::SVN::Log::parse_git_date($2, $3);
		} elsif (/^${esc_color}    (git-svn-id:.+)$/o) {
			(undef, $lc_rev, undef) = ::extract_metadata($1);
		}
	}
	close $log;

	Git::SVN::Log::set_local_timezone();

	$result .= "Last Changed Author: $lc_author\n";
	$result .= "Last Changed Rev: $lc_rev\n";
	$result .= "Last Changed Date: " .
		   Git::SVN::Log::format_svn_date($lc_date_utc) . "\n";

	if ($file_type ne "dir") {
		my $text_last_updated_date =
		    ($diff_status eq "D" ? $lc_date_utc : (stat $path)[9]);
		$result .=
		    "Text Last Updated: " .
		    Git::SVN::Log::format_svn_date($text_last_updated_date) .
		    "\n";
		my $checksum;
		if ($diff_status eq "D") {
			my ($fh, $ctx) =
			    command_output_pipe(qw(cat-file blob), "HEAD:$path");
			if ($file_type eq "link") {
				my $file_name = <$fh>;
				$checksum = md5sum("link $file_name");
			} else {
				$checksum = md5sum($fh);
			}
			command_close_pipe($fh, $ctx);
		} elsif ($file_type eq "link") {
			my $file_name =
			    command(qw(cat-file blob), "HEAD:$path");
			$checksum =
			    md5sum("link " . $file_name);
		} else {
			open FILE, "<", $path or die $!;
			$checksum = md5sum(\*FILE);
			close FILE or die $!;
		}
		$result .= "Checksum: " . $checksum . "\n";
	}

	print $result, "\n";
}

sub cmd_reset {
	my $target = shift || $_revision or die "SVN revision required\n";
	$target = $1 if $target =~ /^r(\d+)$/;
	$target =~ /^\d+$/ or die "Numeric SVN revision expected\n";
	my ($url, $rev, $uuid, $gs) = working_head_info('HEAD');
	unless ($gs) {
		die "Unable to determine upstream SVN information from ".
		    "history\n";
	}
	my ($r, $c) = $gs->find_rev_before($target, not $_fetch_parent);
	die "Cannot find SVN revision $target\n" unless defined($c);
	$gs->rev_map_set($r, $c, 'reset', $uuid);
	print "r$r = $c ($gs->{ref_id})\n";
}

sub cmd_gc {
	if (!$can_compress) {
		warn "Compress::Zlib could not be found; unhandled.log " .
		     "files will not be compressed.\n";
	}
	find({ wanted => \&gc_directory, no_chdir => 1}, "$ENV{GIT_DIR}/svn");
}

########################### utility functions #########################

sub rebase_cmd {
	my @cmd = qw/rebase/;
	push @cmd, '-v' if $_verbose;
	push @cmd, qw/--merge/ if $_merge;
	push @cmd, "--strategy=$_strategy" if $_strategy;
	@cmd;
}

sub post_fetch_checkout {
	return if $_no_checkout;
	my $gs = $Git::SVN::_head or return;
	return if verify_ref('refs/heads/master^0');

	# look for "trunk" ref if it exists
	my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
	my $fetch = $remote->{fetch};
	if ($fetch) {
		foreach my $p (keys %$fetch) {
			basename($fetch->{$p}) eq 'trunk' or next;
			$gs = Git::SVN->new($fetch->{$p}, $gs->{repo_id}, $p);
			last;
		}
	}

	my $valid_head = verify_ref('HEAD^0');
	command_noisy(qw(update-ref refs/heads/master), $gs->refname);
	return if ($valid_head || !verify_ref('HEAD^0'));

	return if $ENV{GIT_DIR} !~ m#^(?:.*/)?\.git$#;
	my $index = $ENV{GIT_INDEX_FILE} || "$ENV{GIT_DIR}/index";
	return if -f $index;

	return if command_oneline(qw/rev-parse --is-inside-work-tree/) eq 'false';
	return if command_oneline(qw/rev-parse --is-inside-git-dir/) eq 'true';
	command_noisy(qw/read-tree -m -u -v HEAD HEAD/);
	print STDERR "Checked out HEAD:\n  ",
		     $gs->full_url, " r", $gs->last_rev, "\n";
	if (auto_create_empty_directories($gs)) {
		$gs->mkemptydirs($gs->last_rev);
	}
}

sub complete_svn_url {
	my ($url, $path) = @_;
	$path =~ s#/+$##;
	if ($path !~ m#^[a-z\+]+://#) {
		if (!defined $url || $url !~ m#^[a-z\+]+://#) {
			fatal("E: '$path' is not a complete URL ",
			      "and a separate URL is not specified");
		}
		return ($url, $path);
	}
	return ($path, '');
}

sub complete_url_ls_init {
	my ($ra, $repo_path, $switch, $pfx) = @_;
	unless ($repo_path) {
		print STDERR "W: $switch not specified\n";
		return;
	}
	$repo_path =~ s#/+$##;
	if ($repo_path =~ m#^[a-z\+]+://#) {
		$ra = Git::SVN::Ra->new($repo_path);
		$repo_path = '';
	} else {
		$repo_path =~ s#^/+##;
		unless ($ra) {
			fatal("E: '$repo_path' is not a complete URL ",
			      "and a separate URL is not specified");
		}
	}
	my $url = $ra->{url};
	my $gs = Git::SVN->init($url, undef, undef, undef, 1);
	my $k = "svn-remote.$gs->{repo_id}.url";
	my $orig_url = eval { command_oneline(qw/config --get/, $k) };
	if ($orig_url && ($orig_url ne $gs->{url})) {
		die "$k already set: $orig_url\n",
		    "wanted to set to: $gs->{url}\n";
	}
	command_oneline('config', $k, $gs->{url}) unless $orig_url;
	my $remote_path = "$gs->{path}/$repo_path";
	$remote_path =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
	$remote_path =~ s#/+#/#g;
	$remote_path =~ s#^/##g;
	$remote_path .= "/*" if $remote_path !~ /\*/;
	my ($n) = ($switch =~ /^--(\w+)/);
	if (length $pfx && $pfx !~ m#/$#) {
		die "--prefix='$pfx' must have a trailing slash '/'\n";
	}
	command_noisy('config',
		      '--add',
		      "svn-remote.$gs->{repo_id}.$n",
		      "$remote_path:refs/remotes/$pfx*" .
		      ('/*' x (($remote_path =~ tr/*/*/) - 1)) );
}
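
# e.g. `git svn init -t tags <url> --prefix=origin/` ends up adding a
# config entry along the lines of (illustrative):
#   svn-remote.svn.tags = tags/*:refs/remotes/origin/tags/*
# the extra '/*' copies keep the glob count equal on both sides of the
# refspec when the remote path contains more than one '*'.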

sub verify_ref {
	my ($ref) = @_;
	eval { command_oneline([ 'rev-parse', '--verify', $ref ],
			       { STDERR => 0 }); };
}

sub get_tree_from_treeish {
	my ($treeish) = @_;
	# $treeish can be a symbolic ref, too:
	my $type = command_oneline(qw/cat-file -t/, $treeish);
	my $expected;
	while ($type eq 'tag') {
		($treeish, $type) = command(qw/cat-file tag/, $treeish);
	}
	if ($type eq 'commit') {
		$expected = (grep /^tree /, command(qw/cat-file commit/,
		                                    $treeish))[0];
		($expected) = ($expected =~ /^tree ($sha1)$/o);
		die "Unable to get tree from $treeish\n" unless $expected;
	} elsif ($type eq 'tree') {
		$expected = $treeish;
	} else {
		die "$treeish is a $type, expected tree, tag or commit\n";
	}
	return $expected;
}

sub get_commit_entry {
	my ($treeish) = shift;
	my %log_entry = ( log => '', tree => get_tree_from_treeish($treeish) );
	my $commit_editmsg = "$ENV{GIT_DIR}/COMMIT_EDITMSG";
	my $commit_msg = "$ENV{GIT_DIR}/COMMIT_MSG";
	open my $log_fh, '>', $commit_editmsg or croak $!;

	my $type = command_oneline(qw/cat-file -t/, $treeish);
	if ($type eq 'commit' || $type eq 'tag') {
		my ($msg_fh, $ctx) = command_output_pipe('cat-file',
		                                         $type, $treeish);
		my $in_msg = 0;
		my $author;
		my $saw_from = 0;
		my $msgbuf = "";
		while (<$msg_fh>) {
			if (!$in_msg) {
				$in_msg = 1 if (/^\s*$/);
				$author = $1 if (/^author (.*>)/);
			} elsif (/^git-svn-id: /) {
				# skip this for now, we regenerate the
				# correct one on re-fetch anyways
				# TODO: set *:merge properties or like...
			} else {
				if (/^From:/ || /^Signed-off-by:/) {
					$saw_from = 1;
				}
				$msgbuf .= $_;
			}
		}
		$msgbuf =~ s/\s+$//s;
		if ($Git::SVN::_add_author_from && defined($author)
		    && !$saw_from) {
			$msgbuf .= "\n\nFrom: $author";
		}
		print $log_fh $msgbuf or croak $!;
		command_close_pipe($msg_fh, $ctx);
	}
	close $log_fh or croak $!;

	if ($_edit || ($type eq 'tree')) {
		chomp(my $editor = command_oneline(qw(var GIT_EDITOR)));
		system('sh', '-c', $editor.' "$@"', $editor, $commit_editmsg);
	}
	rename $commit_editmsg, $commit_msg or croak $!;
	{
		require Encode;
		# SVN requires messages to be UTF-8 when entering the repo
		local $/;
		open $log_fh, '<', $commit_msg or croak $!;
		binmode $log_fh;
		chomp($log_entry{log} = <$log_fh>);

		my $enc = Git::config('i18n.commitencoding') || 'UTF-8';
		my $msg = $log_entry{log};

		eval { $msg = Encode::decode($enc, $msg, 1) };
		if ($@) {
			die "Could not decode as $enc:\n", $msg,
			    "\nPerhaps you need to set i18n.commitencoding\n";
		}

		eval { $msg = Encode::encode('UTF-8', $msg, 1) };
		die "Could not encode as UTF-8:\n$msg\n" if $@;

		$log_entry{log} = $msg;

		close $log_fh or croak $!;
	}
	unlink $commit_msg;
	\%log_entry;
}

sub s_to_file {
	my ($str, $file, $mode) = @_;
	open my $fd,'>',$file or croak $!;
	print $fd $str,"\n" or croak $!;
	close $fd or croak $!;
	chmod ($mode &~ umask, $file) if (defined $mode);
}

sub file_to_s {
	my $file = shift;
	open my $fd,'<',$file or croak "$!: file: $file\n";
	local $/;
	my $ret = <$fd>;
	close $fd or croak $!;
	$ret =~ s/\s*$//s;
	return $ret;
}

# '<svn username> = real-name <email address>' mapping based on git-svnimport:
sub load_authors {
	open my $authors, '<', $_authors or die "Can't open $_authors $!\n";
	my $log = $cmd eq 'log';
	while (<$authors>) {
		chomp;
		next unless /^(.+?|\(no author\))\s*=\s*(.+?)\s*<(.+)>\s*$/;
		my ($user, $name, $email) = ($1, $2, $3);
		if ($log) {
			$Git::SVN::Log::rusers{"$name <$email>"} = $user;
		} else {
			$users{$user} = [$name, $email];
		}
	}
	close $authors or croak $!;
}
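
# An authors file consists of lines like (illustrative):
#   jrandom = J. Random Hacker <jrandom@example.com>
#   (no author) = no author <no-author@example.com>
# For the "log" command the mapping is reversed (name/email -> svn user);
# everywhere else it maps svn user -> [name, email].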

# convert GetOpt::Long specs for use by git-config
sub read_git_config {
	my $opts = shift;
	my @config_only;
	foreach my $o (keys %$opts) {
		# if we have mixedCase and a long option-only, then
		# it's a config-only variable that we don't need for
		# the command-line.
		push @config_only, $o if ($o =~ /[A-Z]/ && $o =~ /^[a-z]+$/i);
		my $v = $opts->{$o};
		my ($key) = ($o =~ /^([a-zA-Z\-]+)/);
		$key =~ s/-//g;
		my $arg = 'git config';
		$arg .= ' --int' if ($o =~ /[:=]i$/);
		$arg .= ' --bool' if ($o !~ /[:=][sfi]$/);
		if (ref $v eq 'ARRAY') {
			chomp(my @tmp = `$arg --get-all svn.$key`);
			@$v = @tmp if @tmp;
		} else {
			chomp(my $tmp = `$arg --get svn.$key`);
			if ($tmp && !($arg =~ / --bool/ && $tmp eq 'false')) {
				$$v = $tmp;
			}
		}
	}
	delete @$opts{@config_only} if @config_only;
}
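
# e.g. a GetOpt::Long spec such as 'authors-file=s' is also looked up in
# git-config as svn.authorsfile (dashes dropped); '=i' specs are read
# with --int, and specs taking no =s/=f/=i argument are treated as --bool
# (illustrative of the mapping above).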

sub extract_metadata {
	my $id = shift or return (undef, undef, undef);
	my ($url, $rev, $uuid) = ($id =~ /^\s*git-svn-id:\s+(.*)\@(\d+)
						\s([a-f\d\-]+)$/ix);
	if (!defined $rev || !$uuid || !$url) {
		# some of the original repositories I made had
		# identifiers like this:
		($rev, $uuid) = ($id =~/^\s*git-svn-id:\s(\d+)\@([a-f\d\-]+)/i);
	}
	return ($url, $rev, $uuid);
}
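
# A current-style metadata line looks like (illustrative values):
#   git-svn-id: http://svn.example.com/repo/trunk@1234 2b9b7241-...-uuid
# and yields ("http://svn.example.com/repo/trunk", 1234, "2b9b7241-...-uuid");
# the fallback handles the old "git-svn-id: 1234@uuid" form.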

sub cmt_metadata {
	return extract_metadata((grep(/^git-svn-id: /,
		command(qw/cat-file commit/, shift)))[-1]);
}

sub cmt_sha2rev_batch {
	my %s2r;
	my ($pid, $in, $out, $ctx) = command_bidi_pipe(qw/cat-file --batch/);
	my $list = shift;

	foreach my $sha (@{$list}) {
		my $first = 1;
		my $size = 0;
		print $out $sha, "\n";

		while (my $line = <$in>) {
			if ($first && $line =~ /^[[:xdigit:]]{40}\smissing$/) {
				last;
			} elsif ($first &&
				 $line =~ /^[[:xdigit:]]{40}\scommit\s(\d+)$/) {
				$first = 0;
				$size = $1;
				next;
			} elsif ($line =~ /^(git-svn-id: )/) {
				my (undef, $rev, undef) =
					extract_metadata($line);
				$s2r{$sha} = $rev;
			}

			$size -= length($line);
			last if ($size == 0);
		}
	}

	command_close_bidi_pipe($pid, $in, $out, $ctx);

	return \%s2r;
}
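
# Illustrative: given a list of commit SHA-1s, cmt_sha2rev_batch() returns
# a hashref like { "deadbeef..." => 1234, ... }, resolving all of them
# through a single long-running `cat-file --batch` process instead of one
# cat-file invocation per commit.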

sub working_head_info {
	my ($head, $refs) = @_;
	my @args = qw/rev-list --first-parent --pretty=medium/;
	my ($fh, $ctx) = command_output_pipe(@args, $head);
	my $hash;
	my %max;
	while (<$fh>) {
		if ( m{^commit ($::sha1)$} ) {
			unshift @$refs, $hash if $hash and $refs;
			$hash = $1;
			next;
		}
		next unless s{^\s*(git-svn-id:)}{$1};
		my ($url, $rev, $uuid) = extract_metadata($_);
		if (defined $url && defined $rev) {
			next if $max{$url} and $max{$url} < $rev;
			if (my $gs = Git::SVN->find_by_url($url)) {
				my $c = $gs->rev_map_get($rev, $uuid);
				if ($c && $c eq $hash) {
					close $fh; # break the pipe
					return ($url, $rev, $uuid, $gs);
				} else {
					$max{$url} ||= $gs->rev_map_max;
				}
			}
		}
	}
	command_close_pipe($fh, $ctx);
	(undef, undef, undef, undef);
}

sub read_commit_parents {
	my ($parents, $c) = @_;
	chomp(my $p = command_oneline(qw/rev-list --parents -1/, $c));
	$p =~ s/^($c)\s*// or die "rev-list --parents -1 $c failed!\n";
	@{$parents->{$c}} = split(/ /, $p);
}

sub linearize_history {
	my ($gs, $refs) = @_;
	my %parents;
	foreach my $c (@$refs) {
		read_commit_parents(\%parents, $c);
	}

	my @linear_refs;
	my %skip = ();
	my $last_svn_commit = $gs->last_commit;
	foreach my $c (reverse @$refs) {
		next if $c eq $last_svn_commit;
		last if $skip{$c};

		unshift @linear_refs, $c;
		$skip{$c} = 1;

		# we only want the first parent to diff against for linear
		# history, we save the rest to inject when we finalize the
		# svn commit
		my $fp_a = verify_ref("$c~1");
		my $fp_b = shift @{$parents{$c}} if $parents{$c};
		if (!$fp_a || !$fp_b) {
			die "Commit $c\n",
			    "has no parent commit, and therefore ",
			    "nothing to diff against.\n",
			    "You should be working from a repository ",
			    "originally created by git-svn\n";
		}
		if ($fp_a ne $fp_b) {
			die "$c~1 = $fp_a, however parsing commit $c ",
			    "revealed that:\n$c~1 = $fp_b\nBUG!\n";
		}

		foreach my $p (@{$parents{$c}}) {
			$skip{$p} = 1;
		}
	}
	(\@linear_refs, \%parents);
}

sub find_file_type_and_diff_status {
	my ($path) = @_;
	return ('dir', '') if $path eq '';

	my $diff_output =
		command_oneline(qw(diff --cached --name-status --), $path) || "";
	my $diff_status = (split(' ', $diff_output))[0] || "";

	my $ls_tree = command_oneline(qw(ls-tree HEAD), $path) || "";

	return (undef, undef) if !$diff_status && !$ls_tree;

	if ($diff_status eq "A") {
		return ("link", $diff_status) if -l $path;
		return ("dir", $diff_status) if -d $path;
		return ("file", $diff_status);
	}

	my $mode = (split(' ', $ls_tree))[0] || "";

	return ("link", $diff_status) if $mode eq "120000";
	return ("dir", $diff_status) if $mode eq "040000";
	return ("file", $diff_status);
}

sub md5sum {
	my $arg = shift;
	my $ref = ref $arg;
	my $md5 = Digest::MD5->new();
	if ($ref eq 'GLOB' || $ref eq 'IO::File' || $ref eq 'File::Temp') {
		$md5->addfile($arg) or croak $!;
	} elsif ($ref eq 'SCALAR') {
		$md5->add($$arg) or croak $!;
	} elsif (!$ref) {
		$md5->add($arg) or croak $!;
	} else {
		::fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'";
	}
	return $md5->hexdigest();
}
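
# md5sum() accepts a filehandle (GLOB, IO::File, or File::Temp), a scalar
# reference, or a plain string; e.g. md5sum("link $target") is how
# cmd_info above checksums symlinks, matching SVN's "link TARGET"
# representation of svn:special files.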

sub gc_directory {
	if ($can_compress && -f $_ && basename($_) eq "unhandled.log") {
		my $out_filename = $_ . ".gz";
		open my $in_fh, "<", $_ or die "Unable to open $_: $!\n";
		binmode $in_fh;
		my $gz = Compress::Zlib::gzopen($out_filename, "ab") or
				die "Unable to open $out_filename: $!\n";

		my $res;
		while ($res = sysread($in_fh, my $str, 1024)) {
			$gz->gzwrite($str) or
				die "Unable to write: ".$gz->gzerror()."!\n";
		}
		unlink $_ or die "unlink $File::Find::name: $!\n";
	} elsif (-f $_ && basename($_) eq "index") {
		unlink $_ or die "unlink $_: $!\n";
	}
}

package Git::SVN;
use strict;
use warnings;
use Fcntl qw/:DEFAULT :seek/;
use constant rev_map_fmt => 'NH40';
use vars qw/$default_repo_id $default_ref_id $_no_metadata $_follow_parent
            $_repack $_repack_flags $_use_svm_props $_head
            $_use_svnsync_props $no_reuse_existing $_minimize_url
            $_use_log_author $_add_author_from $_localtime/;
use Carp qw/croak/;
use File::Path qw/mkpath/;
use File::Copy qw/copy/;
use IPC::Open3;
use Time::Local;
use Memoize;  # core since 5.8.0, Jul 2002
use Memoize::Storable;
use POSIX qw(:signal_h);

my ($_gc_nr, $_gc_period);

# properties that we do not log:
my %SKIP_PROP;
BEGIN {
	%SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url
	                                svn:special svn:executable
	                                svn:entry:committed-rev
	                                svn:entry:last-author
	                                svn:entry:uuid
	                                svn:entry:committed-date/;

	# some options are read globally, but can be overridden locally
	# per [svn-remote "..."] section.  Command-line options will *NOT*
	# override options set in an [svn-remote "..."] section
	no strict 'refs';
	for my $option (qw/follow_parent no_metadata use_svm_props
			   use_svnsync_props/) {
		my $key = $option;
		$key =~ tr/_//d;
		my $prop = "-$option";
		*$option = sub {
			my ($self) = @_;
			return $self->{$prop} if exists $self->{$prop};
			my $k = "svn-remote.$self->{repo_id}.$key";
			eval { command_oneline(qw/config --get/, $k) };
			if ($@) {
				$self->{$prop} = ${"Git::SVN::_$option"};
			} else {
				my $v = command_oneline(qw/config --bool/,$k);
				$self->{$prop} = $v eq 'false' ? 0 : 1;
			}
			return $self->{$prop};
		}
	}
}

my (%LOCKFILES, %INDEX_FILES);
END {
	unlink keys %LOCKFILES if %LOCKFILES;
	unlink keys %INDEX_FILES if %INDEX_FILES;
}

sub resolve_local_globs {
	my ($url, $fetch, $glob_spec) = @_;
	return unless defined $glob_spec;
	my $ref = $glob_spec->{ref};
	my $path = $glob_spec->{path};
	foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
		next unless m#^$ref->{regex}$#;
		my $p = $1;
		my $pathname = desanitize_refname($path->full_path($p));
		my $refname = desanitize_refname($ref->full_path($p));
		if (my $existing = $fetch->{$pathname}) {
			if ($existing ne $refname) {
				die "Refspec conflict:\n",
				    "existing: $existing\n",
				    " globbed: $refname\n";
			}
			my $u = (::cmt_metadata("$refname"))[0];
			$u =~ s!^\Q$url\E(/|$)!! or die
				"$refname: '$url' not found in '$u'\n";
			if ($pathname ne $u) {
				warn "W: Refspec glob conflict ",
				     "(ref: $refname):\n",
				     "expected path: $pathname\n",
				     "    real path: $u\n",
				     "Continuing ahead with $u\n";
				next;
			}
		} else {
			$fetch->{$pathname} = $refname;
		}
	}
}

sub parse_revision_argument {
	my ($base, $head) = @_;
	if (!defined $::_revision || $::_revision eq 'BASE:HEAD') {
		return ($base, $head);
	}
	return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/);
	return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/);
	return ($head, $head) if ($::_revision eq 'HEAD');
	return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/);
	return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/);
	die "revision argument: $::_revision not understood by git-svn\n";
}
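
# Accepted -r/--revision forms and the ($base, $head) pair they produce:
#   (unset) or BASE:HEAD -> ($base, $head)
#   1234                 -> (1234, 1234)
#   100:200              -> (100, 200)
#   BASE:200             -> ($base, 200)
#   100:HEAD             -> (100, $head)
#   HEAD                 -> ($head, $head)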

sub fetch_all {
	my ($repo_id, $remotes) = @_;
	if (ref $repo_id) {
		my $gs = $repo_id;
		$repo_id = undef;
		$repo_id = $gs->{repo_id};
	}
	$remotes ||= read_all_remotes();
	my $remote = $remotes->{$repo_id} or
	             die "[svn-remote \"$repo_id\"] unknown\n";
	my $fetch = $remote->{fetch};
	my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n";
	my (@gs, @globs);
	my $ra = Git::SVN::Ra->new($url);
	my $uuid = $ra->get_uuid;
	my $head = $ra->get_latest_revnum;

	# ignore errors, $head revision may not even exist anymore
	eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
	warn "W: $@\n" if $@;

	my $base = defined $fetch ? $head : 0;

	# read the max revs for wildcard expansion (branches/*, tags/*)
	foreach my $t (qw/branches tags/) {
		defined $remote->{$t} or next;
		push @globs, @{$remote->{$t}};

		my $max_rev = eval { tmp_config(qw/--int --get/,
		                     "svn-remote.$repo_id.${t}-maxRev") };
		if (defined $max_rev && ($max_rev < $base)) {
			$base = $max_rev;
		} elsif (!defined $max_rev) {
			$base = 0;
		}
	}

	if ($fetch) {
		foreach my $p (sort keys %$fetch) {
			my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p);
			my $lr = $gs->rev_map_max;
			if (defined $lr) {
				$base = $lr if ($lr < $base);
			}
			push @gs, $gs;
		}
	}

	($base, $head) = parse_revision_argument($base, $head);
	$ra->gs_fetch_loop_common($base, $head, \@gs, \@globs);
}
|
|
|
|
|
2007-01-21 12:27:09 +00:00
|
|
|
sub read_all_remotes {
|
|
|
|
my $r = {};
|
2008-07-14 15:28:04 +00:00
|
|
|
my $use_svm_props = eval { command_oneline(qw/config --bool
|
|
|
|
svn.useSvmProps/) };
|
|
|
|
$use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
|
2009-10-23 06:39:04 +00:00
|
|
|
my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
|
2007-01-22 19:44:57 +00:00
|
|
|
foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
|
2009-08-12 03:14:27 +00:00
|
|
|
if (m!^(.+)\.fetch=$svn_refspec$!) {
|
|
|
|
my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
|
|
|
|
die("svn-remote.$remote: remote ref '$remote_ref' "
|
|
|
|
. "must start with 'refs/'\n")
|
|
|
|
unless $remote_ref =~ m{^refs/};
|
2010-08-03 23:21:25 +00:00
|
|
|
$local_ref = uri_decode($local_ref);
|
2007-07-14 19:40:32 +00:00
|
|
|
$r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
|
2008-07-14 15:28:04 +00:00
|
|
|
$r->{$remote}->{svm} = {} if $use_svm_props;
|
|
|
|
} elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
|
|
|
|
$r->{$1}->{svm} = {};
|
2007-01-21 12:27:09 +00:00
|
|
|
} elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
|
|
|
|
$r->{$1}->{url} = $2;
|
2011-04-08 14:57:54 +00:00
|
|
|
} elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
|
|
|
|
$r->{$1}->{pushurl} = $2;
|
2011-10-10 23:27:37 +00:00
|
|
|
} elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
|
|
|
|
$r->{$1}->{ignore_refs_regex} = $2;
|
2009-08-12 03:14:27 +00:00
|
|
|
} elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
|
|
|
|
my ($remote, $t, $local_ref, $remote_ref) =
|
|
|
|
($1, $2, $3, $4);
|
|
|
|
die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
|
|
|
|
. "must start with 'refs/'\n")
|
|
|
|
unless $remote_ref =~ m{^refs/};
|
2010-08-03 23:21:25 +00:00
|
|
|
$local_ref = uri_decode($local_ref);
|
2009-06-23 17:02:08 +00:00
|
|
|
my $rs = {
|
2009-08-12 03:14:27 +00:00
|
|
|
t => $t,
|
|
|
|
remote => $remote,
|
2010-01-23 08:30:01 +00:00
|
|
|
path => Git::SVN::GlobSpec->new($local_ref, 1),
|
|
|
|
ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
|
2007-02-03 21:29:17 +00:00
|
|
|
if (length($rs->{ref}->{right}) != 0) {
|
|
|
|
die "The '*' glob character must be the last ",
|
2009-08-12 03:14:27 +00:00
|
|
|
"character of '$remote_ref'\n";
|
2007-02-03 21:29:17 +00:00
|
|
|
}
|
2009-08-12 03:14:27 +00:00
|
|
|
push @{ $r->{$remote}->{$t} }, $rs;
|
2007-01-21 12:27:09 +00:00
|
|
|
}
|
|
|
|
}
|
2008-07-14 15:28:04 +00:00
|
|
|
|
|
|
|
map {
|
|
|
|
if (defined $r->{$_}->{svm}) {
|
|
|
|
my $svm;
|
|
|
|
eval {
|
|
|
|
my $section = "svn-remote.$_";
|
|
|
|
$svm = {
|
|
|
|
source => tmp_config('--get',
|
|
|
|
"$section.svm-source"),
|
|
|
|
replace => tmp_config('--get',
|
|
|
|
"$section.svm-replace"),
|
|
|
|
}
|
|
|
|
};
|
|
|
|
$r->{$_}->{svm} = $svm;
|
|
|
|
}
|
|
|
|
} keys %$r;
|
|
|
|
|
2011-10-10 23:27:37 +00:00
|
|
|
foreach my $remote (keys %$r) {
|
|
|
|
foreach ( grep { defined $_ }
|
|
|
|
map { $r->{$remote}->{$_} } qw(branches tags) ) {
|
|
|
|
foreach my $rs ( @$_ ) {
|
|
|
|
$rs->{ignore_refs_regex} =
|
|
|
|
$r->{$remote}->{ignore_refs_regex};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-01-21 12:27:09 +00:00
|
|
|
$r;
|
|
|
|
}
|
|
|
|
|
2007-01-31 20:28:10 +00:00
|
|
|
sub init_vars {
|
2008-02-03 16:56:18 +00:00
|
|
|
$_gc_nr = $_gc_period = 1000;
|
2008-02-03 16:56:12 +00:00
|
|
|
if (defined $_repack || defined $_repack_flags) {
|
|
|
|
warn "Repack options are obsolete; they have no effect.\n";
|
|
|
|
}
|
2007-01-31 20:28:10 +00:00
|
|
|
}

sub verify_remotes_sanity {
	return unless -d $ENV{GIT_DIR};
	my %seen;
	foreach (command(qw/config -l/)) {
		if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) {
			if ($seen{$1}) {
				die "Remote ref refs/remotes/$1 is tracked by",
				    "\n  \"$_\"\nand\n  \"$seen{$1}\"\n",
				    "Please resolve this ambiguity in ",
				    "your git configuration file before ",
				    "continuing\n";
			}
			$seen{$1} = $_;
		}
	}
}
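
# Example of the ambiguity verify_remotes_sanity() rejects (remote
# names and paths are illustrative): two fetch specs that map
# different SVN paths onto the same refs/remotes/ name,
#
#   svn-remote.a.fetch=trunk:refs/remotes/trunk
#   svn-remote.b.fetch=project/trunk:refs/remotes/trunk
#
# would both be "tracked by" refs/remotes/trunk, and the second one
# encountered triggers the die() above.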

sub find_existing_remote {
	my ($url, $remotes) = @_;
	return undef if $no_reuse_existing;
	my $existing;
	foreach my $repo_id (keys %$remotes) {
		my $u = $remotes->{$repo_id}->{url} or next;
		next if $u ne $url;
		$existing = $repo_id;
		last;
	}
	$existing;
}

sub init_remote_config {
	my ($self, $url, $no_write) = @_;
	$url =~ s!/+$!!; # strip trailing slash
	my $r = read_all_remotes();
	my $existing = find_existing_remote($url, $r);
	if ($existing) {
		unless ($no_write) {
			print STDERR "Using existing ",
				     "[svn-remote \"$existing\"]\n";
		}
		$self->{repo_id} = $existing;
	} elsif ($_minimize_url) {
		my $min_url = Git::SVN::Ra->new($url)->minimize_url;
		$existing = find_existing_remote($min_url, $r);
		if ($existing) {
			unless ($no_write) {
				print STDERR "Using existing ",
					     "[svn-remote \"$existing\"]\n";
			}
			$self->{repo_id} = $existing;
		}
		if ($min_url ne $url) {
			unless ($no_write) {
				print STDERR "Using higher level of URL: ",
					     "$url => $min_url\n";
			}
			my $old_path = $self->{path};
			$self->{path} = $url;
			$self->{path} =~ s!^\Q$min_url\E(/|$)!!;
			if (length $old_path) {
				$self->{path} .= "/$old_path";
			}
			$url = $min_url;
		}
	}
	my $orig_url;
	if (!$existing) {
		# verify that we aren't overwriting anything:
		$orig_url = eval {
			command_oneline('config', '--get',
					"svn-remote.$self->{repo_id}.url")
		};
		if ($orig_url && ($orig_url ne $url)) {
			die "svn-remote.$self->{repo_id}.url already set: ",
			    "$orig_url\nwanted to set to: $url\n";
		}
	}
	my ($xrepo_id, $xpath) = find_ref($self->refname);
	if (!$no_write && defined $xpath) {
		die "svn-remote.$xrepo_id.fetch already set to track ",
		    "$xpath:", $self->refname, "\n";
	}
	unless ($no_write) {
		command_noisy('config',
			      "svn-remote.$self->{repo_id}.url", $url);
		$self->{path} =~ s{^/}{};
		$self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
		command_noisy('config', '--add',
			      "svn-remote.$self->{repo_id}.fetch",
			      "$self->{path}:".$self->refname);
	}
	$self->{url} = $url;
}

sub find_by_url { # repos_root and path are optional
	my ($class, $full_url, $repos_root, $path) = @_;

	return undef unless defined $full_url;
	remove_username($full_url);
	remove_username($repos_root) if defined $repos_root;
	my $remotes = read_all_remotes();
	if (defined $full_url && defined $repos_root && !defined $path) {
		$path = $full_url;
		$path =~ s#^\Q$repos_root\E(?:/|$)##;
	}
	foreach my $repo_id (keys %$remotes) {
		my $u = $remotes->{$repo_id}->{url} or next;
		remove_username($u);
		next if defined $repos_root && $repos_root ne $u;

		my $fetch = $remotes->{$repo_id}->{fetch} || {};
		foreach my $t (qw/branches tags/) {
			foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
				resolve_local_globs($u, $fetch, $globspec);
			}
		}
		my $p = $path;
		my $rwr = rewrite_root({repo_id => $repo_id});
		my $svm = $remotes->{$repo_id}->{svm}
			if defined $remotes->{$repo_id}->{svm};
		unless (defined $p) {
			$p = $full_url;
			my $z = $u;
			my $prefix = '';
			if ($rwr) {
				$z = $rwr;
				remove_username($z);
			} elsif (defined $svm) {
				$z = $svm->{source};
				$prefix = $svm->{replace};
				$prefix =~ s#^\Q$u\E(?:/|$)##;
				$prefix =~ s#/$##;
			}
			$p =~ s#^\Q$z\E(?:/|$)#$prefix# or next;
		}
		foreach my $f (keys %$fetch) {
			next if $f ne $p;
			return Git::SVN->new($fetch->{$f}, $repo_id, $f);
		}
	}
	undef;
}

sub init {
	my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_;
	my $self = _new($class, $repo_id, $ref_id, $path);
	if (defined $url) {
		$self->init_remote_config($url, $no_write);
	}
	$self;
}

sub find_ref {
	my ($ref_id) = @_;
	foreach (command(qw/config -l/)) {
		next unless m!^svn-remote\.(.+)\.fetch=
		              \s*(.*?)\s*:\s*(.+?)\s*$!x;
		my ($repo_id, $path, $ref) = ($1, $2, $3);
		if ($ref eq $ref_id) {
			$path = '' if ($path =~ m#^\./?#);
			return ($repo_id, $path);
		}
	}
	(undef, undef, undef);
}

sub new {
	my ($class, $ref_id, $repo_id, $path) = @_;
	if (defined $ref_id && !defined $repo_id && !defined $path) {
		($repo_id, $path) = find_ref($ref_id);
		if (!defined $repo_id) {
			die "Could not find a \"svn-remote.*.fetch\" key ",
			    "in the repository configuration matching: ",
			    "$ref_id\n";
		}
	}
	my $self = _new($class, $repo_id, $ref_id, $path);
	if (!defined $self->{path} || !length $self->{path}) {
		my $fetch = command_oneline('config', '--get',
		                            "svn-remote.$repo_id.fetch",
		                            ":$ref_id\$") or
		     die "Failed to read \"svn-remote.$repo_id.fetch\" ",
		         "\":$ref_id\$\" in config\n";
		($self->{path}, undef) = split(/\s*:\s*/, $fetch);
	}
	$self->{path} =~ s{/+}{/}g;
	$self->{path} =~ s{\A/}{};
	$self->{path} =~ s{/\z}{};
	$self->{url} = command_oneline('config', '--get',
	                               "svn-remote.$repo_id.url") or
	      die "Failed to read \"svn-remote.$repo_id.url\" in config\n";
	$self->{pushurl} = eval { command_oneline('config', '--get',
	                          "svn-remote.$repo_id.pushurl") };
	$self->rebuild;
	$self;
}

sub refname {
	my ($refname) = $_[0]->{ref_id};

	# It cannot end with a slash /, we'll throw up on this because
	# SVN can't have directories with a slash in their name, either:
	if ($refname =~ m{/$}) {
		die "ref: '$refname' ends with a trailing slash, this is ",
		    "not permitted by git nor Subversion\n";
	}

	# It cannot have ASCII control characters, space, tilde ~, caret ^,
	# colon :, question-mark ?, asterisk *, or open bracket [
	# anywhere.
	#
	# Additionally, % must be escaped because it is used for escaping
	# and we want our escaped refname to be reversible
	$refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg;

	# no slash-separated component can begin with a dot .
	# /.* becomes /%2E*
	$refname =~ s{/\.}{/%2E}g;

	# It cannot have two consecutive dots .. anywhere
	# .. becomes %2E%2E
	$refname =~ s{\.\.}{%2E%2E}g;

	# trailing dots and .lock are not allowed
	# .$ becomes %2E and .lock becomes %2Elock
	$refname =~ s{\.(?=$|lock$)}{%2E};

	# the sequence @{ is used to access the reflog
	# @{ becomes %40{
	$refname =~ s{\@\{}{%40\{}g;

	return $refname;
}

sub desanitize_refname {
	my ($refname) = @_;
	$refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg;
	return $refname;
}
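
# A worked example of the escaping above (the branch name is
# illustrative): an SVN branch named "v1.0 tag.lock" is sanitized as
#
#   "v1.0 tag.lock" -> "v1.0%20tag%2Elock"
#
# The space becomes %20, the dot before the forbidden ".lock" suffix
# becomes %2E, and the dot inside "v1.0" is left alone since git
# allows it there.  desanitize_refname() reverses the %XX escapes.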

sub svm_uuid {
	my ($self) = @_;
	return $self->{svm}->{uuid} if $self->svm;
	$self->ra;
	unless ($self->{svm}) {
		die "SVM UUID not cached, and reading remotely failed\n";
	}
	$self->{svm}->{uuid};
}

sub svm {
	my ($self) = @_;
	return $self->{svm} if $self->{svm};
	my $svm;
	# see if we have it in our config, first:
	eval {
		my $section = "svn-remote.$self->{repo_id}";
		$svm = {
			source => tmp_config('--get', "$section.svm-source"),
			uuid => tmp_config('--get', "$section.svm-uuid"),
			replace => tmp_config('--get', "$section.svm-replace"),
		}
	};
	if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) {
		$self->{svm} = $svm;
	}
	$self->{svm};
}

sub _set_svm_vars {
	my ($self, $ra) = @_;
	return $ra if $self->svm;

	my @err = ( "useSvmProps set, but failed to read SVM properties\n",
		    "(svm:source, svm:uuid) ",
		    "from the following URLs:\n" );
	sub read_svm_props {
		my ($self, $ra, $path, $r) = @_;
		my $props = ($ra->get_dir($path, $r))[2];
		my $src = $props->{'svm:source'};
		my $uuid = $props->{'svm:uuid'};
		return undef if (!$src || !$uuid);

		chomp($src, $uuid);

		$uuid =~ m{^[0-9a-f\-]{30,}$}i
		    or die "doesn't look right - svm:uuid is '$uuid'\n";

		# the '!' is used to mark the repos_root!/relative/path
		$src =~ s{/?!/?}{/};
		$src =~ s{/+$}{}; # no trailing slashes please
		# username is of no interest
		$src =~ s{(^[a-z\+]*://)[^/@]*@}{$1};

		my $replace = $ra->{url};
		$replace .= "/$path" if length $path;

		my $section = "svn-remote.$self->{repo_id}";
		tmp_config("$section.svm-source", $src);
		tmp_config("$section.svm-replace", $replace);
		tmp_config("$section.svm-uuid", $uuid);
		$self->{svm} = {
			source => $src,
			uuid => $uuid,
			replace => $replace
		};
	}

	my $r = $ra->get_latest_revnum;
	my $path = $self->{path};
	my %tried;
	while (length $path) {
		unless ($tried{"$self->{url}/$path"}) {
			return $ra if $self->read_svm_props($ra, $path, $r);
			$tried{"$self->{url}/$path"} = 1;
		}
		$path =~ s#/?[^/]+$##;
	}
	die "Path: '$path' should be ''\n" if $path ne '';
	return $ra if $self->read_svm_props($ra, $path, $r);
	$tried{"$self->{url}/$path"} = 1;

	if ($ra->{repos_root} eq $self->{url}) {
		die @err, (map { "  $_\n" } keys %tried), "\n";
	}

	# nope, make sure we're connected to the repository root:
	my $ok;
	my @tried_b;
	$path = $ra->{svn_path};
	$ra = Git::SVN::Ra->new($ra->{repos_root});
	while (length $path) {
		unless ($tried{"$ra->{url}/$path"}) {
			$ok = $self->read_svm_props($ra, $path, $r);
			last if $ok;
			$tried{"$ra->{url}/$path"} = 1;
		}
		$path =~ s#/?[^/]+$##;
	}
	die "Path: '$path' should be ''\n" if $path ne '';
	$ok ||= $self->read_svm_props($ra, $path, $r);
	$tried{"$ra->{url}/$path"} = 1;
	if (!$ok) {
		die @err, (map { "  $_\n" } keys %tried), "\n";
	}
	Git::SVN::Ra->new($self->{url});
}

sub svnsync {
	my ($self) = @_;
	return $self->{svnsync} if $self->{svnsync};

	if ($self->no_metadata) {
		die "Can't have both 'noMetadata' and ",
		    "'useSvnsyncProps' options set!\n";
	}
	if ($self->rewrite_root) {
		die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
		    "options set!\n";
	}
	if ($self->rewrite_uuid) {
		die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
		    "options set!\n";
	}

	my $svnsync;
	# see if we have it in our config, first:
	eval {
		my $section = "svn-remote.$self->{repo_id}";

		my $url = tmp_config('--get', "$section.svnsync-url");
		($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
		   die "doesn't look right - svn:sync-from-url is '$url'\n";

		my $uuid = tmp_config('--get', "$section.svnsync-uuid");
		($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
		   die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";

		$svnsync = { url => $url, uuid => $uuid }
	};
	if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) {
		return $self->{svnsync} = $svnsync;
	}

	my $err = "useSvnsyncProps set, but failed to read " .
	          "svnsync property: svn:sync-from-";
	my $rp = $self->ra->rev_proplist(0);

	my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n";
	($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
	   die "doesn't look right - svn:sync-from-url is '$url'\n";

	my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
	($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
	   die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";

	my $section = "svn-remote.$self->{repo_id}";
	tmp_config('--add', "$section.svnsync-uuid", $uuid);
	tmp_config('--add', "$section.svnsync-url", $url);
	return $self->{svnsync} = { url => $url, uuid => $uuid };
}
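
# The revision-0 properties read above are the ones svnsync(1) sets on
# a mirror; assuming a stock svn client, they can be inspected with:
#
#   svn propget --revprop -r0 svn:sync-from-url <mirror-url>
#   svn propget --revprop -r0 svn:sync-from-uuid <mirror-url>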

# this allows us to memoize our SVN::Ra UUID locally and avoid a
# remote lookup (useful for 'git svn log').
sub ra_uuid {
	my ($self) = @_;
	unless ($self->{ra_uuid}) {
		my $key = "svn-remote.$self->{repo_id}.uuid";
		my $uuid = eval { tmp_config('--get', $key) };
		if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
			$self->{ra_uuid} = $uuid;
		} else {
			die "ra_uuid called without URL\n" unless $self->{url};
			$self->{ra_uuid} = $self->ra->get_uuid;
			tmp_config('--add', $key, $self->{ra_uuid});
		}
	}
	$self->{ra_uuid};
}

sub _set_repos_root {
	my ($self, $repos_root) = @_;
	my $k = "svn-remote.$self->{repo_id}.reposRoot";
	$repos_root ||= $self->ra->{repos_root};
	tmp_config($k, $repos_root);
	$repos_root;
}

sub repos_root {
	my ($self) = @_;
	my $k = "svn-remote.$self->{repo_id}.reposRoot";
	eval { tmp_config('--get', $k) } || $self->_set_repos_root;
}

sub ra {
	my ($self) = shift;
	my $ra = Git::SVN::Ra->new($self->{url});
	$self->_set_repos_root($ra->{repos_root});
	if ($self->use_svm_props && !$self->{svm}) {
		if ($self->no_metadata) {
			die "Can't have both 'noMetadata' and ",
			    "'useSvmProps' options set!\n";
		} elsif ($self->use_svnsync_props) {
			die "Can't have both 'useSvnsyncProps' and ",
			    "'useSvmProps' options set!\n";
		}
		$ra = $self->_set_svm_vars($ra);
		$self->{-want_revprops} = 1;
	}
	$ra;
}

# prop_walk(PATH, REV, SUB)
# -------------------------
# Recursively traverse PATH at revision REV and invoke SUB for each
# directory that contains an SVN property.  SUB will be invoked as
# follows: &SUB(gs, path, props); where `gs' is this instance of
# Git::SVN, `path' the path to the directory where the properties
# `props' were found.  The `path' will be relative to the point of
# checkout, that is, if url://repo/trunk is the current Git branch, and
# that directory contains a sub-directory `d', SUB will be invoked with
# `/d/' as `path' (note the trailing `/').
sub prop_walk {
	my ($self, $path, $rev, $sub) = @_;

	$path =~ s#^/##;
	my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev);
	$path =~ s#^/*#/#g;
	my $p = $path;
	# Strip the irrelevant part of the path.
	$p =~ s#^/+\Q$self->{path}\E(/|$)#/#;
	# Ensure the path is terminated by a `/'.
	$p =~ s#/*$#/#;

	# The properties contain all the internal SVN stuff nobody
	# (usually) cares about.
	my $interesting_props = 0;
	foreach (keys %{$props}) {
		# If it doesn't start with `svn:', it must be a
		# user-defined property.
		++$interesting_props and next if $_ !~ /^svn:/;
		# FIXME: Fragile, if SVN adds new public properties,
		# this needs to be updated.
		++$interesting_props if /^svn:(?:ignore|keywords|executable
						 |eol-style|mime-type
						 |externals|needs-lock)$/x;
	}
	&$sub($self, $p, $props) if $interesting_props;

	foreach (sort keys %$dirent) {
		next if $dirent->{$_}->{kind} != $SVN::Node::dir;
		$self->prop_walk($self->{path} . $p . $_, $rev, $sub);
	}
}
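
# Illustrative use of prop_walk() (the $gs and $rev variables are
# assumed, not defined here): print every user-visible property found
# under the tracked path at revision $rev:
#
#   $gs->prop_walk($gs->{path}, $rev, sub {
#           my ($gs, $path, $props) = @_;
#           print "$path: $_ = $props->{$_}\n" for sort keys %$props;
#   });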

sub last_rev { ($_[0]->last_rev_commit)[0] }
sub last_commit { ($_[0]->last_rev_commit)[1] }

# returns the newest SVN revision number and newest commit SHA1
sub last_rev_commit {
	my ($self) = @_;
	if (defined $self->{last_rev} && defined $self->{last_commit}) {
		return ($self->{last_rev}, $self->{last_commit});
	}
	my $c = ::verify_ref($self->refname.'^0');
	if ($c && !$self->use_svm_props && !$self->no_metadata) {
		my $rev = (::cmt_metadata($c))[1];
		if (defined $rev) {
			($self->{last_rev}, $self->{last_commit}) = ($rev, $c);
			return ($rev, $c);
		}
	}
	my $map_path = $self->map_path;
	unless (-e $map_path) {
		($self->{last_rev}, $self->{last_commit}) = (undef, undef);
		return (undef, undef);
	}
	my ($rev, $commit) = $self->rev_map_max(1);
	($self->{last_rev}, $self->{last_commit}) = ($rev, $commit);
	return ($rev, $commit);
}

sub get_fetch_range {
	my ($self, $min, $max) = @_;
	$max ||= $self->ra->get_latest_revnum;
	$min ||= $self->rev_map_max;
	(++$min, $max);
}

sub tmp_config {
	my (@args) = @_;
	my $old_def_config = "$ENV{GIT_DIR}/svn/config";
	my $config = "$ENV{GIT_DIR}/svn/.metadata";
	if (! -f $config && -f $old_def_config) {
		rename $old_def_config, $config or
		   die "Failed rename $old_def_config => $config: $!\n";
	}
	my $old_config = $ENV{GIT_CONFIG};
	$ENV{GIT_CONFIG} = $config;
	$@ = undef;
	my @ret = eval {
		unless (-f $config) {
			mkfile($config);
			open my $fh, '>', $config or
			    die "Can't open $config: $!\n";
			print $fh "; This file is used internally by ",
			          "git-svn\n" or die
			          "Couldn't write to $config: $!\n";
			print $fh "; You should not have to edit it\n" or
			      die "Couldn't write to $config: $!\n";
			close $fh or die "Couldn't close $config: $!\n";
		}
		command('config', @args);
	};
	my $err = $@;
	if (defined $old_config) {
		$ENV{GIT_CONFIG} = $old_config;
	} else {
		delete $ENV{GIT_CONFIG};
	}
	die $err if $err;
	wantarray ? @ret : $ret[0];
}
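
# tmp_config() is a thin wrapper around "git config" pointed at
# $GIT_DIR/svn/.metadata instead of the regular config file; e.g.
# (this key is one git-svn actually uses, see repos_root below):
#
#   my $root = tmp_config('--get', "svn-remote.$repo_id.reposRoot");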

sub tmp_index_do {
	my ($self, $sub) = @_;
	my $old_index = $ENV{GIT_INDEX_FILE};
	$ENV{GIT_INDEX_FILE} = $self->{index};
	$@ = undef;
	my @ret = eval {
		my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#);
		mkpath([$dir]) unless -d $dir;
		&$sub;
	};
	my $err = $@;
	if (defined $old_index) {
		$ENV{GIT_INDEX_FILE} = $old_index;
	} else {
		delete $ENV{GIT_INDEX_FILE};
	}
	die $err if $err;
	wantarray ? @ret : $ret[0];
}

sub assert_index_clean {
	my ($self, $treeish) = @_;

	$self->tmp_index_do(sub {
		command_noisy('read-tree', $treeish) unless -e $self->{index};
		my $x = command_oneline('write-tree');
		my ($y) = (command(qw/cat-file commit/, $treeish) =~
		           /^tree ($::sha1)/mo);
		return if $y eq $x;

		warn "Index mismatch: $y != $x\nrereading $treeish\n";
		unlink $self->{index} or die "unlink $self->{index}: $!\n";
		command_noisy('read-tree', $treeish);
		$x = command_oneline('write-tree');
		if ($y ne $x) {
			::fatal "trees ($treeish) $y != $x\n",
			        "Something is seriously wrong...";
		}
	});
}

sub get_commit_parents {
	my ($self, $log_entry) = @_;
	my (%seen, @ret, @tmp);
	# legacy support for 'set-tree'; this is only used by set_tree_cb:
	if (my $ip = $self->{inject_parents}) {
		if (my $commit = delete $ip->{$log_entry->{revision}}) {
			push @tmp, $commit;
		}
	}
	if (my $cur = ::verify_ref($self->refname.'^0')) {
		push @tmp, $cur;
	}
	if (my $ipd = $self->{inject_parents_dcommit}) {
		if (my $commit = delete $ipd->{$log_entry->{revision}}) {
			push @tmp, @$commit;
		}
	}
	push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp);
	while (my $p = shift @tmp) {
		next if $seen{$p};
		$seen{$p} = 1;
		push @ret, $p;
	}
	@ret;
}

sub rewrite_root {
	my ($self) = @_;
	return $self->{-rewrite_root} if exists $self->{-rewrite_root};
	my $k = "svn-remote.$self->{repo_id}.rewriteRoot";
	my $rwr = eval { command_oneline(qw/config --get/, $k) };
	if ($rwr) {
		$rwr =~ s#/+$##;
		if ($rwr !~ m#^[a-z\+]+://#) {
			die "$rwr is not a valid URL (key: $k)\n";
		}
	}
	$self->{-rewrite_root} = $rwr;
}

sub rewrite_uuid {
	my ($self) = @_;
	return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
	my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
	my $rwid = eval { command_oneline(qw/config --get/, $k) };
	if ($rwid) {
		$rwid =~ s#/+$##;
		if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
			die "$rwid is not a valid UUID (key: $k)\n";
		}
	}
	$self->{-rewrite_uuid} = $rwid;
}

sub metadata_url {
	my ($self) = @_;
	($self->rewrite_root || $self->{url}) .
	   (length $self->{path} ? '/' . $self->{path} : '');
}

sub full_url {
	my ($self) = @_;
	$self->{url} . (length $self->{path} ? '/' . $self->{path} : '');
}

sub full_pushurl {
	my ($self) = @_;
	if ($self->{pushurl}) {
		return $self->{pushurl} . (length $self->{path} ? '/' .
		       $self->{path} : '');
	} else {
		return $self->full_url;
	}
}

sub set_commit_header_env {
	my ($log_entry) = @_;
	my %env;
	foreach my $ned (qw/NAME EMAIL DATE/) {
		foreach my $ac (qw/AUTHOR COMMITTER/) {
			$env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"};
		}
	}

	$ENV{GIT_AUTHOR_NAME} = $log_entry->{name};
	$ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email};
	$ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date};

	$ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name})
						? $log_entry->{commit_name}
						: $log_entry->{name};
	$ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email})
						? $log_entry->{commit_email}
						: $log_entry->{email};
	\%env;
}

sub restore_commit_header_env {
	my ($env) = @_;
	foreach my $ned (qw/NAME EMAIL DATE/) {
		foreach my $ac (qw/AUTHOR COMMITTER/) {
			my $k = "GIT_${ac}_${ned}";
			if (defined $env->{$k}) {
				$ENV{$k} = $env->{$k};
			} else {
				delete $ENV{$k};
			}
		}
	}
}

sub gc {
	command_noisy('gc', '--auto');
};

sub do_git_commit {
	my ($self, $log_entry) = @_;
	my $lr = $self->last_rev;
	if (defined $lr && $lr >= $log_entry->{revision}) {
		die "Last fetched revision of ", $self->refname,
		    " was r$lr, but we are about to fetch: ",
		    "r$log_entry->{revision}!\n";
	}
	if (my $c = $self->rev_map_get($log_entry->{revision})) {
		croak "$log_entry->{revision} = $c already exists! ",
		      "Why are we refetching it?\n";
	}
	my $old_env = set_commit_header_env($log_entry);
	my $tree = $log_entry->{tree};
	if (!defined $tree) {
		$tree = $self->tmp_index_do(sub {
		                    command_oneline('write-tree') });
	}
	die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;

	my @exec = ('git', 'commit-tree', $tree);
	foreach ($self->get_commit_parents($log_entry)) {
		push @exec, '-p', $_;
	}
	defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
	    or croak $!;
	binmode $msg_fh;

	# we always get UTF-8 from SVN, but we may want our commits in
	# a different encoding.
	if (my $enc = Git::config('i18n.commitencoding')) {
		require Encode;
		Encode::from_to($log_entry->{log}, 'UTF-8', $enc);
	}
	print $msg_fh $log_entry->{log} or croak $!;
	restore_commit_header_env($old_env);
	unless ($self->no_metadata) {
		print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n"
		    or croak $!;
	}
	$msg_fh->flush == 0 or croak $!;
	close $msg_fh or croak $!;
	chomp(my $commit = do { local $/; <$out_fh> });
	close $out_fh or croak $!;
	waitpid $pid, 0;
	croak $? if $?;
	if ($commit !~ /^$::sha1$/o) {
		die "Failed to commit, invalid sha1: $commit\n";
	}

	$self->rev_map_set($log_entry->{revision}, $commit, 1);

	$self->{last_rev} = $log_entry->{revision};
	$self->{last_commit} = $commit;
	print "r$log_entry->{revision}" unless $::_q > 1;
	if (defined $log_entry->{svm_revision}) {
		print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
		$self->rev_map_set($log_entry->{svm_revision}, $commit,
		                   0, $self->svm_uuid);
	}
	print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
	if (--$_gc_nr == 0) {
		$_gc_nr = $_gc_period;
		gc();
	}
	return $commit;
}

sub match_paths {
	my ($self, $paths, $r) = @_;
	return 1 if $self->{path} eq '';
	if (my $path = $paths->{"/$self->{path}"}) {
		return ($path->{action} eq 'D') ? 0 : 1;
	}
	$self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//;
	if (grep /$self->{path_regex}/, keys %$paths) {
		return 1;
	}
	my $c = '';
	foreach (split m#/#, $self->{path}) {
		$c .= "/$_";
		next unless ($paths->{$c} &&
		             ($paths->{$c}->{action} =~ /^[AR]$/));
		if ($self->ra->check_path($self->{path}, $r) ==
		    $SVN::Node::dir) {
			return 1;
		}
	}
	return 0;
}

sub find_parent_branch {
	my ($self, $paths, $rev) = @_;
	return undef unless $self->follow_parent;
	unless (defined $paths) {
		my $err_handler = $SVN::Error::handler;
		$SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
		$self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
				   sub { $paths = $_[0] });
		$SVN::Error::handler = $err_handler;
	}
	return undef unless defined $paths;

	# look for a parent from another branch:
	my @b_path_components = split m#/#, $self->{path};
	my @a_path_components;
	my $i;
	while (@b_path_components) {
		$i = $paths->{'/'.join('/', @b_path_components)};
		last if $i && defined $i->{copyfrom_path};
		unshift(@a_path_components, pop(@b_path_components));
	}
	return undef unless defined $i && defined $i->{copyfrom_path};
	my $branch_from = $i->{copyfrom_path};
	if (@a_path_components) {
		print STDERR "branch_from: $branch_from => ";
		$branch_from .= '/'.join('/', @a_path_components);
		print STDERR $branch_from, "\n";
	}
	my $r = $i->{copyfrom_rev};
	my $repos_root = $self->ra->{repos_root};
	my $url = $self->ra->{url};
	my $new_url = $url . $branch_from;
	print STDERR "Found possible branch point: ",
	             "$new_url => ", $self->full_url, ", $r\n"
	             unless $::_q > 1;
	$branch_from =~ s#^/##;
	my $gs = $self->other_gs($new_url, $url,
		                 $branch_from, $r, $self->{ref_id});
	my ($r0, $parent) = $gs->find_rev_before($r, 1);
	{
		my ($base, $head);
		if (!defined $r0 || !defined $parent) {
			($base, $head) = parse_revision_argument(0, $r);
		} else {
			if ($r0 < $r) {
				$gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1,
					0, 1, sub { $base = $_[1] - 1 });
			}
		}
		if (defined $base && $base <= $r) {
			$gs->fetch($base, $r);
		}
		($r0, $parent) = $gs->find_rev_before($r, 1);
	}
	if (defined $r0 && defined $parent) {
		print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
		             unless $::_q > 1;
		my $ed;
		if ($self->ra->can_do_switch) {
			$self->assert_index_clean($parent);
			print STDERR "Following parent with do_switch\n"
			             unless $::_q > 1;
			# do_switch works with svn/trunk >= r22312, but that
			# is not included with SVN 1.4.3 (the latest version
			# at the moment), so we can't rely on it
			$self->{last_rev} = $r0;
			$self->{last_commit} = $parent;
			$ed = SVN::Git::Fetcher->new($self, $gs->{path});
			$gs->ra->gs_do_switch($r0, $rev, $gs,
					      $self->full_url, $ed)
			  or die "SVN connection failed somewhere...\n";
		} elsif ($self->ra->trees_match($new_url, $r0,
			 $self->full_url, $rev)) {
			print STDERR "Trees match:\n",
			             "  $new_url\@$r0\n",
			             "  ${\$self->full_url}\@$rev\n",
			             "Following parent with no changes\n"
			             unless $::_q > 1;
			$self->tmp_index_do(sub {
			    command_noisy('read-tree', $parent);
			});
			$self->{last_commit} = $parent;
		} else {
			print STDERR "Following parent with do_update\n"
			             unless $::_q > 1;
			$ed = SVN::Git::Fetcher->new($self);
			$self->ra->gs_do_update($rev, $rev, $self, $ed)
			  or die "SVN connection failed somewhere...\n";
		}
		print STDERR "Successfully followed parent\n" unless $::_q > 1;
		return $self->make_log_entry($rev, [$parent], $ed);
	}
	return undef;
}

sub do_fetch {
	my ($self, $paths, $rev) = @_;
	my $ed;
	my ($last_rev, @parents);
	if (my $lc = $self->last_commit) {
		# we can have a branch that was deleted, then re-added
		# under the same name but copied from another path, in
		# which case we'll have multiple parents (we don't
		# want to break the original ref, nor lose copypath info):
		if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
			push @{$log_entry->{parents}}, $lc;
			return $log_entry;
		}
		$ed = SVN::Git::Fetcher->new($self);
		$last_rev = $self->{last_rev};
		$ed->{c} = $lc;
		@parents = ($lc);
	} else {
		$last_rev = $rev;
		if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
			return $log_entry;
		}
		$ed = SVN::Git::Fetcher->new($self);
	}
	unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
		die "SVN connection failed somewhere...\n";
	}
	$self->make_log_entry($rev, \@parents, $ed);
}

sub mkemptydirs {
	my ($self, $r) = @_;

	sub scan {
		my ($r, $empty_dirs, $line) = @_;
		if (defined $r && $line =~ /^r(\d+)$/) {
			return 0 if $1 > $r;
		} elsif ($line =~ /^  \+empty_dir: (.+)$/) {
			$empty_dirs->{$1} = 1;
		} elsif ($line =~ /^  \-empty_dir: (.+)$/) {
			my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
			delete @$empty_dirs{@d};
		}
		1; # continue
	};

	my %empty_dirs = ();
	my $gz_file = "$self->{dir}/unhandled.log.gz";
	if (-f $gz_file) {
		if (!$can_compress) {
			warn "Compress::Zlib could not be found; ",
			     "empty directories in $gz_file will not be read\n";
		} else {
			my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
				die "Unable to open $gz_file: $!\n";
			my $line;
			while ($gz->gzreadline($line) > 0) {
				scan($r, \%empty_dirs, $line) or last;
			}
			$gz->gzclose;
		}
	}

	if (open my $fh, '<', "$self->{dir}/unhandled.log") {
		binmode $fh or croak "binmode: $!";
		while (<$fh>) {
			scan($r, \%empty_dirs, $_) or last;
		}
		close $fh;
	}

	my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
	foreach my $d (sort keys %empty_dirs) {
		$d = uri_decode($d);
		$d =~ s/$strip//;
		next unless length($d);
		next if -d $d;
		if (-e $d) {
			warn "$d exists but is not a directory\n";
		} else {
			print "creating empty directory: $d\n";
			mkpath([$d]);
		}
	}
}
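
# For reference, the unhandled.log records scan() matches look like
# this (the path is illustrative, and stored URI-encoded, hence the
# uri_decode() call above):
#
#   r42
#     +empty_dir: trunk/doc/empty
#     -empty_dir: trunk/doc/empty
#
# "+empty_dir" marks a directory SVN created empty; "-empty_dir"
# drops it (and anything under it) again.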

sub get_untracked {
	my ($self, $ed) = @_;
	my @out;
	my $h = $ed->{empty};
	foreach (sort keys %$h) {
		my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
		push @out, "  $act: " . uri_encode($_);
		warn "W: $act: $_\n";
	}
	foreach my $t (qw/dir_prop file_prop/) {
		$h = $ed->{$t} or next;
		foreach my $path (sort keys %$h) {
			my $ppath = $path eq '' ? '.' : $path;
			foreach my $prop (sort keys %{$h->{$path}}) {
				next if $SKIP_PROP{$prop};
				my $v = $h->{$path}->{$prop};
				my $t_ppath_prop = "$t: " .
				                   uri_encode($ppath) . ' ' .
				                   uri_encode($prop);
				if (defined $v) {
					push @out, "  +$t_ppath_prop " .
					           uri_encode($v);
				} else {
					push @out, "  -$t_ppath_prop";
				}
			}
		}
	}
	foreach my $t (qw/absent_file absent_directory/) {
		$h = $ed->{$t} or next;
		foreach my $parent (sort keys %$h) {
			foreach my $path (sort @{$h->{$parent}}) {
				push @out, "  $t: " .
				           uri_encode("$parent/$path");
				warn "W: $t: $parent/$path ",
				     "Insufficient permissions?\n";
			}
		}
	}
	\@out;
}

sub get_tz {
	# some systems don't handle %z, or mishandle it, so be creative.
	my $t = shift || time;
	my $gm = timelocal(gmtime($t));
	my $sign = qw( + + - )[ $t <=> $gm ];
	return sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]);
}
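
# Sketch of what the arithmetic above yields: timelocal(gmtime($t))
# re-interprets the UTC broken-down time as local time, so $t - $gm
# is the UTC offset in seconds.  On a host at UTC-0500, for example,
# $gm is $t + 18000, the <=> picks the '-' sign, and the hour/minute
# fields of gmtime(18000) format as "-0500".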

# parse_svn_date(DATE)
# --------------------
# Given a date (in UTC) from Subversion, return a string in the format
# "<TZ Offset> <local date/time>" that Git will use.
#
# By default the parsed date will be in UTC; if $Git::SVN::_localtime
# is true we'll convert it to the local timezone instead.
sub parse_svn_date {
	my $date = shift || return '+0000 1970-01-01 00:00:00';
	my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
	                                    (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or
	                         croak "Unable to parse date: $date\n";
	my $parsed_date; # Set next.

	if ($Git::SVN::_localtime) {
		# Translate the Subversion datetime to an epoch time.
		# Begin by switching ourselves to $date's timezone, UTC.
		my $old_env_TZ = $ENV{TZ};
		$ENV{TZ} = 'UTC';

		my $epoch_in_UTC =
		    POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900);

		# Determine our local timezone (including DST) at the
		# time of $epoch_in_UTC.  $Git::SVN::Log::TZ stored the
		# value of TZ, if any, at the time we were run.
		if (defined $Git::SVN::Log::TZ) {
			$ENV{TZ} = $Git::SVN::Log::TZ;
		} else {
			delete $ENV{TZ};
		}

		my $our_TZ = get_tz();

		# This converts $epoch_in_UTC into our local timezone.
		my ($sec, $min, $hour, $mday, $mon, $year,
		    $wday, $yday, $isdst) = localtime($epoch_in_UTC);

		$parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d',
				       $our_TZ, $year + 1900, $mon + 1,
				       $mday, $hour, $min, $sec);

		# Reset us to the timezone in effect when we entered
		# this routine.
		if (defined $old_env_TZ) {
			$ENV{TZ} = $old_env_TZ;
		} else {
			delete $ENV{TZ};
		}
	} else {
		$parsed_date = "+0000 $Y-$m-$d $H:$M:$S";
	}

	return $parsed_date;
}
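
# Example (a made-up timestamp): with $Git::SVN::_localtime unset,
#
#   parse_svn_date('2007-01-14T10:17:00.000000Z')
#
# returns '+0000 2007-01-14 10:17:00'; with _localtime set it would be
# rendered in the local zone instead, e.g. '-0500 2007-01-14 05:17:00'.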

sub other_gs {
	my ($self, $new_url, $url,
	    $branch_from, $r, $old_ref_id) = @_;
	my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
	unless ($gs) {
		my $ref_id = $old_ref_id;
		$ref_id =~ s/\@\d+-*$//;
		$ref_id .= "\@$r";
		# just grow a tail if we're not unique enough :x
		$ref_id .= '-' while find_ref($ref_id);
		my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
		if ($u =~ s#^\Q$url\E(/|$)##) {
			$p = $u;
			$u = $url;
			$repo_id = $self->{repo_id};
		}
		while (1) {
			# It is possible to tag two different subdirectories at
			# the same revision.  If the url for an existing ref
			# does not match, we must either find a ref with a
			# matching url or create a new ref by growing a tail.
			$gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1);
			my (undef, $max_commit) = $gs->rev_map_max(1);
			last if (!$max_commit);
			my ($url) = ::cmt_metadata($max_commit);
			last if ($url eq $gs->metadata_url);
			$ref_id .= '-';
		}
		print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
	}
	$gs
}

sub call_authors_prog {
	my ($orig_author) = @_;
	$orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
	my $author = `$::_authors_prog $orig_author`;
	if ($? != 0) {
		die "$::_authors_prog failed with exit code $?\n"
	}
	if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
		my ($name, $email) = ($1, $2);
		$email = undef if length $2 == 0;
		return [$name, $email];
	} else {
		die "Author: $orig_author: $::_authors_prog returned "
			. "invalid author format: $author\n";
	}
}

sub check_author {
	my ($author) = @_;
	if (!defined $author || length $author == 0) {
		$author = '(no author)';
	}
	if (!defined $::users{$author}) {
		if (defined $::_authors_prog) {
			$::users{$author} = call_authors_prog($author);
		} elsif (defined $::_authors) {
			die "Author: $author not defined in $::_authors file\n";
		}
	}
	$author;
}
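
# The authors-prog contract implied above: the program is invoked with
# the (shell-quoted) SVN author name as its sole argument and must
# print "Name <email>" on stdout.  A minimal sketch (the domain is
# illustrative, not part of git-svn):
#
#   #!/usr/bin/env perl
#   my $svn_author = shift;
#   print "$svn_author <$svn_author\@example.com>\n";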
|
|
|
|
|
2009-10-20 02:42:01 +00:00
|
|
|
sub find_extra_svk_parents {
|
|
|
|
my ($self, $ed, $tickets, $parents) = @_;
|
|
|
|
# aha! svk:merge property changed...
|
|
|
|
my @tickets = split "\n", $tickets;
|
|
|
|
my @known_parents;
|
|
|
|
for my $ticket ( @tickets ) {
|
|
|
|
my ($uuid, $path, $rev) = split /:/, $ticket;
|
|
|
|
if ( $uuid eq $self->ra_uuid ) {
|
2010-02-24 18:09:01 +00:00
|
|
|
my $url = $self->{url};
|
2009-10-20 02:42:01 +00:00
|
|
|
my $repos_root = $url;
|
|
|
|
my $branch_from = $path;
|
|
|
|
$branch_from =~ s{^/}{};
|
|
|
|
my $gs = $self->other_gs($repos_root."/".$branch_from,
|
|
|
|
$url,
|
|
|
|
$branch_from,
|
|
|
|
$rev,
|
|
|
|
$self->{ref_id});
|
|
|
|
if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
|
|
|
|
# wahey! we found it, but it might be
|
|
|
|
# an old one (!)
|
2009-11-29 07:20:21 +00:00
|
|
|
push @known_parents, [ $rev, $commit ];
|
2009-10-20 02:42:01 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2009-11-29 07:20:21 +00:00
|
|
|
# Ordering matters; highest-numbered commit merge tickets
|
|
|
|
# first, as they may account for later merge ticket additions
|
|
|
|
# or changes.
|
|
|
|
@known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
|
2009-10-20 02:42:01 +00:00
|
|
|
for my $parent ( @known_parents ) {
|
|
|
|
my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
|
|
|
|
my ($msg_fh, $ctx) = command_output_pipe(@cmd);
|
|
|
|
my $new;
|
|
|
|
while ( <$msg_fh> ) {
|
|
|
|
			$new = 1;
			last;
|
|
|
|
}
|
|
|
|
command_close_pipe($msg_fh, $ctx);
|
|
|
|
if ( $new ) {
|
|
|
|
print STDERR
|
|
|
|
"Found merge parent (svk:merge ticket): $parent\n";
|
|
|
|
push @$parents, $parent;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
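# Example (uuid and path are illustrative): a svk:merge property holds
# one "uuid:path:rev" ticket per line, which find_extra_svk_parents()
# above splits apart:
#
#   my $ticket = 'b48289b4-9d02-0410-b23f-fa81a03d523b:/branches/topic:42';
#   my ($uuid, $path, $rev) = split /:/, $ticket;
#   # $uuid eq 'b48289b4-...', $path eq '/branches/topic', $rev == 42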
|
|
|
|
|
2009-12-19 11:55:13 +00:00
|
|
|
sub lookup_svn_merge {
|
|
|
|
my $uuid = shift;
|
|
|
|
my $url = shift;
|
|
|
|
my $merge = shift;
|
|
|
|
|
|
|
|
my ($source, $revs) = split ":", $merge;
|
|
|
|
my $path = $source;
|
|
|
|
$path =~ s{^/}{};
|
|
|
|
my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
|
|
|
|
if ( !$gs ) {
|
|
|
|
warn "Couldn't find revmap for $url$source\n";
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
my @ranges = split ",", $revs;
|
|
|
|
my ($tip, $tip_commit);
|
|
|
|
my @merged_commit_ranges;
|
|
|
|
# find the tip
|
|
|
|
for my $range ( @ranges ) {
|
|
|
|
my ($bottom, $top) = split "-", $range;
|
|
|
|
$top ||= $bottom;
|
2009-12-19 16:22:42 +00:00
|
|
|
my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
|
|
|
|
my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
|
2009-12-19 11:55:13 +00:00
|
|
|
|
|
|
|
unless ($top_commit and $bottom_commit) {
|
|
|
|
warn "W:unknown path/rev in svn:mergeinfo "
|
|
|
|
."dirprop: $source:$range\n";
|
|
|
|
next;
|
|
|
|
}
|
|
|
|
|
2011-06-18 06:48:00 +00:00
|
|
|
if (scalar(command('rev-parse', "$bottom_commit^@"))) {
|
|
|
|
push @merged_commit_ranges,
|
|
|
|
"$bottom_commit^..$top_commit";
|
|
|
|
} else {
|
|
|
|
push @merged_commit_ranges, "$top_commit";
|
|
|
|
}
|
2009-12-19 11:55:13 +00:00
|
|
|
|
|
|
|
if ( !defined $tip or $top > $tip ) {
|
|
|
|
$tip = $top;
|
|
|
|
$tip_commit = $top_commit;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ($tip_commit, @merged_commit_ranges);
|
|
|
|
}
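# Example (values illustrative): each svn:mergeinfo line handled by
# lookup_svn_merge() is "source:revs", where revs is a comma-separated
# list of single revisions or bottom-top ranges:
#
#   my $merge = '/branches/topic:10-20,25';
#   my ($source, $revs) = split ":", $merge;    # '/branches/topic', '10-20,25'
#   my @ranges = split ",", $revs;              # ('10-20', '25')
#   my ($bottom, $top) = split "-", $ranges[0]; # (10, 20); $top ||= $bottom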
|
2009-12-19 16:26:26 +00:00
|
|
|
|
|
|
|
sub _rev_list {
|
|
|
|
my ($msg_fh, $ctx) = command_output_pipe(
|
|
|
|
"rev-list", @_,
|
|
|
|
);
|
|
|
|
my @rv;
|
|
|
|
while ( <$msg_fh> ) {
|
|
|
|
chomp;
|
|
|
|
push @rv, $_;
|
|
|
|
}
|
|
|
|
command_close_pipe($msg_fh, $ctx);
|
|
|
|
@rv;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub check_cherry_pick {
|
|
|
|
my $base = shift;
|
|
|
|
my $tip = shift;
|
2010-09-02 22:32:06 +00:00
|
|
|
my $parents = shift;
|
2009-12-19 16:26:26 +00:00
|
|
|
my @ranges = @_;
|
|
|
|
my %commits = map { $_ => 1 }
|
2011-06-18 06:47:59 +00:00
|
|
|
_rev_list("--no-merges", $tip, "--not", $base, @$parents, "--");
|
2009-12-19 16:26:26 +00:00
|
|
|
for my $range ( @ranges ) {
|
2011-06-18 06:47:59 +00:00
|
|
|
delete @commits{_rev_list($range, "--")};
|
2009-12-19 16:26:26 +00:00
|
|
|
}
|
2010-01-07 00:25:21 +00:00
|
|
|
for my $commit (keys %commits) {
|
|
|
|
if (has_no_changes($commit)) {
|
|
|
|
delete $commits{$commit};
|
|
|
|
}
|
|
|
|
}
|
2009-12-19 16:26:26 +00:00
|
|
|
return (keys %commits);
|
|
|
|
}
|
|
|
|
|
2010-01-07 00:25:21 +00:00
|
|
|
sub has_no_changes {
|
|
|
|
my $commit = shift;
|
|
|
|
|
|
|
|
my @revs = split / /, command_oneline(
|
|
|
|
qw(rev-list --parents -1 -m), $commit);
|
|
|
|
|
|
|
|
# Commits with no parents, e.g. the start of a partial branch,
|
|
|
|
# have changes by definition.
|
|
|
|
return 1 if (@revs < 2);
|
|
|
|
|
|
|
|
# Commits with multiple parents, e.g a merge, have no changes
|
|
|
|
# by definition.
|
|
|
|
return 0 if (@revs > 2);
|
|
|
|
|
|
|
|
return (command_oneline("rev-parse", "$commit^{tree}") eq
|
|
|
|
command_oneline("rev-parse", "$commit~1^{tree}"));
|
|
|
|
}
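# Worked example (hashes hypothetical): for a single-parent commit,
# "git rev-list --parents -1 -m $commit" prints "$commit $parent",
# so @revs has exactly two elements and the tree comparison decides:
#
#   git rev-parse deadbeef^{tree}     # tree of the commit
#   git rev-parse deadbeef~1^{tree}   # tree of its first parent
#
# Equal trees mean the commit touched nothing (e.g. an emptied
# cherry-pick), and has_no_changes() returns true.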
|
|
|
|
|
2010-01-30 03:14:22 +00:00
|
|
|
# The GIT_DIR environment variable is not always set until after the command
|
|
|
|
# line arguments are processed, so we can't memoize in a BEGIN block.
|
|
|
|
{
|
|
|
|
my $memoized = 0;
|
|
|
|
|
|
|
|
sub memoize_svn_mergeinfo_functions {
|
|
|
|
return if $memoized;
|
|
|
|
$memoized = 1;
|
|
|
|
|
|
|
|
my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
|
|
|
|
mkpath([$cache_path]) unless -d $cache_path;
|
|
|
|
|
|
|
|
tie my %lookup_svn_merge_cache => 'Memoize::Storable',
|
|
|
|
"$cache_path/lookup_svn_merge.db", 'nstore';
|
|
|
|
memoize 'lookup_svn_merge',
|
|
|
|
SCALAR_CACHE => 'FAULT',
|
|
|
|
LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
|
|
|
|
;
|
|
|
|
|
|
|
|
tie my %check_cherry_pick_cache => 'Memoize::Storable',
|
|
|
|
"$cache_path/check_cherry_pick.db", 'nstore';
|
|
|
|
memoize 'check_cherry_pick',
|
|
|
|
SCALAR_CACHE => 'FAULT',
|
|
|
|
LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
|
|
|
|
;
|
|
|
|
|
|
|
|
tie my %has_no_changes_cache => 'Memoize::Storable',
|
|
|
|
"$cache_path/has_no_changes.db", 'nstore';
|
|
|
|
memoize 'has_no_changes',
|
|
|
|
SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
|
|
|
|
LIST_CACHE => 'FAULT',
|
|
|
|
;
|
|
|
|
}
|
2010-07-18 12:17:49 +00:00
|
|
|
|
|
|
|
sub unmemoize_svn_mergeinfo_functions {
|
|
|
|
return if not $memoized;
|
|
|
|
$memoized = 0;
|
|
|
|
|
|
|
|
Memoize::unmemoize 'lookup_svn_merge';
|
|
|
|
Memoize::unmemoize 'check_cherry_pick';
|
|
|
|
Memoize::unmemoize 'has_no_changes';
|
|
|
|
}
|
2011-04-04 19:09:08 +00:00
|
|
|
|
|
|
|
Memoize::memoize 'Git::SVN::repos_root';
|
2010-07-18 12:17:49 +00:00
|
|
|
}
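# The caching pattern used above, reduced to a sketch (the function and
# file names here are hypothetical): tie a hash to an on-disk Storable
# file, then hand it to Memoize so results persist across git-svn runs:
#
#   tie my %cache => 'Memoize::Storable', "$cache_path/expensive.db", 'nstore';
#   memoize 'expensive_function',
#           SCALAR_CACHE => 'FAULT',   # scalar-context calls are a bug
#           LIST_CACHE => ['HASH' => \%cache];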
|
|
|
|
|
|
|
|
END {
|
|
|
|
# Force cache writeout explicitly instead of waiting for
|
|
|
|
# global destruction to avoid segfault in Storable:
|
|
|
|
# http://rt.cpan.org/Public/Bug/Display.html?id=36087
|
|
|
|
unmemoize_svn_mergeinfo_functions();
|
2009-12-19 11:55:13 +00:00
|
|
|
}
|
|
|
|
|
2009-12-19 16:25:31 +00:00
|
|
|
sub parents_exclude {
|
|
|
|
my $parents = shift;
|
|
|
|
my @commits = @_;
|
|
|
|
return unless @commits;
|
|
|
|
|
|
|
|
my @excluded;
|
|
|
|
my $excluded;
|
|
|
|
do {
|
|
|
|
my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
|
|
|
|
$excluded = command_oneline(@cmd);
|
|
|
|
if ( $excluded ) {
|
|
|
|
my @new;
|
|
|
|
my $found;
|
|
|
|
for my $commit ( @commits ) {
|
|
|
|
if ( $commit eq $excluded ) {
|
|
|
|
push @excluded, $commit;
|
|
|
|
$found++;
|
|
|
|
last;
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
push @new, $commit;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
die "saw commit '$excluded' in rev-list output, "
|
|
|
|
."but we didn't ask for that commit (wanted: @commits --not @$parents)"
|
|
|
|
unless $found;
|
|
|
|
@commits = @new;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
while ($excluded and @commits);
|
|
|
|
|
|
|
|
return @excluded;
|
|
|
|
}
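# Sketch of the query parents_exclude() above loops on (refs are
# hypothetical).  Each pass asks for one commit reachable from the
# candidate tips but not from the existing parents:
#
#   git rev-list -1 tipA tipB --not parentA parentB
#
# The named tip moves from @commits to @excluded and the query repeats
# until rev-list returns nothing, leaving exactly the tips that would
# add new history.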
|
|
|
|
|
|
|
|
|
2009-10-20 02:42:03 +00:00
|
|
|
# note: this function should only be called if the various dirprops
|
|
|
|
# have actually changed
|
|
|
|
sub find_extra_svn_parents {
|
|
|
|
my ($self, $ed, $mergeinfo, $parents) = @_;
|
|
|
|
	# svn:mergeinfo property changed...
|
|
|
|
|
2010-01-30 03:14:22 +00:00
|
|
|
memoize_svn_mergeinfo_functions();
|
|
|
|
|
2009-10-20 02:42:03 +00:00
|
|
|
# We first search for merged tips which are not in our
|
|
|
|
# history. Then, we figure out which git revisions are in
|
|
|
|
# that tip, but not this revision. If all of those revisions
|
|
|
|
# are now marked as merge, we can add the tip as a parent.
|
|
|
|
my @merges = split "\n", $mergeinfo;
|
|
|
|
my @merge_tips;
|
2010-02-24 18:09:01 +00:00
|
|
|
my $url = $self->{url};
|
2009-12-19 11:55:13 +00:00
|
|
|
my $uuid = $self->ra_uuid;
|
2009-12-19 16:25:31 +00:00
|
|
|
my %ranges;
|
2009-10-20 02:42:03 +00:00
|
|
|
for my $merge ( @merges ) {
|
2009-12-19 11:55:13 +00:00
|
|
|
my ($tip_commit, @ranges) =
|
|
|
|
lookup_svn_merge( $uuid, $url, $merge );
|
2009-10-20 02:42:03 +00:00
|
|
|
unless (!$tip_commit or
|
|
|
|
grep { $_ eq $tip_commit } @$parents ) {
|
|
|
|
push @merge_tips, $tip_commit;
|
2009-12-19 16:25:31 +00:00
|
|
|
$ranges{$tip_commit} = \@ranges;
|
2009-10-20 02:42:03 +00:00
|
|
|
} else {
|
|
|
|
push @merge_tips, undef;
|
|
|
|
}
|
|
|
|
}
|
2009-12-19 16:25:31 +00:00
|
|
|
|
|
|
|
my %excluded = map { $_ => 1 }
|
|
|
|
parents_exclude($parents, grep { defined } @merge_tips);
|
|
|
|
|
|
|
|
# check merge tips for new parents
|
|
|
|
my @new_parents;
|
2009-10-20 02:42:03 +00:00
|
|
|
for my $merge_tip ( @merge_tips ) {
|
|
|
|
my $spec = shift @merges;
|
2009-12-19 16:25:31 +00:00
|
|
|
next unless $merge_tip and $excluded{$merge_tip};
|
|
|
|
|
|
|
|
my $ranges = $ranges{$merge_tip};
|
|
|
|
|
2009-12-19 16:26:26 +00:00
|
|
|
# check out 'new' tips
|
2010-01-07 00:25:22 +00:00
|
|
|
my $merge_base;
|
|
|
|
eval {
|
|
|
|
$merge_base = command_oneline(
|
|
|
|
"merge-base",
|
|
|
|
@$parents, $merge_tip,
|
|
|
|
);
|
|
|
|
};
|
|
|
|
if ($@) {
|
|
|
|
die "An error occurred during merge-base"
|
|
|
|
unless $@->isa("Git::Error::Command");
|
|
|
|
|
|
|
|
warn "W: Cannot find common ancestor between ".
|
|
|
|
"@$parents and $merge_tip. Ignoring merge info.\n";
|
|
|
|
next;
|
|
|
|
}
|
2009-12-19 16:26:26 +00:00
|
|
|
|
|
|
|
# double check that there are no missing non-merge commits
|
|
|
|
my (@incomplete) = check_cherry_pick(
|
|
|
|
$merge_base, $merge_tip,
|
2010-09-02 22:32:06 +00:00
|
|
|
$parents,
|
2009-12-19 16:26:26 +00:00
|
|
|
@$ranges,
|
|
|
|
);
|
|
|
|
|
|
|
|
if ( @incomplete ) {
|
|
|
|
warn "W:svn cherry-pick ignored ($spec) - missing "
|
|
|
|
			    .@incomplete." commit(s) (e.g. $incomplete[0])\n";
|
|
|
|
} else {
|
|
|
|
warn
|
|
|
|
"Found merge parent (svn:mergeinfo prop): ",
|
|
|
|
$merge_tip, "\n";
|
|
|
|
push @new_parents, $merge_tip;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
# cater for merges which merge commits from multiple branches
|
|
|
|
if ( @new_parents > 1 ) {
|
|
|
|
for ( my $i = 0; $i <= $#new_parents; $i++ ) {
|
|
|
|
for ( my $j = 0; $j <= $#new_parents; $j++ ) {
|
|
|
|
next if $i == $j;
|
|
|
|
next unless $new_parents[$i];
|
|
|
|
next unless $new_parents[$j];
|
|
|
|
my $revs = command_oneline(
|
2009-12-22 20:15:40 +00:00
|
|
|
"rev-list", "-1",
|
|
|
|
"$new_parents[$i]..$new_parents[$j]",
|
2009-12-19 16:26:26 +00:00
|
|
|
);
|
|
|
|
if ( !$revs ) {
|
2010-02-22 18:12:53 +00:00
|
|
|
undef($new_parents[$j]);
|
2009-12-19 16:26:26 +00:00
|
|
|
}
|
2009-10-20 02:42:03 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2009-12-19 16:26:26 +00:00
|
|
|
push @$parents, grep { defined } @new_parents;
|
2009-10-20 02:42:03 +00:00
|
|
|
}
|
|
|
|
|
2007-01-11 20:14:21 +00:00
|
|
|
sub make_log_entry {
|
2007-01-25 19:53:13 +00:00
|
|
|
my ($self, $rev, $parents, $ed) = @_;
|
|
|
|
my $untracked = $self->get_untracked($ed);
|
|
|
|
|
2009-10-20 02:42:01 +00:00
|
|
|
my @parents = @$parents;
|
|
|
|
my $ps = $ed->{path_strip} || "";
|
|
|
|
	for my $path ( grep { m/$ps/ } keys %{$ed->{dir_prop}} ) {
|
|
|
|
my $props = $ed->{dir_prop}{$path};
|
|
|
|
if ( $props->{"svk:merge"} ) {
|
|
|
|
$self->find_extra_svk_parents
|
|
|
|
($ed, $props->{"svk:merge"}, \@parents);
|
|
|
|
}
|
2009-10-20 02:42:03 +00:00
|
|
|
if ( $props->{"svn:mergeinfo"} ) {
|
|
|
|
$self->find_extra_svn_parents
|
|
|
|
($ed,
|
|
|
|
$props->{"svn:mergeinfo"},
|
|
|
|
\@parents);
|
|
|
|
}
|
2009-10-20 02:42:01 +00:00
|
|
|
}
|
|
|
|
|
2007-01-11 20:14:21 +00:00
|
|
|
open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
|
2007-01-25 19:53:13 +00:00
|
|
|
print $un "r$rev\n" or croak $!;
|
|
|
|
print $un $_, "\n" foreach @$untracked;
|
2009-10-20 02:42:01 +00:00
|
|
|
my %log_entry = ( parents => \@parents, revision => $rev,
|
2007-01-25 19:53:13 +00:00
|
|
|
log => '');
|
2007-02-07 02:35:30 +00:00
|
|
|
|
2007-02-11 04:46:50 +00:00
|
|
|
my $headrev;
|
2007-02-07 02:35:30 +00:00
|
|
|
my $logged = delete $self->{logged_rev_props};
|
2007-02-11 04:46:50 +00:00
|
|
|
if (!$logged || $self->{-want_revprops}) {
|
2007-02-07 02:35:30 +00:00
|
|
|
my $rp = $self->ra->rev_proplist($rev);
|
|
|
|
foreach (sort keys %$rp) {
|
|
|
|
my $v = $rp->{$_};
|
|
|
|
if (/^svn:(author|date|log)$/) {
|
|
|
|
$log_entry{$1} = $v;
|
2007-02-11 04:46:50 +00:00
|
|
|
} elsif ($_ eq 'svm:headrev') {
|
|
|
|
$headrev = $v;
|
2007-02-07 02:35:30 +00:00
|
|
|
} else {
|
|
|
|
print $un " rev_prop: ", uri_encode($_), ' ',
|
|
|
|
uri_encode($v), "\n";
|
|
|
|
}
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
2007-02-07 02:35:30 +00:00
|
|
|
} else {
|
|
|
|
		$log_entry{$_} = $logged->{$_} foreach keys %$logged;
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
|
|
|
close $un or croak $!;
|
2007-01-25 19:53:13 +00:00
|
|
|
|
2007-01-11 20:14:21 +00:00
|
|
|
$log_entry{date} = parse_svn_date($log_entry{date});
|
|
|
|
$log_entry{log} .= "\n";
|
2007-02-13 08:38:02 +00:00
|
|
|
my $author = $log_entry{author} = check_author($log_entry{author});
|
|
|
|
my ($name, $email) = defined $::users{$author} ? @{$::users{$author}}
|
2007-11-22 13:44:42 +00:00
|
|
|
: ($author, undef);
|
|
|
|
|
|
|
|
my ($commit_name, $commit_email) = ($name, $email);
|
|
|
|
if ($_use_log_author) {
|
2007-12-13 06:58:15 +00:00
|
|
|
my $name_field;
|
|
|
|
if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) {
|
|
|
|
$name_field = $1;
|
|
|
|
} elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) {
|
|
|
|
$name_field = $1;
|
|
|
|
}
|
|
|
|
if (!defined $name_field) {
|
2008-04-29 21:20:32 +00:00
|
|
|
if (!defined $email) {
|
|
|
|
$email = $name;
|
|
|
|
}
|
2007-12-13 06:58:15 +00:00
|
|
|
} elsif ($name_field =~ /(.*?)\s+<(.*)>/) {
|
2007-11-22 13:44:42 +00:00
|
|
|
($name, $email) = ($1, $2);
|
2007-12-13 06:58:15 +00:00
|
|
|
} elsif ($name_field =~ /(.*)@/) {
|
|
|
|
($name, $email) = ($1, $name_field);
|
|
|
|
} else {
|
2008-04-29 21:20:32 +00:00
|
|
|
($name, $email) = ($name_field, $name_field);
|
2007-11-22 13:44:42 +00:00
|
|
|
}
|
|
|
|
}
|
2007-02-11 08:51:33 +00:00
|
|
|
if (defined $headrev && $self->use_svm_props) {
|
2007-02-17 03:15:21 +00:00
|
|
|
if ($self->rewrite_root) {
|
|
|
|
die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
|
|
|
|
"options set!\n";
|
|
|
|
}
|
2010-01-23 08:30:00 +00:00
|
|
|
if ($self->rewrite_uuid) {
|
|
|
|
die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
|
|
|
|
"options set!\n";
|
|
|
|
}
|
2009-07-11 21:13:12 +00:00
|
|
|
my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
|
2007-02-17 10:53:07 +00:00
|
|
|
# we don't want "SVM: initializing mirror for junk" ...
|
|
|
|
return undef if $r == 0;
|
|
|
|
my $svm = $self->svm;
|
|
|
|
if ($uuid ne $svm->{uuid}) {
|
2007-02-11 04:46:50 +00:00
|
|
|
die "UUID mismatch on SVM path:\n",
|
2007-02-17 10:53:07 +00:00
|
|
|
"expected: $svm->{uuid}\n",
|
2007-02-11 04:46:50 +00:00
|
|
|
" got: $uuid\n";
|
|
|
|
}
|
2007-02-17 10:53:07 +00:00
|
|
|
my $full_url = $self->full_url;
|
|
|
|
$full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or
|
|
|
|
die "Failed to replace '$svm->{replace}' with ",
|
|
|
|
"'$svm->{source}' in $full_url\n";
|
2007-02-22 23:32:29 +00:00
|
|
|
# throw away username for storing in records
|
|
|
|
remove_username($full_url);
|
2007-02-11 04:46:50 +00:00
|
|
|
$log_entry{metadata} = "$full_url\@$r $uuid";
|
|
|
|
$log_entry{svm_revision} = $r;
|
2007-11-22 13:44:42 +00:00
|
|
|
$email ||= "$author\@$uuid";
|
|
|
|
$commit_email ||= "$author\@$uuid";
|
2007-02-17 03:57:29 +00:00
|
|
|
} elsif ($self->use_svnsync_props) {
|
|
|
|
my $full_url = $self->svnsync->{url};
|
|
|
|
$full_url .= "/$self->{path}" if length $self->{path};
|
2007-04-25 01:02:07 +00:00
|
|
|
remove_username($full_url);
|
2007-02-17 03:57:29 +00:00
|
|
|
my $uuid = $self->svnsync->{uuid};
|
|
|
|
$log_entry{metadata} = "$full_url\@$rev $uuid";
|
2007-11-22 13:44:42 +00:00
|
|
|
$email ||= "$author\@$uuid";
|
|
|
|
$commit_email ||= "$author\@$uuid";
|
2007-02-11 04:46:50 +00:00
|
|
|
} else {
|
2007-04-25 01:02:07 +00:00
|
|
|
my $url = $self->metadata_url;
|
|
|
|
remove_username($url);
|
2010-01-23 08:30:00 +00:00
|
|
|
my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
|
|
|
|
$log_entry{metadata} = "$url\@$rev " . $uuid;
|
|
|
|
$email ||= "$author\@" . $uuid;
|
|
|
|
$commit_email ||= "$author\@" . $uuid;
|
2007-02-11 04:46:50 +00:00
|
|
|
}
|
2007-02-13 08:38:02 +00:00
|
|
|
$log_entry{name} = $name;
|
|
|
|
$log_entry{email} = $email;
|
2007-11-22 13:44:42 +00:00
|
|
|
$log_entry{commit_name} = $commit_name;
|
|
|
|
$log_entry{commit_email} = $commit_email;
|
2007-01-11 20:14:21 +00:00
|
|
|
\%log_entry;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub fetch {
|
2007-01-26 01:35:40 +00:00
|
|
|
my ($self, $min_rev, $max_rev, @parents) = @_;
|
2007-01-11 20:14:21 +00:00
|
|
|
my ($last_rev, $last_commit) = $self->last_rev_commit;
|
2007-01-26 01:35:40 +00:00
|
|
|
my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev);
|
2007-02-08 20:53:57 +00:00
|
|
|
$self->ra->gs_fetch_loop_common($base, $head, [$self]);
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub set_tree_cb {
|
|
|
|
my ($self, $log_entry, $tree, $rev, $date, $author) = @_;
|
2007-02-10 21:58:33 +00:00
|
|
|
$self->{inject_parents} = { $rev => $tree };
|
|
|
|
$self->fetch(undef, undef);
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub set_tree {
|
|
|
|
my ($self, $tree) = (shift, shift);
|
2007-01-15 07:21:16 +00:00
|
|
|
my $log_entry = ::get_commit_entry($tree);
|
2007-01-11 20:14:21 +00:00
|
|
|
unless ($self->{last_rev}) {
|
2008-09-29 13:58:18 +00:00
|
|
|
::fatal("Must have an existing revision to commit");
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
2007-01-27 22:33:08 +00:00
|
|
|
my %ed_opts = ( r => $self->{last_rev},
|
|
|
|
log => $log_entry->{log},
|
|
|
|
ra => $self->ra,
|
|
|
|
tree_a => $self->{last_commit},
|
|
|
|
tree_b => $tree,
|
|
|
|
editor_cb => sub {
|
|
|
|
$self->set_tree_cb($log_entry, $tree, @_) },
|
|
|
|
svn_path => $self->{path} );
|
|
|
|
if (!SVN::Git::Editor->new(\%ed_opts)->apply_diff) {
|
2007-01-11 20:14:21 +00:00
|
|
|
print "No changes\nr$self->{last_rev} = $tree\n";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
sub rebuild_from_rev_db {
|
|
|
|
my ($self, $path) = @_;
|
|
|
|
my $r = -1;
|
|
|
|
open my $fh, '<', $path or croak "open: $!";
|
2008-04-18 13:12:04 +00:00
|
|
|
binmode $fh or croak "binmode: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
while (<$fh>) {
|
|
|
|
length($_) == 41 or croak "inconsistent size in ($_) != 41";
|
|
|
|
chomp($_);
|
|
|
|
++$r;
|
|
|
|
next if $_ eq ('0' x 40);
|
|
|
|
$self->rev_map_set($r, $_);
|
|
|
|
print "r$r = $_\n";
|
|
|
|
}
|
|
|
|
close $fh or croak "close: $!";
|
|
|
|
unlink $path or croak "unlink: $!";
|
|
|
|
}
|
|
|
|
|
2007-01-30 21:11:14 +00:00
|
|
|
sub rebuild {
|
|
|
|
my ($self) = @_;
|
2007-12-09 07:27:41 +00:00
|
|
|
my $map_path = $self->map_path;
|
git-svn: do a partial rebuild if rev_map is out-of-date
Suppose you're using git-svn to work with a certain SVN repository.
Since you don't like 'git-svn fetch' to take forever, and you don't want
to accidentally interrupt it and end up corrupting your repository, you
set up a remote Git repository to mirror the SVN repository, which does
its own 'git-svn fetch' on a cronjob; now you can 'git-fetch' from the
Git mirror into your local repository, and still dcommit to SVN when you
have changes to push.
After you do this, though, git-svn will get very confused if you ever
try to do 'git-svn fetch' in your local repository again, since its
rev_map will differ from the branch's head, and it will be unable to
fetch new commits from SVN because of the metadata conflict. But all
the necessary metadata are there in the Git commit message; git-svn
already knows how to rebuild rev_map files that get blown away, by
using the metadata.
This patch teaches git-svn to do a partial rebuild of the rev_map to
match the true state of the branch, if it ever is used to fetch again.
This will only work for projects not using either noMetadata or
useSvmProps configuration options; if you are using these options,
git-svn will fall back to the previous behaviour.
Signed-off-by: Deskin Miller <deskinm@umich.edu>
Acked-by: Eric Wong <normalperson@yhbt.net>
Signed-off-by: Junio C Hamano <gitster@pobox.com>
2008-09-16 01:12:58 +00:00
|
|
|
my $partial = (-e $map_path && ! -z $map_path);
|
2007-02-16 12:05:33 +00:00
|
|
|
return unless ::verify_ref($self->refname.'^0');
|
2008-09-16 01:12:58 +00:00
|
|
|
if (!$partial && ($self->use_svm_props || $self->no_metadata)) {
|
2007-12-09 07:27:41 +00:00
|
|
|
my $rev_db = $self->rev_db_path;
|
|
|
|
$self->rebuild_from_rev_db($rev_db);
|
|
|
|
if ($self->use_svm_props) {
|
|
|
|
my $svm_rev_db = $self->rev_db_path($self->svm_uuid);
|
|
|
|
$self->rebuild_from_rev_db($svm_rev_db);
|
|
|
|
}
|
|
|
|
$self->unlink_rev_db_symlink;
|
2007-02-12 21:25:25 +00:00
|
|
|
return;
|
|
|
|
}
|
2008-09-16 01:12:58 +00:00
|
|
|
print "Rebuilding $map_path ...\n" if (!$partial);
|
|
|
|
my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) :
|
|
|
|
(undef, undef));
|
2007-12-09 07:27:41 +00:00
|
|
|
my ($log, $ctx) =
|
2012-02-12 00:23:05 +00:00
|
|
|
command_output_pipe(qw/rev-list --pretty=raw --reverse/,
|
2008-09-16 01:12:58 +00:00
|
|
|
($head ? "$head.." : "") . $self->refname,
|
|
|
|
'--');
|
2008-06-24 00:17:36 +00:00
|
|
|
my $metadata_url = $self->metadata_url;
|
|
|
|
remove_username($metadata_url);
|
2010-01-23 08:30:00 +00:00
|
|
|
my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
|
2007-06-30 08:56:13 +00:00
|
|
|
my $c;
|
|
|
|
while (<$log>) {
|
|
|
|
if ( m{^commit ($::sha1)$} ) {
|
|
|
|
$c = $1;
|
|
|
|
next;
|
|
|
|
}
|
|
|
|
next unless s{^\s*(git-svn-id:)}{$1};
|
|
|
|
my ($url, $rev, $uuid) = ::extract_metadata($_);
|
2007-02-22 23:32:29 +00:00
|
|
|
remove_username($url);
|
2007-01-30 21:11:14 +00:00
|
|
|
|
|
|
|
# ignore merges (from set-tree)
|
|
|
|
next if (!defined $rev || !$uuid);
|
|
|
|
|
|
|
|
# if we merged or otherwise started elsewhere, this is
|
|
|
|
# how we break out of it
|
2007-12-09 07:27:41 +00:00
|
|
|
if (($uuid ne $svn_uuid) ||
|
2008-06-24 00:17:36 +00:00
|
|
|
($metadata_url && $url && ($url ne $metadata_url))) {
|
2007-01-30 21:11:14 +00:00
|
|
|
next;
|
|
|
|
}
|
2008-09-16 01:12:58 +00:00
|
|
|
if ($partial && $head) {
|
|
|
|
print "Partial-rebuilding $map_path ...\n";
|
|
|
|
print "Currently at $base_rev = $head\n";
|
|
|
|
$head = undef;
|
|
|
|
}
|
2007-01-30 21:11:14 +00:00
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
$self->rev_map_set($rev, $c);
|
2007-01-30 21:11:14 +00:00
|
|
|
print "r$rev = $c\n";
|
|
|
|
}
|
2007-06-30 08:56:13 +00:00
|
|
|
command_close_pipe($log, $ctx);
|
2008-09-16 01:12:58 +00:00
|
|
|
print "Done rebuilding $map_path\n" if (!$partial || !$head);
|
2007-12-09 07:27:41 +00:00
|
|
|
my $rev_db_path = $self->rev_db_path;
|
|
|
|
if (-f $self->rev_db_path) {
|
|
|
|
unlink $self->rev_db_path or croak "unlink: $!";
|
|
|
|
}
|
|
|
|
$self->unlink_rev_db_symlink;
|
2007-01-30 21:11:14 +00:00
|
|
|
}
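# Example (URL and uuid hypothetical): rebuild() above recognizes a
# fetched commit by its trailing metadata line,
#
#   git-svn-id: http://svn.example.com/repo/trunk@1234 b48289b4-9d02-0410-b23f-fa81a03d523b
#
# i.e. "$url@$rev $uuid" as parsed by ::extract_metadata; r1234 is then
# mapped back to the commit sha1 via rev_map_set().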
|
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
# rev_map:
|
2007-01-11 20:14:21 +00:00
|
|
|
# Tie::File seems to be prone to offset errors if revisions get sparse,
|
|
|
|
# and it's not that fast, either.  Tie::File is also not in Perl 5.6. So
|
|
|
|
# one of my favorite modules is out :< Next up would be one of the DBM
|
2007-12-09 07:27:41 +00:00
|
|
|
# modules, but I'm not sure which is most portable...
|
|
|
|
#
|
|
|
|
# This is the replacement for the rev_db format, which was too big
|
|
|
|
# and inefficient for large repositories with a lot of sparse history
|
|
|
|
# (mainly tags)
|
|
|
|
#
|
|
|
|
# The format is this:
|
|
|
|
# - 24 bytes for every record,
|
|
|
|
# * 4 bytes for the integer representing an SVN revision number
|
|
|
|
# * 20 bytes representing the sha1 of a git commit
|
|
|
|
# - No empty padding records like the old format
|
2007-12-09 07:27:42 +00:00
|
|
|
# (except the last record, which can be overwritten)
|
2007-12-09 07:27:41 +00:00
|
|
|
# - new records are written append-only since SVN revision numbers
|
|
|
|
# increase monotonically
|
|
|
|
# - lookups on SVN revision number are done via a binary search
|
2007-12-09 07:27:42 +00:00
|
|
|
# - Piping the file to xxd -c24 is a good way of dumping it for
|
|
|
|
# viewing or editing (piped back through xxd -r), should the need
|
|
|
|
# ever arise.
|
|
|
|
# - The last record can be a padding record with an all-zero sha1
|
|
|
|
# This is used to optimize fetch performance when using multiple
|
|
|
|
# "fetch" directives in .git/config
|
2007-12-09 07:27:41 +00:00
|
|
|
#
|
2007-02-11 23:21:24 +00:00
|
|
|
# These files are disposable unless noMetadata or useSvmProps is set
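#
# A sketch of one record's round trip, assuming rev_map_fmt is the
# 'NH40' pack template defined elsewhere in this file (a 32-bit
# big-endian revision plus 40 hex digits of sha1, 24 bytes in all):
#
#   my $record = pack('NH40', 1234, 'f' x 40);   # length($record) == 24
#   my ($rev, $sha1) = unpack('NH40', $record);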
|
2007-01-11 20:14:21 +00:00
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
sub _rev_map_set {
|
2007-02-12 21:25:25 +00:00
|
|
|
my ($fh, $rev, $commit) = @_;
|
2007-12-09 07:27:41 +00:00
|
|
|
|
2008-04-18 13:12:04 +00:00
|
|
|
binmode $fh or croak "binmode: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
my $size = (stat($fh))[7];
|
|
|
|
($size % 24) == 0 or croak "inconsistent size: $size";
|
|
|
|
|
2007-12-09 07:27:42 +00:00
|
|
|
my $wr_offset = 0;
|
2007-12-09 07:27:41 +00:00
|
|
|
if ($size > 0) {
|
|
|
|
sysseek($fh, -24, SEEK_END) or croak "seek: $!";
|
|
|
|
my $read = sysread($fh, my $buf, 24) or croak "read: $!";
|
|
|
|
$read == 24 or croak "read only $read bytes (!= 24)";
|
|
|
|
my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf);
|
2007-12-09 07:27:42 +00:00
|
|
|
if ($last_commit eq ('0' x40)) {
|
|
|
|
if ($size >= 48) {
|
|
|
|
sysseek($fh, -48, SEEK_END) or croak "seek: $!";
|
|
|
|
$read = sysread($fh, $buf, 24) or
|
|
|
|
croak "read: $!";
|
|
|
|
$read == 24 or
|
|
|
|
croak "read only $read bytes (!= 24)";
|
|
|
|
($last_rev, $last_commit) =
|
|
|
|
unpack(rev_map_fmt, $buf);
|
|
|
|
if ($last_commit eq ('0' x40)) {
|
|
|
|
croak "inconsistent .rev_map\n";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if ($last_rev >= $rev) {
|
|
|
|
croak "last_rev is higher!: $last_rev >= $rev";
|
|
|
|
}
|
|
|
|
$wr_offset = -24;
|
2007-02-12 21:25:25 +00:00
|
|
|
}
|
|
|
|
}
|
2007-12-09 07:27:42 +00:00
|
|
|
sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or
|
|
|
|
croak "write: $!";
|
2007-02-12 21:25:25 +00:00
|
|
|
}
|
|
|
|
|
2009-06-04 03:45:52 +00:00
|
|
|
sub _rev_map_reset {
|
|
|
|
my ($fh, $rev, $commit) = @_;
|
|
|
|
my $c = _rev_map_get($fh, $rev);
|
|
|
|
$c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
|
|
|
|
my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
|
|
|
|
truncate $fh, $offset or croak "truncate: $!";
|
|
|
|
}
|
|
|
|
|
2007-02-12 21:25:25 +00:00
|
|
|
sub mkfile {
|
|
|
|
my ($path) = @_;
|
|
|
|
unless (-e $path) {
|
|
|
|
my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#);
|
|
|
|
mkpath([$dir]) unless -d $dir;
|
|
|
|
open my $fh, '>>', $path or die "Couldn't create $path: $!\n";
|
|
|
|
close $fh or die "Couldn't close (create) $path: $!\n";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
sub rev_map_set {
|
2007-02-12 21:25:25 +00:00
|
|
|
my ($self, $rev, $commit, $update_ref, $uuid) = @_;
|
2010-05-04 23:36:47 +00:00
|
|
|
defined $commit or die "missing arg3\n";
|
2007-02-12 21:25:25 +00:00
|
|
|
length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
|
2007-12-09 07:27:41 +00:00
|
|
|
my $db = $self->map_path($uuid);
|
2007-02-12 21:25:25 +00:00
|
|
|
my $db_lock = "$db.lock";
|
2012-04-02 13:29:32 +00:00
|
|
|
my $sigmask;
|
2009-06-04 03:45:52 +00:00
|
|
|
$update_ref ||= 0;
|
2007-01-31 21:54:23 +00:00
|
|
|
if ($update_ref) {
|
2012-04-02 13:29:32 +00:00
|
|
|
$sigmask = POSIX::SigSet->new();
|
|
|
|
my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
|
2012-04-23 16:26:56 +00:00
|
|
|
SIGALRM, SIGUSR1, SIGUSR2);
|
2012-04-02 13:29:32 +00:00
|
|
|
sigprocmask(SIG_BLOCK, $signew, $sigmask) or
|
|
|
|
croak "Can't block signals: $!";
|
2007-01-31 21:54:23 +00:00
|
|
|
}
|
2007-02-12 21:25:25 +00:00
|
|
|
mkfile($db);
|
|
|
|
|
2007-01-31 21:54:23 +00:00
|
|
|
$LOCKFILES{$db_lock} = 1;
|
2007-02-11 23:21:24 +00:00
|
|
|
my $sync;
|
|
|
|
# both of these options make our .rev_db file very, very important
|
|
|
|
# and we can't afford to lose it because rebuild() won't work
|
|
|
|
if ($self->use_svm_props || $self->no_metadata) {
|
|
|
|
$sync = 1;
|
2007-12-09 07:27:41 +00:00
|
|
|
copy($db, $db_lock) or die "rev_map_set(@_): ",
|
2007-02-12 21:25:25 +00:00
|
|
|
"Failed to copy: ",
|
2007-01-31 21:54:23 +00:00
|
|
|
"$db => $db_lock ($!)\n";
|
|
|
|
} else {
|
2007-12-09 07:27:41 +00:00
|
|
|
rename $db, $db_lock or die "rev_map_set(@_): ",
|
2007-02-12 21:25:25 +00:00
|
|
|
"Failed to rename: ",
|
2007-01-31 21:54:23 +00:00
|
|
|
"$db => $db_lock ($!)\n";
|
|
|
|
}
|
2007-12-09 07:27:41 +00:00
|
|
|
|
2007-12-09 07:27:42 +00:00
|
|
|
sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
|
2007-12-09 07:27:41 +00:00
|
|
|
or croak "Couldn't open $db_lock: $!\n";
|
2009-06-04 03:45:52 +00:00
|
|
|
$update_ref eq 'reset' ? _rev_map_reset($fh, $rev, $commit) :
|
|
|
|
_rev_map_set($fh, $rev, $commit);
|
2007-02-11 23:21:24 +00:00
|
|
|
if ($sync) {
|
|
|
|
$fh->flush or die "Couldn't flush $db_lock: $!\n";
|
|
|
|
$fh->sync or die "Couldn't sync $db_lock: $!\n";
|
|
|
|
}
|
2007-01-11 20:14:21 +00:00
|
|
|
close $fh or croak $!;
|
2007-01-31 21:54:23 +00:00
|
|
|
if ($update_ref) {
|
2007-02-16 09:45:13 +00:00
|
|
|
$_head = $self;
|
2009-06-04 03:45:52 +00:00
|
|
|
my $note = "";
|
|
|
|
$note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
|
|
|
|
command_noisy('update-ref', '-m', "r$rev$note",
|
2007-01-31 21:54:23 +00:00
|
|
|
$self->refname, $commit);
|
|
|
|
}
|
2007-12-09 07:27:41 +00:00
|
|
|
rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
|
2007-01-31 21:54:23 +00:00
|
|
|
"$db_lock => $db ($!)\n";
|
|
|
|
delete $LOCKFILES{$db_lock};
|
|
|
|
if ($update_ref) {
|
2012-04-02 13:29:32 +00:00
|
|
|
sigprocmask(SIG_SETMASK, $sigmask) or
|
|
|
|
croak "Can't restore signal mask: $!";
|
2007-01-31 21:54:23 +00:00
|
|
|
}
|
2007-01-11 20:14:21 +00:00
|
|
|
}
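# The signal handling in rev_map_set() above, reduced to its shape:
# block the signals that could interrupt us between update-ref and the
# rename, run the critical section, then restore the old mask.  A
# minimal sketch:
#
#   my $old = POSIX::SigSet->new();
#   sigprocmask(SIG_BLOCK, POSIX::SigSet->new(SIGINT, SIGTERM), $old)
#           or croak "Can't block signals: $!";
#   # ... critical section: write .rev_map.lock, update-ref, rename ...
#   sigprocmask(SIG_SETMASK, $old) or croak "Can't restore signal mask: $!";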
|
|
|
|
|
2007-12-09 07:27:42 +00:00
|
|
|
# If want_commit, this will return an array of (rev, commit) where
|
|
|
|
# commit _must_ be a valid commit in the archive.
|
|
|
|
# Otherwise, it'll return the max revision (whether or not the
|
|
|
|
# commit is valid or just an all-zero ('0' x 40) placeholder).
|
2007-12-09 07:27:41 +00:00
|
|
|
sub rev_map_max {
|
2007-12-09 07:27:42 +00:00
|
|
|
my ($self, $want_commit) = @_;
|
2007-02-16 12:05:33 +00:00
|
|
|
$self->rebuild;
|
2008-09-16 01:12:58 +00:00
|
|
|
my ($r, $c) = $self->rev_map_max_norebuild($want_commit);
|
|
|
|
$want_commit ? ($r, $c) : $r;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub rev_map_max_norebuild {
|
|
|
|
my ($self, $want_commit) = @_;
|
2007-12-09 07:27:41 +00:00
|
|
|
my $map_path = $self->map_path;
|
2007-12-09 07:27:42 +00:00
|
|
|
stat $map_path or return $want_commit ? (0, undef) : 0;
|
2007-12-09 07:27:41 +00:00
|
|
|
sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
|
2008-04-18 13:12:04 +00:00
|
|
|
binmode $fh or croak "binmode: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
my $size = (stat($fh))[7];
|
|
|
|
($size % 24) == 0 or croak "inconsistent size: $size";
|
|
|
|
|
|
|
|
if ($size == 0) {
|
|
|
|
close $fh or croak "close: $!";
|
2007-12-09 07:27:42 +00:00
|
|
|
return $want_commit ? (0, undef) : 0;
|
2007-12-09 07:27:41 +00:00
|
|
|
}
|
|
|
|
|
2007-12-09 07:27:42 +00:00
|
|
|
sysseek($fh, -24, SEEK_END) or croak "seek: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
sysread($fh, my $buf, 24) == 24 or croak "read: $!";
|
|
|
|
my ($r, $c) = unpack(rev_map_fmt, $buf);
|
2007-12-09 07:27:42 +00:00
|
|
|
if ($want_commit && $c eq ('0' x40)) {
|
|
|
|
if ($size < 48) {
|
|
|
|
return $want_commit ? (0, undef) : 0;
|
|
|
|
}
|
|
|
|
sysseek($fh, -48, SEEK_END) or croak "seek: $!";
|
|
|
|
sysread($fh, $buf, 24) == 24 or croak "read: $!";
|
|
|
|
($r, $c) = unpack(rev_map_fmt, $buf);
|
|
|
|
if ($c eq ('0'x40)) {
|
|
|
|
croak "Penultimate record is all-zeroes in $map_path";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
close $fh or croak "close: $!";
|
|
|
|
$want_commit ? ($r, $c) : $r;
|
2007-02-01 01:22:31 +00:00
|
|
|
}
|
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
sub rev_map_get {
|
2007-02-12 21:25:25 +00:00
|
|
|
my ($self, $rev, $uuid) = @_;
|
2007-12-09 07:27:41 +00:00
|
|
|
my $map_path = $self->map_path($uuid);
|
|
|
|
return undef unless -e $map_path;
|
|
|
|
|
|
|
|
sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
|
2009-06-04 03:45:52 +00:00
|
|
|
my $c = _rev_map_get($fh, $rev);
|
|
|
|
close($fh) or croak "close: $!";
|
|
|
|
$c
|
|
|
|
}
|
|
|
|
|
|
|
|
sub _rev_map_get {
|
|
|
|
my ($fh, $rev) = @_;
|
|
|
|
|
2008-04-18 13:12:04 +00:00
|
|
|
binmode $fh or croak "binmode: $!";
|
2007-12-09 07:27:41 +00:00
|
|
|
my $size = (stat($fh))[7];
|
|
|
|
($size % 24) == 0 or croak "inconsistent size: $size";
|
|
|
|
|
|
|
|
if ($size == 0) {
|
|
|
|
return undef;
|
|
|
|
}
|
|
|
|
|
|
|
|
my ($l, $u) = (0, $size - 24);
|
|
|
|
my ($r, $c, $buf);
|
|
|
|
|
|
|
|
while ($l <= $u) {
|
|
|
|
my $i = int(($l/24 + $u/24) / 2) * 24;
|
|
|
|
sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
|
|
|
|
sysread($fh, my $buf, 24) == 24 or croak "read: $!";
|
2009-08-13 05:20:02 +00:00
|
|
|
my ($r, $c) = unpack(rev_map_fmt, $buf);
|
2007-12-09 07:27:41 +00:00
|
|
|
|
|
|
|
if ($r < $rev) {
|
|
|
|
$l = $i + 24;
|
|
|
|
} elsif ($r > $rev) {
|
|
|
|
$u = $i - 24;
|
|
|
|
} else { # $r == $rev
|
2007-12-09 07:27:42 +00:00
|
|
|
return $c eq ('0' x 40) ? undef : $c;
|
2007-12-09 07:27:41 +00:00
|
|
|
}
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
2007-12-09 07:27:41 +00:00
|
|
|
undef;
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
|
|
|
|
2007-11-12 06:56:52 +00:00
|
|
|
# Finds the first svn revision that exists on (if $eq_ok is true) or
|
|
|
|
# before $rev for the current branch. It will not search any lower
|
|
|
|
# than $min_rev.  Returns the svn revision number and git commit hash
|
|
|
|
# if found, else (undef, undef).
|
2007-01-22 10:20:33 +00:00
|
|
|
sub find_rev_before {
|
2007-11-12 06:56:52 +00:00
|
|
|
my ($self, $rev, $eq_ok, $min_rev) = @_;
|
2007-01-22 10:20:33 +00:00
|
|
|
--$rev unless $eq_ok;
|
2007-11-12 06:56:52 +00:00
|
|
|
$min_rev ||= 1;
|
2009-06-04 03:45:51 +00:00
|
|
|
my $max_rev = $self->rev_map_max;
|
|
|
|
$rev = $max_rev if ($rev > $max_rev);
|
2007-11-12 06:56:52 +00:00
|
|
|
while ($rev >= $min_rev) {
|
2007-12-09 07:27:41 +00:00
|
|
|
if (my $c = $self->rev_map_get($rev)) {
|
2007-01-22 10:20:33 +00:00
|
|
|
return ($rev, $c);
|
|
|
|
}
|
|
|
|
--$rev;
|
|
|
|
}
|
|
|
|
return (undef, undef);
|
|
|
|
}
|
|
|
|
|
2007-11-12 06:56:52 +00:00
|
|
|
# Finds the first svn revision that exists on (if $eq_ok is true) or
|
|
|
|
# after $rev for the current branch. It will not search any higher
|
|
|
|
# than $max_rev.  Returns the svn revision number and git commit hash
|
|
|
|
# if found, else (undef, undef).
|
|
|
|
sub find_rev_after {
|
|
|
|
my ($self, $rev, $eq_ok, $max_rev) = @_;
|
|
|
|
++$rev unless $eq_ok;
|
2007-12-09 07:27:41 +00:00
|
|
|
$max_rev ||= $self->rev_map_max;
|
2007-11-12 06:56:52 +00:00
|
|
|
while ($rev <= $max_rev) {
|
2007-12-09 07:27:41 +00:00
|
|
|
if (my $c = $self->rev_map_get($rev)) {
|
2007-11-12 06:56:52 +00:00
|
|
|
return ($rev, $c);
|
|
|
|
}
|
|
|
|
++$rev;
|
|
|
|
}
|
|
|
|
return (undef, undef);
|
|
|
|
}
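# Usage sketch (revision numbers hypothetical): with rev_map entries
# for r5 and r9 only,
#
#   $self->find_rev_before(7, 1);    # (5, $commit_of_r5)
#   $self->find_rev_after(7, 1);     # (9, $commit_of_r9)
#   $self->find_rev_before(7, 1, 6); # (undef, undef) -- stops at $min_rev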
|
|
|
|
|
2007-01-11 20:14:21 +00:00
|
|
|
sub _new {
|
2007-01-19 01:50:01 +00:00
|
|
|
my ($class, $repo_id, $ref_id, $path) = @_;
|
|
|
|
unless (defined $repo_id && length $repo_id) {
|
|
|
|
$repo_id = $Git::SVN::default_repo_id;
|
|
|
|
}
|
|
|
|
unless (defined $ref_id && length $ref_id) {
|
2009-08-04 01:40:38 +00:00
|
|
|
$_prefix = '' unless defined($_prefix);
|
2009-08-12 03:14:27 +00:00
|
|
|
$_[2] = $ref_id =
|
|
|
|
"refs/remotes/$_prefix$Git::SVN::default_ref_id";
|
2007-01-19 01:50:01 +00:00
|
|
|
}
|
2008-06-29 03:40:32 +00:00
|
|
|
$_[1] = $repo_id;
|
2007-01-19 01:50:01 +00:00
|
|
|
my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
|
2009-08-12 03:14:27 +00:00
|
|
|
|
|
|
|
# Older repos imported by us used $GIT_DIR/svn/foo instead of
|
|
|
|
# $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
|
|
|
|
if ($ref_id =~ m{^refs/remotes/(.*)}) {
|
|
|
|
my $old_dir = "$ENV{GIT_DIR}/svn/$1";
|
|
|
|
if (-d $old_dir && ! -d $dir) {
|
|
|
|
$dir = $old_dir;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-01-19 01:50:01 +00:00
|
|
|
$_[3] = $path = '' unless (defined $path);
|
2009-08-12 03:14:27 +00:00
|
|
|
mkpath([$dir]);
|
2007-02-12 21:25:25 +00:00
|
|
|
bless {
|
|
|
|
ref_id => $ref_id, dir => $dir, index => "$dir/index",
|
2007-02-11 04:46:50 +00:00
|
|
|
path => $path, config => "$ENV{GIT_DIR}/svn/config",
|
2007-12-09 07:27:41 +00:00
|
|
|
map_root => "$dir/.rev_map", repo_id => $repo_id }, $class;
|
2007-02-12 21:25:25 +00:00
|
|
|
}
|
|
|
|
|
2007-12-09 07:27:41 +00:00
|
|
|
# for read-only access of old .rev_db formats
|
|
|
|
sub unlink_rev_db_symlink {
|
|
|
|
my ($self) = @_;
|
|
|
|
my $link = $self->rev_db_path;
|
|
|
|
$link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link";
|
|
|
|
if (-l $link) {
|
|
|
|
unlink $link or croak "unlink: $link failed!";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sub rev_db_path {
|
|
|
|
my ($self, $uuid) = @_;
|
|
|
|
my $db_path = $self->map_path($uuid);
|
|
|
|
$db_path =~ s{/\.rev_map\.}{/\.rev_db\.}
|
|
|
|
or croak "map_path: $db_path does not contain '/.rev_map.' !";
|
|
|
|
$db_path;
|
|
|
|
}
|
|
|
|
|
|
|
|
# the new replacement for .rev_db
|
|
|
|
sub map_path {
|
2007-02-12 21:25:25 +00:00
|
|
|
my ($self, $uuid) = @_;
|
|
|
|
$uuid ||= $self->ra_uuid;
|
2007-12-09 07:27:41 +00:00
|
|
|
"$self->{map_root}.$uuid";
|
2007-01-11 20:14:21 +00:00
|
|
|
}
|
|
|
|
|
2007-01-14 10:17:00 +00:00
|
|
|
sub uri_encode {
|
|
|
|
my ($f) = @_;
|
|
|
|
$f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
|
|
|
|
$f
|
|
|
|
}
|
2007-01-11 20:14:21 +00:00
|
|
|
|
2009-11-16 02:57:16 +00:00
|
|
|
sub uri_decode {
|
|
|
|
my ($f) = @_;
|
|
|
|
$f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
|
|
|
|
$f
|
|
|
|
}
|
|
|
|
|
2007-02-22 23:32:29 +00:00
|
|
|
sub remove_username {
|
|
|
|
$_[0] =~ s{^([^:]*://)[^@]+@}{$1};
|
|
|
|
}
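# Example (URL hypothetical): remove_username() strips the userinfo
# part of its argument in place:
#
#   my $u = 'svn+ssh://alice@svn.example.com/repo';
#   remove_username($u);   # $u is now 'svn+ssh://svn.example.com/repo'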
|
|
|
|
|
2007-01-04 08:45:03 +00:00
|
|
|
package Git::SVN::Prompt;
|
|
|
|
use strict;
|
|
|
|
use warnings;
|
|
|
|
require SVN::Core;
|
|
|
|
use vars qw/$_no_auth_cache $_username/;
|
|
|
|
|
|
|
|
sub simple {
|
2006-11-24 09:38:04 +00:00
|
|
|
my ($cred, $realm, $default_username, $may_save, $pool) = @_;
|
|
|
|
$may_save = undef if $_no_auth_cache;
|
|
|
|
$default_username = $_username if defined $_username;
|
|
|
|
if (defined $default_username && length $default_username) {
|
|
|
|
if (defined $realm && length $realm) {
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR "Authentication realm: $realm\n";
|
|
|
|
STDERR->flush;
|
2006-11-24 09:38:04 +00:00
|
|
|
}
|
|
|
|
$cred->username($default_username);
|
|
|
|
} else {
|
2007-01-04 08:45:03 +00:00
|
|
|
username($cred, $realm, $may_save, $pool);
|
2006-11-24 09:38:04 +00:00
|
|
|
}
|
|
|
|
$cred->password(_read_password("Password for '" .
|
|
|
|
$cred->username . "': ", $realm));
|
|
|
|
$cred->may_save($may_save);
|
|
|
|
$SVN::_Core::SVN_NO_ERROR;
|
|
|
|
}
|
|
|
|
|
2007-01-04 08:45:03 +00:00
|
|
|
sub ssl_server_trust {
|
2006-11-24 09:38:04 +00:00
|
|
|
my ($cred, $realm, $failures, $cert_info, $may_save, $pool) = @_;
|
|
|
|
$may_save = undef if $_no_auth_cache;
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR "Error validating server certificate for '$realm':\n";
|
2007-10-15 07:19:12 +00:00
|
|
|
{
|
|
|
|
no warnings 'once';
|
|
|
|
# All variables SVN::Auth::SSL::* are used only once,
|
|
|
|
# so we're shutting up Perl warnings about this.
|
|
|
|
if ($failures & $SVN::Auth::SSL::UNKNOWNCA) {
|
|
|
|
print STDERR " - The certificate is not issued ",
|
|
|
|
"by a trusted authority. Use the\n",
|
|
|
|
" fingerprint to validate ",
|
|
|
|
"the certificate manually!\n";
|
|
|
|
}
|
|
|
|
if ($failures & $SVN::Auth::SSL::CNMISMATCH) {
|
|
|
|
print STDERR " - The certificate hostname ",
|
|
|
|
"does not match.\n";
|
|
|
|
}
|
|
|
|
if ($failures & $SVN::Auth::SSL::NOTYETVALID) {
|
|
|
|
print STDERR " - The certificate is not yet valid.\n";
|
|
|
|
}
|
|
|
|
if ($failures & $SVN::Auth::SSL::EXPIRED) {
|
|
|
|
print STDERR " - The certificate has expired.\n";
|
|
|
|
}
|
|
|
|
if ($failures & $SVN::Auth::SSL::OTHER) {
|
|
|
|
print STDERR " - The certificate has ",
|
|
|
|
"an unknown error.\n";
|
|
|
|
}
|
|
|
|
} # no warnings 'once'
|
2007-01-16 04:15:55 +00:00
|
|
|
printf STDERR
|
|
|
|
"Certificate information:\n".
|
2006-11-24 09:38:04 +00:00
|
|
|
" - Hostname: %s\n".
|
|
|
|
" - Valid: from %s until %s\n".
|
|
|
|
" - Issuer: %s\n".
|
|
|
|
" - Fingerprint: %s\n",
|
|
|
|
map $cert_info->$_, qw(hostname valid_from valid_until
|
2007-01-16 04:15:55 +00:00
|
|
|
issuer_dname fingerprint);
|
2006-11-24 09:38:04 +00:00
|
|
|
my $choice;
|
|
|
|
prompt:
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR $may_save ?
|
2006-11-24 09:38:04 +00:00
|
|
|
"(R)eject, accept (t)emporarily or accept (p)ermanently? " :
|
|
|
|
"(R)eject or accept (t)emporarily? ";
|
2007-01-16 04:15:55 +00:00
|
|
|
STDERR->flush;
|
2006-11-24 09:38:04 +00:00
|
|
|
$choice = lc(substr(<STDIN> || 'R', 0, 1));
|
|
|
|
if ($choice =~ /^t$/i) {
|
|
|
|
$cred->may_save(undef);
|
|
|
|
} elsif ($choice =~ /^r$/i) {
|
|
|
|
return -1;
|
|
|
|
} elsif ($may_save && $choice =~ /^p$/i) {
|
|
|
|
$cred->may_save($may_save);
|
|
|
|
} else {
|
|
|
|
goto prompt;
|
|
|
|
}
|
|
|
|
$cred->accepted_failures($failures);
|
|
|
|
$SVN::_Core::SVN_NO_ERROR;
|
|
|
|
}
|
|
|
|
|
2007-01-04 08:45:03 +00:00
|
|
|
sub ssl_client_cert {
|
2006-11-24 09:38:04 +00:00
|
|
|
my ($cred, $realm, $may_save, $pool) = @_;
|
|
|
|
$may_save = undef if $_no_auth_cache;
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR "Client certificate filename: ";
|
|
|
|
STDERR->flush;
|
2006-11-24 09:38:04 +00:00
|
|
|
chomp(my $filename = <STDIN>);
|
|
|
|
$cred->cert_file($filename);
|
|
|
|
$cred->may_save($may_save);
|
|
|
|
$SVN::_Core::SVN_NO_ERROR;
|
|
|
|
}
|
|
|
|
|
2007-01-04 08:45:03 +00:00
|
|
|
sub ssl_client_cert_pw {
|
2006-11-24 09:38:04 +00:00
|
|
|
my ($cred, $realm, $may_save, $pool) = @_;
|
|
|
|
$may_save = undef if $_no_auth_cache;
|
|
|
|
$cred->password(_read_password("Password: ", $realm));
|
|
|
|
$cred->may_save($may_save);
|
|
|
|
$SVN::_Core::SVN_NO_ERROR;
|
|
|
|
}
|
|
|
|
|
2007-01-04 08:45:03 +00:00
|
|
|
sub username {
|
2006-11-24 09:38:04 +00:00
|
|
|
my ($cred, $realm, $may_save, $pool) = @_;
|
|
|
|
$may_save = undef if $_no_auth_cache;
|
|
|
|
if (defined $realm && length $realm) {
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR "Authentication realm: $realm\n";
|
2006-11-24 09:38:04 +00:00
|
|
|
}
|
|
|
|
my $username;
|
|
|
|
if (defined $_username) {
|
|
|
|
$username = $_username;
|
|
|
|
} else {
|
2007-01-16 04:15:55 +00:00
|
|
|
print STDERR "Username: ";
|
|
|
|
STDERR->flush;
|
2006-11-24 09:38:04 +00:00
|
|
|
chomp($username = <STDIN>);
|
|
|
|
}
|
|
|
|
$cred->username($username);
|
|
|
|
$cred->may_save($may_save);
|
|
|
|
$SVN::_Core::SVN_NO_ERROR;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub _read_password {
|
|
|
|
my ($prompt, $realm) = @_;
|
|
|
|
my $password = '';
|
2010-03-02 11:47:52 +00:00
|
|
|
if (exists $ENV{GIT_ASKPASS}) {
|
|
|
|
open(PH, "-|", $ENV{GIT_ASKPASS}, $prompt);
|
|
|
|
$password = <PH>;
|
|
|
|
$password =~ s/[\012\015]//; # \n\r
|
|
|
|
close(PH);
|
|
|
|
} else {
|
|
|
|
print STDERR $prompt;
|
|
|
|
STDERR->flush;
|
|
|
|
require Term::ReadKey;
|
|
|
|
Term::ReadKey::ReadMode('noecho');
|
|
|
|
while (defined(my $key = Term::ReadKey::ReadKey(0))) {
|
|
|
|
last if $key =~ /[\012\015]/; # \n\r
|
|
|
|
$password .= $key;
|
|
|
|
}
|
|
|
|
Term::ReadKey::ReadMode('restore');
|
|
|
|
print STDERR "\n";
|
|
|
|
STDERR->flush;
|
2006-11-24 09:38:04 +00:00
|
|
|
}
|
|
|
|
$password;
|
|
|
|
}
|
|
|
|
|
2006-11-28 05:44:48 +00:00
|
|
|
package SVN::Git::Fetcher;
|
2011-07-20 22:37:26 +00:00
|
|
|
use vars qw/@ISA $_ignore_regex $_preserve_empty_dirs $_placeholder_filename
|
|
|
|
@deleted_gpath %added_placeholder $repo_id/;
|
2006-11-28 05:44:48 +00:00
|
|
|
use strict;
|
|
|
|
use warnings;
|
|
|
|
use Carp qw/croak/;
|
2011-07-20 22:37:26 +00:00
|
|
|
use File::Basename qw/dirname/;
|
2006-11-28 05:44:48 +00:00
|
|
|
use IO::File qw//;
|
|
|
|
|
|
|
|
# file baton members: path, mode_a, mode_b, pool, fh, blob, base
|
|
|
|
sub new {
|
2009-02-11 09:56:58 +00:00
|
|
|
my ($class, $git_svn, $switch_path) = @_;
|
2006-11-28 05:44:48 +00:00
|
|
|
my $self = SVN::Delta::Editor->new;
|
|
|
|
bless $self, $class;
|
2009-01-12 00:51:10 +00:00
|
|
|
if (exists $git_svn->{last_commit}) {
|
|
|
|
$self->{c} = $git_svn->{last_commit};
|
2009-02-11 09:56:58 +00:00
|
|
|
$self->{empty_symlinks} =
|
|
|
|
_mark_empty_symlinks($git_svn, $switch_path);
|
2009-01-12 00:51:10 +00:00
|
|
|
}
|
2011-07-20 22:37:26 +00:00
|
|
|
|
|
|
|
# some options are read globally, but can be overridden locally
|
|
|
|
# per [svn-remote "..."] section. Command-line options will *NOT*
|
|
|
|
# override options set in an [svn-remote "..."] section
|
|
|
|
$repo_id = $git_svn->{repo_id};
|
|
|
|
my $k = "svn-remote.$repo_id.ignore-paths";
|
|
|
|
my $v = eval { command_oneline('config', '--get', $k) };
|
|
|
|
$self->{ignore_regex} = $v;
|
|
|
|
|
|
|
|
$k = "svn-remote.$repo_id.preserve-empty-dirs";
|
|
|
|
$v = eval { command_oneline('config', '--get', '--bool', $k) };
|
|
|
|
if ($v && $v eq 'true') {
|
|
|
|
$_preserve_empty_dirs = 1;
|
|
|
|
$k = "svn-remote.$repo_id.placeholder-filename";
|
|
|
|
$v = eval { command_oneline('config', '--get', $k) };
|
|
|
|
$_placeholder_filename = $v;
|
|
|
|
}
|
|
|
|
|
|
|
|
# Load the list of placeholder files added during previous invocations.
|
|
|
|
$k = "svn-remote.$repo_id.added-placeholder";
|
|
|
|
$v = eval { command_oneline('config', '--get-all', $k) };
|
|
|
|
if ($_preserve_empty_dirs && $v) {
|
|
|
|
# command() prints errors to stderr, so we only call it if
|
|
|
|
# command_oneline() succeeded.
|
|
|
|
my @v = command('config', '--get-all', $k);
|
|
|
|
$added_placeholder{ dirname($_) } = $_ foreach @v;
|
|
|
|
}
|
|
|
|
|
2006-12-12 22:47:00 +00:00
|
|
|
$self->{empty} = {};
|
|
|
|
$self->{dir_prop} = {};
|
|
|
|
$self->{file_prop} = {};
|
|
|
|
$self->{absent_dir} = {};
|
|
|
|
$self->{absent_file} = {};
|
2007-01-24 11:30:57 +00:00
|
|
|
$self->{gii} = $git_svn->tmp_index_do(sub { Git::IndexInfo->new });
|
2010-07-30 02:30:13 +00:00
|
|
|
$self->{pathnameencoding} = Git::config('svn.pathnameencoding');
|
2006-11-28 05:44:48 +00:00
|
|
|
$self;
|
|
|
|
}
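# The per-remote options read in new() above correspond to
# configuration along these lines (values are examples only):
#
#   [svn-remote "svn"]
#           url = http://svn.example.com/repo
#           fetch = trunk:refs/remotes/trunk
#           ignore-paths = ^doc/
#           preserve-empty-dirs = true
#           placeholder-filename = .gitignore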
|
|
|
|
|
2009-01-12 00:51:10 +00:00
|
|
|
# this uses the Ra object, so it must be called before do_{switch,update},
|
|
|
|
# not inside them (when the Git::SVN::Fetcher object is passed) to
|
|
|
|
# do_{switch,update}
|
|
|
|
sub _mark_empty_symlinks {
|
2009-02-11 09:56:58 +00:00
|
|
|
my ($git_svn, $switch_path) = @_;
|
2009-02-01 01:31:12 +00:00
|
|
|
my $bool = Git::config_bool('svn.brokenSymlinkWorkaround');
|
2009-02-28 03:40:16 +00:00
|
|
|
	return {} unless $bool;
|
2009-02-01 01:31:12 +00:00
|
|
|
|
2009-01-12 00:51:10 +00:00
|
|
|
my %ret;
|
|
|
|
my ($rev, $cmt) = $git_svn->last_rev_commit;
|
|
|
|
return {} unless ($rev && $cmt);
|
|
|
|
|
2009-02-01 01:31:12 +00:00
|
|
|
# allow the warning to be printed for each revision we fetch to
|
|
|
|
# ensure the user sees it. The user can also disable the workaround
|
|
|
|
# on the repository even while git svn is running and the next
|
|
|
|
# revision fetched will skip this expensive function.
|
|
|
|
my $printed_warning;
|
2009-01-12 00:51:10 +00:00
|
|
|
chomp(my $empty_blob = `git hash-object -t blob --stdin < /dev/null`);
|
|
|
|
my ($ls, $ctx) = command_output_pipe(qw/ls-tree -r -z/, $cmt);
|
|
|
|
local $/ = "\0";
|
2009-02-11 09:56:58 +00:00
|
|
|
my $pfx = defined($switch_path) ? $switch_path : $git_svn->{path};
|
2009-01-12 00:51:10 +00:00
|
|
|
$pfx .= '/' if length($pfx);
|
|
|
|
while (<$ls>) {
|
|
|
|
chomp;
|
|
|
|
s/\A100644 blob $empty_blob\t//o or next;
|
2009-02-01 01:31:12 +00:00
|
|
|
unless ($printed_warning) {
|
|
|
|
print STDERR "Scanning for empty symlinks, ",
|
|
|
|
"this may take a while if you have ",
|
|
|
|
"many empty files\n",
|
|
|
|
"You may disable this with `",
|
|
|
|
"git config svn.brokenSymlinkWorkaround ",
|
|
|
|
"false'.\n",
|
|
|
|
"This may be done in a different ",
|
|
|
|
"terminal without restarting ",
|
|
|
|
"git svn\n";
|
|
|
|
$printed_warning = 1;
|
|
|
|
}
|
2009-01-12 00:51:10 +00:00
|
|
|
my $path = $_;
|
|
|
|
my (undef, $props) =
|
|
|
|
$git_svn->ra->get_file($pfx.$path, $rev, undef);
|
|
|
|
if ($props->{'svn:special'}) {
|
|
|
|
$ret{$path} = 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
command_close_pipe($ls, $ctx);
|
|
|
|
\%ret;
|
|
|
|
}

# returns true if a given path is inside a ".git" directory
sub in_dot_git {
	$_[0] =~ m{(?:^|/)\.git(?:/|$)};
}

# return value: 0 -- don't ignore, 1 -- ignore
sub is_path_ignored {
	my ($self, $path) = @_;
	return 1 if in_dot_git($path);
	return 1 if defined($self->{ignore_regex}) &&
	            $path =~ m!$self->{ignore_regex}!;
	return 0 unless defined($_ignore_regex);
	return 1 if $path =~ m!$_ignore_regex!o;
	return 0;
}
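
# Remember the subdirectory of the SVN repository we are tracking so
# git_path() can strip it from incoming editor paths.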
sub set_path_strip {
	my ($self, $path) = @_;
	$self->{path_strip} = qr/^\Q$path\E(\/|$)/ if length $path;
}
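
# SVN::Delta::Editor callbacks.  open_root and open_directory only need
# to return a baton (a hashref carrying the path) for the editor driver
# to pass back to us in later callbacks.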
sub open_root {
	{ path => '' };
}

sub open_directory {
	my ($self, $path, $pb, $rev) = @_;
	{ path => $path };
}
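
# Map a path from the SVN editor into the path used in the git index:
# recode it if svn.pathnameencoding is set, and strip the tracked
# subdirectory prefix recorded by set_path_strip().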
sub git_path {
	my ($self, $path) = @_;
	if (my $enc = $self->{pathnameencoding}) {
		require Encode;
		Encode::from_to($path, 'UTF-8', $enc);
	}
	if ($self->{path_strip}) {
		$path =~ s!$self->{path_strip}!! or
			die "Failed to strip path '$path' ($self->{path_strip})\n";
	}
	$path;
}
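
# Editor callback for a deletion.  $path may be a file or a directory;
# directories are removed from the index entry by entry.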
sub delete_entry {
	my ($self, $path, $rev, $pb) = @_;
	return undef if $self->is_path_ignored($path);

	my $gpath = $self->git_path($path);
	return undef if ($gpath eq '');

	# remove entire directories.
	my ($tree) = (command('ls-tree', '-z', $self->{c}, "./$gpath")
	                 =~ /\A040000 tree ([a-f\d]{40})\t\Q$gpath\E\0/);
	if ($tree) {
		my ($ls, $ctx) = command_output_pipe(qw/ls-tree
		                                     -r --name-only -z/,
		                                     $tree);
		local $/ = "\0";
		while (<$ls>) {
			chomp;
			my $rmpath = "$gpath/$_";
			$self->{gii}->remove($rmpath);
			print "\tD\t$rmpath\n" unless $::_q;
		}
		print "\tD\t$gpath/\n" unless $::_q;
		command_close_pipe($ls, $ctx);
	} else {
		$self->{gii}->remove($gpath);
		print "\tD\t$gpath\n" unless $::_q;
	}

	# Don't add to @deleted_gpath if we're deleting a placeholder file.
	push @deleted_gpath, $gpath unless $added_placeholder{dirname($path)};
	$self->{empty}->{$path} = 0;
	undef;
}
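
# Editor callback for modifying an existing file: look up its current
# mode and blob in the last imported commit so apply_textdelta() has a
# base to patch.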
sub open_file {
	my ($self, $path, $pb, $rev) = @_;
	my ($mode, $blob);

	goto out if $self->is_path_ignored($path);

	my $gpath = $self->git_path($path);
	($mode, $blob) = (command('ls-tree', '-z', $self->{c}, "./$gpath")
	                     =~ /\A(\d{6}) blob ([a-f\d]{40})\t\Q$gpath\E\0/);
	unless (defined $mode && defined $blob) {
		die "$path was not found in commit $self->{c} (r$rev)\n";
	}
	if ($mode eq '100644' && $self->{empty_symlinks}->{$path}) {
		$mode = '120000';
	}
out:
	{ path => $path, mode_a => $mode, mode_b => $mode, blob => $blob,
	  pool => SVN::Pool->new, action => 'M' };
}
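
# Editor callback for a newly added file.  The parent directory is no
# longer empty, so any placeholder file we created for it can go away.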
sub add_file {
	my ($self, $path, $pb, $cp_path, $cp_rev) = @_;
	my $mode;

	if (!$self->is_path_ignored($path)) {
		my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
		delete $self->{empty}->{$dir};
		$mode = '100644';

		if ($added_placeholder{$dir}) {
			# Remove our placeholder file, if we created one.
			delete_entry($self, $added_placeholder{$dir})
				unless $path eq $added_placeholder{$dir};
			delete $added_placeholder{$dir};
		}
	}

	{ path => $path, mode_a => $mode, mode_b => $mode,
	  pool => SVN::Pool->new, action => 'A' };
}

sub add_directory {
	my ($self, $path, $cp_path, $cp_rev) = @_;
	goto out if $self->is_path_ignored($path);
	my $gpath = $self->git_path($path);
	if ($gpath eq '') {
		my ($ls, $ctx) = command_output_pipe(qw/ls-tree
		                                     -r --name-only -z/,
		                                     $self->{c});
		local $/ = "\0";
		while (<$ls>) {
			chomp;
			$self->{gii}->remove($_);
			print "\tD\t$_\n" unless $::_q;
			push @deleted_gpath, $gpath;
		}
		command_close_pipe($ls, $ctx);
		$self->{empty}->{$path} = 0;
	}
	my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
	delete $self->{empty}->{$dir};
	$self->{empty}->{$path} = 1;

	if ($added_placeholder{$dir}) {
		# Remove our placeholder file, if we created one.
		delete_entry($self, $added_placeholder{$dir});
		delete $added_placeholder{$dir};
	}

out:
	{ path => $path };
}

sub change_dir_prop {
	my ($self, $db, $prop, $value) = @_;
	return undef if $self->is_path_ignored($db->{path});
	$self->{dir_prop}->{$db->{path}} ||= {};
	$self->{dir_prop}->{$db->{path}}->{$prop} = $value;
	undef;
}

sub absent_directory {
	my ($self, $path, $pb) = @_;
	return undef if $self->is_path_ignored($path);
	$self->{absent_dir}->{$pb->{path}} ||= [];
	push @{$self->{absent_dir}->{$pb->{path}}}, $path;
	undef;
}

sub absent_file {
	my ($self, $path, $pb) = @_;
	return undef if $self->is_path_ignored($path);
	$self->{absent_file}->{$pb->{path}} ||= [];
	push @{$self->{absent_file}->{$pb->{path}}}, $path;
	undef;
}
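
# svn:executable and svn:special map directly onto git file modes
# (100755 and 120000); any other property is stashed in file_prop.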
sub change_file_prop {
	my ($self, $fb, $prop, $value) = @_;
	return undef if $self->is_path_ignored($fb->{path});
	if ($prop eq 'svn:executable') {
		if ($fb->{mode_b} != 120000) {
			$fb->{mode_b} = defined $value ? 100755 : 100644;
		}
	} elsif ($prop eq 'svn:special') {
		$fb->{mode_b} = defined $value ? 120000 : 100644;
	} else {
		$self->{file_prop}->{$fb->{path}} ||= {};
		$self->{file_prop}->{$fb->{path}}->{$prop} = $value;
	}
	undef;
}
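
# Editor callback for file content changes: reconstruct the base
# version of the file in a temp file (prefixed with "link " for
# symlinks, matching SVN's on-the-wire representation), then hand SVN a
# delta target that close_file() will read back and hash.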
sub apply_textdelta {
	my ($self, $fb, $exp) = @_;
	return undef if $self->is_path_ignored($fb->{path});
	my $fh = $::_repository->temp_acquire('svn_delta');
	# $fh gets auto-closed by SVN::TxDelta::apply() (but $base does
	# not), so dup() it for reading in close_file.
	open my $dup, '<&', $fh or croak $!;
	my $base = $::_repository->temp_acquire('git_blob');

	if ($fb->{blob}) {
		my ($base_is_link, $size);

		if ($fb->{mode_a} eq '120000' &&
		    ! $self->{empty_symlinks}->{$fb->{path}}) {
			print $base 'link ' or die "print $!\n";
			$base_is_link = 1;
		}
retry:
		$size = $::_repository->cat_blob($fb->{blob}, $base);
		die "Failed to read object $fb->{blob}" if ($size < 0);

		if (defined $exp) {
			seek $base, 0, 0 or croak $!;
			my $got = ::md5sum($base);
			if ($got ne $exp) {
				my $err = "Checksum mismatch: ".
				       "$fb->{path} $fb->{blob}\n" .
				       "expected: $exp\n" .
				       "     got: $got\n";
				if ($base_is_link) {
					warn $err,
					     "Retrying... (possibly ",
					     "a bad symlink from SVN)\n";
					$::_repository->temp_reset($base);
					$base_is_link = 0;
					goto retry;
				}
				die $err;
			}
		}
	}
	seek $base, 0, 0 or croak $!;
	$fb->{fh} = $fh;
	$fb->{base} = $base;
	[ SVN::TxDelta::apply($base, $dup, undef, $fb->{path}, $fb->{pool}) ];
}
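
# Editor callback run once a file's delta has been applied.  SVN stores
# symlinks as blobs reading "link TARGET", so the five-byte "link "
# prefix is stripped from mode-120000 entries before the contents are
# hashed into the object database and the index is updated.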
sub close_file {
	my ($self, $fb, $exp) = @_;
	return undef if $self->is_path_ignored($fb->{path});

	my $hash;
	my $path = $self->git_path($fb->{path});
	if (my $fh = $fb->{fh}) {
		if (defined $exp) {
			seek($fh, 0, 0) or croak $!;
			my $got = ::md5sum($fh);
			if ($got ne $exp) {
				die "Checksum mismatch: $path\n",
				    "expected: $exp\n    got: $got\n";
			}
		}
		if ($fb->{mode_b} == 120000) {
			sysseek($fh, 0, 0) or croak $!;
			my $rd = sysread($fh, my $buf, 5);

			if (!defined $rd) {
				croak "sysread: $!\n";
			} elsif ($rd == 0) {
				warn "$path has mode 120000",
				     " but it points to nothing\n",
				     "converting to an empty file with mode",
				     " 100644\n";
				$fb->{mode_b} = '100644';
			} elsif ($buf ne 'link ') {
				warn "$path has mode 120000",
				     " but is not a link\n";
			} else {
				my $tmp_fh = $::_repository->temp_acquire(
					'svn_hash');
				my $res;
				while ($res = sysread($fh, my $str, 1024)) {
					my $out = syswrite($tmp_fh, $str, $res);
					defined($out) && $out == $res
						or croak("write ",
							Git::temp_path($tmp_fh),
							": $!\n");
				}
				defined $res or croak $!;

				($fh, $tmp_fh) = ($tmp_fh, $fh);
				Git::temp_release($tmp_fh, 1);
			}
		}

		$hash = $::_repository->hash_and_insert_object(
				Git::temp_path($fh));
		$hash =~ /^[a-f\d]{40}$/ or die "not a sha1: $hash\n";

		Git::temp_release($fb->{base}, 1);
		Git::temp_release($fh, 1);
	} else {
		$hash = $fb->{blob} or die "no blob information\n";
	}
	$fb->{pool}->clear;
	$self->{gii}->update($fb->{mode_b}, $hash, $path) or croak $!;
	print "\t$fb->{action}\t$path\n" if $fb->{action} && ! $::_q;
	undef;
}

sub abort_edit {
	my $self = shift;
	$self->{nr} = $self->{gii}->{nr};
	delete $self->{gii};
	$self->SUPER::abort_edit(@_);
}

sub close_edit {
	my $self = shift;

	if ($_preserve_empty_dirs) {
		my @empty_dirs;

		# Any entry flagged as empty that also has an associated
		# dir_prop represents a newly created empty directory.
		foreach my $i (keys %{$self->{empty}}) {
			push @empty_dirs, $i if exists $self->{dir_prop}->{$i};
		}

		# Search for directories that have become empty due to
		# subsequent file deletes.
		push @empty_dirs, $self->find_empty_directories();

		# Finally, add a placeholder file to each empty directory.
		$self->add_placeholder_file($_) foreach (@empty_dirs);

		$self->stash_placeholder_list();
	}

	$self->{git_commit_ok} = 1;
	$self->{nr} = $self->{gii}->{nr};
	delete $self->{gii};
	$self->SUPER::close_edit(@_);
}
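
# Return the directories left empty by this revision's deletions:
# consider the parent of every deleted path, skip any that saw
# additions, and report those with no surviving entries.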
sub find_empty_directories {
	my ($self) = @_;
	my @empty_dirs;
	my %dirs = map { dirname($_) => 1 } @deleted_gpath;

	foreach my $dir (sort keys %dirs) {
		next if $dir eq ".";

		# If there have been any additions to this directory, there is
		# no reason to check if it is empty.
		my $skip_added = 0;
		foreach my $t (qw/dir_prop file_prop/) {
			foreach my $path (keys %{ $self->{$t} }) {
				if (exists $self->{$t}->{dirname($path)}) {
					$skip_added = 1;
					last;
				}
			}
			last if $skip_added;
		}
		next if $skip_added;

		# Use `git ls-tree` to get the filenames of this directory
		# that existed prior to this particular commit.
		my $ls = command('ls-tree', '-z', '--name-only',
		                 $self->{c}, "$dir/");
		my %files = map { $_ => 1 } split(/\0/, $ls);

		# Remove the filenames that were deleted during this commit.
		delete $files{$_} foreach (@deleted_gpath);

		# Report the directory if there are no filenames left.
		push @empty_dirs, $dir unless (scalar %files);
	}
	@empty_dirs;
}
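
# Write an empty placeholder blob into the index so git can represent
# the otherwise-empty directory.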
sub add_placeholder_file {
	my ($self, $dir) = @_;
	my $path = "$dir/$_placeholder_filename";
	my $gpath = $self->git_path($path);

	my $fh = $::_repository->temp_acquire($gpath);
	my $hash = $::_repository->hash_and_insert_object(Git::temp_path($fh));
	Git::temp_release($fh, 1);
	$self->{gii}->update('100644', $hash, $gpath) or croak $!;

	# The directory should no longer be considered empty.
	delete $self->{empty}->{$dir} if exists $self->{empty}->{$dir};

	# Keep track of any placeholder files we create.
	$added_placeholder{$dir} = $path;
}
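
# Persist the placeholder list under svn-remote.$repo_id.added-placeholder
# so the next invocation can reload it (see the constructor above).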
sub stash_placeholder_list {
	my ($self) = @_;
	my $k = "svn-remote.$repo_id.added-placeholder";
	my $v = eval { command_oneline('config', '--get-all', $k) };
	command_noisy('config', '--unset-all', $k) if $v;
	foreach (values %added_placeholder) {
		command_noisy('config', '--add', $k, $_);
	}
}

package SVN::Git::Editor;
use vars qw/@ISA $_rmdir $_cp_similarity $_find_copies_harder $_rename_limit/;
use strict;
use warnings;
use Carp qw/croak/;
use IO::File;
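
# The commit editor: drives an SVN commit by replaying the difference
# between two git trees (tree_a -> tree_b) against the remote
# repository.  $opts must supply svn_path, r, ra, tree_a, tree_b, log
# and editor_cb.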
sub new {
	my ($class, $opts) = @_;
	foreach (qw/svn_path r ra tree_a tree_b log editor_cb/) {
		die "$_ required!\n" unless (defined $opts->{$_});
	}

	my $pool = SVN::Pool->new;
	my $mods = generate_diff($opts->{tree_a}, $opts->{tree_b});
	my $types = check_diff_paths($opts->{ra}, $opts->{svn_path},
	                             $opts->{r}, $mods);

	# $opts->{ra} functions should not be used after this:
	my @ce = $opts->{ra}->get_commit_editor($opts->{log},
	                                        $opts->{editor_cb}, $pool);
	my $self = SVN::Delta::Editor->new(@ce, $pool);
	bless $self, $class;
	foreach (qw/svn_path r tree_a tree_b/) {
		$self->{$_} = $opts->{$_};
	}
	$self->{url} = $opts->{ra}->{url};
	$self->{mods} = $mods;
	$self->{types} = $types;
	$self->{pool} = $pool;
	$self->{bat} = { '' => $self->open_root($self->{r}, $self->{pool}) };
	$self->{rm} = { };
	$self->{path_prefix} = length $self->{svn_path} ?
	                       "$self->{svn_path}/" : '';
	$self->{config} = $opts->{config};
	$self->{mergeinfo} = $opts->{mergeinfo};
	return $self;
}
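
# Parse `git diff-tree -z -r -C` output between the two trees into a
# list of hashrefs describing each modification (modes, sha1s, change
# type, and file names for copies/renames).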
sub generate_diff {
	my ($tree_a, $tree_b) = @_;
	my @diff_tree = qw(diff-tree -z -r);
	if ($_cp_similarity) {
		push @diff_tree, "-C$_cp_similarity";
	} else {
		push @diff_tree, '-C';
	}
	push @diff_tree, '--find-copies-harder' if $_find_copies_harder;
	push @diff_tree, "-l$_rename_limit" if defined $_rename_limit;
	push @diff_tree, $tree_a, $tree_b;
	my ($diff_fh, $ctx) = command_output_pipe(@diff_tree);
	local $/ = "\0";
	my $state = 'meta';
	my @mods;
	while (<$diff_fh>) {
		chomp $_; # this gets rid of the trailing "\0"
		if ($state eq 'meta' && /^:(\d{6})\s(\d{6})\s
		                        ($::sha1)\s($::sha1)\s
		                        ([MTCRAD])\d*$/xo) {
			push @mods, { mode_a => $1, mode_b => $2,
			              sha1_a => $3, sha1_b => $4,
			              chg => $5 };
			if ($5 =~ /^(?:C|R)$/) {
				$state = 'file_a';
			} else {
				$state = 'file_b';
			}
		} elsif ($state eq 'file_a') {
			my $x = $mods[$#mods] or croak "Empty array\n";
			if ($x->{chg} !~ /^(?:C|R)$/) {
				croak "Error parsing $_, $x->{chg}\n";
			}
			$x->{file_a} = $_;
			$state = 'file_b';
		} elsif ($state eq 'file_b') {
			my $x = $mods[$#mods] or croak "Empty array\n";
			if (exists $x->{file_a} && $x->{chg} !~ /^(?:C|R)$/) {
				croak "Error parsing $_, $x->{chg}\n";
			}
			if (!exists $x->{file_a} && $x->{chg} =~ /^(?:C|R)$/) {
				croak "Error parsing $_, $x->{chg}\n";
			}
			$x->{file_b} = $_;
			$state = 'meta';
		} else {
			croak "Error parsing $_\n";
		}
	}
	command_close_pipe($diff_fh, $ctx);
	\@mods;
}
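
# Query the SVN server (check_path) for the node kind of every parent
# directory touched by the diff, caching results per path component;
# the editor later uses this to know which directories already exist on
# the server.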
sub check_diff_paths {
	my ($ra, $pfx, $rev, $mods) = @_;
	my %types;
	$pfx .= '/' if length $pfx;

	sub type_diff_paths {
		my ($ra, $types, $path, $rev) = @_;
		my @p = split m#/+#, $path;
		my $c = shift @p;
		unless (defined $types->{$c}) {
			$types->{$c} = $ra->check_path($c, $rev);
		}
		while (@p) {
			$c .= '/' . shift @p;
			next if defined $types->{$c};
			$types->{$c} = $ra->check_path($c, $rev);
		}
	}

	foreach my $m (@$mods) {
		foreach my $f (qw/file_a file_b/) {
			next unless defined $m->{$f};
			my ($dir) = ($m->{$f} =~ m#^(.*?)/?(?:[^/]+)$#);
			if (length $pfx.$dir && ! defined $types{$dir}) {
				type_diff_paths($ra, \%types, $pfx.$dir, $rev);
			}
		}
	}
	\%types;
}

sub split_path {
	return ($_[0] =~ m#^(.*?)/?([^/]+)$#);
}

sub repo_path {
	my ($self, $path) = @_;
	if (my $enc = $self->{pathnameencoding}) {
		require Encode;
		Encode::from_to($path, $enc, 'UTF-8');
	}
	$self->{path_prefix}.(defined $path ? $path : '');
}
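
# Build the full repository URL for a path, percent-encoding unsafe
# characters for http(s) URLs.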
sub url_path {
	my ($self, $path) = @_;
	if ($self->{url} =~ m#^https?://#) {
		$path =~ s!([^~a-zA-Z0-9_./-])!uc sprintf("%%%02x",ord($1))!eg;
	}
	$self->{url} . '/' . $self->repo_path($path);
}
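
# Delete directories that this commit has left empty (only invoked when
# $_rmdir is set): every parent of a removed path is a candidate, any
# candidate still containing files in tree_b is dropped, and the
# remaining ones are deleted deepest-first.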
sub rmdirs {
	my ($self) = @_;
	my $rm = $self->{rm};
	delete $rm->{''}; # we never delete the url we're tracking
	return unless %$rm;

	foreach (keys %$rm) {
		my @d = split m#/#, $_;
		my $c = shift @d;
		$rm->{$c} = 1;
		while (@d) {
			$c .= '/' . shift @d;
			$rm->{$c} = 1;
		}
	}
	delete $rm->{$self->{svn_path}};
	delete $rm->{''}; # we never delete the url we're tracking
	return unless %$rm;

	my ($fh, $ctx) = command_output_pipe(qw/ls-tree --name-only -r -z/,
	                                     $self->{tree_b});
	local $/ = "\0";
	while (<$fh>) {
		chomp;
		my @dn = split m#/#, $_;
		while (pop @dn) {
			delete $rm->{join '/', @dn};
		}
		unless (%$rm) {
			close $fh;
			return;
		}
	}
	command_close_pipe($fh, $ctx);

	my ($r, $p, $bat) = ($self->{r}, $self->{pool}, $self->{bat});
	foreach my $d (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$rm) {
		$self->close_directory($bat->{$d}, $p);
		my ($dn) = ($d =~ m#^(.*?)/?(?:[^/]+)$#);
		print "\tD+\t$d/\n" unless $::_q;
		$self->SUPER::delete_entry($d, $r, $bat->{$dn}, $p);
		delete $bat->{$d};
	}
}
|
|
|
|
|
|
|
|
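
# Return a baton for $full_path: add_directory() if SVN does not know
# the path yet (or it is scheduled for deletion in this commit),
# open_directory() if it already is a directory, and fatal otherwise.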
sub open_or_add_dir {
	my ($self, $full_path, $baton, $deletions) = @_;
	my $t = $self->{types}->{$full_path};
	if (!defined $t) {
		die "$full_path not known in r$self->{r} or we have a bug!\n";
	}
	{
		no warnings 'once';
		# SVN::Node::none and SVN::Node::file are used only once,
		# so we're shutting up Perl's warnings about them.
		if ($t == $SVN::Node::none || defined($deletions->{$full_path})) {
			return $self->add_directory($full_path, $baton,
			    undef, -1, $self->{pool});
		} elsif ($t == $SVN::Node::dir) {
			return $self->open_directory($full_path, $baton,
			    $self->{r}, $self->{pool});
		} # no warnings 'once'
		print STDERR "$full_path already exists in repository at ",
		    "r$self->{r} and it is not a directory (",
		    ($t == $SVN::Node::file ? 'file' : 'unknown'),"/$t)\n";
	} # no warnings 'once'
	exit 1;
}
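
# Open (or add) each directory component leading up to $path, memoizing
# the batons in $self->{bat} so every directory is opened at most once
# per commit.  Returns the baton for the deepest directory.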
sub ensure_path {
	my ($self, $path, $deletions) = @_;
	my $bat = $self->{bat};
	my $repo_path = $self->repo_path($path);
	return $bat->{''} unless (length $repo_path);

	my @p = split m#/+#, $repo_path;
	my $c = shift @p;
	$bat->{$c} ||= $self->open_or_add_dir($c, $bat->{''}, $deletions);
	while (@p) {
		my $c0 = $c;
		$c .= '/' . shift @p;
		$bat->{$c} ||= $self->open_or_add_dir($c, $bat->{$c0}, $deletions);
	}
	return $bat->{$c};
}
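
# The next three subroutines emulate the command-line svn client's
# [auto-props] handling for newly added files.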
# Subroutine to convert a globbing pattern to a regular expression.
# From the Perl Cookbook.
sub glob2pat {
	my $globstr = shift;
	my %patmap = ('*' => '.*', '?' => '.', '[' => '[', ']' => ']');
	$globstr =~ s{(.)} { $patmap{$1} || "\Q$1" }ge;
	return '^' . $globstr . '$';
}
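
# For example, glob2pat('*.c') returns '^.*\.c$'.

# check_autoprop() is called once per [auto-props] entry: $pattern is
# the entry's glob and $properties its semicolon-separated 'name=value'
# list.  When $file matches the glob, each property is set on $fbat.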
sub check_autoprop {
	my ($self, $pattern, $properties, $file, $fbat) = @_;
	# Convert the globbing pattern to a regular expression.
	my $regex = glob2pat($pattern);
	# Check if the pattern matches the file name.
	if ($file =~ m/($regex)/) {
		# Parse the list of properties to set.
		my @props = split(/;/, $properties);
		foreach my $prop (@props) {
			# Parse 'name=value' syntax and set the property.
			if ($prop =~ /([^=]+)=(.*)/) {
				my ($n,$v) = ($1,$2);
				for ($n, $v) {
					s/^\s+//; s/\s+$//;
				}
				$self->change_file_prop($fbat, $n, $v);
			}
		}
	}
}
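
# Check [miscellany]/enable-auto-props in the svn configuration and,
# if it is enabled, run every [auto-props] entry through
# check_autoprop() for the file being added.  For example, with
# something like this in ~/.subversion/config:
#
#   [miscellany]
#   enable-auto-props = yes
#
#   [auto-props]
#   *.sh = svn:executable=*;svn:eol-style=native
#
# adding any *.sh file also sets those two properties on it.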
sub apply_autoprops {
	my ($self, $file, $fbat) = @_;
	my $conf_t = ${$self->{config}}{'config'};
	no warnings 'once';
	# Check [miscellany]/enable-auto-props in svn configuration.
	if (SVN::_Core::svn_config_get_bool(
		$conf_t,
		$SVN::_Core::SVN_CONFIG_SECTION_MISCELLANY,
		$SVN::_Core::SVN_CONFIG_OPTION_ENABLE_AUTO_PROPS,
		0)) {
		# Auto-props are enabled. Enumerate them to look for matches.
		my $callback = sub {
			$self->check_autoprop($_[0], $_[1], $file, $fbat);
		};
		SVN::_Core::svn_config_enumerate(
			$conf_t,
			$SVN::_Core::SVN_CONFIG_SECTION_AUTO_PROPS,
			$callback);
	}
}
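
# Each of the single-letter subroutines below handles one parsed diff
# entry ($m), keyed by its git status letter: A (added), C (copied),
# R (renamed), M (modified) and T (type changed).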
sub A {
	my ($self, $m, $deletions) = @_;
	my ($dir, $file) = split_path($m->{file_b});
	my $pbat = $self->ensure_path($dir, $deletions);
	my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
				   undef, -1);
	print "\tA\t$m->{file_b}\n" unless $::_q;
	$self->apply_autoprops($file, $fbat);
	$self->chg_file($fbat, $m);
	$self->close_file($fbat,undef,$self->{pool});
}
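
# Copies pass the source url and revision to add_file() so the server
# records SVN copy history; chg_file() then uploads the new contents.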
sub C {
	my ($self, $m, $deletions) = @_;
	my ($dir, $file) = split_path($m->{file_b});
	my $pbat = $self->ensure_path($dir, $deletions);
	my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
				   $self->url_path($m->{file_a}), $self->{r});
	print "\tC\t$m->{file_a} => $m->{file_b}\n" unless $::_q;
	$self->chg_file($fbat, $m);
	$self->close_file($fbat,undef,$self->{pool});
}
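
# Deletions also record the parent directory in $self->{rm} so that
# rmdirs() can later prune directories this commit left empty.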
sub delete_entry {
	my ($self, $path, $pbat) = @_;
	my $rpath = $self->repo_path($path);
	my ($dir, $file) = split_path($rpath);
	$self->{rm}->{$dir} = 1;
	$self->SUPER::delete_entry($rpath, $self->{r}, $pbat, $self->{pool});
}
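
# A rename is a copy with history (as in C) followed by deletion of
# the old path.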
sub R {
	my ($self, $m, $deletions) = @_;
	my ($dir, $file) = split_path($m->{file_b});
	my $pbat = $self->ensure_path($dir, $deletions);
	my $fbat = $self->add_file($self->repo_path($m->{file_b}), $pbat,
				   $self->url_path($m->{file_a}), $self->{r});
	print "\tR\t$m->{file_a} => $m->{file_b}\n" unless $::_q;
	$self->apply_autoprops($file, $fbat);
	$self->chg_file($fbat, $m);
	$self->close_file($fbat,undef,$self->{pool});

	($dir, $file) = split_path($m->{file_a});
	$pbat = $self->ensure_path($dir, $deletions);
	$self->delete_entry($m->{file_a}, $pbat);
}
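
# Modifications reuse the existing file: open_file() instead of
# add_file(), then upload the new contents.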
sub M {
	my ($self, $m, $deletions) = @_;
	my ($dir, $file) = split_path($m->{file_b});
	my $pbat = $self->ensure_path($dir, $deletions);
	my $fbat = $self->open_file($self->repo_path($m->{file_b}),
				    $pbat,$self->{r},$self->{pool});
	print "\t$m->{chg}\t$m->{file_b}\n" unless $::_q;
	$self->chg_file($fbat, $m);
	$self->close_file($fbat,undef,$self->{pool});
}
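
# Type changes are handled like modifications; _chg_file_get_blob()
# below toggles svn:special when a file turns into a symlink or back.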
sub T { shift->M(@_) }
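
# Wrap the inherited property-change calls so callers need not pass
# $self->{pool} themselves.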
sub change_file_prop {
	my ($self, $fbat, $pname, $pval) = @_;
	$self->SUPER::change_file_prop($fbat, $pname, $pval, $self->{pool});
}
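
# Likewise for directory batons.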
sub change_dir_prop {
	my ($self, $pbat, $pname, $pval) = @_;
	$self->SUPER::change_dir_prop($pbat, $pname, $pval, $self->{pool});
}
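
# Write blob $which ('a' or 'b') of the diff entry $m to a temporary
# file, prefixing symlink blobs with 'link ' and toggling svn:special
# as needed, and return the filehandle plus the data's MD5 checksum.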
sub _chg_file_get_blob ($$$$) {
|
|
|
|
my ($self, $fbat, $m, $which) = @_;
|
2008-11-12 16:38:06 +00:00
|
|
|
my $fh = $::_repository->temp_acquire("git_blob_$which");
|
2008-08-31 15:45:04 +00:00
|
|
|
if ($m->{"mode_$which"} =~ /^120/) {
|
git-svn: add support for Perl SVN::* libraries
This means we no longer have to deal with having bloated SVN
working copies around and we get a nice performance increase as
well because we don't have to exec the SVN binary and start a
new server connection each time.
Of course we have to manually manage memory with SVN::Pool
whenever we can, and hack around cases where SVN just eats
memory despite pools (I blame Perl, too). I would like to
keep memory usage as stable as possible during long fetch/commit
processes since I still use computers with only 256-512M RAM.
commit should always be faster with the SVN library code. The
SVN::Delta interface is leaky (or I'm not using it with pools
correctly), so I'm forking on every commit, but that doesn't
seem to hurt performance too much (at least on normal Unix/Linux
systems where fork() is pretty cheap).
fetch should be faster in most common cases, but probably not all.
fetches will be faster where client/server delta generation is
the bottleneck and not bandwidth. Of course, full-files are
generated server-side via deltas, too. Full files are always
transferred when they're updated, just like git-svnimport and
unlike command-line svn. I'm also hacking around memory leaks
(see comments) here by using some more forks.
I've tested fetch with http://, https://, file://, and svn://
repositories, so we should be reasonably covered in terms of
error handling for fetching.
Of course, we'll keep plain command-line svn compatibility as a
fallback for people running SVN 1.1 (I'm looking into library
support for 1.1.x SVN, too). If you want to force command-line
SVN usage, set GIT_SVN_NO_LIB=1 in your environment.
We also require two simultaneous connections (just like
git-svnimport), but this shouldn't be a problem for most
servers.
Less important commands:
show-ignore is slower because it requires repository
access, but -r/--revision <num> can be specified.
graft-branches may use more memory, but it's a
short-term process and is funky-filename-safe.
Signed-off-by: Eric Wong <normalperson@yhbt.net>
2006-06-12 22:23:48 +00:00
|
|
|
print $fh 'link ' or croak $!;
|
|
|
|
$self->change_file_prop($fbat,'svn:special','*');
|
2008-08-31 15:45:04 +00:00
|
|
|
} elsif ($m->{mode_a} =~ /^120/ && $m->{"mode_$which"} !~ /^120/) {
|
git-svn: add support for Perl SVN::* libraries
This means we no longer have to deal with having bloated SVN
working copies around and we get a nice performance increase as
well because we don't have to exec the SVN binary and start a
new server connection each time.
Of course we have to manually manage memory with SVN::Pool
whenever we can, and hack around cases where SVN just eats
memory despite pools (I blame Perl, too). I would like to
keep memory usage as stable as possible during long fetch/commit
processes since I still use computers with only 256-512M RAM.
commit should always be faster with the SVN library code. The
SVN::Delta interface is leaky (or I'm not using it with pools
correctly), so I'm forking on every commit, but that doesn't
seem to hurt performance too much (at least on normal Unix/Linux
systems where fork() is pretty cheap).
fetch should be faster in most common cases, but probably not all.
fetches will be faster where client/server delta generation is
the bottleneck and not bandwidth. Of course, full-files are
generated server-side via deltas, too. Full files are always
transferred when they're updated, just like git-svnimport and
unlike command-line svn. I'm also hacking around memory leaks
(see comments) here by using some more forks.
I've tested fetch with http://, https://, file://, and svn://
repositories, so we should be reasonably covered in terms of
error handling for fetching.
Of course, we'll keep plain command-line svn compatibility as a
fallback for people running SVN 1.1 (I'm looking into library
support for 1.1.x SVN, too). If you want to force command-line
SVN usage, set GIT_SVN_NO_LIB=1 in your environment.
We also require two simultaneous connections (just like
git-svnimport), but this shouldn't be a problem for most
servers.
Less important commands:
show-ignore is slower because it requires repository
access, but -r/--revision <num> can be specified.
graft-branches may use more memory, but it's a
short-term process and is funky-filename-safe.
Signed-off-by: Eric Wong <normalperson@yhbt.net>
2006-06-12 22:23:48 +00:00
|
|
|
$self->change_file_prop($fbat,'svn:special',undef);
|
|
|
|
}
|
2008-08-31 15:45:04 +00:00
|
|
|
my $blob = $m->{"sha1_$which"};
|
|
|
|
return ($fh,) if ($blob =~ /^0{40}$/);
|
|
|
|
my $size = $::_repository->cat_blob($blob, $fh);
|
|
|
|
croak "Failed to read object $blob" if ($size < 0);
|
2006-06-12 22:23:48 +00:00
|
|
|
$fh->flush == 0 or croak $!;
|
|
|
|
seek $fh, 0, 0 or croak $!;
|
|
|
|
|
2007-11-22 19:18:00 +00:00
|
|
|
my $exp = ::md5sum($fh);
|
2006-06-12 22:23:48 +00:00
|
|
|
seek $fh, 0, 0 or croak $!;
|
2008-08-31 15:45:04 +00:00
|
|
|
return ($fh, $exp);
|
|
|
|
}
|
2006-06-12 22:23:48 +00:00
|
|
|
|
2008-08-31 15:45:04 +00:00
|
|
|
sub chg_file {
|
|
|
|
my ($self, $fbat, $m) = @_;
|
|
|
|
if ($m->{mode_b} =~ /755$/ && $m->{mode_a} !~ /755$/) {
|
|
|
|
$self->change_file_prop($fbat,'svn:executable','*');
|
|
|
|
} elsif ($m->{mode_b} !~ /755$/ && $m->{mode_a} =~ /755$/) {
|
|
|
|
$self->change_file_prop($fbat,'svn:executable',undef);
|
|
|
|
}
|
2008-08-31 15:47:09 +00:00
|
|
|
my ($fh_a, $exp_a) = _chg_file_get_blob $self, $fbat, $m, 'a';
|
|
|
|
my ($fh_b, $exp_b) = _chg_file_get_blob $self, $fbat, $m, 'b';
|
2006-10-14 22:48:35 +00:00
|
|
|
my $pool = SVN::Pool->new;
|
2008-08-31 15:47:09 +00:00
|
|
|
my $atd = $self->apply_textdelta($fbat, $exp_a, $pool);
|
|
|
|
if (-s $fh_a) {
|
|
|
|
my $txstream = SVN::TxDelta::new ($fh_a, $fh_b, $pool);
|
2008-09-01 02:45:07 +00:00
|
|
|
my $res = SVN::TxDelta::send_txstream($txstream, @$atd, $pool);
|
|
|
|
if (defined $res) {
|
|
|
|
die "Unexpected result from send_txstream: $res\n",
|
|
|
|
"(SVN::Core::VERSION: $SVN::Core::VERSION)\n";
|
|
|
|
}
|
2008-08-31 15:47:09 +00:00
|
|
|
} else {
|
|
|
|
my $got = SVN::TxDelta::send_stream($fh_b, @$atd, $pool);
|
|
|
|
die "Checksum mismatch\nexpected: $exp_b\ngot: $got\n"
|
|
|
|
if ($got ne $exp_b);
|
|
|
|
}
|
|
|
|
Git::temp_release($fh_b, 1);
|
|
|
|
Git::temp_release($fh_a, 1);
|
2006-10-14 22:48:35 +00:00
|
|
|
$pool->clear;
|
2006-06-12 22:23:48 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub D {
|
2012-02-20 14:17:54 +00:00
|
|
|
my ($self, $m, $deletions) = @_;
|
2006-06-12 22:23:48 +00:00
|
|
|
my ($dir, $file) = split_path($m->{file_b});
|
2012-02-20 14:17:54 +00:00
|
|
|
my $pbat = $self->ensure_path($dir, $deletions);
|
2007-01-14 06:35:53 +00:00
|
|
|
print "\tD\t$m->{file_b}\n" unless $::_q;
|
2006-06-12 22:23:48 +00:00
|
|
|
$self->delete_entry($m->{file_b}, $pbat);
|
|
|
|
}
|
|
|
|
|
|
|
|
sub close_edit {
|
|
|
|
my ($self) = @_;
|
|
|
|
my ($p,$bat) = ($self->{pool}, $self->{bat});
|
|
|
|
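# close the deepest directory batons first (keys sorted by their '/'
# count, descending) so every child is closed before its parent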
foreach (sort { $b =~ tr#/#/# <=> $a =~ tr#/#/# } keys %$bat) {
|
2007-05-19 09:58:37 +00:00
|
|
|
next if $_ eq '';
|
2006-06-12 22:23:48 +00:00
|
|
|
$self->close_directory($bat->{$_}, $p);
|
|
|
|
}
|
2007-05-19 09:58:37 +00:00
|
|
|
$self->close_directory($bat->{''}, $p);
|
2006-06-12 22:23:48 +00:00
|
|
|
$self->SUPER::close_edit($p);
|
|
|
|
$p->clear;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub abort_edit {
|
|
|
|
my ($self) = @_;
|
|
|
|
$self->SUPER::abort_edit($self->{pool});
|
2007-01-27 22:33:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub DESTROY {
|
|
|
|
my $self = shift;
|
|
|
|
$self->SUPER::DESTROY(@_);
|
2006-06-12 22:23:48 +00:00
|
|
|
$self->{pool}->clear;
|
|
|
|
}
|
|
|
|
|
2007-01-14 06:35:53 +00:00
|
|
|
# this drives the editor
|
|
|
|
sub apply_diff {
|
2007-01-27 22:33:08 +00:00
|
|
|
my ($self) = @_;
|
|
|
|
my $mods = $self->{mods};
|
2012-02-09 20:52:22 +00:00
|
|
|
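# fixed processing order for change types: deletions first, then
# copies, renames, adds, modifications, and type changes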
my %o = ( D => 0, C => 1, R => 2, A => 3, M => 4, T => 5 );
|
2012-02-20 14:17:54 +00:00
|
|
|
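# collect every path deleted by this change set up front; the handlers
# below pass the hash on to ensure_path()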
my %deletions;
|
|
|
|
|
|
|
|
foreach my $m (@$mods) {
|
|
|
|
if ($m->{chg} eq "D") {
|
|
|
|
$deletions{$m->{file_b}} = 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-01-27 09:32:00 +00:00
|
|
|
foreach my $m (sort { $o{$a->{chg}} <=> $o{$b->{chg}} } @$mods) {
|
2007-01-14 06:35:53 +00:00
|
|
|
my $f = $m->{chg};
|
|
|
|
if (defined $o{$f}) {
|
2012-02-20 14:17:54 +00:00
|
|
|
$self->$f($m, \%deletions);
|
2007-01-14 06:35:53 +00:00
|
|
|
} else {
|
2007-10-16 14:36:52 +00:00
|
|
|
fatal("Invalid change type: $f");
|
2007-01-14 06:35:53 +00:00
|
|
|
}
|
|
|
|
}
|
2010-09-25 03:51:50 +00:00
|
|
|
|
|
|
|
if (defined($self->{mergeinfo})) {
|
|
|
|
$self->change_dir_prop($self->{bat}{''}, "svn:mergeinfo",
|
|
|
|
$self->{mergeinfo});
|
|
|
|
}
|
2007-01-29 08:07:49 +00:00
|
|
|
$self->rmdirs if $_rmdir;
|
2011-12-01 02:37:27 +00:00
|
|
|
if (@$mods == 0 && !defined($self->{mergeinfo})) {
|
2007-01-14 06:35:53 +00:00
|
|
|
$self->abort_edit;
|
|
|
|
} else {
|
|
|
|
$self->close_edit;
|
|
|
|
}
|
2007-01-27 09:32:00 +00:00
|
|
|
return scalar @$mods;
|
2007-01-14 06:35:53 +00:00
|
|
|
}
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
package Git::SVN::Ra;
|
2011-10-10 23:27:37 +00:00
|
|
|
use vars qw/@ISA $config_dir $_ignore_refs_regex $_log_window_size/;
|
2007-01-10 09:22:38 +00:00
|
|
|
use strict;
|
|
|
|
use warnings;
|
2007-09-07 11:00:40 +00:00
|
|
|
my ($ra_invalid, $can_do_switch, %ignored_err, $RA);
|
2007-01-10 09:22:38 +00:00
|
|
|
|
|
|
|
BEGIN {
|
|
|
|
# enforce temporary pool usage for some simple functions
|
2007-06-15 03:43:59 +00:00
|
|
|
no strict 'refs';
|
2009-01-25 23:35:52 +00:00
|
|
|
for my $f (qw/rev_proplist get_latest_revnum get_uuid get_repos_root
|
|
|
|
get_file/) {
|
2007-06-15 03:43:59 +00:00
|
|
|
my $SUPER = "SUPER::$f";
|
|
|
|
*$f = sub {
|
|
|
|
my $self = shift;
|
|
|
|
my $pool = SVN::Pool->new;
|
|
|
|
my @ret = $self->$SUPER(@_,$pool);
|
|
|
|
$pool->clear;
|
|
|
|
wantarray ? @ret : $ret[0];
|
|
|
|
};
|
2007-01-10 09:22:38 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-09-28 17:24:19 +00:00
|
|
|
sub _auth_providers () {
|
2012-04-26 19:34:02 +00:00
|
|
|
my @rv = (
|
2007-09-28 17:24:19 +00:00
|
|
|
SVN::Client::get_simple_provider(),
|
|
|
|
SVN::Client::get_ssl_server_trust_file_provider(),
|
|
|
|
SVN::Client::get_simple_prompt_provider(
|
|
|
|
\&Git::SVN::Prompt::simple, 2),
|
|
|
|
SVN::Client::get_ssl_client_cert_file_provider(),
|
|
|
|
SVN::Client::get_ssl_client_cert_prompt_provider(
|
|
|
|
\&Git::SVN::Prompt::ssl_client_cert, 2),
|
2008-02-25 14:56:28 +00:00
|
|
|
SVN::Client::get_ssl_client_cert_pw_file_provider(),
|
2007-09-28 17:24:19 +00:00
|
|
|
SVN::Client::get_ssl_client_cert_pw_prompt_provider(
|
|
|
|
\&Git::SVN::Prompt::ssl_client_cert_pw, 2),
|
|
|
|
SVN::Client::get_username_provider(),
|
|
|
|
SVN::Client::get_ssl_server_trust_prompt_provider(
|
|
|
|
\&Git::SVN::Prompt::ssl_server_trust),
|
|
|
|
SVN::Client::get_username_prompt_provider(
|
|
|
|
\&Git::SVN::Prompt::username, 2)
|
2012-04-26 19:34:02 +00:00
|
|
|
);
|
|
|
|
|
|
|
|
# earlier 1.6.x versions would segfault, and <= 1.5.x didn't have
|
|
|
|
# this function
|
|
|
|
if ($SVN::Core::VERSION gt '1.6.12') {
|
|
|
|
my $config = SVN::Core::config_get_config($config_dir);
|
|
|
|
my ($p, @a);
|
|
|
|
# config_get_config returns all config files from
|
|
|
|
# ~/.subversion, auth_get_platform_specific_client_providers
|
|
|
|
# just wants the config "file".
|
|
|
|
@a = ($config->{'config'}, undef);
|
|
|
|
$p = SVN::Core::auth_get_platform_specific_client_providers(@a);
|
|
|
|
# Insert the return value from
|
|
|
|
# auth_get_platform_specific_client_providers
|
|
|
|
unshift @rv, @$p;
|
|
|
|
}
|
|
|
|
\@rv;
|
2007-09-28 17:24:19 +00:00
|
|
|
}
|
|
|
|
|
2007-11-12 07:37:42 +00:00
|
|
|
sub escape_uri_only {
|
|
|
|
my ($uri) = @_;
|
|
|
|
my @tmp;
|
|
|
|
foreach (split m{/}, $uri) {
|
2008-10-21 21:12:15 +00:00
|
|
|
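# percent-encode any byte that is not URI-safe, but leave already-valid
# %XX escapes intact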
s/([^~\w.%+-]|%(?![a-fA-F0-9]{2}))/sprintf("%%%02X",ord($1))/eg;
|
2007-11-12 07:37:42 +00:00
|
|
|
push @tmp, $_;
|
|
|
|
}
|
|
|
|
join('/', @tmp);
|
|
|
|
}
|
|
|
|
|
|
|
|
sub escape_url {
|
|
|
|
my ($url) = @_;
|
|
|
|
if ($url =~ m#^(https?)://([^/]+)(.*)$#) {
|
|
|
|
my ($scheme, $domain, $uri) = ($1, $2, escape_uri_only($3));
|
|
|
|
$url = "$scheme://$domain$uri";
|
|
|
|
}
|
|
|
|
$url;
|
|
|
|
}
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
sub new {
|
|
|
|
my ($class, $url) = @_;
|
2007-01-19 02:22:18 +00:00
|
|
|
$url =~ s!/+$!!;
|
2007-01-30 03:16:01 +00:00
|
|
|
return $RA if ($RA && $RA->{url} eq $url);
|
2007-01-19 02:22:18 +00:00
|
|
|
|
2010-02-24 03:13:50 +00:00
|
|
|
::_req_svn();
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
SVN::_Core::svn_config_ensure($config_dir, undef);
|
2007-09-28 17:24:19 +00:00
|
|
|
my ($baton, $callbacks) = SVN::Core::auth_open_helper(_auth_providers);
|
2007-01-10 09:22:38 +00:00
|
|
|
my $config = SVN::Core::config_get_config($config_dir);
|
2007-07-04 21:07:42 +00:00
|
|
|
$RA = undef;
|
2007-10-06 18:57:19 +00:00
|
|
|
my $dont_store_passwords = 1;
|
|
|
|
my $conf_t = ${$config}{'config'};
|
|
|
|
{
|
2007-10-15 07:19:12 +00:00
|
|
|
no warnings 'once';
|
2007-10-06 18:57:19 +00:00
|
|
|
# The usage of $SVN::_Core::SVN_CONFIG_* variables
|
|
|
|
# produces warnings that variables are used only once.
|
|
|
|
# I have not found a better way to silence them, so
|
2007-10-15 07:19:12 +00:00
|
|
|
# the warnings of type 'once' are disabled in this block.
|
2007-10-06 18:57:19 +00:00
|
|
|
if (SVN::_Core::svn_config_get_bool($conf_t,
|
|
|
|
$SVN::_Core::SVN_CONFIG_SECTION_AUTH,
|
|
|
|
$SVN::_Core::SVN_CONFIG_OPTION_STORE_PASSWORDS,
|
|
|
|
1) == 0) {
|
|
|
|
SVN::_Core::svn_auth_set_parameter($baton,
|
|
|
|
$SVN::_Core::SVN_AUTH_PARAM_DONT_STORE_PASSWORDS,
|
|
|
|
bless (\$dont_store_passwords, "_p_void"));
|
|
|
|
}
|
|
|
|
if (SVN::_Core::svn_config_get_bool($conf_t,
|
|
|
|
$SVN::_Core::SVN_CONFIG_SECTION_AUTH,
|
|
|
|
$SVN::_Core::SVN_CONFIG_OPTION_STORE_AUTH_CREDS,
|
|
|
|
1) == 0) {
|
|
|
|
$Git::SVN::Prompt::_no_auth_cache = 1;
|
|
|
|
}
|
2007-10-15 07:19:12 +00:00
|
|
|
} # no warnings 'once'
|
2007-11-12 07:37:42 +00:00
|
|
|
my $self = SVN::Ra->new(url => escape_url($url), auth => $baton,
|
2007-01-10 09:22:38 +00:00
|
|
|
config => $config,
|
|
|
|
pool => SVN::Pool->new,
|
|
|
|
auth_provider_callbacks => $callbacks);
|
2007-11-12 07:37:42 +00:00
|
|
|
$self->{url} = $url;
|
2007-01-10 09:22:38 +00:00
|
|
|
$self->{svn_path} = $url;
|
|
|
|
$self->{repos_root} = $self->get_repos_root;
|
2007-02-09 20:17:57 +00:00
|
|
|
$self->{svn_path} =~ s#^\Q$self->{repos_root}\E(/|$)##;
|
2007-05-13 08:04:43 +00:00
|
|
|
$self->{cache} = { check_path => { r => 0, data => {} },
|
|
|
|
get_dir => { r => 0, data => {} } };
|
2007-01-30 03:16:01 +00:00
|
|
|
$RA = bless $self, $class;
|
2007-01-10 09:22:38 +00:00
|
|
|
}
|
|
|
|
|
2007-05-13 08:04:43 +00:00
|
|
|
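# check_path() and get_dir() below memoize their results for a single
# revision; the cache is flushed whenever a different revision is queried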
sub check_path {
|
|
|
|
my ($self, $path, $r) = @_;
|
|
|
|
my $cache = $self->{cache}->{check_path};
|
|
|
|
if ($r == $cache->{r} && exists $cache->{data}->{$path}) {
|
|
|
|
return $cache->{data}->{$path};
|
|
|
|
}
|
|
|
|
my $pool = SVN::Pool->new;
|
|
|
|
my $t = $self->SUPER::check_path($path, $r, $pool);
|
|
|
|
$pool->clear;
|
|
|
|
if ($r != $cache->{r}) {
|
|
|
|
%{$cache->{data}} = ();
|
|
|
|
$cache->{r} = $r;
|
|
|
|
}
|
|
|
|
$cache->{data}->{$path} = $t;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub get_dir {
|
|
|
|
my ($self, $dir, $r) = @_;
|
|
|
|
my $cache = $self->{cache}->{get_dir};
|
|
|
|
if ($r == $cache->{r}) {
|
|
|
|
if (my $x = $cache->{data}->{$dir}) {
|
|
|
|
return wantarray ? @$x : $x->[0];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
my $pool = SVN::Pool->new;
|
|
|
|
my ($d, undef, $props) = $self->SUPER::get_dir($dir, $r, $pool);
|
|
|
|
my %dirents = map { $_ => { kind => $d->{$_}->kind } } keys %$d;
|
|
|
|
$pool->clear;
|
|
|
|
if ($r != $cache->{r}) {
|
|
|
|
%{$cache->{data}} = ();
|
|
|
|
$cache->{r} = $r;
|
|
|
|
}
|
|
|
|
$cache->{data}->{$dir} = [ \%dirents, $r, $props ];
|
|
|
|
wantarray ? (\%dirents, $r, $props) : \%dirents;
|
|
|
|
}
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
sub DESTROY {
|
2007-01-30 03:16:01 +00:00
|
|
|
# do not call the real DESTROY since we store ourselves in $RA
|
2007-01-10 09:22:38 +00:00
|
|
|
}
|
|
|
|
|
2009-01-18 06:11:44 +00:00
|
|
|
# get_log(paths, start, end, limit,
|
|
|
|
# discover_changed_paths, strict_node_history, receiver)
|
2007-01-10 09:22:38 +00:00
|
|
|
sub get_log {
|
|
|
|
my ($self, @args) = @_;
|
|
|
|
my $pool = SVN::Pool->new;
|
2009-01-18 06:11:44 +00:00
|
|
|
|
2009-07-06 23:39:52 +00:00
|
|
|
# svn_log_changed_path_t objects passed to get_log are likely to be
|
|
|
|
# overwritten even if only the refs are copied to an external variable,
|
|
|
|
# so we should dup the structures in their entirety. Using an
|
|
|
|
# externally passed pool (instead of our temporary and quickly cleared
|
|
|
|
# pool in Git::SVN::Ra) does not help matters at all...
|
|
|
|
my $receiver = pop @args;
|
2009-07-06 23:40:02 +00:00
|
|
|
my $prefix = "/".$self->{svn_path};
|
|
|
|
$prefix =~ s#/+($)##;
|
|
|
|
my $prefix_regex = qr#^\Q$prefix\E#;
|
2009-07-06 23:39:52 +00:00
|
|
|
push(@args, sub {
|
|
|
|
my ($paths) = $_[0];
|
|
|
|
return &$receiver(@_) unless $paths;
|
|
|
|
$_[0] = ();
|
|
|
|
foreach my $p (keys %$paths) {
|
|
|
|
my $i = $paths->{$p};
|
2009-07-06 23:40:02 +00:00
|
|
|
# Make path relative to our url, not repos_root
|
|
|
|
$p =~ s/$prefix_regex//;
|
|
|
|
my %s = map { $_ => $i->$_; }
|
|
|
|
qw/copyfrom_path copyfrom_rev action/;
|
|
|
|
if ($s{'copyfrom_path'}) {
|
|
|
|
$s{'copyfrom_path'} =~ s/$prefix_regex//;
|
|
|
|
}
|
2009-07-06 23:39:52 +00:00
|
|
|
$_[0]{$p} = \%s;
|
|
|
|
}
|
|
|
|
&$receiver(@_);
|
|
|
|
});
|
|
|
|
|
|
|
|
|
2009-01-18 06:11:44 +00:00
|
|
|
# the limit parameter was not supported in SVN 1.1.x, so we
|
|
|
|
# drop it. Therefore, the receiver callback passed to it
|
|
|
|
# is made aware of this limitation by being wrapped if
|
|
|
|
# the limit passed in is positive.
|
|
|
|
if ($SVN::Core::VERSION le '1.2.0') {
|
|
|
|
my $limit = splice(@args, 3, 1);
|
|
|
|
if ($limit > 0) {
|
|
|
|
my $receiver = pop @args;
|
|
|
|
push(@args, sub { &$receiver(@_) if (--$limit >= 0) });
|
|
|
|
}
|
|
|
|
}
|
2007-01-10 09:22:38 +00:00
|
|
|
my $ret = $self->SUPER::get_log(@args, $pool);
|
|
|
|
$pool->clear;
|
|
|
|
$ret;
|
|
|
|
}
|
|
|
|
|
2007-09-28 17:24:19 +00:00
|
|
|
sub trees_match {
|
|
|
|
my ($self, $url1, $rev1, $url2, $rev2) = @_;
|
|
|
|
my $ctx = SVN::Client->new(auth => _auth_providers);
|
|
|
|
my $out = IO::File->new_tmpfile;
|
|
|
|
|
|
|
|
# older SVN (1.1.x) doesn't take $pool as the last parameter for
|
|
|
|
# $ctx->diff(), so we'll create a default one
|
|
|
|
my $pool = SVN::Pool->new_default_sub;
|
|
|
|
|
|
|
|
$ra_invalid = 1; # this will open a new SVN::Ra connection to $url1
|
|
|
|
$ctx->diff([], $url1, $rev1, $url2, $rev2, 1, 1, 0, $out, $out);
|
|
|
|
$out->flush;
|
|
|
|
my $ret = (($out->stat)[7] == 0);
|
|
|
|
close $out or croak $!;
|
|
|
|
|
|
|
|
$ret;
|
|
|
|
}
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
sub get_commit_editor {
|
2007-01-14 06:35:53 +00:00
|
|
|
my ($self, $log, $cb, $pool) = @_;
|
2007-01-10 09:22:38 +00:00
|
|
|
my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef, 0) : ();
|
2007-01-14 06:35:53 +00:00
|
|
|
$self->SUPER::get_commit_editor($log, $cb, @lock, $pool);
|
2007-01-10 09:22:38 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub gs_do_update {
|
2007-01-31 10:45:50 +00:00
|
|
|
my ($self, $rev_a, $rev_b, $gs, $editor) = @_;
|
|
|
|
my $new = ($rev_a == $rev_b);
|
|
|
|
my $path = $gs->{path};
|
|
|
|
|
2007-02-23 10:21:59 +00:00
|
|
|
if ($new && -e $gs->{index}) {
|
|
|
|
unlink $gs->{index} or die
|
|
|
|
"Couldn't unlink index: $gs->{index}: $!\n";
|
|
|
|
}
|
2007-01-10 09:22:38 +00:00
|
|
|
my $pool = SVN::Pool->new;
|
2007-01-22 19:44:57 +00:00
|
|
|
$editor->set_path_strip($path);
|
2007-01-28 12:59:05 +00:00
|
|
|
my (@pc) = split m#/#, $path;
|
|
|
|
my $reporter = $self->do_update($rev_b, (@pc ? shift @pc : ''),
|
2007-01-31 10:45:50 +00:00
|
|
|
1, $editor, $pool);
|
2007-01-10 09:22:38 +00:00
|
|
|
my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : ();
|
2007-01-28 12:59:05 +00:00
|
|
|
|
|
|
|
# Since we can't rely on svn_ra_reparent being available, we'll
|
|
|
|
# just have to do some magic with set_path to make it so
|
|
|
|
# the server only sends us the partial path we want.
|
|
|
|
my $sp = '';
|
|
|
|
my $final = join('/', @pc);
|
|
|
|
while (@pc) {
|
|
|
|
$reporter->set_path($sp, $rev_b, 0, @lock, $pool);
|
|
|
|
$sp .= '/' if length $sp;
|
|
|
|
$sp .= shift @pc;
|
|
|
|
}
|
|
|
|
die "BUG: '$sp' != '$final'\n" if ($sp ne $final);
|
|
|
|
|
|
|
|
$reporter->set_path($sp, $rev_a, $new, @lock, $pool);
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
$reporter->finish_report($pool);
|
|
|
|
$pool->clear;
|
|
|
|
$editor->{git_commit_ok};
|
|
|
|
}
|
|
|
|
|
2007-01-28 12:59:05 +00:00
|
|
|
# this requires SVN 1.4.3 or later (do_switch didn't work before 1.4.3, and
|
|
|
|
# svn_ra_reparent didn't work before 1.4)
|
2007-01-10 09:22:38 +00:00
|
|
|
sub gs_do_switch {
|
2007-01-31 10:45:50 +00:00
|
|
|
my ($self, $rev_a, $rev_b, $gs, $url_b, $editor) = @_;
|
|
|
|
my $path = $gs->{path};
|
2007-01-10 09:22:38 +00:00
|
|
|
my $pool = SVN::Pool->new;
|
2007-01-28 12:59:05 +00:00
|
|
|
|
|
|
|
my $full_url = $self->{url};
|
|
|
|
my $old_url = $full_url;
|
2009-07-20 05:08:45 +00:00
|
|
|
$full_url .= '/' . $path if length $path;
|
2007-01-30 03:16:01 +00:00
|
|
|
my ($ra, $reparented);
|
2008-09-14 21:14:16 +00:00
|
|
|
|
2009-07-20 05:08:45 +00:00
|
|
|
if ($old_url =~ m#^svn(\+ssh)?://# ||
|
|
|
|
($full_url =~ m#^https?://# &&
|
|
|
|
escape_url($full_url) ne $full_url)) {
|
2008-09-14 21:14:16 +00:00
|
|
|
$_[0] = undef;
|
|
|
|
$self = undef;
|
|
|
|
$RA = undef;
|
|
|
|
$ra = Git::SVN::Ra->new($full_url);
|
|
|
|
$ra_invalid = 1;
|
|
|
|
} elsif ($old_url ne $full_url) {
|
|
|
|
SVN::_Ra::svn_ra_reparent($self->{session}, $full_url, $pool);
|
|
|
|
$self->{url} = $full_url;
|
|
|
|
$reparented = 1;
|
2007-01-30 03:16:01 +00:00
|
|
|
}
|
2008-09-14 21:14:16 +00:00
|
|
|
|
2007-01-30 03:16:01 +00:00
|
|
|
$ra ||= $self;
|
2008-09-07 03:18:18 +00:00
|
|
|
$url_b = escape_url($url_b);
|
2007-01-31 10:45:50 +00:00
|
|
|
my $reporter = $ra->do_switch($rev_b, '', 1, $url_b, $editor, $pool);
|
2007-01-10 09:22:38 +00:00
|
|
|
my @lock = $SVN::Core::VERSION ge '1.2.0' ? (undef) : ();
|
2007-01-22 19:44:57 +00:00
|
|
|
$reporter->set_path('', $rev_a, 0, @lock, $pool);
|
2007-01-10 09:22:38 +00:00
|
|
|
$reporter->finish_report($pool);
|
2007-01-28 12:59:05 +00:00
|
|
|
|
2007-01-30 03:16:01 +00:00
|
|
|
if ($reparented) {
|
|
|
|
SVN::_Ra::svn_ra_reparent($self->{session}, $old_url, $pool);
|
|
|
|
$self->{url} = $old_url;
|
|
|
|
}
|
2007-01-28 12:59:05 +00:00
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
$pool->clear;
|
|
|
|
$editor->{git_commit_ok};
|
|
|
|
}
|
|
|
|
|
2007-06-13 09:37:03 +00:00
|
|
|
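# find the longest path prefix shared by every tracked Git::SVN and every
# glob; gs_fetch_loop_common() uses it to restrict get_log to one subtree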
sub longest_common_path {
|
|
|
|
my ($gsv, $globs) = @_;
|
2007-02-07 19:50:16 +00:00
|
|
|
my %common;
|
2007-02-08 20:53:57 +00:00
|
|
|
my $common_max = scalar @$gsv;
|
|
|
|
|
|
|
|
foreach my $gs (@$gsv) {
|
2007-02-07 19:50:16 +00:00
|
|
|
my @tmp = split m#/#, $gs->{path};
|
|
|
|
my $p = '';
|
|
|
|
foreach (@tmp) {
|
|
|
|
$p .= length($p) ? "/$_" : $_;
|
|
|
|
$common{$p} ||= 0;
|
|
|
|
$common{$p}++;
|
|
|
|
}
|
|
|
|
}
|
2007-02-08 20:53:57 +00:00
|
|
|
$globs ||= [];
|
|
|
|
$common_max += scalar @$globs;
|
|
|
|
foreach my $glob (@$globs) {
|
|
|
|
my @tmp = split m#/#, $glob->{path}->{left};
|
|
|
|
my $p = '';
|
|
|
|
foreach (@tmp) {
|
|
|
|
$p .= length($p) ? "/$_" : $_;
|
|
|
|
$common{$p} ||= 0;
|
|
|
|
$common{$p}++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-02-07 19:50:16 +00:00
|
|
|
my $longest_path = '';
|
|
|
|
foreach (sort {length $b <=> length $a} keys %common) {
|
2007-02-08 20:53:57 +00:00
|
|
|
if ($common{$_} == $common_max) {
|
2007-02-07 19:50:16 +00:00
|
|
|
$longest_path = $_;
|
|
|
|
last;
|
|
|
|
}
|
2007-01-28 06:28:56 +00:00
|
|
|
}
|
2007-06-13 09:37:03 +00:00
|
|
|
$longest_path;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub gs_fetch_loop_common {
|
|
|
|
my ($self, $base, $head, $gsv, $globs) = @_;
|
|
|
|
return if ($base > $head);
|
|
|
|
my $inc = $_log_window_size;
|
|
|
|
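# walk the history in windows of $_log_window_size revisions, issuing
# one get_log call per window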
my ($min, $max) = ($base, $head < $base + $inc ? $head : $base + $inc);
|
|
|
|
my $longest_path = longest_common_path($gsv, $globs);
|
2007-09-07 11:00:40 +00:00
|
|
|
my $ra_url = $self->{url};
|
2009-05-06 20:18:52 +00:00
|
|
|
my $find_trailing_edge;
|
2007-01-28 06:28:56 +00:00
|
|
|
while (1) {
|
2007-01-30 22:04:22 +00:00
|
|
|
my %revs;
|
2007-02-07 19:50:16 +00:00
|
|
|
my $err;
|
2007-01-30 02:34:55 +00:00
|
|
|
my $err_handler = $SVN::Error::handler;
|
2007-02-07 19:50:16 +00:00
|
|
|
$SVN::Error::handler = sub {
|
|
|
|
($err) = @_;
|
|
|
|
skip_unknown_revs($err);
|
|
|
|
};
|
|
|
|
sub _cb {
|
|
|
|
my ($paths, $r, $author, $date, $log) = @_;
|
2009-07-06 23:39:52 +00:00
|
|
|
[ $paths,
|
2007-02-07 19:50:16 +00:00
|
|
|
{ author => $author, date => $date, log => $log } ];
|
|
|
|
}
|
|
|
|
$self->get_log([$longest_path], $min, $max, 0, 1, 1,
|
|
|
|
sub { $revs{$_[1]} = _cb(@_) });
|
2009-02-09 00:33:18 +00:00
|
|
|
if ($err) {
|
|
|
|
print "Checked through r$max\r";
|
2009-05-06 20:18:52 +00:00
|
|
|
} else {
|
|
|
|
$find_trailing_edge = 1;
|
2009-02-09 00:33:18 +00:00
|
|
|
}
|
2009-05-06 20:18:52 +00:00
|
|
|
if ($err and $find_trailing_edge) {
|
2007-02-07 19:50:16 +00:00
|
|
|
print STDERR "Path '$longest_path' ",
|
|
|
|
"was probably deleted:\n",
|
|
|
|
$err->expanded_message,
|
|
|
|
"\nWill attempt to follow ",
|
|
|
|
"revisions r$min .. r$max ",
|
|
|
|
"committed before the deletion\n";
|
|
|
|
my $hi = $max;
|
|
|
|
while (--$hi >= $min) {
|
|
|
|
my $ok;
|
|
|
|
$self->get_log([$longest_path], $min, $hi,
|
|
|
|
0, 1, 1, sub {
|
2009-05-06 20:18:53 +00:00
|
|
|
$ok = $_[1];
|
2007-02-07 19:50:16 +00:00
|
|
|
$revs{$_[1]} = _cb(@_) });
|
|
|
|
if ($ok) {
|
|
|
|
print STDERR "r$min .. r$ok OK\n";
|
|
|
|
last;
|
|
|
|
}
|
|
|
|
}
|
2009-05-06 20:18:52 +00:00
|
|
|
$find_trailing_edge = 0;
|
2007-02-07 19:50:16 +00:00
|
|
|
}
|
2007-01-30 22:04:22 +00:00
|
|
|
$SVN::Error::handler = $err_handler;
|
2007-02-07 02:35:30 +00:00
|
|
|
|
2007-02-08 20:53:57 +00:00
|
|
|
my %exists = map { $_->{path} => $_ } @$gsv;
|
2007-01-30 22:04:22 +00:00
|
|
|
foreach my $r (sort {$a <=> $b} keys %revs) {
|
2007-02-07 02:35:30 +00:00
|
|
|
my ($paths, $logged) = @{$revs{$r}};
|
2007-02-08 20:53:57 +00:00
|
|
|
|
|
|
|
foreach my $gs ($self->match_globs(\%exists, $paths,
|
|
|
|
$globs, $r)) {
|
2007-12-09 07:27:41 +00:00
|
|
|
if ($gs->rev_map_max >= $r) {
|
2007-02-07 02:35:30 +00:00
|
|
|
next;
|
|
|
|
}
|
|
|
|
next unless $gs->match_paths($paths, $r);
|
|
|
|
$gs->{logged_rev_props} = $logged;
|
2007-02-15 00:29:52 +00:00
|
|
|
if (my $last_commit = $gs->last_commit) {
|
|
|
|
$gs->assert_index_clean($last_commit);
|
|
|
|
}
|
2007-02-07 02:35:30 +00:00
|
|
|
my $log_entry = $gs->do_fetch($paths, $r);
|
|
|
|
if ($log_entry) {
|
2007-01-28 06:28:56 +00:00
|
|
|
$gs->do_git_commit($log_entry);
|
|
|
|
}
|
2008-01-02 18:10:03 +00:00
|
|
|
$INDEX_FILES{$gs->{index}} = 1;
|
2007-01-28 06:28:56 +00:00
|
|
|
}
|
2007-02-08 20:53:57 +00:00
|
|
|
foreach my $g (@$globs) {
|
2007-02-11 09:20:26 +00:00
|
|
|
my $k = "svn-remote.$g->{remote}." .
|
|
|
|
"$g->{t}-maxRev";
|
|
|
|
Git::SVN::tmp_config($k, $r);
|
2007-02-08 20:53:57 +00:00
|
|
|
}
|
2007-09-07 11:00:40 +00:00
|
|
|
if ($ra_invalid) {
|
|
|
|
$_[0] = undef;
|
|
|
|
$self = undef;
|
|
|
|
$RA = undef;
|
|
|
|
$self = Git::SVN::Ra->new($ra_url);
|
|
|
|
$ra_invalid = undef;
|
|
|
|
}
|
2007-01-28 06:28:56 +00:00
|
|
|
}
|
2007-02-01 01:22:31 +00:00
|
|
|
# pre-fill the .rev_db since it'll eventually get filled in
|
|
|
|
# with '0' x40 if something new gets committed
|
2007-02-08 20:53:57 +00:00
|
|
|
foreach my $gs (@$gsv) {
|
2007-12-09 07:27:42 +00:00
|
|
|
next if $gs->rev_map_max >= $max;
|
|
|
|
next if defined $gs->rev_map_get($max);
|
|
|
|
$gs->rev_map_set($max, 0 x40);
|
2007-02-01 01:22:31 +00:00
|
|
|
}
|
2007-02-13 00:03:32 +00:00
|
|
|
foreach my $g (@$globs) {
|
|
|
|
my $k = "svn-remote.$g->{remote}.$g->{t}-maxRev";
|
|
|
|
Git::SVN::tmp_config($k, $max);
|
|
|
|
}
|
2007-01-28 06:28:56 +00:00
|
|
|
last if $max >= $head;
|
|
|
|
$min = $max + 1;
|
|
|
|
$max += $inc;
|
|
|
|
$max = $head if ($max > $head);
|
|
|
|
}
|
2008-02-03 16:56:18 +00:00
|
|
|
Git::SVN::gc();
|
2007-01-28 06:28:56 +00:00
|
|
|
}
|
|
|
|
|
2008-08-08 08:41:57 +00:00
|
|
|
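# recursively list directories up to $depth levels below $left at
# revision $r; used to expand multi-level branch/tag globs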
sub get_dir_globbed {
|
|
|
|
my ($self, $left, $depth, $r) = @_;
|
|
|
|
|
|
|
|
my @x = eval { $self->get_dir($left, $r) };
|
|
|
|
return unless scalar @x == 3;
|
|
|
|
my $dirents = $x[0];
|
|
|
|
my @finalents;
|
|
|
|
foreach my $de (keys %$dirents) {
|
|
|
|
next if $dirents->{$de}->{kind} != $SVN::Node::dir;
|
|
|
|
if ($depth > 1) {
|
|
|
|
my @args = ("$left/$de", $depth - 1, $r);
|
|
|
|
foreach my $dir ($self->get_dir_globbed(@args)) {
|
|
|
|
push @finalents, "$de/$dir";
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
push @finalents, $de;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
@finalents;
|
|
|
|
}
|
|
|
|
|
2011-10-10 23:27:37 +00:00
|
|
|
# return value: 0 -- don't ignore, 1 -- ignore
|
|
|
|
sub is_ref_ignored {
|
|
|
|
my ($g, $p) = @_;
|
|
|
|
my $refname = $g->{ref}->full_path($p);
|
|
|
|
return 1 if defined($g->{ignore_refs_regex}) &&
|
|
|
|
$refname =~ m!$g->{ignore_refs_regex}!;
|
|
|
|
return 0 unless defined($_ignore_refs_regex);
|
|
|
|
return 1 if $refname =~ m!$_ignore_refs_regex!o;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2007-02-08 20:53:57 +00:00
|
|
|
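# given the changed $paths of revision $r, initialize a Git::SVN entry
# for every glob match that does not already exist in %$exists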
sub match_globs {
|
|
|
|
my ($self, $exists, $paths, $globs, $r) = @_;
|
2007-02-10 21:28:50 +00:00
|
|
|
|
|
|
|
sub get_dir_check {
|
|
|
|
my ($self, $exists, $g, $r) = @_;
|
2008-08-08 08:41:57 +00:00
|
|
|
|
|
|
|
my @dirs = $self->get_dir_globbed($g->{path}->{left},
|
|
|
|
$g->{path}->{depth},
|
|
|
|
$r);
|
|
|
|
|
|
|
|
foreach my $de (@dirs) {
|
2007-02-10 21:28:50 +00:00
|
|
|
my $p = $g->{path}->full_path($de);
|
|
|
|
next if $exists->{$p};
|
|
|
|
next if (length $g->{path}->{right} &&
|
|
|
|
($self->check_path($p, $r) !=
|
|
|
|
$SVN::Node::dir));
|
2010-01-23 08:30:01 +00:00
|
|
|
next unless $p =~ /$g->{path}->{regex}/;
|
2007-02-10 21:28:50 +00:00
|
|
|
$exists->{$p} = Git::SVN->init($self->{url}, $p, undef,
|
|
|
|
$g->{ref}->full_path($de), 1);
|
|
|
|
}
|
|
|
|
}
|
2007-02-08 20:53:57 +00:00
|
|
|
foreach my $g (@$globs) {
|
2007-02-10 21:28:50 +00:00
|
|
|
if (my $path = $paths->{"/$g->{path}->{left}"}) {
|
|
|
|
if ($path->{action} =~ /^[AR]$/) {
|
|
|
|
get_dir_check($self, $exists, $g, $r);
|
|
|
|
}
|
|
|
|
}
|
2007-02-08 20:53:57 +00:00
|
|
|
foreach (keys %$paths) {
|
2007-02-14 21:32:21 +00:00
|
|
|
if (/$g->{path}->{left_regex}/ &&
|
|
|
|
!/$g->{path}->{regex}/) {
|
2007-02-10 21:28:50 +00:00
|
|
|
next if $paths->{$_}->{action} !~ /^[AR]$/;
|
|
|
|
get_dir_check($self, $exists, $g, $r);
|
|
|
|
}
|
2007-02-08 20:53:57 +00:00
|
|
|
next unless /$g->{path}->{regex}/;
|
|
|
|
my $p = $1;
|
|
|
|
my $pathname = $g->{path}->full_path($p);
|
2011-10-10 23:27:37 +00:00
|
|
|
next if is_ref_ignored($g, $p);
|
2007-02-08 20:53:57 +00:00
|
|
|
next if $exists->{$pathname};
|
2007-04-18 07:17:33 +00:00
|
|
|
next if ($self->check_path($pathname, $r) !=
|
|
|
|
$SVN::Node::dir);
|
2007-02-08 20:53:57 +00:00
|
|
|
$exists->{$pathname} = Git::SVN->init(
|
|
|
|
$self->{url}, $pathname, undef,
|
|
|
|
$g->{ref}->full_path($p), 1);
|
|
|
|
}
|
|
|
|
my $c = '';
|
|
|
|
foreach (split m#/#, $g->{path}->{left}) {
|
|
|
|
$c .= "/$_";
|
|
|
|
next unless ($paths->{$c} &&
|
2007-02-10 21:28:50 +00:00
|
|
|
($paths->{$c}->{action} =~ /^[AR]$/));
|
|
|
|
get_dir_check($self, $exists, $g, $r);
|
2007-02-08 20:53:57 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
values %$exists;
|
|
|
|
}
|
|
|
|
|
2007-01-24 00:29:23 +00:00
|
|
|
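# start from the repository root and re-append path components until
# get_log succeeds, yielding the shortest URL we can actually read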
sub minimize_url {
|
|
|
|
my ($self) = @_;
|
|
|
|
return $self->{url} if ($self->{url} eq $self->{repos_root});
|
|
|
|
my $url = $self->{repos_root};
|
|
|
|
my @components = split(m!/!, $self->{svn_path});
|
|
|
|
my $c = '';
|
|
|
|
do {
|
|
|
|
$url .= "/$c" if length $c;
|
2009-07-25 20:14:16 +00:00
|
|
|
eval {
|
|
|
|
my $ra = (ref $self)->new($url);
|
|
|
|
my $latest = $ra->get_latest_revnum;
|
|
|
|
$ra->get_log("", $latest, 0, 1, 0, 1, sub {});
|
|
|
|
};
|
2007-01-24 00:29:23 +00:00
|
|
|
} while ($@ && ($c = shift @components));
|
|
|
|
$url;
|
|
|
|
}
|
|
|
|
|
2007-01-10 09:22:38 +00:00
|
|
|
sub can_do_switch {
|
|
|
|
my $self = shift;
|
|
|
|
unless (defined $can_do_switch) {
|
|
|
|
my $pool = SVN::Pool->new;
|
|
|
|
my $rep = eval {
|
|
|
|
$self->do_switch(1, '', 0, $self->{url},
|
|
|
|
SVN::Delta::Editor->new, $pool);
|
|
|
|
};
|
|
|
|
if ($@) {
|
|
|
|
$can_do_switch = 0;
|
|
|
|
} else {
|
|
|
|
$rep->abort_report($pool);
|
|
|
|
$can_do_switch = 1;
|
|
|
|
}
|
|
|
|
$pool->clear;
|
|
|
|
}
|
|
|
|
$can_do_switch;
|
|
|
|
}
|
|
|
|
|
2007-01-28 06:28:56 +00:00
|
|
|
sub skip_unknown_revs {
|
|
|
|
my ($err) = @_;
|
|
|
|
my $errno = $err->apr_err();
|
|
|
|
# Maybe the branch we're tracking didn't
|
|
|
|
# exist when the repo started, so it's
|
|
|
|
# not an error if it doesn't, just continue
|
|
|
|
#
|
|
|
|
# Wonderfully consistent library, eh?
|
|
|
|
# 160013 - svn:// and file://
|
|
|
|
# 175002 - http(s)://
|
|
|
|
# 175007 - http(s):// (this repo required authorization, too...)
|
|
|
|
# More codes may be discovered later...
|
|
|
|
if ($errno == 175007 || $errno == 175002 || $errno == 160013) {
|
2007-03-31 00:54:48 +00:00
|
|
|
my $err_key = $err->expanded_message;
|
|
|
|
# revision numbers change every time, filter them out
|
|
|
|
$err_key =~ s/\d+/\0/g;
|
|
|
|
$err_key = "$errno\0$err_key";
|
|
|
|
unless ($ignored_err{$err_key}) {
|
|
|
|
warn "W: Ignoring error from SVN, path probably ",
|
|
|
|
"does not exist: ($errno): ",
|
|
|
|
$err->expanded_message,"\n";
|
2008-01-07 10:40:40 +00:00
|
|
|
warn "W: Do not be alarmed at the above message ",
|
|
|
|
"git-svn is just searching aggressively for ",
|
|
|
|
"old history.\n",
|
|
|
|
"This may take a while on large repositories\n";
|
2007-03-31 00:54:48 +00:00
|
|
|
$ignored_err{$err_key} = 1;
|
|
|
|
}
|
2007-01-28 06:28:56 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
die "Error from SVN, ($errno): ", $err->expanded_message,"\n";
|
|
|
|
}
|
|
|
|
|
2007-01-12 10:35:20 +00:00
|
|
|
package Git::SVN::Log;
|
|
|
|
use strict;
|
|
|
|
use warnings;
|
|
|
|
use POSIX qw/strftime/;
|
2007-11-12 06:56:52 +00:00
|
|
|
use constant commit_log_separator => ('-' x 72) . "\n";
|
2007-01-12 10:35:20 +00:00
|
|
|
use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline
|
|
|
|
%rusers $show_commit $incremental/;
|
|
|
|
my $l_fmt;
|
|
|
|
|
|
|
|
sub cmt_showable {
|
|
|
|
my ($c) = @_;
|
|
|
|
return 1 if defined $c->{r};
|
2007-04-08 07:59:22 +00:00
|
|
|
|
|
|
|
# big commit message got truncated by the 16k pretty buffer in rev-list
|
2007-01-12 10:35:20 +00:00
|
|
|
if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
|
|
|
|
$c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
|
2007-04-08 07:59:22 +00:00
|
|
|
@{$c->{l}} = ();
|
2007-01-14 06:35:53 +00:00
|
|
|
my @log = command(qw/cat-file commit/, $c->{c});
|
2007-04-08 07:59:22 +00:00
|
|
|
|
|
|
|
# shift off the headers
|
|
|
|
shift @log while ($log[0] ne '');
|
2007-01-14 06:35:53 +00:00
|
|
|
shift @log;
|
2007-04-08 07:59:22 +00:00
|
|
|
|
|
|
|
# TODO: make $c->{l} not have a trailing newline in the future
|
|
|
|
@{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log;
|
2007-01-12 10:35:20 +00:00
|
|
|
|
|
|
|
(undef, $c->{r}, undef) = ::extract_metadata(
|
2007-01-14 06:35:53 +00:00
|
|
|
(grep(/^git-svn-id: /, @log))[-1]);
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
return defined $c->{r};
|
|
|
|
}
|
|
|
|
|
|
|
|
sub log_use_color {
|
2007-12-11 06:28:42 +00:00
|
|
|
return $color || Git->repository->get_colorbool('color.diff');
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub git_svn_log_cmd {
|
2007-02-14 01:09:40 +00:00
|
|
|
my ($r_min, $r_max, @args) = @_;
|
|
|
|
my $head = 'HEAD';
|
2007-08-15 16:55:18 +00:00
|
|
|
my (@files, @log_opts);
|
2007-02-14 01:09:40 +00:00
|
|
|
foreach my $x (@args) {
|
2007-08-15 16:55:18 +00:00
|
|
|
if ($x eq '--' || @files) {
|
|
|
|
push @files, $x;
|
|
|
|
} else {
|
|
|
|
if (::verify_ref("$x^0")) {
|
|
|
|
$head = $x;
|
|
|
|
} else {
|
|
|
|
push @log_opts, $x;
|
|
|
|
}
|
|
|
|
}
|
2007-02-14 01:09:40 +00:00
|
|
|
}
|
|
|
|
|
2007-04-08 07:59:19 +00:00
|
|
|
my ($url, $rev, $uuid, $gs) = ::working_head_info($head);
|
|
|
|
$gs ||= Git::SVN->_new;
|
2007-01-12 10:35:20 +00:00
|
|
|
my @cmd = (qw/log --abbrev-commit --pretty=raw --default/,
|
|
|
|
$gs->refname);
|
|
|
|
push @cmd, '-r' unless $non_recursive;
|
|
|
|
push @cmd, qw/--raw --name-status/ if $verbose;
|
|
|
|
push @cmd, '--color' if log_use_color();
|
2007-08-15 16:55:18 +00:00
|
|
|
push @cmd, @log_opts;
|
|
|
|
if (defined $r_max && $r_max == $r_min) {
|
2007-01-12 10:35:20 +00:00
|
|
|
push @cmd, '--max-count=1';
|
2007-12-09 07:27:41 +00:00
|
|
|
if (my $c = $gs->rev_map_get($r_max)) {
|
2007-01-12 10:35:20 +00:00
|
|
|
push @cmd, $c;
|
|
|
|
}
|
2007-08-15 16:55:18 +00:00
|
|
|
} elsif (defined $r_max) {
|
2007-11-12 06:56:52 +00:00
|
|
|
if ($r_max < $r_min) {
|
|
|
|
($r_min, $r_max) = ($r_max, $r_min);
|
|
|
|
}
|
|
|
|
my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min);
|
|
|
|
my (undef, $c_min) = $gs->find_rev_after($r_min, 1, $r_max);
|
|
|
|
# If there are no commits in the range, both $c_max and $c_min
|
|
|
|
# will be undefined. If there is at least 1 commit in the
|
|
|
|
# range, both will be defined.
|
|
|
|
return () if !defined $c_min || !defined $c_max;
|
|
|
|
if ($c_min eq $c_max) {
|
|
|
|
push @cmd, '--max-count=1', $c_min;
|
2007-01-12 10:35:20 +00:00
|
|
|
} else {
|
2007-11-12 06:56:52 +00:00
|
|
|
push @cmd, '--boundary', "$c_min..$c_max";
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
}
|
2007-08-15 16:55:18 +00:00
|
|
|
return (@cmd, @files);
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
# adapted from pager.c
|
|
|
|
sub config_pager {
|
2010-02-14 12:06:10 +00:00
|
|
|
if (! -t *STDOUT) {
|
|
|
|
$ENV{GIT_PAGER_IN_USE} = 'false';
|
|
|
|
$pager = undef;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
chomp($pager = command_oneline(qw(var GIT_PAGER)));
|
2009-10-31 01:43:19 +00:00
|
|
|
if ($pager eq 'cat') {
|
2007-01-12 10:35:20 +00:00
|
|
|
$pager = undef;
|
|
|
|
}
|
2007-12-11 06:28:42 +00:00
|
|
|
$ENV{GIT_PAGER_IN_USE} = defined($pager);
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
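# after forking, the parent execs the pager reading from the pipe while
# the child returns and keeps running git-svn with STDOUT redirected
# into the pipe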
sub run_pager {
|
2010-02-14 12:06:10 +00:00
|
|
|
return unless defined $pager;
|
2008-09-10 15:09:46 +00:00
|
|
|
pipe my ($rfd, $wfd) or return;
|
2007-10-16 14:36:52 +00:00
|
|
|
defined(my $pid = fork) or ::fatal "Can't fork: $!";
|
2007-01-12 10:35:20 +00:00
|
|
|
if (!$pid) {
|
|
|
|
open STDOUT, '>&', $wfd or
|
2007-10-16 14:36:52 +00:00
|
|
|
::fatal "Can't redirect to stdout: $!";
|
2007-01-12 10:35:20 +00:00
|
|
|
return;
|
|
|
|
}
|
2007-10-16 14:36:52 +00:00
|
|
|
open STDIN, '<&', $rfd or ::fatal "Can't redirect stdin: $!";
|
2007-01-12 10:35:20 +00:00
|
|
|
$ENV{LESS} ||= 'FRSX';
|
2007-10-16 14:36:52 +00:00
|
|
|
exec $pager or ::fatal "Can't run pager: $! ($pager)";
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
2007-11-21 06:43:17 +00:00
|
|
|
sub format_svn_date {
|
2009-02-28 03:11:45 +00:00
|
|
|
my $t = shift || time;
|
2011-12-19 08:11:05 +00:00
|
|
|
my $gmoff = Git::SVN::get_tz($t);
|
2009-02-24 19:44:49 +00:00
|
|
|
return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t));
|
2007-11-21 06:43:17 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub parse_git_date {
|
|
|
|
my ($t, $tz) = @_;
|
|
|
|
# Date::Parse isn't in the standard Perl distro :(
|
|
|
|
if ($tz =~ s/^\+//) {
|
|
|
|
$t += tz_to_s_offset($tz);
|
|
|
|
} elsif ($tz =~ s/^\-//) {
|
|
|
|
$t -= tz_to_s_offset($tz);
|
|
|
|
}
|
|
|
|
return $t;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub set_local_timezone {
|
|
|
|
if (defined $TZ) {
|
|
|
|
$ENV{TZ} = $TZ;
|
|
|
|
} else {
|
|
|
|
delete $ENV{TZ};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-01-27 22:38:10 +00:00
|
|
|
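# convert an unsigned hhmm timezone string (e.g. "0530") to an offset in
# seconds; the caller strips the +/- sign beforehand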
sub tz_to_s_offset {
|
|
|
|
my ($tz) = @_;
|
|
|
|
$tz =~ s/(\d\d)$//;
|
|
|
|
return ($1 * 60) + ($tz * 3600);
|
|
|
|
}
|
|
|
|
|
2007-01-12 10:35:20 +00:00
|
|
|
sub get_author_info {
|
|
|
|
my ($dest, $author, $t, $tz) = @_;
|
|
|
|
$author =~ s/(?:^\s*|\s*$)//g;
|
|
|
|
$dest->{a_raw} = $author;
|
|
|
|
my $au;
|
2007-01-14 10:17:00 +00:00
|
|
|
if ($::_authors) {
|
2007-01-12 10:35:20 +00:00
|
|
|
$au = $rusers{$author} || undef;
|
|
|
|
}
|
|
|
|
if (!$au) {
|
|
|
|
($au) = ($author =~ /<([^>]+)\@[^>]+>$/);
|
|
|
|
}
|
|
|
|
$dest->{t} = $t;
|
|
|
|
$dest->{tz} = $tz;
|
|
|
|
$dest->{a} = $au;
|
2007-11-21 06:43:17 +00:00
|
|
|
$dest->{t_utc} = parse_git_date($t, $tz);
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sub process_commit {
|
|
|
|
my ($c, $r_min, $r_max, $defer) = @_;
|
|
|
|
if (defined $r_min && defined $r_max) {
|
|
|
|
if ($r_min == $c->{r} && $r_min == $r_max) {
|
|
|
|
show_commit($c);
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return 1 if $r_min == $r_max;
|
|
|
|
if ($r_min < $r_max) {
|
|
|
|
# we need to reverse the print order
|
|
|
|
return 0 if (defined $limit && --$limit < 0);
|
|
|
|
push @$defer, $c;
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
if ($r_min != $r_max) {
|
|
|
|
return 1 if ($r_min < $c->{r});
|
|
|
|
return 1 if ($r_max > $c->{r});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return 0 if (defined $limit && --$limit < 0);
|
|
|
|
show_commit($c);
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
sub show_commit {
|
|
|
|
my $c = shift;
|
|
|
|
if ($oneline) {
|
|
|
|
my $x = "\n";
|
|
|
|
if (my $l = $c->{l}) {
|
|
|
|
while ($l->[0] =~ /^\s*$/) { shift @$l }
|
|
|
|
$x = $l->[0];
|
|
|
|
}
|
|
|
|
$l_fmt ||= 'A' . length($c->{r});
|
|
|
|
print 'r',pack($l_fmt, $c->{r}),' | ';
|
|
|
|
print "$c->{c} | " if $show_commit;
|
|
|
|
print $x;
|
|
|
|
} else {
|
|
|
|
show_commit_normal($c);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sub show_commit_changed_paths {
|
|
|
|
my ($c) = @_;
|
|
|
|
return unless $c->{changed};
|
|
|
|
print "Changed paths:\n", @{$c->{changed}};
|
|
|
|
}
|
|
|
|
|
|
|
|
sub show_commit_normal {
|
|
|
|
my ($c) = @_;
|
2007-11-12 06:56:52 +00:00
|
|
|
print commit_log_separator, "r$c->{r} | ";
|
2007-01-12 10:35:20 +00:00
|
|
|
print "$c->{c} | " if $show_commit;
|
2007-11-21 06:43:17 +00:00
|
|
|
print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | ';
|
2007-01-12 10:35:20 +00:00
|
|
|
my $nr_line = 0;
|
|
|
|
|
|
|
|
if (my $l = $c->{l}) {
|
|
|
|
while ($l->[$#$l] eq "\n" && $#$l > 0
|
|
|
|
&& $l->[($#$l - 1)] eq "\n") {
|
|
|
|
pop @$l;
|
|
|
|
}
|
|
|
|
$nr_line = scalar @$l;
|
|
|
|
if (!$nr_line) {
|
|
|
|
print "1 line\n\n\n";
|
|
|
|
} else {
|
|
|
|
if ($nr_line == 1) {
|
|
|
|
$nr_line = '1 line';
|
|
|
|
} else {
|
|
|
|
$nr_line .= ' lines';
|
|
|
|
}
|
|
|
|
print $nr_line, "\n";
|
|
|
|
show_commit_changed_paths($c);
|
|
|
|
print "\n";
|
|
|
|
print $_ foreach @$l;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
print "1 line\n";
|
|
|
|
show_commit_changed_paths($c);
|
|
|
|
print "\n";
|
|
|
|
|
|
|
|
}
|
2007-02-15 08:40:42 +00:00
|
|
|
foreach my $x (qw/raw stat diff/) {
|
2007-01-12 10:35:20 +00:00
|
|
|
if ($c->{$x}) {
|
|
|
|
print "\n";
|
|
|
|
print $_ foreach @{$c->{$x}}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sub cmd_show_log {
|
|
|
|
my (@args) = @_;
|
|
|
|
my ($r_min, $r_max);
|
|
|
|
my $r_last = -1; # prevent dupes
|
2007-11-21 06:43:17 +00:00
|
|
|
set_local_timezone();
|
2007-01-12 10:35:20 +00:00
|
|
|
if (defined $::_revision) {
|
|
|
|
if ($::_revision =~ /^(\d+):(\d+)$/) {
|
|
|
|
($r_min, $r_max) = ($1, $2);
|
|
|
|
} elsif ($::_revision =~ /^\d+$/) {
|
|
|
|
$r_min = $r_max = $::_revision;
|
|
|
|
} else {
|
|
|
|
::fatal "-r$::_revision is not supported, use ",
|
2007-10-16 14:36:52 +00:00
|
|
|
"standard 'git log' arguments instead";
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
config_pager();
|
2007-08-15 16:55:18 +00:00
|
|
|
@args = git_svn_log_cmd($r_min, $r_max, @args);
|
2007-11-12 06:56:52 +00:00
|
|
|
if (!@args) {
|
|
|
|
print commit_log_separator unless $incremental || $oneline;
|
|
|
|
return;
|
|
|
|
}
|
2007-01-12 10:35:20 +00:00
|
|
|
my $log = command_output_pipe(@args);
|
|
|
|
run_pager();
|
2007-02-15 08:40:42 +00:00
|
|
|
my (@k, $c, $d, $stat);
|
2007-01-12 10:35:20 +00:00
|
|
|
my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
|
|
|
|
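# parse the raw `git log --pretty=raw` stream line by line, accumulating
# one commit record in $c at a time and emitting it via process_commit()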
while (<$log>) {
|
2011-05-20 11:16:34 +00:00
|
|
|
if (/^${esc_color}commit (?:- )?($::sha1_short)/o) {
|
2007-01-12 10:35:20 +00:00
|
|
|
my $cmt = $1;
|
|
|
|
if ($c && cmt_showable($c) && $c->{r} != $r_last) {
|
|
|
|
$r_last = $c->{r};
|
|
|
|
process_commit($c, $r_min, $r_max, \@k) or
|
|
|
|
goto out;
|
|
|
|
}
|
|
|
|
$d = undef;
|
|
|
|
$c = { c => $cmt };
|
|
|
|
} elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) {
|
|
|
|
get_author_info($c, $1, $2, $3);
|
|
|
|
} elsif (/^${esc_color}(?:tree|parent|committer) /o) {
|
|
|
|
# ignore
|
|
|
|
} elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) {
|
|
|
|
push @{$c->{raw}}, $_;
|
|
|
|
} elsif (/^${esc_color}[ACRMDT]\t/) {
|
|
|
|
# we could add $SVN->{svn_path} here, but that requires
|
|
|
|
# remote access at the moment (repo_path_split)...
|
|
|
|
s#^(${esc_color})([ACRMDT])\t#$1 $2 #o;
|
|
|
|
push @{$c->{changed}}, $_;
|
|
|
|
} elsif (/^${esc_color}diff /o) {
|
|
|
|
$d = 1;
|
|
|
|
push @{$c->{diff}}, $_;
|
|
|
|
} elsif ($d) {
|
|
|
|
push @{$c->{diff}}, $_;
|
2007-02-15 08:40:42 +00:00
|
|
|
} elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]*
|
|
|
|
$esc_color*[\+\-]*$esc_color$/x) {
|
|
|
|
$stat = 1;
|
|
|
|
push @{$c->{stat}}, $_;
|
|
|
|
} elsif ($stat && /^ \d+ files changed, \d+ insertions/) {
|
|
|
|
push @{$c->{stat}}, $_;
|
|
|
|
$stat = undef;
|
2007-01-12 10:35:20 +00:00
|
|
|
} elsif (/^${esc_color} (git-svn-id:.+)$/o) {
|
|
|
|
($c->{url}, $c->{r}, undef) = ::extract_metadata($1);
|
|
|
|
} elsif (s/^${esc_color} //o) {
|
|
|
|
push @{$c->{l}}, $_;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if ($c && defined $c->{r} && $c->{r} != $r_last) {
|
|
|
|
$r_last = $c->{r};
|
|
|
|
process_commit($c, $r_min, $r_max, \@k);
|
|
|
|
}
|
|
|
|
if (@k) {
|
2007-11-12 06:56:52 +00:00
|
|
|
($r_min, $r_max) = ($r_max, $r_min);
|
2007-01-12 10:35:20 +00:00
|
|
|
process_commit($_, $r_min, $r_max) foreach reverse @k;
|
|
|
|
}
|
|
|
|
out:
|
2007-01-12 11:07:31 +00:00
|
|
|
close $log;
|
2007-11-12 06:56:52 +00:00
|
|
|
print commit_log_separator unless $incremental || $oneline;
|
2007-01-12 10:35:20 +00:00
|
|
|
}
|
|
|
|
|
2008-02-10 04:51:08 +00:00
|
|
|
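# Usage sketch (hypothetical repository):
#
#   git svn log              # svn-style log for the current branch
#   git svn log -r 100:200   # ascending range; matches are deferred
#                            # onto @k and replayed in reverse above
#   git svn log --oneline --show-commit
#
# Other arguments are passed through to "git log" by git_svn_log_cmd().
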
sub cmd_blame {
	my $path = pop;

	config_pager();
	run_pager();

	my ($fh, $ctx, $rev);

	if ($_git_format) {
		($fh, $ctx) = command_output_pipe('blame', @_, $path);
		while (my $line = <$fh>) {
			if ($line =~ /^\^?([[:xdigit:]]+)\s/) {
				# Uncommitted edits show up as a rev ID of
				# all zeros, which we can't look up with
				# cmt_metadata
				if ($1 !~ /^0+$/) {
					(undef, $rev, undef) =
						::cmt_metadata($1);
					$rev = '0' if (!$rev);
				} else {
					$rev = '0';
				}
				$rev = sprintf('%-10s', $rev);
				$line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/;
			}
			print $line;
		}
	} else {
		($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD',
						  '--', $path);
		my ($sha1);
		my %authors;
		my @buffer;
		my %dsha; # distinct sha1 keys

		while (my $line = <$fh>) {
			push @buffer, $line;
			if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
				$dsha{$1} = 1;
			}
		}

		my $s2r = ::cmt_sha2rev_batch([keys %dsha]);

		foreach my $line (@buffer) {
			if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
				$rev = $s2r->{$1};
				$rev = '0' if (!$rev);
			}
			elsif ($line =~ /^author (.*)/) {
				$authors{$rev} = $1;
				$authors{$rev} =~ s/\s/_/g;
			}
			elsif ($line =~ /^\t(.*)$/) {
				printf("%6s %10s %s\n", $rev, $authors{$rev}, $1);
			}
		}
	}
	command_close_pipe($fh, $ctx);
}

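# Output sketch (made-up revisions and names): the default mode builds
# svn-style blame lines from "git blame -p",
#
#      8 some_author int main(void)
#
# while --git-format keeps ordinary "git blame" output but replaces each
# leading sha1 with the corresponding left-aligned SVN revision number.
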
package Git::SVN::Migration;
# these version numbers do NOT correspond to actual version numbers
# of git or git-svn.  They are just relative.
#
# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
#
# v1 layout: .git/$id/info/url, refs/remotes/$id
#
# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
#
# v3 layout: .git/svn/$id, refs/remotes/$id
#            - info/url may remain for backwards compatibility
#            - this is what we migrate up to automatically,
#            - this will be used by git svn init on single branches
# v3.1 layout (auto migrated):
#            - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink
#              for backwards compatibility
#
# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
#            - this is only created for newly multi-init-ed
#              repositories.  Similar in spirit to the
#              --use-separate-remotes option in git-clone (now default)
#            - we do not automatically migrate to this (following
#              the example set by core git)
#
# v5 layout: .rev_db.$UUID => .rev_map.$UUID
#            - newer, more-efficient format that uses 24 bytes per record
#              with no filler space.
#            - use "xxd -c24 < .rev_map.$UUID" to view and debug
#            - This is a one-way migration; repositories updated to the
#              new format will not be able to use old git-svn without
#              rebuilding the .rev_db.  Rebuilding the rev_db is not
#              possible if noMetadata or useSvmProps are set, but should
#              be no problem for users that use the (sensible) defaults.
use strict;
use warnings;
use Carp qw/croak/;
use File::Path qw/mkpath/;
use File::Basename qw/dirname basename/;
use vars qw/$_minimize/;

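# For reference, a .rev_map.$UUID record (v5) is a fixed 24 bytes: a
# 4-byte big-endian SVN revision number followed by the 20-byte commit
# sha1, roughly pack('NH40', $rev, $sha1).  (Sketch for debugging; the
# real reader/writer lives in the Git::SVN package.)
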
sub migrate_from_v0 {
	my $git_dir = $ENV{GIT_DIR};
	return undef unless -d $git_dir;
	my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
	my $migrated = 0;
	while (<$fh>) {
		chomp;
		my ($id, $orig_ref) = ($_, $_);
		next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
		next unless -f "$git_dir/$id/info/url";
		my $new_ref = "refs/remotes/$id";
		if (::verify_ref("$new_ref^0")) {
			print STDERR "W: $orig_ref is probably an old ",
				"branch used by an ancient version of ",
				"git-svn.\n",
				"However, $new_ref also exists.\n",
				"We will not be able ",
				"to use this branch until this ",
				"ambiguity is resolved.\n";
			next;
		}
		print STDERR "Migrating from v0 layout...\n" if !$migrated;
		print STDERR "Renaming ref: $orig_ref => $new_ref\n";
		command_noisy('update-ref', $new_ref, $orig_ref);
		command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
		$migrated++;
	}
	command_close_pipe($fh, $ctx);
	print STDERR "Done migrating from v0 layout...\n" if $migrated;
	$migrated;
}

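# Example (hypothetical refs): an ancient v0 clone with
# refs/heads/git-svn-HEAD and .git/git-svn/info/url present gets its
# ref renamed to refs/remotes/git-svn; nothing is renamed if the new
# ref already exists.
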
sub migrate_from_v1 {
	my $git_dir = $ENV{GIT_DIR};
	my $migrated = 0;
	return $migrated unless -d $git_dir;
	my $svn_dir = "$git_dir/svn";

	# just in case somebody used 'svn' as their $id at some point...
	return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";

	print STDERR "Migrating from a git-svn v1 layout...\n";
	mkpath([$svn_dir]);
	print STDERR "Data from a previous version of git-svn exists, but\n\t",
	             "$svn_dir\n\t(required for this version ",
	             "($::VERSION) of git-svn) does not exist.\n";
	my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
	while (<$fh>) {
		my $x = $_;
		next unless $x =~ s#^refs/remotes/##;
		chomp $x;
		next unless -f "$git_dir/$x/info/url";
		my $u = eval { ::file_to_s("$git_dir/$x/info/url") };
		next unless $u;
		my $dn = dirname("$git_dir/svn/$x");
		mkpath([$dn]) unless -d $dn;
		if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
			mkpath(["$git_dir/svn/svn"]);
			print STDERR " - $git_dir/$x/info => ",
				"$git_dir/svn/$x/info\n";
			rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or
				croak "$!: $x";
			# don't worry too much about these, they probably
			# don't exist with repos this old (save for index,
			# and we can easily regenerate that)
			foreach my $f (qw/unhandled.log index .rev_db/) {
				rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f";
			}
		} else {
			print STDERR " - $git_dir/$x => $git_dir/svn/$x\n";
			rename "$git_dir/$x", "$git_dir/svn/$x" or
				croak "$!: $x";
		}
		$migrated++;
	}
	command_close_pipe($fh, $ctx);
	print STDERR "Done migrating from a git-svn v1 layout\n";
	$migrated;
}

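# Example (hypothetical layout): a v1 repository with
# .git/git-svn/info/url and refs/remotes/git-svn has its metadata
# directory moved wholesale to .git/svn/git-svn; only info/ (plus
# unhandled.log, index and .rev_db) is moved when the id happens to
# be 'svn' itself.
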
sub read_old_urls {
	my ($l_map, $pfx, $path) = @_;
	my @dir;
	foreach (<$path/*>) {
		if (-r "$_/info/url") {
			$pfx .= '/' if $pfx && $pfx !~ m!/$!;
			my $ref_id = $pfx . basename $_;
			my $url = ::file_to_s("$_/info/url");
			$l_map->{$ref_id} = $url;
		} elsif (-d $_) {
			push @dir, $_;
		}
	}
	foreach (@dir) {
		my $x = $_;
		$x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
		read_old_urls($l_map, $x, $_);
	}
}

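# Sketch of the result (hypothetical paths): after recursing through
# .git/svn/, %$l_map maps ref ids to their old URLs, e.g.
#
#	{ 'trunk'       => 'https://svn.example.org/repo/trunk',
#	  'tags/0.11.5' => 'https://svn.example.org/repo/tags/0.11.5' }
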
sub migrate_from_v2 {
	my @cfg = command(qw/config -l/);
	return if grep /^svn-remote\..+\.url=/, @cfg;
	my %l_map;
	read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
	my $migrated = 0;

	foreach my $ref_id (sort keys %l_map) {
		eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
		if ($@) {
			Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
		}
		$migrated++;
	}
	$migrated;
}

sub minimize_connections {
	my $r = Git::SVN::read_all_remotes();
	my $new_urls = {};
	my $root_repos = {};
	foreach my $repo_id (keys %$r) {
		my $url = $r->{$repo_id}->{url} or next;
		my $fetch = $r->{$repo_id}->{fetch} or next;
		my $ra = Git::SVN::Ra->new($url);

		# skip existing cases where we already connect to the root
		if (($ra->{url} eq $ra->{repos_root}) ||
		    ($ra->{repos_root} eq $repo_id)) {
			$root_repos->{$ra->{url}} = $repo_id;
			next;
		}

		my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
		my $root_path = $ra->{url};
		$root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
		foreach my $path (keys %$fetch) {
			my $ref_id = $fetch->{$path};
			my $gs = Git::SVN->new($ref_id, $repo_id, $path);

			# make sure we can read when connecting to
			# a higher level of a repository
			my ($last_rev, undef) = $gs->last_rev_commit;
			if (!defined $last_rev) {
				$last_rev = eval {
					$root_ra->get_latest_revnum;
				};
				next if $@;
			}
			my $new = $root_path;
			$new .= length $path ? "/$path" : '';
			eval {
				$root_ra->get_log([$new], $last_rev, $last_rev,
						  0, 0, 1, sub { });
			};
			next if $@;
			$new_urls->{$ra->{repos_root}}->{$new} =
				{ ref_id => $ref_id,
				  old_repo_id => $repo_id,
				  old_path => $path };
		}
	}

	my @emptied;
	foreach my $url (keys %$new_urls) {
		# see if we can re-use an existing [svn-remote "repo_id"]
		# instead of creating a(n ugly) new section:
		my $repo_id = $root_repos->{$url} || $url;

		my $fetch = $new_urls->{$url};
		foreach my $path (keys %$fetch) {
			my $x = $fetch->{$path};
			Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
			my $pfx = "svn-remote.$x->{old_repo_id}";

			my $old_fetch = quotemeta("$x->{old_path}:".
						  "$x->{ref_id}");
			command_noisy(qw/config --unset/,
				      "$pfx.fetch", '^'. $old_fetch . '$');
			delete $r->{$x->{old_repo_id}}->
			       {fetch}->{$x->{old_path}};
			if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
				command_noisy(qw/config --unset/,
					      "$pfx.url");
				push @emptied, $x->{old_repo_id};
			}
		}
	}
	if (@emptied) {
		my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
		print STDERR <<EOF;
The following [svn-remote] sections in your config file ($file) are empty
and can be safely removed:
EOF
		print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
	}
}

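# Config sketch (hypothetical URLs): a remote that pointed below the
# repository root, e.g.
#
#	[svn-remote "svn"]
#		url = https://svn.example.org/repo/trunk
#		fetch = :refs/remotes/git-svn
#
# is rewritten to connect at the root instead:
#
#	[svn-remote "svn"]
#		url = https://svn.example.org/repo
#		fetch = trunk:refs/remotes/git-svn
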
sub migration_check {
	migrate_from_v0();
	migrate_from_v1();
	migrate_from_v2();
	minimize_connections() if $_minimize;
}

package Git::IndexInfo;
use strict;
use warnings;
use Git qw/command_input_pipe command_close_pipe/;

sub new {
	my ($class) = @_;
	my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
	bless { gui => $gui, ctx => $ctx, nr => 0 }, $class;
}

sub remove {
	my ($self, $path) = @_;
	if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
		return ++$self->{nr};
	}
	undef;
}

sub update {
	my ($self, $mode, $hash, $path) = @_;
	if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
		return ++$self->{nr};
	}
	undef;
}

sub DESTROY {
	my ($self) = @_;
	command_close_pipe($self->{gui}, $self->{ctx});
}

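# Usage sketch ($sha1 assumed to be an existing blob id):
#
#	my $ii = Git::IndexInfo->new;
#	$ii->update('100644', $sha1, 'path/to/file'); # stage a blob
#	$ii->remove('path/to/gone');                  # drop an entry
#	# the update-index pipe is closed when $ii is destroyed, so
#	# changes are flushed once $ii goes out of scope
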
package Git::SVN::GlobSpec;
use strict;
use warnings;

sub new {
	my ($class, $glob, $pattern_ok) = @_;
	my $re = $glob;
	$re =~ s!/+$!!g; # no need for trailing slashes
	my (@left, @right, @patterns);
	my $state = "left";
	my $die_msg = "Only one set of wildcard directories " .
		      "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
	for my $part (split(m|/|, $glob)) {
		if ($part =~ /\*/ && $part ne "*") {
			die "Invalid pattern in '$glob': $part\n";
		} elsif ($pattern_ok && $part =~ /[{}]/ &&
			 $part !~ /^\{[^{}]+\}/) {
			die "Invalid pattern in '$glob': $part\n";
		}
		if ($part eq "*") {
			die $die_msg if $state eq "right";
			$state = "pattern";
			push(@patterns, "[^/]*");
		} elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
			die $die_msg if $state eq "right";
			$state = "pattern";
			my $p = quotemeta($1);
			$p =~ s/\\,/|/g;
			push(@patterns, "(?:$p)");
		} else {
			if ($state eq "left") {
				push(@left, $part);
			} else {
				push(@right, $part);
				$state = "right";
			}
		}
	}
	my $depth = @patterns;
	if ($depth == 0) {
		die "One '*' is needed in glob: '$glob'\n";
	}
	my $left = join('/', @left);
	my $right = join('/', @right);
	$re = join('/', @patterns);
	$re = join('\/',
		   grep(length, quotemeta($left), "($re)", quotemeta($right)));
	my $left_re = qr/^\/\Q$left\E(\/|$)/;
	bless { left => $left, right => $right, left_regex => $left_re,
	        regex => qr/$re/, glob => $glob, depth => $depth }, $class;
}

sub full_path {
	my ($self, $path) = @_;
	return (length $self->{left} ? "$self->{left}/" : '') .
	       $path . (length $self->{right} ? "/$self->{right}" : '');
}

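# Usage sketch:
#
#	my $g = Git::SVN::GlobSpec->new('branches/*/src', 0);
#	# $g->{left}  eq 'branches';  $g->{right} eq 'src'
#	# $g->full_path('foo') eq 'branches/foo/src'
#	# 'branches/foo/src' =~ $g->{regex} captures 'foo'
#
# With $pattern_ok, a part like '{1.0,2.0}' becomes the alternation
# (?:1\.0|2\.0) in the resulting regex.
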
__END__

Data structures:


$remotes = { # returned by read_all_remotes()
	'svn' => {
		# svn-remote.svn.url=https://svn.musicpd.org
		url => 'https://svn.musicpd.org',
		# svn-remote.svn.fetch=mpd/trunk:trunk
		fetch => {
			'mpd/trunk' => 'trunk',
		},
		# svn-remote.svn.tags=mpd/tags/*:tags/*
		tags => {
			path => {
				left => 'mpd/tags',
				right => '',
				regex => qr!mpd/tags/([^/]+)$!,
				glob => 'tags/*',
			},
			ref => {
				left => 'tags',
				right => '',
				regex => qr!tags/([^/]+)$!,
				glob => 'tags/*',
			},
		}
	}
};

$log_entry hashref as returned by libsvn_log_entry()
{
	log => 'whitespace-formatted log entry
',	# trailing newline is preserved
	revision => '8',	# integer
	date => '2004-02-24T17:01:44.108345Z',	# commit date
	author => 'committer name'
};

# this is generated by generate_diff();
@mods = array of diff-index line hashes, each element represents one line
	of diff-index output

diff-index line ($m hash)
{
	mode_a => first column of diff-index output, no leading ':',
	mode_b => second column of diff-index output,
	sha1_b => sha1sum of the final blob,
	chg => change type [MCRADT],
	file_a => original file name of a file (iff chg is 'C' or 'R')
	file_b => new/current file name of a file (any chg)
};

# retval of read_url_paths{,_all}();
$l_map = {
	# repository root url
	'https://svn.musicpd.org' => {
		# repository path	# GIT_SVN_ID
		'mpd/trunk'		=> 'trunk',
		'mpd/tags/0.11.5'	=> 'tags/0.11.5',
	},
}

Notes:
	I don't trust the each() function unless I created %hash myself,
	because the internal iterator may not have started at base.