use Digest::MD5;
use List::Util qw(any);
use List::MoreUtils qw(pairwise);
+use Carp;
use Debian::Dgit;
+# lrfetchrefs is the local ref namespace (refs/dgit-fetch/<suite>)
+# holding refs fetched from the server; lrfetchref is the fetched
+# counterpart of the server branch for the current suite ($csuite).
sub lrfetchrefs () { return "refs/dgit-fetch/$csuite"; }
sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
+# We fetch some parts of lrfetchrefs/*. Ideally we delete these
+# locally fetched refs because they have unhelpful names and clutter
+# up gitk etc. So we track whether we have "used up" each head ref (ie,
+# whether we have made another local ref which refers to this object).
+#
+# (If we deleted them unconditionally, then we might end up
+# re-fetching the same git objects each time dgit fetch was run.)
+#
+# So, each use of lrfetchrefs needs to be accompanied by arrangements
+# in git_fetch_us to fetch the refs in question, and possibly a call
+# to lrfetchref_used.
+
+our (%lrfetchrefs_f, %lrfetchrefs_d);
+# $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
+
+# Record that the fetched ref $fullrefname has been "used", ie that we
+# have made another local ref referring to the same object.  Used refs
+# go into %lrfetchrefs_d, from which del_lrfetchrefs later deletes the
+# corresponding cluttersome lrfetchrefs entries.
+sub lrfetchref_used ($) {
+ my ($fullrefname) = @_;
+ my $objid = $lrfetchrefs_f{$fullrefname};
+ $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
+}
+
sub stripepoch ($) {
my ($vsn) = @_;
$vsn =~ s/^\d+\://;
sub changedir ($) {
my ($newdir) = @_;
printdebug "CD $newdir\n";
- chdir $newdir or die "chdir: $newdir: $!";
+ # confess (from Carp, imported above) reports the failure with a
+ # stack backtrace, showing which caller attempted the failing chdir.
+ chdir $newdir or confess "chdir: $newdir: $!";
}
sub deliberately ($) {
return $c;
}
+sub commit_getclogp ($) {
+ # Returns the parsed changelog hashref for a particular commit
+ # ($objid).  Results are memoised per process, keyed by commit id,
+ # in %commit_getclogp_memo.
+ my ($objid) = @_;
+ our %commit_getclogp_memo;
+ my $memo = $commit_getclogp_memo{$objid};
+ return $memo if $memo;
+ # Not cached yet: dump $objid:debian/changelog into a file under
+ # .git/dgit and parse it from there.
+ mkpath '.git/dgit';
+ my $mclog = ".git/dgit/clog-$objid";
+ runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
+ "$objid:debian/changelog";
+ $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
+}
+
sub must_getcwd () {
my $d = getcwd();
defined $d or fail "getcwd failed: $!";
my @output = ($rawimport_mergeinput);
progress "synthesised git commit from .dsc $cversion";
if ($lastpush_mergeinput) {
- my $lastpush_hash = $lastpush_mergeinput->{Commit};
- runcmd @git, qw(reset -q --hard), $lastpush_hash;
- runcmd qw(sh -ec), 'dpkg-parsechangelog >>../changelogold.tmp';
- my $oldclogp = parsecontrol('../changelogold.tmp','previous changelog');
+ my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
my $oversion = getfield $oldclogp, 'Version';
my $vcmp =
version_compare($oversion, $cversion);
END
@output = $lastpush_mergeinput;
} else {
+ # Same version. Use what's in the server git branch,
+ # discarding our own import. (This could happen if the
+ # server automatically imports all packages into git.)
@output = $lastpush_mergeinput;
}
}
}
sub git_fetch_us () {
- my @specs =
- map { "+refs/$_/*:".lrfetchrefs."/$_/*" }
- qw(tags heads), $branchprefix;
- runcmd_ordryrun_local @git, qw(fetch -p -n -q), access_giturl(), @specs;
+ # Want to fetch only what we are going to use, unless
+ # deliberately-not-ff, in which case we must fetch everything.
+
+ my @specs = deliberately_not_fast_forward ? qw(tags/*) :
+ map { "tags/$_" } debiantags('*',access_basedistro);
+ push @specs, server_branch($csuite);
+ push @specs, qw(heads/*) if deliberately_not_fast_forward;
+
+ # This is rather miserable:
+ # When git-fetch --prune is passed a fetchspec ending with a *,
+ # it does a plausible thing. If there is no * then:
+ # - it matches subpaths too, even if the supplied refspec
+ # starts refs, and behaves completely madly if the source
+ # has refs/refs/something. (See, for example, Debian #NNNN.)
+ # - if there is no matching remote ref, it bombs out the whole
+ # fetch.
+ # We want to fetch a fixed ref, and we don't know in advance
+ # if it exists, so this is not suitable.
+ #
+ # Our workaround is to use git-ls-remote. git-ls-remote has its
+ # own quirks. Notably, it has the absurd multi-tail-matching
+ # behaviour: git-ls-remote R refs/foo can report refs/foo AND
+ # refs/refs/foo etc.
+ #
+ # Also, we want an idempotent snapshot, but we have to make two
+ # calls to the remote: one to git-ls-remote and one to git-fetch. The
+ # solution is to use git-ls-remote to obtain a target state, and
+ # git-fetch to try to generate it. If we don't manage to generate
+ # the target state, we try again.
+
+ my $specre = join '|', map {
+ my $x = $_;
+ $x =~ s/\W/\\$&/g;
+ $x =~ s/\\\*$/.*/;
+ "(?:refs/$x)";
+ } @specs;
+ printdebug "git_fetch_us specre=$specre\n";
+ my $wanted_rref = sub {
+ local ($_) = @_;
+ return m/^(?:$specre)$/o;
+ };
+
+ my $fetch_iteration = 0;
+ FETCH_ITERATION:
+ for (;;) {
+ if (++$fetch_iteration > 10) {
+ fail "too many iterations trying to get sane fetch!";
+ }
+
+ my @look = map { "refs/$_" } @specs;
+ my @lcmd = (@git, qw(ls-remote -q --refs), access_giturl(), @look);
+ debugcmd "|",@lcmd;
+
+ my %wantr;
+ open GITLS, "-|", @lcmd or die $!;
+ while (<GITLS>) {
+ printdebug "=> ", $_;
+ m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
+ my ($objid,$rrefname) = ($1,$2);
+ if (!$wanted_rref->($rrefname)) {
+ print STDERR <<END;
+warning: git-ls-remote @look reported $rrefname; this is silly, ignoring it.
+END
+ next;
+ }
+ $wantr{$rrefname} = $objid;
+ }
+ $!=0; $?=0;
+ close GITLS or failedcmd @lcmd;
+
+ # OK, now %wantr is exactly what we want for refs in @specs
+ my @fspecs = map {
+ return () if !m/\*$/ && !exists $wantr{"refs/$_"};
+ "+refs/$_:".lrfetchrefs."/$_";
+ } @specs;
+
+ my @fcmd = (@git, qw(fetch -p -n -q), access_giturl(), @fspecs);
+ runcmd_ordryrun_local @git, qw(fetch -p -n -q), access_giturl(),
+ @fspecs;
+
+ %lrfetchrefs_f = ();
+ my %objgot;
+
+ git_for_each_ref(lrfetchrefs, sub {
+ my ($objid,$objtype,$lrefname,$reftail) = @_;
+ $lrfetchrefs_f{$lrefname} = $objid;
+ $objgot{$objid} = 1;
+ });
+
+ foreach my $lrefname (sort keys %lrfetchrefs_f) {
+ my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
+ if (!exists $wantr{$rrefname}) {
+ if ($wanted_rref->($rrefname)) {
+ printdebug <<END;
+git-fetch @fspecs created $lrefname which git-ls-remote @look didn't list.
+END
+ } else {
+ print STDERR <<END
+warning: git-fetch @fspecs created $lrefname; this is silly, deleting it.
+END
+ }
+ runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
+ delete $lrfetchrefs_f{$lrefname};
+ next;
+ }
+ }
+ foreach my $rrefname (sort keys %wantr) {
+ my $lrefname = lrfetchrefs.substr($rrefname, 4);
+ my $got = $lrfetchrefs_f{$lrefname} // '<none>';
+ my $want = $wantr{$rrefname};
+ next if $got eq $want;
+ if (!defined $objgot{$want}) {
+ print STDERR <<END;
+warning: git-ls-remote suggests we want $lrefname
+warning: and it should refer to $want
+warning: but git-fetch didn't fetch that object to any relevant ref.
+warning: This may be due to a race with someone updating the server.
+warning: Will try again...
+END
+ next FETCH_ITERATION;
+ }
+ printdebug <<END;
+git-fetch @fspecs made $lrefname=$got but want git-ls-remote @look says $want
+END
+ runcmd_ordryrun_local @git, qw(update-ref -m),
+ "dgit fetch git-fetch fixup", $lrefname, $want;
+ $lrfetchrefs_f{$lrefname} = $want;
+ }
+ last;
+ }
+ printdebug "git_fetch_us: git-fetch --no-insane emulation complete\n",
+ Dumper(\%lrfetchrefs_f);
my %here;
my @tagpats = debiantags('*',access_basedistro);
});
git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
my ($objid,$objtype,$fullrefname,$reftail) = @_;
- my $lref = "refs".substr($fullrefname, length lrfetchrefs);
+ my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
printdebug "offered $lref=$objid\n";
if (!defined $here{$lref}) {
my @upd = (@git, qw(update-ref), $lref, $objid, '');
runcmd_ordryrun_local @upd;
+ lrfetchref_used $fullrefname;
} elsif ($here{$lref} eq $objid) {
+ lrfetchref_used $fullrefname;
} else {
print STDERR \
"Not updateting $lref from $here{$lref} to $objid.\n";
}
sub mergeinfo_getclogp ($) {
- my ($mi) = @_;
# Ensures that $mi->{Clogp} exists and returns it
- return $mi->{Clogp} if $mi->{Clogp};
- my $mclog = ".git/dgit/clog-$mi->{Commit}";
- mkpath '.git/dgit';
- runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
- "$mi->{Commit}:debian/changelog";
- $mi->{Clogp} = parsechangelog("-l$mclog");
+ my ($mi) = @_;
+ # Delegate to commit_getclogp, which memoises the parsed changelog
+ # per commit id; the assignment's value is what gets returned.
+ $mi->{Clogp} = commit_getclogp($mi->{Commit});
}
sub mergeinfo_version ($) {
}
sub fetch_from_archive () {
- # ensures that lrref() is what is actually in the archive,
- # one way or another
+ # Ensures that lrref() is what is actually in the archive, one way
+ # or another, according to us - ie this client's
+ # appropriately-updated archive view. Also returns the commit id.
+ # If there is nothing in the archive, leaves lrref alone and
+ # returns undef. git_fetch_us must have already been called.
get_archive_dsc();
if ($dsc) {
Info => "Dgit field in .dsc from archive",
};
+ my $cwd = getcwd();
+ my $del_lrfetchrefs = sub {
+ changedir $cwd;
+ my $gur;
+ printdebug "del_lrfetchrefs...\n";
+ foreach my $fullrefname (sort keys %lrfetchrefs_d) {
+ my $objid = $lrfetchrefs_d{$fullrefname};
+ printdebug "del_lrfetchrefs: $objid $fullrefname\n";
+ if (!$gur) {
+ $gur ||= new IO::Handle;
+ open $gur, "|-", qw(git update-ref --stdin) or die $!;
+ }
+ printf $gur "delete %s %s\n", $fullrefname, $objid;
+ }
+ if ($gur) {
+ close $gur or failedcmd "git update-ref delete lrfetchrefs";
+ }
+ };
+
if (defined $dsc_hash) {
fail "missing remote git history even though dsc has hash -".
" could not find ref ".rref()." at ".access_giturl()
END
}
- return 0;
+ unshift @end, $del_lrfetchrefs;
+ return undef;
}
if ($lastfetch_hash &&
if (defined $skew_warning_vsn) {
mkpath '.git/dgit';
printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
- my $clogf = ".git/dgit/changelog.tmp";
- runcmd shell_cmd "exec >$clogf",
- @git, qw(cat-file blob), "$hash:debian/changelog";
- my $gotclogp = parsechangelog("-l$clogf");
+ my $gotclogp = commit_getclogp($hash);
my $got_vsn = getfield $gotclogp, 'Version';
printdebug "SKEW CHECK GOT $got_vsn\n";
if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
dryrun_report @upd_cmd;
}
}
- return 1;
+
+ lrfetchref_used lrfetchref();
+
+ unshift @end, $del_lrfetchrefs;
+ return $hash;
}
sub set_local_git_config ($$) {
}
}
-sub dopush ($) {
- my ($forceflag) = @_;
+sub dopush () {
printdebug "actually entering push\n";
+
+ supplementary_message(<<'END');
+Push failed, while checking state of the archive.
+You can retry the push, after fixing the problem, if you like.
+END
+ if (check_for_git()) {
+ git_fetch_us();
+ }
+ my $archive_hash = fetch_from_archive();
+ if (!$archive_hash) {
+ $new_package or
+ fail "package appears to be new in this suite;".
+ " if this is intentional, use --new";
+ }
+
supplementary_message(<<'END');
Push failed, while preparing your push.
You can retry the push, after fixing the problem, if you like.
}
check_not_dirty();
+
+ my $forceflag = '';
+ if ($archive_hash) {
+ if (is_fast_fwd($archive_hash, $dgithead)) {
+ # ok
+ } elsif (deliberately_not_fast_forward) {
+ $forceflag = '+';
+ } else {
+ fail "dgit push: HEAD is not a descendant".
+ " of the archive's version.\n".
+ "dgit: To overwrite its contents,".
+ " use git merge -s ours ".lrref().".\n".
+ "dgit: To rewind history, if permitted by the archive,".
+ " use --deliberately-not-fast-forward";
+ }
+ }
+
changedir $ud;
progress "checking that $dscfn corresponds to HEAD";
runcmd qw(dpkg-source -x --),
$changesfile = "$buildproductsdir/$changesfile";
}
+ # Checks complete, we're going to try and go ahead:
+
responder_send_file('changes',$changesfile);
responder_send_command("param head $dgithead");
responder_send_command("param csuite $csuite");
create_remote_git_repo();
}
- my @pushrefs = $forceflag."HEAD:".rrref();
+ my @pushrefs = $forceflag.$dgithead.":".rrref();
foreach my $tw (@tagwants) {
my $view = $tw->{View};
next unless $view eq 'dgit'
or any { $_ eq $view } access_cfg_tagformats();
+ # ^ $view is "dgit" or "maint" so this looks for "maint"
+ # in archive supported tagformats.
push @pushrefs, $forceflag."refs/tags/$tw->{Tag}";
}
fail "dgit push: changelog specifies $isuite ($csuite)".
" but command line specifies $specsuite";
}
- supplementary_message(<<'END');
-Push failed, while checking state of the archive.
-You can retry the push, after fixing the problem, if you like.
-END
- if (check_for_git()) {
- git_fetch_us();
- }
- my $forceflag = '';
- if (fetch_from_archive()) {
- if (is_fast_fwd(lrref(), 'HEAD')) {
- # ok
- } elsif (deliberately_not_fast_forward) {
- $forceflag = '+';
- } else {
- fail "dgit push: HEAD is not a descendant".
- " of the archive's version.\n".
- "dgit: To overwrite its contents,".
- " use git merge -s ours ".lrref().".\n".
- "dgit: To rewind history, if permitted by the archive,".
- " use --deliberately-not-fast-forward";
- }
- } else {
- $new_package or
- fail "package appears to be new in this suite;".
- " if this is intentional, use --new";
- }
- dopush($forceflag);
+ dopush();
}
#---------- remote commands' implementation ----------
if $patches_applied_dirtily & 01;
rmtree '.pc'
if $patches_applied_dirtily & 02;
+ $patches_applied_dirtily = 0;
}
#----- other building -----
}
build_prep();
}
+ maybe_unapply_patches_again();
if ($wantsrc < 2) {
unless (grep { m/^--git-debian-branch|^--git-ignore-branch/ } @ARGV) {
canonicalise_suite();
push @cmd, "--git-debian-branch=".lbranch();
}
push @cmd, changesopts();
- maybe_apply_patches_dirtily();
runcmd_ordryrun_local @cmd, @ARGV;
}
- maybe_unapply_patches_again();
printdone "build successful\n";
}
sub cmd_git_build { cmd_gbp_build(); } # compatibility with <= 1.0
" building would result in ambiguity about the intended results"
if @unwanted;
}
+ my $wasdir = must_getcwd();
changedir "..";
if (act_local()) {
stat_exists $dscfn or fail "$dscfn (in parent directory): $!";
rename "$cf", "$cf.inmulti" or fail "$cf\{,.inmulti}: $!";
}
}
+ changedir $wasdir;
maybe_unapply_patches_again();
printdone "build successful, results in $multichanges\n" or die $!;
}