X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?p=dgit.git;a=blobdiff_plain;f=dgit;h=1fde60bba61f663b82f931aaf454f13b193bb7b1;hp=db52f91ef04a04c4deb2179c04f006bf1cb02975;hb=d5a04ee68cb3ed5c2853bae83b132505b850a5e6;hpb=6b764d3497dce00128d8fd56d4b2401d875606d1 diff --git a/dgit b/dgit index db52f91e..1fde60bb 100755 --- a/dgit +++ b/dgit @@ -2,8 +2,8 @@ # dgit # Integration between git and Debian-style archives # -# Copyright (C)2013-2017 Ian Jackson -# Copyright (C)2017 Sean Whitton +# Copyright (C)2013-2018 Ian Jackson +# Copyright (C)2017-2018 Sean Whitton # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -20,6 +20,7 @@ END { $? = $Debian::Dgit::ExitStatus::desired // -1; }; use Debian::Dgit::ExitStatus; +use Debian::Dgit::I18n; use strict; @@ -37,6 +38,7 @@ use Dpkg::Version; use Dpkg::Compression; use Dpkg::Compression::Process; use POSIX; +use Locale::gettext; use IPC::Open2; use Digest::SHA; use Digest::MD5; @@ -78,7 +80,7 @@ our $overwrite_version; # undef: not specified; '': check changelog our $quilt_mode; our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied'; our $dodep14tag; -our $split_brain_save; +our %internal_object_save; our $we_are_responder; our $we_are_initiator; our $initiator_tempdir; @@ -100,9 +102,6 @@ our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)"); our $suite_re = '[-+.0-9a-z]+'; our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none'; -our $orig_f_comp_re = qr{orig(?:-$extra_orig_namepart_re)?}; -our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)'; -our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?"; our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$'; our $splitbraincache = 'dgit-intern/quilt-cache'; @@ -116,7 +115,7 @@ our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L)); our (@dput) = qw(dput); our (@debsign) = qw(debsign); our (@gpg) = qw(gpg); -our (@sbuild) = qw(sbuild); +our (@sbuild) = (qw(sbuild --no-source)); our (@ssh) = 'ssh'; our (@dgit) = qw(dgit); our (@git_debrebase) = qw(git-debrebase); @@ -129,6 +128,8 @@ our (@mergechanges) = qw(mergechanges -f); our (@gbp_build) = (''); our (@gbp_pq) = ('gbp pq'); our (@changesopts) = (''); +our (@pbuilder) = ("sudo -E pbuilder"); +our (@cowbuilder) = ("sudo -E cowbuilder"); our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'curl' => \@curl, @@ -148,7 +149,9 @@ our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'gbp-build' => \@gbp_build, 'gbp-pq' => \@gbp_pq, 'ch' => \@changesopts, - 'mergechanges' => \@mergechanges); + 'mergechanges' => \@mergechanges, + 'pbuilder' => \@pbuilder, + 'cowbuilder' => \@cowbuilder); our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1); our %opts_cfg_insertpos = map { @@ -166,7 +169,6 @@ our $keyid; autoflush STDOUT 1; our $supplementary_message = ''; -our $need_split_build_invocation = 1; our $split_brain = 0; END { @@ -198,15 +200,13 @@ sub lref () { return "refs/heads/".lbranch(); } sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); } sub rrref () { return server_ref($csuite); } -sub stripepoch ($) { - my ($vsn) = @_; - $vsn =~ s/^\d+\://; - return $vsn; -} - sub srcfn ($$) { - my ($vsn,$sfx) = @_; - return "${package}_".(stripepoch $vsn).$sfx + my ($vsn, $sfx) = @_; + return &source_file_leafname($package, $vsn, $sfx); +} +sub is_orig_file_of_vsn ($$) { + my ($f, $upstreamvsn) = @_; + return is_orig_file_of_p_v($f, $package, 
$upstreamvsn); } sub dscfn ($) { @@ -219,12 +219,6 @@ sub changespat ($;$) { return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes"; } -sub upstreamversion ($) { - my ($vsn) = @_; - $vsn =~ s/-[^-]+$//; - return $vsn; -} - our $us = 'dgit'; initdebug(''); @@ -238,24 +232,29 @@ END { } }; -sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; } +sub badcfg { + print STDERR f_ "%s: invalid configuration: %s\n", $us, "@_"; + finish 12; +} sub forceable_fail ($$) { my ($forceoptsl, $msg) = @_; fail $msg unless grep { $forceopts{$_} } @$forceoptsl; - print STDERR "warning: overriding problem due to --force:\n". $msg; + print STDERR +(__ "warning: overriding problem due to --force:\n"). $msg; } sub forceing ($) { my ($forceoptsl) = @_; my @got = grep { $forceopts{$_} } @$forceoptsl; return 0 unless @got; - print STDERR - "warning: skipping checks or functionality due to --force-$got[0]\n"; + print STDERR f_ + "warning: skipping checks or functionality due to --force-%s\n", + $got[0]; } sub no_such_package () { - print STDERR "$us: package $package does not exist in suite $isuite\n"; + print STDERR f_ "%s: package %s does not exist in suite %s\n", + $us, $package, $isuite; finish 4; } @@ -275,14 +274,16 @@ sub quiltmode_splitbrain () { } sub opts_opt_multi_cmd { + my $extra = shift; my @cmd; push @cmd, split /\s+/, shift @_; + push @cmd, @$extra; push @cmd, @_; @cmd; } sub gbp_pq { - return opts_opt_multi_cmd @gbp_pq; + return opts_opt_multi_cmd [], @gbp_pq; } sub dgit_privdir () { @@ -295,6 +296,14 @@ sub bpd_abs () { return $r; } +sub get_tree_of_commit ($) { + my ($commitish) = @_; + my $cdata = cmdoutput @git, qw(cat-file commit), $commitish; + $cdata =~ m/\n\n/; $cdata = $`; + $cdata =~ m/^tree (\w+)$/m or confess "cdata $cdata ?"; + return $1; +} + sub branch_gdr_info ($$) { my ($symref, $head) = @_; my ($status, $msg, $current, $ffq_prev, $gdrlast) = @@ -306,21 +315,91 @@ sub branch_gdr_info ($$) { return ($ffq_prev, $gdrlast); } -sub branch_is_gdr ($$) { - my ($symref, $head) = @_; - my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); - return 0 unless $ffq_prev || $gdrlast; - return 1; -} - sub branch_is_gdr_unstitched_ff ($$$) { my ($symref, $head, $ancestor) = @_; my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); return 0 unless $ffq_prev; - return 0 unless is_fast_fwd $ancestor, $ffq_prev; + return 0 unless !defined $ancestor or is_fast_fwd $ancestor, $ffq_prev; return 1; } +sub branch_is_gdr ($) { + my ($head) = @_; + # This is quite like git-debrebase's keycommits. + # We have our own implementation because: + # - our algorighm can do fewer tests so is faster + # - it saves testing to see if gdr is installed + + # NB we use this jsut for deciding whether to run gdr make-patches + # Before reusing this algorithm for somthing else, its + # suitability should be reconsidered. + + my $walk = $head; + local $Debian::Dgit::debugcmd_when_debuglevel = 3; + printdebug "branch_is_gdr $head...\n"; + my $get_patches = sub { + my $t = git_cat_file "$_[0]:debian/patches", [qw(missing tree)]; + return $t // ''; + }; + my $tip_patches = $get_patches->($head); + WALK: + for (;;) { + my $cdata = git_cat_file $walk, 'commit'; + my ($hdrs,$msg) = $cdata =~ m{\n\n} ? 
($`,$') : ($cdata,''); + if ($msg =~ m{^\[git-debrebase\ ( + anchor | changelog | make-patches | + merged-breakwater | pseudomerge + ) [: ] }mx) { + # no need to analyse this - it's sufficient + # (gdr classifications: Anchor, MergedBreakwaters) + # (made by gdr: Pseudomerge, Changelog) + printdebug "branch_is_gdr $walk gdr $1 YES\n"; + return 1; + } + my @parents = ($hdrs =~ m/^parent (\w+)$/gm); + if (@parents==2) { + my $walk_tree = get_tree_of_commit $walk; + foreach my $p (@parents) { + my $p_tree = get_tree_of_commit $p; + if ($p_tree eq $walk_tree) { # pseudomerge contriburor + # (gdr classification: Pseudomerge; not made by gdr) + printdebug "branch_is_gdr $walk unmarked pseudomerge\n" + if $debuglevel >= 2; + $walk = $p; + next WALK; + } + } + # some other non-gdr merge + # (gdr classification: VanillaMerge, DgitImportUnpatched, ?) + printdebug "branch_is_gdr $walk ?-2-merge NO\n"; + return 0; + } + if (@parents>2) { + # (gdr classification: ?) + printdebug "branch_is_gdr $walk ?-octopus NO\n"; + return 0; + } + if ($get_patches->($walk) ne $tip_patches) { + # Our parent added, removed, or edited patches, and wasn't + # a gdr make-patches commit. gdr make-patches probably + # won't do that well, then. + # (gdr classification of parent: AddPatches or ?) + printdebug "branch_is_gdr $walk ?-patches NO\n"; + return 0; + } + if ($tip_patches eq '' and + !defined git_cat_file "$walk:debian") { + # (gdr classification of parent: BreakwaterStart + printdebug "branch_is_gdr $walk unmarked BreakwaterStart YES\n"; + return 1; + } + # (gdr classification: Upstream Packaging Mixed Changelog) + printdebug "branch_is_gdr $walk plain\n" + if $debuglevel >= 2; + $walk = $parents[0]; + } +} + #---------- remote protocol support, common ---------- # remote push initiator/responder protocol: @@ -393,28 +472,28 @@ sub i_child_report () { die unless $got == $i_child_pid; $i_child_pid = undef; return undef unless $?; - return "build host child ".waitstatusmsg(); + return f_ "build host child %s", waitstatusmsg(); } sub badproto ($$) { my ($fh, $m) = @_; - fail "connection lost: $!" if $fh->error; - fail "protocol violation; $m not expected"; + fail f_ "connection lost: %s", $! if $fh->error; + fail f_ "protocol violation; %s not expected", $m; } sub badproto_badread ($$) { my ($fh, $wh) = @_; - fail "connection lost: $!" if $!; + fail f_ "connection lost: %s", $! 
if $!; my $report = i_child_report(); fail $report if defined $report; - badproto $fh, "eof (reading $wh)"; + badproto $fh, f_ "eof (reading %s)", $wh; } sub protocol_expect (&$) { my ($match, $fh) = @_; local $_; $_ = <$fh>; - defined && chomp or badproto_badread $fh, "protocol message"; + defined && chomp or badproto_badread $fh, __ "protocol message"; if (wantarray) { my @r = &$match; return @r if @r; @@ -422,7 +501,7 @@ sub protocol_expect (&$) { my $r = &$match; return $r if $r; } - badproto $fh, "\`$_'"; + badproto $fh, f_ "\`%s'", $_; } sub protocol_send_file ($$) { @@ -443,10 +522,10 @@ sub protocol_send_file ($$) { sub protocol_read_bytes ($$) { my ($fh, $nbytes) = @_; - $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count"; + $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, __ "bad byte count"; my $d; my $got = read $fh, $d, $nbytes; - $got==$nbytes or badproto_badread $fh, "data block"; + $got==$nbytes or badproto_badread $fh, __ "data block"; return $d; } @@ -527,7 +606,8 @@ sub url_get { progress "downloading $what..."; my $r = $ua->get(@_) or die $!; return undef if $r->code == 404; - $r->is_success or fail "failed to fetch $what: ".$r->status_line; + $r->is_success or fail f_ "failed to fetch %s: %s", + $what, $r->status_line; return $r->decoded_content(charset => 'none'); } @@ -538,9 +618,9 @@ sub act_scary () { return !$dryrun_level; } sub printdone { if (!$dryrun_level) { - progress "$us ok: @_"; + progress f_ "%s ok: %s", $us, "@_"; } else { - progress "would be ok: @_ (but dry run only)"; + progress f_ "would be ok: %s (but dry run only)", "@_"; } } @@ -564,12 +644,13 @@ sub runcmd_ordryrun_local { } } -our $helpmsg = <= set git config option (used directly by dgit too) END -our $later_warning_msg = <= 4; $l or next; - @$l==1 or badcfg "multiple values for $c". - " (in $src git config)" if @$l > 1; + @$l==1 or badcfg + f_ "multiple values for %s (in %s git config)", $c, $src + if @$l > 1; return $l->[0]; } return undef; @@ -721,8 +803,10 @@ sub cfg { return $dv; } } - badcfg "need value for one of: @_\n". - "$us: distro or suite appears not to be (properly) supported"; + badcfg f_ + "need value for one of: %s\n". + "%s: distro or suite appears not to be (properly) supported", + "@_", $us; } sub not_necessarily_a_tree () { @@ -761,7 +845,8 @@ sub access_nomdistro () { my $base = access_basedistro(); my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base; $r =~ m/^$distro_re$/ or badcfg - "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)"; + f_ "bad syntax for (nominal) distro \`%s' (does not match %s)", + $r, "/^$distro_re$/"; return $r; } @@ -775,7 +860,7 @@ sub access_quirk () { $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig; $re =~ s/\*/.*/g; $re =~ s/\%/([-0-9a-z_]+)/ - or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )"; + or $re =~ m/[()]/ or badcfg __ "backports-quirk needs \% or ( )"; if ($isuite =~ m/^$re$/) { return ('backports',"$basedistro-backports",$1); } @@ -791,7 +876,8 @@ sub parse_cfg_bool ($$$) { return $v =~ m/^[ty1]/ ? 1 : $v =~ m/^[fn0]/ ? 0 : - badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'"; + badcfg f_ "%s needs t (true, y, 1) or f (false, n, 0) not \`%s'", + $what, $v; } sub access_forpush_config () { @@ -809,7 +895,8 @@ sub access_forpush_config () { $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1 $v =~ m/^[a]/ ? 
'' : # auto, forpush = '' - badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; + badcfg __ + "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; } sub access_forpush () { @@ -818,11 +905,12 @@ sub access_forpush () { } sub pushing () { - die "$access_forpush ?" if ($access_forpush // 1) ne 1; - badcfg "pushing but distro is configured readonly" + confess +(__ 'internal error').' '.Dumper($access_forpush)," ?" if + defined $access_forpush and !$access_forpush; + badcfg __ "pushing but distro is configured readonly" if access_forpush_config() eq '0'; $access_forpush = 1; - $supplementary_message = <<'END' unless $we_are_responder; + $supplementary_message = __ <<'END' unless $we_are_responder; Push failed, before we got started. You can retry the push, after fixing the problem, if you like. END @@ -988,7 +1076,7 @@ our %rmad; sub archive_query ($;@) { my ($method) = shift @_; - fail "this operation does not support multiple comma-separated suites" + fail __ "this operation does not support multiple comma-separated suites" if $isuite =~ m/,/; my $query = access_cfg('archive-query','RETURN-UNDEF'); $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'"; @@ -1034,8 +1122,9 @@ sub archive_api_query_cmd ($) { fail "for $url: stat $key: $!" unless $!==ENOENT; next; } - fail "config requested specific TLS key but do not know". - " how to get curl to use exactly that EE key ($key)"; + fail f_ "config requested specific TLS key but do not know". + " how to get curl to use exactly that EE key (%s)", + $key; # push @cmd, "--cacert", $key, "--capath", "/dev/enoent"; # # Sadly the above line does not work because of changes # # to gnutls. The real fix for #790093 may involve @@ -1054,7 +1143,7 @@ sub archive_api_query_cmd ($) { sub api_query ($$;$) { use JSON; my ($data, $subpath, $ok404) = @_; - badcfg "ftpmasterapi archive query method takes no data part" + badcfg __ "ftpmasterapi archive query method takes no data part" if length $data; my @cmd = archive_api_query_cmd($subpath); my $url = $cmd[$#cmd]; @@ -1062,11 +1151,11 @@ sub api_query ($$;$) { my $json = cmdoutput @cmd; unless ($json =~ s/\d+\d+\d$//) { failedcmd_report_cmd undef, @cmd; - fail "curl failed to print 3-digit HTTP code"; + fail __ "curl failed to print 3-digit HTTP code"; } my $code = $&; return undef if $code eq '404' && $ok404; - fail "fetch of $url gave HTTP code $code" + fail f_ "fetch of %s gave HTTP code %s", $url, $code unless $url =~ m#^file://# or $code =~ m/^2/; return decode_json($json); } @@ -1082,15 +1171,17 @@ sub canonicalise_suite_ftpmasterapi { } qw(codename name); push @matched, $entry; } - fail "unknown suite $isuite" unless @matched; + fail f_ "unknown suite %s, maybe -d would help", $isuite + unless @matched; my $cn; eval { - @matched==1 or die "multiple matches for suite $isuite\n"; + @matched==1 or die f_ "multiple matches for suite %s\n", $isuite; $cn = "$matched[0]{codename}"; - defined $cn or die "suite $isuite info has no codename\n"; - $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n"; + defined $cn or die f_ "suite %s info has no codename\n", $isuite; + $cn =~ m/^$suite_re$/ + or die f_ "suite %s maps to bad codename\n", $isuite; }; - die "bad ftpmaster api response: $@\n".Dumper(\@matched) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper(\@matched) if length $@; return $cn; } @@ -1104,18 +1195,18 @@ sub archive_query_ftpmasterapi { eval { my $vsn = "$entry->{version}"; my ($ok,$msg) = version_check $vsn; - die "bad version: 
$msg\n" unless $ok; + die f_ "bad version: %s\n", $msg unless $ok; my $component = "$entry->{component}"; - $component =~ m/^$component_re$/ or die "bad component"; + $component =~ m/^$component_re$/ or die __ "bad component"; my $filename = "$entry->{filename}"; $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]# - or die "bad filename"; + or die __ "bad filename"; my $sha256sum = "$entry->{sha256sum}"; - $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum"; + $sha256sum =~ m/^[0-9a-f]+$/ or die __ "bad sha256sum"; push @rows, [ $vsn, "/pool/$component/$filename", $digester, $sha256sum ]; }; - die "bad ftpmaster api response: $@\n".Dumper($entry) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper($entry) if length $@; } @rows = sort { -version_compare($a->[0],$b->[0]) } @rows; @@ -1154,15 +1245,15 @@ sub aptget_cache_clean { sub aptget_lock_acquire () { my $lockfile = "$aptget_base/lock"; - open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!"; - flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!"; + open APTGET_LOCK, '>', $lockfile or confess "open $lockfile: $!"; + flock APTGET_LOCK, LOCK_EX or confess "lock $lockfile: $!"; } sub aptget_prep ($) { my ($data) = @_; return if defined $aptget_base; - badcfg "aptget archive query method takes no data part" + badcfg __ "aptget archive query method takes no data part" if length $data; my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache"; @@ -1177,7 +1268,7 @@ sub aptget_prep ($) { ensuredir $aptget_base; my $quoted_base = $aptget_base; - die "$quoted_base contains bad chars, cannot continue" + confess "$quoted_base contains bad chars, cannot continue" if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/ ensuredir $aptget_base; @@ -1193,7 +1284,7 @@ sub aptget_prep ($) { cfg_apply_map(\$aptsuites, 'suite map', access_cfg('aptget-suite-map', 'RETURN-UNDEF')); - open SRCS, ">", "$aptget_base/$sourceslist" or die $!; + open SRCS, ">", "$aptget_base/$sourceslist" or confess $!; printf SRCS "deb-src %s %s %s\n", access_cfg('mirror'), $aptsuites, @@ -1245,13 +1336,13 @@ END my @inreleasefiles = grep { m#/InRelease$# } @releasefiles; @releasefiles = @inreleasefiles if @inreleasefiles; if (!@releasefiles) { - fail <{$name}; if (defined $val) { printdebug "release file $name: $val\n"; - $val =~ m/^$suite_re$/o or fail - "Release file ($aptget_releasefile) specifies intolerable $name"; + $val =~ m/^$suite_re$/o or fail f_ + "Release file (%s) specifies intolerable %s", + $aptget_releasefile, $name; cfg_apply_map(\$val, 'suite rmap', access_cfg('aptget-suite-rmap', 'RETURN-UNDEF')); return $val @@ -1295,8 +1387,9 @@ sub archive_query_aptget { aptget_aptget(), qw(--download-only --only-source source), $package; my @dscs = <$aptget_base/source/*.dsc>; - fail "apt-get source did not produce a .dsc" unless @dscs; - fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1; + fail __ "apt-get source did not produce a .dsc" unless @dscs; + fail f_ "apt-get source produced several .dscs (%s)", "@dscs" + unless @dscs==1; my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1; @@ -1310,6 +1403,7 @@ sub file_in_archive_aptget () { return undef; } sub package_not_wholly_new_aptget () { return undef; } #---------- `dummyapicat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) 
sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; } sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; } @@ -1398,10 +1492,11 @@ sub madison_get_parse { sub canonicalise_suite_madison { # madison canonicalises for us my @r = madison_get_parse(@_); - @r or fail - "unable to canonicalise suite using package $package". - " which does not appear to exist in suite $isuite;". - " --existing-package may help"; + @r or fail f_ + "unable to canonicalise suite using package %s". + " which does not appear to exist in suite %s;". + " --existing-package may help", + $package, $isuite; return $r[0][2]; } @@ -1409,6 +1504,7 @@ sub file_in_archive_madison { return undef; } sub package_not_wholly_new_madison { return undef; } #---------- `sshpsql' archive query method ---------- +# (untranslated, because this is obsolete) sub sshpsql ($$$) { my ($data,$runeinfo,$sql) = @_; @@ -1487,6 +1583,7 @@ sub file_in_archive_sshpsql ($$$) { return undef; } sub package_not_wholly_new_sshpsql ($$$) { return undef; } #---------- `dummycat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) sub canonicalise_suite_dummycat ($$) { my ($proto,$data) = @_; @@ -1532,6 +1629,7 @@ sub file_in_archive_dummycat () { return undef; } sub package_not_wholly_new_dummycat () { return undef; } #---------- tag format handling ---------- +# (untranslated, because everything should be new tag format by now) sub access_cfg_tagformats () { split /\,/, access_cfg('dgit-tag-format'); @@ -1586,12 +1684,12 @@ sub select_tagformat () { sub canonicalise_suite () { return if defined $csuite; - fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED'; + fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED'; $csuite = archive_query('canonicalise_suite'); if ($isuite ne $csuite) { - progress "canonical suite name for $isuite is $csuite"; + progress f_ "canonical suite name for %s is %s", $isuite, $csuite; } else { - progress "canonical suite name is $csuite"; + progress f_ "canonical suite name is %s", $csuite; } } @@ -1611,13 +1709,13 @@ sub get_archive_dsc () { $digester->add($dscdata); my $got = $digester->hexdigest(); $got eq $digest or - fail "$dscurl has hash $got but". - " archive told us to expect $digest"; + fail f_ "%s has hash %s but archive told us to expect %s", + $dscurl, $got, $digest; } parse_dscdata(); my $fmt = getfield $dsc, 'Format'; $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)], - "unsupported source format $fmt, sorry"; + f_ "unsupported source format %s, sorry", $fmt; $dsc_checked = !!$digester; printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n"; @@ -1644,7 +1742,8 @@ sub check_for_git () { # NB that if we are pushing, $usedistro will be $distro/push $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert"); $instead_distro =~ s{^/}{ access_basedistro()."/" }e; - progress "diverting to $divert (using config for $instead_distro)"; + progress f_ "diverting to %s (using config for %s)", + $divert, $instead_distro; return check_for_git(); } failedcmd @cmd unless defined $r and $r =~ m/^[01]$/; @@ -1660,7 +1759,7 @@ sub check_for_git () { # curl -sS -I with https_proxy prints # HTTP/1.0 200 Connection established $result =~ m/^\S+ (404|200) /s or - fail "unexpected results from git check query - ". + fail +(__ "unexpected results from git check query - "). 
Dumper($prefix, $result); my $code = $1; if ($code eq '404') { @@ -1675,7 +1774,7 @@ sub check_for_git () { } elsif ($how eq 'false') { return 0; } else { - badcfg "unknown git-check \`$how'"; + badcfg f_ "unknown git-check \`%s'", $how; } } @@ -1690,7 +1789,7 @@ sub create_remote_git_repo () { } elsif ($how eq 'true') { # nothing to do } else { - badcfg "unknown git-create \`$how'"; + badcfg f_ "unknown git-create \`%s'", $how; } } @@ -1727,8 +1826,8 @@ sub remove_stray_gits ($) { local $/="\0"; while () { chomp or die; - print STDERR "$us: warning: removing from $what: ", - (messagequote $_), "\n"; + print STDERR f_ "%s: warning: removing from %s: %s\n", + $us, $what, (messagequote $_); rmtree $_; } } @@ -1740,7 +1839,7 @@ sub mktree_in_ud_from_only_subdir ($;$) { # changes into the subdir my (@dirs) = <*/.>; - die "expected one subdir but found @dirs ?" unless @dirs==1; + confess "expected one subdir but found @dirs ?" unless @dirs==1; $dirs[0] =~ m#^([^/]+)/\.$# or die; my $dir = $1; changedir $dir; @@ -1773,7 +1872,7 @@ sub dsc_files_info () { foreach (split /\n/, $field) { next unless m/\S/; m/^(\w+) (\d+) (\S+)$/ or - fail "could not parse .dsc $fname line \`$_'"; + fail f_ "could not parse .dsc %s line \`%s'", $fname, $_; my $digester = eval "$module"."->$method;" or die $@; push @out, { Hash => $1, @@ -1784,8 +1883,8 @@ sub dsc_files_info () { } return @out; } - fail "missing any supported Checksums-* or Files field in ". - $dsc->get_option('name'); + fail f_ "missing any supported Checksums-* or Files field in %s", + $dsc->get_option('name'); } sub dsc_files () { @@ -1829,8 +1928,9 @@ sub files_compare_inputs (@) { if (defined $$re) { $fchecked{$f}{$in_name} = 1; $$re eq $info or - fail "hash or size of $f varies in $fname fields". - " (between: ".$showinputs->().")"; + fail f_ + "hash or size of %s varies in %s fields (between: %s)", + $f, $fname, $showinputs->(); } else { $$re = $info; } @@ -1838,17 +1938,18 @@ sub files_compare_inputs (@) { @files = sort @files; $expected_files //= \@files; "@$expected_files" eq "@files" or - fail "file list in $in_name varies between hash fields!"; + fail f_ "file list in %s varies between hash fields!", + $in_name; } $expected_files or - fail "$in_name has no files list field(s)"; + fail f_ "%s has no files list field(s)", $in_name; } printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record) if $debuglevel>=2; grep { keys %$_ == @$inputs-1 } values %fchecked - or fail "no file appears in all file lists". - " (looked in: ".$showinputs->().")"; + or fail f_ "no file appears in all file lists (looked in: %s)", + $showinputs->(); } sub is_orig_file_in_dsc ($$) { @@ -1860,13 +1961,6 @@ sub is_orig_file_in_dsc ($$) { return 1; } -sub is_orig_file_of_vsn ($$) { - my ($f, $upstreamvsn) = @_; - my $base = srcfn $upstreamvsn, ''; - return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/; - return 1; -} - # This function determines whether a .changes file is source-only from # the point of view of dak. Thus, it permits *_source.buildinfo # files. 
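The rule described in the comment just above, that dak still treats an upload as source-only as long as every file listed in the .changes is a .dsc, a diff, a tarball, or a _source.buildinfo, is exactly what test_source_only_changes in the next hunk applies. A standalone sketch of that check (plain Perl, not dgit's own parsing helpers; the example filenames are made up):

    use strict;

    # Return true if every filename in a .changes Files field matches the
    # source-only pattern used by test_source_only_changes.
    sub files_field_is_source_only {
        my ($files_field) = @_;
        foreach my $l (split /\n/, $files_field) {
            $l =~ m/\S+$/ or next;      # last word on the line is the filename
            return 0 unless $& =~
                m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/;
        }
        return 1;
    }

    # For example, a binary .deb in the list makes the upload not source-only:
    my $files = <<'END';
     0123abcd 1234 devel optional hello_1.0-1.dsc
     4567cdef 5678 devel optional hello_1.0-1.debian.tar.xz
     89abcdef 9012 devel optional hello_1.0-1_amd64.deb
    END
    print files_field_is_source_only($files) ? "source-only\n" : "not source-only\n";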
@@ -1894,7 +1988,7 @@ sub test_source_only_changes ($) { $l =~ m/\S+$/ or next; # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) { - print "purportedly source-only changes polluted by $&\n"; + print f_ "purportedly source-only changes polluted by %s\n", $&; return 0; } } @@ -1907,7 +2001,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "checking origs needed ($upstreamvsn)...\n"; $_ = getfield $changes, 'Files'; m/^\w+ \d+ (\S+ \S+) \S+$/m or - fail "cannot find section/priority from .changes Files field"; + fail __ "cannot find section/priority from .changes Files field"; my $placementinfo = $1; my %changed; printdebug "checking origs needed placement '$placementinfo'...\n"; @@ -1919,7 +2013,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "origs $file is_orig\n"; my $have = archive_query('file_in_archive', $file); if (!defined $have) { - print STDERR <{$fname}; next unless defined; m/^(\w+) .* \Q$file\E$/m or - fail ".dsc $fname missing entry for $file"; + fail f_ ".dsc %s missing entry for %s", $fname, $file; if ($h->{$archivefield} eq $1) { $same++; } else { - push @differ, - "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)"; + push @differ, f_ + "%s: %s (archive) != %s (local .dsc)", + $archivefield, $h->{$archivefield}, $1; } } - die "$file ".Dumper($h)." ?!" if $same && @differ; + confess "$file ".Dumper($h)." ?!" if $same && @differ; $found_same++ if $same; - push @found_differ, "archive $h->{filename}: ".join "; ", @differ + push @found_differ, + f_ "archive %s: %s", $h->{filename}, join "; ", @differ if @differ; } printdebug "origs $file f.same=$found_same". " #f._differ=$#found_differ\n"; if (@found_differ && !$found_same) { fail join "\n", - "archive contains $file with different checksum", + (f_ "archive contains %s with different checksum", $file), @found_differ; } # Now we edit the changes file to add or remove it @@ -1972,7 +2068,7 @@ END $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?"; my $extra = $1; $extra =~ s/ \d+ /$&$placementinfo / - or die "$fname $extra >$dsc_data< ?" + or confess "$fname $extra >$dsc_data< ?" if $fname eq 'Files'; $changes->{$fname} .= "\n". $extra; $changed{$file} = "added"; @@ -1981,7 +2077,7 @@ END } if (%changed) { foreach my $file (keys %changed) { - progress sprintf + progress f_ "edited .changes for archive .orig contents: %s %s", $changed{$file}, $file; } @@ -1990,10 +2086,11 @@ END if (act_local()) { rename $chtmp,$changesfile or die "$changesfile $!"; } else { - progress "[new .changes left in $changesfile]"; + progress f_ "[new .changes left in %s]", $changesfile; } } else { - progress "$changesfile already has appropriate .orig(s) (if any)"; + progress f_ "%s already has appropriate .orig(s) (if any)", + $changesfile; } } @@ -2002,28 +2099,6 @@ sub make_commit ($) { return cmdoutput @git, qw(hash-object -w -t commit), $file; } -sub make_commit_text ($) { - my ($text) = @_; - my ($out, $in); - my @cmd = (@git, qw(hash-object -w -t commit --stdin)); - debugcmd "|",@cmd; - print Dumper($text) if $debuglevel > 1; - my $child = open2($out, $in, @cmd) or die $!; - my $h; - eval { - print $in $text or die $!; - close $in or die $!; - $h = <$out>; - $h =~ m/^\w+$/ or die; - $h = $&; - printdebug "=> $h\n"; - }; - close $out; - waitpid $child, 0 == $child or die "$child $!"; - $? 
and failedcmd @cmd; - return $h; -} - sub clogp_authline ($) { my ($clogp) = @_; my $author = getfield $clogp, 'Maintainer'; @@ -2038,8 +2113,9 @@ sub clogp_authline ($) { my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date'); my $authline = "$author $date"; $authline =~ m/$git_authline_re/o or - fail "unexpected commit author line format \`$authline'". - " (was generated from changelog Maintainer field)"; + fail f_ "unexpected commit author line format \`%s'". + " (was generated from changelog Maintainer field)", + $authline; return ($1,$2,$3) if wantarray; return $authline; } @@ -2052,14 +2128,14 @@ sub vendor_patches_distro ($$) { printdebug "checking for vendor-specific $series ($what)\n"; if (!open SERIES, "<", $series) { - die "$series $!" unless $!==ENOENT; + confess "$series $!" unless $!==ENOENT; return; } while () { next unless m/\S/; next if m/^\s+\#/; - print STDERR <error; close SERIES; @@ -2102,11 +2179,11 @@ sub check_for_vendor_patches () { use Dpkg::Vendor; vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR"); vendor_patches_distro(Dpkg::Vendor::get_current_vendor(), - "Dpkg::Vendor \`current vendor'"); + __ "Dpkg::Vendor \`current vendor'"); vendor_patches_distro(access_basedistro(), - "(base) distro being accessed"); + __ "(base) distro being accessed"); vendor_patches_distro(access_nomdistro(), - "(nominal) distro being accessed"); + __ "(nominal) distro being accessed"); } sub generate_commits_from_dsc () { @@ -2127,12 +2204,12 @@ sub generate_commits_from_dsc () { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing $buildproductsdir/$f,fetch: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!; } elsif (link_ltarget $upper_f, $f) { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing $buildproductsdir/$f: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!; } else { printdebug "absent.\n"; } @@ -2147,14 +2224,14 @@ sub generate_commits_from_dsc () { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving $buildproductsdir/$f: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f", $!; } elsif (!$refetched) { printdebug "no need.\n"; } elsif (link $f, "$upper_f,fetch") { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving $buildproductsdir/$f,fetch: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $!; } else { printdebug "cannot.\n"; } @@ -2220,7 +2297,7 @@ sub generate_commits_from_dsc () { chdir "_unpack-tar" or die $!; open STDIN, "<&", $input or die $!; exec @tarcmd; - die "dgit (child): exec $tarcmd[0]: $!"; + die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!; } $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!; !$? 
or failedcmd @tarcmd; @@ -2284,7 +2361,7 @@ sub generate_commits_from_dsc () { push @cmd, qw(-x --), $dscfn; runcmd @cmd; - my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); + my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package"); if (madformat $dsc->{format}) { check_for_vendor_patches(); } @@ -2302,7 +2379,7 @@ sub generate_commits_from_dsc () { my $r1clogp; printdebug "import clog search...\n"; - parsechangelog_loop \@clogcmd, "package changelog", sub { + parsechangelog_loop \@clogcmd, (__ "package changelog"), sub { my ($thisstanza, $desc) = @_; no warnings qw(exiting); @@ -2342,7 +2419,7 @@ sub generate_commits_from_dsc () { printdebug "import clog $r1clogp->{version} becomes r1\n"; }; - $clogp or fail "package changelog has no entries!"; + $clogp or fail __ "package changelog has no entries!"; my $authline = clogp_authline $clogp; my $changes = getfield $clogp, 'Changes'; @@ -2361,12 +2438,13 @@ sub generate_commits_from_dsc () { foreach my $tt (@tartrees) { printdebug "import tartree $tt->{F} $tt->{Tree}\n"; + my $mbody = f_ "Import %s", $tt->{F}; $tt->{Commit} = make_commit_text($tt->{Orig} ? <{Tree} author $r1authline committer $r1authline -Import $tt->{F} +$mbody [dgit import orig $tt->{F}] END_O @@ -2374,7 +2452,7 @@ tree $tt->{Tree} author $authline committer $authline -Import $tt->{F} +$mbody [dgit import tarball $package $cversion $tt->{F}] END_T @@ -2441,7 +2519,7 @@ END chomp $@; progress "warning: $@"; $path = "$absurdity:$path"; - progress "$us: trying slow absurd-git-apply..."; + progress f_ "%s: trying slow absurd-git-apply...", $us; rename "../../gbp-pq-output","../../gbp-pq-output.0" or $!==ENOENT or die $!; @@ -2460,19 +2538,19 @@ END 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd; debugcmd "+",@realcmd; if (system @realcmd) { - die +(shellquote @showcmd). - " failed: ". - failedcmd_waitstatus()."\n"; + die f_ "%s failed: %s\n", + +(shellquote @showcmd), + failedcmd_waitstatus(); } my $gapplied = git_rev_parse('HEAD'); my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:); $gappliedtree eq $dappliedtree or - fail < $rawimport_hash, - Info => "Import of source package", + Info => __ "Import of source package", }; my @output = ($rawimport_mergeinput); @@ -2499,16 +2577,18 @@ END version_compare($oversion, $cversion); if ($vcmp < 0) { @output = ($rawimport_mergeinput, $lastpush_mergeinput, - { Message => < 1 }); -Record $package ($cversion) in archive suite $csuite + { ReverseParents => 1, + Message => (f_ < 0) { - print STDERR <()) { - progress "using existing $f"; + progress f_ "using existing %s", $f; return 1; } if (!$refetched) { - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(perhaps you should delete this file?)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". + " (perhaps you should delete this file?)", + $f, $got, $fi->{Hash}; } - progress "need to fetch correct version of $f"; + progress f_ "need to fetch correct version of %s", $f; unlink $tf or die "$tf $!"; $$refetched = 1; } else { @@ -2569,9 +2649,9 @@ sub complete_file_from_dsc ($$;$) { return 0 if !act_local(); $checkhash->() or - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(got wrong file from archive!)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". 
+ " (got wrong file from archive!)", + $f, $got, $fi->{Hash}; return 1; } @@ -2664,7 +2744,7 @@ sub git_lrfetch_sane { for (;;) { printdebug "git_lrfetch_sane iteration $fetch_iteration\n"; if (++$fetch_iteration > 10) { - fail "too many iterations trying to get sane fetch!"; + fail __ "too many iterations trying to get sane fetch!"; } my @look = map { "refs/$_" } @specs; @@ -2678,8 +2758,8 @@ sub git_lrfetch_sane { m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?"; my ($objid,$rrefname) = ($1,$2); if (!$wanted_rref->($rrefname)) { - print STDERR <(), and returns undef # in parent, returns canonical suite name for $tsuite my $canonsuitefh = IO::File::new_tmpfile; @@ -3495,7 +3574,7 @@ sub multisuite_suite_child ($$$) { return $csuite; } printdebug "multisuite $tsuite ok (canon=$csuite)\n"; - push @$merginputs, { + push @$mergeinputs, { Ref => lrref, Info => $csuite, }; @@ -3539,7 +3618,6 @@ sub fork_for_multisuite ($) { fetch_one(); finish 0; }); - # xxx collecte the ref here $csubsuite =~ s/^\Q$cbasesuite\E-/-/; push @csuites, $csubsuite; @@ -3830,13 +3908,14 @@ sub madformat_wantfixup ($) { sub maybe_split_brain_save ($$$) { my ($headref, $dgitview, $msg) = @_; # => message fragment "$saved" describing disposition of $dgitview - return "commit id $dgitview" unless defined $split_brain_save; + my $save = $internal_object_save{'dgit-view'}; + return "commit id $dgitview" unless defined $save; my @cmd = (shell_cmd 'cd "$1"; shift', $maindir, git_update_ref_cmd "dgit --dgit-view-save $msg HEAD=$headref", - $split_brain_save, $dgitview); + $save, $dgitview); runcmd @cmd; - return "and left in $split_brain_save"; + return "and left in $save"; } # An "infopair" is a tuple [ $thing, $what ] @@ -4268,6 +4347,15 @@ END my $actualhead = git_rev_parse('HEAD'); if (branch_is_gdr_unstitched_ff($symref, $actualhead, $archive_hash)) { + if (quiltmode_splitbrain()) { + my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $actualhead); + fail <= 4; + confess "internal error (protovsn=$protovsn)" + if defined $protovsn and $protovsn < 4; responder_send_command("param maint-view $maintviewhead"); } @@ -4508,9 +4597,8 @@ END supplementary_message(<<'END'); Push failed, while obtaining signatures on the .changes and .dsc. If it was just that the signature failed, you may try again by using -debsign by hand to sign the changes - $changesfile -and then dput to complete the upload. +debsign by hand to sign the changes file (see the command dgit tried, +above), and then dput that changes file to complete the upload. If you need to change the package, you must use a new version number. END if ($we_are_responder) { @@ -4756,30 +4844,6 @@ sub cmd_push { dopush(); } -our $sourcechanges; - -sub cmd_push_source { - prep_push(); - fail "dgit push-source: --include-dirty/--ignore-dirty does not make". - "sense with push-source!" if $includedirty; - clean_tree(); - build_maybe_quilt_fixup(); - if ($changesfile) { - my $changes = parsecontrol("$buildproductsdir/$changesfile", - "source changes file"); - unless (test_source_only_changes($changes)) { - fail "user-specified changes file is not source-only"; - } - } else { - # Building a source package is very fast, so just do it - build_source(); - die "er, patches are applied dirtily but shouldn't be.." - if $patches_applied_dirtily; - $changesfile = $sourcechanges; - } - dopush(); -} - #---------- remote commands' implementation ---------- sub pre_remote_push_build_host { @@ -5069,6 +5133,7 @@ sub i_want_signed_dsc_changes { #---------- building etc. 
---------- our $version; +our $sourcechanges; our $dscfn; #----- `3.0 (quilt)' handling ----- @@ -5283,29 +5348,7 @@ END my $dgitview = git_rev_parse 'HEAD'; changedir $maindir; - # When we no longer need to support squeeze, use --create-reflog - # instead of this: - ensuredir "$maindir_gitcommon/logs/refs/dgit-intern"; - my $makelogfh = new IO::File "$maindir_gitcommon/logs/refs/$splitbraincache", '>>' - or die $!; - - my $oldcache = git_get_ref "refs/$splitbraincache"; - if ($oldcache eq $dgitview) { - my $tree = cmdoutput qw(git rev-parse), "$dgitview:"; - # git update-ref doesn't always update, in this case. *sigh* - my $dummy = make_commit_text < 1000000000 +0000 -committer Dgit 1000000000 +0000 - -Dummy commit - do not use -END - runcmd @git, qw(update-ref -m), "dgit $our_version - dummy", - "refs/$splitbraincache", $dummy; - } - runcmd @git, qw(update-ref -m), $cachekey, "refs/$splitbraincache", - $dgitview; + reflog_cache_insert "refs/$splitbraincache", $cachekey, $dgitview; changedir "$playground/work"; @@ -5445,13 +5488,20 @@ sub quiltify ($$$$) { }; if ($quilt_mode eq 'linear') { print STDERR "\n$us: error: quilt fixup cannot be linear. Stopped at:\n"; + my $all_gdr = !!@nots; foreach my $notp (@nots) { print STDERR "$us: ", $reportnot->($notp), "\n"; + $all_gdr &&= $notp->{Child} && + (git_cat_file $notp->{Child}{Commit}, 'commit') + =~ m{^\[git-debrebase(?! split[: ]).*\]$}m; } - print STDERR "$us: $_\n" foreach @$failsuggestion; + print STDERR "\n"; + $failsuggestion = + [ grep { $_->[0] ne 'quilt-mode' } @$failsuggestion ] + if $all_gdr; + print STDERR "$us: $_->[1]\n" foreach @$failsuggestion; fail - "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n". - "Use dpkg-source --commit by hand; or, --quilt=smash for one ugly patch"; + "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n"; } elsif ($quilt_mode eq 'smash') { } elsif ($quilt_mode eq 'auto') { progress "quilt fixup cannot be linear, smashing..."; @@ -5605,7 +5655,7 @@ END if ($quilt_mode eq 'linear' && !$fopts->{'single-debian-patch'} - && branch_is_gdr($symref, $headref)) { + && branch_is_gdr($headref)) { # This is much faster. It also makes patches that gdr # likes better for future updates without laundering. # @@ -5640,8 +5690,6 @@ END quilt_fixup_multipatch($clogp, $headref, $upstreamversion); } - die 'bug' if $split_brain && !$need_split_build_invocation; - changedir $maindir; runcmd_ordryrun_local @git, qw(pull --ff-only -q), "$playground/work", qw(master); @@ -5754,6 +5802,31 @@ END close $fakedsc or die $!; } +sub quilt_fakedsc2unapplied ($$) { + my ($headref, $upstreamversion) = @_; + # must be run in the playground + # quilt_make_fake_dsc must have been called + + runcmd qw(sh -ec), + 'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null'; + + my $fakexdir= $package.'-'.(stripepoch $upstreamversion); + rename $fakexdir, "fake" or die "$fakexdir $!"; + + changedir 'fake'; + + remove_stray_gits("source package"); + mktree_in_ud_here(); + + rmtree '.pc'; + + rmtree 'debian'; # git checkout commitish paths does not delete! + runcmd @git, qw(checkout -f), $headref, qw(-- debian); + my $unapplied=git_add_write_tree(); + printdebug "fake orig tree object $unapplied\n"; + return $unapplied; +} + sub quilt_check_splitbrain_cache ($$) { my ($headref, $upstreamversion) = @_; # Called only if we are in (potentially) split brain mode. 
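The hunk above swaps dgit's open-coded cache insertion for reflog_cache_insert, and the hunk below does the same for the lookup; the helpers themselves are not part of this diff (presumably they now live in Debian::Dgit). The underlying idea, using a ref's reflog as a small key-to-commit cache with the cache key carried in the reflog message, can be sketched roughly as follows, shelling out to git directly rather than through dgit's runcmd/cmdoutput wrappers (helper names here are illustrative only):

    use strict;

    # Sketch of a reflog-backed cache: the ref's value is the cached commit,
    # and the reflog entry's message carries the cache key.
    sub sketch_reflog_cache_insert {
        my ($ref, $cachekey, $value) = @_;
        # git update-ref -m <msg> <ref> <new> records a reflog entry whose
        # message is the cache key.  (The removed code above also inserted a
        # dummy commit first when the ref already pointed at $value, so that
        # a fresh reflog entry still gets written.)
        system('git', 'update-ref', '-m', $cachekey, $ref, $value) == 0
            or die "git update-ref failed\n";
    }

    sub sketch_reflog_cache_lookup {
        my ($ref, $cachekey) = @_;
        # Walk the reflog newest-first and return the commit whose entry
        # message matches the key.  (Real code must also cope with the
        # reflog not existing yet.)
        open my $fh, '-|', 'git', 'log', '-g', '--pretty=format:%H %gs', $ref
            or die "fork: $!";
        while (my $l = <$fh>) {
            chomp $l;
            return $1 if $l =~ m/^(\w+) (.*)$/ && $2 eq $cachekey;
        }
        close $fh;
        return undef;
    }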
@@ -5785,26 +5858,12 @@ sub quilt_check_splitbrain_cache ($$) { push @cachekey, $srcshash->hexdigest(); $splitbrain_cachekey = "@cachekey"; - my @cmd = (@git, qw(log -g), '--pretty=format:%H %gs', - $splitbraincache); printdebug "splitbrain cachekey $splitbrain_cachekey\n"; - debugcmd "|(probably)",@cmd; - my $child = open GC, "-|"; defined $child or die $!; - if (!$child) { - chdir $maindir or die $!; - if (!stat "$maindir_gitcommon/logs/refs/$splitbraincache") { - $! == ENOENT or die $!; - printdebug ">(no reflog)\n"; - finish 0; - } - exec @cmd; die $!; - } - while () { - chomp; - printdebug ">| ", $_, "\n" if $debuglevel > 1; - next unless m/^(\w+) (\S.*\S)$/ && $2 eq $splitbrain_cachekey; - - my $cachehit = $1; + + my $cachehit = reflog_cache_lookup + "refs/$splitbraincache", $splitbrain_cachekey; + + if ($cachehit) { unpack_playtree_mkwork($headref); my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit"; if ($cachehit ne $headref) { @@ -5816,8 +5875,6 @@ sub quilt_check_splitbrain_cache ($$) { progress "dgit view: found cached, no changes required"; return ($headref, $splitbrain_cachekey); } - die $! if GC->error; - failedcmd unless close GC; printdebug "splitbrain cache miss\n"; return (undef, $splitbrain_cachekey); @@ -5907,24 +5964,7 @@ sub quilt_fixup_multipatch ($$$) { quilt_check_splitbrain_cache($headref, $upstreamversion); return if $cachehit; } - - runcmd qw(sh -ec), - 'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null'; - - my $fakexdir= $package.'-'.(stripepoch $upstreamversion); - rename $fakexdir, "fake" or die "$fakexdir $!"; - - changedir 'fake'; - - remove_stray_gits("source package"); - mktree_in_ud_here(); - - rmtree '.pc'; - - rmtree 'debian'; # git checkout commitish paths does not delete! - runcmd @git, qw(checkout -f), $headref, qw(-- debian); - my $unapplied=git_add_write_tree(); - printdebug "fake orig tree object $unapplied\n"; + my $unapplied=quilt_fakedsc2unapplied($headref, $upstreamversion); ensuredir '.pc'; @@ -6002,12 +6042,21 @@ END my @failsuggestion; if (!($diffbits->{O2H} & $diffbits->{O2A})) { - push @failsuggestion, "This might be a patches-unapplied branch."; - } elsif (!($diffbits->{H2A} & $diffbits->{O2A})) { - push @failsuggestion, "This might be a patches-applied branch."; + push @failsuggestion, [ 'unapplied', + "This might be a patches-unapplied branch." ]; + } elsif (!($diffbits->{H2A} & $diffbits->{O2A})) { + push @failsuggestion, [ 'applied', + "This might be a patches-applied branch." ]; } - push @failsuggestion, "Maybe you need to specify one of". - " --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?"; + push @failsuggestion, [ 'quilt-mode', + "Maybe you need one of --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?" ]; + + push @failsuggestion, [ 'gitattrs', + "Warning: Tree has .gitattributes. See GITATTRIBUTES in dgit(7)." ] + if stat_exists '.gitattributes'; + + push @failsuggestion, [ 'origs', + "Maybe orig tarball(s) are not identical to git representation?" 
]; if (quiltmode_splitbrain()) { quiltify_splitbrain($clogp, $unapplied, $headref, $oldtiptree, @@ -6126,6 +6175,7 @@ sub build_or_push_prep_early () { $isuite = getfield $clogp, 'Distribution'; $package = getfield $clogp, 'Source'; $version = getfield $clogp, 'Version'; + $dscfn = dscfn($version); } sub build_prep_early () { @@ -6134,9 +6184,12 @@ sub build_prep_early () { check_not_dirty(); } -sub build_prep () { +sub build_prep ($) { + my ($wantsrc) = @_; build_prep_early(); - clean_tree(); + # clean the tree if we're trying to include dirty changes in the + # source package, or we are running the builder in $maindir + clean_tree() if $includedirty || ($wantsrc & WANTSRC_BUILDER); build_maybe_quilt_fixup(); if ($rmchanges) { my $pat = changespat $version; @@ -6195,23 +6248,11 @@ sub changesopts () { sub massage_dbp_args ($;$) { my ($cmd,$xargs) = @_; - # We need to: - # - # - if we're going to split the source build out so we can - # do strange things to it, massage the arguments to dpkg-buildpackage - # so that the main build doessn't build source (or add an argument - # to stop it building source by default). - # - # - add -nc to stop dpkg-source cleaning the source tree, - # unless we're not doing a split build and want dpkg-source - # as cleanmode, in which case we can do nothing - # + # Since we split the source build out so we can do strange things + # to it, massage the arguments to dpkg-buildpackage so that the + # main build doessn't build source (or add an argument to stop it + # building source by default). debugcmd '#massaging#', @$cmd if $debuglevel>1; -#print STDERR "MASS0 ",Dumper($cmd, $xargs, $need_split_build_invocation); - if ($cleanmode eq 'dpkg-source' && !$need_split_build_invocation) { - $clean_using_builder = 1; - return WANTSRC_BUILDER; - } # -nc has the side effect of specifying -b if nothing else specified # and some combinations of -S, -b, et al, are errors, rather than # later simply overriding earlie. So we need to: @@ -6222,17 +6263,26 @@ sub massage_dbp_args ($;$) { my $dmode = '-F'; foreach my $l ($cmd, $xargs) { next unless $l; - @$l = grep { !(m/^-[SgGFABb]$/s and $dmode=$_) } @$l; + @$l = grep { !(m/^-[SgGFABb]$|^--build=/s and $dmode=$_) } @$l; } push @$cmd, '-nc'; #print STDERR "MASS1 ",Dumper($cmd, $xargs, $dmode); my $r = WANTSRC_BUILDER; - if ($need_split_build_invocation) { - printdebug "massage split $dmode.\n"; - $r = $dmode =~ m/[S]/ ? WANTSRC_SOURCE : - $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : - $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER : - die "$dmode ?"; + printdebug "massage split $dmode.\n"; + if ($dmode =~ s/^--build=//) { + $r = 0; + my @d = split /,/, $dmode; + $r |= WANTSRC_SOURCE if grep { s/^full$/binary/ } @d; + $r |= WANTSRC_SOURCE if grep { s/^source$// } @d; + $r |= WANTSRC_BUILDER if grep { m/./ } @d; + fail "Wanted to build nothing!" unless $r; + $dmode = '--build='. join ',', grep m/./, @d; + } else { + $r = + $dmode =~ m/[S]/ ? WANTSRC_SOURCE : + $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : + $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER : + die "$dmode ?"; } printdebug "massage done $r $dmode.\n"; push @$cmd, $dmode; @@ -6321,9 +6371,14 @@ sub postbuild_mergechanges_vanilla ($) { sub cmd_build { build_prep_early(); + $buildproductsdir eq '..' 
or print STDERR <; - changedir $maindir; - foreach my $f (@udfiles) { - printdebug "source copy, found $f\n"; - next unless - $f eq $dscfn or - ($f =~ m/\.debian\.tar(?:\.\w+)$/ && - $f eq srcfn($version, $&)); - printdebug "source copy, found $f - renaming\n"; - rename "$playground/$f", "$buildproductsdir/$f" or $!==ENOENT - or fail "put in place new source file ($f): $!"; + unless ($split_brain) { + my $upstreamversion = upstreamversion $version; + unpack_playtree_linkorigs($upstreamversion, sub { }); + unpack_playtree_mkwork($headref); + changedir '..'; } } else { - my $pwd = must_getcwd(); - my $leafdir = basename $pwd; - changedir ".."; - runcmd_ordryrun_local @cmd, $leafdir; - changedir $pwd; + $leafdir = basename $maindir; + changedir '..'; } + runcmd_ordryrun_local @cmd, $leafdir; + + changedir $leafdir; runcmd_ordryrun_local qw(sh -ec), - 'exec >$1; shift; exec "$@"','x', - "$buildproductsdir/$sourcechanges", + 'exec >../$1; shift; exec "$@"','x', $sourcechanges, @dpkggenchanges, qw(-S), changesopts(); + changedir '..'; + + printdebug "moving $dscfn, $sourcechanges, etc. to ".bpd_abs()."\n"; + $dsc = parsecontrol($dscfn, "source package"); + + my $mv = sub { + my ($why, $l) = @_; + printdebug " renaming ($why) $l\n"; + rename "$l", bpd_abs()."/$l" + or fail "put in place new built file ($l): $!"; + }; + foreach my $l (split /\n/, getfield $dsc, 'Files') { + $l =~ m/\S+$/ or next; + $mv->('Files', $&); + } + $mv->('dsc', $dscfn); + $mv->('changes', $sourcechanges); + + changedir $maindir; } sub cmd_build_source { badusage "build-source takes no additional arguments" if @ARGV; - build_prep(); + build_prep(WANTSRC_SOURCE); build_source(); maybe_unapply_patches_again(); printdone "source built, results in $dscfn and $sourcechanges"; } -sub cmd_sbuild { - build_prep(); # not BUILDER because sbuild uses the .dsc +sub cmd_push_source { + prep_push(); + fail "dgit push-source: --include-dirty/--ignore-dirty does not make". + "sense with push-source!" if $includedirty; + build_maybe_quilt_fixup(); + if ($changesfile) { + my $changes = parsecontrol("$buildproductsdir/$changesfile", + "source changes file"); + unless (test_source_only_changes($changes)) { + fail "user-specified changes file is not source-only"; + } + } else { + # Building a source package is very fast, so just do it + build_source(); + die "er, patches are applied dirtily but shouldn't be.." 
+ if $patches_applied_dirtily; + $changesfile = $sourcechanges; + } + dopush(); +} + +sub binary_builder { + my ($bbuilder, $pbmc_msg, @args) = @_; + build_prep(WANTSRC_SOURCE); build_source(); midbuild_checkchanges(); in_bpd { if (act_local()) { - stat_exists $dscfn or fail "$dscfn (in parent directory): $!"; + stat_exists $dscfn or fail "$dscfn (in build products dir): $!"; stat_exists $sourcechanges - or fail "$sourcechanges (in parent directory): $!"; + or fail "$sourcechanges (in build products dir): $!"; } - runcmd_ordryrun_local @sbuild, qw(-d), $isuite, @ARGV, $dscfn; + runcmd_ordryrun_local @$bbuilder, @args; }; maybe_unapply_patches_again(); in_bpd { - postbuild_mergechanges(<ismember($signum) and die "$signame is blocked\n"; } @@ -7068,9 +7227,7 @@ sub parseopts_late_defaults () { $$vr = $v; } - $need_split_build_invocation ||= quiltmode_splitbrain(); - - fail "dgit: --include-dirty is not supported in split view quilt mode" + fail __ "dgit: --include-dirty is not supported in split view quilt mode" if $split_brain && $includedirty; if (!defined $cleanmode) { @@ -7088,6 +7245,9 @@ sub parseopts_late_defaults () { $bpd_glob =~ s#[][\\{}*?~]#\\$&#g; } +setlocale(LC_MESSAGES, ""); +textdomain("dgit"); + if ($ENV{$fakeeditorenv}) { git_slurp_config(); quilt_fixup_editor(); @@ -7100,7 +7260,7 @@ print STDERR "DRY RUN ONLY\n" if $dryrun_level > 1; print STDERR "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n" if $dryrun_level == 1; if (!@ARGV) { - print STDERR $helpmsg or die $!; + print STDERR __ $helpmsg or die $!; finish 8; } $cmd = $subcommand = shift @ARGV;
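Almost every message change in this diff wraps user-visible strings in __ (translate a literal) or f_ (translate a printf-style format, then fill it in), with setlocale and textdomain set up just above. Debian::Dgit::I18n itself is not shown in this diff, but the pattern the converted call sites rely on amounts to something like the following sketch (the wrapper bodies here are an assumption, not the module's actual code):

    use strict;
    use POSIX qw(setlocale LC_MESSAGES);
    use Locale::gettext;    # provides gettext() and textdomain()

    setlocale(LC_MESSAGES, "");
    textdomain("dgit");

    # __  : translate a fixed string
    # f_  : translate a format string, then sprintf the arguments into it
    sub __ ($)  { return gettext($_[0]); }
    sub f_ ($@) { my $fmt = shift; return sprintf gettext($fmt), @_; }

    # Usage in the style of the converted call sites above, e.g.:
    #   fail f_ "unknown suite %s, maybe -d would help", $isuite;
    #   print STDERR __ "warning: overriding problem due to --force:\n";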