X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?p=dgit.git;a=blobdiff_plain;f=dgit;h=e0ebfbadd1c230f94466f420b5dad7e1ee2996e4;hp=3438f62f2b69a56d30fb5533bfc4cd25e583b6c2;hb=7bcf96858056c7c5c64c451833688cf495647d56;hpb=683ef14506d9907dfa102b5e228637415c4922b6 diff --git a/dgit b/dgit index 3438f62f..e0ebfbad 100755 --- a/dgit +++ b/dgit @@ -20,6 +20,7 @@ END { $? = $Debian::Dgit::ExitStatus::desired // -1; }; use Debian::Dgit::ExitStatus; +use Debian::Dgit::I18n; use strict; @@ -37,6 +38,7 @@ use Dpkg::Version; use Dpkg::Compression; use Dpkg::Compression::Process; use POSIX; +use Locale::gettext; use IPC::Open2; use Digest::SHA; use Digest::MD5; @@ -113,7 +115,7 @@ our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L)); our (@dput) = qw(dput); our (@debsign) = qw(debsign); our (@gpg) = qw(gpg); -our (@sbuild) = qw(sbuild); +our (@sbuild) = (qw(sbuild --no-source)); our (@ssh) = 'ssh'; our (@dgit) = qw(dgit); our (@git_debrebase) = qw(git-debrebase); @@ -230,24 +232,29 @@ END { } }; -sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; } +sub badcfg { + print STDERR f_ "%s: invalid configuration: %s\n", $us, "@_"; + finish 12; +} sub forceable_fail ($$) { my ($forceoptsl, $msg) = @_; fail $msg unless grep { $forceopts{$_} } @$forceoptsl; - print STDERR "warning: overriding problem due to --force:\n". $msg; + print STDERR +(__ "warning: overriding problem due to --force:\n"). $msg; } sub forceing ($) { my ($forceoptsl) = @_; my @got = grep { $forceopts{$_} } @$forceoptsl; return 0 unless @got; - print STDERR - "warning: skipping checks or functionality due to --force-$got[0]\n"; + print STDERR f_ + "warning: skipping checks or functionality due to --force-%s\n", + $got[0]; } sub no_such_package () { - print STDERR "$us: package $package does not exist in suite $isuite\n"; + print STDERR f_ "%s: package %s does not exist in suite %s\n", + $us, $package, $isuite; finish 4; } @@ -308,13 +315,6 @@ sub branch_gdr_info ($$) { return ($ffq_prev, $gdrlast); } -sub branch_is_gdr ($$) { - my ($symref, $head) = @_; - my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); - return 0 unless $ffq_prev || $gdrlast; - return 1; -} - sub branch_is_gdr_unstitched_ff ($$$) { my ($symref, $head, $ancestor) = @_; my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); @@ -323,6 +323,83 @@ sub branch_is_gdr_unstitched_ff ($$$) { return 1; } +sub branch_is_gdr ($) { + my ($head) = @_; + # This is quite like git-debrebase's keycommits. + # We have our own implementation because: + # - our algorighm can do fewer tests so is faster + # - it saves testing to see if gdr is installed + + # NB we use this jsut for deciding whether to run gdr make-patches + # Before reusing this algorithm for somthing else, its + # suitability should be reconsidered. + + my $walk = $head; + local $Debian::Dgit::debugcmd_when_debuglevel = 3; + printdebug "branch_is_gdr $head...\n"; + my $get_patches = sub { + my $t = git_cat_file "$_[0]:debian/patches", [qw(missing tree)]; + return $t // ''; + }; + my $tip_patches = $get_patches->($head); + WALK: + for (;;) { + my $cdata = git_cat_file $walk, 'commit'; + my ($hdrs,$msg) = $cdata =~ m{\n\n} ? 
($`,$') : ($cdata,''); + if ($msg =~ m{^\[git-debrebase\ ( + anchor | changelog | make-patches | + merged-breakwater | pseudomerge + ) [: ] }mx) { + # no need to analyse this - it's sufficient + # (gdr classifications: Anchor, MergedBreakwaters) + # (made by gdr: Pseudomerge, Changelog) + printdebug "branch_is_gdr $walk gdr $1 YES\n"; + return 1; + } + my @parents = ($hdrs =~ m/^parent (\w+)$/gm); + if (@parents==2) { + my $walk_tree = get_tree_of_commit $walk; + foreach my $p (@parents) { + my $p_tree = get_tree_of_commit $p; + if ($p_tree eq $walk_tree) { # pseudomerge contriburor + # (gdr classification: Pseudomerge; not made by gdr) + printdebug "branch_is_gdr $walk unmarked pseudomerge\n" + if $debuglevel >= 2; + $walk = $p; + next WALK; + } + } + # some other non-gdr merge + # (gdr classification: VanillaMerge, DgitImportUnpatched, ?) + printdebug "branch_is_gdr $walk ?-2-merge NO\n"; + return 0; + } + if (@parents>2) { + # (gdr classification: ?) + printdebug "branch_is_gdr $walk ?-octopus NO\n"; + return 0; + } + if ($get_patches->($walk) ne $tip_patches) { + # Our parent added, removed, or edited patches, and wasn't + # a gdr make-patches commit. gdr make-patches probably + # won't do that well, then. + # (gdr classification of parent: AddPatches or ?) + printdebug "branch_is_gdr $walk ?-patches NO\n"; + return 0; + } + if ($tip_patches eq '' and + !defined git_cat_file "$walk:debian") { + # (gdr classification of parent: BreakwaterStart + printdebug "branch_is_gdr $walk unmarked BreakwaterStart YES\n"; + return 1; + } + # (gdr classification: Upstream Packaging Mixed Changelog) + printdebug "branch_is_gdr $walk plain\n" + if $debuglevel >= 2; + $walk = $parents[0]; + } +} + #---------- remote protocol support, common ---------- # remote push initiator/responder protocol: @@ -395,28 +472,28 @@ sub i_child_report () { die unless $got == $i_child_pid; $i_child_pid = undef; return undef unless $?; - return "build host child ".waitstatusmsg(); + return f_ "build host child %s", waitstatusmsg(); } sub badproto ($$) { my ($fh, $m) = @_; - fail "connection lost: $!" if $fh->error; - fail "protocol violation; $m not expected"; + fail f_ "connection lost: %s", $! if $fh->error; + fail f_ "protocol violation; %s not expected", $m; } sub badproto_badread ($$) { my ($fh, $wh) = @_; - fail "connection lost: $!" if $!; + fail f_ "connection lost: %s", $! 
if $!; my $report = i_child_report(); fail $report if defined $report; - badproto $fh, "eof (reading $wh)"; + badproto $fh, f_ "eof (reading %s)", $wh; } sub protocol_expect (&$) { my ($match, $fh) = @_; local $_; $_ = <$fh>; - defined && chomp or badproto_badread $fh, "protocol message"; + defined && chomp or badproto_badread $fh, __ "protocol message"; if (wantarray) { my @r = &$match; return @r if @r; @@ -424,7 +501,7 @@ sub protocol_expect (&$) { my $r = &$match; return $r if $r; } - badproto $fh, "\`$_'"; + badproto $fh, f_ "\`%s'", $_; } sub protocol_send_file ($$) { @@ -445,10 +522,10 @@ sub protocol_send_file ($$) { sub protocol_read_bytes ($$) { my ($fh, $nbytes) = @_; - $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count"; + $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, __ "bad byte count"; my $d; my $got = read $fh, $d, $nbytes; - $got==$nbytes or badproto_badread $fh, "data block"; + $got==$nbytes or badproto_badread $fh, __ "data block"; return $d; } @@ -529,7 +606,8 @@ sub url_get { progress "downloading $what..."; my $r = $ua->get(@_) or die $!; return undef if $r->code == 404; - $r->is_success or fail "failed to fetch $what: ".$r->status_line; + $r->is_success or fail f_ "failed to fetch %s: %s", + $what, $r->status_line; return $r->decoded_content(charset => 'none'); } @@ -540,9 +618,9 @@ sub act_scary () { return !$dryrun_level; } sub printdone { if (!$dryrun_level) { - progress "$us ok: @_"; + progress f_ "%s ok: %s", $us, "@_"; } else { - progress "would be ok: @_ (but dry run only)"; + progress f_ "would be ok: %s (but dry run only)", "@_"; } } @@ -566,7 +644,7 @@ sub runcmd_ordryrun_local { } } -our $helpmsg = <= set git config option (used directly by dgit too) END -our $later_warning_msg = <= 4; $l or next; - @$l==1 or badcfg "multiple values for $c". - " (in $src git config)" if @$l > 1; + @$l==1 or badcfg + f_ "multiple values for %s (in %s git config)", $c, $src + if @$l > 1; return $l->[0]; } return undef; @@ -724,8 +803,10 @@ sub cfg { return $dv; } } - badcfg "need value for one of: @_\n". - "$us: distro or suite appears not to be (properly) supported"; + badcfg f_ + "need value for one of: %s\n". + "%s: distro or suite appears not to be (properly) supported", + "@_", $us; } sub not_necessarily_a_tree () { @@ -764,7 +845,8 @@ sub access_nomdistro () { my $base = access_basedistro(); my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base; $r =~ m/^$distro_re$/ or badcfg - "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)"; + f_ "bad syntax for (nominal) distro \`%s' (does not match %s)", + $r, "/^$distro_re$/"; return $r; } @@ -778,7 +860,7 @@ sub access_quirk () { $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig; $re =~ s/\*/.*/g; $re =~ s/\%/([-0-9a-z_]+)/ - or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )"; + or $re =~ m/[()]/ or badcfg __ "backports-quirk needs \% or ( )"; if ($isuite =~ m/^$re$/) { return ('backports',"$basedistro-backports",$1); } @@ -794,7 +876,8 @@ sub parse_cfg_bool ($$$) { return $v =~ m/^[ty1]/ ? 1 : $v =~ m/^[fn0]/ ? 0 : - badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'"; + badcfg f_ "%s needs t (true, y, 1) or f (false, n, 0) not \`%s'", + $what, $v; } sub access_forpush_config () { @@ -812,7 +895,8 @@ sub access_forpush_config () { $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1 $v =~ m/^[a]/ ? 
'' : # auto, forpush = '' - badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; + badcfg __ + "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; } sub access_forpush () { @@ -821,12 +905,12 @@ sub access_forpush () { } sub pushing () { - confess 'internal error '.Dumper($access_forpush)," ?" if + confess +(__ 'internal error').' '.Dumper($access_forpush)," ?" if defined $access_forpush and !$access_forpush; - badcfg "pushing but distro is configured readonly" + badcfg __ "pushing but distro is configured readonly" if access_forpush_config() eq '0'; $access_forpush = 1; - $supplementary_message = <<'END' unless $we_are_responder; + $supplementary_message = __ <<'END' unless $we_are_responder; Push failed, before we got started. You can retry the push, after fixing the problem, if you like. END @@ -992,7 +1076,7 @@ our %rmad; sub archive_query ($;@) { my ($method) = shift @_; - fail "this operation does not support multiple comma-separated suites" + fail __ "this operation does not support multiple comma-separated suites" if $isuite =~ m/,/; my $query = access_cfg('archive-query','RETURN-UNDEF'); $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'"; @@ -1038,8 +1122,9 @@ sub archive_api_query_cmd ($) { fail "for $url: stat $key: $!" unless $!==ENOENT; next; } - fail "config requested specific TLS key but do not know". - " how to get curl to use exactly that EE key ($key)"; + fail f_ "config requested specific TLS key but do not know". + " how to get curl to use exactly that EE key (%s)", + $key; # push @cmd, "--cacert", $key, "--capath", "/dev/enoent"; # # Sadly the above line does not work because of changes # # to gnutls. The real fix for #790093 may involve @@ -1058,7 +1143,7 @@ sub archive_api_query_cmd ($) { sub api_query ($$;$) { use JSON; my ($data, $subpath, $ok404) = @_; - badcfg "ftpmasterapi archive query method takes no data part" + badcfg __ "ftpmasterapi archive query method takes no data part" if length $data; my @cmd = archive_api_query_cmd($subpath); my $url = $cmd[$#cmd]; @@ -1066,11 +1151,11 @@ sub api_query ($$;$) { my $json = cmdoutput @cmd; unless ($json =~ s/\d+\d+\d$//) { failedcmd_report_cmd undef, @cmd; - fail "curl failed to print 3-digit HTTP code"; + fail __ "curl failed to print 3-digit HTTP code"; } my $code = $&; return undef if $code eq '404' && $ok404; - fail "fetch of $url gave HTTP code $code" + fail f_ "fetch of %s gave HTTP code %s", $url, $code unless $url =~ m#^file://# or $code =~ m/^2/; return decode_json($json); } @@ -1086,15 +1171,17 @@ sub canonicalise_suite_ftpmasterapi { } qw(codename name); push @matched, $entry; } - fail "unknown suite $isuite, maybe -d would help" unless @matched; + fail f_ "unknown suite %s, maybe -d would help", $isuite + unless @matched; my $cn; eval { - @matched==1 or die "multiple matches for suite $isuite\n"; + @matched==1 or die f_ "multiple matches for suite %s\n", $isuite; $cn = "$matched[0]{codename}"; - defined $cn or die "suite $isuite info has no codename\n"; - $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n"; + defined $cn or die f_ "suite %s info has no codename\n", $isuite; + $cn =~ m/^$suite_re$/ + or die f_ "suite %s maps to bad codename\n", $isuite; }; - die "bad ftpmaster api response: $@\n".Dumper(\@matched) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper(\@matched) if length $@; return $cn; } @@ -1108,18 +1195,18 @@ sub archive_query_ftpmasterapi { eval { my $vsn = "$entry->{version}"; my ($ok,$msg) = version_check $vsn; - 
die "bad version: $msg\n" unless $ok; + die f_ "bad version: %s\n", $msg unless $ok; my $component = "$entry->{component}"; - $component =~ m/^$component_re$/ or die "bad component"; + $component =~ m/^$component_re$/ or die __ "bad component"; my $filename = "$entry->{filename}"; $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]# - or die "bad filename"; + or die __ "bad filename"; my $sha256sum = "$entry->{sha256sum}"; - $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum"; + $sha256sum =~ m/^[0-9a-f]+$/ or die __ "bad sha256sum"; push @rows, [ $vsn, "/pool/$component/$filename", $digester, $sha256sum ]; }; - die "bad ftpmaster api response: $@\n".Dumper($entry) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper($entry) if length $@; } @rows = sort { -version_compare($a->[0],$b->[0]) } @rows; @@ -1158,15 +1245,15 @@ sub aptget_cache_clean { sub aptget_lock_acquire () { my $lockfile = "$aptget_base/lock"; - open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!"; - flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!"; + open APTGET_LOCK, '>', $lockfile or confess "open $lockfile: $!"; + flock APTGET_LOCK, LOCK_EX or confess "lock $lockfile: $!"; } sub aptget_prep ($) { my ($data) = @_; return if defined $aptget_base; - badcfg "aptget archive query method takes no data part" + badcfg __ "aptget archive query method takes no data part" if length $data; my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache"; @@ -1181,7 +1268,7 @@ sub aptget_prep ($) { ensuredir $aptget_base; my $quoted_base = $aptget_base; - die "$quoted_base contains bad chars, cannot continue" + confess "$quoted_base contains bad chars, cannot continue" if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/ ensuredir $aptget_base; @@ -1197,7 +1284,7 @@ sub aptget_prep ($) { cfg_apply_map(\$aptsuites, 'suite map', access_cfg('aptget-suite-map', 'RETURN-UNDEF')); - open SRCS, ">", "$aptget_base/$sourceslist" or die $!; + open SRCS, ">", "$aptget_base/$sourceslist" or confess $!; printf SRCS "deb-src %s %s %s\n", access_cfg('mirror'), $aptsuites, @@ -1249,13 +1336,13 @@ END my @inreleasefiles = grep { m#/InRelease$# } @releasefiles; @releasefiles = @inreleasefiles if @inreleasefiles; if (!@releasefiles) { - fail <{$name}; if (defined $val) { printdebug "release file $name: $val\n"; - $val =~ m/^$suite_re$/o or fail - "Release file ($aptget_releasefile) specifies intolerable $name"; + $val =~ m/^$suite_re$/o or fail f_ + "Release file (%s) specifies intolerable %s", + $aptget_releasefile, $name; cfg_apply_map(\$val, 'suite rmap', access_cfg('aptget-suite-rmap', 'RETURN-UNDEF')); return $val @@ -1299,8 +1387,9 @@ sub archive_query_aptget { aptget_aptget(), qw(--download-only --only-source source), $package; my @dscs = <$aptget_base/source/*.dsc>; - fail "apt-get source did not produce a .dsc" unless @dscs; - fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1; + fail __ "apt-get source did not produce a .dsc" unless @dscs; + fail f_ "apt-get source produced several .dscs (%s)", "@dscs" + unless @dscs==1; my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1; @@ -1314,6 +1403,7 @@ sub file_in_archive_aptget () { return undef; } sub package_not_wholly_new_aptget () { return undef; } #---------- `dummyapicat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) 
sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; } sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; } @@ -1402,10 +1492,11 @@ sub madison_get_parse { sub canonicalise_suite_madison { # madison canonicalises for us my @r = madison_get_parse(@_); - @r or fail - "unable to canonicalise suite using package $package". - " which does not appear to exist in suite $isuite;". - " --existing-package may help"; + @r or fail f_ + "unable to canonicalise suite using package %s". + " which does not appear to exist in suite %s;". + " --existing-package may help", + $package, $isuite; return $r[0][2]; } @@ -1413,6 +1504,7 @@ sub file_in_archive_madison { return undef; } sub package_not_wholly_new_madison { return undef; } #---------- `sshpsql' archive query method ---------- +# (untranslated, because this is obsolete) sub sshpsql ($$$) { my ($data,$runeinfo,$sql) = @_; @@ -1491,6 +1583,7 @@ sub file_in_archive_sshpsql ($$$) { return undef; } sub package_not_wholly_new_sshpsql ($$$) { return undef; } #---------- `dummycat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) sub canonicalise_suite_dummycat ($$) { my ($proto,$data) = @_; @@ -1536,6 +1629,7 @@ sub file_in_archive_dummycat () { return undef; } sub package_not_wholly_new_dummycat () { return undef; } #---------- tag format handling ---------- +# (untranslated, because everything should be new tag format by now) sub access_cfg_tagformats () { split /\,/, access_cfg('dgit-tag-format'); @@ -1590,12 +1684,12 @@ sub select_tagformat () { sub canonicalise_suite () { return if defined $csuite; - fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED'; + fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED'; $csuite = archive_query('canonicalise_suite'); if ($isuite ne $csuite) { - progress "canonical suite name for $isuite is $csuite"; + progress f_ "canonical suite name for %s is %s", $isuite, $csuite; } else { - progress "canonical suite name is $csuite"; + progress f_ "canonical suite name is %s", $csuite; } } @@ -1615,13 +1709,13 @@ sub get_archive_dsc () { $digester->add($dscdata); my $got = $digester->hexdigest(); $got eq $digest or - fail "$dscurl has hash $got but". - " archive told us to expect $digest"; + fail f_ "%s has hash %s but archive told us to expect %s", + $dscurl, $got, $digest; } parse_dscdata(); my $fmt = getfield $dsc, 'Format'; $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)], - "unsupported source format $fmt, sorry"; + f_ "unsupported source format %s, sorry", $fmt; $dsc_checked = !!$digester; printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n"; @@ -1648,7 +1742,8 @@ sub check_for_git () { # NB that if we are pushing, $usedistro will be $distro/push $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert"); $instead_distro =~ s{^/}{ access_basedistro()."/" }e; - progress "diverting to $divert (using config for $instead_distro)"; + progress f_ "diverting to %s (using config for %s)", + $divert, $instead_distro; return check_for_git(); } failedcmd @cmd unless defined $r and $r =~ m/^[01]$/; @@ -1664,7 +1759,7 @@ sub check_for_git () { # curl -sS -I with https_proxy prints # HTTP/1.0 200 Connection established $result =~ m/^\S+ (404|200) /s or - fail "unexpected results from git check query - ". + fail +(__ "unexpected results from git check query - "). 
Dumper($prefix, $result); my $code = $1; if ($code eq '404') { @@ -1679,7 +1774,7 @@ sub check_for_git () { } elsif ($how eq 'false') { return 0; } else { - badcfg "unknown git-check \`$how'"; + badcfg f_ "unknown git-check \`%s'", $how; } } @@ -1694,7 +1789,7 @@ sub create_remote_git_repo () { } elsif ($how eq 'true') { # nothing to do } else { - badcfg "unknown git-create \`$how'"; + badcfg f_ "unknown git-create \`%s'", $how; } } @@ -1731,8 +1826,8 @@ sub remove_stray_gits ($) { local $/="\0"; while () { chomp or die; - print STDERR "$us: warning: removing from $what: ", - (messagequote $_), "\n"; + print STDERR f_ "%s: warning: removing from %s: %s\n", + $us, $what, (messagequote $_); rmtree $_; } } @@ -1744,7 +1839,7 @@ sub mktree_in_ud_from_only_subdir ($;$) { # changes into the subdir my (@dirs) = <*/.>; - die "expected one subdir but found @dirs ?" unless @dirs==1; + confess "expected one subdir but found @dirs ?" unless @dirs==1; $dirs[0] =~ m#^([^/]+)/\.$# or die; my $dir = $1; changedir $dir; @@ -1777,7 +1872,7 @@ sub dsc_files_info () { foreach (split /\n/, $field) { next unless m/\S/; m/^(\w+) (\d+) (\S+)$/ or - fail "could not parse .dsc $fname line \`$_'"; + fail f_ "could not parse .dsc %s line \`%s'", $fname, $_; my $digester = eval "$module"."->$method;" or die $@; push @out, { Hash => $1, @@ -1788,8 +1883,8 @@ sub dsc_files_info () { } return @out; } - fail "missing any supported Checksums-* or Files field in ". - $dsc->get_option('name'); + fail f_ "missing any supported Checksums-* or Files field in %s", + $dsc->get_option('name'); } sub dsc_files () { @@ -1833,8 +1928,9 @@ sub files_compare_inputs (@) { if (defined $$re) { $fchecked{$f}{$in_name} = 1; $$re eq $info or - fail "hash or size of $f varies in $fname fields". - " (between: ".$showinputs->().")"; + fail f_ + "hash or size of %s varies in %s fields (between: %s)", + $f, $fname, $showinputs->(); } else { $$re = $info; } @@ -1842,17 +1938,18 @@ sub files_compare_inputs (@) { @files = sort @files; $expected_files //= \@files; "@$expected_files" eq "@files" or - fail "file list in $in_name varies between hash fields!"; + fail f_ "file list in %s varies between hash fields!", + $in_name; } $expected_files or - fail "$in_name has no files list field(s)"; + fail f_ "%s has no files list field(s)", $in_name; } printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record) if $debuglevel>=2; grep { keys %$_ == @$inputs-1 } values %fchecked - or fail "no file appears in all file lists". 
- " (looked in: ".$showinputs->().")"; + or fail f_ "no file appears in all file lists (looked in: %s)", + $showinputs->(); } sub is_orig_file_in_dsc ($$) { @@ -1891,7 +1988,7 @@ sub test_source_only_changes ($) { $l =~ m/\S+$/ or next; # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) { - print "purportedly source-only changes polluted by $&\n"; + print f_ "purportedly source-only changes polluted by %s\n", $&; return 0; } } @@ -1904,7 +2001,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "checking origs needed ($upstreamvsn)...\n"; $_ = getfield $changes, 'Files'; m/^\w+ \d+ (\S+ \S+) \S+$/m or - fail "cannot find section/priority from .changes Files field"; + fail __ "cannot find section/priority from .changes Files field"; my $placementinfo = $1; my %changed; printdebug "checking origs needed placement '$placementinfo'...\n"; @@ -1916,7 +2013,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "origs $file is_orig\n"; my $have = archive_query('file_in_archive', $file); if (!defined $have) { - print STDERR <{$fname}; next unless defined; m/^(\w+) .* \Q$file\E$/m or - fail ".dsc $fname missing entry for $file"; + fail f_ ".dsc %s missing entry for %s", $fname, $file; if ($h->{$archivefield} eq $1) { $same++; } else { - push @differ, - "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)"; + push @differ, f_ + "%s: %s (archive) != %s (local .dsc)", + $archivefield, $h->{$archivefield}, $1; } } - die "$file ".Dumper($h)." ?!" if $same && @differ; + confess "$file ".Dumper($h)." ?!" if $same && @differ; $found_same++ if $same; - push @found_differ, "archive $h->{filename}: ".join "; ", @differ + push @found_differ, + f_ "archive %s: %s", $h->{filename}, join "; ", @differ if @differ; } printdebug "origs $file f.same=$found_same". " #f._differ=$#found_differ\n"; if (@found_differ && !$found_same) { fail join "\n", - "archive contains $file with different checksum", + (f_ "archive contains %s with different checksum", $file), @found_differ; } # Now we edit the changes file to add or remove it @@ -1969,7 +2068,7 @@ END $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?"; my $extra = $1; $extra =~ s/ \d+ /$&$placementinfo / - or die "$fname $extra >$dsc_data< ?" + or confess "$fname $extra >$dsc_data< ?" if $fname eq 'Files'; $changes->{$fname} .= "\n". $extra; $changed{$file} = "added"; @@ -1978,7 +2077,7 @@ END } if (%changed) { foreach my $file (keys %changed) { - progress sprintf + progress f_ "edited .changes for archive .orig contents: %s %s", $changed{$file}, $file; } @@ -1987,10 +2086,11 @@ END if (act_local()) { rename $chtmp,$changesfile or die "$changesfile $!"; } else { - progress "[new .changes left in $changesfile]"; + progress f_ "[new .changes left in %s]", $changesfile; } } else { - progress "$changesfile already has appropriate .orig(s) (if any)"; + progress f_ "%s already has appropriate .orig(s) (if any)", + $changesfile; } } @@ -2013,8 +2113,9 @@ sub clogp_authline ($) { my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date'); my $authline = "$author $date"; $authline =~ m/$git_authline_re/o or - fail "unexpected commit author line format \`$authline'". - " (was generated from changelog Maintainer field)"; + fail f_ "unexpected commit author line format \`%s'". 
+ " (was generated from changelog Maintainer field)", + $authline; return ($1,$2,$3) if wantarray; return $authline; } @@ -2027,14 +2128,14 @@ sub vendor_patches_distro ($$) { printdebug "checking for vendor-specific $series ($what)\n"; if (!open SERIES, "<", $series) { - die "$series $!" unless $!==ENOENT; + confess "$series $!" unless $!==ENOENT; return; } while () { next unless m/\S/; next if m/^\s+\#/; - print STDERR <error; close SERIES; @@ -2077,11 +2179,11 @@ sub check_for_vendor_patches () { use Dpkg::Vendor; vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR"); vendor_patches_distro(Dpkg::Vendor::get_current_vendor(), - "Dpkg::Vendor \`current vendor'"); + __ "Dpkg::Vendor \`current vendor'"); vendor_patches_distro(access_basedistro(), - "(base) distro being accessed"); + __ "(base) distro being accessed"); vendor_patches_distro(access_nomdistro(), - "(nominal) distro being accessed"); + __ "(nominal) distro being accessed"); } sub generate_commits_from_dsc () { @@ -2102,12 +2204,12 @@ sub generate_commits_from_dsc () { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing $buildproductsdir/$f,fetch: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!; } elsif (link_ltarget $upper_f, $f) { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing $buildproductsdir/$f: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!; } else { printdebug "absent.\n"; } @@ -2122,14 +2224,14 @@ sub generate_commits_from_dsc () { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving $buildproductsdir/$f: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f", $!; } elsif (!$refetched) { printdebug "no need.\n"; } elsif (link $f, "$upper_f,fetch") { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving $buildproductsdir/$f,fetch: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $!; } else { printdebug "cannot.\n"; } @@ -2195,7 +2297,7 @@ sub generate_commits_from_dsc () { chdir "_unpack-tar" or die $!; open STDIN, "<&", $input or die $!; exec @tarcmd; - die "dgit (child): exec $tarcmd[0]: $!"; + die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!; } $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!; !$? or failedcmd @tarcmd; @@ -2259,7 +2361,7 @@ sub generate_commits_from_dsc () { push @cmd, qw(-x --), $dscfn; runcmd @cmd; - my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); + my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package"); if (madformat $dsc->{format}) { check_for_vendor_patches(); } @@ -2277,7 +2379,7 @@ sub generate_commits_from_dsc () { my $r1clogp; printdebug "import clog search...\n"; - parsechangelog_loop \@clogcmd, "package changelog", sub { + parsechangelog_loop \@clogcmd, (__ "package changelog"), sub { my ($thisstanza, $desc) = @_; no warnings qw(exiting); @@ -2317,7 +2419,7 @@ sub generate_commits_from_dsc () { printdebug "import clog $r1clogp->{version} becomes r1\n"; }; - $clogp or fail "package changelog has no entries!"; + $clogp or fail __ "package changelog has no entries!"; my $authline = clogp_authline $clogp; my $changes = getfield $clogp, 'Changes'; @@ -2336,12 +2438,13 @@ sub generate_commits_from_dsc () { foreach my $tt (@tartrees) { printdebug "import tartree $tt->{F} $tt->{Tree}\n"; + my $mbody = f_ "Import %s", $tt->{F}; $tt->{Commit} = make_commit_text($tt->{Orig} ? 
<{Tree} author $r1authline committer $r1authline -Import $tt->{F} +$mbody [dgit import orig $tt->{F}] END_O @@ -2349,7 +2452,7 @@ tree $tt->{Tree} author $authline committer $authline -Import $tt->{F} +$mbody [dgit import tarball $package $cversion $tt->{F}] END_T @@ -2416,7 +2519,7 @@ END chomp $@; progress "warning: $@"; $path = "$absurdity:$path"; - progress "$us: trying slow absurd-git-apply..."; + progress f_ "%s: trying slow absurd-git-apply...", $us; rename "../../gbp-pq-output","../../gbp-pq-output.0" or $!==ENOENT or die $!; @@ -2435,19 +2538,19 @@ END 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd; debugcmd "+",@realcmd; if (system @realcmd) { - die +(shellquote @showcmd). - " failed: ". - failedcmd_waitstatus()."\n"; + die f_ "%s failed: %s\n", + +(shellquote @showcmd), + failedcmd_waitstatus(); } my $gapplied = git_rev_parse('HEAD'); my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:); $gappliedtree eq $dappliedtree or - fail < $rawimport_hash, - Info => "Import of source package", + Info => __ "Import of source package", }; my @output = ($rawimport_mergeinput); @@ -2474,16 +2577,18 @@ END version_compare($oversion, $cversion); if ($vcmp < 0) { @output = ($rawimport_mergeinput, $lastpush_mergeinput, - { Message => < 1 }); -Record $package ($cversion) in archive suite $csuite + { ReverseParents => 1, + Message => (f_ < 0) { - print STDERR <()) { - progress "using existing $f"; + progress f_ "using existing %s", $f; return 1; } if (!$refetched) { - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(perhaps you should delete this file?)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". + " (perhaps you should delete this file?)", + $f, $got, $fi->{Hash}; } - progress "need to fetch correct version of $f"; + progress f_ "need to fetch correct version of %s", $f; unlink $tf or die "$tf $!"; $$refetched = 1; } else { @@ -2544,9 +2649,9 @@ sub complete_file_from_dsc ($$;$) { return 0 if !act_local(); $checkhash->() or - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(got wrong file from archive!)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". 
+ " (got wrong file from archive!)", + $f, $got, $fi->{Hash}; return 1; } @@ -2639,7 +2744,7 @@ sub git_lrfetch_sane { for (;;) { printdebug "git_lrfetch_sane iteration $fetch_iteration\n"; if (++$fetch_iteration > 10) { - fail "too many iterations trying to get sane fetch!"; + fail __ "too many iterations trying to get sane fetch!"; } my @look = map { "refs/$_" } @specs; @@ -2653,8 +2758,8 @@ sub git_lrfetch_sane { m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?"; my ($objid,$rrefname) = ($1,$2); if (!$wanted_rref->($rrefname)) { - print STDERR <("rewrite map", $rewritemap) or return; + my $lrf = $do_fetch->((__ "rewrite map"), $rewritemap) or return; $mapref = $lrf.'/'.$rewritemap; } my $rewritemapdata = git_cat_file $mapref.':map'; if (defined $rewritemapdata && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) { - progress + progress __ "server's git history rewrite map contains a relevant entry!"; $dsc_hash = $1; if (defined $dsc_hash) { - progress "using rewritten git hash in place of .dsc value"; + progress __ "using rewritten git hash in place of .dsc value"; } else { - progress "server data says .dsc hash is to be disregarded"; + progress __ "server data says .dsc hash is to be disregarded"; } } } if (!defined git_cat_file $dsc_hash) { my @tags = map { "tags/".$_ } @$dsc_hint_tag; - my $lrf = $do_fetch->("additional commits", @tags) && + my $lrf = $do_fetch->((__ "additional commits"), @tags) && defined git_cat_file $dsc_hash - or fail < $lastpush_hash, - Info => "dgit suite branch on dgit git server", + Info => (__ "dgit suite branch on dgit git server"), }; my $lastfetch_hash = git_get_ref(lrref()); printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n"; my $lastfetch_mergeinput = $lastfetch_hash && { Commit => $lastfetch_hash, - Info => "dgit client's archive history view", + Info => (__ "dgit client's archive history view"), }; my $dsc_mergeinput = $dsc_hash && { Commit => $dsc_hash, - Info => "Dgit field in .dsc from archive", + Info => (__ "Dgit field in .dsc from archive"), }; my $cwd = getcwd(); @@ -3083,23 +3187,24 @@ sub fetch_from_archive () { if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) { @mergeinputs = $dsc_mergeinput } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) { - print STDERR <{Message}) { print MC $compat_info->{Message} or die $!; } else { - print MC <($mergeinputs[0]); - print MC <($_) foreach @mergeinputs[1..$#mergeinputs]; @@ -3241,12 +3347,12 @@ END my $chkff = sub { my ($lasth, $what) = @_; return unless $lasth; - die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash); + confess "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash); }; - $chkff->($lastpush_hash, 'dgit repo server tip (last push)') + $chkff->($lastpush_hash, __ 'dgit repo server tip (last push)') if $lastpush_hash; - $chkff->($lastfetch_hash, 'local tracking tip (last fetch)'); + $chkff->($lastfetch_hash, __ 'local tracking tip (last fetch)'); fetch_from_archive_record_1($hash); @@ -3256,11 +3362,11 @@ END my $got_vsn = getfield $gotclogp, 'Version'; printdebug "SKEW CHECK GOT $got_vsn\n"; if (version_compare($got_vsn, $skew_warning_vsn) < 0) { - print STDERR < $af.new" or die $!; - print GAO <) { @@ -3401,7 +3508,7 @@ END $gai->error and die $!; } close GAO or die $!; - rename "$af.new", "$af" or die "install $af: $!"; + rename "$af.new", "$af" or fail f_ "install %s: %s", $af, $!; } sub setup_new_tree () { @@ -3427,8 +3534,8 @@ sub check_gitattrs ($$) { next unless m{(?:^|/)\.gitattributes$}; # oh dear, found one - print STDERR <; die $! 
unless defined $csuite && chomp $csuite; @@ -3496,7 +3604,7 @@ sub fork_for_multisuite ($) { sub { }); return 0 unless defined $cbasesuite; - fail "package $package missing in (base suite) $cbasesuite" + fail f_ "package %s missing in (base suite) %s", $package, $cbasesuite unless @mergeinputs; my @csuites = ($cbasesuite); @@ -3528,9 +3636,9 @@ sub fork_for_multisuite ($) { if ($previous) { unshift @mergeinputs, { Commit => $previous, - Info => "local combined tracking branch", - Warning => - "archive seems to have rewound: local tracking branch is ahead!", + Info => (__ "local combined tracking branch"), + Warning => (__ + "archive seems to have rewound: local tracking branch is ahead!"), }; } @@ -3569,8 +3677,9 @@ sub fork_for_multisuite ($) { my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':'; my $commit = "tree $tree\n"; - my $msg = "Combine archive branches $csuite [dgit]\n\n". - "Input branches:\n"; + my $msg = f_ "Combine archive branches %s [dgit]\n\n". + "Input branches:\n", + $csuite; foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) { printdebug "multisuite merge include $mi->{Info}\n"; @@ -3582,9 +3691,10 @@ sub fork_for_multisuite ($) { $mi->{Info}; } my $authline = clogp_authline mergeinfo_getclogp $needed[0]; - $msg .= "\nKey\n". + $msg .= __ "\nKey\n". " * marks the highest version branch, which choose to use\n". - " + marks each branch which was not already an ancestor\n\n". + " + marks each branch which was not already an ancestor\n\n"; + $msg .= "[dgit multi-suite $csuite]\n"; $commit .= "author $authline\n". @@ -3596,7 +3706,7 @@ sub fork_for_multisuite ($) { fetch_from_archive_record_1($output); fetch_from_archive_record_2($output); - progress "calculated combined tracking suite $csuite"; + progress f_ "calculated combined tracking suite %s", $csuite; return 1; } @@ -3614,7 +3724,7 @@ sub clone_finish ($) { git ls-tree -r --name-only -z HEAD | \ xargs -0r touch -h -r . 
-- END - printdone "ready for work in $dstdir"; + printdone f_ "ready for work in %s", $dstdir; } sub clone ($) { @@ -3622,7 +3732,7 @@ sub clone ($) { # once in parent after first suite fetched, # and then again in child after everything is finished my ($dstdir) = @_; - badusage "dry run makes no sense with clone" unless act_local(); + badusage __ "dry run makes no sense with clone" unless act_local(); my $multi_fetched = fork_for_multisuite(sub { printdebug "multi clone before fetch merge\n"; @@ -3639,7 +3749,7 @@ sub clone ($) { canonicalise_suite(); my $hasgit = check_for_git(); - mkdir $dstdir or fail "create \`$dstdir': $!"; + mkdir $dstdir or fail f_ "create \`%s': %s", $dstdir, $!; changedir $dstdir; runcmd @git, qw(init -q); record_maindir(); @@ -3650,11 +3760,11 @@ sub clone ($) { runcmd @git, qw(remote add), 'origin', $giturl; } if ($hasgit) { - progress "fetching existing git history"; + progress __ "fetching existing git history"; git_fetch_us(); runcmd_ordryrun_local @git, qw(fetch origin); } else { - progress "starting new git history"; + progress __ "starting new git history"; } fetch_from_archive() or no_such_package; my $vcsgiturl = $dsc->{'Vcs-Git'}; @@ -3679,13 +3789,13 @@ sub fetch_one () { cfg 'dgit.vcs-git.suites')) { my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF'; if (defined $current && $current ne $vcsgiturl) { - print STDERR < message fragment "$saved" describing disposition of $dgitview + # (used inside parens, in the English texts) my $save = $internal_object_save{'dgit-view'}; - return "commit id $dgitview" unless defined $save; + return f_ "commit id %s", $dgitview unless defined $save; my @cmd = (shell_cmd 'cd "$1"; shift', $maindir, git_update_ref_cmd "dgit --dgit-view-save $msg HEAD=$headref", $save, $dgitview); runcmd @cmd; - return "and left in $save"; + return f_ "and left in %s", $save; } # An "infopair" is a tuple [ $thing, $what ] @@ -3833,17 +3947,19 @@ sub infopair_lrf_tag_lookup ($$) { printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n"; return [ git_rev_parse($tagobj), $what ]; } - fail @tagnames==1 ? <[0], $desc->[0]) or fail <[1] ($anc->[0]) .. $desc->[1] ($desc->[0]) is not fast forward + is_fast_fwd($anc->[0], $desc->[0]) or + fail f_ <[1], $anc->[0], $desc->[1], $desc->[0]; +%s (%s) .. %s (%s) is not fast forward END }; @@ -3852,7 +3968,7 @@ sub pseudomerge_version_check ($$) { my $arch_clogp = commit_getclogp $archive_hash; my $i_arch_v = [ (getfield $arch_clogp, 'Version'), - 'version currently in archive' ]; + __ 'version currently in archive' ]; if (defined $overwrite_version) { if (length $overwrite_version) { infopair_cond_equal([ $overwrite_version, @@ -3860,7 +3976,8 @@ sub pseudomerge_version_check ($$) { $i_arch_v); } else { my $v = $i_arch_v->[0]; - progress "Checking package changelog for archive version $v ..."; + progress f_ + "Checking package changelog for archive version %s ...", $v; my $cd; eval { my @xa = ("-f$v", "-t$v"); @@ -3868,7 +3985,8 @@ sub pseudomerge_version_check ($$) { my $gf = sub { my ($fn) = @_; [ (getfield $vclogp, $fn), - "$fn field from dpkg-parsechangelog @xa" ]; + (f_ "%s field from dpkg-parsechangelog %s", + $fn, "@xa") ]; }; my $cv = $gf->('Version'); infopair_cond_equal($i_arch_v, $cv); @@ -3877,12 +3995,13 @@ sub pseudomerge_version_check ($$) { if ($@) { $@ =~ s/^dgit: //gm; fail "$@". 
- "Perhaps debian/changelog does not mention $v ?"; + f_ "Perhaps debian/changelog does not mention %s ?", $v; } - fail <[0] =~ m/UNRELEASED/; -$cd->[1] is $cd->[0] -Your tree seems to based on earlier (not uploaded) $v. + fail f_ <[1], $cd->[0], $v +%s is %s +Your tree seems to based on earlier (not uploaded) %s. END + if $cd->[0] =~ m/UNRELEASED/; } } @@ -3893,7 +4012,8 @@ END sub pseudomerge_make_commit ($$$$ $$) { my ($clogp, $dgitview, $archive_hash, $i_arch_v, $msg_cmd, $msg_msg) = @_; - progress "Declaring that HEAD inciudes all changes in $i_arch_v->[0]..."; + progress f_ "Declaring that HEAD inciudes all changes in %s...", + $i_arch_v->[0]; my $tree = cmdoutput qw(git rev-parse), "${dgitview}:"; my $authline = clogp_authline $clogp; @@ -3948,7 +4068,7 @@ sub splitbrain_pseudomerge ($$$$) { my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash); if (!defined $overwrite_version) { - progress "Checking that HEAD inciudes all changes in archive..."; + progress __ "Checking that HEAD inciudes all changes in archive..."; } return $dgitview if is_fast_fwd $archive_hash, $dgitview; @@ -3956,10 +4076,11 @@ sub splitbrain_pseudomerge ($$$$) { if (defined $overwrite_version) { } elsif (!eval { my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_nomdistro; - my $i_dep14 = infopair_lrf_tag_lookup($t_dep14, "maintainer view tag"); + my $i_dep14 = infopair_lrf_tag_lookup($t_dep14, + __ "maintainer view tag"); my $t_dgit = debiantag_new $i_arch_v->[0], access_nomdistro; - my $i_dgit = infopair_lrf_tag_lookup($t_dgit, "dgit view tag"); - my $i_archive = [ $archive_hash, "current archive contents" ]; + my $i_dgit = infopair_lrf_tag_lookup($t_dgit, __ "dgit view tag"); + my $i_archive = [ $archive_hash, __ "current archive contents" ]; printdebug "splitbrain_pseudomerge i_archive @$i_archive\n"; @@ -3969,25 +4090,25 @@ sub splitbrain_pseudomerge ($$$$) { 1; }) { $@ =~ s/^\n//; chomp $@; - print STDERR <[0]; my $r = pseudomerge_make_commit $clogp, $dgitview, $archive_hash, $i_arch_v, "dgit --quilt=$quilt_mode", - (defined $overwrite_version ? <[0] -END_OVERWR -Make fast forward from $i_arch_v->[0] -END_MAKEFF + (defined $overwrite_version + ? 
f_ "Declare fast forward from %s\n", $arch_v + : f_ "Make fast forward from %s\n", $arch_v); maybe_split_brain_save $maintview, $r, "pseudomerge"; - progress "Made pseudo-merge of $i_arch_v->[0] into dgit view."; + progress f_ "Made pseudo-merge of %s into dgit view.", $arch_v; return $r; } @@ -4000,7 +4121,7 @@ sub plain_overwrite_pseudomerge ($$$) { return $head if is_fast_fwd $archive_hash, $head; - my $m = "Declare fast forward from $i_arch_v->[0]"; + my $m = f_ "Declare fast forward from %s", $i_arch_v->[0]; my $r = pseudomerge_make_commit $clogp, $head, $archive_hash, $i_arch_v, @@ -4008,7 +4129,7 @@ sub plain_overwrite_pseudomerge ($$$) { runcmd git_update_ref_cmd $m, 'HEAD', $r, $head; - progress "Make pseudo-merge of $i_arch_v->[0] into your HEAD."; + progress f_ "Make pseudo-merge of %s into your HEAD.", $i_arch_v->[0]; return $r; } @@ -4020,7 +4141,8 @@ sub push_parse_changelog ($) { my $clogpackage = getfield $clogp, 'Source'; $package //= $clogpackage; - fail "-p specified $package but changelog specified $clogpackage" + fail f_ "-p specified %s but changelog specified %s", + $package, $clogpackage unless $package eq $clogpackage; my $cversion = getfield $clogp, 'Version'; @@ -4041,8 +4163,9 @@ sub push_parse_dsc ($$$) { my $dversion = getfield $dsc, 'Version'; my $dscpackage = getfield $dsc, 'Source'; ($dscpackage eq $package && $dversion eq $cversion) or - fail "$dscfn is for $dscpackage $dversion". - " but debian/changelog is for $package $cversion"; + fail f_ "%s is for %s %s but debian/changelog is for %s %s", + $dscfn, $dscpackage, $dversion, + $package, $cversion; } sub push_tagwants ($$$$) { @@ -4101,8 +4224,8 @@ sub push_mktags ($$ $$ $) { my $changes = parsecontrol($changesfile,$changesfilewhat); foreach my $field (qw(Source Distribution Version)) { $changes->{$field} eq $clogp->{$field} or - fail "changes field $field \`$changes->{$field}'". - " does not match changelog \`$clogp->{$field}'"; + fail f_ "changes field %s \`%s' does not match changelog \`%s'", + $field, $changes->{$field}, $clogp->{$field}; } my $cversion = getfield $clogp, 'Version'; @@ -4128,8 +4251,11 @@ tagger $authline END if ($tw->{View} eq 'dgit') { + print TO f_ <{View} eq 'maint') { - print TO <{$_} or - fail "buildinfo contains $_" + fail f_ "buildinfo contains forbidden field %s", $_ foreach qw(Changes Changed-by Distribution); } push @i_buildinfos, $bi; @@ -5013,7 +5154,7 @@ sub i_want_signed_tag { return push_mktags $i_clogp, $i_dscfn, - $i_changesfn, 'remote changes', + $i_changesfn, (__ 'remote changes file'), \@tagwants; } @@ -5039,7 +5180,7 @@ sub quiltify_dpkg_commit ($$$;$) { mkpath '.git/dgit'; # we are in playtree my $descfn = ".git/dgit/quilt-description.tmp"; - open O, '>', $descfn or die "$descfn: $!"; + open O, '>', $descfn or confess "$descfn: $!"; $msg =~ s/\n+/\n\n/; print O <{O2H} & 01)) { - my $msg = - "--quilt=$quilt_mode specified, implying patches-unapplied git tree\n". - " but git tree differs from orig in upstream files."; + my $msg = f_ + "--quilt=%s specified, implying patches-unapplied git tree\n". + " but git tree differs from orig in upstream files.", + $quilt_mode; $msg .= $fulldiffhint->($unapplied, 'HEAD'); if (!stat_exists "debian/patches") { - $msg .= + $msg .= __ "\n ... 
debian/patches is missing; perhaps this is a patch queue branch?"; } fail $msg; } if ($quilt_mode =~ m/dpm/ && ($diffbits->{H2A} & 01)) { - fail <($oldtiptree,'HEAD'); ---quilt=$quilt_mode specified, implying patches-applied git tree + fail +(f_ <($oldtiptree,'HEAD'); +--quilt=%s specified, implying patches-applied git tree but git tree differs from result of applying debian/patches to upstream END } if ($quilt_mode =~ m/gbp|unapplied/ && ($diffbits->{O2A} & 01)) { # some patches quiltify_splitbrain_needed(); - progress "dgit view: creating patches-applied version using gbp pq"; + progress __ "dgit view: creating patches-applied version using gbp pq"; runcmd shell_cmd 'exec >/dev/null', gbp_pq, qw(import); # gbp pq import creates a fresh branch; push back to dgit-view runcmd @git, qw(update-ref refs/heads/dgit-view HEAD); @@ -5194,8 +5337,8 @@ END } if ($quilt_mode =~ m/gbp|dpm/ && ($diffbits->{O2A} & 02)) { - fail <{O2H} & 02) && # user has modified .gitignore !($diffbits->{O2A} & 02)) { # patches do not change .gitignore quiltify_splitbrain_needed(); - progress "dgit view: creating patch to represent .gitignore changes"; + progress __ + "dgit view: creating patch to represent .gitignore changes"; ensuredir "debian/patches"; my $gipatch = "debian/patches/auto-gitignore"; - open GIPATCH, ">>", "$gipatch" or die "$gipatch: $!"; - stat GIPATCH or die "$gipatch: $!"; - fail "$gipatch already exists; but want to create it". - " to record .gitignore changes" if (stat _)[7]; - print GIPATCH <>", "$gipatch" or confess "$gipatch: $!"; + stat GIPATCH or confess "$gipatch: $!"; + fail f_ "%s already exists; but want to create it". + " to record .gitignore changes", + $gipatch + if (stat _)[7]; + print GIPATCH +(__ <>$gipatch", @git, qw(diff), $unapplied, $headref, "--", sort keys %$editedignores; @@ -5231,11 +5378,12 @@ END print SERIES "auto-gitignore\n" or die $!; close SERIES or die $!; runcmd @git, qw(add -f -- debian/patches/series), $gipatch; - commit_admin <{Commit}}++; - $not->($c, "maximum search space exceeded") if --$max_work <= 0; + $not->($c, __ "maximum search space exceeded") if --$max_work <= 0; printdebug "quiltify investigate $c->{Commit}\n"; @@ -5329,7 +5477,7 @@ sub quiltify ($$$$) { } my $c_sentinels = quiltify_tree_sentinelfiles $c->{Commit}; - $not->($c, "has $c_sentinels not $t_sentinels") + $not->($c, f_ "has %s not %s", $c_sentinels, $t_sentinels) if $c_sentinels ne $t_sentinels; my $commitdata = cmdoutput @git, qw(cat-file commit), $c->{Commit}; @@ -5338,13 +5486,14 @@ sub quiltify ($$$$) { my @parents = ($commitdata =~ m/^parent (\w+)$/gm); @parents = map { { Commit => $_, Child => $c } } @parents; - $not->($c, "root commit") if !@parents; + $not->($c, __ "root commit") if !@parents; foreach my $p (@parents) { $p->{Nontrivial}= quiltify_trees_differ $p->{Commit},$c->{Commit}; } my $ndiffers = grep { $_->{Nontrivial} } @parents; - $not->($c, "merge ($ndiffers nontrivial parents)") if $ndiffers > 1; + $not->($c, f_ "merge (%s nontrivial parents)", $ndiffers) + if $ndiffers > 1; foreach my $p (@parents) { printdebug "considering C=$c->{Commit} P=$p->{Commit}\n"; @@ -5354,7 +5503,7 @@ sub quiltify ($$$$) { my $patchstackchange = cmdoutput @cmd; if (length $patchstackchange) { $patchstackchange =~ s/\n/,/g; - $not->($p, "changed $patchstackchange"); + $not->($p, f_ "changed %s", $patchstackchange); } printdebug " search queue P=$p->{Commit} ", @@ -5371,19 +5520,17 @@ sub quiltify ($$$$) { $x =~ s/(.*?[0-9a-z]{8})[0-9a-z]*$/$1/; return $x; }; - my $reportnot = sub { - my 
($notp) = @_; - my $s = $abbrev->($notp); - my $c = $notp->{Child}; - $s .= "..".$abbrev->($c) if $c; - $s .= ": ".$notp->{Whynot}; - return $s; - }; if ($quilt_mode eq 'linear') { - print STDERR "\n$us: error: quilt fixup cannot be linear. Stopped at:\n"; + print STDERR f_ + "\n%s: error: quilt fixup cannot be linear. Stopped at:\n", + $us; my $all_gdr = !!@nots; foreach my $notp (@nots) { - print STDERR "$us: ", $reportnot->($notp), "\n"; + my $c = $notp->{Child}; + my $cprange = $abbrev->($notp); + $cprange .= "..".$abbrev->($c) if $c; + print STDERR f_ "%s: %s: %s\n", + $us, $cprange, $notp->{Whynot}; $all_gdr &&= $notp->{Child} && (git_cat_file $notp->{Child}{Commit}, 'commit') =~ m{^\[git-debrebase(?! split[: ]).*\]$}m; @@ -5393,13 +5540,13 @@ sub quiltify ($$$$) { [ grep { $_->[0] ne 'quilt-mode' } @$failsuggestion ] if $all_gdr; print STDERR "$us: $_->[1]\n" foreach @$failsuggestion; - fail + fail __ "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n"; } elsif ($quilt_mode eq 'smash') { } elsif ($quilt_mode eq 'auto') { - progress "quilt fixup cannot be linear, smashing..."; + progress __ "quilt fixup cannot be linear, smashing..."; } else { - die "$quilt_mode ?"; + confess "$quilt_mode ?"; } my $time = $ENV{'GIT_COMMITTER_DATE'} || time; @@ -5409,12 +5556,14 @@ sub quiltify ($$$$) { quiltify_dpkg_commit "auto-$version-$target-$time", (getfield $clogp, 'Maintainer'), - "Automatically generated patch ($clogp->{Version})\n". - "Last (up to) $ncommits git changes, FYI:\n\n". $msg; + (f_ "Automatically generated patch (%s)\n". + "Last (up to) %s git changes, FYI:\n\n", + $clogp->{Version}, $ncommits). + $msg; return; } - progress "quiltify linearisation planning successful, executing..."; + progress __ "quiltify linearisation planning successful, executing..."; for (my $p = $sref_S; my $c = $p->{Child}; @@ -5452,7 +5601,7 @@ sub quiltify ($$$$) { die "contains leading punctuation\n" if m{^\W} || m{/\W}; die "contains bad character(s)\n" if m{[^-a-z0-9_.+=~/]}i; die "is series file\n" if m{$series_filename_re}o; - die "too long" if length > 200; + die "too long\n" if length > 200; }; return $_ unless $@; print STDERR "quiltifying commit $cc:". @@ -5548,7 +5697,7 @@ END if ($quilt_mode eq 'linear' && !$fopts->{'single-debian-patch'} - && branch_is_gdr($symref, $headref)) { + && branch_is_gdr($headref)) { # This is much faster. It also makes patches that gdr # likes better for future updates without laundering. # @@ -5620,11 +5769,12 @@ sub unpack_playtree_linkorigs ($$) { sub quilt_fixup_delete_pc () { runcmd @git, qw(rm -rqf .pc); - commit_admin <[1]: $_->[0]\n" @@ -6156,16 +6306,27 @@ sub massage_dbp_args ($;$) { my $dmode = '-F'; foreach my $l ($cmd, $xargs) { next unless $l; - @$l = grep { !(m/^-[SgGFABb]$/s and $dmode=$_) } @$l; + @$l = grep { !(m/^-[SgGFABb]$|^--build=/s and $dmode=$_) } @$l; } push @$cmd, '-nc'; #print STDERR "MASS1 ",Dumper($cmd, $xargs, $dmode); my $r = WANTSRC_BUILDER; printdebug "massage split $dmode.\n"; - $r = $dmode =~ m/[S]/ ? WANTSRC_SOURCE : - $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : - $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER : - die "$dmode ?"; + if ($dmode =~ s/^--build=//) { + $r = 0; + my @d = split /,/, $dmode; + $r |= WANTSRC_SOURCE if grep { s/^full$/binary/ } @d; + $r |= WANTSRC_SOURCE if grep { s/^source$// } @d; + $r |= WANTSRC_BUILDER if grep { m/./ } @d; + fail "Wanted to build nothing!" unless $r; + $dmode = '--build='. join ',', grep m/./, @d; + } else { + $r = + $dmode =~ m/[S]/ ? 
WANTSRC_SOURCE : + $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : + $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER : + die "$dmode ?"; + } printdebug "massage done $r $dmode.\n"; push @$cmd, $dmode; #print STDERR "MASS2 ",Dumper($cmd, $xargs, $r); @@ -6254,9 +6415,10 @@ sub postbuild_mergechanges_vanilla ($) { sub cmd_build { build_prep_early(); $buildproductsdir eq '..' or print STDERR < 1; print STDERR "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n" if $dryrun_level == 1; if (!@ARGV) { - print STDERR $helpmsg or die $!; + print STDERR __ $helpmsg or die $!; finish 8; } $cmd = $subcommand = shift @ARGV;
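
The bulk of the hunks above convert dgit's user-visible messages to gettext lookups: fixed strings are wrapped in __ and sprintf-style messages in f_, both presumably provided by the Debian::Dgit::I18n module that the diff starts importing. As a rough illustration of that pattern only (the package name, text domain, and exact prototypes below are assumptions for the sketch, not the real module), wrappers of this kind can be built directly on the functional interface of Locale::gettext, which the diff also adds to dgit's own imports:

    #!/usr/bin/perl -w
    # Sketch of gettext message wrappers in the style this diff uses.
    package Example::I18n;              # hypothetical package name

    use strict;
    use POSIX qw(setlocale LC_MESSAGES);
    use Locale::gettext;                # same module the diff adds to dgit
    use Exporter 'import';
    our @EXPORT = qw(__ f_);

    setlocale(LC_MESSAGES, "");
    textdomain("example-domain");       # hypothetical text domain

    # __ STRING: translate a complete, fixed message.
    sub __ ($) { gettext($_[0]); }

    # f_ FORMAT, ARGS...: translate the format string first, then sprintf,
    # so translators see one msgid such as "fetch of %s gave HTTP code %s"
    # rather than a message with the values already interpolated.
    sub f_ ($@) { my $fmt = shift; sprintf gettext($fmt), @_; }

    1;

With wrappers like these, call sites read exactly as in the hunks above, for example: fail f_ "fetch of %s gave HTTP code %s", $url, $code;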