X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?p=dgit.git;a=blobdiff_plain;f=dgit;h=1fde60bba61f663b82f931aaf454f13b193bb7b1;hp=961d974882fc568ef6b6061af805c0f2e99ab80f;hb=d5a04ee68cb3ed5c2853bae83b132505b850a5e6;hpb=a57e3b1aaf62ca5d28b4e65fdaf46340067b5748 diff --git a/dgit b/dgit index 961d9748..1fde60bb 100755 --- a/dgit +++ b/dgit @@ -2,8 +2,8 @@ # dgit # Integration between git and Debian-style archives # -# Copyright (C)2013-2017 Ian Jackson -# Copyright (C)2017 Sean Whitton +# Copyright (C)2013-2018 Ian Jackson +# Copyright (C)2017-2018 Sean Whitton # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -20,6 +20,7 @@ END { $? = $Debian::Dgit::ExitStatus::desired // -1; }; use Debian::Dgit::ExitStatus; +use Debian::Dgit::I18n; use strict; @@ -37,6 +38,7 @@ use Dpkg::Version; use Dpkg::Compression; use Dpkg::Compression::Process; use POSIX; +use Locale::gettext; use IPC::Open2; use Digest::SHA; use Digest::MD5; @@ -63,9 +65,10 @@ our @ropts; our $sign = 1; our $dryrun_level = 0; our $changesfile; -our $buildproductsdir = '..'; +our $buildproductsdir; +our $bpd_glob; our $new_package = 0; -our $ignoredirty = 0; +our $includedirty = 0; our $rmonerror = 1; our @deliberatelies; our %previously; @@ -77,7 +80,7 @@ our $overwrite_version; # undef: not specified; '': check changelog our $quilt_mode; our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied'; our $dodep14tag; -our $split_brain_save; +our %internal_object_save; our $we_are_responder; our $we_are_initiator; our $initiator_tempdir; @@ -90,6 +93,7 @@ our $chase_dsc_distro=1; our %forceopts = map { $_=>0 } qw(unrepresentable unsupported-source-format dsc-changes-mismatch changes-origs-exactly + uploading-binaries uploading-source-only import-gitapply-absurd import-gitapply-no-absurd import-dsc-with-dgit-field); @@ -98,9 +102,6 @@ our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)"); our $suite_re = '[-+.0-9a-z]+'; our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none'; -our $orig_f_comp_re = qr{orig(?:-$extra_orig_namepart_re)?}; -our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)'; -our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?"; our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$'; our $splitbraincache = 'dgit-intern/quilt-cache'; @@ -114,7 +115,7 @@ our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L)); our (@dput) = qw(dput); our (@debsign) = qw(debsign); our (@gpg) = qw(gpg); -our (@sbuild) = qw(sbuild); +our (@sbuild) = (qw(sbuild --no-source)); our (@ssh) = 'ssh'; our (@dgit) = qw(dgit); our (@git_debrebase) = qw(git-debrebase); @@ -127,6 +128,8 @@ our (@mergechanges) = qw(mergechanges -f); our (@gbp_build) = (''); our (@gbp_pq) = ('gbp pq'); our (@changesopts) = (''); +our (@pbuilder) = ("sudo -E pbuilder"); +our (@cowbuilder) = ("sudo -E cowbuilder"); our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'curl' => \@curl, @@ -146,7 +149,9 @@ our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'gbp-build' => \@gbp_build, 'gbp-pq' => \@gbp_pq, 'ch' => \@changesopts, - 'mergechanges' => \@mergechanges); + 'mergechanges' => \@mergechanges, + 'pbuilder' => \@pbuilder, + 'cowbuilder' => \@cowbuilder); our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1); our %opts_cfg_insertpos = map { @@ -164,7 +169,6 @@ our $keyid; autoflush STDOUT 1; our $supplementary_message = ''; -our $need_split_build_invocation = 0; our $split_brain 
= 0; END { @@ -188,11 +192,6 @@ sub debiantag ($$) { return $tagformatfn->($v, $distro); } -sub debiantag_maintview ($$) { - my ($v,$distro) = @_; - return "$distro/".dep14_version_mangle $v; -} - sub madformat ($) { $_[0] eq '3.0 (quilt)' } sub lbranch () { return "$branchprefix/$csuite"; } @@ -201,15 +200,13 @@ sub lref () { return "refs/heads/".lbranch(); } sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); } sub rrref () { return server_ref($csuite); } -sub stripepoch ($) { - my ($vsn) = @_; - $vsn =~ s/^\d+\://; - return $vsn; -} - sub srcfn ($$) { - my ($vsn,$sfx) = @_; - return "${package}_".(stripepoch $vsn).$sfx + my ($vsn, $sfx) = @_; + return &source_file_leafname($package, $vsn, $sfx); +} +sub is_orig_file_of_vsn ($$) { + my ($f, $upstreamvsn) = @_; + return is_orig_file_of_p_v($f, $package, $upstreamvsn); } sub dscfn ($) { @@ -222,12 +219,6 @@ sub changespat ($;$) { return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes"; } -sub upstreamversion ($) { - my ($vsn) = @_; - $vsn =~ s/-[^-]+$//; - return $vsn; -} - our $us = 'dgit'; initdebug(''); @@ -241,24 +232,29 @@ END { } }; -sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; } +sub badcfg { + print STDERR f_ "%s: invalid configuration: %s\n", $us, "@_"; + finish 12; +} sub forceable_fail ($$) { my ($forceoptsl, $msg) = @_; fail $msg unless grep { $forceopts{$_} } @$forceoptsl; - print STDERR "warning: overriding problem due to --force:\n". $msg; + print STDERR +(__ "warning: overriding problem due to --force:\n"). $msg; } sub forceing ($) { my ($forceoptsl) = @_; my @got = grep { $forceopts{$_} } @$forceoptsl; return 0 unless @got; - print STDERR - "warning: skipping checks or functionality due to --force-$got[0]\n"; + print STDERR f_ + "warning: skipping checks or functionality due to --force-%s\n", + $got[0]; } sub no_such_package () { - print STDERR "$us: package $package does not exist in suite $isuite\n"; + print STDERR f_ "%s: package %s does not exist in suite %s\n", + $us, $package, $isuite; finish 4; } @@ -278,20 +274,36 @@ sub quiltmode_splitbrain () { } sub opts_opt_multi_cmd { + my $extra = shift; my @cmd; push @cmd, split /\s+/, shift @_; + push @cmd, @$extra; push @cmd, @_; @cmd; } sub gbp_pq { - return opts_opt_multi_cmd @gbp_pq; + return opts_opt_multi_cmd [], @gbp_pq; } sub dgit_privdir () { our $dgit_privdir_made //= ensure_a_playground 'dgit'; } +sub bpd_abs () { + my $r = $buildproductsdir; + $r = "$maindir/$r" unless $r =~ m{^/}; + return $r; +} + +sub get_tree_of_commit ($) { + my ($commitish) = @_; + my $cdata = cmdoutput @git, qw(cat-file commit), $commitish; + $cdata =~ m/\n\n/; $cdata = $`; + $cdata =~ m/^tree (\w+)$/m or confess "cdata $cdata ?"; + return $1; +} + sub branch_gdr_info ($$) { my ($symref, $head) = @_; my ($status, $msg, $current, $ffq_prev, $gdrlast) = @@ -303,21 +315,91 @@ sub branch_gdr_info ($$) { return ($ffq_prev, $gdrlast); } -sub branch_is_gdr ($$) { - my ($symref, $head) = @_; - my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); - return 0 unless $ffq_prev || $gdrlast; - return 1; -} - sub branch_is_gdr_unstitched_ff ($$$) { my ($symref, $head, $ancestor) = @_; my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); return 0 unless $ffq_prev; - return 0 unless is_fast_fwd $ancestor, $ffq_prev; + return 0 unless !defined $ancestor or is_fast_fwd $ancestor, $ffq_prev; return 1; } +sub branch_is_gdr ($) { + my ($head) = @_; + # This is quite like git-debrebase's keycommits. 
+ # We have our own implementation because: + # - our algorithm can do fewer tests so is faster + # - it saves testing to see if gdr is installed + + # NB we use this just for deciding whether to run gdr make-patches + # Before reusing this algorithm for something else, its + # suitability should be reconsidered. + + my $walk = $head; + local $Debian::Dgit::debugcmd_when_debuglevel = 3; + printdebug "branch_is_gdr $head...\n"; + my $get_patches = sub { + my $t = git_cat_file "$_[0]:debian/patches", [qw(missing tree)]; + return $t // ''; + }; + my $tip_patches = $get_patches->($head); + WALK: + for (;;) { + my $cdata = git_cat_file $walk, 'commit'; + my ($hdrs,$msg) = $cdata =~ m{\n\n} ? ($`,$') : ($cdata,''); + if ($msg =~ m{^\[git-debrebase\ ( + anchor | changelog | make-patches | + merged-breakwater | pseudomerge + ) [: ] }mx) { + # no need to analyse this - it's sufficient + # (gdr classifications: Anchor, MergedBreakwaters) + # (made by gdr: Pseudomerge, Changelog) + printdebug "branch_is_gdr $walk gdr $1 YES\n"; + return 1; + } + my @parents = ($hdrs =~ m/^parent (\w+)$/gm); + if (@parents==2) { + my $walk_tree = get_tree_of_commit $walk; + foreach my $p (@parents) { + my $p_tree = get_tree_of_commit $p; + if ($p_tree eq $walk_tree) { # pseudomerge contributor + # (gdr classification: Pseudomerge; not made by gdr) + printdebug "branch_is_gdr $walk unmarked pseudomerge\n" + if $debuglevel >= 2; + $walk = $p; + next WALK; + } + } + # some other non-gdr merge + # (gdr classification: VanillaMerge, DgitImportUnpatched, ?) + printdebug "branch_is_gdr $walk ?-2-merge NO\n"; + return 0; + } + if (@parents>2) { + # (gdr classification: ?) + printdebug "branch_is_gdr $walk ?-octopus NO\n"; + return 0; + } + if ($get_patches->($walk) ne $tip_patches) { + # Our parent added, removed, or edited patches, and wasn't + # a gdr make-patches commit. gdr make-patches probably + # won't do that well, then. + # (gdr classification of parent: AddPatches or ?) + printdebug "branch_is_gdr $walk ?-patches NO\n"; + return 0; + } + if ($tip_patches eq '' and + !defined git_cat_file "$walk:debian") { + # (gdr classification of parent: BreakwaterStart) + printdebug "branch_is_gdr $walk unmarked BreakwaterStart YES\n"; + return 1; + } + # (gdr classification: Upstream Packaging Mixed Changelog) + printdebug "branch_is_gdr $walk plain\n" + if $debuglevel >= 2; + $walk = $parents[0]; + } +} + #---------- remote protocol support, common ---------- # remote push initiator/responder protocol: @@ -390,28 +472,28 @@ sub i_child_report () { die unless $got == $i_child_pid; $i_child_pid = undef; return undef unless $?; - return "build host child ".waitstatusmsg(); + return f_ "build host child %s", waitstatusmsg(); } sub badproto ($$) { my ($fh, $m) = @_; - fail "connection lost: $!" if $fh->error; - fail "protocol violation; $m not expected"; + fail f_ "connection lost: %s", $! if $fh->error; + fail f_ "protocol violation; %s not expected", $m; } sub badproto_badread ($$) { my ($fh, $wh) = @_; - fail "connection lost: $!" if $!; + fail f_ "connection lost: %s", $!
if $!; my $report = i_child_report(); fail $report if defined $report; - badproto $fh, "eof (reading $wh)"; + badproto $fh, f_ "eof (reading %s)", $wh; } sub protocol_expect (&$) { my ($match, $fh) = @_; local $_; $_ = <$fh>; - defined && chomp or badproto_badread $fh, "protocol message"; + defined && chomp or badproto_badread $fh, __ "protocol message"; if (wantarray) { my @r = &$match; return @r if @r; @@ -419,7 +501,7 @@ sub protocol_expect (&$) { my $r = &$match; return $r if $r; } - badproto $fh, "\`$_'"; + badproto $fh, f_ "\`%s'", $_; } sub protocol_send_file ($$) { @@ -440,10 +522,10 @@ sub protocol_send_file ($$) { sub protocol_read_bytes ($$) { my ($fh, $nbytes) = @_; - $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count"; + $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, __ "bad byte count"; my $d; my $got = read $fh, $d, $nbytes; - $got==$nbytes or badproto_badread $fh, "data block"; + $got==$nbytes or badproto_badread $fh, __ "data block"; return $d; } @@ -524,7 +606,8 @@ sub url_get { progress "downloading $what..."; my $r = $ua->get(@_) or die $!; return undef if $r->code == 404; - $r->is_success or fail "failed to fetch $what: ".$r->status_line; + $r->is_success or fail f_ "failed to fetch %s: %s", + $what, $r->status_line; return $r->decoded_content(charset => 'none'); } @@ -535,9 +618,9 @@ sub act_scary () { return !$dryrun_level; } sub printdone { if (!$dryrun_level) { - progress "$us ok: @_"; + progress f_ "%s ok: %s", $us, "@_"; } else { - progress "would be ok: @_ (but dry run only)"; + progress f_ "would be ok: %s (but dry run only)", "@_"; } } @@ -561,12 +644,13 @@ sub runcmd_ordryrun_local { } } -our $helpmsg = <= set git config option (used directly by dgit too) END -our $later_warning_msg = < 'debian', 'dgit.default.sshpsql-dbname' => 'service=projectb', 'dgit.default.aptget-components' => 'main', 'dgit.default.dgit-tag-format' => 'new,old,maint', + 'dgit.default.source-only-uploads' => 'ok', 'dgit.dsc-url-proto-ok.http' => 'true', 'dgit.dsc-url-proto-ok.https' => 'true', 'dgit.dsc-url-proto-ok.git' => 'true', + 'dgit.vcs-git.suites', => 'sid', # ;-separated 'dgit.default.dsc-url-proto-ok' => 'false', # old means "repo server accepts pushes with old dgit tags" # new means "repo server accepts pushes with new dgit tags" @@ -627,6 +713,7 @@ our %defcfg = ('dgit.default.distro' => 'debian', 'dgit-distro.debian.git-check' => 'url', 'dgit-distro.debian.git-check-suffix' => '/info/refs', 'dgit-distro.debian.new-private-pushers' => 't', + 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new', 'dgit-distro.debian/push.git-url' => '', 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org', 'dgit-distro.debian/push.git-user-force' => 'dgit', @@ -696,8 +783,9 @@ sub git_get_config ($) { "undef")."\n" if $debuglevel >= 4; $l or next; - @$l==1 or badcfg "multiple values for $c". - " (in $src git config)" if @$l > 1; + @$l==1 or badcfg + f_ "multiple values for %s (in %s git config)", $c, $src + if @$l > 1; return $l->[0]; } return undef; @@ -715,8 +803,10 @@ sub cfg { return $dv; } } - badcfg "need value for one of: @_\n". - "$us: distro or suite appears not to be (properly) supported"; + badcfg f_ + "need value for one of: %s\n". 
+ "%s: distro or suite appears not to be (properly) supported", + "@_", $us; } sub not_necessarily_a_tree () { @@ -755,7 +845,8 @@ sub access_nomdistro () { my $base = access_basedistro(); my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base; $r =~ m/^$distro_re$/ or badcfg - "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)"; + f_ "bad syntax for (nominal) distro \`%s' (does not match %s)", + $r, "/^$distro_re$/"; return $r; } @@ -769,7 +860,7 @@ sub access_quirk () { $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig; $re =~ s/\*/.*/g; $re =~ s/\%/([-0-9a-z_]+)/ - or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )"; + or $re =~ m/[()]/ or badcfg __ "backports-quirk needs \% or ( )"; if ($isuite =~ m/^$re$/) { return ('backports',"$basedistro-backports",$1); } @@ -785,7 +876,8 @@ sub parse_cfg_bool ($$$) { return $v =~ m/^[ty1]/ ? 1 : $v =~ m/^[fn0]/ ? 0 : - badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'"; + badcfg f_ "%s needs t (true, y, 1) or f (false, n, 0) not \`%s'", + $what, $v; } sub access_forpush_config () { @@ -803,7 +895,8 @@ sub access_forpush_config () { $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1 $v =~ m/^[a]/ ? '' : # auto, forpush = '' - badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; + badcfg __ + "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; } sub access_forpush () { @@ -812,11 +905,12 @@ sub access_forpush () { } sub pushing () { - die "$access_forpush ?" if ($access_forpush // 1) ne 1; - badcfg "pushing but distro is configured readonly" + confess +(__ 'internal error').' '.Dumper($access_forpush)," ?" if + defined $access_forpush and !$access_forpush; + badcfg __ "pushing but distro is configured readonly" if access_forpush_config() eq '0'; $access_forpush = 1; - $supplementary_message = <<'END' unless $we_are_responder; + $supplementary_message = __ <<'END' unless $we_are_responder; Push failed, before we got started. You can retry the push, after fixing the problem, if you like. END @@ -958,59 +1052,6 @@ sub access_giturl (;$) { return "$url/$package$suffix"; } -sub parsecontrolfh ($$;$) { - my ($fh, $desc, $allowsigned) = @_; - our $dpkgcontrolhash_noissigned; - my $c; - for (;;) { - my %opts = ('name' => $desc); - $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned; - $c = Dpkg::Control::Hash->new(%opts); - $c->parse($fh,$desc) or die "parsing of $desc failed"; - last if $allowsigned; - last if $dpkgcontrolhash_noissigned; - my $issigned= $c->get_option('is_pgp_signed'); - if (!defined $issigned) { - $dpkgcontrolhash_noissigned= 1; - seek $fh, 0,0 or die "seek $desc: $!"; - } elsif ($issigned) { - fail "control file $desc is (already) PGP-signed. ". - " Note that dgit push needs to modify the .dsc and then". 
- " do the signature itself"; - } else { - last; - } - } - return $c; -} - -sub parsecontrol { - my ($file, $desc, $allowsigned) = @_; - my $fh = new IO::Handle; - open $fh, '<', $file or die "$file: $!"; - my $c = parsecontrolfh($fh,$desc,$allowsigned); - $fh->error and die $!; - close $fh; - return $c; -} - -sub getfield ($$) { - my ($dctrl,$field) = @_; - my $v = $dctrl->{$field}; - return $v if defined $v; - fail "missing field $field in ".$dctrl->get_option('name'); -} - -sub parsechangelog { - my $c = Dpkg::Control::Hash->new(name => 'parsed changelog'); - my $p = new IO::Handle; - my @cmd = (qw(dpkg-parsechangelog), @_); - open $p, '-|', @cmd or die $!; - $c->parse($p); - $?=0; $!=0; close $p or failedcmd @cmd; - return $c; -} - sub commit_getclogp ($) { # Returns the parsed changelog hashref for a particular commit my ($objid) = @_; @@ -1035,7 +1076,7 @@ our %rmad; sub archive_query ($;@) { my ($method) = shift @_; - fail "this operation does not support multiple comma-separated suites" + fail __ "this operation does not support multiple comma-separated suites" if $isuite =~ m/,/; my $query = access_cfg('archive-query','RETURN-UNDEF'); $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'"; @@ -1081,8 +1122,9 @@ sub archive_api_query_cmd ($) { fail "for $url: stat $key: $!" unless $!==ENOENT; next; } - fail "config requested specific TLS key but do not know". - " how to get curl to use exactly that EE key ($key)"; + fail f_ "config requested specific TLS key but do not know". + " how to get curl to use exactly that EE key (%s)", + $key; # push @cmd, "--cacert", $key, "--capath", "/dev/enoent"; # # Sadly the above line does not work because of changes # # to gnutls. The real fix for #790093 may involve @@ -1101,7 +1143,7 @@ sub archive_api_query_cmd ($) { sub api_query ($$;$) { use JSON; my ($data, $subpath, $ok404) = @_; - badcfg "ftpmasterapi archive query method takes no data part" + badcfg __ "ftpmasterapi archive query method takes no data part" if length $data; my @cmd = archive_api_query_cmd($subpath); my $url = $cmd[$#cmd]; @@ -1109,11 +1151,11 @@ sub api_query ($$;$) { my $json = cmdoutput @cmd; unless ($json =~ s/\d+\d+\d$//) { failedcmd_report_cmd undef, @cmd; - fail "curl failed to print 3-digit HTTP code"; + fail __ "curl failed to print 3-digit HTTP code"; } my $code = $&; return undef if $code eq '404' && $ok404; - fail "fetch of $url gave HTTP code $code" + fail f_ "fetch of %s gave HTTP code %s", $url, $code unless $url =~ m#^file://# or $code =~ m/^2/; return decode_json($json); } @@ -1129,15 +1171,17 @@ sub canonicalise_suite_ftpmasterapi { } qw(codename name); push @matched, $entry; } - fail "unknown suite $isuite" unless @matched; + fail f_ "unknown suite %s, maybe -d would help", $isuite + unless @matched; my $cn; eval { - @matched==1 or die "multiple matches for suite $isuite\n"; + @matched==1 or die f_ "multiple matches for suite %s\n", $isuite; $cn = "$matched[0]{codename}"; - defined $cn or die "suite $isuite info has no codename\n"; - $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n"; + defined $cn or die f_ "suite %s info has no codename\n", $isuite; + $cn =~ m/^$suite_re$/ + or die f_ "suite %s maps to bad codename\n", $isuite; }; - die "bad ftpmaster api response: $@\n".Dumper(\@matched) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper(\@matched) if length $@; return $cn; } @@ -1151,18 +1195,18 @@ sub archive_query_ftpmasterapi { eval { my $vsn = "$entry->{version}"; my ($ok,$msg) = version_check $vsn; - die 
"bad version: $msg\n" unless $ok; + die f_ "bad version: %s\n", $msg unless $ok; my $component = "$entry->{component}"; - $component =~ m/^$component_re$/ or die "bad component"; + $component =~ m/^$component_re$/ or die __ "bad component"; my $filename = "$entry->{filename}"; $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]# - or die "bad filename"; + or die __ "bad filename"; my $sha256sum = "$entry->{sha256sum}"; - $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum"; + $sha256sum =~ m/^[0-9a-f]+$/ or die __ "bad sha256sum"; push @rows, [ $vsn, "/pool/$component/$filename", $digester, $sha256sum ]; }; - die "bad ftpmaster api response: $@\n".Dumper($entry) + die +(__ "bad ftpmaster api response: ")."$@\n".Dumper($entry) if length $@; } @rows = sort { -version_compare($a->[0],$b->[0]) } @rows; @@ -1178,6 +1222,12 @@ sub file_in_archive_ftpmasterapi { my $info = api_query($data, "file_in_archive/$pat", 1); } +sub package_not_wholly_new_ftpmasterapi { + my ($proto,$data,$pkg) = @_; + my $info = api_query($data,"madison?package=${pkg}&f=json"); + return !!@$info; +} + #---------- `aptget' archive query method ---------- our $aptget_base; @@ -1195,15 +1245,15 @@ sub aptget_cache_clean { sub aptget_lock_acquire () { my $lockfile = "$aptget_base/lock"; - open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!"; - flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!"; + open APTGET_LOCK, '>', $lockfile or confess "open $lockfile: $!"; + flock APTGET_LOCK, LOCK_EX or confess "lock $lockfile: $!"; } sub aptget_prep ($) { my ($data) = @_; return if defined $aptget_base; - badcfg "aptget archive query method takes no data part" + badcfg __ "aptget archive query method takes no data part" if length $data; my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache"; @@ -1218,7 +1268,7 @@ sub aptget_prep ($) { ensuredir $aptget_base; my $quoted_base = $aptget_base; - die "$quoted_base contains bad chars, cannot continue" + confess "$quoted_base contains bad chars, cannot continue" if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/ ensuredir $aptget_base; @@ -1234,7 +1284,7 @@ sub aptget_prep ($) { cfg_apply_map(\$aptsuites, 'suite map', access_cfg('aptget-suite-map', 'RETURN-UNDEF')); - open SRCS, ">", "$aptget_base/$sourceslist" or die $!; + open SRCS, ">", "$aptget_base/$sourceslist" or confess $!; printf SRCS "deb-src %s %s %s\n", access_cfg('mirror'), $aptsuites, @@ -1285,7 +1335,14 @@ END } my @inreleasefiles = grep { m#/InRelease$# } @releasefiles; @releasefiles = @inreleasefiles if @inreleasefiles; - die "apt updated wrong number of Release files (@releasefiles), erk" + if (!@releasefiles) { + fail f_ <{$name}; if (defined $val) { printdebug "release file $name: $val\n"; - $val =~ m/^$suite_re$/o or fail - "Release file ($aptget_releasefile) specifies intolerable $name"; + $val =~ m/^$suite_re$/o or fail f_ + "Release file (%s) specifies intolerable %s", + $aptget_releasefile, $name; cfg_apply_map(\$val, 'suite rmap', access_cfg('aptget-suite-rmap', 'RETURN-UNDEF')); return $val @@ -1329,8 +1387,9 @@ sub archive_query_aptget { aptget_aptget(), qw(--download-only --only-source source), $package; my @dscs = <$aptget_base/source/*.dsc>; - fail "apt-get source did not produce a .dsc" unless @dscs; - fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1; + fail __ "apt-get source did not produce a .dsc" unless @dscs; + fail f_ "apt-get source produced several .dscs (%s)", "@dscs" + unless @dscs==1; my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1; @@ -1341,34 
+1400,56 @@ sub archive_query_aptget { } sub file_in_archive_aptget () { return undef; } +sub package_not_wholly_new_aptget () { return undef; } #---------- `dummyapicat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; } sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; } -sub file_in_archive_dummycatapi ($$$) { - my ($proto,$data,$filename) = @_; +sub dummycatapi_run_in_mirror ($@) { + # runs $fn with FIA open onto rune + my ($rune, $argl, $fn) = @_; + my $mirror = access_cfg('mirror'); $mirror =~ s#^file://#/# or die "$mirror ?"; - my @out; - my @cmd = (qw(sh -ec), ' - cd "$1" - find -name "$2" -print0 | - xargs -0r sha256sum - ', qw(x), $mirror, $filename); + my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune, + qw(x), $mirror, @$argl); debugcmd "-|", @cmd; open FIA, "-|", @cmd or die $!; - while () { - chomp or die; - printdebug "| $_\n"; - m/^(\w+) (\S+)$/ or die "$_ ?"; - push @out, { sha256sum => $1, filename => $2 }; - } - close FIA or die failedcmd @cmd; + my $r = $fn->(); + close FIA or ($!==0 && $?==141) or die failedcmd @cmd; + return $r; +} + +sub file_in_archive_dummycatapi ($$$) { + my ($proto,$data,$filename) = @_; + my @out; + dummycatapi_run_in_mirror ' + find -name "$1" -print0 | + xargs -0r sha256sum + ', [$filename], sub { + while () { + chomp or die; + printdebug "| $_\n"; + m/^(\w+) (\S+)$/ or die "$_ ?"; + push @out, { sha256sum => $1, filename => $2 }; + } + }; return \@out; } +sub package_not_wholly_new_dummycatapi { + my ($proto,$data,$pkg) = @_; + dummycatapi_run_in_mirror " + find -name ${pkg}_*.dsc + ", [], sub { + local $/ = undef; + !!; + }; +} + #---------- `madison' archive query method ---------- sub archive_query_madison { @@ -1411,16 +1492,19 @@ sub madison_get_parse { sub canonicalise_suite_madison { # madison canonicalises for us my @r = madison_get_parse(@_); - @r or fail - "unable to canonicalise suite using package $package". - " which does not appear to exist in suite $isuite;". - " --existing-package may help"; + @r or fail f_ + "unable to canonicalise suite using package %s". + " which does not appear to exist in suite %s;". + " --existing-package may help", + $package, $isuite; return $r[0][2]; } sub file_in_archive_madison { return undef; } +sub package_not_wholly_new_madison { return undef; } #---------- `sshpsql' archive query method ---------- +# (untranslated, because this is obsolete) sub sshpsql ($$$) { my ($data,$runeinfo,$sql) = @_; @@ -1496,8 +1580,10 @@ END } sub file_in_archive_sshpsql ($$$) { return undef; } +sub package_not_wholly_new_sshpsql ($$$) { return undef; } #---------- `dummycat' archive query method ---------- +# (untranslated, because this is for testing purposes etc.) 
sub canonicalise_suite_dummycat ($$) { my ($proto,$data) = @_; @@ -1540,8 +1626,10 @@ sub archive_query_dummycat ($$) { } sub file_in_archive_dummycat () { return undef; } +sub package_not_wholly_new_dummycat () { return undef; } #---------- tag format handling ---------- +# (untranslated, because everything should be new tag format by now) sub access_cfg_tagformats () { split /\,/, access_cfg('dgit-tag-format'); @@ -1596,12 +1684,12 @@ sub select_tagformat () { sub canonicalise_suite () { return if defined $csuite; - fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED'; + fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED'; $csuite = archive_query('canonicalise_suite'); if ($isuite ne $csuite) { - progress "canonical suite name for $isuite is $csuite"; + progress f_ "canonical suite name for %s is %s", $isuite, $csuite; } else { - progress "canonical suite name is $csuite"; + progress f_ "canonical suite name is %s", $csuite; } } @@ -1621,13 +1709,13 @@ sub get_archive_dsc () { $digester->add($dscdata); my $got = $digester->hexdigest(); $got eq $digest or - fail "$dscurl has hash $got but". - " archive told us to expect $digest"; + fail f_ "%s has hash %s but archive told us to expect %s", + $dscurl, $got, $digest; } parse_dscdata(); my $fmt = getfield $dsc, 'Format'; $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)], - "unsupported source format $fmt, sorry"; + f_ "unsupported source format %s, sorry", $fmt; $dsc_checked = !!$digester; printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n"; @@ -1654,7 +1742,8 @@ sub check_for_git () { # NB that if we are pushing, $usedistro will be $distro/push $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert"); $instead_distro =~ s{^/}{ access_basedistro()."/" }e; - progress "diverting to $divert (using config for $instead_distro)"; + progress f_ "diverting to %s (using config for %s)", + $divert, $instead_distro; return check_for_git(); } failedcmd @cmd unless defined $r and $r =~ m/^[01]$/; @@ -1670,7 +1759,7 @@ sub check_for_git () { # curl -sS -I with https_proxy prints # HTTP/1.0 200 Connection established $result =~ m/^\S+ (404|200) /s or - fail "unexpected results from git check query - ". + fail +(__ "unexpected results from git check query - "). Dumper($prefix, $result); my $code = $1; if ($code eq '404') { @@ -1685,7 +1774,7 @@ sub check_for_git () { } elsif ($how eq 'false') { return 0; } else { - badcfg "unknown git-check \`$how'"; + badcfg f_ "unknown git-check \`%s'", $how; } } @@ -1700,7 +1789,7 @@ sub create_remote_git_repo () { } elsif ($how eq 'true') { # nothing to do } else { - badcfg "unknown git-create \`$how'"; + badcfg f_ "unknown git-create \`%s'", $how; } } @@ -1737,8 +1826,8 @@ sub remove_stray_gits ($) { local $/="\0"; while () { chomp or die; - print STDERR "$us: warning: removing from $what: ", - (messagequote $_), "\n"; + print STDERR f_ "%s: warning: removing from %s: %s\n", + $us, $what, (messagequote $_); rmtree $_; } } @@ -1750,7 +1839,7 @@ sub mktree_in_ud_from_only_subdir ($;$) { # changes into the subdir my (@dirs) = <*/.>; - die "expected one subdir but found @dirs ?" unless @dirs==1; + confess "expected one subdir but found @dirs ?" 
unless @dirs==1; $dirs[0] =~ m#^([^/]+)/\.$# or die; my $dir = $1; changedir $dir; @@ -1783,7 +1872,7 @@ sub dsc_files_info () { foreach (split /\n/, $field) { next unless m/\S/; m/^(\w+) (\d+) (\S+)$/ or - fail "could not parse .dsc $fname line \`$_'"; + fail f_ "could not parse .dsc %s line \`%s'", $fname, $_; my $digester = eval "$module"."->$method;" or die $@; push @out, { Hash => $1, @@ -1794,8 +1883,8 @@ sub dsc_files_info () { } return @out; } - fail "missing any supported Checksums-* or Files field in ". - $dsc->get_option('name'); + fail f_ "missing any supported Checksums-* or Files field in %s", + $dsc->get_option('name'); } sub dsc_files () { @@ -1839,8 +1928,9 @@ sub files_compare_inputs (@) { if (defined $$re) { $fchecked{$f}{$in_name} = 1; $$re eq $info or - fail "hash or size of $f varies in $fname fields". - " (between: ".$showinputs->().")"; + fail f_ + "hash or size of %s varies in %s fields (between: %s)", + $f, $fname, $showinputs->(); } else { $$re = $info; } @@ -1848,17 +1938,18 @@ sub files_compare_inputs (@) { @files = sort @files; $expected_files //= \@files; "@$expected_files" eq "@files" or - fail "file list in $in_name varies between hash fields!"; + fail f_ "file list in %s varies between hash fields!", + $in_name; } $expected_files or - fail "$in_name has no files list field(s)"; + fail f_ "%s has no files list field(s)", $in_name; } printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record) if $debuglevel>=2; grep { keys %$_ == @$inputs-1 } values %fchecked - or fail "no file appears in all file lists". - " (looked in: ".$showinputs->().")"; + or fail f_ "no file appears in all file lists (looked in: %s)", + $showinputs->(); } sub is_orig_file_in_dsc ($$) { @@ -1870,13 +1961,6 @@ sub is_orig_file_in_dsc ($$) { return 1; } -sub is_orig_file_of_vsn ($$) { - my ($f, $upstreamvsn) = @_; - my $base = srcfn $upstreamvsn, ''; - return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/; - return 1; -} - # This function determines whether a .changes file is source-only from # the point of view of dak. Thus, it permits *_source.buildinfo # files. @@ -1904,7 +1988,7 @@ sub test_source_only_changes ($) { $l =~ m/\S+$/ or next; # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) { - print "purportedly source-only changes polluted by $&\n"; + print f_ "purportedly source-only changes polluted by %s\n", $&; return 0; } } @@ -1917,7 +2001,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "checking origs needed ($upstreamvsn)...\n"; $_ = getfield $changes, 'Files'; m/^\w+ \d+ (\S+ \S+) \S+$/m or - fail "cannot find section/priority from .changes Files field"; + fail __ "cannot find section/priority from .changes Files field"; my $placementinfo = $1; my %changed; printdebug "checking origs needed placement '$placementinfo'...\n"; @@ -1929,7 +2013,7 @@ sub changes_update_origs_from_dsc ($$$$) { printdebug "origs $file is_orig\n"; my $have = archive_query('file_in_archive', $file); if (!defined $have) { - print STDERR <{$fname}; next unless defined; m/^(\w+) .* \Q$file\E$/m or - fail ".dsc $fname missing entry for $file"; + fail f_ ".dsc %s missing entry for %s", $fname, $file; if ($h->{$archivefield} eq $1) { $same++; } else { - push @differ, - "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)"; + push @differ, f_ + "%s: %s (archive) != %s (local .dsc)", + $archivefield, $h->{$archivefield}, $1; } } - die "$file ".Dumper($h)." ?!" 
if $same && @differ; + confess "$file ".Dumper($h)." ?!" if $same && @differ; $found_same++ if $same; - push @found_differ, "archive $h->{filename}: ".join "; ", @differ + push @found_differ, + f_ "archive %s: %s", $h->{filename}, join "; ", @differ if @differ; } printdebug "origs $file f.same=$found_same". " #f._differ=$#found_differ\n"; if (@found_differ && !$found_same) { fail join "\n", - "archive contains $file with different checksum", + (f_ "archive contains %s with different checksum", $file), @found_differ; } # Now we edit the changes file to add or remove it @@ -1974,15 +2060,15 @@ END if ($found_same) { # in archive, delete from .changes if it's there $changed{$file} = "removed" if - $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m; - } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) { + $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m; + } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) { # not in archive, but it's here in the .changes } else { my $dsc_data = getfield $dsc, $fname; - $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?"; + $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?"; my $extra = $1; $extra =~ s/ \d+ /$&$placementinfo / - or die "$fname $extra >$dsc_data< ?" + or confess "$fname $extra >$dsc_data< ?" if $fname eq 'Files'; $changes->{$fname} .= "\n". $extra; $changed{$file} = "added"; @@ -1991,7 +2077,7 @@ END } if (%changed) { foreach my $file (keys %changed) { - progress sprintf + progress f_ "edited .changes for archive .orig contents: %s %s", $changed{$file}, $file; } @@ -2000,10 +2086,11 @@ END if (act_local()) { rename $chtmp,$changesfile or die "$changesfile $!"; } else { - progress "[new .changes left in $changesfile]"; + progress f_ "[new .changes left in %s]", $changesfile; } } else { - progress "$changesfile already has appropriate .orig(s) (if any)"; + progress f_ "%s already has appropriate .orig(s) (if any)", + $changesfile; } } @@ -2012,28 +2099,6 @@ sub make_commit ($) { return cmdoutput @git, qw(hash-object -w -t commit), $file; } -sub make_commit_text ($) { - my ($text) = @_; - my ($out, $in); - my @cmd = (@git, qw(hash-object -w -t commit --stdin)); - debugcmd "|",@cmd; - print Dumper($text) if $debuglevel > 1; - my $child = open2($out, $in, @cmd) or die $!; - my $h; - eval { - print $in $text or die $!; - close $in or die $!; - $h = <$out>; - $h =~ m/^\w+$/ or die; - $h = $&; - printdebug "=> $h\n"; - }; - close $out; - waitpid $child, 0 == $child or die "$child $!"; - $? and failedcmd @cmd; - return $h; -} - sub clogp_authline ($) { my ($clogp) = @_; my $author = getfield $clogp, 'Maintainer'; @@ -2048,8 +2113,9 @@ sub clogp_authline ($) { my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date'); my $authline = "$author $date"; $authline =~ m/$git_authline_re/o or - fail "unexpected commit author line format \`$authline'". - " (was generated from changelog Maintainer field)"; + fail f_ "unexpected commit author line format \`%s'". + " (was generated from changelog Maintainer field)", + $authline; return ($1,$2,$3) if wantarray; return $authline; } @@ -2062,14 +2128,14 @@ sub vendor_patches_distro ($$) { printdebug "checking for vendor-specific $series ($what)\n"; if (!open SERIES, "<", $series) { - die "$series $!" unless $!==ENOENT; + confess "$series $!" 
unless $!==ENOENT; return; } while () { next unless m/\S/; next if m/^\s+\#/; - print STDERR <error; close SERIES; @@ -2112,11 +2179,11 @@ sub check_for_vendor_patches () { use Dpkg::Vendor; vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR"); vendor_patches_distro(Dpkg::Vendor::get_current_vendor(), - "Dpkg::Vendor \`current vendor'"); + __ "Dpkg::Vendor \`current vendor'"); vendor_patches_distro(access_basedistro(), - "(base) distro being accessed"); + __ "(base) distro being accessed"); vendor_patches_distro(access_nomdistro(), - "(nominal) distro being accessed"); + __ "(nominal) distro being accessed"); } sub generate_commits_from_dsc () { @@ -2129,7 +2196,7 @@ sub generate_commits_from_dsc () { foreach my $fi (@dfi) { my $f = $fi->{Filename}; die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#; - my $upper_f = "$maindir/../$f"; + my $upper_f = (bpd_abs()."/$f"); printdebug "considering reusing $f: "; @@ -2137,12 +2204,12 @@ sub generate_commits_from_dsc () { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing ../$f,fetch: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!; } elsif (link_ltarget $upper_f, $f) { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { - fail "accessing ../$f: $!"; + fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!; } else { printdebug "absent.\n"; } @@ -2157,14 +2224,14 @@ sub generate_commits_from_dsc () { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving ../$f: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f", $!; } elsif (!$refetched) { printdebug "no need.\n"; } elsif (link $f, "$upper_f,fetch") { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != EEXIST) { - fail "saving ../$f,fetch: $!"; + fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $!; } else { printdebug "cannot.\n"; } @@ -2230,7 +2297,7 @@ sub generate_commits_from_dsc () { chdir "_unpack-tar" or die $!; open STDIN, "<&", $input or die $!; exec @tarcmd; - die "dgit (child): exec $tarcmd[0]: $!"; + die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!; } $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!; !$? or failedcmd @tarcmd; @@ -2294,7 +2361,7 @@ sub generate_commits_from_dsc () { push @cmd, qw(-x --), $dscfn; runcmd @cmd; - my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); + my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package"); if (madformat $dsc->{format}) { check_for_vendor_patches(); } @@ -2308,22 +2375,14 @@ sub generate_commits_from_dsc () { } my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all); - debugcmd "|",@clogcmd; - open CLOGS, "-|", @clogcmd or die $!; - my $clogp; my $r1clogp; printdebug "import clog search...\n"; + parsechangelog_loop \@clogcmd, (__ "package changelog"), sub { + my ($thisstanza, $desc) = @_; + no warnings qw(exiting); - for (;;) { - my $stanzatext = do { local $/=""; ; }; - printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1; - last if !defined $stanzatext; - - my $desc = "package changelog, entry no.$."; - open my $stanzafh, "<", \$stanzatext or die; - my $thisstanza = parsecontrolfh $stanzafh, $desc, 1; $clogp //= $thisstanza; printdebug "import clog $thisstanza->{version} $desc...\n"; @@ -2349,7 +2408,7 @@ sub generate_commits_from_dsc () { # version). Then it remains to choose between the physically # last entry in the file, and the one with the lowest version # number. 
If these are not the same, we guess that the - # versions were created in a non-monotic order rather than + # versions were created in a non-monotonic order rather than # that the changelog entries have been misordered. printdebug "import clog $thisstanza->{version} vs $upstreamv...\n"; @@ -2358,11 +2417,9 @@ sub generate_commits_from_dsc () { $r1clogp = $thisstanza; printdebug "import clog $r1clogp->{version} becomes r1\n"; - } - die $! if CLOGS->error; - close CLOGS or $?==SIGPIPE or failedcmd @clogcmd; + }; - $clogp or fail "package changelog has no entries!"; + $clogp or fail __ "package changelog has no entries!"; my $authline = clogp_authline $clogp; my $changes = getfield $clogp, 'Changes'; @@ -2381,12 +2438,13 @@ sub generate_commits_from_dsc () { foreach my $tt (@tartrees) { printdebug "import tartree $tt->{F} $tt->{Tree}\n"; + my $mbody = f_ "Import %s", $tt->{F}; $tt->{Commit} = make_commit_text($tt->{Orig} ? <{Tree} author $r1authline committer $r1authline -Import $tt->{F} +$mbody [dgit import orig $tt->{F}] END_O @@ -2394,7 +2452,7 @@ tree $tt->{Tree} author $authline committer $authline -Import $tt->{F} +$mbody [dgit import tarball $package $cversion $tt->{F}] END_T @@ -2461,7 +2519,7 @@ END chomp $@; progress "warning: $@"; $path = "$absurdity:$path"; - progress "$us: trying slow absurd-git-apply..."; + progress f_ "%s: trying slow absurd-git-apply...", $us; rename "../../gbp-pq-output","../../gbp-pq-output.0" or $!==ENOENT or die $!; @@ -2480,19 +2538,19 @@ END 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd; debugcmd "+",@realcmd; if (system @realcmd) { - die +(shellquote @showcmd). - " failed: ". - failedcmd_waitstatus()."\n"; + die f_ "%s failed: %s\n", + +(shellquote @showcmd), + failedcmd_waitstatus(); } my $gapplied = git_rev_parse('HEAD'); my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:); $gappliedtree eq $dappliedtree or - fail < $rawimport_hash, - Info => "Import of source package", + Info => __ "Import of source package", }; my @output = ($rawimport_mergeinput); @@ -2519,16 +2577,18 @@ END version_compare($oversion, $cversion); if ($vcmp < 0) { @output = ($rawimport_mergeinput, $lastpush_mergeinput, - { Message => < 1 }); -Record $package ($cversion) in archive suite $csuite + { ReverseParents => 1, + Message => (f_ < 0) { - print STDERR <()) { - progress "using existing $f"; + progress f_ "using existing %s", $f; return 1; } if (!$refetched) { - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(perhaps you should delete this file?)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". + " (perhaps you should delete this file?)", + $f, $got, $fi->{Hash}; } - progress "need to fetch correct version of $f"; + progress f_ "need to fetch correct version of %s", $f; unlink $tf or die "$tf $!"; $$refetched = 1; } else { @@ -2589,9 +2649,9 @@ sub complete_file_from_dsc ($$;$) { return 0 if !act_local(); $checkhash->() or - fail "file $f has hash $got but .dsc". - " demands hash $fi->{Hash} ". - "(got wrong file from archive!)"; + fail f_ "file %s has hash %s but .dsc demands hash %s". 
+ " (got wrong file from archive!)", + $f, $got, $fi->{Hash}; return 1; } @@ -2601,7 +2661,7 @@ sub ensure_we_have_orig () { foreach my $fi (@dfi) { my $f = $fi->{Filename}; next unless is_orig_file_in_dsc($f, \@dfi); - complete_file_from_dsc('..', $fi) + complete_file_from_dsc($buildproductsdir, $fi) or next; } } @@ -2684,7 +2744,7 @@ sub git_lrfetch_sane { for (;;) { printdebug "git_lrfetch_sane iteration $fetch_iteration\n"; if (++$fetch_iteration > 10) { - fail "too many iterations trying to get sane fetch!"; + fail __ "too many iterations trying to get sane fetch!"; } my @look = map { "refs/$_" } @specs; @@ -2698,8 +2758,8 @@ sub git_lrfetch_sane { m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?"; my ($objid,$rrefname) = ($1,$2); if (!$wanted_rref->($rrefname)) { - print STDERR <(), and returns undef # in parent, returns canonical suite name for $tsuite my $canonsuitefh = IO::File::new_tmpfile; @@ -3511,7 +3574,7 @@ sub multisuite_suite_child ($$$) { return $csuite; } printdebug "multisuite $tsuite ok (canon=$csuite)\n"; - push @$merginputs, { + push @$mergeinputs, { Ref => lrref, Info => $csuite, }; @@ -3552,10 +3615,9 @@ sub fork_for_multisuite ($) { my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs, sub { @end = (); - fetch(); + fetch_one(); finish 0; }); - # xxx collecte the ref here $csubsuite =~ s/^\Q$cbasesuite\E-/-/; push @csuites, $csubsuite; @@ -3710,19 +3772,37 @@ sub clone ($) { clone_finish($dstdir); } -sub fetch () { +sub fetch_one () { canonicalise_suite(); if (check_for_git()) { git_fetch_us(); } fetch_from_archive() or no_such_package(); + + my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'}; + if (length $vcsgiturl and + (grep { $csuite eq $_ } + split /\;/, + cfg 'dgit.vcs-git.suites')) { + my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF'; + if (defined $current && $current ne $vcsgiturl) { + print STDERR < message fragment "$saved" describing disposition of $dgitview - return "commit id $dgitview" unless defined $split_brain_save; + my $save = $internal_object_save{'dgit-view'}; + return "commit id $dgitview" unless defined $save; my @cmd = (shell_cmd 'cd "$1"; shift', $maindir, - @git, qw(update-ref -m), + git_update_ref_cmd "dgit --dgit-view-save $msg HEAD=$headref", - $split_brain_save, $dgitview); + $save, $dgitview); runcmd @cmd; - return "and left in $split_brain_save"; + return "and left in $save"; } # An "infopair" is a tuple [ $thing, $what ] @@ -3994,10 +4075,12 @@ sub splitbrain_pseudomerge ($$$$) { infopair_cond_ff($i_dep14, [ $maintview, 'HEAD' ]); 1; }) { + $@ =~ s/^\n//; chomp $@; print STDERR <[0] into your HEAD."; return $r; @@ -4264,6 +4347,15 @@ END my $actualhead = git_rev_parse('HEAD'); if (branch_is_gdr_unstitched_ff($symref, $actualhead, $archive_hash)) { + if (quiltmode_splitbrain()) { + my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $actualhead); + fail <{Files} =~ m{\.deb$}m; + my $sourceonlypolicy = access_cfg 'source-only-uploads'; + if ($sourceonlypolicy eq 'ok') { + } elsif ($sourceonlypolicy eq 'always') { + forceable_fail [qw(uploading-binaries)], + "uploading binaries, although distroy policy is source only" + if $hasdebs; + } elsif ($sourceonlypolicy eq 'never') { + forceable_fail [qw(uploading-source-only)], + "source-only upload, although distroy policy requires .debs" + if !$hasdebs; + } elsif ($sourceonlypolicy eq 'not-wholly-new') { + forceable_fail [qw(uploading-source-only)], + "source-only upload, even though package is entirely NEW\n". 
+ "(this is contrary to policy in ".(access_nomdistro()).")" + if !$hasdebs + && $new_package + && !(archive_query('package_not_wholly_new', $package) // 1); + } else { + badcfg "unknown source-only-uploads policy \`$sourceonlypolicy'"; + } + # Perhaps adjust .dsc to contain right set of origs changes_update_origs_from_dsc($dsc, $changes, $upstreamversion, $changesfile) @@ -4405,7 +4521,8 @@ END responder_send_command("param isuite $isuite"); responder_send_command("param tagformat $tagformat"); if (defined $maintviewhead) { - die unless ($protovsn//4) >= 4; + confess "internal error (protovsn=$protovsn)" + if defined $protovsn and $protovsn < 4; responder_send_command("param maint-view $maintviewhead"); } @@ -4475,14 +4592,13 @@ END runcmd_ordryrun @git, qw(-c push.followTags=false push), access_giturl(), @pushrefs; - runcmd_ordryrun @git, qw(update-ref -m), 'dgit push', lrref(), $dgithead; + runcmd_ordryrun git_update_ref_cmd 'dgit push', lrref(), $dgithead; supplementary_message(<<'END'); Push failed, while obtaining signatures on the .changes and .dsc. If it was just that the signature failed, you may try again by using -debsign by hand to sign the changes - $changesfile -and then dput to complete the upload. +debsign by hand to sign the changes file (see the command dgit tried, +above), and then dput that changes file to complete the upload. If you need to change the package, you must use a new version number. END if ($we_are_responder) { @@ -4575,11 +4691,15 @@ sub branchsuite () { } } -sub fetchpullargs () { +sub package_from_d_control () { if (!defined $package) { my $sourcep = parsecontrol('debian/control','debian/control'); $package = getfield $sourcep, 'Source'; } +} + +sub fetchpullargs () { + package_from_d_control(); if (@ARGV==0) { $isuite = branchsuite(); if (!$isuite) { @@ -4598,9 +4718,7 @@ sub fetchpullargs () { sub cmd_fetch { parseopts(); fetchpullargs(); - my $multi_fetched = fork_for_multisuite(sub { }); - finish 0 if $multi_fetched; - fetch(); + dofetch(); } sub cmd_pull { @@ -4615,6 +4733,86 @@ END pull(); } +sub cmd_checkout { + parseopts(); + package_from_d_control(); + @ARGV==1 or badusage "dgit checkout needs a suite argument"; + ($isuite) = @ARGV; + notpushing(); + + foreach my $canon (qw(0 1)) { + if (!$canon) { + $csuite= $isuite; + } else { + undef $csuite; + canonicalise_suite(); + } + if (length git_get_ref lref()) { + # local branch already exists, yay + last; + } + if (!length git_get_ref lrref()) { + if (!$canon) { + # nope + next; + } + dofetch(); + } + # now lrref exists + runcmd (@git, qw(update-ref), lref(), lrref(), ''); + last; + } + local $ENV{GIT_REFLOG_ACTION} = git_reflog_action_msg + "dgit checkout $isuite"; + runcmd (@git, qw(checkout), lbranch()); +} + +sub cmd_update_vcs_git () { + my $specsuite; + if (@ARGV==0 || $ARGV[0] =~ m/^-/) { + ($specsuite,) = split /\;/, cfg 'dgit.vcs-git.suites'; + } else { + ($specsuite) = (@ARGV); + shift @ARGV; + } + my $dofetch=1; + if (@ARGV) { + if ($ARGV[0] eq '-') { + $dofetch = 0; + } elsif ($ARGV[0] eq '-') { + shift; + } + } + + package_from_d_control(); + my $ctrl; + if ($specsuite eq '.') { + $ctrl = parsecontrol 'debian/control', 'debian/control'; + } else { + $isuite = $specsuite; + get_archive_dsc(); + $ctrl = $dsc; + } + my $url = getfield $ctrl, 'Vcs-Git'; + + my @cmd; + my $orgurl = cfg 'remote.vcs-git.url', 'RETURN-UNDEF'; + if (!defined $orgurl) { + print STDERR "setting up vcs-git: $url\n"; + @cmd = (@git, qw(remote add vcs-git), $url); + } elsif ($orgurl eq $url) { + print STDERR "vcs git 
already configured: $url\n"; + } else { + print STDERR "changing vcs-git url to: $url\n"; + @cmd = (@git, qw(remote set-url vcs-git), $url); + } + runcmd_ordryrun_local @cmd; + if ($dofetch) { + print "fetching (@ARGV)\n"; + runcmd_ordryrun_local @git, qw(fetch vcs-git), @ARGV; + } +} + sub prep_push () { parseopts(); build_or_push_prep_early(); @@ -4646,21 +4844,6 @@ sub cmd_push { dopush(); } -sub cmd_push_source { - prep_push(); - if ($changesfile) { - my $changes = parsecontrol("$buildproductsdir/$changesfile", - "source changes file"); - unless (test_source_only_changes($changes)) { - fail "user-specified changes file is not source-only"; - } - } else { - # Building a source package is very fast, so just do it - build_source_for_push(); - } - dopush(); -} - #---------- remote commands' implementation ---------- sub pre_remote_push_build_host { @@ -5154,7 +5337,7 @@ END print SERIES "\n" or die $! unless $newline eq "\n"; print SERIES "auto-gitignore\n" or die $!; close SERIES or die $!; - runcmd @git, qw(add -- debian/patches/series), $gipatch; + runcmd @git, qw(add -f -- debian/patches/series), $gipatch; commit_admin <>' - or die $!; - - my $oldcache = git_get_ref "refs/$splitbraincache"; - if ($oldcache eq $dgitview) { - my $tree = cmdoutput qw(git rev-parse), "$dgitview:"; - # git update-ref doesn't always update, in this case. *sigh* - my $dummy = make_commit_text < 1000000000 +0000 -committer Dgit 1000000000 +0000 - -Dummy commit - do not use -END - runcmd @git, qw(update-ref -m), "dgit $our_version - dummy", - "refs/$splitbraincache", $dummy; - } - runcmd @git, qw(update-ref -m), $cachekey, "refs/$splitbraincache", - $dgitview; + reflog_cache_insert "refs/$splitbraincache", $cachekey, $dgitview; changedir "$playground/work"; @@ -5327,12 +5488,20 @@ sub quiltify ($$$$) { }; if ($quilt_mode eq 'linear') { print STDERR "\n$us: error: quilt fixup cannot be linear. Stopped at:\n"; + my $all_gdr = !!@nots; foreach my $notp (@nots) { print STDERR "$us: ", $reportnot->($notp), "\n"; + $all_gdr &&= $notp->{Child} && + (git_cat_file $notp->{Child}{Commit}, 'commit') + =~ m{^\[git-debrebase(?! split[: ]).*\]$}m; } - print STDERR "$us: $_\n" foreach @$failsuggestion; - fail "quilt fixup naive history linearisation failed.\n". - "Use dpkg-source --commit by hand; or, --quilt=smash for one ugly patch"; + print STDERR "\n"; + $failsuggestion = + [ grep { $_->[0] ne 'quilt-mode' } @$failsuggestion ] + if $all_gdr; + print STDERR "$us: $_->[1]\n" foreach @$failsuggestion; + fail + "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n"; } elsif ($quilt_mode eq 'smash') { } elsif ($quilt_mode eq 'auto') { progress "quilt fixup cannot be linear, smashing..."; @@ -5486,7 +5655,7 @@ END if ($quilt_mode eq 'linear' && !$fopts->{'single-debian-patch'} - && branch_is_gdr($symref, $headref)) { + && branch_is_gdr($headref)) { # This is much faster. It also makes patches that gdr # likes better for future updates without laundering. # @@ -5501,8 +5670,9 @@ END make-patches --quiet-would-amend)); # We tolerate soe snags that gdr wouldn't, by default. 
if (act_local()) { + debugcmd "+",@cmd; $!=0; $?=-1; - failedcmd @cmd if system @cmd and $?!=7; + failedcmd @cmd if system @cmd and $?!=7*256; } else { dryrun_report @cmd; } @@ -5520,14 +5690,12 @@ END quilt_fixup_multipatch($clogp, $headref, $upstreamversion); } - die 'bug' if $split_brain && !$need_split_build_invocation; - changedir $maindir; runcmd_ordryrun_local @git, qw(pull --ff-only -q), "$playground/work", qw(master); } -sub quilt_fixup_mkwork ($) { +sub unpack_playtree_mkwork ($) { my ($headref) = @_; mkdir "work" or die $!; @@ -5536,12 +5704,14 @@ sub quilt_fixup_mkwork ($) { runcmd @git, qw(reset -q --hard), $headref; } -sub quilt_fixup_linkorigs ($$) { +sub unpack_playtree_linkorigs ($$) { my ($upstreamversion, $fn) = @_; # calls $fn->($leafname); - foreach my $f (<$maindir/../*>) { #/){ - my $b=$f; $b =~ s{.*/}{}; + my $bpd_abs = bpd_abs(); + opendir QFD, $bpd_abs or fail "buildproductsdir: $bpd_abs: $!"; + while ($!=0, defined(my $b = readdir QFD)) { + my $f = bpd_abs()."/".$b; { local ($debuglevel) = $debuglevel-1; printdebug "QF linkorigs $b, $f ?\n"; @@ -5551,6 +5721,8 @@ sub quilt_fixup_linkorigs ($$) { link_ltarget $f, $b or die "$b $!"; $fn->($b); } + die "$buildproductsdir: $!" if $!; + closedir QFD; } sub quilt_fixup_delete_pc () { @@ -5572,8 +5744,8 @@ sub quilt_fixup_singlepatch ($$$) { # get it to generate debian/patches/debian-changes, it is # necessary to build the source package. - quilt_fixup_linkorigs($upstreamversion, sub { }); - quilt_fixup_mkwork($headref); + unpack_playtree_linkorigs($upstreamversion, sub { }); + unpack_playtree_mkwork($headref); rmtree("debian/patches"); @@ -5613,7 +5785,7 @@ END print $fakedsc " ".$md->hexdigest." $size $b\n" or die $!; }; - quilt_fixup_linkorigs($upstreamversion, $dscaddfile); + unpack_playtree_linkorigs($upstreamversion, $dscaddfile); my @files=qw(debian/source/format debian/rules debian/control debian/changelog); @@ -5630,6 +5802,31 @@ END close $fakedsc or die $!; } +sub quilt_fakedsc2unapplied ($$) { + my ($headref, $upstreamversion) = @_; + # must be run in the playground + # quilt_make_fake_dsc must have been called + + runcmd qw(sh -ec), + 'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null'; + + my $fakexdir= $package.'-'.(stripepoch $upstreamversion); + rename $fakexdir, "fake" or die "$fakexdir $!"; + + changedir 'fake'; + + remove_stray_gits("source package"); + mktree_in_ud_here(); + + rmtree '.pc'; + + rmtree 'debian'; # git checkout commitish paths does not delete! + runcmd @git, qw(checkout -f), $headref, qw(-- debian); + my $unapplied=git_add_write_tree(); + printdebug "fake orig tree object $unapplied\n"; + return $unapplied; +} + sub quilt_check_splitbrain_cache ($$) { my ($headref, $upstreamversion) = @_; # Called only if we are in (potentially) split brain mode. @@ -5661,27 +5858,13 @@ sub quilt_check_splitbrain_cache ($$) { push @cachekey, $srcshash->hexdigest(); $splitbrain_cachekey = "@cachekey"; - my @cmd = (@git, qw(log -g), '--pretty=format:%H %gs', - $splitbraincache); printdebug "splitbrain cachekey $splitbrain_cachekey\n"; - debugcmd "|(probably)",@cmd; - my $child = open GC, "-|"; defined $child or die $!; - if (!$child) { - chdir $maindir or die $!; - if (!stat "$maindir_gitcommon/logs/refs/$splitbraincache") { - $! 
== ENOENT or die $!; - printdebug ">(no reflog)\n"; - finish 0; - } - exec @cmd; die $!; - } - while () { - chomp; - printdebug ">| ", $_, "\n" if $debuglevel > 1; - next unless m/^(\w+) (\S.*\S)$/ && $2 eq $splitbrain_cachekey; - - my $cachehit = $1; - quilt_fixup_mkwork($headref); + + my $cachehit = reflog_cache_lookup + "refs/$splitbraincache", $splitbrain_cachekey; + + if ($cachehit) { + unpack_playtree_mkwork($headref); my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit"; if ($cachehit ne $headref) { progress "dgit view: found cached ($saved)"; @@ -5692,8 +5875,6 @@ sub quilt_check_splitbrain_cache ($$) { progress "dgit view: found cached, no changes required"; return ($headref, $splitbrain_cachekey); } - die $! if GC->error; - failedcmd unless close GC; printdebug "splitbrain cache miss\n"; return (undef, $splitbrain_cachekey); @@ -5783,24 +5964,7 @@ sub quilt_fixup_multipatch ($$$) { quilt_check_splitbrain_cache($headref, $upstreamversion); return if $cachehit; } - - runcmd qw(sh -ec), - 'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null'; - - my $fakexdir= $package.'-'.(stripepoch $upstreamversion); - rename $fakexdir, "fake" or die "$fakexdir $!"; - - changedir 'fake'; - - remove_stray_gits("source package"); - mktree_in_ud_here(); - - rmtree '.pc'; - - rmtree 'debian'; # git checkout commitish paths does not delete! - runcmd @git, qw(checkout -f), $headref, qw(-- debian); - my $unapplied=git_add_write_tree(); - printdebug "fake orig tree object $unapplied\n"; + my $unapplied=quilt_fakedsc2unapplied($headref, $upstreamversion); ensuredir '.pc'; @@ -5812,13 +5976,13 @@ sub quilt_fixup_multipatch ($$$) { failed to apply your git tree's patch stack (from debian/patches/) to the corresponding upstream tarball(s). Your source tree and .orig are probably too inconsistent. dgit can only fix up certain kinds of - anomaly (depending on the quilt mode). See --quilt= in dgit(1). + anomaly (depending on the quilt mode). Please see --quilt= in dgit(1). END } changedir '..'; - quilt_fixup_mkwork($headref); + unpack_playtree_mkwork($headref); my $mustdeletepc=0; if (stat_exists ".pc") { @@ -5878,12 +6042,21 @@ END my @failsuggestion; if (!($diffbits->{O2H} & $diffbits->{O2A})) { - push @failsuggestion, "This might be a patches-unapplied branch."; - } elsif (!($diffbits->{H2A} & $diffbits->{O2A})) { - push @failsuggestion, "This might be a patches-applied branch."; + push @failsuggestion, [ 'unapplied', + "This might be a patches-unapplied branch." ]; + } elsif (!($diffbits->{H2A} & $diffbits->{O2A})) { + push @failsuggestion, [ 'applied', + "This might be a patches-applied branch." ]; } - push @failsuggestion, "Maybe you need to specify one of". - " --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?"; + push @failsuggestion, [ 'quilt-mode', + "Maybe you need one of --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?" ]; + + push @failsuggestion, [ 'gitattrs', + "Warning: Tree has .gitattributes. See GITATTRIBUTES in dgit(7)." ] + if stat_exists '.gitattributes'; + + push @failsuggestion, [ 'origs', + "Maybe orig tarball(s) are not identical to git representation?" 
]; if (quiltmode_splitbrain()) { quiltify_splitbrain($clogp, $unapplied, $headref, $oldtiptree, @@ -5990,6 +6163,10 @@ sub cmd_clean () { maybe_unapply_patches_again(); } +# return values from massage_dbp_args are one or both of these flags +sub WANTSRC_SOURCE () { 01; } # caller should build source (separately) +sub WANTSRC_BUILDER () { 02; } # caller should run dpkg-buildpackage + sub build_or_push_prep_early () { our $build_or_push_prep_early_done //= 0; return if $build_or_push_prep_early_done++; @@ -5998,6 +6175,7 @@ sub build_or_push_prep_early () { $isuite = getfield $clogp, 'Distribution'; $package = getfield $clogp, 'Source'; $version = getfield $clogp, 'Version'; + $dscfn = dscfn($version); } sub build_prep_early () { @@ -6006,9 +6184,12 @@ sub build_prep_early () { check_not_dirty(); } -sub build_prep () { +sub build_prep ($) { + my ($wantsrc) = @_; build_prep_early(); - clean_tree(); + # clean the tree if we're trying to include dirty changes in the + # source package, or we are running the builder in $maindir + clean_tree() if $includedirty || ($wantsrc & WANTSRC_BUILDER); build_maybe_quilt_fixup(); if ($rmchanges) { my $pat = changespat $version; @@ -6067,28 +6248,11 @@ sub changesopts () { sub massage_dbp_args ($;$) { my ($cmd,$xargs) = @_; - # We need to: - # - # - if we're going to split the source build out so we can - # do strange things to it, massage the arguments to dpkg-buildpackage - # so that the main build doessn't build source (or add an argument - # to stop it building source by default). - # - # - add -nc to stop dpkg-source cleaning the source tree, - # unless we're not doing a split build and want dpkg-source - # as cleanmode, in which case we can do nothing - # - # return values: - # 0 - source will NOT need to be built separately by caller - # +1 - source will need to be built separately by caller - # +2 - source will need to be built separately by caller AND - # dpkg-buildpackage should not in fact be run at all! + # Since we split the source build out so we can do strange things + # to it, massage the arguments to dpkg-buildpackage so that the + # main build doessn't build source (or add an argument to stop it + # building source by default). debugcmd '#massaging#', @$cmd if $debuglevel>1; -#print STDERR "MASS0 ",Dumper($cmd, $xargs, $need_split_build_invocation); - if ($cleanmode eq 'dpkg-source' && !$need_split_build_invocation) { - $clean_using_builder = 1; - return 0; - } # -nc has the side effect of specifying -b if nothing else specified # and some combinations of -S, -b, et al, are errors, rather than # later simply overriding earlie. So we need to: @@ -6099,17 +6263,26 @@ sub massage_dbp_args ($;$) { my $dmode = '-F'; foreach my $l ($cmd, $xargs) { next unless $l; - @$l = grep { !(m/^-[SgGFABb]$/s and $dmode=$_) } @$l; + @$l = grep { !(m/^-[SgGFABb]$|^--build=/s and $dmode=$_) } @$l; } push @$cmd, '-nc'; #print STDERR "MASS1 ",Dumper($cmd, $xargs, $dmode); - my $r = 0; - if ($need_split_build_invocation) { - printdebug "massage split $dmode.\n"; - $r = $dmode =~ m/[S]/ ? +2 : - $dmode =~ y/gGF/ABb/ ? +1 : - $dmode =~ m/[ABb]/ ? 0 : - die "$dmode ?"; + my $r = WANTSRC_BUILDER; + printdebug "massage split $dmode.\n"; + if ($dmode =~ s/^--build=//) { + $r = 0; + my @d = split /,/, $dmode; + $r |= WANTSRC_SOURCE if grep { s/^full$/binary/ } @d; + $r |= WANTSRC_SOURCE if grep { s/^source$// } @d; + $r |= WANTSRC_BUILDER if grep { m/./ } @d; + fail "Wanted to build nothing!" unless $r; + $dmode = '--build='. 
join ',', grep m/./, @d; + } else { + $r = + $dmode =~ m/[S]/ ? WANTSRC_SOURCE : + $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : + $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER : + die "$dmode ?"; } printdebug "massage done $r $dmode.\n"; push @$cmd, $dmode; @@ -6117,21 +6290,22 @@ sub massage_dbp_args ($;$) { return $r; } -sub in_parent (&) { +sub in_bpd (&) { my ($fn) = @_; my $wasdir = must_getcwd(); - changedir ".."; + changedir $buildproductsdir; $fn->(); changedir $wasdir; } -sub postbuild_mergechanges ($) { # must run with CWD=.. (eg in in_parent) +# this sub must run with CWD=$buildproductsdir (eg in in_bpd) +sub postbuild_mergechanges ($) { my ($msg_if_onlyone) = @_; # If there is only one .changes file, fail with $msg_if_onlyone, # or if that is undef, be a no-op. # Returns the changes file to report to the user. my $pat = changespat $version; - my @changesfiles = glob $pat; + my @changesfiles = grep { !m/_multi\.changes/ } glob $pat; @changesfiles = sort { ($b =~ m/_source\.changes$/ <=> $a =~ m/_source\.changes$/) or $a cmp $b @@ -6167,8 +6341,11 @@ END sub midbuild_checkchanges () { my $pat = changespat $version; return if $rmchanges; - my @unwanted = map { s#^\.\./##; $_; } glob "../$pat"; - @unwanted = grep { $_ ne changespat $version,'source' } @unwanted; + my @unwanted = map { s#.*/##; $_; } glob "$bpd_glob/$pat"; + @unwanted = grep { + $_ ne changespat $version,'source' and + $_ ne changespat $version,'multi' + } @unwanted; fail < 0) { + build_prep($wantsrc); + if ($wantsrc & WANTSRC_SOURCE) { build_source(); midbuild_checkchanges_vanilla $wantsrc; - } else { - build_prep(); } - if ($wantsrc < 2) { + if ($wantsrc & WANTSRC_BUILDER) { push @dbp, changesopts_version(); maybe_apply_patches_dirtily(); runcmd_ordryrun_local @dbp; @@ -6226,12 +6407,11 @@ sub cmd_gbp_build { # orig is absent. my $upstreamversion = upstreamversion $version; my $origfnpat = srcfn $upstreamversion, '.orig.tar.*'; - my $gbp_make_orig = $version =~ m/-/ && !(() = glob "../$origfnpat"); + my $gbp_make_orig = $version =~ m/-/ && !(() = glob "$bpd_glob/$origfnpat"); if ($gbp_make_orig) { clean_tree(); $cleanmode = 'none'; # don't do it again - $need_split_build_invocation = 1; } my @dbp = @dpkgbuildpackage; @@ -6245,7 +6425,7 @@ sub cmd_gbp_build { $gbp_build[0] = 'gbp buildpackage'; } } - my @cmd = opts_opt_multi_cmd @gbp_build; + my @cmd = opts_opt_multi_cmd [], @gbp_build; push @cmd, (qw(-us -uc --git-no-sign-tags), "--git-builder=".(shellquote @dbp)); @@ -6269,17 +6449,17 @@ sub cmd_gbp_build { } } - if ($wantsrc > 0) { + build_prep($wantsrc); + if ($wantsrc & WANTSRC_SOURCE) { build_source(); midbuild_checkchanges_vanilla $wantsrc; } else { if (!$clean_using_builder) { push @cmd, '--git-cleaner=true'; } - build_prep(); } maybe_unapply_patches_again(); - if ($wantsrc < 2) { + if ($wantsrc & WANTSRC_BUILDER) { push @cmd, changesopts(); runcmd_ordryrun_local @cmd, @ARGV; } @@ -6287,77 +6467,158 @@ sub cmd_gbp_build { } sub cmd_git_build { cmd_gbp_build(); } # compatibility with <= 1.0 -sub build_source_for_push { - build_source(); - maybe_unapply_patches_again(); - $changesfile = $sourcechanges; +sub building_source_in_playtree { + # If $includedirty, we have to build the source package from the + # working tree, not a playtree, so that uncommitted changes are + # included (copying or hardlinking them into the playtree could + # cause trouble). 
+ # + # Note that if we are building a source package in split brain + # mode we do not support including uncommitted changes, because + # that makes quilt fixup too hard. I.e. ($split_brain && (dgit is + # building a source package)) => !$includedirty + return !$includedirty; } sub build_source { - build_prep_early(); - build_prep(); $sourcechanges = changespat $version,'source'; if (act_local()) { - unlink "../$sourcechanges" or $!==ENOENT + unlink "$buildproductsdir/$sourcechanges" or $!==ENOENT or fail "remove $sourcechanges: $!"; } - $dscfn = dscfn($version); my @cmd = (@dpkgsource, qw(-b --)); - if ($split_brain) { + my $leafdir; + if (building_source_in_playtree()) { + $leafdir = 'work'; + my $headref = git_rev_parse('HEAD'); + # If we are in split brain, there is already a playtree with + # the thing we should package into a .dsc (thanks to quilt + # fixup). If not, make a playtree + prep_ud() unless $split_brain; changedir $playground; - runcmd_ordryrun_local @cmd, "work"; - my @udfiles = <${package}_*>; - changedir $maindir; - foreach my $f (@udfiles) { - printdebug "source copy, found $f\n"; - next unless - $f eq $dscfn or - ($f =~ m/\.debian\.tar(?:\.\w+)$/ && - $f eq srcfn($version, $&)); - printdebug "source copy, found $f - renaming\n"; - rename "$playground/$f", "../$f" or $!==ENOENT - or fail "put in place new source file ($f): $!"; + unless ($split_brain) { + my $upstreamversion = upstreamversion $version; + unpack_playtree_linkorigs($upstreamversion, sub { }); + unpack_playtree_mkwork($headref); + changedir '..'; } } else { - my $pwd = must_getcwd(); - my $leafdir = basename $pwd; - changedir ".."; - runcmd_ordryrun_local @cmd, $leafdir; - changedir $pwd; + $leafdir = basename $maindir; + changedir '..'; } + runcmd_ordryrun_local @cmd, $leafdir; + + changedir $leafdir; runcmd_ordryrun_local qw(sh -ec), - 'exec >$1; shift; exec "$@"','x', - "../$sourcechanges", + 'exec >../$1; shift; exec "$@"','x', $sourcechanges, @dpkggenchanges, qw(-S), changesopts(); + changedir '..'; + + printdebug "moving $dscfn, $sourcechanges, etc. to ".bpd_abs()."\n"; + $dsc = parsecontrol($dscfn, "source package"); + + my $mv = sub { + my ($why, $l) = @_; + printdebug " renaming ($why) $l\n"; + rename "$l", bpd_abs()."/$l" + or fail "put in place new built file ($l): $!"; + }; + foreach my $l (split /\n/, getfield $dsc, 'Files') { + $l =~ m/\S+$/ or next; + $mv->('Files', $&); + } + $mv->('dsc', $dscfn); + $mv->('changes', $sourcechanges); + + changedir $maindir; } sub cmd_build_source { - build_prep_early(); badusage "build-source takes no additional arguments" if @ARGV; + build_prep(WANTSRC_SOURCE); build_source(); maybe_unapply_patches_again(); printdone "source built, results in $dscfn and $sourcechanges"; } -sub cmd_sbuild { +sub cmd_push_source { + prep_push(); + fail "dgit push-source: --include-dirty/--ignore-dirty does not make". + "sense with push-source!" if $includedirty; + build_maybe_quilt_fixup(); + if ($changesfile) { + my $changes = parsecontrol("$buildproductsdir/$changesfile", + "source changes file"); + unless (test_source_only_changes($changes)) { + fail "user-specified changes file is not source-only"; + } + } else { + # Building a source package is very fast, so just do it + build_source(); + die "er, patches are applied dirtily but shouldn't be.." 
+ if $patches_applied_dirtily; + $changesfile = $sourcechanges; + } + dopush(); +} + +sub binary_builder { + my ($bbuilder, $pbmc_msg, @args) = @_; + build_prep(WANTSRC_SOURCE); build_source(); midbuild_checkchanges(); - in_parent { + in_bpd { if (act_local()) { - stat_exists $dscfn or fail "$dscfn (in parent directory): $!"; + stat_exists $dscfn or fail "$dscfn (in build products dir): $!"; stat_exists $sourcechanges - or fail "$sourcechanges (in parent directory): $!"; + or fail "$sourcechanges (in build products dir): $!"; } - runcmd_ordryrun_local @sbuild, qw(-d), $isuite, @ARGV, $dscfn; + runcmd_ordryrun_local @$bbuilder, @args; }; maybe_unapply_patches_again(); - in_parent { - postbuild_mergechanges(<{Filename}; - my $here = "../$f"; + my $here = "$buildproductsdir/$f"; if (lstat $here) { next if stat $here; fail "lstat $here works but stat gives $! !"; @@ -6748,9 +7027,9 @@ sub parseopts () { } elsif (m/^--(gbp|dpm)$/s) { push @ropts, "--quilt=$1"; $quilt_mode = $1; - } elsif (m/^--ignore-dirty$/s) { + } elsif (m/^--(?:ignore|include)-dirty$/s) { push @ropts, $_; - $ignoredirty = 1; + $includedirty = 1; } elsif (m/^--no-quilt-fixup$/s) { push @ropts, $_; $quilt_mode = 'nocheck'; @@ -6769,10 +7048,13 @@ sub parseopts () { } elsif (m/^--delayed=(\d+)$/s) { push @ropts, $_; push @dput, $_; - } elsif (m/^--dgit-view-save=(.+)$/s) { + } elsif (my ($k,$v) = + m/^--save-(dgit-view)=(.+)$/s || + m/^--(dgit-view)-save=(.+)$/s + ) { push @ropts, $_; - $split_brain_save = $1; - $split_brain_save =~ s#^(?!refs/)#refs/heads/#; + $v =~ s#^(?!refs/)#refs/heads/#; + $internal_object_save{$k} = $v; } elsif (m/^--(no-)?rm-old-changes$/s) { push @ropts, $_; $rmchanges = !$1; @@ -6792,10 +7074,6 @@ sub parseopts () { push @ropts, $_; $tagformat_want = [ $1, 'command line', 1 ]; # 1 menas overrides distro configuration - } elsif (m/^--always-split-source-build$/s) { - # undocumented, for testing - push @ropts, $_; - $need_split_build_invocation = 1; } elsif (m/^--config-lookup-explode=(.+)$/s) { # undocumented, for testing push @ropts, $_; @@ -6876,8 +7154,8 @@ sub check_env_sanity () { foreach my $name (qw(PIPE CHLD)) { my $signame = "SIG$name"; my $signum = eval "POSIX::$signame" // die; - ($SIG{$name} // 'DEFAULT') eq 'DEFAULT' or - die "$signame is set to something other than SIG_DFL\n"; + die "$signame is set to something other than SIG_DFL\n" + if defined $SIG{$name} and $SIG{$name} ne 'DEFAULT'; $blocked->ismember($signum) and die "$signame is blocked\n"; } @@ -6949,7 +7227,8 @@ sub parseopts_late_defaults () { $$vr = $v; } - $need_split_build_invocation ||= quiltmode_splitbrain(); + fail __ "dgit: --include-dirty is not supported in split view quilt mode" + if $split_brain && $includedirty; if (!defined $cleanmode) { local $access_forpush; @@ -6959,8 +7238,16 @@ sub parseopts_late_defaults () { badcfg "unknown clean-mode \`$cleanmode'" unless $cleanmode =~ m/^($cleanmode_re)$(?!\n)/s; } + + $buildproductsdir //= access_cfg('build-products-dir', 'RETURN-UNDEF'); + $buildproductsdir //= '..'; + $bpd_glob = $buildproductsdir; + $bpd_glob =~ s#[][\\{}*?~]#\\$&#g; } +setlocale(LC_MESSAGES, ""); +textdomain("dgit"); + if ($ENV{$fakeeditorenv}) { git_slurp_config(); quilt_fixup_editor(); @@ -6973,7 +7260,7 @@ print STDERR "DRY RUN ONLY\n" if $dryrun_level > 1; print STDERR "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n" if $dryrun_level == 1; if (!@ARGV) { - print STDERR $helpmsg or die $!; + print STDERR __ $helpmsg or die $!; finish 8; } $cmd = $subcommand = shift @ARGV;
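Note on the build-mode handling introduced above: the hunks in massage_dbp_args replace the old numeric return convention with the WANTSRC_SOURCE / WANTSRC_BUILDER flag bits and additionally parse dpkg-buildpackage's --build=TYPE[,TYPE...] syntax alongside the classic single-letter -S/-b/-B/-A/-F/-g/-G selections. The following is a minimal standalone sketch of that classification, assuming the reader just wants to see the mapping; the helper name classify_build_mode is hypothetical and only illustrates the idea, it is not dgit's actual implementation.

#!/usr/bin/perl -w
# Illustrative sketch (hypothetical helper, not part of dgit): classify a
# dpkg-buildpackage build selection into "caller builds the source package"
# and/or "caller runs dpkg-buildpackage", in the spirit of the
# WANTSRC_SOURCE / WANTSRC_BUILDER flags defined in the patch above.
use strict;

sub WANTSRC_SOURCE  () { 01 }   # caller should build source (separately)
sub WANTSRC_BUILDER () { 02 }   # caller should run dpkg-buildpackage

sub classify_build_mode ($) {
    my ($dmode) = @_;
    my $r = 0;
    if ($dmode =~ s/^--build=//) {
        # --build=TYPE[,...]: "source" is taken over by the caller,
        # "full" implies both, anything else (any, all, binary, ...)
        # is left to the builder.
        for my $d (split /,/, $dmode) {
            if    ($d eq 'source') { $r |= WANTSRC_SOURCE; }
            elsif ($d eq 'full')   { $r |= WANTSRC_SOURCE | WANTSRC_BUILDER; }
            else                   { $r |= WANTSRC_BUILDER; }
        }
    } else {
        # classic single-letter selections
        $r = $dmode =~ m/S/     ? WANTSRC_SOURCE
           : $dmode =~ m/[gGF]/ ? WANTSRC_SOURCE | WANTSRC_BUILDER
           : $dmode =~ m/[ABb]/ ? WANTSRC_BUILDER
           : die "unknown build mode $dmode";
    }
    die "nothing to build" unless $r;
    return $r;
}

# example: classify_build_mode('--build=source,any') == 3

In that example, '--build=source,any' yields WANTSRC_SOURCE|WANTSRC_BUILDER, matching the behaviour the patch aims for: dgit builds the source package itself in its playtree and leaves the binary builds to dpkg-buildpackage.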