X-Git-Url: https://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?p=dgit.git;a=blobdiff_plain;f=dgit;h=ee1101da23a4521f3d9d5ac33e59b0b2c37cd248;hp=8e72b2aac1c93535faaaf29173e2f46e0b281c67;hb=205d601c59eb8407bccd3bb2981b79eab81a71e8;hpb=cc0686845b8db87998cfe937014e0f3d2c24b03e diff --git a/dgit b/dgit index 8e72b2aa..ee1101da 100755 --- a/dgit +++ b/dgit @@ -36,6 +36,8 @@ use Digest::SHA; use Digest::MD5; use List::Util qw(any); use List::MoreUtils qw(pairwise); +use Text::Glob qw(match_glob); +use Fcntl qw(:DEFAULT :flock); use Carp; use Debian::Dgit; @@ -67,12 +69,16 @@ our $rmchanges; our $overwrite_version; # undef: not specified; '': check changelog our $quilt_mode; our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied'; +our $dodep14tag; +our $dodep14tag_re = 'want|no|always'; +our $split_brain_save; our $we_are_responder; our $initiator_tempdir; our $patches_applied_dirtily = 00; our $tagformat_want; our $tagformat; our $tagformatfn; +our $chase_dsc_distro=1; #xxx configurable our %forceopts = map { $_=>0 } qw(unrepresentable unsupported-source-format @@ -91,6 +97,7 @@ our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?"; our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$'; our $splitbraincache = 'dgit-intern/quilt-cache'; +our $rewritemap = 'dgit-rewrite/map'; our (@git) = qw(git); our (@dget) = qw(dget); @@ -101,6 +108,8 @@ our (@gpg) = qw(gpg); our (@sbuild) = qw(sbuild); our (@ssh) = 'ssh'; our (@dgit) = qw(dgit); +our (@aptget) = qw(apt-get); +our (@aptcache) = qw(apt-cache); our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git); our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git); our (@dpkggenchanges) = qw(dpkg-genchanges); @@ -118,6 +127,8 @@ our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'ssh' => \@ssh, 'dgit' => \@dgit, 'git' => \@git, + 'apt-get' => \@aptget, + 'apt-cache' => \@aptcache, 'dpkg-source' => \@dpkgsource, 'dpkg-buildpackage' => \@dpkgbuildpackage, 'dpkg-genchanges' => \@dpkggenchanges, @@ -132,7 +143,7 @@ our %opts_cfg_insertpos = map { scalar @{ $opts_opt_map{$_} } } keys %opts_opt_map; -sub finalise_opts_opts(); +sub parseopts_late_defaults(); our $keyid; @@ -164,8 +175,7 @@ sub debiantag ($$) { sub debiantag_maintview ($$) { my ($v,$distro) = @_; - $v =~ y/~:/_%/; - return "$distro/$v"; + return "$distro/".dep14_version_mangle $v; } sub madformat ($) { $_[0] eq '3.0 (quilt)' } @@ -176,30 +186,6 @@ sub lref () { return "refs/heads/".lbranch(); } sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); } sub rrref () { return server_ref($csuite); } -sub lrfetchrefs () { return "refs/dgit-fetch/$csuite"; } -sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); } - -# We fetch some parts of lrfetchrefs/*. Ideally we delete these -# locally fetched refs because they have unhelpful names and clutter -# up gitk etc. So we track whether we have "used up" head ref (ie, -# whether we have made another local ref which refers to this object). -# -# (If we deleted them unconditionally, then we might end up -# re-fetching the same git objects each time dgit fetch was run.) -# -# So, leach use of lrfetchrefs needs to be accompanied by arrangements -# in git_fetch_us to fetch the refs in question, and possibly a call -# to lrfetchref_used. 
- -our (%lrfetchrefs_f, %lrfetchrefs_d); -# $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid - -sub lrfetchref_used ($) { - my ($fullrefname) = @_; - my $objid = $lrfetchrefs_f{$fullrefname}; - $lrfetchrefs_d{$fullrefname} = $objid if defined $objid; -} - sub stripepoch ($) { my ($vsn) = @_; $vsn =~ s/^\d+\://; @@ -221,6 +207,12 @@ sub changespat ($;$) { return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes"; } +sub upstreamversion ($) { + my ($vsn) = @_; + $vsn =~ s/-[^-]+$//; + return $vsn; +} + our $us = 'dgit'; initdebug(''); @@ -503,7 +495,7 @@ sub act_scary () { return !$dryrun_level; } sub printdone { if (!$dryrun_level) { - progress "dgit ok: @_"; + progress "$us ok: @_"; } else { progress "would be ok: @_ (but dry run only)"; } @@ -573,12 +565,18 @@ sub cmd_help () { our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset"; our %defcfg = ('dgit.default.distro' => 'debian', + 'dgit-suite.*-security.distro' => 'debian-security', 'dgit.default.username' => '', 'dgit.default.archive-query-default-component' => 'main', 'dgit.default.ssh' => 'ssh', 'dgit.default.archive-query' => 'madison:', 'dgit.default.sshpsql-dbname' => 'service=projectb', + 'dgit.default.aptget-components' => 'main', 'dgit.default.dgit-tag-format' => 'new,old,maint', + 'dgit.dsc-url-proto-ok.http' => 'true', + 'dgit.dsc-url-proto-ok.https' => 'true', + 'dgit.dsc-url-proto-ok.git' => 'true', + 'dgit.default.dsc-url-proto-ok' => 'false', # old means "repo server accepts pushes with old dgit tags" # new means "repo server accepts pushes with new dgit tags" # maint means "repo server accepts split brain pushes" @@ -609,6 +607,11 @@ our %defcfg = ('dgit.default.distro' => 'debian', 'dgit-distro.debian.git-url-suffix' => '', 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/', + 'dgit-distro.debian-security.archive-query' => 'aptget:', + 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/', + 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#', + 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#', + 'dgit-distro.debian-security.nominal-distro' => 'debian', 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*', 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/', 'dgit-distro.ubuntu.git-check' => 'false', @@ -661,7 +664,10 @@ sub git_get_config ($) { my ($c) = @_; foreach my $src (@gitcfgsources) { my $l = $gitcfgs{$src}{$c}; - printdebug"C $c ".(defined $l ? messagequote "'$l'" : "undef")."\n" + croak "$l $c" if $l && !ref $l; + printdebug"C $c ".(defined $l ? + join " ", map { messagequote "'$_'" } @$l : + "undef")."\n" if $debuglevel >= 4; $l or next; @$l==1 or badcfg "multiple values for $c". @@ -674,24 +680,53 @@ sub git_get_config ($) { sub cfg { foreach my $c (@_) { return undef if $c =~ /RETURN-UNDEF/; + printdebug "C? $c\n" if $debuglevel >= 5; my $v = git_get_config($c); return $v if defined $v; my $dv = $defcfg{$c}; - return $dv if defined $dv; + if (defined $dv) { + printdebug "CD $c $dv\n" if $debuglevel >= 4; + return $dv; + } } badcfg "need value for one of: @_\n". 
"$us: distro or suite appears not to be (properly) supported"; } -sub access_basedistro () { +sub access_basedistro__noalias () { if (defined $idistro) { return $idistro; } else { - return cfg("dgit-suite.$isuite.distro", - "dgit.default.distro"); + my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF'); + return $def if defined $def; + foreach my $src (@gitcfgsources, 'internal') { + my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src}; + next unless $kl; + foreach my $k (keys %$kl) { + next unless $k =~ m#^dgit-suite\.(.*)\.distro$#; + my $dpat = $1; + next unless match_glob $dpat, $isuite; + return $kl->{$k}; + } + } + return cfg("dgit.default.distro"); } } +sub access_basedistro () { + my $noalias = access_basedistro__noalias(); + my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF'); + return $canon // $noalias; +} + +sub access_nomdistro () { + my $base = access_basedistro(); + my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base; + $r =~ m/^$distro_re$/ or badcfg + "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)"; + return $r; +} + sub access_quirk () { # returns (quirk name, distro to use instead or undef, quirk-specific info) my $basedistro = access_basedistro(); @@ -753,11 +788,11 @@ sub pushing () { Push failed, before we got started. You can retry the push, after fixing the problem, if you like. END - finalise_opts_opts(); + parseopts_late_defaults(); } sub notpushing () { - finalise_opts_opts(); + parseopts_late_defaults(); } sub supplementary_message ($) { @@ -787,6 +822,8 @@ sub access_distros () { unshift @l, $instead_distro; @l = grep { defined } @l; + push @l, access_nomdistro(); + if (access_forpush()) { @l = map { ("$_/push", $_) } @l; } @@ -916,10 +953,10 @@ sub parsecontrolfh ($$;$) { } sub parsecontrol { - my ($file, $desc) = @_; + my ($file, $desc, $allowsigned) = @_; my $fh = new IO::Handle; open $fh, '<', $file or die "$file: $!"; - my $c = parsecontrolfh($fh,$desc); + my $c = parsecontrolfh($fh,$desc,$allowsigned); $fh->error and die $!; close $fh; return $c; @@ -972,6 +1009,8 @@ our %rmad; sub archive_query ($;@) { my ($method) = shift @_; + fail "this operation does not support multiple comma-separated suites" + if $isuite =~ m/,/; my $query = access_cfg('archive-query','RETURN-UNDEF'); $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'"; my $proto = $1; @@ -979,12 +1018,28 @@ sub archive_query ($;@) { { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); } } +sub archive_query_prepend_mirror { + my $m = access_cfg('mirror'); + return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_; +} + sub pool_dsc_subpath ($$) { my ($vsn,$component) = @_; # $package is implict arg my $prefix = substr($package, 0, $package =~ m/^l/ ? 
4 : 1); return "/pool/$component/$prefix/$package/".dscfn($vsn); } +sub cfg_apply_map ($$$) { + my ($varref, $what, $mapspec) = @_; + return unless $mapspec; + + printdebug "config $what EVAL{ $mapspec; }\n"; + $_ = $$varref; + eval "package Dgit::Config; $mapspec;"; + die $@ if $@; + $$varref = $_; +} + #---------- `ftpmasterapi' archive query method (nascent) ---------- sub archive_api_query_cmd ($) { @@ -1085,7 +1140,7 @@ sub archive_query_ftpmasterapi { if length $@; } @rows = sort { -version_compare($a->[0],$b->[0]) } @rows; - return @rows; + return archive_query_prepend_mirror @rows; } sub file_in_archive_ftpmasterapi { @@ -1097,6 +1152,168 @@ sub file_in_archive_ftpmasterapi { my $info = api_query($data, "file_in_archive/$pat", 1); } +#---------- `aptget' archive query method ---------- + +our $aptget_base; +our $aptget_releasefile; +our $aptget_configpath; + +sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; } +sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; } + +sub aptget_cache_clean { + runcmd_ordryrun_local qw(sh -ec), + 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --', + 'x', $aptget_base; +} + +sub aptget_lock_acquire () { + my $lockfile = "$aptget_base/lock"; + open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!"; + flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!"; +} + +sub aptget_prep ($) { + my ($data) = @_; + return if defined $aptget_base; + + badcfg "aptget archive query method takes no data part" + if length $data; + + my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache"; + + ensuredir $cache; + ensuredir "$cache/dgit"; + my $cachekey = + access_cfg('aptget-cachekey','RETURN-UNDEF') + // access_nomdistro(); + + $aptget_base = "$cache/dgit/aptget"; + ensuredir $aptget_base; + + my $quoted_base = $aptget_base; + die "$quoted_base contains bad chars, cannot continue" + if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/ + + ensuredir $aptget_base; + + aptget_lock_acquire(); + + aptget_cache_clean(); + + $aptget_configpath = "$aptget_base/apt.conf#$cachekey"; + my $sourceslist = "source.list#$cachekey"; + + my $aptsuites = $isuite; + cfg_apply_map(\$aptsuites, 'suite map', + access_cfg('aptget-suite-map', 'RETURN-UNDEF')); + + open SRCS, ">", "$aptget_base/$sourceslist" or die $!; + printf SRCS "deb-src %s %s %s\n", + access_cfg('mirror'), + $aptsuites, + access_cfg('aptget-components') + or die $!; + + ensuredir "$aptget_base/cache"; + ensuredir "$aptget_base/lists"; + + open CONF, ">", $aptget_configpath or die $!; + print CONF <) { + next unless stat_exists $oldlist; + my ($mtime) = (stat _)[9]; + utime $oldatime, $mtime, $oldlist or die "$oldlist $!"; + } + + runcmd_ordryrun_local aptget_aptget(), qw(update); + + my @releasefiles; + foreach my $oldlist (<$aptget_base/lists/*Release>) { + next unless stat_exists $oldlist; + my ($atime) = (stat _)[8]; + next if $atime == $oldatime; + push @releasefiles, $oldlist; + } + my @inreleasefiles = grep { m#/InRelease$# } @releasefiles; + @releasefiles = @inreleasefiles if @inreleasefiles; + die "apt updated wrong number of Release files (@releasefiles), erk" + unless @releasefiles == 1; + + ($aptget_releasefile) = @releasefiles; +} + +sub canonicalise_suite_aptget { + my ($proto,$data) = @_; + aptget_prep($data); + + my $release = parsecontrol $aptget_releasefile, "Release file", 1; + + foreach my $name (qw(Codename Suite)) { + my $val = $release->{$name}; + if (defined $val) { + printdebug "release file $name: $val\n"; + $val =~ m/^$suite_re$/o or 
fail + "Release file ($aptget_releasefile) specifies intolerable $name"; + cfg_apply_map(\$val, 'suite rmap', + access_cfg('aptget-suite-rmap', 'RETURN-UNDEF')); + return $val + } + } + return $isuite; +} + +sub archive_query_aptget { + my ($proto,$data) = @_; + aptget_prep($data); + + ensuredir "$aptget_base/source"; + foreach my $old (<$aptget_base/source/*.dsc>) { + unlink $old or die "$old: $!"; + } + + my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package; + return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi; + # avoids apt-get source failing with ambiguous error code + + runcmd_ordryrun_local + shell_cmd 'cd "$1"/source; shift', $aptget_base, + aptget_aptget(), qw(--download-only --only-source source), $package; + + my @dscs = <$aptget_base/source/*.dsc>; + fail "apt-get source did not produce a .dsc" unless @dscs; + fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1; + + my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1; + + use URI::Escape; + my $uri = "file://". uri_escape $dscs[0]; + $uri =~ s{\%2f}{/}gi; + return [ (getfield $pre_dsc, 'Version'), $uri ]; +} + #---------- `dummyapicat' archive query method ---------- sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; } @@ -1127,7 +1344,8 @@ sub file_in_archive_dummycatapi ($$$) { #---------- `madison' archive query method ---------- sub archive_query_madison { - return map { [ @$_[0..1] ] } madison_get_parse(@_); + return archive_query_prepend_mirror + map { [ @$_[0..1] ] } madison_get_parse(@_); } sub madison_get_parse { @@ -1233,7 +1451,7 @@ END my ($vsn,$component,$filename,$sha256sum) = @$_; [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ]; } @rows; - return @rows; + return archive_query_prepend_mirror @rows; } sub canonicalise_suite_sshpsql ($$) { @@ -1289,7 +1507,8 @@ sub archive_query_dummycat ($$) { } C->error and die "$dpath: $!"; close C; - return sort { -version_compare($a->[0],$b->[0]); } @rows; + return archive_query_prepend_mirror + sort { -version_compare($a->[0],$b->[0]); } @rows; } sub file_in_archive_dummycat () { return undef; } @@ -1300,6 +1519,15 @@ sub access_cfg_tagformats () { split /\,/, access_cfg('dgit-tag-format'); } +sub access_cfg_tagformats_can_splitbrain () { + my %y = map { $_ => 1 } access_cfg_tagformats; + foreach my $needtf (qw(new maint)) { + next if $y{$needtf}; + return 0; + } + return 1; +} + sub need_tagformat ($$) { my ($fmt, $why) = @_; fail "need to use tag format $fmt ($why) but also need". 
@@ -1344,6 +1572,8 @@ sub canonicalise_suite () { $csuite = archive_query('canonicalise_suite'); if ($isuite ne $csuite) { progress "canonical suite name for $isuite is $csuite"; + } else { + progress "canonical suite name is $csuite"; } } @@ -1351,8 +1581,8 @@ sub get_archive_dsc () { canonicalise_suite(); my @vsns = archive_query('archive_query'); foreach my $vinfo (@vsns) { - my ($vsn,$subpath,$digester,$digest) = @$vinfo; - $dscurl = access_cfg('mirror').$subpath; + my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo; + $dscurl = $vsn_dscurl; $dscdata = url_get($dscurl); if (!$dscdata) { $skew_warning_vsn = $vsn if !defined $skew_warning_vsn; @@ -1447,6 +1677,7 @@ sub create_remote_git_repo () { } our ($dsc_hash,$lastpush_mergeinput); +our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url); our $ud = '.git/dgit/unpack'; @@ -1471,7 +1702,13 @@ sub git_write_tree () { return $tree; } -sub remove_stray_gits () { +sub git_add_write_tree () { + runcmd @git, qw(add -Af .); + return git_write_tree(); +} + +sub remove_stray_gits ($) { + my ($what) = @_; my @gitscmd = qw(find -name .git -prune -print0); debugcmd "|",@gitscmd; open GITS, "-|", @gitscmd or die $!; @@ -1479,7 +1716,7 @@ sub remove_stray_gits () { local $/="\0"; while () { chomp or die; - print STDERR "$us: warning: removing from source package: ", + print STDERR "$us: warning: removing from $what: ", (messagequote $_), "\n"; rmtree $_; } @@ -1487,8 +1724,8 @@ sub remove_stray_gits () { $!=0; $?=0; close GITS or failedcmd @gitscmd; } -sub mktree_in_ud_from_only_subdir (;$) { - my ($raw) = @_; +sub mktree_in_ud_from_only_subdir ($;$) { + my ($what,$raw) = @_; # changes into the subdir my (@dirs) = <*/.>; @@ -1497,7 +1734,7 @@ sub mktree_in_ud_from_only_subdir (;$) { my $dir = $1; changedir $dir; - remove_stray_gits(); + remove_stray_gits($what); mktree_in_ud_here(); if (!$raw) { my ($format, $fopts) = get_source_format(); @@ -1506,8 +1743,7 @@ sub mktree_in_ud_from_only_subdir (;$) { } } - runcmd @git, qw(add -Af); - my $tree=git_write_tree(); + my $tree=git_add_write_tree(); return ($tree,$dir); } @@ -1669,7 +1905,8 @@ END push @found_differ, "archive $h->{filename}: ".join "; ", @differ if @differ; } - print "origs $file f.same=$found_same #f._differ=$#found_differ\n"; + printdebug "origs $file f.same=$found_same". + " #f._differ=$#found_differ\n"; if (@found_differ && !$found_same) { fail join "\n", "archive contains $file with different checksum", @@ -1815,7 +2052,9 @@ sub check_for_vendor_patches () { vendor_patches_distro(Dpkg::Vendor::get_current_vendor(), "Dpkg::Vendor \`current vendor'"); vendor_patches_distro(access_basedistro(), - "distro being accessed"); + "(base) distro being accessed"); + vendor_patches_distro(access_nomdistro(), + "(nominal) distro being accessed"); } sub generate_commits_from_dsc () { @@ -1854,8 +2093,7 @@ sub generate_commits_from_dsc () { # from the debian/changelog, so we record the tree objects now and # make them into commits later. 
my @tartrees; - my $upstreamv = $dsc->{version}; - $upstreamv =~ s/-[^-]+$//; + my $upstreamv = upstreamversion $dsc->{version}; my $orig_f_base = srcfn $upstreamv, ''; foreach my $fi (@dfi) { @@ -1899,14 +2137,14 @@ sub generate_commits_from_dsc () { $input = $compr_fh; } - rmtree "../unpack-tar"; - mkdir "../unpack-tar" or die $!; + rmtree "_unpack-tar"; + mkdir "_unpack-tar" or die $!; my @tarcmd = qw(tar -x -f - --no-same-owner --no-same-permissions --no-acls --no-xattrs --no-selinux); my $tar_pid = fork // die $!; if (!$tar_pid) { - chdir "../unpack-tar" or die $!; + chdir "_unpack-tar" or die $!; open STDIN, "<&", $input or die $!; exec @tarcmd; die "dgit (child): exec $tarcmd[0]: $!"; @@ -1920,11 +2158,21 @@ sub generate_commits_from_dsc () { # finally, we have the results in "tarball", but maybe # with the wrong permissions - runcmd qw(chmod -R +rwX ../unpack-tar); - changedir "../unpack-tar"; - my ($tree) = mktree_in_ud_from_only_subdir(1); - changedir "../../unpack"; - rmtree "../unpack-tar"; + runcmd qw(chmod -R +rwX _unpack-tar); + changedir "_unpack-tar"; + remove_stray_gits($f); + mktree_in_ud_here(); + + my ($tree) = git_add_write_tree(); + my $tentries = cmdoutput @git, qw(ls-tree -z), $tree; + if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) { + $tree = $1; + printdebug "one subtree $1\n"; + } else { + printdebug "multiple subtrees\n"; + } + changedir ".."; + rmtree "_unpack-tar"; my $ent = [ $f, $tree ]; push @tartrees, { @@ -1963,7 +2211,7 @@ sub generate_commits_from_dsc () { push @cmd, qw(-x --), $dscfn; runcmd @cmd; - my ($tree,$dir) = mktree_in_ud_from_only_subdir(); + my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); if (madformat $dsc->{format}) { check_for_vendor_patches(); } @@ -1973,8 +2221,7 @@ sub generate_commits_from_dsc () { my @pcmd = qw(dpkg-source --before-build .); runcmd shell_cmd 'exec >/dev/null', @pcmd; rmtree '.pc'; - runcmd @git, qw(add -Af); - $dappliedtree = git_write_tree(); + $dappliedtree = git_add_write_tree(); } my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all); @@ -2119,6 +2366,8 @@ END my $path = $ENV{PATH} or die; foreach my $use_absurd (qw(0 1)) { + runcmd @git, qw(checkout -q unpa); + runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa); local $ENV{PATH} = $path; if ($use_absurd) { chomp $@; @@ -2135,11 +2384,12 @@ END die "only absurd git-apply!\n" if !$use_absurd && forceing [qw(import-gitapply-absurd)]; - local $ENV{PATH} = $path if $use_absurd; + local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd; + local $ENV{PATH} = $path if $use_absurd; my @showcmd = (gbp_pq, qw(import)); my @realcmd = shell_cmd - 'exec >/dev/null 2>../../gbp-pq-output', @showcmd; + 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd; debugcmd "+",@realcmd; if (system @realcmd) { die +(shellquote @showcmd). @@ -2251,18 +2501,41 @@ sub ensure_we_have_orig () { } } -sub git_fetch_us () { - # Want to fetch only what we are going to use, unless - # deliberately-not-ff, in which case we must fetch everything. +#---------- git fetch ---------- - my @specs = deliberately_not_fast_forward ? qw(tags/*) : - map { "tags/$_" } - (quiltmode_splitbrain - ? 
(map { $_->('*',access_basedistro) } - \&debiantag_new, \&debiantag_maintview) - : debiantags('*',access_basedistro)); - push @specs, server_branch($csuite); - push @specs, qw(heads/*) if deliberately_not_fast_forward; +sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); } +sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); } + +# We fetch some parts of lrfetchrefs/*. Ideally we delete these +# locally fetched refs because they have unhelpful names and clutter +# up gitk etc. So we track whether we have "used up" head ref (ie, +# whether we have made another local ref which refers to this object). +# +# (If we deleted them unconditionally, then we might end up +# re-fetching the same git objects each time dgit fetch was run.) +# +# So, leach use of lrfetchrefs needs to be accompanied by arrangements +# in git_fetch_us to fetch the refs in question, and possibly a call +# to lrfetchref_used. + +our (%lrfetchrefs_f, %lrfetchrefs_d); +# $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid + +sub lrfetchref_used ($) { + my ($fullrefname) = @_; + my $objid = $lrfetchrefs_f{$fullrefname}; + $lrfetchrefs_d{$fullrefname} = $objid if defined $objid; +} + +sub git_lrfetch_sane { + my ($supplementary, @specs) = @_; + # Make a 'refs/'.lrfetchrefs.'/*' be just like on server, + # at least as regards @specs. Also leave the results in + # %lrfetchrefs_f, and arrange for lrfetchref_used to be + # able to clean these up. + # + # With $supplementary==1, @specs must not contain wildcards + # and we add to our previous fetches (non-atomically). # This is rather miserable: # When git fetch --prune is passed a fetchspec ending with a *, @@ -2286,27 +2559,33 @@ sub git_fetch_us () { # git fetch to try to generate it. If we don't manage to generate # the target state, we try again. + my $url = access_giturl(); + + printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n"; + my $specre = join '|', map { my $x = $_; $x =~ s/\W/\\$&/g; - $x =~ s/\\\*$/.*/; + my $wildcard = $x =~ s/\\\*$/.*/; + die if $wildcard && $supplementary; "(?:refs/$x)"; } @specs; - printdebug "git_fetch_us specre=$specre\n"; + printdebug "git_lrfetch_sane specre=$specre\n"; my $wanted_rref = sub { local ($_) = @_; - return m/^(?:$specre)$/o; + return m/^(?:$specre)$/; }; my $fetch_iteration = 0; FETCH_ITERATION: for (;;) { + printdebug "git_lrfetch_sane iteration $fetch_iteration\n"; if (++$fetch_iteration > 10) { fail "too many iterations trying to get sane fetch!"; } my @look = map { "refs/$_" } @specs; - my @lcmd = (@git, qw(ls-remote -q --refs), access_giturl(), @look); + my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look); debugcmd "|",@lcmd; my %wantr; @@ -2328,15 +2607,18 @@ END # OK, now %want is exactly what we want for refs in @specs my @fspecs = map { - return () if !m/\*$/ && !exists $wantr{"refs/$_"}; + !m/\*$/ && !exists $wantr{"refs/$_"} ? 
() : "+refs/$_:".lrfetchrefs."/$_"; } @specs; - my @fcmd = (@git, qw(fetch -p -n -q), access_giturl(), @fspecs); - runcmd_ordryrun_local @git, qw(fetch -p -n -q), access_giturl(), - @fspecs; + printdebug "git_lrfetch_sane fspecs @fspecs\n"; + + my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs); + runcmd_ordryrun_local @fcmd if @fspecs; - %lrfetchrefs_f = (); + if (!$supplementary) { + %lrfetchrefs_f = (); + } my %objgot; git_for_each_ref(lrfetchrefs, sub { @@ -2345,6 +2627,10 @@ END $objgot{$objid} = 1; }); + if ($supplementary) { + last; + } + foreach my $lrefname (sort keys %lrfetchrefs_f) { my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs); if (!exists $wantr{$rrefname}) { @@ -2386,11 +2672,28 @@ END } last; } - printdebug "git_fetch_us: git fetch --no-insane emulation complete\n", + printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n", Dumper(\%lrfetchrefs_f); +} + +sub git_fetch_us () { + # Want to fetch only what we are going to use, unless + # deliberately-not-ff, in which case we must fetch everything. + + my @specs = deliberately_not_fast_forward ? qw(tags/*) : + map { "tags/$_" } + (quiltmode_splitbrain + ? (map { $_->('*',access_nomdistro) } + \&debiantag_new, \&debiantag_maintview) + : debiantags('*',access_nomdistro)); + push @specs, server_branch($csuite); + push @specs, $rewritemap; + push @specs, qw(heads/*) if deliberately_not_fast_forward; + + git_lrfetch_sane 0, @specs; my %here; - my @tagpats = debiantags('*',access_basedistro); + my @tagpats = debiantags('*',access_nomdistro); git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub { my ($objid,$objtype,$fullrefname,$reftail) = @_; @@ -2414,6 +2717,8 @@ END }); } +#---------- dsc and archive handling ---------- + sub mergeinfo_getclogp ($) { # Ensures thit $mi->{Clogp} exists and returns it my ($mi) = @_; @@ -2424,6 +2729,137 @@ sub mergeinfo_version ($) { return getfield( (mergeinfo_getclogp $_[0]), 'Version' ); } +sub fetch_from_archive_record_1 ($) { + my ($hash) = @_; + runcmd @git, qw(update-ref -m), "dgit fetch $csuite", + 'DGIT_ARCHIVE', $hash; + cmdoutput @git, qw(log -n2), $hash; + # ... gives git a chance to complain if our commit is malformed +} + +sub fetch_from_archive_record_2 ($) { + my ($hash) = @_; + my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash); + if (act_local()) { + cmdoutput @upd_cmd; + } else { + dryrun_report @upd_cmd; + } +} + +sub parse_dsc_field ($$) { + my ($dsc, $what) = @_; + my $f; + foreach my $field (@ourdscfield) { + $f = $dsc->{$field}; + last if defined $f; + } + if (!defined $f) { + progress "$what: NO git hash"; + } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url) + = $f =~ m/^(\w+) ($distro_re) ($versiontag_re) (\S+)(?:\s|$)/) { + progress "$what: specified git info ($dsc_distro)"; + $dsc_hint_tag = [ $dsc_hint_tag ]; + } elsif ($f =~ m/^\w+\s*$/) { + $dsc_hash = $&; + $dsc_distro //= 'debian'; + $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'), + $dsc_distro ]; + progress "$what: specified git hash"; + } else { + fail "$what: invalid Dgit info"; + } +} + +sub resolve_dsc_field_commit ($$) { + my ($already_distro, $already_mapref) = @_; + + return unless defined $dsc_hash; + + my $mapref = + defined $already_mapref && + ($already_distro eq $dsc_distro || !$chase_dsc_distro) + ? 
$already_mapref : undef; + + my $do_fetch; + $do_fetch = sub { + my ($what, @fetch) = @_; + + local $idistro = $dsc_distro; + my $lrf = lrfetchrefs; + + if (!$chase_dsc_distro) { + progress + "not chasing .dsc distro $dsc_distro: not fetching $what"; + return 0; + } + + progress + ".dsc names distro $dsc_distro: fetching $what"; + + my $url = access_giturl(); + if (!defined $url) { + defined $dsc_hint_url or fail <("rewrite map", $rewritemap) or return; + $mapref = $lrf.'/'.$rewritemap; + my $rewritemapdata = git_cat_file $mapref.':map'; + if (defined $rewritemapdata + && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) { + progress + "server's git history rewrite map contains a relevant entry!"; + + $dsc_hash = $1; + if (defined $dsc_hash) { + progress "using rewritten git hash in place of .dsc value"; + } else { + progress "server data says .dsc hash is to be disregarded"; + } + } + } + + if (!defined git_cat_file $dsc_hash) { + my @tags = map { "tags/".$_ } @$dsc_hint_tag; + my $lrf = $do_fetch->("additional commits", @tags) && + defined git_cat_file $dsc_hash + or fail <{$field}; - last if defined $dsc_hash; - } - if (defined $dsc_hash) { - $dsc_hash =~ m/\w+/ or fail "invalid hash in .dsc \`$dsc_hash'"; - $dsc_hash = $&; - progress "last upload to archive specified git hash"; - } else { - progress "last upload to archive has NO git hash"; - } + parse_dsc_field($dsc, 'last upload to archive'); + resolve_dsc_field_commit access_basedistro, + lrfetchrefs."/".$rewritemap } else { progress "no version available from the archive"; } @@ -2565,11 +2993,8 @@ sub fetch_from_archive () { }; if (defined $dsc_hash) { - fail "missing remote git history even though dsc has hash -". - " could not find ref ".rref()." at ".access_giturl() - unless $lastpush_hash; ensure_we_have_orig(); - if ($dsc_hash eq $lastpush_hash) { + if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) { @mergeinputs = $dsc_mergeinput } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) { print STDERR <($lastpush_hash, 'dgit repo server tip (last push)'); + $chkff->($lastpush_hash, 'dgit repo server tip (last push)') + if $lastpush_hash; $chkff->($lastfetch_hash, 'local tracking tip (last fetch)'); - runcmd @git, qw(update-ref -m), "dgit fetch $csuite", - 'DGIT_ARCHIVE', $hash; - cmdoutput @git, qw(log -n2), $hash; - # ... gives git a chance to complain if our commit is malformed + fetch_from_archive_record_1($hash); if (defined $skew_warning_vsn) { mkpath '.git/dgit'; @@ -2762,12 +3185,7 @@ END } if ($lastfetch_hash ne $hash) { - my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash); - if (act_local()) { - cmdoutput @upd_cmd; - } else { - dryrun_report @upd_cmd; - } + fetch_from_archive_record_2($hash); } lrfetchref_used lrfetchref(); @@ -2838,19 +3256,207 @@ sub setup_new_tree () { setup_useremail(); } +sub multisuite_suite_child ($$$) { + my ($tsuite, $merginputs, $fn) = @_; + # in child, sets things up, calls $fn->(), and returns undef + # in parent, returns canonical suite name for $tsuite + my $canonsuitefh = IO::File::new_tmpfile; + my $pid = fork // die $!; + if (!$pid) { + $isuite = $tsuite; + $us .= " [$isuite]"; + $debugprefix .= " "; + progress "fetching $tsuite..."; + canonicalise_suite(); + print $canonsuitefh $csuite, "\n" or die $!; + close $canonsuitefh or die $!; + $fn->(); + return undef; + } + waitpid $pid,0 == $pid or die $!; + fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4; + seek $canonsuitefh,0,0 or die $!; + local $csuite = <$canonsuitefh>; + die $! 
unless defined $csuite && chomp $csuite; + if ($? == 256*4) { + printdebug "multisuite $tsuite missing\n"; + return $csuite; + } + printdebug "multisuite $tsuite ok (canon=$csuite)\n"; + push @$merginputs, { + Ref => lrref, + Info => $csuite, + }; + return $csuite; +} + +sub fork_for_multisuite ($) { + my ($before_fetch_merge) = @_; + # if nothing unusual, just returns '' + # + # if multisuite: + # returns 0 to caller in child, to do first of the specified suites + # in child, $csuite is not yet set + # + # returns 1 to caller in parent, to finish up anything needed after + # in parent, $csuite is set to canonicalised portmanteau + + my $org_isuite = $isuite; + my @suites = split /\,/, $isuite; + return '' unless @suites > 1; + printdebug "fork_for_multisuite: @suites\n"; + + my @mergeinputs; + + my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs, + sub { }); + return 0 unless defined $cbasesuite; + + fail "package $package missing in (base suite) $cbasesuite" + unless @mergeinputs; + + my @csuites = ($cbasesuite); + + $before_fetch_merge->(); + + foreach my $tsuite (@suites[1..$#suites]) { + my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs, + sub { + @end = (); + fetch(); + exit 0; + }); + # xxx collecte the ref here + + $csubsuite =~ s/^\Q$cbasesuite\E-/-/; + push @csuites, $csubsuite; + } + + foreach my $mi (@mergeinputs) { + my $ref = git_get_ref $mi->{Ref}; + die "$mi->{Ref} ?" unless length $ref; + $mi->{Commit} = $ref; + } + + $csuite = join ",", @csuites; + + my $previous = git_get_ref lrref; + if ($previous) { + unshift @mergeinputs, { + Commit => $previous, + Info => "local combined tracking branch", + Warning => + "archive seems to have rewound: local tracking branch is ahead!", + }; + } + + foreach my $ix (0..$#mergeinputs) { + $mergeinputs[$ix]{Index} = $ix; + } + + @mergeinputs = sort { + -version_compare(mergeinfo_version $a, + mergeinfo_version $b) # highest version first + or + $a->{Index} <=> $b->{Index}; # earliest in spec first + } @mergeinputs; + + my @needed; + + NEEDED: + foreach my $mi (@mergeinputs) { + printdebug "multisuite merge check $mi->{Info}\n"; + foreach my $previous (@needed) { + next unless is_fast_fwd $mi->{Commit}, $previous->{Commit}; + printdebug "multisuite merge un-needed $previous->{Info}\n"; + next NEEDED; + } + push @needed, $mi; + printdebug "multisuite merge this-needed\n"; + $mi->{Character} = '+'; + } + + $needed[0]{Character} = '*'; + + my $output = $needed[0]{Commit}; + + if (@needed > 1) { + printdebug "multisuite merge nontrivial\n"; + my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':'; + + my $commit = "tree $tree\n"; + my $msg = "Combine archive branches $csuite [dgit]\n\n". + "Input branches:\n"; + + foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) { + printdebug "multisuite merge include $mi->{Info}\n"; + $mi->{Character} //= ' '; + $commit .= "parent $mi->{Commit}\n"; + $msg .= sprintf " %s %-25s %s\n", + $mi->{Character}, + (mergeinfo_version $mi), + $mi->{Info}; + } + my $authline = clogp_authline mergeinfo_getclogp $needed[0]; + $msg .= "\nKey\n". + " * marks the highest version branch, which choose to use\n". + " + marks each branch which was not already an ancestor\n\n". + "[dgit multi-suite $csuite]\n"; + $commit .= + "author $authline\n". 
+ "committer $authline\n\n"; + $output = make_commit_text $commit.$msg; + printdebug "multisuite merge generated $output\n"; + } + + fetch_from_archive_record_1($output); + fetch_from_archive_record_2($output); + + progress "calculated combined tracking suite $csuite"; + + return 1; +} + +sub clone_set_head () { + open H, "> .git/HEAD" or die $!; + print H "ref: ".lref()."\n" or die $!; + close H or die $!; +} +sub clone_finish ($) { + my ($dstdir) = @_; + runcmd @git, qw(reset --hard), lrref(); + runcmd qw(bash -ec), <<'END'; + set -o pipefail + git ls-tree -r --name-only -z HEAD | \ + xargs -0r touch -h -r . -- +END + printdone "ready for work in $dstdir"; +} + sub clone ($) { my ($dstdir) = @_; - canonicalise_suite(); badusage "dry run makes no sense with clone" unless act_local(); + + my $multi_fetched = fork_for_multisuite(sub { + printdebug "multi clone before fetch merge\n"; + changedir $dstdir; + }); + if ($multi_fetched) { + printdebug "multi clone after fetch merge\n"; + clone_set_head(); + clone_finish($dstdir); + exit 0; + } + printdebug "clone main body\n"; + + canonicalise_suite(); my $hasgit = check_for_git(); mkdir $dstdir or fail "create \`$dstdir': $!"; changedir $dstdir; runcmd @git, qw(init -q); + clone_set_head(); my $giturl = access_giturl(1); if (defined $giturl) { - open H, "> .git/HEAD" or die $!; - print H "ref: ".lref()."\n" or die $!; - close H or die $!; runcmd @git, qw(remote add), 'origin', $giturl; } if ($hasgit) { @@ -2867,11 +3473,11 @@ sub clone ($) { runcmd @git, qw(remote add vcs-git), $vcsgiturl; } setup_new_tree(); - runcmd @git, qw(reset --hard), lrref(); - printdone "ready for work in $dstdir"; + clone_finish($dstdir); } sub fetch () { + canonicalise_suite(); if (check_for_git()) { git_fetch_us(); } @@ -2880,7 +3486,9 @@ sub fetch () { } sub pull () { - fetch(); + my $multi_fetched = fork_for_multisuite(sub { }); + fetch() unless $multi_fetched; # parent + return if $multi_fetched eq '0'; # child runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]", lrref(); printdone "fetched to ".lrref()." 
and merged into HEAD"; @@ -2981,6 +3589,18 @@ sub madformat_wantfixup ($) { return 1; } +sub maybe_split_brain_save ($$$) { + my ($headref, $dgitview, $msg) = @_; + # => message fragment "$saved" describing disposition of $dgitview + return "commit id $dgitview" unless defined $split_brain_save; + my @cmd = (shell_cmd "cd ../../../..", + @git, qw(update-ref -m), + "dgit --dgit-view-save $msg HEAD=$headref", + $split_brain_save, $dgitview); + runcmd @cmd; + return "and left in $split_brain_save"; +} + # An "infopair" is a tuple [ $thing, $what ] # (often $thing is a commit hash; $what is a description) @@ -3072,7 +3692,7 @@ tree $tree parent $dgitview parent $archive_hash author $authline -commiter $authline +committer $authline $msg_msg @@ -3113,9 +3733,9 @@ sub splitbrain_pseudomerge ($$$$) { if (defined $overwrite_version) { } elsif (!eval { - my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_basedistro; + my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_nomdistro; my $i_dep14 = infopair_lrf_tag_lookup($t_dep14, "maintainer view tag"); - my $t_dgit = debiantag_new $i_arch_v->[0], access_basedistro; + my $t_dgit = debiantag_new $i_arch_v->[0], access_nomdistro; my $i_dgit = infopair_lrf_tag_lookup($t_dgit, "dgit view tag"); my $i_archive = [ $archive_hash, "current archive contents" ]; @@ -3141,6 +3761,8 @@ END_OVERWR Make fast forward from $i_arch_v->[0] END_MAKEFF + maybe_split_brain_save $maintview, $r, "pseudomerge"; + progress "Made pseudo-merge of $i_arch_v->[0] into dgit view."; return $r; } @@ -3172,9 +3794,12 @@ sub push_parse_changelog ($) { my $clogp = Dpkg::Control::Hash->new(); $clogp->load($clogpfn) or die; - $package = getfield $clogp, 'Source'; + my $clogpackage = getfield $clogp, 'Source'; + $package //= $clogpackage; + fail "-p specified $package but changelog specified $clogpackage" + unless $package eq $clogpackage; my $cversion = getfield $clogp, 'Version'; - my $tag = debiantag($cversion, access_basedistro); + my $tag = debiantag($cversion, access_nomdistro); runcmd @git, qw(check-ref-format), $tag; my $dscfn = dscfn($cversion); @@ -3208,9 +3833,23 @@ sub push_tagwants ($$$$) { TfSuffix => '-maintview', View => 'maint', }; - } + } elsif ($dodep14tag eq 'no' ? 0 + : $dodep14tag eq 'want' ? access_cfg_tagformats_can_splitbrain + : $dodep14tag eq 'always' + ? 
(access_cfg_tagformats_can_splitbrain or fail < \&debiantag_maintview, + Objid => $dgithead, + TfSuffix => '-dgit', + View => 'dgit', + }; + }; foreach my $tw (@tagwants) { - $tw->{Tag} = $tw->{TagFn}($cversion, access_basedistro); + $tw->{Tag} = $tw->{TagFn}($cversion, access_nomdistro); $tw->{Tfn} = sub { $tfbase.$tw->{TfSuffix}.$_[0]; }; } printdebug 'push_tagwants: ', Dumper(\@_, \@tagwants); @@ -3224,7 +3863,11 @@ sub push_mktags ($$ $$ $) { die unless $tagwants->[0]{View} eq 'dgit'; - $dsc->{$ourdscfield[0]} = $tagwants->[0]{Objid}; + my $declaredistro = access_nomdistro(); + my $reader_giturl = do { local $access_forpush=0; access_giturl(); }; + $dsc->{$ourdscfield[0]} = join " ", + $tagwants->[0]{Objid}, $declaredistro, $tagwants->[0]{Tag}, + $reader_giturl; $dsc->save("$dscfn.tmp") or die $!; my $changes = parsecontrol($changesfile,$changesfilewhat); @@ -3241,7 +3884,6 @@ sub push_mktags ($$ $$ $) { # to control the "tagger" (b) we can do remote signing my $authline = clogp_authline $clogp; my $delibs = join(" ", "",@deliberatelies); - my $declaredistro = access_basedistro(); my $mktag = sub { my ($tw) = @_; @@ -3355,7 +3997,7 @@ END my $dscpath = "$buildproductsdir/$dscfn"; stat_exists $dscpath or - fail "looked for .dsc $dscfn, but $!;". + fail "looked for .dsc $dscpath, but $!;". " maybe you forgot to build"; responder_send_file('dsc', $dscpath); @@ -3369,8 +4011,7 @@ END my $dgithead = $actualhead; my $maintviewhead = undef; - my $upstreamversion = $clogp->{Version}; - $upstreamversion =~ s/-[^-]*$//; + my $upstreamversion = upstreamversion $clogp->{Version}; if (madformat_wantfixup($format)) { # user might have not used dgit build, so maybe do this now: @@ -3423,7 +4064,7 @@ END progress "checking that $dscfn corresponds to HEAD"; runcmd qw(dpkg-source -x --), $dscpath =~ m#^/# ? $dscpath : "../../../$dscpath"; - my ($tree,$dir) = mktree_in_ud_from_only_subdir(); + my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); check_for_vendor_patches() if madformat($dsc->{format}); changedir '../../../..'; my @diffcmd = (@git, qw(diff --quiet), $tree, $dgithead); @@ -3539,8 +4180,12 @@ END runcmd_ordryrun @git, qw(update-ref -m), 'dgit push', lrref(), $dgithead; supplementary_message(<<'END'); -Push failed, after updating the remote git repository. -If you want to try again, you must use a new version number. +Push failed, while obtaining signatures on the .changes and .dsc. +If it was just that the signature failed, you may try again by using +debsign by hand to sign the changes + $changesfile +and then dput to complete the upload. +If you need to change the package, you must use a new version number. END if ($we_are_responder) { my $dryrunsuffix = act_local() ? "" : ".tmp"; @@ -3574,7 +4219,6 @@ END sub cmd_clone { parseopts(); - notpushing(); my $dstdir; badusage "-p is not allowed with clone; specify as argument instead" if defined $package; @@ -3589,8 +4233,9 @@ sub cmd_clone { } else { badusage "incorrect arguments to dgit clone"; } - $dstdir ||= "$package"; + notpushing(); + $dstdir ||= "$package"; if (stat_exists $dstdir) { fail "$dstdir already exists"; } @@ -3604,6 +4249,7 @@ sub cmd_clone { return if $!==&ENOENT; die "chdir $cwd_remove: $!"; } + printdebug "clone rmonerror removing $dstdir\n"; if (stat $dstdir) { rmtree($dstdir) or die "remove $dstdir: $!\n"; } elsif (grep { $! 
== $_ } @@ -3628,36 +4274,41 @@ sub branchsuite () { } sub fetchpullargs () { - notpushing(); if (!defined $package) { my $sourcep = parsecontrol('debian/control','debian/control'); $package = getfield $sourcep, 'Source'; } if (@ARGV==0) { -# $isuite = branchsuite(); # this doesn't work because dak hates canons + $isuite = branchsuite(); if (!$isuite) { my $clogp = parsechangelog(); $isuite = getfield $clogp, 'Distribution'; } - canonicalise_suite(); - progress "fetching from suite $csuite"; } elsif (@ARGV==1) { ($isuite) = @ARGV; - canonicalise_suite(); } else { badusage "incorrect arguments to dgit fetch or dgit pull"; } + notpushing(); } sub cmd_fetch { parseopts(); fetchpullargs(); + my $multi_fetched = fork_for_multisuite(sub { }); + exit 0 if $multi_fetched; fetch(); } sub cmd_pull { parseopts(); fetchpullargs(); + if (quiltmode_splitbrain()) { + my ($format, $fopts) = get_source_format(); + madformat($format) and fail <$newmode)" ]; } } @@ -4159,9 +4814,10 @@ END runcmd @git, qw(update-ref -m), $cachekey, "refs/$splitbraincache", $dgitview; - progress "dgit view: created (commit id $dgitview)"; - changedir '.git/dgit/unpack/work'; + + my $saved = maybe_split_brain_save $headref, $dgitview, "converted"; + progress "dgit view: created ($saved)"; } sub quiltify ($$$$) { @@ -4445,13 +5101,10 @@ sub build_maybe_quilt_fixup () { check_for_vendor_patches(); if (quiltmode_splitbrain) { - foreach my $needtf (qw(new maint)) { - next if grep { $_ eq $needtf } access_cfg_tagformats; - fail <{'single-debian-patch'}) { quilt_fixup_singlepatch($clogp, $headref, $upstreamversion); @@ -4631,8 +5283,9 @@ sub quilt_check_splitbrain_cache ($$) { my $cachehit = $1; quilt_fixup_mkwork($headref); + my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit"; if ($cachehit ne $headref) { - progress "dgit view: found cached (commit id $cachehit)"; + progress "dgit view: found cached ($saved)"; runcmd @git, qw(checkout -q -b dgit-view), $cachehit; $split_brain = 1; return ($cachehit, $splitbrain_cachekey); @@ -4740,13 +5393,12 @@ sub quilt_fixup_multipatch ($$$) { changedir 'fake'; - remove_stray_gits(); + remove_stray_gits("source package"); mktree_in_ud_here(); rmtree '.pc'; - runcmd @git, qw(add -Af .); - my $unapplied=git_write_tree(); + my $unapplied=git_add_write_tree(); printdebug "fake orig tree object $unapplied\n"; ensuredir '.pc'; @@ -4778,8 +5430,7 @@ END changedir '../fake'; rmtree '.pc'; - runcmd @git, qw(add -Af .); - my $oldtiptree=git_write_tree(); + my $oldtiptree=git_add_write_tree(); printdebug "fake o+d/p tree object $unapplied\n"; changedir '../work'; @@ -4938,15 +5589,21 @@ sub cmd_clean () { maybe_unapply_patches_again(); } -sub build_prep () { +sub build_prep_early () { + our $build_prep_early_done //= 0; + return if $build_prep_early_done++; notpushing(); badusage "-p is not allowed when building" if defined $package; - check_not_dirty(); - clean_tree(); my $clogp = parsechangelog(); $isuite = getfield $clogp, 'Distribution'; $package = getfield $clogp, 'Source'; $version = getfield $clogp, 'Version'; + check_not_dirty(); +} + +sub build_prep () { + build_prep_early(); + clean_tree(); build_maybe_quilt_fixup(); if ($rmchanges) { my $pat = changespat $version; @@ -5123,6 +5780,7 @@ sub postbuild_mergechanges_vanilla ($) { } sub cmd_build { + build_prep_early(); my @dbp = (@dpkgbuildpackage, qw(-us -uc), changesopts_initial(), @ARGV); my $wantsrc = massage_dbp_args \@dbp; if ($wantsrc > 0) { @@ -5145,6 +5803,24 @@ sub pre_gbp_build { } sub cmd_gbp_build { + build_prep_early(); 
+ + # gbp can make .origs out of thin air. In my tests it does this + # even for a 1.0 format package, with no origs present. So I + # guess it keys off just the version number. We don't know + # exactly what .origs ought to exist, but let's assume that we + # should run gbp if: the version has an upstream part and the main + # orig is absent. + my $upstreamversion = upstreamversion $version; + my $origfnpat = srcfn $upstreamversion, '.orig.tar.*'; + my $gbp_make_orig = $version =~ m/-/ && !(() = glob "../$origfnpat"); + + if ($gbp_make_orig) { + clean_tree(); + $cleanmode = 'none'; # don't do it again + $need_split_build_invocation = 1; + } + my @dbp = @dpkgbuildpackage; my $wantsrc = massage_dbp_args \@dbp, \@ARGV; @@ -5160,6 +5836,24 @@ sub cmd_gbp_build { push @cmd, (qw(-us -uc --git-no-sign-tags), "--git-builder=@dbp"); + if ($gbp_make_orig) { + ensuredir '.git/dgit'; + my $ok = '.git/dgit/origs-gen-ok'; + unlink $ok or $!==&ENOENT or die $!; + my @origs_cmd = @cmd; + push @origs_cmd, qw(--git-cleaner=true); + push @origs_cmd, "--git-prebuild=touch $ok .git/dgit/no-such-dir/ok"; + push @origs_cmd, @ARGV; + if (act_local()) { + debugcmd @origs_cmd; + system @origs_cmd; + do { local $!; stat_exists $ok; } + or failedcmd @origs_cmd; + } else { + dryrun_report @origs_cmd; + } + } + if ($wantsrc > 0) { build_source(); midbuild_checkchanges_vanilla $wantsrc; @@ -5179,6 +5873,7 @@ sub cmd_gbp_build { sub cmd_git_build { cmd_gbp_build(); } # compatibility with <= 1.0 sub build_source { + build_prep_early(); my $our_cleanmode = $cleanmode; if ($need_split_build_invocation) { # Pretend that clean is being done some other way. This @@ -5239,6 +5934,7 @@ sub build_source { } sub cmd_build_source { + build_prep_early(); badusage "build-source takes no additional arguments" if @ARGV; build_source(); maybe_unapply_patches_again(); @@ -5267,10 +5963,7 @@ END sub cmd_quilt_fixup { badusage "incorrect arguments to dgit quilt-fixup" if @ARGV; - my $clogp = parsechangelog(); - $version = getfield $clogp, 'Version'; - $package = getfield $clogp, 'Source'; - check_not_dirty(); + build_prep_early(); clean_tree(); build_maybe_quilt_fixup(); } @@ -5338,30 +6031,35 @@ sub cmd_import_dsc { parse_dscdata(); - my $dgit_commit = $dsc->{$ourdscfield[0]}; - if (defined $dgit_commit && - !forceing [qw(import-dsc-with-dgit-field)]) { - $dgit_commit =~ m/\w+/ or fail "invalid hash in .dsc"; + $package = getfield $dsc, 'Source'; + + parse_dsc_field($dsc, "Dgit metadata in .dsc") + unless forceing [qw(import-dsc-with-dgit-field)]; + + if (defined $dsc_hash) { progress "dgit: import-dsc of .dsc with Dgit field, using git hash"; + resolve_dsc_field_commit undef, undef; + } + if (defined $dsc_hash) { my @cmd = (qw(sh -ec), - "echo $dgit_commit | git cat-file --batch-check"); + "echo $dsc_hash | git cat-file --batch-check"); my $objgot = cmdoutput @cmd; if ($objgot =~ m#^\w+ missing\b#) { fail < 0) { progress "Not fast forward, forced update."; } else { - fail "Not fast forward to $dgit_commit"; + fail "Not fast forward to $dsc_hash"; } } @cmd = (@git, qw(update-ref -m), "dgit import-dsc (Dgit): $info", - $dstbranch, $dgit_commit); + $dstbranch, $dsc_hash); runcmd @cmd; progress "dgit: import-dsc updated git ref $dstbranch"; return 0; @@ -5374,7 +6072,6 @@ Specify +$specbranch to overwrite, discarding existing history END if $oldhash && !$force; - $package = getfield $dsc, 'Source'; my @dfi = dsc_files_info(); foreach my $fi (@dfi) { my $f = $fi->{Filename}; @@ -5394,7 +6091,7 @@ END $there .= "/$f"; symlink $there, $here or 
fail "symlink $there to $here: $!"; progress "made symlink $here -> $there"; - print STDERR Dumper($fi); +# print STDERR Dumper($fi); } my @mergeinputs = generate_commits_from_dsc(); die unless @mergeinputs == 1; @@ -5408,10 +6105,14 @@ END progress "Import, merging."; my $tree = cmdoutput @git, qw(rev-parse), "$newhash:"; my $version = getfield $dsc, 'Version'; + my $clogp = commit_getclogp $newhash; + my $authline = clogp_authline $clogp; $newhash = make_commit_text <($oi->{Long}); @@ -5692,7 +6415,7 @@ END } -sub finalise_opts_opts () { +sub parseopts_late_defaults () { foreach my $k (keys %opts_opt_map) { my $om = $opts_opt_map{$k}; @@ -5719,6 +6442,40 @@ sub finalise_opts_opts () { @$om[$insertpos..$#$om] ); } } + + if (!defined $rmchanges) { + local $access_forpush; + $rmchanges = access_cfg_bool(0, 'rm-old-changes'); + } + + if (!defined $quilt_mode) { + local $access_forpush; + $quilt_mode = cfg('dgit.force.quilt-mode', 'RETURN-UNDEF') + // access_cfg('quilt-mode', 'RETURN-UNDEF') + // 'linear'; + $quilt_mode =~ m/^($quilt_modes_re)$/ + or badcfg "unknown quilt-mode \`$quilt_mode'"; + $quilt_mode = $1; + } + + if (!defined $dodep14tag) { + local $access_forpush; + $dodep14tag = access_cfg('dep14tag', 'RETURN-UNDEF') // 'want'; + $dodep14tag =~ m/^($dodep14tag_re)$/ + or badcfg "unknown dep14tag setting \`$dodep14tag'"; + $dodep14tag = $1; + } + + $need_split_build_invocation ||= quiltmode_splitbrain(); + + if (!defined $cleanmode) { + local $access_forpush; + $cleanmode = access_cfg('clean-mode', 'RETURN-UNDEF'); + $cleanmode //= 'dpkg-source'; + + badcfg "unknown clean-mode \`$cleanmode'" unless + $cleanmode =~ m/^($cleanmode_re)$(?!\n)/s; + } } if ($ENV{$fakeeditorenv}) { @@ -5743,32 +6500,6 @@ $cmd =~ y/-/_/; my $pre_fn = ${*::}{"pre_$cmd"}; $pre_fn->() if $pre_fn; -if (!defined $rmchanges) { - local $access_forpush; - $rmchanges = access_cfg_bool(0, 'rm-old-changes'); -} - -if (!defined $quilt_mode) { - local $access_forpush; - $quilt_mode = cfg('dgit.force.quilt-mode', 'RETURN-UNDEF') - // access_cfg('quilt-mode', 'RETURN-UNDEF') - // 'linear'; - $quilt_mode =~ m/^($quilt_modes_re)$/ - or badcfg "unknown quilt-mode \`$quilt_mode'"; - $quilt_mode = $1; -} - -$need_split_build_invocation ||= quiltmode_splitbrain(); - -if (!defined $cleanmode) { - local $access_forpush; - $cleanmode = access_cfg('clean-mode', 'RETURN-UNDEF'); - $cleanmode //= 'dpkg-source'; - - badcfg "unknown clean-mode \`$cleanmode'" unless - $cleanmode =~ m/^($cleanmode_re)$(?!\n)/s; -} - my $fn = ${*::}{"cmd_$cmd"}; $fn or badusage "unknown operation $cmd"; $fn->();
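
The least self-explanatory part of the new `aptget' archive query method above is cfg_apply_map: the aptget-suite-map and aptget-suite-rmap configuration values are fragments of Perl which are string-eval'd with the suite name in $_, which is why the debian-security defaults in %defcfg are bare s/// expressions. Below is a minimal standalone sketch of that mechanism — not dgit's exact code path (cfg_apply_map works through a scalar ref and emits printdebug output), and the suite names are only illustrative:

use strict;
use warnings;

# Apply a dgit "suite map": $mapspec is a Perl expression, run with the
# suite name in $_, in the same way cfg_apply_map does in the patch above.
sub apply_map {
    my ($value, $mapspec) = @_;
    return $value unless $mapspec;          # no map configured: unchanged
    local $_ = $value;
    eval "package Dgit::Config; $mapspec;"; # the expression edits $_ in place
    die $@ if $@;
    return $_;
}

# aptget-suite-map: the suite dgit calls "jessie-security" is the apt
# suite "jessie/updates" on the security mirror.
print apply_map('jessie-security', 's#-security$#/updates#'), "\n";
# -> jessie/updates

# aptget-suite-rmap: applied to the Codename/Suite value read back from
# the Release file apt downloaded, to recover a dgit-style suite name
# (the input here is just an example value).
print apply_map('jessie', 's#$#-security#'), "\n";
# -> jessie-security

This eval-with-$_ convention is what lets the one-line map strings in %defcfg ('s#-security$#/updates#' and 's#$#-security#') work without any further quoting or escaping.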