3 # Integration between git and Debian-style archives
5 # Copyright (C)2013-2017 Ian Jackson
6 # Copyright (C)2017 Sean Whitton
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 END { $? = $Debian::Dgit::ExitStatus::desired // -1; };
22 use Debian::Dgit::ExitStatus;
26 use Debian::Dgit qw(:DEFAULT :playground);
32 use Dpkg::Control::Hash;
34 use File::Temp qw(tempdir);
37 use Dpkg::Compression;
38 use Dpkg::Compression::Process;
43 use List::MoreUtils qw(pairwise);
44 use Text::Glob qw(match_glob);
45 use Fcntl qw(:DEFAULT :flock);
50 our $our_version = 'UNRELEASED'; ###substituted###
51 our $absurdity = undef; ###substituted###
53 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
64 our $dryrun_level = 0;
66 our $buildproductsdir = '..';
72 our $existing_package = 'dpkg';
74 our $changes_since_version;
76 our $overwrite_version; # undef: not specified; '': check changelog
78 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
80 our $split_brain_save;
81 our $we_are_responder;
82 our $we_are_initiator;
83 our $initiator_tempdir;
84 our $patches_applied_dirtily = 00;
88 our $chase_dsc_distro=1;
90 our %forceopts = map { $_=>0 }
91 qw(unrepresentable unsupported-source-format
92 dsc-changes-mismatch changes-origs-exactly
93 uploading-binaries uploading-source-only
94 import-gitapply-absurd
95 import-gitapply-no-absurd
96 import-dsc-with-dgit-field);
98 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
100 our $suite_re = '[-+.0-9a-z]+';
101 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
102 our $orig_f_comp_re = qr{orig(?:-$extra_orig_namepart_re)?};
103 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
104 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
106 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
107 our $splitbraincache = 'dgit-intern/quilt-cache';
108 our $rewritemap = 'dgit-rewrite/map';
110 our @dpkg_source_ignores = qw(-i(?:^|/)\.git(?:/|$) -I.git);
112 our (@git) = qw(git);
113 our (@dget) = qw(dget);
114 our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L));
115 our (@dput) = qw(dput);
116 our (@debsign) = qw(debsign);
117 our (@gpg) = qw(gpg);
118 our (@sbuild) = qw(sbuild);
120 our (@dgit) = qw(dgit);
121 our (@git_debrebase) = qw(git-debrebase);
122 our (@aptget) = qw(apt-get);
123 our (@aptcache) = qw(apt-cache);
124 our (@dpkgbuildpackage) = (qw(dpkg-buildpackage), @dpkg_source_ignores);
125 our (@dpkgsource) = (qw(dpkg-source), @dpkg_source_ignores);
126 our (@dpkggenchanges) = qw(dpkg-genchanges);
127 our (@mergechanges) = qw(mergechanges -f);
128 our (@gbp_build) = ('');
129 our (@gbp_pq) = ('gbp pq');
130 our (@changesopts) = ('');
132 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
135 'debsign' => \@debsign,
137 'sbuild' => \@sbuild,
141 'git-debrebase' => \@git_debrebase,
142 'apt-get' => \@aptget,
143 'apt-cache' => \@aptcache,
144 'dpkg-source' => \@dpkgsource,
145 'dpkg-buildpackage' => \@dpkgbuildpackage,
146 'dpkg-genchanges' => \@dpkggenchanges,
147 'gbp-build' => \@gbp_build,
148 'gbp-pq' => \@gbp_pq,
149 'ch' => \@changesopts,
150 'mergechanges' => \@mergechanges);
152 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
153 our %opts_cfg_insertpos = map {
155 scalar @{ $opts_opt_map{$_} }
156 } keys %opts_opt_map;
158 sub parseopts_late_defaults();
159 sub setup_gitattrs(;$);
160 sub check_gitattrs($$);
167 our $supplementary_message = '';
168 our $need_split_build_invocation = 0;
169 our $split_brain = 0;
173 return unless forkcheck_mainprocess();
174 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
177 our $remotename = 'dgit';
178 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
182 if (!defined $absurdity) {
184 $absurdity =~ s{/[^/]+$}{/absurd} or die;
188 my ($v,$distro) = @_;
189 return $tagformatfn->($v, $distro);
192 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
194 sub lbranch () { return "$branchprefix/$csuite"; }
195 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
196 sub lref () { return "refs/heads/".lbranch(); }
197 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
198 sub rrref () { return server_ref($csuite); }
208 return "${package}_".(stripepoch $vsn).$sfx
213 return srcfn($vsn,".dsc");
216 sub changespat ($;$) {
217 my ($vsn, $arch) = @_;
218 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
221 sub upstreamversion ($) {
233 return unless forkcheck_mainprocess();
234 foreach my $f (@end) {
236 print STDERR "$us: cleanup: $@" if length $@;
240 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; }
242 sub forceable_fail ($$) {
243 my ($forceoptsl, $msg) = @_;
244 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
245 print STDERR "warning: overriding problem due to --force:\n". $msg;
249 my ($forceoptsl) = @_;
250 my @got = grep { $forceopts{$_} } @$forceoptsl;
251 return 0 unless @got;
253 "warning: skipping checks or functionality due to --force-$got[0]\n";
256 sub no_such_package () {
257 print STDERR "$us: package $package does not exist in suite $isuite\n";
261 sub deliberately ($) {
263 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
266 sub deliberately_not_fast_forward () {
267 foreach (qw(not-fast-forward fresh-repo)) {
268 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
272 sub quiltmode_splitbrain () {
273 $quilt_mode =~ m/gbp|dpm|unapplied/;
276 sub opts_opt_multi_cmd {
278 push @cmd, split /\s+/, shift @_;
284 return opts_opt_multi_cmd @gbp_pq;
287 sub dgit_privdir () {
288 our $dgit_privdir_made //= ensure_a_playground 'dgit';
291 sub branch_gdr_info ($$) {
292 my ($symref, $head) = @_;
293 my ($status, $msg, $current, $ffq_prev, $gdrlast) =
294 gdr_ffq_prev_branchinfo($symref);
295 return () unless $status eq 'branch';
296 $ffq_prev = git_get_ref $ffq_prev;
297 $gdrlast = git_get_ref $gdrlast;
298 $gdrlast &&= is_fast_fwd $gdrlast, $head;
299 return ($ffq_prev, $gdrlast);
302 sub branch_is_gdr ($$) {
303 my ($symref, $head) = @_;
304 my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head);
305 return 0 unless $ffq_prev || $gdrlast;
309 sub branch_is_gdr_unstitched_ff ($$$) {
310 my ($symref, $head, $ancestor) = @_;
311 my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head);
312 return 0 unless $ffq_prev;
313 return 0 unless is_fast_fwd $ancestor, $ffq_prev;
317 #---------- remote protocol support, common ----------
319 # remote push initiator/responder protocol:
320 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
321 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
322 # < dgit-remote-push-ready <actual-proto-vsn>
329 # > supplementary-message NBYTES # $protovsn >= 3
334 # > file parsed-changelog
335 # [indicates that output of dpkg-parsechangelog follows]
336 # > data-block NBYTES
337 # > [NBYTES bytes of data (no newline)]
338 # [maybe some more blocks]
347 # > param head DGIT-VIEW-HEAD
348 # > param csuite SUITE
349 # > param tagformat old|new
350 # > param maint-view MAINT-VIEW-HEAD
352 # > param buildinfo-filename P_V_X.buildinfo # zero or more times
353 # > file buildinfo # for buildinfos to sign
355 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
356 # # goes into tag, for replay prevention
359 # [indicates that signed tag is wanted]
360 # < data-block NBYTES
361 # < [NBYTES bytes of data (no newline)]
362 # [maybe some more blocks]
366 # > want signed-dsc-changes
367 # < data-block NBYTES [transfer of signed dsc]
369 # < data-block NBYTES [transfer of signed changes]
371 # < data-block NBYTES [transfer of each signed buildinfo
372 # [etc] same number and order as "file buildinfo"]
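# An illustrative exchange (heavily elided; byte counts and object names
# are invented for the example):
#   > dgit-remote-push-ready 4
#   > param head 2d0a2816...
#   > param csuite sid
#   > file parsed-changelog
#   > data-block 1490
#   > [1490 bytes of dpkg-parsechangelog output]
#   > data-end
#   ...
#   > want signed-dsc-changes
#   < data-block 1974
#   < [1974 bytes of the signed .dsc]
#   < data-end
#   ...
#   < files-end
#   ...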
380 sub i_child_report () {
381 # Sees if our child has died, and reaps it if so. Returns a string
382 # describing how it died if it failed, or undef otherwise.
383 return undef unless $i_child_pid;
384 my $got = waitpid $i_child_pid, WNOHANG;
385 return undef if $got <= 0;
386 die unless $got == $i_child_pid;
387 $i_child_pid = undef;
388 return undef unless $?;
389 return "build host child ".waitstatusmsg();
394 fail "connection lost: $!" if $fh->error;
395 fail "protocol violation; $m not expected";
398 sub badproto_badread ($$) {
400 fail "connection lost: $!" if $!;
401 my $report = i_child_report();
402 fail $report if defined $report;
403 badproto $fh, "eof (reading $wh)";
406 sub protocol_expect (&$) {
407 my ($match, $fh) = @_;
410 defined && chomp or badproto_badread $fh, "protocol message";
418 badproto $fh, "\`$_'";
421 sub protocol_send_file ($$) {
422 my ($fh, $ourfn) = @_;
423 open PF, "<", $ourfn or die "$ourfn: $!";
426 my $got = read PF, $d, 65536;
427 die "$ourfn: $!" unless defined $got;
429 print $fh "data-block ".length($d)."\n" or die $!;
430 print $fh $d or die $!;
432 PF->error and die "$ourfn $!";
433 print $fh "data-end\n" or die $!;
437 sub protocol_read_bytes ($$) {
438 my ($fh, $nbytes) = @_;
439 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
441 my $got = read $fh, $d, $nbytes;
442 $got==$nbytes or badproto_badread $fh, "data block";
446 sub protocol_receive_file ($$) {
447 my ($fh, $ourfn) = @_;
448 printdebug "() $ourfn\n";
449 open PF, ">", $ourfn or die "$ourfn: $!";
451 my ($y,$l) = protocol_expect {
452 m/^data-block (.*)$/ ? (1,$1) :
453 m/^data-end$/ ? (0,) :
457 my $d = protocol_read_bytes $fh, $l;
458 print PF $d or die $!;
463 #---------- remote protocol support, responder ----------
465 sub responder_send_command ($) {
467 return unless $we_are_responder;
468 # callers may invoke this even without $we_are_responder, hence the guard above
469 printdebug ">> $command\n";
470 print PO $command, "\n" or die $!;
473 sub responder_send_file ($$) {
474 my ($keyword, $ourfn) = @_;
475 return unless $we_are_responder;
476 printdebug "]] $keyword $ourfn\n";
477 responder_send_command "file $keyword";
478 protocol_send_file \*PO, $ourfn;
481 sub responder_receive_files ($@) {
482 my ($keyword, @ourfns) = @_;
483 die unless $we_are_responder;
484 printdebug "[[ $keyword @ourfns\n";
485 responder_send_command "want $keyword";
486 foreach my $fn (@ourfns) {
487 protocol_receive_file \*PI, $fn;
490 protocol_expect { m/^files-end$/ } \*PI;
493 #---------- remote protocol support, initiator ----------
495 sub initiator_expect (&) {
497 protocol_expect { &$match } \*RO;
500 #---------- end remote code ----------
503 if ($we_are_responder) {
505 responder_send_command "progress ".length($m) or die $!;
506 print PO $m or die $!;
516 $ua = LWP::UserAgent->new();
520 progress "downloading $what...";
521 my $r = $ua->get(@_) or die $!;
522 return undef if $r->code == 404;
523 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
524 return $r->decoded_content(charset => 'none');
527 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
529 sub act_local () { return $dryrun_level <= 1; }
530 sub act_scary () { return !$dryrun_level; }
533 if (!$dryrun_level) {
534 progress "$us ok: @_";
536 progress "would be ok: @_ (but dry run only)";
541 printcmd(\*STDERR,$debugprefix."#",@_);
544 sub runcmd_ordryrun {
552 sub runcmd_ordryrun_local {
560 our $helpmsg = <<END;
562 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
563 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
564 dgit [dgit-opts] build [dpkg-buildpackage-opts]
565 dgit [dgit-opts] sbuild [sbuild-opts]
566 dgit [dgit-opts] push [dgit-opts] [suite]
567 dgit [dgit-opts] push-source [dgit-opts] [suite]
568 dgit [dgit-opts] rpush build-host:build-dir ...
569 important dgit options:
570 -k<keyid> sign tag and package with <keyid> instead of default
571 --dry-run -n do not change anything, but go through the motions
572 --damp-run -L like --dry-run but make local changes, without signing
573 --new -N allow introducing a new package
574 --debug -D increase debug level
575 -c<name>=<value> set git config option (used directly by dgit too)
578 our $later_warning_msg = <<END;
579 Perhaps the upload is stuck in incoming. Using the version from git.
583 print STDERR "$us: @_\n", $helpmsg or die $!;
588 @ARGV or badusage "too few arguments";
589 return scalar shift @ARGV;
593 not_necessarily_a_tree();
596 print $helpmsg or die $!;
600 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
602 our %defcfg = ('dgit.default.distro' => 'debian',
603 'dgit.default.default-suite' => 'unstable',
604 'dgit.default.old-dsc-distro' => 'debian',
605 'dgit-suite.*-security.distro' => 'debian-security',
606 'dgit.default.username' => '',
607 'dgit.default.archive-query-default-component' => 'main',
608 'dgit.default.ssh' => 'ssh',
609 'dgit.default.archive-query' => 'madison:',
610 'dgit.default.sshpsql-dbname' => 'service=projectb',
611 'dgit.default.aptget-components' => 'main',
612 'dgit.default.dgit-tag-format' => 'new,old,maint',
613 'dgit.default.source-only-uploads' => 'ok',
614 'dgit.dsc-url-proto-ok.http' => 'true',
615 'dgit.dsc-url-proto-ok.https' => 'true',
616 'dgit.dsc-url-proto-ok.git' => 'true',
617 'dgit.vcs-git.suites' => 'sid', # ;-separated
618 'dgit.default.dsc-url-proto-ok' => 'false',
619 # old means "repo server accepts pushes with old dgit tags"
620 # new means "repo server accepts pushes with new dgit tags"
621 # maint means "repo server accepts split brain pushes"
622 # hist means "repo server may have old pushes without new tag"
623 # ("hist" is implied by "old")
624 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
625 'dgit-distro.debian.git-check' => 'url',
626 'dgit-distro.debian.git-check-suffix' => '/info/refs',
627 'dgit-distro.debian.new-private-pushers' => 't',
628 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new',
629 'dgit-distro.debian/push.git-url' => '',
630 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
631 'dgit-distro.debian/push.git-user-force' => 'dgit',
632 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
633 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
634 'dgit-distro.debian/push.git-create' => 'true',
635 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
636 'dgit-distro.debian.archive-query-url' => 'https://api.ftp-master.debian.org/',
637 # 'dgit-distro.debian.archive-query-tls-key',
638 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
639 # ^ this does not work because curl is broken nowadays
640 # Fixing #790093 properly will involve providing the key
641 # in some package and maybe updating these paths.
643 # 'dgit-distro.debian.archive-query-tls-curl-args',
644 # '--ca-path=/etc/ssl/ca-debian',
645 # ^ this is a workaround but works (only) on DSA-administered machines
646 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
647 'dgit-distro.debian.git-url-suffix' => '',
648 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
649 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
650 'dgit-distro.debian-security.archive-query' => 'aptget:',
651 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
652 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
653 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
654 'dgit-distro.debian-security.nominal-distro' => 'debian',
655 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
656 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
657 'dgit-distro.ubuntu.git-check' => 'false',
658 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
659 'dgit-distro.test-dummy.ssh' => "$td/ssh",
660 'dgit-distro.test-dummy.username' => "alice",
661 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
662 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
663 'dgit-distro.test-dummy.git-url' => "$td/git",
664 'dgit-distro.test-dummy.git-host' => "git",
665 'dgit-distro.test-dummy.git-path' => "$td/git",
666 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
667 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
668 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
669 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
673 our @gitcfgsources = qw(cmdline local global system);
674 our $invoked_in_git_tree = 1;
676 sub git_slurp_config () {
677 # This algorithm is a bit subtle, but it is needed so that, for
678 # options which we want to be single-valued, we allow the
679 # different config sources to override properly. See #835858.
680 foreach my $src (@gitcfgsources) {
681 next if $src eq 'cmdline';
682 # we do this ourselves since git doesn't handle it
684 $gitcfgs{$src} = git_slurp_config_src $src;
688 sub git_get_config ($) {
690 foreach my $src (@gitcfgsources) {
691 my $l = $gitcfgs{$src}{$c};
692 confess "internal error ($l $c)" if $l && !ref $l;
693 printdebug "C $c ".(defined $l ?
694 join " ", map { messagequote "'$_'" } @$l :
698 @$l==1 or badcfg "multiple values for $c".
699 " (in $src git config)" if @$l > 1;
707 return undef if $c =~ /RETURN-UNDEF/;
708 printdebug "C? $c\n" if $debuglevel >= 5;
709 my $v = git_get_config($c);
710 return $v if defined $v;
711 my $dv = $defcfg{$c};
713 printdebug "CD $c $dv\n" if $debuglevel >= 4;
717 badcfg "need value for one of: @_\n".
718 "$us: distro or suite appears not to be (properly) supported";
721 sub not_necessarily_a_tree () {
722 # needs to be called from pre_*
723 @gitcfgsources = grep { $_ ne 'local' } @gitcfgsources;
724 $invoked_in_git_tree = 0;
727 sub access_basedistro__noalias () {
728 if (defined $idistro) {
731 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
732 return $def if defined $def;
733 foreach my $src (@gitcfgsources, 'internal') {
734 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
736 foreach my $k (keys %$kl) {
737 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
739 next unless match_glob $dpat, $isuite;
743 return cfg("dgit.default.distro");
747 sub access_basedistro () {
748 my $noalias = access_basedistro__noalias();
749 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
750 return $canon // $noalias;
753 sub access_nomdistro () {
754 my $base = access_basedistro();
755 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
756 $r =~ m/^$distro_re$/ or badcfg
757 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
761 sub access_quirk () {
762 # returns (quirk name, distro to use instead or undef, quirk-specific info)
763 my $basedistro = access_basedistro();
764 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
766 if (defined $backports_quirk) {
767 my $re = $backports_quirk;
768 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
770 $re =~ s/\%/([-0-9a-z_]+)/
771 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
772 if ($isuite =~ m/^$re$/) {
773 return ('backports',"$basedistro-backports",$1);
776 return ('none',undef);
781 sub parse_cfg_bool ($$$) {
782 my ($what,$def,$v) = @_;
785 $v =~ m/^[ty1]/ ? 1 :
786 $v =~ m/^[fn0]/ ? 0 :
787 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
790 sub access_forpush_config () {
791 my $d = access_basedistro();
795 parse_cfg_bool('new-private-pushers', 0,
796 cfg("dgit-distro.$d.new-private-pushers",
799 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
802 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
803 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
804 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
805 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
808 sub access_forpush () {
809 $access_forpush //= access_forpush_config();
810 return $access_forpush;
814 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
815 badcfg "pushing but distro is configured readonly"
816 if access_forpush_config() eq '0';
818 $supplementary_message = <<'END' unless $we_are_responder;
819 Push failed, before we got started.
820 You can retry the push, after fixing the problem, if you like.
822 parseopts_late_defaults();
826 parseopts_late_defaults();
829 sub supplementary_message ($) {
831 if (!$we_are_responder) {
832 $supplementary_message = $msg;
834 } elsif ($protovsn >= 3) {
835 responder_send_command "supplementary-message ".length($msg)
837 print PO $msg or die $!;
841 sub access_distros () {
842 # Returns list of distros to try, in order
845 # 0. `instead of' distro name(s) we have been pointed to
846 # 1. the access_quirk distro, if any
847 # 2a. the user's specified distro, or failing that } basedistro
848 # 2b. the distro calculated from the suite }
849 my @l = access_basedistro();
851 my (undef,$quirkdistro) = access_quirk();
852 unshift @l, $quirkdistro;
853 unshift @l, $instead_distro;
854 @l = grep { defined } @l;
856 push @l, access_nomdistro();
858 if (access_forpush()) {
859 @l = map { ("$_/push", $_) } @l;
864 sub access_cfg_cfgs (@) {
867 # The nesting of these loops determines the search order. We put
868 # the key loop on the outside so that we search all the distros
869 # for each key, before going on to the next key. That means that
870 # if access_cfg is called with a more specific, and then a less
871 # specific, key, an earlier distro can override the less specific
872 # without necessarily overriding any more specific keys. (If the
873 # distro wants to override the more specific keys it can simply do
874 # so; whereas if we did the loop the other way around, it would be
875 # impossible for an earlier distro to override a less specific
876 # key but not the more specific ones without restating the unknown
877 # values of the more specific keys.)
880 # We have to deal with RETURN-UNDEF specially, so that we don't
881 # terminate the search prematurely.
883 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
886 foreach my $d (access_distros()) {
887 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
889 push @cfgs, map { "dgit.default.$_" } @realkeys;
896 my (@cfgs) = access_cfg_cfgs(@keys);
897 my $value = cfg(@cfgs);
901 sub access_cfg_bool ($$) {
902 my ($def, @keys) = @_;
903 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
906 sub string_to_ssh ($) {
908 if ($spec =~ m/\s/) {
909 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
915 sub access_cfg_ssh () {
916 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
917 if (!defined $gitssh) {
920 return string_to_ssh $gitssh;
924 sub access_runeinfo ($) {
926 return ": dgit ".access_basedistro()." $info ;";
929 sub access_someuserhost ($) {
931 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
932 defined($user) && length($user) or
933 $user = access_cfg("$some-user",'username');
934 my $host = access_cfg("$some-host");
935 return length($user) ? "$user\@$host" : $host;
938 sub access_gituserhost () {
939 return access_someuserhost('git');
942 sub access_giturl (;$) {
944 my $url = access_cfg('git-url','RETURN-UNDEF');
947 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
948 return undef unless defined $proto;
951 access_gituserhost().
952 access_cfg('git-path');
954 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
957 return "$url/$package$suffix";
960 sub commit_getclogp ($) {
961 # Returns the parsed changelog hashref for a particular commit
963 our %commit_getclogp_memo;
964 my $memo = $commit_getclogp_memo{$objid};
965 return $memo if $memo;
967 my $mclog = dgit_privdir()."clog";
968 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
969 "$objid:debian/changelog";
970 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
973 sub parse_dscdata () {
974 my $dscfh = new IO::File \$dscdata, '<' or die $!;
975 printdebug Dumper($dscdata) if $debuglevel>1;
976 $dsc = parsecontrolfh($dscfh,$dscurl,1);
977 printdebug Dumper($dsc) if $debuglevel>1;
982 sub archive_query ($;@) {
983 my ($method) = shift @_;
984 fail "this operation does not support multiple comma-separated suites"
986 my $query = access_cfg('archive-query','RETURN-UNDEF');
987 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
990 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
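# The archive-query value has the form "method:data"; e.g. (illustrative)
# 'ftpmasterapi:' makes archive_query('archive_query') call
# archive_query_ftpmasterapi('ftpmasterapi','',...), while 'madison:debian'
# would dispatch to archive_query_madison('madison','debian',...).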
993 sub archive_query_prepend_mirror {
994 my $m = access_cfg('mirror');
995 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
998 sub pool_dsc_subpath ($$) {
999 my ($vsn,$component) = @_; # $package is implicit arg
1000 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1001 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1004 sub cfg_apply_map ($$$) {
1005 my ($varref, $what, $mapspec) = @_;
1006 return unless $mapspec;
1008 printdebug "config $what EVAL{ $mapspec; }\n";
1010 eval "package Dgit::Config; $mapspec;";
1015 #---------- `ftpmasterapi' archive query method (nascent) ----------
1017 sub archive_api_query_cmd ($) {
1019 my @cmd = (@curl, qw(-sS));
1020 my $url = access_cfg('archive-query-url');
1021 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1023 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1024 foreach my $key (split /\:/, $keys) {
1025 $key =~ s/\%HOST\%/$host/g;
1027 fail "for $url: stat $key: $!" unless $!==ENOENT;
1030 fail "config requested specific TLS key but do not know".
1031 " how to get curl to use exactly that EE key ($key)";
1032 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1033 # # Sadly the above line does not work because of changes
1034 # # to gnutls. The real fix for #790093 may involve
1035 # # new curl options.
1038 # Fixing #790093 properly will involve providing a value
1039 # for this on clients.
1040 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1041 push @cmd, split / /, $kargs if defined $kargs;
1043 push @cmd, $url.$subpath;
1047 sub api_query ($$;$) {
1049 my ($data, $subpath, $ok404) = @_;
1050 badcfg "ftpmasterapi archive query method takes no data part"
1052 my @cmd = archive_api_query_cmd($subpath);
1053 my $url = $cmd[$#cmd];
1054 push @cmd, qw(-w %{http_code});
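# curl's -w %{http_code} appends the 3-digit HTTP status to the body;
# the substitution below strips it off again (and detects its absence).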
1055 my $json = cmdoutput @cmd;
1056 unless ($json =~ s/\d+\d+\d$//) {
1057 failedcmd_report_cmd undef, @cmd;
1058 fail "curl failed to print 3-digit HTTP code";
1061 return undef if $code eq '404' && $ok404;
1062 fail "fetch of $url gave HTTP code $code"
1063 unless $url =~ m#^file://# or $code =~ m/^2/;
1064 return decode_json($json);
1067 sub canonicalise_suite_ftpmasterapi {
1068 my ($proto,$data) = @_;
1069 my $suites = api_query($data, 'suites');
1071 foreach my $entry (@$suites) {
1073 my $v = $entry->{$_};
1074 defined $v && $v eq $isuite;
1075 } qw(codename name);
1076 push @matched, $entry;
1078 fail "unknown suite $isuite" unless @matched;
1081 @matched==1 or die "multiple matches for suite $isuite\n";
1082 $cn = "$matched[0]{codename}";
1083 defined $cn or die "suite $isuite info has no codename\n";
1084 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1086 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1091 sub archive_query_ftpmasterapi {
1092 my ($proto,$data) = @_;
1093 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1095 my $digester = Digest::SHA->new(256);
1096 foreach my $entry (@$info) {
1098 my $vsn = "$entry->{version}";
1099 my ($ok,$msg) = version_check $vsn;
1100 die "bad version: $msg\n" unless $ok;
1101 my $component = "$entry->{component}";
1102 $component =~ m/^$component_re$/ or die "bad component";
1103 my $filename = "$entry->{filename}";
1104 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1105 or die "bad filename";
1106 my $sha256sum = "$entry->{sha256sum}";
1107 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1108 push @rows, [ $vsn, "/pool/$component/$filename",
1109 $digester, $sha256sum ];
1111 die "bad ftpmaster api response: $@\n".Dumper($entry)
1114 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1115 return archive_query_prepend_mirror @rows;
1118 sub file_in_archive_ftpmasterapi {
1119 my ($proto,$data,$filename) = @_;
1120 my $pat = $filename;
1123 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1124 my $info = api_query($data, "file_in_archive/$pat", 1);
1127 sub package_not_wholly_new_ftpmasterapi {
1128 my ($proto,$data,$pkg) = @_;
1129 my $info = api_query($data,"madison?package=${pkg}&f=json");
1133 #---------- `aptget' archive query method ----------
1136 our $aptget_releasefile;
1137 our $aptget_configpath;
1139 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1140 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1142 sub aptget_cache_clean {
1143 runcmd_ordryrun_local qw(sh -ec),
1144 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1148 sub aptget_lock_acquire () {
1149 my $lockfile = "$aptget_base/lock";
1150 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1151 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1154 sub aptget_prep ($) {
1156 return if defined $aptget_base;
1158 badcfg "aptget archive query method takes no data part"
1161 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1164 ensuredir "$cache/dgit";
1166 access_cfg('aptget-cachekey','RETURN-UNDEF')
1167 // access_nomdistro();
1169 $aptget_base = "$cache/dgit/aptget";
1170 ensuredir $aptget_base;
1172 my $quoted_base = $aptget_base;
1173 die "$quoted_base contains bad chars, cannot continue"
1174 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1176 ensuredir $aptget_base;
1178 aptget_lock_acquire();
1180 aptget_cache_clean();
1182 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1183 my $sourceslist = "source.list#$cachekey";
1185 my $aptsuites = $isuite;
1186 cfg_apply_map(\$aptsuites, 'suite map',
1187 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1189 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1190 printf SRCS "deb-src %s %s %s\n",
1191 access_cfg('mirror'),
1193 access_cfg('aptget-components')
1196 ensuredir "$aptget_base/cache";
1197 ensuredir "$aptget_base/lists";
1199 open CONF, ">", $aptget_configpath or die $!;
1201 Debug::NoLocking "true";
1202 APT::Get::List-Cleanup "false";
1203 #clear APT::Update::Post-Invoke-Success;
1204 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1205 Dir::State::Lists "$quoted_base/lists";
1206 Dir::Etc::preferences "$quoted_base/preferences";
1207 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1208 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1211 foreach my $key (qw(
1214 Dir::Cache::Archives
1215 Dir::Etc::SourceParts
1216 Dir::Etc::preferencesparts
1218 ensuredir "$aptget_base/$key";
1219 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1222 my $oldatime = (time // die $!) - 1;
1223 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1224 next unless stat_exists $oldlist;
1225 my ($mtime) = (stat _)[9];
1226 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1229 runcmd_ordryrun_local aptget_aptget(), qw(update);
1232 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1233 next unless stat_exists $oldlist;
1234 my ($atime) = (stat _)[8];
1235 next if $atime == $oldatime;
1236 push @releasefiles, $oldlist;
1238 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1239 @releasefiles = @inreleasefiles if @inreleasefiles;
1240 die "apt updated wrong number of Release files (@releasefiles), erk"
1241 unless @releasefiles == 1;
1243 ($aptget_releasefile) = @releasefiles;
1246 sub canonicalise_suite_aptget {
1247 my ($proto,$data) = @_;
1250 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1252 foreach my $name (qw(Codename Suite)) {
1253 my $val = $release->{$name};
1255 printdebug "release file $name: $val\n";
1256 $val =~ m/^$suite_re$/o or fail
1257 "Release file ($aptget_releasefile) specifies intolerable $name";
1258 cfg_apply_map(\$val, 'suite rmap',
1259 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1266 sub archive_query_aptget {
1267 my ($proto,$data) = @_;
1270 ensuredir "$aptget_base/source";
1271 foreach my $old (<$aptget_base/source/*.dsc>) {
1272 unlink $old or die "$old: $!";
1275 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1276 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1277 # avoids apt-get source failing with ambiguous error code
1279 runcmd_ordryrun_local
1280 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1281 aptget_aptget(), qw(--download-only --only-source source), $package;
1283 my @dscs = <$aptget_base/source/*.dsc>;
1284 fail "apt-get source did not produce a .dsc" unless @dscs;
1285 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1287 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1290 my $uri = "file://". uri_escape $dscs[0];
1291 $uri =~ s{\%2f}{/}gi;
1292 return [ (getfield $pre_dsc, 'Version'), $uri ];
1295 sub file_in_archive_aptget () { return undef; }
1296 sub package_not_wholly_new_aptget () { return undef; }
1298 #---------- `dummyapicat' archive query method ----------
1300 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1301 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1303 sub dummycatapi_run_in_mirror ($@) {
1304 # runs $fn with FIA open onto the output of $rune, run in the mirror directory
1305 my ($rune, $argl, $fn) = @_;
1307 my $mirror = access_cfg('mirror');
1308 $mirror =~ s#^file://#/# or die "$mirror ?";
1309 my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune,
1310 qw(x), $mirror, @$argl);
1311 debugcmd "-|", @cmd;
1312 open FIA, "-|", @cmd or die $!;
1314 close FIA or ($!==0 && $?==141) or die failedcmd @cmd;
1318 sub file_in_archive_dummycatapi ($$$) {
1319 my ($proto,$data,$filename) = @_;
1321 dummycatapi_run_in_mirror '
1322 find -name "$1" -print0 |
1324 ', [$filename], sub {
1327 printdebug "| $_\n";
1328 m/^(\w+) (\S+)$/ or die "$_ ?";
1329 push @out, { sha256sum => $1, filename => $2 };
1335 sub package_not_wholly_new_dummycatapi {
1336 my ($proto,$data,$pkg) = @_;
1337 dummycatapi_run_in_mirror "
1338 find -name ${pkg}_*.dsc
1345 #---------- `madison' archive query method ----------
1347 sub archive_query_madison {
1348 return archive_query_prepend_mirror
1349 map { [ @$_[0..1] ] } madison_get_parse(@_);
1352 sub madison_get_parse {
1353 my ($proto,$data) = @_;
1354 die unless $proto eq 'madison';
1355 if (!length $data) {
1356 $data= access_cfg('madison-distro','RETURN-UNDEF');
1357 $data //= access_basedistro();
1359 $rmad{$proto,$data,$package} ||= cmdoutput
1360 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1361 my $rmad = $rmad{$proto,$data,$package};
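# rmadison prints '|'-separated lines of package, version, suite and type,
# e.g. (illustrative):  example | 1.2-3 | unstable | source
# The suite column may carry a '/component' suffix such as 'unstable/contrib'.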
1364 foreach my $l (split /\n/, $rmad) {
1365 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1366 \s*( [^ \t|]+ )\s* \|
1367 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1368 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1369 $1 eq $package or die "$rmad $package ?";
1376 $component = access_cfg('archive-query-default-component');
1378 $5 eq 'source' or die "$rmad ?";
1379 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1381 return sort { -version_compare($a->[0],$b->[0]); } @out;
1384 sub canonicalise_suite_madison {
1385 # madison canonicalises for us
1386 my @r = madison_get_parse(@_);
1388 "unable to canonicalise suite using package $package".
1389 " which does not appear to exist in suite $isuite;".
1390 " --existing-package may help";
1394 sub file_in_archive_madison { return undef; }
1395 sub package_not_wholly_new_madison { return undef; }
1397 #---------- `sshpsql' archive query method ----------
1400 my ($data,$runeinfo,$sql) = @_;
1401 if (!length $data) {
1402 $data= access_someuserhost('sshpsql').':'.
1403 access_cfg('sshpsql-dbname');
1405 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1406 my ($userhost,$dbname) = ($`,$'); #';
1408 my @cmd = (access_cfg_ssh, $userhost,
1409 access_runeinfo("ssh-psql $runeinfo").
1410 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1411 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1413 open P, "-|", @cmd or die $!;
1416 printdebug(">|$_|\n");
1419 $!=0; $?=0; close P or failedcmd @cmd;
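# psql -A output is a header row, one '|'-separated line per result row,
# and a trailer like "(2 rows)"; e.g. (illustrative) for the
# canonicalise-suite query:
#   codename
#   sid
#   (1 row)
# The code below peels off the trailer and the header.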
1421 my $nrows = pop @rows;
1422 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1423 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1424 @rows = map { [ split /\|/, $_ ] } @rows;
1425 my $ncols = scalar @{ shift @rows };
1426 die if grep { scalar @$_ != $ncols } @rows;
1430 sub sql_injection_check {
1431 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1434 sub archive_query_sshpsql ($$) {
1435 my ($proto,$data) = @_;
1436 sql_injection_check $isuite, $package;
1437 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1438 SELECT source.version, component.name, files.filename, files.sha256sum
1440 JOIN src_associations ON source.id = src_associations.source
1441 JOIN suite ON suite.id = src_associations.suite
1442 JOIN dsc_files ON dsc_files.source = source.id
1443 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1444 JOIN component ON component.id = files_archive_map.component_id
1445 JOIN files ON files.id = dsc_files.file
1446 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1447 AND source.source='$package'
1448 AND files.filename LIKE '%.dsc';
1450 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1451 my $digester = Digest::SHA->new(256);
1453 my ($vsn,$component,$filename,$sha256sum) = @$_;
1454 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1456 return archive_query_prepend_mirror @rows;
1459 sub canonicalise_suite_sshpsql ($$) {
1460 my ($proto,$data) = @_;
1461 sql_injection_check $isuite;
1462 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1463 SELECT suite.codename
1464 FROM suite where suite_name='$isuite' or codename='$isuite';
1466 @rows = map { $_->[0] } @rows;
1467 fail "unknown suite $isuite" unless @rows;
1468 die "ambiguous $isuite: @rows ?" if @rows>1;
1472 sub file_in_archive_sshpsql ($$$) { return undef; }
1473 sub package_not_wholly_new_sshpsql ($$$) { return undef; }
1475 #---------- `dummycat' archive query method ----------
1477 sub canonicalise_suite_dummycat ($$) {
1478 my ($proto,$data) = @_;
1479 my $dpath = "$data/suite.$isuite";
1480 if (!open C, "<", $dpath) {
1481 $!==ENOENT or die "$dpath: $!";
1482 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1486 chomp or die "$dpath: $!";
1488 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1492 sub archive_query_dummycat ($$) {
1493 my ($proto,$data) = @_;
1494 canonicalise_suite();
1495 my $dpath = "$data/package.$csuite.$package";
1496 if (!open C, "<", $dpath) {
1497 $!==ENOENT or die "$dpath: $!";
1498 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1506 printdebug "dummycat query $csuite $package $dpath | $_\n";
1507 my @row = split /\s+/, $_;
1508 @row==2 or die "$dpath: $_ ?";
1511 C->error and die "$dpath: $!";
1513 return archive_query_prepend_mirror
1514 sort { -version_compare($a->[0],$b->[0]); } @rows;
1517 sub file_in_archive_dummycat () { return undef; }
1518 sub package_not_wholly_new_dummycat () { return undef; }
1520 #---------- tag format handling ----------
1522 sub access_cfg_tagformats () {
1523 split /\,/, access_cfg('dgit-tag-format');
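# e.g. with the default dgit-tag-format of 'new,old,maint' (see %defcfg
# above) this returns ('new','old','maint').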
1526 sub access_cfg_tagformats_can_splitbrain () {
1527 my %y = map { $_ => 1 } access_cfg_tagformats;
1528 foreach my $needtf (qw(new maint)) {
1529 next if $y{$needtf};
1535 sub need_tagformat ($$) {
1536 my ($fmt, $why) = @_;
1537 fail "need to use tag format $fmt ($why) but also need".
1538 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1539 " - no way to proceed"
1540 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1541 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1544 sub select_tagformat () {
1546 return if $tagformatfn && !$tagformat_want;
1547 die 'bug' if $tagformatfn && $tagformat_want;
1548 # ... $tagformat_want assigned after previous select_tagformat
1550 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1551 printdebug "select_tagformat supported @supported\n";
1553 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1554 printdebug "select_tagformat specified @$tagformat_want\n";
1556 my ($fmt,$why,$override) = @$tagformat_want;
1558 fail "target distro supports tag formats @supported".
1559 " but have to use $fmt ($why)"
1561 or grep { $_ eq $fmt } @supported;
1563 $tagformat_want = undef;
1565 $tagformatfn = ${*::}{"debiantag_$fmt"};
1567 fail "trying to use unknown tag format \`$fmt' ($why) !"
1568 unless $tagformatfn;
1571 #---------- archive query entrypoints and rest of program ----------
1573 sub canonicalise_suite () {
1574 return if defined $csuite;
1575 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1576 $csuite = archive_query('canonicalise_suite');
1577 if ($isuite ne $csuite) {
1578 progress "canonical suite name for $isuite is $csuite";
1580 progress "canonical suite name is $csuite";
1584 sub get_archive_dsc () {
1585 canonicalise_suite();
1586 my @vsns = archive_query('archive_query');
1587 foreach my $vinfo (@vsns) {
1588 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1589 $dscurl = $vsn_dscurl;
1590 $dscdata = url_get($dscurl);
1592 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1597 $digester->add($dscdata);
1598 my $got = $digester->hexdigest();
1600 fail "$dscurl has hash $got but".
1601 " archive told us to expect $digest";
1604 my $fmt = getfield $dsc, 'Format';
1605 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1606 "unsupported source format $fmt, sorry";
1608 $dsc_checked = !!$digester;
1609 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1613 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1616 sub check_for_git ();
1617 sub check_for_git () {
1619 my $how = access_cfg('git-check');
1620 if ($how eq 'ssh-cmd') {
1622 (access_cfg_ssh, access_gituserhost(),
1623 access_runeinfo("git-check $package").
1624 " set -e; cd ".access_cfg('git-path').";".
1625 " if test -d $package.git; then echo 1; else echo 0; fi");
1626 my $r= cmdoutput @cmd;
1627 if (defined $r and $r =~ m/^divert (\w+)$/) {
1629 my ($usedistro,) = access_distros();
1630 # NB that if we are pushing, $usedistro will be $distro/push
1631 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1632 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1633 progress "diverting to $divert (using config for $instead_distro)";
1634 return check_for_git();
1636 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1638 } elsif ($how eq 'url') {
1639 my $prefix = access_cfg('git-check-url','git-url');
1640 my $suffix = access_cfg('git-check-suffix','git-suffix',
1641 'RETURN-UNDEF') // '.git';
1642 my $url = "$prefix/$package$suffix";
1643 my @cmd = (@curl, qw(-sS -I), $url);
1644 my $result = cmdoutput @cmd;
1645 $result =~ s/^\S+ 200 .*\n\r?\n//;
1646 # curl -sS -I with https_proxy prints
1647 # HTTP/1.0 200 Connection established
1648 $result =~ m/^\S+ (404|200) /s or
1649 fail "unexpected results from git check query - ".
1650 Dumper($prefix, $result);
1652 if ($code eq '404') {
1654 } elsif ($code eq '200') {
1659 } elsif ($how eq 'true') {
1661 } elsif ($how eq 'false') {
1664 badcfg "unknown git-check \`$how'";
1668 sub create_remote_git_repo () {
1669 my $how = access_cfg('git-create');
1670 if ($how eq 'ssh-cmd') {
1672 (access_cfg_ssh, access_gituserhost(),
1673 access_runeinfo("git-create $package").
1674 "set -e; cd ".access_cfg('git-path').";".
1675 " cp -a _template $package.git");
1676 } elsif ($how eq 'true') {
1679 badcfg "unknown git-create \`$how'";
1683 our ($dsc_hash,$lastpush_mergeinput);
1684 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1688 dgit_privdir(); # ensures that $dgit_privdir_made is based on $maindir
1689 $playground = fresh_playground 'dgit/unpack';
1692 sub mktree_in_ud_here () {
1693 playtree_setup $gitcfgs{local};
1696 sub git_write_tree () {
1697 my $tree = cmdoutput @git, qw(write-tree);
1698 $tree =~ m/^\w+$/ or die "$tree ?";
1702 sub git_add_write_tree () {
1703 runcmd @git, qw(add -Af .);
1704 return git_write_tree();
1707 sub remove_stray_gits ($) {
1709 my @gitscmd = qw(find -name .git -prune -print0);
1710 debugcmd "|",@gitscmd;
1711 open GITS, "-|", @gitscmd or die $!;
1716 print STDERR "$us: warning: removing from $what: ",
1717 (messagequote $_), "\n";
1721 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1724 sub mktree_in_ud_from_only_subdir ($;$) {
1725 my ($what,$raw) = @_;
1726 # changes into the subdir
1729 die "expected one subdir but found @dirs ?" unless @dirs==1;
1730 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1734 remove_stray_gits($what);
1735 mktree_in_ud_here();
1737 my ($format, $fopts) = get_source_format();
1738 if (madformat($format)) {
1743 my $tree=git_add_write_tree();
1744 return ($tree,$dir);
1747 our @files_csum_info_fields =
1748 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1749 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1750 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
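# Each entry is [ field name, digest module, constructor call, key used in
# archive query results ].  A Checksums-Sha256 line in a .dsc looks like
# (illustrative):  <64 hex digits> 1234 example_1.2-3.debian.tar.xz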
1752 sub dsc_files_info () {
1753 foreach my $csumi (@files_csum_info_fields) {
1754 my ($fname, $module, $method) = @$csumi;
1755 my $field = $dsc->{$fname};
1756 next unless defined $field;
1757 eval "use $module; 1;" or die $@;
1759 foreach (split /\n/, $field) {
1761 m/^(\w+) (\d+) (\S+)$/ or
1762 fail "could not parse .dsc $fname line \`$_'";
1763 my $digester = eval "$module"."->$method;" or die $@;
1768 Digester => $digester,
1773 fail "missing any supported Checksums-* or Files field in ".
1774 $dsc->get_option('name');
1778 map { $_->{Filename} } dsc_files_info();
1781 sub files_compare_inputs (@) {
1786 my $showinputs = sub {
1787 return join "; ", map { $_->get_option('name') } @$inputs;
1790 foreach my $in (@$inputs) {
1792 my $in_name = $in->get_option('name');
1794 printdebug "files_compare_inputs $in_name\n";
1796 foreach my $csumi (@files_csum_info_fields) {
1797 my ($fname) = @$csumi;
1798 printdebug "files_compare_inputs $in_name $fname\n";
1800 my $field = $in->{$fname};
1801 next unless defined $field;
1804 foreach (split /\n/, $field) {
1807 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1808 fail "could not parse $in_name $fname line \`$_'";
1810 printdebug "files_compare_inputs $in_name $fname $f\n";
1814 my $re = \ $record{$f}{$fname};
1816 $fchecked{$f}{$in_name} = 1;
1818 fail "hash or size of $f varies in $fname fields".
1819 " (between: ".$showinputs->().")";
1824 @files = sort @files;
1825 $expected_files //= \@files;
1826 "@$expected_files" eq "@files" or
1827 fail "file list in $in_name varies between hash fields!";
1830 fail "$in_name has no files list field(s)";
1832 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1835 grep { keys %$_ == @$inputs-1 } values %fchecked
1836 or fail "no file appears in all file lists".
1837 " (looked in: ".$showinputs->().")";
1840 sub is_orig_file_in_dsc ($$) {
1841 my ($f, $dsc_files_info) = @_;
1842 return 0 if @$dsc_files_info <= 1;
1843 # One file means no origs, and the filename doesn't have a "what
1844 # part of dsc" component. (Consider versions ending `.orig'.)
1845 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1849 sub is_orig_file_of_vsn ($$) {
1850 my ($f, $upstreamvsn) = @_;
1851 my $base = srcfn $upstreamvsn, '';
1852 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
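# e.g. for a hypothetical package 'example' with upstream version '1.2',
# both "example_1.2.orig.tar.xz" and "example_1.2.orig-waf.tar.gz.asc"
# are orig files of that version.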
1856 # This function determines whether a .changes file is source-only from
1857 # the point of view of dak. Thus, it permits *_source.buildinfo
1860 # It does not, however, permit any other buildinfo files. After a
1861 # source-only upload, the buildds will try to upload files like
1862 # foo_1.2.3_amd64.buildinfo. If the package maintainer included files
1863 # named like this in their (otherwise) source-only upload, the uploads
1864 # of the buildd can be rejected by dak. Fixing the resultant
1865 # situation can require manual intervention. So we block such
1866 # .buildinfo files when the user tells us to perform a source-only
1867 # upload (such as when using the push-source subcommand with the -C
1868 # option, which calls this function).
1870 # Note, though, that when dgit is told to prepare a source-only
1871 # upload, such as when subcommands like build-source and push-source
1872 # without -C are used, dgit has a more restrictive notion of
1873 # source-only .changes than dak: such uploads will never include
1874 # *_source.buildinfo files. This is because there is no use for such
1875 # files when using a tool like dgit to produce the source package, as
1876 # dgit ensures the source is identical to git HEAD.
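# Illustrative example: a .changes listing example_1.2-3.dsc,
# example_1.2-3.debian.tar.xz and example_1.2-3_source.buildinfo passes
# this test; one which also lists example_1.2-3_amd64.buildinfo (or any
# .deb) does not.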
1877 sub test_source_only_changes ($) {
1879 foreach my $l (split /\n/, getfield $changes, 'Files') {
1880 $l =~ m/\S+$/ or next;
1881 # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
1882 unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
1883 print "purportedly source-only changes polluted by $&\n";
1890 sub changes_update_origs_from_dsc ($$$$) {
1891 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1893 printdebug "checking origs needed ($upstreamvsn)...\n";
1894 $_ = getfield $changes, 'Files';
1895 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1896 fail "cannot find section/priority from .changes Files field";
1897 my $placementinfo = $1;
1899 printdebug "checking origs needed placement '$placementinfo'...\n";
1900 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1901 $l =~ m/\S+$/ or next;
1903 printdebug "origs $file | $l\n";
1904 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1905 printdebug "origs $file is_orig\n";
1906 my $have = archive_query('file_in_archive', $file);
1907 if (!defined $have) {
1909 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1915 printdebug "origs $file \$#\$have=$#$have\n";
1916 foreach my $h (@$have) {
1919 foreach my $csumi (@files_csum_info_fields) {
1920 my ($fname, $module, $method, $archivefield) = @$csumi;
1921 next unless defined $h->{$archivefield};
1922 $_ = $dsc->{$fname};
1923 next unless defined;
1924 m/^(\w+) .* \Q$file\E$/m or
1925 fail ".dsc $fname missing entry for $file";
1926 if ($h->{$archivefield} eq $1) {
1930 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1933 die "$file ".Dumper($h)." ?!" if $same && @differ;
1936 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1939 printdebug "origs $file f.same=$found_same".
1940 " #f._differ=$#found_differ\n";
1941 if (@found_differ && !$found_same) {
1943 "archive contains $file with different checksum",
1946 # Now we edit the changes file to add or remove it
1947 foreach my $csumi (@files_csum_info_fields) {
1948 my ($fname, $module, $method, $archivefield) = @$csumi;
1949 next unless defined $changes->{$fname};
1951 # in archive, delete from .changes if it's there
1952 $changed{$file} = "removed" if
1953 $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m;
1954 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) {
1955 # not in archive, but it's here in the .changes
1957 my $dsc_data = getfield $dsc, $fname;
1958 $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
1960 $extra =~ s/ \d+ /$&$placementinfo /
1961 or die "$fname $extra >$dsc_data< ?"
1962 if $fname eq 'Files';
1963 $changes->{$fname} .= "\n". $extra;
1964 $changed{$file} = "added";
1969 foreach my $file (keys %changed) {
1971 "edited .changes for archive .orig contents: %s %s",
1972 $changed{$file}, $file;
1974 my $chtmp = "$changesfile.tmp";
1975 $changes->save($chtmp);
1977 rename $chtmp,$changesfile or die "$changesfile $!";
1979 progress "[new .changes left in $changesfile]";
1982 progress "$changesfile already has appropriate .orig(s) (if any)";
1986 sub make_commit ($) {
1988 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1991 sub make_commit_text ($) {
1994 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1996 print Dumper($text) if $debuglevel > 1;
1997 my $child = open2($out, $in, @cmd) or die $!;
2000 print $in $text or die $!;
2001 close $in or die $!;
2003 $h =~ m/^\w+$/ or die;
2005 printdebug "=> $h\n";
2008 (waitpid $child, 0) == $child or die "$child $!";
2009 $? and failedcmd @cmd;
2013 sub clogp_authline ($) {
2015 my $author = getfield $clogp, 'Maintainer';
2016 if ($author =~ m/^[^"\@]+\,/) {
2017 # single entry Maintainer field with unquoted comma
2018 $author = ($& =~ y/,//rd).$'; # strip the comma
2020 # git wants a single author; any remaining commas in $author
2021 # are by now preceded by @ (or "). It seems safer to punt on
2022 # "..." for now rather than attempting to dequote or something.
2023 $author =~ s#,.*##ms unless $author =~ m/"/;
2024 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
2025 my $authline = "$author $date";
2026 $authline =~ m/$git_authline_re/o or
2027 fail "unexpected commit author line format \`$authline'".
2028 " (was generated from changelog Maintainer field)";
2029 return ($1,$2,$3) if wantarray;
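# Illustrative example: a changelog Maintainer of
# "Jane Example <jane@example.org>" and a Date of
# "Tue, 02 Jan 2018 10:00:00 +0000" produce an authline like
# "Jane Example <jane@example.org> 1514887200 +0000" (the timezone
# offset comes from the local environment).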
2033 sub vendor_patches_distro ($$) {
2034 my ($checkdistro, $what) = @_;
2035 return unless defined $checkdistro;
2037 my $series = "debian/patches/\L$checkdistro\E.series";
2038 printdebug "checking for vendor-specific $series ($what)\n";
2040 if (!open SERIES, "<", $series) {
2041 die "$series $!" unless $!==ENOENT;
2050 Unfortunately, this source package uses a feature of dpkg-source where
2051 the same source package unpacks to different source code on different
2052 distros. dgit cannot safely operate on such packages on affected
2053 distros, because the meaning of source packages is not stable.
2055 Please ask the distro/maintainer to remove the distro-specific series
2056 files and use a different technique (if necessary, uploading actually
2057 different packages, if different distros are supposed to have
2061 fail "Found active distro-specific series file for".
2062 " $checkdistro ($what): $series, cannot continue";
2064 die "$series $!" if SERIES->error;
2068 sub check_for_vendor_patches () {
2069 # This dpkg-source feature doesn't seem to be documented anywhere!
2070 # But it can be found in the changelog (reformatted):
2072 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2073 # Author: Raphael Hertzog <hertzog@debian.org>
2074 # Date: Sun Oct 3 09:36:48 2010 +0200
2076 # dpkg-source: correctly create .pc/.quilt_series with alternate
2079 # If you have debian/patches/ubuntu.series and you were
2080 # unpacking the source package on ubuntu, quilt was still
2081 # directed to debian/patches/series instead of
2082 # debian/patches/ubuntu.series.
2084 # debian/changelog | 3 +++
2085 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2086 # 2 files changed, 6 insertions(+), 1 deletion(-)
2089 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2090 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2091 "Dpkg::Vendor \`current vendor'");
2092 vendor_patches_distro(access_basedistro(),
2093 "(base) distro being accessed");
2094 vendor_patches_distro(access_nomdistro(),
2095 "(nominal) distro being accessed");
2098 sub generate_commits_from_dsc () {
2099 # See big comment in fetch_from_archive, below.
2100 # See also README.dsc-import.
2102 changedir $playground;
2104 my @dfi = dsc_files_info();
2105 foreach my $fi (@dfi) {
2106 my $f = $fi->{Filename};
2107 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2108 my $upper_f = "$maindir/../$f";
2110 printdebug "considering reusing $f: ";
2112 if (link_ltarget "$upper_f,fetch", $f) {
2113 printdebug "linked (using ...,fetch).\n";
2114 } elsif ((printdebug "($!) "),
2116 fail "accessing ../$f,fetch: $!";
2117 } elsif (link_ltarget $upper_f, $f) {
2118 printdebug "linked.\n";
2119 } elsif ((printdebug "($!) "),
2121 fail "accessing ../$f: $!";
2123 printdebug "absent.\n";
2127 complete_file_from_dsc('.', $fi, \$refetched)
2130 printdebug "considering saving $f: ";
2132 if (link $f, $upper_f) {
2133 printdebug "linked.\n";
2134 } elsif ((printdebug "($!) "),
2136 fail "saving ../$f: $!";
2137 } elsif (!$refetched) {
2138 printdebug "no need.\n";
2139 } elsif (link $f, "$upper_f,fetch") {
2140 printdebug "linked (using ...,fetch).\n";
2141 } elsif ((printdebug "($!) "),
2143 fail "saving ../$f,fetch: $!";
2145 printdebug "cannot.\n";
2149 # We unpack and record the orig tarballs first, so that we only
2150 # need disk space for one private copy of the unpacked source.
2151 # But we can't make them into commits until we have the metadata
2152 # from the debian/changelog, so we record the tree objects now and
2153 # make them into commits later.
2155 my $upstreamv = upstreamversion $dsc->{version};
2156 my $orig_f_base = srcfn $upstreamv, '';
2158 foreach my $fi (@dfi) {
2159 # We actually import, and record as a commit, every tarball
2160 # (unless there is only one file, in which case there seems
2163 my $f = $fi->{Filename};
2164 printdebug "import considering $f ";
2165 (printdebug "only one dfi\n"), next if @dfi == 1;
2166 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2167 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2171 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2173 printdebug "Y ", (join ' ', map { $_//"(none)" }
2174 $compr_ext, $orig_f_part
2177 my $input = new IO::File $f, '<' or die "$f $!";
2181 if (defined $compr_ext) {
2183 Dpkg::Compression::compression_guess_from_filename $f;
2184 fail "Dpkg::Compression cannot handle file $f in source package"
2185 if defined $compr_ext && !defined $cname;
2187 new Dpkg::Compression::Process compression => $cname;
2188 @compr_cmd = $compr_proc->get_uncompress_cmdline();
2189 my $compr_fh = new IO::Handle;
2190 my $compr_pid = open($compr_fh, "-|") // die $!;
2192 open STDIN, "<&", $input or die $!;
2194 die "dgit (child): exec $compr_cmd[0]: $!\n";
2199 rmtree "_unpack-tar";
2200 mkdir "_unpack-tar" or die $!;
2201 my @tarcmd = qw(tar -x -f -
2202 --no-same-owner --no-same-permissions
2203 --no-acls --no-xattrs --no-selinux);
2204 my $tar_pid = fork // die $!;
2206 chdir "_unpack-tar" or die $!;
2207 open STDIN, "<&", $input or die $!;
2209 die "dgit (child): exec $tarcmd[0]: $!";
2211 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2212 !$? or failedcmd @tarcmd;
2215 (@compr_cmd ? ($?==SIGPIPE || failedcmd @compr_cmd)
2217 # finally, we have the results in "tarball", but maybe
2218 # with the wrong permissions
2220 runcmd qw(chmod -R +rwX _unpack-tar);
2221 changedir "_unpack-tar";
2222 remove_stray_gits($f);
2223 mktree_in_ud_here();
2225 my ($tree) = git_add_write_tree();
2226 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2227 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2229 printdebug "one subtree $1\n";
2231 printdebug "multiple subtrees\n";
2234 rmtree "_unpack-tar";
2236 my $ent = [ $f, $tree ];
2238 Orig => !!$orig_f_part,
2239 Sort => (!$orig_f_part ? 2 :
2240 $orig_f_part =~ m/-/g ? 1 :
2248 # put any without "_" first (spec is not clear whether files
2249 # are always in the usual order). Tarballs without "_" are
2250 # the main orig or the debian tarball.
2251 $a->{Sort} <=> $b->{Sort} or
2255 my $any_orig = grep { $_->{Orig} } @tartrees;
2257 my $dscfn = "$package.dsc";
2259 my $treeimporthow = 'package';
2261 open D, ">", $dscfn or die "$dscfn: $!";
2262 print D $dscdata or die "$dscfn: $!";
2263 close D or die "$dscfn: $!";
2264 my @cmd = qw(dpkg-source);
2265 push @cmd, '--no-check' if $dsc_checked;
2266 if (madformat $dsc->{format}) {
2267 push @cmd, '--skip-patches';
2268 $treeimporthow = 'unpatched';
2270 push @cmd, qw(-x --), $dscfn;
2273 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2274 if (madformat $dsc->{format}) {
2275 check_for_vendor_patches();
2279 if (madformat $dsc->{format}) {
2280 my @pcmd = qw(dpkg-source --before-build .);
2281 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2283 $dappliedtree = git_add_write_tree();
2286 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2290 printdebug "import clog search...\n";
2291 parsechangelog_loop \@clogcmd, "package changelog", sub {
2292 my ($thisstanza, $desc) = @_;
2293 no warnings qw(exiting);
2295 $clogp //= $thisstanza;
2297 printdebug "import clog $thisstanza->{version} $desc...\n";
2299 last if !$any_orig; # we don't need $r1clogp
2301 # We look for the first (most recent) changelog entry whose
2302 # version number is lower than the upstream version of this
2303 # package. Then the last (least recent) previous changelog
2304 # entry is treated as the one which introduced this upstream
2305 # version and used for the synthetic commits for the upstream
2308 # One might think that a more sophisticated algorithm would be
2309 # necessary. But: we do not want to scan the whole changelog
2310 # file. Stopping when we see an earlier version, which
2311 # necessarily then is an earlier upstream version, is the only
2312 # realistic way to do that. Then, either the earliest
2313 # changelog entry we have seen so far is indeed the earliest
2314 # upload of this upstream version; or there are only changelog
2315 # entries relating to later upstream versions (which is not
2316 # possible unless the changelog and .dsc disagree about the
2317 # version). Then it remains to choose between the physically
2318 # last entry in the file, and the one with the lowest version
2319 # number. If these are not the same, we guess that the
2320 # versions were created in a non-monotonic order rather than
2321 # that the changelog entries have been misordered.
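# Minimal sketch of the stopping rule described above, over a plain
# list of version strings rather than changelog stanzas.  The helper
# _example_pick_r1 is hypothetical and never called; the real code
# below works on parsed changelog entries via version_compare.
sub _example_pick_r1 {
    my ($upstreamv, @versions) = @_;   # newest first, as in a changelog
    my $r1;
    foreach my $v (@versions) {
        last if version_compare($v, $upstreamv) < 0;  # older upstream: stop
        $r1 = $v;        # latest candidate for "introduced this upstream"
    }
    return $r1;
}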
2323 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2325 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2326 $r1clogp = $thisstanza;
2328 printdebug "import clog $r1clogp->{version} becomes r1\n";
2331 $clogp or fail "package changelog has no entries!";
2333 my $authline = clogp_authline $clogp;
2334 my $changes = getfield $clogp, 'Changes';
2335 $changes =~ s/^\n//; # Changes: \n
2336 my $cversion = getfield $clogp, 'Version';
2339 $r1clogp //= $clogp; # maybe there's only one entry;
2340 my $r1authline = clogp_authline $r1clogp;
2341 # Strictly, r1authline might now be wrong if it's going to be
2342 # unused because !$any_orig. Whatever.
2344 printdebug "import tartrees authline $authline\n";
2345 printdebug "import tartrees r1authline $r1authline\n";
2347 foreach my $tt (@tartrees) {
2348 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2350 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2353 committer $r1authline
2357 [dgit import orig $tt->{F}]
2365 [dgit import tarball $package $cversion $tt->{F}]
2370 printdebug "import main commit\n";
2372 open C, ">../commit.tmp" or die $!;
2373 print C <<END or die $!;
2376 print C <<END or die $! foreach @tartrees;
2379 print C <<END or die $!;
2385 [dgit import $treeimporthow $package $cversion]
2389 my $rawimport_hash = make_commit qw(../commit.tmp);
2391 if (madformat $dsc->{format}) {
2392 printdebug "import apply patches...\n";
2394 # regularise the state of the working tree so that
2395 # the checkout of $rawimport_hash works nicely.
2396 my $dappliedcommit = make_commit_text(<<END);
2403 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2405 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2407 # We need the answers to be reproducible
2408 my @authline = clogp_authline($clogp);
2409 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2410 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2411 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2412 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2413 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2414 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2416 my $path = $ENV{PATH} or die;
2418 # we use ../../gbp-pq-output, which (given that we are in
2419 # $playground/PLAYTREE, and $playground is .git/dgit/unpack,
2422 foreach my $use_absurd (qw(0 1)) {
2423 runcmd @git, qw(checkout -q unpa);
2424 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2425 local $ENV{PATH} = $path;
2428 progress "warning: $@";
2429 $path = "$absurdity:$path";
2430 progress "$us: trying slow absurd-git-apply...";
2431 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2436 die "forbid absurd git-apply\n" if $use_absurd
2437 && forceing [qw(import-gitapply-no-absurd)];
2438 die "only absurd git-apply!\n" if !$use_absurd
2439 && forceing [qw(import-gitapply-absurd)];
2441 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2442 local $ENV{PATH} = $path if $use_absurd;
2444 my @showcmd = (gbp_pq, qw(import));
2445 my @realcmd = shell_cmd
2446 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2447 debugcmd "+",@realcmd;
2448 if (system @realcmd) {
2449 die +(shellquote @showcmd).
2451 failedcmd_waitstatus()."\n";
2454 my $gapplied = git_rev_parse('HEAD');
2455 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2456 $gappliedtree eq $dappliedtree or
2458 gbp-pq import and dpkg-source disagree!
2459 gbp-pq import gave commit $gapplied
2460 gbp-pq import gave tree $gappliedtree
2461 dpkg-source --before-build gave tree $dappliedtree
2463 $rawimport_hash = $gapplied;
2468 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2473 progress "synthesised git commit from .dsc $cversion";
2475 my $rawimport_mergeinput = {
2476 Commit => $rawimport_hash,
2477 Info => "Import of source package",
2479 my @output = ($rawimport_mergeinput);
2481 if ($lastpush_mergeinput) {
2482 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2483 my $oversion = getfield $oldclogp, 'Version';
2485 version_compare($oversion, $cversion);
2487 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2488 { Message => <<END, ReverseParents => 1 });
2489 Record $package ($cversion) in archive suite $csuite
2491 } elsif ($vcmp > 0) {
2492 print STDERR <<END or die $!;
2494 Version actually in archive: $cversion (older)
2495 Last version pushed with dgit: $oversion (newer or same)
2498 @output = $lastpush_mergeinput;
2500 # Same version. Use what's in the server git branch,
2501 # discarding our own import. (This could happen if the
2502 # server automatically imports all packages into git.)
2503 @output = $lastpush_mergeinput;
2511 sub complete_file_from_dsc ($$;$) {
2512 our ($dstdir, $fi, $refetched) = @_;
2513 # Ensures that we have, in $dstdir, the file $fi, with the correct
2514 # contents. (Downloading it from alongside $dscurl if necessary.)
2515 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2516 # and will set $$refetched=1 if it did so (or tried to).
2518 my $f = $fi->{Filename};
2519 my $tf = "$dstdir/$f";
2523 my $checkhash = sub {
2524 open F, "<", "$tf" or die "$tf: $!";
2525 $fi->{Digester}->reset();
2526 $fi->{Digester}->addfile(*F);
2527 F->error and die $!;
2528 $got = $fi->{Digester}->hexdigest();
2529 return $got eq $fi->{Hash};
2532 if (stat_exists $tf) {
2533 if ($checkhash->()) {
2534 progress "using existing $f";
2538 fail "file $f has hash $got but .dsc".
2539 " demands hash $fi->{Hash} ".
2540 "(perhaps you should delete this file?)";
2542 progress "need to fetch correct version of $f";
2543 unlink $tf or die "$tf $!";
2546 printdebug "$tf does not exist, need to fetch\n";
2550 $furl =~ s{/[^/]+$}{};
2552 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2553 die "$f ?" if $f =~ m#/#;
2554 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2555 return 0 if !act_local();
2558 fail "file $f has hash $got but .dsc".
2559 " demands hash $fi->{Hash} ".
2560 "(got wrong file from archive!)";
2565 sub ensure_we_have_orig () {
2566 my @dfi = dsc_files_info();
2567 foreach my $fi (@dfi) {
2568 my $f = $fi->{Filename};
2569 next unless is_orig_file_in_dsc($f, \@dfi);
2570 complete_file_from_dsc('..', $fi)
2575 #---------- git fetch ----------
2577 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2578 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2580 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2581 # locally fetched refs because they have unhelpful names and clutter
2582 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2583 # whether we have made another local ref which refers to this object).
2585 # (If we deleted them unconditionally, then we might end up
2586 # re-fetching the same git objects each time dgit fetch was run.)
2588 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2589 # in git_fetch_us to fetch the refs in question, and possibly a call
2590 # to lrfetchref_used.
2592 our (%lrfetchrefs_f, %lrfetchrefs_d);
2593 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
2595 sub lrfetchref_used ($) {
2596 my ($fullrefname) = @_;
2597 my $objid = $lrfetchrefs_f{$fullrefname};
2598 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2601 sub git_lrfetch_sane {
2602 my ($url, $supplementary, @specs) = @_;
2603 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2604 # at least as regards @specs. Also leave the results in
2605 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2606 # able to clean these up.
2608 # With $supplementary==1, @specs must not contain wildcards
2609 # and we add to our previous fetches (non-atomically).
2611 # This is rather miserable:
2612 # When git fetch --prune is passed a fetchspec ending with a *,
2613 # it does a plausible thing. If there is no * then:
2614 # - it matches subpaths too, even if the supplied refspec
2615 # starts refs, and behaves completely madly if the source
2616 # has refs/refs/something. (See, for example, Debian #NNNN.)
2617 # - if there is no matching remote ref, it bombs out the whole
2619 # We want to fetch a fixed ref, and we don't know in advance
2620 # if it exists, so this is not suitable.
2622 # Our workaround is to use git ls-remote. git ls-remote has its
2623 # own quirks. Notably, it has the absurd multi-tail-matching
2624 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2625 # refs/refs/foo etc.
2627 # Also, we want an idempotent snapshot, but we have to make two
2628 # calls to the remote: one to git ls-remote and to git fetch. The
2629 # solution is use git ls-remote to obtain a target state, and
2630 # git fetch to try to generate it. If we don't manage to generate
2631 # the target state, we try again.
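# Sketch of the two-step "ls-remote, fetch, verify, retry" pattern
# described above, shown for a single fixed ref and in isolation from
# the wildcard/prune handling below.  _example_snapshot_ref and the
# refs/_example/ namespace are hypothetical.
sub _example_snapshot_ref {
    my ($url, $rref) = @_;                   # e.g. 'refs/heads/master'
    for my $try (1..10) {
        # 1. ask the server what $rref currently points to
        my $ls = cmdoutput @git, qw(ls-remote -q --refs), $url, $rref;
        $ls =~ m/^(\w+)\s/ or return undef;  # ref absent on the server
        my $want = $1;
        # 2. fetch into a private local ref, then check we reproduced
        #    the state ls-remote reported
        runcmd @git, qw(fetch -n -q), $url, "+$rref:refs/_example/snapshot";
        return $want if (git_get_ref('refs/_example/snapshot') // '') eq $want;
        # the server moved between the two calls; try again
    }
    fail "server ref kept changing under us";
}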
2633 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2635 my $specre = join '|', map {
2638 my $wildcard = $x =~ s/\\\*$/.*/;
2639 die if $wildcard && $supplementary;
2642 printdebug "git_lrfetch_sane specre=$specre\n";
2643 my $wanted_rref = sub {
2645 return m/^(?:$specre)$/;
2648 my $fetch_iteration = 0;
2651 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2652 if (++$fetch_iteration > 10) {
2653 fail "too many iterations trying to get sane fetch!";
2656 my @look = map { "refs/$_" } @specs;
2657 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2661 open GITLS, "-|", @lcmd or die $!;
2663 printdebug "=> ", $_;
2664 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2665 my ($objid,$rrefname) = ($1,$2);
2666 if (!$wanted_rref->($rrefname)) {
2668 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2672 $wantr{$rrefname} = $objid;
2675 close GITLS or failedcmd @lcmd;
2677 # OK, now %want is exactly what we want for refs in @specs
2679 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2680 "+refs/$_:".lrfetchrefs."/$_";
2683 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2685 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2686 runcmd_ordryrun_local @fcmd if @fspecs;
2688 if (!$supplementary) {
2689 %lrfetchrefs_f = ();
2693 git_for_each_ref(lrfetchrefs, sub {
2694 my ($objid,$objtype,$lrefname,$reftail) = @_;
2695 $lrfetchrefs_f{$lrefname} = $objid;
2696 $objgot{$objid} = 1;
2699 if ($supplementary) {
2703 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2704 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2705 if (!exists $wantr{$rrefname}) {
2706 if ($wanted_rref->($rrefname)) {
2708 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2712 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2715 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2716 delete $lrfetchrefs_f{$lrefname};
2720 foreach my $rrefname (sort keys %wantr) {
2721 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2722 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2723 my $want = $wantr{$rrefname};
2724 next if $got eq $want;
2725 if (!defined $objgot{$want}) {
2726 fail <<END unless act_local();
2727 --dry-run specified but we actually wanted the results of git fetch,
2728 so this is not going to work. Try running dgit fetch first,
2729 or using --damp-run instead of --dry-run.
2732 warning: git ls-remote suggests we want $lrefname
2733 warning: and it should refer to $want
2734 warning: but git fetch didn't fetch that object to any relevant ref.
2735 warning: This may be due to a race with someone updating the server.
2736 warning: Will try again...
2738 next FETCH_ITERATION;
2741 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says we want $want
2743 runcmd_ordryrun_local @git, qw(update-ref -m),
2744 "dgit fetch git fetch fixup", $lrefname, $want;
2745 $lrfetchrefs_f{$lrefname} = $want;
2750 if (defined $csuite) {
2751 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2752 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2753 my ($objid,$objtype,$lrefname,$reftail) = @_;
2754 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2755 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2759 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2760 Dumper(\%lrfetchrefs_f);
2763 sub git_fetch_us () {
2764 # Want to fetch only what we are going to use, unless
2765 # deliberately-not-ff, in which case we must fetch everything.
2767 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2769 (quiltmode_splitbrain
2770 ? (map { $_->('*',access_nomdistro) }
2771 \&debiantag_new, \&debiantag_maintview)
2772 : debiantags('*',access_nomdistro));
2773 push @specs, server_branch($csuite);
2774 push @specs, $rewritemap;
2775 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2777 my $url = access_giturl();
2778 git_lrfetch_sane $url, 0, @specs;
2781 my @tagpats = debiantags('*',access_nomdistro);
2783 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2784 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2785 printdebug "currently $fullrefname=$objid\n";
2786 $here{$fullrefname} = $objid;
2788 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2789 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2790 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2791 printdebug "offered $lref=$objid\n";
2792 if (!defined $here{$lref}) {
2793 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2794 runcmd_ordryrun_local @upd;
2795 lrfetchref_used $fullrefname;
2796 } elsif ($here{$lref} eq $objid) {
2797 lrfetchref_used $fullrefname;
2800 "Not updating $lref from $here{$lref} to $objid.\n";
2805 #---------- dsc and archive handling ----------
2807 sub mergeinfo_getclogp ($) {
2808 # Ensures that $mi->{Clogp} exists and returns it
2810 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2813 sub mergeinfo_version ($) {
2814 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2817 sub fetch_from_archive_record_1 ($) {
2819 runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash;
2820 cmdoutput @git, qw(log -n2), $hash;
2821 # ... gives git a chance to complain if our commit is malformed
2824 sub fetch_from_archive_record_2 ($) {
2826 my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash);
2830 dryrun_report @upd_cmd;
2834 sub parse_dsc_field_def_dsc_distro () {
2835 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2836 dgit.default.distro);
2839 sub parse_dsc_field ($$) {
2840 my ($dsc, $what) = @_;
2842 foreach my $field (@ourdscfield) {
2843 $f = $dsc->{$field};
2848 progress "$what: NO git hash";
2849 parse_dsc_field_def_dsc_distro();
2850 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2851 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2852 progress "$what: specified git info ($dsc_distro)";
2853 $dsc_hint_tag = [ $dsc_hint_tag ];
2854 } elsif ($f =~ m/^\w+\s*$/) {
2856 parse_dsc_field_def_dsc_distro();
2857 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2859 progress "$what: specified git hash";
2861 fail "$what: invalid Dgit info";
2865 sub resolve_dsc_field_commit ($$) {
2866 my ($already_distro, $already_mapref) = @_;
2868 return unless defined $dsc_hash;
2871 defined $already_mapref &&
2872 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2873 ? $already_mapref : undef;
2877 my ($what, @fetch) = @_;
2879 local $idistro = $dsc_distro;
2880 my $lrf = lrfetchrefs;
2882 if (!$chase_dsc_distro) {
2884 "not chasing .dsc distro $dsc_distro: not fetching $what";
2889 ".dsc names distro $dsc_distro: fetching $what";
2891 my $url = access_giturl();
2892 if (!defined $url) {
2893 defined $dsc_hint_url or fail <<END;
2894 .dsc Dgit metadata is in context of distro $dsc_distro
2895 for which we have no configured url and .dsc provides no hint
2898 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2899 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2900 parse_cfg_bool "dsc-url-proto-ok", 'false',
2901 cfg("dgit.dsc-url-proto-ok.$proto",
2902 "dgit.default.dsc-url-proto-ok")
2904 .dsc Dgit metadata is in context of distro $dsc_distro
2905 for which we have no configured url;
2906 .dsc provides hinted url with protocol $proto which is unsafe.
2907 (can be overridden by config - consult documentation)
2909 $url = $dsc_hint_url;
2912 git_lrfetch_sane $url, 1, @fetch;
2917 my $rewrite_enable = do {
2918 local $idistro = $dsc_distro;
2919 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2922 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2923 if (!defined $mapref) {
2924 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2925 $mapref = $lrf.'/'.$rewritemap;
2927 my $rewritemapdata = git_cat_file $mapref.':map';
2928 if (defined $rewritemapdata
2929 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2931 "server's git history rewrite map contains a relevant entry!";
2934 if (defined $dsc_hash) {
2935 progress "using rewritten git hash in place of .dsc value";
2937 progress "server data says .dsc hash is to be disregarded";
2942 if (!defined git_cat_file $dsc_hash) {
2943 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2944 my $lrf = $do_fetch->("additional commits", @tags) &&
2945 defined git_cat_file $dsc_hash
2947 .dsc Dgit metadata requires commit $dsc_hash
2948 but we could not obtain that object anywhere.
2950 foreach my $t (@tags) {
2951 my $fullrefname = $lrf.'/'.$t;
2952 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2953 next unless $lrfetchrefs_f{$fullrefname};
2954 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2955 lrfetchref_used $fullrefname;
2960 sub fetch_from_archive () {
2961 ensure_setup_existing_tree();
2963 # Ensures that lrref() is what is actually in the archive, one way
2964 # or another, according to us - ie this client's
2965 # appropriately-updated archive view. Also returns the commit id.
2966 # If there is nothing in the archive, leaves lrref alone and
2967 # returns undef. git_fetch_us must have already been called.
2971 parse_dsc_field($dsc, 'last upload to archive');
2972 resolve_dsc_field_commit access_basedistro,
2973 lrfetchrefs."/".$rewritemap
2975 progress "no version available from the archive";
2978 # If the archive's .dsc has a Dgit field, there are three
2979 # relevant git commitids we need to choose between and/or merge
2981 # 1. $dsc_hash: the Dgit field from the archive
2982 # 2. $lastpush_hash: the suite branch on the dgit git server
2983 # 3. $lastfetch_hash: our local tracking branch for the suite
2985 # These may all be distinct and need not be in any fast forward
2988 # If the dsc was pushed to this suite, then the server suite
2989 # branch will have been updated; but it might have been pushed to
2990 # a different suite and copied by the archive. Conversely a more
2991 # recent version may have been pushed with dgit but not appeared
2992 # in the archive (yet).
2994 # $lastfetch_hash may be awkward because archive imports
2995 # (particularly, imports of Dgit-less .dscs) are performed only as
2996 # needed on individual clients, so different clients may perform a
2997 # different subset of them - and these imports are only made
2998 # public during push. So $lastfetch_hash may represent a set of
2999 # imports different to a subsequent upload by a different dgit
3002 # Our approach is as follows:
3004 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
3005 # descendant of $dsc_hash, then it was pushed by a dgit user who
3006 # had based their work on $dsc_hash, so we should prefer it.
3007 # Otherwise, $dsc_hash was installed into this suite in the
3008 # archive other than by a dgit push, and (necessarily) after the
3009 # last dgit push into that suite (since a dgit push would have
3010 # been descended from the dgit server git branch); thus, in that
3011 # case, we prefer the archive's version (and produce a
3012 # pseudo-merge to overwrite the dgit server git branch).
3014 # (If there is no Dgit field in the archive's .dsc then
3015 # generate_commit_from_dsc uses the version numbers to decide
3016 # whether the suite branch or the archive is newer. If the suite
3017 # branch is newer it ignores the archive's .dsc; otherwise it
3018 # generates an import of the .dsc, and produces a pseudo-merge to
3019 # overwrite the suite branch with the archive contents.)
3021 # The outcome of that part of the algorithm is the `public view',
3022 # and is same for all dgit clients: it does not depend on any
3023 # unpublished history in the local tracking branch.
3025 # As between the public view and the local tracking branch: The
3026 # local tracking branch is only updated by dgit fetch, and
3027 # whenever dgit fetch runs it includes the public view in the
3028 # local tracking branch. Therefore if the public view is not
3029 # descended from the local tracking branch, the local tracking
3030 # branch must contain history which was imported from the archive
3031 # but never pushed; and, its tip is now out of date. So, we make
3032 # a pseudo-merge to overwrite the old imports and stitch the old
3035 # Finally: we do not necessarily reify the public view (as
3036 # described above). This is so that we do not end up stacking two
3037 # pseudo-merges. So what we actually do is figure out the inputs
3038 # to any public view pseudo-merge and put them in @mergeinputs.
3041 # $mergeinputs[]{Commit}
3042 # $mergeinputs[]{Info}
3043 # $mergeinputs[0] is the one whose tree we use
3044 # @mergeinputs is in the order we use in the actual commit)
3047 # $mergeinputs[]{Message} is a commit message to use
3048 # $mergeinputs[]{ReverseParents} if def specifies that parent
3049 # list should be in opposite order
3050 # Such an entry has no Commit or Info. It applies only when found
3051 # in the last entry. (This ugliness is to support making
3052 # identical imports to previous dgit versions.)
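# Purely illustrative example of the @mergeinputs shape described
# above; the commit ids, package and suite are made up and this sub
# is never called.
sub _example_mergeinputs {
    return (
        { Commit => 'a' x 40,                 # entry [0]: its tree is used
          Info   => "Dgit field in .dsc from archive" },
        { Commit => 'b' x 40,
          Info   => "dgit client's archive history view" },
        { Message => "Record example (1.0-1) in archive suite sid\n",
          ReverseParents => 1 },              # only ever the last entry
    );
}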
3054 my $lastpush_hash = git_get_ref(lrfetchref());
3055 printdebug "previous reference hash=$lastpush_hash\n";
3056 $lastpush_mergeinput = $lastpush_hash && {
3057 Commit => $lastpush_hash,
3058 Info => "dgit suite branch on dgit git server",
3061 my $lastfetch_hash = git_get_ref(lrref());
3062 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3063 my $lastfetch_mergeinput = $lastfetch_hash && {
3064 Commit => $lastfetch_hash,
3065 Info => "dgit client's archive history view",
3068 my $dsc_mergeinput = $dsc_hash && {
3069 Commit => $dsc_hash,
3070 Info => "Dgit field in .dsc from archive",
3074 my $del_lrfetchrefs = sub {
3077 printdebug "del_lrfetchrefs...\n";
3078 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3079 my $objid = $lrfetchrefs_d{$fullrefname};
3080 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3082 $gur ||= new IO::Handle;
3083 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3085 printf $gur "delete %s %s\n", $fullrefname, $objid;
3088 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3092 if (defined $dsc_hash) {
3093 ensure_we_have_orig();
3094 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3095 @mergeinputs = $dsc_mergeinput
3096 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3097 print STDERR <<END or die $!;
3099 Git commit in archive is behind the last version allegedly pushed/uploaded.
3100 Commit referred to by archive: $dsc_hash
3101 Last version pushed with dgit: $lastpush_hash
3104 @mergeinputs = ($lastpush_mergeinput);
3106 # Archive has .dsc which is not a descendant of the last dgit
3107 # push. This can happen if the archive moves .dscs about.
3108 # Just follow its lead.
3109 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3110 progress "archive .dsc names newer git commit";
3111 @mergeinputs = ($dsc_mergeinput);
3113 progress "archive .dsc names other git commit, fixing up";
3114 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3118 @mergeinputs = generate_commits_from_dsc();
3119 # We have just done an import. Now, our import algorithm might
3120 # have been improved. But even so we do not want to generate
3121 # a new different import of the same package. So if the
3122 # version numbers are the same, just use our existing version.
3123 # If the version numbers are different, the archive has changed
3124 # (perhaps, rewound).
3125 if ($lastfetch_mergeinput &&
3126 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3127 (mergeinfo_version $mergeinputs[0]) )) {
3128 @mergeinputs = ($lastfetch_mergeinput);
3130 } elsif ($lastpush_hash) {
3131 # only in git, not in the archive yet
3132 @mergeinputs = ($lastpush_mergeinput);
3133 print STDERR <<END or die $!;
3135 Package not found in the archive, but has allegedly been pushed using dgit.
3139 printdebug "nothing found!\n";
3140 if (defined $skew_warning_vsn) {
3141 print STDERR <<END or die $!;
3143 Warning: relevant archive skew detected.
3144 Archive allegedly contains $skew_warning_vsn
3145 But we were not able to obtain any version from the archive or git.
3149 unshift @end, $del_lrfetchrefs;
3153 if ($lastfetch_hash &&
3155 my $h = $_->{Commit};
3156 $h and is_fast_fwd($lastfetch_hash, $h);
3157 # If true, one of the existing parents of this commit
3158 # is a descendant of the $lastfetch_hash, so we'll
3159 # be ff from that automatically.
3163 push @mergeinputs, $lastfetch_mergeinput;
3166 printdebug "fetch mergeinfos:\n";
3167 foreach my $mi (@mergeinputs) {
3169 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3171 printdebug sprintf " ReverseParents=%d Message=%s",
3172 $mi->{ReverseParents}, $mi->{Message};
3176 my $compat_info = pop @mergeinputs
3177 if $mergeinputs[$#mergeinputs]{Message};
3179 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3182 if (@mergeinputs > 1) {
3184 my $tree_commit = $mergeinputs[0]{Commit};
3186 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3187 $tree =~ m/\n\n/; $tree = $`;
3188 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3191 # We use the changelog author of the package in question as the
3192 # author of this pseudo-merge. This is (roughly) correct if
3193 # this commit is simply representing a non-dgit upload.
3194 # (Roughly because it does not record sponsorship - but we
3195 # don't have sponsorship info because that's in the .changes,
3196 # which isn't in the archive.)
3198 # But, it might be that we are representing archive history
3199 # updates (including in-archive copies). These are not really
3200 # the responsibility of the person who created the .dsc, but
3201 # there is no-one whose name we should better use. (The
3202 # author of the .dsc-named commit is clearly worse.)
3204 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3205 my $author = clogp_authline $useclogp;
3206 my $cversion = getfield $useclogp, 'Version';
3208 my $mcf = dgit_privdir()."/mergecommit";
3209 open MC, ">", $mcf or die "$mcf $!";
3210 print MC <<END or die $!;
3214 my @parents = grep { $_->{Commit} } @mergeinputs;
3215 @parents = reverse @parents if $compat_info->{ReverseParents};
3216 print MC <<END or die $! foreach @parents;
3220 print MC <<END or die $!;
3226 if (defined $compat_info->{Message}) {
3227 print MC $compat_info->{Message} or die $!;
3229 print MC <<END or die $!;
3230 Record $package ($cversion) in archive suite $csuite
3234 my $message_add_info = sub {
3236 my $mversion = mergeinfo_version $mi;
3237 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3241 $message_add_info->($mergeinputs[0]);
3242 print MC <<END or die $!;
3243 should be treated as descended from
3245 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3249 $hash = make_commit $mcf;
3251 $hash = $mergeinputs[0]{Commit};
3253 printdebug "fetch hash=$hash\n";
3256 my ($lasth, $what) = @_;
3257 return unless $lasth;
3258 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3261 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3263 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3265 fetch_from_archive_record_1($hash);
3267 if (defined $skew_warning_vsn) {
3268 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3269 my $gotclogp = commit_getclogp($hash);
3270 my $got_vsn = getfield $gotclogp, 'Version';
3271 printdebug "SKEW CHECK GOT $got_vsn\n";
3272 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3273 print STDERR <<END or die $!;
3275 Warning: archive skew detected. Using the available version:
3276 Archive allegedly contains $skew_warning_vsn
3277 We were able to obtain only $got_vsn
3283 if ($lastfetch_hash ne $hash) {
3284 fetch_from_archive_record_2($hash);
3287 lrfetchref_used lrfetchref();
3289 check_gitattrs($hash, "fetched source tree");
3291 unshift @end, $del_lrfetchrefs;
3295 sub set_local_git_config ($$) {
3297 runcmd @git, qw(config), $k, $v;
3300 sub setup_mergechangelogs (;$) {
3302 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3304 my $driver = 'dpkg-mergechangelogs';
3305 my $cb = "merge.$driver";
3306 confess unless defined $maindir;
3307 my $attrs = "$maindir_gitcommon/info/attributes";
3308 ensuredir "$maindir_gitcommon/info";
3310 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3311 if (!open ATTRS, "<", $attrs) {
3312 $!==ENOENT or die "$attrs: $!";
3316 next if m{^debian/changelog\s};
3317 print NATTRS $_, "\n" or die $!;
3319 ATTRS->error and die $!;
3322 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3325 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3326 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3328 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3331 sub setup_useremail (;$) {
3333 return unless $always || access_cfg_bool(1, 'setup-useremail');
3336 my ($k, $envvar) = @_;
3337 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3338 return unless defined $v;
3339 set_local_git_config "user.$k", $v;
3342 $setup->('email', 'DEBEMAIL');
3343 $setup->('name', 'DEBFULLNAME');
3346 sub ensure_setup_existing_tree () {
3347 my $k = "remote.$remotename.skipdefaultupdate";
3348 my $c = git_get_config $k;
3349 return if defined $c;
3350 set_local_git_config $k, 'true';
3353 sub open_main_gitattrs () {
3354 confess 'internal error no maindir' unless defined $maindir;
3355 my $gai = new IO::File "$maindir_gitcommon/info/attributes"
3357 or die "open $maindir_gitcommon/info/attributes: $!";
3361 our $gitattrs_ourmacro_re = qr{^\[attr\]dgit-defuse-attrs\s};
3363 sub is_gitattrs_setup () {
3366 # 1: gitattributes set up and should be left alone
3368 # 0: there is a dgit-defuse-attrs but it needs fixing
3369 # undef: there is none
3370 my $gai = open_main_gitattrs();
3371 return 0 unless $gai;
3373 next unless m{$gitattrs_ourmacro_re};
3374 return 1 if m{\s-working-tree-encoding\s};
3375 printdebug "is_gitattrs_setup: found old macro\n";
3378 $gai->error and die $!;
3379 printdebug "is_gitattrs_setup: found nothing\n";
3383 sub setup_gitattrs (;$) {
3385 return unless $always || access_cfg_bool(1, 'setup-gitattributes');
3387 my $already = is_gitattrs_setup();
3390 [attr]dgit-defuse-attrs already found, and proper, in .git/info/attributes
3391 not doing further gitattributes setup
3395 my $new = "[attr]dgit-defuse-attrs $negate_harmful_gitattrs";
3396 my $af = "$maindir_gitcommon/info/attributes";
3397 ensuredir "$maindir_gitcommon/info";
3399 open GAO, "> $af.new" or die $!;
3400 print GAO <<END or die $! unless defined $already;
3403 # ^ see GITATTRIBUTES in dgit(7) and dgit setup-new-tree in dgit(1)
3405 my $gai = open_main_gitattrs();
3408 if (m{$gitattrs_ourmacro_re}) {
3409 die unless defined $already;
3413 print GAO $_, "\n" or die $!;
3415 $gai->error and die $!;
3417 close GAO or die $!;
3418 rename "$af.new", "$af" or die "install $af: $!";
3421 sub setup_new_tree () {
3422 setup_mergechangelogs();
3427 sub check_gitattrs ($$) {
3428 my ($treeish, $what) = @_;
3430 return if is_gitattrs_setup;
3433 my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:");
3435 my $gafl = new IO::File;
3436 open $gafl, "-|", @cmd or die $!;
3439 s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die;
3441 next unless m{(?:^|/)\.gitattributes$};
3443 # oh dear, found one
3445 dgit: warning: $what contains .gitattributes
3446 dgit: .gitattributes not (fully) defused. Recommended: dgit setup-new-tree.
3451 # tree contains no .gitattributes files
3452 $?=0; $!=0; close $gafl or failedcmd @cmd;
3456 sub multisuite_suite_child ($$$) {
3457 my ($tsuite, $merginputs, $fn) = @_;
3458 # in child, sets things up, calls $fn->(), and returns undef
3459 # in parent, returns canonical suite name for $tsuite
3460 my $canonsuitefh = IO::File::new_tmpfile;
3461 my $pid = fork // die $!;
3465 $us .= " [$isuite]";
3466 $debugprefix .= " ";
3467 progress "fetching $tsuite...";
3468 canonicalise_suite();
3469 print $canonsuitefh $csuite, "\n" or die $!;
3470 close $canonsuitefh or die $!;
3474 (waitpid $pid, 0) == $pid or die $!;
3475 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3476 seek $canonsuitefh,0,0 or die $!;
3477 local $csuite = <$canonsuitefh>;
3478 die $! unless defined $csuite && chomp $csuite;
3480 printdebug "multisuite $tsuite missing\n";
3483 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3484 push @$merginputs, {
3491 sub fork_for_multisuite ($) {
3492 my ($before_fetch_merge) = @_;
3493 # if nothing unusual, just returns ''
3496 # returns 0 to caller in child, to do first of the specified suites
3497 # in child, $csuite is not yet set
3499 # returns 1 to caller in parent, to finish up anything needed after
3500 # in parent, $csuite is set to canonicalised portmanteau
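# Sketch of how callers consume the tristate described above (cf.
# cmd_fetch near the end of this section); _example_fetch_like is
# hypothetical and never called.
sub _example_fetch_like {
    my $multi = fork_for_multisuite(sub { });  # hook runs before fetch merge
    # ''  -> single suite: just fetch normally
    # '0' -> we are a child doing one component suite: fetch, then exit
    # 1   -> we are the parent; the combined branch is already in place
    fetch_one() unless $multi;     # covers both '' and '0'
    finish 0 if $multi eq '0';     # child stops here
    # parent (or the single-suite case) carries on from here
}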
3502 my $org_isuite = $isuite;
3503 my @suites = split /\,/, $isuite;
3504 return '' unless @suites > 1;
3505 printdebug "fork_for_multisuite: @suites\n";
3509 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3511 return 0 unless defined $cbasesuite;
3513 fail "package $package missing in (base suite) $cbasesuite"
3514 unless @mergeinputs;
3516 my @csuites = ($cbasesuite);
3518 $before_fetch_merge->();
3520 foreach my $tsuite (@suites[1..$#suites]) {
3521 $tsuite =~ s/^-/$cbasesuite-/;
3522 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3528 # xxx collect the ref here
3530 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3531 push @csuites, $csubsuite;
3534 foreach my $mi (@mergeinputs) {
3535 my $ref = git_get_ref $mi->{Ref};
3536 die "$mi->{Ref} ?" unless length $ref;
3537 $mi->{Commit} = $ref;
3540 $csuite = join ",", @csuites;
3542 my $previous = git_get_ref lrref;
3544 unshift @mergeinputs, {
3545 Commit => $previous,
3546 Info => "local combined tracking branch",
3548 "archive seems to have rewound: local tracking branch is ahead!",
3552 foreach my $ix (0..$#mergeinputs) {
3553 $mergeinputs[$ix]{Index} = $ix;
3556 @mergeinputs = sort {
3557 -version_compare(mergeinfo_version $a,
3558 mergeinfo_version $b) # highest version first
3560 $a->{Index} <=> $b->{Index}; # earliest in spec first
3566 foreach my $mi (@mergeinputs) {
3567 printdebug "multisuite merge check $mi->{Info}\n";
3568 foreach my $previous (@needed) {
3569 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3570 printdebug "multisuite merge un-needed $previous->{Info}\n";
3574 printdebug "multisuite merge this-needed\n";
3575 $mi->{Character} = '+';
3578 $needed[0]{Character} = '*';
3580 my $output = $needed[0]{Commit};
3583 printdebug "multisuite merge nontrivial\n";
3584 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3586 my $commit = "tree $tree\n";
3587 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3588 "Input branches:\n";
3590 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3591 printdebug "multisuite merge include $mi->{Info}\n";
3592 $mi->{Character} //= ' ';
3593 $commit .= "parent $mi->{Commit}\n";
3594 $msg .= sprintf " %s %-25s %s\n",
3596 (mergeinfo_version $mi),
3599 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3601 " * marks the highest version branch, which choose to use\n".
3602 " + marks each branch which was not already an ancestor\n\n".
3603 "[dgit multi-suite $csuite]\n";
3605 "author $authline\n".
3606 "committer $authline\n\n";
3607 $output = make_commit_text $commit.$msg;
3608 printdebug "multisuite merge generated $output\n";
3611 fetch_from_archive_record_1($output);
3612 fetch_from_archive_record_2($output);
3614 progress "calculated combined tracking suite $csuite";
3619 sub clone_set_head () {
3620 open H, "> .git/HEAD" or die $!;
3621 print H "ref: ".lref()."\n" or die $!;
3624 sub clone_finish ($) {
3626 runcmd @git, qw(reset --hard), lrref();
3627 runcmd qw(bash -ec), <<'END';
3629 git ls-tree -r --name-only -z HEAD | \
3630 xargs -0r touch -h -r . --
3632 printdone "ready for work in $dstdir";
3636 # in multisuite, returns twice!
3637 # once in parent after first suite fetched,
3638 # and then again in child after everything is finished
3640 badusage "dry run makes no sense with clone" unless act_local();
3642 my $multi_fetched = fork_for_multisuite(sub {
3643 printdebug "multi clone before fetch merge\n";
3647 if ($multi_fetched) {
3648 printdebug "multi clone after fetch merge\n";
3650 clone_finish($dstdir);
3653 printdebug "clone main body\n";
3655 canonicalise_suite();
3656 my $hasgit = check_for_git();
3657 mkdir $dstdir or fail "create \`$dstdir': $!";
3659 runcmd @git, qw(init -q);
3663 my $giturl = access_giturl(1);
3664 if (defined $giturl) {
3665 runcmd @git, qw(remote add), 'origin', $giturl;
3668 progress "fetching existing git history";
3670 runcmd_ordryrun_local @git, qw(fetch origin);
3672 progress "starting new git history";
3674 fetch_from_archive() or no_such_package;
3675 my $vcsgiturl = $dsc->{'Vcs-Git'};
3676 if (length $vcsgiturl) {
3677 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3678 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3680 clone_finish($dstdir);
3684 canonicalise_suite();
3685 if (check_for_git()) {
3688 fetch_from_archive() or no_such_package();
3690 my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'};
3691 if (length $vcsgiturl and
3692 (grep { $csuite eq $_ }
3694 cfg 'dgit.vcs-git.suites')) {
3695 my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF';
3696 if (defined $current && $current ne $vcsgiturl) {
3698 FYI: Vcs-Git in $csuite has different url to your vcs-git remote.
3699 Your vcs-git remote url may be out of date. Use dgit update-vcs-git ?
3703 printdone "fetched into ".lrref();
3707 my $multi_fetched = fork_for_multisuite(sub { });
3708 fetch_one() unless $multi_fetched; # parent
3709 finish 0 if $multi_fetched eq '0'; # child