3 # Integration between git and Debian-style archives
5 # Copyright (C)2013-2017 Ian Jackson
6 # Copyright (C)2017 Sean Whitton
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 END { $? = $Debian::Dgit::ExitStatus::desired // -1; };
22 use Debian::Dgit::ExitStatus;
26 use Debian::Dgit qw(:DEFAULT :playground);
32 use Dpkg::Control::Hash;
34 use File::Temp qw(tempdir);
37 use Dpkg::Compression;
38 use Dpkg::Compression::Process;
43 use List::MoreUtils qw(pairwise);
44 use Text::Glob qw(match_glob);
45 use Fcntl qw(:DEFAULT :flock);
50 our $our_version = 'UNRELEASED'; ###substituted###
51 our $absurdity = undef; ###substituted###
53 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
64 our $dryrun_level = 0;
66 our $buildproductsdir;
72 our $existing_package = 'dpkg';
74 our $changes_since_version;
76 our $overwrite_version; # undef: not specified; '': check changelog
78 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
80 our $split_brain_save;
81 our $we_are_responder;
82 our $we_are_initiator;
83 our $initiator_tempdir;
84 our $patches_applied_dirtily = 00;
88 our $chase_dsc_distro=1;
90 our %forceopts = map { $_=>0 }
91 qw(unrepresentable unsupported-source-format
92 dsc-changes-mismatch changes-origs-exactly
93 uploading-binaries uploading-source-only
94 import-gitapply-absurd
95 import-gitapply-no-absurd
96 import-dsc-with-dgit-field);
98 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
100 our $suite_re = '[-+.0-9a-z]+';
101 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
102 our $orig_f_comp_re = qr{orig(?:-$extra_orig_namepart_re)?};
103 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
104 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
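# For illustration (hypothetical filenames, not from any particular package):
# with the patterns above, names such as "foo_1.2.orig.tar.gz",
# "foo_1.2.orig-component1.tar.xz" or "foo_1.2.orig.tar.gz.asc" are treated
# as orig tarball parts (or their signatures), whereas
# "foo_1.2-3.debian.tar.xz" is not, since its tail does not match
# $orig_f_tail_re.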
106 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
107 our $splitbraincache = 'dgit-intern/quilt-cache';
108 our $rewritemap = 'dgit-rewrite/map';
110 our @dpkg_source_ignores = qw(-i(?:^|/)\.git(?:/|$) -I.git);
112 our (@git) = qw(git);
113 our (@dget) = qw(dget);
114 our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L));
115 our (@dput) = qw(dput);
116 our (@debsign) = qw(debsign);
117 our (@gpg) = qw(gpg);
118 our (@sbuild) = qw(sbuild);
120 our (@dgit) = qw(dgit);
121 our (@git_debrebase) = qw(git-debrebase);
122 our (@aptget) = qw(apt-get);
123 our (@aptcache) = qw(apt-cache);
124 our (@dpkgbuildpackage) = (qw(dpkg-buildpackage), @dpkg_source_ignores);
125 our (@dpkgsource) = (qw(dpkg-source), @dpkg_source_ignores);
126 our (@dpkggenchanges) = qw(dpkg-genchanges);
127 our (@mergechanges) = qw(mergechanges -f);
128 our (@gbp_build) = ('');
129 our (@gbp_pq) = ('gbp pq');
130 our (@changesopts) = ('');
132 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
135 'debsign' => \@debsign,
137 'sbuild' => \@sbuild,
141 'git-debrebase' => \@git_debrebase,
142 'apt-get' => \@aptget,
143 'apt-cache' => \@aptcache,
144 'dpkg-source' => \@dpkgsource,
145 'dpkg-buildpackage' => \@dpkgbuildpackage,
146 'dpkg-genchanges' => \@dpkggenchanges,
147 'gbp-build' => \@gbp_build,
148 'gbp-pq' => \@gbp_pq,
149 'ch' => \@changesopts,
150 'mergechanges' => \@mergechanges);
152 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
153 our %opts_cfg_insertpos = map {
155 scalar @{ $opts_opt_map{$_} }
156 } keys %opts_opt_map;
158 sub parseopts_late_defaults();
159 sub setup_gitattrs(;$);
160 sub check_gitattrs($$);
167 our $supplementary_message = '';
168 our $need_split_build_invocation = 0;
169 our $split_brain = 0;
173 return unless forkcheck_mainprocess();
174 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
177 our $remotename = 'dgit';
178 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
182 if (!defined $absurdity) {
184 $absurdity =~ s{/[^/]+$}{/absurd} or die;
188 my ($v,$distro) = @_;
189 return $tagformatfn->($v, $distro);
192 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
194 sub lbranch () { return "$branchprefix/$csuite"; }
195 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
196 sub lref () { return "refs/heads/".lbranch(); }
197 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
198 sub rrref () { return server_ref($csuite); }
208 return "${package}_".(stripepoch $vsn).$sfx
213 return srcfn($vsn,".dsc");
216 sub changespat ($;$) {
217 my ($vsn, $arch) = @_;
218 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
221 sub upstreamversion ($) {
233 return unless forkcheck_mainprocess();
234 foreach my $f (@end) {
236 print STDERR "$us: cleanup: $@" if length $@;
240 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; }
242 sub forceable_fail ($$) {
243 my ($forceoptsl, $msg) = @_;
244 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
245 print STDERR "warning: overriding problem due to --force:\n". $msg;
249 my ($forceoptsl) = @_;
250 my @got = grep { $forceopts{$_} } @$forceoptsl;
251 return 0 unless @got;
253 "warning: skipping checks or functionality due to --force-$got[0]\n";
256 sub no_such_package () {
257 print STDERR "$us: package $package does not exist in suite $isuite\n";
261 sub deliberately ($) {
263 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
266 sub deliberately_not_fast_forward () {
267 foreach (qw(not-fast-forward fresh-repo)) {
268 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
272 sub quiltmode_splitbrain () {
273 $quilt_mode =~ m/gbp|dpm|unapplied/;
276 sub opts_opt_multi_cmd {
278 push @cmd, split /\s+/, shift @_;
284 return opts_opt_multi_cmd @gbp_pq;
287 sub dgit_privdir () {
288 our $dgit_privdir_made //= ensure_a_playground 'dgit';
291 sub branch_gdr_info ($$) {
292 my ($symref, $head) = @_;
293 my ($status, $msg, $current, $ffq_prev, $gdrlast) =
294 gdr_ffq_prev_branchinfo($symref);
295 return () unless $status eq 'branch';
296 $ffq_prev = git_get_ref $ffq_prev;
297 $gdrlast = git_get_ref $gdrlast;
298 $gdrlast &&= is_fast_fwd $gdrlast, $head;
299 return ($ffq_prev, $gdrlast);
302 sub branch_is_gdr ($$) {
303 my ($symref, $head) = @_;
304 my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head);
305 return 0 unless $ffq_prev || $gdrlast;
309 sub branch_is_gdr_unstitched_ff ($$$) {
310 my ($symref, $head, $ancestor) = @_;
311 my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head);
312 return 0 unless $ffq_prev;
313 return 0 unless is_fast_fwd $ancestor, $ffq_prev;
317 #---------- remote protocol support, common ----------
319 # remote push initiator/responder protocol:
320 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
321 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
322 # < dgit-remote-push-ready <actual-proto-vsn>
329 # > supplementary-message NBYTES # $protovsn >= 3
334 # > file parsed-changelog
335 # [indicates that output of dpkg-parsechangelog follows]
336 # > data-block NBYTES
337 # > [NBYTES bytes of data (no newline)]
338 # [maybe some more blocks]
347 # > param head DGIT-VIEW-HEAD
348 # > param csuite SUITE
349 # > param tagformat old|new
350 # > param maint-view MAINT-VIEW-HEAD
352 # > param buildinfo-filename P_V_X.buildinfo # zero or more times
353 # > file buildinfo # for buildinfos to sign
355 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
356 # # goes into tag, for replay prevention
359 # [indicates that signed tag is wanted]
360 # < data-block NBYTES
361 # < [NBYTES bytes of data (no newline)]
362 # [maybe some more blocks]
366 # > want signed-dsc-changes
367 # < data-block NBYTES [transfer of signed dsc]
369 # < data-block NBYTES [transfer of signed changes]
371 # < data-block NBYTES [transfer of each signed buildinfo
372 # [etc] same number and order as "file buildinfo"]
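# For illustration, a file transfer in either direction is framed as a
# sequence of data-block messages followed by data-end (the sizes here are
# made up):
#   file parsed-changelog
#   data-block 4096
#   [4096 bytes of data]
#   data-block 712
#   [712 bytes of data]
#   data-end
# protocol_send_file and protocol_receive_file below implement this framing.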
380 sub i_child_report () {
381 # Sees if our child has died, and reaps it if so. Returns a string
382 # describing how it died if it failed, or undef otherwise.
383 return undef unless $i_child_pid;
384 my $got = waitpid $i_child_pid, WNOHANG;
385 return undef if $got <= 0;
386 die unless $got == $i_child_pid;
387 $i_child_pid = undef;
388 return undef unless $?;
389 return "build host child ".waitstatusmsg();
394 fail "connection lost: $!" if $fh->error;
395 fail "protocol violation; $m not expected";
398 sub badproto_badread ($$) {
400 fail "connection lost: $!" if $!;
401 my $report = i_child_report();
402 fail $report if defined $report;
403 badproto $fh, "eof (reading $wh)";
406 sub protocol_expect (&$) {
407 my ($match, $fh) = @_;
410 defined && chomp or badproto_badread $fh, "protocol message";
418 badproto $fh, "\`$_'";
421 sub protocol_send_file ($$) {
422 my ($fh, $ourfn) = @_;
423 open PF, "<", $ourfn or die "$ourfn: $!";
426 my $got = read PF, $d, 65536;
427 die "$ourfn: $!" unless defined $got;
429 print $fh "data-block ".length($d)."\n" or die $!;
430 print $fh $d or die $!;
432 PF->error and die "$ourfn $!";
433 print $fh "data-end\n" or die $!;
437 sub protocol_read_bytes ($$) {
438 my ($fh, $nbytes) = @_;
439 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto $fh, "bad byte count";
441 my $got = read $fh, $d, $nbytes;
442 $got==$nbytes or badproto_badread $fh, "data block";
446 sub protocol_receive_file ($$) {
447 my ($fh, $ourfn) = @_;
448 printdebug "() $ourfn\n";
449 open PF, ">", $ourfn or die "$ourfn: $!";
451 my ($y,$l) = protocol_expect {
452 m/^data-block (.*)$/ ? (1,$1) :
453 m/^data-end$/ ? (0,) :
457 my $d = protocol_read_bytes $fh, $l;
458 print PF $d or die $!;
463 #---------- remote protocol support, responder ----------
465 sub responder_send_command ($) {
467 return unless $we_are_responder;
468 # called even without $we_are_responder
469 printdebug ">> $command\n";
470 print PO $command, "\n" or die $!;
473 sub responder_send_file ($$) {
474 my ($keyword, $ourfn) = @_;
475 return unless $we_are_responder;
476 printdebug "]] $keyword $ourfn\n";
477 responder_send_command "file $keyword";
478 protocol_send_file \*PO, $ourfn;
481 sub responder_receive_files ($@) {
482 my ($keyword, @ourfns) = @_;
483 die unless $we_are_responder;
484 printdebug "[[ $keyword @ourfns\n";
485 responder_send_command "want $keyword";
486 foreach my $fn (@ourfns) {
487 protocol_receive_file \*PI, $fn;
490 protocol_expect { m/^files-end$/ } \*PI;
493 #---------- remote protocol support, initiator ----------
495 sub initiator_expect (&) {
497 protocol_expect { &$match } \*RO;
500 #---------- end remote code ----------
503 if ($we_are_responder) {
505 responder_send_command "progress ".length($m) or die $!;
506 print PO $m or die $!;
516 $ua = LWP::UserAgent->new();
520 progress "downloading $what...";
521 my $r = $ua->get(@_) or die $!;
522 return undef if $r->code == 404;
523 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
524 return $r->decoded_content(charset => 'none');
527 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
529 sub act_local () { return $dryrun_level <= 1; }
530 sub act_scary () { return !$dryrun_level; }
533 if (!$dryrun_level) {
534 progress "$us ok: @_";
536 progress "would be ok: @_ (but dry run only)";
541 printcmd(\*STDERR,$debugprefix."#",@_);
544 sub runcmd_ordryrun {
552 sub runcmd_ordryrun_local {
560 our $helpmsg = <<END;
562 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
563 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
564 dgit [dgit-opts] build [dpkg-buildpackage-opts]
565 dgit [dgit-opts] sbuild [sbuild-opts]
566 dgit [dgit-opts] push [dgit-opts] [suite]
567 dgit [dgit-opts] push-source [dgit-opts] [suite]
568 dgit [dgit-opts] rpush build-host:build-dir ...
569 important dgit options:
570 -k<keyid> sign tag and package with <keyid> instead of default
571 --dry-run -n do not change anything, but go through the motions
572 --damp-run -L like --dry-run but make local changes, without signing
573 --new -N allow introducing a new package
574 --debug -D increase debug level
575 -c<name>=<value> set git config option (used directly by dgit too)
578 our $later_warning_msg = <<END;
579 Perhaps the upload is stuck in incoming. Using the version from git.
583 print STDERR "$us: @_\n", $helpmsg or die $!;
588 @ARGV or badusage "too few arguments";
589 return scalar shift @ARGV;
593 not_necessarily_a_tree();
596 print $helpmsg or die $!;
600 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
602 our %defcfg = ('dgit.default.distro' => 'debian',
603 'dgit.default.default-suite' => 'unstable',
604 'dgit.default.old-dsc-distro' => 'debian',
605 'dgit-suite.*-security.distro' => 'debian-security',
606 'dgit.default.username' => '',
607 'dgit.default.archive-query-default-component' => 'main',
608 'dgit.default.ssh' => 'ssh',
609 'dgit.default.archive-query' => 'madison:',
610 'dgit.default.sshpsql-dbname' => 'service=projectb',
611 'dgit.default.aptget-components' => 'main',
612 'dgit.default.dgit-tag-format' => 'new,old,maint',
613 'dgit.default.source-only-uploads' => 'ok',
614 'dgit.dsc-url-proto-ok.http' => 'true',
615 'dgit.dsc-url-proto-ok.https' => 'true',
616 'dgit.dsc-url-proto-ok.git' => 'true',
617 'dgit.vcs-git.suites', => 'sid', # ;-separated
618 'dgit.default.dsc-url-proto-ok' => 'false',
619 # old means "repo server accepts pushes with old dgit tags"
620 # new means "repo server accepts pushes with new dgit tags"
621 # maint means "repo server accepts split brain pushes"
622 # hist means "repo server may have old pushes without new tag"
623 # ("hist" is implied by "old")
624 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
625 'dgit-distro.debian.git-check' => 'url',
626 'dgit-distro.debian.git-check-suffix' => '/info/refs',
627 'dgit-distro.debian.new-private-pushers' => 't',
628 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new',
629 'dgit-distro.debian/push.git-url' => '',
630 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
631 'dgit-distro.debian/push.git-user-force' => 'dgit',
632 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
633 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
634 'dgit-distro.debian/push.git-create' => 'true',
635 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
636 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
637 # 'dgit-distro.debian.archive-query-tls-key',
638 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
639 # ^ this does not work because curl is broken nowadays
640 # Fixing #790093 properly will involve providing the key
641 # in some package and maybe updating these paths.
643 # 'dgit-distro.debian.archive-query-tls-curl-args',
644 # '--ca-path=/etc/ssl/ca-debian',
645 # ^ this is a workaround but works (only) on DSA-administered machines
646 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
647 'dgit-distro.debian.git-url-suffix' => '',
648 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
649 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
650 'dgit-distro.debian-security.archive-query' => 'aptget:',
651 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
652 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
653 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
654 'dgit-distro.debian-security.nominal-distro' => 'debian',
655 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
656 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
657 'dgit-distro.ubuntu.git-check' => 'false',
658 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
659 'dgit-distro.test-dummy.ssh' => "$td/ssh",
660 'dgit-distro.test-dummy.username' => "alice",
661 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
662 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
663 'dgit-distro.test-dummy.git-url' => "$td/git",
664 'dgit-distro.test-dummy.git-host' => "git",
665 'dgit-distro.test-dummy.git-path' => "$td/git",
666 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
667 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
668 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
669 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
673 our @gitcfgsources = qw(cmdline local global system);
674 our $invoked_in_git_tree = 1;
676 sub git_slurp_config () {
677 # This algorithm is a bit subtle, but this is needed so that for
678 # options which we want to be single-valued, we allow the
679 # different config sources to override properly. See #835858.
680 foreach my $src (@gitcfgsources) {
681 next if $src eq 'cmdline';
682 # we do this ourselves since git doesn't handle it
684 $gitcfgs{$src} = git_slurp_config_src $src;
688 sub git_get_config ($) {
690 foreach my $src (@gitcfgsources) {
691 my $l = $gitcfgs{$src}{$c};
692 confess "internal error ($l $c)" if $l && !ref $l;
693 printdebug"C $c ".(defined $l ?
694 join " ", map { messagequote "'$_'" } @$l :
698 @$l==1 or badcfg "multiple values for $c".
699 " (in $src git config)" if @$l > 1;
707 return undef if $c =~ /RETURN-UNDEF/;
708 printdebug "C? $c\n" if $debuglevel >= 5;
709 my $v = git_get_config($c);
710 return $v if defined $v;
711 my $dv = $defcfg{$c};
713 printdebug "CD $c $dv\n" if $debuglevel >= 4;
717 badcfg "need value for one of: @_\n".
718 "$us: distro or suite appears not to be (properly) supported";
721 sub not_necessarily_a_tree () {
722 # needs to be called from pre_*
723 @gitcfgsources = grep { $_ ne 'local' } @gitcfgsources;
724 $invoked_in_git_tree = 0;
727 sub access_basedistro__noalias () {
728 if (defined $idistro) {
731 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
732 return $def if defined $def;
733 foreach my $src (@gitcfgsources, 'internal') {
734 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
736 foreach my $k (keys %$kl) {
737 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
739 next unless match_glob $dpat, $isuite;
743 return cfg("dgit.default.distro");
747 sub access_basedistro () {
748 my $noalias = access_basedistro__noalias();
749 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
750 return $canon // $noalias;
753 sub access_nomdistro () {
754 my $base = access_basedistro();
755 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
756 $r =~ m/^$distro_re$/ or badcfg
757 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
761 sub access_quirk () {
762 # returns (quirk name, distro to use instead or undef, quirk-specific info)
763 my $basedistro = access_basedistro();
764 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
766 if (defined $backports_quirk) {
767 my $re = $backports_quirk;
768 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
770 $re =~ s/\%/([-0-9a-z_]+)/
771 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
772 if ($isuite =~ m/^$re$/) {
773 return ('backports',"$basedistro-backports",$1);
776 return ('none',undef);
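# For example, with the default Debian backports-quirk of
# '(squeeze)-backports*' (see %defcfg above), a suite such as
# "squeeze-backports" (an illustrative value) matches the generated regexp,
# and access_quirk returns ('backports', "debian-backports", "squeeze"),
# i.e. archive queries are redirected to the debian-backports distro.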
781 sub parse_cfg_bool ($$$) {
782 my ($what,$def,$v) = @_;
785 $v =~ m/^[ty1]/ ? 1 :
786 $v =~ m/^[fn0]/ ? 0 :
787 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
790 sub access_forpush_config () {
791 my $d = access_basedistro();
795 parse_cfg_bool('new-private-pushers', 0,
796 cfg("dgit-distro.$d.new-private-pushers",
799 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
802 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
803 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
804 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
805 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
808 sub access_forpush () {
809 $access_forpush //= access_forpush_config();
810 return $access_forpush;
814 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
815 badcfg "pushing but distro is configured readonly"
816 if access_forpush_config() eq '0';
818 $supplementary_message = <<'END' unless $we_are_responder;
819 Push failed, before we got started.
820 You can retry the push, after fixing the problem, if you like.
822 parseopts_late_defaults();
826 parseopts_late_defaults();
829 sub supplementary_message ($) {
831 if (!$we_are_responder) {
832 $supplementary_message = $msg;
834 } elsif ($protovsn >= 3) {
835 responder_send_command "supplementary-message ".length($msg)
837 print PO $msg or die $!;
841 sub access_distros () {
842 # Returns list of distros to try, in order
845 # 0. `instead of' distro name(s) we have been pointed to
846 # 1. the access_quirk distro, if any
847 # 2a. the user's specified distro, or failing that } basedistro
848 # 2b. the distro calculated from the suite }
849 my @l = access_basedistro();
851 my (undef,$quirkdistro) = access_quirk();
852 unshift @l, $quirkdistro;
853 unshift @l, $instead_distro;
854 @l = grep { defined } @l;
856 push @l, access_nomdistro();
858 if (access_forpush()) {
859 @l = map { ("$_/push", $_) } @l;
864 sub access_cfg_cfgs (@) {
867 # The nesting of these loops determines the search order. We put
868 # the key loop on the outside so that we search all the distros
869 # for each key, before going on to the next key. That means that
870 # if access_cfg is called with a more specific, and then a less
871 # specific, key, an earlier distro can override the less specific
872 # without necessarily overriding any more specific keys. (If the
873 # distro wants to override the more specific keys it can simply do
874 # so; whereas if we did the loop the other way around, it would be
875 # impossible for an earlier distro to override a less specific
876 # key but not the more specific ones without restating the unknown
877 # values of the more specific keys.)
880 # We have to deal with RETURN-UNDEF specially, so that we don't
881 # terminate the search prematurely.
883 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
886 foreach my $d (access_distros()) {
887 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
889 push @cfgs, map { "dgit.default.$_" } @realkeys;
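# For example (assuming a push to plain Debian), a single key 'git-url'
# expands to candidate config keys roughly in this order:
#   dgit-distro.debian/push.git-url
#   dgit-distro.debian.git-url
#   dgit.default.git-url
# with any quirk or divert distros inserted before the base distro, as
# produced by access_distros() above.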
896 my (@cfgs) = access_cfg_cfgs(@keys);
897 my $value = cfg(@cfgs);
901 sub access_cfg_bool ($$) {
902 my ($def, @keys) = @_;
903 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
906 sub string_to_ssh ($) {
908 if ($spec =~ m/\s/) {
909 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
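# For example (hypothetical spec): an ssh config value containing
# whitespace, such as "ssh -o BatchMode=yes", is turned into the argv
#   ('sh', '-ec', 'exec ssh -o BatchMode=yes "$@"', 'x')
# so that the extra words are interpreted by the shell while further
# arguments are still appended individually.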
915 sub access_cfg_ssh () {
916 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
917 if (!defined $gitssh) {
920 return string_to_ssh $gitssh;
924 sub access_runeinfo ($) {
926 return ": dgit ".access_basedistro()." $info ;";
929 sub access_someuserhost ($) {
931 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
932 defined($user) && length($user) or
933 $user = access_cfg("$some-user",'username');
934 my $host = access_cfg("$some-host");
935 return length($user) ? "$user\@$host" : $host;
938 sub access_gituserhost () {
939 return access_someuserhost('git');
942 sub access_giturl (;$) {
944 my $url = access_cfg('git-url','RETURN-UNDEF');
947 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
948 return undef unless defined $proto;
951 access_gituserhost().
952 access_cfg('git-path');
954 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
957 return "$url/$package$suffix";
960 sub commit_getclogp ($) {
961 # Returns the parsed changelog hashref for a particular commit
963 our %commit_getclogp_memo;
964 my $memo = $commit_getclogp_memo{$objid};
965 return $memo if $memo;
967 my $mclog = dgit_privdir()."clog";
968 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
969 "$objid:debian/changelog";
970 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
973 sub parse_dscdata () {
974 my $dscfh = new IO::File \$dscdata, '<' or die $!;
975 printdebug Dumper($dscdata) if $debuglevel>1;
976 $dsc = parsecontrolfh($dscfh,$dscurl,1);
977 printdebug Dumper($dsc) if $debuglevel>1;
982 sub archive_query ($;@) {
983 my ($method) = shift @_;
984 fail "this operation does not support multiple comma-separated suites"
986 my $query = access_cfg('archive-query','RETURN-UNDEF');
987 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
990 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
993 sub archive_query_prepend_mirror {
994 my $m = access_cfg('mirror');
995 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
998 sub pool_dsc_subpath ($$) {
999 my ($vsn,$component) = @_; # $package is implicit arg
1000 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1001 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1004 sub cfg_apply_map ($$$) {
1005 my ($varref, $what, $mapspec) = @_;
1006 return unless $mapspec;
1008 printdebug "config $what EVAL{ $mapspec; }\n";
1010 eval "package Dgit::Config; $mapspec;";
1015 #---------- `ftpmasterapi' archive query method (nascent) ----------
1017 sub archive_api_query_cmd ($) {
1019 my @cmd = (@curl, qw(-sS));
1020 my $url = access_cfg('archive-query-url');
1021 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1023 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1024 foreach my $key (split /\:/, $keys) {
1025 $key =~ s/\%HOST\%/$host/g;
1027 fail "for $url: stat $key: $!" unless $!==ENOENT;
1030 fail "config requested specific TLS key but do not know".
1031 " how to get curl to use exactly that EE key ($key)";
1032 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1033 # # Sadly the above line does not work because of changes
1034 # # to gnutls. The real fix for #790093 may involve
1035 # # new curl options.
1038 # Fixing #790093 properly will involve providing a value
1039 # for this on clients.
1040 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1041 push @cmd, split / /, $kargs if defined $kargs;
1043 push @cmd, $url.$subpath;
1047 sub api_query ($$;$) {
1049 my ($data, $subpath, $ok404) = @_;
1050 badcfg "ftpmasterapi archive query method takes no data part"
1052 my @cmd = archive_api_query_cmd($subpath);
1053 my $url = $cmd[$#cmd];
1054 push @cmd, qw(-w %{http_code});
1055 my $json = cmdoutput @cmd;
1056 unless ($json =~ s/\d+\d+\d$//) {
1057 failedcmd_report_cmd undef, @cmd;
1058 fail "curl failed to print 3-digit HTTP code";
1061 return undef if $code eq '404' && $ok404;
1062 fail "fetch of $url gave HTTP code $code"
1063 unless $url =~ m#^file://# or $code =~ m/^2/;
1064 return decode_json($json);
1067 sub canonicalise_suite_ftpmasterapi {
1068 my ($proto,$data) = @_;
1069 my $suites = api_query($data, 'suites');
1071 foreach my $entry (@$suites) {
1073 my $v = $entry->{$_};
1074 defined $v && $v eq $isuite;
1075 } qw(codename name);
1076 push @matched, $entry;
1078 fail "unknown suite $isuite" unless @matched;
1081 @matched==1 or die "multiple matches for suite $isuite\n";
1082 $cn = "$matched[0]{codename}";
1083 defined $cn or die "suite $isuite info has no codename\n";
1084 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1086 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1091 sub archive_query_ftpmasterapi {
1092 my ($proto,$data) = @_;
1093 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1095 my $digester = Digest::SHA->new(256);
1096 foreach my $entry (@$info) {
1098 my $vsn = "$entry->{version}";
1099 my ($ok,$msg) = version_check $vsn;
1100 die "bad version: $msg\n" unless $ok;
1101 my $component = "$entry->{component}";
1102 $component =~ m/^$component_re$/ or die "bad component";
1103 my $filename = "$entry->{filename}";
1104 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1105 or die "bad filename";
1106 my $sha256sum = "$entry->{sha256sum}";
1107 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1108 push @rows, [ $vsn, "/pool/$component/$filename",
1109 $digester, $sha256sum ];
1111 die "bad ftpmaster api response: $@\n".Dumper($entry)
1114 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1115 return archive_query_prepend_mirror @rows;
1118 sub file_in_archive_ftpmasterapi {
1119 my ($proto,$data,$filename) = @_;
1120 my $pat = $filename;
1123 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1124 my $info = api_query($data, "file_in_archive/$pat", 1);
1127 sub package_not_wholly_new_ftpmasterapi {
1128 my ($proto,$data,$pkg) = @_;
1129 my $info = api_query($data,"madison?package=${pkg}&f=json");
1133 #---------- `aptget' archive query method ----------
1136 our $aptget_releasefile;
1137 our $aptget_configpath;
1139 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1140 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1142 sub aptget_cache_clean {
1143 runcmd_ordryrun_local qw(sh -ec),
1144 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1148 sub aptget_lock_acquire () {
1149 my $lockfile = "$aptget_base/lock";
1150 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1151 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1154 sub aptget_prep ($) {
1156 return if defined $aptget_base;
1158 badcfg "aptget archive query method takes no data part"
1161 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1164 ensuredir "$cache/dgit";
1166 access_cfg('aptget-cachekey','RETURN-UNDEF')
1167 // access_nomdistro();
1169 $aptget_base = "$cache/dgit/aptget";
1170 ensuredir $aptget_base;
1172 my $quoted_base = $aptget_base;
1173 die "$quoted_base contains bad chars, cannot continue"
1174 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1176 ensuredir $aptget_base;
1178 aptget_lock_acquire();
1180 aptget_cache_clean();
1182 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1183 my $sourceslist = "source.list#$cachekey";
1185 my $aptsuites = $isuite;
1186 cfg_apply_map(\$aptsuites, 'suite map',
1187 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1189 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1190 printf SRCS "deb-src %s %s %s\n",
1191 access_cfg('mirror'),
1193 access_cfg('aptget-components')
1196 ensuredir "$aptget_base/cache";
1197 ensuredir "$aptget_base/lists";
1199 open CONF, ">", $aptget_configpath or die $!;
1201 Debug::NoLocking "true";
1202 APT::Get::List-Cleanup "false";
1203 #clear APT::Update::Post-Invoke-Success;
1204 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1205 Dir::State::Lists "$quoted_base/lists";
1206 Dir::Etc::preferences "$quoted_base/preferences";
1207 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1208 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1211 foreach my $key (qw(
1214 Dir::Cache::Archives
1215 Dir::Etc::SourceParts
1216 Dir::Etc::preferencesparts
1218 ensuredir "$aptget_base/$key";
1219 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
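# To work out which Release file apt actually fetched for this suite, the
# atimes of all existing Release files are first wound back by one second;
# after "apt-get update" runs, the file whose atime has changed again is
# taken to be our Release file (hence the "noatime" warning in the error
# message below).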
1222 my $oldatime = (time // die $!) - 1;
1223 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1224 next unless stat_exists $oldlist;
1225 my ($mtime) = (stat _)[9];
1226 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1229 runcmd_ordryrun_local aptget_aptget(), qw(update);
1232 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1233 next unless stat_exists $oldlist;
1234 my ($atime) = (stat _)[8];
1235 next if $atime == $oldatime;
1236 push @releasefiles, $oldlist;
1238 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1239 @releasefiles = @inreleasefiles if @inreleasefiles;
1240 if (!@releasefiles) {
2242 apt seemed not to update dgit's cached Release files for $isuite.
1244 is on a filesystem mounted `noatime'; if so, please use `relatime'.)
1247 die "apt updated too many Release files (@releasefiles), erk"
1248 unless @releasefiles == 1;
1250 ($aptget_releasefile) = @releasefiles;
1253 sub canonicalise_suite_aptget {
1254 my ($proto,$data) = @_;
1257 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1259 foreach my $name (qw(Codename Suite)) {
1260 my $val = $release->{$name};
1262 printdebug "release file $name: $val\n";
1263 $val =~ m/^$suite_re$/o or fail
1264 "Release file ($aptget_releasefile) specifies intolerable $name";
1265 cfg_apply_map(\$val, 'suite rmap',
1266 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1273 sub archive_query_aptget {
1274 my ($proto,$data) = @_;
1277 ensuredir "$aptget_base/source";
1278 foreach my $old (<$aptget_base/source/*.dsc>) {
1279 unlink $old or die "$old: $!";
1282 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1283 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1284 # avoids apt-get source failing with ambiguous error code
1286 runcmd_ordryrun_local
1287 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1288 aptget_aptget(), qw(--download-only --only-source source), $package;
1290 my @dscs = <$aptget_base/source/*.dsc>;
1291 fail "apt-get source did not produce a .dsc" unless @dscs;
1292 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1294 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1297 my $uri = "file://". uri_escape $dscs[0];
1298 $uri =~ s{\%2f}{/}gi;
1299 return [ (getfield $pre_dsc, 'Version'), $uri ];
1302 sub file_in_archive_aptget () { return undef; }
1303 sub package_not_wholly_new_aptget () { return undef; }
1305 #---------- `dummyapicat' archive query method ----------
1307 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1308 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1310 sub dummycatapi_run_in_mirror ($@) {
1311 # runs $rune in the mirror directory and calls $fn with FIA open onto its output
1312 my ($rune, $argl, $fn) = @_;
1314 my $mirror = access_cfg('mirror');
1315 $mirror =~ s#^file://#/# or die "$mirror ?";
1316 my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune,
1317 qw(x), $mirror, @$argl);
1318 debugcmd "-|", @cmd;
1319 open FIA, "-|", @cmd or die $!;
1321 close FIA or ($!==0 && $?==141) or die failedcmd @cmd;
1325 sub file_in_archive_dummycatapi ($$$) {
1326 my ($proto,$data,$filename) = @_;
1328 dummycatapi_run_in_mirror '
1329 find -name "$1" -print0 |
1331 ', [$filename], sub {
1334 printdebug "| $_\n";
1335 m/^(\w+) (\S+)$/ or die "$_ ?";
1336 push @out, { sha256sum => $1, filename => $2 };
1342 sub package_not_wholly_new_dummycatapi {
1343 my ($proto,$data,$pkg) = @_;
1344 dummycatapi_run_in_mirror "
1345 find -name ${pkg}_*.dsc
1352 #---------- `madison' archive query method ----------
1354 sub archive_query_madison {
1355 return archive_query_prepend_mirror
1356 map { [ @$_[0..1] ] } madison_get_parse(@_);
1359 sub madison_get_parse {
1360 my ($proto,$data) = @_;
1361 die unless $proto eq 'madison';
1362 if (!length $data) {
1363 $data= access_cfg('madison-distro','RETURN-UNDEF');
1364 $data //= access_basedistro();
1366 $rmad{$proto,$data,$package} ||= cmdoutput
1367 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1368 my $rmad = $rmad{$proto,$data,$package};
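# Each line of rmadison output typically looks something like
#    hello | 2.10-1 | unstable | source
# (package | version | suite[/component] | architectures), which is what the
# regexp below picks apart; the example values here are illustrative only.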
1371 foreach my $l (split /\n/, $rmad) {
1372 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1373 \s*( [^ \t|]+ )\s* \|
1374 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1375 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1376 $1 eq $package or die "$rmad $package ?";
1383 $component = access_cfg('archive-query-default-component');
1385 $5 eq 'source' or die "$rmad ?";
1386 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1388 return sort { -version_compare($a->[0],$b->[0]); } @out;
1391 sub canonicalise_suite_madison {
1392 # madison canonicalises for us
1393 my @r = madison_get_parse(@_);
1395 "unable to canonicalise suite using package $package".
1396 " which does not appear to exist in suite $isuite;".
1397 " --existing-package may help";
1401 sub file_in_archive_madison { return undef; }
1402 sub package_not_wholly_new_madison { return undef; }
1404 #---------- `sshpsql' archive query method ----------
1407 my ($data,$runeinfo,$sql) = @_;
1408 if (!length $data) {
1409 $data= access_someuserhost('sshpsql').':'.
1410 access_cfg('sshpsql-dbname');
1412 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1413 my ($userhost,$dbname) = ($`,$'); #';
1415 my @cmd = (access_cfg_ssh, $userhost,
1416 access_runeinfo("ssh-psql $runeinfo").
1417 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1418 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1420 open P, "-|", @cmd or die $!;
1423 printdebug(">|$_|\n");
1426 $!=0; $?=0; close P or failedcmd @cmd;
1428 my $nrows = pop @rows;
1429 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1430 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1431 @rows = map { [ split /\|/, $_ ] } @rows;
1432 my $ncols = scalar @{ shift @rows };
1433 die if grep { scalar @$_ != $ncols } @rows;
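# With "psql -A" the output being parsed here looks roughly like this
# (illustrative values):
#   version|name|filename|sha256sum
#   1.0-1|main|pool/main/h/hello/hello_1.0-1.dsc|deadbeef...
#   (1 row)
# i.e. a header row, "|"-separated data rows, and a trailing row count,
# which is what the checks above verify.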
1437 sub sql_injection_check {
1438 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1441 sub archive_query_sshpsql ($$) {
1442 my ($proto,$data) = @_;
1443 sql_injection_check $isuite, $package;
1444 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1445 SELECT source.version, component.name, files.filename, files.sha256sum
1447 JOIN src_associations ON source.id = src_associations.source
1448 JOIN suite ON suite.id = src_associations.suite
1449 JOIN dsc_files ON dsc_files.source = source.id
1450 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1451 JOIN component ON component.id = files_archive_map.component_id
1452 JOIN files ON files.id = dsc_files.file
1453 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1454 AND source.source='$package'
1455 AND files.filename LIKE '%.dsc';
1457 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1458 my $digester = Digest::SHA->new(256);
1460 my ($vsn,$component,$filename,$sha256sum) = @$_;
1461 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1463 return archive_query_prepend_mirror @rows;
1466 sub canonicalise_suite_sshpsql ($$) {
1467 my ($proto,$data) = @_;
1468 sql_injection_check $isuite;
1469 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1470 SELECT suite.codename
1471 FROM suite where suite_name='$isuite' or codename='$isuite';
1473 @rows = map { $_->[0] } @rows;
1474 fail "unknown suite $isuite" unless @rows;
1475 die "ambiguous $isuite: @rows ?" if @rows>1;
1479 sub file_in_archive_sshpsql ($$$) { return undef; }
1480 sub package_not_wholly_new_sshpsql ($$$) { return undef; }
1482 #---------- `dummycat' archive query method ----------
1484 sub canonicalise_suite_dummycat ($$) {
1485 my ($proto,$data) = @_;
1486 my $dpath = "$data/suite.$isuite";
1487 if (!open C, "<", $dpath) {
1488 $!==ENOENT or die "$dpath: $!";
1489 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1493 chomp or die "$dpath: $!";
1495 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1499 sub archive_query_dummycat ($$) {
1500 my ($proto,$data) = @_;
1501 canonicalise_suite();
1502 my $dpath = "$data/package.$csuite.$package";
1503 if (!open C, "<", $dpath) {
1504 $!==ENOENT or die "$dpath: $!";
1505 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1513 printdebug "dummycat query $csuite $package $dpath | $_\n";
1514 my @row = split /\s+/, $_;
1515 @row==2 or die "$dpath: $_ ?";
1518 C->error and die "$dpath: $!";
1520 return archive_query_prepend_mirror
1521 sort { -version_compare($a->[0],$b->[0]); } @rows;
1524 sub file_in_archive_dummycat () { return undef; }
1525 sub package_not_wholly_new_dummycat () { return undef; }
1527 #---------- tag format handling ----------
1529 sub access_cfg_tagformats () {
1530 split /\,/, access_cfg('dgit-tag-format');
1533 sub access_cfg_tagformats_can_splitbrain () {
1534 my %y = map { $_ => 1 } access_cfg_tagformats;
1535 foreach my $needtf (qw(new maint)) {
1536 next if $y{$needtf};
1542 sub need_tagformat ($$) {
1543 my ($fmt, $why) = @_;
1544 fail "need to use tag format $fmt ($why) but also need".
1545 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1546 " - no way to proceed"
1547 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1548 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1551 sub select_tagformat () {
1553 return if $tagformatfn && !$tagformat_want;
1554 die 'bug' if $tagformatfn && $tagformat_want;
1555 # ... $tagformat_want assigned after previous select_tagformat
1557 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1558 printdebug "select_tagformat supported @supported\n";
1560 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1561 printdebug "select_tagformat specified @$tagformat_want\n";
1563 my ($fmt,$why,$override) = @$tagformat_want;
1565 fail "target distro supports tag formats @supported".
1566 " but have to use $fmt ($why)"
1568 or grep { $_ eq $fmt } @supported;
1570 $tagformat_want = undef;
1572 $tagformatfn = ${*::}{"debiantag_$fmt"};
1574 fail "trying to use unknown tag format \`$fmt' ($why) !"
1575 unless $tagformatfn;
1578 #---------- archive query entrypoints and rest of program ----------
1580 sub canonicalise_suite () {
1581 return if defined $csuite;
1582 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1583 $csuite = archive_query('canonicalise_suite');
1584 if ($isuite ne $csuite) {
1585 progress "canonical suite name for $isuite is $csuite";
1587 progress "canonical suite name is $csuite";
1591 sub get_archive_dsc () {
1592 canonicalise_suite();
1593 my @vsns = archive_query('archive_query');
1594 foreach my $vinfo (@vsns) {
1595 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1596 $dscurl = $vsn_dscurl;
1597 $dscdata = url_get($dscurl);
1599 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1604 $digester->add($dscdata);
1605 my $got = $digester->hexdigest();
1607 fail "$dscurl has hash $got but".
1608 " archive told us to expect $digest";
1611 my $fmt = getfield $dsc, 'Format';
1612 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1613 "unsupported source format $fmt, sorry";
1615 $dsc_checked = !!$digester;
1616 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1620 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1623 sub check_for_git ();
1624 sub check_for_git () {
1626 my $how = access_cfg('git-check');
1627 if ($how eq 'ssh-cmd') {
1629 (access_cfg_ssh, access_gituserhost(),
1630 access_runeinfo("git-check $package").
1631 " set -e; cd ".access_cfg('git-path').";".
1632 " if test -d $package.git; then echo 1; else echo 0; fi");
1633 my $r= cmdoutput @cmd;
1634 if (defined $r and $r =~ m/^divert (\w+)$/) {
1636 my ($usedistro,) = access_distros();
1637 # NB that if we are pushing, $usedistro will be $distro/push
1638 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1639 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1640 progress "diverting to $divert (using config for $instead_distro)";
1641 return check_for_git();
1643 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1645 } elsif ($how eq 'url') {
1646 my $prefix = access_cfg('git-check-url','git-url');
1647 my $suffix = access_cfg('git-check-suffix','git-suffix',
1648 'RETURN-UNDEF') // '.git';
1649 my $url = "$prefix/$package$suffix";
1650 my @cmd = (@curl, qw(-sS -I), $url);
1651 my $result = cmdoutput @cmd;
1652 $result =~ s/^\S+ 200 .*\n\r?\n//;
1653 # curl -sS -I with https_proxy prints
1654 # HTTP/1.0 200 Connection established
1655 $result =~ m/^\S+ (404|200) /s or
1656 fail "unexpected results from git check query - ".
1657 Dumper($prefix, $result);
1659 if ($code eq '404') {
1661 } elsif ($code eq '200') {
1666 } elsif ($how eq 'true') {
1668 } elsif ($how eq 'false') {
1671 badcfg "unknown git-check \`$how'";
1675 sub create_remote_git_repo () {
1676 my $how = access_cfg('git-create');
1677 if ($how eq 'ssh-cmd') {
1679 (access_cfg_ssh, access_gituserhost(),
1680 access_runeinfo("git-create $package").
1681 "set -e; cd ".access_cfg('git-path').";".
1682 " cp -a _template $package.git");
1683 } elsif ($how eq 'true') {
1686 badcfg "unknown git-create \`$how'";
1690 our ($dsc_hash,$lastpush_mergeinput);
1691 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1695 dgit_privdir(); # ensures that $dgit_privdir_made is based on $maindir
1696 $playground = fresh_playground 'dgit/unpack';
1699 sub mktree_in_ud_here () {
1700 playtree_setup $gitcfgs{local};
1703 sub git_write_tree () {
1704 my $tree = cmdoutput @git, qw(write-tree);
1705 $tree =~ m/^\w+$/ or die "$tree ?";
1709 sub git_add_write_tree () {
1710 runcmd @git, qw(add -Af .);
1711 return git_write_tree();
1714 sub remove_stray_gits ($) {
1716 my @gitscmd = qw(find -name .git -prune -print0);
1717 debugcmd "|",@gitscmd;
1718 open GITS, "-|", @gitscmd or die $!;
1723 print STDERR "$us: warning: removing from $what: ",
1724 (messagequote $_), "\n";
1728 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1731 sub mktree_in_ud_from_only_subdir ($;$) {
1732 my ($what,$raw) = @_;
1733 # changes into the subdir
1736 die "expected one subdir but found @dirs ?" unless @dirs==1;
1737 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1741 remove_stray_gits($what);
1742 mktree_in_ud_here();
1744 my ($format, $fopts) = get_source_format();
1745 if (madformat($format)) {
1750 my $tree=git_add_write_tree();
1751 return ($tree,$dir);
1754 our @files_csum_info_fields =
1755 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1756 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1757 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
1759 sub dsc_files_info () {
1760 foreach my $csumi (@files_csum_info_fields) {
1761 my ($fname, $module, $method) = @$csumi;
1762 my $field = $dsc->{$fname};
1763 next unless defined $field;
1764 eval "use $module; 1;" or die $@;
1766 foreach (split /\n/, $field) {
1768 m/^(\w+) (\d+) (\S+)$/ or
1769 fail "could not parse .dsc $fname line \`$_'";
1770 my $digester = eval "$module"."->$method;" or die $@;
1775 Digester => $digester,
1780 fail "missing any supported Checksums-* or Files field in ".
1781 $dsc->get_option('name');
1785 map { $_->{Filename} } dsc_files_info();
1788 sub files_compare_inputs (@) {
1793 my $showinputs = sub {
1794 return join "; ", map { $_->get_option('name') } @$inputs;
1797 foreach my $in (@$inputs) {
1799 my $in_name = $in->get_option('name');
1801 printdebug "files_compare_inputs $in_name\n";
1803 foreach my $csumi (@files_csum_info_fields) {
1804 my ($fname) = @$csumi;
1805 printdebug "files_compare_inputs $in_name $fname\n";
1807 my $field = $in->{$fname};
1808 next unless defined $field;
1811 foreach (split /\n/, $field) {
1814 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1815 fail "could not parse $in_name $fname line \`$_'";
1817 printdebug "files_compare_inputs $in_name $fname $f\n";
1821 my $re = \ $record{$f}{$fname};
1823 $fchecked{$f}{$in_name} = 1;
1825 fail "hash or size of $f varies in $fname fields".
1826 " (between: ".$showinputs->().")";
1831 @files = sort @files;
1832 $expected_files //= \@files;
1833 "@$expected_files" eq "@files" or
1834 fail "file list in $in_name varies between hash fields!";
1837 fail "$in_name has no files list field(s)";
1839 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1842 grep { keys %$_ == @$inputs-1 } values %fchecked
1843 or fail "no file appears in all file lists".
1844 " (looked in: ".$showinputs->().")";
1847 sub is_orig_file_in_dsc ($$) {
1848 my ($f, $dsc_files_info) = @_;
1849 return 0 if @$dsc_files_info <= 1;
1850 # One file means no origs, and the filename doesn't have a "what
1851 # part of dsc" component. (Consider versions ending `.orig'.)
1852 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1856 sub is_orig_file_of_vsn ($$) {
1857 my ($f, $upstreamvsn) = @_;
1858 my $base = srcfn $upstreamvsn, '';
1859 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1863 # This function determines whether a .changes file is source-only from
1864 # the point of view of dak. Thus, it permits *_source.buildinfo
1867 # It does not, however, permit any other buildinfo files. After a
1868 # source-only upload, the buildds will try to upload files like
1869 # foo_1.2.3_amd64.buildinfo. If the package maintainer included files
1870 # named like this in their (otherwise) source-only upload, the uploads
1871 # of the buildd can be rejected by dak. Fixing the resultant
1872 # situation can require manual intervention. So we block such
1873 # .buildinfo files when the user tells us to perform a source-only
1874 # upload (such as when using the push-source subcommand with the -C
1875 # option, which calls this function).
1877 # Note, though, that when dgit is told to prepare a source-only
1878 # upload, such as when subcommands like build-source and push-source
1879 # without -C are used, dgit has a more restrictive notion of
1880 # source-only .changes than dak: such uploads will never include
1881 # *_source.buildinfo files. This is because there is no use for such
1882 # files when using a tool like dgit to produce the source package, as
1883 # dgit ensures the source is identical to git HEAD.
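# So, for example (hypothetical filenames): a .changes listing only
# foo_1.2-3.dsc, foo_1.2-3.debian.tar.xz, foo_1.2-3.orig.tar.gz and
# foo_1.2-3_source.buildinfo passes this test, whereas one that also lists
# foo_1.2-3_amd64.deb or foo_1.2-3_amd64.buildinfo does not.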
1884 sub test_source_only_changes ($) {
1886 foreach my $l (split /\n/, getfield $changes, 'Files') {
1887 $l =~ m/\S+$/ or next;
1888 # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
1889 unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
1890 print "purportedly source-only changes polluted by $&\n";
1897 sub changes_update_origs_from_dsc ($$$$) {
1898 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1900 printdebug "checking origs needed ($upstreamvsn)...\n";
1901 $_ = getfield $changes, 'Files';
1902 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1903 fail "cannot find section/priority from .changes Files field";
1904 my $placementinfo = $1;
1906 printdebug "checking origs needed placement '$placementinfo'...\n";
1907 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1908 $l =~ m/\S+$/ or next;
1910 printdebug "origs $file | $l\n";
1911 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1912 printdebug "origs $file is_orig\n";
1913 my $have = archive_query('file_in_archive', $file);
1914 if (!defined $have) {
1916 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1922 printdebug "origs $file \$#\$have=$#$have\n";
1923 foreach my $h (@$have) {
1926 foreach my $csumi (@files_csum_info_fields) {
1927 my ($fname, $module, $method, $archivefield) = @$csumi;
1928 next unless defined $h->{$archivefield};
1929 $_ = $dsc->{$fname};
1930 next unless defined;
1931 m/^(\w+) .* \Q$file\E$/m or
1932 fail ".dsc $fname missing entry for $file";
1933 if ($h->{$archivefield} eq $1) {
1937 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1940 die "$file ".Dumper($h)." ?!" if $same && @differ;
1943 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1946 printdebug "origs $file f.same=$found_same".
1947 " #f._differ=$#found_differ\n";
1948 if (@found_differ && !$found_same) {
1950 "archive contains $file with different checksum",
1953 # Now we edit the changes file to add or remove it
1954 foreach my $csumi (@files_csum_info_fields) {
1955 my ($fname, $module, $method, $archivefield) = @$csumi;
1956 next unless defined $changes->{$fname};
1958 # in archive, delete from .changes if it's there
1959 $changed{$file} = "removed" if
1960 $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m;
1961 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) {
1962 # not in archive, but it's here in the .changes
1964 my $dsc_data = getfield $dsc, $fname;
1965 $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
1967 $extra =~ s/ \d+ /$&$placementinfo /
1968 or die "$fname $extra >$dsc_data< ?"
1969 if $fname eq 'Files';
1970 $changes->{$fname} .= "\n". $extra;
1971 $changed{$file} = "added";
1976 foreach my $file (keys %changed) {
1978 "edited .changes for archive .orig contents: %s %s",
1979 $changed{$file}, $file;
1981 my $chtmp = "$changesfile.tmp";
1982 $changes->save($chtmp);
1984 rename $chtmp,$changesfile or die "$changesfile $!";
1986 progress "[new .changes left in $changesfile]";
1989 progress "$changesfile already has appropriate .orig(s) (if any)";
1993 sub make_commit ($) {
1995 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1998 sub make_commit_text ($) {
2001 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
2003 print Dumper($text) if $debuglevel > 1;
2004 my $child = open2($out, $in, @cmd) or die $!;
2007 print $in $text or die $!;
2008 close $in or die $!;
2010 $h =~ m/^\w+$/ or die;
2012 printdebug "=> $h\n";
2015 (waitpid $child, 0) == $child or die "$child $!";
2016 $? and failedcmd @cmd;
2020 sub clogp_authline ($) {
2022 my $author = getfield $clogp, 'Maintainer';
2023 if ($author =~ m/^[^"\@]+\,/) {
2024 # single entry Maintainer field with unquoted comma
2025 $author = ($& =~ y/,//rd).$'; # strip the comma
2027 # git wants a single author; any remaining commas in $author
2028 # are by now preceded by @ (or "). It seems safer to punt on
2029 # "..." for now rather than attempting to dequote or something.
2030 $author =~ s#,.*##ms unless $author =~ m/"/;
2031 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
2032 my $authline = "$author $date";
2033 $authline =~ m/$git_authline_re/o or
2034 fail "unexpected commit author line format \`$authline'".
2035 " (was generated from changelog Maintainer field)";
2036 return ($1,$2,$3) if wantarray;
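# For example (hypothetical maintainer): a changelog Maintainer field of
# "Jane Doe <jane@example.org>" together with the changelog Date becomes an
# authorline such as
#   Jane Doe <jane@example.org> 1500000000 +0000
# which is what $git_authline_re above is checking for.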
2040 sub vendor_patches_distro ($$) {
2041 my ($checkdistro, $what) = @_;
2042 return unless defined $checkdistro;
2044 my $series = "debian/patches/\L$checkdistro\E.series";
2045 printdebug "checking for vendor-specific $series ($what)\n";
2047 if (!open SERIES, "<", $series) {
2048 die "$series $!" unless $!==ENOENT;
2057 Unfortunately, this source package uses a feature of dpkg-source where
2058 the same source package unpacks to different source code on different
2059 distros. dgit cannot safely operate on such packages on affected
2060 distros, because the meaning of source packages is not stable.
2062 Please ask the distro/maintainer to remove the distro-specific series
2063 files and use a different technique (if necessary, uploading actually
2064 different packages, if different distros are supposed to have
2068 fail "Found active distro-specific series file for".
2069 " $checkdistro ($what): $series, cannot continue";
2071 die "$series $!" if SERIES->error;
2075 sub check_for_vendor_patches () {
2076 # This dpkg-source feature doesn't seem to be documented anywhere!
2077 # But it can be found in the changelog (reformatted):
2079 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2080 # Author: Raphael Hertzog <hertzog@debian.org>
2081 # Date: Sun Oct 3 09:36:48 2010 +0200
2083 # dpkg-source: correctly create .pc/.quilt_series with alternate
2086 # If you have debian/patches/ubuntu.series and you were
2087 # unpacking the source package on ubuntu, quilt was still
2088 # directed to debian/patches/series instead of
2089 # debian/patches/ubuntu.series.
2091 # debian/changelog | 3 +++
2092 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2093 # 2 files changed, 6 insertions(+), 1 deletion(-)
2096 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2097 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2098 "Dpkg::Vendor \`current vendor'");
2099 vendor_patches_distro(access_basedistro(),
2100 "(base) distro being accessed");
2101 vendor_patches_distro(access_nomdistro(),
2102 "(nominal) distro being accessed");
2105 sub generate_commits_from_dsc () {
2106 # See big comment in fetch_from_archive, below.
2107 # See also README.dsc-import.
2109 changedir $playground;
2111 my @dfi = dsc_files_info();
2112 foreach my $fi (@dfi) {
2113 my $f = $fi->{Filename};
2114 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2115 my $upper_f = "$maindir/../$f";
2117 printdebug "considering reusing $f: ";
2119 if (link_ltarget "$upper_f,fetch", $f) {
2120 printdebug "linked (using ...,fetch).\n";
2121 } elsif ((printdebug "($!) "),
2123 fail "accessing ../$f,fetch: $!";
2124 } elsif (link_ltarget $upper_f, $f) {
2125 printdebug "linked.\n";
2126 } elsif ((printdebug "($!) "),
2128 fail "accessing ../$f: $!";
2130 printdebug "absent.\n";
2134 complete_file_from_dsc('.', $fi, \$refetched)
2137 printdebug "considering saving $f: ";
2139 if (link $f, $upper_f) {
2140 printdebug "linked.\n";
2141 } elsif ((printdebug "($!) "),
2143 fail "saving ../$f: $!";
2144 } elsif (!$refetched) {
2145 printdebug "no need.\n";
2146 } elsif (link $f, "$upper_f,fetch") {
2147 printdebug "linked (using ...,fetch).\n";
2148 } elsif ((printdebug "($!) "),
2150 fail "saving ../$f,fetch: $!";
2152 printdebug "cannot.\n";
2156 # We unpack and record the orig tarballs first, so that we only
2157 # need disk space for one private copy of the unpacked source.
2158 # But we can't make them into commits until we have the metadata
2159 # from the debian/changelog, so we record the tree objects now and
2160 # make them into commits later.
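# (Each @tartrees entry is recorded here roughly as
#   { F => $filename, Tree => $treehash, Orig => bool, Sort => 0|1|2 }
# and has its Commit member filled in further down, once the
# changelog has been parsed for an author line.)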
2162 my $upstreamv = upstreamversion $dsc->{version};
2163 my $orig_f_base = srcfn $upstreamv, '';
2165 foreach my $fi (@dfi) {
2166 # We actually import, and record as a commit, every tarball
2167 # (unless there is only one file, in which case there seems
2170 my $f = $fi->{Filename};
2171 printdebug "import considering $f ";
2172 (printdebug "only one dfi\n"), next if @dfi == 1;
2173 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2174 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2178 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2180 printdebug "Y ", (join ' ', map { $_//"(none)" }
2181 $compr_ext, $orig_f_part
2184 my $input = new IO::File $f, '<' or die "$f $!";
2188 if (defined $compr_ext) {
2190 Dpkg::Compression::compression_guess_from_filename $f;
2191 fail "Dpkg::Compression cannot handle file $f in source package"
2192 if defined $compr_ext && !defined $cname;
2194 new Dpkg::Compression::Process compression => $cname;
2195 @compr_cmd = $compr_proc->get_uncompress_cmdline();
2196 my $compr_fh = new IO::Handle;
2197 my $compr_pid = open $compr_fh, "-|" // die $!;
2199 open STDIN, "<&", $input or die $!;
2201 die "dgit (child): exec $compr_cmd[0]: $!\n";
2206 rmtree "_unpack-tar";
2207 mkdir "_unpack-tar" or die $!;
2208 my @tarcmd = qw(tar -x -f -
2209 --no-same-owner --no-same-permissions
2210 --no-acls --no-xattrs --no-selinux);
2211 my $tar_pid = fork // die $!;
2213 chdir "_unpack-tar" or die $!;
2214 open STDIN, "<&", $input or die $!;
2216 die "dgit (child): exec $tarcmd[0]: $!";
2218 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2219 !$? or failedcmd @tarcmd;
2222 (@compr_cmd ? ($?==SIGPIPE || failedcmd @compr_cmd)
2224 # finally, we have the results in "tarball", but maybe
2225 # with the wrong permissions
2227 runcmd qw(chmod -R +rwX _unpack-tar);
2228 changedir "_unpack-tar";
2229 remove_stray_gits($f);
2230 mktree_in_ud_here();
2232 my ($tree) = git_add_write_tree();
2233 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2234 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2236 printdebug "one subtree $1\n";
2238 printdebug "multiple subtrees\n";
2241 rmtree "_unpack-tar";
2243 my $ent = [ $f, $tree ];
2245 Orig => !!$orig_f_part,
2246 Sort => (!$orig_f_part ? 2 :
2247 $orig_f_part =~ m/-/g ? 1 :
2255 # put any without "_" first (spec is not clear whether files
2256 # are always in the usual order). Tarballs without "_" are
2257 # the main orig or the debian tarball.
2258 $a->{Sort} <=> $b->{Sort} or
2262 my $any_orig = grep { $_->{Orig} } @tartrees;
2264 my $dscfn = "$package.dsc";
2266 my $treeimporthow = 'package';
2268 open D, ">", $dscfn or die "$dscfn: $!";
2269 print D $dscdata or die "$dscfn: $!";
2270 close D or die "$dscfn: $!";
2271 my @cmd = qw(dpkg-source);
2272 push @cmd, '--no-check' if $dsc_checked;
2273 if (madformat $dsc->{format}) {
2274 push @cmd, '--skip-patches';
2275 $treeimporthow = 'unpatched';
2277 push @cmd, qw(-x --), $dscfn;
2280 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2281 if (madformat $dsc->{format}) {
2282 check_for_vendor_patches();
2286 if (madformat $dsc->{format}) {
2287 my @pcmd = qw(dpkg-source --before-build .);
2288 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2290 $dappliedtree = git_add_write_tree();
2293 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2297 printdebug "import clog search...\n";
2298 parsechangelog_loop \@clogcmd, "package changelog", sub {
2299 my ($thisstanza, $desc) = @_;
2300 no warnings qw(exiting);
2302 $clogp //= $thisstanza;
2304 printdebug "import clog $thisstanza->{version} $desc...\n";
2306 last if !$any_orig; # we don't need $r1clogp
2308 # We look for the first (most recent) changelog entry whose
2309 # version number is lower than the upstream version of this
2310 # package. Then the last (least recent) previous changelog
2311 # entry is treated as the one which introduced this upstream
2312 # version and used for the synthetic commits for the upstream
2315 # One might think that a more sophisticated algorithm would be
2316 # necessary. But: we do not want to scan the whole changelog
2317 # file. Stopping when we see an earlier version, which
2318 # necessarily then is an earlier upstream version, is the only
2319 # realistic way to do that. Then, either the earliest
2320 # changelog entry we have seen so far is indeed the earliest
2321 # upload of this upstream version; or there are only changelog
2322 # entries relating to later upstream versions (which is not
2323 # possible unless the changelog and .dsc disagree about the
2324 # version). Then it remains to choose between the physically
2325 # last entry in the file, and the one with the lowest version
2326 # number. If these are not the same, we guess that the
2327 # versions were created in a non-monotonic order rather than
2328 # that the changelog entries have been misordered.
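# (Hypothetical illustration: importing 1.4-2, so $upstreamv is 1.4,
# against a changelog listing 1.4-2, 1.4-1, 1.3-3, ...: the loop
# below stops at 1.3-3, the first entry older than 1.4, leaving
# $r1clogp as 1.4-1, the upload which introduced this upstream
# version.)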
2330 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2332 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2333 $r1clogp = $thisstanza;
2335 printdebug "import clog $r1clogp->{version} becomes r1\n";
2338 $clogp or fail "package changelog has no entries!";
2340 my $authline = clogp_authline $clogp;
2341 my $changes = getfield $clogp, 'Changes';
2342 $changes =~ s/^\n//; # Changes: \n
2343 my $cversion = getfield $clogp, 'Version';
2346 $r1clogp //= $clogp; # maybe there's only one entry;
2347 my $r1authline = clogp_authline $r1clogp;
2348 # Strictly, r1authline might now be wrong if it's going to be
2349 # unused because !$any_orig. Whatever.
2351 printdebug "import tartrees authline $authline\n";
2352 printdebug "import tartrees r1authline $r1authline\n";
2354 foreach my $tt (@tartrees) {
2355 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2357 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2360 committer $r1authline
2364 [dgit import orig $tt->{F}]
2372 [dgit import tarball $package $cversion $tt->{F}]
2377 printdebug "import main commit\n";
2379 open C, ">../commit.tmp" or die $!;
2380 print C <<END or die $!;
2383 print C <<END or die $! foreach @tartrees;
2386 print C <<END or die $!;
2392 [dgit import $treeimporthow $package $cversion]
2396 my $rawimport_hash = make_commit qw(../commit.tmp);
2398 if (madformat $dsc->{format}) {
2399 printdebug "import apply patches...\n";
2401 # regularise the state of the working tree so that
2402 # the checkout of $rawimport_hash works nicely.
2403 my $dappliedcommit = make_commit_text(<<END);
2410 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2412 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2414 # We need the answers to be reproducible
2415 my @authline = clogp_authline($clogp);
2416 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2417 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2418 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2419 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2420 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2421 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2423 my $path = $ENV{PATH} or die;
2425 # we use ../../gbp-pq-output, which (given that we are in
2426 # $playground/PLAYTREE, and $playground is .git/dgit/unpack,
2429 foreach my $use_absurd (qw(0 1)) {
2430 runcmd @git, qw(checkout -q unpa);
2431 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2432 local $ENV{PATH} = $path;
2435 progress "warning: $@";
2436 $path = "$absurdity:$path";
2437 progress "$us: trying slow absurd-git-apply...";
2438 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2443 die "forbid absurd git-apply\n" if $use_absurd
2444 && forceing [qw(import-gitapply-no-absurd)];
2445 die "only absurd git-apply!\n" if !$use_absurd
2446 && forceing [qw(import-gitapply-absurd)];
2448 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2449 local $ENV{PATH} = $path if $use_absurd;
2451 my @showcmd = (gbp_pq, qw(import));
2452 my @realcmd = shell_cmd
2453 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2454 debugcmd "+",@realcmd;
2455 if (system @realcmd) {
2456 die +(shellquote @showcmd).
2458 failedcmd_waitstatus()."\n";
2461 my $gapplied = git_rev_parse('HEAD');
2462 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2463 $gappliedtree eq $dappliedtree or
2465 gbp-pq import and dpkg-source disagree!
2466 gbp-pq import gave commit $gapplied
2467 gbp-pq import gave tree $gappliedtree
2468 dpkg-source --before-build gave tree $dappliedtree
2470 $rawimport_hash = $gapplied;
2475 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2480 progress "synthesised git commit from .dsc $cversion";
2482 my $rawimport_mergeinput = {
2483 Commit => $rawimport_hash,
2484 Info => "Import of source package",
2486 my @output = ($rawimport_mergeinput);
2488 if ($lastpush_mergeinput) {
2489 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2490 my $oversion = getfield $oldclogp, 'Version';
2492 version_compare($oversion, $cversion);
2494 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2495 { Message => <<END, ReverseParents => 1 });
2496 Record $package ($cversion) in archive suite $csuite
2498 } elsif ($vcmp > 0) {
2499 print STDERR <<END or die $!;
2501 Version actually in archive: $cversion (older)
2502 Last version pushed with dgit: $oversion (newer or same)
2505 @output = $lastpush_mergeinput;
2507 # Same version. Use what's in the server git branch,
2508 # discarding our own import. (This could happen if the
2509 # server automatically imports all packages into git.)
2510 @output = $lastpush_mergeinput;
2518 sub complete_file_from_dsc ($$;$) {
2519 our ($dstdir, $fi, $refetched) = @_;
2520 # Ensures that we have, in $dstdir, the file $fi, with the correct
2521 # contents. (Downloading it from alongside $dscurl if necessary.)
2522 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2523 # and will set $$refetched=1 if it did so (or tried to).
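# (Both call sites appear elsewhere in this file:
#   complete_file_from_dsc('.', $fi, \$refetched)  in generate_commits_from_dsc
#   complete_file_from_dsc('..', $fi)              in ensure_we_have_orig
# )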
2525 my $f = $fi->{Filename};
2526 my $tf = "$dstdir/$f";
2530 my $checkhash = sub {
2531 open F, "<", "$tf" or die "$tf: $!";
2532 $fi->{Digester}->reset();
2533 $fi->{Digester}->addfile(*F);
2534 F->error and die $!;
2535 $got = $fi->{Digester}->hexdigest();
2536 return $got eq $fi->{Hash};
2539 if (stat_exists $tf) {
2540 if ($checkhash->()) {
2541 progress "using existing $f";
2545 fail "file $f has hash $got but .dsc".
2546 " demands hash $fi->{Hash} ".
2547 "(perhaps you should delete this file?)";
2549 progress "need to fetch correct version of $f";
2550 unlink $tf or die "$tf $!";
2553 printdebug "$tf does not exist, need to fetch\n";
2557 $furl =~ s{/[^/]+$}{};
2559 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2560 die "$f ?" if $f =~ m#/#;
2561 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2562 return 0 if !act_local();
2565 fail "file $f has hash $got but .dsc".
2566 " demands hash $fi->{Hash} ".
2567 "(got wrong file from archive!)";
2572 sub ensure_we_have_orig () {
2573 my @dfi = dsc_files_info();
2574 foreach my $fi (@dfi) {
2575 my $f = $fi->{Filename};
2576 next unless is_orig_file_in_dsc($f, \@dfi);
2577 complete_file_from_dsc('..', $fi)
2582 #---------- git fetch ----------
2584 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2585 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2587 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2588 # locally fetched refs because they have unhelpful names and clutter
2589 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2590 # whether we have made another local ref which refers to this object).
2592 # (If we deleted them unconditionally, then we might end up
2593 # re-fetching the same git objects each time dgit fetch was run.)
2595 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2596 # in git_fetch_us to fetch the refs in question, and possibly a call
2597 # to lrfetchref_used.
2599 our (%lrfetchrefs_f, %lrfetchrefs_d);
2600 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
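# (Gloss: %lrfetchrefs_f records everything the last fetch left under
# lrfetchrefs; lrfetchref_used copies an entry into %lrfetchrefs_d,
# marking it as consumed so that del_lrfetchrefs, in
# fetch_from_archive, may delete the ref at the end of the run.)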
2602 sub lrfetchref_used ($) {
2603 my ($fullrefname) = @_;
2604 my $objid = $lrfetchrefs_f{$fullrefname};
2605 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2608 sub git_lrfetch_sane {
2609 my ($url, $supplementary, @specs) = @_;
2610 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2611 # at least as regards @specs. Also leave the results in
2612 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2613 # able to clean these up.
2615 # With $supplementary==1, @specs must not contain wildcards
2616 # and we add to our previous fetches (non-atomically).
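# (Usage, as at the call sites below:
#   git_lrfetch_sane $url, 0, @specs;  # git_fetch_us: fresh, wildcards allowed
#   git_lrfetch_sane $url, 1, @fetch;  # resolve_dsc_field_commit: supplementary
# )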
2618 # This is rather miserable:
2619 # When git fetch --prune is passed a fetchspec ending with a *,
2620 # it does a plausible thing. If there is no * then:
2621 # - it matches subpaths too, even if the supplied refspec
2622 # starts with refs, and behaves completely madly if the source
2623 # has refs/refs/something. (See, for example, Debian #NNNN.)
2624 # - if there is no matching remote ref, it bombs out the whole
2626 # We want to fetch a fixed ref, and we don't know in advance
2627 # if it exists, so this is not suitable.
2629 # Our workaround is to use git ls-remote. git ls-remote has its
2630 # own quirks. Notably, it has the absurd multi-tail-matching
2631 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2632 # refs/refs/foo etc.
2634 # Also, we want an idempotent snapshot, but we have to make two
2635 # calls to the remote: one to git ls-remote and one to git fetch. The
2636 # solution is to use git ls-remote to obtain a target state, and
2637 # git fetch to try to generate it. If we don't manage to generate
2638 # the target state, we try again.
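# In outline (a paraphrase of the loop below, not additional logic):
#   repeat, up to 10 times:
#     record the wanted state from git ls-remote in %wantr
#     git fetch the corresponding refspecs into lrfetchrefs/*
#     delete any refs we received but did not ask for
#     for each wanted ref: if the wanted object never arrived, assume
#       a race with the server and retry; if it arrived under another
#       name, point the local ref at it with git update-ref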
2640 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2642 my $specre = join '|', map {
2645 my $wildcard = $x =~ s/\\\*$/.*/;
2646 die if $wildcard && $supplementary;
2649 printdebug "git_lrfetch_sane specre=$specre\n";
2650 my $wanted_rref = sub {
2652 return m/^(?:$specre)$/;
2655 my $fetch_iteration = 0;
2658 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2659 if (++$fetch_iteration > 10) {
2660 fail "too many iterations trying to get sane fetch!";
2663 my @look = map { "refs/$_" } @specs;
2664 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2668 open GITLS, "-|", @lcmd or die $!;
2670 printdebug "=> ", $_;
2671 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2672 my ($objid,$rrefname) = ($1,$2);
2673 if (!$wanted_rref->($rrefname)) {
2675 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2679 $wantr{$rrefname} = $objid;
2682 close GITLS or failedcmd @lcmd;
2684 # OK, now %wantr is exactly what we want for refs in @specs
2686 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2687 "+refs/$_:".lrfetchrefs."/$_";
2690 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2692 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2693 runcmd_ordryrun_local @fcmd if @fspecs;
2695 if (!$supplementary) {
2696 %lrfetchrefs_f = ();
2700 git_for_each_ref(lrfetchrefs, sub {
2701 my ($objid,$objtype,$lrefname,$reftail) = @_;
2702 $lrfetchrefs_f{$lrefname} = $objid;
2703 $objgot{$objid} = 1;
2706 if ($supplementary) {
2710 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2711 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2712 if (!exists $wantr{$rrefname}) {
2713 if ($wanted_rref->($rrefname)) {
2715 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2719 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2722 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2723 delete $lrfetchrefs_f{$lrefname};
2727 foreach my $rrefname (sort keys %wantr) {
2728 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2729 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2730 my $want = $wantr{$rrefname};
2731 next if $got eq $want;
2732 if (!defined $objgot{$want}) {
2733 fail <<END unless act_local();
2734 --dry-run specified but we actually wanted the results of git fetch,
2735 so this is not going to work. Try running dgit fetch first,
2736 or using --damp-run instead of --dry-run.
2739 warning: git ls-remote suggests we want $lrefname
2740 warning: and it should refer to $want
2741 warning: but git fetch didn't fetch that object to any relevant ref.
2742 warning: This may be due to a race with someone updating the server.
2743 warning: Will try again...
2745 next FETCH_ITERATION;
2748 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says we want $want
2750 runcmd_ordryrun_local @git, qw(update-ref -m),
2751 "dgit fetch git fetch fixup", $lrefname, $want;
2752 $lrfetchrefs_f{$lrefname} = $want;
2757 if (defined $csuite) {
2758 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2759 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2760 my ($objid,$objtype,$lrefname,$reftail) = @_;
2761 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2762 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2766 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2767 Dumper(\%lrfetchrefs_f);
2770 sub git_fetch_us () {
2771 # Want to fetch only what we are going to use, unless
2772 # deliberately-not-ff, in which case we must fetch everything.
2774 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2776 (quiltmode_splitbrain
2777 ? (map { $_->('*',access_nomdistro) }
2778 \&debiantag_new, \&debiantag_maintview)
2779 : debiantags('*',access_nomdistro));
2780 push @specs, server_branch($csuite);
2781 push @specs, $rewritemap;
2782 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2784 my $url = access_giturl();
2785 git_lrfetch_sane $url, 0, @specs;
2788 my @tagpats = debiantags('*',access_nomdistro);
2790 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2791 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2792 printdebug "currently $fullrefname=$objid\n";
2793 $here{$fullrefname} = $objid;
2795 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2796 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2797 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2798 printdebug "offered $lref=$objid\n";
2799 if (!defined $here{$lref}) {
2800 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2801 runcmd_ordryrun_local @upd;
2802 lrfetchref_used $fullrefname;
2803 } elsif ($here{$lref} eq $objid) {
2804 lrfetchref_used $fullrefname;
2807 "Not updating $lref from $here{$lref} to $objid.\n";
2812 #---------- dsc and archive handling ----------
2814 sub mergeinfo_getclogp ($) {
2815 # Ensures that $mi->{Clogp} exists and returns it
2817 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2820 sub mergeinfo_version ($) {
2821 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2824 sub fetch_from_archive_record_1 ($) {
2826 runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash;
2827 cmdoutput @git, qw(log -n2), $hash;
2828 # ... gives git a chance to complain if our commit is malformed
2831 sub fetch_from_archive_record_2 ($) {
2833 my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash);
2837 dryrun_report @upd_cmd;
2841 sub parse_dsc_field_def_dsc_distro () {
2842 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2843 dgit.default.distro);
2846 sub parse_dsc_field ($$) {
2847 my ($dsc, $what) = @_;
2849 foreach my $field (@ourdscfield) {
2850 $f = $dsc->{$field};
2855 progress "$what: NO git hash";
2856 parse_dsc_field_def_dsc_distro();
2857 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2858 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2859 progress "$what: specified git info ($dsc_distro)";
2860 $dsc_hint_tag = [ $dsc_hint_tag ];
2861 } elsif ($f =~ m/^\w+\s*$/) {
2863 parse_dsc_field_def_dsc_distro();
2864 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2866 progress "$what: specified git hash";
2868 fail "$what: invalid Dgit info";
2872 sub resolve_dsc_field_commit ($$) {
2873 my ($already_distro, $already_mapref) = @_;
2875 return unless defined $dsc_hash;
2878 defined $already_mapref &&
2879 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2880 ? $already_mapref : undef;
2884 my ($what, @fetch) = @_;
2886 local $idistro = $dsc_distro;
2887 my $lrf = lrfetchrefs;
2889 if (!$chase_dsc_distro) {
2891 "not chasing .dsc distro $dsc_distro: not fetching $what";
2896 ".dsc names distro $dsc_distro: fetching $what";
2898 my $url = access_giturl();
2899 if (!defined $url) {
2900 defined $dsc_hint_url or fail <<END;
2901 .dsc Dgit metadata is in context of distro $dsc_distro
2902 for which we have no configured url and .dsc provides no hint
2905 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2906 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2907 parse_cfg_bool "dsc-url-proto-ok", 'false',
2908 cfg("dgit.dsc-url-proto-ok.$proto",
2909 "dgit.default.dsc-url-proto-ok")
2911 .dsc Dgit metadata is in context of distro $dsc_distro
2912 for which we have no configured url;
2913 .dsc provides hinted url with protocol $proto which is unsafe.
2914 (can be overridden by config - consult documentation)
2916 $url = $dsc_hint_url;
2919 git_lrfetch_sane $url, 1, @fetch;
2924 my $rewrite_enable = do {
2925 local $idistro = $dsc_distro;
2926 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2929 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2930 if (!defined $mapref) {
2931 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2932 $mapref = $lrf.'/'.$rewritemap;
2934 my $rewritemapdata = git_cat_file $mapref.':map';
2935 if (defined $rewritemapdata
2936 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2938 "server's git history rewrite map contains a relevant entry!";
2941 if (defined $dsc_hash) {
2942 progress "using rewritten git hash in place of .dsc value";
2944 progress "server data says .dsc hash is to be disregarded";
2949 if (!defined git_cat_file $dsc_hash) {
2950 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2951 my $lrf = $do_fetch->("additional commits", @tags) &&
2952 defined git_cat_file $dsc_hash
2954 .dsc Dgit metadata requires commit $dsc_hash
2955 but we could not obtain that object anywhere.
2957 foreach my $t (@tags) {
2958 my $fullrefname = $lrf.'/'.$t;
2959 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2960 next unless $lrfetchrefs_f{$fullrefname};
2961 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2962 lrfetchref_used $fullrefname;
2967 sub fetch_from_archive () {
2968 ensure_setup_existing_tree();
2970 # Ensures that lrref() is what is actually in the archive, one way
2971 # or another, according to us - ie this client's
2972 # appropriately-updated archive view. Also returns the commit id.
2973 # If there is nothing in the archive, leaves lrref alone and
2974 # returns undef. git_fetch_us must have already been called.
2978 parse_dsc_field($dsc, 'last upload to archive');
2979 resolve_dsc_field_commit access_basedistro,
2980 lrfetchrefs."/".$rewritemap
2982 progress "no version available from the archive";
2985 # If the archive's .dsc has a Dgit field, there are three
2986 # relevant git commitids we need to choose between and/or merge
2988 # 1. $dsc_hash: the Dgit field from the archive
2989 # 2. $lastpush_hash: the suite branch on the dgit git server
2990 # 3. $lastfetch_hash: our local tracking branch for the suite
2992 # These may all be distinct and need not be in any fast forward
2995 # If the dsc was pushed to this suite, then the server suite
2996 # branch will have been updated; but it might have been pushed to
2997 # a different suite and copied by the archive. Conversely a more
2998 # recent version may have been pushed with dgit but not appeared
2999 # in the archive (yet).
3001 # $lastfetch_hash may be awkward because archive imports
3002 # (particularly, imports of Dgit-less .dscs) are performed only as
3003 # needed on individual clients, so different clients may perform a
3004 # different subset of them - and these imports are only made
3005 # public during push. So $lastfetch_hash may represent a set of
3006 # imports different to a subsequent upload by a different dgit
3009 # Our approach is as follows:
3011 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
3012 # descendant of $dsc_hash, then it was pushed by a dgit user who
3013 # had based their work on $dsc_hash, so we should prefer it.
3014 # Otherwise, $dsc_hash was installed into this suite in the
3015 # archive other than by a dgit push, and (necessarily) after the
3016 # last dgit push into that suite (since a dgit push would have
3017 # been descended from the dgit server git branch); thus, in that
3018 # case, we prefer the archive's version (and produce a
3019 # pseudo-merge to overwrite the dgit server git branch).
3021 # (If there is no Dgit field in the archive's .dsc then
3022 # generate_commit_from_dsc uses the version numbers to decide
3023 # whether the suite branch or the archive is newer. If the suite
3024 # branch is newer it ignores the archive's .dsc; otherwise it
3025 # generates an import of the .dsc, and produces a pseudo-merge to
3026 # overwrite the suite branch with the archive contents.)
3028 # The outcome of that part of the algorithm is the `public view',
3029 # and is the same for all dgit clients: it does not depend on any
3030 # unpublished history in the local tracking branch.
3032 # As between the public view and the local tracking branch: The
3033 # local tracking branch is only updated by dgit fetch, and
3034 # whenever dgit fetch runs it includes the public view in the
3035 # local tracking branch. Therefore if the public view is not
3036 # descended from the local tracking branch, the local tracking
3037 # branch must contain history which was imported from the archive
3038 # but never pushed; and, its tip is now out of date. So, we make
3039 # a pseudo-merge to overwrite the old imports and stitch the old
3042 # Finally: we do not necessarily reify the public view (as
3043 # described above). This is so that we do not end up stacking two
3044 # pseudo-merges. So what we actually do is figure out the inputs
3045 # to any public view pseudo-merge and put them in @mergeinputs.
3048 # $mergeinputs[]{Commit}
3049 # $mergeinputs[]{Info}
3050 # $mergeinputs[0] is the one whose tree we use
3051 # @mergeinputs is in the order we use in the actual commit)
3054 # $mergeinputs[]{Message} is a commit message to use
3055 # $mergeinputs[]{ReverseParents} if defined, specifies that the parent
3056 # list should be in opposite order
3057 # Such an entry has no Commit or Info. It applies only when found
3058 # in the last entry. (This ugliness is to support making
3059 # identical imports to previous dgit versions.)
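# (A hypothetical instance, using the Info strings set up just below:
#   @mergeinputs = (
#     { Commit => $dsc_hash,       Info => "Dgit field in .dsc from archive" },
#     { Commit => $lastfetch_hash, Info => "dgit client's archive history view" },
#     { Message => "Record ... in archive suite ...", ReverseParents => 1 },
#   );
# )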
3061 my $lastpush_hash = git_get_ref(lrfetchref());
3062 printdebug "previous reference hash=$lastpush_hash\n";
3063 $lastpush_mergeinput = $lastpush_hash && {
3064 Commit => $lastpush_hash,
3065 Info => "dgit suite branch on dgit git server",
3068 my $lastfetch_hash = git_get_ref(lrref());
3069 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3070 my $lastfetch_mergeinput = $lastfetch_hash && {
3071 Commit => $lastfetch_hash,
3072 Info => "dgit client's archive history view",
3075 my $dsc_mergeinput = $dsc_hash && {
3076 Commit => $dsc_hash,
3077 Info => "Dgit field in .dsc from archive",
3081 my $del_lrfetchrefs = sub {
3084 printdebug "del_lrfetchrefs...\n";
3085 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3086 my $objid = $lrfetchrefs_d{$fullrefname};
3087 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3089 $gur ||= new IO::Handle;
3090 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3092 printf $gur "delete %s %s\n", $fullrefname, $objid;
3095 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3099 if (defined $dsc_hash) {
3100 ensure_we_have_orig();
3101 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3102 @mergeinputs = $dsc_mergeinput
3103 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3104 print STDERR <<END or die $!;
3106 Git commit in archive is behind the last version allegedly pushed/uploaded.
3107 Commit referred to by archive: $dsc_hash
3108 Last version pushed with dgit: $lastpush_hash
3111 @mergeinputs = ($lastpush_mergeinput);
3113 # Archive has .dsc which is not a descendant of the last dgit
3114 # push. This can happen if the archive moves .dscs about.
3115 # Just follow its lead.
3116 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3117 progress "archive .dsc names newer git commit";
3118 @mergeinputs = ($dsc_mergeinput);
3120 progress "archive .dsc names other git commit, fixing up";
3121 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3125 @mergeinputs = generate_commits_from_dsc();
3126 # We have just done an import. Now, our import algorithm might
3127 # have been improved. But even so we do not want to generate
3128 # a new different import of the same package. So if the
3129 # version numbers are the same, just use our existing version.
3130 # If the version numbers are different, the archive has changed
3131 # (perhaps, rewound).
3132 if ($lastfetch_mergeinput &&
3133 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3134 (mergeinfo_version $mergeinputs[0]) )) {
3135 @mergeinputs = ($lastfetch_mergeinput);
3137 } elsif ($lastpush_hash) {
3138 # only in git, not in the archive yet
3139 @mergeinputs = ($lastpush_mergeinput);
3140 print STDERR <<END or die $!;
3142 Package not found in the archive, but has allegedly been pushed using dgit.
3146 printdebug "nothing found!\n";
3147 if (defined $skew_warning_vsn) {
3148 print STDERR <<END or die $!;
3150 Warning: relevant archive skew detected.
3151 Archive allegedly contains $skew_warning_vsn
3152 But we were not able to obtain any version from the archive or git.
3156 unshift @end, $del_lrfetchrefs;
3160 if ($lastfetch_hash &&
3162 my $h = $_->{Commit};
3163 $h and is_fast_fwd($lastfetch_hash, $h);
3164 # If true, one of the existing parents of this commit
3165 # is a descendant of the $lastfetch_hash, so we'll
3166 # be ff from that automatically.
3170 push @mergeinputs, $lastfetch_mergeinput;
3173 printdebug "fetch mergeinfos:\n";
3174 foreach my $mi (@mergeinputs) {
3176 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3178 printdebug sprintf " ReverseParents=%d Message=%s",
3179 $mi->{ReverseParents}, $mi->{Message};
3183 my $compat_info= pop @mergeinputs
3184 if $mergeinputs[$#mergeinputs]{Message};
3186 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3189 if (@mergeinputs > 1) {
3191 my $tree_commit = $mergeinputs[0]{Commit};
3193 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3194 $tree =~ m/\n\n/; $tree = $`;
3195 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3198 # We use the changelog author of the package in question as the
3199 # author of this pseudo-merge. This is (roughly) correct if
3200 # this commit is simply representing a non-dgit upload.
3201 # (Roughly because it does not record sponsorship - but we
3202 # don't have sponsorship info because that's in the .changes,
3203 # which isn't in the archive.)
3205 # But, it might be that we are representing archive history
3206 # updates (including in-archive copies). These are not really
3207 # the responsibility of the person who created the .dsc, but
3208 # there is no-one whose name we should better use. (The
3209 # author of the .dsc-named commit is clearly worse.)
3211 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3212 my $author = clogp_authline $useclogp;
3213 my $cversion = getfield $useclogp, 'Version';
3215 my $mcf = dgit_privdir()."/mergecommit";
3216 open MC, ">", $mcf or die "$mcf $!";
3217 print MC <<END or die $!;
3221 my @parents = grep { $_->{Commit} } @mergeinputs;
3222 @parents = reverse @parents if $compat_info->{ReverseParents};
3223 print MC <<END or die $! foreach @parents;
3227 print MC <<END or die $!;
3233 if (defined $compat_info->{Message}) {
3234 print MC $compat_info->{Message} or die $!;
3236 print MC <<END or die $!;
3237 Record $package ($cversion) in archive suite $csuite
3241 my $message_add_info = sub {
3243 my $mversion = mergeinfo_version $mi;
3244 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3248 $message_add_info->($mergeinputs[0]);
3249 print MC <<END or die $!;
3250 should be treated as descended from
3252 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3256 $hash = make_commit $mcf;
3258 $hash = $mergeinputs[0]{Commit};
3260 printdebug "fetch hash=$hash\n";
3263 my ($lasth, $what) = @_;
3264 return unless $lasth;
3265 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3268 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3270 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3272 fetch_from_archive_record_1($hash);
3274 if (defined $skew_warning_vsn) {
3275 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3276 my $gotclogp = commit_getclogp($hash);
3277 my $got_vsn = getfield $gotclogp, 'Version';
3278 printdebug "SKEW CHECK GOT $got_vsn\n";
3279 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3280 print STDERR <<END or die $!;
3282 Warning: archive skew detected. Using the available version:
3283 Archive allegedly contains $skew_warning_vsn
3284 We were able to obtain only $got_vsn
3290 if ($lastfetch_hash ne $hash) {
3291 fetch_from_archive_record_2($hash);
3294 lrfetchref_used lrfetchref();
3296 check_gitattrs($hash, "fetched source tree");
3298 unshift @end, $del_lrfetchrefs;
3302 sub set_local_git_config ($$) {
3304 runcmd @git, qw(config), $k, $v;
3307 sub setup_mergechangelogs (;$) {
3309 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3311 my $driver = 'dpkg-mergechangelogs';
3312 my $cb = "merge.$driver";
3313 confess unless defined $maindir;
3314 my $attrs = "$maindir_gitcommon/info/attributes";
3315 ensuredir "$maindir_gitcommon/info";
3317 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3318 if (!open ATTRS, "<", $attrs) {
3319 $!==ENOENT or die "$attrs: $!";
3323 next if m{^debian/changelog\s};
3324 print NATTRS $_, "\n" or die $!;
3326 ATTRS->error and die $!;
3329 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3332 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3333 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
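# (%O, %A and %B are git merge-driver placeholders: the merge base,
# the current branch's version and the other branch's version
# respectively; the driver must leave its result in %A, which is why
# %A is passed again as dpkg-mergechangelogs's output file.)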
3335 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3338 sub setup_useremail (;$) {
3340 return unless $always || access_cfg_bool(1, 'setup-useremail');
3343 my ($k, $envvar) = @_;
3344 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3345 return unless defined $v;
3346 set_local_git_config "user.$k", $v;
3349 $setup->('email', 'DEBEMAIL');
3350 $setup->('name', 'DEBFULLNAME');
3353 sub ensure_setup_existing_tree () {
3354 my $k = "remote.$remotename.skipdefaultupdate";
3355 my $c = git_get_config $k;
3356 return if defined $c;
3357 set_local_git_config $k, 'true';
3360 sub open_main_gitattrs () {
3361 confess 'internal error no maindir' unless defined $maindir;
3362 my $gai = new IO::File "$maindir_gitcommon/info/attributes"
3364 or die "open $maindir_gitcommon/info/attributes: $!";
3368 our $gitattrs_ourmacro_re = qr{^\[attr\]dgit-defuse-attrs\s};
3370 sub is_gitattrs_setup () {
3373 # 1: gitattributes set up and should be left alone
3375 # 0: there is a dgit-defuse-attrs but it needs fixing
3376 # undef: there is none
3377 my $gai = open_main_gitattrs();
3378 return 0 unless $gai;
3380 next unless m{$gitattrs_ourmacro_re};
3381 return 1 if m{\s-working-tree-encoding\s};
3382 printdebug "is_gitattrs_setup: found old macro\n";
3385 $gai->error and die $!;
3386 printdebug "is_gitattrs_setup: found nothing\n";
3390 sub setup_gitattrs (;$) {
3392 return unless $always || access_cfg_bool(1, 'setup-gitattributes');
3394 my $already = is_gitattrs_setup();
3397 [attr]dgit-defuse-attrs already found, and proper, in .git/info/attributes
3398 not doing further gitattributes setup
3402 my $new = "[attr]dgit-defuse-attrs $negate_harmful_gitattrs";
3403 my $af = "$maindir_gitcommon/info/attributes";
3404 ensuredir "$maindir_gitcommon/info";
3406 open GAO, "> $af.new" or die $!;
3407 print GAO <<END or die $! unless defined $already;
3410 # ^ see GITATTRIBUTES in dgit(7) and dgit setup-new-tree in dgit(1)
3412 my $gai = open_main_gitattrs();
3415 if (m{$gitattrs_ourmacro_re}) {
3416 die unless defined $already;
3420 print GAO $_, "\n" or die $!;
3422 $gai->error and die $!;
3424 close GAO or die $!;
3425 rename "$af.new", "$af" or die "install $af: $!";
3428 sub setup_new_tree () {
3429 setup_mergechangelogs();
3434 sub check_gitattrs ($$) {
3435 my ($treeish, $what) = @_;
3437 return if is_gitattrs_setup;
3440 my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:");
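# with -l and -z each record looks like
#   <mode> SP <type> SP <objid> SP <size> TAB <path> NUL
# which is what the substitution below strips, capturing the size.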
3442 my $gafl = new IO::File;
3443 open $gafl, "-|", @cmd or die $!;
3446 s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die;
3448 next unless m{(?:^|/)\.gitattributes$};
3450 # oh dear, found one
3452 dgit: warning: $what contains .gitattributes
3453 dgit: .gitattributes not (fully) defused. Recommended: dgit setup-new-tree.
3458 # tree contains no .gitattributes files
3459 $?=0; $!=0; close $gafl or failedcmd @cmd;
3463 sub multisuite_suite_child ($$$) {
3464 my ($tsuite, $merginputs, $fn) = @_;
3465 # in child, sets things up, calls $fn->(), and returns undef
3466 # in parent, returns canonical suite name for $tsuite
3467 my $canonsuitefh = IO::File::new_tmpfile;
3468 my $pid = fork // die $!;
3472 $us .= " [$isuite]";
3473 $debugprefix .= " ";
3474 progress "fetching $tsuite...";
3475 canonicalise_suite();
3476 print $canonsuitefh $csuite, "\n" or die $!;
3477 close $canonsuitefh or die $!;
3481 (waitpid $pid, 0) == $pid or die $!;
3482 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3483 seek $canonsuitefh,0,0 or die $!;
3484 local $csuite = <$canonsuitefh>;
3485 die $! unless defined $csuite && chomp $csuite;
3487 printdebug "multisuite $tsuite missing\n";
3490 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3491 push @$merginputs, {
3498 sub fork_for_multisuite ($) {
3499 my ($before_fetch_merge) = @_;
3500 # if nothing unusual, just returns ''
3503 # returns 0 to caller in child, to do first of the specified suites
3504 # in child, $csuite is not yet set
3506 # returns 1 to caller in parent, to finish up anything needed after
3507 # in parent, $csuite is set to canonicalised portmanteau
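# (Hypothetical example: an $isuite of "stable,-security" is split at
# the commas; the child deals with "stable" first, and the parent then
# handles "-security", whose leading "-" is expanded below to
# "<canonicalised base suite>-", before combining the branches.)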
3509 my $org_isuite = $isuite;
3510 my @suites = split /\,/, $isuite;
3511 return '' unless @suites > 1;
3512 printdebug "fork_for_multisuite: @suites\n";
3516 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3518 return 0 unless defined $cbasesuite;
3520 fail "package $package missing in (base suite) $cbasesuite"
3521 unless @mergeinputs;
3523 my @csuites = ($cbasesuite);
3525 $before_fetch_merge->();
3527 foreach my $tsuite (@suites[1..$#suites]) {
3528 $tsuite =~ s/^-/$cbasesuite-/;
3529 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3535 # xxx collect the ref here
3537 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3538 push @csuites, $csubsuite;
3541 foreach my $mi (@mergeinputs) {
3542 my $ref = git_get_ref $mi->{Ref};
3543 die "$mi->{Ref} ?" unless length $ref;
3544 $mi->{Commit} = $ref;
3547 $csuite = join ",", @csuites;
3549 my $previous = git_get_ref lrref;
3551 unshift @mergeinputs, {
3552 Commit => $previous,
3553 Info => "local combined tracking branch",
3555 "archive seems to have rewound: local tracking branch is ahead!",
3559 foreach my $ix (0..$#mergeinputs) {
3560 $mergeinputs[$ix]{Index} = $ix;
3563 @mergeinputs = sort {
3564 -version_compare(mergeinfo_version $a,
3565 mergeinfo_version $b) # highest version first
3567 $a->{Index} <=> $b->{Index}; # earliest in spec first
3573 foreach my $mi (@mergeinputs) {
3574 printdebug "multisuite merge check $mi->{Info}\n";
3575 foreach my $previous (@needed) {
3576 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3577 printdebug "multisuite merge un-needed $previous->{Info}\n";
3581 printdebug "multisuite merge this-needed\n";
3582 $mi->{Character} = '+';
3585 $needed[0]{Character} = '*';
3587 my $output = $needed[0]{Commit};
3590 printdebug "multisuite merge nontrivial\n";
3591 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3593 my $commit = "tree $tree\n";
3594 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3595 "Input branches:\n";
3597 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3598 printdebug "multisuite merge include $mi->{Info}\n";
3599 $mi->{Character} //= ' ';
3600 $commit .= "parent $mi->{Commit}\n";
3601 $msg .= sprintf " %s %-25s %s\n",
3603 (mergeinfo_version $mi),
3606 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3608 " * marks the highest version branch, which choose to use\n".
3609 " + marks each branch which was not already an ancestor\n\n".
3610 "[dgit multi-suite $csuite]\n";
3612 "author $authline\n".
3613 "committer $authline\n\n";
3614 $output = make_commit_text $commit.$msg;
3615 printdebug "multisuite merge generated $output\n";
3618 fetch_from_archive_record_1($output);
3619 fetch_from_archive_record_2($output);
3621 progress "calculated combined tracking suite $csuite";
3626 sub clone_set_head () {
3627 open H, "> .git/HEAD" or die $!;
3628 print H "ref: ".lref()."\n" or die $!;
3631 sub clone_finish ($) {
3633 runcmd @git, qw(reset --hard), lrref();
3634 runcmd qw(bash -ec), <<'END';
3636 git ls-tree -r --name-only -z HEAD | \
3637 xargs -0r touch -h -r . --
3639 printdone "ready for work in $dstdir";
3643 # in multisuite, returns twice!
3644 # once in parent after first suite fetched,
3645 # and then again in child after everything is finished
3647 badusage "dry run makes no sense with clone" unless act_local();
3649 my $multi_fetched = fork_for_multisuite(sub {
3650 printdebug "multi clone before fetch merge\n";
3654 if ($multi_fetched) {
3655 printdebug "multi clone after fetch merge\n";
3657 clone_finish($dstdir);
3660 printdebug "clone main body\n";
3662 canonicalise_suite();
3663 my $hasgit = check_for_git();
3664 mkdir $dstdir or fail "create \`$dstdir': $!";
3666 runcmd @git, qw(init -q);
3670 my $giturl = access_giturl(1);
3671 if (defined $giturl) {
3672 runcmd @git, qw(remote add), 'origin', $giturl;
3675 progress "fetching existing git history";
3677 runcmd_ordryrun_local @git, qw(fetch origin);
3679 progress "starting new git history";
3681 fetch_from_archive() or no_such_package;
3682 my $vcsgiturl = $dsc->{'Vcs-Git'};
3683 if (length $vcsgiturl) {
3684 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3685 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3687 clone_finish($dstdir);
3691 canonicalise_suite();
3692 if (check_for_git()) {
3695 fetch_from_archive() or no_such_package();
3697 my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'};
3698 if (length $vcsgiturl and
3699 (grep { $csuite eq $_ }
3701 cfg 'dgit.vcs-git.suites')) {
3702 my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF';
3703 if (defined $current && $current ne $vcsgiturl) {
3705 FYI: Vcs-Git in $csuite has different url to your vcs-git remote.
3706 Your vcs-git remote url may be out of date. Use dgit update-vcs-git ?
3710 printdone "fetched into ".lrref();