3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::Util qw(any);
38 use List::MoreUtils qw(pairwise);
39 use Text::Glob qw(match_glob);
40 use Fcntl qw(:DEFAULT :flock);
45 our $our_version = 'UNRELEASED'; ###substituted###
46 our $absurdity = undef; ###substituted###
48 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
51 our $isuite = 'unstable';
57 our $dryrun_level = 0;
59 our $buildproductsdir = '..';
65 our $existing_package = 'dpkg';
67 our $changes_since_version;
69 our $overwrite_version; # undef: not specified; '': check changelog
71 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
73 our $dodep14tag_re = 'want|no|always';
74 our $split_brain_save;
75 our $we_are_responder;
76 our $initiator_tempdir;
77 our $patches_applied_dirtily = 00;
81 our $chase_dsc_distro=1;
83 our %forceopts = map { $_=>0 }
84 qw(unrepresentable unsupported-source-format
85 dsc-changes-mismatch changes-origs-exactly
86 import-gitapply-absurd
87 import-gitapply-no-absurd
88 import-dsc-with-dgit-field);
90 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
92 our $suite_re = '[-+.0-9a-z]+';
93 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
94 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
95 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
96 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
98 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
99 our $splitbraincache = 'dgit-intern/quilt-cache';
100 our $rewritemap = 'dgit-rewrite/map';
102 our (@git) = qw(git);
103 our (@dget) = qw(dget);
104 our (@curl) = qw(curl);
105 our (@dput) = qw(dput);
106 our (@debsign) = qw(debsign);
107 our (@gpg) = qw(gpg);
108 our (@sbuild) = qw(sbuild);
110 our (@dgit) = qw(dgit);
111 our (@aptget) = qw(apt-get);
112 our (@aptcache) = qw(apt-cache);
113 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
114 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
115 our (@dpkggenchanges) = qw(dpkg-genchanges);
116 our (@mergechanges) = qw(mergechanges -f);
117 our (@gbp_build) = ('');
118 our (@gbp_pq) = ('gbp pq');
119 our (@changesopts) = ('');
121 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
124 'debsign' => \@debsign,
126 'sbuild' => \@sbuild,
130 'apt-get' => \@aptget,
131 'apt-cache' => \@aptcache,
132 'dpkg-source' => \@dpkgsource,
133 'dpkg-buildpackage' => \@dpkgbuildpackage,
134 'dpkg-genchanges' => \@dpkggenchanges,
135 'gbp-build' => \@gbp_build,
136 'gbp-pq' => \@gbp_pq,
137 'ch' => \@changesopts,
138 'mergechanges' => \@mergechanges);
140 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
141 our %opts_cfg_insertpos = map {
143 scalar @{ $opts_opt_map{$_} }
144 } keys %opts_opt_map;
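# A sketch of how these two tables appear to fit together (an assumption
# drawn from the names and initial values above, not a guarantee):
# %opts_opt_map maps a config/option name to the command array it
# extends, and %opts_cfg_insertpos records each array's original length
# so that later additions can be spliced in just after the built-in
# words.  For example, @dpkgbuildpackage starts out as
#   ('dpkg-buildpackage', '-i\.git/', '-I.git')
# so its recorded insert position would be 3.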
146 sub parseopts_late_defaults();
152 our $supplementary_message = '';
153 our $need_split_build_invocation = 0;
154 our $split_brain = 0;
158 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
161 our $remotename = 'dgit';
162 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
166 if (!defined $absurdity) {
168 $absurdity =~ s{/[^/]+$}{/absurd} or die;
172 my ($v,$distro) = @_;
173 return $tagformatfn->($v, $distro);
176 sub debiantag_maintview ($$) {
177 my ($v,$distro) = @_;
178 return "$distro/".dep14_version_mangle $v;
181 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
183 sub lbranch () { return "$branchprefix/$csuite"; }
184 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
185 sub lref () { return "refs/heads/".lbranch(); }
186 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
187 sub rrref () { return server_ref($csuite); }
197 return "${package}_".(stripepoch $vsn).$sfx
202 return srcfn($vsn,".dsc");
205 sub changespat ($;$) {
206 my ($vsn, $arch) = @_;
207 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
210 sub upstreamversion ($) {
222 foreach my $f (@end) {
224 print STDERR "$us: cleanup: $@" if length $@;
228 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
230 sub forceable_fail ($$) {
231 my ($forceoptsl, $msg) = @_;
232 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
233 print STDERR "warning: overriding problem due to --force:\n". $msg;
237 my ($forceoptsl) = @_;
238 my @got = grep { $forceopts{$_} } @$forceoptsl;
239 return 0 unless @got;
241 "warning: skipping checks or functionality due to --force-$got[0]\n";
244 sub no_such_package () {
245 print STDERR "$us: package $package does not exist in suite $isuite\n";
251 printdebug "CD $newdir\n";
252 chdir $newdir or confess "chdir: $newdir: $!";
255 sub deliberately ($) {
257 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
260 sub deliberately_not_fast_forward () {
261 foreach (qw(not-fast-forward fresh-repo)) {
262 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
266 sub quiltmode_splitbrain () {
267 $quilt_mode =~ m/gbp|dpm|unapplied/;
270 sub opts_opt_multi_cmd {
272 push @cmd, split /\s+/, shift @_;
278 return opts_opt_multi_cmd @gbp_pq;
281 #---------- remote protocol support, common ----------
283 # remote push initiator/responder protocol:
284 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
285 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
286 # < dgit-remote-push-ready <actual-proto-vsn>
293 # > supplementary-message NBYTES # $protovsn >= 3
298 # > file parsed-changelog
299 # [indicates that output of dpkg-parsechangelog follows]
300 # > data-block NBYTES
301 # > [NBYTES bytes of data (no newline)]
302 # [maybe some more blocks]
311 # > param head DGIT-VIEW-HEAD
312 # > param csuite SUITE
313 # > param tagformat old|new
314 # > param maint-view MAINT-VIEW-HEAD
316 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
317 # # goes into tag, for replay prevention
320 # [indicates that signed tag is wanted]
321 # < data-block NBYTES
322 # < [NBYTES bytes of data (no newline)]
323 # [maybe some more blocks]
327 # > want signed-dsc-changes
328 # < data-block NBYTES [transfer of signed dsc]
330 # < data-block NBYTES [transfer of signed changes]
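#
# Purely for illustration, an abbreviated hypothetical exchange in the
# same notation (byte counts and object names invented, many steps
# omitted) might look like:
#    < dgit-remote-push-ready 4
#    > file parsed-changelog
#    > data-block 1490
#    > [1490 bytes of changelog data]
#    > data-end
#    > param head 7d3f2c1...
#    > param csuite unstable
#    > want signed-tag
#    < data-block 642
#    < [642 bytes of signed tag object]
#    < data-end
#    < files-end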
338 sub i_child_report () {
339 # Sees if our child has died, and reaps it if so. Returns a string
340 # describing how it died if it failed, or undef otherwise.
341 return undef unless $i_child_pid;
342 my $got = waitpid $i_child_pid, WNOHANG;
343 return undef if $got <= 0;
344 die unless $got == $i_child_pid;
345 $i_child_pid = undef;
346 return undef unless $?;
347 return "build host child ".waitstatusmsg();
352 fail "connection lost: $!" if $fh->error;
353 fail "protocol violation; $m not expected";
356 sub badproto_badread ($$) {
358 fail "connection lost: $!" if $!;
359 my $report = i_child_report();
360 fail $report if defined $report;
361 badproto $fh, "eof (reading $wh)";
364 sub protocol_expect (&$) {
365 my ($match, $fh) = @_;
368 defined && chomp or badproto_badread $fh, "protocol message";
376 badproto $fh, "\`$_'";
379 sub protocol_send_file ($$) {
380 my ($fh, $ourfn) = @_;
381 open PF, "<", $ourfn or die "$ourfn: $!";
384 my $got = read PF, $d, 65536;
385 die "$ourfn: $!" unless defined $got;
387 print $fh "data-block ".length($d)."\n" or die $!;
388 print $fh $d or die $!;
390 PF->error and die "$ourfn $!";
391 print $fh "data-end\n" or die $!;
395 sub protocol_read_bytes ($$) {
396 my ($fh, $nbytes) = @_;
397 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto $fh, "bad byte count";
399 my $got = read $fh, $d, $nbytes;
400 $got==$nbytes or badproto_badread $fh, "data block";
404 sub protocol_receive_file ($$) {
405 my ($fh, $ourfn) = @_;
406 printdebug "() $ourfn\n";
407 open PF, ">", $ourfn or die "$ourfn: $!";
409 my ($y,$l) = protocol_expect {
410 m/^data-block (.*)$/ ? (1,$1) :
411 m/^data-end$/ ? (0,) :
415 my $d = protocol_read_bytes $fh, $l;
416 print PF $d or die $!;
421 #---------- remote protocol support, responder ----------
423 sub responder_send_command ($) {
425 return unless $we_are_responder;
426 # (this is called even when we are not the responder, hence the check above)
427 printdebug ">> $command\n";
428 print PO $command, "\n" or die $!;
431 sub responder_send_file ($$) {
432 my ($keyword, $ourfn) = @_;
433 return unless $we_are_responder;
434 printdebug "]] $keyword $ourfn\n";
435 responder_send_command "file $keyword";
436 protocol_send_file \*PO, $ourfn;
439 sub responder_receive_files ($@) {
440 my ($keyword, @ourfns) = @_;
441 die unless $we_are_responder;
442 printdebug "[[ $keyword @ourfns\n";
443 responder_send_command "want $keyword";
444 foreach my $fn (@ourfns) {
445 protocol_receive_file \*PI, $fn;
448 protocol_expect { m/^files-end$/ } \*PI;
451 #---------- remote protocol support, initiator ----------
453 sub initiator_expect (&) {
455 protocol_expect { &$match } \*RO;
458 #---------- end remote code ----------
461 if ($we_are_responder) {
463 responder_send_command "progress ".length($m) or die $!;
464 print PO $m or die $!;
474 $ua = LWP::UserAgent->new();
478 progress "downloading $what...";
479 my $r = $ua->get(@_) or die $!;
480 return undef if $r->code == 404;
481 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
482 return $r->decoded_content(charset => 'none');
485 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
490 failedcmd @_ if system @_;
493 sub act_local () { return $dryrun_level <= 1; }
494 sub act_scary () { return !$dryrun_level; }
497 if (!$dryrun_level) {
498 progress "$us ok: @_";
500 progress "would be ok: @_ (but dry run only)";
505 printcmd(\*STDERR,$debugprefix."#",@_);
508 sub runcmd_ordryrun {
516 sub runcmd_ordryrun_local {
525 my ($first_shell, @cmd) = @_;
526 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
529 our $helpmsg = <<END;
531 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
532 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
533 dgit [dgit-opts] build [dpkg-buildpackage-opts]
534 dgit [dgit-opts] sbuild [sbuild-opts]
535 dgit [dgit-opts] push [dgit-opts] [suite]
536 dgit [dgit-opts] rpush build-host:build-dir ...
537 important dgit options:
538 -k<keyid> sign tag and package with <keyid> instead of default
539 --dry-run -n do not change anything, but go through the motions
540 --damp-run -L like --dry-run but make local changes, without signing
541 --new -N allow introducing a new package
542 --debug -D increase debug level
543 -c<name>=<value> set git config option (used directly by dgit too)
546 our $later_warning_msg = <<END;
547 Perhaps the upload is stuck in incoming. Using the version from git.
551 print STDERR "$us: @_\n", $helpmsg or die $!;
556 @ARGV or badusage "too few arguments";
557 return scalar shift @ARGV;
561 print $helpmsg or die $!;
565 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
567 our %defcfg = ('dgit.default.distro' => 'debian',
568 'dgit-suite.*-security.distro' => 'debian-security',
569 'dgit.default.username' => '',
570 'dgit.default.archive-query-default-component' => 'main',
571 'dgit.default.ssh' => 'ssh',
572 'dgit.default.archive-query' => 'madison:',
573 'dgit.default.sshpsql-dbname' => 'service=projectb',
574 'dgit.default.aptget-components' => 'main',
575 'dgit.default.dgit-tag-format' => 'new,old,maint',
576 'dgit.dsc-url-proto-ok.http' => 'true',
577 'dgit.dsc-url-proto-ok.https' => 'true',
578 'dgit.dsc-url-proto-ok.git' => 'true',
579 'dgit.default.dsc-url-proto-ok' => 'false',
580 # old means "repo server accepts pushes with old dgit tags"
581 # new means "repo server accepts pushes with new dgit tags"
582 # maint means "repo server accepts split brain pushes"
583 # hist means "repo server may have old pushes without new tag"
584 # ("hist" is implied by "old")
585 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
586 'dgit-distro.debian.git-check' => 'url',
587 'dgit-distro.debian.git-check-suffix' => '/info/refs',
588 'dgit-distro.debian.new-private-pushers' => 't',
589 'dgit-distro.debian/push.git-url' => '',
590 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
591 'dgit-distro.debian/push.git-user-force' => 'dgit',
592 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
593 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
594 'dgit-distro.debian/push.git-create' => 'true',
595 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
596 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
597 # 'dgit-distro.debian.archive-query-tls-key',
598 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
599 # ^ this does not work because curl is broken nowadays
600 # Fixing #790093 properly will involve providing the key
601 # in some package and maybe updating these paths.
603 # 'dgit-distro.debian.archive-query-tls-curl-args',
604 # '--ca-path=/etc/ssl/ca-debian',
605 # ^ this is a workaround but works (only) on DSA-administered machines
606 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
607 'dgit-distro.debian.git-url-suffix' => '',
608 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
609 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
610 'dgit-distro.debian-security.archive-query' => 'aptget:',
611 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
612 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
613 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
614 'dgit-distro.debian-security.nominal-distro' => 'debian',
615 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
616 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
617 'dgit-distro.ubuntu.git-check' => 'false',
618 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
619 'dgit-distro.test-dummy.ssh' => "$td/ssh",
620 'dgit-distro.test-dummy.username' => "alice",
621 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
622 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
623 'dgit-distro.test-dummy.git-url' => "$td/git",
624 'dgit-distro.test-dummy.git-host' => "git",
625 'dgit-distro.test-dummy.git-path' => "$td/git",
626 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
627 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
628 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
629 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
633 our @gitcfgsources = qw(cmdline local global system);
635 sub git_slurp_config () {
636 local ($debuglevel) = $debuglevel-2;
639 # This algorithm is a bit subtle, but this is needed so that for
640 # options which we want to be single-valued, we allow the
641 # different config sources to override properly. See #835858.
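# For example (illustrative): if dgit-distro.debian.git-url is set in
# both the global and the local git config, the local value is the one
# that takes effect, because git_get_config below walks @gitcfgsources
# in order (cmdline, local, global, system) and appears to use the
# first source that defines the key, rather than letting a single
# `git config --get-regexp' run decide.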
642 foreach my $src (@gitcfgsources) {
643 next if $src eq 'cmdline';
644 # we do this ourselves since git doesn't handle it
646 my @cmd = (@git, qw(config -z --get-regexp), "--$src", qw(.*));
649 open GITS, "-|", @cmd or die $!;
652 printdebug "=> ", (messagequote $_), "\n";
654 push @{ $gitcfgs{$src}{$`} }, $'; #';
658 or ($!==0 && $?==256)
663 sub git_get_config ($) {
665 foreach my $src (@gitcfgsources) {
666 my $l = $gitcfgs{$src}{$c};
667 croak "$l $c" if $l && !ref $l;
668 printdebug "C $c ".(defined $l ?
669 join " ", map { messagequote "'$_'" } @$l :
673 @$l==1 or badcfg "multiple values for $c".
674 " (in $src git config)" if @$l > 1;
682 return undef if $c =~ /RETURN-UNDEF/;
683 printdebug "C? $c\n" if $debuglevel >= 5;
684 my $v = git_get_config($c);
685 return $v if defined $v;
686 my $dv = $defcfg{$c};
688 printdebug "CD $c $dv\n" if $debuglevel >= 4;
692 badcfg "need value for one of: @_\n".
693 "$us: distro or suite appears not to be (properly) supported";
696 sub access_basedistro__noalias () {
697 if (defined $idistro) {
700 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
701 return $def if defined $def;
702 foreach my $src (@gitcfgsources, 'internal') {
703 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
705 foreach my $k (keys %$kl) {
706 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
708 next unless match_glob $dpat, $isuite;
712 return cfg("dgit.default.distro");
716 sub access_basedistro () {
717 my $noalias = access_basedistro__noalias();
718 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
719 return $canon // $noalias;
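# Illustrative example (using only the built-in defaults above): for
# isuite "stretch-security" there is no exact
# dgit-suite.stretch-security.distro key, but the glob key
# dgit-suite.*-security.distro matches and gives "debian-security";
# if dgit-distro.debian-security.alias-canon were set, that value
# would then be returned instead.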
722 sub access_nomdistro () {
723 my $base = access_basedistro();
724 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
725 $r =~ m/^$distro_re$/ or badcfg
726 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
730 sub access_quirk () {
731 # returns (quirk name, distro to use instead or undef, quirk-specific info)
732 my $basedistro = access_basedistro();
733 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
735 if (defined $backports_quirk) {
736 my $re = $backports_quirk;
737 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
739 $re =~ s/\%/([-0-9a-z_]+)/
740 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
741 if ($isuite =~ m/^$re$/) {
742 return ('backports',"$basedistro-backports",$1);
745 return ('none',undef);
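# Rough example (based on the default backports-quirk above,
# '(squeeze)-backports*'): after the escaping and wildcard/\%
# substitutions the pattern matches an isuite such as
# "squeeze-backports", so access_quirk would return something like
# ('backports', 'debian-backports', 'squeeze').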
750 sub parse_cfg_bool ($$$) {
751 my ($what,$def,$v) = @_;
754 $v =~ m/^[ty1]/ ? 1 :
755 $v =~ m/^[fn0]/ ? 0 :
756 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
759 sub access_forpush_config () {
760 my $d = access_basedistro();
764 parse_cfg_bool('new-private-pushers', 0,
765 cfg("dgit-distro.$d.new-private-pushers",
768 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
771 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
772 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
773 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
774 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
777 sub access_forpush () {
778 $access_forpush //= access_forpush_config();
779 return $access_forpush;
783 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
784 badcfg "pushing but distro is configured readonly"
785 if access_forpush_config() eq '0';
787 $supplementary_message = <<'END' unless $we_are_responder;
788 Push failed, before we got started.
789 You can retry the push, after fixing the problem, if you like.
791 parseopts_late_defaults();
795 parseopts_late_defaults();
798 sub supplementary_message ($) {
800 if (!$we_are_responder) {
801 $supplementary_message = $msg;
803 } elsif ($protovsn >= 3) {
804 responder_send_command "supplementary-message ".length($msg)
806 print PO $msg or die $!;
810 sub access_distros () {
811 # Returns list of distros to try, in order
814 # 0. `instead of' distro name(s) we have been pointed to
815 # 1. the access_quirk distro, if any
816 # 2a. the user's specified distro, or failing that } basedistro
817 # 2b. the distro calculated from the suite }
818 my @l = access_basedistro();
820 my (undef,$quirkdistro) = access_quirk();
821 unshift @l, $quirkdistro;
822 unshift @l, $instead_distro;
823 @l = grep { defined } @l;
825 push @l, access_nomdistro();
827 if (access_forpush()) {
828 @l = map { ("$_/push", $_) } @l;
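# Worked example (plain Debian, no quirk or divert): the base list is
# ('debian'), access_nomdistro adds 'debian' again, and when pushing
# the map above turns that into roughly ('debian/push', 'debian', ...),
# so the /push variants of the config keys are consulted first.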
833 sub access_cfg_cfgs (@) {
836 # The nesting of these loops determines the search order. We put
837 # the key loop on the outside so that we search all the distros
838 # for each key, before going on to the next key. That means that
839 # if access_cfg is called with a more specific, and then a less
840 # specific, key, an earlier distro can override the less specific
841 # without necessarily overriding any more specific keys. (If the
842 # distro wants to override the more specific keys it can simply do
843 # so; whereas if we did the loop the other way around, it would be
844 # impossible for an earlier distro to override a less specific
845 # key but not the more specific ones without restating the unknown
846 # values of the more specific keys.)
849 # We have to deal with RETURN-UNDEF specially, so that we don't
850 # terminate the search prematurely.
852 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
855 foreach my $d (access_distros()) {
856 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
858 push @cfgs, map { "dgit.default.$_" } @realkeys;
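# For a single key this reduces to something simple: access_cfg('git-url')
# with a distro list of ('debian') yields the candidate config keys
# ('dgit-distro.debian.git-url', 'dgit.default.git-url'), which cfg()
# then tries in that order.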
865 my (@cfgs) = access_cfg_cfgs(@keys);
866 my $value = cfg(@cfgs);
870 sub access_cfg_bool ($$) {
871 my ($def, @keys) = @_;
872 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
875 sub string_to_ssh ($) {
877 if ($spec =~ m/\s/) {
878 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
884 sub access_cfg_ssh () {
885 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
886 if (!defined $gitssh) {
889 return string_to_ssh $gitssh;
893 sub access_runeinfo ($) {
895 return ": dgit ".access_basedistro()." $info ;";
898 sub access_someuserhost ($) {
900 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
901 defined($user) && length($user) or
902 $user = access_cfg("$some-user",'username');
903 my $host = access_cfg("$some-host");
904 return length($user) ? "$user\@$host" : $host;
907 sub access_gituserhost () {
908 return access_someuserhost('git');
911 sub access_giturl (;$) {
913 my $url = access_cfg('git-url','RETURN-UNDEF');
916 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
917 return undef unless defined $proto;
920 access_gituserhost().
921 access_cfg('git-path');
923 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
926 return "$url/$package$suffix";
929 sub parsecontrolfh ($$;$) {
930 my ($fh, $desc, $allowsigned) = @_;
931 our $dpkgcontrolhash_noissigned;
934 my %opts = ('name' => $desc);
935 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
936 $c = Dpkg::Control::Hash->new(%opts);
937 $c->parse($fh,$desc) or die "parsing of $desc failed";
938 last if $allowsigned;
939 last if $dpkgcontrolhash_noissigned;
940 my $issigned= $c->get_option('is_pgp_signed');
941 if (!defined $issigned) {
942 $dpkgcontrolhash_noissigned= 1;
943 seek $fh, 0,0 or die "seek $desc: $!";
944 } elsif ($issigned) {
945 fail "control file $desc is (already) PGP-signed. ".
946 " Note that dgit push needs to modify the .dsc and then".
947 " do the signature itself";
956 my ($file, $desc, $allowsigned) = @_;
957 my $fh = new IO::Handle;
958 open $fh, '<', $file or die "$file: $!";
959 my $c = parsecontrolfh($fh,$desc,$allowsigned);
960 $fh->error and die $!;
966 my ($dctrl,$field) = @_;
967 my $v = $dctrl->{$field};
968 return $v if defined $v;
969 fail "missing field $field in ".$dctrl->get_option('name');
973 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
974 my $p = new IO::Handle;
975 my @cmd = (qw(dpkg-parsechangelog), @_);
976 open $p, '-|', @cmd or die $!;
978 $?=0; $!=0; close $p or failedcmd @cmd;
982 sub commit_getclogp ($) {
983 # Returns the parsed changelog hashref for a particular commit
985 our %commit_getclogp_memo;
986 my $memo = $commit_getclogp_memo{$objid};
987 return $memo if $memo;
989 my $mclog = ".git/dgit/clog-$objid";
990 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
991 "$objid:debian/changelog";
992 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
997 defined $d or fail "getcwd failed: $!";
1001 sub parse_dscdata () {
1002 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1003 printdebug Dumper($dscdata) if $debuglevel>1;
1004 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1005 printdebug Dumper($dsc) if $debuglevel>1;
1010 sub archive_query ($;@) {
1011 my ($method) = shift @_;
1012 fail "this operation does not support multiple comma-separated suites"
1014 my $query = access_cfg('archive-query','RETURN-UNDEF');
1015 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1018 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
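# Dispatch example: with the default archive-query of 'madison:' the
# call archive_query('archive_query') invokes
# archive_query_madison('madison', '', ...); with Debian's
# 'ftpmasterapi:' it invokes archive_query_ftpmasterapi('ftpmasterapi',
# '', ...), and likewise for canonicalise_suite and file_in_archive.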
1021 sub archive_query_prepend_mirror {
1022 my $m = access_cfg('mirror');
1023 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1026 sub pool_dsc_subpath ($$) {
1027 my ($vsn,$component) = @_; # $package is implict arg
1028 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1029 return "/pool/$component/$prefix/$package/".dscfn($vsn);
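# Example: for package "dgit" this yields something like
# "/pool/main/d/dgit/dgit_1.4.dsc", while a package whose name starts
# with "l", e.g. "libfoo", uses a four-character prefix:
# "/pool/main/libf/libfoo/...".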
1032 sub cfg_apply_map ($$$) {
1033 my ($varref, $what, $mapspec) = @_;
1034 return unless $mapspec;
1036 printdebug "config $what EVAL{ $mapspec; }\n";
1038 eval "package Dgit::Config; $mapspec;";
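# The map spec is simply a Perl expression (typically an s///) run
# against the value via $_; e.g. the default aptget-suite-map for
# debian-security, 's#-security$#/updates#', turns a suite name like
# "stretch-security" into "stretch/updates" before it is written into
# the sources.list entry below.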
1043 #---------- `ftpmasterapi' archive query method (nascent) ----------
1045 sub archive_api_query_cmd ($) {
1047 my @cmd = (@curl, qw(-sS));
1048 my $url = access_cfg('archive-query-url');
1049 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1051 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1052 foreach my $key (split /\:/, $keys) {
1053 $key =~ s/\%HOST\%/$host/g;
1055 fail "for $url: stat $key: $!" unless $!==ENOENT;
1058 fail "config requested specific TLS key but do not know".
1059 " how to get curl to use exactly that EE key ($key)";
1060 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1061 # # Sadly the above line does not work because of changes
1062 # # to gnutls. The real fix for #790093 may involve
1063 # # new curl options.
1066 # Fixing #790093 properly will involve providing a value
1067 # for this on clients.
1068 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1069 push @cmd, split / /, $kargs if defined $kargs;
1071 push @cmd, $url.$subpath;
1075 sub api_query ($$;$) {
1077 my ($data, $subpath, $ok404) = @_;
1078 badcfg "ftpmasterapi archive query method takes no data part"
1080 my @cmd = archive_api_query_cmd($subpath);
1081 my $url = $cmd[$#cmd];
1082 push @cmd, qw(-w %{http_code});
1083 my $json = cmdoutput @cmd;
1084 unless ($json =~ s/\d+\d+\d$//) {
1085 failedcmd_report_cmd undef, @cmd;
1086 fail "curl failed to print 3-digit HTTP code";
1089 return undef if $code eq '404' && $ok404;
1090 fail "fetch of $url gave HTTP code $code"
1091 unless $url =~ m#^file://# or $code =~ m/^2/;
1092 return decode_json($json);
1095 sub canonicalise_suite_ftpmasterapi {
1096 my ($proto,$data) = @_;
1097 my $suites = api_query($data, 'suites');
1099 foreach my $entry (@$suites) {
1101 my $v = $entry->{$_};
1102 defined $v && $v eq $isuite;
1103 } qw(codename name);
1104 push @matched, $entry;
1106 fail "unknown suite $isuite" unless @matched;
1109 @matched==1 or die "multiple matches for suite $isuite\n";
1110 $cn = "$matched[0]{codename}";
1111 defined $cn or die "suite $isuite info has no codename\n";
1112 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1114 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1119 sub archive_query_ftpmasterapi {
1120 my ($proto,$data) = @_;
1121 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1123 my $digester = Digest::SHA->new(256);
1124 foreach my $entry (@$info) {
1126 my $vsn = "$entry->{version}";
1127 my ($ok,$msg) = version_check $vsn;
1128 die "bad version: $msg\n" unless $ok;
1129 my $component = "$entry->{component}";
1130 $component =~ m/^$component_re$/ or die "bad component";
1131 my $filename = "$entry->{filename}";
1132 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1133 or die "bad filename";
1134 my $sha256sum = "$entry->{sha256sum}";
1135 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1136 push @rows, [ $vsn, "/pool/$component/$filename",
1137 $digester, $sha256sum ];
1139 die "bad ftpmaster api response: $@\n".Dumper($entry)
1142 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1143 return archive_query_prepend_mirror @rows;
1146 sub file_in_archive_ftpmasterapi {
1147 my ($proto,$data,$filename) = @_;
1148 my $pat = $filename;
1151 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1152 my $info = api_query($data, "file_in_archive/$pat", 1);
1155 #---------- `aptget' archive query method ----------
1158 our $aptget_releasefile;
1159 our $aptget_configpath;
1161 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1162 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1164 sub aptget_cache_clean {
1165 runcmd_ordryrun_local qw(sh -ec),
1166 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1170 sub aptget_lock_acquire () {
1171 my $lockfile = "$aptget_base/lock";
1172 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1173 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1176 sub aptget_prep ($) {
1178 return if defined $aptget_base;
1180 badcfg "aptget archive query method takes no data part"
1183 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1186 ensuredir "$cache/dgit";
1188 access_cfg('aptget-cachekey','RETURN-UNDEF')
1189 // access_nomdistro();
1191 $aptget_base = "$cache/dgit/aptget";
1192 ensuredir $aptget_base;
1194 my $quoted_base = $aptget_base;
1195 die "$quoted_base contains bad chars, cannot continue"
1196 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1198 ensuredir $aptget_base;
1200 aptget_lock_acquire();
1202 aptget_cache_clean();
1204 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1205 my $sourceslist = "source.list#$cachekey";
1207 my $aptsuites = $isuite;
1208 cfg_apply_map(\$aptsuites, 'suite map',
1209 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1211 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1212 printf SRCS "deb-src %s %s %s\n",
1213 access_cfg('mirror'),
1215 access_cfg('aptget-components')
1218 ensuredir "$aptget_base/cache";
1219 ensuredir "$aptget_base/lists";
1221 open CONF, ">", $aptget_configpath or die $!;
1223 Debug::NoLocking "true";
1224 APT::Get::List-Cleanup "false";
1225 #clear APT::Update::Post-Invoke-Success;
1226 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1227 Dir::State::Lists "$quoted_base/lists";
1228 Dir::Etc::preferences "$quoted_base/preferences";
1229 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1230 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1233 foreach my $key (qw(
1236 Dir::Cache::Archives
1237 Dir::Etc::SourceParts
1238 Dir::Etc::preferencesparts
1240 ensuredir "$aptget_base/$key";
1241 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
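# The next stanza works out which Release file apt actually consulted:
# we push the atime of every existing lists/*Release file back to a
# known value one second in the past (keeping its mtime), run
# `apt-get update', and then any file whose atime has moved on is one
# apt (re)read for this suite; exactly one (preferring InRelease) is
# expected to remain.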
1244 my $oldatime = (time // die $!) - 1;
1245 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1246 next unless stat_exists $oldlist;
1247 my ($mtime) = (stat _)[9];
1248 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1251 runcmd_ordryrun_local aptget_aptget(), qw(update);
1254 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1255 next unless stat_exists $oldlist;
1256 my ($atime) = (stat _)[8];
1257 next if $atime == $oldatime;
1258 push @releasefiles, $oldlist;
1260 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1261 @releasefiles = @inreleasefiles if @inreleasefiles;
1262 die "apt updated wrong number of Release files (@releasefiles), erk"
1263 unless @releasefiles == 1;
1265 ($aptget_releasefile) = @releasefiles;
1268 sub canonicalise_suite_aptget {
1269 my ($proto,$data) = @_;
1272 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1274 foreach my $name (qw(Codename Suite)) {
1275 my $val = $release->{$name};
1277 printdebug "release file $name: $val\n";
1278 $val =~ m/^$suite_re$/o or fail
1279 "Release file ($aptget_releasefile) specifies intolerable $name";
1280 cfg_apply_map(\$val, 'suite rmap',
1281 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1288 sub archive_query_aptget {
1289 my ($proto,$data) = @_;
1292 ensuredir "$aptget_base/source";
1293 foreach my $old (<$aptget_base/source/*.dsc>) {
1294 unlink $old or die "$old: $!";
1297 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1298 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1299 # avoids apt-get source failing with ambiguous error code
1301 runcmd_ordryrun_local
1302 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1303 aptget_aptget(), qw(--download-only --only-source source), $package;
1305 my @dscs = <$aptget_base/source/*.dsc>;
1306 fail "apt-get source did not produce a .dsc" unless @dscs;
1307 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1309 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1312 my $uri = "file://". uri_escape $dscs[0];
1313 $uri =~ s{\%2f}{/}gi;
1314 return [ (getfield $pre_dsc, 'Version'), $uri ];
1317 #---------- `dummyapicat' archive query method ----------
1319 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1320 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1322 sub file_in_archive_dummycatapi ($$$) {
1323 my ($proto,$data,$filename) = @_;
1324 my $mirror = access_cfg('mirror');
1325 $mirror =~ s#^file://#/# or die "$mirror ?";
1327 my @cmd = (qw(sh -ec), '
1329 find -name "$2" -print0 |
1331 ', qw(x), $mirror, $filename);
1332 debugcmd "-|", @cmd;
1333 open FIA, "-|", @cmd or die $!;
1336 printdebug "| $_\n";
1337 m/^(\w+) (\S+)$/ or die "$_ ?";
1338 push @out, { sha256sum => $1, filename => $2 };
1340 close FIA or die failedcmd @cmd;
1344 #---------- `madison' archive query method ----------
1346 sub archive_query_madison {
1347 return archive_query_prepend_mirror
1348 map { [ @$_[0..1] ] } madison_get_parse(@_);
1351 sub madison_get_parse {
1352 my ($proto,$data) = @_;
1353 die unless $proto eq 'madison';
1354 if (!length $data) {
1355 $data= access_cfg('madison-distro','RETURN-UNDEF');
1356 $data //= access_basedistro();
1358 $rmad{$proto,$data,$package} ||= cmdoutput
1359 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1360 my $rmad = $rmad{$proto,$data,$package};
1363 foreach my $l (split /\n/, $rmad) {
1364 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1365 \s*( [^ \t|]+ )\s* \|
1366 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1367 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1368 $1 eq $package or die "$rmad $package ?";
1375 $component = access_cfg('archive-query-default-component');
1377 $5 eq 'source' or die "$rmad ?";
1378 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1380 return sort { -version_compare($a->[0],$b->[0]); } @out;
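# For reference, the rmadison output parsed above looks roughly like
# (columns: package | version | suite[/component] | architecture):
#    dgit | 1.4 | unstable     | source
#    dgit | 1.4 | testing/main | source
# so $1..$5 are the package, version, suite, optional component, and
# the architecture, which must be "source".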
1383 sub canonicalise_suite_madison {
1384 # madison canonicalises for us
1385 my @r = madison_get_parse(@_);
1387 "unable to canonicalise suite using package $package".
1388 " which does not appear to exist in suite $isuite;".
1389 " --existing-package may help";
1393 sub file_in_archive_madison { return undef; }
1395 #---------- `sshpsql' archive query method ----------
1398 my ($data,$runeinfo,$sql) = @_;
1399 if (!length $data) {
1400 $data= access_someuserhost('sshpsql').':'.
1401 access_cfg('sshpsql-dbname');
1403 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1404 my ($userhost,$dbname) = ($`,$'); #';
1406 my @cmd = (access_cfg_ssh, $userhost,
1407 access_runeinfo("ssh-psql $runeinfo").
1408 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1409 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1411 open P, "-|", @cmd or die $!;
1414 printdebug(">|$_|\n");
1417 $!=0; $?=0; close P or failedcmd @cmd;
1419 my $nrows = pop @rows;
1420 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1421 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1422 @rows = map { [ split /\|/, $_ ] } @rows;
1423 my $ncols = scalar @{ shift @rows };
1424 die if grep { scalar @$_ != $ncols } @rows;
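# The psql -A output being picked apart here looks roughly like:
#    version|name|filename|sha256sum
#    1.4-1|main|pool/main/d/dgit/dgit_1.4-1.dsc|ab12...
#    (1 row)
# i.e. a header row, pipe-separated data rows, and a trailing row
# count, which is what the pop/split/shift dance above is checking.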
1428 sub sql_injection_check {
1429 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1432 sub archive_query_sshpsql ($$) {
1433 my ($proto,$data) = @_;
1434 sql_injection_check $isuite, $package;
1435 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1436 SELECT source.version, component.name, files.filename, files.sha256sum
1438 JOIN src_associations ON source.id = src_associations.source
1439 JOIN suite ON suite.id = src_associations.suite
1440 JOIN dsc_files ON dsc_files.source = source.id
1441 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1442 JOIN component ON component.id = files_archive_map.component_id
1443 JOIN files ON files.id = dsc_files.file
1444 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1445 AND source.source='$package'
1446 AND files.filename LIKE '%.dsc';
1448 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1449 my $digester = Digest::SHA->new(256);
1451 my ($vsn,$component,$filename,$sha256sum) = @$_;
1452 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1454 return archive_query_prepend_mirror @rows;
1457 sub canonicalise_suite_sshpsql ($$) {
1458 my ($proto,$data) = @_;
1459 sql_injection_check $isuite;
1460 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1461 SELECT suite.codename
1462 FROM suite where suite_name='$isuite' or codename='$isuite';
1464 @rows = map { $_->[0] } @rows;
1465 fail "unknown suite $isuite" unless @rows;
1466 die "ambiguous $isuite: @rows ?" if @rows>1;
1470 sub file_in_archive_sshpsql ($$$) { return undef; }
1472 #---------- `dummycat' archive query method ----------
1474 sub canonicalise_suite_dummycat ($$) {
1475 my ($proto,$data) = @_;
1476 my $dpath = "$data/suite.$isuite";
1477 if (!open C, "<", $dpath) {
1478 $!==ENOENT or die "$dpath: $!";
1479 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1483 chomp or die "$dpath: $!";
1485 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1489 sub archive_query_dummycat ($$) {
1490 my ($proto,$data) = @_;
1491 canonicalise_suite();
1492 my $dpath = "$data/package.$csuite.$package";
1493 if (!open C, "<", $dpath) {
1494 $!==ENOENT or die "$dpath: $!";
1495 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1503 printdebug "dummycat query $csuite $package $dpath | $_\n";
1504 my @row = split /\s+/, $_;
1505 @row==2 or die "$dpath: $_ ?";
1508 C->error and die "$dpath: $!";
1510 return archive_query_prepend_mirror
1511 sort { -version_compare($a->[0],$b->[0]); } @rows;
1514 sub file_in_archive_dummycat () { return undef; }
1516 #---------- tag format handling ----------
1518 sub access_cfg_tagformats () {
1519 split /\,/, access_cfg('dgit-tag-format');
1522 sub access_cfg_tagformats_can_splitbrain () {
1523 my %y = map { $_ => 1 } access_cfg_tagformats;
1524 foreach my $needtf (qw(new maint)) {
1525 next if $y{$needtf};
1531 sub need_tagformat ($$) {
1532 my ($fmt, $why) = @_;
1533 fail "need to use tag format $fmt ($why) but also need".
1534 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1535 " - no way to proceed"
1536 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1537 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1540 sub select_tagformat () {
1542 return if $tagformatfn && !$tagformat_want;
1543 die 'bug' if $tagformatfn && $tagformat_want;
1544 # ... $tagformat_want assigned after previous select_tagformat
1546 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1547 printdebug "select_tagformat supported @supported\n";
1549 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1550 printdebug "select_tagformat specified @$tagformat_want\n";
1552 my ($fmt,$why,$override) = @$tagformat_want;
1554 fail "target distro supports tag formats @supported".
1555 " but have to use $fmt ($why)"
1557 or grep { $_ eq $fmt } @supported;
1559 $tagformat_want = undef;
1561 $tagformatfn = ${*::}{"debiantag_$fmt"};
1563 fail "trying to use unknown tag format \`$fmt' ($why) !"
1564 unless $tagformatfn;
1567 #---------- archive query entrypoints and rest of program ----------
1569 sub canonicalise_suite () {
1570 return if defined $csuite;
1571 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1572 $csuite = archive_query('canonicalise_suite');
1573 if ($isuite ne $csuite) {
1574 progress "canonical suite name for $isuite is $csuite";
1576 progress "canonical suite name is $csuite";
1580 sub get_archive_dsc () {
1581 canonicalise_suite();
1582 my @vsns = archive_query('archive_query');
1583 foreach my $vinfo (@vsns) {
1584 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1585 $dscurl = $vsn_dscurl;
1586 $dscdata = url_get($dscurl);
1588 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1593 $digester->add($dscdata);
1594 my $got = $digester->hexdigest();
1596 fail "$dscurl has hash $got but".
1597 " archive told us to expect $digest";
1600 my $fmt = getfield $dsc, 'Format';
1601 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1602 "unsupported source format $fmt, sorry";
1604 $dsc_checked = !!$digester;
1605 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1609 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1612 sub check_for_git ();
1613 sub check_for_git () {
1615 my $how = access_cfg('git-check');
1616 if ($how eq 'ssh-cmd') {
1618 (access_cfg_ssh, access_gituserhost(),
1619 access_runeinfo("git-check $package").
1620 " set -e; cd ".access_cfg('git-path').";".
1621 " if test -d $package.git; then echo 1; else echo 0; fi");
1622 my $r= cmdoutput @cmd;
1623 if (defined $r and $r =~ m/^divert (\w+)$/) {
1625 my ($usedistro,) = access_distros();
1626 # NB that if we are pushing, $usedistro will be $distro/push
1627 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1628 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1629 progress "diverting to $divert (using config for $instead_distro)";
1630 return check_for_git();
1632 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1634 } elsif ($how eq 'url') {
1635 my $prefix = access_cfg('git-check-url','git-url');
1636 my $suffix = access_cfg('git-check-suffix','git-suffix',
1637 'RETURN-UNDEF') // '.git';
1638 my $url = "$prefix/$package$suffix";
1639 my @cmd = (@curl, qw(-sS -I), $url);
1640 my $result = cmdoutput @cmd;
1641 $result =~ s/^\S+ 200 .*\n\r?\n//;
1642 # curl -sS -I with https_proxy prints
1643 # HTTP/1.0 200 Connection established
1644 $result =~ m/^\S+ (404|200) /s or
1645 fail "unexpected results from git check query - ".
1646 Dumper($prefix, $result);
1648 if ($code eq '404') {
1650 } elsif ($code eq '200') {
1655 } elsif ($how eq 'true') {
1657 } elsif ($how eq 'false') {
1660 badcfg "unknown git-check \`$how'";
1664 sub create_remote_git_repo () {
1665 my $how = access_cfg('git-create');
1666 if ($how eq 'ssh-cmd') {
1668 (access_cfg_ssh, access_gituserhost(),
1669 access_runeinfo("git-create $package").
1670 "set -e; cd ".access_cfg('git-path').";".
1671 " cp -a _template $package.git");
1672 } elsif ($how eq 'true') {
1675 badcfg "unknown git-create \`$how'";
1679 our ($dsc_hash,$lastpush_mergeinput);
1680 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1682 our $ud = '.git/dgit/unpack';
1692 sub mktree_in_ud_here () {
1693 runcmd qw(git init -q);
1694 runcmd qw(git config gc.auto 0);
1695 rmtree('.git/objects');
1696 symlink '../../../../objects','.git/objects' or die $!;
1699 sub git_write_tree () {
1700 my $tree = cmdoutput @git, qw(write-tree);
1701 $tree =~ m/^\w+$/ or die "$tree ?";
1705 sub git_add_write_tree () {
1706 runcmd @git, qw(add -Af .);
1707 return git_write_tree();
1710 sub remove_stray_gits ($) {
1712 my @gitscmd = qw(find -name .git -prune -print0);
1713 debugcmd "|",@gitscmd;
1714 open GITS, "-|", @gitscmd or die $!;
1719 print STDERR "$us: warning: removing from $what: ",
1720 (messagequote $_), "\n";
1724 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1727 sub mktree_in_ud_from_only_subdir ($;$) {
1728 my ($what,$raw) = @_;
1730 # changes into the subdir
1732 die "expected one subdir but found @dirs ?" unless @dirs==1;
1733 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1737 remove_stray_gits($what);
1738 mktree_in_ud_here();
1740 my ($format, $fopts) = get_source_format();
1741 if (madformat($format)) {
1746 my $tree=git_add_write_tree();
1747 return ($tree,$dir);
1750 our @files_csum_info_fields =
1751 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1752 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1753 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
1755 sub dsc_files_info () {
1756 foreach my $csumi (@files_csum_info_fields) {
1757 my ($fname, $module, $method) = @$csumi;
1758 my $field = $dsc->{$fname};
1759 next unless defined $field;
1760 eval "use $module; 1;" or die $@;
1762 foreach (split /\n/, $field) {
1764 m/^(\w+) (\d+) (\S+)$/ or
1765 fail "could not parse .dsc $fname line \`$_'";
1766 my $digester = eval "$module"."->$method;" or die $@;
1771 Digester => $digester,
1776 fail "missing any supported Checksums-* or Files field in ".
1777 $dsc->get_option('name');
1781 map { $_->{Filename} } dsc_files_info();
1784 sub files_compare_inputs (@) {
1789 my $showinputs = sub {
1790 return join "; ", map { $_->get_option('name') } @$inputs;
1793 foreach my $in (@$inputs) {
1795 my $in_name = $in->get_option('name');
1797 printdebug "files_compare_inputs $in_name\n";
1799 foreach my $csumi (@files_csum_info_fields) {
1800 my ($fname) = @$csumi;
1801 printdebug "files_compare_inputs $in_name $fname\n";
1803 my $field = $in->{$fname};
1804 next unless defined $field;
1807 foreach (split /\n/, $field) {
1810 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1811 fail "could not parse $in_name $fname line \`$_'";
1813 printdebug "files_compare_inputs $in_name $fname $f\n";
1817 my $re = \ $record{$f}{$fname};
1819 $fchecked{$f}{$in_name} = 1;
1821 fail "hash or size of $f varies in $fname fields".
1822 " (between: ".$showinputs->().")";
1827 @files = sort @files;
1828 $expected_files //= \@files;
1829 "@$expected_files" eq "@files" or
1830 fail "file list in $in_name varies between hash fields!";
1833 fail "$in_name has no files list field(s)";
1835 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1838 grep { keys %$_ == @$inputs-1 } values %fchecked
1839 or fail "no file appears in all file lists".
1840 " (looked in: ".$showinputs->().")";
1843 sub is_orig_file_in_dsc ($$) {
1844 my ($f, $dsc_files_info) = @_;
1845 return 0 if @$dsc_files_info <= 1;
1846 # One file means no origs, and the filename doesn't have a "what
1847 # part of dsc" component. (Consider versions ending `.orig'.)
1848 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1852 sub is_orig_file_of_vsn ($$) {
1853 my ($f, $upstreamvsn) = @_;
1854 my $base = srcfn $upstreamvsn, '';
1855 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
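# Examples (per $orig_f_tail_re above): for upstream version 1.4,
# "dgit_1.4.orig.tar.gz", "dgit_1.4.orig-foo.tar.xz" and
# "dgit_1.4.orig.tar.gz.asc" all count as orig files of that version,
# whereas "dgit_1.4-1.debian.tar.xz" does not.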
1859 sub changes_update_origs_from_dsc ($$$$) {
1860 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1862 printdebug "checking origs needed ($upstreamvsn)...\n";
1863 $_ = getfield $changes, 'Files';
1864 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1865 fail "cannot find section/priority from .changes Files field";
1866 my $placementinfo = $1;
1868 printdebug "checking origs needed placement '$placementinfo'...\n";
1869 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1870 $l =~ m/\S+$/ or next;
1872 printdebug "origs $file | $l\n";
1873 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1874 printdebug "origs $file is_orig\n";
1875 my $have = archive_query('file_in_archive', $file);
1876 if (!defined $have) {
1878 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1884 printdebug "origs $file \$#\$have=$#$have\n";
1885 foreach my $h (@$have) {
1888 foreach my $csumi (@files_csum_info_fields) {
1889 my ($fname, $module, $method, $archivefield) = @$csumi;
1890 next unless defined $h->{$archivefield};
1891 $_ = $dsc->{$fname};
1892 next unless defined;
1893 m/^(\w+) .* \Q$file\E$/m or
1894 fail ".dsc $fname missing entry for $file";
1895 if ($h->{$archivefield} eq $1) {
1899 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1902 die "$file ".Dumper($h)." ?!" if $same && @differ;
1905 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1908 printdebug "origs $file f.same=$found_same".
1909 " #f._differ=$#found_differ\n";
1910 if (@found_differ && !$found_same) {
1912 "archive contains $file with different checksum",
1915 # Now we edit the changes file to add or remove it
1916 foreach my $csumi (@files_csum_info_fields) {
1917 my ($fname, $module, $method, $archivefield) = @$csumi;
1918 next unless defined $changes->{$fname};
1920 # in archive, delete from .changes if it's there
1921 $changed{$file} = "removed" if
1922 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1923 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1924 # not in archive, but it's here in the .changes
1926 my $dsc_data = getfield $dsc, $fname;
1927 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1929 $extra =~ s/ \d+ /$&$placementinfo /
1930 or die "$fname $extra >$dsc_data< ?"
1931 if $fname eq 'Files';
1932 $changes->{$fname} .= "\n". $extra;
1933 $changed{$file} = "added";
1938 foreach my $file (keys %changed) {
1940 "edited .changes for archive .orig contents: %s %s",
1941 $changed{$file}, $file;
1943 my $chtmp = "$changesfile.tmp";
1944 $changes->save($chtmp);
1946 rename $chtmp,$changesfile or die "$changesfile $!";
1948 progress "[new .changes left in $changesfile]";
1951 progress "$changesfile already has appropriate .orig(s) (if any)";
1955 sub make_commit ($) {
1957 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1960 sub make_commit_text ($) {
1963 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1965 print Dumper($text) if $debuglevel > 1;
1966 my $child = open2($out, $in, @cmd) or die $!;
1969 print $in $text or die $!;
1970 close $in or die $!;
1972 $h =~ m/^\w+$/ or die;
1974 printdebug "=> $h\n";
1977 (waitpid $child, 0) == $child or die "$child $!";
1978 $? and failedcmd @cmd;
1982 sub clogp_authline ($) {
1984 my $author = getfield $clogp, 'Maintainer';
1985 $author =~ s#,.*##ms;
1986 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1987 my $authline = "$author $date";
1988 $authline =~ m/$git_authline_re/o or
1989 fail "unexpected commit author line format \`$authline'".
1990 " (was generated from changelog Maintainer field)";
1991 return ($1,$2,$3) if wantarray;
1995 sub vendor_patches_distro ($$) {
1996 my ($checkdistro, $what) = @_;
1997 return unless defined $checkdistro;
1999 my $series = "debian/patches/\L$checkdistro\E.series";
2000 printdebug "checking for vendor-specific $series ($what)\n";
2002 if (!open SERIES, "<", $series) {
2003 die "$series $!" unless $!==ENOENT;
2012 Unfortunately, this source package uses a feature of dpkg-source where
2013 the same source package unpacks to different source code on different
2014 distros. dgit cannot safely operate on such packages on affected
2015 distros, because the meaning of source packages is not stable.
2017 Please ask the distro/maintainer to remove the distro-specific series
2018 files and use a different technique (if necessary, uploading actually
2019 different packages, if different distros are supposed to have
2023 fail "Found active distro-specific series file for".
2024 " $checkdistro ($what): $series, cannot continue";
2026 die "$series $!" if SERIES->error;
2030 sub check_for_vendor_patches () {
2031 # This dpkg-source feature doesn't seem to be documented anywhere!
2032 # But it can be found in the changelog (reformatted):
2034 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2035 # Author: Raphael Hertzog <hertzog@debian.org>
2036 # Date: Sun Oct 3 09:36:48 2010 +0200
2038 # dpkg-source: correctly create .pc/.quilt_series with alternate
2041 # If you have debian/patches/ubuntu.series and you were
2042 # unpacking the source package on ubuntu, quilt was still
2043 # directed to debian/patches/series instead of
2044 # debian/patches/ubuntu.series.
2046 # debian/changelog | 3 +++
2047 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2048 # 2 files changed, 6 insertions(+), 1 deletion(-)
2051 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2052 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2053 "Dpkg::Vendor \`current vendor'");
2054 vendor_patches_distro(access_basedistro(),
2055 "(base) distro being accessed");
2056 vendor_patches_distro(access_nomdistro(),
2057 "(nominal) distro being accessed");
2060 sub generate_commits_from_dsc () {
2061 # See big comment in fetch_from_archive, below.
2062 # See also README.dsc-import.
2066 my @dfi = dsc_files_info();
2067 foreach my $fi (@dfi) {
2068 my $f = $fi->{Filename};
2069 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2071 printdebug "considering linking $f: ";
2073 link_ltarget "../../../../$f", $f
2074 or ((printdebug "($!) "), 0)
2078 printdebug "linked.\n";
2080 complete_file_from_dsc('.', $fi)
2083 if (is_orig_file_in_dsc($f, \@dfi)) {
2084 link $f, "../../../../$f"
2090 # We unpack and record the orig tarballs first, so that we only
2091 # need disk space for one private copy of the unpacked source.
2092 # But we can't make them into commits until we have the metadata
2093 # from the debian/changelog, so we record the tree objects now and
2094 # make them into commits later.
2096 my $upstreamv = upstreamversion $dsc->{version};
2097 my $orig_f_base = srcfn $upstreamv, '';
2099 foreach my $fi (@dfi) {
2100 # We actually import, and record as a commit, every tarball
2101 # (unless there is only one file, in which case there seems
2104 my $f = $fi->{Filename};
2105 printdebug "import considering $f ";
2106 (printdebug "only one dfi\n"), next if @dfi == 1;
2107 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2108 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2112 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2114 printdebug "Y ", (join ' ', map { $_//"(none)" }
2115 $compr_ext, $orig_f_part
2118 my $input = new IO::File $f, '<' or die "$f $!";
2122 if (defined $compr_ext) {
2124 Dpkg::Compression::compression_guess_from_filename $f;
2125 fail "Dpkg::Compression cannot handle file $f in source package"
2126 if defined $compr_ext && !defined $cname;
2128 new Dpkg::Compression::Process compression => $cname;
2129 my @compr_cmd = $compr_proc->get_uncompress_cmdline();
2130 my $compr_fh = new IO::Handle;
2131 my $compr_pid = (open $compr_fh, "-|") // die $!;
2133 open STDIN, "<&", $input or die $!;
2135 die "dgit (child): exec $compr_cmd[0]: $!\n";
2140 rmtree "_unpack-tar";
2141 mkdir "_unpack-tar" or die $!;
2142 my @tarcmd = qw(tar -x -f -
2143 --no-same-owner --no-same-permissions
2144 --no-acls --no-xattrs --no-selinux);
2145 my $tar_pid = fork // die $!;
2147 chdir "_unpack-tar" or die $!;
2148 open STDIN, "<&", $input or die $!;
2150 die "dgit (child): exec $tarcmd[0]: $!";
2152 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2153 !$? or failedcmd @tarcmd;
2156 (@compr_cmd ? failedcmd @compr_cmd
2158 # finally, we have the results in "tarball", but maybe
2159 # with the wrong permissions
2161 runcmd qw(chmod -R +rwX _unpack-tar);
2162 changedir "_unpack-tar";
2163 remove_stray_gits($f);
2164 mktree_in_ud_here();
2166 my ($tree) = git_add_write_tree();
2167 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2168 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2170 printdebug "one subtree $1\n";
2172 printdebug "multiple subtrees\n";
2175 rmtree "_unpack-tar";
2177 my $ent = [ $f, $tree ];
2179 Orig => !!$orig_f_part,
2180 Sort => (!$orig_f_part ? 2 :
2181 $orig_f_part =~ m/-/g ? 1 :
2189 # put any without "_" first (spec is not clear whether files
2190 # are always in the usual order). Tarballs without "_" are
2191 # the main orig or the debian tarball.
2192 $a->{Sort} <=> $b->{Sort} or
2196 my $any_orig = grep { $_->{Orig} } @tartrees;
2198 my $dscfn = "$package.dsc";
2200 my $treeimporthow = 'package';
2202 open D, ">", $dscfn or die "$dscfn: $!";
2203 print D $dscdata or die "$dscfn: $!";
2204 close D or die "$dscfn: $!";
2205 my @cmd = qw(dpkg-source);
2206 push @cmd, '--no-check' if $dsc_checked;
2207 if (madformat $dsc->{format}) {
2208 push @cmd, '--skip-patches';
2209 $treeimporthow = 'unpatched';
2211 push @cmd, qw(-x --), $dscfn;
2214 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2215 if (madformat $dsc->{format}) {
2216 check_for_vendor_patches();
2220 if (madformat $dsc->{format}) {
2221 my @pcmd = qw(dpkg-source --before-build .);
2222 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2224 $dappliedtree = git_add_write_tree();
2227 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2228 debugcmd "|",@clogcmd;
2229 open CLOGS, "-|", @clogcmd or die $!;
2234 printdebug "import clog search...\n";
2237 my $stanzatext = do { local $/=""; <CLOGS>; };
2238 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2239 last if !defined $stanzatext;
2241 my $desc = "package changelog, entry no.$.";
2242 open my $stanzafh, "<", \$stanzatext or die;
2243 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2244 $clogp //= $thisstanza;
2246 printdebug "import clog $thisstanza->{version} $desc...\n";
2248 last if !$any_orig; # we don't need $r1clogp
2250 # We look for the first (most recent) changelog entry whose
2251 # version number is lower than the upstream version of this
2252 # package. Then the last (least recent) previous changelog
2253 # entry is treated as the one which introduced this upstream
2254 # version and used for the synthetic commits for the upstream
2257 # One might think that a more sophisticated algorithm would be
2258 # necessary. But: we do not want to scan the whole changelog
2259 # file. Stopping when we see an earlier version, which
2260 # necessarily then is an earlier upstream version, is the only
2261 # realistic way to do that. Then, either the earliest
2262 # changelog entry we have seen so far is indeed the earliest
2263 # upload of this upstream version; or there are only changelog
2264 # entries relating to later upstream versions (which is not
2265 # possible unless the changelog and .dsc disagree about the
2266 # version). Then it remains to choose between the physically
2267 # last entry in the file, and the one with the lowest version
2268 # number. If these are not the same, we guess that the
2269 # versions were created in a non-monotonic order rather than
2270 # that the changelog entries have been misordered.
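# Illustrative sketch (comment only, not part of dgit's code): the
# stopping rule above relies on Debian version ordering, roughly as
# provided by Dpkg::Version, e.g.
#
#   use Dpkg::Version qw(version_compare);
#   version_compare('1.4-3', '1.5-1') < 0;   # earlier upstream version
#   version_compare('1.5-2', '1.5-1') > 0;   # same upstream, later upload
#
# so the loop below stops at the first entry whose version compares
# below $upstreamv.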
2272 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2274 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2275 $r1clogp = $thisstanza;
2277 printdebug "import clog $r1clogp->{version} becomes r1\n";
2279 die $! if CLOGS->error;
2280 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2282 $clogp or fail "package changelog has no entries!";
2284 my $authline = clogp_authline $clogp;
2285 my $changes = getfield $clogp, 'Changes';
2286 my $cversion = getfield $clogp, 'Version';
2289 $r1clogp //= $clogp; # maybe there's only one entry;
2290 my $r1authline = clogp_authline $r1clogp;
2291 # Strictly, r1authline might now be wrong if it's going to be
2292 # unused because !$any_orig. Whatever.
2294 printdebug "import tartrees authline $authline\n";
2295 printdebug "import tartrees r1authline $r1authline\n";
2297 foreach my $tt (@tartrees) {
2298 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2300 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2303 committer $r1authline
2307 [dgit import orig $tt->{F}]
2315 [dgit import tarball $package $cversion $tt->{F}]
2320 printdebug "import main commit\n";
2322 open C, ">../commit.tmp" or die $!;
2323 print C <<END or die $!;
2326 print C <<END or die $! foreach @tartrees;
2329 print C <<END or die $!;
2335 [dgit import $treeimporthow $package $cversion]
2339 my $rawimport_hash = make_commit qw(../commit.tmp);
2341 if (madformat $dsc->{format}) {
2342 printdebug "import apply patches...\n";
2344 # regularise the state of the working tree so that
2345 # the checkout of $rawimport_hash works nicely.
2346 my $dappliedcommit = make_commit_text(<<END);
2353 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2355 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2357 # We need the answers to be reproducible
2358 my @authline = clogp_authline($clogp);
2359 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2360 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2361 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2362 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2363 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2364 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2366 my $path = $ENV{PATH} or die;
2368 foreach my $use_absurd (qw(0 1)) {
2369 runcmd @git, qw(checkout -q unpa);
2370 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2371 local $ENV{PATH} = $path;
2374 progress "warning: $@";
2375 $path = "$absurdity:$path";
2376 progress "$us: trying slow absurd-git-apply...";
2377 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2382 die "forbid absurd git-apply\n" if $use_absurd
2383 && forceing [qw(import-gitapply-no-absurd)];
2384 die "only absurd git-apply!\n" if !$use_absurd
2385 && forceing [qw(import-gitapply-absurd)];
2387 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2388 local $ENV{PATH} = $path if $use_absurd;
2390 my @showcmd = (gbp_pq, qw(import));
2391 my @realcmd = shell_cmd
2392 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2393 debugcmd "+",@realcmd;
2394 if (system @realcmd) {
2395 die +(shellquote @showcmd).
2397 failedcmd_waitstatus()."\n";
2400 my $gapplied = git_rev_parse('HEAD');
2401 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2402 $gappliedtree eq $dappliedtree or
2404 gbp-pq import and dpkg-source disagree!
2405 gbp-pq import gave commit $gapplied
2406 gbp-pq import gave tree $gappliedtree
2407 dpkg-source --before-build gave tree $dappliedtree
2409 $rawimport_hash = $gapplied;
2414 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2419 progress "synthesised git commit from .dsc $cversion";
2421 my $rawimport_mergeinput = {
2422 Commit => $rawimport_hash,
2423 Info => "Import of source package",
2425 my @output = ($rawimport_mergeinput);
2427 if ($lastpush_mergeinput) {
2428 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2429 my $oversion = getfield $oldclogp, 'Version';
2431 version_compare($oversion, $cversion);
2433 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2434 { Message => <<END, ReverseParents => 1 });
2435 Record $package ($cversion) in archive suite $csuite
2437 } elsif ($vcmp > 0) {
2438 print STDERR <<END or die $!;
2440 Version actually in archive: $cversion (older)
2441 Last version pushed with dgit: $oversion (newer or same)
2444 @output = $lastpush_mergeinput;
2446 # Same version. Use what's in the server git branch,
2447 # discarding our own import. (This could happen if the
2448 # server automatically imports all packages into git.)
2449 @output = $lastpush_mergeinput;
2452 changedir '../../../..';
2457 sub complete_file_from_dsc ($$) {
2458 our ($dstdir, $fi) = @_;
2459 # Ensures that we have, in $dstdir, the file $fi, with the correct
2460 # contents. (Downloading it from alongside $dscurl if necessary.)
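# Hypothetical usage sketch (comment only): a caller iterates over
# dsc_files_info() and passes each entry with a destination directory,
# e.g.
#
#   complete_file_from_dsc('..', $fi);
#
# as ensure_we_have_orig() does below for the orig tarballs.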
2462 my $f = $fi->{Filename};
2463 my $tf = "$dstdir/$f";
2466 if (stat_exists $tf) {
2467 progress "using existing $f";
2469 printdebug "$tf does not exist, need to fetch\n";
2471 $furl =~ s{/[^/]+$}{};
2473 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2474 die "$f ?" if $f =~ m#/#;
2475 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2476 return 0 if !act_local();
2480 open F, "<", "$tf" or die "$tf: $!";
2481 $fi->{Digester}->reset();
2482 $fi->{Digester}->addfile(*F);
2483 F->error and die $!;
2484 my $got = $fi->{Digester}->hexdigest();
2485 $got eq $fi->{Hash} or
2486 fail "file $f has hash $got but .dsc".
2487 " demands hash $fi->{Hash} ".
2488 ($downloaded ? "(got wrong file from archive!)"
2489 : "(perhaps you should delete this file?)");
2494 sub ensure_we_have_orig () {
2495 my @dfi = dsc_files_info();
2496 foreach my $fi (@dfi) {
2497 my $f = $fi->{Filename};
2498 next unless is_orig_file_in_dsc($f, \@dfi);
2499 complete_file_from_dsc('..', $fi)
2504 #---------- git fetch ----------
2506 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2507 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2509 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2510 # locally fetched refs because they have unhelpful names and clutter
2511 # up gitk etc. So we track whether we have "used up" each ref (ie,
2512 # whether we have made another local ref which refers to this object).
2514 # (If we deleted them unconditionally, then we might end up
2515 # re-fetching the same git objects each time dgit fetch was run.)
2517 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2518 # in git_fetch_us to fetch the refs in question, and possibly a call
2519 # to lrfetchref_used.
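# Comment-only sketch of that pairing (not part of dgit's code):
#
#   git_lrfetch_sane 0, "tags/$sometag";               # arrange the fetch
#   my $lref = lrfetchrefs."/tags/$sometag";
#   lrfetchref_used $lref if defined $lrfetchrefs_f{$lref};
#
# ($sometag is hypothetical; git_fetch_us below is a real caller.)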
2521 our (%lrfetchrefs_f, %lrfetchrefs_d);
2522 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
2524 sub lrfetchref_used ($) {
2525 my ($fullrefname) = @_;
2526 my $objid = $lrfetchrefs_f{$fullrefname};
2527 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2530 sub git_lrfetch_sane {
2531 my ($supplementary, @specs) = @_;
2532 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2533 # at least as regards @specs. Also leave the results in
2534 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2535 # able to clean these up.
2537 # With $supplementary==1, @specs must not contain wildcards
2538 # and we add to our previous fetches (non-atomically).
2540 # This is rather miserable:
2541 # When git fetch --prune is passed a fetchspec ending with a *,
2542 # it does a plausible thing. If there is no * then:
2543 # - it matches subpaths too, even if the supplied refspec
2544 # starts refs, and behaves completely madly if the source
2545 # has refs/refs/something. (See, for example, Debian #NNNN.)
2546 # - if there is no matching remote ref, it bombs out the whole
2548 # We want to fetch a fixed ref, and we don't know in advance
2549 # if it exists, so this is not suitable.
2551 # Our workaround is to use git ls-remote. git ls-remote has its
2552 # own quirks. Notably, it has the absurd multi-tail-matching
2553 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2554 # refs/refs/foo etc.
2556 # Also, we want an idempotent snapshot, but we have to make two
2557 # calls to the remote: one to git ls-remote and one to git fetch. The
2558 # solution is to use git ls-remote to obtain a target state, and
2559 # git fetch to try to generate it. If we don't manage to generate
2560 # the target state, we try again.
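# Comment-only outline of the loop below (details omitted):
#
#   FETCH_ITERATION:
#     %wantr  = ls-remote's view of @specs on the server;
#     git fetch the corresponding refspecs into lrfetchrefs/*;
#     delete or fix up any local ref that disagrees with %wantr;
#     if an object we want was not fetched, goto FETCH_ITERATION.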
2562 my $url = access_giturl();
2564 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2566 my $specre = join '|', map {
2569 my $wildcard = $x =~ s/\\\*$/.*/;
2570 die if $wildcard && $supplementary;
2573 printdebug "git_lrfetch_sane specre=$specre\n";
2574 my $wanted_rref = sub {
2576 return m/^(?:$specre)$/;
2579 my $fetch_iteration = 0;
2582 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2583 if (++$fetch_iteration > 10) {
2584 fail "too many iterations trying to get sane fetch!";
2587 my @look = map { "refs/$_" } @specs;
2588 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2592 open GITLS, "-|", @lcmd or die $!;
2594 printdebug "=> ", $_;
2595 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2596 my ($objid,$rrefname) = ($1,$2);
2597 if (!$wanted_rref->($rrefname)) {
2599 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2603 $wantr{$rrefname} = $objid;
2606 close GITLS or failedcmd @lcmd;
2608 # OK, now %wantr is exactly what we want for refs in @specs
2610 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2611 "+refs/$_:".lrfetchrefs."/$_";
2614 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2616 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2617 runcmd_ordryrun_local @fcmd if @fspecs;
2619 if (!$supplementary) {
2620 %lrfetchrefs_f = ();
2624 git_for_each_ref(lrfetchrefs, sub {
2625 my ($objid,$objtype,$lrefname,$reftail) = @_;
2626 $lrfetchrefs_f{$lrefname} = $objid;
2627 $objgot{$objid} = 1;
2630 if ($supplementary) {
2634 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2635 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2636 if (!exists $wantr{$rrefname}) {
2637 if ($wanted_rref->($rrefname)) {
2639 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2643 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2646 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2647 delete $lrfetchrefs_f{$lrefname};
2651 foreach my $rrefname (sort keys %wantr) {
2652 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2653 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2654 my $want = $wantr{$rrefname};
2655 next if $got eq $want;
2656 if (!defined $objgot{$want}) {
2658 warning: git ls-remote suggests we want $lrefname
2659 warning: and it should refer to $want
2660 warning: but git fetch didn't fetch that object to any relevant ref.
2661 warning: This may be due to a race with someone updating the server.
2662 warning: Will try again...
2664 next FETCH_ITERATION;
2667 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says it should be $want
2669 runcmd_ordryrun_local @git, qw(update-ref -m),
2670 "dgit fetch git fetch fixup", $lrefname, $want;
2671 $lrfetchrefs_f{$lrefname} = $want;
2675 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2676 Dumper(\%lrfetchrefs_f);
2679 sub git_fetch_us () {
2680 # Want to fetch only what we are going to use, unless
2681 # deliberately-not-ff, in which case we must fetch everything.
2683 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2685 (quiltmode_splitbrain
2686 ? (map { $_->('*',access_nomdistro) }
2687 \&debiantag_new, \&debiantag_maintview)
2688 : debiantags('*',access_nomdistro));
2689 push @specs, server_branch($csuite);
2690 push @specs, $rewritemap;
2691 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2693 git_lrfetch_sane 0, @specs;
2696 my @tagpats = debiantags('*',access_nomdistro);
2698 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2699 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2700 printdebug "currently $fullrefname=$objid\n";
2701 $here{$fullrefname} = $objid;
2703 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2704 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2705 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2706 printdebug "offered $lref=$objid\n";
2707 if (!defined $here{$lref}) {
2708 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2709 runcmd_ordryrun_local @upd;
2710 lrfetchref_used $fullrefname;
2711 } elsif ($here{$lref} eq $objid) {
2712 lrfetchref_used $fullrefname;
2715 "Not updateting $lref from $here{$lref} to $objid.\n";
2720 #---------- dsc and archive handling ----------
2722 sub mergeinfo_getclogp ($) {
2723 # Ensures that $mi->{Clogp} exists and returns it
2725 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2728 sub mergeinfo_version ($) {
2729 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2732 sub fetch_from_archive_record_1 ($) {
2734 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2735 'DGIT_ARCHIVE', $hash;
2736 cmdoutput @git, qw(log -n2), $hash;
2737 # ... gives git a chance to complain if our commit is malformed
2740 sub fetch_from_archive_record_2 ($) {
2742 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2746 dryrun_report @upd_cmd;
2750 sub parse_dsc_field ($$) {
2751 my ($dsc, $what) = @_;
2753 foreach my $field (@ourdscfield) {
2754 $f = $dsc->{$field};
2758 progress "$what: NO git hash";
2759 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2760 = $f =~ m/^(\w+) ($distro_re) ($versiontag_re) (\S+)(?:\s|$)/) {
2761 progress "$what: specified git info ($dsc_distro)";
2762 $dsc_hint_tag = [ $dsc_hint_tag ];
2763 } elsif ($f =~ m/^\w+\s*$/) {
2765 $dsc_distro //= 'debian';
2766 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2768 progress "$what: specified git hash";
2770 fail "$what: invalid Dgit info";
2774 sub resolve_dsc_field_commit ($$) {
2775 my ($already_distro, $already_mapref) = @_;
2777 return unless defined $dsc_hash;
2780 defined $already_mapref &&
2781 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2782 ? $already_mapref : undef;
2786 my ($what, @fetch) = @_;
2788 local $idistro = $dsc_distro;
2789 my $lrf = lrfetchrefs;
2791 if (!$chase_dsc_distro) {
2793 "not chasing .dsc distro $dsc_distro: not fetching $what";
2798 ".dsc names distro $dsc_distro: fetching $what";
2800 my $url = access_giturl();
2801 if (!defined $url) {
2802 defined $dsc_hint_url or fail <<END;
2803 .dsc Dgit metadata is in context of distro $dsc_distro
2804 for which we have no configured url and .dsc provides no hint
2807 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2808 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2809 parse_cfg_bool "dsc-url-proto-ok", 'false',
2810 cfg("dgit.dsc-url-proto-ok.$proto",
2811 "dgit.default.dsc-url-proto-ok")
2813 .dsc Dgit metadata is in context of distro $dsc_distro
2814 for which we have no configured url;
2815 .dsc provides a hinted url with protocol $proto which is unsafe.
2816 (can be overridden by config - consult documentation)
2818 $url = $dsc_hint_url;
2821 git_lrfetch_sane 1, @fetch;
2826 if (parse_cfg_bool 'rewrite-map-enable', 'true',
2827 access_cfg('rewrite-map-enable', 'RETURN-UNDEF')) {
2828 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2829 $mapref = $lrf.'/'.$rewritemap;
2830 my $rewritemapdata = git_cat_file $mapref.':map';
2831 if (defined $rewritemapdata
2832 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2834 "server's git history rewrite map contains a relevant entry!";
2837 if (defined $dsc_hash) {
2838 progress "using rewritten git hash in place of .dsc value";
2840 progress "server data says .dsc hash is to be disregarded";
2845 if (!defined git_cat_file $dsc_hash) {
2846 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2847 my $lrf = $do_fetch->("additional commits", @tags) &&
2848 defined git_cat_file $dsc_hash
2850 .dsc Dgit metadata requires commit $dsc_hash
2851 but we could not obtain that object anywhere.
2853 foreach my $t (@tags) {
2854 my $fullrefname = $lrf.'/'.$t;
2855 print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2856 next unless $lrfetchrefs_f{$fullrefname};
2857 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2858 lrfetchref_used $fullrefname;
2863 sub fetch_from_archive () {
2864 ensure_setup_existing_tree();
2866 # Ensures that lrref() is what is actually in the archive, one way
2867 # or another, according to us - ie this client's
2868 # appropriately-updated archive view. Also returns the commit id.
2869 # If there is nothing in the archive, leaves lrref alone and
2870 # returns undef. git_fetch_us must have already been called.
2874 parse_dsc_field($dsc, 'last upload to archive');
2875 resolve_dsc_field_commit access_basedistro,
2876 lrfetchrefs."/".$rewritemap
2878 progress "no version available from the archive";
2881 # If the archive's .dsc has a Dgit field, there are three
2882 # relevant git commitids we need to choose between and/or merge
2884 # 1. $dsc_hash: the Dgit field from the archive
2885 # 2. $lastpush_hash: the suite branch on the dgit git server
2886 # 3. $lastfetch_hash: our local tracking branch for the suite
2888 # These may all be distinct and need not be in any fast forward
2891 # If the dsc was pushed to this suite, then the server suite
2892 # branch will have been updated; but it might have been pushed to
2893 # a different suite and copied by the archive. Conversely a more
2894 # recent version may have been pushed with dgit but not appeared
2895 # in the archive (yet).
2897 # $lastfetch_hash may be awkward because archive imports
2898 # (particularly, imports of Dgit-less .dscs) are performed only as
2899 # needed on individual clients, so different clients may perform a
2900 # different subset of them - and these imports are only made
2901 # public during push. So $lastfetch_hash may represent a set of
2902 # imports different to a subsequent upload by a different dgit
2905 # Our approach is as follows:
2907 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2908 # descendant of $dsc_hash, then it was pushed by a dgit user who
2909 # had based their work on $dsc_hash, so we should prefer it.
2910 # Otherwise, $dsc_hash was installed into this suite in the
2911 # archive other than by a dgit push, and (necessarily) after the
2912 # last dgit push into that suite (since a dgit push would have
2913 # been descended from the dgit server git branch); thus, in that
2914 # case, we prefer the archive's version (and produce a
2915 # pseudo-merge to overwrite the dgit server git branch).
2917 # (If there is no Dgit field in the archive's .dsc then
2918 # generate_commit_from_dsc uses the version numbers to decide
2919 # whether the suite branch or the archive is newer. If the suite
2920 # branch is newer it ignores the archive's .dsc; otherwise it
2921 # generates an import of the .dsc, and produces a pseudo-merge to
2922 # overwrite the suite branch with the archive contents.)
2924 # The outcome of that part of the algorithm is the `public view',
2925 # and is the same for all dgit clients: it does not depend on any
2926 # unpublished history in the local tracking branch.
2928 # As between the public view and the local tracking branch: The
2929 # local tracking branch is only updated by dgit fetch, and
2930 # whenever dgit fetch runs it includes the public view in the
2931 # local tracking branch. Therefore if the public view is not
2932 # descended from the local tracking branch, the local tracking
2933 # branch must contain history which was imported from the archive
2934 # but never pushed; and, its tip is now out of date. So, we make
2935 # a pseudo-merge to overwrite the old imports and stitch the old
2938 # Finally: we do not necessarily reify the public view (as
2939 # described above). This is so that we do not end up stacking two
2940 # pseudo-merges. So what we actually do is figure out the inputs
2941 # to any public view pseudo-merge and put them in @mergeinputs.
2944 # $mergeinputs[]{Commit}
2945 # $mergeinputs[]{Info}
2946 # $mergeinputs[0] is the one whose tree we use
2947 # @mergeinputs is in the order we use in the actual commit)
2950 # $mergeinputs[]{Message} is a commit message to use
2951 # $mergeinputs[]{ReverseParents} if defined, specifies that the parent
2952 # list should be in opposite order
2953 # Such an entry has no Commit or Info. It applies only when found
2954 # in the last entry. (This ugliness is to support making
2955 # identical imports to previous dgit versions.)
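# Comment-only sketch of the Dgit-field case as implemented below
# (not a substitute for the real code):
#
#   if (is_fast_fwd($dsc_hash, $lastpush_hash)) {
#       # a dgit push happened since this archive version: prefer the
#       # dgit server branch
#   } elsif (is_fast_fwd($lastpush_hash, $dsc_hash)) {
#       # archive is ahead (eg an in-archive copy): follow the .dsc
#   } else {
#       # divergent: pseudo-merge, with the .dsc's commit providing the tree
#   }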
2957 my $lastpush_hash = git_get_ref(lrfetchref());
2958 printdebug "previous reference hash=$lastpush_hash\n";
2959 $lastpush_mergeinput = $lastpush_hash && {
2960 Commit => $lastpush_hash,
2961 Info => "dgit suite branch on dgit git server",
2964 my $lastfetch_hash = git_get_ref(lrref());
2965 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
2966 my $lastfetch_mergeinput = $lastfetch_hash && {
2967 Commit => $lastfetch_hash,
2968 Info => "dgit client's archive history view",
2971 my $dsc_mergeinput = $dsc_hash && {
2972 Commit => $dsc_hash,
2973 Info => "Dgit field in .dsc from archive",
2977 my $del_lrfetchrefs = sub {
2980 printdebug "del_lrfetchrefs...\n";
2981 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
2982 my $objid = $lrfetchrefs_d{$fullrefname};
2983 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
2985 $gur ||= new IO::Handle;
2986 open $gur, "|-", qw(git update-ref --stdin) or die $!;
2988 printf $gur "delete %s %s\n", $fullrefname, $objid;
2991 close $gur or failedcmd "git update-ref delete lrfetchrefs";
2995 if (defined $dsc_hash) {
2996 ensure_we_have_orig();
2997 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
2998 @mergeinputs = $dsc_mergeinput
2999 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3000 print STDERR <<END or die $!;
3002 Git commit in archive is behind the last version allegedly pushed/uploaded.
3003 Commit referred to by archive: $dsc_hash
3004 Last version pushed with dgit: $lastpush_hash
3007 @mergeinputs = ($lastpush_mergeinput);
3009 # Archive has .dsc which is not a descendant of the last dgit
3010 # push. This can happen if the archive moves .dscs about.
3011 # Just follow its lead.
3012 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3013 progress "archive .dsc names newer git commit";
3014 @mergeinputs = ($dsc_mergeinput);
3016 progress "archive .dsc names other git commit, fixing up";
3017 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3021 @mergeinputs = generate_commits_from_dsc();
3022 # We have just done an import. Now, our import algorithm might
3023 # have been improved. But even so we do not want to generate
3024 # a new different import of the same package. So if the
3025 # version numbers are the same, just use our existing version.
3026 # If the version numbers are different, the archive has changed
3027 # (perhaps, rewound).
3028 if ($lastfetch_mergeinput &&
3029 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3030 (mergeinfo_version $mergeinputs[0]) )) {
3031 @mergeinputs = ($lastfetch_mergeinput);
3033 } elsif ($lastpush_hash) {
3034 # only in git, not in the archive yet
3035 @mergeinputs = ($lastpush_mergeinput);
3036 print STDERR <<END or die $!;
3038 Package not found in the archive, but has allegedly been pushed using dgit.
3042 printdebug "nothing found!\n";
3043 if (defined $skew_warning_vsn) {
3044 print STDERR <<END or die $!;
3046 Warning: relevant archive skew detected.
3047 Archive allegedly contains $skew_warning_vsn
3048 But we were not able to obtain any version from the archive or git.
3052 unshift @end, $del_lrfetchrefs;
3056 if ($lastfetch_hash &&
3058 my $h = $_->{Commit};
3059 $h and is_fast_fwd($lastfetch_hash, $h);
3060 # If true, one of the existing parents of this commit
3061 # is a descendant of the $lastfetch_hash, so we'll
3062 # be ff from that automatically.
3066 push @mergeinputs, $lastfetch_mergeinput;
3069 printdebug "fetch mergeinfos:\n";
3070 foreach my $mi (@mergeinputs) {
3072 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3074 printdebug sprintf " ReverseParents=%d Message=%s",
3075 $mi->{ReverseParents}, $mi->{Message};
3079 my $compat_info= pop @mergeinputs
3080 if $mergeinputs[$#mergeinputs]{Message};
3082 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3085 if (@mergeinputs > 1) {
3087 my $tree_commit = $mergeinputs[0]{Commit};
3089 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3090 $tree =~ m/\n\n/; $tree = $`;
3091 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3094 # We use the changelog author of the package in question as the
3095 # author of this pseudo-merge. This is (roughly) correct if
3096 # this commit is simply representing a non-dgit upload.
3097 # (Roughly because it does not record sponsorship - but we
3098 # don't have sponsorship info because that's in the .changes,
3099 # which isn't in the archive.)
3101 # But, it might be that we are representing archive history
3102 # updates (including in-archive copies). These are not really
3103 # the responsibility of the person who created the .dsc, but
3104 # there is no-one whose name we should better use. (The
3105 # author of the .dsc-named commit is clearly worse.)
3107 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3108 my $author = clogp_authline $useclogp;
3109 my $cversion = getfield $useclogp, 'Version';
3111 my $mcf = ".git/dgit/mergecommit";
3112 open MC, ">", $mcf or die "$mcf $!";
3113 print MC <<END or die $!;
3117 my @parents = grep { $_->{Commit} } @mergeinputs;
3118 @parents = reverse @parents if $compat_info->{ReverseParents};
3119 print MC <<END or die $! foreach @parents;
3123 print MC <<END or die $!;
3129 if (defined $compat_info->{Message}) {
3130 print MC $compat_info->{Message} or die $!;
3132 print MC <<END or die $!;
3133 Record $package ($cversion) in archive suite $csuite
3137 my $message_add_info = sub {
3139 my $mversion = mergeinfo_version $mi;
3140 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3144 $message_add_info->($mergeinputs[0]);
3145 print MC <<END or die $!;
3146 should be treated as descended from
3148 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3152 $hash = make_commit $mcf;
3154 $hash = $mergeinputs[0]{Commit};
3156 printdebug "fetch hash=$hash\n";
3159 my ($lasth, $what) = @_;
3160 return unless $lasth;
3161 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3164 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3166 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3168 fetch_from_archive_record_1($hash);
3170 if (defined $skew_warning_vsn) {
3172 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3173 my $gotclogp = commit_getclogp($hash);
3174 my $got_vsn = getfield $gotclogp, 'Version';
3175 printdebug "SKEW CHECK GOT $got_vsn\n";
3176 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3177 print STDERR <<END or die $!;
3179 Warning: archive skew detected. Using the available version:
3180 Archive allegedly contains $skew_warning_vsn
3181 We were able to obtain only $got_vsn
3187 if ($lastfetch_hash ne $hash) {
3188 fetch_from_archive_record_2($hash);
3191 lrfetchref_used lrfetchref();
3193 unshift @end, $del_lrfetchrefs;
3197 sub set_local_git_config ($$) {
3199 runcmd @git, qw(config), $k, $v;
3202 sub setup_mergechangelogs (;$) {
3204 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3206 my $driver = 'dpkg-mergechangelogs';
3207 my $cb = "merge.$driver";
3208 my $attrs = '.git/info/attributes';
3209 ensuredir '.git/info';
3211 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3212 if (!open ATTRS, "<", $attrs) {
3213 $!==ENOENT or die "$attrs: $!";
3217 next if m{^debian/changelog\s};
3218 print NATTRS $_, "\n" or die $!;
3220 ATTRS->error and die $!;
3223 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3226 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3227 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3229 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3232 sub setup_useremail (;$) {
3234 return unless $always || access_cfg_bool(1, 'setup-useremail');
3237 my ($k, $envvar) = @_;
3238 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3239 return unless defined $v;
3240 set_local_git_config "user.$k", $v;
3243 $setup->('email', 'DEBEMAIL');
3244 $setup->('name', 'DEBFULLNAME');
3247 sub ensure_setup_existing_tree () {
3248 my $k = "remote.$remotename.skipdefaultupdate";
3249 my $c = git_get_config $k;
3250 return if defined $c;
3251 set_local_git_config $k, 'true';
3254 sub setup_new_tree () {
3255 setup_mergechangelogs();
3259 sub multisuite_suite_child ($$$) {
3260 my ($tsuite, $merginputs, $fn) = @_;
3261 # in child, sets things up, calls $fn->(), and returns undef
3262 # in parent, returns canonical suite name for $tsuite
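# Comment-only usage sketch: the parent collects canonicalised names
# and merge inputs, roughly
#
#   my $canon = multisuite_suite_child($tsuite, \@mergeinputs, sub {
#       # runs only in the child, eg to fetch $tsuite
#   });
#   return 0 unless defined $canon;   # we are the child
#
# as fork_for_multisuite() below does.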
3263 my $canonsuitefh = IO::File::new_tmpfile;
3264 my $pid = fork // die $!;
3267 $us .= " [$isuite]";
3268 $debugprefix .= " ";
3269 progress "fetching $tsuite...";
3270 canonicalise_suite();
3271 print $canonsuitefh $csuite, "\n" or die $!;
3272 close $canonsuitefh or die $!;
3276 (waitpid $pid,0) == $pid or die $!;
3277 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3278 seek $canonsuitefh,0,0 or die $!;
3279 local $csuite = <$canonsuitefh>;
3280 die $! unless defined $csuite && chomp $csuite;
3282 printdebug "multisuite $tsuite missing\n";
3285 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3286 push @$merginputs, {
3293 sub fork_for_multisuite ($) {
3294 my ($before_fetch_merge) = @_;
3295 # if nothing unusual, just returns ''
3298 # returns 0 to caller in child, to do first of the specified suites
3299 # in child, $csuite is not yet set
3301 # returns 1 to caller in parent, to finish up anything needed after
3302 # in parent, $csuite is set to canonicalised portmanteau
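# Comment-only caller sketch (cf. the fetch command handling later in
# this file): '' means no multisuite handling was needed, 0 means we
# are the child doing the first suite, 1 means we are the parent and
# the combined result is ready:
#
#   my $multi = fork_for_multisuite(sub { ... });
#   fetch() unless $multi;        # '' or child: do an ordinary fetch
#   return if $multi eq '0';      # child is now done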
3304 my $org_isuite = $isuite;
3305 my @suites = split /\,/, $isuite;
3306 return '' unless @suites > 1;
3307 printdebug "fork_for_multisuite: @suites\n";
3311 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3313 return 0 unless defined $cbasesuite;
3315 fail "package $package missing in (base suite) $cbasesuite"
3316 unless @mergeinputs;
3318 my @csuites = ($cbasesuite);
3320 $before_fetch_merge->();
3322 foreach my $tsuite (@suites[1..$#suites]) {
3323 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3329 # xxx collect the ref here
3331 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3332 push @csuites, $csubsuite;
3335 foreach my $mi (@mergeinputs) {
3336 my $ref = git_get_ref $mi->{Ref};
3337 die "$mi->{Ref} ?" unless length $ref;
3338 $mi->{Commit} = $ref;
3341 $csuite = join ",", @csuites;
3343 my $previous = git_get_ref lrref;
3345 unshift @mergeinputs, {
3346 Commit => $previous,
3347 Info => "local combined tracking branch",
3349 "archive seems to have rewound: local tracking branch is ahead!",
3353 foreach my $ix (0..$#mergeinputs) {
3354 $mergeinputs[$ix]{Index} = $ix;
3357 @mergeinputs = sort {
3358 -version_compare(mergeinfo_version $a,
3359 mergeinfo_version $b) # highest version first
3361 $a->{Index} <=> $b->{Index}; # earliest in spec first
3367 foreach my $mi (@mergeinputs) {
3368 printdebug "multisuite merge check $mi->{Info}\n";
3369 foreach my $previous (@needed) {
3370 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3371 printdebug "multisuite merge un-needed $previous->{Info}\n";
3375 printdebug "multisuite merge this-needed\n";
3376 $mi->{Character} = '+';
3379 $needed[0]{Character} = '*';
3381 my $output = $needed[0]{Commit};
3384 printdebug "multisuite merge nontrivial\n";
3385 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3387 my $commit = "tree $tree\n";
3388 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3389 "Input branches:\n";
3391 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3392 printdebug "multisuite merge include $mi->{Info}\n";
3393 $mi->{Character} //= ' ';
3394 $commit .= "parent $mi->{Commit}\n";
3395 $msg .= sprintf " %s %-25s %s\n",
3397 (mergeinfo_version $mi),
3400 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3402 " * marks the highest version branch, which choose to use\n".
3403 " + marks each branch which was not already an ancestor\n\n".
3404 "[dgit multi-suite $csuite]\n";
3406 "author $authline\n".
3407 "committer $authline\n\n";
3408 $output = make_commit_text $commit.$msg;
3409 printdebug "multisuite merge generated $output\n";
3412 fetch_from_archive_record_1($output);
3413 fetch_from_archive_record_2($output);
3415 progress "calculated combined tracking suite $csuite";
3420 sub clone_set_head () {
3421 open H, "> .git/HEAD" or die $!;
3422 print H "ref: ".lref()."\n" or die $!;
3425 sub clone_finish ($) {
3427 runcmd @git, qw(reset --hard), lrref();
3428 runcmd qw(bash -ec), <<'END';
3430 git ls-tree -r --name-only -z HEAD | \
3431 xargs -0r touch -h -r . --
3433 printdone "ready for work in $dstdir";
3438 badusage "dry run makes no sense with clone" unless act_local();
3440 my $multi_fetched = fork_for_multisuite(sub {
3441 printdebug "multi clone before fetch merge\n";
3444 if ($multi_fetched) {
3445 printdebug "multi clone after fetch merge\n";
3447 clone_finish($dstdir);
3450 printdebug "clone main body\n";
3452 canonicalise_suite();
3453 my $hasgit = check_for_git();
3454 mkdir $dstdir or fail "create \`$dstdir': $!";
3456 runcmd @git, qw(init -q);
3458 my $giturl = access_giturl(1);
3459 if (defined $giturl) {
3460 runcmd @git, qw(remote add), 'origin', $giturl;
3463 progress "fetching existing git history";
3465 runcmd_ordryrun_local @git, qw(fetch origin);
3467 progress "starting new git history";
3469 fetch_from_archive() or no_such_package;
3470 my $vcsgiturl = $dsc->{'Vcs-Git'};
3471 if (length $vcsgiturl) {
3472 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3473 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3476 clone_finish($dstdir);
3480 canonicalise_suite();
3481 if (check_for_git()) {
3484 fetch_from_archive() or no_such_package();
3485 printdone "fetched into ".lrref();
3489 my $multi_fetched = fork_for_multisuite(sub { });
3490 fetch() unless $multi_fetched; # parent
3491 return if $multi_fetched eq '0'; # child
3492 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3494 printdone "fetched to ".lrref()." and merged into HEAD";
3497 sub check_not_dirty () {
3498 foreach my $f (qw(local-options local-patch-header)) {
3499 if (stat_exists "debian/source/$f") {
3500 fail "git tree contains debian/source/$f";
3504 return if $ignoredirty;
3506 my @cmd = (@git, qw(diff --quiet HEAD));
3508 $!=0; $?=-1; system @cmd;
3511 fail "working tree is dirty (does not match HEAD)";
3517 sub commit_admin ($) {
3520 runcmd_ordryrun_local @git, qw(commit -m), $m;
3523 sub commit_quilty_patch () {
3524 my $output = cmdoutput @git, qw(status --porcelain);
3526 foreach my $l (split /\n/, $output) {
3527 next unless $l =~ m/\S/;
3528 if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
3532 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3534 progress "nothing quilty to commit, ok.";
3537 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3538 runcmd_ordryrun_local @git, qw(add -f), @adds;
3540 Commit Debian 3.0 (quilt) metadata
3542 [dgit ($our_version) quilt-fixup]
3546 sub get_source_format () {
3548 if (open F, "debian/source/options") {
3552 s/\s+$//; # ignore missing final newline
3554 my ($k, $v) = ($`, $'); #');
3555 $v =~ s/^"(.*)"$/$1/;
3561 F->error and die $!;
3564 die $! unless $!==&ENOENT;
3567 if (!open F, "debian/source/format") {
3568 die $! unless $!==&ENOENT;
3572 F->error and die $!;
3574 return ($_, \%options);
3577 sub madformat_wantfixup ($) {
3579 return 0 unless $format eq '3.0 (quilt)';
3580 our $quilt_mode_warned;
3581 if ($quilt_mode eq 'nocheck') {
3582 progress "Not doing any fixup of \`$format' due to".
3583 " ----no-quilt-fixup or --quilt=nocheck"
3584 unless $quilt_mode_warned++;
3587 progress "Format \`$format', need to check/update patch stack"
3588 unless $quilt_mode_warned++;
3592 sub maybe_split_brain_save ($$$) {
3593 my ($headref, $dgitview, $msg) = @_;
3594 # => message fragment "$saved" describing disposition of $dgitview
3595 return "commit id $dgitview" unless defined $split_brain_save;
3596 my @cmd = (shell_cmd "cd ../../../..",
3597 @git, qw(update-ref -m),
3598 "dgit --dgit-view-save $msg HEAD=$headref",
3599 $split_brain_save, $dgitview);
3601 return "and left in $split_brain_save";
3604 # An "infopair" is a tuple [ $thing, $what ]
3605 # (often $thing is a commit hash; $what is a description)
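# Comment-only example: given
#   my $x = [ '1.2-1', 'version currently in archive' ];
#   my $y = [ '1.2-1', '--overwrite= version' ];
# infopair_cond_equal($x, $y) succeeds; if the first elements differed
# it would fail with "... (1.2-1) not equal to ... (1.2-2)".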
3607 sub infopair_cond_equal ($$) {
3609 $x->[0] eq $y->[0] or fail <<END;
3610 $x->[1] ($x->[0]) not equal to $y->[1] ($y->[0])
3614 sub infopair_lrf_tag_lookup ($$) {
3615 my ($tagnames, $what) = @_;
3616 # $tagnames may be an array ref
3617 my @tagnames = ref $tagnames ? @$tagnames : ($tagnames);
3618 printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n";
3619 foreach my $tagname (@tagnames) {
3620 my $lrefname = lrfetchrefs."/tags/$tagname";
3621 my $tagobj = $lrfetchrefs_f{$lrefname};
3622 next unless defined $tagobj;
3623 printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n";
3624 return [ git_rev_parse($tagobj), $what ];
3626 fail @tagnames==1 ? <<END : <<END;
3627 Wanted tag $what (@tagnames) on dgit server, but not found
3629 Wanted tag $what (one of: @tagnames) on dgit server, but not found
3633 sub infopair_cond_ff ($$) {
3634 my ($anc,$desc) = @_;
3635 is_fast_fwd($anc->[0], $desc->[0]) or fail <<END;
3636 $anc->[1] ($anc->[0]) .. $desc->[1] ($desc->[0]) is not fast forward
3640 sub pseudomerge_version_check ($$) {
3641 my ($clogp, $archive_hash) = @_;
3643 my $arch_clogp = commit_getclogp $archive_hash;
3644 my $i_arch_v = [ (getfield $arch_clogp, 'Version'),
3645 'version currently in archive' ];
3646 if (defined $overwrite_version) {
3647 if (length $overwrite_version) {
3648 infopair_cond_equal([ $overwrite_version,
3649 '--overwrite= version' ],
3652 my $v = $i_arch_v->[0];
3653 progress "Checking package changelog for archive version $v ...";
3655 my @xa = ("-f$v", "-t$v");
3656 my $vclogp = parsechangelog @xa;
3657 my $cv = [ (getfield $vclogp, 'Version'),
3658 "Version field from dpkg-parsechangelog @xa" ];
3659 infopair_cond_equal($i_arch_v, $cv);
3662 $@ =~ s/^dgit: //gm;
3664 "Perhaps debian/changelog does not mention $v ?";
3669 printdebug "pseudomerge_version_check i_arch_v @$i_arch_v\n";
3673 sub pseudomerge_make_commit ($$$$ $$) {
3674 my ($clogp, $dgitview, $archive_hash, $i_arch_v,
3675 $msg_cmd, $msg_msg) = @_;
3676 progress "Declaring that HEAD inciudes all changes in $i_arch_v->[0]...";
3678 my $tree = cmdoutput qw(git rev-parse), "${dgitview}:";
3679 my $authline = clogp_authline $clogp;
3683 !defined $overwrite_version ? ""
3684 : !length $overwrite_version ? " --overwrite"
3685 : " --overwrite=".$overwrite_version;
3688 my $pmf = ".git/dgit/pseudomerge";
3689 open MC, ">", $pmf or die "$pmf $!";
3690 print MC <<END or die $!;
3693 parent $archive_hash
3703 return make_commit($pmf);
3706 sub splitbrain_pseudomerge ($$$$) {
3707 my ($clogp, $maintview, $dgitview, $archive_hash) = @_;
3708 # => $merged_dgitview
3709 printdebug "splitbrain_pseudomerge...\n";
3711 # We: debian/PREVIOUS HEAD($maintview)
3712 # expect: o ----------------- o
3715 # a/d/PREVIOUS $dgitview