3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::Util qw(any);
38 use List::MoreUtils qw(pairwise);
39 use Text::Glob qw(match_glob);
40 use Fcntl qw(:DEFAULT :flock);
45 our $our_version = 'UNRELEASED'; ###substituted###
46 our $absurdity = undef; ###substituted###
48 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
57 our $dryrun_level = 0;
59 our $buildproductsdir = '..';
65 our $existing_package = 'dpkg';
67 our $changes_since_version;
69 our $overwrite_version; # undef: not specified; '': check changelog
71 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
73 our $split_brain_save;
74 our $we_are_responder;
75 our $we_are_initiator;
76 our $initiator_tempdir;
77 our $patches_applied_dirtily = 00;
81 our $chase_dsc_distro=1;
83 our %forceopts = map { $_=>0 }
84 qw(unrepresentable unsupported-source-format
85 dsc-changes-mismatch changes-origs-exactly
86 import-gitapply-absurd
87 import-gitapply-no-absurd
88 import-dsc-with-dgit-field);
90 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
92 our $suite_re = '[-+.0-9a-z]+';
93 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
94 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
95 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
96 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
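# Illustrative examples (not exhaustive): filenames such as
#   hello_1.0.orig.tar.gz, hello_1.0.orig-docs.tar.xz, hello_1.0.orig.tar.gz.asc
# all end in something matching $orig_f_tail_re; the orig/orig-docs part
# is matched by $orig_f_comp_re and a trailing .asc/.gpg/.pgp by $orig_f_sig_re.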
98 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
99 our $splitbraincache = 'dgit-intern/quilt-cache';
100 our $rewritemap = 'dgit-rewrite/map';
102 our (@git) = qw(git);
103 our (@dget) = qw(dget);
104 our (@curl) = qw(curl);
105 our (@dput) = qw(dput);
106 our (@debsign) = qw(debsign);
107 our (@gpg) = qw(gpg);
108 our (@sbuild) = qw(sbuild);
110 our (@dgit) = qw(dgit);
111 our (@aptget) = qw(apt-get);
112 our (@aptcache) = qw(apt-cache);
113 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
114 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
115 our (@dpkggenchanges) = qw(dpkg-genchanges);
116 our (@mergechanges) = qw(mergechanges -f);
117 our (@gbp_build) = ('');
118 our (@gbp_pq) = ('gbp pq');
119 our (@changesopts) = ('');
121 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
124 'debsign' => \@debsign,
126 'sbuild' => \@sbuild,
130 'apt-get' => \@aptget,
131 'apt-cache' => \@aptcache,
132 'dpkg-source' => \@dpkgsource,
133 'dpkg-buildpackage' => \@dpkgbuildpackage,
134 'dpkg-genchanges' => \@dpkggenchanges,
135 'gbp-build' => \@gbp_build,
136 'gbp-pq' => \@gbp_pq,
137 'ch' => \@changesopts,
138 'mergechanges' => \@mergechanges);
140 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
141 our %opts_cfg_insertpos = map {
143 scalar @{ $opts_opt_map{$_} }
144 } keys %opts_opt_map;
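# %opts_cfg_insertpos records, for each command in %opts_opt_map, how many
# words the built-in invocation starts with; roughly speaking, this is the
# position where configured extra options are spliced in, just after the
# command name rather than appended at the end.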
146 sub parseopts_late_defaults();
152 our $supplementary_message = '';
153 our $need_split_build_invocation = 0;
154 our $split_brain = 0;
158 return unless forkcheck_mainprocess();
159 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
162 our $remotename = 'dgit';
163 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
167 if (!defined $absurdity) {
169 $absurdity =~ s{/[^/]+$}{/absurd} or die;
173 my ($v,$distro) = @_;
174 return $tagformatfn->($v, $distro);
177 sub debiantag_maintview ($$) {
178 my ($v,$distro) = @_;
179 return "$distro/".dep14_version_mangle $v;
182 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
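# "3.0 (quilt)" is the "mad" format: the only source format whose unpack
# applies the debian/patches series, which is why it gets special treatment
# (quilt fixup, --skip-patches on import) elsewhere in this program.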
184 sub lbranch () { return "$branchprefix/$csuite"; }
185 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
186 sub lref () { return "refs/heads/".lbranch(); }
187 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
188 sub rrref () { return server_ref($csuite); }
198 return "${package}_".(stripepoch $vsn).$sfx
203 return srcfn($vsn,".dsc");
206 sub changespat ($;$) {
207 my ($vsn, $arch) = @_;
208 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
211 sub upstreamversion ($) {
223 return unless forkcheck_mainprocess();
224 foreach my $f (@end) {
226 print STDERR "$us: cleanup: $@" if length $@;
230 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
232 sub forceable_fail ($$) {
233 my ($forceoptsl, $msg) = @_;
234 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
235 print STDERR "warning: overriding problem due to --force:\n". $msg;
239 my ($forceoptsl) = @_;
240 my @got = grep { $forceopts{$_} } @$forceoptsl;
241 return 0 unless @got;
243 "warning: skipping checks or functionality due to --force-$got[0]\n";
246 sub no_such_package () {
247 print STDERR "$us: package $package does not exist in suite $isuite\n";
253 printdebug "CD $newdir\n";
254 chdir $newdir or confess "chdir: $newdir: $!";
257 sub deliberately ($) {
259 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
262 sub deliberately_not_fast_forward () {
263 foreach (qw(not-fast-forward fresh-repo)) {
264 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
268 sub quiltmode_splitbrain () {
269 $quilt_mode =~ m/gbp|dpm|unapplied/;
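# In these quilt modes the maintainer's branch is not identical to the tree
# dgit needs to push (patches unapplied, or laid out differently), so dgit
# maintains a separate dgit view of the history ("split brain").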
272 sub opts_opt_multi_cmd {
274 push @cmd, split /\s+/, shift @_;
280 return opts_opt_multi_cmd @gbp_pq;
283 #---------- remote protocol support, common ----------
285 # remote push initiator/responder protocol:
286 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
287 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
288 # < dgit-remote-push-ready <actual-proto-vsn>
295 # > supplementary-message NBYTES # $protovsn >= 3
300 # > file parsed-changelog
301 # [indicates that output of dpkg-parsechangelog follows]
302 # > data-block NBYTES
303 # > [NBYTES bytes of data (no newline)]
304 # [maybe some more blocks]
313 # > param head DGIT-VIEW-HEAD
314 # > param csuite SUITE
315 # > param tagformat old|new
316 # > param maint-view MAINT-VIEW-HEAD
318 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
319 # # goes into tag, for replay prevention
322 # [indicates that signed tag is wanted]
323 # < data-block NBYTES
324 # < [NBYTES bytes of data (no newline)]
325 # [maybe some more blocks]
329 # > want signed-dsc-changes
330 # < data-block NBYTES [transfer of signed dsc]
332 # < data-block NBYTES [transfer of signed changes]
340 sub i_child_report () {
341 # Sees if our child has died, and reap it if so. Returns a string
342 # describing how it died if it failed, or undef otherwise.
343 return undef unless $i_child_pid;
344 my $got = waitpid $i_child_pid, WNOHANG;
345 return undef if $got <= 0;
346 die unless $got == $i_child_pid;
347 $i_child_pid = undef;
348 return undef unless $?;
349 return "build host child ".waitstatusmsg();
354 fail "connection lost: $!" if $fh->error;
355 fail "protocol violation; $m not expected";
358 sub badproto_badread ($$) {
360 fail "connection lost: $!" if $!;
361 my $report = i_child_report();
362 fail $report if defined $report;
363 badproto $fh, "eof (reading $wh)";
366 sub protocol_expect (&$) {
367 my ($match, $fh) = @_;
370 defined && chomp or badproto_badread $fh, "protocol message";
378 badproto $fh, "\`$_'";
381 sub protocol_send_file ($$) {
382 my ($fh, $ourfn) = @_;
383 open PF, "<", $ourfn or die "$ourfn: $!";
386 my $got = read PF, $d, 65536;
387 die "$ourfn: $!" unless defined $got;
389 print $fh "data-block ".length($d)."\n" or die $!;
390 print $fh $d or die $!;
392 PF->error and die "$ourfn: $!";
393 print $fh "data-end\n" or die $!;
397 sub protocol_read_bytes ($$) {
398 my ($fh, $nbytes) = @_;
399 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
401 my $got = read $fh, $d, $nbytes;
402 $got==$nbytes or badproto_badread $fh, "data block";
406 sub protocol_receive_file ($$) {
407 my ($fh, $ourfn) = @_;
408 printdebug "() $ourfn\n";
409 open PF, ">", $ourfn or die "$ourfn: $!";
411 my ($y,$l) = protocol_expect {
412 m/^data-block (.*)$/ ? (1,$1) :
413 m/^data-end$/ ? (0,) :
417 my $d = protocol_read_bytes $fh, $l;
418 print PF $d or die $!;
423 #---------- remote protocol support, responder ----------
425 sub responder_send_command ($) {
427 return unless $we_are_responder;
428 # may be called even when we are not the responder; hence the early return above
429 printdebug ">> $command\n";
430 print PO $command, "\n" or die $!;
433 sub responder_send_file ($$) {
434 my ($keyword, $ourfn) = @_;
435 return unless $we_are_responder;
436 printdebug "]] $keyword $ourfn\n";
437 responder_send_command "file $keyword";
438 protocol_send_file \*PO, $ourfn;
441 sub responder_receive_files ($@) {
442 my ($keyword, @ourfns) = @_;
443 die unless $we_are_responder;
444 printdebug "[[ $keyword @ourfns\n";
445 responder_send_command "want $keyword";
446 foreach my $fn (@ourfns) {
447 protocol_receive_file \*PI, $fn;
450 protocol_expect { m/^files-end$/ } \*PI;
453 #---------- remote protocol support, initiator ----------
455 sub initiator_expect (&) {
457 protocol_expect { &$match } \*RO;
460 #---------- end remote code ----------
463 if ($we_are_responder) {
465 responder_send_command "progress ".length($m) or die $!;
466 print PO $m or die $!;
476 $ua = LWP::UserAgent->new();
480 progress "downloading $what...";
481 my $r = $ua->get(@_) or die $!;
482 return undef if $r->code == 404;
483 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
484 return $r->decoded_content(charset => 'none');
487 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
492 failedcmd @_ if system @_;
495 sub act_local () { return $dryrun_level <= 1; }
496 sub act_scary () { return !$dryrun_level; }
499 if (!$dryrun_level) {
500 progress "$us ok: @_";
502 progress "would be ok: @_ (but dry run only)";
507 printcmd(\*STDERR,$debugprefix."#",@_);
510 sub runcmd_ordryrun {
518 sub runcmd_ordryrun_local {
527 my ($first_shell, @cmd) = @_;
528 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
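# e.g. shell_cmd 'exec >/dev/null', qw(dpkg-source --before-build .)
#   runs:  sh -ec 'exec >/dev/null; exec "$@"' x dpkg-source --before-build .
# i.e. $first_shell is run in the same shell before exec'ing the real command.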
531 our $helpmsg = <<END;
533 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
534 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
535 dgit [dgit-opts] build [dpkg-buildpackage-opts]
536 dgit [dgit-opts] sbuild [sbuild-opts]
537 dgit [dgit-opts] push [dgit-opts] [suite]
538 dgit [dgit-opts] rpush build-host:build-dir ...
539 important dgit options:
540 -k<keyid> sign tag and package with <keyid> instead of default
541 --dry-run -n do not change anything, but go through the motions
542 --damp-run -L like --dry-run but make local changes, without signing
543 --new -N allow introducing a new package
544 --debug -D increase debug level
545 -c<name>=<value> set git config option (used directly by dgit too)
548 our $later_warning_msg = <<END;
549 Perhaps the upload is stuck in incoming. Using the version from git.
553 print STDERR "$us: @_\n", $helpmsg or die $!;
558 @ARGV or badusage "too few arguments";
559 return scalar shift @ARGV;
563 print $helpmsg or die $!;
567 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
569 our %defcfg = ('dgit.default.distro' => 'debian',
570 'dgit.default.default-suite' => 'unstable',
571 'dgit.default.old-dsc-distro' => 'debian',
572 'dgit-suite.*-security.distro' => 'debian-security',
573 'dgit.default.username' => '',
574 'dgit.default.archive-query-default-component' => 'main',
575 'dgit.default.ssh' => 'ssh',
576 'dgit.default.archive-query' => 'madison:',
577 'dgit.default.sshpsql-dbname' => 'service=projectb',
578 'dgit.default.aptget-components' => 'main',
579 'dgit.default.dgit-tag-format' => 'new,old,maint',
580 'dgit.dsc-url-proto-ok.http' => 'true',
581 'dgit.dsc-url-proto-ok.https' => 'true',
582 'dgit.dsc-url-proto-ok.git' => 'true',
583 'dgit.default.dsc-url-proto-ok' => 'false',
584 # old means "repo server accepts pushes with old dgit tags"
585 # new means "repo server accepts pushes with new dgit tags"
586 # maint means "repo server accepts split brain pushes"
587 # hist means "repo server may have old pushes without new tag"
588 # ("hist" is implied by "old")
589 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
590 'dgit-distro.debian.git-check' => 'url',
591 'dgit-distro.debian.git-check-suffix' => '/info/refs',
592 'dgit-distro.debian.new-private-pushers' => 't',
593 'dgit-distro.debian/push.git-url' => '',
594 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
595 'dgit-distro.debian/push.git-user-force' => 'dgit',
596 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
597 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
598 'dgit-distro.debian/push.git-create' => 'true',
599 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
600 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
601 # 'dgit-distro.debian.archive-query-tls-key',
602 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
603 # ^ this does not work because curl is broken nowadays
604 # Fixing #790093 properly will involve providing the key
605 # in some package and maybe updating these paths.
607 # 'dgit-distro.debian.archive-query-tls-curl-args',
608 # '--ca-path=/etc/ssl/ca-debian',
609 # ^ this is a workaround but works (only) on DSA-administered machines
610 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
611 'dgit-distro.debian.git-url-suffix' => '',
612 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
613 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
614 'dgit-distro.debian-security.archive-query' => 'aptget:',
615 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
616 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
617 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
618 'dgit-distro.debian-security.nominal-distro' => 'debian',
619 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
620 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
621 'dgit-distro.ubuntu.git-check' => 'false',
622 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
623 'dgit-distro.test-dummy.ssh' => "$td/ssh",
624 'dgit-distro.test-dummy.username' => "alice",
625 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
626 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
627 'dgit-distro.test-dummy.git-url' => "$td/git",
628 'dgit-distro.test-dummy.git-host' => "git",
629 'dgit-distro.test-dummy.git-path' => "$td/git",
630 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
631 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
632 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
633 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
637 our @gitcfgsources = qw(cmdline local global system);
639 sub git_slurp_config () {
640 local ($debuglevel) = $debuglevel-2;
643 # This algorithm is a bit subtle, but this is needed so that for
644 # options which we want to be single-valued, we allow the
645 # different config sources to override properly. See #835858.
646 foreach my $src (@gitcfgsources) {
647 next if $src eq 'cmdline';
648 # we do this ourselves since git doesn't handle it
650 my @cmd = (@git, qw(config -z --get-regexp), "--$src", qw(.*));
653 open GITS, "-|", @cmd or die $!;
656 printdebug "=> ", (messagequote $_), "\n";
658 push @{ $gitcfgs{$src}{$`} }, $'; #';
662 or ($!==0 && $?==256)
667 sub git_get_config ($) {
669 foreach my $src (@gitcfgsources) {
670 my $l = $gitcfgs{$src}{$c};
671 croak "$l $c" if $l && !ref $l;
672 printdebug"C $c ".(defined $l ?
673 join " ", map { messagequote "'$_'" } @$l :
677 badcfg "multiple values for $c".
678 " (in $src git config)" if @$l > 1;
686 return undef if $c =~ /RETURN-UNDEF/;
687 printdebug "C? $c\n" if $debuglevel >= 5;
688 my $v = git_get_config($c);
689 return $v if defined $v;
690 my $dv = $defcfg{$c};
692 printdebug "CD $c $dv\n" if $debuglevel >= 4;
696 badcfg "need value for one of: @_\n".
697 "$us: distro or suite appears not to be (properly) supported";
700 sub access_basedistro__noalias () {
701 if (defined $idistro) {
704 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
705 return $def if defined $def;
706 foreach my $src (@gitcfgsources, 'internal') {
707 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
709 foreach my $k (keys %$kl) {
710 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
712 next unless match_glob $dpat, $isuite;
716 return cfg("dgit.default.distro");
720 sub access_basedistro () {
721 my $noalias = access_basedistro__noalias();
722 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
723 return $canon // $noalias;
726 sub access_nomdistro () {
727 my $base = access_basedistro();
728 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
729 $r =~ m/^$distro_re$/ or badcfg
730 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
734 sub access_quirk () {
735 # returns (quirk name, distro to use instead or undef, quirk-specific info)
736 my $basedistro = access_basedistro();
737 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
739 if (defined $backports_quirk) {
740 my $re = $backports_quirk;
741 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
743 $re =~ s/\%/([-0-9a-z_]+)/
744 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
745 if ($isuite =~ m/^$re$/) {
746 return ('backports',"$basedistro-backports",$1);
749 return ('none',undef);
754 sub parse_cfg_bool ($$$) {
755 my ($what,$def,$v) = @_;
758 $v =~ m/^[ty1]/ ? 1 :
759 $v =~ m/^[fn0]/ ? 0 :
760 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
763 sub access_forpush_config () {
764 my $d = access_basedistro();
768 parse_cfg_bool('new-private-pushers', 0,
769 cfg("dgit-distro.$d.new-private-pushers",
772 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
775 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
776 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
777 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
778 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
781 sub access_forpush () {
782 $access_forpush //= access_forpush_config();
783 return $access_forpush;
787 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
788 badcfg "pushing but distro is configured readonly"
789 if access_forpush_config() eq '0';
791 $supplementary_message = <<'END' unless $we_are_responder;
792 Push failed, before we got started.
793 You can retry the push, after fixing the problem, if you like.
795 parseopts_late_defaults();
799 parseopts_late_defaults();
802 sub supplementary_message ($) {
804 if (!$we_are_responder) {
805 $supplementary_message = $msg;
807 } elsif ($protovsn >= 3) {
808 responder_send_command "supplementary-message ".length($msg)
810 print PO $msg or die $!;
814 sub access_distros () {
815 # Returns list of distros to try, in order
818 # 0. `instead of' distro name(s) we have been pointed to
819 # 1. the access_quirk distro, if any
820 # 2a. the user's specified distro, or failing that } basedistro
821 # 2b. the distro calculated from the suite }
822 my @l = access_basedistro();
824 my (undef,$quirkdistro) = access_quirk();
825 unshift @l, $quirkdistro;
826 unshift @l, $instead_distro;
827 @l = grep { defined } @l;
829 push @l, access_nomdistro();
831 if (access_forpush()) {
832 @l = map { ("$_/push", $_) } @l;
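# when pushing, each distro's "/push" variant is consulted first, e.g.
# dgit-distro.debian/push.git-url before dgit-distro.debian.git-url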
837 sub access_cfg_cfgs (@) {
840 # The nesting of these loops determines the search order. We put
841 # the key loop on the outside so that we search all the distros
842 # for each key, before going on to the next key. That means that
843 # if access_cfg is called with a more specific, and then a less
844 # specific, key, an earlier distro can override the less specific
845 # without necessarily overriding any more specific keys. (If the
846 # distro wants to override the more specific keys it can simply do
847 # so; whereas if we did the loop the other way around, it would be
848 # impossible for an earlier distro to override a less specific
849 # key but not the more specific ones without restating the unknown
850 # values of the more specific keys.)
853 # We have to deal with RETURN-UNDEF specially, so that we don't
854 # terminate the search prematurely.
856 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
859 foreach my $d (access_distros()) {
860 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
862 push @cfgs, map { "dgit.default.$_" } @realkeys;
869 my (@cfgs) = access_cfg_cfgs(@keys);
870 my $value = cfg(@cfgs);
874 sub access_cfg_bool ($$) {
875 my ($def, @keys) = @_;
876 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
879 sub string_to_ssh ($) {
881 if ($spec =~ m/\s/) {
882 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
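# e.g. (illustrative): a configured ssh of "ssh -o BatchMode=yes" becomes
#   sh -ec 'exec ssh -o BatchMode=yes "$@"' x <args>...
# so the configured string undergoes shell word-splitting exactly once.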
888 sub access_cfg_ssh () {
889 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
890 if (!defined $gitssh) {
893 return string_to_ssh $gitssh;
897 sub access_runeinfo ($) {
899 return ": dgit ".access_basedistro()." $info ;";
902 sub access_someuserhost ($) {
904 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
905 defined($user) && length($user) or
906 $user = access_cfg("$some-user",'username');
907 my $host = access_cfg("$some-host");
908 return length($user) ? "$user\@$host" : $host;
911 sub access_gituserhost () {
912 return access_someuserhost('git');
915 sub access_giturl (;$) {
917 my $url = access_cfg('git-url','RETURN-UNDEF');
920 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
921 return undef unless defined $proto;
924 access_gituserhost().
925 access_cfg('git-path');
927 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
930 return "$url/$package$suffix";
933 sub parsecontrolfh ($$;$) {
934 my ($fh, $desc, $allowsigned) = @_;
935 our $dpkgcontrolhash_noissigned;
938 my %opts = ('name' => $desc);
939 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
940 $c = Dpkg::Control::Hash->new(%opts);
941 $c->parse($fh,$desc) or die "parsing of $desc failed";
942 last if $allowsigned;
943 last if $dpkgcontrolhash_noissigned;
944 my $issigned= $c->get_option('is_pgp_signed');
945 if (!defined $issigned) {
946 $dpkgcontrolhash_noissigned= 1;
947 seek $fh, 0,0 or die "seek $desc: $!";
948 } elsif ($issigned) {
949 fail "control file $desc is (already) PGP-signed. ".
950 " Note that dgit push needs to modify the .dsc and then".
951 " do the signature itself";
960 my ($file, $desc, $allowsigned) = @_;
961 my $fh = new IO::Handle;
962 open $fh, '<', $file or die "$file: $!";
963 my $c = parsecontrolfh($fh,$desc,$allowsigned);
964 $fh->error and die $!;
970 my ($dctrl,$field) = @_;
971 my $v = $dctrl->{$field};
972 return $v if defined $v;
973 fail "missing field $field in ".$dctrl->get_option('name');
977 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
978 my $p = new IO::Handle;
979 my @cmd = (qw(dpkg-parsechangelog), @_);
980 open $p, '-|', @cmd or die $!;
982 $?=0; $!=0; close $p or failedcmd @cmd;
986 sub commit_getclogp ($) {
987 # Returns the parsed changelog hashref for a particular commit
989 our %commit_getclogp_memo;
990 my $memo = $commit_getclogp_memo{$objid};
991 return $memo if $memo;
993 my $mclog = ".git/dgit/clog-$objid";
994 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
995 "$objid:debian/changelog";
996 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
1001 defined $d or fail "getcwd failed: $!";
1005 sub parse_dscdata () {
1006 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1007 printdebug Dumper($dscdata) if $debuglevel>1;
1008 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1009 printdebug Dumper($dsc) if $debuglevel>1;
1014 sub archive_query ($;@) {
1015 my ($method) = shift @_;
1016 fail "this operation does not support multiple comma-separated suites"
1018 my $query = access_cfg('archive-query','RETURN-UNDEF');
1019 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1022 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1025 sub archive_query_prepend_mirror {
1026 my $m = access_cfg('mirror');
1027 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1030 sub pool_dsc_subpath ($$) {
1031 my ($vsn,$component) = @_; # $package is implicit arg
1032 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1033 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1036 sub cfg_apply_map ($$$) {
1037 my ($varref, $what, $mapspec) = @_;
1038 return unless $mapspec;
1040 printdebug "config $what EVAL{ $mapspec; }\n";
1042 eval "package Dgit::Config; $mapspec;";
1047 #---------- `ftpmasterapi' archive query method (nascent) ----------
1049 sub archive_api_query_cmd ($) {
1051 my @cmd = (@curl, qw(-sS));
1052 my $url = access_cfg('archive-query-url');
1053 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1055 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1056 foreach my $key (split /\:/, $keys) {
1057 $key =~ s/\%HOST\%/$host/g;
1059 fail "for $url: stat $key: $!" unless $!==ENOENT;
1062 fail "config requested specific TLS key but do not know".
1063 " how to get curl to use exactly that EE key ($key)";
1064 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1065 # # Sadly the above line does not work because of changes
1066 # # to gnutls. The real fix for #790093 may involve
1067 # # new curl options.
1070 # Fixing #790093 properly will involve providing a value
1071 # for this on clients.
1072 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1073 push @cmd, split / /, $kargs if defined $kargs;
1075 push @cmd, $url.$subpath;
1079 sub api_query ($$;$) {
1081 my ($data, $subpath, $ok404) = @_;
1082 badcfg "ftpmasterapi archive query method takes no data part"
1084 my @cmd = archive_api_query_cmd($subpath);
1085 my $url = $cmd[$#cmd];
1086 push @cmd, qw(-w %{http_code});
1087 my $json = cmdoutput @cmd;
1088 unless ($json =~ s/\d+\d+\d$//) {
1089 failedcmd_report_cmd undef, @cmd;
1090 fail "curl failed to print 3-digit HTTP code";
1093 return undef if $code eq '404' && $ok404;
1094 fail "fetch of $url gave HTTP code $code"
1095 unless $url =~ m#^file://# or $code =~ m/^2/;
1096 return decode_json($json);
1099 sub canonicalise_suite_ftpmasterapi {
1100 my ($proto,$data) = @_;
1101 my $suites = api_query($data, 'suites');
1103 foreach my $entry (@$suites) {
1105 my $v = $entry->{$_};
1106 defined $v && $v eq $isuite;
1107 } qw(codename name);
1108 push @matched, $entry;
1110 fail "unknown suite $isuite" unless @matched;
1113 @matched==1 or die "multiple matches for suite $isuite\n";
1114 $cn = "$matched[0]{codename}";
1115 defined $cn or die "suite $isuite info has no codename\n";
1116 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1118 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1123 sub archive_query_ftpmasterapi {
1124 my ($proto,$data) = @_;
1125 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1127 my $digester = Digest::SHA->new(256);
1128 foreach my $entry (@$info) {
1130 my $vsn = "$entry->{version}";
1131 my ($ok,$msg) = version_check $vsn;
1132 die "bad version: $msg\n" unless $ok;
1133 my $component = "$entry->{component}";
1134 $component =~ m/^$component_re$/ or die "bad component";
1135 my $filename = "$entry->{filename}";
1136 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1137 or die "bad filename";
1138 my $sha256sum = "$entry->{sha256sum}";
1139 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1140 push @rows, [ $vsn, "/pool/$component/$filename",
1141 $digester, $sha256sum ];
1143 die "bad ftpmaster api response: $@\n".Dumper($entry)
1146 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1147 return archive_query_prepend_mirror @rows;
1150 sub file_in_archive_ftpmasterapi {
1151 my ($proto,$data,$filename) = @_;
1152 my $pat = $filename;
1155 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1156 my $info = api_query($data, "file_in_archive/$pat", 1);
1159 #---------- `aptget' archive query method ----------
1162 our $aptget_releasefile;
1163 our $aptget_configpath;
1165 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1166 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1168 sub aptget_cache_clean {
1169 runcmd_ordryrun_local qw(sh -ec),
1170 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1174 sub aptget_lock_acquire () {
1175 my $lockfile = "$aptget_base/lock";
1176 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1177 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1180 sub aptget_prep ($) {
1182 return if defined $aptget_base;
1184 badcfg "aptget archive query method takes no data part"
1187 my $cache = $ENV{XDG_CACHE_HOME} // "$ENV{HOME}/.cache";
1190 ensuredir "$cache/dgit";
1192 access_cfg('aptget-cachekey','RETURN-UNDEF')
1193 // access_nomdistro();
1195 $aptget_base = "$cache/dgit/aptget";
1196 ensuredir $aptget_base;
1198 my $quoted_base = $aptget_base;
1199 die "$quoted_base contains bad chars, cannot continue"
1200 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1202 ensuredir $aptget_base;
1204 aptget_lock_acquire();
1206 aptget_cache_clean();
1208 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1209 my $sourceslist = "source.list#$cachekey";
1211 my $aptsuites = $isuite;
1212 cfg_apply_map(\$aptsuites, 'suite map',
1213 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1215 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1216 printf SRCS "deb-src %s %s %s\n",
1217 access_cfg('mirror'),
1219 access_cfg('aptget-components')
1222 ensuredir "$aptget_base/cache";
1223 ensuredir "$aptget_base/lists";
1225 open CONF, ">", $aptget_configpath or die $!;
1227 Debug::NoLocking "true";
1228 APT::Get::List-Cleanup "false";
1229 #clear APT::Update::Post-Invoke-Success;
1230 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1231 Dir::State::Lists "$quoted_base/lists";
1232 Dir::Etc::preferences "$quoted_base/preferences";
1233 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1234 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1237 foreach my $key (qw(
1240 Dir::Cache::Archives
1241 Dir::Etc::SourceParts
1242 Dir::Etc::preferencesparts
1244 ensuredir "$aptget_base/$key";
1245 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1248 my $oldatime = (time // die $!) - 1;
1249 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1250 next unless stat_exists $oldlist;
1251 my ($mtime) = (stat _)[9];
1252 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1255 runcmd_ordryrun_local aptget_aptget(), qw(update);
1258 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1259 next unless stat_exists $oldlist;
1260 my ($atime) = (stat _)[8];
1261 next if $atime == $oldatime;
1262 push @releasefiles, $oldlist;
1264 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1265 @releasefiles = @inreleasefiles if @inreleasefiles;
1266 die "apt updated wrong number of Release files (@releasefiles), erk"
1267 unless @releasefiles == 1;
1269 ($aptget_releasefile) = @releasefiles;
1272 sub canonicalise_suite_aptget {
1273 my ($proto,$data) = @_;
1276 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1278 foreach my $name (qw(Codename Suite)) {
1279 my $val = $release->{$name};
1281 printdebug "release file $name: $val\n";
1282 $val =~ m/^$suite_re$/o or fail
1283 "Release file ($aptget_releasefile) specifies intolerable $name";
1284 cfg_apply_map(\$val, 'suite rmap',
1285 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1292 sub archive_query_aptget {
1293 my ($proto,$data) = @_;
1296 ensuredir "$aptget_base/source";
1297 foreach my $old (<$aptget_base/source/*.dsc>) {
1298 unlink $old or die "$old: $!";
1301 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1302 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1303 # avoids apt-get source failing with ambiguous error code
1305 runcmd_ordryrun_local
1306 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1307 aptget_aptget(), qw(--download-only --only-source source), $package;
1309 my @dscs = <$aptget_base/source/*.dsc>;
1310 fail "apt-get source did not produce a .dsc" unless @dscs;
1311 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1313 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1316 my $uri = "file://". uri_escape $dscs[0];
1317 $uri =~ s{\%2f}{/}gi;
1318 return [ (getfield $pre_dsc, 'Version'), $uri ];
1321 #---------- `dummyapicat' archive query method ----------
1323 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1324 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1326 sub file_in_archive_dummycatapi ($$$) {
1327 my ($proto,$data,$filename) = @_;
1328 my $mirror = access_cfg('mirror');
1329 $mirror =~ s#^file://#/# or die "$mirror ?";
1331 my @cmd = (qw(sh -ec), '
1333 find -name "$2" -print0 |
1335 ', qw(x), $mirror, $filename);
1336 debugcmd "-|", @cmd;
1337 open FIA, "-|", @cmd or die $!;
1340 printdebug "| $_\n";
1341 m/^(\w+) (\S+)$/ or die "$_ ?";
1342 push @out, { sha256sum => $1, filename => $2 };
1344 close FIA or die failedcmd @cmd;
1348 #---------- `madison' archive query method ----------
1350 sub archive_query_madison {
1351 return archive_query_prepend_mirror
1352 map { [ @$_[0..1] ] } madison_get_parse(@_);
1355 sub madison_get_parse {
1356 my ($proto,$data) = @_;
1357 die unless $proto eq 'madison';
1358 if (!length $data) {
1359 $data= access_cfg('madison-distro','RETURN-UNDEF');
1360 $data //= access_basedistro();
1362 $rmad{$proto,$data,$package} ||= cmdoutput
1363 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1364 my $rmad = $rmad{$proto,$data,$package};
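# rmadison output lines look roughly like
#    hello | 2.10-1 | unstable | source
# (the third column may be "suite/component"); the regexp below picks out
# package, version, suite, optional component and architecture.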
1367 foreach my $l (split /\n/, $rmad) {
1368 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1369 \s*( [^ \t|]+ )\s* \|
1370 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1371 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1372 $1 eq $package or die "$rmad $package ?";
1379 $component = access_cfg('archive-query-default-component');
1381 $5 eq 'source' or die "$rmad ?";
1382 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1384 return sort { -version_compare($a->[0],$b->[0]); } @out;
1387 sub canonicalise_suite_madison {
1388 # madison canonicalises for us
1389 my @r = madison_get_parse(@_);
1391 "unable to canonicalise suite using package $package".
1392 " which does not appear to exist in suite $isuite;".
1393 " --existing-package may help";
1397 sub file_in_archive_madison { return undef; }
1399 #---------- `sshpsql' archive query method ----------
1402 my ($data,$runeinfo,$sql) = @_;
1403 if (!length $data) {
1404 $data= access_someuserhost('sshpsql').':'.
1405 access_cfg('sshpsql-dbname');
1407 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1408 my ($userhost,$dbname) = ($`,$'); #';
1410 my @cmd = (access_cfg_ssh, $userhost,
1411 access_runeinfo("ssh-psql $runeinfo").
1412 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1413 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1415 open P, "-|", @cmd or die $!;
1418 printdebug(">|$_|\n");
1421 $!=0; $?=0; close P or failedcmd @cmd;
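# psql -A prints a header row, one |-separated line per result row, and a
# final "(N rows)" summary; the summary is checked and stripped, and the
# header is used only to establish the expected column count.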
1423 my $nrows = pop @rows;
1424 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1425 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1426 @rows = map { [ split /\|/, $_ ] } @rows;
1427 my $ncols = scalar @{ shift @rows };
1428 die if grep { scalar @$_ != $ncols } @rows;
1432 sub sql_injection_check {
1433 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1436 sub archive_query_sshpsql ($$) {
1437 my ($proto,$data) = @_;
1438 sql_injection_check $isuite, $package;
1439 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1440 SELECT source.version, component.name, files.filename, files.sha256sum
1442 JOIN src_associations ON source.id = src_associations.source
1443 JOIN suite ON suite.id = src_associations.suite
1444 JOIN dsc_files ON dsc_files.source = source.id
1445 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1446 JOIN component ON component.id = files_archive_map.component_id
1447 JOIN files ON files.id = dsc_files.file
1448 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1449 AND source.source='$package'
1450 AND files.filename LIKE '%.dsc';
1452 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1453 my $digester = Digest::SHA->new(256);
1455 my ($vsn,$component,$filename,$sha256sum) = @$_;
1456 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1458 return archive_query_prepend_mirror @rows;
1461 sub canonicalise_suite_sshpsql ($$) {
1462 my ($proto,$data) = @_;
1463 sql_injection_check $isuite;
1464 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1465 SELECT suite.codename
1466 FROM suite where suite_name='$isuite' or codename='$isuite';
1468 @rows = map { $_->[0] } @rows;
1469 fail "unknown suite $isuite" unless @rows;
1470 die "ambiguous $isuite: @rows ?" if @rows>1;
1474 sub file_in_archive_sshpsql ($$$) { return undef; }
1476 #---------- `dummycat' archive query method ----------
1478 sub canonicalise_suite_dummycat ($$) {
1479 my ($proto,$data) = @_;
1480 my $dpath = "$data/suite.$isuite";
1481 if (!open C, "<", $dpath) {
1482 $!==ENOENT or die "$dpath: $!";
1483 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1487 chomp or die "$dpath: $!";
1489 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1493 sub archive_query_dummycat ($$) {
1494 my ($proto,$data) = @_;
1495 canonicalise_suite();
1496 my $dpath = "$data/package.$csuite.$package";
1497 if (!open C, "<", $dpath) {
1498 $!==ENOENT or die "$dpath: $!";
1499 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1507 printdebug "dummycat query $csuite $package $dpath | $_\n";
1508 my @row = split /\s+/, $_;
1509 @row==2 or die "$dpath: $_ ?";
1512 C->error and die "$dpath: $!";
1514 return archive_query_prepend_mirror
1515 sort { -version_compare($a->[0],$b->[0]); } @rows;
1518 sub file_in_archive_dummycat () { return undef; }
1520 #---------- tag format handling ----------
1522 sub access_cfg_tagformats () {
1523 split /\,/, access_cfg('dgit-tag-format');
1526 sub access_cfg_tagformats_can_splitbrain () {
1527 my %y = map { $_ => 1 } access_cfg_tagformats;
1528 foreach my $needtf (qw(new maint)) {
1529 next if $y{$needtf};
1535 sub need_tagformat ($$) {
1536 my ($fmt, $why) = @_;
1537 fail "need to use tag format $fmt ($why) but also need".
1538 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1539 " - no way to proceed"
1540 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1541 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1544 sub select_tagformat () {
1546 return if $tagformatfn && !$tagformat_want;
1547 die 'bug' if $tagformatfn && $tagformat_want;
1548 # ... $tagformat_want assigned after previous select_tagformat
1550 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1551 printdebug "select_tagformat supported @supported\n";
1553 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1554 printdebug "select_tagformat specified @$tagformat_want\n";
1556 my ($fmt,$why,$override) = @$tagformat_want;
1558 fail "target distro supports tag formats @supported".
1559 " but have to use $fmt ($why)"
1561 or grep { $_ eq $fmt } @supported;
1563 $tagformat_want = undef;
1565 $tagformatfn = ${*::}{"debiantag_$fmt"};
1567 fail "trying to use unknown tag format \`$fmt' ($why) !"
1568 unless $tagformatfn;
1571 #---------- archive query entrypoints and rest of program ----------
1573 sub canonicalise_suite () {
1574 return if defined $csuite;
1575 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1576 $csuite = archive_query('canonicalise_suite');
1577 if ($isuite ne $csuite) {
1578 progress "canonical suite name for $isuite is $csuite";
1580 progress "canonical suite name is $csuite";
1584 sub get_archive_dsc () {
1585 canonicalise_suite();
1586 my @vsns = archive_query('archive_query');
1587 foreach my $vinfo (@vsns) {
1588 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1589 $dscurl = $vsn_dscurl;
1590 $dscdata = url_get($dscurl);
1592 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1597 $digester->add($dscdata);
1598 my $got = $digester->hexdigest();
1600 fail "$dscurl has hash $got but".
1601 " archive told us to expect $digest";
1604 my $fmt = getfield $dsc, 'Format';
1605 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1606 "unsupported source format $fmt, sorry";
1608 $dsc_checked = !!$digester;
1609 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1613 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1616 sub check_for_git ();
1617 sub check_for_git () {
1619 my $how = access_cfg('git-check');
1620 if ($how eq 'ssh-cmd') {
1622 (access_cfg_ssh, access_gituserhost(),
1623 access_runeinfo("git-check $package").
1624 " set -e; cd ".access_cfg('git-path').";".
1625 " if test -d $package.git; then echo 1; else echo 0; fi");
1626 my $r= cmdoutput @cmd;
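# $r is expected to be "0" (no repo), "1" (repo exists), or
# "divert <distro>", meaning we should retry using that other
# distro's configuration.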
1627 if (defined $r and $r =~ m/^divert (\w+)$/) {
1629 my ($usedistro,) = access_distros();
1630 # NB that if we are pushing, $usedistro will be $distro/push
1631 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1632 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1633 progress "diverting to $divert (using config for $instead_distro)";
1634 return check_for_git();
1636 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1638 } elsif ($how eq 'url') {
1639 my $prefix = access_cfg('git-check-url','git-url');
1640 my $suffix = access_cfg('git-check-suffix','git-suffix',
1641 'RETURN-UNDEF') // '.git';
1642 my $url = "$prefix/$package$suffix";
1643 my @cmd = (@curl, qw(-sS -I), $url);
1644 my $result = cmdoutput @cmd;
1645 $result =~ s/^\S+ 200 .*\n\r?\n//;
1646 # curl -sS -I with https_proxy prints
1647 # HTTP/1.0 200 Connection established
1648 $result =~ m/^\S+ (404|200) /s or
1649 fail "unexpected results from git check query - ".
1650 Dumper($prefix, $result);
1652 if ($code eq '404') {
1654 } elsif ($code eq '200') {
1659 } elsif ($how eq 'true') {
1661 } elsif ($how eq 'false') {
1664 badcfg "unknown git-check \`$how'";
1668 sub create_remote_git_repo () {
1669 my $how = access_cfg('git-create');
1670 if ($how eq 'ssh-cmd') {
1672 (access_cfg_ssh, access_gituserhost(),
1673 access_runeinfo("git-create $package").
1674 "set -e; cd ".access_cfg('git-path').";".
1675 " cp -a _template $package.git");
1676 } elsif ($how eq 'true') {
1679 badcfg "unknown git-create \`$how'";
1683 our ($dsc_hash,$lastpush_mergeinput);
1684 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1686 our $ud = '.git/dgit/unpack';
1696 sub mktree_in_ud_here () {
1697 runcmd qw(git init -q);
1698 runcmd qw(git config gc.auto 0);
1699 rmtree('.git/objects');
1700 symlink '../../../../objects','.git/objects' or die $!;
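# the unpack tree shares the invoking repository's object store via this
# symlink, so trees and commits made here need not be copied back.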
1703 sub git_write_tree () {
1704 my $tree = cmdoutput @git, qw(write-tree);
1705 $tree =~ m/^\w+$/ or die "$tree ?";
1709 sub git_add_write_tree () {
1710 runcmd @git, qw(add -Af .);
1711 return git_write_tree();
1714 sub remove_stray_gits ($) {
1716 my @gitscmd = qw(find -name .git -prune -print0);
1717 debugcmd "|",@gitscmd;
1718 open GITS, "-|", @gitscmd or die $!;
1723 print STDERR "$us: warning: removing from $what: ",
1724 (messagequote $_), "\n";
1728 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1731 sub mktree_in_ud_from_only_subdir ($;$) {
1732 my ($what,$raw) = @_;
1734 # changes into the subdir
1736 die "expected one subdir but found @dirs ?" unless @dirs==1;
1737 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1741 remove_stray_gits($what);
1742 mktree_in_ud_here();
1744 my ($format, $fopts) = get_source_format();
1745 if (madformat($format)) {
1750 my $tree=git_add_write_tree();
1751 return ($tree,$dir);
1754 our @files_csum_info_fields =
1755 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1756 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1757 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
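# each entry is [ .dsc/.changes field name, Digest module, constructor
# call, field name used in archive ("file_in_archive") query results ]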
1759 sub dsc_files_info () {
1760 foreach my $csumi (@files_csum_info_fields) {
1761 my ($fname, $module, $method) = @$csumi;
1762 my $field = $dsc->{$fname};
1763 next unless defined $field;
1764 eval "use $module; 1;" or die $@;
1766 foreach (split /\n/, $field) {
1768 m/^(\w+) (\d+) (\S+)$/ or
1769 fail "could not parse .dsc $fname line \`$_'";
1770 my $digester = eval "$module"."->$method;" or die $@;
1775 Digester => $digester,
1780 fail "missing any supported Checksums-* or Files field in ".
1781 $dsc->get_option('name');
1785 map { $_->{Filename} } dsc_files_info();
1788 sub files_compare_inputs (@) {
1793 my $showinputs = sub {
1794 return join "; ", map { $_->get_option('name') } @$inputs;
1797 foreach my $in (@$inputs) {
1799 my $in_name = $in->get_option('name');
1801 printdebug "files_compare_inputs $in_name\n";
1803 foreach my $csumi (@files_csum_info_fields) {
1804 my ($fname) = @$csumi;
1805 printdebug "files_compare_inputs $in_name $fname\n";
1807 my $field = $in->{$fname};
1808 next unless defined $field;
1811 foreach (split /\n/, $field) {
1814 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1815 fail "could not parse $in_name $fname line \`$_'";
1817 printdebug "files_compare_inputs $in_name $fname $f\n";
1821 my $re = \ $record{$f}{$fname};
1823 $fchecked{$f}{$in_name} = 1;
1825 fail "hash or size of $f varies in $fname fields".
1826 " (between: ".$showinputs->().")";
1831 @files = sort @files;
1832 $expected_files //= \@files;
1833 "@$expected_files" eq "@files" or
1834 fail "file list in $in_name varies between hash fields!";
1837 fail "$in_name has no files list field(s)";
1839 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1842 grep { keys %$_ == @$inputs-1 } values %fchecked
1843 or fail "no file appears in all file lists".
1844 " (looked in: ".$showinputs->().")";
1847 sub is_orig_file_in_dsc ($$) {
1848 my ($f, $dsc_files_info) = @_;
1849 return 0 if @$dsc_files_info <= 1;
1850 # One file means no origs, and the filename doesn't have a "what
1851 # part of dsc" component. (Consider versions ending `.orig'.)
1852 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1856 sub is_orig_file_of_vsn ($$) {
1857 my ($f, $upstreamvsn) = @_;
1858 my $base = srcfn $upstreamvsn, '';
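# e.g. (illustrative): $package "hello", $upstreamvsn "2.10" accepts
# "hello_2.10.orig.tar.gz" and "hello_2.10.orig-docs.tar.xz".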
1859 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1863 sub changes_update_origs_from_dsc ($$$$) {
1864 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1866 printdebug "checking origs needed ($upstreamvsn)...\n";
1867 $_ = getfield $changes, 'Files';
1868 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1869 fail "cannot find section/priority from .changes Files field";
1870 my $placementinfo = $1;
1872 printdebug "checking origs needed placement '$placementinfo'...\n";
1873 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1874 $l =~ m/\S+$/ or next;
1876 printdebug "origs $file | $l\n";
1877 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1878 printdebug "origs $file is_orig\n";
1879 my $have = archive_query('file_in_archive', $file);
1880 if (!defined $have) {
1882 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1888 printdebug "origs $file \$#\$have=$#$have\n";
1889 foreach my $h (@$have) {
1892 foreach my $csumi (@files_csum_info_fields) {
1893 my ($fname, $module, $method, $archivefield) = @$csumi;
1894 next unless defined $h->{$archivefield};
1895 $_ = $dsc->{$fname};
1896 next unless defined;
1897 m/^(\w+) .* \Q$file\E$/m or
1898 fail ".dsc $fname missing entry for $file";
1899 if ($h->{$archivefield} eq $1) {
1903 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1906 die "$file ".Dumper($h)." ?!" if $same && @differ;
1909 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1912 printdebug "origs $file f.same=$found_same".
1913 " #f._differ=$#found_differ\n";
1914 if (@found_differ && !$found_same) {
1916 "archive contains $file with different checksum",
1919 # Now we edit the changes file to add or remove it
1920 foreach my $csumi (@files_csum_info_fields) {
1921 my ($fname, $module, $method, $archivefield) = @$csumi;
1922 next unless defined $changes->{$fname};
1924 # in archive, delete from .changes if it's there
1925 $changed{$file} = "removed" if
1926 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1927 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1928 # not in archive, but it's here in the .changes
1930 my $dsc_data = getfield $dsc, $fname;
1931 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1933 $extra =~ s/ \d+ /$&$placementinfo /
1934 or die "$fname $extra >$dsc_data< ?"
1935 if $fname eq 'Files';
1936 $changes->{$fname} .= "\n". $extra;
1937 $changed{$file} = "added";
1942 foreach my $file (keys %changed) {
1944 "edited .changes for archive .orig contents: %s %s",
1945 $changed{$file}, $file;
1947 my $chtmp = "$changesfile.tmp";
1948 $changes->save($chtmp);
1950 rename $chtmp,$changesfile or die "$changesfile $!";
1952 progress "[new .changes left in $changesfile]";
1955 progress "$changesfile already has appropriate .orig(s) (if any)";
1959 sub make_commit ($) {
1961 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1964 sub make_commit_text ($) {
1967 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1969 print Dumper($text) if $debuglevel > 1;
1970 my $child = open2($out, $in, @cmd) or die $!;
1973 print $in $text or die $!;
1974 close $in or die $!;
1976 $h =~ m/^\w+$/ or die;
1978 printdebug "=> $h\n";
1981 (waitpid $child, 0) == $child or die "$child $!";
1982 $? and failedcmd @cmd;
1986 sub clogp_authline ($) {
1988 my $author = getfield $clogp, 'Maintainer';
1989 $author =~ s#,.*##ms;
1990 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1991 my $authline = "$author $date";
1992 $authline =~ m/$git_authline_re/o or
1993 fail "unexpected commit author line format \`$authline'".
1994 " (was generated from changelog Maintainer field)";
1995 return ($1,$2,$3) if wantarray;
1999 sub vendor_patches_distro ($$) {
2000 my ($checkdistro, $what) = @_;
2001 return unless defined $checkdistro;
2003 my $series = "debian/patches/\L$checkdistro\E.series";
2004 printdebug "checking for vendor-specific $series ($what)\n";
2006 if (!open SERIES, "<", $series) {
2007 die "$series $!" unless $!==ENOENT;
2016 Unfortunately, this source package uses a feature of dpkg-source where
2017 the same source package unpacks to different source code on different
2018 distros. dgit cannot safely operate on such packages on affected
2019 distros, because the meaning of source packages is not stable.
2021 Please ask the distro/maintainer to remove the distro-specific series
2022 files and use a different technique (if necessary, uploading actually
2023 different packages, if different distros are supposed to have
2027 fail "Found active distro-specific series file for".
2028 " $checkdistro ($what): $series, cannot continue";
2030 die "$series $!" if SERIES->error;
2034 sub check_for_vendor_patches () {
2035 # This dpkg-source feature doesn't seem to be documented anywhere!
2036 # But it can be found in the changelog (reformatted):
2038 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2039 # Author: Raphael Hertzog <hertzog@debian.org>
2040 # Date: Sun Oct 3 09:36:48 2010 +0200
2042 # dpkg-source: correctly create .pc/.quilt_series with alternate
2045 # If you have debian/patches/ubuntu.series and you were
2046 # unpacking the source package on ubuntu, quilt was still
2047 # directed to debian/patches/series instead of
2048 # debian/patches/ubuntu.series.
2050 # debian/changelog | 3 +++
2051 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2052 # 2 files changed, 6 insertions(+), 1 deletion(-)
2055 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2056 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2057 "Dpkg::Vendor \`current vendor'");
2058 vendor_patches_distro(access_basedistro(),
2059 "(base) distro being accessed");
2060 vendor_patches_distro(access_nomdistro(),
2061 "(nominal) distro being accessed");
2064 sub generate_commits_from_dsc () {
2065 # See big comment in fetch_from_archive, below.
2066 # See also README.dsc-import.
2070 my @dfi = dsc_files_info();
2071 foreach my $fi (@dfi) {
2072 my $f = $fi->{Filename};
2073 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2074 my $upper_f = "../../../../$f";
2076 printdebug "considering reusing $f: ";
2078 if (link_ltarget "$upper_f,fetch", $f) {
2079 printdebug "linked (using ...,fetch).\n";
2080 } elsif ((printdebug "($!) "),
2082 fail "accessing ../$f,fetch: $!";
2083 } elsif (link_ltarget $upper_f, $f) {
2084 printdebug "linked.\n";
2085 } elsif ((printdebug "($!) "),
2087 fail "accessing ../$f: $!";
2089 printdebug "absent.\n";
2093 complete_file_from_dsc('.', $fi, \$refetched)
2096 printdebug "considering saving $f: ";
2098 if (link $f, $upper_f) {
2099 printdebug "linked.\n";
2100 } elsif ((printdebug "($!) "),
2102 fail "saving ../$f: $!";
2103 } elsif (!$refetched) {
2104 printdebug "no need.\n";
2105 } elsif (link $f, "$upper_f,fetch") {
2106 printdebug "linked (using ...,fetch).\n";
2107 } elsif ((printdebug "($!) "),
2109 fail "saving ../$f,fetch: $!";
2111 printdebug "cannot.\n";
2115 # We unpack and record the orig tarballs first, so that we only
2116 # need disk space for one private copy of the unpacked source.
2117 # But we can't make them into commits until we have the metadata
2118 # from the debian/changelog, so we record the tree objects now and
2119 # make them into commits later.
2121 my $upstreamv = upstreamversion $dsc->{version};
2122 my $orig_f_base = srcfn $upstreamv, '';
2124 foreach my $fi (@dfi) {
2125 # We actually import, and record as a commit, every tarball
2126 # (unless there is only one file, in which case there seems
2129 my $f = $fi->{Filename};
2130 printdebug "import considering $f ";
2131 (printdebug "only one dfi\n"), next if @dfi == 1;
2132 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2133 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2137 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2139 printdebug "Y ", (join ' ', map { $_//"(none)" }
2140 $compr_ext, $orig_f_part
2143 my $input = new IO::File $f, '<' or die "$f $!";
2147 if (defined $compr_ext) {
2149 Dpkg::Compression::compression_guess_from_filename $f;
2150 fail "Dpkg::Compression cannot handle file $f in source package"
2151 if defined $compr_ext && !defined $cname;
2153 new Dpkg::Compression::Process compression => $cname;
2154 my @compr_cmd = $compr_proc->get_uncompress_cmdline();
2155 my $compr_fh = new IO::Handle;
2156 my $compr_pid = (open $compr_fh, "-|") // die $!;
2158 open STDIN, "<&", $input or die $!;
2160 die "dgit (child): exec $compr_cmd[0]: $!\n";
2165 rmtree "_unpack-tar";
2166 mkdir "_unpack-tar" or die $!;
2167 my @tarcmd = qw(tar -x -f -
2168 --no-same-owner --no-same-permissions
2169 --no-acls --no-xattrs --no-selinux);
2170 my $tar_pid = fork // die $!;
2172 chdir "_unpack-tar" or die $!;
2173 open STDIN, "<&", $input or die $!;
2175 die "dgit (child): exec $tarcmd[0]: $!";
2177 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2178 !$? or failedcmd @tarcmd;
2181 (@compr_cmd ? failedcmd @compr_cmd
2183 # finally, we have the results in "tarball", but maybe
2184 # with the wrong permissions
2186 runcmd qw(chmod -R +rwX _unpack-tar);
2187 changedir "_unpack-tar";
2188 remove_stray_gits($f);
2189 mktree_in_ud_here();
2191 my ($tree) = git_add_write_tree();
2192 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2193 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2195 printdebug "one subtree $1\n";
2197 printdebug "multiple subtrees\n";
2200 rmtree "_unpack-tar";
2202 my $ent = [ $f, $tree ];
2204 Orig => !!$orig_f_part,
2205 Sort => (!$orig_f_part ? 2 :
2206 $orig_f_part =~ m/-/g ? 1 :
2214 # put any without "_" first (spec is not clear whether files
2215 # are always in the usual order). Tarballs without "_" are
2216 # the main orig or the debian tarball.
2217 $a->{Sort} <=> $b->{Sort} or
2221 my $any_orig = grep { $_->{Orig} } @tartrees;
2223 my $dscfn = "$package.dsc";
2225 my $treeimporthow = 'package';
2227 open D, ">", $dscfn or die "$dscfn: $!";
2228 print D $dscdata or die "$dscfn: $!";
2229 close D or die "$dscfn: $!";
2230 my @cmd = qw(dpkg-source);
2231 push @cmd, '--no-check' if $dsc_checked;
2232 if (madformat $dsc->{format}) {
2233 push @cmd, '--skip-patches';
2234 $treeimporthow = 'unpatched';
2236 push @cmd, qw(-x --), $dscfn;
2239 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2240 if (madformat $dsc->{format}) {
2241 check_for_vendor_patches();
2245 if (madformat $dsc->{format}) {
2246 my @pcmd = qw(dpkg-source --before-build .);
2247 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2249 $dappliedtree = git_add_write_tree();
2252 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2253 debugcmd "|",@clogcmd;
2254 open CLOGS, "-|", @clogcmd or die $!;
2259 printdebug "import clog search...\n";
2262 my $stanzatext = do { local $/=""; <CLOGS>; };
2263 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2264 last if !defined $stanzatext;
2266 my $desc = "package changelog, entry no.$.";
2267 open my $stanzafh, "<", \$stanzatext or die;
2268 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2269 $clogp //= $thisstanza;
2271 printdebug "import clog $thisstanza->{version} $desc...\n";
2273 last if !$any_orig; # we don't need $r1clogp
2275 # We look for the first (most recent) changelog entry whose
2276 # version number is lower than the upstream version of this
2277 # package. Then the last (least recent) previous changelog
2278 # entry is treated as the one which introduced this upstream
2279 # version and used for the synthetic commits for the upstream tarballs.
2282 # One might think that a more sophisticated algorithm would be
2283 # necessary. But: we do not want to scan the whole changelog
2284 # file. Stopping when we see an earlier version, which
2285 # necessarily then is an earlier upstream version, is the only
2286 # realistic way to do that. Then, either the earliest
2287 # changelog entry we have seen so far is indeed the earliest
2288 # upload of this upstream version; or there are only changelog
2289 # entries relating to later upstream versions (which is not
2290 # possible unless the changelog and .dsc disagree about the
2291 # version). Then it remains to choose between the physically
2292 # last entry in the file, and the one with the lowest version
2293 # number. If these are not the same, we guess that the
2294 # versions were created in a non-monotonic order rather than
2295 # that the changelog entries have been misordered.
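# For example (hypothetical version numbers): if $upstreamv is 1.2 and
# the changelog reads 1.2-3, 1.2-2, 1.2-1, 1.1-4, ..., the loop below
# stops at 1.1-4 (the first version below 1.2), leaving $r1clogp set to
# 1.2-1, the entry which introduced upstream 1.2.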
2297 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2299 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2300 $r1clogp = $thisstanza;
2302 printdebug "import clog $r1clogp->{version} becomes r1\n";
2304 die $! if CLOGS->error;
2305 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2307 $clogp or fail "package changelog has no entries!";
2309 my $authline = clogp_authline $clogp;
2310 my $changes = getfield $clogp, 'Changes';
2311 my $cversion = getfield $clogp, 'Version';
2314 $r1clogp //= $clogp; # maybe there's only one entry;
2315 my $r1authline = clogp_authline $r1clogp;
2316 # Strictly, r1authline might now be wrong if it's going to be
2317 # unused because !$any_orig. Whatever.
2319 printdebug "import tartrees authline $authline\n";
2320 printdebug "import tartrees r1authline $r1authline\n";
2322 foreach my $tt (@tartrees) {
2323 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2325 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2328 committer $r1authline
2332 [dgit import orig $tt->{F}]
2340 [dgit import tarball $package $cversion $tt->{F}]
2345 printdebug "import main commit\n";
2347 open C, ">../commit.tmp" or die $!;
2348 print C <<END or die $!;
2351 print C <<END or die $! foreach @tartrees;
2354 print C <<END or die $!;
2360 [dgit import $treeimporthow $package $cversion]
2364 my $rawimport_hash = make_commit qw(../commit.tmp);
2366 if (madformat $dsc->{format}) {
2367 printdebug "import apply patches...\n";
2369 # regularise the state of the working tree so that
2370 # the checkout of $rawimport_hash works nicely.
2371 my $dappliedcommit = make_commit_text(<<END);
2378 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2380 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2382 # We need the answers to be reproducible
2383 my @authline = clogp_authline($clogp);
2384 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2385 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2386 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2387 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2388 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2389 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2391 my $path = $ENV{PATH} or die;
2393 foreach my $use_absurd (qw(0 1)) {
2394 runcmd @git, qw(checkout -q unpa);
2395 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2396 local $ENV{PATH} = $path;
2399 progress "warning: $@";
2400 $path = "$absurdity:$path";
2401 progress "$us: trying slow absurd-git-apply...";
2402 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2407 die "forbid absurd git-apply\n" if $use_absurd
2408 && forceing [qw(import-gitapply-no-absurd)];
2409 die "only absurd git-apply!\n" if !$use_absurd
2410 && forceing [qw(import-gitapply-absurd)];
2412 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2413 local $ENV{PATH} = $path if $use_absurd;
2415 my @showcmd = (gbp_pq, qw(import));
2416 my @realcmd = shell_cmd
2417 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2418 debugcmd "+",@realcmd;
2419 if (system @realcmd) {
2420 die +(shellquote @showcmd).
2422 failedcmd_waitstatus()."\n";
2425 my $gapplied = git_rev_parse('HEAD');
2426 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2427 $gappliedtree eq $dappliedtree or
2429 gbp-pq import and dpkg-source disagree!
2430 gbp-pq import gave commit $gapplied
2431 gbp-pq import gave tree $gappliedtree
2432 dpkg-source --before-build gave tree $dappliedtree
2434 $rawimport_hash = $gapplied;
2439 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2444 progress "synthesised git commit from .dsc $cversion";
2446 my $rawimport_mergeinput = {
2447 Commit => $rawimport_hash,
2448 Info => "Import of source package",
2450 my @output = ($rawimport_mergeinput);
2452 if ($lastpush_mergeinput) {
2453 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2454 my $oversion = getfield $oldclogp, 'Version';
2456 version_compare($oversion, $cversion);
2458 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2459 { Message => <<END, ReverseParents => 1 });
2460 Record $package ($cversion) in archive suite $csuite
2462 } elsif ($vcmp > 0) {
2463 print STDERR <<END or die $!;
2465 Version actually in archive: $cversion (older)
2466 Last version pushed with dgit: $oversion (newer or same)
2469 @output = $lastpush_mergeinput;
2471 # Same version. Use what's in the server git branch,
2472 # discarding our own import. (This could happen if the
2473 # server automatically imports all packages into git.)
2474 @output = $lastpush_mergeinput;
2477 changedir '../../../..';
2482 sub complete_file_from_dsc ($$;$) {
2483 our ($dstdir, $fi, $refetched) = @_;
2484 # Ensures that we have, in $dstdir, the file $fi, with the correct
2485 # contents. (Downloading it from alongside $dscurl if necessary.)
2486 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2487 # and will set $$refetched=1 if it did so (or tried to).
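# A typical call (cf. ensure_we_have_orig, below) is
#     complete_file_from_dsc('..', $fi);
# callers which may legitimately refetch pass a scalar ref as the third
# argument, e.g. complete_file_from_dsc($dstdir, $fi, \$refetched)
# (that particular invocation is illustrative, not taken from here).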
2489 my $f = $fi->{Filename};
2490 my $tf = "$dstdir/$f";
2494 my $checkhash = sub {
2495 open F, "<", "$tf" or die "$tf: $!";
2496 $fi->{Digester}->reset();
2497 $fi->{Digester}->addfile(*F);
2498 F->error and die $!;
2499 my $got = $fi->{Digester}->hexdigest();
2500 return $got eq $fi->{Hash};
2503 if (stat_exists $tf) {
2504 if ($checkhash->()) {
2505 progress "using existing $f";
2509 fail "file $f has hash $got but .dsc".
2510 " demands hash $fi->{Hash} ".
2511 "(perhaps you should delete this file?)";
2513 progress "need to fetch correct version of $f";
2514 unlink $tf or die "$tf $!";
2517 printdebug "$tf does not exist, need to fetch\n";
2521 $furl =~ s{/[^/]+$}{};
2523 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2524 die "$f ?" if $f =~ m#/#;
2525 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2526 return 0 if !act_local();
2529 fail "file $f has hash $got but .dsc".
2530 " demands hash $fi->{Hash} ".
2531 "(got wrong file from archive!)";
2536 sub ensure_we_have_orig () {
2537 my @dfi = dsc_files_info();
2538 foreach my $fi (@dfi) {
2539 my $f = $fi->{Filename};
2540 next unless is_orig_file_in_dsc($f, \@dfi);
2541 complete_file_from_dsc('..', $fi)
2546 #---------- git fetch ----------
2548 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2549 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2551 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2552 # locally fetched refs because they have unhelpful names and clutter
2553 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2554 # whether we have made another local ref which refers to this object).
2556 # (If we deleted them unconditionally, then we might end up
2557 # re-fetching the same git objects each time dgit fetch was run.)
2559 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2560 # in git_fetch_us to fetch the refs in question, and possibly a call
2561 # to lrfetchref_used.
2563 our (%lrfetchrefs_f, %lrfetchrefs_d);
2564 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
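# For example -- hypothetical names, and assuming access_basedistro()
# gives "debian" and server_branch() gives something like "dgit/sid" --
# lrfetchref() would be
#     refs/dgit-fetch/debian/dgit/sid
# %lrfetchrefs_f maps such local refnames to object ids;
# %lrfetchrefs_d records the ones which have been "used up" and can
# therefore be deleted afterwards.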
2566 sub lrfetchref_used ($) {
2567 my ($fullrefname) = @_;
2568 my $objid = $lrfetchrefs_f{$fullrefname};
2569 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2572 sub git_lrfetch_sane {
2573 my ($supplementary, @specs) = @_;
2574 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2575 # at least as regards @specs. Also leave the results in
2576 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2577 # able to clean these up.
2579 # With $supplementary==1, @specs must not contain wildcards
2580 # and we add to our previous fetches (non-atomically).
2582 # This is rather miserable:
2583 # When git fetch --prune is passed a fetchspec ending with a *,
2584 # it does a plausible thing. If there is no * then:
2585 # - it matches subpaths too, even if the supplied refspec
2586 # starts with refs, and behaves completely madly if the source
2587 # has refs/refs/something. (See, for example, Debian #NNNN.)
2588 # - if there is no matching remote ref, it bombs out the whole fetch.
2590 # We want to fetch a fixed ref, and we don't know in advance
2591 # if it exists, so this is not suitable.
2593 # Our workaround is to use git ls-remote. git ls-remote has its
2594 # own quirks. Notably, it has the absurd multi-tail-matching
2595 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2596 # refs/refs/foo etc.
2598 # Also, we want an idempotent snapshot, but we have to make two
2599 # calls to the remote: one to git ls-remote and one to git fetch. The
2600 # solution is to use git ls-remote to obtain a target state, and
2601 # git fetch to try to generate it. If we don't manage to generate
2602 # the target state, we try again.
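# Sketch of the loop below (informal):
#     ls-remote  => %wantr            (the target state)
#     git fetch  => lrfetchrefs/*     (attempt to realise it)
#     then compare lrfetchrefs/* against %wantr, deleting or fixing up
#     stray refs; if an object we wanted did not arrive at all, assume
#     a race with the server and go round again.
# Each ls-remote output line is parsed as "<objid><ws><refname>".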
2604 my $url = access_giturl();
2606 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2608 my $specre = join '|', map {
2611 my $wildcard = $x =~ s/\\\*$/.*/;
2612 die if $wildcard && $supplementary;
2615 printdebug "git_lrfetch_sane specre=$specre\n";
2616 my $wanted_rref = sub {
2618 return m/^(?:$specre)$/;
2621 my $fetch_iteration = 0;
2624 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2625 if (++$fetch_iteration > 10) {
2626 fail "too many iterations trying to get sane fetch!";
2629 my @look = map { "refs/$_" } @specs;
2630 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2634 open GITLS, "-|", @lcmd or die $!;
2636 printdebug "=> ", $_;
2637 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2638 my ($objid,$rrefname) = ($1,$2);
2639 if (!$wanted_rref->($rrefname)) {
2641 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2645 $wantr{$rrefname} = $objid;
2648 close GITLS or failedcmd @lcmd;
2650 # OK, now %want is exactly what we want for refs in @specs
2652 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2653 "+refs/$_:".lrfetchrefs."/$_";
2656 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2658 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2659 runcmd_ordryrun_local @fcmd if @fspecs;
2661 if (!$supplementary) {
2662 %lrfetchrefs_f = ();
2666 git_for_each_ref(lrfetchrefs, sub {
2667 my ($objid,$objtype,$lrefname,$reftail) = @_;
2668 $lrfetchrefs_f{$lrefname} = $objid;
2669 $objgot{$objid} = 1;
2672 if ($supplementary) {
2676 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2677 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2678 if (!exists $wantr{$rrefname}) {
2679 if ($wanted_rref->($rrefname)) {
2681 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2685 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2688 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2689 delete $lrfetchrefs_f{$lrefname};
2693 foreach my $rrefname (sort keys %wantr) {
2694 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2695 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2696 my $want = $wantr{$rrefname};
2697 next if $got eq $want;
2698 if (!defined $objgot{$want}) {
2700 warning: git ls-remote suggests we want $lrefname
2701 warning: and it should refer to $want
2702 warning: but git fetch didn't fetch that object to any relevant ref.
2703 warning: This may be due to a race with someone updating the server.
2704 warning: Will try again...
2706 next FETCH_ITERATION;
2709 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says $want
2711 runcmd_ordryrun_local @git, qw(update-ref -m),
2712 "dgit fetch git fetch fixup", $lrefname, $want;
2713 $lrfetchrefs_f{$lrefname} = $want;
2718 if (defined $csuite) {
2719 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2720 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2721 my ($objid,$objtype,$lrefname,$reftail) = @_;
2722 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2723 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2727 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2728 Dumper(\%lrfetchrefs_f);
2731 sub git_fetch_us () {
2732 # Want to fetch only what we are going to use, unless
2733 # deliberately-not-ff, in which case we must fetch everything.
2735 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2737 (quiltmode_splitbrain
2738 ? (map { $_->('*',access_nomdistro) }
2739 \&debiantag_new, \&debiantag_maintview)
2740 : debiantags('*',access_nomdistro));
2741 push @specs, server_branch($csuite);
2742 push @specs, $rewritemap;
2743 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2745 git_lrfetch_sane 0, @specs;
2748 my @tagpats = debiantags('*',access_nomdistro);
2750 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2751 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2752 printdebug "currently $fullrefname=$objid\n";
2753 $here{$fullrefname} = $objid;
2755 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2756 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2757 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2758 printdebug "offered $lref=$objid\n";
2759 if (!defined $here{$lref}) {
2760 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2761 runcmd_ordryrun_local @upd;
2762 lrfetchref_used $fullrefname;
2763 } elsif ($here{$lref} eq $objid) {
2764 lrfetchref_used $fullrefname;
2767 "Not updating $lref from $here{$lref} to $objid.\n";
2772 #---------- dsc and archive handling ----------
2774 sub mergeinfo_getclogp ($) {
2775 # Ensures that $mi->{Clogp} exists and returns it
2777 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2780 sub mergeinfo_version ($) {
2781 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2784 sub fetch_from_archive_record_1 ($) {
2786 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2787 'DGIT_ARCHIVE', $hash;
2788 cmdoutput @git, qw(log -n2), $hash;
2789 # ... gives git a chance to complain if our commit is malformed
2792 sub fetch_from_archive_record_2 ($) {
2794 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2798 dryrun_report @upd_cmd;
2802 sub parse_dsc_field_def_dsc_distro () {
2803 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2804 dgit.default.distro);
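# parse_dsc_field, below, handles the Dgit field from the archive .dsc.
# Judging by the patterns it matches, the field is either a bare commit
# hash,
#     Dgit: <commit id>
# or a hash followed by distro, tag and clone-url hints,
#     Dgit: <commit id> <distro> <version tag> <url>
# (shapes only -- see the regexps for the exact accepted syntax).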
2807 sub parse_dsc_field ($$) {
2808 my ($dsc, $what) = @_;
2810 foreach my $field (@ourdscfield) {
2811 $f = $dsc->{$field};
2816 progress "$what: NO git hash";
2817 parse_dsc_field_def_dsc_distro();
2818 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2819 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2820 progress "$what: specified git info ($dsc_distro)";
2821 $dsc_hint_tag = [ $dsc_hint_tag ];
2822 } elsif ($f =~ m/^\w+\s*$/) {
2824 parse_dsc_field_def_dsc_distro();
2825 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2827 progress "$what: specified git hash";
2829 fail "$what: invalid Dgit info";
2833 sub resolve_dsc_field_commit ($$) {
2834 my ($already_distro, $already_mapref) = @_;
2836 return unless defined $dsc_hash;
2839 defined $already_mapref &&
2840 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2841 ? $already_mapref : undef;
2845 my ($what, @fetch) = @_;
2847 local $idistro = $dsc_distro;
2848 my $lrf = lrfetchrefs;
2850 if (!$chase_dsc_distro) {
2852 "not chasing .dsc distro $dsc_distro: not fetching $what";
2857 ".dsc names distro $dsc_distro: fetching $what";
2859 my $url = access_giturl();
2860 if (!defined $url) {
2861 defined $dsc_hint_url or fail <<END;
2862 .dsc Dgit metadata is in context of distro $dsc_distro
2863 for which we have no configured url and .dsc provides no hint
2866 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2867 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2868 parse_cfg_bool "dsc-url-proto-ok", 'false',
2869 cfg("dgit.dsc-url-proto-ok.$proto",
2870 "dgit.default.dsc-url-proto-ok")
2872 .dsc Dgit metadata is in context of distro $dsc_distro
2873 for which we have no configured url;
2874 .dsc provides hinted url with protocol $proto which is unsafe.
2875 (can be overridden by config - consult documentation)
2877 $url = $dsc_hint_url;
2880 git_lrfetch_sane 1, @fetch;
2885 my $rewrite_enable = do {
2886 local $idistro = $dsc_distro;
2887 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2890 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2891 if (!defined $mapref) {
2892 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2893 $mapref = $lrf.'/'.$rewritemap;
2895 my $rewritemapdata = git_cat_file $mapref.':map';
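# (Judging by the match below, the map blob consists of lines of the
# form "<old commit id> <new commit id>"; a line whose first word is
# $dsc_hash supplies the rewritten hash.  That is an inference from
# the regexp, not a statement of the map format.)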
2896 if (defined $rewritemapdata
2897 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2899 "server's git history rewrite map contains a relevant entry!";
2902 if (defined $dsc_hash) {
2903 progress "using rewritten git hash in place of .dsc value";
2905 progress "server data says .dsc hash is to be disregarded";
2910 if (!defined git_cat_file $dsc_hash) {
2911 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2912 my $lrf = $do_fetch->("additional commits", @tags) &&
2913 defined git_cat_file $dsc_hash
2915 .dsc Dgit metadata requires commit $dsc_hash
2916 but we could not obtain that object anywhere.
2918 foreach my $t (@tags) {
2919 my $fullrefname = $lrf.'/'.$t;
2920 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2921 next unless $lrfetchrefs_f{$fullrefname};
2922 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2923 lrfetchref_used $fullrefname;
2928 sub fetch_from_archive () {
2929 ensure_setup_existing_tree();
2931 # Ensures that lrref() is what is actually in the archive, one way
2932 # or another, according to us - ie this client's
2933 # appropriately-updated archive view. Also returns the commit id.
2934 # If there is nothing in the archive, leaves lrref alone and
2935 # returns undef. git_fetch_us must have already been called.
2939 parse_dsc_field($dsc, 'last upload to archive');
2940 resolve_dsc_field_commit access_basedistro,
2941 lrfetchrefs."/".$rewritemap
2943 progress "no version available from the archive";
2946 # If the archive's .dsc has a Dgit field, there are three
2947 # relevant git commitids we need to choose between and/or merge
2949 # 1. $dsc_hash: the Dgit field from the archive
2950 # 2. $lastpush_hash: the suite branch on the dgit git server
2951 # 3. $lastfetch_hash: our local tracking branch for the suite
2953 # These may all be distinct and need not be in any fast forward relation to each other.
2956 # If the dsc was pushed to this suite, then the server suite
2957 # branch will have been updated; but it might have been pushed to
2958 # a different suite and copied by the archive. Conversely a more
2959 # recent version may have been pushed with dgit but not appeared
2960 # in the archive (yet).
2962 # $lastfetch_hash may be awkward because archive imports
2963 # (particularly, imports of Dgit-less .dscs) are performed only as
2964 # needed on individual clients, so different clients may perform a
2965 # different subset of them - and these imports are only made
2966 # public during push. So $lastfetch_hash may represent a set of
2967 # imports different to a subsequent upload by a different dgit
2970 # Our approach is as follows:
2972 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2973 # descendant of $dsc_hash, then it was pushed by a dgit user who
2974 # had based their work on $dsc_hash, so we should prefer it.
2975 # Otherwise, $dsc_hash was installed into this suite in the
2976 # archive other than by a dgit push, and (necessarily) after the
2977 # last dgit push into that suite (since a dgit push would have
2978 # been descended from the dgit server git branch); thus, in that
2979 # case, we prefer the archive's version (and produce a
2980 # pseudo-merge to overwrite the dgit server git branch).
2982 # (If there is no Dgit field in the archive's .dsc then
2983 # generate_commit_from_dsc uses the version numbers to decide
2984 # whether the suite branch or the archive is newer. If the suite
2985 # branch is newer it ignores the archive's .dsc; otherwise it
2986 # generates an import of the .dsc, and produces a pseudo-merge to
2987 # overwrite the suite branch with the archive contents.)
2989 # The outcome of that part of the algorithm is the `public view',
2990 # and is the same for all dgit clients: it does not depend on any
2991 # unpublished history in the local tracking branch.
2993 # As between the public view and the local tracking branch: The
2994 # local tracking branch is only updated by dgit fetch, and
2995 # whenever dgit fetch runs it includes the public view in the
2996 # local tracking branch. Therefore if the public view is not
2997 # descended from the local tracking branch, the local tracking
2998 # branch must contain history which was imported from the archive
2999 # but never pushed; and, its tip is now out of date. So, we make
3000 # a pseudo-merge to overwrite the old imports and stitch the old history in.
3003 # Finally: we do not necessarily reify the public view (as
3004 # described above). This is so that we do not end up stacking two
3005 # pseudo-merges. So what we actually do is figure out the inputs
3006 # to any public view pseudo-merge and put them in @mergeinputs.
3009 # $mergeinputs[]{Commit}
3010 # $mergeinputs[]{Info}
3011 # $mergeinputs[0] is the one whose tree we use
3012 # @mergeinputs is in the order we use in the actual commit)
3015 # $mergeinputs[]{Message} is a commit message to use
3016 # $mergeinputs[]{ReverseParents} if defined, specifies that the parent
3017 # list should be in the opposite order
3018 # Such an entry has no Commit or Info. It applies only when found
3019 # in the last entry. (This ugliness is to support making imports
3020 # identical to those made by previous dgit versions.)
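# For instance (hypothetical situation), a fixup fetch might end up with
#     @mergeinputs = (
#         { Commit => $dsc_hash,       Info => "Dgit field in .dsc from archive" },
#         { Commit => $lastpush_hash,  Info => "dgit suite branch on dgit git server" },
#         { Commit => $lastfetch_hash, Info => "dgit client's archive history view" },
#     );
# possibly followed by a Message/ReverseParents entry as described above.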
3022 my $lastpush_hash = git_get_ref(lrfetchref());
3023 printdebug "previous reference hash=$lastpush_hash\n";
3024 $lastpush_mergeinput = $lastpush_hash && {
3025 Commit => $lastpush_hash,
3026 Info => "dgit suite branch on dgit git server",
3029 my $lastfetch_hash = git_get_ref(lrref());
3030 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3031 my $lastfetch_mergeinput = $lastfetch_hash && {
3032 Commit => $lastfetch_hash,
3033 Info => "dgit client's archive history view",
3036 my $dsc_mergeinput = $dsc_hash && {
3037 Commit => $dsc_hash,
3038 Info => "Dgit field in .dsc from archive",
3042 my $del_lrfetchrefs = sub {
3045 printdebug "del_lrfetchrefs...\n";
3046 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3047 my $objid = $lrfetchrefs_d{$fullrefname};
3048 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3050 $gur ||= new IO::Handle;
3051 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3053 printf $gur "delete %s %s\n", $fullrefname, $objid;
3056 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3060 if (defined $dsc_hash) {
3061 ensure_we_have_orig();
3062 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3063 @mergeinputs = $dsc_mergeinput
3064 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3065 print STDERR <<END or die $!;
3067 Git commit in archive is behind the last version allegedly pushed/uploaded.
3068 Commit referred to by archive: $dsc_hash
3069 Last version pushed with dgit: $lastpush_hash
3072 @mergeinputs = ($lastpush_mergeinput);
3074 # Archive has .dsc which is not a descendant of the last dgit
3075 # push. This can happen if the archive moves .dscs about.
3076 # Just follow its lead.
3077 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3078 progress "archive .dsc names newer git commit";
3079 @mergeinputs = ($dsc_mergeinput);
3081 progress "archive .dsc names other git commit, fixing up";
3082 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3086 @mergeinputs = generate_commits_from_dsc();
3087 # We have just done an import. Now, our import algorithm might
3088 # have been improved. But even so we do not want to generate
3089 # a new different import of the same package. So if the
3090 # version numbers are the same, just use our existing version.
3091 # If the version numbers are different, the archive has changed
3092 # (perhaps, rewound).
3093 if ($lastfetch_mergeinput &&
3094 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3095 (mergeinfo_version $mergeinputs[0]) )) {
3096 @mergeinputs = ($lastfetch_mergeinput);
3098 } elsif ($lastpush_hash) {
3099 # only in git, not in the archive yet
3100 @mergeinputs = ($lastpush_mergeinput);
3101 print STDERR <<END or die $!;
3103 Package not found in the archive, but has allegedly been pushed using dgit.
3107 printdebug "nothing found!\n";
3108 if (defined $skew_warning_vsn) {
3109 print STDERR <<END or die $!;
3111 Warning: relevant archive skew detected.
3112 Archive allegedly contains $skew_warning_vsn
3113 But we were not able to obtain any version from the archive or git.
3117 unshift @end, $del_lrfetchrefs;
3121 if ($lastfetch_hash &&
3123 my $h = $_->{Commit};
3124 $h and is_fast_fwd($lastfetch_hash, $h);
3125 # If true, one of the existing parents of this commit
3126 # is a descendant of the $lastfetch_hash, so we'll
3127 # be ff from that automatically.
3131 push @mergeinputs, $lastfetch_mergeinput;
3134 printdebug "fetch mergeinfos:\n";
3135 foreach my $mi (@mergeinputs) {
3137 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3139 printdebug sprintf " ReverseParents=%d Message=%s",
3140 $mi->{ReverseParents}, $mi->{Message};
3144 my $compat_info= pop @mergeinputs
3145 if $mergeinputs[$#mergeinputs]{Message};
3147 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3150 if (@mergeinputs > 1) {
3152 my $tree_commit = $mergeinputs[0]{Commit};
3154 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3155 $tree =~ m/\n\n/; $tree = $`;
3156 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3159 # We use the changelog author of the package in question as the
3160 # author of this pseudo-merge. This is (roughly) correct if
3161 # this commit is simply representing a non-dgit upload.
3162 # (Roughly because it does not record sponsorship - but we
3163 # don't have sponsorship info because that's in the .changes,
3164 # which isn't in the archive.)
3166 # But, it might be that we are representing archive history
3167 # updates (including in-archive copies). These are not really
3168 # the responsibility of the person who created the .dsc, but
3169 # there is no-one whose name we should better use. (The
3170 # author of the .dsc-named commit is clearly worse.)
3172 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3173 my $author = clogp_authline $useclogp;
3174 my $cversion = getfield $useclogp, 'Version';
3176 my $mcf = ".git/dgit/mergecommit";
3177 open MC, ">", $mcf or die "$mcf $!";
3178 print MC <<END or die $!;
3182 my @parents = grep { $_->{Commit} } @mergeinputs;
3183 @parents = reverse @parents if $compat_info->{ReverseParents};
3184 print MC <<END or die $! foreach @parents;
3188 print MC <<END or die $!;
3194 if (defined $compat_info->{Message}) {
3195 print MC $compat_info->{Message} or die $!;
3197 print MC <<END or die $!;
3198 Record $package ($cversion) in archive suite $csuite
3202 my $message_add_info = sub {
3204 my $mversion = mergeinfo_version $mi;
3205 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3209 $message_add_info->($mergeinputs[0]);
3210 print MC <<END or die $!;
3211 should be treated as descended from
3213 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3217 $hash = make_commit $mcf;
3219 $hash = $mergeinputs[0]{Commit};
3221 printdebug "fetch hash=$hash\n";
3224 my ($lasth, $what) = @_;
3225 return unless $lasth;
3226 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3229 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3231 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3233 fetch_from_archive_record_1($hash);
3235 if (defined $skew_warning_vsn) {
3237 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3238 my $gotclogp = commit_getclogp($hash);
3239 my $got_vsn = getfield $gotclogp, 'Version';
3240 printdebug "SKEW CHECK GOT $got_vsn\n";
3241 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3242 print STDERR <<END or die $!;
3244 Warning: archive skew detected. Using the available version:
3245 Archive allegedly contains $skew_warning_vsn
3246 We were able to obtain only $got_vsn
3252 if ($lastfetch_hash ne $hash) {
3253 fetch_from_archive_record_2($hash);
3256 lrfetchref_used lrfetchref();
3258 unshift @end, $del_lrfetchrefs;
3262 sub set_local_git_config ($$) {
3264 runcmd @git, qw(config), $k, $v;
3267 sub setup_mergechangelogs (;$) {
3269 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3271 my $driver = 'dpkg-mergechangelogs';
3272 my $cb = "merge.$driver";
3273 my $attrs = '.git/info/attributes';
3274 ensuredir '.git/info';
3276 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3277 if (!open ATTRS, "<", $attrs) {
3278 $!==ENOENT or die "$attrs: $!";
3282 next if m{^debian/changelog\s};
3283 print NATTRS $_, "\n" or die $!;
3285 ATTRS->error and die $!;
3288 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3291 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3292 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3294 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3297 sub setup_useremail (;$) {
3299 return unless $always || access_cfg_bool(1, 'setup-useremail');
3302 my ($k, $envvar) = @_;
3303 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3304 return unless defined $v;
3305 set_local_git_config "user.$k", $v;
3308 $setup->('email', 'DEBEMAIL');
3309 $setup->('name', 'DEBFULLNAME');
3312 sub ensure_setup_existing_tree () {
3313 my $k = "remote.$remotename.skipdefaultupdate";
3314 my $c = git_get_config $k;
3315 return if defined $c;
3316 set_local_git_config $k, 'true';
3319 sub setup_new_tree () {
3320 setup_mergechangelogs();
3324 sub multisuite_suite_child ($$$) {
3325 my ($tsuite, $merginputs, $fn) = @_;
3326 # in child, sets things up, calls $fn->(), and returns undef
3327 # in parent, returns canonical suite name for $tsuite
3328 my $canonsuitefh = IO::File::new_tmpfile;
3329 my $pid = fork // die $!;
3333 $us .= " [$isuite]";
3334 $debugprefix .= " ";
3335 progress "fetching $tsuite...";
3336 canonicalise_suite();
3337 print $canonsuitefh $csuite, "\n" or die $!;
3338 close $canonsuitefh or die $!;
3342 (waitpid $pid, 0) == $pid or die $!;
3343 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3344 seek $canonsuitefh,0,0 or die $!;
3345 local $csuite = <$canonsuitefh>;
3346 die $! unless defined $csuite && chomp $csuite;
3348 printdebug "multisuite $tsuite missing\n";
3351 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3352 push @$merginputs, {
3359 sub fork_for_multisuite ($) {
3360 my ($before_fetch_merge) = @_;
3361 # if nothing unusual, just returns ''
3364 # returns 0 to caller in child, to do first of the specified suites
3365 # in child, $csuite is not yet set
3367 # returns 1 to caller in parent, to finish up anything needed after
3368 # in parent, $csuite is set to canonicalised portmanteau
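# Example (made-up suite names): with $isuite set to
# "bullseye,bullseye-security", each suite is fetched in a child in
# turn, and the parent is left with a canonicalised portmanteau such as
# $csuite = "bullseye,-security" (sub-suites sharing the base suite's
# prefix are abbreviated to "-...", as done below).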
3370 my $org_isuite = $isuite;
3371 my @suites = split /\,/, $isuite;
3372 return '' unless @suites > 1;
3373 printdebug "fork_for_multisuite: @suites\n";
3377 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3379 return 0 unless defined $cbasesuite;
3381 fail "package $package missing in (base suite) $cbasesuite"
3382 unless @mergeinputs;
3384 my @csuites = ($cbasesuite);
3386 $before_fetch_merge->();
3388 foreach my $tsuite (@suites[1..$#suites]) {
3389 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3395 # xxx collect the ref here
3397 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3398 push @csuites, $csubsuite;
3401 foreach my $mi (@mergeinputs) {
3402 my $ref = git_get_ref $mi->{Ref};
3403 die "$mi->{Ref} ?" unless length $ref;
3404 $mi->{Commit} = $ref;
3407 $csuite = join ",", @csuites;
3409 my $previous = git_get_ref lrref;
3411 unshift @mergeinputs, {
3412 Commit => $previous,
3413 Info => "local combined tracking branch",
3415 "archive seems to have rewound: local tracking branch is ahead!",
3419 foreach my $ix (0..$#mergeinputs) {
3420 $mergeinputs[$ix]{Index} = $ix;
3423 @mergeinputs = sort {
3424 -version_compare(mergeinfo_version $a,
3425 mergeinfo_version $b) # highest version first
3427 $a->{Index} <=> $b->{Index}; # earliest in spec first
3433 foreach my $mi (@mergeinputs) {
3434 printdebug "multisuite merge check $mi->{Info}\n";
3435 foreach my $previous (@needed) {
3436 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3437 printdebug "multisuite merge un-needed $previous->{Info}\n";
3441 printdebug "multisuite merge this-needed\n";
3442 $mi->{Character} = '+';
3445 $needed[0]{Character} = '*';
3447 my $output = $needed[0]{Commit};
3450 printdebug "multisuite merge nontrivial\n";
3451 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3453 my $commit = "tree $tree\n";
3454 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3455 "Input branches:\n";
3457 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3458 printdebug "multisuite merge include $mi->{Info}\n";
3459 $mi->{Character} //= ' ';
3460 $commit .= "parent $mi->{Commit}\n";
3461 $msg .= sprintf " %s %-25s %s\n",
3463 (mergeinfo_version $mi),
3466 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3468 " * marks the highest version branch, which choose to use\n".
3469 " + marks each branch which was not already an ancestor\n\n".
3470 "[dgit multi-suite $csuite]\n";
3472 "author $authline\n".
3473 "committer $authline\n\n";
3474 $output = make_commit_text $commit.$msg;
3475 printdebug "multisuite merge generated $output\n";
3478 fetch_from_archive_record_1($output);
3479 fetch_from_archive_record_2($output);
3481 progress "calculated combined tracking suite $csuite";
3486 sub clone_set_head () {
3487 open H, "> .git/HEAD" or die $!;
3488 print H "ref: ".lref()."\n" or die $!;
3491 sub clone_finish ($) {
3493 runcmd @git, qw(reset --hard), lrref();
3494 runcmd qw(bash -ec), <<'END';
3496 git ls-tree -r --name-only -z HEAD | \
3497 xargs -0r touch -h -r . --
3499 printdone "ready for work in $dstdir";
3504 badusage "dry run makes no sense with clone" unless act_local();
3506 my $multi_fetched = fork_for_multisuite(sub {
3507 printdebug "multi clone before fetch merge\n";
3510 if ($multi_fetched) {
3511 printdebug "multi clone after fetch merge\n";
3513 clone_finish($dstdir);
3516 printdebug "clone main body\n";
3518 canonicalise_suite();
3519 my $hasgit = check_for_git();
3520 mkdir $dstdir or fail "create \`$dstdir': $!";
3522 runcmd @git, qw(init -q);
3524 my $giturl = access_giturl(1);
3525 if (defined $giturl) {
3526 runcmd @git, qw(remote add), 'origin', $giturl;
3529 progress "fetching existing git history";
3531 runcmd_ordryrun_local @git, qw(fetch origin);
3533 progress "starting new git history";
3535 fetch_from_archive() or no_such_package;
3536 my $vcsgiturl = $dsc->{'Vcs-Git'};
3537 if (length $vcsgiturl) {
3538 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3539 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3542 clone_finish($dstdir);
3546 canonicalise_suite();
3547 if (check_for_git()) {
3550 fetch_from_archive() or no_such_package();
3551 printdone "fetched into ".lrref();
3555 my $multi_fetched = fork_for_multisuite(sub { });
3556 fetch() unless $multi_fetched; # parent
3557 return if $multi_fetched eq '0'; # child
3558 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3560 printdone "fetched to ".lrref()." and merged into HEAD";
3563 sub check_not_dirty () {
3564 foreach my $f (qw(local-options local-patch-header)) {
3565 if (stat_exists "debian/source/$f") {
3566 fail "git tree contains debian/source/$f";
3570 return if $ignoredirty;
3572 my @cmd = (@git, qw(diff --quiet HEAD));
3574 $!=0; $?=-1; system @cmd;
3577 fail "working tree is dirty (does not match HEAD)";
3583 sub commit_admin ($) {
3586 runcmd_ordryrun_local @git, qw(commit -m), $m;
3589 sub commit_quilty_patch () {
3590 my $output = cmdoutput @git, qw(status --porcelain);
3592 foreach my $l (split /\n/, $output) {
3593 next unless $l =~ m/\S/;
3594 if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
3598 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3600 progress "nothing quilty to commit, ok.";
3603 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3604 runcmd_ordryrun_local @git, qw(add -f), @adds;
3606 Commit Debian 3.0 (quilt) metadata
3608 [dgit ($our_version) quilt-fixup]
3612 sub get_source_format () {
3614 if (open F, "debian/source/options") {
3618 s/\s+$//; # ignore missing final newline
3620 my ($k, $v) = ($`, $'); #');
3621 $v =~ s/^"(.*)"$/$1/;
3627 F->error and die $!;
3630 die $! unless $!==&ENOENT;
3633 if (!open F, "debian/source/format") {
3634 die $! unless $!==&ENOENT;
3638 F->error and die $!;
3640 return ($_, \%options);
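# (For reference: debian/source/format normally contains a single line
# such as "3.0 (quilt)" or "3.0 (native)"; that string is what
# madformat_wantfixup, below, looks at.)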
3643 sub madformat_wantfixup ($) {
3645 return 0 unless $format eq '3.0 (quilt)';
3646 our $quilt_mode_warned;
3647 if ($quilt_mode eq 'nocheck') {
3648 progress "Not doing any fixup of \`$format' due to".
3649 " ----no-quilt-fixup or --quilt=nocheck"
3650 unless $quilt_mode_warned++;
3653 progress "Format \`$format', need to check/update patch stack"
3654 unless $quilt_mode_warned++;
3658 sub maybe_split_brain_save ($$$) {
3659 my ($headref, $dgitview, $msg) = @_;
3660 # => message fragment "$saved" describing disposition of $dgitview
3661 return "commit id $dgitview" unless defined $split_brain_save;
3662 my @cmd = (shell_cmd "cd ../../../..",
3663 @git, qw(update-ref -m),
3664 "dgit --dgit-view-save $msg HEAD=$headref",
3665 $split_brain_save, $dgitview);
3667 return "and left in $split_brain_save";
3670 # An "infopair" is a tuple [ $thing, $what ]
3671 # (often $thing is a commit hash; $what is a description)
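# e.g. (taken from pseudomerge_version_check, below):
#     [ $overwrite_version, '--overwrite= version' ]
#     [ (getfield $arch_clogp, 'Version'), 'version currently in archive' ]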
3673 sub infopair_cond_equal ($$) {
3675 $x->[0] eq $y->[0] or fail <<END;
3676 $x->[1] ($x->[0]) not equal to $y->[1] ($y->[0])
3680 sub infopair_lrf_tag_lookup ($$) {
3681 my ($tagnames, $what) = @_;
3682 # $tagnames may be an array ref
3683 my @tagnames = ref $tagnames ? @$tagnames : ($tagnames);
3684 printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n";
3685 foreach my $tagname (@tagnames) {
3686 my $lrefname = lrfetchrefs."/tags/$tagname";
3687 my $tagobj = $lrfetchrefs_f{$lrefname};
3688 next unless defined $tagobj;
3689 printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n";
3690 return [ git_rev_parse($tagobj), $what ];
3692 fail @tagnames==1 ? <<END : <<END;
3693 Wanted tag $what (@tagnames) on dgit server, but not found
3695 Wanted tag $what (one of: @tagnames) on dgit server, but not found
3699 sub infopair_cond_ff ($$) {
3700 my ($anc,$desc) = @_;
3701 is_fast_fwd($anc->[0], $desc->[0]) or fail <<END;
3702 $anc->[1] ($anc->[0]) .. $desc->[1] ($desc->[0]) is not fast forward
3706 sub pseudomerge_version_check ($$) {
3707 my ($clogp, $archive_hash) = @_;
3709 my $arch_clogp = commit_getclogp $archive_hash;
3710 my $i_arch_v = [ (getfield $arch_clogp, 'Version'),
3711 'version currently in archive' ];
3712 if (defined $overwrite_version) {
3713 if (length $overwrite_version) {
3714 infopair_cond_equal([ $overwrite_version,
3715 '--overwrite= version' ],