3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::Util qw(any);
38 use List::MoreUtils qw(pairwise);
39 use Text::Glob qw(match_glob);
40 use Fcntl qw(:DEFAULT :flock);
45 our $our_version = 'UNRELEASED'; ###substituted###
46 our $absurdity = undef; ###substituted###
48 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
57 our $dryrun_level = 0;
59 our $buildproductsdir = '..';
65 our $existing_package = 'dpkg';
67 our $changes_since_version;
69 our $overwrite_version; # undef: not specified; '': check changelog
71 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
73 our $dodep14tag_re = 'want|no|always';
74 our $split_brain_save;
75 our $we_are_responder;
76 our $we_are_initiator;
77 our $initiator_tempdir;
78 our $patches_applied_dirtily = 00;
82 our $chase_dsc_distro=1;
84 our %forceopts = map { $_=>0 }
85 qw(unrepresentable unsupported-source-format
86 dsc-changes-mismatch changes-origs-exactly
87 import-gitapply-absurd
88 import-gitapply-no-absurd
89 import-dsc-with-dgit-field);
91 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
93 our $suite_re = '[-+.0-9a-z]+';
94 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
95 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
96 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
97 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
99 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
100 our $splitbraincache = 'dgit-intern/quilt-cache';
101 our $rewritemap = 'dgit-rewrite/map';
103 our (@git) = qw(git);
104 our (@dget) = qw(dget);
105 our (@curl) = qw(curl);
106 our (@dput) = qw(dput);
107 our (@debsign) = qw(debsign);
108 our (@gpg) = qw(gpg);
109 our (@sbuild) = qw(sbuild);
111 our (@dgit) = qw(dgit);
112 our (@aptget) = qw(apt-get);
113 our (@aptcache) = qw(apt-cache);
114 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
115 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
116 our (@dpkggenchanges) = qw(dpkg-genchanges);
117 our (@mergechanges) = qw(mergechanges -f);
118 our (@gbp_build) = ('');
119 our (@gbp_pq) = ('gbp pq');
120 our (@changesopts) = ('');
122 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
125 'debsign' => \@debsign,
127 'sbuild' => \@sbuild,
131 'apt-get' => \@aptget,
132 'apt-cache' => \@aptcache,
133 'dpkg-source' => \@dpkgsource,
134 'dpkg-buildpackage' => \@dpkgbuildpackage,
135 'dpkg-genchanges' => \@dpkggenchanges,
136 'gbp-build' => \@gbp_build,
137 'gbp-pq' => \@gbp_pq,
138 'ch' => \@changesopts,
139 'mergechanges' => \@mergechanges);
141 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
142 our %opts_cfg_insertpos = map {
144 scalar @{ $opts_opt_map{$_} }
145 } keys %opts_opt_map;
147 sub parseopts_late_defaults();
153 our $supplementary_message = '';
154 our $need_split_build_invocation = 0;
155 our $split_brain = 0;
159 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
162 our $remotename = 'dgit';
163 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
167 if (!defined $absurdity) {
169 $absurdity =~ s{/[^/]+$}{/absurd} or die;
173 my ($v,$distro) = @_;
174 return $tagformatfn->($v, $distro);
177 sub debiantag_maintview ($$) {
178 my ($v,$distro) = @_;
179 return "$distro/".dep14_version_mangle $v;
182 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
184 sub lbranch () { return "$branchprefix/$csuite"; }
185 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
186 sub lref () { return "refs/heads/".lbranch(); }
187 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
188 sub rrref () { return server_ref($csuite); }
198 return "${package}_".(stripepoch $vsn).$sfx
203 return srcfn($vsn,".dsc");
206 sub changespat ($;$) {
207 my ($vsn, $arch) = @_;
208 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
211 sub upstreamversion ($) {
223 foreach my $f (@end) {
225 print STDERR "$us: cleanup: $@" if length $@;
229 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
231 sub forceable_fail ($$) {
232 my ($forceoptsl, $msg) = @_;
233 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
234 print STDERR "warning: overriding problem due to --force:\n". $msg;
238 my ($forceoptsl) = @_;
239 my @got = grep { $forceopts{$_} } @$forceoptsl;
240 return 0 unless @got;
242 "warning: skipping checks or functionality due to --force-$got[0]\n";
245 sub no_such_package () {
246 print STDERR "$us: package $package does not exist in suite $isuite\n";
252 printdebug "CD $newdir\n";
253 chdir $newdir or confess "chdir: $newdir: $!";
256 sub deliberately ($) {
258 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
261 sub deliberately_not_fast_forward () {
262 foreach (qw(not-fast-forward fresh-repo)) {
263 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
267 sub quiltmode_splitbrain () {
268 $quilt_mode =~ m/gbp|dpm|unapplied/;
271 sub opts_opt_multi_cmd {
273 push @cmd, split /\s+/, shift @_;
279 return opts_opt_multi_cmd @gbp_pq;
282 #---------- remote protocol support, common ----------
284 # remote push initiator/responder protocol:
285 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
286 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
287 # < dgit-remote-push-ready <actual-proto-vsn>
294 # > supplementary-message NBYTES # $protovsn >= 3
299 # > file parsed-changelog
300 # [indicates that output of dpkg-parsechangelog follows]
301 # > data-block NBYTES
302 # > [NBYTES bytes of data (no newline)]
303 # [maybe some more blocks]
312 # > param head DGIT-VIEW-HEAD
313 # > param csuite SUITE
314 # > param tagformat old|new
315 # > param maint-view MAINT-VIEW-HEAD
317 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
318 # # goes into tag, for replay prevention
321 # [indicates that signed tag is wanted]
322 # < data-block NBYTES
323 # < [NBYTES bytes of data (no newline)]
324 # [maybe some more blocks]
328 # > want signed-dsc-changes
329 # < data-block NBYTES [transfer of signed dsc]
331 # < data-block NBYTES [transfer of signed changes]
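#
# Illustrative exchange for one file transfer (the byte counts are
# hypothetical, shown only to clarify the data-block/data-end framing
# described above; the real code is protocol_send_file and
# protocol_receive_file below):
# > file parsed-changelog
# > data-block 900
# > [900 bytes of changelog data]
# > data-block 120
# > [120 more bytes]
# > data-end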
339 sub i_child_report () {
340 # Sees if our child has died, and reaps it if so. Returns a string
341 # describing how it died if it failed, or undef otherwise.
342 return undef unless $i_child_pid;
343 my $got = waitpid $i_child_pid, WNOHANG;
344 return undef if $got <= 0;
345 die unless $got == $i_child_pid;
346 $i_child_pid = undef;
347 return undef unless $?;
348 return "build host child ".waitstatusmsg();
353 fail "connection lost: $!" if $fh->error;
354 fail "protocol violation; $m not expected";
357 sub badproto_badread ($$) {
359 fail "connection lost: $!" if $!;
360 my $report = i_child_report();
361 fail $report if defined $report;
362 badproto $fh, "eof (reading $wh)";
365 sub protocol_expect (&$) {
366 my ($match, $fh) = @_;
369 defined && chomp or badproto_badread $fh, "protocol message";
377 badproto $fh, "\`$_'";
380 sub protocol_send_file ($$) {
381 my ($fh, $ourfn) = @_;
382 open PF, "<", $ourfn or die "$ourfn: $!";
385 my $got = read PF, $d, 65536;
386 die "$ourfn: $!" unless defined $got;
388 print $fh "data-block ".length($d)."\n" or die $!;
389 print $fh $d or die $!;
391 PF->error and die "$ourfn $!";
392 print $fh "data-end\n" or die $!;
396 sub protocol_read_bytes ($$) {
397 my ($fh, $nbytes) = @_;
398 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto $fh, "bad byte count";
400 my $got = read $fh, $d, $nbytes;
401 $got==$nbytes or badproto_badread $fh, "data block";
405 sub protocol_receive_file ($$) {
406 my ($fh, $ourfn) = @_;
407 printdebug "() $ourfn\n";
408 open PF, ">", $ourfn or die "$ourfn: $!";
410 my ($y,$l) = protocol_expect {
411 m/^data-block (.*)$/ ? (1,$1) :
412 m/^data-end$/ ? (0,) :
416 my $d = protocol_read_bytes $fh, $l;
417 print PF $d or die $!;
422 #---------- remote protocol support, responder ----------
424 sub responder_send_command ($) {
426 return unless $we_are_responder;
427 # called even when we are not the responder (hence the guard above)
428 printdebug ">> $command\n";
429 print PO $command, "\n" or die $!;
432 sub responder_send_file ($$) {
433 my ($keyword, $ourfn) = @_;
434 return unless $we_are_responder;
435 printdebug "]] $keyword $ourfn\n";
436 responder_send_command "file $keyword";
437 protocol_send_file \*PO, $ourfn;
440 sub responder_receive_files ($@) {
441 my ($keyword, @ourfns) = @_;
442 die unless $we_are_responder;
443 printdebug "[[ $keyword @ourfns\n";
444 responder_send_command "want $keyword";
445 foreach my $fn (@ourfns) {
446 protocol_receive_file \*PI, $fn;
449 protocol_expect { m/^files-end$/ } \*PI;
452 #---------- remote protocol support, initiator ----------
454 sub initiator_expect (&) {
456 protocol_expect { &$match } \*RO;
459 #---------- end remote code ----------
462 if ($we_are_responder) {
464 responder_send_command "progress ".length($m) or die $!;
465 print PO $m or die $!;
475 $ua = LWP::UserAgent->new();
479 progress "downloading $what...";
480 my $r = $ua->get(@_) or die $!;
481 return undef if $r->code == 404;
482 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
483 return $r->decoded_content(charset => 'none');
486 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
491 failedcmd @_ if system @_;
494 sub act_local () { return $dryrun_level <= 1; }
495 sub act_scary () { return !$dryrun_level; }
498 if (!$dryrun_level) {
499 progress "$us ok: @_";
501 progress "would be ok: @_ (but dry run only)";
506 printcmd(\*STDERR,$debugprefix."#",@_);
509 sub runcmd_ordryrun {
517 sub runcmd_ordryrun_local {
526 my ($first_shell, @cmd) = @_;
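# Sketch of what this produces (arguments taken, illustratively, from a
# caller further below): shell_cmd 'exec >/dev/null',
# qw(dpkg-source --before-build .) returns
#   sh -ec 'exec >/dev/null; exec "$@"' x dpkg-source --before-build .
# i.e. the shell fragment runs first, then the real command is exec'd.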
527 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
530 our $helpmsg = <<END;
532 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
533 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
534 dgit [dgit-opts] build [dpkg-buildpackage-opts]
535 dgit [dgit-opts] sbuild [sbuild-opts]
536 dgit [dgit-opts] push [dgit-opts] [suite]
537 dgit [dgit-opts] rpush build-host:build-dir ...
538 important dgit options:
539 -k<keyid> sign tag and package with <keyid> instead of default
540 --dry-run -n do not change anything, but go through the motions
541 --damp-run -L like --dry-run but make local changes, without signing
542 --new -N allow introducing a new package
543 --debug -D increase debug level
544 -c<name>=<value> set git config option (used directly by dgit too)
547 our $later_warning_msg = <<END;
548 Perhaps the upload is stuck in incoming. Using the version from git.
552 print STDERR "$us: @_\n", $helpmsg or die $!;
557 @ARGV or badusage "too few arguments";
558 return scalar shift @ARGV;
562 print $helpmsg or die $!;
566 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
568 our %defcfg = ('dgit.default.distro' => 'debian',
569 'dgit.default.default-suite' => 'unstable',
570 'dgit.default.old-dsc-distro' => 'debian',
571 'dgit-suite.*-security.distro' => 'debian-security',
572 'dgit.default.username' => '',
573 'dgit.default.archive-query-default-component' => 'main',
574 'dgit.default.ssh' => 'ssh',
575 'dgit.default.archive-query' => 'madison:',
576 'dgit.default.sshpsql-dbname' => 'service=projectb',
577 'dgit.default.aptget-components' => 'main',
578 'dgit.default.dgit-tag-format' => 'new,old,maint',
579 'dgit.dsc-url-proto-ok.http' => 'true',
580 'dgit.dsc-url-proto-ok.https' => 'true',
581 'dgit.dsc-url-proto-ok.git' => 'true',
582 'dgit.default.dsc-url-proto-ok' => 'false',
583 # old means "repo server accepts pushes with old dgit tags"
584 # new means "repo server accepts pushes with new dgit tags"
585 # maint means "repo server accepts split brain pushes"
586 # hist means "repo server may have old pushes without new tag"
587 # ("hist" is implied by "old")
588 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
589 'dgit-distro.debian.git-check' => 'url',
590 'dgit-distro.debian.git-check-suffix' => '/info/refs',
591 'dgit-distro.debian.new-private-pushers' => 't',
592 'dgit-distro.debian/push.git-url' => '',
593 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
594 'dgit-distro.debian/push.git-user-force' => 'dgit',
595 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
596 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
597 'dgit-distro.debian/push.git-create' => 'true',
598 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
599 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
600 # 'dgit-distro.debian.archive-query-tls-key',
601 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
602 # ^ this does not work because curl is broken nowadays
603 # Fixing #790093 properly will involve providing the key
604 # in some package and maybe updating these paths.
606 # 'dgit-distro.debian.archive-query-tls-curl-args',
607 # '--ca-path=/etc/ssl/ca-debian',
608 # ^ this is a workaround but works (only) on DSA-administered machines
609 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
610 'dgit-distro.debian.git-url-suffix' => '',
611 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
612 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
613 'dgit-distro.debian-security.archive-query' => 'aptget:',
614 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
615 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
616 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
617 'dgit-distro.debian-security.nominal-distro' => 'debian',
618 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
619 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
620 'dgit-distro.ubuntu.git-check' => 'false',
621 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
622 'dgit-distro.test-dummy.ssh' => "$td/ssh",
623 'dgit-distro.test-dummy.username' => "alice",
624 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
625 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
626 'dgit-distro.test-dummy.git-url' => "$td/git",
627 'dgit-distro.test-dummy.git-host' => "git",
628 'dgit-distro.test-dummy.git-path' => "$td/git",
629 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
630 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
631 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
632 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
636 our @gitcfgsources = qw(cmdline local global system);
638 sub git_slurp_config () {
639 local ($debuglevel) = $debuglevel-2;
642 # This algorithm is a bit subtle, but this is needed so that for
643 # options which we want to be single-valued, we allow the
644 # different config sources to override properly. See #835858.
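# Illustrative example (hypothetical key): if dgit-distro.debian.git-url
# were set in both .git/config (local) and ~/.gitconfig (global), keeping
# the values separate per source lets git_get_config below return only
# the local value, matching git's own precedence, rather than seeing both.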
645 foreach my $src (@gitcfgsources) {
646 next if $src eq 'cmdline';
647 # we do this ourselves since git doesn't handle it
649 my @cmd = (@git, qw(config -z --get-regexp), "--$src", qw(.*));
652 open GITS, "-|", @cmd or die $!;
655 printdebug "=> ", (messagequote $_), "\n";
657 push @{ $gitcfgs{$src}{$`} }, $'; #';
661 or ($!==0 && $?==256)
666 sub git_get_config ($) {
668 foreach my $src (@gitcfgsources) {
669 my $l = $gitcfgs{$src}{$c};
670 croak "$l $c" if $l && !ref $l;
671 printdebug"C $c ".(defined $l ?
672 join " ", map { messagequote "'$_'" } @$l :
676 @$l==1 or badcfg "multiple values for $c".
677 " (in $src git config)" if @$l > 1;
685 return undef if $c =~ /RETURN-UNDEF/;
686 printdebug "C? $c\n" if $debuglevel >= 5;
687 my $v = git_get_config($c);
688 return $v if defined $v;
689 my $dv = $defcfg{$c};
691 printdebug "CD $c $dv\n" if $debuglevel >= 4;
695 badcfg "need value for one of: @_\n".
696 "$us: distro or suite appears not to be (properly) supported";
699 sub access_basedistro__noalias () {
700 if (defined $idistro) {
703 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
704 return $def if defined $def;
705 foreach my $src (@gitcfgsources, 'internal') {
706 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
708 foreach my $k (keys %$kl) {
709 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
711 next unless match_glob $dpat, $isuite;
715 return cfg("dgit.default.distro");
719 sub access_basedistro () {
720 my $noalias = access_basedistro__noalias();
721 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
722 return $canon // $noalias;
725 sub access_nomdistro () {
726 my $base = access_basedistro();
727 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
728 $r =~ m/^$distro_re$/ or badcfg
729 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
733 sub access_quirk () {
734 # returns (quirk name, distro to use instead or undef, quirk-specific info)
735 my $basedistro = access_basedistro();
736 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
738 if (defined $backports_quirk) {
739 my $re = $backports_quirk;
740 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
742 $re =~ s/\%/([-0-9a-z_]+)/
743 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
744 if ($isuite =~ m/^$re$/) {
745 return ('backports',"$basedistro-backports",$1);
748 return ('none',undef);
753 sub parse_cfg_bool ($$$) {
754 my ($what,$def,$v) = @_;
757 $v =~ m/^[ty1]/ ? 1 :
758 $v =~ m/^[fn0]/ ? 0 :
759 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
762 sub access_forpush_config () {
763 my $d = access_basedistro();
767 parse_cfg_bool('new-private-pushers', 0,
768 cfg("dgit-distro.$d.new-private-pushers",
771 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
774 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
775 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
776 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
777 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
780 sub access_forpush () {
781 $access_forpush //= access_forpush_config();
782 return $access_forpush;
786 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
787 badcfg "pushing but distro is configured readonly"
788 if access_forpush_config() eq '0';
790 $supplementary_message = <<'END' unless $we_are_responder;
791 Push failed, before we got started.
792 You can retry the push, after fixing the problem, if you like.
794 parseopts_late_defaults();
798 parseopts_late_defaults();
801 sub supplementary_message ($) {
803 if (!$we_are_responder) {
804 $supplementary_message = $msg;
806 } elsif ($protovsn >= 3) {
807 responder_send_command "supplementary-message ".length($msg)
809 print PO $msg or die $!;
813 sub access_distros () {
814 # Returns list of distros to try, in order
817 # 0. `instead of' distro name(s) we have been pointed to
818 # 1. the access_quirk distro, if any
819 # 2a. the user's specified distro, or failing that } basedistro
820 # 2b. the distro calculated from the suite }
821 my @l = access_basedistro();
823 my (undef,$quirkdistro) = access_quirk();
824 unshift @l, $quirkdistro;
825 unshift @l, $instead_distro;
826 @l = grep { defined } @l;
828 push @l, access_nomdistro();
830 if (access_forpush()) {
831 @l = map { ("$_/push", $_) } @l;
836 sub access_cfg_cfgs (@) {
839 # The nesting of these loops determines the search order. We put
840 # the key loop on the outside so that we search all the distros
841 # for each key, before going on to the next key. That means that
842 # if access_cfg is called with a more specific, and then a less
843 # specific, key, an earlier distro can override the less specific
844 # without necessarily overriding any more specific keys. (If the
845 # distro wants to override the more specific keys it can simply do
846 # so; whereas if we did the loop the other way around, it would be
847 # impossible for an earlier distro to override a less specific
848 # key but not the more specific ones without restating the unknown
849 # values of the more specific keys.)
852 # We have to deal with RETURN-UNDEF specially, so that we don't
853 # terminate the search prematurely.
855 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
858 foreach my $d (access_distros()) {
859 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
861 push @cfgs, map { "dgit.default.$_" } @realkeys;
868 my (@cfgs) = access_cfg_cfgs(@keys);
869 my $value = cfg(@cfgs);
873 sub access_cfg_bool ($$) {
874 my ($def, @keys) = @_;
875 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
878 sub string_to_ssh ($) {
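# If the configured value contains whitespace it is treated as a shell
# fragment; illustratively, a config value of 'ssh -o BatchMode=yes'
# becomes
#   sh -ec 'exec ssh -o BatchMode=yes "$@"' x <args...>
# whereas a plain single word is used directly as the command name.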
880 if ($spec =~ m/\s/) {
881 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
887 sub access_cfg_ssh () {
888 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
889 if (!defined $gitssh) {
892 return string_to_ssh $gitssh;
896 sub access_runeinfo ($) {
898 return ": dgit ".access_basedistro()." $info ;";
901 sub access_someuserhost ($) {
903 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
904 defined($user) && length($user) or
905 $user = access_cfg("$some-user",'username');
906 my $host = access_cfg("$some-host");
907 return length($user) ? "$user\@$host" : $host;
910 sub access_gituserhost () {
911 return access_someuserhost('git');
914 sub access_giturl (;$) {
916 my $url = access_cfg('git-url','RETURN-UNDEF');
919 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
920 return undef unless defined $proto;
923 access_gituserhost().
924 access_cfg('git-path');
926 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
929 return "$url/$package$suffix";
932 sub parsecontrolfh ($$;$) {
933 my ($fh, $desc, $allowsigned) = @_;
934 our $dpkgcontrolhash_noissigned;
937 my %opts = ('name' => $desc);
938 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
939 $c = Dpkg::Control::Hash->new(%opts);
940 $c->parse($fh,$desc) or die "parsing of $desc failed";
941 last if $allowsigned;
942 last if $dpkgcontrolhash_noissigned;
943 my $issigned= $c->get_option('is_pgp_signed');
944 if (!defined $issigned) {
945 $dpkgcontrolhash_noissigned= 1;
946 seek $fh, 0,0 or die "seek $desc: $!";
947 } elsif ($issigned) {
948 fail "control file $desc is (already) PGP-signed. ".
949 " Note that dgit push needs to modify the .dsc and then".
950 " do the signature itself";
959 my ($file, $desc, $allowsigned) = @_;
960 my $fh = new IO::Handle;
961 open $fh, '<', $file or die "$file: $!";
962 my $c = parsecontrolfh($fh,$desc,$allowsigned);
963 $fh->error and die $!;
969 my ($dctrl,$field) = @_;
970 my $v = $dctrl->{$field};
971 return $v if defined $v;
972 fail "missing field $field in ".$dctrl->get_option('name');
976 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
977 my $p = new IO::Handle;
978 my @cmd = (qw(dpkg-parsechangelog), @_);
979 open $p, '-|', @cmd or die $!;
981 $?=0; $!=0; close $p or failedcmd @cmd;
985 sub commit_getclogp ($) {
986 # Returns the parsed changelog hashref for a particular commit
988 our %commit_getclogp_memo;
989 my $memo = $commit_getclogp_memo{$objid};
990 return $memo if $memo;
992 my $mclog = ".git/dgit/clog-$objid";
993 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
994 "$objid:debian/changelog";
995 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
1000 defined $d or fail "getcwd failed: $!";
1004 sub parse_dscdata () {
1005 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1006 printdebug Dumper($dscdata) if $debuglevel>1;
1007 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1008 printdebug Dumper($dsc) if $debuglevel>1;
1013 sub archive_query ($;@) {
1014 my ($method) = shift @_;
1015 fail "this operation does not support multiple comma-separated suites"
1017 my $query = access_cfg('archive-query','RETURN-UNDEF');
1018 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
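# Illustrative dispatch: with the Debian default archive-query of
# 'ftpmasterapi:' (see %defcfg above), a $method of 'archive_query'
# resolves to archive_query_ftpmasterapi('ftpmasterapi', '', ...);
# a setting like 'madison:debian' would instead call
# archive_query_madison with data 'debian'.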
1021 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1024 sub archive_query_prepend_mirror {
1025 my $m = access_cfg('mirror');
1026 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1029 sub pool_dsc_subpath ($$) {
1030 my ($vsn,$component) = @_; # $package is implicit arg
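# Debian-style pool layout: the per-package directory is keyed by the
# first letter of the source name, except that names starting with 'l'
# (the lib* convention) use a four-character prefix. Illustratively,
# hello 1.0-1 -> /pool/main/h/hello/hello_1.0-1.dsc and
# libfoo 1.0-1 -> /pool/main/libf/libfoo/libfoo_1.0-1.dsc.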
1031 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1032 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1035 sub cfg_apply_map ($$$) {
1036 my ($varref, $what, $mapspec) = @_;
1037 return unless $mapspec;
1039 printdebug "config $what EVAL{ $mapspec; }\n";
1041 eval "package Dgit::Config; $mapspec;";
1046 #---------- `ftpmasterapi' archive query method (nascent) ----------
1048 sub archive_api_query_cmd ($) {
1050 my @cmd = (@curl, qw(-sS));
1051 my $url = access_cfg('archive-query-url');
1052 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1054 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1055 foreach my $key (split /\:/, $keys) {
1056 $key =~ s/\%HOST\%/$host/g;
1058 fail "for $url: stat $key: $!" unless $!==ENOENT;
1061 fail "config requested specific TLS key but do not know".
1062 " how to get curl to use exactly that EE key ($key)";
1063 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1064 # # Sadly the above line does not work because of changes
1065 # # to gnutls. The real fix for #790093 may involve
1066 # # new curl options.
1069 # Fixing #790093 properly will involve providing a value
1070 # for this on clients.
1071 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1072 push @cmd, split / /, $kargs if defined $kargs;
1074 push @cmd, $url.$subpath;
1078 sub api_query ($$;$) {
1080 my ($data, $subpath, $ok404) = @_;
1081 badcfg "ftpmasterapi archive query method takes no data part"
1083 my @cmd = archive_api_query_cmd($subpath);
1084 my $url = $cmd[$#cmd];
1085 push @cmd, qw(-w %{http_code});
1086 my $json = cmdoutput @cmd;
1087 unless ($json =~ s/\d+\d+\d$//) {
1088 failedcmd_report_cmd undef, @cmd;
1089 fail "curl failed to print 3-digit HTTP code";
1092 return undef if $code eq '404' && $ok404;
1093 fail "fetch of $url gave HTTP code $code"
1094 unless $url =~ m#^file://# or $code =~ m/^2/;
1095 return decode_json($json);
1098 sub canonicalise_suite_ftpmasterapi {
1099 my ($proto,$data) = @_;
1100 my $suites = api_query($data, 'suites');
1102 foreach my $entry (@$suites) {
1104 my $v = $entry->{$_};
1105 defined $v && $v eq $isuite;
1106 } qw(codename name);
1107 push @matched, $entry;
1109 fail "unknown suite $isuite" unless @matched;
1112 @matched==1 or die "multiple matches for suite $isuite\n";
1113 $cn = "$matched[0]{codename}";
1114 defined $cn or die "suite $isuite info has no codename\n";
1115 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1117 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1122 sub archive_query_ftpmasterapi {
1123 my ($proto,$data) = @_;
1124 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1126 my $digester = Digest::SHA->new(256);
1127 foreach my $entry (@$info) {
1129 my $vsn = "$entry->{version}";
1130 my ($ok,$msg) = version_check $vsn;
1131 die "bad version: $msg\n" unless $ok;
1132 my $component = "$entry->{component}";
1133 $component =~ m/^$component_re$/ or die "bad component";
1134 my $filename = "$entry->{filename}";
1135 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1136 or die "bad filename";
1137 my $sha256sum = "$entry->{sha256sum}";
1138 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1139 push @rows, [ $vsn, "/pool/$component/$filename",
1140 $digester, $sha256sum ];
1142 die "bad ftpmaster api response: $@\n".Dumper($entry)
1145 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1146 return archive_query_prepend_mirror @rows;
1149 sub file_in_archive_ftpmasterapi {
1150 my ($proto,$data,$filename) = @_;
1151 my $pat = $filename;
1154 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1155 my $info = api_query($data, "file_in_archive/$pat", 1);
1158 #---------- `aptget' archive query method ----------
1161 our $aptget_releasefile;
1162 our $aptget_configpath;
1164 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1165 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1167 sub aptget_cache_clean {
1168 runcmd_ordryrun_local qw(sh -ec),
1169 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1173 sub aptget_lock_acquire () {
1174 my $lockfile = "$aptget_base/lock";
1175 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1176 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1179 sub aptget_prep ($) {
1181 return if defined $aptget_base;
1183 badcfg "aptget archive query method takes no data part"
1186 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1189 ensuredir "$cache/dgit";
1191 access_cfg('aptget-cachekey','RETURN-UNDEF')
1192 // access_nomdistro();
1194 $aptget_base = "$cache/dgit/aptget";
1195 ensuredir $aptget_base;
1197 my $quoted_base = $aptget_base;
1198 die "$quoted_base contains bad chars, cannot continue"
1199 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1201 ensuredir $aptget_base;
1203 aptget_lock_acquire();
1205 aptget_cache_clean();
1207 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1208 my $sourceslist = "source.list#$cachekey";
1210 my $aptsuites = $isuite;
1211 cfg_apply_map(\$aptsuites, 'suite map',
1212 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1214 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1215 printf SRCS "deb-src %s %s %s\n",
1216 access_cfg('mirror'),
1218 access_cfg('aptget-components')
1221 ensuredir "$aptget_base/cache";
1222 ensuredir "$aptget_base/lists";
1224 open CONF, ">", $aptget_configpath or die $!;
1226 Debug::NoLocking "true";
1227 APT::Get::List-Cleanup "false";
1228 #clear APT::Update::Post-Invoke-Success;
1229 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1230 Dir::State::Lists "$quoted_base/lists";
1231 Dir::Etc::preferences "$quoted_base/preferences";
1232 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1233 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1236 foreach my $key (qw(
1239 Dir::Cache::Archives
1240 Dir::Etc::SourceParts
1241 Dir::Etc::preferencesparts
1243 ensuredir "$aptget_base/$key";
1244 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1247 my $oldatime = (time // die $!) - 1;
1248 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1249 next unless stat_exists $oldlist;
1250 my ($mtime) = (stat _)[9];
1251 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1254 runcmd_ordryrun_local aptget_aptget(), qw(update);
1257 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1258 next unless stat_exists $oldlist;
1259 my ($atime) = (stat _)[8];
1260 next if $atime == $oldatime;
1261 push @releasefiles, $oldlist;
1263 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1264 @releasefiles = @inreleasefiles if @inreleasefiles;
1265 die "apt updated wrong number of Release files (@releasefiles), erk"
1266 unless @releasefiles == 1;
1268 ($aptget_releasefile) = @releasefiles;
1271 sub canonicalise_suite_aptget {
1272 my ($proto,$data) = @_;
1275 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1277 foreach my $name (qw(Codename Suite)) {
1278 my $val = $release->{$name};
1280 printdebug "release file $name: $val\n";
1281 $val =~ m/^$suite_re$/o or fail
1282 "Release file ($aptget_releasefile) specifies intolerable $name";
1283 cfg_apply_map(\$val, 'suite rmap',
1284 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1291 sub archive_query_aptget {
1292 my ($proto,$data) = @_;
1295 ensuredir "$aptget_base/source";
1296 foreach my $old (<$aptget_base/source/*.dsc>) {
1297 unlink $old or die "$old: $!";
1300 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1301 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1302 # avoids apt-get source failing with ambiguous error code
1304 runcmd_ordryrun_local
1305 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1306 aptget_aptget(), qw(--download-only --only-source source), $package;
1308 my @dscs = <$aptget_base/source/*.dsc>;
1309 fail "apt-get source did not produce a .dsc" unless @dscs;
1310 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1312 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1315 my $uri = "file://". uri_escape $dscs[0];
1316 $uri =~ s{\%2f}{/}gi;
1317 return [ (getfield $pre_dsc, 'Version'), $uri ];
1320 #---------- `dummyapicat' archive query method ----------
1322 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1323 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1325 sub file_in_archive_dummycatapi ($$$) {
1326 my ($proto,$data,$filename) = @_;
1327 my $mirror = access_cfg('mirror');
1328 $mirror =~ s#^file://#/# or die "$mirror ?";
1330 my @cmd = (qw(sh -ec), '
1332 find -name "$2" -print0 |
1334 ', qw(x), $mirror, $filename);
1335 debugcmd "-|", @cmd;
1336 open FIA, "-|", @cmd or die $!;
1339 printdebug "| $_\n";
1340 m/^(\w+) (\S+)$/ or die "$_ ?";
1341 push @out, { sha256sum => $1, filename => $2 };
1343 close FIA or die failedcmd @cmd;
1347 #---------- `madison' archive query method ----------
1349 sub archive_query_madison {
1350 return archive_query_prepend_mirror
1351 map { [ @$_[0..1] ] } madison_get_parse(@_);
1354 sub madison_get_parse {
1355 my ($proto,$data) = @_;
1356 die unless $proto eq 'madison';
1357 if (!length $data) {
1358 $data= access_cfg('madison-distro','RETURN-UNDEF');
1359 $data //= access_basedistro();
1361 $rmad{$proto,$data,$package} ||= cmdoutput
1362 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1363 my $rmad = $rmad{$proto,$data,$package};
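# rmadison prints pipe-separated rows; an illustrative line (values
# hypothetical) is
#   hello | 2.10-1 | unstable/main | source
# which the regexp below parses as package, version, suite (with an
# optional /component), and type (which must be 'source').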
1366 foreach my $l (split /\n/, $rmad) {
1367 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1368 \s*( [^ \t|]+ )\s* \|
1369 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1370 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1371 $1 eq $package or die "$rmad $package ?";
1378 $component = access_cfg('archive-query-default-component');
1380 $5 eq 'source' or die "$rmad ?";
1381 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1383 return sort { -version_compare($a->[0],$b->[0]); } @out;
1386 sub canonicalise_suite_madison {
1387 # madison canonicalises for us
1388 my @r = madison_get_parse(@_);
1390 "unable to canonicalise suite using package $package".
1391 " which does not appear to exist in suite $isuite;".
1392 " --existing-package may help";
1396 sub file_in_archive_madison { return undef; }
1398 #---------- `sshpsql' archive query method ----------
1401 my ($data,$runeinfo,$sql) = @_;
1402 if (!length $data) {
1403 $data= access_someuserhost('sshpsql').':'.
1404 access_cfg('sshpsql-dbname');
1406 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1407 my ($userhost,$dbname) = ($`,$'); #';
1409 my @cmd = (access_cfg_ssh, $userhost,
1410 access_runeinfo("ssh-psql $runeinfo").
1411 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1412 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1414 open P, "-|", @cmd or die $!;
1417 printdebug(">|$_|\n");
1420 $!=0; $?=0; close P or failedcmd @cmd;
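# psql -A output looks, illustratively (values hypothetical), like
#   version|name|filename|sha256sum
#   1.0-1|main|h/hello/hello_1.0-1.dsc|deadbeef...
#   (1 row)
# i.e. a header row, '|'-separated data rows, and a trailing row count,
# which is popped off and checked below.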
1422 my $nrows = pop @rows;
1423 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1424 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1425 @rows = map { [ split /\|/, $_ ] } @rows;
1426 my $ncols = scalar @{ shift @rows };
1427 die if grep { scalar @$_ != $ncols } @rows;
1431 sub sql_injection_check {
1432 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1435 sub archive_query_sshpsql ($$) {
1436 my ($proto,$data) = @_;
1437 sql_injection_check $isuite, $package;
1438 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1439 SELECT source.version, component.name, files.filename, files.sha256sum
1441 JOIN src_associations ON source.id = src_associations.source
1442 JOIN suite ON suite.id = src_associations.suite
1443 JOIN dsc_files ON dsc_files.source = source.id
1444 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1445 JOIN component ON component.id = files_archive_map.component_id
1446 JOIN files ON files.id = dsc_files.file
1447 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1448 AND source.source='$package'
1449 AND files.filename LIKE '%.dsc';
1451 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1452 my $digester = Digest::SHA->new(256);
1454 my ($vsn,$component,$filename,$sha256sum) = @$_;
1455 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1457 return archive_query_prepend_mirror @rows;
1460 sub canonicalise_suite_sshpsql ($$) {
1461 my ($proto,$data) = @_;
1462 sql_injection_check $isuite;
1463 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1464 SELECT suite.codename
1465 FROM suite where suite_name='$isuite' or codename='$isuite';
1467 @rows = map { $_->[0] } @rows;
1468 fail "unknown suite $isuite" unless @rows;
1469 die "ambiguous $isuite: @rows ?" if @rows>1;
1473 sub file_in_archive_sshpsql ($$$) { return undef; }
1475 #---------- `dummycat' archive query method ----------
1477 sub canonicalise_suite_dummycat ($$) {
1478 my ($proto,$data) = @_;
1479 my $dpath = "$data/suite.$isuite";
1480 if (!open C, "<", $dpath) {
1481 $!==ENOENT or die "$dpath: $!";
1482 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1486 chomp or die "$dpath: $!";
1488 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1492 sub archive_query_dummycat ($$) {
1493 my ($proto,$data) = @_;
1494 canonicalise_suite();
1495 my $dpath = "$data/package.$csuite.$package";
1496 if (!open C, "<", $dpath) {
1497 $!==ENOENT or die "$dpath: $!";
1498 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1506 printdebug "dummycat query $csuite $package $dpath | $_\n";
1507 my @row = split /\s+/, $_;
1508 @row==2 or die "$dpath: $_ ?";
1511 C->error and die "$dpath: $!";
1513 return archive_query_prepend_mirror
1514 sort { -version_compare($a->[0],$b->[0]); } @rows;
1517 sub file_in_archive_dummycat () { return undef; }
1519 #---------- tag format handling ----------
1521 sub access_cfg_tagformats () {
1522 split /\,/, access_cfg('dgit-tag-format');
1525 sub access_cfg_tagformats_can_splitbrain () {
1526 my %y = map { $_ => 1 } access_cfg_tagformats;
1527 foreach my $needtf (qw(new maint)) {
1528 next if $y{$needtf};
1534 sub need_tagformat ($$) {
1535 my ($fmt, $why) = @_;
1536 fail "need to use tag format $fmt ($why) but also need".
1537 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1538 " - no way to proceed"
1539 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1540 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1543 sub select_tagformat () {
1545 return if $tagformatfn && !$tagformat_want;
1546 die 'bug' if $tagformatfn && $tagformat_want;
1547 # ... $tagformat_want assigned after previous select_tagformat
1549 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1550 printdebug "select_tagformat supported @supported\n";
1552 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1553 printdebug "select_tagformat specified @$tagformat_want\n";
1555 my ($fmt,$why,$override) = @$tagformat_want;
1557 fail "target distro supports tag formats @supported".
1558 " but have to use $fmt ($why)"
1560 or grep { $_ eq $fmt } @supported;
1562 $tagformat_want = undef;
1564 $tagformatfn = ${*::}{"debiantag_$fmt"};
1566 fail "trying to use unknown tag format \`$fmt' ($why) !"
1567 unless $tagformatfn;
1570 #---------- archive query entrypoints and rest of program ----------
1572 sub canonicalise_suite () {
1573 return if defined $csuite;
1574 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1575 $csuite = archive_query('canonicalise_suite');
1576 if ($isuite ne $csuite) {
1577 progress "canonical suite name for $isuite is $csuite";
1579 progress "canonical suite name is $csuite";
1583 sub get_archive_dsc () {
1584 canonicalise_suite();
1585 my @vsns = archive_query('archive_query');
1586 foreach my $vinfo (@vsns) {
1587 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1588 $dscurl = $vsn_dscurl;
1589 $dscdata = url_get($dscurl);
1591 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1596 $digester->add($dscdata);
1597 my $got = $digester->hexdigest();
1599 fail "$dscurl has hash $got but".
1600 " archive told us to expect $digest";
1603 my $fmt = getfield $dsc, 'Format';
1604 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1605 "unsupported source format $fmt, sorry";
1607 $dsc_checked = !!$digester;
1608 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1612 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1615 sub check_for_git ();
1616 sub check_for_git () {
1618 my $how = access_cfg('git-check');
1619 if ($how eq 'ssh-cmd') {
1621 (access_cfg_ssh, access_gituserhost(),
1622 access_runeinfo("git-check $package").
1623 " set -e; cd ".access_cfg('git-path').";".
1624 " if test -d $package.git; then echo 1; else echo 0; fi");
1625 my $r= cmdoutput @cmd;
1626 if (defined $r and $r =~ m/^divert (\w+)$/) {
1628 my ($usedistro,) = access_distros();
1629 # NB that if we are pushing, $usedistro will be $distro/push
1630 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1631 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1632 progress "diverting to $divert (using config for $instead_distro)";
1633 return check_for_git();
1635 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1637 } elsif ($how eq 'url') {
1638 my $prefix = access_cfg('git-check-url','git-url');
1639 my $suffix = access_cfg('git-check-suffix','git-suffix',
1640 'RETURN-UNDEF') // '.git';
1641 my $url = "$prefix/$package$suffix";
1642 my @cmd = (@curl, qw(-sS -I), $url);
1643 my $result = cmdoutput @cmd;
1644 $result =~ s/^\S+ 200 .*\n\r?\n//;
1645 # curl -sS -I with https_proxy prints
1646 # HTTP/1.0 200 Connection established
1647 $result =~ m/^\S+ (404|200) /s or
1648 fail "unexpected results from git check query - ".
1649 Dumper($prefix, $result);
1651 if ($code eq '404') {
1653 } elsif ($code eq '200') {
1658 } elsif ($how eq 'true') {
1660 } elsif ($how eq 'false') {
1663 badcfg "unknown git-check \`$how'";
1667 sub create_remote_git_repo () {
1668 my $how = access_cfg('git-create');
1669 if ($how eq 'ssh-cmd') {
1671 (access_cfg_ssh, access_gituserhost(),
1672 access_runeinfo("git-create $package").
1673 "set -e; cd ".access_cfg('git-path').";".
1674 " cp -a _template $package.git");
1675 } elsif ($how eq 'true') {
1678 badcfg "unknown git-create \`$how'";
1682 our ($dsc_hash,$lastpush_mergeinput);
1683 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1685 our $ud = '.git/dgit/unpack';
1695 sub mktree_in_ud_here () {
1696 runcmd qw(git init -q);
1697 runcmd qw(git config gc.auto 0);
1698 rmtree('.git/objects');
1699 symlink '../../../../objects','.git/objects' or die $!;
1702 sub git_write_tree () {
1703 my $tree = cmdoutput @git, qw(write-tree);
1704 $tree =~ m/^\w+$/ or die "$tree ?";
1708 sub git_add_write_tree () {
1709 runcmd @git, qw(add -Af .);
1710 return git_write_tree();
1713 sub remove_stray_gits ($) {
1715 my @gitscmd = qw(find -name .git -prune -print0);
1716 debugcmd "|",@gitscmd;
1717 open GITS, "-|", @gitscmd or die $!;
1722 print STDERR "$us: warning: removing from $what: ",
1723 (messagequote $_), "\n";
1727 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1730 sub mktree_in_ud_from_only_subdir ($;$) {
1731 my ($what,$raw) = @_;
1733 # changes into the subdir
1735 die "expected one subdir but found @dirs ?" unless @dirs==1;
1736 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1740 remove_stray_gits($what);
1741 mktree_in_ud_here();
1743 my ($format, $fopts) = get_source_format();
1744 if (madformat($format)) {
1749 my $tree=git_add_write_tree();
1750 return ($tree,$dir);
1753 our @files_csum_info_fields =
1754 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1755 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1756 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
1758 sub dsc_files_info () {
1759 foreach my $csumi (@files_csum_info_fields) {
1760 my ($fname, $module, $method) = @$csumi;
1761 my $field = $dsc->{$fname};
1762 next unless defined $field;
1763 eval "use $module; 1;" or die $@;
1765 foreach (split /\n/, $field) {
1767 m/^(\w+) (\d+) (\S+)$/ or
1768 fail "could not parse .dsc $fname line \`$_'";
1769 my $digester = eval "$module"."->$method;" or die $@;
1774 Digester => $digester,
1779 fail "missing any supported Checksums-* or Files field in ".
1780 $dsc->get_option('name');
1784 map { $_->{Filename} } dsc_files_info();
1787 sub files_compare_inputs (@) {
1792 my $showinputs = sub {
1793 return join "; ", map { $_->get_option('name') } @$inputs;
1796 foreach my $in (@$inputs) {
1798 my $in_name = $in->get_option('name');
1800 printdebug "files_compare_inputs $in_name\n";
1802 foreach my $csumi (@files_csum_info_fields) {
1803 my ($fname) = @$csumi;
1804 printdebug "files_compare_inputs $in_name $fname\n";
1806 my $field = $in->{$fname};
1807 next unless defined $field;
1810 foreach (split /\n/, $field) {
1813 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1814 fail "could not parse $in_name $fname line \`$_'";
1816 printdebug "files_compare_inputs $in_name $fname $f\n";
1820 my $re = \ $record{$f}{$fname};
1822 $fchecked{$f}{$in_name} = 1;
1824 fail "hash or size of $f varies in $fname fields".
1825 " (between: ".$showinputs->().")";
1830 @files = sort @files;
1831 $expected_files //= \@files;
1832 "@$expected_files" eq "@files" or
1833 fail "file list in $in_name varies between hash fields!";
1836 fail "$in_name has no files list field(s)";
1838 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1841 grep { keys %$_ == @$inputs-1 } values %fchecked
1842 or fail "no file appears in all file lists".
1843 " (looked in: ".$showinputs->().")";
1846 sub is_orig_file_in_dsc ($$) {
1847 my ($f, $dsc_files_info) = @_;
1848 return 0 if @$dsc_files_info <= 1;
1849 # One file means no origs, and the filename doesn't have a "what
1850 # part of dsc" component. (Consider versions ending `.orig'.)
1851 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1855 sub is_orig_file_of_vsn ($$) {
1856 my ($f, $upstreamvsn) = @_;
1857 my $base = srcfn $upstreamvsn, '';
1858 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1862 sub changes_update_origs_from_dsc ($$$$) {
1863 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1865 printdebug "checking origs needed ($upstreamvsn)...\n";
1866 $_ = getfield $changes, 'Files';
1867 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1868 fail "cannot find section/priority from .changes Files field";
1869 my $placementinfo = $1;
1871 printdebug "checking origs needed placement '$placementinfo'...\n";
1872 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1873 $l =~ m/\S+$/ or next;
1875 printdebug "origs $file | $l\n";
1876 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1877 printdebug "origs $file is_orig\n";
1878 my $have = archive_query('file_in_archive', $file);
1879 if (!defined $have) {
1881 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1887 printdebug "origs $file \$#\$have=$#$have\n";
1888 foreach my $h (@$have) {
1891 foreach my $csumi (@files_csum_info_fields) {
1892 my ($fname, $module, $method, $archivefield) = @$csumi;
1893 next unless defined $h->{$archivefield};
1894 $_ = $dsc->{$fname};
1895 next unless defined;
1896 m/^(\w+) .* \Q$file\E$/m or
1897 fail ".dsc $fname missing entry for $file";
1898 if ($h->{$archivefield} eq $1) {
1902 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1905 die "$file ".Dumper($h)." ?!" if $same && @differ;
1908 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1911 printdebug "origs $file f.same=$found_same".
1912 " #f._differ=$#found_differ\n";
1913 if (@found_differ && !$found_same) {
1915 "archive contains $file with different checksum",
1918 # Now we edit the changes file to add or remove it
1919 foreach my $csumi (@files_csum_info_fields) {
1920 my ($fname, $module, $method, $archivefield) = @$csumi;
1921 next unless defined $changes->{$fname};
1923 # in archive, delete from .changes if it's there
1924 $changed{$file} = "removed" if
1925 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1926 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1927 # not in archive, but it's here in the .changes
1929 my $dsc_data = getfield $dsc, $fname;
1930 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1932 $extra =~ s/ \d+ /$&$placementinfo /
1933 or die "$fname $extra >$dsc_data< ?"
1934 if $fname eq 'Files';
1935 $changes->{$fname} .= "\n". $extra;
1936 $changed{$file} = "added";
1941 foreach my $file (keys %changed) {
1943 "edited .changes for archive .orig contents: %s %s",
1944 $changed{$file}, $file;
1946 my $chtmp = "$changesfile.tmp";
1947 $changes->save($chtmp);
1949 rename $chtmp,$changesfile or die "$changesfile $!";
1951 progress "[new .changes left in $changesfile]";
1954 progress "$changesfile already has appropriate .orig(s) (if any)";
1958 sub make_commit ($) {
1960 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1963 sub make_commit_text ($) {
1966 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1968 print Dumper($text) if $debuglevel > 1;
1969 my $child = open2($out, $in, @cmd) or die $!;
1972 print $in $text or die $!;
1973 close $in or die $!;
1975 $h =~ m/^\w+$/ or die;
1977 printdebug "=> $h\n";
1980 (waitpid $child, 0) == $child or die "$child $!";
1981 $? and failedcmd @cmd;
1985 sub clogp_authline ($) {
1987 my $author = getfield $clogp, 'Maintainer';
1988 $author =~ s#,.*##ms;
1989 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1990 my $authline = "$author $date";
1991 $authline =~ m/$git_authline_re/o or
1992 fail "unexpected commit author line format \`$authline'".
1993 " (was generated from changelog Maintainer field)";
1994 return ($1,$2,$3) if wantarray;
1998 sub vendor_patches_distro ($$) {
1999 my ($checkdistro, $what) = @_;
2000 return unless defined $checkdistro;
2002 my $series = "debian/patches/\L$checkdistro\E.series";
2003 printdebug "checking for vendor-specific $series ($what)\n";
2005 if (!open SERIES, "<", $series) {
2006 die "$series $!" unless $!==ENOENT;
2015 Unfortunately, this source package uses a feature of dpkg-source where
2016 the same source package unpacks to different source code on different
2017 distros. dgit cannot safely operate on such packages on affected
2018 distros, because the meaning of source packages is not stable.
2020 Please ask the distro/maintainer to remove the distro-specific series
2021 files and use a different technique (if necessary, uploading actually
2022 different packages, if different distros are supposed to have
2026 fail "Found active distro-specific series file for".
2027 " $checkdistro ($what): $series, cannot continue";
2029 die "$series $!" if SERIES->error;
2033 sub check_for_vendor_patches () {
2034 # This dpkg-source feature doesn't seem to be documented anywhere!
2035 # But it can be found in the changelog (reformatted):
2037 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2038 # Author: Raphael Hertzog <hertzog@debian.org>
2039 # Date: Sun Oct 3 09:36:48 2010 +0200
2041 # dpkg-source: correctly create .pc/.quilt_series with alternate
2044 # If you have debian/patches/ubuntu.series and you were
2045 # unpacking the source package on ubuntu, quilt was still
2046 # directed to debian/patches/series instead of
2047 # debian/patches/ubuntu.series.
2049 # debian/changelog | 3 +++
2050 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2051 # 2 files changed, 6 insertions(+), 1 deletion(-)
2054 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2055 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2056 "Dpkg::Vendor \`current vendor'");
2057 vendor_patches_distro(access_basedistro(),
2058 "(base) distro being accessed");
2059 vendor_patches_distro(access_nomdistro(),
2060 "(nominal) distro being accessed");
2063 sub generate_commits_from_dsc () {
2064 # See big comment in fetch_from_archive, below.
2065 # See also README.dsc-import.
2069 my @dfi = dsc_files_info();
2070 foreach my $fi (@dfi) {
2071 my $f = $fi->{Filename};
2072 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2073 my $upper_f = "../../../../$f";
2075 printdebug "considering reusing $f: ";
2077 if (link_ltarget "$upper_f,fetch", $f) {
2078 printdebug "linked (using ...,fetch).\n";
2079 } elsif ((printdebug "($!) "),
2081 fail "accessing ../$f,fetch: $!";
2082 } elsif (link_ltarget $upper_f, $f) {
2083 printdebug "linked.\n";
2084 } elsif ((printdebug "($!) "),
2086 fail "accessing ../$f: $!";
2088 printdebug "absent.\n";
2092 complete_file_from_dsc('.', $fi, \$refetched)
2095 printdebug "considering saving $f: ";
2097 if (link $f, $upper_f) {
2098 printdebug "linked.\n";
2099 } elsif ((printdebug "($!) "),
2101 fail "saving ../$f: $!";
2102 } elsif (!$refetched) {
2103 printdebug "no need.\n";
2104 } elsif (link $f, "$upper_f,fetch") {
2105 printdebug "linked (using ...,fetch).\n";
2106 } elsif ((printdebug "($!) "),
2108 fail "saving ../$f,fetch: $!";
2110 printdebug "cannot.\n";
2114 # We unpack and record the orig tarballs first, so that we only
2115 # need disk space for one private copy of the unpacked source.
2116 # But we can't make them into commits until we have the metadata
2117 # from the debian/changelog, so we record the tree objects now and
2118 # make them into commits later.
2120 my $upstreamv = upstreamversion $dsc->{version};
2121 my $orig_f_base = srcfn $upstreamv, '';
2123 foreach my $fi (@dfi) {
2124 # We actually import, and record as a commit, every tarball
2125 # (unless there is only one file, in which case there seems
2128 my $f = $fi->{Filename};
2129 printdebug "import considering $f ";
2130 (printdebug "only one dfi\n"), next if @dfi == 1;
2131 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2132 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2136 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2138 printdebug "Y ", (join ' ', map { $_//"(none)" }
2139 $compr_ext, $orig_f_part
2142 my $input = new IO::File $f, '<' or die "$f $!";
2146 if (defined $compr_ext) {
2148 Dpkg::Compression::compression_guess_from_filename $f;
2149 fail "Dpkg::Compression cannot handle file $f in source package"
2150 if defined $compr_ext && !defined $cname;
2152 new Dpkg::Compression::Process compression => $cname;
2153 my @compr_cmd = $compr_proc->get_uncompress_cmdline();
2154 my $compr_fh = new IO::Handle;
2155 my $compr_pid = open($compr_fh, "-|") // die $!;
2157 open STDIN, "<&", $input or die $!;
2159 die "dgit (child): exec $compr_cmd[0]: $!\n";
2164 rmtree "_unpack-tar";
2165 mkdir "_unpack-tar" or die $!;
2166 my @tarcmd = qw(tar -x -f -
2167 --no-same-owner --no-same-permissions
2168 --no-acls --no-xattrs --no-selinux);
2169 my $tar_pid = fork // die $!;
2171 chdir "_unpack-tar" or die $!;
2172 open STDIN, "<&", $input or die $!;
2174 die "dgit (child): exec $tarcmd[0]: $!";
2176 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2177 !$? or failedcmd @tarcmd;
2180 (@compr_cmd ? failedcmd @compr_cmd
2182 # finally, we have the results in "tarball", but maybe
2183 # with the wrong permissions
2185 runcmd qw(chmod -R +rwX _unpack-tar);
2186 changedir "_unpack-tar";
2187 remove_stray_gits($f);
2188 mktree_in_ud_here();
2190 my ($tree) = git_add_write_tree();
2191 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2192 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2194 printdebug "one subtree $1\n";
2196 printdebug "multiple subtrees\n";
2199 rmtree "_unpack-tar";
2201 my $ent = [ $f, $tree ];
2203 Orig => !!$orig_f_part,
2204 Sort => (!$orig_f_part ? 2 :
2205 $orig_f_part =~ m/-/g ? 1 :
2213 # put any without "_" first (spec is not clear whether files
2214 # are always in the usual order). Tarballs without "_" are
2215 # the main orig or the debian tarball.
2216 $a->{Sort} <=> $b->{Sort} or
2220 my $any_orig = grep { $_->{Orig} } @tartrees;
2222 my $dscfn = "$package.dsc";
2224 my $treeimporthow = 'package';
2226 open D, ">", $dscfn or die "$dscfn: $!";
2227 print D $dscdata or die "$dscfn: $!";
2228 close D or die "$dscfn: $!";
2229 my @cmd = qw(dpkg-source);
2230 push @cmd, '--no-check' if $dsc_checked;
2231 if (madformat $dsc->{format}) {
2232 push @cmd, '--skip-patches';
2233 $treeimporthow = 'unpatched';
2235 push @cmd, qw(-x --), $dscfn;
2238 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2239 if (madformat $dsc->{format}) {
2240 check_for_vendor_patches();
2244 if (madformat $dsc->{format}) {
2245 my @pcmd = qw(dpkg-source --before-build .);
2246 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2248 $dappliedtree = git_add_write_tree();
2251 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2252 debugcmd "|",@clogcmd;
2253 open CLOGS, "-|", @clogcmd or die $!;
2258 printdebug "import clog search...\n";
2261 my $stanzatext = do { local $/=""; <CLOGS>; };
2262 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2263 last if !defined $stanzatext;
2265 my $desc = "package changelog, entry no.$.";
2266 open my $stanzafh, "<", \$stanzatext or die;
2267 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2268 $clogp //= $thisstanza;
2270 printdebug "import clog $thisstanza->{version} $desc...\n";
2272 last if !$any_orig; # we don't need $r1clogp
2274 # We look for the first (most recent) changelog entry whose
2275 # version number is lower than the upstream version of this
2276 # package. Then the last (least recent) previous changelog
2277 # entry is treated as the one which introduced this upstream
2278 # version and used for the synthetic commits for the upstream
2281 # One might think that a more sophisticated algorithm would be
2282 # necessary. But: we do not want to scan the whole changelog
2283 # file. Stopping when we see an earlier version, which
2284 # necessarily then is an earlier upstream version, is the only
2285 # realistic way to do that. Then, either the earliest
2286 # changelog entry we have seen so far is indeed the earliest
2287 # upload of this upstream version; or there are only changelog
2288 # entries relating to later upstream versions (which is not
2289 # possible unless the changelog and .dsc disagree about the
2290 # version). Then it remains to choose between the physically
2291 # last entry in the file, and the one with the lowest version
2292 # number. If these are not the same, we guess that the
2293 # versions were created in a non-monotonic order rather than
2294 # that the changelog entries have been misordered.
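# Illustration (not from the changelog itself): with upstream version
# 1.2 and entries 1.2-3, 1.2-2, 1.2-1, 1.1-7 (newest first), we stop
# when we reach 1.1-7, and $r1clogp ends up as 1.2-1 - the entry which
# introduced upstream 1.2.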
2296 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2298 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2299 $r1clogp = $thisstanza;
2301 printdebug "import clog $r1clogp->{version} becomes r1\n";
2303 die $! if CLOGS->error;
2304 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2306 $clogp or fail "package changelog has no entries!";
2308 my $authline = clogp_authline $clogp;
2309 my $changes = getfield $clogp, 'Changes';
2310 my $cversion = getfield $clogp, 'Version';
2313 $r1clogp //= $clogp; # maybe there's only one entry;
2314 my $r1authline = clogp_authline $r1clogp;
2315 # Strictly, r1authline might now be wrong if it's going to be
2316 # unused because !$any_orig. Whatever.
2318 printdebug "import tartrees authline $authline\n";
2319 printdebug "import tartrees r1authline $r1authline\n";
2321 foreach my $tt (@tartrees) {
2322 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2324 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2327 committer $r1authline
2331 [dgit import orig $tt->{F}]
2339 [dgit import tarball $package $cversion $tt->{F}]
2344 printdebug "import main commit\n";
2346 open C, ">../commit.tmp" or die $!;
2347 print C <<END or die $!;
2350 print C <<END or die $! foreach @tartrees;
2353 print C <<END or die $!;
2359 [dgit import $treeimporthow $package $cversion]
2363 my $rawimport_hash = make_commit qw(../commit.tmp);
2365 if (madformat $dsc->{format}) {
2366 printdebug "import apply patches...\n";
2368 # regularise the state of the working tree so that
2369 # the checkout of $rawimport_hash works nicely.
2370 my $dappliedcommit = make_commit_text(<<END);
2377 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2379 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2381 # We need the answers to be reproducible
2382 my @authline = clogp_authline($clogp);
2383 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2384 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2385 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2386 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2387 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2388 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2390 my $path = $ENV{PATH} or die;
2392 foreach my $use_absurd (qw(0 1)) {
2393 runcmd @git, qw(checkout -q unpa);
2394 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2395 local $ENV{PATH} = $path;
2398 progress "warning: $@";
2399 $path = "$absurdity:$path";
2400 progress "$us: trying slow absurd-git-apply...";
2401 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2406 die "forbid absurd git-apply\n" if $use_absurd
2407 && forceing [qw(import-gitapply-no-absurd)];
2408 die "only absurd git-apply!\n" if !$use_absurd
2409 && forceing [qw(import-gitapply-absurd)];
2411 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2412 local $ENV{PATH} = $path if $use_absurd;
2414 my @showcmd = (gbp_pq, qw(import));
2415 my @realcmd = shell_cmd
2416 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2417 debugcmd "+",@realcmd;
2418 if (system @realcmd) {
2419 die +(shellquote @showcmd).
2421 failedcmd_waitstatus()."\n";
2424 my $gapplied = git_rev_parse('HEAD');
2425 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2426 $gappliedtree eq $dappliedtree or
2428 gbp-pq import and dpkg-source disagree!
2429 gbp-pq import gave commit $gapplied
2430 gbp-pq import gave tree $gappliedtree
2431 dpkg-source --before-build gave tree $dappliedtree
2433 $rawimport_hash = $gapplied;
2438 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2443 progress "synthesised git commit from .dsc $cversion";
2445 my $rawimport_mergeinput = {
2446 Commit => $rawimport_hash,
2447 Info => "Import of source package",
2449 my @output = ($rawimport_mergeinput);
2451 if ($lastpush_mergeinput) {
2452 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2453 my $oversion = getfield $oldclogp, 'Version';
2455 version_compare($oversion, $cversion);
2457 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2458 { Message => <<END, ReverseParents => 1 });
2459 Record $package ($cversion) in archive suite $csuite
2461 } elsif ($vcmp > 0) {
2462 print STDERR <<END or die $!;
2464 Version actually in archive: $cversion (older)
2465 Last version pushed with dgit: $oversion (newer or same)
2468 @output = $lastpush_mergeinput;
2470 # Same version. Use what's in the server git branch,
2471 # discarding our own import. (This could happen if the
2472 # server automatically imports all packages into git.)
2473 @output = $lastpush_mergeinput;
2476 changedir '../../../..';
2481 sub complete_file_from_dsc ($$;$) {
2482 our ($dstdir, $fi, $refetched) = @_;
2483 # Ensures that we have, in $dstdir, the file $fi, with the correct
2484 # contents. (Downloading it from alongside $dscurl if necessary.)
2485 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2486 # and will set $$refetched=1 if it did so (or tried to).
2488 my $f = $fi->{Filename};
2489 my $tf = "$dstdir/$f";
2493 my $checkhash = sub {
2494 open F, "<", "$tf" or die "$tf: $!";
2495 $fi->{Digester}->reset();
2496 $fi->{Digester}->addfile(*F);
2497 F->error and die $!;
2498 my $got = $fi->{Digester}->hexdigest();
2499 return $got eq $fi->{Hash};
2502 if (stat_exists $tf) {
2503 if ($checkhash->()) {
2504 progress "using existing $f";
2508 fail "file $f has hash $got but .dsc".
2509 " demands hash $fi->{Hash} ".
2510 "(perhaps you should delete this file?)";
2512 progress "need to fetch correct version of $f";
2513 unlink $tf or die "$tf $!";
2516 printdebug "$tf does not exist, need to fetch\n";
2520 $furl =~ s{/[^/]+$}{};
2522 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2523 die "$f ?" if $f =~ m#/#;
2524 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2525 return 0 if !act_local();
2528 fail "file $f has hash $got but .dsc".
2529 " demands hash $fi->{Hash} ".
2530 "(got wrong file from archive!)";
2535 sub ensure_we_have_orig () {
2536 my @dfi = dsc_files_info();
2537 foreach my $fi (@dfi) {
2538 my $f = $fi->{Filename};
2539 next unless is_orig_file_in_dsc($f, \@dfi);
2540 complete_file_from_dsc('..', $fi)
2545 #---------- git fetch ----------
2547 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2548 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
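# (Illustrative only, assuming a conventional Debian configuration, not
# taken from this file: for the Debian distro and suite sid these are
# roughly "refs/dgit-fetch/debian" and "refs/dgit-fetch/debian/dgit/sid".)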
2550 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2551 # locally fetched refs because they have unhelpful names and clutter
2552 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2553 # whether we have made another local ref which refers to this object).
2555 # (If we deleted them unconditionally, then we might end up
2556 # re-fetching the same git objects each time dgit fetch was run.)
2558 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2559 # in git_fetch_us to fetch the refs in question, and possibly a call
2560 # to lrfetchref_used.
2562 our (%lrfetchrefs_f, %lrfetchrefs_d);
2563 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
2565 sub lrfetchref_used ($) {
2566 my ($fullrefname) = @_;
2567 my $objid = $lrfetchrefs_f{$fullrefname};
2568 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2571 sub git_lrfetch_sane {
2572 my ($supplementary, @specs) = @_;
2573 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2574 # at least as regards @specs. Also leave the results in
2575 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2576 # able to clean these up.
2578 # With $supplementary==1, @specs must not contain wildcards
2579 # and we add to our previous fetches (non-atomically).
2581 # This is rather miserable:
2582 # When git fetch --prune is passed a fetchspec ending with a *,
2583 # it does a plausible thing. If there is no * then:
2584 # - it matches subpaths too, even if the supplied refspec
2585 # starts with refs/, and behaves completely madly if the source
2586 # has refs/refs/something. (See, for example, Debian #NNNN.)
2587 # - if there is no matching remote ref, it bombs out the whole
2589 # We want to fetch a fixed ref, and we don't know in advance
2590 # if it exists, so this is not suitable.
2592 # Our workaround is to use git ls-remote. git ls-remote has its
2593 # own quirks. Notably, it has the absurd multi-tail-matching
2594 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2595 # refs/refs/foo etc.
2597 # Also, we want an idempotent snapshot, but we have to make two
2598 # calls to the remote: one to git ls-remote and one to git fetch. The
2599 # solution is to use git ls-remote to obtain a target state, and
2600 # git fetch to try to generate it. If we don't manage to generate
2601 # the target state, we try again.
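# In outline (summary of the loop below): repeatedly (1) git ls-remote
# to learn which refs we want (%wantr); (2) git fetch the corresponding
# fetchspecs; (3) compare what landed under lrfetchrefs with %wantr,
# deleting or fixing up stray refs, and go round again if an object we
# want is still missing (eg because we raced with a server update).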
2603 my $url = access_giturl();
2605 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2607 my $specre = join '|', map {
2610 my $wildcard = $x =~ s/\\\*$/.*/;
2611 die if $wildcard && $supplementary;
2614 printdebug "git_lrfetch_sane specre=$specre\n";
2615 my $wanted_rref = sub {
2617 return m/^(?:$specre)$/;
2620 my $fetch_iteration = 0;
2623 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2624 if (++$fetch_iteration > 10) {
2625 fail "too many iterations trying to get sane fetch!";
2628 my @look = map { "refs/$_" } @specs;
2629 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2633 open GITLS, "-|", @lcmd or die $!;
2635 printdebug "=> ", $_;
2636 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2637 my ($objid,$rrefname) = ($1,$2);
2638 if (!$wanted_rref->($rrefname)) {
2640 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2644 $wantr{$rrefname} = $objid;
2647 close GITLS or failedcmd @lcmd;
2649 # OK, now %wantr is exactly what we want for refs in @specs
2651 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2652 "+refs/$_:".lrfetchrefs."/$_";
2655 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2657 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2658 runcmd_ordryrun_local @fcmd if @fspecs;
2660 if (!$supplementary) {
2661 %lrfetchrefs_f = ();
2665 git_for_each_ref(lrfetchrefs, sub {
2666 my ($objid,$objtype,$lrefname,$reftail) = @_;
2667 $lrfetchrefs_f{$lrefname} = $objid;
2668 $objgot{$objid} = 1;
2671 if ($supplementary) {
2675 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2676 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2677 if (!exists $wantr{$rrefname}) {
2678 if ($wanted_rref->($rrefname)) {
2680 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2684 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2687 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2688 delete $lrfetchrefs_f{$lrefname};
2692 foreach my $rrefname (sort keys %wantr) {
2693 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2694 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2695 my $want = $wantr{$rrefname};
2696 next if $got eq $want;
2697 if (!defined $objgot{$want}) {
2699 warning: git ls-remote suggests we want $lrefname
2700 warning: and it should refer to $want
2701 warning: but git fetch didn't fetch that object to any relevant ref.
2702 warning: This may be due to a race with someone updating the server.
2703 warning: Will try again...
2705 next FETCH_ITERATION;
2708 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says we want $want
2710 runcmd_ordryrun_local @git, qw(update-ref -m),
2711 "dgit fetch git fetch fixup", $lrefname, $want;
2712 $lrfetchrefs_f{$lrefname} = $want;
2717 if (defined $csuite) {
2718 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2719 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2720 my ($objid,$objtype,$lrefname,$reftail) = @_;
2721 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2722 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2726 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2727 Dumper(\%lrfetchrefs_f);
2730 sub git_fetch_us () {
2731 # Want to fetch only what we are going to use, unless
2732 # deliberately-not-ff, in which case we must fetch everything.
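# In the normal case the specs cover the dgit tag patterns for this
# distro, the server suite branch and the rewrite map; with
# deliberately-not-ff we also fetch all tags and heads (descriptive
# summary of the code below).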
2734 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2736 (quiltmode_splitbrain
2737 ? (map { $_->('*',access_nomdistro) }
2738 \&debiantag_new, \&debiantag_maintview)
2739 : debiantags('*',access_nomdistro));
2740 push @specs, server_branch($csuite);
2741 push @specs, $rewritemap;
2742 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2744 git_lrfetch_sane 0, @specs;
2747 my @tagpats = debiantags('*',access_nomdistro);
2749 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2750 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2751 printdebug "currently $fullrefname=$objid\n";
2752 $here{$fullrefname} = $objid;
2754 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2755 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2756 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2757 printdebug "offered $lref=$objid\n";
2758 if (!defined $here{$lref}) {
2759 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2760 runcmd_ordryrun_local @upd;
2761 lrfetchref_used $fullrefname;
2762 } elsif ($here{$lref} eq $objid) {
2763 lrfetchref_used $fullrefname;
2766 "Not updateting $lref from $here{$lref} to $objid.\n";
2771 #---------- dsc and archive handling ----------
2773 sub mergeinfo_getclogp ($) {
2774 # Ensures that $mi->{Clogp} exists and returns it
2776 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2779 sub mergeinfo_version ($) {
2780 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2783 sub fetch_from_archive_record_1 ($) {
2785 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2786 'DGIT_ARCHIVE', $hash;
2787 cmdoutput @git, qw(log -n2), $hash;
2788 # ... gives git a chance to complain if our commit is malformed
2791 sub fetch_from_archive_record_2 ($) {
2793 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2797 dryrun_report @upd_cmd;
2801 sub parse_dsc_field ($$) {
2802 my ($dsc, $what) = @_;
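# The Dgit field takes one of two shapes (summary of the cases below):
#   "<commit> <distro> <tag> <url>"  - commit id plus distro name,
#                                      hint tag and hint url
#   "<commit>"                       - older form, bare commit id
# An absent or empty field means no git hash was recorded.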
2804 foreach my $field (@ourdscfield) {
2805 $f = $dsc->{$field};
2809 progress "$what: NO git hash";
2810 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2811 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2812 progress "$what: specified git info ($dsc_distro)";
2813 $dsc_hint_tag = [ $dsc_hint_tag ];
2814 } elsif ($f =~ m/^\w+\s*$/) {
2816 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2817 dgit.default.distro);
2818 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2820 progress "$what: specified git hash";
2822 fail "$what: invalid Dgit info";
2826 sub resolve_dsc_field_commit ($$) {
2827 my ($already_distro, $already_mapref) = @_;
2829 return unless defined $dsc_hash;
2832 defined $already_mapref &&
2833 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2834 ? $already_mapref : undef;
2838 my ($what, @fetch) = @_;
2840 local $idistro = $dsc_distro;
2841 my $lrf = lrfetchrefs;
2843 if (!$chase_dsc_distro) {
2845 "not chasing .dsc distro $dsc_distro: not fetching $what";
2850 ".dsc names distro $dsc_distro: fetching $what";
2852 my $url = access_giturl();
2853 if (!defined $url) {
2854 defined $dsc_hint_url or fail <<END;
2855 .dsc Dgit metadata is in context of distro $dsc_distro
2856 for which we have no configured url and .dsc provides no hint
2859 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2860 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2861 parse_cfg_bool "dsc-url-proto-ok", 'false',
2862 cfg("dgit.dsc-url-proto-ok.$proto",
2863 "dgit.default.dsc-url-proto-ok")
2865 .dsc Dgit metadata is in context of distro $dsc_distro
2866 for which we have no configured url;
2867 .dsc provides hinted url with protocol $proto which is unsafe.
2868 (can be overridden by config - consult documentation)
2870 $url = $dsc_hint_url;
2873 git_lrfetch_sane 1, @fetch;
2878 my $rewrite_enable = do {
2879 local $idistro = $dsc_distro;
2880 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2883 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2884 if (!defined $mapref) {
2885 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2886 $mapref = $lrf.'/'.$rewritemap;
2888 my $rewritemapdata = git_cat_file $mapref.':map';
2889 if (defined $rewritemapdata
2890 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2892 "server's git history rewrite map contains a relevant entry!";
2895 if (defined $dsc_hash) {
2896 progress "using rewritten git hash in place of .dsc value";
2898 progress "server data says .dsc hash is to be disregarded";
2903 if (!defined git_cat_file $dsc_hash) {
2904 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2905 my $lrf = $do_fetch->("additional commits", @tags) &&
2906 defined git_cat_file $dsc_hash
2908 .dsc Dgit metadata requires commit $dsc_hash
2909 but we could not obtain that object anywhere.
2911 foreach my $t (@tags) {
2912 my $fullrefname = $lrf.'/'.$t;
2913 print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2914 next unless $lrfetchrefs_f{$fullrefname};
2915 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2916 lrfetchref_used $fullrefname;
2921 sub fetch_from_archive () {
2922 ensure_setup_existing_tree();
2924 # Ensures that lrref() is what is actually in the archive, one way
2925 # or another, according to us - ie this client's
2926 # appropriately-updated archive view. Also returns the commit id.
2927 # If there is nothing in the archive, leaves lrref alone and
2928 # returns undef. git_fetch_us must have already been called.
2932 parse_dsc_field($dsc, 'last upload to archive');
2933 resolve_dsc_field_commit access_basedistro,
2934 lrfetchrefs."/".$rewritemap
2936 progress "no version available from the archive";
2939 # If the archive's .dsc has a Dgit field, there are three
2940 # relevant git commitids we need to choose between and/or merge
2942 # 1. $dsc_hash: the Dgit field from the archive
2943 # 2. $lastpush_hash: the suite branch on the dgit git server
2944 # 3. $lastfetch_hash: our local tracking branch for the suite
2946 # These may all be distinct and need not be in any fast forward
2949 # If the dsc was pushed to this suite, then the server suite
2950 # branch will have been updated; but it might have been pushed to
2951 # a different suite and copied by the archive. Conversely a more
2952 # recent version may have been pushed with dgit but not appeared
2953 # in the archive (yet).
2955 # $lastfetch_hash may be awkward because archive imports
2956 # (particularly, imports of Dgit-less .dscs) are performed only as
2957 # needed on individual clients, so different clients may perform a
2958 # different subset of them - and these imports are only made
2959 # public during push. So $lastfetch_hash may represent a set of
2960 # imports different to a subsequent upload by a different dgit
2963 # Our approach is as follows:
2965 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2966 # descendant of $dsc_hash, then it was pushed by a dgit user who
2967 # had based their work on $dsc_hash, so we should prefer it.
2968 # Otherwise, $dsc_hash was installed into this suite in the
2969 # archive other than by a dgit push, and (necessarily) after the
2970 # last dgit push into that suite (since a dgit push would have
2971 # been descended from the dgit server git branch); thus, in that
2972 # case, we prefer the archive's version (and produce a
2973 # pseudo-merge to overwrite the dgit server git branch).
2975 # (If there is no Dgit field in the archive's .dsc then
2976 # generate_commit_from_dsc uses the version numbers to decide
2977 # whether the suite branch or the archive is newer. If the suite
2978 # branch is newer it ignores the archive's .dsc; otherwise it
2979 # generates an import of the .dsc, and produces a pseudo-merge to
2980 # overwrite the suite branch with the archive contents.)
2982 # The outcome of that part of the algorithm is the `public view',
2983 # and is the same for all dgit clients: it does not depend on any
2984 # unpublished history in the local tracking branch.
2986 # As between the public view and the local tracking branch: The
2987 # local tracking branch is only updated by dgit fetch, and
2988 # whenever dgit fetch runs it includes the public view in the
2989 # local tracking branch. Therefore if the public view is not
2990 # descended from the local tracking branch, the local tracking
2991 # branch must contain history which was imported from the archive
2992 # but never pushed; and, its tip is now out of date. So, we make
2993 # a pseudo-merge to overwrite the old imports and stitch the old
2996 # Finally: we do not necessarily reify the public view (as
2997 # described above). This is so that we do not end up stacking two
2998 # pseudo-merges. So what we actually do is figure out the inputs
2999 # to any public view pseudo-merge and put them in @mergeinputs.
3002 # $mergeinputs[]{Commit}
3003 # $mergeinputs[]{Info}
3004 # $mergeinputs[0] is the one whose tree we use
3005 # @mergeinputs is in the order we use in the actual commit)
3008 # $mergeinputs[]{Message} is a commit message to use
3009 # $mergeinputs[]{ReverseParents} if defined, specifies that the parent
3010 # list should be in the opposite order
3011 # Such an entry has no Commit or Info. It applies only when found
3012 # in the last entry. (This ugliness is to support making
3013 # identical imports to previous dgit versions.)
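# Illustration (not literal data): if the archive's .dsc names a commit
# which is not descended from the last dgit push, we end up with roughly
#   @mergeinputs = ( { Commit => $dsc_hash,      Info => "Dgit field in .dsc from archive" },
#                    { Commit => $lastpush_hash, Info => "dgit suite branch on dgit git server" } );
# and the pseudo-merge made below uses the first entry's tree.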
3015 my $lastpush_hash = git_get_ref(lrfetchref());
3016 printdebug "previous reference hash=$lastpush_hash\n";
3017 $lastpush_mergeinput = $lastpush_hash && {
3018 Commit => $lastpush_hash,
3019 Info => "dgit suite branch on dgit git server",
3022 my $lastfetch_hash = git_get_ref(lrref());
3023 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3024 my $lastfetch_mergeinput = $lastfetch_hash && {
3025 Commit => $lastfetch_hash,
3026 Info => "dgit client's archive history view",
3029 my $dsc_mergeinput = $dsc_hash && {
3030 Commit => $dsc_hash,
3031 Info => "Dgit field in .dsc from archive",
3035 my $del_lrfetchrefs = sub {
3038 printdebug "del_lrfetchrefs...\n";
3039 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3040 my $objid = $lrfetchrefs_d{$fullrefname};
3041 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3043 $gur ||= new IO::Handle;
3044 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3046 printf $gur "delete %s %s\n", $fullrefname, $objid;
3049 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3053 if (defined $dsc_hash) {
3054 ensure_we_have_orig();
3055 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3056 @mergeinputs = $dsc_mergeinput
3057 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3058 print STDERR <<END or die $!;
3060 Git commit in archive is behind the last version allegedly pushed/uploaded.
3061 Commit referred to by archive: $dsc_hash
3062 Last version pushed with dgit: $lastpush_hash
3065 @mergeinputs = ($lastpush_mergeinput);
3067 # Archive has .dsc which is not a descendant of the last dgit
3068 # push. This can happen if the archive moves .dscs about.
3069 # Just follow its lead.
3070 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3071 progress "archive .dsc names newer git commit";
3072 @mergeinputs = ($dsc_mergeinput);
3074 progress "archive .dsc names other git commit, fixing up";
3075 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3079 @mergeinputs = generate_commits_from_dsc();
3080 # We have just done an import. Now, our import algorithm might
3081 # have been improved. But even so we do not want to generate
3082 # a new different import of the same package. So if the
3083 # version numbers are the same, just use our existing version.
3084 # If the version numbers are different, the archive has changed
3085 # (perhaps, rewound).
3086 if ($lastfetch_mergeinput &&
3087 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3088 (mergeinfo_version $mergeinputs[0]) )) {
3089 @mergeinputs = ($lastfetch_mergeinput);
3091 } elsif ($lastpush_hash) {
3092 # only in git, not in the archive yet
3093 @mergeinputs = ($lastpush_mergeinput);
3094 print STDERR <<END or die $!;
3096 Package not found in the archive, but has allegedly been pushed using dgit.
3100 printdebug "nothing found!\n";
3101 if (defined $skew_warning_vsn) {
3102 print STDERR <<END or die $!;
3104 Warning: relevant archive skew detected.
3105 Archive allegedly contains $skew_warning_vsn
3106 But we were not able to obtain any version from the archive or git.
3110 unshift @end, $del_lrfetchrefs;
3114 if ($lastfetch_hash &&
3116 my $h = $_->{Commit};
3117 $h and is_fast_fwd($lastfetch_hash, $h);
3118 # If true, one of the existing parents of this commit
3119 # is a descendant of the $lastfetch_hash, so we'll
3120 # be ff from that automatically.
3124 push @mergeinputs, $lastfetch_mergeinput;
3127 printdebug "fetch mergeinfos:\n";
3128 foreach my $mi (@mergeinputs) {
3130 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3132 printdebug sprintf " ReverseParents=%d Message=%s",
3133 $mi->{ReverseParents}, $mi->{Message};
3137 my $compat_info= pop @mergeinputs
3138 if $mergeinputs[$#mergeinputs]{Message};
3140 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3143 if (@mergeinputs > 1) {
3145 my $tree_commit = $mergeinputs[0]{Commit};
3147 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3148 $tree =~ m/\n\n/; $tree = $`;
3149 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3152 # We use the changelog author of the package in question as the
3153 # author of this pseudo-merge. This is (roughly) correct if
3154 # this commit is simply representing a non-dgit upload.
3155 # (Roughly because it does not record sponsorship - but we
3156 # don't have sponsorship info because that's in the .changes,
3157 # which isn't in the archive.)
3159 # But, it might be that we are representing archive history
3160 # updates (including in-archive copies). These are not really
3161 # the responsibility of the person who created the .dsc, but
3162 # there is no-one whose name we should better use. (The
3163 # author of the .dsc-named commit is clearly worse.)
3165 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3166 my $author = clogp_authline $useclogp;
3167 my $cversion = getfield $useclogp, 'Version';
3169 my $mcf = ".git/dgit/mergecommit";
3170 open MC, ">", $mcf or die "$mcf $!";
3171 print MC <<END or die $!;
3175 my @parents = grep { $_->{Commit} } @mergeinputs;
3176 @parents = reverse @parents if $compat_info->{ReverseParents};
3177 print MC <<END or die $! foreach @parents;
3181 print MC <<END or die $!;
3187 if (defined $compat_info->{Message}) {
3188 print MC $compat_info->{Message} or die $!;
3190 print MC <<END or die $!;
3191 Record $package ($cversion) in archive suite $csuite
3195 my $message_add_info = sub {
3197 my $mversion = mergeinfo_version $mi;
3198 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3202 $message_add_info->($mergeinputs[0]);
3203 print MC <<END or die $!;
3204 should be treated as descended from
3206 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3210 $hash = make_commit $mcf;
3212 $hash = $mergeinputs[0]{Commit};
3214 printdebug "fetch hash=$hash\n";
3217 my ($lasth, $what) = @_;
3218 return unless $lasth;
3219 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3222 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3224 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3226 fetch_from_archive_record_1($hash);
3228 if (defined $skew_warning_vsn) {
3230 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3231 my $gotclogp = commit_getclogp($hash);
3232 my $got_vsn = getfield $gotclogp, 'Version';
3233 printdebug "SKEW CHECK GOT $got_vsn\n";
3234 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3235 print STDERR <<END or die $!;
3237 Warning: archive skew detected. Using the available version:
3238 Archive allegedly contains $skew_warning_vsn
3239 We were able to obtain only $got_vsn
3245 if ($lastfetch_hash ne $hash) {
3246 fetch_from_archive_record_2($hash);
3249 lrfetchref_used lrfetchref();
3251 unshift @end, $del_lrfetchrefs;
3255 sub set_local_git_config ($$) {
3257 runcmd @git, qw(config), $k, $v;
3260 sub setup_mergechangelogs (;$) {
3262 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3264 my $driver = 'dpkg-mergechangelogs';
3265 my $cb = "merge.$driver";
3266 my $attrs = '.git/info/attributes';
3267 ensuredir '.git/info';
3269 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3270 if (!open ATTRS, "<", $attrs) {
3271 $!==ENOENT or die "$attrs: $!";
3275 next if m{^debian/changelog\s};
3276 print NATTRS $_, "\n" or die $!;
3278 ATTRS->error and die $!;
3281 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3284 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3285 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3287 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3290 sub setup_useremail (;$) {
3292 return unless $always || access_cfg_bool(1, 'setup-useremail');
3295 my ($k, $envvar) = @_;
3296 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3297 return unless defined $v;
3298 set_local_git_config "user.$k", $v;
3301 $setup->('email', 'DEBEMAIL');
3302 $setup->('name', 'DEBFULLNAME');
3305 sub ensure_setup_existing_tree () {
3306 my $k = "remote.$remotename.skipdefaultupdate";
3307 my $c = git_get_config $k;
3308 return if defined $c;
3309 set_local_git_config $k, 'true';
3312 sub setup_new_tree () {
3313 setup_mergechangelogs();
3317 sub multisuite_suite_child ($$$) {
3318 my ($tsuite, $merginputs, $fn) = @_;
3319 # in child, sets things up, calls $fn->(), and returns undef
3320 # in parent, returns canonical suite name for $tsuite
3321 my $canonsuitefh = IO::File::new_tmpfile;
3322 my $pid = fork // die $!;
3325 $us .= " [$isuite]";
3326 $debugprefix .= " ";
3327 progress "fetching $tsuite...";
3328 canonicalise_suite();
3329 print $canonsuitefh $csuite, "\n" or die $!;
3330 close $canonsuitefh or die $!;
3334 (waitpid $pid, 0) == $pid or die $!;
3335 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3336 seek $canonsuitefh,0,0 or die $!;
3337 local $csuite = <$canonsuitefh>;
3338 die $! unless defined $csuite && chomp $csuite;
3340 printdebug "multisuite $tsuite missing\n";
3343 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3344 push @$merginputs, {
3351 sub fork_for_multisuite ($) {
3352 my ($before_fetch_merge) = @_;
3353 # if nothing unusual, just returns ''
3356 # returns 0 to caller in child, to do first of the specified suites
3357 # in child, $csuite is not yet set
3359 # returns 1 to caller in parent, to finish up anything needed after
3360 # in parent, $csuite is set to canonicalised portmanteau
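# Illustration: for "dgit clone foo stretch,stretch-backports" the parent
# ends up with a canonicalised portmanteau $csuite of roughly
# "stretch,-backports" (the base suite prefix is stripped from the later
# suites below); the suite names here are only an example.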
3362 my $org_isuite = $isuite;
3363 my @suites = split /\,/, $isuite;
3364 return '' unless @suites > 1;
3365 printdebug "fork_for_multisuite: @suites\n";
3369 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3371 return 0 unless defined $cbasesuite;
3373 fail "package $package missing in (base suite) $cbasesuite"
3374 unless @mergeinputs;
3376 my @csuites = ($cbasesuite);
3378 $before_fetch_merge->();
3380 foreach my $tsuite (@suites[1..$#suites]) {
3381 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3387 # xxx collect the ref here
3389 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3390 push @csuites, $csubsuite;
3393 foreach my $mi (@mergeinputs) {
3394 my $ref = git_get_ref $mi->{Ref};
3395 die "$mi->{Ref} ?" unless length $ref;
3396 $mi->{Commit} = $ref;
3399 $csuite = join ",", @csuites;
3401 my $previous = git_get_ref lrref;
3403 unshift @mergeinputs, {
3404 Commit => $previous,
3405 Info => "local combined tracking branch",
3407 "archive seems to have rewound: local tracking branch is ahead!",
3411 foreach my $ix (0..$#mergeinputs) {
3412 $mergeinputs[$ix]{Index} = $ix;
3415 @mergeinputs = sort {
3416 -version_compare(mergeinfo_version $a,
3417 mergeinfo_version $b) # highest version first
3419 $a->{Index} <=> $b->{Index}; # earliest in spec first
3425 foreach my $mi (@mergeinputs) {
3426 printdebug "multisuite merge check $mi->{Info}\n";
3427 foreach my $previous (@needed) {
3428 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3429 printdebug "multisuite merge un-needed $previous->{Info}\n";
3433 printdebug "multisuite merge this-needed\n";
3434 $mi->{Character} = '+';
3437 $needed[0]{Character} = '*';
3439 my $output = $needed[0]{Commit};
3442 printdebug "multisuite merge nontrivial\n";
3443 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3445 my $commit = "tree $tree\n";
3446 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3447 "Input branches:\n";
3449 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3450 printdebug "multisuite merge include $mi->{Info}\n";
3451 $mi->{Character} //= ' ';
3452 $commit .= "parent $mi->{Commit}\n";
3453 $msg .= sprintf " %s %-25s %s\n",
3455 (mergeinfo_version $mi),
3458 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3460 " * marks the highest version branch, which choose to use\n".
3461 " + marks each branch which was not already an ancestor\n\n".
3462 "[dgit multi-suite $csuite]\n";
3464 "author $authline\n".
3465 "committer $authline\n\n";
3466 $output = make_commit_text $commit.$msg;
3467 printdebug "multisuite merge generated $output\n";
3470 fetch_from_archive_record_1($output);
3471 fetch_from_archive_record_2($output);
3473 progress "calculated combined tracking suite $csuite";
3478 sub clone_set_head () {
3479 open H, "> .git/HEAD" or die $!;
3480 print H "ref: ".lref()."\n" or die $!;
3483 sub clone_finish ($) {
3485 runcmd @git, qw(reset --hard), lrref();
3486 runcmd qw(bash -ec), <<'END';
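# Set each checked-out file's timestamps from those of the top-level
# directory "." so the fresh checkout has consistent mtimes.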
3488 git ls-tree -r --name-only -z HEAD | \
3489 xargs -0r touch -h -r . --
3491 printdone "ready for work in $dstdir";
3496 badusage "dry run makes no sense with clone" unless act_local();
3498 my $multi_fetched = fork_for_multisuite(sub {
3499 printdebug "multi clone before fetch merge\n";
3502 if ($multi_fetched) {
3503 printdebug "multi clone after fetch merge\n";
3505 clone_finish($dstdir);
3508 printdebug "clone main body\n";
3510 canonicalise_suite();
3511 my $hasgit = check_for_git();
3512 mkdir $dstdir or fail "create \`$dstdir': $!";
3514 runcmd @git, qw(init -q);
3516 my $giturl = access_giturl(1);
3517 if (defined $giturl) {
3518 runcmd @git, qw(remote add), 'origin', $giturl;
3521 progress "fetching existing git history";
3523 runcmd_ordryrun_local @git, qw(fetch origin);
3525 progress "starting new git history";
3527 fetch_from_archive() or no_such_package;
3528 my $vcsgiturl = $dsc->{'Vcs-Git'};
3529 if (length $vcsgiturl) {
3530 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3531 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3534 clone_finish($dstdir);
3538 canonicalise_suite();
3539 if (check_for_git()) {
3542 fetch_from_archive() or no_such_package();
3543 printdone "fetched into ".lrref();
3547 my $multi_fetched = fork_for_multisuite(sub { });
3548 fetch() unless $multi_fetched; # parent
3549 return if $multi_fetched eq '0'; # child
3550 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3552 printdone "fetched to ".lrref()." and merged into HEAD";
3555 sub check_not_dirty () {
3556 foreach my $f (qw(local-options local-patch-header)) {
3557 if (stat_exists "debian/source/$f") {
3558 fail "git tree contains debian/source/$f";
3562 return if $ignoredirty;
3564 my @cmd = (@git, qw(diff --quiet HEAD));
3566 $!=0; $?=-1; system @cmd;
3569 fail "working tree is dirty (does not match HEAD)";
3575 sub commit_admin ($) {
3578 runcmd_ordryrun_local @git, qw(commit -m), $m;
3581 sub commit_quilty_patch () {
3582 my $output = cmdoutput @git, qw(status --porcelain);
3584 foreach my $l (split /\n/, $output) {
3585 next unless $l =~ m/\S/;
3586 if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
3590 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3592 progress "nothing quilty to commit, ok.";
3595 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3596 runcmd_ordryrun_local @git, qw(add -f), @adds;
3598 Commit Debian 3.0 (quilt) metadata
3600 [dgit ($our_version) quilt-fixup]
3604 sub get_source_format () {
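# Reads debian/source/options (if present) and debian/source/format,
# returning ($format, \%options); for example $format might be
# "3.0 (quilt)" and an options line might look like
#   compression = "gzip"
# (illustrative values, not taken from this tree).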
3606 if (open F, "debian/source/options") {
3610 s/\s+$//; # ignore missing final newline
3612 my ($k, $v) = ($`, $'); #');
3613 $v =~ s/^"(.*)"$/$1/;
3619 F->error and die $!;
3622 die $! unless $!==&ENOENT;
3625 if (!open F, "debian/source/format") {
3626 die $! unless $!==&ENOENT;
3630 F->error and die $!;
3632 return ($_, \%options);
3635 sub madformat_wantfixup ($) {
3637 return 0 unless $format eq '3.0 (quilt)';
3638 our $quilt_mode_warned;
3639 if ($quilt_mode eq 'nocheck') {
3640 progress "Not doing any fixup of \`$format' due to".
3641 " ----no-quilt-fixup or --quilt=nocheck"
3642 unless $quilt_mode_warned++;
3645 progress "Format \`$format', need to check/update patch stack"
3646 unless $quilt_mode_warned++;
3650 sub maybe_split_brain_save ($$$) {
3651 my ($headref, $dgitview, $msg) = @_;
3652 # => message fragment "$saved" describing disposition of $dgitview
3653 return "commit id $dgitview" unless defined $split_brain_save;
3654 my @cmd = (shell_cmd "cd ../../../..",
3655 @git, qw(update-ref -m),
3656 "dgit --dgit-view-save $msg HEAD=$headref",
3657 $split_brain_save, $dgitview);
3659 return "and left in $split_brain_save";
3662 # An "infopair" is a tuple [ $thing, $what ]
3663 # (often $thing is a commit hash; $what is a description)
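# e.g. [ (getfield $arch_clogp, 'Version'), 'version currently in archive' ]
# as constructed in pseudomerge_version_check below.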
3665 sub infopair_cond_equal ($$) {
3667 $x->[0] eq $y->[0] or fail <<END;
3668 $x->[1] ($x->[0]) not equal to $y->[1] ($y->[0])
3672 sub infopair_lrf_tag_lookup ($$) {
3673 my ($tagnames, $what) = @_;
3674 # $tagnames may be an array ref
3675 my @tagnames = ref $tagnames ? @$tagnames : ($tagnames);
3676 printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n";
3677 foreach my $tagname (@tagnames) {
3678 my $lrefname = lrfetchrefs."/tags/$tagname";
3679 my $tagobj = $lrfetchrefs_f{$lrefname};
3680 next unless defined $tagobj;
3681 printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n";
3682 return [ git_rev_parse($tagobj), $what ];
3684 fail @tagnames==1 ? <<END : <<END;
3685 Wanted tag $what (@tagnames) on dgit server, but not found
3687 Wanted tag $what (one of: @tagnames) on dgit server, but not found
3691 sub infopair_cond_ff ($$) {
3692 my ($anc,$desc) = @_;
3693 is_fast_fwd($anc->[0], $desc->[0]) or fail <<END;
3694 $anc->[1] ($anc->[0]) .. $desc->[1] ($desc->[0]) is not fast forward
3698 sub pseudomerge_version_check ($$) {
3699 my ($clogp, $archive_hash) = @_;
3701 my $arch_clogp = commit_getclogp $archive_hash;
3702 my $i_arch_v = [ (getfield $arch_clogp, 'Version'),
3703 'version currently in archive' ];
3704 if (defined $overwrite_version) {
3705 if (length $overwrite_version) {
3706 infopair_cond_equal([ $overwrite_version,
3707 '--overwrite= version' ],
3710 my $v = $i_arch_v->[0];
3711 progress "Checking package changelog for archive version $v ...";
3713 my @xa = ("-f$v", "-t$v");
3714 my $vclogp = parsechangelog @xa;