3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::MoreUtils qw(pairwise);
38 use Text::Glob qw(match_glob);
39 use Fcntl qw(:DEFAULT :flock);
44 our $our_version = 'UNRELEASED'; ###substituted###
45 our $absurdity = undef; ###substituted###
47 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
56 our $dryrun_level = 0;
58 our $buildproductsdir = '..';
64 our $existing_package = 'dpkg';
66 our $changes_since_version;
68 our $overwrite_version; # undef: not specified; '': check changelog
70 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
72 our $split_brain_save;
73 our $we_are_responder;
74 our $we_are_initiator;
75 our $initiator_tempdir;
76 our $patches_applied_dirtily = 00;
80 our $chase_dsc_distro=1;
82 our %forceopts = map { $_=>0 }
83 qw(unrepresentable unsupported-source-format
84 dsc-changes-mismatch changes-origs-exactly
85 import-gitapply-absurd
86 import-gitapply-no-absurd
87 import-dsc-with-dgit-field);
89 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
91 our $suite_re = '[-+.0-9a-z]+';
92 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
93 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
94 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
95 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
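# Illustrative note (not from the original source): with these patterns,
# is_orig_file_of_vsn below accepts names such as
#   hello_2.10.orig.tar.gz
#   hello_2.10.orig-docs.tar.xz.asc
# where "hello" and "2.10" are hypothetical package/version values.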
97 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
98 our $splitbraincache = 'dgit-intern/quilt-cache';
99 our $rewritemap = 'dgit-rewrite/map';
101 our (@git) = qw(git);
102 our (@dget) = qw(dget);
103 our (@curl) = qw(curl);
104 our (@dput) = qw(dput);
105 our (@debsign) = qw(debsign);
106 our (@gpg) = qw(gpg);
107 our (@sbuild) = qw(sbuild);
109 our (@dgit) = qw(dgit);
110 our (@aptget) = qw(apt-get);
111 our (@aptcache) = qw(apt-cache);
112 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
113 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
114 our (@dpkggenchanges) = qw(dpkg-genchanges);
115 our (@mergechanges) = qw(mergechanges -f);
116 our (@gbp_build) = ('');
117 our (@gbp_pq) = ('gbp pq');
118 our (@changesopts) = ('');
120 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
123 'debsign' => \@debsign,
125 'sbuild' => \@sbuild,
129 'apt-get' => \@aptget,
130 'apt-cache' => \@aptcache,
131 'dpkg-source' => \@dpkgsource,
132 'dpkg-buildpackage' => \@dpkgbuildpackage,
133 'dpkg-genchanges' => \@dpkggenchanges,
134 'gbp-build' => \@gbp_build,
135 'gbp-pq' => \@gbp_pq,
136 'ch' => \@changesopts,
137 'mergechanges' => \@mergechanges);
139 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
140 our %opts_cfg_insertpos = map {
142 scalar @{ $opts_opt_map{$_} }
143 } keys %opts_opt_map;
145 sub parseopts_late_defaults();
146 sub setup_gitattrs(;$);
152 our $supplementary_message = '';
153 our $need_split_build_invocation = 0;
154 our $split_brain = 0;
158 return unless forkcheck_mainprocess();
159 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
162 our $remotename = 'dgit';
163 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
167 if (!defined $absurdity) {
169 $absurdity =~ s{/[^/]+$}{/absurd} or die;
173 my ($v,$distro) = @_;
174 return $tagformatfn->($v, $distro);
177 sub debiantag_maintview ($$) {
178 my ($v,$distro) = @_;
179 return "$distro/".dep14_version_mangle $v;
182 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
184 sub lbranch () { return "$branchprefix/$csuite"; }
185 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
186 sub lref () { return "refs/heads/".lbranch(); }
187 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
188 sub rrref () { return server_ref($csuite); }
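# Illustrative example (hypothetical; assumes the usual $branchprefix of
# "dgit", the default remote name, and that server_branch returns
# "dgit/<suite>"): for csuite "sid",
#   lref()  is refs/heads/dgit/sid
#   lrref() is refs/remotes/dgit/dgit/sid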
198 return "${package}_".(stripepoch $vsn).$sfx
203 return srcfn($vsn,".dsc");
206 sub changespat ($;$) {
207 my ($vsn, $arch) = @_;
208 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
211 sub upstreamversion ($) {
223 return unless forkcheck_mainprocess();
224 foreach my $f (@end) {
226 print STDERR "$us: cleanup: $@" if length $@;
230 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
232 sub forceable_fail ($$) {
233 my ($forceoptsl, $msg) = @_;
234 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
235 print STDERR "warning: overriding problem due to --force:\n". $msg;
239 my ($forceoptsl) = @_;
240 my @got = grep { $forceopts{$_} } @$forceoptsl;
241 return 0 unless @got;
243 "warning: skipping checks or functionality due to --force-$got[0]\n";
246 sub no_such_package () {
247 print STDERR "$us: package $package does not exist in suite $isuite\n";
253 printdebug "CD $newdir\n";
254 chdir $newdir or confess "chdir: $newdir: $!";
257 sub deliberately ($) {
259 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
262 sub deliberately_not_fast_forward () {
263 foreach (qw(not-fast-forward fresh-repo)) {
264 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
268 sub quiltmode_splitbrain () {
269 $quilt_mode =~ m/gbp|dpm|unapplied/;
272 sub opts_opt_multi_cmd {
274 push @cmd, split /\s+/, shift @_;
280 return opts_opt_multi_cmd @gbp_pq;
283 #---------- remote protocol support, common ----------
285 # remote push initiator/responder protocol:
286 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
287 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
288 # < dgit-remote-push-ready <actual-proto-vsn>
295 # > supplementary-message NBYTES # $protovsn >= 3
300 # > file parsed-changelog
301 # [indicates that output of dpkg-parsechangelog follows]
302 # > data-block NBYTES
303 # > [NBYTES bytes of data (no newline)]
304 # [maybe some more blocks]
313 # > param head DGIT-VIEW-HEAD
314 # > param csuite SUITE
315 # > param tagformat old|new
316 # > param maint-view MAINT-VIEW-HEAD
318 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
319 # # goes into tag, for replay prevention
322 # [indicates that signed tag is wanted]
323 # < data-block NBYTES
324 # < [NBYTES bytes of data (no newline)]
325 # [maybe some more blocks]
329 # > want signed-dsc-changes
330 # < data-block NBYTES [transfer of signed dsc]
332 # < data-block NBYTES [transfer of signed changes]
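# An illustrative, hypothetical fragment of such an exchange, in the
# notation used above (byte counts and values made up):
#   > file parsed-changelog
#   > data-block 200
#   > [200 bytes of data (no newline)]
#   > data-end
#   > param csuite sid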
340 sub i_child_report () {
341 # Sees if our child has died, and reaps it if so. Returns a string
342 # describing how it died if it failed, or undef otherwise.
343 return undef unless $i_child_pid;
344 my $got = waitpid $i_child_pid, WNOHANG;
345 return undef if $got <= 0;
346 die unless $got == $i_child_pid;
347 $i_child_pid = undef;
348 return undef unless $?;
349 return "build host child ".waitstatusmsg();
354 fail "connection lost: $!" if $fh->error;
355 fail "protocol violation; $m not expected";
358 sub badproto_badread ($$) {
360 fail "connection lost: $!" if $!;
361 my $report = i_child_report();
362 fail $report if defined $report;
363 badproto $fh, "eof (reading $wh)";
366 sub protocol_expect (&$) {
367 my ($match, $fh) = @_;
370 defined && chomp or badproto_badread $fh, "protocol message";
378 badproto $fh, "\`$_'";
381 sub protocol_send_file ($$) {
382 my ($fh, $ourfn) = @_;
383 open PF, "<", $ourfn or die "$ourfn: $!";
386 my $got = read PF, $d, 65536;
387 die "$ourfn: $!" unless defined $got;
389 print $fh "data-block ".length($d)."\n" or die $!;
390 print $fh $d or die $!;
392 PF->error and die "$ourfn $!";
393 print $fh "data-end\n" or die $!;
397 sub protocol_read_bytes ($$) {
398 my ($fh, $nbytes) = @_;
399 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto $fh, "bad byte count";
401 my $got = read $fh, $d, $nbytes;
402 $got==$nbytes or badproto_badread $fh, "data block";
406 sub protocol_receive_file ($$) {
407 my ($fh, $ourfn) = @_;
408 printdebug "() $ourfn\n";
409 open PF, ">", $ourfn or die "$ourfn: $!";
411 my ($y,$l) = protocol_expect {
412 m/^data-block (.*)$/ ? (1,$1) :
413 m/^data-end$/ ? (0,) :
417 my $d = protocol_read_bytes $fh, $l;
418 print PF $d or die $!;
423 #---------- remote protocol support, responder ----------
425 sub responder_send_command ($) {
427 return unless $we_are_responder;
428 # (this sub is called even when we are not the responder; hence the early return above)
429 printdebug ">> $command\n";
430 print PO $command, "\n" or die $!;
433 sub responder_send_file ($$) {
434 my ($keyword, $ourfn) = @_;
435 return unless $we_are_responder;
436 printdebug "]] $keyword $ourfn\n";
437 responder_send_command "file $keyword";
438 protocol_send_file \*PO, $ourfn;
441 sub responder_receive_files ($@) {
442 my ($keyword, @ourfns) = @_;
443 die unless $we_are_responder;
444 printdebug "[[ $keyword @ourfns\n";
445 responder_send_command "want $keyword";
446 foreach my $fn (@ourfns) {
447 protocol_receive_file \*PI, $fn;
450 protocol_expect { m/^files-end$/ } \*PI;
453 #---------- remote protocol support, initiator ----------
455 sub initiator_expect (&) {
457 protocol_expect { &$match } \*RO;
460 #---------- end remote code ----------
463 if ($we_are_responder) {
465 responder_send_command "progress ".length($m) or die $!;
466 print PO $m or die $!;
476 $ua = LWP::UserAgent->new();
480 progress "downloading $what...";
481 my $r = $ua->get(@_) or die $!;
482 return undef if $r->code == 404;
483 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
484 return $r->decoded_content(charset => 'none');
487 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
492 failedcmd @_ if system @_;
495 sub act_local () { return $dryrun_level <= 1; }
496 sub act_scary () { return !$dryrun_level; }
499 if (!$dryrun_level) {
500 progress "$us ok: @_";
502 progress "would be ok: @_ (but dry run only)";
507 printcmd(\*STDERR,$debugprefix."#",@_);
510 sub runcmd_ordryrun {
518 sub runcmd_ordryrun_local {
527 my ($first_shell, @cmd) = @_;
528 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
531 our $helpmsg = <<END;
533 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
534 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
535 dgit [dgit-opts] build [dpkg-buildpackage-opts]
536 dgit [dgit-opts] sbuild [sbuild-opts]
537 dgit [dgit-opts] push [dgit-opts] [suite]
538 dgit [dgit-opts] rpush build-host:build-dir ...
539 important dgit options:
540 -k<keyid> sign tag and package with <keyid> instead of default
541 --dry-run -n do not change anything, but go through the motions
542 --damp-run -L like --dry-run but make local changes, without signing
543 --new -N allow introducing a new package
544 --debug -D increase debug level
545 -c<name>=<value> set git config option (used directly by dgit too)
548 our $later_warning_msg = <<END;
549 Perhaps the upload is stuck in incoming. Using the version from git.
553 print STDERR "$us: @_\n", $helpmsg or die $!;
558 @ARGV or badusage "too few arguments";
559 return scalar shift @ARGV;
563 print $helpmsg or die $!;
567 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
569 our %defcfg = ('dgit.default.distro' => 'debian',
570 'dgit.default.default-suite' => 'unstable',
571 'dgit.default.old-dsc-distro' => 'debian',
572 'dgit-suite.*-security.distro' => 'debian-security',
573 'dgit.default.username' => '',
574 'dgit.default.archive-query-default-component' => 'main',
575 'dgit.default.ssh' => 'ssh',
576 'dgit.default.archive-query' => 'madison:',
577 'dgit.default.sshpsql-dbname' => 'service=projectb',
578 'dgit.default.aptget-components' => 'main',
579 'dgit.default.dgit-tag-format' => 'new,old,maint',
580 'dgit.dsc-url-proto-ok.http' => 'true',
581 'dgit.dsc-url-proto-ok.https' => 'true',
582 'dgit.dsc-url-proto-ok.git' => 'true',
583 'dgit.default.dsc-url-proto-ok' => 'false',
584 # old means "repo server accepts pushes with old dgit tags"
585 # new means "repo server accepts pushes with new dgit tags"
586 # maint means "repo server accepts split brain pushes"
587 # hist means "repo server may have old pushes without new tag"
588 # ("hist" is implied by "old")
589 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
590 'dgit-distro.debian.git-check' => 'url',
591 'dgit-distro.debian.git-check-suffix' => '/info/refs',
592 'dgit-distro.debian.new-private-pushers' => 't',
593 'dgit-distro.debian/push.git-url' => '',
594 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
595 'dgit-distro.debian/push.git-user-force' => 'dgit',
596 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
597 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
598 'dgit-distro.debian/push.git-create' => 'true',
599 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
600 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
601 # 'dgit-distro.debian.archive-query-tls-key',
602 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
603 # ^ this does not work because curl is broken nowadays
604 # Fixing #790093 properly will involve providing the key
605 # in some package and maybe updating these paths.
607 # 'dgit-distro.debian.archive-query-tls-curl-args',
608 # '--ca-path=/etc/ssl/ca-debian',
609 # ^ this is a workaround but works (only) on DSA-administered machines
610 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
611 'dgit-distro.debian.git-url-suffix' => '',
612 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
613 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
614 'dgit-distro.debian-security.archive-query' => 'aptget:',
615 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
616 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
617 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
618 'dgit-distro.debian-security.nominal-distro' => 'debian',
619 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
620 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
621 'dgit-distro.ubuntu.git-check' => 'false',
622 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
623 'dgit-distro.test-dummy.ssh' => "$td/ssh",
624 'dgit-distro.test-dummy.username' => "alice",
625 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
626 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
627 'dgit-distro.test-dummy.git-url' => "$td/git",
628 'dgit-distro.test-dummy.git-host' => "git",
629 'dgit-distro.test-dummy.git-path' => "$td/git",
630 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
631 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
632 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
633 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
637 our @gitcfgsources = qw(cmdline local global system);
639 sub git_slurp_config () {
640 local ($debuglevel) = $debuglevel-2;
643 # This algorithm is a bit subtle, but this is needed so that for
644 # options which we want to be single-valued, we allow the
645 # different config sources to override properly. See #835858.
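# For example (hypothetical): if dgit.default.ssh were set in both the
# per-repository (local) and system config files, git_get_config below
# returns the local value, since @gitcfgsources is consulted in
# cmdline, local, global, system order and each source is kept separate.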
646 foreach my $src (@gitcfgsources) {
647 next if $src eq 'cmdline';
648 # we do this ourselves since git doesn't handle it
650 my @cmd = (@git, qw(config -z --get-regexp), "--$src", qw(.*));
653 open GITS, "-|", @cmd or die $!;
656 printdebug "=> ", (messagequote $_), "\n";
658 push @{ $gitcfgs{$src}{$`} }, $'; #';
662 or ($!==0 && $?==256)
667 sub git_get_config ($) {
669 foreach my $src (@gitcfgsources) {
670 my $l = $gitcfgs{$src}{$c};
671 croak "$l $c" if $l && !ref $l;
672 printdebug"C $c ".(defined $l ?
673 join " ", map { messagequote "'$_'" } @$l :
677 badcfg "multiple values for $c".
678 " (in $src git config)" if @$l > 1;
686 return undef if $c =~ /RETURN-UNDEF/;
687 printdebug "C? $c\n" if $debuglevel >= 5;
688 my $v = git_get_config($c);
689 return $v if defined $v;
690 my $dv = $defcfg{$c};
692 printdebug "CD $c $dv\n" if $debuglevel >= 4;
696 badcfg "need value for one of: @_\n".
697 "$us: distro or suite appears not to be (properly) supported";
700 sub access_basedistro__noalias () {
701 if (defined $idistro) {
704 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
705 return $def if defined $def;
706 foreach my $src (@gitcfgsources, 'internal') {
707 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
709 foreach my $k (keys %$kl) {
710 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
712 next unless match_glob $dpat, $isuite;
716 return cfg("dgit.default.distro");
720 sub access_basedistro () {
721 my $noalias = access_basedistro__noalias();
722 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
723 return $canon // $noalias;
726 sub access_nomdistro () {
727 my $base = access_basedistro();
728 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
729 $r =~ m/^$distro_re$/ or badcfg
730 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
734 sub access_quirk () {
735 # returns (quirk name, distro to use instead or undef, quirk-specific info)
736 my $basedistro = access_basedistro();
737 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
739 if (defined $backports_quirk) {
740 my $re = $backports_quirk;
741 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
743 $re =~ s/\%/([-0-9a-z_]+)/
744 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
745 if ($isuite =~ m/^$re$/) {
746 return ('backports',"$basedistro-backports",$1);
749 return ('none',undef);
754 sub parse_cfg_bool ($$$) {
755 my ($what,$def,$v) = @_;
758 $v =~ m/^[ty1]/ ? 1 :
759 $v =~ m/^[fn0]/ ? 0 :
760 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
763 sub access_forpush_config () {
764 my $d = access_basedistro();
768 parse_cfg_bool('new-private-pushers', 0,
769 cfg("dgit-distro.$d.new-private-pushers",
772 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
775 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
776 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
777 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
778 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
781 sub access_forpush () {
782 $access_forpush //= access_forpush_config();
783 return $access_forpush;
787 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
788 badcfg "pushing but distro is configured readonly"
789 if access_forpush_config() eq '0';
791 $supplementary_message = <<'END' unless $we_are_responder;
792 Push failed, before we got started.
793 You can retry the push, after fixing the problem, if you like.
795 parseopts_late_defaults();
799 parseopts_late_defaults();
802 sub supplementary_message ($) {
804 if (!$we_are_responder) {
805 $supplementary_message = $msg;
807 } elsif ($protovsn >= 3) {
808 responder_send_command "supplementary-message ".length($msg)
810 print PO $msg or die $!;
814 sub access_distros () {
815 # Returns list of distros to try, in order
818 # 0. `instead of' distro name(s) we have been pointed to
819 # 1. the access_quirk distro, if any
820 # 2a. the user's specified distro, or failing that } basedistro
821 # 2b. the distro calculated from the suite }
822 my @l = access_basedistro();
824 my (undef,$quirkdistro) = access_quirk();
825 unshift @l, $quirkdistro;
826 unshift @l, $instead_distro;
827 @l = grep { defined } @l;
829 push @l, access_nomdistro();
831 if (access_forpush()) {
832 @l = map { ("$_/push", $_) } @l;
837 sub access_cfg_cfgs (@) {
840 # The nesting of these loops determines the search order. We put
841 # the key loop on the outside so that we search all the distros
842 # for each key, before going on to the next key. That means that
843 # if access_cfg is called with a more specific, and then a less
844 # specific, key, an earlier distro can override the less specific
845 # without necessarily overriding any more specific keys. (If the
846 # distro wants to override the more specific keys it can simply do
847 # so; whereas if we did the loop the other way around, it would be
848 # impossible for an earlier distro to override a less specific
849 # key but not the more specific ones without restating the unknown
850 # values of the more specific keys.)
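# Illustrative example (hypothetical distro list): for a single key such
# as "mirror", with access_distros() returning just ("debian"), the list
# built below is
#   dgit-distro.debian.mirror
#   dgit.default.mirror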
853 # We have to deal with RETURN-UNDEF specially, so that we don't
854 # terminate the search prematurely.
856 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
859 foreach my $d (access_distros()) {
860 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
862 push @cfgs, map { "dgit.default.$_" } @realkeys;
869 my (@cfgs) = access_cfg_cfgs(@keys);
870 my $value = cfg(@cfgs);
874 sub access_cfg_bool ($$) {
875 my ($def, @keys) = @_;
876 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
879 sub string_to_ssh ($) {
881 if ($spec =~ m/\s/) {
882 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
888 sub access_cfg_ssh () {
889 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
890 if (!defined $gitssh) {
893 return string_to_ssh $gitssh;
897 sub access_runeinfo ($) {
899 return ": dgit ".access_basedistro()." $info ;";
902 sub access_someuserhost ($) {
904 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
905 defined($user) && length($user) or
906 $user = access_cfg("$some-user",'username');
907 my $host = access_cfg("$some-host");
908 return length($user) ? "$user\@$host" : $host;
911 sub access_gituserhost () {
912 return access_someuserhost('git');
915 sub access_giturl (;$) {
917 my $url = access_cfg('git-url','RETURN-UNDEF');
920 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
921 return undef unless defined $proto;
924 access_gituserhost().
925 access_cfg('git-path');
927 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
930 return "$url/$package$suffix";
933 sub parsecontrolfh ($$;$) {
934 my ($fh, $desc, $allowsigned) = @_;
935 our $dpkgcontrolhash_noissigned;
938 my %opts = ('name' => $desc);
939 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
940 $c = Dpkg::Control::Hash->new(%opts);
941 $c->parse($fh,$desc) or die "parsing of $desc failed";
942 last if $allowsigned;
943 last if $dpkgcontrolhash_noissigned;
944 my $issigned= $c->get_option('is_pgp_signed');
945 if (!defined $issigned) {
946 $dpkgcontrolhash_noissigned= 1;
947 seek $fh, 0,0 or die "seek $desc: $!";
948 } elsif ($issigned) {
949 fail "control file $desc is (already) PGP-signed. ".
950 " Note that dgit push needs to modify the .dsc and then".
951 " do the signature itself";
960 my ($file, $desc, $allowsigned) = @_;
961 my $fh = new IO::Handle;
962 open $fh, '<', $file or die "$file: $!";
963 my $c = parsecontrolfh($fh,$desc,$allowsigned);
964 $fh->error and die $!;
970 my ($dctrl,$field) = @_;
971 my $v = $dctrl->{$field};
972 return $v if defined $v;
973 fail "missing field $field in ".$dctrl->get_option('name');
977 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
978 my $p = new IO::Handle;
979 my @cmd = (qw(dpkg-parsechangelog), @_);
980 open $p, '-|', @cmd or die $!;
982 $?=0; $!=0; close $p or failedcmd @cmd;
986 sub commit_getclogp ($) {
987 # Returns the parsed changelog hashref for a particular commit
989 our %commit_getclogp_memo;
990 my $memo = $commit_getclogp_memo{$objid};
991 return $memo if $memo;
993 my $mclog = ".git/dgit/clog-$objid";
994 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
995 "$objid:debian/changelog";
996 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
1001 defined $d or fail "getcwd failed: $!";
1005 sub parse_dscdata () {
1006 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1007 printdebug Dumper($dscdata) if $debuglevel>1;
1008 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1009 printdebug Dumper($dsc) if $debuglevel>1;
1014 sub archive_query ($;@) {
1015 my ($method) = shift @_;
1016 fail "this operation does not support multiple comma-separated suites"
1018 my $query = access_cfg('archive-query','RETURN-UNDEF');
1019 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1022 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1025 sub archive_query_prepend_mirror {
1026 my $m = access_cfg('mirror');
1027 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1030 sub pool_dsc_subpath ($$) {
1031 my ($vsn,$component) = @_; # $package is implicit arg
1032 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
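# e.g. (hypothetical values) package "hello" gives prefix "h", while
# "libfoo" gives "libf", matching the archive pool layout where lib*
# packages live under a four-character subdirectory.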
1033 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1036 sub cfg_apply_map ($$$) {
1037 my ($varref, $what, $mapspec) = @_;
1038 return unless $mapspec;
1040 printdebug "config $what EVAL{ $mapspec; }\n";
1042 eval "package Dgit::Config; $mapspec;";
1047 #---------- `ftpmasterapi' archive query method (nascent) ----------
1049 sub archive_api_query_cmd ($) {
1051 my @cmd = (@curl, qw(-sS));
1052 my $url = access_cfg('archive-query-url');
1053 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1055 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1056 foreach my $key (split /\:/, $keys) {
1057 $key =~ s/\%HOST\%/$host/g;
1059 fail "for $url: stat $key: $!" unless $!==ENOENT;
1062 fail "config requested specific TLS key but do not know".
1063 " how to get curl to use exactly that EE key ($key)";
1064 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1065 # # Sadly the above line does not work because of changes
1066 # # to gnutls. The real fix for #790093 may involve
1067 # # new curl options.
1070 # Fixing #790093 properly will involve providing a value
1071 # for this on clients.
1072 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1073 push @cmd, split / /, $kargs if defined $kargs;
1075 push @cmd, $url.$subpath;
1079 sub api_query ($$;$) {
1081 my ($data, $subpath, $ok404) = @_;
1082 badcfg "ftpmasterapi archive query method takes no data part"
1084 my @cmd = archive_api_query_cmd($subpath);
1085 my $url = $cmd[$#cmd];
1086 push @cmd, qw(-w %{http_code});
1087 my $json = cmdoutput @cmd;
1088 unless ($json =~ s/\d+\d+\d$//) {
1089 failedcmd_report_cmd undef, @cmd;
1090 fail "curl failed to print 3-digit HTTP code";
1093 return undef if $code eq '404' && $ok404;
1094 fail "fetch of $url gave HTTP code $code"
1095 unless $url =~ m#^file://# or $code =~ m/^2/;
1096 return decode_json($json);
1099 sub canonicalise_suite_ftpmasterapi {
1100 my ($proto,$data) = @_;
1101 my $suites = api_query($data, 'suites');
1103 foreach my $entry (@$suites) {
1105 my $v = $entry->{$_};
1106 defined $v && $v eq $isuite;
1107 } qw(codename name);
1108 push @matched, $entry;
1110 fail "unknown suite $isuite" unless @matched;
1113 @matched==1 or die "multiple matches for suite $isuite\n";
1114 $cn = "$matched[0]{codename}";
1115 defined $cn or die "suite $isuite info has no codename\n";
1116 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1118 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1123 sub archive_query_ftpmasterapi {
1124 my ($proto,$data) = @_;
1125 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1127 my $digester = Digest::SHA->new(256);
1128 foreach my $entry (@$info) {
1130 my $vsn = "$entry->{version}";
1131 my ($ok,$msg) = version_check $vsn;
1132 die "bad version: $msg\n" unless $ok;
1133 my $component = "$entry->{component}";
1134 $component =~ m/^$component_re$/ or die "bad component";
1135 my $filename = "$entry->{filename}";
1136 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1137 or die "bad filename";
1138 my $sha256sum = "$entry->{sha256sum}";
1139 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1140 push @rows, [ $vsn, "/pool/$component/$filename",
1141 $digester, $sha256sum ];
1143 die "bad ftpmaster api response: $@\n".Dumper($entry)
1146 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1147 return archive_query_prepend_mirror @rows;
1150 sub file_in_archive_ftpmasterapi {
1151 my ($proto,$data,$filename) = @_;
1152 my $pat = $filename;
1155 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1156 my $info = api_query($data, "file_in_archive/$pat", 1);
1159 #---------- `aptget' archive query method ----------
1162 our $aptget_releasefile;
1163 our $aptget_configpath;
1165 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1166 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1168 sub aptget_cache_clean {
1169 runcmd_ordryrun_local qw(sh -ec),
1170 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1174 sub aptget_lock_acquire () {
1175 my $lockfile = "$aptget_base/lock";
1176 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1177 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1180 sub aptget_prep ($) {
1182 return if defined $aptget_base;
1184 badcfg "aptget archive query method takes no data part"
1187 my $cache = $ENV{XDG_CACHE_HOME} // "$ENV{HOME}/.cache";
1190 ensuredir "$cache/dgit";
1192 access_cfg('aptget-cachekey','RETURN-UNDEF')
1193 // access_nomdistro();
1195 $aptget_base = "$cache/dgit/aptget";
1196 ensuredir $aptget_base;
1198 my $quoted_base = $aptget_base;
1199 die "$quoted_base contains bad chars, cannot continue"
1200 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1202 ensuredir $aptget_base;
1204 aptget_lock_acquire();
1206 aptget_cache_clean();
1208 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1209 my $sourceslist = "source.list#$cachekey";
1211 my $aptsuites = $isuite;
1212 cfg_apply_map(\$aptsuites, 'suite map',
1213 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1215 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1216 printf SRCS "deb-src %s %s %s\n",
1217 access_cfg('mirror'),
1219 access_cfg('aptget-components')
1222 ensuredir "$aptget_base/cache";
1223 ensuredir "$aptget_base/lists";
1225 open CONF, ">", $aptget_configpath or die $!;
1227 Debug::NoLocking "true";
1228 APT::Get::List-Cleanup "false";
1229 #clear APT::Update::Post-Invoke-Success;
1230 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1231 Dir::State::Lists "$quoted_base/lists";
1232 Dir::Etc::preferences "$quoted_base/preferences";
1233 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1234 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1237 foreach my $key (qw(
1240 Dir::Cache::Archives
1241 Dir::Etc::SourceParts
1242 Dir::Etc::preferencesparts
1244 ensuredir "$aptget_base/$key";
1245 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1248 my $oldatime = (time // die $!) - 1;
1249 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1250 next unless stat_exists $oldlist;
1251 my ($mtime) = (stat _)[9];
1252 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1255 runcmd_ordryrun_local aptget_aptget(), qw(update);
1258 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1259 next unless stat_exists $oldlist;
1260 my ($atime) = (stat _)[8];
1261 next if $atime == $oldatime;
1262 push @releasefiles, $oldlist;
1264 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1265 @releasefiles = @inreleasefiles if @inreleasefiles;
1266 die "apt updated wrong number of Release files (@releasefiles), erk"
1267 unless @releasefiles == 1;
1269 ($aptget_releasefile) = @releasefiles;
1272 sub canonicalise_suite_aptget {
1273 my ($proto,$data) = @_;
1276 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1278 foreach my $name (qw(Codename Suite)) {
1279 my $val = $release->{$name};
1281 printdebug "release file $name: $val\n";
1282 $val =~ m/^$suite_re$/o or fail
1283 "Release file ($aptget_releasefile) specifies intolerable $name";
1284 cfg_apply_map(\$val, 'suite rmap',
1285 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1292 sub archive_query_aptget {
1293 my ($proto,$data) = @_;
1296 ensuredir "$aptget_base/source";
1297 foreach my $old (<$aptget_base/source/*.dsc>) {
1298 unlink $old or die "$old: $!";
1301 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1302 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1303 # avoids apt-get source failing with ambiguous error code
1305 runcmd_ordryrun_local
1306 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1307 aptget_aptget(), qw(--download-only --only-source source), $package;
1309 my @dscs = <$aptget_base/source/*.dsc>;
1310 fail "apt-get source did not produce a .dsc" unless @dscs;
1311 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1313 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1316 my $uri = "file://". uri_escape $dscs[0];
1317 $uri =~ s{\%2f}{/}gi;
1318 return [ (getfield $pre_dsc, 'Version'), $uri ];
1321 #---------- `dummyapicat' archive query method ----------
1323 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1324 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1326 sub file_in_archive_dummycatapi ($$$) {
1327 my ($proto,$data,$filename) = @_;
1328 my $mirror = access_cfg('mirror');
1329 $mirror =~ s#^file://#/# or die "$mirror ?";
1331 my @cmd = (qw(sh -ec), '
1333 find -name "$2" -print0 |
1335 ', qw(x), $mirror, $filename);
1336 debugcmd "-|", @cmd;
1337 open FIA, "-|", @cmd or die $!;
1340 printdebug "| $_\n";
1341 m/^(\w+) (\S+)$/ or die "$_ ?";
1342 push @out, { sha256sum => $1, filename => $2 };
1344 close FIA or die failedcmd @cmd;
1348 #---------- `madison' archive query method ----------
1350 sub archive_query_madison {
1351 return archive_query_prepend_mirror
1352 map { [ @$_[0..1] ] } madison_get_parse(@_);
1355 sub madison_get_parse {
1356 my ($proto,$data) = @_;
1357 die unless $proto eq 'madison';
1358 if (!length $data) {
1359 $data= access_cfg('madison-distro','RETURN-UNDEF');
1360 $data //= access_basedistro();
1362 $rmad{$proto,$data,$package} ||= cmdoutput
1363 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1364 my $rmad = $rmad{$proto,$data,$package};
1367 foreach my $l (split /\n/, $rmad) {
1368 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1369 \s*( [^ \t|]+ )\s* \|
1370 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1371 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1372 $1 eq $package or die "$rmad $package ?";
1379 $component = access_cfg('archive-query-default-component');
1381 $5 eq 'source' or die "$rmad ?";
1382 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1384 return sort { -version_compare($a->[0],$b->[0]); } @out;
1387 sub canonicalise_suite_madison {
1388 # madison canonicalises for us
1389 my @r = madison_get_parse(@_);
1391 "unable to canonicalise suite using package $package".
1392 " which does not appear to exist in suite $isuite;".
1393 " --existing-package may help";
1397 sub file_in_archive_madison { return undef; }
1399 #---------- `sshpsql' archive query method ----------
1402 my ($data,$runeinfo,$sql) = @_;
1403 if (!length $data) {
1404 $data= access_someuserhost('sshpsql').':'.
1405 access_cfg('sshpsql-dbname');
1407 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1408 my ($userhost,$dbname) = ($`,$'); #';
1410 my @cmd = (access_cfg_ssh, $userhost,
1411 access_runeinfo("ssh-psql $runeinfo").
1412 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1413 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1415 open P, "-|", @cmd or die $!;
1418 printdebug(">|$_|\n");
1421 $!=0; $?=0; close P or failedcmd @cmd;
1423 my $nrows = pop @rows;
1424 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1425 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1426 @rows = map { [ split /\|/, $_ ] } @rows;
1427 my $ncols = scalar @{ shift @rows };
1428 die if grep { scalar @$_ != $ncols } @rows;
1432 sub sql_injection_check {
1433 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1436 sub archive_query_sshpsql ($$) {
1437 my ($proto,$data) = @_;
1438 sql_injection_check $isuite, $package;
1439 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1440 SELECT source.version, component.name, files.filename, files.sha256sum
1442 JOIN src_associations ON source.id = src_associations.source
1443 JOIN suite ON suite.id = src_associations.suite
1444 JOIN dsc_files ON dsc_files.source = source.id
1445 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1446 JOIN component ON component.id = files_archive_map.component_id
1447 JOIN files ON files.id = dsc_files.file
1448 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1449 AND source.source='$package'
1450 AND files.filename LIKE '%.dsc';
1452 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1453 my $digester = Digest::SHA->new(256);
1455 my ($vsn,$component,$filename,$sha256sum) = @$_;
1456 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1458 return archive_query_prepend_mirror @rows;
1461 sub canonicalise_suite_sshpsql ($$) {
1462 my ($proto,$data) = @_;
1463 sql_injection_check $isuite;
1464 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1465 SELECT suite.codename
1466 FROM suite where suite_name='$isuite' or codename='$isuite';
1468 @rows = map { $_->[0] } @rows;
1469 fail "unknown suite $isuite" unless @rows;
1470 die "ambiguous $isuite: @rows ?" if @rows>1;
1474 sub file_in_archive_sshpsql ($$$) { return undef; }
1476 #---------- `dummycat' archive query method ----------
1478 sub canonicalise_suite_dummycat ($$) {
1479 my ($proto,$data) = @_;
1480 my $dpath = "$data/suite.$isuite";
1481 if (!open C, "<", $dpath) {
1482 $!==ENOENT or die "$dpath: $!";
1483 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1487 chomp or die "$dpath: $!";
1489 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1493 sub archive_query_dummycat ($$) {
1494 my ($proto,$data) = @_;
1495 canonicalise_suite();
1496 my $dpath = "$data/package.$csuite.$package";
1497 if (!open C, "<", $dpath) {
1498 $!==ENOENT or die "$dpath: $!";
1499 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1507 printdebug "dummycat query $csuite $package $dpath | $_\n";
1508 my @row = split /\s+/, $_;
1509 @row==2 or die "$dpath: $_ ?";
1512 C->error and die "$dpath: $!";
1514 return archive_query_prepend_mirror
1515 sort { -version_compare($a->[0],$b->[0]); } @rows;
1518 sub file_in_archive_dummycat () { return undef; }
1520 #---------- tag format handling ----------
1522 sub access_cfg_tagformats () {
1523 split /\,/, access_cfg('dgit-tag-format');
1526 sub access_cfg_tagformats_can_splitbrain () {
1527 my %y = map { $_ => 1 } access_cfg_tagformats;
1528 foreach my $needtf (qw(new maint)) {
1529 next if $y{$needtf};
1535 sub need_tagformat ($$) {
1536 my ($fmt, $why) = @_;
1537 fail "need to use tag format $fmt ($why) but also need".
1538 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1539 " - no way to proceed"
1540 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1541 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1544 sub select_tagformat () {
1546 return if $tagformatfn && !$tagformat_want;
1547 die 'bug' if $tagformatfn && $tagformat_want;
1548 # ... $tagformat_want assigned after previous select_tagformat
1550 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1551 printdebug "select_tagformat supported @supported\n";
1553 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1554 printdebug "select_tagformat specified @$tagformat_want\n";
1556 my ($fmt,$why,$override) = @$tagformat_want;
1558 fail "target distro supports tag formats @supported".
1559 " but have to use $fmt ($why)"
1561 or grep { $_ eq $fmt } @supported;
1563 $tagformat_want = undef;
1565 $tagformatfn = ${*::}{"debiantag_$fmt"};
1567 fail "trying to use unknown tag format \`$fmt' ($why) !"
1568 unless $tagformatfn;
1571 #---------- archive query entrypoints and rest of program ----------
1573 sub canonicalise_suite () {
1574 return if defined $csuite;
1575 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1576 $csuite = archive_query('canonicalise_suite');
1577 if ($isuite ne $csuite) {
1578 progress "canonical suite name for $isuite is $csuite";
1580 progress "canonical suite name is $csuite";
1584 sub get_archive_dsc () {
1585 canonicalise_suite();
1586 my @vsns = archive_query('archive_query');
1587 foreach my $vinfo (@vsns) {
1588 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1589 $dscurl = $vsn_dscurl;
1590 $dscdata = url_get($dscurl);
1592 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1597 $digester->add($dscdata);
1598 my $got = $digester->hexdigest();
1600 fail "$dscurl has hash $got but".
1601 " archive told us to expect $digest";
1604 my $fmt = getfield $dsc, 'Format';
1605 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1606 "unsupported source format $fmt, sorry";
1608 $dsc_checked = !!$digester;
1609 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1613 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1616 sub check_for_git ();
1617 sub check_for_git () {
1619 my $how = access_cfg('git-check');
1620 if ($how eq 'ssh-cmd') {
1622 (access_cfg_ssh, access_gituserhost(),
1623 access_runeinfo("git-check $package").
1624 " set -e; cd ".access_cfg('git-path').";".
1625 " if test -d $package.git; then echo 1; else echo 0; fi");
1626 my $r= cmdoutput @cmd;
1627 if (defined $r and $r =~ m/^divert (\w+)$/) {
1629 my ($usedistro,) = access_distros();
1630 # NB that if we are pushing, $usedistro will be $distro/push
1631 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1632 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1633 progress "diverting to $divert (using config for $instead_distro)";
1634 return check_for_git();
1636 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1638 } elsif ($how eq 'url') {
1639 my $prefix = access_cfg('git-check-url','git-url');
1640 my $suffix = access_cfg('git-check-suffix','git-suffix',
1641 'RETURN-UNDEF') // '.git';
1642 my $url = "$prefix/$package$suffix";
1643 my @cmd = (@curl, qw(-sS -I), $url);
1644 my $result = cmdoutput @cmd;
1645 $result =~ s/^\S+ 200 .*\n\r?\n//;
1646 # curl -sS -I with https_proxy prints
1647 # HTTP/1.0 200 Connection established
1648 $result =~ m/^\S+ (404|200) /s or
1649 fail "unexpected results from git check query - ".
1650 Dumper($prefix, $result);
1652 if ($code eq '404') {
1654 } elsif ($code eq '200') {
1659 } elsif ($how eq 'true') {
1661 } elsif ($how eq 'false') {
1664 badcfg "unknown git-check \`$how'";
1668 sub create_remote_git_repo () {
1669 my $how = access_cfg('git-create');
1670 if ($how eq 'ssh-cmd') {
1672 (access_cfg_ssh, access_gituserhost(),
1673 access_runeinfo("git-create $package").
1674 "set -e; cd ".access_cfg('git-path').";".
1675 " cp -a _template $package.git");
1676 } elsif ($how eq 'true') {
1679 badcfg "unknown git-create \`$how'";
1683 our ($dsc_hash,$lastpush_mergeinput);
1684 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1686 our $ud = '.git/dgit/unpack';
1696 sub mktree_in_ud_here () {
1697 runcmd qw(git init -q);
1698 runcmd qw(git config gc.auto 0);
1699 rmtree('.git/objects');
1700 symlink '../../../../objects','.git/objects' or die $!;
1704 sub git_write_tree () {
1705 my $tree = cmdoutput @git, qw(write-tree);
1706 $tree =~ m/^\w+$/ or die "$tree ?";
1710 sub git_add_write_tree () {
1711 runcmd @git, qw(add -Af .);
1712 return git_write_tree();
1715 sub remove_stray_gits ($) {
1717 my @gitscmd = qw(find -name .git -prune -print0);
1718 debugcmd "|",@gitscmd;
1719 open GITS, "-|", @gitscmd or die $!;
1724 print STDERR "$us: warning: removing from $what: ",
1725 (messagequote $_), "\n";
1729 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1732 sub mktree_in_ud_from_only_subdir ($;$) {
1733 my ($what,$raw) = @_;
1735 # changes into the subdir
1737 die "expected one subdir but found @dirs ?" unless @dirs==1;
1738 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1742 remove_stray_gits($what);
1743 mktree_in_ud_here();
1745 my ($format, $fopts) = get_source_format();
1746 if (madformat($format)) {
1751 my $tree=git_add_write_tree();
1752 return ($tree,$dir);
1755 our @files_csum_info_fields =
1756 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1757 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1758 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
1760 sub dsc_files_info () {
1761 foreach my $csumi (@files_csum_info_fields) {
1762 my ($fname, $module, $method) = @$csumi;
1763 my $field = $dsc->{$fname};
1764 next unless defined $field;
1765 eval "use $module; 1;" or die $@;
1767 foreach (split /\n/, $field) {
1769 m/^(\w+) (\d+) (\S+)$/ or
1770 fail "could not parse .dsc $fname line \`$_'";
1771 my $digester = eval "$module"."->$method;" or die $@;
1776 Digester => $digester,
1781 fail "missing any supported Checksums-* or Files field in ".
1782 $dsc->get_option('name');
1786 map { $_->{Filename} } dsc_files_info();
1789 sub files_compare_inputs (@) {
1794 my $showinputs = sub {
1795 return join "; ", map { $_->get_option('name') } @$inputs;
1798 foreach my $in (@$inputs) {
1800 my $in_name = $in->get_option('name');
1802 printdebug "files_compare_inputs $in_name\n";
1804 foreach my $csumi (@files_csum_info_fields) {
1805 my ($fname) = @$csumi;
1806 printdebug "files_compare_inputs $in_name $fname\n";
1808 my $field = $in->{$fname};
1809 next unless defined $field;
1812 foreach (split /\n/, $field) {
1815 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1816 fail "could not parse $in_name $fname line \`$_'";
1818 printdebug "files_compare_inputs $in_name $fname $f\n";
1822 my $re = \ $record{$f}{$fname};
1824 $fchecked{$f}{$in_name} = 1;
1826 fail "hash or size of $f varies in $fname fields".
1827 " (between: ".$showinputs->().")";
1832 @files = sort @files;
1833 $expected_files //= \@files;
1834 "@$expected_files" eq "@files" or
1835 fail "file list in $in_name varies between hash fields!";
1838 fail "$in_name has no files list field(s)";
1840 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1843 grep { keys %$_ == @$inputs-1 } values %fchecked
1844 or fail "no file appears in all file lists".
1845 " (looked in: ".$showinputs->().")";
1848 sub is_orig_file_in_dsc ($$) {
1849 my ($f, $dsc_files_info) = @_;
1850 return 0 if @$dsc_files_info <= 1;
1851 # One file means no origs, and the filename doesn't have a "what
1852 # part of dsc" component. (Consider versions ending `.orig'.)
1853 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1857 sub is_orig_file_of_vsn ($$) {
1858 my ($f, $upstreamvsn) = @_;
1859 my $base = srcfn $upstreamvsn, '';
1860 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1864 sub changes_update_origs_from_dsc ($$$$) {
1865 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1867 printdebug "checking origs needed ($upstreamvsn)...\n";
1868 $_ = getfield $changes, 'Files';
1869 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1870 fail "cannot find section/priority from .changes Files field";
1871 my $placementinfo = $1;
1873 printdebug "checking origs needed placement '$placementinfo'...\n";
1874 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1875 $l =~ m/\S+$/ or next;
1877 printdebug "origs $file | $l\n";
1878 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1879 printdebug "origs $file is_orig\n";
1880 my $have = archive_query('file_in_archive', $file);
1881 if (!defined $have) {
1883 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1889 printdebug "origs $file \$#\$have=$#$have\n";
1890 foreach my $h (@$have) {
1893 foreach my $csumi (@files_csum_info_fields) {
1894 my ($fname, $module, $method, $archivefield) = @$csumi;
1895 next unless defined $h->{$archivefield};
1896 $_ = $dsc->{$fname};
1897 next unless defined;
1898 m/^(\w+) .* \Q$file\E$/m or
1899 fail ".dsc $fname missing entry for $file";
1900 if ($h->{$archivefield} eq $1) {
1904 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1907 die "$file ".Dumper($h)." ?!" if $same && @differ;
1910 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1913 printdebug "origs $file f.same=$found_same".
1914 " #f._differ=$#found_differ\n";
1915 if (@found_differ && !$found_same) {
1917 "archive contains $file with different checksum",
1920 # Now we edit the changes file to add or remove it
1921 foreach my $csumi (@files_csum_info_fields) {
1922 my ($fname, $module, $method, $archivefield) = @$csumi;
1923 next unless defined $changes->{$fname};
1925 # in archive, delete from .changes if it's there
1926 $changed{$file} = "removed" if
1927 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1928 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1929 # not in archive, but it's here in the .changes
1931 my $dsc_data = getfield $dsc, $fname;
1932 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1934 $extra =~ s/ \d+ /$&$placementinfo /
1935 or die "$fname $extra >$dsc_data< ?"
1936 if $fname eq 'Files';
1937 $changes->{$fname} .= "\n". $extra;
1938 $changed{$file} = "added";
1943 foreach my $file (keys %changed) {
1945 "edited .changes for archive .orig contents: %s %s",
1946 $changed{$file}, $file;
1948 my $chtmp = "$changesfile.tmp";
1949 $changes->save($chtmp);
1951 rename $chtmp,$changesfile or die "$changesfile $!";
1953 progress "[new .changes left in $changesfile]";
1956 progress "$changesfile already has appropriate .orig(s) (if any)";
1960 sub make_commit ($) {
1962 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1965 sub make_commit_text ($) {
1968 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1970 print Dumper($text) if $debuglevel > 1;
1971 my $child = open2($out, $in, @cmd) or die $!;
1974 print $in $text or die $!;
1975 close $in or die $!;
1977 $h =~ m/^\w+$/ or die;
1979 printdebug "=> $h\n";
1982 (waitpid $child, 0) == $child or die "$child $!";
1983 $? and failedcmd @cmd;
1987 sub clogp_authline ($) {
1989 my $author = getfield $clogp, 'Maintainer';
1990 $author =~ s#,.*##ms;
1991 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1992 my $authline = "$author $date";
1993 $authline =~ m/$git_authline_re/o or
1994 fail "unexpected commit author line format \`$authline'".
1995 " (was generated from changelog Maintainer field)";
1996 return ($1,$2,$3) if wantarray;
2000 sub vendor_patches_distro ($$) {
2001 my ($checkdistro, $what) = @_;
2002 return unless defined $checkdistro;
2004 my $series = "debian/patches/\L$checkdistro\E.series";
2005 printdebug "checking for vendor-specific $series ($what)\n";
2007 if (!open SERIES, "<", $series) {
2008 die "$series $!" unless $!==ENOENT;
2017 Unfortunately, this source package uses a feature of dpkg-source where
2018 the same source package unpacks to different source code on different
2019 distros. dgit cannot safely operate on such packages on affected
2020 distros, because the meaning of source packages is not stable.
2022 Please ask the distro/maintainer to remove the distro-specific series
2023 files and use a different technique (if necessary, uploading actually
2024 different packages, if different distros are supposed to have
2028 fail "Found active distro-specific series file for".
2029 " $checkdistro ($what): $series, cannot continue";
2031 die "$series $!" if SERIES->error;
2035 sub check_for_vendor_patches () {
2036 # This dpkg-source feature doesn't seem to be documented anywhere!
2037 # But it can be found in the changelog (reformatted):
2039 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2040 # Author: Raphael Hertzog <hertzog@debian.org>
2041 # Date: Sun Oct 3 09:36:48 2010 +0200
2043 # dpkg-source: correctly create .pc/.quilt_series with alternate
2046 # If you have debian/patches/ubuntu.series and you were
2047 # unpacking the source package on ubuntu, quilt was still
2048 # directed to debian/patches/series instead of
2049 # debian/patches/ubuntu.series.
2051 # debian/changelog | 3 +++
2052 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2053 # 2 files changed, 6 insertions(+), 1 deletion(-)
2056 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2057 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2058 "Dpkg::Vendor \`current vendor'");
2059 vendor_patches_distro(access_basedistro(),
2060 "(base) distro being accessed");
2061 vendor_patches_distro(access_nomdistro(),
2062 "(nominal) distro being accessed");
2065 sub generate_commits_from_dsc () {
2066 # See big comment in fetch_from_archive, below.
2067 # See also README.dsc-import.
2071 my @dfi = dsc_files_info();
2072 foreach my $fi (@dfi) {
2073 my $f = $fi->{Filename};
2074 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2075 my $upper_f = "../../../../$f";
2077 printdebug "considering reusing $f: ";
2079 if (link_ltarget "$upper_f,fetch", $f) {
2080 printdebug "linked (using ...,fetch).\n";
2081 } elsif ((printdebug "($!) "),
2083 fail "accessing ../$f,fetch: $!";
2084 } elsif (link_ltarget $upper_f, $f) {
2085 printdebug "linked.\n";
2086 } elsif ((printdebug "($!) "),
2088 fail "accessing ../$f: $!";
2090 printdebug "absent.\n";
2094 complete_file_from_dsc('.', $fi, \$refetched)
2097 printdebug "considering saving $f: ";
2099 if (link $f, $upper_f) {
2100 printdebug "linked.\n";
2101 } elsif ((printdebug "($!) "),
2103 fail "saving ../$f: $!";
2104 } elsif (!$refetched) {
2105 printdebug "no need.\n";
2106 } elsif (link $f, "$upper_f,fetch") {
2107 printdebug "linked (using ...,fetch).\n";
2108 } elsif ((printdebug "($!) "),
2110 fail "saving ../$f,fetch: $!";
2112 printdebug "cannot.\n";
2116 # We unpack and record the orig tarballs first, so that we only
2117 # need disk space for one private copy of the unpacked source.
2118 # But we can't make them into commits until we have the metadata
2119 # from the debian/changelog, so we record the tree objects now and
2120 # make them into commits later.
2122 my $upstreamv = upstreamversion $dsc->{version};
2123 my $orig_f_base = srcfn $upstreamv, '';
2125 foreach my $fi (@dfi) {
2126 # We actually import, and record as a commit, every tarball
2127 # (unless there is only one file, in which case there seems
2130 my $f = $fi->{Filename};
2131 printdebug "import considering $f ";
2132 (printdebug "only one dfi\n"), next if @dfi == 1;
2133 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2134 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2138 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2140 printdebug "Y ", (join ' ', map { $_//"(none)" }
2141 $compr_ext, $orig_f_part
2144 my $input = new IO::File $f, '<' or die "$f $!";
2148 if (defined $compr_ext) {
2150 Dpkg::Compression::compression_guess_from_filename $f;
2151 fail "Dpkg::Compression cannot handle file $f in source package"
2152 if defined $compr_ext && !defined $cname;
2154 new Dpkg::Compression::Process compression => $cname;
2155 my @compr_cmd = $compr_proc->get_uncompress_cmdline();
2156 my $compr_fh = new IO::Handle;
2157 my $compr_pid = open($compr_fh, "-|") // die $!;
2159 open STDIN, "<&", $input or die $!;
2161 die "dgit (child): exec $compr_cmd[0]: $!\n";
2166 rmtree "_unpack-tar";
2167 mkdir "_unpack-tar" or die $!;
2168 my @tarcmd = qw(tar -x -f -
2169 --no-same-owner --no-same-permissions
2170 --no-acls --no-xattrs --no-selinux);
2171 my $tar_pid = fork // die $!;
2173 chdir "_unpack-tar" or die $!;
2174 open STDIN, "<&", $input or die $!;
2176 die "dgit (child): exec $tarcmd[0]: $!";
2178 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2179 !$? or failedcmd @tarcmd;
2182 (@compr_cmd ? failedcmd @compr_cmd
2184 # finally, we have the results in "tarball", but maybe
2185 # with the wrong permissions
2187 runcmd qw(chmod -R +rwX _unpack-tar);
2188 changedir "_unpack-tar";
2189 remove_stray_gits($f);
2190 mktree_in_ud_here();
2192 my ($tree) = git_add_write_tree();
2193 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2194 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2196 printdebug "one subtree $1\n";
2198 printdebug "multiple subtrees\n";
2201 rmtree "_unpack-tar";
2203 my $ent = [ $f, $tree ];
2205 Orig => !!$orig_f_part,
2206 Sort => (!$orig_f_part ? 2 :
2207 $orig_f_part =~ m/-/g ? 1 :
2215 # put any without "_" first (spec is not clear whether files
2216 # are always in the usual order). Tarballs without "_" are
2217 # the main orig or the debian tarball.
2218 $a->{Sort} <=> $b->{Sort} or
2222 my $any_orig = grep { $_->{Orig} } @tartrees;
2224 my $dscfn = "$package.dsc";
2226 my $treeimporthow = 'package';
2228 open D, ">", $dscfn or die "$dscfn: $!";
2229 print D $dscdata or die "$dscfn: $!";
2230 close D or die "$dscfn: $!";
2231 my @cmd = qw(dpkg-source);
2232 push @cmd, '--no-check' if $dsc_checked;
2233 if (madformat $dsc->{format}) {
2234 push @cmd, '--skip-patches';
2235 $treeimporthow = 'unpatched';
2237 push @cmd, qw(-x --), $dscfn;
2240 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2241 if (madformat $dsc->{format}) {
2242 check_for_vendor_patches();
2246 if (madformat $dsc->{format}) {
2247 my @pcmd = qw(dpkg-source --before-build .);
2248 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2250 $dappliedtree = git_add_write_tree();
2253 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2254 debugcmd "|",@clogcmd;
2255 open CLOGS, "-|", @clogcmd or die $!;
2260 printdebug "import clog search...\n";
2263 my $stanzatext = do { local $/=""; <CLOGS>; };
2264 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2265 last if !defined $stanzatext;
2267 my $desc = "package changelog, entry no.$.";
2268 open my $stanzafh, "<", \$stanzatext or die;
2269 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2270 $clogp //= $thisstanza;
2272 printdebug "import clog $thisstanza->{version} $desc...\n";
2274 last if !$any_orig; # we don't need $r1clogp
2276 # We look for the first (most recent) changelog entry whose
2277 # version number is lower than the upstream version of this
2278 # package. Then the last (least recent) previous changelog
2279 # entry is treated as the one which introduced this upstream
2280 # version and used for the synthetic commits for the upstream
2283 # One might think that a more sophisticated algorithm would be
2284 # necessary. But: we do not want to scan the whole changelog
2285 # file. Stopping when we see an earlier version, which
2286 # necessarily then is an earlier upstream version, is the only
2287 # realistic way to do that. Then, either the earliest
2288 # changelog entry we have seen so far is indeed the earliest
2289 # upload of this upstream version; or there are only changelog
2290 # entries relating to later upstream versions (which is not
2291 # possible unless the changelog and .dsc disagree about the
2292 # version). Then it remains to choose between the physically
2293 # last entry in the file, and the one with the lowest version
2294 # number. If these are not the same, we guess that the
2295 # versions were created in a non-monotonic order rather than
2296 # that the changelog entries have been misordered.
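# (Hypothetical worked example: with $upstreamv 1.2 and changelog
#  entries 1.2-3, 1.2-2, 1.2-1, 1.1-4 -- most recent first -- the loop
#  stops on seeing 1.1-4 (< 1.2), and $r1clogp is left as the 1.2-1
#  entry, ie the upload which introduced upstream version 1.2.)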
2298 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2300 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2301 $r1clogp = $thisstanza;
2303 printdebug "import clog $r1clogp->{version} becomes r1\n";
2305 die $! if CLOGS->error;
2306 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2308 $clogp or fail "package changelog has no entries!";
2310 my $authline = clogp_authline $clogp;
2311 my $changes = getfield $clogp, 'Changes';
2312 my $cversion = getfield $clogp, 'Version';
2315 $r1clogp //= $clogp; # maybe there's only one entry;
2316 my $r1authline = clogp_authline $r1clogp;
2317 # Strictly, r1authline might now be wrong if it's going to be
2318 # unused because !$any_orig. Whatever.
2320 printdebug "import tartrees authline $authline\n";
2321 printdebug "import tartrees r1authline $r1authline\n";
2323 foreach my $tt (@tartrees) {
2324 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2326 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2329 committer $r1authline
2333 [dgit import orig $tt->{F}]
2341 [dgit import tarball $package $cversion $tt->{F}]
2346 printdebug "import main commit\n";
2348 open C, ">../commit.tmp" or die $!;
2349 print C <<END or die $!;
2352 print C <<END or die $! foreach @tartrees;
2355 print C <<END or die $!;
2361 [dgit import $treeimporthow $package $cversion]
2365 my $rawimport_hash = make_commit qw(../commit.tmp);
2367 if (madformat $dsc->{format}) {
2368 printdebug "import apply patches...\n";
2370 # regularise the state of the working tree so that
2371 # the checkout of $rawimport_hash works nicely.
2372 my $dappliedcommit = make_commit_text(<<END);
2379 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2381 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2383 # We need the answers to be reproducible
2384 my @authline = clogp_authline($clogp);
2385 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2386 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2387 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2388 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2389 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2390 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2392 my $path = $ENV{PATH} or die;
2394 foreach my $use_absurd (qw(0 1)) {
2395 runcmd @git, qw(checkout -q unpa);
2396 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2397 local $ENV{PATH} = $path;
2400 progress "warning: $@";
2401 $path = "$absurdity:$path";
2402 progress "$us: trying slow absurd-git-apply...";
2403 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2408 die "forbid absurd git-apply\n" if $use_absurd
2409 && forceing [qw(import-gitapply-no-absurd)];
2410 die "only absurd git-apply!\n" if !$use_absurd
2411 && forceing [qw(import-gitapply-absurd)];
2413 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2414 local $ENV{PATH} = $path if $use_absurd;
2416 my @showcmd = (gbp_pq, qw(import));
2417 my @realcmd = shell_cmd
2418 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2419 debugcmd "+",@realcmd;
2420 if (system @realcmd) {
2421 die +(shellquote @showcmd).
2423 failedcmd_waitstatus()."\n";
2426 my $gapplied = git_rev_parse('HEAD');
2427 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2428 $gappliedtree eq $dappliedtree or
2430 gbp-pq import and dpkg-source disagree!
2431 gbp-pq import gave commit $gapplied
2432 gbp-pq import gave tree $gappliedtree
2433 dpkg-source --before-build gave tree $dappliedtree
2435 $rawimport_hash = $gapplied;
2440 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2445 progress "synthesised git commit from .dsc $cversion";
2447 my $rawimport_mergeinput = {
2448 Commit => $rawimport_hash,
2449 Info => "Import of source package",
2451 my @output = ($rawimport_mergeinput);
2453 if ($lastpush_mergeinput) {
2454 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2455 my $oversion = getfield $oldclogp, 'Version';
2457 version_compare($oversion, $cversion);
2459 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2460 { Message => <<END, ReverseParents => 1 });
2461 Record $package ($cversion) in archive suite $csuite
2463 } elsif ($vcmp > 0) {
2464 print STDERR <<END or die $!;
2466 Version actually in archive: $cversion (older)
2467 Last version pushed with dgit: $oversion (newer or same)
2470 @output = $lastpush_mergeinput;
2472 # Same version. Use what's in the server git branch,
2473 # discarding our own import. (This could happen if the
2474 # server automatically imports all packages into git.)
2475 @output = $lastpush_mergeinput;
2478 changedir '../../../..';
2483 sub complete_file_from_dsc ($$;$) {
2484 our ($dstdir, $fi, $refetched) = @_;
2485 # Ensures that we have, in $dstdir, the file $fi, with the correct
2486 # contents. (Downloading it from alongside $dscurl if necessary.)
2487 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2488 # and will set $$refetched=1 if it did so (or tried to).
2490 my $f = $fi->{Filename};
2491 my $tf = "$dstdir/$f";
2495 my $checkhash = sub {
2496 open F, "<", "$tf" or die "$tf: $!";
2497 $fi->{Digester}->reset();
2498 $fi->{Digester}->addfile(*F);
2499 F->error and die $!;
2500 my $got = $fi->{Digester}->hexdigest();
2501 return $got eq $fi->{Hash};
2504 if (stat_exists $tf) {
2505 if ($checkhash->()) {
2506 progress "using existing $f";
2510 fail "file $f has hash $got but .dsc".
2511 " demands hash $fi->{Hash} ".
2512 "(perhaps you should delete this file?)";
2514 progress "need to fetch correct version of $f";
2515 unlink $tf or die "$tf $!";
2518 printdebug "$tf does not exist, need to fetch\n";
2522 $furl =~ s{/[^/]+$}{};
2524 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2525 die "$f ?" if $f =~ m#/#;
2526 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2527 return 0 if !act_local();
2530 fail "file $f has hash $got but .dsc".
2531 " demands hash $fi->{Hash} ".
2532 "(got wrong file from archive!)";
2537 sub ensure_we_have_orig () {
2538 my @dfi = dsc_files_info();
2539 foreach my $fi (@dfi) {
2540 my $f = $fi->{Filename};
2541 next unless is_orig_file_in_dsc($f, \@dfi);
2542 complete_file_from_dsc('..', $fi)
2547 #---------- git fetch ----------
2549 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2550 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2552 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2553 # locally fetched refs because they have unhelpful names and clutter
2554 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2555 # whether we have made another local ref which refers to this object).
2557 # (If we deleted them unconditionally, then we might end up
2558 # re-fetching the same git objects each time dgit fetch was run.)
2560 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2561 # in git_fetch_us to fetch the refs in question, and possibly a call
2562 # to lrfetchref_used.
2564 our (%lrfetchrefs_f, %lrfetchrefs_d);
2565 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
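# (%lrfetchrefs_f records every ref we fetched; %lrfetchrefs_d is the
#  subset which lrfetchref_used has marked as "used up" and which
#  del_lrfetchrefs, in fetch_from_archive, may therefore delete again.)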
2567 sub lrfetchref_used ($) {
2568 my ($fullrefname) = @_;
2569 my $objid = $lrfetchrefs_f{$fullrefname};
2570 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2573 sub git_lrfetch_sane {
2574 my ($supplementary, @specs) = @_;
2575 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2576 # at least as regards @specs. Also leave the results in
2577 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2578 # able to clean these up.
2580 # With $supplementary==1, @specs must not contain wildcards
2581 # and we add to our previous fetches (non-atomically).
2583 # This is rather miserable:
2584 # When git fetch --prune is passed a fetchspec ending with a *,
2585 # it does a plausible thing. If there is no * then:
2586 # - it matches subpaths too, even if the supplied refspec
2587 # starts refs, and behaves completely madly if the source
2588 # has refs/refs/something. (See, for example, Debian #NNNN.)
2589 # - if there is no matching remote ref, it bombs out the whole
2591 # We want to fetch a fixed ref, and we don't know in advance
2592 # if it exists, so this is not suitable.
2594 # Our workaround is to use git ls-remote. git ls-remote has its
2595 # own quirks. Notably, it has the absurd multi-tail-matching
2596 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2597 # refs/refs/foo etc.
2599 # Also, we want an idempotent snapshot, but we have to make two
2600 # calls to the remote: one to git ls-remote and one to git fetch. The
2601 # solution is to use git ls-remote to obtain a target state, and
2602 # git fetch to try to generate it. If we don't manage to generate
2603 # the target state, we try again.
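# (Rough shape of the loop below, for orientation:
#    repeat, at most 10 times:
#        %wantr  <- refs reported by `git ls-remote $url @look`
#        git fetch the corresponding +refs/...:lrfetchrefs/... specs
#        delete or fix up any lrfetchrefs entries that disagree
#        stop once everything under lrfetchrefs matches %wantr
#  )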
2605 my $url = access_giturl();
2607 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2609 my $specre = join '|', map {
2612 my $wildcard = $x =~ s/\\\*$/.*/;
2613 die if $wildcard && $supplementary;
2616 printdebug "git_lrfetch_sane specre=$specre\n";
2617 my $wanted_rref = sub {
2619 return m/^(?:$specre)$/;
2622 my $fetch_iteration = 0;
2625 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2626 if (++$fetch_iteration > 10) {
2627 fail "too many iterations trying to get sane fetch!";
2630 my @look = map { "refs/$_" } @specs;
2631 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2635 open GITLS, "-|", @lcmd or die $!;
2637 printdebug "=> ", $_;
2638 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2639 my ($objid,$rrefname) = ($1,$2);
2640 if (!$wanted_rref->($rrefname)) {
2642 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2646 $wantr{$rrefname} = $objid;
2649 close GITLS or failedcmd @lcmd;
2651 # OK, now %wantr is exactly what we want for refs in @specs
2653 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2654 "+refs/$_:".lrfetchrefs."/$_";
2657 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2659 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2660 runcmd_ordryrun_local @fcmd if @fspecs;
2662 if (!$supplementary) {
2663 %lrfetchrefs_f = ();
2667 git_for_each_ref(lrfetchrefs, sub {
2668 my ($objid,$objtype,$lrefname,$reftail) = @_;
2669 $lrfetchrefs_f{$lrefname} = $objid;
2670 $objgot{$objid} = 1;
2673 if ($supplementary) {
2677 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2678 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2679 if (!exists $wantr{$rrefname}) {
2680 if ($wanted_rref->($rrefname)) {
2682 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2686 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2689 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2690 delete $lrfetchrefs_f{$lrefname};
2694 foreach my $rrefname (sort keys %wantr) {
2695 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2696 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2697 my $want = $wantr{$rrefname};
2698 next if $got eq $want;
2699 if (!defined $objgot{$want}) {
2701 warning: git ls-remote suggests we want $lrefname
2702 warning: and it should refer to $want
2703 warning: but git fetch didn't fetch that object to any relevant ref.
2704 warning: This may be due to a race with someone updating the server.
2705 warning: Will try again...
2707 next FETCH_ITERATION;
2710 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says we want $want
2712 runcmd_ordryrun_local @git, qw(update-ref -m),
2713 "dgit fetch git fetch fixup", $lrefname, $want;
2714 $lrfetchrefs_f{$lrefname} = $want;
2719 if (defined $csuite) {
2720 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2721 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2722 my ($objid,$objtype,$lrefname,$reftail) = @_;
2723 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2724 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2728 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2729 Dumper(\%lrfetchrefs_f);
2732 sub git_fetch_us () {
2733 # Want to fetch only what we are going to use, unless
2734 # deliberately-not-ff, in which case we must fetch everything.
2736 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2738 (quiltmode_splitbrain
2739 ? (map { $_->('*',access_nomdistro) }
2740 \&debiantag_new, \&debiantag_maintview)
2741 : debiantags('*',access_nomdistro));
2742 push @specs, server_branch($csuite);
2743 push @specs, $rewritemap;
2744 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2746 git_lrfetch_sane 0, @specs;
2749 my @tagpats = debiantags('*',access_nomdistro);
2751 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2752 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2753 printdebug "currently $fullrefname=$objid\n";
2754 $here{$fullrefname} = $objid;
2756 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2757 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2758 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2759 printdebug "offered $lref=$objid\n";
2760 if (!defined $here{$lref}) {
2761 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2762 runcmd_ordryrun_local @upd;
2763 lrfetchref_used $fullrefname;
2764 } elsif ($here{$lref} eq $objid) {
2765 lrfetchref_used $fullrefname;
2768 "Not updating $lref from $here{$lref} to $objid.\n";
2773 #---------- dsc and archive handling ----------
2775 sub mergeinfo_getclogp ($) {
2776 # Ensures that $mi->{Clogp} exists and returns it
2778 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2781 sub mergeinfo_version ($) {
2782 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2785 sub fetch_from_archive_record_1 ($) {
2787 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2788 'DGIT_ARCHIVE', $hash;
2789 cmdoutput @git, qw(log -n2), $hash;
2790 # ... gives git a chance to complain if our commit is malformed
2793 sub fetch_from_archive_record_2 ($) {
2795 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2799 dryrun_report @upd_cmd;
2803 sub parse_dsc_field_def_dsc_distro () {
2804 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2805 dgit.default.distro);
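# parse_dsc_field handles the Dgit field from a .dsc. Two forms are
# recognised below: an old-style bare commit hash, and a newer form
# which, per the regex, is roughly
#     <commit-hash> <distro> <hint-tag> <hint-url>
# (Any concrete values one might write here are illustrative only.)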
2808 sub parse_dsc_field ($$) {
2809 my ($dsc, $what) = @_;
2811 foreach my $field (@ourdscfield) {
2812 $f = $dsc->{$field};
2817 progress "$what: NO git hash";
2818 parse_dsc_field_def_dsc_distro();
2819 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2820 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2821 progress "$what: specified git info ($dsc_distro)";
2822 $dsc_hint_tag = [ $dsc_hint_tag ];
2823 } elsif ($f =~ m/^\w+\s*$/) {
2825 parse_dsc_field_def_dsc_distro();
2826 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2828 progress "$what: specified git hash";
2830 fail "$what: invalid Dgit info";
2834 sub resolve_dsc_field_commit ($$) {
2835 my ($already_distro, $already_mapref) = @_;
2837 return unless defined $dsc_hash;
2840 defined $already_mapref &&
2841 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2842 ? $already_mapref : undef;
2846 my ($what, @fetch) = @_;
2848 local $idistro = $dsc_distro;
2849 my $lrf = lrfetchrefs;
2851 if (!$chase_dsc_distro) {
2853 "not chasing .dsc distro $dsc_distro: not fetching $what";
2858 ".dsc names distro $dsc_distro: fetching $what";
2860 my $url = access_giturl();
2861 if (!defined $url) {
2862 defined $dsc_hint_url or fail <<END;
2863 .dsc Dgit metadata is in context of distro $dsc_distro
2864 for which we have no configured url and .dsc provides no hint
2867 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2868 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2869 parse_cfg_bool "dsc-url-proto-ok", 'false',
2870 cfg("dgit.dsc-url-proto-ok.$proto",
2871 "dgit.default.dsc-url-proto-ok")
2873 .dsc Dgit metadata is in context of distro $dsc_distro
2874 for which we have no configured url;
2875 .dsc provides a hinted url with protocol $proto which is unsafe.
2876 (can be overridden by config - consult documentation)
2878 $url = $dsc_hint_url;
2881 git_lrfetch_sane 1, @fetch;
2886 my $rewrite_enable = do {
2887 local $idistro = $dsc_distro;
2888 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2891 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2892 if (!defined $mapref) {
2893 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2894 $mapref = $lrf.'/'.$rewritemap;
2896 my $rewritemapdata = git_cat_file $mapref.':map';
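# (Judging by the match below, the map blob is plain text with one
#  "<old-objid> <new-objid>" pair per line; if $dsc_hash appears as an
#  old objid we substitute the server's replacement for it.)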
2897 if (defined $rewritemapdata
2898 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2900 "server's git history rewrite map contains a relevant entry!";
2903 if (defined $dsc_hash) {
2904 progress "using rewritten git hash in place of .dsc value";
2906 progress "server data says .dsc hash is to be disregarded";
2911 if (!defined git_cat_file $dsc_hash) {
2912 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2913 my $lrf = $do_fetch->("additional commits", @tags) &&
2914 defined git_cat_file $dsc_hash
2916 .dsc Dgit metadata requires commit $dsc_hash
2917 but we could not obtain that object anywhere.
2919 foreach my $t (@tags) {
2920 my $fullrefname = $lrf.'/'.$t;
2921 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2922 next unless $lrfetchrefs_f{$fullrefname};
2923 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2924 lrfetchref_used $fullrefname;
2929 sub fetch_from_archive () {
2930 ensure_setup_existing_tree();
2932 # Ensures that lrref() is what is actually in the archive, one way
2933 # or another, according to us - ie this client's
2934 # appropriately-updated archive view. Also returns the commit id.
2935 # If there is nothing in the archive, leaves lrref alone and
2936 # returns undef. git_fetch_us must have already been called.
2940 parse_dsc_field($dsc, 'last upload to archive');
2941 resolve_dsc_field_commit access_basedistro,
2942 lrfetchrefs."/".$rewritemap
2944 progress "no version available from the archive";
2947 # If the archive's .dsc has a Dgit field, there are three
2948 # relevant git commitids we need to choose between and/or merge
2950 # 1. $dsc_hash: the Dgit field from the archive
2951 # 2. $lastpush_hash: the suite branch on the dgit git server
2952 # 3. $lastfetch_hash: our local tracking branch for the suite
2954 # These may all be distinct and need not be in any fast forward
2957 # If the dsc was pushed to this suite, then the server suite
2958 # branch will have been updated; but it might have been pushed to
2959 # a different suite and copied by the archive. Conversely a more
2960 # recent version may have been pushed with dgit but not appeared
2961 # in the archive (yet).
2963 # $lastfetch_hash may be awkward because archive imports
2964 # (particularly, imports of Dgit-less .dscs) are performed only as
2965 # needed on individual clients, so different clients may perform a
2966 # different subset of them - and these imports are only made
2967 # public during push. So $lastfetch_hash may represent a set of
2968 # imports different to a subsequent upload by a different dgit
2971 # Our approach is as follows:
2973 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2974 # descendant of $dsc_hash, then it was pushed by a dgit user who
2975 # had based their work on $dsc_hash, so we should prefer it.
2976 # Otherwise, $dsc_hash was installed into this suite in the
2977 # archive other than by a dgit push, and (necessarily) after the
2978 # last dgit push into that suite (since a dgit push would have
2979 # been descended from the dgit server git branch); thus, in that
2980 # case, we prefer the archive's version (and produce a
2981 # pseudo-merge to overwrite the dgit server git branch).
2983 # (If there is no Dgit field in the archive's .dsc then
2984 # generate_commit_from_dsc uses the version numbers to decide
2985 # whether the suite branch or the archive is newer. If the suite
2986 # branch is newer it ignores the archive's .dsc; otherwise it
2987 # generates an import of the .dsc, and produces a pseudo-merge to
2988 # overwrite the suite branch with the archive contents.)
2990 # The outcome of that part of the algorithm is the `public view',
2991 # and is the same for all dgit clients: it does not depend on any
2992 # unpublished history in the local tracking branch.
2994 # As between the public view and the local tracking branch: The
2995 # local tracking branch is only updated by dgit fetch, and
2996 # whenever dgit fetch runs it includes the public view in the
2997 # local tracking branch. Therefore if the public view is not
2998 # descended from the local tracking branch, the local tracking
2999 # branch must contain history which was imported from the archive
3000 # but never pushed; and, its tip is now out of date. So, we make
3001 # a pseudo-merge to overwrite the old imports and stitch the old
3004 # Finally: we do not necessarily reify the public view (as
3005 # described above). This is so that we do not end up stacking two
3006 # pseudo-merges. So what we actually do is figure out the inputs
3007 # to any public view pseudo-merge and put them in @mergeinputs.
3010 # $mergeinputs[]{Commit}
3011 # $mergeinputs[]{Info}
3012 # $mergeinputs[0] is the one whose tree we use
3013 # @mergeinputs is in the order we use in the actual commit)
3016 # $mergeinputs[]{Message} is a commit message to use
3017 # $mergeinputs[]{ReverseParents} if def specifies that parent
3018 # list should be in opposite order
3019 # Such an entry has no Commit or Info. It applies only when found
3020 # in the last entry. (This ugliness is to support making
3021 # identical imports to previous dgit versions.)
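# (Illustrative example only: in the case where the archive's Dgit
#  hash is neither an ancestor nor a descendant of the server branch,
#  @mergeinputs ends up roughly as
#    ( { Commit => $dsc_hash,      Info => "Dgit field in .dsc from archive" },
#      { Commit => $lastpush_hash, Info => "dgit suite branch on dgit git server" } )
#  and the first entry supplies the tree of the resulting pseudo-merge.)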
3023 my $lastpush_hash = git_get_ref(lrfetchref());
3024 printdebug "previous reference hash=$lastpush_hash\n";
3025 $lastpush_mergeinput = $lastpush_hash && {
3026 Commit => $lastpush_hash,
3027 Info => "dgit suite branch on dgit git server",
3030 my $lastfetch_hash = git_get_ref(lrref());
3031 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3032 my $lastfetch_mergeinput = $lastfetch_hash && {
3033 Commit => $lastfetch_hash,
3034 Info => "dgit client's archive history view",
3037 my $dsc_mergeinput = $dsc_hash && {
3038 Commit => $dsc_hash,
3039 Info => "Dgit field in .dsc from archive",
3043 my $del_lrfetchrefs = sub {
3046 printdebug "del_lrfetchrefs...\n";
3047 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3048 my $objid = $lrfetchrefs_d{$fullrefname};
3049 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3051 $gur ||= new IO::Handle;
3052 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3054 printf $gur "delete %s %s\n", $fullrefname, $objid;
3057 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3061 if (defined $dsc_hash) {
3062 ensure_we_have_orig();
3063 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3064 @mergeinputs = $dsc_mergeinput
3065 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3066 print STDERR <<END or die $!;
3068 Git commit in archive is behind the last version allegedly pushed/uploaded.
3069 Commit referred to by archive: $dsc_hash
3070 Last version pushed with dgit: $lastpush_hash
3073 @mergeinputs = ($lastpush_mergeinput);
3075 # Archive has .dsc which is not a descendant of the last dgit
3076 # push. This can happen if the archive moves .dscs about.
3077 # Just follow its lead.
3078 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3079 progress "archive .dsc names newer git commit";
3080 @mergeinputs = ($dsc_mergeinput);
3082 progress "archive .dsc names other git commit, fixing up";
3083 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3087 @mergeinputs = generate_commits_from_dsc();
3088 # We have just done an import. Now, our import algorithm might
3089 # have been improved. But even so we do not want to generate
3090 # a new different import of the same package. So if the
3091 # version numbers are the same, just use our existing version.
3092 # If the version numbers are different, the archive has changed
3093 # (perhaps, rewound).
3094 if ($lastfetch_mergeinput &&
3095 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3096 (mergeinfo_version $mergeinputs[0]) )) {
3097 @mergeinputs = ($lastfetch_mergeinput);
3099 } elsif ($lastpush_hash) {
3100 # only in git, not in the archive yet
3101 @mergeinputs = ($lastpush_mergeinput);
3102 print STDERR <<END or die $!;
3104 Package not found in the archive, but has allegedly been pushed using dgit.
3108 printdebug "nothing found!\n";
3109 if (defined $skew_warning_vsn) {
3110 print STDERR <<END or die $!;
3112 Warning: relevant archive skew detected.
3113 Archive allegedly contains $skew_warning_vsn
3114 But we were not able to obtain any version from the archive or git.
3118 unshift @end, $del_lrfetchrefs;
3122 if ($lastfetch_hash &&
3124 my $h = $_->{Commit};
3125 $h and is_fast_fwd($lastfetch_hash, $h);
3126 # If true, one of the existing parents of this commit
3127 # is a descendant of the $lastfetch_hash, so we'll
3128 # be ff from that automatically.
3132 push @mergeinputs, $lastfetch_mergeinput;
3135 printdebug "fetch mergeinfos:\n";
3136 foreach my $mi (@mergeinputs) {
3138 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3140 printdebug sprintf " ReverseParents=%d Message=%s",
3141 $mi->{ReverseParents}, $mi->{Message};
3145 my $compat_info= pop @mergeinputs
3146 if $mergeinputs[$#mergeinputs]{Message};
3148 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3151 if (@mergeinputs > 1) {
3153 my $tree_commit = $mergeinputs[0]{Commit};
3155 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3156 $tree =~ m/\n\n/; $tree = $`;
3157 $tree =~ m/^tree (\w+)$/m or die "$tree_commit tree ?";
3160 # We use the changelog author of the package in question as the
3161 # author of this pseudo-merge. This is (roughly) correct if
3162 # this commit is simply representing a non-dgit upload.
3163 # (Roughly because it does not record sponsorship - but we
3164 # don't have sponsorship info because that's in the .changes,
3165 # which isn't in the archive.)
3167 # But, it might be that we are representing archive history
3168 # updates (including in-archive copies). These are not really
3169 # the responsibility of the person who created the .dsc, but
3170 # there is no-one whose name we should better use. (The
3171 # author of the .dsc-named commit is clearly worse.)
3173 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3174 my $author = clogp_authline $useclogp;
3175 my $cversion = getfield $useclogp, 'Version';
3177 my $mcf = ".git/dgit/mergecommit";
3178 open MC, ">", $mcf or die "$mcf $!";
3179 print MC <<END or die $!;
3183 my @parents = grep { $_->{Commit} } @mergeinputs;
3184 @parents = reverse @parents if $compat_info->{ReverseParents};
3185 print MC <<END or die $! foreach @parents;
3189 print MC <<END or die $!;
3195 if (defined $compat_info->{Message}) {
3196 print MC $compat_info->{Message} or die $!;
3198 print MC <<END or die $!;
3199 Record $package ($cversion) in archive suite $csuite
3203 my $message_add_info = sub {
3205 my $mversion = mergeinfo_version $mi;
3206 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3210 $message_add_info->($mergeinputs[0]);
3211 print MC <<END or die $!;
3212 should be treated as descended from
3214 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3218 $hash = make_commit $mcf;
3220 $hash = $mergeinputs[0]{Commit};
3222 printdebug "fetch hash=$hash\n";
3225 my ($lasth, $what) = @_;
3226 return unless $lasth;
3227 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3230 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3232 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3234 fetch_from_archive_record_1($hash);
3236 if (defined $skew_warning_vsn) {
3238 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3239 my $gotclogp = commit_getclogp($hash);
3240 my $got_vsn = getfield $gotclogp, 'Version';
3241 printdebug "SKEW CHECK GOT $got_vsn\n";
3242 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3243 print STDERR <<END or die $!;
3245 Warning: archive skew detected. Using the available version:
3246 Archive allegedly contains $skew_warning_vsn
3247 We were able to obtain only $got_vsn
3253 if ($lastfetch_hash ne $hash) {
3254 fetch_from_archive_record_2($hash);
3257 lrfetchref_used lrfetchref();
3259 unshift @end, $del_lrfetchrefs;
3263 sub set_local_git_config ($$) {
3265 runcmd @git, qw(config), $k, $v;
3268 sub setup_mergechangelogs (;$) {
3270 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3272 my $driver = 'dpkg-mergechangelogs';
3273 my $cb = "merge.$driver";
3274 my $attrs = '.git/info/attributes';
3275 ensuredir '.git/info';
3277 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3278 if (!open ATTRS, "<", $attrs) {
3279 $!==ENOENT or die "$attrs: $!";
3283 next if m{^debian/changelog\s};
3284 print NATTRS $_, "\n" or die $!;
3286 ATTRS->error and die $!;
3289 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3292 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3293 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3295 rename "$attrs.new", "$attrs" or die "$attrs: $!";
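# (Net effect, for orientation: .git/info/attributes gains the line
#      debian/changelog merge=dpkg-mergechangelogs
#  and the local git config gains merge.dpkg-mergechangelogs.name and
#  .driver entries, so git merges debian/changelog using
#  dpkg-mergechangelogs rather than the default merge driver.)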
3298 sub setup_useremail (;$) {
3300 return unless $always || access_cfg_bool(1, 'setup-useremail');
3303 my ($k, $envvar) = @_;
3304 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3305 return unless defined $v;
3306 set_local_git_config "user.$k", $v;
3309 $setup->('email', 'DEBEMAIL');
3310 $setup->('name', 'DEBFULLNAME');
3313 sub ensure_setup_existing_tree () {
3314 my $k = "remote.$remotename.skipdefaultupdate";
3315 my $c = git_get_config $k;
3316 return if defined $c;
3317 set_local_git_config $k, 'true';
3320 sub open_gitattrs () {
3321 my $gai = new IO::File ".git/info/attributes"
3323 or die "open .git/info/attributes: $!";
3327 sub is_gitattrs_setup () {
3328 my $gai = open_gitattrs();
3329 return 0 unless $gai;
3331 return 1 if m{^\[attr\]dgit-defuse-attrs\s};
3333 $gai->error and die $!;
3337 sub setup_gitattrs (;$) {
3339 return unless $always || access_cfg_bool(1, 'setup-gitattributes');
3341 if (is_gitattrs_setup()) {
3343 [attr]dgit-defuse-attrs already found in .git/info/attributes
3344 not doing further gitattributes setup
3348 my $af = ".git/info/attributes";
3349 open GAO, "> $af.new" or die $!;
3350 print GAO <<END or die $!;
3352 [attr]dgit-defuse-attrs -text -eol -crlf -ident -filter
3353 # ^ see dgit(7). To undo, leave a definition of [attr]dgit-defuse-attrs
3355 my $gai = open_gitattrs();
3359 print GAO $_, "\n" or die $!;
3361 $gai->error and die $!;
3363 close GAO or die $!;
3364 rename "$af.new", "$af" or die "install $af: $!";
3367 sub setup_new_tree () {
3368 setup_mergechangelogs();
3373 sub multisuite_suite_child ($$$) {
3374 my ($tsuite, $merginputs, $fn) = @_;
3375 # in child, sets things up, calls $fn->(), and returns undef
3376 # in parent, returns canonical suite name for $tsuite
3377 my $canonsuitefh = IO::File::new_tmpfile;
3378 my $pid = fork // die $!;
3382 $us .= " [$isuite]";
3383 $debugprefix .= " ";
3384 progress "fetching $tsuite...";
3385 canonicalise_suite();
3386 print $canonsuitefh $csuite, "\n" or die $!;
3387 close $canonsuitefh or die $!;
3391 (waitpid $pid, 0) == $pid or die $!;
3392 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3393 seek $canonsuitefh,0,0 or die $!;
3394 local $csuite = <$canonsuitefh>;
3395 die $! unless defined $csuite && chomp $csuite;
3397 printdebug "multisuite $tsuite missing\n";
3400 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3401 push @$merginputs, {
3408 sub fork_for_multisuite ($) {
3409 my ($before_fetch_merge) = @_;
3410 # if nothing unusual, just returns ''
3413 # returns 0 to caller in child, to do first of the specified suites
3414 # in child, $csuite is not yet set
3416 # returns 1 to caller in parent, to finish up anything needed after
3417 # in parent, $csuite is set to canonicalised portmanteau
3419 my $org_isuite = $isuite;
3420 my @suites = split /\,/, $isuite;
3421 return '' unless @suites > 1;
3422 printdebug "fork_for_multisuite: @suites\n";
3426 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3428 return 0 unless defined $cbasesuite;
3430 fail "package $package missing in (base suite) $cbasesuite"
3431 unless @mergeinputs;
3433 my @csuites = ($cbasesuite);
3435 $before_fetch_merge->();
3437 foreach my $tsuite (@suites[1..$#suites]) {
3438 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3444 # xxx collect the ref here
3446 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3447 push @csuites, $csubsuite;
3450 foreach my $mi (@mergeinputs) {
3451 my $ref = git_get_ref $mi->{Ref};
3452 die "$mi->{Ref} ?" unless length $ref;
3453 $mi->{Commit} = $ref;
3456 $csuite = join ",", @csuites;
3458 my $previous = git_get_ref lrref;
3460 unshift @mergeinputs, {
3461 Commit => $previous,
3462 Info => "local combined tracking branch",
3464 "archive seems to have rewound: local tracking branch is ahead!",
3468 foreach my $ix (0..$#mergeinputs) {
3469 $mergeinputs[$ix]{Index} = $ix;
3472 @mergeinputs = sort {
3473 -version_compare(mergeinfo_version $a,
3474 mergeinfo_version $b) # highest version first
3476 $a->{Index} <=> $b->{Index}; # earliest in spec first
3482 foreach my $mi (@mergeinputs) {
3483 printdebug "multisuite merge check $mi->{Info}\n";
3484 foreach my $previous (@needed) {
3485 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3486 printdebug "multisuite merge un-needed $previous->{Info}\n";
3490 printdebug "multisuite merge this-needed\n";
3491 $mi->{Character} = '+';
3494 $needed[0]{Character} = '*';
3496 my $output = $needed[0]{Commit};
3499 printdebug "multisuite merge nontrivial\n";
3500 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3502 my $commit = "tree $tree\n";
3503 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3504 "Input branches:\n";
3506 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3507 printdebug "multisuite merge include $mi->{Info}\n";
3508 $mi->{Character} //= ' ';
3509 $commit .= "parent $mi->{Commit}\n";
3510 $msg .= sprintf " %s %-25s %s\n",
3512 (mergeinfo_version $mi),
3515 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3517 " * marks the highest version branch, which choose to use\n".
3518 " + marks each branch which was not already an ancestor\n\n".
3519 "[dgit multi-suite $csuite]\n";
3521 "author $authline\n".
3522 "committer $authline\n\n";
3523 $output = make_commit_text $commit.$msg;
3524 printdebug "multisuite merge generated $output\n";
3527 fetch_from_archive_record_1($output);
3528 fetch_from_archive_record_2($output);
3530 progress "calculated combined tracking suite $csuite";
3535 sub clone_set_head () {
3536 open H, "> .git/HEAD" or die $!;
3537 print H "ref: ".lref()."\n" or die $!;
3540 sub clone_finish ($) {
3542 runcmd @git, qw(reset --hard), lrref();
3543 runcmd qw(bash -ec), <<'END';
3545 git ls-tree -r --name-only -z HEAD | \
3546 xargs -0r touch -h -r . --
3548 printdone "ready for work in $dstdir";
3553 badusage "dry run makes no sense with clone" unless act_local();
3555 my $multi_fetched = fork_for_multisuite(sub {
3556 printdebug "multi clone before fetch merge\n";
3559 if ($multi_fetched) {
3560 printdebug "multi clone after fetch merge\n";
3562 clone_finish($dstdir);
3565 printdebug "clone main body\n";
3567 canonicalise_suite();
3568 my $hasgit = check_for_git();
3569 mkdir $dstdir or fail "create \`$dstdir': $!";
3571 runcmd @git, qw(init -q);
3573 my $giturl = access_giturl(1);
3574 if (defined $giturl) {
3575 runcmd @git, qw(remote add), 'origin', $giturl;
3578 progress "fetching existing git history";
3580 runcmd_ordryrun_local @git, qw(fetch origin);
3582 progress "starting new git history";
3584 fetch_from_archive() or no_such_package;
3585 my $vcsgiturl = $dsc->{'Vcs-Git'};
3586 if (length $vcsgiturl) {
3587 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3588 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3591 clone_finish($dstdir);
3595 canonicalise_suite();
3596 if (check_for_git()) {
3599 fetch_from_archive() or no_such_package();
3600 printdone "fetched into ".lrref();
3604 my $multi_fetched = fork_for_multisuite(sub { });
3605 fetch() unless $multi_fetched; # parent
3606 return if $multi_fetched eq '0'; # child
3607 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3609 printdone "fetched to ".lrref()." and merged into HEAD";
3612 sub check_not_dirty () {
3613 foreach my $f (qw(local-options local-patch-header)) {
3614 if (stat_exists "debian/source/$f") {
3615 fail "git tree contains debian/source/$f";
3619 return if $ignoredirty;
3621 my @cmd = (@git, qw(diff --quiet HEAD));
3623 $!=0; $?=-1; system @cmd;
3626 fail "working tree is dirty (does not match HEAD)";
3632 sub commit_admin ($) {
3635 runcmd_ordryrun_local @git, qw(commit -m), $m;
3638 sub commit_quilty_patch () {
3639 my $output = cmdoutput @git, qw(status --porcelain);
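# (We are looking for porcelain status lines such as "?? .pc/" or
#  " M debian/patches/series"; other lines are left alone.)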
3641 foreach my $l (split /\n/, $output) {
3642 next unless $l =~ m/\S/;
2643 if ($l =~ m{^(?:\?\?| M) (\.pc|debian/patches)}) {
3647 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3649 progress "nothing quilty to commit, ok.";
3652 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3653 runcmd_ordryrun_local @git, qw(add -f), @adds;
3655 Commit Debian 3.0 (quilt) metadata
3657 [dgit ($our_version) quilt-fixup]
3661 sub get_source_format () {
3663 if (open F, "debian/source/options") {
3667 s/\s+$//; # ignore missing final newline
3669 my ($k, $v) = ($`, $'); #');
3670 $v =~ s/^"(.*)"$/$1/;
3676 F->error and die $!;
3679 die $! unless $!==&ENOENT;
3682 if (!open F, "debian/source/format") {
3683 die $! unless $!==&ENOENT;
3687 F->error and die $!;
3689 return ($_, \%options);
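# (So for a typical quilt package this returns something like
#  ("3.0 (quilt)", { ... keys parsed from debian/source/options ... });
#  the example format string is the common case, not the only one.)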
3692 sub madformat_wantfixup ($) {
3694 return 0 unless $format eq '3.0 (quilt)';
3695 our $quilt_mode_warned;
3696 if ($quilt_mode eq 'nocheck') {
3697 progress "Not doing any fixup of \`$format' due to".
3698 " ----no-quilt-fixup or --quilt=nocheck"
3699 unless $quilt_mode_warned++;
3702 progress "Format \`$format', need to check/update patch stack"
3703 unless $quilt_mode_warned++;
3707 sub maybe_split_brain_save ($$$) {
3708 my ($headref, $dgitview, $msg) = @_;
3709 # => message fragment "$saved" describing disposition of $dgitview
3710 return "commit id $dgitview" unless defined $split_brain_save;
3711 my @cmd = (shell_cmd "cd ../../../..",
3712 @git, qw(update-ref -m),
3713 "dgit --dgit-view-save $msg HEAD=$headref",
3714 $split_brain_save, $dgitview);
3716 return "and left in $split_brain_save";
3719 # An "infopair" is a tuple [ $thing, $what ]
3720 # (often $thing is a commit hash; $what