3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::Util qw(any);
38 use List::MoreUtils qw(pairwise);
39 use Text::Glob qw(match_glob);
40 use Fcntl qw(:DEFAULT :flock);
45 our $our_version = 'UNRELEASED'; ###substituted###
46 our $absurdity = undef; ###substituted###
48 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
51 our $isuite = 'unstable';
57 our $dryrun_level = 0;
59 our $buildproductsdir = '..';
65 our $existing_package = 'dpkg';
67 our $changes_since_version;
69 our $overwrite_version; # undef: not specified; '': check changelog
71 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
73 our $dodep14tag_re = 'want|no|always';
74 our $split_brain_save;
75 our $we_are_responder;
76 our $initiator_tempdir;
77 our $patches_applied_dirtily = 00;
81 our $chase_dsc_distro=1;
83 our %forceopts = map { $_=>0 }
84 qw(unrepresentable unsupported-source-format
85 dsc-changes-mismatch changes-origs-exactly
86 import-gitapply-absurd
87 import-gitapply-no-absurd
88 import-dsc-with-dgit-field);
90 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
92 our $suite_re = '[-+.0-9a-z]+';
93 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
94 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
95 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
96 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
98 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
99 our $splitbraincache = 'dgit-intern/quilt-cache';
100 our $rewritemap = 'dgit-rewrite/map';
102 our (@git) = qw(git);
103 our (@dget) = qw(dget);
104 our (@curl) = qw(curl);
105 our (@dput) = qw(dput);
106 our (@debsign) = qw(debsign);
107 our (@gpg) = qw(gpg);
108 our (@sbuild) = qw(sbuild);
110 our (@dgit) = qw(dgit);
111 our (@aptget) = qw(apt-get);
112 our (@aptcache) = qw(apt-cache);
113 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
114 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
115 our (@dpkggenchanges) = qw(dpkg-genchanges);
116 our (@mergechanges) = qw(mergechanges -f);
117 our (@gbp_build) = ('');
118 our (@gbp_pq) = ('gbp pq');
119 our (@changesopts) = ('');
121 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
124 'debsign' => \@debsign,
126 'sbuild' => \@sbuild,
130 'apt-get' => \@aptget,
131 'apt-cache' => \@aptcache,
132 'dpkg-source' => \@dpkgsource,
133 'dpkg-buildpackage' => \@dpkgbuildpackage,
134 'dpkg-genchanges' => \@dpkggenchanges,
135 'gbp-build' => \@gbp_build,
136 'gbp-pq' => \@gbp_pq,
137 'ch' => \@changesopts,
138 'mergechanges' => \@mergechanges);
140 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
141 our %opts_cfg_insertpos = map {
143 scalar @{ $opts_opt_map{$_} }
144 } keys %opts_opt_map;
146 sub parseopts_late_defaults();
152 our $supplementary_message = '';
153 our $need_split_build_invocation = 0;
154 our $split_brain = 0;
158 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
161 our $remotename = 'dgit';
162 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
166 if (!defined $absurdity) {
168 $absurdity =~ s{/[^/]+$}{/absurd} or die;
172 my ($v,$distro) = @_;
173 return $tagformatfn->($v, $distro);
176 sub debiantag_maintview ($$) {
177 my ($v,$distro) = @_;
178 return "$distro/".dep14_version_mangle $v;
181 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
183 sub lbranch () { return "$branchprefix/$csuite"; }
184 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
185 sub lref () { return "refs/heads/".lbranch(); }
186 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
187 sub rrref () { return server_ref($csuite); }
197 return "${package}_".(stripepoch $vsn).$sfx
202 return srcfn($vsn,".dsc");
205 sub changespat ($;$) {
206 my ($vsn, $arch) = @_;
207 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
210 sub upstreamversion ($) {
222 foreach my $f (@end) {
224 print STDERR "$us: cleanup: $@" if length $@;
228 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
230 sub forceable_fail ($$) {
231 my ($forceoptsl, $msg) = @_;
232 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
233 print STDERR "warning: overriding problem due to --force:\n". $msg;
237 my ($forceoptsl) = @_;
238 my @got = grep { $forceopts{$_} } @$forceoptsl;
239 return 0 unless @got;
241 "warning: skipping checks or functionality due to --force-$got[0]\n";
244 sub no_such_package () {
245 print STDERR "$us: package $package does not exist in suite $isuite\n";
251 printdebug "CD $newdir\n";
252 chdir $newdir or confess "chdir: $newdir: $!";
255 sub deliberately ($) {
257 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
260 sub deliberately_not_fast_forward () {
261 foreach (qw(not-fast-forward fresh-repo)) {
262 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
266 sub quiltmode_splitbrain () {
267 $quilt_mode =~ m/gbp|dpm|unapplied/;
270 sub opts_opt_multi_cmd {
272 push @cmd, split /\s+/, shift @_;
278 return opts_opt_multi_cmd @gbp_pq;
281 #---------- remote protocol support, common ----------
283 # remote push initiator/responder protocol:
284 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
285 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
286 # < dgit-remote-push-ready <actual-proto-vsn>
293 # > supplementary-message NBYTES # $protovsn >= 3
298 # > file parsed-changelog
299 # [indicates that output of dpkg-parsechangelog follows]
300 # > data-block NBYTES
301 # > [NBYTES bytes of data (no newline)]
302 # [maybe some more blocks]
311 # > param head DGIT-VIEW-HEAD
312 # > param csuite SUITE
313 # > param tagformat old|new
314 # > param maint-view MAINT-VIEW-HEAD
316 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
317 # # goes into tag, for replay prevention
320 # [indicates that signed tag is wanted]
321 # < data-block NBYTES
322 # < [NBYTES bytes of data (no newline)]
323 # [maybe some more blocks]
327 # > want signed-dsc-changes
328 # < data-block NBYTES [transfer of signed dsc]
330 # < data-block NBYTES [transfer of signed changes]
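#
# Illustrative sketch only (not an addition to the spec above): a file
# transfer such as the parsed changelog is framed as
#   > file parsed-changelog
#   > data-block 1234
#   > [1234 bytes of data]
#   > data-end
# and the same data-block/data-end framing is used in the other
# direction for the signed tag, .dsc and .changes.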
338 sub i_child_report () {
339 # Sees if our child has died, and reaps it if so. Returns a string
340 # describing how it died if it failed, or undef otherwise.
341 return undef unless $i_child_pid;
342 my $got = waitpid $i_child_pid, WNOHANG;
343 return undef if $got <= 0;
344 die unless $got == $i_child_pid;
345 $i_child_pid = undef;
346 return undef unless $?;
347 return "build host child ".waitstatusmsg();
352 fail "connection lost: $!" if $fh->error;
353 fail "protocol violation; $m not expected";
356 sub badproto_badread ($$) {
358 fail "connection lost: $!" if $!;
359 my $report = i_child_report();
360 fail $report if defined $report;
361 badproto $fh, "eof (reading $wh)";
364 sub protocol_expect (&$) {
365 my ($match, $fh) = @_;
368 defined && chomp or badproto_badread $fh, "protocol message";
376 badproto $fh, "\`$_'";
379 sub protocol_send_file ($$) {
380 my ($fh, $ourfn) = @_;
381 open PF, "<", $ourfn or die "$ourfn: $!";
384 my $got = read PF, $d, 65536;
385 die "$ourfn: $!" unless defined $got;
387 print $fh "data-block ".length($d)."\n" or die $!;
388 print $fh $d or die $!;
390 PF->error and die "$ourfn $!";
391 print $fh "data-end\n" or die $!;
395 sub protocol_read_bytes ($$) {
396 my ($fh, $nbytes) = @_;
397 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
399 my $got = read $fh, $d, $nbytes;
400 $got==$nbytes or badproto_badread $fh, "data block";
404 sub protocol_receive_file ($$) {
405 my ($fh, $ourfn) = @_;
406 printdebug "() $ourfn\n";
407 open PF, ">", $ourfn or die "$ourfn: $!";
409 my ($y,$l) = protocol_expect {
410 m/^data-block (.*)$/ ? (1,$1) :
411 m/^data-end$/ ? (0,) :
415 my $d = protocol_read_bytes $fh, $l;
416 print PF $d or die $!;
421 #---------- remote protocol support, responder ----------
423 sub responder_send_command ($) {
425 return unless $we_are_responder;
426 # (this is called even when we are not the responder, hence the return just above)
427 printdebug ">> $command\n";
428 print PO $command, "\n" or die $!;
431 sub responder_send_file ($$) {
432 my ($keyword, $ourfn) = @_;
433 return unless $we_are_responder;
434 printdebug "]] $keyword $ourfn\n";
435 responder_send_command "file $keyword";
436 protocol_send_file \*PO, $ourfn;
439 sub responder_receive_files ($@) {
440 my ($keyword, @ourfns) = @_;
441 die unless $we_are_responder;
442 printdebug "[[ $keyword @ourfns\n";
443 responder_send_command "want $keyword";
444 foreach my $fn (@ourfns) {
445 protocol_receive_file \*PI, $fn;
448 protocol_expect { m/^files-end$/ } \*PI;
451 #---------- remote protocol support, initiator ----------
453 sub initiator_expect (&) {
455 protocol_expect { &$match } \*RO;
458 #---------- end remote code ----------
461 if ($we_are_responder) {
463 responder_send_command "progress ".length($m) or die $!;
464 print PO $m or die $!;
474 $ua = LWP::UserAgent->new();
478 progress "downloading $what...";
479 my $r = $ua->get(@_) or die $!;
480 return undef if $r->code == 404;
481 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
482 return $r->decoded_content(charset => 'none');
485 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
490 failedcmd @_ if system @_;
493 sub act_local () { return $dryrun_level <= 1; }
494 sub act_scary () { return !$dryrun_level; }
497 if (!$dryrun_level) {
498 progress "$us ok: @_";
500 progress "would be ok: @_ (but dry run only)";
505 printcmd(\*STDERR,$debugprefix."#",@_);
508 sub runcmd_ordryrun {
516 sub runcmd_ordryrun_local {
525 my ($first_shell, @cmd) = @_;
526 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
529 our $helpmsg = <<END;
531 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
532 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
533 dgit [dgit-opts] build [dpkg-buildpackage-opts]
534 dgit [dgit-opts] sbuild [sbuild-opts]
535 dgit [dgit-opts] push [dgit-opts] [suite]
536 dgit [dgit-opts] rpush build-host:build-dir ...
537 important dgit options:
538 -k<keyid> sign tag and package with <keyid> instead of default
539 --dry-run -n do not change anything, but go through the motions
540 --damp-run -L like --dry-run but make local changes, without signing
541 --new -N allow introducing a new package
542 --debug -D increase debug level
543 -c<name>=<value> set git config option (used directly by dgit too)
546 our $later_warning_msg = <<END;
547 Perhaps the upload is stuck in incoming. Using the version from git.
551 print STDERR "$us: @_\n", $helpmsg or die $!;
556 @ARGV or badusage "too few arguments";
557 return scalar shift @ARGV;
561 print $helpmsg or die $!;
565 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
567 our %defcfg = ('dgit.default.distro' => 'debian',
568 'dgit.default.old-dsc-distro' => 'debian',
569 'dgit-suite.*-security.distro' => 'debian-security',
570 'dgit.default.username' => '',
571 'dgit.default.archive-query-default-component' => 'main',
572 'dgit.default.ssh' => 'ssh',
573 'dgit.default.archive-query' => 'madison:',
574 'dgit.default.sshpsql-dbname' => 'service=projectb',
575 'dgit.default.aptget-components' => 'main',
576 'dgit.default.dgit-tag-format' => 'new,old,maint',
577 'dgit.dsc-url-proto-ok.http' => 'true',
578 'dgit.dsc-url-proto-ok.https' => 'true',
579 'dgit.dsc-url-proto-ok.git' => 'true',
580 'dgit.default.dsc-url-proto-ok' => 'false',
581 # old means "repo server accepts pushes with old dgit tags"
582 # new means "repo server accepts pushes with new dgit tags"
583 # maint means "repo server accepts split brain pushes"
584 # hist means "repo server may have old pushes without new tag"
585 # ("hist" is implied by "old")
586 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
587 'dgit-distro.debian.git-check' => 'url',
588 'dgit-distro.debian.git-check-suffix' => '/info/refs',
589 'dgit-distro.debian.new-private-pushers' => 't',
590 'dgit-distro.debian/push.git-url' => '',
591 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
592 'dgit-distro.debian/push.git-user-force' => 'dgit',
593 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
594 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
595 'dgit-distro.debian/push.git-create' => 'true',
596 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
597 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
598 # 'dgit-distro.debian.archive-query-tls-key',
599 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
600 # ^ this does not work because curl is broken nowadays
601 # Fixing #790093 properly will involve providing the key
602 # in some package and maybe updating these paths.
604 # 'dgit-distro.debian.archive-query-tls-curl-args',
605 # '--ca-path=/etc/ssl/ca-debian',
606 # ^ this is a workaround but works (only) on DSA-administered machines
607 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
608 'dgit-distro.debian.git-url-suffix' => '',
609 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
610 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
611 'dgit-distro.debian-security.archive-query' => 'aptget:',
612 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
613 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
614 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
615 'dgit-distro.debian-security.nominal-distro' => 'debian',
616 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
617 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
618 'dgit-distro.ubuntu.git-check' => 'false',
619 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
620 'dgit-distro.test-dummy.ssh' => "$td/ssh",
621 'dgit-distro.test-dummy.username' => "alice",
622 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
623 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
624 'dgit-distro.test-dummy.git-url' => "$td/git",
625 'dgit-distro.test-dummy.git-host' => "git",
626 'dgit-distro.test-dummy.git-path' => "$td/git",
627 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
628 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
629 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
630 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
634 our @gitcfgsources = qw(cmdline local global system);
636 sub git_slurp_config () {
637 local ($debuglevel) = $debuglevel-2;
640 # This algorithm is a bit subtle, but this is needed so that for
641 # options which we want to be single-valued, we allow the
642 # different config sources to override properly. See #835858.
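# Illustration (hypothetical values): if dgit.default.distro were set
# in both ~/.gitconfig ("global") and /etc/gitconfig ("system"), keeping
# the sources separate lets git_get_config below return the global value,
# rather than treating the pair as one multi-valued key.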
643 foreach my $src (@gitcfgsources) {
644 next if $src eq 'cmdline';
645 # we do this ourselves since git doesn't handle it
647 my @cmd = (@git, qw(config -z --get-regexp), "--$src", qw(.*));
650 open GITS, "-|", @cmd or die $!;
653 printdebug "=> ", (messagequote $_), "\n";
655 push @{ $gitcfgs{$src}{$`} }, $'; #';
659 or ($!==0 && $?==256)
664 sub git_get_config ($) {
666 foreach my $src (@gitcfgsources) {
667 my $l = $gitcfgs{$src}{$c};
668 croak "$l $c" if $l && !ref $l;
669 printdebug"C $c ".(defined $l ?
670 join " ", map { messagequote "'$_'" } @$l :
674 badcfg "multiple values for $c".
675 " (in $src git config)" if @$l > 1;
683 return undef if $c =~ /RETURN-UNDEF/;
684 printdebug "C? $c\n" if $debuglevel >= 5;
685 my $v = git_get_config($c);
686 return $v if defined $v;
687 my $dv = $defcfg{$c};
689 printdebug "CD $c $dv\n" if $debuglevel >= 4;
693 badcfg "need value for one of: @_\n".
694 "$us: distro or suite appears not to be (properly) supported";
697 sub access_basedistro__noalias () {
698 if (defined $idistro) {
701 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
702 return $def if defined $def;
703 foreach my $src (@gitcfgsources, 'internal') {
704 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
706 foreach my $k (keys %$kl) {
707 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
709 next unless match_glob $dpat, $isuite;
713 return cfg("dgit.default.distro");
717 sub access_basedistro () {
718 my $noalias = access_basedistro__noalias();
719 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
720 return $canon // $noalias;
723 sub access_nomdistro () {
724 my $base = access_basedistro();
725 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
726 $r =~ m/^$distro_re$/ or badcfg
727 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
731 sub access_quirk () {
732 # returns (quirk name, distro to use instead or undef, quirk-specific info)
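# Worked example using the default Debian configuration above, where
# backports-quirk is '(squeeze)-backports*': for $isuite
# "squeeze-backports" this returns ('backports', 'debian-backports',
# 'squeeze'), i.e. use the debian-backports configuration and note the
# base suite.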
733 my $basedistro = access_basedistro();
734 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
736 if (defined $backports_quirk) {
737 my $re = $backports_quirk;
738 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
740 $re =~ s/\%/([-0-9a-z_]+)/
741 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
742 if ($isuite =~ m/^$re$/) {
743 return ('backports',"$basedistro-backports",$1);
746 return ('none',undef);
751 sub parse_cfg_bool ($$$) {
752 my ($what,$def,$v) = @_;
755 $v =~ m/^[ty1]/ ? 1 :
756 $v =~ m/^[fn0]/ ? 0 :
757 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
760 sub access_forpush_config () {
761 my $d = access_basedistro();
765 parse_cfg_bool('new-private-pushers', 0,
766 cfg("dgit-distro.$d.new-private-pushers",
769 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
772 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
773 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
774 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
775 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
778 sub access_forpush () {
779 $access_forpush //= access_forpush_config();
780 return $access_forpush;
784 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
785 badcfg "pushing but distro is configured readonly"
786 if access_forpush_config() eq '0';
788 $supplementary_message = <<'END' unless $we_are_responder;
789 Push failed, before we got started.
790 You can retry the push, after fixing the problem, if you like.
792 parseopts_late_defaults();
796 parseopts_late_defaults();
799 sub supplementary_message ($) {
801 if (!$we_are_responder) {
802 $supplementary_message = $msg;
804 } elsif ($protovsn >= 3) {
805 responder_send_command "supplementary-message ".length($msg)
807 print PO $msg or die $!;
811 sub access_distros () {
812 # Returns list of distros to try, in order
815 # 0. `instead of' distro name(s) we have been pointed to
816 # 1. the access_quirk distro, if any
817 # 2a. the user's specified distro, or failing that } basedistro
818 # 2b. the distro calculated from the suite }
819 my @l = access_basedistro();
821 my (undef,$quirkdistro) = access_quirk();
822 unshift @l, $quirkdistro;
823 unshift @l, $instead_distro;
824 @l = grep { defined } @l;
826 push @l, access_nomdistro();
828 if (access_forpush()) {
829 @l = map { ("$_/push", $_) } @l;
834 sub access_cfg_cfgs (@) {
837 # The nesting of these loops determines the search order. We put
838 # the key loop on the outside so that we search all the distros
839 # for each key, before going on to the next key. That means that
840 # if access_cfg is called with a more specific, and then a less
841 # specific, key, an earlier distro can override the less specific
842 # without necessarily overriding any more specific keys. (If the
843 # distro wants to override the more specific keys it can simply do
844 # so; whereas if we did the loop the other way around, it would be
845 # impossible for an earlier distro to override a less specific
846 # key but not the more specific ones without restating the unknown
847 # values of the more specific keys.)
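# For example (a single hypothetical lookup): access_cfg('git-url')
# while pushing to Debian searches dgit-distro.debian/push.git-url,
# then dgit-distro.debian.git-url, and finally dgit.default.git-url.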
850 # We have to deal with RETURN-UNDEF specially, so that we don't
851 # terminate the search prematurely.
853 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
856 foreach my $d (access_distros()) {
857 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
859 push @cfgs, map { "dgit.default.$_" } @realkeys;
866 my (@cfgs) = access_cfg_cfgs(@keys);
867 my $value = cfg(@cfgs);
871 sub access_cfg_bool ($$) {
872 my ($def, @keys) = @_;
873 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
876 sub string_to_ssh ($) {
878 if ($spec =~ m/\s/) {
879 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
885 sub access_cfg_ssh () {
886 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
887 if (!defined $gitssh) {
890 return string_to_ssh $gitssh;
894 sub access_runeinfo ($) {
896 return ": dgit ".access_basedistro()." $info ;";
899 sub access_someuserhost ($) {
901 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
902 defined($user) && length($user) or
903 $user = access_cfg("$some-user",'username');
904 my $host = access_cfg("$some-host");
905 return length($user) ? "$user\@$host" : $host;
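# Example using the default Debian push configuration above:
# access_someuserhost('git') combines git-user-force "dgit" with
# git-host "push.dgit.debian.org", giving "dgit@push.dgit.debian.org".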
908 sub access_gituserhost () {
909 return access_someuserhost('git');
912 sub access_giturl (;$) {
914 my $url = access_cfg('git-url','RETURN-UNDEF');
917 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
918 return undef unless defined $proto;
921 access_gituserhost().
922 access_cfg('git-path');
924 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
927 return "$url/$package$suffix";
930 sub parsecontrolfh ($$;$) {
931 my ($fh, $desc, $allowsigned) = @_;
932 our $dpkgcontrolhash_noissigned;
935 my %opts = ('name' => $desc);
936 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
937 $c = Dpkg::Control::Hash->new(%opts);
938 $c->parse($fh,$desc) or die "parsing of $desc failed";
939 last if $allowsigned;
940 last if $dpkgcontrolhash_noissigned;
941 my $issigned= $c->get_option('is_pgp_signed');
942 if (!defined $issigned) {
943 $dpkgcontrolhash_noissigned= 1;
944 seek $fh, 0,0 or die "seek $desc: $!";
945 } elsif ($issigned) {
946 fail "control file $desc is (already) PGP-signed. ".
947 " Note that dgit push needs to modify the .dsc and then".
948 " do the signature itself";
957 my ($file, $desc, $allowsigned) = @_;
958 my $fh = new IO::Handle;
959 open $fh, '<', $file or die "$file: $!";
960 my $c = parsecontrolfh($fh,$desc,$allowsigned);
961 $fh->error and die $!;
967 my ($dctrl,$field) = @_;
968 my $v = $dctrl->{$field};
969 return $v if defined $v;
970 fail "missing field $field in ".$dctrl->get_option('name');
974 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
975 my $p = new IO::Handle;
976 my @cmd = (qw(dpkg-parsechangelog), @_);
977 open $p, '-|', @cmd or die $!;
979 $?=0; $!=0; close $p or failedcmd @cmd;
983 sub commit_getclogp ($) {
984 # Returns the parsed changelog hashref for a particular commit
986 our %commit_getclogp_memo;
987 my $memo = $commit_getclogp_memo{$objid};
988 return $memo if $memo;
990 my $mclog = ".git/dgit/clog-$objid";
991 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
992 "$objid:debian/changelog";
993 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
998 defined $d or fail "getcwd failed: $!";
1002 sub parse_dscdata () {
1003 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1004 printdebug Dumper($dscdata) if $debuglevel>1;
1005 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1006 printdebug Dumper($dsc) if $debuglevel>1;
1011 sub archive_query ($;@) {
1012 my ($method) = shift @_;
1013 fail "this operation does not support multiple comma-separated suites"
1015 my $query = access_cfg('archive-query','RETURN-UNDEF');
1016 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1019 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1022 sub archive_query_prepend_mirror {
1023 my $m = access_cfg('mirror');
1024 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1027 sub pool_dsc_subpath ($$) {
1028 my ($vsn,$component) = @_; # $package is implict arg
1029 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1030 return "/pool/$component/$prefix/$package/".dscfn($vsn);
1033 sub cfg_apply_map ($$$) {
1034 my ($varref, $what, $mapspec) = @_;
1035 return unless $mapspec;
1037 printdebug "config $what EVAL{ $mapspec; }\n";
1039 eval "package Dgit::Config; $mapspec;";
1044 #---------- `ftpmasterapi' archive query method (nascent) ----------
1046 sub archive_api_query_cmd ($) {
1048 my @cmd = (@curl, qw(-sS));
1049 my $url = access_cfg('archive-query-url');
1050 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1052 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1053 foreach my $key (split /\:/, $keys) {
1054 $key =~ s/\%HOST\%/$host/g;
1056 fail "for $url: stat $key: $!" unless $!==ENOENT;
1059 fail "config requested specific TLS key but do not know".
1060 " how to get curl to use exactly that EE key ($key)";
1061 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1062 # # Sadly the above line does not work because of changes
1063 # # to gnutls. The real fix for #790093 may involve
1064 # # new curl options.
1067 # Fixing #790093 properly will involve providing a value
1068 # for this on clients.
1069 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1070 push @cmd, split / /, $kargs if defined $kargs;
1072 push @cmd, $url.$subpath;
1076 sub api_query ($$;$) {
1078 my ($data, $subpath, $ok404) = @_;
1079 badcfg "ftpmasterapi archive query method takes no data part"
1081 my @cmd = archive_api_query_cmd($subpath);
1082 my $url = $cmd[$#cmd];
1083 push @cmd, qw(-w %{http_code});
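# curl's -w %{http_code} appends the 3-digit status to the body, so
# $json at this point looks like, illustratively, '{"result": []}200';
# the trailing code is stripped off below and the rest parsed as JSON.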
1084 my $json = cmdoutput @cmd;
1085 unless ($json =~ s/\d{3}$//) {
1086 failedcmd_report_cmd undef, @cmd;
1087 fail "curl failed to print 3-digit HTTP code";
1090 return undef if $code eq '404' && $ok404;
1091 fail "fetch of $url gave HTTP code $code"
1092 unless $url =~ m#^file://# or $code =~ m/^2/;
1093 return decode_json($json);
1096 sub canonicalise_suite_ftpmasterapi {
1097 my ($proto,$data) = @_;
1098 my $suites = api_query($data, 'suites');
1100 foreach my $entry (@$suites) {
1102 my $v = $entry->{$_};
1103 defined $v && $v eq $isuite;
1104 } qw(codename name);
1105 push @matched, $entry;
1107 fail "unknown suite $isuite" unless @matched;
1110 @matched==1 or die "multiple matches for suite $isuite\n";
1111 $cn = "$matched[0]{codename}";
1112 defined $cn or die "suite $isuite info has no codename\n";
1113 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1115 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1120 sub archive_query_ftpmasterapi {
1121 my ($proto,$data) = @_;
1122 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1124 my $digester = Digest::SHA->new(256);
1125 foreach my $entry (@$info) {
1127 my $vsn = "$entry->{version}";
1128 my ($ok,$msg) = version_check $vsn;
1129 die "bad version: $msg\n" unless $ok;
1130 my $component = "$entry->{component}";
1131 $component =~ m/^$component_re$/ or die "bad component";
1132 my $filename = "$entry->{filename}";
1133 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1134 or die "bad filename";
1135 my $sha256sum = "$entry->{sha256sum}";
1136 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1137 push @rows, [ $vsn, "/pool/$component/$filename",
1138 $digester, $sha256sum ];
1140 die "bad ftpmaster api response: $@\n".Dumper($entry)
1143 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1144 return archive_query_prepend_mirror @rows;
1147 sub file_in_archive_ftpmasterapi {
1148 my ($proto,$data,$filename) = @_;
1149 my $pat = $filename;
1152 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1153 my $info = api_query($data, "file_in_archive/$pat", 1);
1156 #---------- `aptget' archive query method ----------
1159 our $aptget_releasefile;
1160 our $aptget_configpath;
1162 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1163 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1165 sub aptget_cache_clean {
1166 runcmd_ordryrun_local qw(sh -ec),
1167 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1171 sub aptget_lock_acquire () {
1172 my $lockfile = "$aptget_base/lock";
1173 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1174 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1177 sub aptget_prep ($) {
1179 return if defined $aptget_base;
1181 badcfg "aptget archive query method takes no data part"
1184 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1187 ensuredir "$cache/dgit";
1189 access_cfg('aptget-cachekey','RETURN-UNDEF')
1190 // access_nomdistro();
1192 $aptget_base = "$cache/dgit/aptget";
1193 ensuredir $aptget_base;
1195 my $quoted_base = $aptget_base;
1196 die "$quoted_base contains bad chars, cannot continue"
1197 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1199 ensuredir $aptget_base;
1201 aptget_lock_acquire();
1203 aptget_cache_clean();
1205 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1206 my $sourceslist = "source.list#$cachekey";
1208 my $aptsuites = $isuite;
1209 cfg_apply_map(\$aptsuites, 'suite map',
1210 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1212 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1213 printf SRCS "deb-src %s %s %s\n",
1214 access_cfg('mirror'),
1216 access_cfg('aptget-components')
1219 ensuredir "$aptget_base/cache";
1220 ensuredir "$aptget_base/lists";
1222 open CONF, ">", $aptget_configpath or die $!;
1224 Debug::NoLocking "true";
1225 APT::Get::List-Cleanup "false";
1226 #clear APT::Update::Post-Invoke-Success;
1227 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1228 Dir::State::Lists "$quoted_base/lists";
1229 Dir::Etc::preferences "$quoted_base/preferences";
1230 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1231 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1234 foreach my $key (qw(
1237 Dir::Cache::Archives
1238 Dir::Etc::SourceParts
1239 Dir::Etc::preferencesparts
1241 ensuredir "$aptget_base/$key";
1242 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1245 my $oldatime = (time // die $!) - 1;
1246 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1247 next unless stat_exists $oldlist;
1248 my ($mtime) = (stat _)[9];
1249 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1252 runcmd_ordryrun_local aptget_aptget(), qw(update);
1255 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1256 next unless stat_exists $oldlist;
1257 my ($atime) = (stat _)[8];
1258 next if $atime == $oldatime;
1259 push @releasefiles, $oldlist;
1261 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1262 @releasefiles = @inreleasefiles if @inreleasefiles;
1263 die "apt updated wrong number of Release files (@releasefiles), erk"
1264 unless @releasefiles == 1;
1266 ($aptget_releasefile) = @releasefiles;
1269 sub canonicalise_suite_aptget {
1270 my ($proto,$data) = @_;
1273 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1275 foreach my $name (qw(Codename Suite)) {
1276 my $val = $release->{$name};
1278 printdebug "release file $name: $val\n";
1279 $val =~ m/^$suite_re$/o or fail
1280 "Release file ($aptget_releasefile) specifies intolerable $name";
1281 cfg_apply_map(\$val, 'suite rmap',
1282 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1289 sub archive_query_aptget {
1290 my ($proto,$data) = @_;
1293 ensuredir "$aptget_base/source";
1294 foreach my $old (<$aptget_base/source/*.dsc>) {
1295 unlink $old or die "$old: $!";
1298 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1299 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1300 # avoids apt-get source failing with ambiguous error code
1302 runcmd_ordryrun_local
1303 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1304 aptget_aptget(), qw(--download-only --only-source source), $package;
1306 my @dscs = <$aptget_base/source/*.dsc>;
1307 fail "apt-get source did not produce a .dsc" unless @dscs;
1308 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1310 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1313 my $uri = "file://". uri_escape $dscs[0];
1314 $uri =~ s{\%2f}{/}gi;
1315 return [ (getfield $pre_dsc, 'Version'), $uri ];
1318 #---------- `dummyapicat' archive query method ----------
1320 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1321 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1323 sub file_in_archive_dummycatapi ($$$) {
1324 my ($proto,$data,$filename) = @_;
1325 my $mirror = access_cfg('mirror');
1326 $mirror =~ s#^file://#/# or die "$mirror ?";
1328 my @cmd = (qw(sh -ec), '
1330 find -name "$2" -print0 |
1332 ', qw(x), $mirror, $filename);
1333 debugcmd "-|", @cmd;
1334 open FIA, "-|", @cmd or die $!;
1337 printdebug "| $_\n";
1338 m/^(\w+) (\S+)$/ or die "$_ ?";
1339 push @out, { sha256sum => $1, filename => $2 };
1341 close FIA or die failedcmd @cmd;
1345 #---------- `madison' archive query method ----------
1347 sub archive_query_madison {
1348 return archive_query_prepend_mirror
1349 map { [ @$_[0..1] ] } madison_get_parse(@_);
1352 sub madison_get_parse {
1353 my ($proto,$data) = @_;
1354 die unless $proto eq 'madison';
1355 if (!length $data) {
1356 $data= access_cfg('madison-distro','RETURN-UNDEF');
1357 $data //= access_basedistro();
1359 $rmad{$proto,$data,$package} ||= cmdoutput
1360 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1361 my $rmad = $rmad{$proto,$data,$package};
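# rmadison output lines are expected to look roughly like (illustrative):
#  dgit       | 3.11         | unstable     | source
# i.e. package | version | suite[/component] | architectures, which is
# what the regexp below takes apart.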
1364 foreach my $l (split /\n/, $rmad) {
1365 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1366 \s*( [^ \t|]+ )\s* \|
1367 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1368 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1369 $1 eq $package or die "$rmad $package ?";
1376 $component = access_cfg('archive-query-default-component');
1378 $5 eq 'source' or die "$rmad ?";
1379 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1381 return sort { -version_compare($a->[0],$b->[0]); } @out;
1384 sub canonicalise_suite_madison {
1385 # madison canonicalises for us
1386 my @r = madison_get_parse(@_);
1388 "unable to canonicalise suite using package $package".
1389 " which does not appear to exist in suite $isuite;".
1390 " --existing-package may help";
1394 sub file_in_archive_madison { return undef; }
1396 #---------- `sshpsql' archive query method ----------
1399 my ($data,$runeinfo,$sql) = @_;
1400 if (!length $data) {
1401 $data= access_someuserhost('sshpsql').':'.
1402 access_cfg('sshpsql-dbname');
1404 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1405 my ($userhost,$dbname) = ($`,$'); #';
1407 my @cmd = (access_cfg_ssh, $userhost,
1408 access_runeinfo("ssh-psql $runeinfo").
1409 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1410 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1412 open P, "-|", @cmd or die $!;
1415 printdebug(">|$_|\n");
1418 $!=0; $?=0; close P or failedcmd @cmd;
1420 my $nrows = pop @rows;
1421 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1422 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1423 @rows = map { [ split /\|/, $_ ] } @rows;
1424 my $ncols = scalar @{ shift @rows };
1425 die if grep { scalar @$_ != $ncols } @rows;
1429 sub sql_injection_check {
1430 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1433 sub archive_query_sshpsql ($$) {
1434 my ($proto,$data) = @_;
1435 sql_injection_check $isuite, $package;
1436 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1437 SELECT source.version, component.name, files.filename, files.sha256sum
1439 JOIN src_associations ON source.id = src_associations.source
1440 JOIN suite ON suite.id = src_associations.suite
1441 JOIN dsc_files ON dsc_files.source = source.id
1442 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1443 JOIN component ON component.id = files_archive_map.component_id
1444 JOIN files ON files.id = dsc_files.file
1445 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1446 AND source.source='$package'
1447 AND files.filename LIKE '%.dsc';
1449 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1450 my $digester = Digest::SHA->new(256);
1452 my ($vsn,$component,$filename,$sha256sum) = @$_;
1453 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1455 return archive_query_prepend_mirror @rows;
1458 sub canonicalise_suite_sshpsql ($$) {
1459 my ($proto,$data) = @_;
1460 sql_injection_check $isuite;
1461 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1462 SELECT suite.codename
1463 FROM suite where suite_name='$isuite' or codename='$isuite';
1465 @rows = map { $_->[0] } @rows;
1466 fail "unknown suite $isuite" unless @rows;
1467 die "ambiguous $isuite: @rows ?" if @rows>1;
1471 sub file_in_archive_sshpsql ($$$) { return undef; }
1473 #---------- `dummycat' archive query method ----------
1475 sub canonicalise_suite_dummycat ($$) {
1476 my ($proto,$data) = @_;
1477 my $dpath = "$data/suite.$isuite";
1478 if (!open C, "<", $dpath) {
1479 $!==ENOENT or die "$dpath: $!";
1480 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1484 chomp or die "$dpath: $!";
1486 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1490 sub archive_query_dummycat ($$) {
1491 my ($proto,$data) = @_;
1492 canonicalise_suite();
1493 my $dpath = "$data/package.$csuite.$package";
1494 if (!open C, "<", $dpath) {
1495 $!==ENOENT or die "$dpath: $!";
1496 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1504 printdebug "dummycat query $csuite $package $dpath | $_\n";
1505 my @row = split /\s+/, $_;
1506 @row==2 or die "$dpath: $_ ?";
1509 C->error and die "$dpath: $!";
1511 return archive_query_prepend_mirror
1512 sort { -version_compare($a->[0],$b->[0]); } @rows;
1515 sub file_in_archive_dummycat () { return undef; }
1517 #---------- tag format handling ----------
1519 sub access_cfg_tagformats () {
1520 split /\,/, access_cfg('dgit-tag-format');
1523 sub access_cfg_tagformats_can_splitbrain () {
1524 my %y = map { $_ => 1 } access_cfg_tagformats;
1525 foreach my $needtf (qw(new maint)) {
1526 next if $y{$needtf};
1532 sub need_tagformat ($$) {
1533 my ($fmt, $why) = @_;
1534 fail "need to use tag format $fmt ($why) but also need".
1535 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1536 " - no way to proceed"
1537 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1538 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1541 sub select_tagformat () {
1543 return if $tagformatfn && !$tagformat_want;
1544 die 'bug' if $tagformatfn && $tagformat_want;
1545 # ... $tagformat_want assigned after previous select_tagformat
1547 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1548 printdebug "select_tagformat supported @supported\n";
1550 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1551 printdebug "select_tagformat specified @$tagformat_want\n";
1553 my ($fmt,$why,$override) = @$tagformat_want;
1555 fail "target distro supports tag formats @supported".
1556 " but have to use $fmt ($why)"
1558 or grep { $_ eq $fmt } @supported;
1560 $tagformat_want = undef;
1562 $tagformatfn = ${*::}{"debiantag_$fmt"};
1564 fail "trying to use unknown tag format \`$fmt' ($why) !"
1565 unless $tagformatfn;
1568 #---------- archive query entrypoints and rest of program ----------
1570 sub canonicalise_suite () {
1571 return if defined $csuite;
1572 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1573 $csuite = archive_query('canonicalise_suite');
1574 if ($isuite ne $csuite) {
1575 progress "canonical suite name for $isuite is $csuite";
1577 progress "canonical suite name is $csuite";
1581 sub get_archive_dsc () {
1582 canonicalise_suite();
1583 my @vsns = archive_query('archive_query');
1584 foreach my $vinfo (@vsns) {
1585 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1586 $dscurl = $vsn_dscurl;
1587 $dscdata = url_get($dscurl);
1589 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1594 $digester->add($dscdata);
1595 my $got = $digester->hexdigest();
1597 fail "$dscurl has hash $got but".
1598 " archive told us to expect $digest";
1601 my $fmt = getfield $dsc, 'Format';
1602 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1603 "unsupported source format $fmt, sorry";
1605 $dsc_checked = !!$digester;
1606 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1610 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1613 sub check_for_git ();
1614 sub check_for_git () {
1616 my $how = access_cfg('git-check');
1617 if ($how eq 'ssh-cmd') {
1619 (access_cfg_ssh, access_gituserhost(),
1620 access_runeinfo("git-check $package").
1621 " set -e; cd ".access_cfg('git-path').";".
1622 " if test -d $package.git; then echo 1; else echo 0; fi");
1623 my $r= cmdoutput @cmd;
1624 if (defined $r and $r =~ m/^divert (\w+)$/) {
1626 my ($usedistro,) = access_distros();
1627 # NB that if we are pushing, $usedistro will be $distro/push
1628 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1629 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1630 progress "diverting to $divert (using config for $instead_distro)";
1631 return check_for_git();
1633 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1635 } elsif ($how eq 'url') {
1636 my $prefix = access_cfg('git-check-url','git-url');
1637 my $suffix = access_cfg('git-check-suffix','git-suffix',
1638 'RETURN-UNDEF') // '.git';
1639 my $url = "$prefix/$package$suffix";
1640 my @cmd = (@curl, qw(-sS -I), $url);
1641 my $result = cmdoutput @cmd;
1642 $result =~ s/^\S+ 200 .*\n\r?\n//;
1643 # curl -sS -I with https_proxy prints
1644 # HTTP/1.0 200 Connection established
1645 $result =~ m/^\S+ (404|200) /s or
1646 fail "unexpected results from git check query - ".
1647 Dumper($prefix, $result);
1649 if ($code eq '404') {
1651 } elsif ($code eq '200') {
1656 } elsif ($how eq 'true') {
1658 } elsif ($how eq 'false') {
1661 badcfg "unknown git-check \`$how'";
1665 sub create_remote_git_repo () {
1666 my $how = access_cfg('git-create');
1667 if ($how eq 'ssh-cmd') {
1669 (access_cfg_ssh, access_gituserhost(),
1670 access_runeinfo("git-create $package").
1671 "set -e; cd ".access_cfg('git-path').";".
1672 " cp -a _template $package.git");
1673 } elsif ($how eq 'true') {
1676 badcfg "unknown git-create \`$how'";
1680 our ($dsc_hash,$lastpush_mergeinput);
1681 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1683 our $ud = '.git/dgit/unpack';
1693 sub mktree_in_ud_here () {
1694 runcmd qw(git init -q);
1695 runcmd qw(git config gc.auto 0);
1696 rmtree('.git/objects');
1697 symlink '../../../../objects','.git/objects' or die $!;
1700 sub git_write_tree () {
1701 my $tree = cmdoutput @git, qw(write-tree);
1702 $tree =~ m/^\w+$/ or die "$tree ?";
1706 sub git_add_write_tree () {
1707 runcmd @git, qw(add -Af .);
1708 return git_write_tree();
1711 sub remove_stray_gits ($) {
1713 my @gitscmd = qw(find -name .git -prune -print0);
1714 debugcmd "|",@gitscmd;
1715 open GITS, "-|", @gitscmd or die $!;
1720 print STDERR "$us: warning: removing from $what: ",
1721 (messagequote $_), "\n";
1725 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1728 sub mktree_in_ud_from_only_subdir ($;$) {
1729 my ($what,$raw) = @_;
1731 # changes into the subdir
1733 die "expected one subdir but found @dirs ?" unless @dirs==1;
1734 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1738 remove_stray_gits($what);
1739 mktree_in_ud_here();
1741 my ($format, $fopts) = get_source_format();
1742 if (madformat($format)) {
1747 my $tree=git_add_write_tree();
1748 return ($tree,$dir);
1751 our @files_csum_info_fields =
1752 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1753 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1754 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
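# Each line in these fields has the form "<checksum> <size> <filename>",
# e.g. (hypothetical) a 64-hex-digit sha256 followed by
# " 848960 example_1.0.orig.tar.gz"; dsc_files_info below parses that.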
1756 sub dsc_files_info () {
1757 foreach my $csumi (@files_csum_info_fields) {
1758 my ($fname, $module, $method) = @$csumi;
1759 my $field = $dsc->{$fname};
1760 next unless defined $field;
1761 eval "use $module; 1;" or die $@;
1763 foreach (split /\n/, $field) {
1765 m/^(\w+) (\d+) (\S+)$/ or
1766 fail "could not parse .dsc $fname line \`$_'";
1767 my $digester = eval "$module"."->$method;" or die $@;
1772 Digester => $digester,
1777 fail "missing any supported Checksums-* or Files field in ".
1778 $dsc->get_option('name');
1782 map { $_->{Filename} } dsc_files_info();
1785 sub files_compare_inputs (@) {
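# Cross-checks several control files (e.g. a .dsc and a .changes): each
# file's size and checksums must be consistent everywhere it appears,
# each input's Files/Checksums-* fields must list the same set of files,
# and at least one file must appear in all the file lists.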
1790 my $showinputs = sub {
1791 return join "; ", map { $_->get_option('name') } @$inputs;
1794 foreach my $in (@$inputs) {
1796 my $in_name = $in->get_option('name');
1798 printdebug "files_compare_inputs $in_name\n";
1800 foreach my $csumi (@files_csum_info_fields) {
1801 my ($fname) = @$csumi;
1802 printdebug "files_compare_inputs $in_name $fname\n";
1804 my $field = $in->{$fname};
1805 next unless defined $field;
1808 foreach (split /\n/, $field) {
1811 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1812 fail "could not parse $in_name $fname line \`$_'";
1814 printdebug "files_compare_inputs $in_name $fname $f\n";
1818 my $re = \ $record{$f}{$fname};
1820 $fchecked{$f}{$in_name} = 1;
1822 fail "hash or size of $f varies in $fname fields".
1823 " (between: ".$showinputs->().")";
1828 @files = sort @files;
1829 $expected_files //= \@files;
1830 "@$expected_files" eq "@files" or
1831 fail "file list in $in_name varies between hash fields!";
1834 fail "$in_name has no files list field(s)";
1836 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1839 grep { keys %$_ == @$inputs-1 } values %fchecked
1840 or fail "no file appears in all file lists".
1841 " (looked in: ".$showinputs->().")";
1844 sub is_orig_file_in_dsc ($$) {
1845 my ($f, $dsc_files_info) = @_;
1846 return 0 if @$dsc_files_info <= 1;
1847 # One file means no origs, and the filename doesn't have a "what
1848 # part of dsc" component. (Consider versions ending `.orig'.)
1849 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1853 sub is_orig_file_of_vsn ($$) {
1854 my ($f, $upstreamvsn) = @_;
1855 my $base = srcfn $upstreamvsn, '';
1856 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1860 sub changes_update_origs_from_dsc ($$$$) {
1861 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1863 printdebug "checking origs needed ($upstreamvsn)...\n";
1864 $_ = getfield $changes, 'Files';
1865 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1866 fail "cannot find section/priority from .changes Files field";
1867 my $placementinfo = $1;
1869 printdebug "checking origs needed placement '$placementinfo'...\n";
1870 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1871 $l =~ m/\S+$/ or next;
1873 printdebug "origs $file | $l\n";
1874 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1875 printdebug "origs $file is_orig\n";
1876 my $have = archive_query('file_in_archive', $file);
1877 if (!defined $have) {
1879 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1885 printdebug "origs $file \$#\$have=$#$have\n";
1886 foreach my $h (@$have) {
1889 foreach my $csumi (@files_csum_info_fields) {
1890 my ($fname, $module, $method, $archivefield) = @$csumi;
1891 next unless defined $h->{$archivefield};
1892 $_ = $dsc->{$fname};
1893 next unless defined;
1894 m/^(\w+) .* \Q$file\E$/m or
1895 fail ".dsc $fname missing entry for $file";
1896 if ($h->{$archivefield} eq $1) {
1900 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1903 die "$file ".Dumper($h)." ?!" if $same && @differ;
1906 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1909 printdebug "origs $file f.same=$found_same".
1910 " #f._differ=$#found_differ\n";
1911 if (@found_differ && !$found_same) {
1913 "archive contains $file with different checksum",
1916 # Now we edit the changes file to add or remove it
1917 foreach my $csumi (@files_csum_info_fields) {
1918 my ($fname, $module, $method, $archivefield) = @$csumi;
1919 next unless defined $changes->{$fname};
1921 # in archive, delete from .changes if it's there
1922 $changed{$file} = "removed" if
1923 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1924 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1925 # not in archive, but it's here in the .changes
1927 my $dsc_data = getfield $dsc, $fname;
1928 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1930 $extra =~ s/ \d+ /$&$placementinfo /
1931 or die "$fname $extra >$dsc_data< ?"
1932 if $fname eq 'Files';
1933 $changes->{$fname} .= "\n". $extra;
1934 $changed{$file} = "added";
1939 foreach my $file (keys %changed) {
1941 "edited .changes for archive .orig contents: %s %s",
1942 $changed{$file}, $file;
1944 my $chtmp = "$changesfile.tmp";
1945 $changes->save($chtmp);
1947 rename $chtmp,$changesfile or die "$changesfile $!";
1949 progress "[new .changes left in $changesfile]";
1952 progress "$changesfile already has appropriate .orig(s) (if any)";
1956 sub make_commit ($) {
1958 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1961 sub make_commit_text ($) {
1964 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1966 print Dumper($text) if $debuglevel > 1;
1967 my $child = open2($out, $in, @cmd) or die $!;
1970 print $in $text or die $!;
1971 close $in or die $!;
1973 $h =~ m/^\w+$/ or die;
1975 printdebug "=> $h\n";
1978 waitpid($child, 0) == $child or die "$child $!";
1979 $? and failedcmd @cmd;
1983 sub clogp_authline ($) {
1985 my $author = getfield $clogp, 'Maintainer';
1986 $author =~ s#,.*##ms;
1987 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1988 my $authline = "$author $date";
1989 $authline =~ m/$git_authline_re/o or
1990 fail "unexpected commit author line format \`$authline'".
1991 " (was generated from changelog Maintainer field)";
1992 return ($1,$2,$3) if wantarray;
1996 sub vendor_patches_distro ($$) {
1997 my ($checkdistro, $what) = @_;
1998 return unless defined $checkdistro;
2000 my $series = "debian/patches/\L$checkdistro\E.series";
2001 printdebug "checking for vendor-specific $series ($what)\n";
2003 if (!open SERIES, "<", $series) {
2004 die "$series $!" unless $!==ENOENT;
2013 Unfortunately, this source package uses a feature of dpkg-source where
2014 the same source package unpacks to different source code on different
2015 distros. dgit cannot safely operate on such packages on affected
2016 distros, because the meaning of source packages is not stable.
2018 Please ask the distro/maintainer to remove the distro-specific series
2019 files and use a different technique (if necessary, uploading actually
2020 different packages, if different distros are supposed to have
2024 fail "Found active distro-specific series file for".
2025 " $checkdistro ($what): $series, cannot continue";
2027 die "$series $!" if SERIES->error;
2031 sub check_for_vendor_patches () {
2032 # This dpkg-source feature doesn't seem to be documented anywhere!
2033 # But it can be found in the changelog (reformatted):
2035 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2036 # Author: Raphael Hertzog <hertzog@debian.org>
2037 # Date: Sun Oct 3 09:36:48 2010 +0200
2039 # dpkg-source: correctly create .pc/.quilt_series with alternate
2042 # If you have debian/patches/ubuntu.series and you were
2043 # unpacking the source package on ubuntu, quilt was still
2044 # directed to debian/patches/series instead of
2045 # debian/patches/ubuntu.series.
2047 # debian/changelog | 3 +++
2048 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2049 # 2 files changed, 6 insertions(+), 1 deletion(-)
2052 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2053 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2054 "Dpkg::Vendor \`current vendor'");
2055 vendor_patches_distro(access_basedistro(),
2056 "(base) distro being accessed");
2057 vendor_patches_distro(access_nomdistro(),
2058 "(nominal) distro being accessed");
2061 sub generate_commits_from_dsc () {
2062 # See big comment in fetch_from_archive, below.
2063 # See also README.dsc-import.
2067 my @dfi = dsc_files_info();
2068 foreach my $fi (@dfi) {
2069 my $f = $fi->{Filename};
2070 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2072 printdebug "considering linking $f: ";
2074 link_ltarget "../../../../$f", $f
2075 or ((printdebug "($!) "), 0)
2079 printdebug "linked.\n";
2081 complete_file_from_dsc('.', $fi)
2084 if (is_orig_file_in_dsc($f, \@dfi)) {
2085 link $f, "../../../../$f"
2091 # We unpack and record the orig tarballs first, so that we only
2092 # need disk space for one private copy of the unpacked source.
2093 # But we can't make them into commits until we have the metadata
2094 # from the debian/changelog, so we record the tree objects now and
2095 # make them into commits later.
2097 my $upstreamv = upstreamversion $dsc->{version};
2098 my $orig_f_base = srcfn $upstreamv, '';
2100 foreach my $fi (@dfi) {
2101 # We actually import, and record as a commit, every tarball
2102 # (unless there is only one file, in which case there seems
2105 my $f = $fi->{Filename};
2106 printdebug "import considering $f ";
2107 (printdebug "only one dfi\n"), next if @dfi == 1;
2108 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2109 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2113 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2115 printdebug "Y ", (join ' ', map { $_//"(none)" }
2116 $compr_ext, $orig_f_part
2119 my $input = new IO::File $f, '<' or die "$f $!";
2123 if (defined $compr_ext) {
2125 Dpkg::Compression::compression_guess_from_filename $f;
2126 fail "Dpkg::Compression cannot handle file $f in source package"
2127 if defined $compr_ext && !defined $cname;
2129 new Dpkg::Compression::Process compression => $cname;
2130 my @compr_cmd = $compr_proc->get_uncompress_cmdline();
2131 my $compr_fh = new IO::Handle;
2132 my $compr_pid = open $compr_fh, "-|" // die $!;
2134 open STDIN, "<&", $input or die $!;
2136 die "dgit (child): exec $compr_cmd[0]: $!\n";
2141 rmtree "_unpack-tar";
2142 mkdir "_unpack-tar" or die $!;
2143 my @tarcmd = qw(tar -x -f -
2144 --no-same-owner --no-same-permissions
2145 --no-acls --no-xattrs --no-selinux);
2146 my $tar_pid = fork // die $!;
2148 chdir "_unpack-tar" or die $!;
2149 open STDIN, "<&", $input or die $!;
2151 die "dgit (child): exec $tarcmd[0]: $!";
2153 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2154 !$? or failedcmd @tarcmd;
2157 (@compr_cmd ? failedcmd @compr_cmd
2159 # finally, we have the results in "tarball", but maybe
2160 # with the wrong permissions
2162 runcmd qw(chmod -R +rwX _unpack-tar);
2163 changedir "_unpack-tar";
2164 remove_stray_gits($f);
2165 mktree_in_ud_here();
2167 my ($tree) = git_add_write_tree();
2168 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2169 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2171 printdebug "one subtree $1\n";
2173 printdebug "multiple subtrees\n";
2176 rmtree "_unpack-tar";
2178 my $ent = [ $f, $tree ];
2180 Orig => !!$orig_f_part,
2181 Sort => (!$orig_f_part ? 2 :
2182 $orig_f_part =~ m/-/g ? 1 :
2190 # put any without "_" first (spec is not clear whether files
2191 # are always in the usual order). Tarballs without "_" are
2192 # the main orig or the debian tarball.
2193 $a->{Sort} <=> $b->{Sort} or
2197 my $any_orig = grep { $_->{Orig} } @tartrees;
2199 my $dscfn = "$package.dsc";
2201 my $treeimporthow = 'package';
2203 open D, ">", $dscfn or die "$dscfn: $!";
2204 print D $dscdata or die "$dscfn: $!";
2205 close D or die "$dscfn: $!";
2206 my @cmd = qw(dpkg-source);
2207 push @cmd, '--no-check' if $dsc_checked;
2208 if (madformat $dsc->{format}) {
2209 push @cmd, '--skip-patches';
2210 $treeimporthow = 'unpatched';
2212 push @cmd, qw(-x --), $dscfn;
2215 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2216 if (madformat $dsc->{format}) {
2217 check_for_vendor_patches();
2221 if (madformat $dsc->{format}) {
2222 my @pcmd = qw(dpkg-source --before-build .);
2223 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2225 $dappliedtree = git_add_write_tree();
2228 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2229 debugcmd "|",@clogcmd;
2230 open CLOGS, "-|", @clogcmd or die $!;
2235 printdebug "import clog search...\n";
2238 my $stanzatext = do { local $/=""; <CLOGS>; };
2239 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2240 last if !defined $stanzatext;
2242 my $desc = "package changelog, entry no.$.";
2243 open my $stanzafh, "<", \$stanzatext or die;
2244 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2245 $clogp //= $thisstanza;
2247 printdebug "import clog $thisstanza->{version} $desc...\n";
2249 last if !$any_orig; # we don't need $r1clogp
2251 # We look for the first (most recent) changelog entry whose
2252 # version number is lower than the upstream version of this
2253 # package. Then the last (least recent) previous changelog
2254 # entry is treated as the one which introduced this upstream
2255 # version and used for the synthetic commits for the upstream
2258 # One might think that a more sophisticated algorithm would be
2259 # necessary. But: we do not want to scan the whole changelog
2260 # file. Stopping when we see an earlier version, which
2261 # necessarily then is an earlier upstream version, is the only
2262 # realistic way to do that. Then, either the earliest
2263 # changelog entry we have seen so far is indeed the earliest
2264 # upload of this upstream version; or there are only changelog
2265 # entries relating to later upstream versions (which is not
2266 # possible unless the changelog and .dsc disagree about the
2267 # version). Then it remains to choose between the physically
2268 # last entry in the file, and the one with the lowest version
2269 # number. If these are not the same, we guess that the
2270 # versions were created in a non-monotonic order rather than
2271 # that the changelog entries have been misordered.
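# For illustration, with a hypothetical changelog of (newest first)
#   1.2-3, 1.2-2, 1.2-1, 1.1-4
# and upstream version 1.2: the loop below stops at 1.1-4, the first
# entry older than 1.2, and the last entry recorded before stopping,
# 1.2-1, becomes $r1clogp, ie the upload treated as having introduced
# upstream 1.2.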
2273 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2275 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2276 $r1clogp = $thisstanza;
2278 printdebug "import clog $r1clogp->{version} becomes r1\n";
2280 die $! if CLOGS->error;
2281 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2283 $clogp or fail "package changelog has no entries!";
2285 my $authline = clogp_authline $clogp;
2286 my $changes = getfield $clogp, 'Changes';
2287 my $cversion = getfield $clogp, 'Version';
2290 $r1clogp //= $clogp; # maybe there's only one entry;
2291 my $r1authline = clogp_authline $r1clogp;
2292 # Strictly, r1authline might now be wrong if it's going to be
2293 # unused because !$any_orig. Whatever.
2295 printdebug "import tartrees authline $authline\n";
2296 printdebug "import tartrees r1authline $r1authline\n";
2298 foreach my $tt (@tartrees) {
2299 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2301 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2304 committer $r1authline
2308 [dgit import orig $tt->{F}]
2316 [dgit import tarball $package $cversion $tt->{F}]
2321 printdebug "import main commit\n";
2323 open C, ">../commit.tmp" or die $!;
2324 print C <<END or die $!;
2327 print C <<END or die $! foreach @tartrees;
2330 print C <<END or die $!;
2336 [dgit import $treeimporthow $package $cversion]
2340 my $rawimport_hash = make_commit qw(../commit.tmp);
2342 if (madformat $dsc->{format}) {
2343 printdebug "import apply patches...\n";
2345 # regularise the state of the working tree so that
2346 # the checkout of $rawimport_hash works nicely.
2347 my $dappliedcommit = make_commit_text(<<END);
2354 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2356 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2358 # We need the answers to be reproducible
2359 my @authline = clogp_authline($clogp);
2360 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2361 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2362 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2363 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2364 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2365 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2367 my $path = $ENV{PATH} or die;
2369 foreach my $use_absurd (qw(0 1)) {
2370 runcmd @git, qw(checkout -q unpa);
2371 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2372 local $ENV{PATH} = $path;
2375 progress "warning: $@";
2376 $path = "$absurdity:$path";
2377 progress "$us: trying slow absurd-git-apply...";
2378 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2383 die "forbid absurd git-apply\n" if $use_absurd
2384 && forceing [qw(import-gitapply-no-absurd)];
2385 die "only absurd git-apply!\n" if !$use_absurd
2386 && forceing [qw(import-gitapply-absurd)];
2388 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2389 local $ENV{PATH} = $path if $use_absurd;
2391 my @showcmd = (gbp_pq, qw(import));
2392 my @realcmd = shell_cmd
2393 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2394 debugcmd "+",@realcmd;
2395 if (system @realcmd) {
2396 die +(shellquote @showcmd).
2398 failedcmd_waitstatus()."\n";
2401 my $gapplied = git_rev_parse('HEAD');
2402 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2403 $gappliedtree eq $dappliedtree or
2405 gbp-pq import and dpkg-source disagree!
2406 gbp-pq import gave commit $gapplied
2407 gbp-pq import gave tree $gappliedtree
2408 dpkg-source --before-build gave tree $dappliedtree
2410 $rawimport_hash = $gapplied;
2415 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2420 progress "synthesised git commit from .dsc $cversion";
2422 my $rawimport_mergeinput = {
2423 Commit => $rawimport_hash,
2424 Info => "Import of source package",
2426 my @output = ($rawimport_mergeinput);
2428 if ($lastpush_mergeinput) {
2429 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2430 my $oversion = getfield $oldclogp, 'Version';
2432 version_compare($oversion, $cversion);
2434 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2435 { Message => <<END, ReverseParents => 1 });
2436 Record $package ($cversion) in archive suite $csuite
2438 } elsif ($vcmp > 0) {
2439 print STDERR <<END or die $!;
2441 Version actually in archive: $cversion (older)
2442 Last version pushed with dgit: $oversion (newer or same)
2445 @output = $lastpush_mergeinput;
2447 # Same version. Use what's in the server git branch,
2448 # discarding our own import. (This could happen if the
2449 # server automatically imports all packages into git.)
2450 @output = $lastpush_mergeinput;
2453 changedir '../../../..';
2458 sub complete_file_from_dsc ($$) {
2459 our ($dstdir, $fi) = @_;
2460 # Ensures that we have, in $dstdir, the file $fi, with the correct
2461 # contents. (Downloading it from alongside $dscurl if necessary.)
2463 my $f = $fi->{Filename};
2464 my $tf = "$dstdir/$f";
2467 if (stat_exists $tf) {
2468 progress "using existing $f";
2470 printdebug "$tf does not exist, need to fetch\n";
2472 $furl =~ s{/[^/]+$}{};
2474 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2475 die "$f ?" if $f =~ m#/#;
2476 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2477 return 0 if !act_local();
2481 open F, "<", "$tf" or die "$tf: $!";
2482 $fi->{Digester}->reset();
2483 $fi->{Digester}->addfile(*F);
2484 F->error and die $!;
2485 my $got = $fi->{Digester}->hexdigest();
2486 $got eq $fi->{Hash} or
2487 fail "file $f has hash $got but .dsc".
2488 " demands hash $fi->{Hash} ".
2489 ($downloaded ? "(got wrong file from archive!)"
2490 : "(perhaps you should delete this file?)");
2495 sub ensure_we_have_orig () {
2496 my @dfi = dsc_files_info();
2497 foreach my $fi (@dfi) {
2498 my $f = $fi->{Filename};
2499 next unless is_orig_file_in_dsc($f, \@dfi);
2500 complete_file_from_dsc('..', $fi)
2505 #---------- git fetch ----------
2507 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2508 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2510 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2511 # locally fetched refs because they have unhelpful names and clutter
2512 # up gitk etc. So we track whether we have "used up" each fetched ref (ie,
2513 # whether we have made another local ref which refers to this object).
2515 # (If we deleted them unconditionally, then we might end up
2516 # re-fetching the same git objects each time dgit fetch was run.)
2518 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2519 # in git_fetch_us to fetch the refs in question, and possibly a call
2520 # to lrfetchref_used.
2522 our (%lrfetchrefs_f, %lrfetchrefs_d);
2523 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
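# For example (hypothetical ref and object names):
#   $lrfetchrefs_f{"refs/dgit-fetch/debian/dgit/sid"} = "1234beef..."
# records a ref we fetched; once lrfetchref_used has been called on it,
#   $lrfetchrefs_d{"refs/dgit-fetch/debian/dgit/sid"} = "1234beef..."
# as well, which tells the deletion code (del_lrfetchrefs, in
# fetch_from_archive) that it is safe to delete that ref again.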
2525 sub lrfetchref_used ($) {
2526 my ($fullrefname) = @_;
2527 my $objid = $lrfetchrefs_f{$fullrefname};
2528 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2531 sub git_lrfetch_sane {
2532 my ($supplementary, @specs) = @_;
2533 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2534 # at least as regards @specs. Also leave the results in
2535 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2536 # able to clean these up.
2538 # With $supplementary==1, @specs must not contain wildcards
2539 # and we add to our previous fetches (non-atomically).
2541 # This is rather miserable:
2542 # When git fetch --prune is passed a fetchspec ending with a *,
2543 # it does a plausible thing. If there is no * then:
2544 # - it matches subpaths too, even if the supplied refspec
2545 # starts refs, and behaves completely madly if the source
2546 # has refs/refs/something. (See, for example, Debian #NNNN.)
2547 # - if there is no matching remote ref, it bombs out the whole
2549 # We want to fetch a fixed ref, and we don't know in advance
2550 # if it exists, so this is not suitable.
2552 # Our workaround is to use git ls-remote. git ls-remote has its
2553 # own quirks. Notably, it has the absurd multi-tail-matching
2554 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2555 # refs/refs/foo etc.
2557 # Also, we want an idempotent snapshot, but we have to make two
2558 # calls to the remote: one to git ls-remote and one to git fetch. The
2559 # solution is to use git ls-remote to obtain a target state, and
2560 # git fetch to try to generate it. If we don't manage to generate
2561 # the target state, we try again.
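# Informally, the loop below amounts to something like this sketch
# (same logic as the code, not additional behaviour):
#   for (at most 10 iterations) {
#       %wantr = parse `git ls-remote` output      # the target state
#       run `git fetch` with the corresponding refspecs
#       compare lrfetchrefs/* with %wantr;
#       delete or fix up stray or stale local refs;
#       if an object we wanted was not fetched (a race with someone
#       updating the server), start another iteration;
#   }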
2563 my $url = access_giturl();
2565 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2567 my $specre = join '|', map {
2570 my $wildcard = $x =~ s/\\\*$/.*/;
2571 die if $wildcard && $supplementary;
2574 printdebug "git_lrfetch_sane specre=$specre\n";
2575 my $wanted_rref = sub {
2577 return m/^(?:$specre)$/;
2580 my $fetch_iteration = 0;
2583 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2584 if (++$fetch_iteration > 10) {
2585 fail "too many iterations trying to get sane fetch!";
2588 my @look = map { "refs/$_" } @specs;
2589 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2593 open GITLS, "-|", @lcmd or die $!;
2595 printdebug "=> ", $_;
2596 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2597 my ($objid,$rrefname) = ($1,$2);
2598 if (!$wanted_rref->($rrefname)) {
2600 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2604 $wantr{$rrefname} = $objid;
2607 close GITLS or failedcmd @lcmd;
2609 # OK, now %wantr is exactly what we want for refs in @specs
2611 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2612 "+refs/$_:".lrfetchrefs."/$_";
2615 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2617 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2618 runcmd_ordryrun_local @fcmd if @fspecs;
2620 if (!$supplementary) {
2621 %lrfetchrefs_f = ();
2625 git_for_each_ref(lrfetchrefs, sub {
2626 my ($objid,$objtype,$lrefname,$reftail) = @_;
2627 $lrfetchrefs_f{$lrefname} = $objid;
2628 $objgot{$objid} = 1;
2631 if ($supplementary) {
2635 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2636 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2637 if (!exists $wantr{$rrefname}) {
2638 if ($wanted_rref->($rrefname)) {
2640 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2644 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2647 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2648 delete $lrfetchrefs_f{$lrefname};
2652 foreach my $rrefname (sort keys %wantr) {
2653 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2654 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2655 my $want = $wantr{$rrefname};
2656 next if $got eq $want;
2657 if (!defined $objgot{$want}) {
2659 warning: git ls-remote suggests we want $lrefname
2660 warning: and it should refer to $want
2661 warning: but git fetch didn't fetch that object to any relevant ref.
2662 warning: This may be due to a race with someone updating the server.
2663 warning: Will try again...
2665 next FETCH_ITERATION;
2668 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says we want $want
2670 runcmd_ordryrun_local @git, qw(update-ref -m),
2671 "dgit fetch git fetch fixup", $lrefname, $want;
2672 $lrfetchrefs_f{$lrefname} = $want;
2676 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2677 Dumper(\%lrfetchrefs_f);
2680 sub git_fetch_us () {
2681 # Want to fetch only what we are going to use, unless
2682 # deliberately-not-ff, in which case we must fetch everything.
2684 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2686 (quiltmode_splitbrain
2687 ? (map { $_->('*',access_nomdistro) }
2688 \&debiantag_new, \&debiantag_maintview)
2689 : debiantags('*',access_nomdistro));
2690 push @specs, server_branch($csuite);
2691 push @specs, $rewritemap;
2692 push @specs, qw(heads/*) if deliberately_not_fast_forward;
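# So for an ordinary fetch, @specs ends up containing, roughly, the
# distro's tag patterns (hypothetically, something like
# 'tags/archive/debian/*' and 'tags/debian/*'), the server suite
# branch for $csuite, and $rewritemap; when deliberately_not_fast_forward
# is in force we fetch tags/* and heads/* wholesale instead.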
2694 git_lrfetch_sane 0, @specs;
2697 my @tagpats = debiantags('*',access_nomdistro);
2699 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2700 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2701 printdebug "currently $fullrefname=$objid\n";
2702 $here{$fullrefname} = $objid;
2704 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2705 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2706 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2707 printdebug "offered $lref=$objid\n";
2708 if (!defined $here{$lref}) {
2709 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2710 runcmd_ordryrun_local @upd;
2711 lrfetchref_used $fullrefname;
2712 } elsif ($here{$lref} eq $objid) {
2713 lrfetchref_used $fullrefname;
2716 "Not updateting $lref from $here{$lref} to $objid.\n";
2721 #---------- dsc and archive handling ----------
2723 sub mergeinfo_getclogp ($) {
2724 # Ensures that $mi->{Clogp} exists and returns it
2726 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2729 sub mergeinfo_version ($) {
2730 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2733 sub fetch_from_archive_record_1 ($) {
2735 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2736 'DGIT_ARCHIVE', $hash;
2737 cmdoutput @git, qw(log -n2), $hash;
2738 # ... gives git a chance to complain if our commit is malformed
2741 sub fetch_from_archive_record_2 ($) {
2743 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2747 dryrun_report @upd_cmd;
2751 sub parse_dsc_field ($$) {
2752 my ($dsc, $what) = @_;
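# Roughly, the Dgit field is either a bare commit hash (the old
# format) or the newer form
#   <commit hash> <distro> <hint tag> <hint url>
# The regexps below distinguish the two and set $dsc_hash,
# $dsc_distro, $dsc_hint_tag and $dsc_hint_url accordingly.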
2754 foreach my $field (@ourdscfield) {
2755 $f = $dsc->{$field};
2759 progress "$what: NO git hash";
2760 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2761 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2762 progress "$what: specified git info ($dsc_distro)";
2763 $dsc_hint_tag = [ $dsc_hint_tag ];
2764 } elsif ($f =~ m/^\w+\s*$/) {
2766 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2767 dgit.default.distro);
2768 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2770 progress "$what: specified git hash";
2772 fail "$what: invalid Dgit info";
2776 sub resolve_dsc_field_commit ($$) {
2777 my ($already_distro, $already_mapref) = @_;
2779 return unless defined $dsc_hash;
2782 defined $already_mapref &&
2783 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2784 ? $already_mapref : undef;
2788 my ($what, @fetch) = @_;
2790 local $idistro = $dsc_distro;
2791 my $lrf = lrfetchrefs;
2793 if (!$chase_dsc_distro) {
2795 "not chasing .dsc distro $dsc_distro: not fetching $what";
2800 ".dsc names distro $dsc_distro: fetching $what";
2802 my $url = access_giturl();
2803 if (!defined $url) {
2804 defined $dsc_hint_url or fail <<END;
2805 .dsc Dgit metadata is in context of distro $dsc_distro
2806 for which we have no configured url and .dsc provides no hint
2809 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2810 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2811 parse_cfg_bool "dsc-url-proto-ok", 'false',
2812 cfg("dgit.dsc-url-proto-ok.$proto",
2813 "dgit.default.dsc-url-proto-ok")
2815 .dsc Dgit metadata is in context of distro $dsc_distro
2816 for which we have no configured url;
2817 .dsc provides hinted url with protocol $proto which is unsafe.
2818 (can be overridden by config - consult documentation)
2820 $url = $dsc_hint_url;
2823 git_lrfetch_sane 1, @fetch;
2828 my $rewrite_enable = do {
2829 local $idistro = $dsc_distro;
2830 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2833 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2834 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2835 $mapref = $lrf.'/'.$rewritemap;
2836 my $rewritemapdata = git_cat_file $mapref.':map';
2837 if (defined $rewritemapdata
2838 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2840 "server's git history rewrite map contains a relevant entry!";
2843 if (defined $dsc_hash) {
2844 progress "using rewritten git hash in place of .dsc value";
2846 progress "server data says .dsc hash is to be disregarded";
2851 if (!defined git_cat_file $dsc_hash) {
2852 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2853 my $lrf = $do_fetch->("additional commits", @tags) &&
2854 defined git_cat_file $dsc_hash
2856 .dsc Dgit metadata requires commit $dsc_hash
2857 but we could not obtain that object anywhere.
2859 foreach my $t (@tags) {
2860 my $fullrefname = $lrf.'/'.$t;
2861 printdebug "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2862 next unless $lrfetchrefs_f{$fullrefname};
2863 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2864 lrfetchref_used $fullrefname;
2869 sub fetch_from_archive () {
2870 ensure_setup_existing_tree();
2872 # Ensures that lrref() is what is actually in the archive, one way
2873 # or another, according to us - ie this client's
2874 # appropriately-updated archive view. Also returns the commit id.
2875 # If there is nothing in the archive, leaves lrref alone and
2876 # returns undef. git_fetch_us must have already been called.
2880 parse_dsc_field($dsc, 'last upload to archive');
2881 resolve_dsc_field_commit access_basedistro,
2882 lrfetchrefs."/".$rewritemap
2884 progress "no version available from the archive";
2887 # If the archive's .dsc has a Dgit field, there are three
2888 # relevant git commitids we need to choose between and/or merge
2890 # 1. $dsc_hash: the Dgit field from the archive
2891 # 2. $lastpush_hash: the suite branch on the dgit git server
2892 # 3. $lastfetch_hash: our local tracking branch for the suite
2894 # These may all be distinct and need not be in any fast forward
2897 # If the dsc was pushed to this suite, then the server suite
2898 # branch will have been updated; but it might have been pushed to
2899 # a different suite and copied by the archive. Conversely a more
2900 # recent version may have been pushed with dgit but not appeared
2901 # in the archive (yet).
2903 # $lastfetch_hash may be awkward because archive imports
2904 # (particularly, imports of Dgit-less .dscs) are performed only as
2905 # needed on individual clients, so different clients may perform a
2906 # different subset of them - and these imports are only made
2907 # public during push. So $lastfetch_hash may represent a set of
2908 # imports different to a subsequent upload by a different dgit
2911 # Our approach is as follows:
2913 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2914 # descendant of $dsc_hash, then it was pushed by a dgit user who
2915 # had based their work on $dsc_hash, so we should prefer it.
2916 # Otherwise, $dsc_hash was installed into this suite in the
2917 # archive other than by a dgit push, and (necessarily) after the
2918 # last dgit push into that suite (since a dgit push would have
2919 # been descended from the dgit server git branch); thus, in that
2920 # case, we prefer the archive's version (and produce a
2921 # pseudo-merge to overwrite the dgit server git branch).
2923 # (If there is no Dgit field in the archive's .dsc then
2924 # generate_commit_from_dsc uses the version numbers to decide
2925 # whether the suite branch or the archive is newer. If the suite
2926 # branch is newer it ignores the archive's .dsc; otherwise it
2927 # generates an import of the .dsc, and produces a pseudo-merge to
2928 # overwrite the suite branch with the archive contents.)
2930 # The outcome of that part of the algorithm is the `public view',
2931 # and is the same for all dgit clients: it does not depend on any
2932 # unpublished history in the local tracking branch.
2934 # As between the public view and the local tracking branch: The
2935 # local tracking branch is only updated by dgit fetch, and
2936 # whenever dgit fetch runs it includes the public view in the
2937 # local tracking branch. Therefore if the public view is not
2938 # descended from the local tracking branch, the local tracking
2939 # branch must contain history which was imported from the archive
2940 # but never pushed; and, its tip is now out of date. So, we make
2941 # a pseudo-merge to overwrite the old imports and stitch the old
2944 # Finally: we do not necessarily reify the public view (as
2945 # described above). This is so that we do not end up stacking two
2946 # pseudo-merges. So what we actually do is figure out the inputs
2947 # to any public view pseudo-merge and put them in @mergeinputs.
2950 # $mergeinputs[]{Commit}
2951 # $mergeinputs[]{Info}
2952 # $mergeinputs[0] is the one whose tree we use
2953 # @mergeinputs is in the order we use in the actual commit)
2956 # $mergeinputs[]{Message} is a commit message to use
2957 # $mergeinputs[]{ReverseParents} if def specifies that parent
2958 # list should be in opposite order
2959 # Such an entry has no Commit or Info. It applies only when found
2960 # in the last entry. (This ugliness is to support making
2961 # identical imports to previous dgit versions.)
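# So @mergeinputs might, hypothetically, end up as something like
#   ( { Commit => $dsc_hash,      Info => "Dgit field in .dsc from archive" },
#     { Commit => $lastpush_hash, Info => "dgit suite branch on dgit git server" },
#     { Message => "Record ...\n", ReverseParents => 1 } )
# where the first entry supplies the tree for any pseudo-merge and a
# trailing Message-only entry merely adjusts how that commit is
# written, as described above.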
2963 my $lastpush_hash = git_get_ref(lrfetchref());
2964 printdebug "previous reference hash=$lastpush_hash\n";
2965 $lastpush_mergeinput = $lastpush_hash && {
2966 Commit => $lastpush_hash,
2967 Info => "dgit suite branch on dgit git server",
2970 my $lastfetch_hash = git_get_ref(lrref());
2971 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
2972 my $lastfetch_mergeinput = $lastfetch_hash && {
2973 Commit => $lastfetch_hash,
2974 Info => "dgit client's archive history view",
2977 my $dsc_mergeinput = $dsc_hash && {
2978 Commit => $dsc_hash,
2979 Info => "Dgit field in .dsc from archive",
2983 my $del_lrfetchrefs = sub {
2986 printdebug "del_lrfetchrefs...\n";
2987 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
2988 my $objid = $lrfetchrefs_d{$fullrefname};
2989 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
2991 $gur ||= new IO::Handle;
2992 open $gur, "|-", qw(git update-ref --stdin) or die $!;
2994 printf $gur "delete %s %s\n", $fullrefname, $objid;
2997 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3001 if (defined $dsc_hash) {
3002 ensure_we_have_orig();
3003 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3004 @mergeinputs = $dsc_mergeinput
3005 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3006 print STDERR <<END or die $!;
3008 Git commit in archive is behind the last version allegedly pushed/uploaded.
3009 Commit referred to by archive: $dsc_hash
3010 Last version pushed with dgit: $lastpush_hash
3013 @mergeinputs = ($lastpush_mergeinput);
3015 # Archive has .dsc which is not a descendant of the last dgit
3016 # push. This can happen if the archive moves .dscs about.
3017 # Just follow its lead.
3018 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3019 progress "archive .dsc names newer git commit";
3020 @mergeinputs = ($dsc_mergeinput);
3022 progress "archive .dsc names other git commit, fixing up";
3023 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3027 @mergeinputs = generate_commits_from_dsc();
3028 # We have just done an import. Now, our import algorithm might
3029 # have been improved. But even so we do not want to generate
3030 # a new different import of the same package. So if the
3031 # version numbers are the same, just use our existing version.
3032 # If the version numbers are different, the archive has changed
3033 # (perhaps, rewound).
3034 if ($lastfetch_mergeinput &&
3035 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3036 (mergeinfo_version $mergeinputs[0]) )) {
3037 @mergeinputs = ($lastfetch_mergeinput);
3039 } elsif ($lastpush_hash) {
3040 # only in git, not in the archive yet
3041 @mergeinputs = ($lastpush_mergeinput);
3042 print STDERR <<END or die $!;
3044 Package not found in the archive, but has allegedly been pushed using dgit.
3048 printdebug "nothing found!\n";
3049 if (defined $skew_warning_vsn) {
3050 print STDERR <<END or die $!;
3052 Warning: relevant archive skew detected.
3053 Archive allegedly contains $skew_warning_vsn
3054 But we were not able to obtain any version from the archive or git.
3058 unshift @end, $del_lrfetchrefs;
3062 if ($lastfetch_hash &&
3064 my $h = $_->{Commit};
3065 $h and is_fast_fwd($lastfetch_hash, $h);
3066 # If true, one of the existing parents of this commit
3067 # is a descendant of the $lastfetch_hash, so we'll
3068 # be ff from that automatically.
3072 push @mergeinputs, $lastfetch_mergeinput;
3075 printdebug "fetch mergeinfos:\n";
3076 foreach my $mi (@mergeinputs) {
3078 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3080 printdebug sprintf " ReverseParents=%d Message=%s",
3081 $mi->{ReverseParents}, $mi->{Message};
3085 my $compat_info= pop @mergeinputs
3086 if $mergeinputs[$#mergeinputs]{Message};
3088 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3091 if (@mergeinputs > 1) {
3093 my $tree_commit = $mergeinputs[0]{Commit};
3095 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3096 $tree =~ m/\n\n/; $tree = $`;
3097 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3100 # We use the changelog author of the package in question as the
3101 # author of this pseudo-merge. This is (roughly) correct if
3102 # this commit is simply representing a non-dgit upload.
3103 # (Roughly because it does not record sponsorship - but we
3104 # don't have sponsorship info because that's in the .changes,
3105 # which isn't in the archive.)
3107 # But, it might be that we are representing archive history
3108 # updates (including in-archive copies). These are not really
3109 # the responsibility of the person who created the .dsc, but
3110 # there is no-one whose name we should better use. (The
3111 # author of the .dsc-named commit is clearly worse.)
3113 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3114 my $author = clogp_authline $useclogp;
3115 my $cversion = getfield $useclogp, 'Version';
3117 my $mcf = ".git/dgit/mergecommit";
3118 open MC, ">", $mcf or die "$mcf $!";
3119 print MC <<END or die $!;
3123 my @parents = grep { $_->{Commit} } @mergeinputs;
3124 @parents = reverse @parents if $compat_info->{ReverseParents};
3125 print MC <<END or die $! foreach @parents;
3129 print MC <<END or die $!;
3135 if (defined $compat_info->{Message}) {
3136 print MC $compat_info->{Message} or die $!;
3138 print MC <<END or die $!;
3139 Record $package ($cversion) in archive suite $csuite
3143 my $message_add_info = sub {
3145 my $mversion = mergeinfo_version $mi;
3146 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3150 $message_add_info->($mergeinputs[0]);
3151 print MC <<END or die $!;
3152 should be treated as descended from
3154 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3158 $hash = make_commit $mcf;
3160 $hash = $mergeinputs[0]{Commit};
3162 printdebug "fetch hash=$hash\n";
3165 my ($lasth, $what) = @_;
3166 return unless $lasth;
3167 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3170 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3172 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3174 fetch_from_archive_record_1($hash);
3176 if (defined $skew_warning_vsn) {
3178 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3179 my $gotclogp = commit_getclogp($hash);
3180 my $got_vsn = getfield $gotclogp, 'Version';
3181 printdebug "SKEW CHECK GOT $got_vsn\n";
3182 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3183 print STDERR <<END or die $!;
3185 Warning: archive skew detected. Using the available version:
3186 Archive allegedly contains $skew_warning_vsn
3187 We were able to obtain only $got_vsn
3193 if ($lastfetch_hash ne $hash) {
3194 fetch_from_archive_record_2($hash);
3197 lrfetchref_used lrfetchref();
3199 unshift @end, $del_lrfetchrefs;
3203 sub set_local_git_config ($$) {
3205 runcmd @git, qw(config), $k, $v;
3208 sub setup_mergechangelogs (;$) {
3210 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3212 my $driver = 'dpkg-mergechangelogs';
3213 my $cb = "merge.$driver";
3214 my $attrs = '.git/info/attributes';
3215 ensuredir '.git/info';
3217 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3218 if (!open ATTRS, "<", $attrs) {
3219 $!==ENOENT or die "$attrs: $!";
3223 next if m{^debian/changelog\s};
3224 print NATTRS $_, "\n" or die $!;
3226 ATTRS->error and die $!;
3229 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3232 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3233 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3235 rename "$attrs.new", "$attrs" or die "$attrs: $!";
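# The net effect is, roughly, an entry in .git/info/attributes of
#   debian/changelog merge=dpkg-mergechangelogs
# plus local git config along the lines of
#   merge.dpkg-mergechangelogs.name = debian/changelog merge driver
#   merge.dpkg-mergechangelogs.driver = dpkg-mergechangelogs -m %O %A %B %A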
3238 sub setup_useremail (;$) {
3240 return unless $always || access_cfg_bool(1, 'setup-useremail');
3243 my ($k, $envvar) = @_;
3244 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3245 return unless defined $v;
3246 set_local_git_config "user.$k", $v;
3249 $setup->('email', 'DEBEMAIL');
3250 $setup->('name', 'DEBFULLNAME');
3253 sub ensure_setup_existing_tree () {
3254 my $k = "remote.$remotename.skipdefaultupdate";
3255 my $c = git_get_config $k;
3256 return if defined $c;
3257 set_local_git_config $k, 'true';
3260 sub setup_new_tree () {
3261 setup_mergechangelogs();
3265 sub multisuite_suite_child ($$$) {
3266 my ($tsuite, $merginputs, $fn) = @_;
3267 # in child, sets things up, calls $fn->(), and returns undef
3268 # in parent, returns canonical suite name for $tsuite
3269 my $canonsuitefh = IO::File::new_tmpfile;
3270 my $pid = fork // die $!;
3273 $us .= " [$isuite]";
3274 $debugprefix .= " ";
3275 progress "fetching $tsuite...";
3276 canonicalise_suite();
3277 print $canonsuitefh $csuite, "\n" or die $!;
3278 close $canonsuitefh or die $!;
3282 (waitpid $pid, 0) == $pid or die $!;
3283 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3284 seek $canonsuitefh,0,0 or die $!;
3285 local $csuite = <$canonsuitefh>;
3286 die $! unless defined $csuite && chomp $csuite;
3288 printdebug "multisuite $tsuite missing\n";
3291 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3292 push @$merginputs, {
3299 sub fork_for_multisuite ($) {
3300 my ($before_fetch_merge) = @_;
3301 # if nothing unusual, just returns ''
3304 # returns 0 to caller in child, to do first of the specified suites
3305 # in child, $csuite is not yet set
3307 # returns 1 to caller in parent, to finish up anything needed after
3308 # in parent, $csuite is set to canonicalised portmanteau
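# So a typical call site looks roughly like the clone/pull code below:
#   my $multi = fork_for_multisuite(sub { ... });
#   return if $multi eq '0';    # child: deals only with its own suite
#   if ($multi) { ... }         # parent: combined view already fetched
#   else { ... }                # '': ordinary single-suite operation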
3310 my $org_isuite = $isuite;
3311 my @suites = split /\,/, $isuite;
3312 return '' unless @suites > 1;
3313 printdebug "fork_for_multisuite: @suites\n";
3317 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3319 return 0 unless defined $cbasesuite;
3321 fail "package $package missing in (base suite) $cbasesuite"
3322 unless @mergeinputs;
3324 my @csuites = ($cbasesuite);
3326 $before_fetch_merge->();
3328 foreach my $tsuite (@suites[1..$#suites]) {
3329 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3335 # xxx collect the ref here
3337 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3338 push @csuites, $csubsuite;
3341 foreach my $mi (@mergeinputs) {
3342 my $ref = git_get_ref $mi->{Ref};
3343 die "$mi->{Ref} ?" unless length $ref;
3344 $mi->{Commit} = $ref;
3347 $csuite = join ",", @csuites;
3349 my $previous = git_get_ref lrref;
3351 unshift @mergeinputs, {
3352 Commit => $previous,
3353 Info => "local combined tracking branch",
3355 "archive seems to have rewound: local tracking branch is ahead!",
3359 foreach my $ix (0..$#mergeinputs) {
3360 $mergeinputs[$ix]{Index} = $ix;
3363 @mergeinputs = sort {
3364 -version_compare(mergeinfo_version $a,
3365 mergeinfo_version $b) # highest version first
3367 $a->{Index} <=> $b->{Index}; # earliest in spec first
3373 foreach my $mi (@mergeinputs) {
3374 printdebug "multisuite merge check $mi->{Info}\n";
3375 foreach my $previous (@needed) {
3376 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3377 printdebug "multisuite merge un-needed $previous->{Info}\n";
3381 printdebug "multisuite merge this-needed\n";
3382 $mi->{Character} = '+';
3385 $needed[0]{Character} = '*';
3387 my $output = $needed[0]{Commit};
3390 printdebug "multisuite merge nontrivial\n";
3391 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3393 my $commit = "tree $tree\n";
3394 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3395 "Input branches:\n";
3397 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3398 printdebug "multisuite merge include $mi->{Info}\n";
3399 $mi->{Character} //= ' ';
3400 $commit .= "parent $mi->{Commit}\n";
3401 $msg .= sprintf " %s %-25s %s\n",
3403 (mergeinfo_version $mi),
3406 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3408 " * marks the highest version branch, which choose to use\n".
3409 " + marks each branch which was not already an ancestor\n\n".
3410 "[dgit multi-suite $csuite]\n";
3412 "author $authline\n".
3413 "committer $authline\n\n";
3414 $output = make_commit_text $commit.$msg;
3415 printdebug "multisuite merge generated $output\n";
3418 fetch_from_archive_record_1($output);
3419 fetch_from_archive_record_2($output);
3421 progress "calculated combined tracking suite $csuite";
3426 sub clone_set_head () {
3427 open H, "> .git/HEAD" or die $!;
3428 print H "ref: ".lref()."\n" or die $!;
3431 sub clone_finish ($) {
3433 runcmd @git, qw(reset --hard), lrref();
3434 runcmd qw(bash -ec), <<'END';
3436 git ls-tree -r --name-only -z HEAD | \
3437 xargs -0r touch -h -r . --
3439 printdone "ready for work in $dstdir";
3444 badusage "dry run makes no sense with clone" unless act_local();
3446 my $multi_fetched = fork_for_multisuite(sub {
3447 printdebug "multi clone before fetch merge\n";
3450 if ($multi_fetched) {
3451 printdebug "multi clone after fetch merge\n";
3453 clone_finish($dstdir);
3456 printdebug "clone main body\n";
3458 canonicalise_suite();
3459 my $hasgit = check_for_git();
3460 mkdir $dstdir or fail "create \`$dstdir': $!";
3462 runcmd @git, qw(init -q);
3464 my $giturl = access_giturl(1);
3465 if (defined $giturl) {
3466 runcmd @git, qw(remote add), 'origin', $giturl;
3469 progress "fetching existing git history";
3471 runcmd_ordryrun_local @git, qw(fetch origin);
3473 progress "starting new git history";
3475 fetch_from_archive() or no_such_package;
3476 my $vcsgiturl = $dsc->{'Vcs-Git'};
3477 if (length $vcsgiturl) {
3478 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3479 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3482 clone_finish($dstdir);
3486 canonicalise_suite();
3487 if (check_for_git()) {
3490 fetch_from_archive() or no_such_package();
3491 printdone "fetched into ".lrref();
3495 my $multi_fetched = fork_for_multisuite(sub { });
3496 fetch() unless $multi_fetched; # parent
3497 return if $multi_fetched eq '0'; # child
3498 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3500 printdone "fetched to ".lrref()." and merged into HEAD";
3503 sub check_not_dirty () {
3504 foreach my $f (qw(local-options local-patch-header)) {
3505 if (stat_exists "debian/source/$f") {
3506 fail "git tree contains debian/source/$f";
3510 return if $ignoredirty;
3512 my @cmd = (@git, qw(diff --quiet HEAD));
3514 $!=0; $?=-1; system @cmd;
3517 fail "working tree is dirty (does not match HEAD)";
3523 sub commit_admin ($) {
3526 runcmd_ordryrun_local @git, qw(commit -m), $m;
3529 sub commit_quilty_patch () {
3530 my $output = cmdoutput @git, qw(status --porcelain);
3532 foreach my $l (split /\n/, $output) {
3533 next unless $l =~ m/\S/;
3534 if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
3538 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3540 progress "nothing quilty to commit, ok.";
3543 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3544 runcmd_ordryrun_local @git, qw(add -f), @adds;
3546 Commit Debian 3.0 (quilt) metadata
3548 [dgit ($our_version) quilt-fixup]
3552 sub get_source_format () {
3554 if (open F, "debian/source/options") {
3558 s/\s+$//; # ignore missing final newline
3560 my ($k, $v) = ($`, $'); #');
3561 $v =~ s/^"(.*)"$/$1/;
3567 F->error and die $!;
3570 die $! unless $!==&ENOENT;
3573 if (!open F, "debian/source/format") {
3574 die $! unless $!==&ENOENT;
3578 F->error and die $!;
3580 return ($_, \%options);
3583 sub madformat_wantfixup ($) {
3585 return 0 unless $format eq '3.0 (quilt)';
3586 our $quilt_mode_warned;
3587 if ($quilt_mode eq 'nocheck') {
3588 progress "Not doing any fixup of \`$format' due to".
3589 " ----no-quilt-fixup or --quilt=nocheck"
3590 unless $quilt_mode_warned++;
3593 progress "Format \`$format', need to check/update patch stack"
3594 unless $quilt_mode_warned++;
3598 sub maybe_split_brain_save ($$$) {
3599 my ($headref, $dgitview, $msg) = @_;
3600 # => message fragment "$saved" describing disposition of $dgitview
3601 return "commit id $dgitview" unless defined $split_brain_save;
3602 my @cmd = (shell_cmd "cd ../../../..",
3603 @git, qw(update-ref -m),
3604 "dgit --dgit-view-save $msg HEAD=$headref",
3605 $split_brain_save, $dgitview);
3607 return "and left in $split_brain_save";
3610 # An "infopair" is a tuple [ $thing, $what ]
3611 # (often $thing is a commit hash; $what is a description)
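# e.g. (hypothetical values):
#   [ 'f00f1234...', 'archive commit for version 1.2-1' ]
#   [ '1.2-1',       '--overwrite= version' ]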
3613 sub infopair_cond_equal ($$) {
3615 $x->[0] eq $y->[0] or fail <<END;
3616 $x->[1] ($x->[0]) not equal to $y->[1] ($y->[0])
3620 sub infopair_lrf_tag_lookup ($$) {
3621 my ($tagnames, $what) = @_;
3622 # $tagname may be an array ref
3623 my @tagnames = ref $tagnames ? @$tagnames : ($tagnames);
3624 printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n";
3625 foreach my $tagname (@tagnames) {
3626 my $lrefname = lrfetchrefs."/tags/$tagname";
3627 my $tagobj = $lrfetchrefs_f{$lrefname};
3628 next unless defined $tagobj;
3629 printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n";
3630 return [ git_rev_parse($tagobj), $what ];
3632 fail @tagnames==1 ? <<END : <<END;
3633 Wanted tag $what (@tagnames) on dgit server, but not found
3635 Wanted tag $what (one of: @tagnames) on dgit server, but not found
3639 sub infopair_cond_ff ($$) {
3640 my ($anc,$desc) = @_;
3641 is_fast_fwd($anc->[0], $desc->[0]) or fail <<END;
3642 $anc->[1] ($anc->[0]) .. $desc->[1] ($desc->[0]) is not fast forward
3646 sub pseudomerge_version_check ($$) {
3647 my ($clogp, $archive_hash) = @_;
3649 my $arch_clogp = commit_getclogp $archive_hash;
3650 my $i_arch_v = [ (getfield $arch_clogp, 'Version'),
3651 'version currently in archive' ];
3652 if (defined $overwrite_version) {
3653 if (length $overwrite_version) {
3654 infopair_cond_equal([ $overwrite_version,
3655 '--overwrite= version' ],
3658 my $v = $i_arch_v->[0];
3659 progress "Checking package changelog for archive version $v ...";
3661 my @xa = ("-f$v", "-t$v");
3662 my $vclogp = parsechangelog @xa;
3663 my $cv = [ (getfield $vclogp, 'Version'),
3664 "Version field from dpkg-parsechangelog @xa" ];
3665 infopair_cond_equal($i_arch_v, $cv);
3668 $@ =~ s/^dgit: //gm;
3670 "Perhaps debian/changelog does not mention $v ?";
3675 printdebug "pseudomerge_version_check i_arch_v @$i_arch_v\n";
3679 sub pseudomerge_make_commit ($$$$ $$) {
3680 my ($clogp, $dgitview, $archive_hash, $i_arch_v,
3681 $msg_cmd, $msg_msg) = @_;
3682 progress "Declaring that HEAD inciudes all changes in $i_arch_v->[0]...";
3684 my $tree = cmdoutput qw(git rev-parse), "${dgitview}:";
3685 my $authline = clogp_authline $clogp;
3689 !defined $overwrite_version ? ""
3690 : !length $overwrite_version ? " --overwrite"
3691 : " --overwrite=".$overwrite_version;
3694 my $pmf = ".git/dgit/pseudomerge";
3695 open MC, ">", $pmf or die "$pmf $!";
3696 print MC <<END or die $!;
3699 parent $archive_hash
3709 return make_commit($pmf);
3712 sub splitbrain_pseudomerge ($$$$) {
3713 my ($clogp, $maintview, $dgitview, $archive_hash) = @_;
3714 # => $merged_dgitview
3715 printdebug "splitbrain_pseudomerge...\n";
3717 # We: debian/PREVIOUS HEAD($maintview)
3718 # expect: o ----------------- o