3 # Integration between git and Debian-style archives
5 # Copyright (C)2013-2018 Ian Jackson
6 # Copyright (C)2017-2018 Sean Whitton
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 END { $? = $Debian::Dgit::ExitStatus::desired // -1; };
22 use Debian::Dgit::ExitStatus;
26 use Debian::Dgit qw(:DEFAULT :playground);
32 use Dpkg::Control::Hash;
34 use File::Temp qw(tempdir);
37 use Dpkg::Compression;
38 use Dpkg::Compression::Process;
43 use List::MoreUtils qw(pairwise);
44 use Text::Glob qw(match_glob);
45 use Fcntl qw(:DEFAULT :flock);
50 our $our_version = 'UNRELEASED'; ###substituted###
51 our $absurdity = undef; ###substituted###
53 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
64 our $dryrun_level = 0;
66 our $buildproductsdir;
69 our $includedirty = 0;
73 our $existing_package = 'dpkg';
75 our $changes_since_version;
77 our $overwrite_version; # undef: not specified; '': check changelog
79 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
81 our %internal_object_save;
82 our $we_are_responder;
83 our $we_are_initiator;
84 our $initiator_tempdir;
85 our $patches_applied_dirtily = 00;
89 our $chase_dsc_distro=1;
91 our %forceopts = map { $_=>0 }
92 qw(unrepresentable unsupported-source-format
93 dsc-changes-mismatch changes-origs-exactly
94 uploading-binaries uploading-source-only
95 import-gitapply-absurd
96 import-gitapply-no-absurd
97 import-dsc-with-dgit-field);
99 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
101 our $suite_re = '[-+.0-9a-z]+';
102 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
104 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
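# e.g. (illustrative) an authline such as
#   Jane Doe <jane@example.org> 1530000000 +0100
# matches with $1 = name, $2 = email address, $3 = "timestamp tz-offset".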
105 our $splitbraincache = 'dgit-intern/quilt-cache';
106 our $rewritemap = 'dgit-rewrite/map';
108 our @dpkg_source_ignores = qw(-i(?:^|/)\.git(?:/|$) -I.git);
110 our (@git) = qw(git);
111 our (@dget) = qw(dget);
112 our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L));
113 our (@dput) = qw(dput);
114 our (@debsign) = qw(debsign);
115 our (@gpg) = qw(gpg);
116 our (@sbuild) = qw(sbuild);
118 our (@dgit) = qw(dgit);
119 our (@git_debrebase) = qw(git-debrebase);
120 our (@aptget) = qw(apt-get);
121 our (@aptcache) = qw(apt-cache);
122 our (@dpkgbuildpackage) = (qw(dpkg-buildpackage), @dpkg_source_ignores);
123 our (@dpkgsource) = (qw(dpkg-source), @dpkg_source_ignores);
124 our (@dpkggenchanges) = qw(dpkg-genchanges);
125 our (@mergechanges) = qw(mergechanges -f);
126 our (@gbp_build) = ('');
127 our (@gbp_pq) = ('gbp pq');
128 our (@changesopts) = ('');
129 our (@pbuilder) = ("sudo -E pbuilder");
130 our (@cowbuilder) = ("sudo -E cowbuilder");
132 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
135 'debsign' => \@debsign,
137 'sbuild' => \@sbuild,
141 'git-debrebase' => \@git_debrebase,
142 'apt-get' => \@aptget,
143 'apt-cache' => \@aptcache,
144 'dpkg-source' => \@dpkgsource,
145 'dpkg-buildpackage' => \@dpkgbuildpackage,
146 'dpkg-genchanges' => \@dpkggenchanges,
147 'gbp-build' => \@gbp_build,
148 'gbp-pq' => \@gbp_pq,
149 'ch' => \@changesopts,
150 'mergechanges' => \@mergechanges,
151 'pbuilder' => \@pbuilder,
152 'cowbuilder' => \@cowbuilder);
154 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
155 our %opts_cfg_insertpos = map {
157 scalar @{ $opts_opt_map{$_} }
158 } keys %opts_opt_map;
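# %opts_cfg_insertpos records, for each command in %opts_opt_map, the
# current length of its command array, i.e. the position just after the
# command words listed above, where options obtained later (e.g. from
# configuration) can be inserted.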
160 sub parseopts_late_defaults();
161 sub setup_gitattrs(;$);
162 sub check_gitattrs($$);
169 our $supplementary_message = '';
170 our $split_brain = 0;
174 return unless forkcheck_mainprocess();
175 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
178 our $remotename = 'dgit';
179 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
183 if (!defined $absurdity) {
185 $absurdity =~ s{/[^/]+$}{/absurd} or die;
189 my ($v,$distro) = @_;
190 return $tagformatfn->($v, $distro);
193 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
195 sub lbranch () { return "$branchprefix/$csuite"; }
196 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
197 sub lref () { return "refs/heads/".lbranch(); }
198 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
199 sub rrref () { return server_ref($csuite); }
202 my ($vsn, $sfx) = @_;
203 return &source_file_leafname($package, $vsn, $sfx);
205 sub is_orig_file_of_vsn ($$) {
206 my ($f, $upstreamvsn) = @_;
207 return is_orig_file_of_p_v($f, $package, $upstreamvsn);
212 return srcfn($vsn,".dsc");
215 sub changespat ($;$) {
216 my ($vsn, $arch) = @_;
217 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
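# e.g. (illustrative) for package "dgit": changespat("1:1.2-1") gives
# "dgit_1.2-1_*.changes" and changespat("1:1.2-1","source") gives
# "dgit_1.2-1_source.changes" (stripepoch drops the epoch).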
226 return unless forkcheck_mainprocess();
227 foreach my $f (@end) {
229 print STDERR "$us: cleanup: $@" if length $@;
233 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; finish 12; }
235 sub forceable_fail ($$) {
236 my ($forceoptsl, $msg) = @_;
237 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
238 print STDERR "warning: overriding problem due to --force:\n". $msg;
242 my ($forceoptsl) = @_;
243 my @got = grep { $forceopts{$_} } @$forceoptsl;
244 return 0 unless @got;
246 "warning: skipping checks or functionality due to --force-$got[0]\n";
249 sub no_such_package () {
250 print STDERR "$us: package $package does not exist in suite $isuite\n";
254 sub deliberately ($) {
256 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
259 sub deliberately_not_fast_forward () {
260 foreach (qw(not-fast-forward fresh-repo)) {
261 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
265 sub quiltmode_splitbrain () {
266 $quilt_mode =~ m/gbp|dpm|unapplied/;
269 sub opts_opt_multi_cmd {
272 push @cmd, split /\s+/, shift @_;
279 return opts_opt_multi_cmd [], @gbp_pq;
282 sub dgit_privdir () {
283 our $dgit_privdir_made //= ensure_a_playground 'dgit';
287 my $r = $buildproductsdir;
288 $r = "$maindir/$r" unless $r =~ m{^/};
292 sub get_tree_of_commit ($) {
293 my ($commitish) = @_;
294 my $cdata = cmdoutput @git, qw(cat-file commit), $commitish;
295 $cdata =~ m/\n\n/; $cdata = $`;
296 $cdata =~ m/^tree (\w+)$/m or confess "cdata $cdata ?";
300 sub branch_gdr_info ($$) {
301 my ($symref, $head) = @_;
302 my ($status, $msg, $current, $ffq_prev, $gdrlast) =
303 gdr_ffq_prev_branchinfo($symref);
304 return () unless $status eq 'branch';
305 $ffq_prev = git_get_ref $ffq_prev;
306 $gdrlast = git_get_ref $gdrlast;
307 $gdrlast &&= is_fast_fwd $gdrlast, $head;
308 return ($ffq_prev, $gdrlast);
311 sub branch_is_gdr_unstitched_ff ($$$) {
312 my ($symref, $head, $ancestor) = @_;
313 my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head);
314 return 0 unless $ffq_prev;
315 return 0 unless !defined $ancestor or is_fast_fwd $ancestor, $ffq_prev;
319 sub branch_is_gdr ($) {
321 # This is quite like git-debrebase's keycommits.
322 # We have our own implementation because:
323 # - our algorithm can do fewer tests so is faster
324 # - it saves testing to see if gdr is installed
326 # NB we use this just for deciding whether to run gdr make-patches
327 # Before reusing this algorithm for something else, its
328 # suitability should be reconsidered.
331 local $Debian::Dgit::debugcmd_when_debuglevel = 3;
332 printdebug "branch_is_gdr $head...\n";
333 my $get_patches = sub {
334 my $t = git_cat_file "$_[0]:debian/patches", [qw(missing tree)];
337 my $tip_patches = $get_patches->($head);
340 my $cdata = git_cat_file $walk, 'commit';
341 my ($hdrs,$msg) = $cdata =~ m{\n\n} ? ($`,$') : ($cdata,'');
342 if ($msg =~ m{^\[git-debrebase\ (
343 anchor | changelog | make-patches |
344 merged-breakwater | pseudomerge
346 # no need to analyse this - it's sufficient
347 # (gdr classifications: Anchor, MergedBreakwaters)
348 # (made by gdr: Pseudomerge, Changelog)
349 printdebug "branch_is_gdr $walk gdr $1 YES\n";
352 my @parents = ($hdrs =~ m/^parent (\w+)$/gm);
354 my $walk_tree = get_tree_of_commit $walk;
355 foreach my $p (@parents) {
356 my $p_tree = get_tree_of_commit $p;
357 if ($p_tree eq $walk_tree) { # pseudomerge contributor
358 # (gdr classification: Pseudomerge; not made by gdr)
359 printdebug "branch_is_gdr $walk unmarked pseudomerge\n"
365 # some other non-gdr merge
366 # (gdr classification: VanillaMerge, DgitImportUnpatched, ?)
367 printdebug "branch_is_gdr $walk ?-2-merge NO\n";
371 # (gdr classification: ?)
372 printdebug "branch_is_gdr $walk ?-octopus NO\n";
375 if ($get_patches->($walk) ne $tip_patches) {
376 # Our parent added, removed, or edited patches, and wasn't
377 # a gdr make-patches commit. gdr make-patches probably
378 # won't do that well, then.
379 # (gdr classification of parent: AddPatches or ?)
380 printdebug "branch_is_gdr $walk ?-patches NO\n";
383 if ($tip_patches eq '' and
384 !defined git_cat_file "$walk:debian") {
385 # (gdr classification of parent: BreakwaterStart)
386 printdebug "branch_is_gdr $walk unmarked BreakwaterStart YES\n";
389 # (gdr classification: Upstream Packaging Mixed Changelog)
390 printdebug "branch_is_gdr $walk plain\n"
396 #---------- remote protocol support, common ----------
398 # remote push initiator/responder protocol:
399 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
400 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
401 # < dgit-remote-push-ready <actual-proto-vsn>
408 # > supplementary-message NBYTES # $protovsn >= 3
413 # > file parsed-changelog
414 # [indicates that output of dpkg-parsechangelog follows]
415 # > data-block NBYTES
416 # > [NBYTES bytes of data (no newline)]
417 # [maybe some more blocks]
426 # > param head DGIT-VIEW-HEAD
427 # > param csuite SUITE
428 # > param tagformat old|new
429 # > param maint-view MAINT-VIEW-HEAD
431 # > param buildinfo-filename P_V_X.buildinfo # zero or more times
432 # > file buildinfo # for buildinfos to sign
434 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
435 # # goes into tag, for replay prevention
438 # [indicates that signed tag is wanted]
439 # < data-block NBYTES
440 # < [NBYTES bytes of data (no newline)]
441 # [maybe some more blocks]
445 # > want signed-dsc-changes
446 # < data-block NBYTES [transfer of signed dsc]
448 # < data-block NBYTES [transfer of signed changes]
450 # < data-block NBYTES [transfer of each signed buildinfo
451 # [etc] same number and order as "file buildinfo"]
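# An illustrative example of one such file transfer (byte count invented):
#  > file parsed-changelog
#  > data-block 1378
#  > [1378 bytes of data (no newline)]
#  > data-end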
459 sub i_child_report () {
460 # Sees if our child has died, and reaps it if so. Returns a string
461 # describing how it died if it failed, or undef otherwise.
462 return undef unless $i_child_pid;
463 my $got = waitpid $i_child_pid, WNOHANG;
464 return undef if $got <= 0;
465 die unless $got == $i_child_pid;
466 $i_child_pid = undef;
467 return undef unless $?;
468 return "build host child ".waitstatusmsg();
473 fail "connection lost: $!" if $fh->error;
474 fail "protocol violation; $m not expected";
477 sub badproto_badread ($$) {
479 fail "connection lost: $!" if $!;
480 my $report = i_child_report();
481 fail $report if defined $report;
482 badproto $fh, "eof (reading $wh)";
485 sub protocol_expect (&$) {
486 my ($match, $fh) = @_;
489 defined && chomp or badproto_badread $fh, "protocol message";
497 badproto $fh, "\`$_'";
500 sub protocol_send_file ($$) {
501 my ($fh, $ourfn) = @_;
502 open PF, "<", $ourfn or die "$ourfn: $!";
505 my $got = read PF, $d, 65536;
506 die "$ourfn: $!" unless defined $got;
508 print $fh "data-block ".length($d)."\n" or die $!;
509 print $fh $d or die $!;
511 PF->error and die "$ourfn $!";
512 print $fh "data-end\n" or die $!;
516 sub protocol_read_bytes ($$) {
517 my ($fh, $nbytes) = @_;
518 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
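# i.e. a plain decimal byte count with no leading zeroes, at most six
# digits (999999), or exactly "0"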
520 my $got = read $fh, $d, $nbytes;
521 $got==$nbytes or badproto_badread $fh, "data block";
525 sub protocol_receive_file ($$) {
526 my ($fh, $ourfn) = @_;
527 printdebug "() $ourfn\n";
528 open PF, ">", $ourfn or die "$ourfn: $!";
530 my ($y,$l) = protocol_expect {
531 m/^data-block (.*)$/ ? (1,$1) :
532 m/^data-end$/ ? (0,) :
536 my $d = protocol_read_bytes $fh, $l;
537 print PF $d or die $!;
542 #---------- remote protocol support, responder ----------
544 sub responder_send_command ($) {
546 return unless $we_are_responder;
547 # called even without $we_are_responder
548 printdebug ">> $command\n";
549 print PO $command, "\n" or die $!;
552 sub responder_send_file ($$) {
553 my ($keyword, $ourfn) = @_;
554 return unless $we_are_responder;
555 printdebug "]] $keyword $ourfn\n";
556 responder_send_command "file $keyword";
557 protocol_send_file \*PO, $ourfn;
560 sub responder_receive_files ($@) {
561 my ($keyword, @ourfns) = @_;
562 die unless $we_are_responder;
563 printdebug "[[ $keyword @ourfns\n";
564 responder_send_command "want $keyword";
565 foreach my $fn (@ourfns) {
566 protocol_receive_file \*PI, $fn;
569 protocol_expect { m/^files-end$/ } \*PI;
572 #---------- remote protocol support, initiator ----------
574 sub initiator_expect (&) {
576 protocol_expect { &$match } \*RO;
579 #---------- end remote code ----------
582 if ($we_are_responder) {
584 responder_send_command "progress ".length($m) or die $!;
585 print PO $m or die $!;
595 $ua = LWP::UserAgent->new();
599 progress "downloading $what...";
600 my $r = $ua->get(@_) or die $!;
601 return undef if $r->code == 404;
602 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
603 return $r->decoded_content(charset => 'none');
606 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
608 sub act_local () { return $dryrun_level <= 1; }
609 sub act_scary () { return !$dryrun_level; }
612 if (!$dryrun_level) {
613 progress "$us ok: @_";
615 progress "would be ok: @_ (but dry run only)";
620 printcmd(\*STDERR,$debugprefix."#",@_);
623 sub runcmd_ordryrun {
631 sub runcmd_ordryrun_local {
639 our $helpmsg = <<END;
641 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
642 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
643 dgit [dgit-opts] build [dpkg-buildpackage-opts]
644 dgit [dgit-opts] sbuild [sbuild-opts]
645 dgit [dgit-opts] pbuilder|cowbuilder [debbuildopts]
646 dgit [dgit-opts] push [dgit-opts] [suite]
647 dgit [dgit-opts] push-source [dgit-opts] [suite]
648 dgit [dgit-opts] rpush build-host:build-dir ...
649 important dgit options:
650 -k<keyid> sign tag and package with <keyid> instead of default
651 --dry-run -n do not change anything, but go through the motions
652 --damp-run -L like --dry-run but make local changes, without signing
653 --new -N allow introducing a new package
654 --debug -D increase debug level
655 -c<name>=<value> set git config option (used directly by dgit too)
658 our $later_warning_msg = <<END;
659 Perhaps the upload is stuck in incoming. Using the version from git.
663 print STDERR "$us: @_\n", $helpmsg or die $!;
668 @ARGV or badusage "too few arguments";
669 return scalar shift @ARGV;
673 not_necessarily_a_tree();
676 print $helpmsg or die $!;
680 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
682 our %defcfg = ('dgit.default.distro' => 'debian',
683 'dgit.default.default-suite' => 'unstable',
684 'dgit.default.old-dsc-distro' => 'debian',
685 'dgit-suite.*-security.distro' => 'debian-security',
686 'dgit.default.username' => '',
687 'dgit.default.archive-query-default-component' => 'main',
688 'dgit.default.ssh' => 'ssh',
689 'dgit.default.archive-query' => 'madison:',
690 'dgit.default.sshpsql-dbname' => 'service=projectb',
691 'dgit.default.aptget-components' => 'main',
692 'dgit.default.dgit-tag-format' => 'new,old,maint',
693 'dgit.default.source-only-uploads' => 'ok',
694 'dgit.dsc-url-proto-ok.http' => 'true',
695 'dgit.dsc-url-proto-ok.https' => 'true',
696 'dgit.dsc-url-proto-ok.git' => 'true',
697 'dgit.vcs-git.suites' => 'sid', # ;-separated
698 'dgit.default.dsc-url-proto-ok' => 'false',
699 # old means "repo server accepts pushes with old dgit tags"
700 # new means "repo server accepts pushes with new dgit tags"
701 # maint means "repo server accepts split brain pushes"
702 # hist means "repo server may have old pushes without new tag"
703 # ("hist" is implied by "old")
704 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
705 'dgit-distro.debian.git-check' => 'url',
706 'dgit-distro.debian.git-check-suffix' => '/info/refs',
707 'dgit-distro.debian.new-private-pushers' => 't',
708 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new',
709 'dgit-distro.debian/push.git-url' => '',
710 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
711 'dgit-distro.debian/push.git-user-force' => 'dgit',
712 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
713 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
714 'dgit-distro.debian/push.git-create' => 'true',
715 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
716 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
717 # 'dgit-distro.debian.archive-query-tls-key',
718 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
719 # ^ this does not work because curl is broken nowadays
720 # Fixing #790093 properly will involve providing the key
721 # in some package and maybe updating these paths.
723 # 'dgit-distro.debian.archive-query-tls-curl-args',
724 # '--ca-path=/etc/ssl/ca-debian',
725 # ^ this is a workaround but works (only) on DSA-administered machines
726 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
727 'dgit-distro.debian.git-url-suffix' => '',
728 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
729 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
730 'dgit-distro.debian-security.archive-query' => 'aptget:',
731 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
732 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
733 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
734 'dgit-distro.debian-security.nominal-distro' => 'debian',
735 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
736 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
737 'dgit-distro.ubuntu.git-check' => 'false',
738 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
739 'dgit-distro.test-dummy.ssh' => "$td/ssh",
740 'dgit-distro.test-dummy.username' => "alice",
741 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
742 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
743 'dgit-distro.test-dummy.git-url' => "$td/git",
744 'dgit-distro.test-dummy.git-host' => "git",
745 'dgit-distro.test-dummy.git-path' => "$td/git",
746 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
747 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
748 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
749 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
753 our @gitcfgsources = qw(cmdline local global system);
754 our $invoked_in_git_tree = 1;
756 sub git_slurp_config () {
757 # This algorithm is a bit subtle, but this is needed so that for
758 # options which we want to be single-valued, we allow the
759 # different config sources to override properly. See #835858.
760 foreach my $src (@gitcfgsources) {
761 next if $src eq 'cmdline';
762 # we do this ourselves since git doesn't handle it
764 $gitcfgs{$src} = git_slurp_config_src $src;
768 sub git_get_config ($) {
770 foreach my $src (@gitcfgsources) {
771 my $l = $gitcfgs{$src}{$c};
772 confess "internal error ($l $c)" if $l && !ref $l;
773 printdebug"C $c ".(defined $l ?
774 join " ", map { messagequote "'$_'" } @$l :
778 @$l==1 or badcfg "multiple values for $c".
779 " (in $src git config)" if @$l > 1;
787 return undef if $c =~ /RETURN-UNDEF/;
788 printdebug "C? $c\n" if $debuglevel >= 5;
789 my $v = git_get_config($c);
790 return $v if defined $v;
791 my $dv = $defcfg{$c};
793 printdebug "CD $c $dv\n" if $debuglevel >= 4;
797 badcfg "need value for one of: @_\n".
798 "$us: distro or suite appears not to be (properly) supported";
801 sub not_necessarily_a_tree () {
802 # needs to be called from pre_*
803 @gitcfgsources = grep { $_ ne 'local' } @gitcfgsources;
804 $invoked_in_git_tree = 0;
807 sub access_basedistro__noalias () {
808 if (defined $idistro) {
811 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
812 return $def if defined $def;
813 foreach my $src (@gitcfgsources, 'internal') {
814 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
816 foreach my $k (keys %$kl) {
817 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
819 next unless match_glob $dpat, $isuite;
823 return cfg("dgit.default.distro");
827 sub access_basedistro () {
828 my $noalias = access_basedistro__noalias();
829 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
830 return $canon // $noalias;
833 sub access_nomdistro () {
834 my $base = access_basedistro();
835 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
836 $r =~ m/^$distro_re$/ or badcfg
837 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
841 sub access_quirk () {
842 # returns (quirk name, distro to use instead or undef, quirk-specific info)
843 my $basedistro = access_basedistro();
844 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
846 if (defined $backports_quirk) {
847 my $re = $backports_quirk;
848 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
850 $re =~ s/\%/([-0-9a-z_]+)/
851 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
852 if ($isuite =~ m/^$re$/) {
853 return ('backports',"$basedistro-backports",$1);
856 return ('none',undef);
861 sub parse_cfg_bool ($$$) {
862 my ($what,$def,$v) = @_;
865 $v =~ m/^[ty1]/ ? 1 :
866 $v =~ m/^[fn0]/ ? 0 :
867 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
870 sub access_forpush_config () {
871 my $d = access_basedistro();
875 parse_cfg_bool('new-private-pushers', 0,
876 cfg("dgit-distro.$d.new-private-pushers",
879 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
882 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
883 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
884 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
885 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
888 sub access_forpush () {
889 $access_forpush //= access_forpush_config();
890 return $access_forpush;
894 confess 'internal error '.Dumper($access_forpush)," ?" if
895 defined $access_forpush and !$access_forpush;
896 badcfg "pushing but distro is configured readonly"
897 if access_forpush_config() eq '0';
899 $supplementary_message = <<'END' unless $we_are_responder;
900 Push failed, before we got started.
901 You can retry the push, after fixing the problem, if you like.
903 parseopts_late_defaults();
907 parseopts_late_defaults();
910 sub supplementary_message ($) {
912 if (!$we_are_responder) {
913 $supplementary_message = $msg;
915 } elsif ($protovsn >= 3) {
916 responder_send_command "supplementary-message ".length($msg)
918 print PO $msg or die $!;
922 sub access_distros () {
923 # Returns list of distros to try, in order
926 # 0. `instead of' distro name(s) we have been pointed to
927 # 1. the access_quirk distro, if any
928 # 2a. the user's specified distro, or failing that } basedistro
929 # 2b. the distro calculated from the suite }
930 my @l = access_basedistro();
932 my (undef,$quirkdistro) = access_quirk();
933 unshift @l, $quirkdistro;
934 unshift @l, $instead_distro;
935 @l = grep { defined } @l;
937 push @l, access_nomdistro();
939 if (access_forpush()) {
940 @l = map { ("$_/push", $_) } @l;
945 sub access_cfg_cfgs (@) {
948 # The nesting of these loops determines the search order. We put
949 # the key loop on the outside so that we search all the distros
950 # for each key, before going on to the next key. That means that
951 # if access_cfg is called with a more specific, and then a less
952 # specific, key, an earlier distro can override the less specific
953 # without necessarily overriding any more specific keys. (If the
954 # distro wants to override the more specific keys it can simply do
955 # so; whereas if we did the loop the other way around, it would be
956 # impossible for an earlier distro to override a less specific
957 # key but not the more specific ones without restating the unknown
958 # values of the more specific keys.)
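# For example (illustrative), a single key "git-url" looked up while
# pushing to Debian is tried roughly in this order:
#   dgit-distro.debian/push.git-url
#   dgit-distro.debian.git-url
#   dgit.default.git-url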
961 # We have to deal with RETURN-UNDEF specially, so that we don't
962 # terminate the search prematurely.
964 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
967 foreach my $d (access_distros()) {
968 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
970 push @cfgs, map { "dgit.default.$_" } @realkeys;
977 my (@cfgs) = access_cfg_cfgs(@keys);
978 my $value = cfg(@cfgs);
982 sub access_cfg_bool ($$) {
983 my ($def, @keys) = @_;
984 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
987 sub string_to_ssh ($) {
989 if ($spec =~ m/\s/) {
990 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
996 sub access_cfg_ssh () {
997 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
998 if (!defined $gitssh) {
1001 return string_to_ssh $gitssh;
1005 sub access_runeinfo ($) {
1007 return ": dgit ".access_basedistro()." $info ;";
1010 sub access_someuserhost ($) {
1012 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
1013 defined($user) && length($user) or
1014 $user = access_cfg("$some-user",'username');
1015 my $host = access_cfg("$some-host");
1016 return length($user) ? "$user\@$host" : $host;
1019 sub access_gituserhost () {
1020 return access_someuserhost('git');
1023 sub access_giturl (;$) {
1024 my ($optional) = @_;
1025 my $url = access_cfg('git-url','RETURN-UNDEF');
1028 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
1029 return undef unless defined $proto;
1032 access_gituserhost().
1033 access_cfg('git-path');
1035 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
1038 return "$url/$package$suffix";
1041 sub commit_getclogp ($) {
1042 # Returns the parsed changelog hashref for a particular commit
1044 our %commit_getclogp_memo;
1045 my $memo = $commit_getclogp_memo{$objid};
1046 return $memo if $memo;
1048 my $mclog = dgit_privdir()."clog";
1049 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
1050 "$objid:debian/changelog";
1051 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
1054 sub parse_dscdata () {
1055 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1056 printdebug Dumper($dscdata) if $debuglevel>1;
1057 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1058 printdebug Dumper($dsc) if $debuglevel>1;
1063 sub archive_query ($;@) {
1064 my ($method) = shift @_;
1065 fail "this operation does not support multiple comma-separated suites"
1067 my $query = access_cfg('archive-query','RETURN-UNDEF');
1068 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1071 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1074 sub archive_query_prepend_mirror {
1075 my $m = access_cfg('mirror');
1076 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1079 sub pool_dsc_subpath ($$) {
1080 my ($vsn,$component) = @_; # $package is implicit arg
1081 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1082 return "/pool/$component/$prefix/$package/".dscfn($vsn);
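# e.g. (illustrative) package "dgit", component "main", version "1.0-1":
#   /pool/main/d/dgit/dgit_1.0-1.dsc
# packages whose names start with "l" get a four-character prefix, e.g.
# "libfoo" would use /pool/main/libf/libfoo/.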
1085 sub cfg_apply_map ($$$) {
1086 my ($varref, $what, $mapspec) = @_;
1087 return unless $mapspec;
1089 printdebug "config $what EVAL{ $mapspec; }\n";
1091 eval "package Dgit::Config; $mapspec;";
1096 #---------- `ftpmasterapi' archive query method (nascent) ----------
1098 sub archive_api_query_cmd ($) {
1100 my @cmd = (@curl, qw(-sS));
1101 my $url = access_cfg('archive-query-url');
1102 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1104 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1105 foreach my $key (split /\:/, $keys) {
1106 $key =~ s/\%HOST\%/$host/g;
1108 fail "for $url: stat $key: $!" unless $!==ENOENT;
1111 fail "config requested specific TLS key but do not know".
1112 " how to get curl to use exactly that EE key ($key)";
1113 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1114 # # Sadly the above line does not work because of changes
1115 # # to gnutls. The real fix for #790093 may involve
1116 # # new curl options.
1119 # Fixing #790093 properly will involve providing a value
1120 # for this on clients.
1121 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1122 push @cmd, split / /, $kargs if defined $kargs;
1124 push @cmd, $url.$subpath;
1128 sub api_query ($$;$) {
1130 my ($data, $subpath, $ok404) = @_;
1131 badcfg "ftpmasterapi archive query method takes no data part"
1133 my @cmd = archive_api_query_cmd($subpath);
1134 my $url = $cmd[$#cmd];
1135 push @cmd, qw(-w %{http_code});
1136 my $json = cmdoutput @cmd;
1137 unless ($json =~ s/\d+\d+\d$//) {
1138 failedcmd_report_cmd undef, @cmd;
1139 fail "curl failed to print 3-digit HTTP code";
1142 return undef if $code eq '404' && $ok404;
1143 fail "fetch of $url gave HTTP code $code"
1144 unless $url =~ m#^file://# or $code =~ m/^2/;
1145 return decode_json($json);
1148 sub canonicalise_suite_ftpmasterapi {
1149 my ($proto,$data) = @_;
1150 my $suites = api_query($data, 'suites');
1152 foreach my $entry (@$suites) {
1154 my $v = $entry->{$_};
1155 defined $v && $v eq $isuite;
1156 } qw(codename name);
1157 push @matched, $entry;
1159 fail "unknown suite $isuite, maybe -d would help" unless @matched;
1162 @matched==1 or die "multiple matches for suite $isuite\n";
1163 $cn = "$matched[0]{codename}";
1164 defined $cn or die "suite $isuite info has no codename\n";
1165 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1167 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1172 sub archive_query_ftpmasterapi {
1173 my ($proto,$data) = @_;
1174 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1176 my $digester = Digest::SHA->new(256);
1177 foreach my $entry (@$info) {
1179 my $vsn = "$entry->{version}";
1180 my ($ok,$msg) = version_check $vsn;
1181 die "bad version: $msg\n" unless $ok;
1182 my $component = "$entry->{component}";
1183 $component =~ m/^$component_re$/ or die "bad component";
1184 my $filename = "$entry->{filename}";
1185 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1186 or die "bad filename";
1187 my $sha256sum = "$entry->{sha256sum}";
1188 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1189 push @rows, [ $vsn, "/pool/$component/$filename",
1190 $digester, $sha256sum ];
1192 die "bad ftpmaster api response: $@\n".Dumper($entry)
1195 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1196 return archive_query_prepend_mirror @rows;
1199 sub file_in_archive_ftpmasterapi {
1200 my ($proto,$data,$filename) = @_;
1201 my $pat = $filename;
1204 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1205 my $info = api_query($data, "file_in_archive/$pat", 1);
1208 sub package_not_wholly_new_ftpmasterapi {
1209 my ($proto,$data,$pkg) = @_;
1210 my $info = api_query($data,"madison?package=${pkg}&f=json");
1214 #---------- `aptget' archive query method ----------
1217 our $aptget_releasefile;
1218 our $aptget_configpath;
1220 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1221 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1223 sub aptget_cache_clean {
1224 runcmd_ordryrun_local qw(sh -ec),
1225 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1229 sub aptget_lock_acquire () {
1230 my $lockfile = "$aptget_base/lock";
1231 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1232 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1235 sub aptget_prep ($) {
1237 return if defined $aptget_base;
1239 badcfg "aptget archive query method takes no data part"
1242 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1245 ensuredir "$cache/dgit";
1247 access_cfg('aptget-cachekey','RETURN-UNDEF')
1248 // access_nomdistro();
1250 $aptget_base = "$cache/dgit/aptget";
1251 ensuredir $aptget_base;
1253 my $quoted_base = $aptget_base;
1254 die "$quoted_base contains bad chars, cannot continue"
1255 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1257 ensuredir $aptget_base;
1259 aptget_lock_acquire();
1261 aptget_cache_clean();
1263 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1264 my $sourceslist = "source.list#$cachekey";
1266 my $aptsuites = $isuite;
1267 cfg_apply_map(\$aptsuites, 'suite map',
1268 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1270 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1271 printf SRCS "deb-src %s %s %s\n",
1272 access_cfg('mirror'),
1274 access_cfg('aptget-components')
1277 ensuredir "$aptget_base/cache";
1278 ensuredir "$aptget_base/lists";
1280 open CONF, ">", $aptget_configpath or die $!;
1282 Debug::NoLocking "true";
1283 APT::Get::List-Cleanup "false";
1284 #clear APT::Update::Post-Invoke-Success;
1285 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1286 Dir::State::Lists "$quoted_base/lists";
1287 Dir::Etc::preferences "$quoted_base/preferences";
1288 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1289 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1292 foreach my $key (qw(
1295 Dir::Cache::Archives
1296 Dir::Etc::SourceParts
1297 Dir::Etc::preferencesparts
1299 ensuredir "$aptget_base/$key";
1300 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1303 my $oldatime = (time // die $!) - 1;
1304 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1305 next unless stat_exists $oldlist;
1306 my ($mtime) = (stat _)[9];
1307 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1310 runcmd_ordryrun_local aptget_aptget(), qw(update);
1313 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1314 next unless stat_exists $oldlist;
1315 my ($atime) = (stat _)[8];
1316 next if $atime == $oldatime;
1317 push @releasefiles, $oldlist;
1319 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1320 @releasefiles = @inreleasefiles if @inreleasefiles;
1321 if (!@releasefiles) {
1323 apt seemed not to update dgit's cached Release files for $isuite.
1325 is on a filesystem mounted `noatime'; if so, please use `relatime'.)
1328 die "apt updated too many Release files (@releasefiles), erk"
1329 unless @releasefiles == 1;
1331 ($aptget_releasefile) = @releasefiles;
1334 sub canonicalise_suite_aptget {
1335 my ($proto,$data) = @_;
1338 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1340 foreach my $name (qw(Codename Suite)) {
1341 my $val = $release->{$name};
1343 printdebug "release file $name: $val\n";
1344 $val =~ m/^$suite_re$/o or fail
1345 "Release file ($aptget_releasefile) specifies intolerable $name";
1346 cfg_apply_map(\$val, 'suite rmap',
1347 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1354 sub archive_query_aptget {
1355 my ($proto,$data) = @_;
1358 ensuredir "$aptget_base/source";
1359 foreach my $old (<$aptget_base/source/*.dsc>) {
1360 unlink $old or die "$old: $!";
1363 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1364 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1365 # avoids apt-get source failing with ambiguous error code
1367 runcmd_ordryrun_local
1368 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1369 aptget_aptget(), qw(--download-only --only-source source), $package;
1371 my @dscs = <$aptget_base/source/*.dsc>;
1372 fail "apt-get source did not produce a .dsc" unless @dscs;
1373 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1375 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1378 my $uri = "file://". uri_escape $dscs[0];
1379 $uri =~ s{\%2f}{/}gi;
1380 return [ (getfield $pre_dsc, 'Version'), $uri ];
1383 sub file_in_archive_aptget () { return undef; }
1384 sub package_not_wholly_new_aptget () { return undef; }
1386 #---------- `dummyapicat' archive query method ----------
1388 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1389 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1391 sub dummycatapi_run_in_mirror ($@) {
1392 # runs $fn with FIA open onto the rune's output
1393 my ($rune, $argl, $fn) = @_;
1395 my $mirror = access_cfg('mirror');
1396 $mirror =~ s#^file://#/# or die "$mirror ?";
1397 my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune,
1398 qw(x), $mirror, @$argl);
1399 debugcmd "-|", @cmd;
1400 open FIA, "-|", @cmd or die $!;
1402 close FIA or ($!==0 && $?==141) or die failedcmd @cmd;
1406 sub file_in_archive_dummycatapi ($$$) {
1407 my ($proto,$data,$filename) = @_;
1409 dummycatapi_run_in_mirror '
1410 find -name "$1" -print0 |
1412 ', [$filename], sub {
1415 printdebug "| $_\n";
1416 m/^(\w+) (\S+)$/ or die "$_ ?";
1417 push @out, { sha256sum => $1, filename => $2 };
1423 sub package_not_wholly_new_dummycatapi {
1424 my ($proto,$data,$pkg) = @_;
1425 dummycatapi_run_in_mirror "
1426 find -name ${pkg}_*.dsc
1433 #---------- `madison' archive query method ----------
1435 sub archive_query_madison {
1436 return archive_query_prepend_mirror
1437 map { [ @$_[0..1] ] } madison_get_parse(@_);
1440 sub madison_get_parse {
1441 my ($proto,$data) = @_;
1442 die unless $proto eq 'madison';
1443 if (!length $data) {
1444 $data= access_cfg('madison-distro','RETURN-UNDEF');
1445 $data //= access_basedistro();
1447 $rmad{$proto,$data,$package} ||= cmdoutput
1448 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1449 my $rmad = $rmad{$proto,$data,$package};
1452 foreach my $l (split /\n/, $rmad) {
1453 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1454 \s*( [^ \t|]+ )\s* \|
1455 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1456 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1457 $1 eq $package or die "$rmad $package ?";
1464 $component = access_cfg('archive-query-default-component');
1466 $5 eq 'source' or die "$rmad ?";
1467 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1469 return sort { -version_compare($a->[0],$b->[0]); } @out;
1472 sub canonicalise_suite_madison {
1473 # madison canonicalises for us
1474 my @r = madison_get_parse(@_);
1476 "unable to canonicalise suite using package $package".
1477 " which does not appear to exist in suite $isuite;".
1478 " --existing-package may help";
1482 sub file_in_archive_madison { return undef; }
1483 sub package_not_wholly_new_madison { return undef; }
1485 #---------- `sshpsql' archive query method ----------
1488 my ($data,$runeinfo,$sql) = @_;
1489 if (!length $data) {
1490 $data= access_someuserhost('sshpsql').':'.
1491 access_cfg('sshpsql-dbname');
1493 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1494 my ($userhost,$dbname) = ($`,$'); #';
1496 my @cmd = (access_cfg_ssh, $userhost,
1497 access_runeinfo("ssh-psql $runeinfo").
1498 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1499 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
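# psql -A prints a '|'-separated header row, then the data rows, then a
# trailing "(N rows)" line; the parsing below relies on exactly that shape.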
1501 open P, "-|", @cmd or die $!;
1504 printdebug(">|$_|\n");
1507 $!=0; $?=0; close P or failedcmd @cmd;
1509 my $nrows = pop @rows;
1510 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1511 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1512 @rows = map { [ split /\|/, $_ ] } @rows;
1513 my $ncols = scalar @{ shift @rows };
1514 die if grep { scalar @$_ != $ncols } @rows;
1518 sub sql_injection_check {
1519 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1522 sub archive_query_sshpsql ($$) {
1523 my ($proto,$data) = @_;
1524 sql_injection_check $isuite, $package;
1525 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1526 SELECT source.version, component.name, files.filename, files.sha256sum
1528 JOIN src_associations ON source.id = src_associations.source
1529 JOIN suite ON suite.id = src_associations.suite
1530 JOIN dsc_files ON dsc_files.source = source.id
1531 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1532 JOIN component ON component.id = files_archive_map.component_id
1533 JOIN files ON files.id = dsc_files.file
1534 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1535 AND source.source='$package'
1536 AND files.filename LIKE '%.dsc';
1538 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1539 my $digester = Digest::SHA->new(256);
1541 my ($vsn,$component,$filename,$sha256sum) = @$_;
1542 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1544 return archive_query_prepend_mirror @rows;
1547 sub canonicalise_suite_sshpsql ($$) {
1548 my ($proto,$data) = @_;
1549 sql_injection_check $isuite;
1550 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1551 SELECT suite.codename
1552 FROM suite where suite_name='$isuite' or codename='$isuite';
1554 @rows = map { $_->[0] } @rows;
1555 fail "unknown suite $isuite" unless @rows;
1556 die "ambiguous $isuite: @rows ?" if @rows>1;
1560 sub file_in_archive_sshpsql ($$$) { return undef; }
1561 sub package_not_wholly_new_sshpsql ($$$) { return undef; }
1563 #---------- `dummycat' archive query method ----------
1565 sub canonicalise_suite_dummycat ($$) {
1566 my ($proto,$data) = @_;
1567 my $dpath = "$data/suite.$isuite";
1568 if (!open C, "<", $dpath) {
1569 $!==ENOENT or die "$dpath: $!";
1570 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1574 chomp or die "$dpath: $!";
1576 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1580 sub archive_query_dummycat ($$) {
1581 my ($proto,$data) = @_;
1582 canonicalise_suite();
1583 my $dpath = "$data/package.$csuite.$package";
1584 if (!open C, "<", $dpath) {
1585 $!==ENOENT or die "$dpath: $!";
1586 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1594 printdebug "dummycat query $csuite $package $dpath | $_\n";
1595 my @row = split /\s+/, $_;
1596 @row==2 or die "$dpath: $_ ?";
1599 C->error and die "$dpath: $!";
1601 return archive_query_prepend_mirror
1602 sort { -version_compare($a->[0],$b->[0]); } @rows;
1605 sub file_in_archive_dummycat () { return undef; }
1606 sub package_not_wholly_new_dummycat () { return undef; }
1608 #---------- tag format handling ----------
1610 sub access_cfg_tagformats () {
1611 split /\,/, access_cfg('dgit-tag-format');
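# e.g. with the default 'new,old,maint' (see %defcfg) this yields
# qw(new old maint)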
1614 sub access_cfg_tagformats_can_splitbrain () {
1615 my %y = map { $_ => 1 } access_cfg_tagformats;
1616 foreach my $needtf (qw(new maint)) {
1617 next if $y{$needtf};
1623 sub need_tagformat ($$) {
1624 my ($fmt, $why) = @_;
1625 fail "need to use tag format $fmt ($why) but also need".
1626 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1627 " - no way to proceed"
1628 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1629 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1632 sub select_tagformat () {
1634 return if $tagformatfn && !$tagformat_want;
1635 die 'bug' if $tagformatfn && $tagformat_want;
1636 # ... $tagformat_want assigned after previous select_tagformat
1638 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1639 printdebug "select_tagformat supported @supported\n";
1641 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1642 printdebug "select_tagformat specified @$tagformat_want\n";
1644 my ($fmt,$why,$override) = @$tagformat_want;
1646 fail "target distro supports tag formats @supported".
1647 " but have to use $fmt ($why)"
1649 or grep { $_ eq $fmt } @supported;
1651 $tagformat_want = undef;
1653 $tagformatfn = ${*::}{"debiantag_$fmt"};
1655 fail "trying to use unknown tag format \`$fmt' ($why) !"
1656 unless $tagformatfn;
1659 #---------- archive query entrypoints and rest of program ----------
1661 sub canonicalise_suite () {
1662 return if defined $csuite;
1663 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1664 $csuite = archive_query('canonicalise_suite');
1665 if ($isuite ne $csuite) {
1666 progress "canonical suite name for $isuite is $csuite";
1668 progress "canonical suite name is $csuite";
1672 sub get_archive_dsc () {
1673 canonicalise_suite();
1674 my @vsns = archive_query('archive_query');
1675 foreach my $vinfo (@vsns) {
1676 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1677 $dscurl = $vsn_dscurl;
1678 $dscdata = url_get($dscurl);
1680 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1685 $digester->add($dscdata);
1686 my $got = $digester->hexdigest();
1688 fail "$dscurl has hash $got but".
1689 " archive told us to expect $digest";
1692 my $fmt = getfield $dsc, 'Format';
1693 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1694 "unsupported source format $fmt, sorry";
1696 $dsc_checked = !!$digester;
1697 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1701 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1704 sub check_for_git ();
1705 sub check_for_git () {
1707 my $how = access_cfg('git-check');
1708 if ($how eq 'ssh-cmd') {
1710 (access_cfg_ssh, access_gituserhost(),
1711 access_runeinfo("git-check $package").
1712 " set -e; cd ".access_cfg('git-path').";".
1713 " if test -d $package.git; then echo 1; else echo 0; fi");
1714 my $r= cmdoutput @cmd;
1715 if (defined $r and $r =~ m/^divert (\w+)$/) {
1717 my ($usedistro,) = access_distros();
1718 # NB that if we are pushing, $usedistro will be $distro/push
1719 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1720 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1721 progress "diverting to $divert (using config for $instead_distro)";
1722 return check_for_git();
1724 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1726 } elsif ($how eq 'url') {
1727 my $prefix = access_cfg('git-check-url','git-url');
1728 my $suffix = access_cfg('git-check-suffix','git-suffix',
1729 'RETURN-UNDEF') // '.git';
1730 my $url = "$prefix/$package$suffix";
1731 my @cmd = (@curl, qw(-sS -I), $url);
1732 my $result = cmdoutput @cmd;
1733 $result =~ s/^\S+ 200 .*\n\r?\n//;
1734 # curl -sS -I with https_proxy prints
1735 # HTTP/1.0 200 Connection established
1736 $result =~ m/^\S+ (404|200) /s or
1737 fail "unexpected results from git check query - ".
1738 Dumper($prefix, $result);
1740 if ($code eq '404') {
1742 } elsif ($code eq '200') {
1747 } elsif ($how eq 'true') {
1749 } elsif ($how eq 'false') {
1752 badcfg "unknown git-check \`$how'";
1756 sub create_remote_git_repo () {
1757 my $how = access_cfg('git-create');
1758 if ($how eq 'ssh-cmd') {
1760 (access_cfg_ssh, access_gituserhost(),
1761 access_runeinfo("git-create $package").
1762 "set -e; cd ".access_cfg('git-path').";".
1763 " cp -a _template $package.git");
1764 } elsif ($how eq 'true') {
1767 badcfg "unknown git-create \`$how'";
1771 our ($dsc_hash,$lastpush_mergeinput);
1772 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1776 dgit_privdir(); # ensures that $dgit_privdir_made is based on $maindir
1777 $playground = fresh_playground 'dgit/unpack';
1780 sub mktree_in_ud_here () {
1781 playtree_setup $gitcfgs{local};
1784 sub git_write_tree () {
1785 my $tree = cmdoutput @git, qw(write-tree);
1786 $tree =~ m/^\w+$/ or die "$tree ?";
1790 sub git_add_write_tree () {
1791 runcmd @git, qw(add -Af .);
1792 return git_write_tree();
1795 sub remove_stray_gits ($) {
1797 my @gitscmd = qw(find -name .git -prune -print0);
1798 debugcmd "|",@gitscmd;
1799 open GITS, "-|", @gitscmd or die $!;
1804 print STDERR "$us: warning: removing from $what: ",
1805 (messagequote $_), "\n";
1809 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1812 sub mktree_in_ud_from_only_subdir ($;$) {
1813 my ($what,$raw) = @_;
1814 # changes into the subdir
1817 die "expected one subdir but found @dirs ?" unless @dirs==1;
1818 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1822 remove_stray_gits($what);
1823 mktree_in_ud_here();
1825 my ($format, $fopts) = get_source_format();
1826 if (madformat($format)) {
1831 my $tree=git_add_write_tree();
1832 return ($tree,$dir);
1835 our @files_csum_info_fields =
1836 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1837 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1838 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
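# In a .dsc each of these fields holds lines of the form
#   <checksum> <size in bytes> <filename>
# (.changes Files lines carry two extra columns; see files_compare_inputs).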
1840 sub dsc_files_info () {
1841 foreach my $csumi (@files_csum_info_fields) {
1842 my ($fname, $module, $method) = @$csumi;
1843 my $field = $dsc->{$fname};
1844 next unless defined $field;
1845 eval "use $module; 1;" or die $@;
1847 foreach (split /\n/, $field) {
1849 m/^(\w+) (\d+) (\S+)$/ or
1850 fail "could not parse .dsc $fname line \`$_'";
1851 my $digester = eval "$module"."->$method;" or die $@;
1856 Digester => $digester,
1861 fail "missing any supported Checksums-* or Files field in ".
1862 $dsc->get_option('name');
1866 map { $_->{Filename} } dsc_files_info();
1869 sub files_compare_inputs (@) {
1874 my $showinputs = sub {
1875 return join "; ", map { $_->get_option('name') } @$inputs;
1878 foreach my $in (@$inputs) {
1880 my $in_name = $in->get_option('name');
1882 printdebug "files_compare_inputs $in_name\n";
1884 foreach my $csumi (@files_csum_info_fields) {
1885 my ($fname) = @$csumi;
1886 printdebug "files_compare_inputs $in_name $fname\n";
1888 my $field = $in->{$fname};
1889 next unless defined $field;
1892 foreach (split /\n/, $field) {
1895 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1896 fail "could not parse $in_name $fname line \`$_'";
1898 printdebug "files_compare_inputs $in_name $fname $f\n";
1902 my $re = \ $record{$f}{$fname};
1904 $fchecked{$f}{$in_name} = 1;
1906 fail "hash or size of $f varies in $fname fields".
1907 " (between: ".$showinputs->().")";
1912 @files = sort @files;
1913 $expected_files //= \@files;
1914 "@$expected_files" eq "@files" or
1915 fail "file list in $in_name varies between hash fields!";
1918 fail "$in_name has no files list field(s)";
1920 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1923 grep { keys %$_ == @$inputs-1 } values %fchecked
1924 or fail "no file appears in all file lists".
1925 " (looked in: ".$showinputs->().")";
1928 sub is_orig_file_in_dsc ($$) {
1929 my ($f, $dsc_files_info) = @_;
1930 return 0 if @$dsc_files_info <= 1;
1931 # One file means no origs, and the filename doesn't have a "what
1932 # part of dsc" component. (Consider versions ending `.orig'.)
1933 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1937 # This function determines whether a .changes file is source-only from
1938 # the point of view of dak. Thus, it permits *_source.buildinfo
1941 # It does not, however, permit any other buildinfo files. After a
1942 # source-only upload, the buildds will try to upload files like
1943 # foo_1.2.3_amd64.buildinfo. If the package maintainer included files
1944 # named like this in their (otherwise) source-only upload, the uploads
1945 # of the buildd can be rejected by dak. Fixing the resultant
1946 # situation can require manual intervention. So we block such
1947 # .buildinfo files when the user tells us to perform a source-only
1948 # upload (such as when using the push-source subcommand with the -C
1949 # option, which calls this function).
1951 # Note, though, that when dgit is told to prepare a source-only
1952 # upload, such as when subcommands like build-source and push-source
1953 # without -C are used, dgit has a more restrictive notion of
1954 # source-only .changes than dak: such uploads will never include
1955 # *_source.buildinfo files. This is because there is no use for such
1956 # files when using a tool like dgit to produce the source package, as
1957 # dgit ensures the source is identical to git HEAD.
1958 sub test_source_only_changes ($) {
1960 foreach my $l (split /\n/, getfield $changes, 'Files') {
1961 $l =~ m/\S+$/ or next;
1962 # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
1963 unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
1964 print "purportedly source-only changes polluted by $&\n";
1971 sub changes_update_origs_from_dsc ($$$$) {
1972 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1974 printdebug "checking origs needed ($upstreamvsn)...\n";
1975 $_ = getfield $changes, 'Files';
1976 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1977 fail "cannot find section/priority from .changes Files field";
1978 my $placementinfo = $1;
1980 printdebug "checking origs needed placement '$placementinfo'...\n";
1981 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1982 $l =~ m/\S+$/ or next;
1984 printdebug "origs $file | $l\n";
1985 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1986 printdebug "origs $file is_orig\n";
1987 my $have = archive_query('file_in_archive', $file);
1988 if (!defined $have) {
1990 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1996 printdebug "origs $file \$#\$have=$#$have\n";
1997 foreach my $h (@$have) {
2000 foreach my $csumi (@files_csum_info_fields) {
2001 my ($fname, $module, $method, $archivefield) = @$csumi;
2002 next unless defined $h->{$archivefield};
2003 $_ = $dsc->{$fname};
2004 next unless defined;
2005 m/^(\w+) .* \Q$file\E$/m or
2006 fail ".dsc $fname missing entry for $file";
2007 if ($h->{$archivefield} eq $1) {
2011 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
2014 die "$file ".Dumper($h)." ?!" if $same && @differ;
2017 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
2020 printdebug "origs $file f.same=$found_same".
2021 " #f._differ=$#found_differ\n";
2022 if (@found_differ && !$found_same) {
2024 "archive contains $file with different checksum",
2027 # Now we edit the changes file to add or remove it
2028 foreach my $csumi (@files_csum_info_fields) {
2029 my ($fname, $module, $method, $archivefield) = @$csumi;
2030 next unless defined $changes->{$fname};
2032 # in archive, delete from .changes if it's there
2033 $changed{$file} = "removed" if
2034 $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m;
2035 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) {
2036 # not in archive, but it's here in the .changes
2038 my $dsc_data = getfield $dsc, $fname;
2039 $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
2041 $extra =~ s/ \d+ /$&$placementinfo /
2042 or die "$fname $extra >$dsc_data< ?"
2043 if $fname eq 'Files';
2044 $changes->{$fname} .= "\n". $extra;
2045 $changed{$file} = "added";
2050 foreach my $file (keys %changed) {
2052 "edited .changes for archive .orig contents: %s %s",
2053 $changed{$file}, $file;
2055 my $chtmp = "$changesfile.tmp";
2056 $changes->save($chtmp);
2058 rename $chtmp,$changesfile or die "$changesfile $!";
2060 progress "[new .changes left in $changesfile]";
2063 progress "$changesfile already has appropriate .orig(s) (if any)";
2067 sub make_commit ($) {
2069 return cmdoutput @git, qw(hash-object -w -t commit), $file;
2072 sub clogp_authline ($) {
2074 my $author = getfield $clogp, 'Maintainer';
2075 if ($author =~ m/^[^"\@]+\,/) {
2076 # single entry Maintainer field with unquoted comma
2077 $author = ($& =~ y/,//rd).$'; # strip the comma
2079 # git wants a single author; any remaining commas in $author
2080 # are by now preceded by @ (or "). It seems safer to punt on
2081 # "..." for now rather than attempting to dequote or something.
2082 $author =~ s#,.*##ms unless $author =~ m/"/;
2083 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
2084 my $authline = "$author $date";
2085 $authline =~ m/$git_authline_re/o or
2086 fail "unexpected commit author line format \`$authline'".
2087 " (was generated from changelog Maintainer field)";
2088 return ($1,$2,$3) if wantarray;
2092 sub vendor_patches_distro ($$) {
2093 my ($checkdistro, $what) = @_;
2094 return unless defined $checkdistro;
2096 my $series = "debian/patches/\L$checkdistro\E.series";
2097 printdebug "checking for vendor-specific $series ($what)\n";
2099 if (!open SERIES, "<", $series) {
2100 die "$series $!" unless $!==ENOENT;
2109 Unfortunately, this source package uses a feature of dpkg-source where
2110 the same source package unpacks to different source code on different
2111 distros. dgit cannot safely operate on such packages on affected
2112 distros, because the meaning of source packages is not stable.
2114 Please ask the distro/maintainer to remove the distro-specific series
2115 files and use a different technique (if necessary, uploading actually
2116 different packages, if different distros are supposed to have
2120 fail "Found active distro-specific series file for".
2121 " $checkdistro ($what): $series, cannot continue";
2123 die "$series $!" if SERIES->error;
2127 sub check_for_vendor_patches () {
2128 # This dpkg-source feature doesn't seem to be documented anywhere!
2129 # But it can be found in the changelog (reformatted):
2131 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2132 # Author: Raphael Hertzog <hertzog@debian.org>
2133 # Date: Sun Oct 3 09:36:48 2010 +0200
2135 # dpkg-source: correctly create .pc/.quilt_series with alternate
2138 # If you have debian/patches/ubuntu.series and you were
2139 # unpacking the source package on ubuntu, quilt was still
2140 # directed to debian/patches/series instead of
2141 # debian/patches/ubuntu.series.
2143 # debian/changelog | 3 +++
2144 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2145 # 2 files changed, 6 insertions(+), 1 deletion(-)
2148 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2149 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2150 "Dpkg::Vendor \`current vendor'");
2151 vendor_patches_distro(access_basedistro(),
2152 "(base) distro being accessed");
2153 vendor_patches_distro(access_nomdistro(),
2154 "(nominal) distro being accessed");
2157 sub generate_commits_from_dsc () {
2158 # See big comment in fetch_from_archive, below.
2159 # See also README.dsc-import.
2161 changedir $playground;
2163 my @dfi = dsc_files_info();
2164 foreach my $fi (@dfi) {
2165 my $f = $fi->{Filename};
2166 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2167 my $upper_f = (bpd_abs()."/$f");
2169 printdebug "considering reusing $f: ";
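# The build products dir may hold the file either under its plain
# name or with a ",fetch" suffix; the chain of checks below tries to
# hardlink whichever exists into the playground, and otherwise we
# fall back to fetching it afresh.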
2171 if (link_ltarget "$upper_f,fetch", $f) {
2172 printdebug "linked (using ...,fetch).\n";
2173 } elsif ((printdebug "($!) "),
2175 fail "accessing $buildproductsdir/$f,fetch: $!";
2176 } elsif (link_ltarget $upper_f, $f) {
2177 printdebug "linked.\n";
2178 } elsif ((printdebug "($!) "),
2180 fail "accessing $buildproductsdir/$f: $!";
2182 printdebug "absent.\n";
2186 complete_file_from_dsc('.', $fi, \$refetched)
2189 printdebug "considering saving $f: ";
2191 if (link $f, $upper_f) {
2192 printdebug "linked.\n";
2193 } elsif ((printdebug "($!) "),
2195 fail "saving $buildproductsdir/$f: $!";
2196 } elsif (!$refetched) {
2197 printdebug "no need.\n";
2198 } elsif (link $f, "$upper_f,fetch") {
2199 printdebug "linked (using ...,fetch).\n";
2200 } elsif ((printdebug "($!) "),
2202 fail "saving $buildproductsdir/$f,fetch: $!";
2204 printdebug "cannot.\n";
2208 # We unpack and record the orig tarballs first, so that we only
2209 # need disk space for one private copy of the unpacked source.
2210 # But we can't make them into commits until we have the metadata
2211 # from the debian/changelog, so we record the tree objects now and
2212 # make them into commits later.
2214 my $upstreamv = upstreamversion $dsc->{version};
2215 my $orig_f_base = srcfn $upstreamv, '';
2217 foreach my $fi (@dfi) {
2218 # We actually import, and record as a commit, every tarball
2219 # (unless there is only one file, in which case there seems
2222 my $f = $fi->{Filename};
2223 printdebug "import considering $f ";
2224 (printdebug "only one dfi\n"), next if @dfi == 1;
2225 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2226 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2230 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
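# For example (illustrative), with $orig_f_base "foo_1.2":
#   foo_1.2.orig.tar.gz       => $orig_f_part "orig"
#   foo_1.2.orig-docs.tar.xz  => $orig_f_part "orig-docs"
#   foo_1.2.tar.gz (native)   => no match, $orig_f_part undef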
2232 printdebug "Y ", (join ' ', map { $_//"(none)" }
2233 $compr_ext, $orig_f_part
2236 my $input = new IO::File $f, '<' or die "$f $!";
2240 if (defined $compr_ext) {
2242 Dpkg::Compression::compression_guess_from_filename $f;
2243 fail "Dpkg::Compression cannot handle file $f in source package"
2244 if defined $compr_ext && !defined $cname;
2246 new Dpkg::Compression::Process compression => $cname;
2247 @compr_cmd = $compr_proc->get_uncompress_cmdline();
2248 my $compr_fh = new IO::Handle;
2249 my $compr_pid = open $compr_fh, "-|"; defined $compr_pid or die $!;
2251 open STDIN, "<&", $input or die $!;
2253 die "dgit (child): exec $compr_cmd[0]: $!\n";
2258 rmtree "_unpack-tar";
2259 mkdir "_unpack-tar" or die $!;
2260 my @tarcmd = qw(tar -x -f -
2261 --no-same-owner --no-same-permissions
2262 --no-acls --no-xattrs --no-selinux);
2263 my $tar_pid = fork // die $!;
2265 chdir "_unpack-tar" or die $!;
2266 open STDIN, "<&", $input or die $!;
2268 die "dgit (child): exec $tarcmd[0]: $!";
2270 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2271 !$? or failedcmd @tarcmd;
2274 (@compr_cmd ? ($?==SIGPIPE || failedcmd @compr_cmd)
2276 # finally, we have the results in "tarball", but maybe
2277 # with the wrong permissions
2279 runcmd qw(chmod -R +rwX _unpack-tar);
2280 changedir "_unpack-tar";
2281 remove_stray_gits($f);
2282 mktree_in_ud_here();
2284 my ($tree) = git_add_write_tree();
2285 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2286 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2288 printdebug "one subtree $1\n";
2290 printdebug "multiple subtrees\n";
2293 rmtree "_unpack-tar";
2295 my $ent = [ $f, $tree ];
2297 Orig => !!$orig_f_part,
2298 Sort => (!$orig_f_part ? 2 :
2299 $orig_f_part =~ m/-/g ? 1 :
2307 # put the main orig (part with no "-") first, then component
2308 # tarballs (part containing "-"), then the rest (the spec is not
2309 # clear whether files are always in the usual order).
2310 $a->{Sort} <=> $b->{Sort} or
2314 my $any_orig = grep { $_->{Orig} } @tartrees;
2316 my $dscfn = "$package.dsc";
2318 my $treeimporthow = 'package';
2320 open D, ">", $dscfn or die "$dscfn: $!";
2321 print D $dscdata or die "$dscfn: $!";
2322 close D or die "$dscfn: $!";
2323 my @cmd = qw(dpkg-source);
2324 push @cmd, '--no-check' if $dsc_checked;
2325 if (madformat $dsc->{format}) {
2326 push @cmd, '--skip-patches';
2327 $treeimporthow = 'unpatched';
2329 push @cmd, qw(-x --), $dscfn;
2332 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2333 if (madformat $dsc->{format}) {
2334 check_for_vendor_patches();
2338 if (madformat $dsc->{format}) {
2339 my @pcmd = qw(dpkg-source --before-build .);
2340 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2342 $dappliedtree = git_add_write_tree();
2345 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2349 printdebug "import clog search...\n";
2350 parsechangelog_loop \@clogcmd, "package changelog", sub {
2351 my ($thisstanza, $desc) = @_;
2352 no warnings qw(exiting);
2354 $clogp //= $thisstanza;
2356 printdebug "import clog $thisstanza->{version} $desc...\n";
2358 last if !$any_orig; # we don't need $r1clogp
2360 # We look for the first (most recent) changelog entry whose
2361 # version number is lower than the upstream version of this
2362 # package. Then the last (least recent) previous changelog
2363 # entry is treated as the one which introduced this upstream
2364 # version and used for the synthetic commits for the upstream
2367 # One might think that a more sophisticated algorithm would be
2368 # necessary. But: we do not want to scan the whole changelog
2369 # file. Stopping when we see an earlier version, which
2370 # necessarily then is an earlier upstream version, is the only
2371 # realistic way to do that. Then, either the earliest
2372 # changelog entry we have seen so far is indeed the earliest
2373 # upload of this upstream version; or there are only changelog
2374 # entries relating to later upstream versions (which is not
2375 # possible unless the changelog and .dsc disagree about the
2376 # version). Then it remains to choose between the physically
2377 # last entry in the file, and the one with the lowest version
2378 # number. If these are not the same, we guess that the
2379 # versions were created in a non-monotonic order rather than
2380 # that the changelog entries have been misordered.
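# For example (illustrative): with upstream version 1.2 and changelog
# entries 1.2-3, 1.2-2, 1.2-1, 1.1-4 (newest first), we stop when we
# see 1.1-4 and $r1clogp ends up as the 1.2-1 entry, which is then
# used for the synthetic orig commits.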
2382 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2384 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2385 $r1clogp = $thisstanza;
2387 printdebug "import clog $r1clogp->{version} becomes r1\n";
2390 $clogp or fail "package changelog has no entries!";
2392 my $authline = clogp_authline $clogp;
2393 my $changes = getfield $clogp, 'Changes';
2394 $changes =~ s/^\n//; # Changes: \n
2395 my $cversion = getfield $clogp, 'Version';
2398 $r1clogp //= $clogp; # maybe there's only one entry;
2399 my $r1authline = clogp_authline $r1clogp;
2400 # Strictly, r1authline might now be wrong if it's going to be
2401 # unused because !$any_orig. Whatever.
2403 printdebug "import tartrees authline $authline\n";
2404 printdebug "import tartrees r1authline $r1authline\n";
2406 foreach my $tt (@tartrees) {
2407 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2409 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2412 committer $r1authline
2416 [dgit import orig $tt->{F}]
2424 [dgit import tarball $package $cversion $tt->{F}]
2429 printdebug "import main commit\n";
2431 open C, ">../commit.tmp" or die $!;
2432 print C <<END or die $!;
2435 print C <<END or die $! foreach @tartrees;
2438 print C <<END or die $!;
2444 [dgit import $treeimporthow $package $cversion]
2448 my $rawimport_hash = make_commit qw(../commit.tmp);
2450 if (madformat $dsc->{format}) {
2451 printdebug "import apply patches...\n";
2453 # regularise the state of the working tree so that
2454 # the checkout of $rawimport_hash works nicely.
2455 my $dappliedcommit = make_commit_text(<<END);
2462 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2464 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2466 # We need the answers to be reproducible
2467 my @authline = clogp_authline($clogp);
2468 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2469 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2470 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2471 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2472 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2473 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2475 my $path = $ENV{PATH} or die;
2477 # we use ../../gbp-pq-output, which (given that we are in
2478 # $playground/PLAYTREE, and $playground is .git/dgit/unpack,
2481 foreach my $use_absurd (qw(0 1)) {
2482 runcmd @git, qw(checkout -q unpa);
2483 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2484 local $ENV{PATH} = $path;
2487 progress "warning: $@";
2488 $path = "$absurdity:$path";
2489 progress "$us: trying slow absurd-git-apply...";
2490 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2495 die "forbid absurd git-apply\n" if $use_absurd
2496 && forceing [qw(import-gitapply-no-absurd)];
2497 die "only absurd git-apply!\n" if !$use_absurd
2498 && forceing [qw(import-gitapply-absurd)];
2500 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2501 local $ENV{PATH} = $path if $use_absurd;
2503 my @showcmd = (gbp_pq, qw(import));
2504 my @realcmd = shell_cmd
2505 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2506 debugcmd "+",@realcmd;
2507 if (system @realcmd) {
2508 die +(shellquote @showcmd).
2510 failedcmd_waitstatus()."\n";
2513 my $gapplied = git_rev_parse('HEAD');
2514 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2515 $gappliedtree eq $dappliedtree or
2517 gbp-pq import and dpkg-source disagree!
2518 gbp-pq import gave commit $gapplied
2519 gbp-pq import gave tree $gappliedtree
2520 dpkg-source --before-build gave tree $dappliedtree
2522 $rawimport_hash = $gapplied;
2527 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2532 progress "synthesised git commit from .dsc $cversion";
2534 my $rawimport_mergeinput = {
2535 Commit => $rawimport_hash,
2536 Info => "Import of source package",
2538 my @output = ($rawimport_mergeinput);
2540 if ($lastpush_mergeinput) {
2541 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2542 my $oversion = getfield $oldclogp, 'Version';
2544 version_compare($oversion, $cversion);
2546 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2547 { Message => <<END, ReverseParents => 1 });
2548 Record $package ($cversion) in archive suite $csuite
2550 } elsif ($vcmp > 0) {
2551 print STDERR <<END or die $!;
2553 Version actually in archive: $cversion (older)
2554 Last version pushed with dgit: $oversion (newer or same)
2557 @output = $lastpush_mergeinput;
2559 # Same version. Use what's in the server git branch,
2560 # discarding our own import. (This could happen if the
2561 # server automatically imports all packages into git.)
2562 @output = $lastpush_mergeinput;
2570 sub complete_file_from_dsc ($$;$) {
2571 our ($dstdir, $fi, $refetched) = @_;
2572 # Ensures that we have, in $dstdir, the file $fi, with the correct
2573 # contents. (Downloading it from alongside $dscurl if necessary.)
2574 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2575 # and will set $$refetched=1 if it did so (or tried to).
2577 my $f = $fi->{Filename};
2578 my $tf = "$dstdir/$f";
2582 my $checkhash = sub {
2583 open F, "<", "$tf" or die "$tf: $!";
2584 $fi->{Digester}->reset();
2585 $fi->{Digester}->addfile(*F);
2586 F->error and die $!;
2587 $got = $fi->{Digester}->hexdigest();
2588 return $got eq $fi->{Hash};
2591 if (stat_exists $tf) {
2592 if ($checkhash->()) {
2593 progress "using existing $f";
2597 fail "file $f has hash $got but .dsc".
2598 " demands hash $fi->{Hash} ".
2599 "(perhaps you should delete this file?)";
2601 progress "need to fetch correct version of $f";
2602 unlink $tf or die "$tf $!";
2605 printdebug "$tf does not exist, need to fetch\n";
2609 $furl =~ s{/[^/]+$}{};
2611 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2612 die "$f ?" if $f =~ m#/#;
2613 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2614 return 0 if !act_local();
2617 fail "file $f has hash $got but .dsc".
2618 " demands hash $fi->{Hash} ".
2619 "(got wrong file from archive!)";
2624 sub ensure_we_have_orig () {
2625 my @dfi = dsc_files_info();
2626 foreach my $fi (@dfi) {
2627 my $f = $fi->{Filename};
2628 next unless is_orig_file_in_dsc($f, \@dfi);
2629 complete_file_from_dsc($buildproductsdir, $fi)
2634 #---------- git fetch ----------
2636 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2637 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
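# For example (illustrative, assuming server_branch gives "dgit/<suite>"):
# with base distro "debian" and suite "sid", lrfetchref() would be
# "refs/dgit-fetch/debian/dgit/sid".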
2639 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2640 # locally fetched refs because they have unhelpful names and clutter
2641 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2642 # whether we have made another local ref which refers to this object).
2644 # (If we deleted them unconditionally, then we might end up
2645 # re-fetching the same git objects each time dgit fetch was run.)
2647 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2648 # in git_fetch_us to fetch the refs in question, and possibly a call
2649 # to lrfetchref_used.
2651 our (%lrfetchrefs_f, %lrfetchrefs_d);
2652 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
2654 sub lrfetchref_used ($) {
2655 my ($fullrefname) = @_;
2656 my $objid = $lrfetchrefs_f{$fullrefname};
2657 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2660 sub git_lrfetch_sane {
2661 my ($url, $supplementary, @specs) = @_;
2662 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2663 # at least as regards @specs. Also leave the results in
2664 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2665 # able to clean these up.
2667 # With $supplementary==1, @specs must not contain wildcards
2668 # and we add to our previous fetches (non-atomically).
2670 # This is rather miserable:
2671 # When git fetch --prune is passed a fetchspec ending with a *,
2672 # it does a plausible thing. If there is no * then:
2673 # - it matches subpaths too, even if the supplied refspec
2674 # starts refs, and behaves completely madly if the source
2675 # has refs/refs/something. (See, for example, Debian #NNNN.)
2676 # - if there is no matching remote ref, it bombs out the whole
2678 # We want to fetch a fixed ref, and we don't know in advance
2679 # if it exists, so this is not suitable.
2681 # Our workaround is to use git ls-remote. git ls-remote has its
2682 # own quirks. Notably, it has the absurd multi-tail-matching
2683 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2684 # refs/refs/foo etc.
2686 # Also, we want an idempotent snapshot, but we have to make two
2687 # calls to the remote: one to git ls-remote and to git fetch. The
2688 # solution is use git ls-remote to obtain a target state, and
2689 # git fetch to try to generate it. If we don't manage to generate
2690 # the target state, we try again.
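# So, concretely (illustrative): we ls-remote for, say, refs/dgit/sid,
# record the object id reported in %wantr, run git fetch, and then
# check that the corresponding lrfetchrefs ref ended up at exactly
# that object, retrying the whole dance if it did not.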
2692 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2694 my $specre = join '|', map {
2697 my $wildcard = $x =~ s/\\\*$/.*/;
2698 die if $wildcard && $supplementary;
2701 printdebug "git_lrfetch_sane specre=$specre\n";
2702 my $wanted_rref = sub {
2704 return m/^(?:$specre)$/;
2707 my $fetch_iteration = 0;
2710 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2711 if (++$fetch_iteration > 10) {
2712 fail "too many iterations trying to get sane fetch!";
2715 my @look = map { "refs/$_" } @specs;
2716 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2720 open GITLS, "-|", @lcmd or die $!;
2722 printdebug "=> ", $_;
2723 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2724 my ($objid,$rrefname) = ($1,$2);
2725 if (!$wanted_rref->($rrefname)) {
2727 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2731 $wantr{$rrefname} = $objid;
2734 close GITLS or failedcmd @lcmd;
2736 # OK, now %wantr is exactly what we want for refs in @specs
2738 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2739 "+refs/$_:".lrfetchrefs."/$_";
2742 printdebug "git_lrfetch_sane fspecs @fspecs\n";
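# For example (illustrative): a spec "tags/archive/debian/1.2-3" which
# the remote reported becomes the fetchspec
#   "+refs/tags/archive/debian/1.2-3:refs/dgit-fetch/<distro>/tags/archive/debian/1.2-3"
# whereas non-wildcard specs the remote did not list are dropped entirely.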
2744 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2745 runcmd_ordryrun_local @fcmd if @fspecs;
2747 if (!$supplementary) {
2748 %lrfetchrefs_f = ();
2752 git_for_each_ref(lrfetchrefs, sub {
2753 my ($objid,$objtype,$lrefname,$reftail) = @_;
2754 $lrfetchrefs_f{$lrefname} = $objid;
2755 $objgot{$objid} = 1;
2758 if ($supplementary) {
2762 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2763 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2764 if (!exists $wantr{$rrefname}) {
2765 if ($wanted_rref->($rrefname)) {
2767 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2771 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2774 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2775 delete $lrfetchrefs_f{$lrefname};
2779 foreach my $rrefname (sort keys %wantr) {
2780 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2781 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2782 my $want = $wantr{$rrefname};
2783 next if $got eq $want;
2784 if (!defined $objgot{$want}) {
2785 fail <<END unless act_local();
2786 --dry-run specified but we actually wanted the results of git fetch,
2787 so this is not going to work. Try running dgit fetch first,
2788 or using --damp-run instead of --dry-run.
2791 warning: git ls-remote suggests we want $lrefname
2792 warning: and it should refer to $want
2793 warning: but git fetch didn't fetch that object to any relevant ref.
2794 warning: This may be due to a race with someone updating the server.
2795 warning: Will try again...
2797 next FETCH_ITERATION;
2800 git-fetch @fspecs made $lrefname=$got but git ls-remote @look says $want
2802 runcmd_ordryrun_local @git, qw(update-ref -m),
2803 "dgit fetch git fetch fixup", $lrefname, $want;
2804 $lrfetchrefs_f{$lrefname} = $want;
2809 if (defined $csuite) {
2810 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2811 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2812 my ($objid,$objtype,$lrefname,$reftail) = @_;
2813 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2814 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2818 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2819 Dumper(\%lrfetchrefs_f);
2822 sub git_fetch_us () {
2823 # Want to fetch only what we are going to use, unless
2824 # deliberately-not-ff, in which case we must fetch everything.
2826 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2828 (quiltmode_splitbrain
2829 ? (map { $_->('*',access_nomdistro) }
2830 \&debiantag_new, \&debiantag_maintview)
2831 : debiantags('*',access_nomdistro));
2832 push @specs, server_branch($csuite);
2833 push @specs, $rewritemap;
2834 push @specs, qw(heads/*) if deliberately_not_fast_forward;
2836 my $url = access_giturl();
2837 git_lrfetch_sane $url, 0, @specs;
2840 my @tagpats = debiantags('*',access_nomdistro);
2842 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2843 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2844 printdebug "currently $fullrefname=$objid\n";
2845 $here{$fullrefname} = $objid;
2847 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2848 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2849 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2850 printdebug "offered $lref=$objid\n";
2851 if (!defined $here{$lref}) {
2852 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2853 runcmd_ordryrun_local @upd;
2854 lrfetchref_used $fullrefname;
2855 } elsif ($here{$lref} eq $objid) {
2856 lrfetchref_used $fullrefname;
2859 "Not updating $lref from $here{$lref} to $objid.\n";
2864 #---------- dsc and archive handling ----------
2866 sub mergeinfo_getclogp ($) {
2867 # Ensures that $mi->{Clogp} exists and returns it
2869 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2872 sub mergeinfo_version ($) {
2873 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2876 sub fetch_from_archive_record_1 ($) {
2878 runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash;
2879 cmdoutput @git, qw(log -n2), $hash;
2880 # ... gives git a chance to complain if our commit is malformed
2883 sub fetch_from_archive_record_2 ($) {
2885 my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash);
2889 dryrun_report @upd_cmd;
2893 sub parse_dsc_field_def_dsc_distro () {
2894 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2895 dgit.default.distro);
2898 sub parse_dsc_field ($$) {
2899 my ($dsc, $what) = @_;
2901 foreach my $field (@ourdscfield) {
2902 $f = $dsc->{$field};
2907 progress "$what: NO git hash";
2908 parse_dsc_field_def_dsc_distro();
2909 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2910 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2911 progress "$what: specified git info ($dsc_distro)";
2912 $dsc_hint_tag = [ $dsc_hint_tag ];
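# An illustrative full-form field matching the regexp above is
#   Dgit: <commit id> <distro> <hint tag> <hint url>
# i.e. git hash, distro name, tag to try fetching, and a clone url.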
2913 } elsif ($f =~ m/^\w+\s*$/) {
2915 parse_dsc_field_def_dsc_distro();
2916 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2918 progress "$what: specified git hash";
2920 fail "$what: invalid Dgit info";
2924 sub resolve_dsc_field_commit ($$) {
2925 my ($already_distro, $already_mapref) = @_;
2927 return unless defined $dsc_hash;
2930 defined $already_mapref &&
2931 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2932 ? $already_mapref : undef;
2936 my ($what, @fetch) = @_;
2938 local $idistro = $dsc_distro;
2939 my $lrf = lrfetchrefs;
2941 if (!$chase_dsc_distro) {
2943 "not chasing .dsc distro $dsc_distro: not fetching $what";
2948 ".dsc names distro $dsc_distro: fetching $what";
2950 my $url = access_giturl();
2951 if (!defined $url) {
2952 defined $dsc_hint_url or fail <<END;
2953 .dsc Dgit metadata is in context of distro $dsc_distro
2954 for which we have no configured url and .dsc provides no hint
2957 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2958 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2959 parse_cfg_bool "dsc-url-proto-ok", 'false',
2960 cfg("dgit.dsc-url-proto-ok.$proto",
2961 "dgit.default.dsc-url-proto-ok")
2963 .dsc Dgit metadata is in context of distro $dsc_distro
2964 for which we have no configured url;
2965 .dsc provides hinted url with protocol $proto which is unsafe.
2966 (can be overridden by config - consult documentation)
2968 $url = $dsc_hint_url;
2971 git_lrfetch_sane $url, 1, @fetch;
2976 my $rewrite_enable = do {
2977 local $idistro = $dsc_distro;
2978 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2981 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2982 if (!defined $mapref) {
2983 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2984 $mapref = $lrf.'/'.$rewritemap;
2986 my $rewritemapdata = git_cat_file $mapref.':map';
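# The map blob is expected to consist of lines of the form
#   <old commit id> <replacement commit id>
# (illustrative); $dsc_hash is looked up in it just below.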
2987 if (defined $rewritemapdata
2988 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
2990 "server's git history rewrite map contains a relevant entry!";
2993 if (defined $dsc_hash) {
2994 progress "using rewritten git hash in place of .dsc value";
2996 progress "server data says .dsc hash is to be disregarded";
3001 if (!defined git_cat_file $dsc_hash) {
3002 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
3003 my $lrf = $do_fetch->("additional commits", @tags) &&
3004 defined git_cat_file $dsc_hash
3006 .dsc Dgit metadata requires commit $dsc_hash
3007 but we could not obtain that object anywhere.
3009 foreach my $t (@tags) {
3010 my $fullrefname = $lrf.'/'.$t;
3011 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
3012 next unless $lrfetchrefs_f{$fullrefname};
3013 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
3014 lrfetchref_used $fullrefname;
3019 sub fetch_from_archive () {
3020 ensure_setup_existing_tree();
3022 # Ensures that lrref() is what is actually in the archive, one way
3023 # or another, according to us - ie this client's
3024 # appropriately-updated archive view. Also returns the commit id.
3025 # If there is nothing in the archive, leaves lrref alone and
3026 # returns undef. git_fetch_us must have already been called.
3030 parse_dsc_field($dsc, 'last upload to archive');
3031 resolve_dsc_field_commit access_basedistro,
3032 lrfetchrefs."/".$rewritemap
3034 progress "no version available from the archive";
3037 # If the archive's .dsc has a Dgit field, there are three
3038 # relevant git commitids we need to choose between and/or merge
3040 # 1. $dsc_hash: the Dgit field from the archive
3041 # 2. $lastpush_hash: the suite branch on the dgit git server
3042 # 3. $lastfetch_hash: our local tracking branch for the suite
3044 # These may all be distinct and need not be in any fast forward
3047 # If the dsc was pushed to this suite, then the server suite
3048 # branch will have been updated; but it might have been pushed to
3049 # a different suite and copied by the archive. Conversely a more
3050 # recent version may have been pushed with dgit but not appeared
3051 # in the archive (yet).
3053 # $lastfetch_hash may be awkward because archive imports
3054 # (particularly, imports of Dgit-less .dscs) are performed only as
3055 # needed on individual clients, so different clients may perform a
3056 # different subset of them - and these imports are only made
3057 # public during push. So $lastfetch_hash may represent a set of
3058 # imports different to a subsequent upload by a different dgit
3061 # Our approach is as follows:
3063 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
3064 # descendant of $dsc_hash, then it was pushed by a dgit user who
3065 # had based their work on $dsc_hash, so we should prefer it.
3066 # Otherwise, $dsc_hash was installed into this suite in the
3067 # archive other than by a dgit push, and (necessarily) after the
3068 # last dgit push into that suite (since a dgit push would have
3069 # been descended from the dgit server git branch); thus, in that
3070 # case, we prefer the archive's version (and produce a
3071 # pseudo-merge to overwrite the dgit server git branch).
3073 # (If there is no Dgit field in the archive's .dsc then
3074 # generate_commit_from_dsc uses the version numbers to decide
3075 # whether the suite branch or the archive is newer. If the suite
3076 # branch is newer it ignores the archive's .dsc; otherwise it
3077 # generates an import of the .dsc, and produces a pseudo-merge to
3078 # overwrite the suite branch with the archive contents.)
3080 # The outcome of that part of the algorithm is the `public view',
3081 # and is same for all dgit clients: it does not depend on any
3082 # unpublished history in the local tracking branch.
3084 # As between the public view and the local tracking branch: The
3085 # local tracking branch is only updated by dgit fetch, and
3086 # whenever dgit fetch runs it includes the public view in the
3087 # local tracking branch. Therefore if the public view is not
3088 # descended from the local tracking branch, the local tracking
3089 # branch must contain history which was imported from the archive
3090 # but never pushed; and, its tip is now out of date. So, we make
3091 # a pseudo-merge to overwrite the old imports and stitch the old
3094 # Finally: we do not necessarily reify the public view (as
3095 # described above). This is so that we do not end up stacking two
3096 # pseudo-merges. So what we actually do is figure out the inputs
3097 # to any public view pseudo-merge and put them in @mergeinputs.
3100 # $mergeinputs[]{Commit}
3101 # $mergeinputs[]{Info}
3102 # $mergeinputs[0] is the one whose tree we use
3103 # @mergeinputs is in the order we use in the actual commit)
3106 # $mergeinputs[]{Message} is a commit message to use
3107 # $mergeinputs[]{ReverseParents} if def specifies that parent
3108 # list should be in opposite order
3109 # Such an entry has no Commit or Info. It applies only when found
3110 # in the last entry. (This ugliness is to support making
3111 # identical imports to previous dgit versions.)
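# An illustrative example of the structure (cf. how it is filled in
# below):
#   @mergeinputs = ({ Commit => $dsc_hash, Info => "Dgit field in .dsc from archive" },
#                   { Commit => $lastpush_hash, Info => "dgit suite branch on dgit git server" },
#                   { Message => "...", ReverseParents => 1 });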
3113 my $lastpush_hash = git_get_ref(lrfetchref());
3114 printdebug "previous reference hash=$lastpush_hash\n";
3115 $lastpush_mergeinput = $lastpush_hash && {
3116 Commit => $lastpush_hash,
3117 Info => "dgit suite branch on dgit git server",
3120 my $lastfetch_hash = git_get_ref(lrref());
3121 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3122 my $lastfetch_mergeinput = $lastfetch_hash && {
3123 Commit => $lastfetch_hash,
3124 Info => "dgit client's archive history view",
3127 my $dsc_mergeinput = $dsc_hash && {
3128 Commit => $dsc_hash,
3129 Info => "Dgit field in .dsc from archive",
3133 my $del_lrfetchrefs = sub {
3136 printdebug "del_lrfetchrefs...\n";
3137 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3138 my $objid = $lrfetchrefs_d{$fullrefname};
3139 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3141 $gur ||= new IO::Handle;
3142 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3144 printf $gur "delete %s %s\n", $fullrefname, $objid;
3147 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3151 if (defined $dsc_hash) {
3152 ensure_we_have_orig();
3153 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3154 @mergeinputs = $dsc_mergeinput
3155 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3156 print STDERR <<END or die $!;
3158 Git commit in archive is behind the last version allegedly pushed/uploaded.
3159 Commit referred to by archive: $dsc_hash
3160 Last version pushed with dgit: $lastpush_hash
3163 @mergeinputs = ($lastpush_mergeinput);
3165 # Archive has .dsc which is not a descendant of the last dgit
3166 # push. This can happen if the archive moves .dscs about.
3167 # Just follow its lead.
3168 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3169 progress "archive .dsc names newer git commit";
3170 @mergeinputs = ($dsc_mergeinput);
3172 progress "archive .dsc names other git commit, fixing up";
3173 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3177 @mergeinputs = generate_commits_from_dsc();
3178 # We have just done an import. Now, our import algorithm might
3179 # have been improved. But even so we do not want to generate
3180 # a new different import of the same package. So if the
3181 # version numbers are the same, just use our existing version.
3182 # If the version numbers are different, the archive has changed
3183 # (perhaps, rewound).
3184 if ($lastfetch_mergeinput &&
3185 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3186 (mergeinfo_version $mergeinputs[0]) )) {
3187 @mergeinputs = ($lastfetch_mergeinput);
3189 } elsif ($lastpush_hash) {
3190 # only in git, not in the archive yet
3191 @mergeinputs = ($lastpush_mergeinput);
3192 print STDERR <<END or die $!;
3194 Package not found in the archive, but has allegedly been pushed using dgit.
3198 printdebug "nothing found!\n";
3199 if (defined $skew_warning_vsn) {
3200 print STDERR <<END or die $!;
3202 Warning: relevant archive skew detected.
3203 Archive allegedly contains $skew_warning_vsn
3204 But we were not able to obtain any version from the archive or git.
3208 unshift @end, $del_lrfetchrefs;
3212 if ($lastfetch_hash &&
3214 my $h = $_->{Commit};
3215 $h and is_fast_fwd($lastfetch_hash, $h);
3216 # If true, one of the existing parents of this commit
3217 # is a descendant of the $lastfetch_hash, so we'll
3218 # be ff from that automatically.
3222 push @mergeinputs, $lastfetch_mergeinput;
3225 printdebug "fetch mergeinfos:\n";
3226 foreach my $mi (@mergeinputs) {
3228 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3230 printdebug sprintf " ReverseParents=%d Message=%s",
3231 $mi->{ReverseParents}, $mi->{Message};
3235 my $compat_info= pop @mergeinputs
3236 if $mergeinputs[$#mergeinputs]{Message};
3238 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3241 if (@mergeinputs > 1) {
3243 my $tree_commit = $mergeinputs[0]{Commit};
3245 my $tree = get_tree_of_commit $tree_commit;
3247 # We use the changelog author of the package in question as the
3248 # author of this pseudo-merge. This is (roughly) correct if
3249 # this commit is simply representing a non-dgit upload.
3250 # (Roughly because it does not record sponsorship - but we
3251 # don't have sponsorship info because that's in the .changes,
3252 # which isn't in the archive.)
3254 # But, it might be that we are representing archive history
3255 # updates (including in-archive copies). These are not really
3256 # the responsibility of the person who created the .dsc, but
3257 # there is no-one whose name we should better use. (The
3258 # author of the .dsc-named commit is clearly worse.)
3260 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3261 my $author = clogp_authline $useclogp;
3262 my $cversion = getfield $useclogp, 'Version';
3264 my $mcf = dgit_privdir()."/mergecommit";
3265 open MC, ">", $mcf or die "$mcf $!";
3266 print MC <<END or die $!;
3270 my @parents = grep { $_->{Commit} } @mergeinputs;
3271 @parents = reverse @parents if $compat_info->{ReverseParents};
3272 print MC <<END or die $! foreach @parents;
3276 print MC <<END or die $!;
3282 if (defined $compat_info->{Message}) {
3283 print MC $compat_info->{Message} or die $!;
3285 print MC <<END or die $!;
3286 Record $package ($cversion) in archive suite $csuite
3290 my $message_add_info = sub {
3292 my $mversion = mergeinfo_version $mi;
3293 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3297 $message_add_info->($mergeinputs[0]);
3298 print MC <<END or die $!;
3299 should be treated as descended from
3301 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
3305 $hash = make_commit $mcf;
3307 $hash = $mergeinputs[0]{Commit};
3309 printdebug "fetch hash=$hash\n";
3312 my ($lasth, $what) = @_;
3313 return unless $lasth;
3314 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3317 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3319 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3321 fetch_from_archive_record_1($hash);
3323 if (defined $skew_warning_vsn) {
3324 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3325 my $gotclogp = commit_getclogp($hash);
3326 my $got_vsn = getfield $gotclogp, 'Version';
3327 printdebug "SKEW CHECK GOT $got_vsn\n";
3328 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3329 print STDERR <<END or die $!;
3331 Warning: archive skew detected. Using the available version:
3332 Archive allegedly contains $skew_warning_vsn
3333 We were able to obtain only $got_vsn
3339 if ($lastfetch_hash ne $hash) {
3340 fetch_from_archive_record_2($hash);
3343 lrfetchref_used lrfetchref();
3345 check_gitattrs($hash, "fetched source tree");
3347 unshift @end, $del_lrfetchrefs;
3351 sub set_local_git_config ($$) {
3353 runcmd @git, qw(config), $k, $v;
3356 sub setup_mergechangelogs (;$) {
3358 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3360 my $driver = 'dpkg-mergechangelogs';
3361 my $cb = "merge.$driver";
3362 confess unless defined $maindir;
3363 my $attrs = "$maindir_gitcommon/info/attributes";
3364 ensuredir "$maindir_gitcommon/info";
3366 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3367 if (!open ATTRS, "<", $attrs) {
3368 $!==ENOENT or die "$attrs: $!";
3372 next if m{^debian/changelog\s};
3373 print NATTRS $_, "\n" or die $!;
3375 ATTRS->error and die $!;
3378 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3381 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3382 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
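# The net effect (illustrative) is equivalent to running
#   git config merge.dpkg-mergechangelogs.name 'debian/changelog merge driver'
#   git config merge.dpkg-mergechangelogs.driver 'dpkg-mergechangelogs -m %O %A %B %A'
# plus ensuring a "debian/changelog merge=dpkg-mergechangelogs" line
# in $maindir_gitcommon/info/attributes.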
3384 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3387 sub setup_useremail (;$) {
3389 return unless $always || access_cfg_bool(1, 'setup-useremail');
3392 my ($k, $envvar) = @_;
3393 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3394 return unless defined $v;
3395 set_local_git_config "user.$k", $v;
3398 $setup->('email', 'DEBEMAIL');
3399 $setup->('name', 'DEBFULLNAME');
3402 sub ensure_setup_existing_tree () {
3403 my $k = "remote.$remotename.skipdefaultupdate";
3404 my $c = git_get_config $k;
3405 return if defined $c;
3406 set_local_git_config $k, 'true';
3409 sub open_main_gitattrs () {
3410 confess 'internal error no maindir' unless defined $maindir;
3411 my $gai = new IO::File "$maindir_gitcommon/info/attributes"
3413 or die "open $maindir_gitcommon/info/attributes: $!";
3417 our $gitattrs_ourmacro_re = qr{^\[attr\]dgit-defuse-attrs\s};
3419 sub is_gitattrs_setup () {
3422 # 1: gitattributes set up and should be left alone
3424 # 0: there is a dgit-defuse-attrs but it needs fixing
3425 # undef: there is none
3426 my $gai = open_main_gitattrs();
3427 return 0 unless $gai;
3429 next unless m{$gitattrs_ourmacro_re};
3430 return 1 if m{\s-working-tree-encoding\s};
3431 printdebug "is_gitattrs_setup: found old macro\n";
3434 $gai->error and die $!;
3435 printdebug "is_gitattrs_setup: found nothing\n";
3439 sub setup_gitattrs (;$) {
3441 return unless $always || access_cfg_bool(1, 'setup-gitattributes');
3443 my $already = is_gitattrs_setup();
3446 [attr]dgit-defuse-attrs already found, and proper, in .git/info/attributes
3447 not doing further gitattributes setup
3451 my $new = "[attr]dgit-defuse-attrs $negate_harmful_gitattrs";
3452 my $af = "$maindir_gitcommon/info/attributes";
3453 ensuredir "$maindir_gitcommon/info";
3455 open GAO, "> $af.new" or die $!;
3456 print GAO <<END or die $! unless defined $already;
3459 # ^ see GITATTRIBUTES in dgit(7) and dgit setup-new-tree in dgit(1)
3461 my $gai = open_main_gitattrs();
3464 if (m{$gitattrs_ourmacro_re}) {
3465 die unless defined $already;
3469 print GAO $_, "\n" or die $!;
3471 $gai->error and die $!;
3473 close GAO or die $!;
3474 rename "$af.new", "$af" or die "install $af: $!";
3477 sub setup_new_tree () {
3478 setup_mergechangelogs();
3483 sub check_gitattrs ($$) {
3484 my ($treeish, $what) = @_;
3486 return if is_gitattrs_setup;
3489 my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:");
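# Each NUL-terminated record from ls-tree -l looks roughly like
#   <mode> <type> <objectid> <size>\t<path>
# and the substitution below strips everything up to the tab.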
3491 my $gafl = new IO::File;
3492 open $gafl, "-|", @cmd or die $!;
3495 s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die;
3497 next unless m{(?:^|/)\.gitattributes$};
3499 # oh dear, found one
3501 dgit: warning: $what contains .gitattributes
3502 dgit: .gitattributes not (fully) defused. Recommended: dgit setup-new-tree.
3507 # tree contains no .gitattributes files
3508 $?=0; $!=0; close $gafl or failedcmd @cmd;
3512 sub multisuite_suite_child ($$$) {
3513 my ($tsuite, $mergeinputs, $fn) = @_;
3514 # in child, sets things up, calls $fn->(), and returns undef
3515 # in parent, returns canonical suite name for $tsuite
3516 my $canonsuitefh = IO::File::new_tmpfile;
3517 my $pid = fork // die $!;
3521 $us .= " [$isuite]";
3522 $debugprefix .= " ";
3523 progress "fetching $tsuite...";
3524 canonicalise_suite();
3525 print $canonsuitefh $csuite, "\n" or die $!;
3526 close $canonsuitefh or die $!;
3530 (waitpid $pid, 0) == $pid or die $!;
3531 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3532 seek $canonsuitefh,0,0 or die $!;
3533 local $csuite = <$canonsuitefh>;
3534 die $! unless defined $csuite && chomp $csuite;
3536 printdebug "multisuite $tsuite missing\n";
3539 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3540 push @$mergeinputs, {
3547 sub fork_for_multisuite ($) {
3548 my ($before_fetch_merge) = @_;
3549 # if nothing unusual, just returns ''
3552 # returns 0 to caller in child, to do first of the specified suites
3553 # in child, $csuite is not yet set
3555 # returns 1 to caller in parent, to finish up anything needed after
3556 # in parent, $csuite is set to canonicalised portmanteau
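# For example (illustrative): a requested suite of "bullseye,-security"
# fetches "bullseye" first, then "bullseye-security" (the leading "-"
# is expanded using the canonicalised base suite), and $csuite ends up
# as "bullseye,-security".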
3558 my $org_isuite = $isuite;
3559 my @suites = split /\,/, $isuite;
3560 return '' unless @suites > 1;
3561 printdebug "fork_for_multisuite: @suites\n";
3565 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3567 return 0 unless defined $cbasesuite;
3569 fail "package $package missing in (base suite) $cbasesuite"
3570 unless @mergeinputs;
3572 my @csuites = ($cbasesuite);
3574 $before_fetch_merge->();
3576 foreach my $tsuite (@suites[1..$#suites]) {
3577 $tsuite =~ s/^-/$cbasesuite-/;
3578 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3585 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3586 push @csuites, $csubsuite;
3589 foreach my $mi (@mergeinputs) {
3590 my $ref = git_get_ref $mi->{Ref};
3591 die "$mi->{Ref} ?" unless length $ref;
3592 $mi->{Commit} = $ref;
3595 $csuite = join ",", @csuites;
3597 my $previous = git_get_ref lrref;
3599 unshift @mergeinputs, {
3600 Commit => $previous,
3601 Info => "local combined tracking branch",
3603 "archive seems to have rewound: local tracking branch is ahead!",
3607 foreach my $ix (0..$#mergeinputs) {
3608 $mergeinputs[$ix]{Index} = $ix;
3611 @mergeinputs = sort {
3612 -version_compare(mergeinfo_version $a,
3613 mergeinfo_version $b) # highest version first
3615 $a->{Index} <=> $b->{Index}; # earliest in spec first
3621 foreach my $mi (@mergeinputs) {
3622 printdebug "multisuite merge check $mi->{Info}\n";
3623 foreach my $previous (@needed) {
3624 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3625 printdebug "multisuite merge un-needed $previous->{Info}\n";
3629 printdebug "multisuite merge this-needed\n";
3630 $mi->{Character} = '+';
3633 $needed[0]{Character} = '*';
3635 my $output = $needed[0]{Commit};
3638 printdebug "multisuite merge nontrivial\n";
3639 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3641 my $commit = "tree $tree\n";
3642 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3643 "Input branches:\n";
3645 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3646 printdebug "multisuite merge include $mi->{Info}\n";
3647 $mi->{Character} //= ' ';
3648 $commit .= "parent $mi->{Commit}\n";
3649 $msg .= sprintf " %s %-25s %s\n",
3651 (mergeinfo_version $mi),
3654 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3656 " * marks the highest version branch, which choose to use\n".
3657 " + marks each branch which was not already an ancestor\n\n".
3658 "[dgit multi-suite $csuite]\n";
3660 "author $authline\n".
3661 "committer $authline\n\n";
3662 $output = make_commit_text $commit.$msg;
3663 printdebug "multisuite merge generated $output\n";
3666 fetch_from_archive_record_1($output);
3667 fetch_from_archive_record_2($output);
3669 progress "calculated combined tracking suite $csuite";
3674 sub clone_set_head () {
3675 open H, "> .git/HEAD" or die $!;
3676 print H "ref: ".lref()."\n" or die $!;
3679 sub clone_finish ($) {
3681 runcmd @git, qw(reset --hard), lrref();
3682 runcmd qw(bash -ec), <<'END';
3684 git ls-tree -r --name-only -z HEAD | \
3685 xargs -0r touch -h -r . --
3687 printdone "ready for work in $dstdir";
3691 # in multisuite, returns twice!
3692 # once in parent after first suite fetched,
3693 # and then again in child after everything is finished
3695 badusage "dry run makes no sense with clone" unless act_local();
3697 my $multi_fetched = fork_for_multisuite(sub {
3698 printdebug "multi clone before fetch merge\n";