3 # Integration between git and Debian-style archives
5 # Copyright (C) 2013-2016 Ian Jackson
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation, either version 3 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
28 use Dpkg::Control::Hash;
30 use File::Temp qw(tempdir);
37 use List::MoreUtils qw(pairwise);
38 use Text::Glob qw(match_glob);
39 use Fcntl qw(:DEFAULT :flock);
44 our $our_version = 'UNRELEASED'; ###substituted###
45 our $absurdity = undef; ###substituted###
47 our @rpushprotovsn_support = qw(4 3 2); # 4 is new tag format
56 our $dryrun_level = 0;
58 our $buildproductsdir = '..';
64 our $existing_package = 'dpkg';
66 our $changes_since_version;
68 our $overwrite_version; # undef: not specified; '': check changelog
70 our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
72 our $split_brain_save;
73 our $we_are_responder;
74 our $we_are_initiator;
75 our $initiator_tempdir;
76 our $patches_applied_dirtily = 00;
80 our $chase_dsc_distro=1;
82 our %forceopts = map { $_=>0 }
83 qw(unrepresentable unsupported-source-format
84 dsc-changes-mismatch changes-origs-exactly
85 import-gitapply-absurd
86 import-gitapply-no-absurd
87 import-dsc-with-dgit-field);
89 our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
91 our $suite_re = '[-+.0-9a-z]+';
92 our $cleanmode_re = 'dpkg-source(?:-d)?|git|git-ff|check|none';
93 our $orig_f_comp_re = 'orig(?:-[-0-9a-z]+)?';
94 our $orig_f_sig_re = '\\.(?:asc|gpg|pgp)';
95 our $orig_f_tail_re = "$orig_f_comp_re\\.tar(?:\\.\\w+)?(?:$orig_f_sig_re)?";
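# For example (illustrative filenames): a name ending in ".orig.tar.gz",
# ".orig-docs.tar.xz" or ".orig.tar.gz.asc" has a tail matching
# $orig_f_tail_re, whereas one ending in ".debian.tar.xz" does not.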
97 our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
98 our $splitbraincache = 'dgit-intern/quilt-cache';
99 our $rewritemap = 'dgit-rewrite/map';
101 our (@git) = qw(git);
102 our (@dget) = qw(dget);
103 our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L));
104 our (@dput) = qw(dput);
105 our (@debsign) = qw(debsign);
106 our (@gpg) = qw(gpg);
107 our (@sbuild) = qw(sbuild);
109 our (@dgit) = qw(dgit);
110 our (@aptget) = qw(apt-get);
111 our (@aptcache) = qw(apt-cache);
112 our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
113 our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
114 our (@dpkggenchanges) = qw(dpkg-genchanges);
115 our (@mergechanges) = qw(mergechanges -f);
116 our (@gbp_build) = ('');
117 our (@gbp_pq) = ('gbp pq');
118 our (@changesopts) = ('');
120 our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
123 'debsign' => \@debsign,
125 'sbuild' => \@sbuild,
129 'apt-get' => \@aptget,
130 'apt-cache' => \@aptcache,
131 'dpkg-source' => \@dpkgsource,
132 'dpkg-buildpackage' => \@dpkgbuildpackage,
133 'dpkg-genchanges' => \@dpkggenchanges,
134 'gbp-build' => \@gbp_build,
135 'gbp-pq' => \@gbp_pq,
136 'ch' => \@changesopts,
137 'mergechanges' => \@mergechanges);
139 our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1);
140 our %opts_cfg_insertpos = map {
142 scalar @{ $opts_opt_map{$_} }
143 } keys %opts_opt_map;
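# %opts_cfg_insertpos records how many words each of these commands has at
# startup; options coming from git config are later spliced in at that
# position, i.e. just after the words listed above.  For example (an
# illustrative setting), something like
#   git config dgit-distro.debian.opts-dpkg-buildpackage -j4
# would add -j4 to @dpkgbuildpackage, while the cmd-only entries (gpg, git)
# can only have their command replaced, not be given extra options.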
145 sub parseopts_late_defaults();
146 sub setup_gitattrs(;$);
147 sub check_gitattrs($$);
153 our $supplementary_message = '';
154 our $need_split_build_invocation = 0;
155 our $split_brain = 0;
159 return unless forkcheck_mainprocess();
160 print STDERR "! $_\n" foreach $supplementary_message =~ m/^.+$/mg;
163 our $remotename = 'dgit';
164 our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
168 if (!defined $absurdity) {
170 $absurdity =~ s{/[^/]+$}{/absurd} or die;
174 my ($v,$distro) = @_;
175 return $tagformatfn->($v, $distro);
178 sub debiantag_maintview ($$) {
179 my ($v,$distro) = @_;
180 return "$distro/".dep14_version_mangle $v;
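# e.g. (illustrative version) debiantag_maintview("1:2.10-1~bpo9+1", "debian")
# gives "debian/1%2.10-1_bpo9+1", since DEP-14 version mangling maps
# ':' to '%' and '~' to '_'.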
183 sub madformat ($) { $_[0] eq '3.0 (quilt)' }
185 sub lbranch () { return "$branchprefix/$csuite"; }
186 my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
187 sub lref () { return "refs/heads/".lbranch(); }
188 sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
189 sub rrref () { return server_ref($csuite); }
199 return "${package}_".(stripepoch $vsn).$sfx
204 return srcfn($vsn,".dsc");
207 sub changespat ($;$) {
208 my ($vsn, $arch) = @_;
209 return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
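# e.g. with an illustrative $package of "hello" and version "1:2.10-1":
#   srcfn("1:2.10-1", ".dsc")       -> "hello_2.10-1.dsc"   (epoch stripped)
#   dscfn("1:2.10-1")               -> "hello_2.10-1.dsc"
#   changespat("1:2.10-1")          -> "hello_2.10-1_*.changes"
#   changespat("1:2.10-1", "amd64") -> "hello_2.10-1_amd64.changes"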
212 sub upstreamversion ($) {
224 return unless forkcheck_mainprocess();
225 foreach my $f (@end) {
227 print STDERR "$us: cleanup: $@" if length $@;
231 sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
233 sub forceable_fail ($$) {
234 my ($forceoptsl, $msg) = @_;
235 fail $msg unless grep { $forceopts{$_} } @$forceoptsl;
236 print STDERR "warning: overriding problem due to --force:\n". $msg;
240 my ($forceoptsl) = @_;
241 my @got = grep { $forceopts{$_} } @$forceoptsl;
242 return 0 unless @got;
244 "warning: skipping checks or functionality due to --force-$got[0]\n";
247 sub no_such_package () {
248 print STDERR "$us: package $package does not exist in suite $isuite\n";
254 printdebug "CD $newdir\n";
255 chdir $newdir or confess "chdir: $newdir: $!";
258 sub deliberately ($) {
260 return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies;
263 sub deliberately_not_fast_forward () {
264 foreach (qw(not-fast-forward fresh-repo)) {
265 return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_");
269 sub quiltmode_splitbrain () {
270 $quilt_mode =~ m/gbp|dpm|unapplied/;
273 sub opts_opt_multi_cmd {
275 push @cmd, split /\s+/, shift @_;
281 return opts_opt_multi_cmd @gbp_pq;
284 #---------- remote protocol support, common ----------
286 # remote push initiator/responder protocol:
287 # $ dgit remote-push-build-host <n-rargs> <rargs>... <push-args>...
288 # where <rargs> is <push-host-dir> <supported-proto-vsn>,... ...
289 # < dgit-remote-push-ready <actual-proto-vsn>
296 # > supplementary-message NBYTES # $protovsn >= 3
301 # > file parsed-changelog
302 # [indicates that output of dpkg-parsechangelog follows]
303 # > data-block NBYTES
304 # > [NBYTES bytes of data (no newline)]
305 # [maybe some more blocks]
314 # > param head DGIT-VIEW-HEAD
315 # > param csuite SUITE
316 # > param tagformat old|new
317 # > param maint-view MAINT-VIEW-HEAD
319 # > param buildinfo-filename P_V_X.buildinfo # zero or more times
320 # > file buildinfo # for buildinfos to sign
322 # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward
323 # # goes into tag, for replay prevention
326 # [indicates that signed tag is wanted]
327 # < data-block NBYTES
328 # < [NBYTES bytes of data (no newline)]
329 # [maybe some more blocks]
333 # > want signed-dsc-changes
334 # < data-block NBYTES [transfer of signed dsc]
336 # < data-block NBYTES [transfer of signed changes]
338 # < data-block NBYTES [transfer of each signed buildinfo
339 # [etc] same number and order as "file buildinfo"]
347 sub i_child_report () {
348 # Sees if our child has died, and reaps it if so. Returns a string
349 # describing how it died if it failed, or undef otherwise.
350 return undef unless $i_child_pid;
351 my $got = waitpid $i_child_pid, WNOHANG;
352 return undef if $got <= 0;
353 die unless $got == $i_child_pid;
354 $i_child_pid = undef;
355 return undef unless $?;
356 return "build host child ".waitstatusmsg();
361 fail "connection lost: $!" if $fh->error;
362 fail "protocol violation; $m not expected";
365 sub badproto_badread ($$) {
367 fail "connection lost: $!" if $!;
368 my $report = i_child_report();
369 fail $report if defined $report;
370 badproto $fh, "eof (reading $wh)";
373 sub protocol_expect (&$) {
374 my ($match, $fh) = @_;
377 defined && chomp or badproto_badread $fh, "protocol message";
385 badproto $fh, "\`$_'";
388 sub protocol_send_file ($$) {
389 my ($fh, $ourfn) = @_;
390 open PF, "<", $ourfn or die "$ourfn: $!";
393 my $got = read PF, $d, 65536;
394 die "$ourfn: $!" unless defined $got;
396 print $fh "data-block ".length($d)."\n" or die $!;
397 print $fh $d or die $!;
399 PF->error and die "$ourfn $!";
400 print $fh "data-end\n" or die $!;
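# So a 10-byte file, say, goes over the wire as
#     "data-block 10\n", the 10 bytes themselves (no trailing newline),
#     then "data-end\n"
# with one data-block chunk per read() of up to 65536 bytes.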
404 sub protocol_read_bytes ($$) {
405 my ($fh, $nbytes) = @_;
406 $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
408 my $got = read $fh, $d, $nbytes;
409 $got==$nbytes or badproto_badread $fh, "data block";
413 sub protocol_receive_file ($$) {
414 my ($fh, $ourfn) = @_;
415 printdebug "() $ourfn\n";
416 open PF, ">", $ourfn or die "$ourfn: $!";
418 my ($y,$l) = protocol_expect {
419 m/^data-block (.*)$/ ? (1,$1) :
420 m/^data-end$/ ? (0,) :
424 my $d = protocol_read_bytes $fh, $l;
425 print PF $d or die $!;
430 #---------- remote protocol support, responder ----------
432 sub responder_send_command ($) {
434 return unless $we_are_responder;
435 # callers invoke this even when we are not the responder, hence the early return above
436 printdebug ">> $command\n";
437 print PO $command, "\n" or die $!;
440 sub responder_send_file ($$) {
441 my ($keyword, $ourfn) = @_;
442 return unless $we_are_responder;
443 printdebug "]] $keyword $ourfn\n";
444 responder_send_command "file $keyword";
445 protocol_send_file \*PO, $ourfn;
448 sub responder_receive_files ($@) {
449 my ($keyword, @ourfns) = @_;
450 die unless $we_are_responder;
451 printdebug "[[ $keyword @ourfns\n";
452 responder_send_command "want $keyword";
453 foreach my $fn (@ourfns) {
454 protocol_receive_file \*PI, $fn;
457 protocol_expect { m/^files-end$/ } \*PI;
460 #---------- remote protocol support, initiator ----------
462 sub initiator_expect (&) {
464 protocol_expect { &$match } \*RO;
467 #---------- end remote code ----------
470 if ($we_are_responder) {
472 responder_send_command "progress ".length($m) or die $!;
473 print PO $m or die $!;
483 $ua = LWP::UserAgent->new();
487 progress "downloading $what...";
488 my $r = $ua->get(@_) or die $!;
489 return undef if $r->code == 404;
490 $r->is_success or fail "failed to fetch $what: ".$r->status_line;
491 return $r->decoded_content(charset => 'none');
494 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
496 sub act_local () { return $dryrun_level <= 1; }
497 sub act_scary () { return !$dryrun_level; }
500 if (!$dryrun_level) {
501 progress "$us ok: @_";
503 progress "would be ok: @_ (but dry run only)";
508 printcmd(\*STDERR,$debugprefix."#",@_);
511 sub runcmd_ordryrun {
519 sub runcmd_ordryrun_local {
528 my ($first_shell, @cmd) = @_;
529 return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
532 our $helpmsg = <<END;
534 dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
535 dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
536 dgit [dgit-opts] build [dpkg-buildpackage-opts]
537 dgit [dgit-opts] sbuild [sbuild-opts]
538 dgit [dgit-opts] push [dgit-opts] [suite]
539 dgit [dgit-opts] rpush build-host:build-dir ...
540 important dgit options:
541 -k<keyid> sign tag and package with <keyid> instead of default
542 --dry-run -n do not change anything, but go through the motions
543 --damp-run -L like --dry-run but make local changes, without signing
544 --new -N allow introducing a new package
545 --debug -D increase debug level
546 -c<name>=<value> set git config option (used directly by dgit too)
549 our $later_warning_msg = <<END;
550 Perhaps the upload is stuck in incoming. Using the version from git.
554 print STDERR "$us: @_\n", $helpmsg or die $!;
559 @ARGV or badusage "too few arguments";
560 return scalar shift @ARGV;
567 print $helpmsg or die $!;
571 our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
573 our %defcfg = ('dgit.default.distro' => 'debian',
574 'dgit.default.default-suite' => 'unstable',
575 'dgit.default.old-dsc-distro' => 'debian',
576 'dgit-suite.*-security.distro' => 'debian-security',
577 'dgit.default.username' => '',
578 'dgit.default.archive-query-default-component' => 'main',
579 'dgit.default.ssh' => 'ssh',
580 'dgit.default.archive-query' => 'madison:',
581 'dgit.default.sshpsql-dbname' => 'service=projectb',
582 'dgit.default.aptget-components' => 'main',
583 'dgit.default.dgit-tag-format' => 'new,old,maint',
584 'dgit.dsc-url-proto-ok.http' => 'true',
585 'dgit.dsc-url-proto-ok.https' => 'true',
586 'dgit.dsc-url-proto-ok.git' => 'true',
587 'dgit.default.dsc-url-proto-ok' => 'false',
588 # old means "repo server accepts pushes with old dgit tags"
589 # new means "repo server accepts pushes with new dgit tags"
590 # maint means "repo server accepts split brain pushes"
591 # hist means "repo server may have old pushes without new tag"
592 # ("hist" is implied by "old")
593 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
594 'dgit-distro.debian.git-check' => 'url',
595 'dgit-distro.debian.git-check-suffix' => '/info/refs',
596 'dgit-distro.debian.new-private-pushers' => 't',
597 'dgit-distro.debian/push.git-url' => '',
598 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
599 'dgit-distro.debian/push.git-user-force' => 'dgit',
600 'dgit-distro.debian/push.git-proto' => 'git+ssh://',
601 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos',
602 'dgit-distro.debian/push.git-create' => 'true',
603 'dgit-distro.debian/push.git-check' => 'ssh-cmd',
604 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
605 # 'dgit-distro.debian.archive-query-tls-key',
606 # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
607 # ^ this does not work because curl is broken nowadays
608 # Fixing #790093 properly will involve providing the key
609 # in some package and maybe updating these paths.
611 # 'dgit-distro.debian.archive-query-tls-curl-args',
612 # '--ca-path=/etc/ssl/ca-debian',
613 # ^ this is a workaround but works (only) on DSA-administered machines
614 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org',
615 'dgit-distro.debian.git-url-suffix' => '',
616 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
617 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
618 'dgit-distro.debian-security.archive-query' => 'aptget:',
619 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
620 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
621 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
622 'dgit-distro.debian-security.nominal-distro' => 'debian',
623 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
624 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
625 'dgit-distro.ubuntu.git-check' => 'false',
626 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
627 'dgit-distro.test-dummy.ssh' => "$td/ssh",
628 'dgit-distro.test-dummy.username' => "alice",
629 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
630 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
631 'dgit-distro.test-dummy.git-url' => "$td/git",
632 'dgit-distro.test-dummy.git-host' => "git",
633 'dgit-distro.test-dummy.git-path' => "$td/git",
634 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
635 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
636 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
637 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
641 our @gitcfgsources = qw(cmdline local global system);
643 sub git_slurp_config () {
644 # This algorithm is a bit subtle, but this is needed so that for
645 # options which we want to be single-valued, we allow the
646 # different config sources to override properly. See #835858.
647 foreach my $src (@gitcfgsources) {
648 next if $src eq 'cmdline';
649 # we do this ourselves since git doesn't handle it
651 $gitcfgs{$src} = git_slurp_config_src $src;
655 sub git_get_config ($) {
657 foreach my $src (@gitcfgsources) {
658 my $l = $gitcfgs{$src}{$c};
659 confess "internal error ($l $c)" if $l && !ref $l;
660 printdebug"C $c ".(defined $l ?
661 join " ", map { messagequote "'$_'" } @$l :
665 @$l==1 or badcfg "multiple values for $c".
666 " (in $src git config)" if @$l > 1;
674 return undef if $c =~ /RETURN-UNDEF/;
675 printdebug "C? $c\n" if $debuglevel >= 5;
676 my $v = git_get_config($c);
677 return $v if defined $v;
678 my $dv = $defcfg{$c};
680 printdebug "CD $c $dv\n" if $debuglevel >= 4;
684 badcfg "need value for one of: @_\n".
685 "$us: distro or suite appears not to be (properly) supported";
688 sub no_local_git_cfg () {
689 # needs to be called from pre_*
690 @gitcfgsources = grep { $_ ne 'local' } @gitcfgsources;
693 sub access_basedistro__noalias () {
694 if (defined $idistro) {
697 my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
698 return $def if defined $def;
699 foreach my $src (@gitcfgsources, 'internal') {
700 my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
702 foreach my $k (keys %$kl) {
703 next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
705 next unless match_glob $dpat, $isuite;
709 return cfg("dgit.default.distro");
713 sub access_basedistro () {
714 my $noalias = access_basedistro__noalias();
715 my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF');
716 return $canon // $noalias;
719 sub access_nomdistro () {
720 my $base = access_basedistro();
721 my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
722 $r =~ m/^$distro_re$/ or badcfg
723 "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
727 sub access_quirk () {
728 # returns (quirk name, distro to use instead or undef, quirk-specific info)
729 my $basedistro = access_basedistro();
730 my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
732 if (defined $backports_quirk) {
733 my $re = $backports_quirk;
734 $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
736 $re =~ s/\%/([-0-9a-z_]+)/
737 or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
738 if ($isuite =~ m/^$re$/) {
739 return ('backports',"$basedistro-backports",$1);
742 return ('none',undef);
747 sub parse_cfg_bool ($$$) {
748 my ($what,$def,$v) = @_;
751 $v =~ m/^[ty1]/ ? 1 :
752 $v =~ m/^[fn0]/ ? 0 :
753 badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
756 sub access_forpush_config () {
757 my $d = access_basedistro();
761 parse_cfg_bool('new-private-pushers', 0,
762 cfg("dgit-distro.$d.new-private-pushers",
765 my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF');
768 $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
769 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
770 $v =~ m/^[a]/ ? '' : # auto, forpush = ''
771 badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
774 sub access_forpush () {
775 $access_forpush //= access_forpush_config();
776 return $access_forpush;
780 die "$access_forpush ?" if ($access_forpush // 1) ne 1;
781 badcfg "pushing but distro is configured readonly"
782 if access_forpush_config() eq '0';
784 $supplementary_message = <<'END' unless $we_are_responder;
785 Push failed, before we got started.
786 You can retry the push, after fixing the problem, if you like.
788 parseopts_late_defaults();
792 parseopts_late_defaults();
795 sub supplementary_message ($) {
797 if (!$we_are_responder) {
798 $supplementary_message = $msg;
800 } elsif ($protovsn >= 3) {
801 responder_send_command "supplementary-message ".length($msg)
803 print PO $msg or die $!;
807 sub access_distros () {
808 # Returns list of distros to try, in order
811 # 0. `instead of' distro name(s) we have been pointed to
812 # 1. the access_quirk distro, if any
813 # 2a. the user's specified distro, or failing that } basedistro
814 # 2b. the distro calculated from the suite }
815 my @l = access_basedistro();
817 my (undef,$quirkdistro) = access_quirk();
818 unshift @l, $quirkdistro;
819 unshift @l, $instead_distro;
820 @l = grep { defined } @l;
822 push @l, access_nomdistro();
824 if (access_forpush()) {
825 @l = map { ("$_/push", $_) } @l;
830 sub access_cfg_cfgs (@) {
833 # The nesting of these loops determines the search order. We put
834 # the distro loop on the outside so that we search all the keys
835 # for each distro, before going on to the next distro. That means that
836 # if access_cfg is called with a more specific, and then a less
837 # specific, key, an earlier distro can override the less specific
838 # without necessarily overriding any more specific keys. (If the
839 # distro wants to override the more specific keys it can simply do
840 # so; whereas if we did the loop the other way around, it would be
841 # impossible for an earlier distro to override a less specific
842 # key but not the more specific ones without restating the unknown
843 # values of the more specific keys.)
846 # We have to deal with RETURN-UNDEF specially, so that we don't
847 # terminate the search prematurely.
849 if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
852 foreach my $d (access_distros()) {
853 push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
855 push @cfgs, map { "dgit.default.$_" } @realkeys;
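# So, for example, looking up 'git-url' while pushing to Debian searches
# roughly:
#     dgit-distro.debian/push.git-url
#     dgit-distro.debian.git-url
#     dgit.default.git-url
# i.e. each distro in turn for the requested key(s), then the
# dgit.default fallback.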
862 my (@cfgs) = access_cfg_cfgs(@keys);
863 my $value = cfg(@cfgs);
867 sub access_cfg_bool ($$) {
868 my ($def, @keys) = @_;
869 parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF'));
872 sub string_to_ssh ($) {
874 if ($spec =~ m/\s/) {
875 return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
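# e.g. (illustrative value) an ssh spec of "ssh -o BatchMode=yes" becomes
#     sh -ec 'exec ssh -o BatchMode=yes "$@"' x
# so a multi-word configured ssh command is run via the shell.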
881 sub access_cfg_ssh () {
882 my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
883 if (!defined $gitssh) {
886 return string_to_ssh $gitssh;
890 sub access_runeinfo ($) {
892 return ": dgit ".access_basedistro()." $info ;";
895 sub access_someuserhost ($) {
897 my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
898 defined($user) && length($user) or
899 $user = access_cfg("$some-user",'username');
900 my $host = access_cfg("$some-host");
901 return length($user) ? "$user\@$host" : $host;
904 sub access_gituserhost () {
905 return access_someuserhost('git');
908 sub access_giturl (;$) {
910 my $url = access_cfg('git-url','RETURN-UNDEF');
913 my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
914 return undef unless defined $proto;
917 access_gituserhost().
918 access_cfg('git-path');
920 $suffix = access_cfg('git-url-suffix','RETURN-UNDEF');
923 return "$url/$package$suffix";
926 sub parsecontrolfh ($$;$) {
927 my ($fh, $desc, $allowsigned) = @_;
928 our $dpkgcontrolhash_noissigned;
931 my %opts = ('name' => $desc);
932 $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
933 $c = Dpkg::Control::Hash->new(%opts);
934 $c->parse($fh,$desc) or die "parsing of $desc failed";
935 last if $allowsigned;
936 last if $dpkgcontrolhash_noissigned;
937 my $issigned= $c->get_option('is_pgp_signed');
938 if (!defined $issigned) {
939 $dpkgcontrolhash_noissigned= 1;
940 seek $fh, 0,0 or die "seek $desc: $!";
941 } elsif ($issigned) {
942 fail "control file $desc is (already) PGP-signed. ".
943 " Note that dgit push needs to modify the .dsc and then".
944 " do the signature itself";
953 my ($file, $desc, $allowsigned) = @_;
954 my $fh = new IO::Handle;
955 open $fh, '<', $file or die "$file: $!";
956 my $c = parsecontrolfh($fh,$desc,$allowsigned);
957 $fh->error and die $!;
963 my ($dctrl,$field) = @_;
964 my $v = $dctrl->{$field};
965 return $v if defined $v;
966 fail "missing field $field in ".$dctrl->get_option('name');
970 my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
971 my $p = new IO::Handle;
972 my @cmd = (qw(dpkg-parsechangelog), @_);
973 open $p, '-|', @cmd or die $!;
975 $?=0; $!=0; close $p or failedcmd @cmd;
979 sub commit_getclogp ($) {
980 # Returns the parsed changelog hashref for a particular commit
982 our %commit_getclogp_memo;
983 my $memo = $commit_getclogp_memo{$objid};
984 return $memo if $memo;
986 my $mclog = ".git/dgit/clog-$objid";
987 runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob),
988 "$objid:debian/changelog";
989 $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog");
994 defined $d or fail "getcwd failed: $!";
998 sub parse_dscdata () {
999 my $dscfh = new IO::File \$dscdata, '<' or die $!;
1000 printdebug Dumper($dscdata) if $debuglevel>1;
1001 $dsc = parsecontrolfh($dscfh,$dscurl,1);
1002 printdebug Dumper($dsc) if $debuglevel>1;
1007 sub archive_query ($;@) {
1008 my ($method) = shift @_;
1009 fail "this operation does not support multiple comma-separated suites"
1011 my $query = access_cfg('archive-query','RETURN-UNDEF');
1012 $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
1015 { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
1018 sub archive_query_prepend_mirror {
1019 my $m = access_cfg('mirror');
1020 return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
1023 sub pool_dsc_subpath ($$) {
1024 my ($vsn,$component) = @_; # $package is implicit arg
1025 my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
1026 return "/pool/$component/$prefix/$package/".dscfn($vsn);
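# e.g. (illustrative values) with component "main" and version "2.10-1":
#   package "hello"  -> "/pool/main/h/hello/hello_2.10-1.dsc"
#   package "libfoo" -> "/pool/main/libf/libfoo/libfoo_2.10-1.dsc"
# (packages whose names start with "l" get a four-character pool prefix)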
1029 sub cfg_apply_map ($$$) {
1030 my ($varref, $what, $mapspec) = @_;
1031 return unless $mapspec;
1033 printdebug "config $what EVAL{ $mapspec; }\n";
1035 eval "package Dgit::Config; $mapspec;";
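# e.g. the default configuration above sets aptget-suite-map to
#   s#-security$#/updates#
# so an $isuite of (say) "stretch-security" is rewritten to
# "stretch/updates" before being handed to apt.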
1040 #---------- `ftpmasterapi' archive query method (nascent) ----------
1042 sub archive_api_query_cmd ($) {
1044 my @cmd = (@curl, qw(-sS));
1045 my $url = access_cfg('archive-query-url');
1046 if ($url =~ m#^https://([-.0-9a-z]+)/#) {
1048 my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //'';
1049 foreach my $key (split /\:/, $keys) {
1050 $key =~ s/\%HOST\%/$host/g;
1052 fail "for $url: stat $key: $!" unless $!==ENOENT;
1055 fail "config requested specific TLS key but do not know".
1056 " how to get curl to use exactly that EE key ($key)";
1057 # push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
1058 # # Sadly the above line does not work because of changes
1059 # # to gnutls. The real fix for #790093 may involve
1060 # # new curl options.
1063 # Fixing #790093 properly will involve providing a value
1064 # for this on clients.
1065 my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF');
1066 push @cmd, split / /, $kargs if defined $kargs;
1068 push @cmd, $url.$subpath;
1072 sub api_query ($$;$) {
1074 my ($data, $subpath, $ok404) = @_;
1075 badcfg "ftpmasterapi archive query method takes no data part"
1077 my @cmd = archive_api_query_cmd($subpath);
1078 my $url = $cmd[$#cmd];
1079 push @cmd, qw(-w %{http_code});
1080 my $json = cmdoutput @cmd;
1081 unless ($json =~ s/\d+\d+\d$//) {
1082 failedcmd_report_cmd undef, @cmd;
1083 fail "curl failed to print 3-digit HTTP code";
1086 return undef if $code eq '404' && $ok404;
1087 fail "fetch of $url gave HTTP code $code"
1088 unless $url =~ m#^file://# or $code =~ m/^2/;
1089 return decode_json($json);
1092 sub canonicalise_suite_ftpmasterapi {
1093 my ($proto,$data) = @_;
1094 my $suites = api_query($data, 'suites');
1096 foreach my $entry (@$suites) {
1098 my $v = $entry->{$_};
1099 defined $v && $v eq $isuite;
1100 } qw(codename name);
1101 push @matched, $entry;
1103 fail "unknown suite $isuite" unless @matched;
1106 @matched==1 or die "multiple matches for suite $isuite\n";
1107 $cn = "$matched[0]{codename}";
1108 defined $cn or die "suite $isuite info has no codename\n";
1109 $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
1111 die "bad ftpmaster api response: $@\n".Dumper(\@matched)
1116 sub archive_query_ftpmasterapi {
1117 my ($proto,$data) = @_;
1118 my $info = api_query($data, "dsc_in_suite/$isuite/$package");
1120 my $digester = Digest::SHA->new(256);
1121 foreach my $entry (@$info) {
1123 my $vsn = "$entry->{version}";
1124 my ($ok,$msg) = version_check $vsn;
1125 die "bad version: $msg\n" unless $ok;
1126 my $component = "$entry->{component}";
1127 $component =~ m/^$component_re$/ or die "bad component";
1128 my $filename = "$entry->{filename}";
1129 $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
1130 or die "bad filename";
1131 my $sha256sum = "$entry->{sha256sum}";
1132 $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
1133 push @rows, [ $vsn, "/pool/$component/$filename",
1134 $digester, $sha256sum ];
1136 die "bad ftpmaster api response: $@\n".Dumper($entry)
1139 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1140 return archive_query_prepend_mirror @rows;
1143 sub file_in_archive_ftpmasterapi {
1144 my ($proto,$data,$filename) = @_;
1145 my $pat = $filename;
1148 $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
1149 my $info = api_query($data, "file_in_archive/$pat", 1);
1152 #---------- `aptget' archive query method ----------
1155 our $aptget_releasefile;
1156 our $aptget_configpath;
1158 sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
1159 sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
1161 sub aptget_cache_clean {
1162 runcmd_ordryrun_local qw(sh -ec),
1163 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
1167 sub aptget_lock_acquire () {
1168 my $lockfile = "$aptget_base/lock";
1169 open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
1170 flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
1173 sub aptget_prep ($) {
1175 return if defined $aptget_base;
1177 badcfg "aptget archive query method takes no data part"
1180 my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
1183 ensuredir "$cache/dgit";
1185 access_cfg('aptget-cachekey','RETURN-UNDEF')
1186 // access_nomdistro();
1188 $aptget_base = "$cache/dgit/aptget";
1189 ensuredir $aptget_base;
1191 my $quoted_base = $aptget_base;
1192 die "$quoted_base contains bad chars, cannot continue"
1193 if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
1195 ensuredir $aptget_base;
1197 aptget_lock_acquire();
1199 aptget_cache_clean();
1201 $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
1202 my $sourceslist = "source.list#$cachekey";
1204 my $aptsuites = $isuite;
1205 cfg_apply_map(\$aptsuites, 'suite map',
1206 access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
1208 open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
1209 printf SRCS "deb-src %s %s %s\n",
1210 access_cfg('mirror'),
1212 access_cfg('aptget-components')
1215 ensuredir "$aptget_base/cache";
1216 ensuredir "$aptget_base/lists";
1218 open CONF, ">", $aptget_configpath or die $!;
1220 Debug::NoLocking "true";
1221 APT::Get::List-Cleanup "false";
1222 #clear APT::Update::Post-Invoke-Success;
1223 Dir::Etc::SourceList "$quoted_base/$sourceslist";
1224 Dir::State::Lists "$quoted_base/lists";
1225 Dir::Etc::preferences "$quoted_base/preferences";
1226 Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
1227 Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
1230 foreach my $key (qw(
1233 Dir::Cache::Archives
1234 Dir::Etc::SourceParts
1235 Dir::Etc::preferencesparts
1237 ensuredir "$aptget_base/$key";
1238 print CONF "$key \"$quoted_base/$key\";\n" or die $!;
1241 my $oldatime = (time // die $!) - 1;
1242 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1243 next unless stat_exists $oldlist;
1244 my ($mtime) = (stat _)[9];
1245 utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
1248 runcmd_ordryrun_local aptget_aptget(), qw(update);
1251 foreach my $oldlist (<$aptget_base/lists/*Release>) {
1252 next unless stat_exists $oldlist;
1253 my ($atime) = (stat _)[8];
1254 next if $atime == $oldatime;
1255 push @releasefiles, $oldlist;
1257 my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
1258 @releasefiles = @inreleasefiles if @inreleasefiles;
1259 die "apt updated wrong number of Release files (@releasefiles), erk"
1260 unless @releasefiles == 1;
1262 ($aptget_releasefile) = @releasefiles;
1265 sub canonicalise_suite_aptget {
1266 my ($proto,$data) = @_;
1269 my $release = parsecontrol $aptget_releasefile, "Release file", 1;
1271 foreach my $name (qw(Codename Suite)) {
1272 my $val = $release->{$name};
1274 printdebug "release file $name: $val\n";
1275 $val =~ m/^$suite_re$/o or fail
1276 "Release file ($aptget_releasefile) specifies intolerable $name";
1277 cfg_apply_map(\$val, 'suite rmap',
1278 access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
1285 sub archive_query_aptget {
1286 my ($proto,$data) = @_;
1289 ensuredir "$aptget_base/source";
1290 foreach my $old (<$aptget_base/source/*.dsc>) {
1291 unlink $old or die "$old: $!";
1294 my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
1295 return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
1296 # avoids apt-get source failing with ambiguous error code
1298 runcmd_ordryrun_local
1299 shell_cmd 'cd "$1"/source; shift', $aptget_base,
1300 aptget_aptget(), qw(--download-only --only-source source), $package;
1302 my @dscs = <$aptget_base/source/*.dsc>;
1303 fail "apt-get source did not produce a .dsc" unless @dscs;
1304 fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
1306 my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
1309 my $uri = "file://". uri_escape $dscs[0];
1310 $uri =~ s{\%2f}{/}gi;
1311 return [ (getfield $pre_dsc, 'Version'), $uri ];
1314 sub file_in_archive_aptget () { return undef; }
1316 #---------- `dummyapicat' archive query method ----------
1318 sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
1319 sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
1321 sub file_in_archive_dummycatapi ($$$) {
1322 my ($proto,$data,$filename) = @_;
1323 my $mirror = access_cfg('mirror');
1324 $mirror =~ s#^file://#/# or die "$mirror ?";
1326 my @cmd = (qw(sh -ec), '
1328 find -name "$2" -print0 |
1330 ', qw(x), $mirror, $filename);
1331 debugcmd "-|", @cmd;
1332 open FIA, "-|", @cmd or die $!;
1335 printdebug "| $_\n";
1336 m/^(\w+) (\S+)$/ or die "$_ ?";
1337 push @out, { sha256sum => $1, filename => $2 };
1339 close FIA or die failedcmd @cmd;
1343 #---------- `madison' archive query method ----------
1345 sub archive_query_madison {
1346 return archive_query_prepend_mirror
1347 map { [ @$_[0..1] ] } madison_get_parse(@_);
1350 sub madison_get_parse {
1351 my ($proto,$data) = @_;
1352 die unless $proto eq 'madison';
1353 if (!length $data) {
1354 $data= access_cfg('madison-distro','RETURN-UNDEF');
1355 $data //= access_basedistro();
1357 $rmad{$proto,$data,$package} ||= cmdoutput
1358 qw(rmadison -asource),"-s$isuite","-u$data",$package;
1359 my $rmad = $rmad{$proto,$data,$package};
1362 foreach my $l (split /\n/, $rmad) {
1363 $l =~ m{^ \s*( [^ \t|]+ )\s* \|
1364 \s*( [^ \t|]+ )\s* \|
1365 \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
1366 \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
1367 $1 eq $package or die "$rmad $package ?";
1374 $component = access_cfg('archive-query-default-component');
1376 $5 eq 'source' or die "$rmad ?";
1377 push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
1379 return sort { -version_compare($a->[0],$b->[0]); } @out;
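# rmadison output lines look something like (illustrative):
#   hello | 2.10-1 | unstable | source
# i.e. package | version | suite[/component] | architectures, which is
# what the regexp above picks apart.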
1382 sub canonicalise_suite_madison {
1383 # madison canonicalises for us
1384 my @r = madison_get_parse(@_);
1386 "unable to canonicalise suite using package $package".
1387 " which does not appear to exist in suite $isuite;".
1388 " --existing-package may help";
1392 sub file_in_archive_madison { return undef; }
1394 #---------- `sshpsql' archive query method ----------
1397 my ($data,$runeinfo,$sql) = @_;
1398 if (!length $data) {
1399 $data= access_someuserhost('sshpsql').':'.
1400 access_cfg('sshpsql-dbname');
1402 $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
1403 my ($userhost,$dbname) = ($`,$'); #';
1405 my @cmd = (access_cfg_ssh, $userhost,
1406 access_runeinfo("ssh-psql $runeinfo").
1407 " export LC_MESSAGES=C; export LC_CTYPE=C;".
1408 " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
1410 open P, "-|", @cmd or die $!;
1413 printdebug(">|$_|\n");
1416 $!=0; $?=0; close P or failedcmd @cmd;
1418 my $nrows = pop @rows;
1419 $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
1420 @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
1421 @rows = map { [ split /\|/, $_ ] } @rows;
1422 my $ncols = scalar @{ shift @rows };
1423 die if grep { scalar @$_ != $ncols } @rows;
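# psql -A output looks something like (illustrative values):
#   version|component|filename|sha256sum
#   2.10-1|main|pool/main/h/hello/hello_2.10-1.dsc|0123...
#   (1 row)
# i.e. a '|'-separated header row, the data rows, then a row count,
# which is what is unpicked above.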
1427 sub sql_injection_check {
1428 foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
1431 sub archive_query_sshpsql ($$) {
1432 my ($proto,$data) = @_;
1433 sql_injection_check $isuite, $package;
1434 my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
1435 SELECT source.version, component.name, files.filename, files.sha256sum
1437 JOIN src_associations ON source.id = src_associations.source
1438 JOIN suite ON suite.id = src_associations.suite
1439 JOIN dsc_files ON dsc_files.source = source.id
1440 JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
1441 JOIN component ON component.id = files_archive_map.component_id
1442 JOIN files ON files.id = dsc_files.file
1443 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
1444 AND source.source='$package'
1445 AND files.filename LIKE '%.dsc';
1447 @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
1448 my $digester = Digest::SHA->new(256);
1450 my ($vsn,$component,$filename,$sha256sum) = @$_;
1451 [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
1453 return archive_query_prepend_mirror @rows;
1456 sub canonicalise_suite_sshpsql ($$) {
1457 my ($proto,$data) = @_;
1458 sql_injection_check $isuite;
1459 my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
1460 SELECT suite.codename
1461 FROM suite where suite_name='$isuite' or codename='$isuite';
1463 @rows = map { $_->[0] } @rows;
1464 fail "unknown suite $isuite" unless @rows;
1465 die "ambiguous $isuite: @rows ?" if @rows>1;
1469 sub file_in_archive_sshpsql ($$$) { return undef; }
1471 #---------- `dummycat' archive query method ----------
1473 sub canonicalise_suite_dummycat ($$) {
1474 my ($proto,$data) = @_;
1475 my $dpath = "$data/suite.$isuite";
1476 if (!open C, "<", $dpath) {
1477 $!==ENOENT or die "$dpath: $!";
1478 printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
1482 chomp or die "$dpath: $!";
1484 printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
1488 sub archive_query_dummycat ($$) {
1489 my ($proto,$data) = @_;
1490 canonicalise_suite();
1491 my $dpath = "$data/package.$csuite.$package";
1492 if (!open C, "<", $dpath) {
1493 $!==ENOENT or die "$dpath: $!";
1494 printdebug "dummycat query $csuite $package $dpath ENOENT\n";
1502 printdebug "dummycat query $csuite $package $dpath | $_\n";
1503 my @row = split /\s+/, $_;
1504 @row==2 or die "$dpath: $_ ?";
1507 C->error and die "$dpath: $!";
1509 return archive_query_prepend_mirror
1510 sort { -version_compare($a->[0],$b->[0]); } @rows;
1513 sub file_in_archive_dummycat () { return undef; }
1515 #---------- tag format handling ----------
1517 sub access_cfg_tagformats () {
1518 split /\,/, access_cfg('dgit-tag-format');
1521 sub access_cfg_tagformats_can_splitbrain () {
1522 my %y = map { $_ => 1 } access_cfg_tagformats;
1523 foreach my $needtf (qw(new maint)) {
1524 next if $y{$needtf};
1530 sub need_tagformat ($$) {
1531 my ($fmt, $why) = @_;
1532 fail "need to use tag format $fmt ($why) but also need".
1533 " to use tag format $tagformat_want->[0] ($tagformat_want->[1])".
1534 " - no way to proceed"
1535 if $tagformat_want && $tagformat_want->[0] ne $fmt;
1536 $tagformat_want = [$fmt, $why, $tagformat_want->[2] // 0];
1539 sub select_tagformat () {
1541 return if $tagformatfn && !$tagformat_want;
1542 die 'bug' if $tagformatfn && $tagformat_want;
1543 # ... $tagformat_want assigned after previous select_tagformat
1545 my (@supported) = grep { $_ =~ m/^(?:old|new)$/ } access_cfg_tagformats();
1546 printdebug "select_tagformat supported @supported\n";
1548 $tagformat_want //= [ $supported[0], "distro access configuration", 0 ];
1549 printdebug "select_tagformat specified @$tagformat_want\n";
1551 my ($fmt,$why,$override) = @$tagformat_want;
1553 fail "target distro supports tag formats @supported".
1554 " but have to use $fmt ($why)"
1556 or grep { $_ eq $fmt } @supported;
1558 $tagformat_want = undef;
1560 $tagformatfn = ${*::}{"debiantag_$fmt"};
1562 fail "trying to use unknown tag format \`$fmt' ($why) !"
1563 unless $tagformatfn;
1566 #---------- archive query entrypoints and rest of program ----------
1568 sub canonicalise_suite () {
1569 return if defined $csuite;
1570 fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
1571 $csuite = archive_query('canonicalise_suite');
1572 if ($isuite ne $csuite) {
1573 progress "canonical suite name for $isuite is $csuite";
1575 progress "canonical suite name is $csuite";
1579 sub get_archive_dsc () {
1580 canonicalise_suite();
1581 my @vsns = archive_query('archive_query');
1582 foreach my $vinfo (@vsns) {
1583 my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
1584 $dscurl = $vsn_dscurl;
1585 $dscdata = url_get($dscurl);
1587 $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
1592 $digester->add($dscdata);
1593 my $got = $digester->hexdigest();
1595 fail "$dscurl has hash $got but".
1596 " archive told us to expect $digest";
1599 my $fmt = getfield $dsc, 'Format';
1600 $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
1601 "unsupported source format $fmt, sorry";
1603 $dsc_checked = !!$digester;
1604 printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
1608 printdebug "get_archive_dsc: nothing in archive, returning undef\n";
1611 sub check_for_git ();
1612 sub check_for_git () {
1614 my $how = access_cfg('git-check');
1615 if ($how eq 'ssh-cmd') {
1617 (access_cfg_ssh, access_gituserhost(),
1618 access_runeinfo("git-check $package").
1619 " set -e; cd ".access_cfg('git-path').";".
1620 " if test -d $package.git; then echo 1; else echo 0; fi");
1621 my $r= cmdoutput @cmd;
1622 if (defined $r and $r =~ m/^divert (\w+)$/) {
1624 my ($usedistro,) = access_distros();
1625 # NB that if we are pushing, $usedistro will be $distro/push
1626 $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
1627 $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
1628 progress "diverting to $divert (using config for $instead_distro)";
1629 return check_for_git();
1631 failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
1633 } elsif ($how eq 'url') {
1634 my $prefix = access_cfg('git-check-url','git-url');
1635 my $suffix = access_cfg('git-check-suffix','git-suffix',
1636 'RETURN-UNDEF') // '.git';
1637 my $url = "$prefix/$package$suffix";
1638 my @cmd = (@curl, qw(-sS -I), $url);
1639 my $result = cmdoutput @cmd;
1640 $result =~ s/^\S+ 200 .*\n\r?\n//;
1641 # curl -sS -I with https_proxy prints
1642 # HTTP/1.0 200 Connection established
1643 $result =~ m/^\S+ (404|200) /s or
1644 fail "unexpected results from git check query - ".
1645 Dumper($prefix, $result);
1647 if ($code eq '404') {
1649 } elsif ($code eq '200') {
1654 } elsif ($how eq 'true') {
1656 } elsif ($how eq 'false') {
1659 badcfg "unknown git-check \`$how'";
1663 sub create_remote_git_repo () {
1664 my $how = access_cfg('git-create');
1665 if ($how eq 'ssh-cmd') {
1667 (access_cfg_ssh, access_gituserhost(),
1668 access_runeinfo("git-create $package").
1669 "set -e; cd ".access_cfg('git-path').";".
1670 " cp -a _template $package.git");
1671 } elsif ($how eq 'true') {
1674 badcfg "unknown git-create \`$how'";
1678 our ($dsc_hash,$lastpush_mergeinput);
1679 our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
1681 our $ud = '.git/dgit/unpack';
1691 sub mktree_in_ud_here () {
1692 workarea_setup $gitcfgs{local};
1695 sub git_write_tree () {
1696 my $tree = cmdoutput @git, qw(write-tree);
1697 $tree =~ m/^\w+$/ or die "$tree ?";
1701 sub git_add_write_tree () {
1702 runcmd @git, qw(add -Af .);
1703 return git_write_tree();
1706 sub remove_stray_gits ($) {
1708 my @gitscmd = qw(find -name .git -prune -print0);
1709 debugcmd "|",@gitscmd;
1710 open GITS, "-|", @gitscmd or die $!;
1715 print STDERR "$us: warning: removing from $what: ",
1716 (messagequote $_), "\n";
1720 $!=0; $?=0; close GITS or failedcmd @gitscmd;
1723 sub mktree_in_ud_from_only_subdir ($;$) {
1724 my ($what,$raw) = @_;
1726 # changes into the subdir
1728 die "expected one subdir but found @dirs ?" unless @dirs==1;
1729 $dirs[0] =~ m#^([^/]+)/\.$# or die;
1733 remove_stray_gits($what);
1734 mktree_in_ud_here();
1736 my ($format, $fopts) = get_source_format();
1737 if (madformat($format)) {
1742 my $tree=git_add_write_tree();
1743 return ($tree,$dir);
1746 our @files_csum_info_fields =
1747 (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
1748 ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
1749 ['Files', 'Digest::MD5', 'new()', 'md5sum']);
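# Each of these fields holds one line per file, e.g. (illustrative)
# a Checksums-Sha256 line of the form
#   0123abcd...ef 123456 hello_2.10-1.dsc
# i.e. "<digest> <size> <filename>", which dsc_files_info() parses below.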
1751 sub dsc_files_info () {
1752 foreach my $csumi (@files_csum_info_fields) {
1753 my ($fname, $module, $method) = @$csumi;
1754 my $field = $dsc->{$fname};
1755 next unless defined $field;
1756 eval "use $module; 1;" or die $@;
1758 foreach (split /\n/, $field) {
1760 m/^(\w+) (\d+) (\S+)$/ or
1761 fail "could not parse .dsc $fname line \`$_'";
1762 my $digester = eval "$module"."->$method;" or die $@;
1767 Digester => $digester,
1772 fail "missing any supported Checksums-* or Files field in ".
1773 $dsc->get_option('name');
1777 map { $_->{Filename} } dsc_files_info();
1780 sub files_compare_inputs (@) {
1785 my $showinputs = sub {
1786 return join "; ", map { $_->get_option('name') } @$inputs;
1789 foreach my $in (@$inputs) {
1791 my $in_name = $in->get_option('name');
1793 printdebug "files_compare_inputs $in_name\n";
1795 foreach my $csumi (@files_csum_info_fields) {
1796 my ($fname) = @$csumi;
1797 printdebug "files_compare_inputs $in_name $fname\n";
1799 my $field = $in->{$fname};
1800 next unless defined $field;
1803 foreach (split /\n/, $field) {
1806 my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
1807 fail "could not parse $in_name $fname line \`$_'";
1809 printdebug "files_compare_inputs $in_name $fname $f\n";
1813 my $re = \ $record{$f}{$fname};
1815 $fchecked{$f}{$in_name} = 1;
1817 fail "hash or size of $f varies in $fname fields".
1818 " (between: ".$showinputs->().")";
1823 @files = sort @files;
1824 $expected_files //= \@files;
1825 "@$expected_files" eq "@files" or
1826 fail "file list in $in_name varies between hash fields!";
1829 fail "$in_name has no files list field(s)";
1831 printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
1834 grep { keys %$_ == @$inputs-1 } values %fchecked
1835 or fail "no file appears in all file lists".
1836 " (looked in: ".$showinputs->().")";
1839 sub is_orig_file_in_dsc ($$) {
1840 my ($f, $dsc_files_info) = @_;
1841 return 0 if @$dsc_files_info <= 1;
1842 # One file means no origs, and the filename doesn't have a "what
1843 # part of dsc" component. (Consider versions ending `.orig'.)
1844 return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
1848 sub is_orig_file_of_vsn ($$) {
1849 my ($f, $upstreamvsn) = @_;
1850 my $base = srcfn $upstreamvsn, '';
1851 return 0 unless $f =~ m/^\Q$base\E\.$orig_f_tail_re$/;
1855 sub changes_update_origs_from_dsc ($$$$) {
1856 my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
1858 printdebug "checking origs needed ($upstreamvsn)...\n";
1859 $_ = getfield $changes, 'Files';
1860 m/^\w+ \d+ (\S+ \S+) \S+$/m or
1861 fail "cannot find section/priority from .changes Files field";
1862 my $placementinfo = $1;
1864 printdebug "checking origs needed placement '$placementinfo'...\n";
1865 foreach my $l (split /\n/, getfield $dsc, 'Files') {
1866 $l =~ m/\S+$/ or next;
1868 printdebug "origs $file | $l\n";
1869 next unless is_orig_file_of_vsn $file, $upstreamvsn;
1870 printdebug "origs $file is_orig\n";
1871 my $have = archive_query('file_in_archive', $file);
1872 if (!defined $have) {
1874 archive does not support .orig check; hope you used --ch:--sa/-sd if needed
1880 printdebug "origs $file \$#\$have=$#$have\n";
1881 foreach my $h (@$have) {
1884 foreach my $csumi (@files_csum_info_fields) {
1885 my ($fname, $module, $method, $archivefield) = @$csumi;
1886 next unless defined $h->{$archivefield};
1887 $_ = $dsc->{$fname};
1888 next unless defined;
1889 m/^(\w+) .* \Q$file\E$/m or
1890 fail ".dsc $fname missing entry for $file";
1891 if ($h->{$archivefield} eq $1) {
1895 "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
1898 die "$file ".Dumper($h)." ?!" if $same && @differ;
1901 push @found_differ, "archive $h->{filename}: ".join "; ", @differ
1904 printdebug "origs $file f.same=$found_same".
1905 " #f._differ=$#found_differ\n";
1906 if (@found_differ && !$found_same) {
1908 "archive contains $file with different checksum",
1911 # Now we edit the changes file to add or remove it
1912 foreach my $csumi (@files_csum_info_fields) {
1913 my ($fname, $module, $method, $archivefield) = @$csumi;
1914 next unless defined $changes->{$fname};
1916 # in archive, delete from .changes if it's there
1917 $changed{$file} = "removed" if
1918 $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
1919 } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
1920 # not in archive, but it's here in the .changes
1922 my $dsc_data = getfield $dsc, $fname;
1923 $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
1925 $extra =~ s/ \d+ /$&$placementinfo /
1926 or die "$fname $extra >$dsc_data< ?"
1927 if $fname eq 'Files';
1928 $changes->{$fname} .= "\n". $extra;
1929 $changed{$file} = "added";
1934 foreach my $file (keys %changed) {
1936 "edited .changes for archive .orig contents: %s %s",
1937 $changed{$file}, $file;
1939 my $chtmp = "$changesfile.tmp";
1940 $changes->save($chtmp);
1942 rename $chtmp,$changesfile or die "$changesfile $!";
1944 progress "[new .changes left in $changesfile]";
1947 progress "$changesfile already has appropriate .orig(s) (if any)";
1951 sub make_commit ($) {
1953 return cmdoutput @git, qw(hash-object -w -t commit), $file;
1956 sub make_commit_text ($) {
1959 my @cmd = (@git, qw(hash-object -w -t commit --stdin));
1961 print Dumper($text) if $debuglevel > 1;
1962 my $child = open2($out, $in, @cmd) or die $!;
1965 print $in $text or die $!;
1966 close $in or die $!;
1968 $h =~ m/^\w+$/ or die;
1970 printdebug "=> $h\n";
1973 waitpid $child, 0 == $child or die "$child $!";
1974 $? and failedcmd @cmd;
1978 sub clogp_authline ($) {
1980 my $author = getfield $clogp, 'Maintainer';
1981 if ($author =~ m/^[^"\@]+\,/) {
1982 # single entry Maintainer field with unquoted comma
1983 $author = ($& =~ y/,//rd).$'; # strip the comma
1985 # git wants a single author; any remaining commas in $author
1986 # are by now preceded by @ (or "). It seems safer to punt on
1987 # "..." for now rather than attempting to dequote or something.
1988 $author =~ s#,.*##ms unless $author =~ m/"/;
1989 my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
1990 my $authline = "$author $date";
1991 $authline =~ m/$git_authline_re/o or
1992 fail "unexpected commit author line format \`$authline'".
1993 " (was generated from changelog Maintainer field)";
1994 return ($1,$2,$3) if wantarray;
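# e.g. (illustrative details) a Maintainer of "Jane Example <jane@example.org>"
# and a changelog Date of "Tue, 02 Jan 2018 10:00:00 +0000" produce, in a
# UTC timezone, an authline of
#   Jane Example <jane@example.org> 1514887200 +0000
# which is the form $git_authline_re expects.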
1998 sub vendor_patches_distro ($$) {
1999 my ($checkdistro, $what) = @_;
2000 return unless defined $checkdistro;
2002 my $series = "debian/patches/\L$checkdistro\E.series";
2003 printdebug "checking for vendor-specific $series ($what)\n";
2005 if (!open SERIES, "<", $series) {
2006 die "$series $!" unless $!==ENOENT;
2015 Unfortunately, this source package uses a feature of dpkg-source where
2016 the same source package unpacks to different source code on different
2017 distros. dgit cannot safely operate on such packages on affected
2018 distros, because the meaning of source packages is not stable.
2020 Please ask the distro/maintainer to remove the distro-specific series
2021 files and use a different technique (if necessary, uploading actually
2022 different packages, if different distros are supposed to have
2026 fail "Found active distro-specific series file for".
2027 " $checkdistro ($what): $series, cannot continue";
2029 die "$series $!" if SERIES->error;
2033 sub check_for_vendor_patches () {
2034 # This dpkg-source feature doesn't seem to be documented anywhere!
2035 # But it can be found in the changelog (reformatted):
2037 # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
2038 # Author: Raphael Hertzog <hertzog@debian.org>
2039 # Date: Sun Oct 3 09:36:48 2010 +0200
2041 # dpkg-source: correctly create .pc/.quilt_series with alternate
2044 # If you have debian/patches/ubuntu.series and you were
2045 # unpacking the source package on ubuntu, quilt was still
2046 # directed to debian/patches/series instead of
2047 # debian/patches/ubuntu.series.
2049 # debian/changelog | 3 +++
2050 # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
2051 # 2 files changed, 6 insertions(+), 1 deletion(-)
2054 vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
2055 vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
2056 "Dpkg::Vendor \`current vendor'");
2057 vendor_patches_distro(access_basedistro(),
2058 "(base) distro being accessed");
2059 vendor_patches_distro(access_nomdistro(),
2060 "(nominal) distro being accessed");
2063 sub generate_commits_from_dsc () {
2064 # See big comment in fetch_from_archive, below.
2065 # See also README.dsc-import.
2069 my @dfi = dsc_files_info();
2070 foreach my $fi (@dfi) {
2071 my $f = $fi->{Filename};
2072 die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
2073 my $upper_f = "../../../../$f";
2075 printdebug "considering reusing $f: ";
2077 if (link_ltarget "$upper_f,fetch", $f) {
2078 printdebug "linked (using ...,fetch).\n";
2079 } elsif ((printdebug "($!) "),
2081 fail "accessing ../$f,fetch: $!";
2082 } elsif (link_ltarget $upper_f, $f) {
2083 printdebug "linked.\n";
2084 } elsif ((printdebug "($!) "),
2086 fail "accessing ../$f: $!";
2088 printdebug "absent.\n";
2092 complete_file_from_dsc('.', $fi, \$refetched)
2095 printdebug "considering saving $f: ";
2097 if (link $f, $upper_f) {
2098 printdebug "linked.\n";
2099 } elsif ((printdebug "($!) "),
2101 fail "saving ../$f: $!";
2102 } elsif (!$refetched) {
2103 printdebug "no need.\n";
2104 } elsif (link $f, "$upper_f,fetch") {
2105 printdebug "linked (using ...,fetch).\n";
2106 } elsif ((printdebug "($!) "),
2108 fail "saving ../$f,fetch: $!";
2110 printdebug "cannot.\n";
2114 # We unpack and record the orig tarballs first, so that we only
2115 # need disk space for one private copy of the unpacked source.
2116 # But we can't make them into commits until we have the metadata
2117 # from the debian/changelog, so we record the tree objects now and
2118 # make them into commits later.
2120 my $upstreamv = upstreamversion $dsc->{version};
2121 my $orig_f_base = srcfn $upstreamv, '';
2123 foreach my $fi (@dfi) {
2124 # We actually import, and record as a commit, every tarball
2125 # (unless there is only one file, in which case there seems
2128 my $f = $fi->{Filename};
2129 printdebug "import considering $f ";
2130 (printdebug "only one dfi\n"), next if @dfi == 1;
2131 (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
2132 (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
2136 $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
2138 printdebug "Y ", (join ' ', map { $_//"(none)" }
2139 $compr_ext, $orig_f_part
2142 my $input = new IO::File $f, '<' or die "$f $!";
2146 if (defined $compr_ext) {
2148 Dpkg::Compression::compression_guess_from_filename $f;
2149 fail "Dpkg::Compression cannot handle file $f in source package"
2150 if defined $compr_ext && !defined $cname;
2152 new Dpkg::Compression::Process compression => $cname;
2153 @compr_cmd = $compr_proc->get_uncompress_cmdline();
2154 my $compr_fh = new IO::Handle;
2155 my $compr_pid = open $compr_fh, "-|" // die $!;
2157 open STDIN, "<&", $input or die $!;
2159 die "dgit (child): exec $compr_cmd[0]: $!\n";
2164 rmtree "_unpack-tar";
2165 mkdir "_unpack-tar" or die $!;
2166 my @tarcmd = qw(tar -x -f -
2167 --no-same-owner --no-same-permissions
2168 --no-acls --no-xattrs --no-selinux);
2169 my $tar_pid = fork // die $!;
2171 chdir "_unpack-tar" or die $!;
2172 open STDIN, "<&", $input or die $!;
2174 die "dgit (child): exec $tarcmd[0]: $!";
2176 $!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
2177 !$? or failedcmd @tarcmd;
2180 (@compr_cmd ? ($?==SIGPIPE || failedcmd @compr_cmd)
2182 # finally, we have the unpacked results in "_unpack-tar", but maybe
2183 # with the wrong permissions
2185 runcmd qw(chmod -R +rwX _unpack-tar);
2186 changedir "_unpack-tar";
2187 remove_stray_gits($f);
2188 mktree_in_ud_here();
2190 my ($tree) = git_add_write_tree();
2191 my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
2192 if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
2194 printdebug "one subtree $1\n";
2196 printdebug "multiple subtrees\n";
2199 rmtree "_unpack-tar";
2201 my $ent = [ $f, $tree ];
2203 Orig => !!$orig_f_part,
2204 Sort => (!$orig_f_part ? 2 :
2205 $orig_f_part =~ m/-/g ? 1 :
2213 # put any without "_" first (spec is not clear whether files
2214 # are always in the usual order). Tarballs without "_" are
2215 # the main orig or the debian tarball.
2216 $a->{Sort} <=> $b->{Sort} or
2220 my $any_orig = grep { $_->{Orig} } @tartrees;
2222 my $dscfn = "$package.dsc";
2224 my $treeimporthow = 'package';
2226 open D, ">", $dscfn or die "$dscfn: $!";
2227 print D $dscdata or die "$dscfn: $!";
2228 close D or die "$dscfn: $!";
2229 my @cmd = qw(dpkg-source);
2230 push @cmd, '--no-check' if $dsc_checked;
2231 if (madformat $dsc->{format}) {
2232 push @cmd, '--skip-patches';
2233 $treeimporthow = 'unpatched';
2235 push @cmd, qw(-x --), $dscfn;
2238 my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
2239 if (madformat $dsc->{format}) {
2240 check_for_vendor_patches();
2244 if (madformat $dsc->{format}) {
2245 my @pcmd = qw(dpkg-source --before-build .);
2246 runcmd shell_cmd 'exec >/dev/null', @pcmd;
2248 $dappliedtree = git_add_write_tree();
2251 my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
2252 debugcmd "|",@clogcmd;
2253 open CLOGS, "-|", @clogcmd or die $!;
2258 printdebug "import clog search...\n";
2261 my $stanzatext = do { local $/=""; <CLOGS>; };
2262 printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
2263 last if !defined $stanzatext;
2265 my $desc = "package changelog, entry no.$.";
2266 open my $stanzafh, "<", \$stanzatext or die;
2267 my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
2268 $clogp //= $thisstanza;
2270 printdebug "import clog $thisstanza->{version} $desc...\n";
2272 last if !$any_orig; # we don't need $r1clogp
2274 # We look for the first (most recent) changelog entry whose
2275 # version number is lower than the upstream version of this
2276 # package. Then the last (least recent) previous changelog
2277 # entry is treated as the one which introduced this upstream
2278 # version and used for the synthetic commits for the upstream
2281 # One might think that a more sophisticated algorithm would be
2282 # necessary. But: we do not want to scan the whole changelog
2283 # file. Stopping when we see an earlier version, which
2284 # necessarily then is an earlier upstream version, is the only
2285 # realistic way to do that. Then, either the earliest
2286 # changelog entry we have seen so far is indeed the earliest
2287 # upload of this upstream version; or there are only changelog
2288 # entries relating to later upstream versions (which is not
2289 # possible unless the changelog and .dsc disagree about the
2290 # version). Then it remains to choose between the physically
2291 # last entry in the file, and the one with the lowest version
2292 # number. If these are not the same, we guess that the
2293 # versions were created in a non-monotonic order rather than
2294 # that the changelog entries have been misordered.
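# (Added hedged worked example with hypothetical versions: given
# changelog entries 1.2-3, 1.2-2, 1.2-1, 1.1-7 (newest first) and an
# upstream version of 1.2, the loop stops at 1.1-7, which compares
# lower than 1.2, and $r1clogp ends up as 1.2-1, the entry treated as
# having introduced upstream 1.2.)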
2296 printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
2298 last if version_compare($thisstanza->{version}, $upstreamv) < 0;
2299 $r1clogp = $thisstanza;
2301 printdebug "import clog $r1clogp->{version} becomes r1\n";
2303 die $! if CLOGS->error;
2304 close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
2306 $clogp or fail "package changelog has no entries!";
2308 my $authline = clogp_authline $clogp;
2309 my $changes = getfield $clogp, 'Changes';
2310 $changes =~ s/^\n//; # Changes: \n
2311 my $cversion = getfield $clogp, 'Version';
2314 $r1clogp //= $clogp; # maybe there's only one entry;
2315 my $r1authline = clogp_authline $r1clogp;
2316 # Strictly, r1authline might now be wrong if it's going to be
2317 # unused because !$any_orig. Whatever.
2319 printdebug "import tartrees authline $authline\n";
2320 printdebug "import tartrees r1authline $r1authline\n";
2322 foreach my $tt (@tartrees) {
2323 printdebug "import tartree $tt->{F} $tt->{Tree}\n";
2325 $tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
2328 committer $r1authline
2332 [dgit import orig $tt->{F}]
2340 [dgit import tarball $package $cversion $tt->{F}]
2345 printdebug "import main commit\n";
2347 open C, ">../commit.tmp" or die $!;
2348 print C <<END or die $!;
2351 print C <<END or die $! foreach @tartrees;
2354 print C <<END or die $!;
2360 [dgit import $treeimporthow $package $cversion]
2364 my $rawimport_hash = make_commit qw(../commit.tmp);
2366 if (madformat $dsc->{format}) {
2367 printdebug "import apply patches...\n";
2369 # regularise the state of the working tree so that
2370 # the checkout of $rawimport_hash works nicely.
2371 my $dappliedcommit = make_commit_text(<<END);
2378 runcmd @git, qw(checkout -q -b dapplied), $dappliedcommit;
2380 runcmd @git, qw(checkout -q -b unpa), $rawimport_hash;
2382 # We need the answers to be reproducible
2383 my @authline = clogp_authline($clogp);
2384 local $ENV{GIT_COMMITTER_NAME} = $authline[0];
2385 local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
2386 local $ENV{GIT_COMMITTER_DATE} = $authline[2];
2387 local $ENV{GIT_AUTHOR_NAME} = $authline[0];
2388 local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
2389 local $ENV{GIT_AUTHOR_DATE} = $authline[2];
2391 my $path = $ENV{PATH} or die;
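# (Added descriptive note: the loop below makes up to two attempts.
# The first pass ($use_absurd==0) runs "gbp pq import" with the
# ordinary git-apply; if that fails, $absurdity is prepended to PATH
# and the import is retried ($use_absurd==1) using the slow "absurd"
# git-apply wrapper, subject to the import-gitapply-* force options
# checked inside the loop.)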
2393 foreach my $use_absurd (qw(0 1)) {
2394 runcmd @git, qw(checkout -q unpa);
2395 runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
2396 local $ENV{PATH} = $path;
2399 progress "warning: $@";
2400 $path = "$absurdity:$path";
2401 progress "$us: trying slow absurd-git-apply...";
2402 rename "../../gbp-pq-output","../../gbp-pq-output.0"
2407 die "forbid absurd git-apply\n" if $use_absurd
2408 && forceing [qw(import-gitapply-no-absurd)];
2409 die "only absurd git-apply!\n" if !$use_absurd
2410 && forceing [qw(import-gitapply-absurd)];
2412 local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
2413 local $ENV{PATH} = $path if $use_absurd;
2415 my @showcmd = (gbp_pq, qw(import));
2416 my @realcmd = shell_cmd
2417 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
2418 debugcmd "+",@realcmd;
2419 if (system @realcmd) {
2420 die +(shellquote @showcmd).
2422 failedcmd_waitstatus()."\n";
2425 my $gapplied = git_rev_parse('HEAD');
2426 my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
2427 $gappliedtree eq $dappliedtree or
2429 gbp-pq import and dpkg-source disagree!
2430 gbp-pq import gave commit $gapplied
2431 gbp-pq import gave tree $gappliedtree
2432 dpkg-source --before-build gave tree $dappliedtree
2434 $rawimport_hash = $gapplied;
2439 { local $@; eval { runcmd qw(cat ../../gbp-pq-output); }; }
2444 progress "synthesised git commit from .dsc $cversion";
2446 my $rawimport_mergeinput = {
2447 Commit => $rawimport_hash,
2448 Info => "Import of source package",
2450 my @output = ($rawimport_mergeinput);
2452 if ($lastpush_mergeinput) {
2453 my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
2454 my $oversion = getfield $oldclogp, 'Version';
2456 version_compare($oversion, $cversion);
2458 @output = ($rawimport_mergeinput, $lastpush_mergeinput,
2459 { Message => <<END, ReverseParents => 1 });
2460 Record $package ($cversion) in archive suite $csuite
2462 } elsif ($vcmp > 0) {
2463 print STDERR <<END or die $!;
2465 Version actually in archive: $cversion (older)
2466 Last version pushed with dgit: $oversion (newer or same)
2469 @output = $lastpush_mergeinput;
2471 # Same version. Use what's in the server git branch,
2472 # discarding our own import. (This could happen if the
2473 # server automatically imports all packages into git.)
2474 @output = $lastpush_mergeinput;
2477 changedir '../../../..';
2482 sub complete_file_from_dsc ($$;$) {
2483 our ($dstdir, $fi, $refetched) = @_;
2484 # Ensures that we have, in $dstdir, the file $fi, with the correct
2485 # contents. (Downloading it from alongside $dscurl if necessary.)
2486 # If $refetched is defined, can overwrite "$dstdir/$fi->{Filename}"
2487 # and will set $$refetched=1 if it did so (or tried to).
2489 my $f = $fi->{Filename};
2490 my $tf = "$dstdir/$f";
2494 my $checkhash = sub {
2495 open F, "<", "$tf" or die "$tf: $!";
2496 $fi->{Digester}->reset();
2497 $fi->{Digester}->addfile(*F);
2498 F->error and die $!;
2499 $got = $fi->{Digester}->hexdigest();
2500 return $got eq $fi->{Hash};
2503 if (stat_exists $tf) {
2504 if ($checkhash->()) {
2505 progress "using existing $f";
2509 fail "file $f has hash $got but .dsc".
2510 " demands hash $fi->{Hash} ".
2511 "(perhaps you should delete this file?)";
2513 progress "need to fetch correct version of $f";
2514 unlink $tf or die "$tf $!";
2517 printdebug "$tf does not exist, need to fetch\n";
2521 $furl =~ s{/[^/]+$}{};
2523 die "$f ?" unless $f =~ m/^\Q${package}\E_/;
2524 die "$f ?" if $f =~ m#/#;
2525 runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
2526 return 0 if !act_local();
2529 fail "file $f has hash $got but .dsc".
2530 " demands hash $fi->{Hash} ".
2531 "(got wrong file from archive!)";
2536 sub ensure_we_have_orig () {
2537 my @dfi = dsc_files_info();
2538 foreach my $fi (@dfi) {
2539 my $f = $fi->{Filename};
2540 next unless is_orig_file_in_dsc($f, \@dfi);
2541 complete_file_from_dsc('..', $fi)
2546 #---------- git fetch ----------
2548 sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
2549 sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
2551 # We fetch some parts of lrfetchrefs/*. Ideally we delete these
2552 # locally fetched refs because they have unhelpful names and clutter
2553 # up gitk etc. So we track whether we have "used up" each head ref (ie,
2554 # whether we have made another local ref which refers to this object).
2556 # (If we deleted them unconditionally, then we might end up
2557 # re-fetching the same git objects each time dgit fetch was run.)
2559 # So, each use of lrfetchrefs needs to be accompanied by arrangements
2560 # in git_fetch_us to fetch the refs in question, and possibly a call
2561 # to lrfetchref_used.
2563 our (%lrfetchrefs_f, %lrfetchrefs_d);
2564 # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
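# (Added example with hypothetical values: for basedistro "debian" the
# prefix from lrfetchrefs is "refs/dgit-fetch/debian", so a fetched
# server suite branch might appear as
#   $lrfetchrefs_f{"refs/dgit-fetch/debian/dgit/sid"} = $objid;
# lrfetchref_used copies such entries into %lrfetchrefs_d, marking
# them as safe for del_lrfetchrefs to delete later.)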
2566 sub lrfetchref_used ($) {
2567 my ($fullrefname) = @_;
2568 my $objid = $lrfetchrefs_f{$fullrefname};
2569 $lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
2572 sub git_lrfetch_sane {
2573 my ($url, $supplementary, @specs) = @_;
2574 # Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
2575 # at least as regards @specs. Also leave the results in
2576 # %lrfetchrefs_f, and arrange for lrfetchref_used to be
2577 # able to clean these up.
2579 # With $supplementary==1, @specs must not contain wildcards
2580 # and we add to our previous fetches (non-atomically).
2582 # This is rather miserable:
2583 # When git fetch --prune is passed a fetchspec ending with a *,
2584 # it does a plausible thing. If there is no * then:
2585 # - it matches subpaths too, even if the supplied refspec
2586 # starts with refs, and behaves completely madly if the source
2587 # has refs/refs/something. (See, for example, Debian #NNNN.)
2588 # - if there is no matching remote ref, it bombs out the whole
2590 # We want to fetch a fixed ref, and we don't know in advance
2591 # if it exists, so this is not suitable.
2593 # Our workaround is to use git ls-remote. git ls-remote has its
2594 # own quirks. Notably, it has the absurd multi-tail-matching
2595 # behaviour: git ls-remote R refs/foo can report refs/foo AND
2596 # refs/refs/foo etc.
2598 # Also, we want an idempotent snapshot, but we have to make two
2599 # calls to the remote: one to git ls-remote and one to git fetch. The
2600 # solution is to use git ls-remote to obtain a target state, and
2601 # git fetch to try to generate it. If we don't manage to generate
2602 # the target state, we try again.
2604 printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
2606 my $specre = join '|', map {
2609 my $wildcard = $x =~ s/\\\*$/.*/;
2610 die if $wildcard && $supplementary;
2613 printdebug "git_lrfetch_sane specre=$specre\n";
2614 my $wanted_rref = sub {
2616 return m/^(?:$specre)$/;
2619 my $fetch_iteration = 0;
2622 printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
2623 if (++$fetch_iteration > 10) {
2624 fail "too many iterations trying to get sane fetch!";
2627 my @look = map { "refs/$_" } @specs;
2628 my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
2632 open GITLS, "-|", @lcmd or die $!;
2634 printdebug "=> ", $_;
2635 m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
2636 my ($objid,$rrefname) = ($1,$2);
2637 if (!$wanted_rref->($rrefname)) {
2639 warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
2643 $wantr{$rrefname} = $objid;
2646 close GITLS or failedcmd @lcmd;
2648 # OK, now %want is exactly what we want for refs in @specs
2650 !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
2651 "+refs/$_:".lrfetchrefs."/$_";
2654 printdebug "git_lrfetch_sane fspecs @fspecs\n";
2656 my @fcmd = (@git, qw(fetch -p -n -q), $url, @fspecs);
2657 runcmd_ordryrun_local @fcmd if @fspecs;
2659 if (!$supplementary) {
2660 %lrfetchrefs_f = ();
2664 git_for_each_ref(lrfetchrefs, sub {
2665 my ($objid,$objtype,$lrefname,$reftail) = @_;
2666 $lrfetchrefs_f{$lrefname} = $objid;
2667 $objgot{$objid} = 1;
2670 if ($supplementary) {
2674 foreach my $lrefname (sort keys %lrfetchrefs_f) {
2675 my $rrefname = 'refs'.substr($lrefname, length lrfetchrefs);
2676 if (!exists $wantr{$rrefname}) {
2677 if ($wanted_rref->($rrefname)) {
2679 git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
2683 warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
2686 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2687 delete $lrfetchrefs_f{$lrefname};
2691 foreach my $rrefname (sort keys %wantr) {
2692 my $lrefname = lrfetchrefs.substr($rrefname, 4);
2693 my $got = $lrfetchrefs_f{$lrefname} // '<none>';
2694 my $want = $wantr{$rrefname};
2695 next if $got eq $want;
2696 if (!defined $objgot{$want}) {
2698 warning: git ls-remote suggests we want $lrefname
2699 warning: and it should refer to $want
2700 warning: but git fetch didn't fetch that object to any relevant ref.
2701 warning: This may be due to a race with someone updating the server.
2702 warning: Will try again...
2704 next FETCH_ITERATION;
2707 git-fetch @fspecs made $lrefname=$got but want git ls-remote @look says $want
2709 runcmd_ordryrun_local @git, qw(update-ref -m),
2710 "dgit fetch git fetch fixup", $lrefname, $want;
2711 $lrfetchrefs_f{$lrefname} = $want;
2716 if (defined $csuite) {
2717 printdebug "git_lrfetch_sane: tidying any old suite lrfetchrefs\n";
2718 git_for_each_ref("refs/dgit-fetch/$csuite", sub {
2719 my ($objid,$objtype,$lrefname,$reftail) = @_;
2720 next if $lrfetchrefs_f{$lrefname}; # $csuite eq $distro ?
2721 runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
2725 printdebug "git_lrfetch_sane: git fetch --no-insane emulation complete\n",
2726 Dumper(\%lrfetchrefs_f);
2729 sub git_fetch_us () {
2730 # Want to fetch only what we are going to use, unless
2731 # deliberately-not-ff, in which case we must fetch everything.
2733 my @specs = deliberately_not_fast_forward ? qw(tags/*) :
2735 (quiltmode_splitbrain
2736 ? (map { $_->('*',access_nomdistro) }
2737 \&debiantag_new, \&debiantag_maintview)
2738 : debiantags('*',access_nomdistro));
2739 push @specs, server_branch($csuite);
2740 push @specs, $rewritemap;
2741 push @specs, qw(heads/*) if deliberately_not_fast_forward;
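# (Added illustrative note with hypothetical names: in the ordinary
# case @specs ends up holding the distro tag patterns under "tags/",
# the server suite branch for $csuite (e.g. "dgit/sid") and the
# $rewritemap ref; with --deliberately-not-fast-forward we instead
# fetch "tags/*" and "heads/*".)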
2743 my $url = access_giturl();
2744 git_lrfetch_sane $url, 0, @specs;
2747 my @tagpats = debiantags('*',access_nomdistro);
2749 git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
2750 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2751 printdebug "currently $fullrefname=$objid\n";
2752 $here{$fullrefname} = $objid;
2754 git_for_each_ref([map { lrfetchrefs."/tags/".$_ } @tagpats], sub {
2755 my ($objid,$objtype,$fullrefname,$reftail) = @_;
2756 my $lref = "refs".substr($fullrefname, length(lrfetchrefs));
2757 printdebug "offered $lref=$objid\n";
2758 if (!defined $here{$lref}) {
2759 my @upd = (@git, qw(update-ref), $lref, $objid, '');
2760 runcmd_ordryrun_local @upd;
2761 lrfetchref_used $fullrefname;
2762 } elsif ($here{$lref} eq $objid) {
2763 lrfetchref_used $fullrefname;
2766 "Not updating $lref from $here{$lref} to $objid.\n";
2771 #---------- dsc and archive handling ----------
2773 sub mergeinfo_getclogp ($) {
2774 # Ensures that $mi->{Clogp} exists and returns it
2776 $mi->{Clogp} = commit_getclogp($mi->{Commit});
2779 sub mergeinfo_version ($) {
2780 return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
2783 sub fetch_from_archive_record_1 ($) {
2785 runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
2786 'DGIT_ARCHIVE', $hash;
2787 cmdoutput @git, qw(log -n2), $hash;
2788 # ... gives git a chance to complain if our commit is malformed
2791 sub fetch_from_archive_record_2 ($) {
2793 my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
2797 dryrun_report @upd_cmd;
2801 sub parse_dsc_field_def_dsc_distro () {
2802 $dsc_distro //= cfg qw(dgit.default.old-dsc-distro
2803 dgit.default.distro);
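# (Added explanatory note: judging by the regexes in parse_dsc_field
# below, the Dgit field takes one of two forms, either a bare git
# hash, or "HASH DISTRO TAG URL", hypothetically e.g.
#   Dgit: 1234beef... debian archive/debian/1.2-3 https://git.example.org/...
# where the distro, tag and url act as hints for fetching the named
# commit if we do not already have it.)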
2806 sub parse_dsc_field ($$) {
2807 my ($dsc, $what) = @_;
2809 foreach my $field (@ourdscfield) {
2810 $f = $dsc->{$field};
2815 progress "$what: NO git hash";
2816 parse_dsc_field_def_dsc_distro();
2817 } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
2818 = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
2819 progress "$what: specified git info ($dsc_distro)";
2820 $dsc_hint_tag = [ $dsc_hint_tag ];
2821 } elsif ($f =~ m/^\w+\s*$/) {
2823 parse_dsc_field_def_dsc_distro();
2824 $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
2826 progress "$what: specified git hash";
2828 fail "$what: invalid Dgit info";
2832 sub resolve_dsc_field_commit ($$) {
2833 my ($already_distro, $already_mapref) = @_;
2835 return unless defined $dsc_hash;
2838 defined $already_mapref &&
2839 ($already_distro eq $dsc_distro || !$chase_dsc_distro)
2840 ? $already_mapref : undef;
2844 my ($what, @fetch) = @_;
2846 local $idistro = $dsc_distro;
2847 my $lrf = lrfetchrefs;
2849 if (!$chase_dsc_distro) {
2851 "not chasing .dsc distro $dsc_distro: not fetching $what";
2856 ".dsc names distro $dsc_distro: fetching $what";
2858 my $url = access_giturl();
2859 if (!defined $url) {
2860 defined $dsc_hint_url or fail <<END;
2861 .dsc Dgit metadata is in context of distro $dsc_distro
2862 for which we have no configured url and .dsc provides no hint
2865 $dsc_hint_url =~ m#^([-+0-9a-zA-Z]+):# ? $1 :
2866 $dsc_hint_url =~ m#^/# ? 'file' : 'bad-syntax';
2867 parse_cfg_bool "dsc-url-proto-ok", 'false',
2868 cfg("dgit.dsc-url-proto-ok.$proto",
2869 "dgit.default.dsc-url-proto-ok")
2871 .dsc Dgit metadata is in context of distro $dsc_distro
2872 for which we have no configured url;
2873 .dsc provides hinted url with protocol $proto which is unsafe.
2874 (can be overridden by config - consult documentation)
2876 $url = $dsc_hint_url;
2879 git_lrfetch_sane $url, 1, @fetch;
2884 my $rewrite_enable = do {
2885 local $idistro = $dsc_distro;
2886 access_cfg('rewrite-map-enable', 'RETURN-UNDEF');
2889 if (parse_cfg_bool 'rewrite-map-enable', 'true', $rewrite_enable) {
2890 if (!defined $mapref) {
2891 my $lrf = $do_fetch->("rewrite map", $rewritemap) or return;
2892 $mapref = $lrf.'/'.$rewritemap;
2894 my $rewritemapdata = git_cat_file $mapref.':map';
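# (Added note: per the match below, the rewrite map blob holds one
# entry per line, "OLD-OBJID NEW-OBJID", with the replacement optional;
# a line naming $dsc_hash with no replacement means the .dsc's hash is
# to be disregarded rather than rewritten.)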
2895 if (defined $rewritemapdata
2896 && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))?/m) {
2898 "server's git history rewrite map contains a relevant entry!";
2901 if (defined $dsc_hash) {
2902 progress "using rewritten git hash in place of .dsc value";
2904 progress "server data says .dsc hash is to be disregarded";
2909 if (!defined git_cat_file $dsc_hash) {
2910 my @tags = map { "tags/".$_ } @$dsc_hint_tag;
2911 my $lrf = $do_fetch->("additional commits", @tags) &&
2912 defined git_cat_file $dsc_hash
2914 .dsc Dgit metadata requires commit $dsc_hash
2915 but we could not obtain that object anywhere.
2917 foreach my $t (@tags) {
2918 my $fullrefname = $lrf.'/'.$t;
2919 # print STDERR "CHK $t $fullrefname ".Dumper(\%lrfetchrefs_f);
2920 next unless $lrfetchrefs_f{$fullrefname};
2921 next unless is_fast_fwd "$fullrefname~0", $dsc_hash;
2922 lrfetchref_used $fullrefname;
2927 sub fetch_from_archive () {
2928 ensure_setup_existing_tree();
2930 # Ensures that lrref() is what is actually in the archive, one way
2931 # or another, according to us - ie this client's
2932 # appropriately-updated archive view. Also returns the commit id.
2933 # If there is nothing in the archive, leaves lrref alone and
2934 # returns undef. git_fetch_us must have already been called.
2938 parse_dsc_field($dsc, 'last upload to archive');
2939 resolve_dsc_field_commit access_basedistro,
2940 lrfetchrefs."/".$rewritemap
2942 progress "no version available from the archive";
2945 # If the archive's .dsc has a Dgit field, there are three
2946 # relevant git commitids we need to choose between and/or merge
2948 # 1. $dsc_hash: the Dgit field from the archive
2949 # 2. $lastpush_hash: the suite branch on the dgit git server
2950 # 3. $lastfetch_hash: our local tracking branch for the suite
2952 # These may all be distinct and need not be in any fast forward
2955 # If the dsc was pushed to this suite, then the server suite
2956 # branch will have been updated; but it might have been pushed to
2957 # a different suite and copied by the archive. Conversely a more
2958 # recent version may have been pushed with dgit but not appeared
2959 # in the archive (yet).
2961 # $lastfetch_hash may be awkward because archive imports
2962 # (particularly, imports of Dgit-less .dscs) are performed only as
2963 # needed on individual clients, so different clients may perform a
2964 # different subset of them - and these imports are only made
2965 # public during push. So $lastfetch_hash may represent a set of
2966 # imports different to a subsequent upload by a different dgit
2969 # Our approach is as follows:
2971 # As between $dsc_hash and $lastpush_hash: if $lastpush_hash is a
2972 # descendant of $dsc_hash, then it was pushed by a dgit user who
2973 # had based their work on $dsc_hash, so we should prefer it.
2974 # Otherwise, $dsc_hash was installed into this suite in the
2975 # archive other than by a dgit push, and (necessarily) after the
2976 # last dgit push into that suite (since a dgit push would have
2977 # been descended from the dgit server git branch); thus, in that
2978 # case, we prefer the archive's version (and produce a
2979 # pseudo-merge to overwrite the dgit server git branch).
2981 # (If there is no Dgit field in the archive's .dsc then
2982 # generate_commits_from_dsc uses the version numbers to decide
2983 # whether the suite branch or the archive is newer. If the suite
2984 # branch is newer it ignores the archive's .dsc; otherwise it
2985 # generates an import of the .dsc, and produces a pseudo-merge to
2986 # overwrite the suite branch with the archive contents.)
2988 # The outcome of that part of the algorithm is the `public view',
2989 # and is the same for all dgit clients: it does not depend on any
2990 # unpublished history in the local tracking branch.
2992 # As between the public view and the local tracking branch: The
2993 # local tracking branch is only updated by dgit fetch, and
2994 # whenever dgit fetch runs it includes the public view in the
2995 # local tracking branch. Therefore if the public view is not
2996 # descended from the local tracking branch, the local tracking
2997 # branch must contain history which was imported from the archive
2998 # but never pushed; and, its tip is now out of date. So, we make
2999 # a pseudo-merge to overwrite the old imports and stitch the old
3002 # Finally: we do not necessarily reify the public view (as
3003 # described above). This is so that we do not end up stacking two
3004 # pseudo-merges. So what we actually do is figure out the inputs
3005 # to any public view pseudo-merge and put them in @mergeinputs.
3008 # $mergeinputs[]{Commit}
3009 # $mergeinputs[]{Info}
3010 # $mergeinputs[0] is the one whose tree we use
3011 # @mergeinputs is in the order we use in the actual commit)
3014 # $mergeinputs[]{Message} is a commit message to use
3015 # $mergeinputs[]{ReverseParents} if def specifies that parent
3016 # list should be in opposite order
3017 # Such an entry has no Commit or Info. It applies only when found
3018 # in the last entry. (This ugliness is to support making
3019 # identical imports to previous dgit versions.)
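# (Added hedged worked example with hypothetical commits: call the
# archive .dsc's commit D, the server suite branch P, and the local
# tracking branch L. If P is a descendant of D, P alone is used as the
# merge input; if D is a descendant of P, D alone is used; if they
# have diverged, the inputs are (D, P), so the pseudo-merge takes D's
# tree and overwrites the server branch. L is then added as a further
# parent only if none of the chosen inputs already descends from it.)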
3021 my $lastpush_hash = git_get_ref(lrfetchref());
3022 printdebug "previous reference hash=$lastpush_hash\n";
3023 $lastpush_mergeinput = $lastpush_hash && {
3024 Commit => $lastpush_hash,
3025 Info => "dgit suite branch on dgit git server",
3028 my $lastfetch_hash = git_get_ref(lrref());
3029 printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
3030 my $lastfetch_mergeinput = $lastfetch_hash && {
3031 Commit => $lastfetch_hash,
3032 Info => "dgit client's archive history view",
3035 my $dsc_mergeinput = $dsc_hash && {
3036 Commit => $dsc_hash,
3037 Info => "Dgit field in .dsc from archive",
3041 my $del_lrfetchrefs = sub {
3044 printdebug "del_lrfetchrefs...\n";
3045 foreach my $fullrefname (sort keys %lrfetchrefs_d) {
3046 my $objid = $lrfetchrefs_d{$fullrefname};
3047 printdebug "del_lrfetchrefs: $objid $fullrefname\n";
3049 $gur ||= new IO::Handle;
3050 open $gur, "|-", qw(git update-ref --stdin) or die $!;
3052 printf $gur "delete %s %s\n", $fullrefname, $objid;
3055 close $gur or failedcmd "git update-ref delete lrfetchrefs";
3059 if (defined $dsc_hash) {
3060 ensure_we_have_orig();
3061 if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
3062 @mergeinputs = $dsc_mergeinput
3063 } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
3064 print STDERR <<END or die $!;
3066 Git commit in archive is behind the last version allegedly pushed/uploaded.
3067 Commit referred to by archive: $dsc_hash
3068 Last version pushed with dgit: $lastpush_hash
3071 @mergeinputs = ($lastpush_mergeinput);
3073 # Archive has .dsc which is not a descendant of the last dgit
3074 # push. This can happen if the archive moves .dscs about.
3075 # Just follow its lead.
3076 if (is_fast_fwd($lastpush_hash,$dsc_hash)) {
3077 progress "archive .dsc names newer git commit";
3078 @mergeinputs = ($dsc_mergeinput);
3080 progress "archive .dsc names other git commit, fixing up";
3081 @mergeinputs = ($dsc_mergeinput, $lastpush_mergeinput);
3085 @mergeinputs = generate_commits_from_dsc();
3086 # We have just done an import. Now, our import algorithm might
3087 # have been improved. But even so we do not want to generate
3088 # a new different import of the same package. So if the
3089 # version numbers are the same, just use our existing version.
3090 # If the version numbers are different, the archive has changed
3091 # (perhaps, rewound).
3092 if ($lastfetch_mergeinput &&
3093 !version_compare( (mergeinfo_version $lastfetch_mergeinput),
3094 (mergeinfo_version $mergeinputs[0]) )) {
3095 @mergeinputs = ($lastfetch_mergeinput);
3097 } elsif ($lastpush_hash) {
3098 # only in git, not in the archive yet
3099 @mergeinputs = ($lastpush_mergeinput);
3100 print STDERR <<END or die $!;
3102 Package not found in the archive, but has allegedly been pushed using dgit.
3106 printdebug "nothing found!\n";
3107 if (defined $skew_warning_vsn) {
3108 print STDERR <<END or die $!;
3110 Warning: relevant archive skew detected.
3111 Archive allegedly contains $skew_warning_vsn
3112 But we were not able to obtain any version from the archive or git.
3116 unshift @end, $del_lrfetchrefs;
3120 if ($lastfetch_hash &&
3122 my $h = $_->{Commit};
3123 $h and is_fast_fwd($lastfetch_hash, $h);
3124 # If true, one of the existing parents of this commit
3125 # is a descendant of the $lastfetch_hash, so we'll
3126 # be ff from that automatically.
3130 push @mergeinputs, $lastfetch_mergeinput;
3133 printdebug "fetch mergeinfos:\n";
3134 foreach my $mi (@mergeinputs) {
3136 printdebug " commit $mi->{Commit} $mi->{Info}\n";
3138 printdebug sprintf " ReverseParents=%d Message=%s",
3139 $mi->{ReverseParents}, $mi->{Message};
3143 my $compat_info= pop @mergeinputs
3144 if $mergeinputs[$#mergeinputs]{Message};
3146 @mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
3149 if (@mergeinputs > 1) {
3151 my $tree_commit = $mergeinputs[0]{Commit};
3153 my $tree = cmdoutput @git, qw(cat-file commit), $tree_commit;
3154 $tree =~ m/\n\n/; $tree = $`;
3155 $tree =~ m/^tree (\w+)$/m or die "$dsc_hash tree ?";
3158 # We use the changelog author of the package in question as the
3159 # author of this pseudo-merge. This is (roughly) correct if
3160 # this commit is simply representing a non-dgit upload.
3161 # (Roughly because it does not record sponsorship - but we
3162 # don't have sponsorship info because that's in the .changes,
3163 # which isn't in the archive.)
3165 # But, it might be that we are representing archive history
3166 # updates (including in-archive copies). These are not really
3167 # the responsibility of the person who created the .dsc, but
3168 # there is no-one whose name we should better use. (The
3169 # author of the .dsc-named commit is clearly worse.)
3171 my $useclogp = mergeinfo_getclogp $mergeinputs[0];
3172 my $author = clogp_authline $useclogp;
3173 my $cversion = getfield $useclogp, 'Version';
3175 my $mcf = ".git/dgit/mergecommit";
3176 open MC, ">", $mcf or die "$mcf $!";
3177 print MC <<END or die $!;
3181 my @parents = grep { $_->{Commit} } @mergeinputs;
3182 @parents = reverse @parents if $compat_info->{ReverseParents};
3183 print MC <<END or die $! foreach @parents;
3187 print MC <<END or die $!;
3193 if (defined $compat_info->{Message}) {
3194 print MC $compat_info->{Message} or die $!;
3196 print MC <<END or die $!;
3197 Record $package ($cversion) in archive suite $csuite
3201 my $message_add_info = sub {
3203 my $mversion = mergeinfo_version $mi;
3204 printf MC " %-20s %s\n", $mversion, $mi->{Info}
3208 $message_add_info->($mergeinputs[0]);
3209 print MC <<END or die $!;
3210 should be treated as descended from
3212 $message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
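# (Added illustrative sketch of the resulting pseudo-merge message in
# the ordinary case, with hypothetical package and versions:
#   Record foo (1.2-3) in archive suite sid
#
#     1.2-3                Dgit field in .dsc from archive
#   should be treated as descended from
#     1.2-2                dgit client's archive history view
# )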
3216 $hash = make_commit $mcf;
3218 $hash = $mergeinputs[0]{Commit};
3220 printdebug "fetch hash=$hash\n";
3223 my ($lasth, $what) = @_;
3224 return unless $lasth;
3225 die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
3228 $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
3230 $chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
3232 fetch_from_archive_record_1($hash);
3234 if (defined $skew_warning_vsn) {
3236 printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
3237 my $gotclogp = commit_getclogp($hash);
3238 my $got_vsn = getfield $gotclogp, 'Version';
3239 printdebug "SKEW CHECK GOT $got_vsn\n";
3240 if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
3241 print STDERR <<END or die $!;
3243 Warning: archive skew detected. Using the available version:
3244 Archive allegedly contains $skew_warning_vsn
3245 We were able to obtain only $got_vsn
3251 if ($lastfetch_hash ne $hash) {
3252 fetch_from_archive_record_2($hash);
3255 lrfetchref_used lrfetchref();
3257 check_gitattrs($hash, "fetched source tree");
3259 unshift @end, $del_lrfetchrefs;
3263 sub set_local_git_config ($$) {
3265 runcmd @git, qw(config), $k, $v;
3268 sub setup_mergechangelogs (;$) {
3270 return unless $always || access_cfg_bool(1, 'setup-mergechangelogs');
3272 my $driver = 'dpkg-mergechangelogs';
3273 my $cb = "merge.$driver";
3274 my $attrs = '.git/info/attributes';
3275 ensuredir '.git/info';
3277 open NATTRS, ">", "$attrs.new" or die "$attrs.new $!";
3278 if (!open ATTRS, "<", $attrs) {
3279 $!==ENOENT or die "$attrs: $!";
3283 next if m{^debian/changelog\s};
3284 print NATTRS $_, "\n" or die $!;
3286 ATTRS->error and die $!;
3289 print NATTRS "debian/changelog merge=$driver\n" or die $!;
3292 set_local_git_config "$cb.name", 'debian/changelog merge driver';
3293 set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
3295 rename "$attrs.new", "$attrs" or die "$attrs: $!";
3298 sub setup_useremail (;$) {
3300 return unless $always || access_cfg_bool(1, 'setup-useremail');
3303 my ($k, $envvar) = @_;
3304 my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
3305 return unless defined $v;
3306 set_local_git_config "user.$k", $v;
3309 $setup->('email', 'DEBEMAIL');
3310 $setup->('name', 'DEBFULLNAME');
3313 sub ensure_setup_existing_tree () {
3314 my $k = "remote.$remotename.skipdefaultupdate";
3315 my $c = git_get_config $k;
3316 return if defined $c;
3317 set_local_git_config $k, 'true';
3320 sub open_gitattrs () {
3321 my $gai = new IO::File ".git/info/attributes"
3323 or die "open .git/info/attributes: $!";
3327 sub is_gitattrs_setup () {
3328 my $gai = open_gitattrs();
3329 return 0 unless $gai;
3331 return 1 if m{^\[attr\]dgit-defuse-attrs\s};
3333 $gai->error and die $!;
3337 sub setup_gitattrs (;$) {
3339 return unless $always || access_cfg_bool(1, 'setup-gitattributes');
3341 if (is_gitattrs_setup()) {
3343 [attr]dgit-defuse-attrs already found in .git/info/attributes
3344 not doing further gitattributes setup
3348 my $af = ".git/info/attributes";
3349 ensuredir '.git/info';
3350 open GAO, "> $af.new" or die $!;
3351 print GAO <<END or die $!;
3353 [attr]dgit-defuse-attrs $negate_harmful_gitattrs
3354 # ^ see GITATTRIBUTES in dgit(7) and dgit setup-new-tree in dgit(1)
3356 my $gai = open_gitattrs();
3360 print GAO $_, "\n" or die $!;
3362 $gai->error and die $!;
3364 close GAO or die $!;
3365 rename "$af.new", "$af" or die "install $af: $!";
3368 sub setup_new_tree () {
3369 setup_mergechangelogs();
3374 sub check_gitattrs ($$) {
3375 my ($treeish, $what) = @_;
3377 return if is_gitattrs_setup;
3380 my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:");
3382 my $gafl = new IO::File;
3383 open $gafl, "-|", @cmd or die $!;
3386 s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die;
3388 next unless m{(?:^|/)\.gitattributes$};
3390 # oh dear, found one
3392 dgit: warning: $what contains .gitattributes
3393 dgit: .gitattributes have not been defused. Recommended: dgit setup-new-tree.
3398 # tree contains no .gitattributes files
3399 $?=0; $!=0; close $gafl or failedcmd @cmd;
3403 sub multisuite_suite_child ($$$) {
3404 my ($tsuite, $merginputs, $fn) = @_;
3405 # in child, sets things up, calls $fn->(), and returns undef
3406 # in parent, returns canonical suite name for $tsuite
3407 my $canonsuitefh = IO::File::new_tmpfile;
3408 my $pid = fork // die $!;
3412 $us .= " [$isuite]";
3413 $debugprefix .= " ";
3414 progress "fetching $tsuite...";
3415 canonicalise_suite();
3416 print $canonsuitefh $csuite, "\n" or die $!;
3417 close $canonsuitefh or die $!;
3421 (waitpid $pid, 0) == $pid or die $!;
3422 fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
3423 seek $canonsuitefh,0,0 or die $!;
3424 local $csuite = <$canonsuitefh>;
3425 die $! unless defined $csuite && chomp $csuite;
3427 printdebug "multisuite $tsuite missing\n";
3430 printdebug "multisuite $tsuite ok (canon=$csuite)\n";
3431 push @$merginputs, {
3438 sub fork_for_multisuite ($) {
3439 my ($before_fetch_merge) = @_;
3440 # if nothing unusual, just returns ''
3443 # returns 0 to caller in child, to do first of the specified suites
3444 # in child, $csuite is not yet set
3446 # returns 1 to caller in parent, to finish up anything needed after
3447 # in parent, $csuite is set to canonicalised portmanteau
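# (Added hedged example with hypothetical suite names: for $isuite
# "bookworm,-backports" the base suite "bookworm" is fetched first;
# the second element's leading "-" is expanded to "bookworm-backports"
# for fetching, then abbreviated back, so the parent's canonicalised
# $csuite is the portmanteau "bookworm,-backports".)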
3449 my $org_isuite = $isuite;
3450 my @suites = split /\,/, $isuite;
3451 return '' unless @suites > 1;
3452 printdebug "fork_for_multisuite: @suites\n";
3456 my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
3458 return 0 unless defined $cbasesuite;
3460 fail "package $package missing in (base suite) $cbasesuite"
3461 unless @mergeinputs;
3463 my @csuites = ($cbasesuite);
3465 $before_fetch_merge->();
3467 foreach my $tsuite (@suites[1..$#suites]) {
3468 $tsuite =~ s/^-/$cbasesuite-/;
3469 my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
3475 # xxx collect the ref here
3477 $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
3478 push @csuites, $csubsuite;
3481 foreach my $mi (@mergeinputs) {
3482 my $ref = git_get_ref $mi->{Ref};
3483 die "$mi->{Ref} ?" unless length $ref;
3484 $mi->{Commit} = $ref;
3487 $csuite = join ",", @csuites;
3489 my $previous = git_get_ref lrref;
3491 unshift @mergeinputs, {
3492 Commit => $previous,
3493 Info => "local combined tracking branch",
3495 "archive seems to have rewound: local tracking branch is ahead!",
3499 foreach my $ix (0..$#mergeinputs) {
3500 $mergeinputs[$ix]{Index} = $ix;
3503 @mergeinputs = sort {
3504 -version_compare(mergeinfo_version $a,
3505 mergeinfo_version $b) # highest version first
3507 $a->{Index} <=> $b->{Index}; # earliest in spec first
3513 foreach my $mi (@mergeinputs) {
3514 printdebug "multisuite merge check $mi->{Info}\n";
3515 foreach my $previous (@needed) {
3516 next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
3517 printdebug "multisuite merge un-needed $previous->{Info}\n";
3521 printdebug "multisuite merge this-needed\n";
3522 $mi->{Character} = '+';
3525 $needed[0]{Character} = '*';
3527 my $output = $needed[0]{Commit};
3530 printdebug "multisuite merge nontrivial\n";
3531 my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
3533 my $commit = "tree $tree\n";
3534 my $msg = "Combine archive branches $csuite [dgit]\n\n".
3535 "Input branches:\n";
3537 foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
3538 printdebug "multisuite merge include $mi->{Info}\n";
3539 $mi->{Character} //= ' ';
3540 $commit .= "parent $mi->{Commit}\n";
3541 $msg .= sprintf " %s %-25s %s\n",
3543 (mergeinfo_version $mi),
3546 my $authline = clogp_authline mergeinfo_getclogp $needed[0];
3548 " * marks the highest version branch, which choose to use\n".
3549 " + marks each branch which was not already an ancestor\n\n".
3550 "[dgit multi-suite $csuite]\n";
3552 "author $authline\n".
3553 "committer $authline\n\n";
3554 $output = make_commit_text $commit.$msg;
3555 printdebug "multisuite merge generated $output\n";
3558 fetch_from_archive_record_1($output);
3559 fetch_from_archive_record_2($output);
3561 progress "calculated combined tracking suite $csuite";
3566 sub clone_set_head () {
3567 open H, "> .git/HEAD" or die $!;
3568 print H "ref: ".lref()."\n" or die $!;
3571 sub clone_finish ($) {
3573 runcmd @git, qw(reset --hard), lrref();
3574 runcmd qw(bash -ec), <<'END';
3576 git ls-tree -r --name-only -z HEAD | \
3577 xargs -0r touch -h -r . --
3579 printdone "ready for work in $dstdir";
3583 # in multisuite, returns twice!
3584 # once in parent after first suite fetched,
3585 # and then again in child after everything is finished
3587 badusage "dry run makes no sense with clone" unless act_local();
3589 my $multi_fetched = fork_for_multisuite(sub {
3590 printdebug "multi clone before fetch merge\n";
3593 if ($multi_fetched) {
3594 printdebug "multi clone after fetch merge\n";
3596 clone_finish($dstdir);
3599 printdebug "clone main body\n";
3601 canonicalise_suite();
3602 my $hasgit = check_for_git();
3603 mkdir $dstdir or fail "create \`$dstdir': $!";
3605 runcmd @git, qw(init -q);
3608 my $giturl = access_giturl(1);
3609 if (defined $giturl) {
3610 runcmd @git, qw(remote add), 'origin', $giturl;
3613 progress "fetching existing git history";
3615 runcmd_ordryrun_local @git, qw(fetch origin);
3617 progress "starting new git history";
3619 fetch_from_archive() or no_such_package;
3620 my $vcsgiturl = $dsc->{'Vcs-Git'};
3621 if (length $vcsgiturl) {
3622 $vcsgiturl =~ s/\s+-b\s+\S+//g;
3623 runcmd @git, qw(remote add vcs-git), $vcsgiturl;
3625 clone_finish($dstdir);
3629 canonicalise_suite();
3630 if (check_for_git()) {
3633 fetch_from_archive() or no_such_package();
3634 printdone "fetched into ".lrref();
3638 my $multi_fetched = fork_for_multisuite(sub { });
3639 fetch() unless $multi_fetched; # parent
3640 return if $multi_fetched eq '0'; # child
3641 runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
3643 printdone "fetched to ".lrref()." and merged into HEAD";
3646 sub check_not_dirty () {
3647 foreach my $f (qw(local-options local-patch-header)) {
3648 if (stat_exists "debian/source/$f") {
3649 fail "git tree contains debian/source/$f";
3653 return if $ignoredirty;
3655 my @cmd = (@git, qw(diff --quiet HEAD));
3657 $!=0; $?=-1; system @cmd;
3660 fail "working tree is dirty (does not match HEAD)";
3666 sub commit_admin ($) {
3669 runcmd_ordryrun_local @git, qw(commit -m), $m;
3672 sub commit_quilty_patch () {
3673 my $output = cmdoutput @git, qw(status --porcelain);
3675 foreach my $l (split /\n/, $output) {
3676 next unless $l =~ m/\S/;
3677 if ($l =~ m{^(?:\?\?| M) (\.pc|debian/patches)}) {
3681 delete $adds{'.pc'}; # if there wasn't one before, don't add it
3683 progress "nothing quilty to commit, ok.";
3686 my @adds = map { s/[][*?\\]/\\$&/g; $_; } sort keys %adds;
3687 runcmd_ordryrun_local @git, qw(add -f), @adds;
3689 Commit Debian 3.0 (quilt) metadata
3691 [dgit ($our_version) quilt-fixup]
3695 sub get_source_format () {
3697 if (open F, "debian/source/options") {
3701 s/\s+$//; # ignore missing final newline
3703 my ($k, $v) = ($`, $'); #');
3704 $v =~ s/^"(.*)"$/$1/;
3710 F->error and die $!;
3713 die $! unless $!==&ENOENT;
3716 if (!open F, "debian/source/format") {
3717 die $! unless $!==&ENOENT;