# along with this program. If not, see <http://www.gnu.org/licenses/>.
use strict;
+$SIG{__WARN__} = sub { die $_[0]; };
use IO::Handle;
use Data::Dumper;
use LWP::UserAgent;
use Dpkg::Control::Hash;
use File::Path;
+use File::Temp qw(tempdir);
use File::Basename;
use Dpkg::Version;
use POSIX;
+use IPC::Open2;
+use Digest::SHA;
+use Digest::MD5;
+use Config;
-our $dgit_version = 0.14;
+use Debian::Dgit;
+
+our $our_version = 'UNRELEASED'; ###substituted###
+
+our $rpushprotovsn = 2;
our $isuite = 'unstable';
our $idistro;
our $package;
+our @ropts;
our $sign = 1;
-our $dryrun = 0;
+our $dryrun_level = 0;
our $changesfile;
+our $buildproductsdir = '..';
our $new_package = 0;
our $ignoredirty = 0;
-our $noquilt = 0;
+our $rmonerror = 1;
+our @deliberatelies;
+our %supersedes;
our $existing_package = 'dpkg';
our $cleanmode = 'dpkg-source';
+our $changes_since_version;
+our $quilt_mode;
+our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck';
+our $we_are_responder;
+our $initiator_tempdir;
our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
+our $suite_re = '[-+.0-9a-z]+';
+
our (@git) = qw(git);
our (@dget) = qw(dget);
+our (@curl) = qw(curl -f);
our (@dput) = qw(dput);
our (@debsign) = qw(debsign);
+our (@gpg) = qw(gpg);
our (@sbuild) = qw(sbuild -A);
+our (@ssh) = 'ssh';
+our (@dgit) = qw(dgit);
our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
our (@dpkggenchanges) = qw(dpkg-genchanges);
our (@mergechanges) = qw(mergechanges -f);
our (@changesopts) = ('');
-our %opts_opt_map = ('dget' => \@dget,
+our %opts_opt_map = ('dget' => \@dget, # accept for compatibility
+ 'curl' => \@curl,
'dput' => \@dput,
'debsign' => \@debsign,
+ 'gpg' => \@gpg,
'sbuild' => \@sbuild,
+ 'ssh' => \@ssh,
+ 'dgit' => \@dgit,
'dpkg-source' => \@dpkgsource,
'dpkg-buildpackage' => \@dpkgbuildpackage,
'dpkg-genchanges' => \@dpkggenchanges,
'ch' => \@changesopts,
'mergechanges' => \@mergechanges);
+our %opts_opt_cmdonly = ('gpg' => 1);
+
our $keyid;
-our $debug = 0;
-open DEBUG, ">/dev/null" or die $!;
+autoflush STDOUT 1;
our $remotename = 'dgit';
our @ourdscfield = qw(Dgit Vcs-Dgit-Master);
-our $branchprefix = 'dgit';
our $csuite;
+our $instead_distro;
sub lbranch () { return "$branchprefix/$csuite"; }
my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$';
sub lref () { return "refs/heads/".lbranch(); }
-sub lrref () { return "refs/remotes/$remotename/$branchprefix/$csuite"; }
-sub rrref () { return "refs/$branchprefix/$csuite"; }
-sub debiantag ($) {
- my ($v) = @_;
- $v =~ y/~:/_%/;
- return "debian/$v";
-}
+sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); }
+sub rrref () { return server_ref($csuite); }
sub stripepoch ($) {
my ($vsn) = @_;
return $vsn;
}
+sub srcfn ($$) {
+ my ($vsn,$sfx) = @_;
+ return "${package}_".(stripepoch $vsn).$sfx
+}
+
sub dscfn ($) {
my ($vsn) = @_;
- return "${package}_".(stripepoch $vsn).".dsc";
+ return srcfn($vsn,".dsc");
}
-sub changesopts () { return @changesopts[1..$#changesopts]; }
-
our $us = 'dgit';
+initdebug('');
+
+our @end;
+END {
+ local ($?);
+ foreach my $f (@end) {
+ eval { $f->(); };
+ warn "$us: cleanup: $@" if length $@;
+ }
+};
+
+our @signames = split / /, $Config{sig_name};
+
+sub waitstatusmsg () {
+ if (!$?) {
+ return "terminated, reporting successful completion";
+ } elsif (!($? & 255)) {
+ return "failed with error exit status ".WEXITSTATUS($?);
+ } elsif (WIFSIGNALED($?)) {
+ my $signum=WTERMSIG($?);
+ return "died due to fatal signal ".
+ ($signames[$signum] // "number $signum").
+ ($? & 128 ? " (core dumped)" : ""); # POSIX(3pm) has no WCOREDUMP
+ } else {
+ return "failed with unknown wait status ".$?;
+ }
+}
-sub fail { die "$us: @_\n"; }
+sub fail {
+ my $s = "@_\n";
+ my $prefix = $us.($we_are_responder ? " (build host)" : "").": ";
+ $s =~ s/^/$prefix/gm;
+ die $s;
+}
sub badcfg { print STDERR "$us: invalid configuration: @_\n"; exit 12; }
return "+".rrref().":".lrref();
}
+sub changedir ($) {
+ my ($newdir) = @_;
+ printdebug "CD $newdir\n";
+ chdir $newdir or die "chdir: $newdir: $!";
+}
+
+sub deliberately ($) {
+ return !!grep { $_[0] eq $_ } @deliberatelies;
+}
+
+#---------- remote protocol support, common ----------
+
+# remote push initiator/responder protocol:
+# < dgit-remote-push-ready [optional extra info ignored by old initiators]
+#
+# > file parsed-changelog
+# [indicates that output of dpkg-parsechangelog follows]
+# > data-block NBYTES
+# > [NBYTES bytes of data (no newline)]
+# [maybe some more blocks]
+# > data-end
+#
+# > file dsc
+# [etc]
+#
+# > file changes
+# [etc]
+#
+# > param head HEAD
+#
+# > want signed-tag
+# [indicates that signed tag is wanted]
+# < data-block NBYTES
+# < [NBYTES bytes of data (no newline)]
+# [maybe some more blocks]
+# < data-end
+# < files-end
+#
+# > want signed-dsc-changes
+# < data-block NBYTES [transfer of signed dsc]
+# [etc]
+# < data-block NBYTES [transfer of signed changes]
+# [etc]
+# < files-end
+#
+# > complete
+
+our $i_child_pid;
+
+sub i_child_report () {
+    # Sees if our child has died, and reaps it if so.  Returns a string
+    # describing how it died if it failed, or undef otherwise.
+ return undef unless $i_child_pid;
+ my $got = waitpid $i_child_pid, WNOHANG;
+ return undef if $got <= 0;
+ die unless $got == $i_child_pid;
+ $i_child_pid = undef;
+ return undef unless $?;
+ return "build host child ".waitstatusmsg();
+}
+
+sub badproto ($$) {
+ my ($fh, $m) = @_;
+ fail "connection lost: $!" if $fh->error;
+ fail "protocol violation; $m not expected";
+}
+
+sub badproto_badread ($$) {
+ my ($fh, $wh) = @_;
+ fail "connection lost: $!" if $!;
+ my $report = i_child_report();
+ fail $report if defined $report;
+ badproto $fh, "eof (reading $wh)";
+}
+
+sub protocol_expect (&$) {
+ my ($match, $fh) = @_;
+ local $_;
+ $_ = <$fh>;
+ defined && chomp or badproto_badread $fh, "protocol message";
+ if (wantarray) {
+ my @r = &$match;
+ return @r if @r;
+ } else {
+ my $r = &$match;
+ return $r if $r;
+ }
+ badproto $fh, "\`$_'";
+}
+
+sub protocol_send_file ($$) {
+ my ($fh, $ourfn) = @_;
+ open PF, "<", $ourfn or die "$ourfn: $!";
+ for (;;) {
+ my $d;
+ my $got = read PF, $d, 65536;
+ die "$ourfn: $!" unless defined $got;
+ last if !$got;
+ print $fh "data-block ".length($d)."\n" or die $!;
+ print $fh $d or die $!;
+ }
+ PF->error and die "$ourfn $!";
+ print $fh "data-end\n" or die $!;
+ close PF;
+}
+
+sub protocol_read_bytes ($$) {
+ my ($fh, $nbytes) = @_;
+ $nbytes =~ m/^[1-9]\d{0,5}$/ or badproto \*RO, "bad byte count";
+ my $d;
+ my $got = read $fh, $d, $nbytes;
+ $got==$nbytes or badproto_badread $fh, "data block";
+ return $d;
+}
+
+sub protocol_receive_file ($$) {
+ my ($fh, $ourfn) = @_;
+ printdebug "() $ourfn\n";
+ open PF, ">", $ourfn or die "$ourfn: $!";
+ for (;;) {
+ my ($y,$l) = protocol_expect {
+ m/^data-block (.*)$/ ? (1,$1) :
+ m/^data-end$/ ? (0,) :
+ ();
+ } $fh;
+ last unless $y;
+ my $d = protocol_read_bytes $fh, $l;
+ print PF $d or die $!;
+ }
+ close PF or die $!;
+}
+
+#---------- remote protocol support, responder ----------
+
+sub responder_send_command ($) {
+ my ($command) = @_;
+ return unless $we_are_responder;
+ # called even without $we_are_responder
+ printdebug ">> $command\n";
+ print PO $command, "\n" or die $!;
+}
+
+sub responder_send_file ($$) {
+ my ($keyword, $ourfn) = @_;
+ return unless $we_are_responder;
+ printdebug "]] $keyword $ourfn\n";
+ responder_send_command "file $keyword";
+ protocol_send_file \*PO, $ourfn;
+}
+
+sub responder_receive_files ($@) {
+ my ($keyword, @ourfns) = @_;
+ die unless $we_are_responder;
+ printdebug "[[ $keyword @ourfns\n";
+ responder_send_command "want $keyword";
+ foreach my $fn (@ourfns) {
+ protocol_receive_file \*PI, $fn;
+ }
+ printdebug "[[\$\n";
+ protocol_expect { m/^files-end$/ } \*PI;
+}
+
+#---------- remote protocol support, initiator ----------
+
+sub initiator_expect (&) {
+ my ($match) = @_;
+ protocol_expect { &$match } \*RO;
+}
+
+#---------- end remote code ----------
+
+sub progress {
+ if ($we_are_responder) {
+ my $m = join '', @_;
+ responder_send_command "progress ".length($m) or die $!;
+ print PO $m or die $!;
+ } else {
+ print @_, "\n";
+ }
+}
+
our $ua;
sub url_get {
$ua->env_proxy;
}
my $what = $_[$#_];
- print "downloading $what...\n";
+ progress "downloading $what...";
my $r = $ua->get(@_) or die $!;
return undef if $r->code == 404;
$r->is_success or fail "failed to fetch $what: ".$r->status_line;
- return $r->decoded_content();
+ return $r->decoded_content(charset => 'none');
}
-our ($dscdata,$dscurl,$dsc);
-
-sub printcmd {
- my $fh = shift @_;
- my $intro = shift @_;
- print $fh $intro or die $!;
- local $_;
- foreach my $a (@_) {
- $_ = $a;
- if (s{['\\]}{\\$&}g || m{\s} || m{[^-_./0-9a-z]}i) {
- print $fh " '$_'" or die $!;
- } else {
- print $fh " $_" or die $!;
- }
- }
- print $fh "\n" or die $!;
-}
+our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
sub failedcmd {
- { local ($!); printcmd \*STDERR, "$_[0]: failed command:", @_ or die $!; };
+ { local ($!); printcmd \*STDERR, "$us: failed command:", @_ or die $!; };
if ($!) {
fail "failed to fork/exec: $!";
- } elsif (!($? & 0xff)) {
- fail "subprocess failed with error exit status ".($?>>8);
} elsif ($?) {
- fail "subprocess crashed (wait status $?)";
+ fail "subprocess ".waitstatusmsg();
} else {
fail "subprocess produced invalid output";
}
}
sub runcmd {
- printcmd(\*DEBUG,"+",@_) if $debug>0;
+ debugcmd "+",@_;
$!=0; $?=0;
failedcmd @_ if system @_;
}
+sub act_local () { return $dryrun_level <= 1; }
+sub act_scary () { return !$dryrun_level; }
+
sub printdone {
- if (!$dryrun) {
- print "dgit ok: @_\n";
+ if (!$dryrun_level) {
+ progress "dgit ok: @_";
} else {
- print "would be ok: @_ (but dry run only)\n";
+ progress "would be ok: @_ (but dry run only)";
}
}
sub cmdoutput_errok {
die Dumper(\@_)." ?" if grep { !defined } @_;
- printcmd(\*DEBUG,"|",@_) if $debug>0;
+ debugcmd "|",@_;
open P, "-|", @_ or die $!;
my $d;
$!=0; $?=0;
{ local $/ = undef; $d = <P>; }
die $! if P->error;
- if (!close P) { print DEBUG "=>!$?\n" if $debug>0; return undef; }
+ if (!close P) { printdebug "=>!$?\n"; return undef; }
chomp $d;
$d =~ m/^.*/;
- print DEBUG "=> \`$&'",(length $' ? '...' : ''),"\n" if $debug>0; #';
+ printdebug "=> \`$&'",(length $' ? '...' : ''),"\n" if $debuglevel>0; #';
return $d;
}
}
sub dryrun_report {
- printcmd(\*STDOUT,"#",@_);
+ printcmd(\*STDERR,$debugprefix."#",@_);
}
sub runcmd_ordryrun {
- if (!$dryrun) {
+ if (act_scary()) {
+ runcmd @_;
+ } else {
+ dryrun_report @_;
+ }
+}
+
+sub runcmd_ordryrun_local {
+ if (act_local()) {
runcmd @_;
} else {
dryrun_report @_;
}
}
+sub shell_cmd {
+ my ($first_shell, @cmd) = @_;
+ return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
+}
+
our $helpmsg = <<END;
main usages:
dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
dgit [dgit-opts] build [git-buildpackage-opts|dpkg-buildpackage-opts]
dgit [dgit-opts] push [dgit-opts] [suite]
+ dgit [dgit-opts] rpush build-host:build-dir ...
important dgit options:
-k<keyid> sign tag and package with <keyid> instead of default
--dry-run -n do not change anything, but go through the motions
+ --damp-run -L like --dry-run but make local changes, without signing
--new -N allow introducing a new package
--debug -D increase debug level
-c<name>=<value> set git config option (used directly by dgit too)
exit 8;
}
+sub nextarg {
+ @ARGV or badusage "too few arguments";
+ return scalar shift @ARGV;
+}
+
sub cmd_help () {
print $helpmsg or die $!;
exit 0;
}
+our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
+
our %defcfg = ('dgit.default.distro' => 'debian',
'dgit.default.username' => '',
'dgit.default.archive-query-default-component' => 'main',
'dgit.default.ssh' => 'ssh',
- 'dgit-distro.debian.git-host' => 'git.debian.org',
+ 'dgit.default.archive-query' => 'madison:',
+ 'dgit.default.sshpsql-dbname' => 'service=projectb',
+ 'dgit-distro.debian.archive-query' => 'ftpmasterapi:',
+ 'dgit-distro.debian.git-host' => 'dgit-git.debian.net',
+ 'dgit-distro.debian.git-user-force' => 'dgit',
'dgit-distro.debian.git-proto' => 'git+ssh://',
- 'dgit-distro.debian.git-path' => '/git/dgit-repos/repos',
+ 'dgit-distro.debian.git-path' => '/dgit/debian/repos',
'dgit-distro.debian.git-check' => 'ssh-cmd',
- 'dgit-distro.debian.git-create' => 'ssh-cmd',
- 'dgit-distro.debian.sshdakls-host' => 'coccia.debian.org',
- 'dgit-distro.debian.sshdakls-dir' =>
- '/srv/ftp-master.debian.org/ftp/dists',
+ 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/',
+ 'dgit-distro.debian.archive-query-tls-key',
+ '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem',
+ 'dgit-distro.debian.diverts.alioth' => '/alioth',
+ 'dgit-distro.debian/alioth.git-host' => 'git.debian.org',
+ 'dgit-distro.debian/alioth.git-user-force' => '',
+ 'dgit-distro.debian/alioth.git-proto' => 'git+ssh://',
+ 'dgit-distro.debian/alioth.git-path' => '/git/dgit-repos/repos',
+ 'dgit-distro.debian/alioth.git-create' => 'ssh-cmd',
'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
- 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/');
+ 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
+ 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
+ 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
+ 'dgit-distro.ubuntu.git-check' => 'false',
+ 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu',
+ 'dgit-distro.test-dummy.ssh' => "$td/ssh",
+ 'dgit-distro.test-dummy.username' => "alice",
+ 'dgit-distro.test-dummy.git-check' => "ssh-cmd",
+ 'dgit-distro.test-dummy.git-create' => "ssh-cmd",
+ 'dgit-distro.test-dummy.git-url' => "$td/git",
+ 'dgit-distro.test-dummy.git-host' => "git",
+ 'dgit-distro.test-dummy.git-path' => "$td/git",
+ 'dgit-distro.test-dummy.archive-query' => "ftpmasterapi:",
+ 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
+ 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
+ 'dgit-distro.test-dummy.upload-host' => 'test-dummy',
+ );
sub cfg {
foreach my $c (@_) {
my @cmd = (@git, qw(config --), $c);
my $v;
{
- local ($debug) = $debug-1;
+ local ($debuglevel) = $debuglevel-1;
$v = cmdoutput_errok @cmd;
};
if ($?==0) {
my $dv = $defcfg{$c};
return $dv if defined $dv;
}
- badcfg "need value for one of: @_";
+ badcfg "need value for one of: @_\n".
+ "$us: distro or suite appears not to be (properly) supported";
+}
+
+sub access_basedistro () {
+ if (defined $idistro) {
+ return $idistro;
+ } else {
+ return cfg("dgit-suite.$isuite.distro",
+ "dgit.default.distro");
+ }
+}
+
+sub access_quirk () {
+ # returns (quirk name, distro to use instead or undef, quirk-specific info)
+ my $basedistro = access_basedistro();
+ my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk",
+ 'RETURN-UNDEF');
+ if (defined $backports_quirk) {
+ my $re = $backports_quirk;
+ $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
+ $re =~ s/\*/.*/g;
+ $re =~ s/\%/([-0-9a-z_]+)/
+ or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
+ if ($isuite =~ m/^$re$/) {
+ return ('backports',"$basedistro-backports",$1);
+ }
+ }
+ return ('none',undef);
}
-sub access_distro () {
- return cfg("dgit-suite.$isuite.distro",
- "dgit.default.distro");
+sub access_distros () {
+ # Returns list of distros to try, in order
+ #
+ # We want to try:
+ # 0. `instead of' distro name(s) we have been pointed to
+ # 1. the access_quirk distro, if any
+ # 2a. the user's specified distro, or failing that } basedistro
+ # 2b. the distro calculated from the suite }
+ my @l = access_basedistro();
+
+ my (undef,$quirkdistro) = access_quirk();
+ unshift @l, $quirkdistro;
+ unshift @l, $instead_distro;
+ return grep { defined } @l;
}
sub access_cfg (@) {
my (@keys) = @_;
- my $distro = $idistro || access_distro();
- my $value = cfg(map { ("dgit-distro.$distro.$_",
- "dgit.default.$_") } @keys);
+ my @cfgs;
+ # The nesting of these loops determines the search order. We put
+ # the key loop on the outside so that we search all the distros
+ # for each key, before going on to the next key. That means that
+ # if access_cfg is called with a more specific, and then a less
+ # specific, key, an earlier distro can override the less specific
+ # without necessarily overriding any more specific keys. (If the
+ # distro wants to override the more specific keys it can simply do
+ # so; whereas if we did the loop the other way around, it would be
+    # impossible for an earlier distro to override a less specific
+    # key but not the more specific ones without restating the unknown
+    # values of the more specific keys.)
+ my @realkeys;
+ my @rundef;
+ # We have to deal with RETURN-UNDEF specially, so that we don't
+ # terminate the search prematurely.
+ foreach (@keys) {
+ if (m/RETURN-UNDEF/) { push @rundef, $_; last; }
+ push @realkeys, $_
+ }
+ foreach my $d (access_distros()) {
+ push @cfgs, map { "dgit-distro.$d.$_" } @realkeys;
+ }
+ push @cfgs, map { "dgit.default.$_" } @realkeys;
+ push @cfgs, @rundef;
+ my $value = cfg(@cfgs);
return $value;
}
+sub string_to_ssh ($) {
+ my ($spec) = @_;
+ if ($spec =~ m/\s/) {
+ return qw(sh -ec), 'exec '.$spec.' "$@"', 'x';
+ } else {
+ return ($spec);
+ }
+}
+
+sub access_cfg_ssh () {
+ my $gitssh = access_cfg('ssh', 'RETURN-UNDEF');
+ if (!defined $gitssh) {
+ return @ssh;
+ } else {
+ return string_to_ssh $gitssh;
+ }
+}
+
+sub access_runeinfo ($) {
+ my ($info) = @_;
+ return ": dgit ".access_basedistro()." $info ;";
+}
+
sub access_someuserhost ($) {
my ($some) = @_;
- my $user = access_cfg("$some-user",'username');
+ my $user = access_cfg("$some-user-force", 'RETURN-UNDEF');
+ defined($user) && length($user) or
+ $user = access_cfg("$some-user",'username');
my $host = access_cfg("$some-host");
return length($user) ? "$user\@$host" : $host;
}
return access_someuserhost('git');
}
-sub access_giturl () {
+sub access_giturl (;$) {
+ my ($optional) = @_;
my $url = access_cfg('git-url','RETURN-UNDEF');
if (!defined $url) {
+ my $proto = access_cfg('git-proto', 'RETURN-UNDEF');
+ return undef unless defined $proto;
$url =
- access_cfg('git-proto').
+ $proto.
access_gituserhost().
access_cfg('git-path');
}
return "$url/$package.git";
}
-sub parsecontrolfh ($$@) {
- my ($fh, $desc, @opts) = @_;
- my %opts = ('name' => $desc, @opts);
- my $c = Dpkg::Control::Hash->new(%opts);
- $c->parse($fh) or die "parsing of $desc failed";
+sub parsecontrolfh ($$;$) {
+ my ($fh, $desc, $allowsigned) = @_;
+ our $dpkgcontrolhash_noissigned;
+ my $c;
+ for (;;) {
+ my %opts = ('name' => $desc);
+ $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
+ $c = Dpkg::Control::Hash->new(%opts);
+ $c->parse($fh,$desc) or die "parsing of $desc failed";
+ last if $allowsigned;
+ last if $dpkgcontrolhash_noissigned;
+ my $issigned= $c->get_option('is_pgp_signed');
+ if (!defined $issigned) {
+ $dpkgcontrolhash_noissigned= 1;
+ seek $fh, 0,0 or die "seek $desc: $!";
+ } elsif ($issigned) {
+ fail "control file $desc is (already) PGP-signed. ".
+ " Note that dgit push needs to modify the .dsc and then".
+ " do the signature itself";
+ } else {
+ last;
+ }
+ }
return $c;
}
sub parsechangelog {
my $c = Dpkg::Control::Hash->new();
my $p = new IO::Handle;
- my @cmd = (qw(dpkg-parsechangelog));
+ my @cmd = (qw(dpkg-parsechangelog), @_);
open $p, '-|', @cmd or die $!;
$c->parse($p);
$?=0; $!=0; close $p or failedcmd @cmd;
return $c;
}
+sub git_get_ref ($) {
+ my ($refname) = @_;
+ my $got = cmdoutput_errok @git, qw(show-ref --), $refname;
+ if (!defined $got) {
+ $?==256 or fail "git show-ref failed (status $?)";
+ printdebug "ref $refname= [show-ref exited 1]\n";
+ return '';
+ }
+ if ($got =~ m/^(\w+) \Q$refname\E$/m) {
+ printdebug "ref $refname=$1\n";
+ return $1;
+ } else {
+ printdebug "ref $refname= [no match]\n";
+ return '';
+ }
+}
+
+sub must_getcwd () {
+ my $d = getcwd();
+ defined $d or fail "getcwd failed: $!";
+ return $d;
+}
+
our %rmad;
sub archive_query ($) {
my ($method) = @_;
my $query = access_cfg('archive-query','RETURN-UNDEF');
- if (!defined $query) {
- my $distro = access_distro();
- if ($distro eq 'debian') {
- $query = "sshdakls:".
- access_someuserhost('sshdakls').':'.
- access_cfg('sshdakls-dir');
- } else {
- $query = "madison:$distro";
- }
- }
$query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
my $proto = $1;
my $data = $'; #';
{ no strict qw(refs); &{"${method}_${proto}"}($proto,$data); }
}
-sub archive_query_madison ($$) {
- my ($proto,$data) = @_;
- die unless $proto eq 'madison';
- $rmad{$package} ||= cmdoutput
- qw(rmadison -asource),"-s$isuite","-u$data",$package;
- my $rmad = $rmad{$package};
- return madison_parse($rmad);
+sub pool_dsc_subpath ($$) {
+ my ($vsn,$component) = @_; # $package is implict arg
+ my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
+ return "/pool/$component/$prefix/$package/".dscfn($vsn);
+}
+
+#---------- `ftpmasterapi' archive query method (nascent) ----------
+
+sub archive_api_query_cmd ($) {
+ my ($subpath) = @_;
+ my @cmd = qw(curl -sS);
+ my $url = access_cfg('archive-query-url');
+ if ($url =~ m#^https://([-.0-9a-z]+)/#) {
+ my $host = $1;
+ my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF');
+ foreach my $key (split /\:/, $keys) {
+ $key =~ s/\%HOST\%/$host/g;
+ if (!stat $key) {
+ fail "for $url: stat $key: $!" unless $!==ENOENT;
+ next;
+ }
+ push @cmd, "--ca-certificate=$key", "--ca-directory=/dev/enoent";
+ last;
+ }
+ }
+ push @cmd, $url.$subpath;
+ return @cmd;
+}
+
+sub api_query ($$) {
+ use JSON;
+ my ($data, $subpath) = @_;
+ badcfg "ftpmasterapi archive query method takes no data part"
+ if length $data;
+ my @cmd = archive_api_query_cmd($subpath);
+ my $json = cmdoutput @cmd;
+ return decode_json($json);
}
-sub archive_query_sshdakls ($$) {
+sub canonicalise_suite_ftpmasterapi () {
my ($proto,$data) = @_;
- $data =~ s/:.*// or badcfg "invalid sshdakls method string \`$data'";
- my $dakls = cmdoutput
- access_cfg('ssh'), $data, qw(dak ls -asource),"-s$isuite",$package;
- return madison_parse($dakls);
+ my $suites = api_query($data, 'suites');
+ my @matched;
+ foreach my $entry (@$suites) {
+ next unless grep {
+ my $v = $entry->{$_};
+ defined $v && $v eq $isuite;
+ } qw(codename name);
+ push @matched, $entry;
+ }
+ fail "unknown suite $isuite" unless @matched;
+ my $cn;
+ eval {
+ @matched==1 or die "multiple matches for suite $isuite\n";
+ $cn = "$matched[0]{codename}";
+ defined $cn or die "suite $isuite info has no codename\n";
+ $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
+ };
+ die "bad ftpmaster api response: $@\n".Dumper(\@matched)
+ if length $@;
+ return $cn;
+}
+
+sub archive_query_ftpmasterapi () {
+ my ($proto,$data) = @_;
+ my $info = api_query($data, "dsc_in_suite/$isuite/$package");
+ my @rows;
+ my $digester = Digest::SHA->new(256);
+ foreach my $entry (@$info) {
+ eval {
+ my $vsn = "$entry->{version}";
+ my ($ok,$msg) = version_check $vsn;
+ die "bad version: $msg\n" unless $ok;
+ my $component = "$entry->{component}";
+ $component =~ m/^$component_re$/ or die "bad component";
+	    my $filename = "$entry->{filename}";
+ $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
+ or die "bad filename";
+ my $sha256sum = "$entry->{sha256sum}";
+ $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
+ push @rows, [ $vsn, "/pool/$component/$filename",
+ $digester, $sha256sum ];
+ };
+ die "bad ftpmaster api response: $@\n".Dumper($entry)
+ if length $@;
+ }
+ @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
+ return @rows;
}
-sub canonicalise_suite_sshdakls ($$) {
+#---------- `madison' archive query method ----------
+
+sub archive_query_madison {
+ return map { [ @$_[0..1] ] } madison_get_parse(@_);
+}
+
+sub madison_get_parse {
my ($proto,$data) = @_;
- $data =~ m/:/ or badcfg "invalid sshdakls method string \`$data'";
- my @cmd =
- (access_cfg('ssh'), $`,
- "set -e; cd $';".
- " if test -h $isuite; then readlink $isuite; exit 0; fi;".
- " if test -d $isuite; then echo $isuite; exit 0; fi;".
- " exit 1");
- my $dakls = cmdoutput @cmd;
- failedcmd @cmd unless $dakls =~ m/^\w/;
- return $dakls;
-}
-
-sub madison_parse ($) {
- my ($rmad) = @_;
+ die unless $proto eq 'madison';
+ if (!length $data) {
+ $data= access_cfg('madison-distro','RETURN-UNDEF');
+ $data //= access_basedistro();
+ }
+ $rmad{$proto,$data,$package} ||= cmdoutput
+ qw(rmadison -asource),"-s$isuite","-u$data",$package;
+ my $rmad = $rmad{$proto,$data,$package};
+
my @out;
foreach my $l (split /\n/, $rmad) {
$l =~ m{^ \s*( [^ \t|]+ )\s* \|
\s*( [^ \t|]+ )\s* \|
\s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
- \s*( [^ \t|]+ )\s* }x or die "$rmad $?";
+ \s*( [^ \t|]+ )\s* }x or die "$rmad ?";
$1 eq $package or die "$rmad $package ?";
my $vsn = $2;
my $newsuite = $3;
$component = access_cfg('archive-query-default-component');
}
$5 eq 'source' or die "$rmad ?";
- my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1);
- my $subpath = "/pool/$component/$prefix/$package/".dscfn($vsn);
- push @out, [$vsn,$subpath,$newsuite];
+ push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
}
- return sort { -version_compare_string($a->[0],$b->[0]); } @out;
+ return sort { -version_compare($a->[0],$b->[0]); } @out;
}
-sub canonicalise_suite_madison ($$) {
- my @r = archive_query_madison($_[0],$_[1]);
+sub canonicalise_suite_madison {
+ # madison canonicalises for us
+ my @r = madison_get_parse(@_);
@r or fail
"unable to canonicalise suite using package $package".
" which does not appear to exist in suite $isuite;".
return $r[0][2];
}
+#---------- `sshpsql' archive query method ----------
+
+# Run an SQL query against the remote archive database over ssh
+# (the `sshpsql' access method).  $data is "userhost:dbname"; if empty
+# it is defaulted from the sshpsql config keys.  $runeinfo is a
+# human-readable tag embedded in the remote rune (via access_runeinfo)
+# so the remote side can see what the query is for.  $sql is the query
+# text, passed to `psql -A -c'.
+# Returns a list of rows, each an array ref of column values, with
+# psql's header row and "(N rows)" trailer stripped and cross-checked.
+sub sshpsql ($$$) {
+ my ($data,$runeinfo,$sql) = @_;
+ if (!length $data) {
+ $data= access_someuserhost('sshpsql').':'.
+ access_cfg('sshpsql-dbname');
+ }
+ $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
+ # split on the first ":" — $` is userhost, $' is the database name
+ my ($userhost,$dbname) = ($`,$'); #';
+ my @rows;
+ # force the C locale so psql's decorations parse predictably;
+ # -A (unaligned) makes columns "|"-separated, matching the split below
+ my @cmd = (access_cfg_ssh, $userhost,
+ access_runeinfo("ssh-psql $runeinfo").
+ " export LC_MESSAGES=C; export LC_CTYPE=C;".
+ " ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
+ debugcmd "|",@cmd;
+ open P, "-|", @cmd or die $!;
+ while (<P>) {
+ chomp or die;
+ printdebug("$debugprefix>|$_|\n");
+ push @rows, $_;
+ }
+ $!=0; $?=0; close P or failedcmd @cmd;
+ @rows or die;
+ # last line is psql's "(N rows)" trailer; use it to sanity-check
+ # that we got exactly the header row plus N data rows
+ my $nrows = pop @rows;
+ $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
+ @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
+ @rows = map { [ split /\|/, $_ ] } @rows;
+ # discard the header row, but keep its column count to verify
+ # every data row has the same number of columns
+ my $ncols = scalar @{ shift @rows };
+ die if grep { scalar @$_ != $ncols } @rows;
+ return @rows;
+}
+
+# Allow-list guard for values interpolated into the SQL below: dies if
+# any argument contains a character outside [-+=:_.,/0-9a-zA-Z]
+# ($& is the first offending character).
+sub sql_injection_check {
+ foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
+}
+
+# Archive query via sshpsql: looks up $package's .dsc files in $isuite
+# in the dak-style database.  Returns rows of
+# [version, /pool/... subpath, Digest::SHA(256) digester, sha256sum],
+# sorted newest version first.  $isuite and $package are interpolated
+# directly into the SQL heredoc, hence the sql_injection_check first.
+sub archive_query_sshpsql ($$) {
+ my ($proto,$data) = @_;
+ sql_injection_check $isuite, $package;
+ my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
+ SELECT source.version, component.name, files.filename, files.sha256sum
+ FROM source
+ JOIN src_associations ON source.id = src_associations.source
+ JOIN suite ON suite.id = src_associations.suite
+ JOIN dsc_files ON dsc_files.source = source.id
+ JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
+ JOIN component ON component.id = files_archive_map.component_id
+ JOIN files ON files.id = dsc_files.file
+ WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
+ AND source.source='$package'
+ AND files.filename LIKE '%.dsc';
+END
+ @rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
+ # one shared digester object is stored in every row; callers reset()
+ # it before use (see the fetch loop / complete_file_from_dsc)
+ my $digester = Digest::SHA->new(256);
+ @rows = map {
+ my ($vsn,$component,$filename,$sha256sum) = @$_;
+ [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
+ } @rows;
+ return @rows;
+}
+
+# Canonicalise $isuite via sshpsql: maps a suite name or codename to
+# the codename.  Fails if the suite is unknown; dies if the database
+# reports more than one match.
+sub canonicalise_suite_sshpsql ($$) {
+ my ($proto,$data) = @_;
+ sql_injection_check $isuite;
+ my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
+ SELECT suite.codename
+ FROM suite where suite_name='$isuite' or codename='$isuite';
+END
+ @rows = map { $_->[0] } @rows;
+ fail "unknown suite $isuite" unless @rows;
+ die "ambiguous $isuite: @rows ?" if @rows>1;
+ return $rows[0];
+}
+
+#---------- `dummycat' archive query method ----------
+
+# Canonicalise $isuite for the `dummycat' (test/dummy) method: reads
+# the canonical name from "$data/suite.$isuite" if that file exists,
+# otherwise returns $isuite unchanged.
+sub canonicalise_suite_dummycat ($$) {
+ my ($proto,$data) = @_;
+ my $dpath = "$data/suite.$isuite";
+ if (!open C, "<", $dpath) {
+ $!==ENOENT or die "$dpath: $!";
+ printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
+ return $isuite;
+ }
+ $!=0; $_ = <C>;
+ # NOTE(review): chomp fails here both for a missing final newline and
+ # for an empty file; in the latter case $! may not describe the real
+ # problem — verify intended
+ chomp or die "$dpath: $!";
+ close C;
+ printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
+ return $_;
+}
+
+# Archive query for the `dummycat' method: reads rows from the file
+# "$data/package.$csuite.$package".  Each non-comment, non-blank line
+# must contain exactly two whitespace-separated fields (presumably
+# version and subpath, matching the other query methods' first two
+# columns — TODO confirm).  Returns rows sorted newest version first,
+# or the empty list if the file does not exist.
+sub archive_query_dummycat ($$) {
+ my ($proto,$data) = @_;
+ canonicalise_suite();
+ my $dpath = "$data/package.$csuite.$package";
+ if (!open C, "<", $dpath) {
+ $!==ENOENT or die "$dpath: $!";
+ printdebug "dummycat query $csuite $package $dpath ENOENT\n";
+ return ();
+ }
+ my @rows;
+ while (<C>) {
+ next if m/^\#/;
+ next unless m/\S/;
+ die unless chomp;
+ printdebug "dummycat query $csuite $package $dpath | $_\n";
+ my @row = split /\s+/, $_;
+ @row==2 or die "$dpath: $_ ?";
+ push @rows, \@row;
+ }
+ C->error and die "$dpath: $!";
+ close C;
+ return sort { -version_compare($a->[0],$b->[0]); } @rows;
+}
+
+#---------- archive query entrypoints and rest of program ----------
+
sub canonicalise_suite () {
return if defined $csuite;
fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
$csuite = archive_query('canonicalise_suite');
if ($isuite ne $csuite) {
- # madison canonicalises for us
- print "canonical suite name for $isuite is $csuite\n";
+ progress "canonical suite name for $isuite is $csuite";
}
}
canonicalise_suite();
my @vsns = archive_query('archive_query');
foreach my $vinfo (@vsns) {
- my ($vsn,$subpath) = @$vinfo;
+ my ($vsn,$subpath,$digester,$digest) = @$vinfo;
$dscurl = access_cfg('mirror').$subpath;
$dscdata = url_get($dscurl);
- next unless defined $dscdata;
+ if (!$dscdata) {
+ $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
+ next;
+ }
+ if ($digester) {
+ $digester->reset();
+ $digester->add($dscdata);
+ my $got = $digester->hexdigest();
+ $got eq $digest or
+ fail "$dscurl has hash $got but".
+ " archive told us to expect $digest";
+ }
my $dscfh = new IO::File \$dscdata, '<' or die $!;
- print DEBUG Dumper($dscdata) if $debug>1;
- $dsc = parsecontrolfh($dscfh,$dscurl, allow_pgp=>1);
- print DEBUG Dumper($dsc) if $debug>1;
+ printdebug Dumper($dscdata) if $debuglevel>1;
+ $dsc = parsecontrolfh($dscfh,$dscurl,1);
+ printdebug Dumper($dsc) if $debuglevel>1;
my $fmt = getfield $dsc, 'Format';
fail "unsupported source format $fmt, sorry" unless $format_ok{$fmt};
- return $dsc;
+ $dsc_checked = !!$digester;
+ return;
}
- return undef;
+ $dsc = undef;
}
+sub check_for_git ();
sub check_for_git () {
# returns 0 or 1
my $how = access_cfg('git-check');
if ($how eq 'ssh-cmd') {
my @cmd =
- (access_cfg('ssh'),access_gituserhost(),
+ (access_cfg_ssh, access_gituserhost(),
+ access_runeinfo("git-check $package").
" set -e; cd ".access_cfg('git-path').";".
" if test -d $package.git; then echo 1; else echo 0; fi");
my $r= cmdoutput @cmd;
+ if ($r =~ m/^divert (\w+)$/) {
+ my $divert=$1;
+ my ($usedistro,) = access_distros();
+ $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
+ $instead_distro =~ s{^/}{ access_basedistro()."/" }e;
+ printdebug "diverting $divert so using distro $instead_distro\n";
+ return check_for_git();
+ }
failedcmd @cmd unless $r =~ m/^[01]$/;
return $r+0;
+ } elsif ($how eq 'true') {
+ return 1;
+ } elsif ($how eq 'false') {
+ return 0;
} else {
badcfg "unknown git-check \`$how'";
}
my $how = access_cfg('git-create');
if ($how eq 'ssh-cmd') {
runcmd_ordryrun
- (access_cfg('ssh'),access_gituserhost(),
+ (access_cfg_ssh, access_gituserhost(),
+ access_runeinfo("git-create $package").
"set -e; cd ".access_cfg('git-path').";".
" cp -a _template $package.git");
+ } elsif ($how eq 'true') {
+ # nothing to do
} else {
badcfg "unknown git-create \`$how'";
}
}
-our ($dsc_hash,$upload_hash);
+our ($dsc_hash,$lastpush_hash);
our $ud = '.git/dgit/unpack';
mkdir $ud or die $!;
}
+# Initialise a git work tree in the current directory (inside the
+# unpack dir $ud), sharing the parent repository's object store by
+# replacing .git/objects with a symlink four levels up.
+sub mktree_in_ud_here () {
+ runcmd qw(git init -q);
+ rmtree('.git/objects');
+ symlink '../../../../objects','.git/objects' or die $!;
+}
+
+# Write the current index as a git tree object and return its hash
+# (sanity-checked to be a single word).
+sub git_write_tree () {
+ my $tree = cmdoutput @git, qw(write-tree);
+ $tree =~ m/^\w+$/ or die "$tree ?";
+ return $tree;
+}
+
sub mktree_in_ud_from_only_subdir () {
# changes into the subdir
my (@dirs) = <*/.>;
die unless @dirs==1;
$dirs[0] =~ m#^([^/]+)/\.$# or die;
my $dir = $1;
- chdir $dir or die "$dir $!";
- fail "source package contains .git directory" if stat '.git';
- die $! unless $!==&ENOENT;
- runcmd qw(git init -q);
- rmtree('.git/objects');
- symlink '../../../../objects','.git/objects' or die $!;
+ changedir $dir;
+ fail "source package contains .git directory" if stat_exists '.git';
+ mktree_in_ud_here();
+ my $format=get_source_format();
+ if (madformat($format)) {
+ rmtree '.pc';
+ }
runcmd @git, qw(add -Af);
- my $tree = cmdoutput @git, qw(write-tree);
- $tree =~ m/^\w+$/ or die "$tree ?";
+ my $tree=git_write_tree();
return ($tree,$dir);
}
map { $_->{Filename} } dsc_files_info();
}
-sub is_orig_file ($) {
- local ($_) = @_;
- m/\.orig(?:-\w+)?\.tar\.\w+$/;
+sub is_orig_file ($;$) {
+ local ($_) = $_[0];
+ my $base = $_[1];
+ m/\.orig(?:-\w+)?\.tar\.\w+$/ or return 0;
+ defined $base or return 1;
+ return $` eq $base;
}
sub make_commit ($) {
return cmdoutput @git, qw(hash-object -w -t commit), $file;
}
+# Build a git author/committer line ("Name <email> epoch tz") from a
+# parsed changelog entry: Maintainer (truncated at the first comma)
+# plus the Date field converted via date(1).  Fails with a clear
+# message if the result does not look like a valid git ident line.
+sub clogp_authline ($) {
+ my ($clogp) = @_;
+ my $author = getfield $clogp, 'Maintainer';
+ $author =~ s#,.*##ms;
+ my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
+ my $authline = "$author $date";
+ $authline =~ m/^[^<>]+ \<\S+\> \d+ [-+]\d+$/ or
+ fail "unexpected commit author line format \`$authline'".
+ " (was generated from changelog Maintainer field)";
+ return $authline;
+}
+
sub generate_commit_from_dsc () {
prep_ud();
- chdir $ud or die $!;
- my @files;
- foreach my $f (dsc_files()) {
+ changedir $ud;
+
+ foreach my $fi (dsc_files_info()) {
+ my $f = $fi->{Filename};
die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
- push @files, $f;
+
link "../../../$f", $f
or $!==&ENOENT
or die "$f $!";
+
+ complete_file_from_dsc('.', $fi);
+
+ if (is_orig_file($f)) {
+ link $f, "../../../../$f"
+ or $!==&EEXIST
+ or die "$f $!";
+ }
}
- runcmd @dget, qw(--), $dscurl;
- foreach my $f (grep { is_orig_file($_) } @files) {
- link $f, "../../../../$f"
- or $!==&EEXIST
- or die "$f $!";
- }
+
+ my $dscfn = "$package.dsc";
+
+ open D, ">", $dscfn or die "$dscfn: $!";
+ print D $dscdata or die "$dscfn: $!";
+ close D or die "$dscfn: $!";
+ my @cmd = qw(dpkg-source);
+ push @cmd, '--no-check' if $dsc_checked;
+ push @cmd, qw(-x --), $dscfn;
+ runcmd @cmd;
+
my ($tree,$dir) = mktree_in_ud_from_only_subdir();
runcmd qw(sh -ec), 'dpkg-parsechangelog >../changelog.tmp';
my $clogp = parsecontrol('../changelog.tmp',"commit's changelog");
- my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
- my $author = getfield $clogp, 'Maintainer';
- $author =~ s#,.*##ms;
- my $authline = "$author $date";
- $authline =~ m/^[^<>]+ \<\S+\> \d+ [-+]\d+$/ or
- fail "unexpected commit author line format \`$authline'".
- " (was generated from changelog Maintainer field)";
+ my $authline = clogp_authline $clogp;
my $changes = getfield $clogp, 'Changes';
open C, ">../commit.tmp" or die $!;
print C <<END or die $!;
close C or die $!;
my $outputhash = make_commit qw(../commit.tmp);
my $cversion = getfield $clogp, 'Version';
- print "synthesised git commit from .dsc $cversion\n";
- if ($upload_hash) {
- runcmd @git, qw(reset --hard), $upload_hash;
+ progress "synthesised git commit from .dsc $cversion";
+ if ($lastpush_hash) {
+ runcmd @git, qw(reset --hard), $lastpush_hash;
runcmd qw(sh -ec), 'dpkg-parsechangelog >>../changelogold.tmp';
my $oldclogp = parsecontrol('../changelogold.tmp','previous changelog');
my $oversion = getfield $oldclogp, 'Version';
my $vcmp =
- version_compare_string($oversion, $cversion);
+ version_compare($oversion, $cversion);
if ($vcmp < 0) {
# git upload/ is earlier vsn than archive, use archive
open C, ">../commit2.tmp" or die $!;
print C <<END or die $!;
tree $tree
-parent $upload_hash
+parent $lastpush_hash
parent $outputhash
author $authline
committer $authline
Last allegedly pushed/uploaded: $oversion (newer or same)
$later_warning_msg
END
- $outputhash = $upload_hash;
+ $outputhash = $lastpush_hash;
} else {
- $outputhash = $upload_hash;
+ $outputhash = $lastpush_hash;
}
}
- chdir '../../../..' or die $!;
+ changedir '../../../..';
runcmd @git, qw(update-ref -m),"dgit fetch import $cversion",
'DGIT_ARCHIVE', $outputhash;
cmdoutput @git, qw(log -n2), $outputhash;
return $outputhash;
}
+sub complete_file_from_dsc ($$) {
+ # NOTE(review): `our' makes these package globals that persist and
+ # can be clobbered across calls; `my' looks intended — verify
+ our ($dstdir, $fi) = @_;
+ # Ensures that we have, in $dir, the file $fi, with the correct
+ # contents. (Downloading it from alongside $dscurl if necessary.)
+
+ my $f = $fi->{Filename};
+ my $tf = "$dstdir/$f";
+ my $downloaded = 0;
+
+ if (stat_exists $tf) {
+ progress "using existing $f";
+ } else {
+ # fetch the file from the same directory as the .dsc
+ my $furl = $dscurl;
+ $furl =~ s{/[^/]+$}{};
+ $furl .= "/$f";
+ die "$f ?" unless $f =~ m/^${package}_/;
+ die "$f ?" if $f =~ m#/#;
+ runcmd_ordryrun_local @curl,qw(-o),$tf,'--',"$furl";
+ # NOTE(review): `next' inside a sub exits the *caller's* loop and
+ # emits "Exiting subroutine via next", which the fatal __WARN__
+ # handler at the top of the file turns into a die — verify
+ next if !act_local();
+ $downloaded = 1;
+ }
+
+ # verify the file against the digest demanded by the .dsc
+ open F, "<", "$tf" or die "$tf: $!";
+ $fi->{Digester}->reset();
+ $fi->{Digester}->addfile(*F);
+ F->error and die $!;
+ my $got = $fi->{Digester}->hexdigest();
+ $got eq $fi->{Hash} or
+ fail "file $f has hash $got but .dsc".
+ " demands hash $fi->{Hash} ".
+ ($downloaded ? "(got wrong file from archive!)"
+ : "(perhaps you should delete this file?)");
+}
+
sub ensure_we_have_orig () {
foreach my $fi (dsc_files_info()) {
my $f = $fi->{Filename};
next unless is_orig_file($f);
- if (open F, "<", "../$f") {
- $fi->{Digester}->reset();
- $fi->{Digester}->addfile(*F);
- F->error and die $!;
- my $got = $fi->{Digester}->hexdigest();
- $got eq $fi->{Hash} or
- fail "existing file $f has hash $got but .dsc".
- " demands hash $fi->{Hash}".
- " (perhaps you should delete this file?)";
- print "using existing $f\n";
- next;
- } else {
- die "$f $!" unless $!==&ENOENT;
- }
- my $origurl = $dscurl;
- $origurl =~ s{/[^/]+$}{};
- $origurl .= "/$f";
- die "$f ?" unless $f =~ m/^${package}_/;
- die "$f ?" if $f =~ m#/#;
- runcmd_ordryrun qw(sh -ec),'cd ..; exec "$@"','x',
- @dget,'--',$origurl;
+ complete_file_from_dsc('..', $fi);
}
}
}
sub git_fetch_us () {
- runcmd_ordryrun @git, qw(fetch),access_giturl(),fetchspec();
+ runcmd_ordryrun_local @git, qw(fetch),access_giturl(),fetchspec();
}
sub fetch_from_archive () {
# ensures that lrref() is what is actually in the archive,
# one way or another
- get_archive_dsc() or return 0;
- foreach my $field (@ourdscfield) {
- $dsc_hash = $dsc->{$field};
- last if defined $dsc_hash;
- }
- if (defined $dsc_hash) {
- $dsc_hash =~ m/\w+/ or fail "invalid hash in .dsc \`$dsc_hash'";
- $dsc_hash = $&;
- print "last upload to archive specified git hash\n";
- } else {
- print "last upload to archive has NO git hash\n";
- }
+ get_archive_dsc();
- my $lrref_fn = ".git/".lrref();
- if (open H, $lrref_fn) {
- $upload_hash = <H>;
- chomp $upload_hash;
- die "$lrref_fn $upload_hash ?" unless $upload_hash =~ m/^\w+$/;
- } elsif ($! == &ENOENT) {
- $upload_hash = '';
+ if ($dsc) {
+ foreach my $field (@ourdscfield) {
+ $dsc_hash = $dsc->{$field};
+ last if defined $dsc_hash;
+ }
+ if (defined $dsc_hash) {
+ $dsc_hash =~ m/\w+/ or fail "invalid hash in .dsc \`$dsc_hash'";
+ $dsc_hash = $&;
+ progress "last upload to archive specified git hash";
+ } else {
+ progress "last upload to archive has NO git hash";
+ }
} else {
- die "$lrref_fn $!";
+ progress "no version available from the archive";
}
- print DEBUG "previous reference hash=$upload_hash\n";
+
+ $lastpush_hash = git_get_ref(lrref());
+ printdebug "previous reference hash=$lastpush_hash\n";
my $hash;
if (defined $dsc_hash) {
- fail "missing git history even though dsc has hash -".
- " could not find commit $dsc_hash".
- " (should be in ".access_giturl()."#".rrref().")"
- unless $upload_hash;
+ fail "missing remote git history even though dsc has hash -".
+ " could not find ref ".lrref().
+ " (should have been fetched from ".access_giturl()."#".rrref().")"
+ unless $lastpush_hash;
$hash = $dsc_hash;
ensure_we_have_orig();
- if ($dsc_hash eq $upload_hash) {
- } elsif (is_fast_fwd($dsc_hash,$upload_hash)) {
+ if ($dsc_hash eq $lastpush_hash) {
+ } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
print STDERR <<END or die $!;
Git commit in archive is behind the last version allegedly pushed/uploaded.
Commit referred to by archive: $dsc_hash
-Last allegedly pushed/uploaded: $upload_hash
+Last allegedly pushed/uploaded: $lastpush_hash
$later_warning_msg
END
- $hash = $upload_hash;
+ $hash = $lastpush_hash;
} else {
- fail "archive's .dsc refers to ".$dsc_hash.
- " but this is an ancestor of ".$upload_hash;
+ fail "git head (".lrref()."=$lastpush_hash) is not a ".
+ "descendant of archive's .dsc hash ($dsc_hash)";
}
- } else {
+ } elsif ($dsc) {
$hash = generate_commit_from_dsc();
+ } elsif ($lastpush_hash) {
+ # only in git, not in the archive yet
+ $hash = $lastpush_hash;
+ print STDERR <<END or die $!;
+
+Package not found in the archive, but has allegedly been pushed using dgit.
+$later_warning_msg
+END
+ } else {
+ printdebug "nothing found!\n";
+ if (defined $skew_warning_vsn) {
+ print STDERR <<END or die $!;
+
+Warning: relevant archive skew detected.
+Archive allegedly contains $skew_warning_vsn
+But we were not able to obtain any version from the archive or git.
+
+END
+ }
+ return 0;
}
- print DEBUG "current hash=$hash\n";
- if ($upload_hash) {
+ printdebug "current hash=$hash\n";
+ if ($lastpush_hash) {
fail "not fast forward on last upload branch!".
" (archive's version left in DGIT_ARCHIVE)"
- unless is_fast_fwd($upload_hash, $hash);
+ unless is_fast_fwd($lastpush_hash, $hash);
+ }
+ if (defined $skew_warning_vsn) {
+ mkpath '.git/dgit';
+ printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
+ my $clogf = ".git/dgit/changelog.tmp";
+ runcmd shell_cmd "exec >$clogf",
+ @git, qw(cat-file blob), "$hash:debian/changelog";
+ my $gotclogp = parsechangelog("-l$clogf");
+ my $got_vsn = getfield $gotclogp, 'Version';
+ printdebug "SKEW CHECK GOT $got_vsn\n";
+ if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
+ print STDERR <<END or die $!;
+
+Warning: archive skew detected. Using the available version:
+Archive allegedly contains $skew_warning_vsn
+We were able to obtain only $got_vsn
+
+END
+ }
}
- if ($upload_hash ne $hash) {
+ if ($lastpush_hash ne $hash) {
my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
- if (!$dryrun) {
+ if (act_local()) {
cmdoutput @upd_cmd;
} else {
dryrun_report @upd_cmd;
sub clone ($) {
my ($dstdir) = @_;
canonicalise_suite();
- badusage "dry run makes no sense with clone" if $dryrun;
+ badusage "dry run makes no sense with clone" unless act_local();
+ my $hasgit = check_for_git();
mkdir $dstdir or die "$dstdir $!";
- chdir "$dstdir" or die "$dstdir $!";
+ changedir $dstdir;
runcmd @git, qw(init -q);
- runcmd @git, qw(config), "remote.$remotename.fetch", fetchspec();
- open H, "> .git/HEAD" or die $!;
- print H "ref: ".lref()."\n" or die $!;
- close H or die $!;
- runcmd @git, qw(remote add), 'origin', access_giturl();
- if (check_for_git()) {
- print "fetching existing git history\n";
+ my $giturl = access_giturl(1);
+ if (defined $giturl) {
+ runcmd @git, qw(config), "remote.$remotename.fetch", fetchspec();
+ open H, "> .git/HEAD" or die $!;
+ print H "ref: ".lref()."\n" or die $!;
+ close H or die $!;
+ runcmd @git, qw(remote add), 'origin', $giturl;
+ }
+ if ($hasgit) {
+ progress "fetching existing git history";
git_fetch_us();
- runcmd_ordryrun @git, qw(fetch origin);
+ runcmd_ordryrun_local @git, qw(fetch origin);
} else {
- print "starting new git history\n";
+ progress "starting new git history";
}
fetch_from_archive() or no_such_package;
+ my $vcsgiturl = $dsc->{'Vcs-Git'};
+ if (length $vcsgiturl) {
+ $vcsgiturl =~ s/\s+-b\s+\S+//g;
+ runcmd @git, qw(remote add vcs-git), $vcsgiturl;
+ }
runcmd @git, qw(reset --hard), lrref();
printdone "ready for work in $dstdir";
}
sub pull () {
fetch();
- runcmd_ordryrun @git, qw(merge -m),"Merge from $csuite [dgit]",
+ runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
lrref();
printdone "fetched to ".lrref()." and merged into HEAD";
}
sub check_not_dirty () {
return if $ignoredirty;
my @cmd = (@git, qw(diff --quiet HEAD));
- printcmd(\*DEBUG,"+",@cmd) if $debug>0;
+ debugcmd "+",@cmd;
$!=0; $?=0; system @cmd;
return if !$! && !$?;
if (!$! && $?==256) {
}
}
+# Make an administrative git commit with message $m (reported as
+# progress), honouring dry-run via runcmd_ordryrun_local.
+sub commit_admin ($) {
+ my ($m) = @_;
+ progress "$m";
+ runcmd_ordryrun_local @git, qw(commit -m), $m;
+}
+
sub commit_quilty_patch () {
my $output = cmdoutput @git, qw(status --porcelain);
my %adds;
- my $bad=0;
foreach my $l (split /\n/, $output) {
next unless $l =~ m/\S/;
if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
$adds{$1}++;
- } else {
- print STDERR "git status: $l\n";
- $bad++;
}
}
- fail "unexpected output from git status (is tree clean?)" if $bad;
+ delete $adds{'.pc'}; # if there wasn't one before, don't add it
if (!%adds) {
- print "nothing quilty to commit, ok.\n";
+ progress "nothing quilty to commit, ok.";
return;
}
- runcmd_ordryrun @git, qw(add), sort keys %adds;
- my $m = "Commit Debian 3.0 (quilt) metadata";
- print "$m\n";
- runcmd_ordryrun @git, qw(commit -m), $m;
+ runcmd_ordryrun_local @git, qw(add), sort keys %adds;
+ commit_admin "Commit Debian 3.0 (quilt) metadata";
+}
+
+sub get_source_format () {
+ if (!open F, "debian/source/format") {
+ die $! unless $!==&ENOENT;
+ return '';
+ }
+ $_ = <F>;
+ F->error and die $!;
+ chomp;
+ return $_;
}
sub madformat ($) {
my ($format) = @_;
return 0 unless $format eq '3.0 (quilt)';
- print "Format \`$format', urgh\n";
- if ($noquilt) {
- print "Not doing any fixup of \`$format' due to --no-quilt-fixup";
+ if ($quilt_mode eq 'nocheck') {
+ progress "Not doing any fixup of \`$format' due to --no-quilt-fixup";
return 0;
}
+ progress "Format \`$format', checking/updating patch stack";
return 1;
}
-sub dopush () {
- print DEBUG "actually entering push\n";
- my $clogp = parsechangelog();
+sub push_parse_changelog ($) {
+ my ($clogpfn) = @_;
+
+ my $clogp = Dpkg::Control::Hash->new();
+ $clogp->load($clogpfn) or die;
+
$package = getfield $clogp, 'Source';
my $cversion = getfield $clogp, 'Version';
+ my $tag = debiantag($cversion);
+ runcmd @git, qw(check-ref-format), $tag;
+
my $dscfn = dscfn($cversion);
- stat "../$dscfn" or
- fail "looked for .dsc $dscfn, but $!;".
- " maybe you forgot to build";
- $dsc = parsecontrol("../$dscfn","$dscfn");
- my $dscpackage = getfield $dsc, 'Source';
- my $format = getfield $dsc, 'Format';
+
+ return ($clogp, $cversion, $tag, $dscfn);
+}
+
+sub push_parse_dsc ($$$) {
+ my ($dscfn,$dscfnwhat, $cversion) = @_;
+ $dsc = parsecontrol($dscfn,$dscfnwhat);
my $dversion = getfield $dsc, 'Version';
+ my $dscpackage = getfield $dsc, 'Source';
($dscpackage eq $package && $dversion eq $cversion) or
- fail "$dsc is for $dscpackage $dversion".
+ fail "$dscfn is for $dscpackage $dversion".
" but debian/changelog is for $package $cversion";
- print DEBUG "format $format\n";
+}
+
+# Construct the dgit tag object for a push.  Writes $head into the
+# .dsc's dgit field (saving "$dscfn.tmp"), cross-checks the .changes
+# against the changelog, writes the raw tag object to $tfn->('.tmp'),
+# and — unless signing is disabled — gpg-detach-signs it and
+# concatenates object+signature into $tfn->('.signed.tmp').
+# Returns the filename of the (possibly signed) tag object.
+sub push_mktag ($$$$$$$) {
+ my ($head,$clogp,$tag,
+ $dscfn,
+ $changesfile,$changesfilewhat,
+ $tfn) = @_;
+
+ # record the git commit in the .dsc (written back later by dopush)
+ $dsc->{$ourdscfield[0]} = $head;
+ $dsc->save("$dscfn.tmp") or die $!;
+
+ my $changes = parsecontrol($changesfile,$changesfilewhat);
+ foreach my $field (qw(Source Distribution Version)) {
+ $changes->{$field} eq $clogp->{$field} or
+ fail "changes field $field \`$changes->{$field}'".
+ " does not match changelog \`$clogp->{$field}'";
+ }
+
+ my $cversion = getfield $clogp, 'Version';
+ my $clogsuite = getfield $clogp, 'Distribution';
+
+ # We make the git tag by hand because (a) that makes it easier
+ # to control the "tagger" (b) we can do remote signing
+ my $authline = clogp_authline $clogp;
+ my $delibs = join(" ", "",@deliberatelies);
+ my $declaredistro = access_basedistro();
+ open TO, '>', $tfn->('.tmp') or die $!;
+ print TO <<END or die $!;
+object $head
+type commit
+tag $tag
+tagger $authline
+
+$package release $cversion for $clogsuite ($csuite) [dgit]
+[dgit distro=$declaredistro$delibs]
+END
+ foreach my $ref (sort keys %supersedes) {
+ print TO <<END or die $!;
+[dgit supersede:$ref=$supersedes{$ref}]
+END
+ }
+
+ close TO or die $!;
+
+ my $tagobjfn = $tfn->('.tmp');
+ if ($sign) {
+ if (!defined $keyid) {
+ $keyid = access_cfg('keyid','RETURN-UNDEF');
+ }
+ # remove any stale signature so gpg cannot pick it up
+ unlink $tfn->('.tmp.asc') or $!==&ENOENT or die $!;
+ my @sign_cmd = (@gpg, qw(--detach-sign --armor));
+ push @sign_cmd, qw(-u),$keyid if defined $keyid;
+ push @sign_cmd, $tfn->('.tmp');
+ runcmd_ordryrun @sign_cmd;
+ if (act_scary()) {
+ # signed tag object = raw tag object + armored signature
+ $tagobjfn = $tfn->('.signed.tmp');
+ runcmd shell_cmd "exec >$tagobjfn", qw(cat --),
+ $tfn->('.tmp'), $tfn->('.tmp.asc');
+ }
+ }
+
+ return ($tagobjfn);
+}
+
+# Sign the .changes file with debsign (no-op if --no-sign), passing
+# the configured keyid and, when a non-default gpg program is
+# configured, telling debsign to use it via -p.
+sub sign_changes ($) {
+ my ($changesfile) = @_;
+ if ($sign) {
+ my @debsign_cmd = @debsign;
+ push @debsign_cmd, "-k$keyid" if defined $keyid;
+ push @debsign_cmd, "-p$gpg[0]" if $gpg[0] ne 'gpg';
+ push @debsign_cmd, $changesfile;
+ runcmd_ordryrun @debsign_cmd;
+ }
+}
+
+sub dopush () {
+ printdebug "actually entering push\n";
+ prep_ud();
+
+ access_giturl(); # check that success is vaguely likely
+
+ my $clogpfn = ".git/dgit/changelog.822.tmp";
+ runcmd shell_cmd "exec >$clogpfn", qw(dpkg-parsechangelog);
+
+ responder_send_file('parsed-changelog', $clogpfn);
+
+ my ($clogp, $cversion, $tag, $dscfn) =
+ push_parse_changelog("$clogpfn");
+
+ my $dscpath = "$buildproductsdir/$dscfn";
+ stat_exists $dscpath or
+ fail "looked for .dsc $dscfn, but $!;".
+ " maybe you forgot to build";
+
+ responder_send_file('dsc', $dscpath);
+
+ push_parse_dsc($dscpath, $dscfn, $cversion);
+
+ my $format = getfield $dsc, 'Format';
+ printdebug "format $format\n";
if (madformat($format)) {
commit_quilty_patch();
}
check_not_dirty();
- prep_ud();
- chdir $ud or die $!;
- print "checking that $dscfn corresponds to HEAD\n";
- runcmd qw(dpkg-source -x --), "../../../../$dscfn";
+ changedir $ud;
+ progress "checking that $dscfn corresponds to HEAD";
+ runcmd qw(dpkg-source -x --),
+ $dscpath =~ m#^/# ? $dscpath : "../../../$dscpath";
my ($tree,$dir) = mktree_in_ud_from_only_subdir();
- chdir '../../../..' or die $!;
- printcmd \*DEBUG,"+",@_;
- my @diffcmd = (@git, qw(diff --exit-code), $tree);
+ changedir '../../../..';
+ my $diffopt = $debuglevel>0 ? '--exit-code' : '--quiet';
+ my @diffcmd = (@git, qw(diff), $diffopt, $tree);
+ debugcmd "+",@diffcmd;
$!=0; $?=0;
- if (system @diffcmd) {
- if ($! && $?==256) {
+ my $r = system @diffcmd;
+ if ($r) {
+ if ($r==256) {
fail "$dscfn specifies a different tree to your HEAD commit;".
- " perhaps you forgot to build";
+ " perhaps you forgot to build".
+ ($diffopt eq '--exit-code' ? "" :
+ " (run with -D to see full diff output)");
} else {
failedcmd @diffcmd;
}
# runcmd @git, qw(fetch -p ), "$alioth_git/$package.git",
# map { lref($_).":".rref($_) }
# (uploadbranch());
- $dsc->{$ourdscfield[0]} = rev_parse('HEAD');
- $dsc->save("../$dscfn.tmp") or die $!;
+ my $head = rev_parse('HEAD');
if (!$changesfile) {
- my $multi = "../${package}_".(stripepoch $cversion)."_multi.changes";
- if (stat "$multi") {
+ my $multi = "$buildproductsdir/".
+ "${package}_".(stripepoch $cversion)."_multi.changes";
+ if (stat_exists "$multi") {
$changesfile = $multi;
} else {
- $!==&ENOENT or die "$multi: $!";
my $pat = "${package}_".(stripepoch $cversion)."_*.changes";
- my @cs = glob "../$pat";
+ my @cs = glob "$buildproductsdir/$pat";
fail "failed to find unique changes file".
- " (looked for $pat in .., or $multi);".
+ " (looked for $pat in $buildproductsdir, or $multi);".
" perhaps you need to use dgit -C"
unless @cs==1;
($changesfile) = @cs;
}
+ } else {
+ $changesfile = "$buildproductsdir/$changesfile";
}
- my $changes = parsecontrol($changesfile,$changesfile);
- foreach my $field (qw(Source Distribution Version)) {
- $changes->{$field} eq $clogp->{$field} or
- fail "changes field $field \`$changes->{$field}'".
- " does not match changelog \`$clogp->{$field}'";
+
+ responder_send_file('changes',$changesfile);
+ responder_send_command("param head $head");
+ responder_send_command("param csuite $csuite");
+
+ my $forceflag = deliberately('not-fast-forward') ? '+' : '';
+ if ($forceflag && defined $lastpush_hash) {
+ git_for_each_tag_referring($lastpush_hash, sub {
+ my ($objid,$fullrefname,$tagname) = @_;
+ responder_send_command("supersedes $fullrefname=$objid");
+ $supersedes{$fullrefname} = $objid;
+ });
}
- my $tag = debiantag($dversion);
+
+ my $tfn = sub { ".git/dgit/tag$_[0]"; };
+ my $tagobjfn;
+
+ if ($we_are_responder) {
+ $tagobjfn = $tfn->('.signed.tmp');
+ responder_receive_files('signed-tag', $tagobjfn);
+ } else {
+ $tagobjfn =
+ push_mktag($head,$clogp,$tag,
+ $dscpath,
+ $changesfile,$changesfile,
+ $tfn);
+ }
+
+ my $tag_obj_hash = cmdoutput @git, qw(hash-object -w -t tag), $tagobjfn;
+ runcmd_ordryrun @git, qw(verify-tag), $tag_obj_hash;
+ runcmd_ordryrun_local @git, qw(update-ref), "refs/tags/$tag", $tag_obj_hash;
+ runcmd_ordryrun @git, qw(tag -v --), $tag;
+
if (!check_for_git()) {
create_remote_git_repo();
}
- runcmd_ordryrun @git, qw(push),access_giturl(),"HEAD:".rrref();
+ runcmd_ordryrun @git, qw(push),access_giturl(),
+ $forceflag."HEAD:".rrref(), "refs/tags/$tag";
runcmd_ordryrun @git, qw(update-ref -m), 'dgit push', lrref(), 'HEAD';
- if (!$dryrun) {
- rename "../$dscfn.tmp","../$dscfn" or die "$dscfn $!";
+
+ if ($we_are_responder) {
+ my $dryrunsuffix = act_local() ? "" : ".tmp";
+ responder_receive_files('signed-dsc-changes',
+ "$dscpath$dryrunsuffix",
+ "$changesfile$dryrunsuffix");
} else {
- print "[new .dsc left in $dscfn.tmp]\n";
- }
- if ($sign) {
- if (!defined $keyid) {
- $keyid = access_cfg('keyid','RETURN-UNDEF');
+ if (act_local()) {
+ rename "$dscpath.tmp",$dscpath or die "$dscfn $!";
+ } else {
+ progress "[new .dsc left in $dscpath.tmp]";
}
- my @tag_cmd = (@git, qw(tag -s -m),
- "$package release $dversion for $csuite [dgit]");
- push @tag_cmd, qw(-u),$keyid if defined $keyid;
- push @tag_cmd, $tag;
- runcmd_ordryrun @tag_cmd;
- my @debsign_cmd = @debsign;
- push @debsign_cmd, "-k$keyid" if defined $keyid;
- push @debsign_cmd, $changesfile;
- runcmd_ordryrun @debsign_cmd;
+ sign_changes $changesfile;
}
- runcmd_ordryrun @git, qw(push),access_giturl(),"refs/tags/$tag";
+
my $host = access_cfg('upload-host','RETURN-UNDEF');
my @hostarg = defined($host) ? ($host,) : ();
runcmd_ordryrun @dput, @hostarg, $changesfile;
- printdone "pushed and uploaded $dversion";
+ printdone "pushed and uploaded $cversion";
+
+ responder_send_command("complete");
}
sub cmd_clone {
badusage "incorrect arguments to dgit clone";
}
$dstdir ||= "$package";
+
+ if (stat_exists $dstdir) {
+ fail "$dstdir already exists";
+ }
+
+ my $cwd_remove;
+ if ($rmonerror && !$dryrun_level) {
+ $cwd_remove= getcwd();
+ unshift @end, sub {
+ return unless defined $cwd_remove;
+ if (!chdir "$cwd_remove") {
+ return if $!==&ENOENT;
+ die "chdir $cwd_remove: $!";
+ }
+ rmtree($dstdir) or die "remove $dstdir: $!\n";
+ };
+ }
+
clone($dstdir);
+ $cwd_remove = undef;
}
sub branchsuite () {
$isuite = getfield $clogp, 'Distribution';
}
canonicalise_suite();
- print "fetching from suite $csuite\n";
+ progress "fetching from suite $csuite";
} elsif (@ARGV==1) {
($isuite) = @ARGV;
canonicalise_suite();
check_not_dirty();
my $clogp = parsechangelog();
$package = getfield $clogp, 'Source';
+ my $specsuite;
if (@ARGV==0) {
- $isuite = getfield $clogp, 'Distribution';
- if ($new_package) {
- local ($package) = $existing_package; # this is a hack
- canonicalise_suite();
- }
+ } elsif (@ARGV==1) {
+ ($specsuite) = (@ARGV);
} else {
badusage "incorrect arguments to dgit push";
}
+ $isuite = getfield $clogp, 'Distribution';
+ if ($new_package) {
+ local ($package) = $existing_package; # this is a hack
+ canonicalise_suite();
+ }
+ if (defined $specsuite && $specsuite ne $isuite) {
+ canonicalise_suite();
+ $csuite eq $specsuite or
+ fail "dgit push: changelog specifies $isuite ($csuite)".
+ " but command line specifies $specsuite";
+ }
if (check_for_git()) {
git_fetch_us();
}
is_fast_fwd(lrref(), 'HEAD') or
fail "dgit push: HEAD is not a descendant".
" of the archive's version.\n".
- "$us: To overwrite it, use git-merge -s ours ".lrref().".";
+ "$us: To overwrite it, use git merge -s ours ".lrref().".";
} else {
$new_package or
fail "package appears to be new in this suite;".
dopush();
}
+#---------- remote commands' implementation ----------
+
+# Responder end of `dgit rpush', run on the build host over ssh.
+# Consumes its own argument block from @ARGV ($nrargs args: dir and
+# optional protocol-version list), rewires stdin/stdout so that the
+# rpush protocol runs on PI/PO while ordinary output goes to stderr,
+# negotiates the protocol version, then runs the normal push code in
+# $dir with $we_are_responder set.
+sub cmd_remote_push_build_host {
+ my ($nrargs) = shift @ARGV;
+ my (@rargs) = @ARGV[0..$nrargs-1];
+ @ARGV = @ARGV[$nrargs..$#ARGV];
+ die unless @rargs;
+ my ($dir,$vsnwant) = @rargs;
+ # vsnwant is a comma-separated list; we report which we have
+ # chosen in our ready response (so other end can tell if they
+ # offered several)
+ $debugprefix = ' ';
+ $we_are_responder = 1;
+
+ # keep the protocol channel (PI/PO) to ourselves: the inherited
+ # stdin is detached and stdout is redirected to stderr so that
+ # subprocesses cannot corrupt the protocol stream
+ open PI, "<&STDIN" or die $!;
+ open STDIN, "/dev/null" or die $!;
+ open PO, ">&STDOUT" or die $!;
+ autoflush PO 1;
+ open STDOUT, ">&STDERR" or die $!;
+ autoflush STDOUT 1;
+
+ $vsnwant //= 1;
+ fail "build host has dgit rpush protocol version".
+ " $rpushprotovsn but invocation host has $vsnwant"
+ unless grep { $rpushprotovsn eq $_ } split /,/, $vsnwant;
+
+ responder_send_command("dgit-remote-push-ready $rpushprotovsn");
+
+ changedir $dir;
+ &cmd_push;
+}
+
+sub cmd_remote_push_responder { cmd_remote_push_build_host(); }
+# ... for compatibility with proto vsn.1 dgit (just so that user gets
+# a good error message)
+
+our $i_tmp;
+
+sub i_cleanup {
+ local ($@, $?);
+ my $report = i_child_report();
+ if (defined $report) {
+ printdebug "($report)\n";
+ } elsif ($i_child_pid) {
+ printdebug "(killing build host child $i_child_pid)\n";
+ kill 15, $i_child_pid;
+ }
+ if (defined $i_tmp && !defined $initiator_tempdir) {
+ changedir "/";
+ eval { rmtree $i_tmp; };
+ }
+}
+
+END { i_cleanup(); }
+
+sub i_method {
+ my ($base,$selector,@args) = @_;
+ $selector =~ s/\-/_/g;
+ { no strict qw(refs); &{"${base}_${selector}"}(@args); }
+}
+
+sub cmd_rpush {
+ my $host = nextarg;
+ my $dir;
+ if ($host =~ m/^((?:[^][]|\[[^][]*\])*)\:/) {
+ $host = $1;
+ $dir = $'; #';
+ } else {
+ $dir = nextarg;
+ }
+ $dir =~ s{^-}{./-};
+ my @rargs = ($dir,$rpushprotovsn);
+ my @rdgit;
+ push @rdgit, @dgit;
+ push @rdgit, @ropts;
+ push @rdgit, qw(remote-push-build-host), (scalar @rargs), @rargs;
+ push @rdgit, @ARGV;
+ my @cmd = (@ssh, $host, shellquote @rdgit);
+ debugcmd "+",@cmd;
+
+ if (defined $initiator_tempdir) {
+ rmtree $initiator_tempdir;
+ mkdir $initiator_tempdir, 0700 or die "$initiator_tempdir: $!";
+ $i_tmp = $initiator_tempdir;
+ } else {
+ $i_tmp = tempdir();
+ }
+ $i_child_pid = open2(\*RO, \*RI, @cmd);
+ changedir $i_tmp;
+ initiator_expect { m/^dgit-remote-push-ready/ };
+ for (;;) {
+ my ($icmd,$iargs) = initiator_expect {
+ m/^(\S+)(?: (.*))?$/;
+ ($1,$2);
+ };
+ i_method "i_resp", $icmd, $iargs;
+ }
+}
+
+sub i_resp_progress ($) {
+ my ($rhs) = @_;
+ my $msg = protocol_read_bytes \*RO, $rhs;
+ progress $msg;
+}
+
+sub i_resp_complete {
+ my $pid = $i_child_pid;
+ $i_child_pid = undef; # prevents killing some other process with same pid
+ printdebug "waiting for build host child $pid...\n";
+ my $got = waitpid $pid, 0;
+ die $! unless $got == $pid;
+ die "build host child failed $?" if $?;
+
+ i_cleanup();
+ printdebug "all done\n";
+ exit 0;
+}
+
+sub i_resp_file ($) {
+ my ($keyword) = @_;
+ my $localname = i_method "i_localname", $keyword;
+ my $localpath = "$i_tmp/$localname";
+ stat_exists $localpath and
+ badproto \*RO, "file $keyword ($localpath) twice";
+ protocol_receive_file \*RO, $localpath;
+ i_method "i_file", $keyword;
+}
+
+our %i_param;
+
+sub i_resp_param ($) {
+ $_[0] =~ m/^(\S+) (.*)$/ or badproto \*RO, "bad param spec";
+ $i_param{$1} = $2;
+}
+
+sub i_resp_supersedes ($) {
+ $_[0] =~ m#^(refs/tags/\S+)=(\w+)$#
+ or badproto \*RO, "bad supersedes spec";
+ my $r = system qw(git check-ref-format), $1;
+ die "bad supersedes ref spec ($r)" if $r;
+ $supersedes{$1} = $2;
+}
+
+our %i_wanted;
+
+sub i_resp_want ($) {
+ my ($keyword) = @_;
+ die "$keyword ?" if $i_wanted{$keyword}++;
+ my @localpaths = i_method "i_want", $keyword;
+ printdebug "[[ $keyword @localpaths\n";
+ foreach my $localpath (@localpaths) {
+ protocol_send_file \*RI, $localpath;
+ }
+ print RI "files-end\n" or die $!;
+}
+
+our ($i_clogp, $i_version, $i_tag, $i_dscfn, $i_changesfn);
+
+sub i_localname_parsed_changelog {
+ return "remote-changelog.822";
+}
+sub i_file_parsed_changelog {
+ ($i_clogp, $i_version, $i_tag, $i_dscfn) =
+ push_parse_changelog "$i_tmp/remote-changelog.822";
+ die if $i_dscfn =~ m#/|^\W#;
+}
+
+sub i_localname_dsc {
+ defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)";
+ return $i_dscfn;
+}
+sub i_file_dsc { }
+
+sub i_localname_changes {
+ defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)";
+ $i_changesfn = $i_dscfn;
+ $i_changesfn =~ s/\.dsc$/_dgit.changes/ or die;
+ return $i_changesfn;
+}
+sub i_file_changes { }
+
+sub i_want_signed_tag {
+ printdebug Dumper(\%i_param, $i_dscfn);
+ defined $i_param{'head'} && defined $i_dscfn && defined $i_clogp
+ && defined $i_param{'csuite'}
+ or badproto \*RO, "premature desire for signed-tag";
+ my $head = $i_param{'head'};
+ die if $head =~ m/[^0-9a-f]/ || $head !~ m/^../;
+
+ die unless $i_param{'csuite'} =~ m/^$suite_re$/;
+ $csuite = $&;
+ push_parse_dsc $i_dscfn, 'remote dsc', $i_version;
+
+ my $tagobjfn =
+ push_mktag $head, $i_clogp, $i_tag,
+ $i_dscfn,
+ $i_changesfn, 'remote changes',
+ sub { "tag$_[0]"; };
+
+ return $tagobjfn;
+}
+
+sub i_want_signed_dsc_changes {
+ rename "$i_dscfn.tmp","$i_dscfn" or die "$i_dscfn $!";
+ sign_changes $i_changesfn;
+ return ($i_dscfn, $i_changesfn);
+}
+
+#---------- building etc. ----------
+
our $version;
our $sourcechanges;
our $dscfn;
+#----- `3.0 (quilt)' handling -----
+
our $fakeeditorenv = 'DGIT_FAKE_EDITOR_QUILT';
-sub build_maybe_quilt_fixup () {
- if (!open F, "debian/source/format") {
- die $! unless $!==&ENOENT;
- return;
- }
- $_ = <F>;
- F->error and die $!;
- chomp;
- return unless madformat($_);
- # sigh
- my $clogp = parsechangelog();
- my $version = getfield $clogp, 'Version';
- my $author = getfield $clogp, 'Maintainer';
- my $headref = rev_parse('HEAD');
- my $time = time;
- my $ncommits = 3;
- my $patchname = "auto-$version-$headref-$time";
- my $msg = cmdoutput @git, qw(log), "-n$ncommits";
+sub quiltify_dpkg_commit ($$$;$) {
+ my ($patchname,$author,$msg, $xinfo) = @_;
+ $xinfo //= '';
+
mkpath '.git/dgit';
my $descfn = ".git/dgit/quilt-description.tmp";
open O, '>', $descfn or die "$descfn: $!";
+ $msg =~ s/\s+$//g;
$msg =~ s/\n/\n /g;
$msg =~ s/^\s+$/ ./mg;
print O <<END or die $!;
-Description: Automatically generated patch ($clogp->{Version})
- Last (up to) $ncommits git changes, FYI:
- .
- $msg
+Description: $msg
Author: $author
-
+$xinfo
---
END
close O or die $!;
+
{
local $ENV{'EDITOR'} = cmdoutput qw(realpath --), $0;
local $ENV{'VISUAL'} = $ENV{'EDITOR'};
local $ENV{$fakeeditorenv} = cmdoutput qw(realpath --), $descfn;
- runcmd_ordryrun @dpkgsource, qw(--commit .), $patchname;
+ runcmd_ordryrun_local @dpkgsource, qw(--commit .), $patchname;
+ }
+}
+
+sub quiltify_trees_differ ($$) {
+ my ($x,$y) = @_;
+ # returns 1 iff the two tree objects differ other than in debian/
+ local $/=undef;
+ my @cmd = (@git, qw(diff-tree --name-only -z), $x, $y);
+ my $diffs= cmdoutput @cmd;
+ foreach my $f (split /\0/, $diffs) {
+ next if $f eq 'debian';
+ return 1;
+ }
+ return 0;
+}
+
+sub quiltify_tree_sentinelfiles ($) {
+ # lists the `sentinel' files present in the tree
+ my ($x) = @_;
+ my $r = cmdoutput @git, qw(ls-tree --name-only), $x,
+ qw(-- debian/rules debian/control);
+ $r =~ s/\n/,/g;
+ return $r;
+}
+
+sub quiltify ($$) {
+ my ($clogp,$target) = @_;
+
+ # Quilt patchification algorithm
+ #
+ # We search backwards through the history of the main tree's HEAD
+ # (T) looking for a start commit S whose tree object is identical
+ # to to the patch tip tree (ie the tree corresponding to the
+ # current dpkg-committed patch series). For these purposes
+ # `identical' disregards anything in debian/ - this wrinkle is
+ # necessary because dpkg-source treates debian/ specially.
+ #
+ # We can only traverse edges where at most one of the ancestors'
+ # trees differs (in changes outside in debian/). And we cannot
+ # handle edges which change .pc/ or debian/patches. To avoid
+ # going down a rathole we avoid traversing edges which introduce
+ # debian/rules or debian/control. And we set a limit on the
+ # number of edges we are willing to look at.
+ #
+ # If we succeed, we walk forwards again. For each traversed edge
+ # PC (with P parent, C child) (starting with P=S and ending with
+ # C=T) to we do this:
+ # - git checkout C
+ # - dpkg-source --commit with a patch name and message derived from C
+ # After traversing PT, we git commit the changes which
+ # should be contained within debian/patches.
+
+ changedir '../fake';
+ mktree_in_ud_here();
+ rmtree '.pc';
+ runcmd @git, 'add', '.';
+ my $oldtiptree=git_write_tree();
+ changedir '../work';
+
+ # The search for the path S..T is breadth-first. We maintain a
+ # todo list containing search nodes. A search node identifies a
+ # commit, and looks something like this:
+ # $p = {
+ # Commit => $git_commit_id,
+ # Child => $c, # or undef if P=T
+ # Whynot => $reason_edge_PC_unsuitable, # in @nots only
+ # Nontrivial => true iff $p..$c has relevant changes
+ # };
+
+ my @todo;
+ my @nots;
+ my $sref_S;
+ my $max_work=100;
+ my %considered; # saves being exponential on some weird graphs
+
+ my $t_sentinels = quiltify_tree_sentinelfiles $target;
+
+ my $not = sub {
+ my ($search,$whynot) = @_;
+ printdebug " search NOT $search->{Commit} $whynot\n";
+ $search->{Whynot} = $whynot;
+ push @nots, $search;
+ no warnings qw(exiting);
+ next;
+ };
+
+ push @todo, {
+ Commit => $target,
+ };
+
+ while (@todo) {
+ my $c = shift @todo;
+ next if $considered{$c->{Commit}}++;
+
+ $not->($c, "maximum search space exceeded") if --$max_work <= 0;
+
+ printdebug "quiltify investigate $c->{Commit}\n";
+
+ # are we done?
+ if (!quiltify_trees_differ $c->{Commit}, $oldtiptree) {
+ printdebug " search finished hooray!\n";
+ $sref_S = $c;
+ last;
+ }
+
+ if ($quilt_mode eq 'nofix') {
+ fail "quilt fixup required but quilt mode is \`nofix'\n".
+ "HEAD commit $c->{Commit} differs from tree implied by ".
+ " debian/patches (tree object $oldtiptree)";
+ }
+ if ($quilt_mode eq 'smash') {
+ printdebug " search quitting smash\n";
+ last;
+ }
+
+ my $c_sentinels = quiltify_tree_sentinelfiles $c->{Commit};
+ $not->($c, "has $c_sentinels not $t_sentinels")
+ if $c_sentinels ne $t_sentinels;
+
+ my $commitdata = cmdoutput @git, qw(cat-file commit), $c->{Commit};
+ $commitdata =~ m/\n\n/;
+	$commitdata = $`;
+ my @parents = ($commitdata =~ m/^parent (\w+)$/gm);
+ @parents = map { { Commit => $_, Child => $c } } @parents;
+
+ $not->($c, "root commit") if !@parents;
+
+ foreach my $p (@parents) {
+ $p->{Nontrivial}= quiltify_trees_differ $p->{Commit},$c->{Commit};
+ }
+ my $ndiffers = grep { $_->{Nontrivial} } @parents;
+ $not->($c, "merge ($ndiffers nontrivial parents)") if $ndiffers > 1;
+
+ foreach my $p (@parents) {
+ printdebug "considering C=$c->{Commit} P=$p->{Commit}\n";
+
+ my @cmd= (@git, qw(diff-tree -r --name-only),
+ $p->{Commit},$c->{Commit}, qw(-- debian/patches .pc));
+ my $patchstackchange = cmdoutput @cmd;
+ if (length $patchstackchange) {
+ $patchstackchange =~ s/\n/,/g;
+ $not->($p, "changed $patchstackchange");
+ }
+
+ printdebug " search queue P=$p->{Commit} ",
+ ($p->{Nontrivial} ? "NT" : "triv"),"\n";
+ push @todo, $p;
+ }
+ }
+
+ if (!$sref_S) {
+ printdebug "quiltify want to smash\n";
+
+ my $abbrev = sub {
+ my $x = $_[0]{Commit};
+ $x =~ s/(.*?[0-9a-z]{8})[0-9a-z]*$/$1/;
+	    return $x;
+ };
+ my $reportnot = sub {
+ my ($notp) = @_;
+ my $s = $abbrev->($notp);
+ my $c = $notp->{Child};
+ $s .= "..".$abbrev->($c) if $c;
+ $s .= ": ".$c->{Whynot};
+ return $s;
+ };
+ if ($quilt_mode eq 'linear') {
+ print STDERR "$us: quilt fixup cannot be linear. Stopped at:\n";
+ foreach my $notp (@nots) {
+ print STDERR "$us: ", $reportnot->($notp), "\n";
+ }
+ fail "quilt fixup naive history linearisation failed.\n".
+ "Use dpkg-source --commit by hand; or, --quilt=smash for one ugly patch";
+ } elsif ($quilt_mode eq 'smash') {
+ } elsif ($quilt_mode eq 'auto') {
+ progress "quilt fixup cannot be linear, smashing...";
+ } else {
+ die "$quilt_mode ?";
+ }
+
+ my $time = time;
+ my $ncommits = 3;
+ my $msg = cmdoutput @git, qw(log), "-n$ncommits";
+
+ quiltify_dpkg_commit "auto-$version-$target-$time",
+ (getfield $clogp, 'Maintainer'),
+ "Automatically generated patch ($clogp->{Version})\n".
+ "Last (up to) $ncommits git changes, FYI:\n\n". $msg;
+ return;
}
+ progress "quiltify linearisation planning successful, executing...";
+
+ for (my $p = $sref_S;
+ my $c = $p->{Child};
+ $p = $p->{Child}) {
+ printdebug "quiltify traverse $p->{Commit}..$c->{Commit}\n";
+ next unless $p->{Nontrivial};
+
+ my $cc = $c->{Commit};
+
+ my $commitdata = cmdoutput @git, qw(cat-file commit), $cc;
+ $commitdata =~ m/\n\n/ or die "$c ?";
+ $commitdata = $`;
+ my $msg = $'; #';
+ $commitdata =~ m/^author (.*) \d+ [-+0-9]+$/m or die "$cc ?";
+ my $author = $1;
+
+ $msg =~ s/^(.*)\n*/$1\n/ or die "$cc $msg ?";
+
+ my $title = $1;
+ my $patchname = $title;
+ $patchname =~ s/[.:]$//;
+ $patchname =~ y/ A-Z/-a-z/;
+ $patchname =~ y/-a-z0-9_.+=~//cd;
+ $patchname =~ s/^\W/x-$&/;
+ $patchname = substr($patchname,0,40);
+ my $index;
+ for ($index='';
+ stat "debian/patches/$patchname$index";
+ $index++) { }
+ $!==ENOENT or die "$patchname$index $!";
+
+ runcmd @git, qw(checkout -q), $cc;
+
+ # We use the tip's changelog so that dpkg-source doesn't
+ # produce complaining messages from dpkg-parsechangelog. None
+ # of the information dpkg-source gets from the changelog is
+ # actually relevant - it gets put into the original message
+ # which dpkg-source provides our stunt editor, and then
+ # overwritten.
+ runcmd @git, qw(checkout -q), $target, qw(debian/changelog);
+
+ quiltify_dpkg_commit "$patchname$index", $author, $msg,
+ "X-Dgit-Generated: $clogp->{Version} $cc\n";
+
+ runcmd @git, qw(checkout -q), $cc, qw(debian/changelog);
+ }
+
+ runcmd @git, qw(checkout -q master);
+}
+
+sub build_maybe_quilt_fixup () {
+ my $format=get_source_format;
+ return unless madformat $format;
+ # sigh
+
+ # Our objective is:
+ # - honour any existing .pc in case it has any strangeness
+ # - determine the git commit corresponding to the tip of
+ # the patch stack (if there is one)
+ # - if there is such a git commit, convert each subsequent
+ # git commit into a quilt patch with dpkg-source --commit
+ # - otherwise convert all the differences in the tree into
+ # a single git commit
+ #
+ # To do this we:
+
+ # Our git tree doesn't necessarily contain .pc. (Some versions of
+ # dgit would include the .pc in the git tree.) If there isn't
+ # one, we need to generate one by unpacking the patches that we
+ # have.
+ #
+ # We first look for a .pc in the git tree. If there is one, we
+ # will use it. (This is not the normal case.)
+ #
+ # Otherwise need to regenerate .pc so that dpkg-source --commit
+ # can work. We do this as follows:
+ # 1. Collect all relevant .orig from parent directory
+ # 2. Generate a debian.tar.gz out of
+ # debian/{patches,rules,source/format}
+ # 3. Generate a fake .dsc containing just these fields:
+ # Format Source Version Files
+ # 4. Extract the fake .dsc
+ # Now the fake .dsc has a .pc directory.
+ # (In fact we do this in every case, because in future we will
+ # want to search for a good base commit for generating patches.)
+ #
+ # Then we can actually do the dpkg-source --commit
+ # 1. Make a new working tree with the same object
+ # store as our main tree and check out the main
+ # tree's HEAD.
+ # 2. Copy .pc from the fake's extraction, if necessary
+ # 3. Run dpkg-source --commit
+ # 4. If the result has changes to debian/, then
+ # - git-add them them
+ # - git-add .pc if we had a .pc in-tree
+ # - git-commit
+ # 5. If we had a .pc in-tree, delete it, and git-commit
+ # 6. Back in the main tree, fast forward to the new HEAD
+
+ my $clogp = parsechangelog();
+ my $headref = rev_parse('HEAD');
+
+ prep_ud();
+ changedir $ud;
+
+ my $upstreamversion=$version;
+ $upstreamversion =~ s/-[^-]*$//;
+
+ my $fakeversion="$upstreamversion-~~DGITFAKE";
+
+ my $fakedsc=new IO::File 'fake.dsc', '>' or die $!;
+ print $fakedsc <<END or die $!;
+Format: 3.0 (quilt)
+Source: $package
+Version: $fakeversion
+Files:
+END
+
+ my $dscaddfile=sub {
+ my ($b) = @_;
+
+ my $md = new Digest::MD5;
+
+ my $fh = new IO::File $b, '<' or die "$b $!";
+ stat $fh or die $!;
+ my $size = -s _;
+
+ $md->addfile($fh);
+ print $fakedsc " ".$md->hexdigest." $size $b\n" or die $!;
+ };
+
+ foreach my $f (<../../../../*>) { #/){
+ my $b=$f; $b =~ s{.*/}{};
+ next unless is_orig_file $b, srcfn $upstreamversion,'';
+ link $f, $b or die "$b $!";
+ $dscaddfile->($b);
+ }
+
+ my @files=qw(debian/source/format debian/rules);
+ if (stat_exists '../../../debian/patches') {
+ push @files, 'debian/patches';
+ }
+
+ my $debtar= srcfn $fakeversion,'.debian.tar.gz';
+ runcmd qw(env GZIP=-1 tar -zcf), "./$debtar", qw(-C ../../..), @files;
+
+ $dscaddfile->($debtar);
+ close $fakedsc or die $!;
+
+ runcmd qw(sh -ec), 'exec dpkg-source --no-check -x fake.dsc >/dev/null';
+
+ my $fakexdir= $package.'-'.(stripepoch $upstreamversion);
+ rename $fakexdir, "fake" or die "$fakexdir $!";
+
+ mkdir "work" or die $!;
+ changedir "work";
+ mktree_in_ud_here();
+ runcmd @git, qw(reset --hard), $headref;
+
+ my $mustdeletepc=0;
+ if (stat_exists ".pc") {
+ -d _ or die;
+ progress "Tree already contains .pc - will use it then delete it.";
+ $mustdeletepc=1;
+ } else {
+ rename '../fake/.pc','.pc' or die $!;
+ }
+
+ quiltify($clogp,$headref);
+
if (!open P, '>>', ".pc/applied-patches") {
$!==&ENOENT or die $!;
} else {
}
commit_quilty_patch();
+
+ if ($mustdeletepc) {
+ runcmd @git, qw(rm -rq .pc);
+ commit_admin "Commit removal of .pc (quilt series tracking data)";
+ }
+
+ changedir '../../../..';
+ runcmd @git, qw(pull --ff-only -q .git/dgit/unpack/work master);
}
sub quilt_fixup_editor () {
exit 0;
}
+#----- other building -----
+
+sub clean_tree () {
+ if ($cleanmode eq 'dpkg-source') {
+ runcmd_ordryrun_local @dpkgbuildpackage, qw(-T clean);
+ } elsif ($cleanmode eq 'git') {
+ runcmd_ordryrun_local @git, qw(clean -xdf);
+ } elsif ($cleanmode eq 'none') {
+ } else {
+ die "$cleanmode ?";
+ }
+}
+
+sub cmd_clean () {
+ badusage "clean takes no additional arguments" if @ARGV;
+ clean_tree();
+}
+
sub build_prep () {
badusage "-p is not allowed when building" if defined $package;
check_not_dirty();
+ clean_tree();
my $clogp = parsechangelog();
$isuite = getfield $clogp, 'Distribution';
$package = getfield $clogp, 'Source';
build_maybe_quilt_fixup();
}
+sub changesopts () {
+ my @opts =@changesopts[1..$#changesopts];
+ if (!defined $changes_since_version) {
+ my @vsns = archive_query('archive_query');
+ my @quirk = access_quirk();
+ if ($quirk[0] eq 'backports') {
+ local $isuite = $quirk[2];
+ local $csuite;
+ canonicalise_suite();
+ push @vsns, archive_query('archive_query');
+ }
+ if (@vsns) {
+ @vsns = map { $_->[0] } @vsns;
+ @vsns = sort { -version_compare($a, $b) } @vsns;
+ $changes_since_version = $vsns[0];
+ progress "changelog will contain changes since $vsns[0]";
+ } else {
+ $changes_since_version = '_';
+ progress "package seems new, not specifying -v<version>";
+ }
+ }
+ if ($changes_since_version ne '_') {
+ unshift @opts, "-v$changes_since_version";
+ }
+ return @opts;
+}
+
sub cmd_build {
- badusage "dgit build implies --clean=dpkg-source"
- if $cleanmode ne 'dpkg-source';
build_prep();
- runcmd_ordryrun @dpkgbuildpackage, qw(-us -uc), changesopts(), @ARGV;
+ runcmd_ordryrun_local @dpkgbuildpackage, qw(-us -uc), changesopts(), @ARGV;
printdone "build successful\n";
}
sub cmd_git_build {
- badusage "dgit git-build implies --clean=dpkg-source"
- if $cleanmode ne 'dpkg-source';
build_prep();
my @cmd =
(qw(git-buildpackage -us -uc --git-no-sign-tags),
push @cmd, "--git-debian-branch=".lbranch();
}
push @cmd, changesopts();
- runcmd_ordryrun @cmd, @ARGV;
+ runcmd_ordryrun_local @cmd, @ARGV;
printdone "build successful\n";
}
$sourcechanges = "${package}_".(stripepoch $version)."_source.changes";
$dscfn = dscfn($version);
if ($cleanmode eq 'dpkg-source') {
- runcmd_ordryrun (@dpkgbuildpackage, qw(-us -uc -S)), changesopts();
+ runcmd_ordryrun_local (@dpkgbuildpackage, qw(-us -uc -S)),
+ changesopts();
} else {
- if ($cleanmode eq 'git') {
- runcmd_ordryrun @git, qw(clean -xdf);
- } elsif ($cleanmode eq 'none') {
- } else {
- die "$cleanmode ?";
- }
- my $pwd = cmdoutput qw(env - pwd);
+ my $pwd = must_getcwd();
my $leafdir = basename $pwd;
- chdir ".." or die $!;
- runcmd_ordryrun @dpkgsource, qw(-b --), $leafdir;
- chdir $pwd or die $!;
- runcmd_ordryrun qw(sh -ec),
+ changedir "..";
+ runcmd_ordryrun_local @dpkgsource, qw(-b --), $leafdir;
+ changedir $pwd;
+ runcmd_ordryrun_local qw(sh -ec),
'exec >$1; shift; exec "$@"','x',
"../$sourcechanges",
@dpkggenchanges, qw(-S), changesopts();
sub cmd_sbuild {
build_source();
- chdir ".." or die $!;
+ changedir "..";
my $pat = "${package}_".(stripepoch $version)."_*.changes";
- if (!$dryrun) {
- stat $dscfn or fail "$dscfn (in parent directory): $!";
- stat $sourcechanges or fail "$sourcechanges (in parent directory): $!";
+ if (act_local()) {
+	stat_exists $dscfn or fail "$dscfn (in parent directory): $!";
+ stat_exists $sourcechanges
+ or fail "$sourcechanges (in parent directory): $!";
foreach my $cf (glob $pat) {
next if $cf eq $sourcechanges;
unlink $cf or fail "remove $cf: $!";
}
}
- runcmd_ordryrun @sbuild, @ARGV, qw(-d), $isuite, $dscfn;
- runcmd_ordryrun @mergechanges, glob $pat;
+ runcmd_ordryrun_local @sbuild, @ARGV, qw(-d), $isuite, $dscfn;
+ my @changesfiles = glob $pat;
+ @changesfiles = sort {
+ ($b =~ m/_source\.changes$/ <=> $a =~ m/_source\.changes$/)
+ or $a cmp $b
+ } @changesfiles;
+ fail "wrong number of different changes files (@changesfiles)"
+ unless @changesfiles;
+ runcmd_ordryrun_local @mergechanges, @changesfiles;
my $multichanges = "${package}_".(stripepoch $version)."_multi.changes";
- if (!$dryrun) {
- stat $multichanges or fail "$multichanges: $!";
+ if (act_local()) {
+ stat_exists $multichanges or fail "$multichanges: $!";
}
printdone "build successful, results in $multichanges\n" or die $!;
}
badusage "incorrect arguments to dgit quilt-fixup" if @ARGV;
my $clogp = parsechangelog();
$version = getfield $clogp, 'Version';
+ $package = getfield $clogp, 'Source';
build_maybe_quilt_fixup();
}
+sub cmd_archive_api_query {
+ badusage "need only 1 subpath argument" unless @ARGV==1;
+ my ($subpath) = @ARGV;
+ my @cmd = archive_api_query_cmd($subpath);
+ exec @cmd or fail "exec curl: $!\n";
+}
+
+#---------- argument parsing and main program ----------
+
sub cmd_version {
- print "dgit version $dgit_version\n";
+ print "dgit version $our_version\n" or die $!;
+ exit 0;
}
sub parseopts () {
my $om;
+
+ if (defined $ENV{'DGIT_SSH'}) {
+ @ssh = string_to_ssh $ENV{'DGIT_SSH'};
+ } elsif (defined $ENV{'GIT_SSH'}) {
+ @ssh = ($ENV{'GIT_SSH'});
+ }
+
while (@ARGV) {
last unless $ARGV[0] =~ m/^-/;
$_ = shift @ARGV;
last if m/^--?$/;
if (m/^--/) {
if (m/^--dry-run$/) {
- $dryrun=1;
+ push @ropts, $_;
+ $dryrun_level=2;
+ } elsif (m/^--damp-run$/) {
+ push @ropts, $_;
+ $dryrun_level=1;
} elsif (m/^--no-sign$/) {
+ push @ropts, $_;
$sign=0;
} elsif (m/^--help$/) {
cmd_help();
+ } elsif (m/^--version$/) {
+ cmd_version();
} elsif (m/^--new$/) {
+ push @ropts, $_;
$new_package=1;
- } elsif (m/^--(\w+)=(.*)/s &&
+ } elsif (m/^--since-version=([^_]+|_)$/) {
+ push @ropts, $_;
+ $changes_since_version = $1;
+ } elsif (m/^--([-0-9a-z]+)=(.*)/s &&
($om = $opts_opt_map{$1}) &&
length $om->[0]) {
+ push @ropts, $_;
$om->[0] = $2;
- } elsif (m/^--(\w+):(.*)/s &&
+ } elsif (m/^--([-0-9a-z]+):(.*)/s &&
+ !$opts_opt_cmdonly{$1} &&
($om = $opts_opt_map{$1})) {
+ push @ropts, $_;
push @$om, $2;
} elsif (m/^--existing-package=(.*)/s) {
+ push @ropts, $_;
$existing_package = $1;
+ } elsif (m/^--initiator-tempdir=(.*)/s) {
+ $initiator_tempdir = $1;
+ $initiator_tempdir =~ m#^/# or
+ badusage "--initiator-tempdir must be used specify an".
+ " absolute, not relative, directory."
} elsif (m/^--distro=(.*)/s) {
+ push @ropts, $_;
$idistro = $1;
+ } elsif (m/^--build-products-dir=(.*)/s) {
+ push @ropts, $_;
+ $buildproductsdir = $1;
} elsif (m/^--clean=(dpkg-source|git|none)$/s) {
+ push @ropts, $_;
$cleanmode = $1;
} elsif (m/^--clean=(.*)$/s) {
badusage "unknown cleaning mode \`$1'";
+ } elsif (m/^--quilt=($quilt_modes_re)$/s) {
+ push @ropts, $_;
+ $quilt_mode = $1;
+ } elsif (m/^--quilt=(.*)$/s) {
+ badusage "unknown quilt fixup mode \`$1'";
} elsif (m/^--ignore-dirty$/s) {
+ push @ropts, $_;
$ignoredirty = 1;
} elsif (m/^--no-quilt-fixup$/s) {
- $noquilt = 1;
+ push @ropts, $_;
+ $quilt_mode = 'nocheck';
+ } elsif (m/^--no-rm-on-error$/s) {
+ push @ropts, $_;
+ $rmonerror = 0;
+ } elsif (m/^--deliberately-($suite_re)$/s) {
+ push @ropts, $_;
+ push @deliberatelies, $&;
} else {
badusage "unknown long option \`$_'";
}
} else {
while (m/^-./s) {
if (s/^-n/-/) {
- $dryrun=1;
+ push @ropts, $&;
+ $dryrun_level=2;
+ } elsif (s/^-L/-/) {
+ push @ropts, $&;
+ $dryrun_level=1;
} elsif (s/^-h/-/) {
cmd_help();
} elsif (s/^-D/-/) {
- open DEBUG, ">&STDERR" or die $!;
- $debug++;
+ push @ropts, $&;
+ $debuglevel++;
+ enabledebug();
} elsif (s/^-N/-/) {
+ push @ropts, $&;
$new_package=1;
- } elsif (m/^-[vm]/) {
+ } elsif (s/^-v([^_]+|_)$//s) {
+ push @ropts, $&;
+ $changes_since_version = $1;
+ } elsif (m/^-m/) {
+ push @ropts, $&;
push @changesopts, $_;
$_ = '';
} elsif (s/^-c(.*=.*)//s) {
+ push @ropts, $&;
push @git, '-c', $1;
- } elsif (s/^-d(.*)//s) {
+ } elsif (s/^-d(.+)//s) {
+ push @ropts, $&;
$idistro = $1;
- } elsif (s/^-C(.*)//s) {
+ } elsif (s/^-C(.+)//s) {
+ push @ropts, $&;
$changesfile = $1;
- } elsif (s/^-k(.*)//s) {
+ if ($changesfile =~ s#^(.*)/##) {
+ $buildproductsdir = $1;
+ }
+ } elsif (s/^-k(.+)//s) {
$keyid=$1;
- } elsif (s/^-wn//s) {
+ } elsif (m/^-[vdCk]$/) {
+ badusage
+ "option \`$_' requires an argument (and no space before the argument)";
+ } elsif (s/^-wn$//s) {
+ push @ropts, $&;
$cleanmode = 'none';
- } elsif (s/^-wg//s) {
+ } elsif (s/^-wg$//s) {
+ push @ropts, $&;
$cleanmode = 'git';
- } elsif (s/^-wd//s) {
+ } elsif (s/^-wd$//s) {
+ push @ropts, $&;
$cleanmode = 'dpkg-source';
} else {
badusage "unknown short option \`$_'";
quilt_fixup_editor();
}
-delete $ENV{'DGET_UNPACK'};
-
parseopts();
-print STDERR "DRY RUN ONLY\n" if $dryrun;
+print STDERR "DRY RUN ONLY\n" if $dryrun_level > 1;
+print STDERR "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n"
+ if $dryrun_level == 1;
if (!@ARGV) {
print STDERR $helpmsg or die $!;
exit 8;
}
my $cmd = shift @ARGV;
$cmd =~ y/-/_/;
-{ no strict qw(refs); &{"cmd_$cmd"}(); }
+
+if (!defined $quilt_mode) {
+ $quilt_mode = cfg('dgit.force.quilt-mode', 'RETURN-UNDEF')
+ // access_cfg('quilt-mode', 'RETURN-UNDEF')
+ // 'linear';
+ $quilt_mode =~ m/^($quilt_modes_re)$/
+ or badcfg "unknown quilt-mode \`$quilt_mode'";
+ $quilt_mode = $1;
+}
+
+my $fn = ${*::}{"cmd_$cmd"};
+$fn or badusage "unknown operation $cmd";
+$fn->();