;
};
}
#---------- `madison' archive query method ----------
sub archive_query_madison {
return archive_query_prepend_mirror
map { [ @$_[0..1] ] } madison_get_parse(@_);
}
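# Runs rmadison (caching its output in %rmad) and parses the table it
# prints; returns a list of [ $version, $dsc_subpath, $suite ] entries
# sorted with the highest version first.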
sub madison_get_parse {
my ($proto,$data) = @_;
die unless $proto eq 'madison';
if (!length $data) {
$data= access_cfg('madison-distro','RETURN-UNDEF');
$data //= access_basedistro();
}
$rmad{$proto,$data,$package} ||= cmdoutput
qw(rmadison -asource),"-s$isuite","-u$data",$package;
my $rmad = $rmad{$proto,$data,$package};
my @out;
foreach my $l (split /\n/, $rmad) {
$l =~ m{^ \s*( [^ \t|]+ )\s* \|
\s*( [^ \t|]+ )\s* \|
\s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \|
\s*( [^ \t|]+ )\s* }x or die "$rmad ?";
$1 eq $package or die "$rmad $package ?";
my $vsn = $2;
my $newsuite = $3;
my $component;
if (defined $4) {
$component = $4;
} else {
$component = access_cfg('archive-query-default-component');
}
$5 eq 'source' or die "$rmad ?";
push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite];
}
return sort { -version_compare($a->[0],$b->[0]); } @out;
}
sub canonicalise_suite_madison {
# madison canonicalises for us
my @r = madison_get_parse(@_);
@r or fail f_
"unable to canonicalise suite using package %s".
" which does not appear to exist in suite %s;".
" --existing-package may help",
$package, $isuite;
return $r[0][2];
}
sub file_in_archive_madison { return undef; }
sub package_not_wholly_new_madison { return undef; }
#---------- `sshpsql' archive query method ----------
# (untranslated, because this is obsolete)
sub sshpsql ($$$) {
my ($data,$runeinfo,$sql) = @_;
if (!length $data) {
$data= access_someuserhost('sshpsql').':'.
access_cfg('sshpsql-dbname');
}
$data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'";
my ($userhost,$dbname) = ($`,$'); #';
my @rows;
my @cmd = (access_cfg_ssh, $userhost,
access_runeinfo("ssh-psql $runeinfo").
" export LC_MESSAGES=C; export LC_CTYPE=C;".
" ".shellquote qw(psql -A), $dbname, qw(-c), $sql);
debugcmd "|",@cmd;
open P, "-|", @cmd or confess "$!";
while (<P>) {
chomp or die;
printdebug(">|$_|\n");
push @rows, $_;
}
$!=0; $?=0; close P or failedcmd @cmd;
@rows or die;
my $nrows = pop @rows;
$nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?";
@rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?";
@rows = map { [ split /\|/, $_ ] } @rows;
my $ncols = scalar @{ shift @rows };
die if grep { scalar @$_ != $ncols } @rows;
return @rows;
}
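# Dies if any argument contains characters outside the small set we
# consider safe to interpolate into the SQL queries below.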
sub sql_injection_check {
foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; }
}
sub archive_query_sshpsql ($$) {
my ($proto,$data) = @_;
sql_injection_check $isuite, $package;
my @rows = sshpsql($data, "archive-query $isuite $package", <<END);
SELECT source.version, component.name, files.filename, files.sha256sum
  FROM source
  JOIN src_associations ON source.id = src_associations.source
  JOIN suite ON suite.id = src_associations.suite
  JOIN dsc_files ON dsc_files.source = source.id
  JOIN files_archive_map ON files_archive_map.file_id = dsc_files.file
  JOIN component ON component.id = files_archive_map.component_id
  JOIN files ON files.id = dsc_files.file
 WHERE ( suite.suite_name='$isuite' OR suite.codename='$isuite' )
   AND source.source='$package'
   AND files.filename LIKE '%.dsc';
END
@rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
my $digester = Digest::SHA->new(256);
@rows = map {
my ($vsn,$component,$filename,$sha256sum) = @$_;
[ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
} @rows;
return archive_query_prepend_mirror @rows;
}
sub canonicalise_suite_sshpsql ($$) {
my ($proto,$data) = @_;
sql_injection_check $isuite;
my @rows = sshpsql($data, "canonicalise-suite $isuite", <<END);
SELECT suite.codename FROM suite
 WHERE suite_name='$isuite' OR codename='$isuite';
END
@rows = map { $_->[0] } @rows;
fail "unknown suite $isuite" unless @rows;
die "ambiguous $isuite: @rows ?" if @rows>1;
return $rows[0];
}
sub file_in_archive_sshpsql ($$$) { return undef; }
sub package_not_wholly_new_sshpsql ($$$) { return undef; }
#---------- `dummycat' archive query method ----------
# (untranslated, because this is for testing purposes etc.)
sub canonicalise_suite_dummycat ($$) {
my ($proto,$data) = @_;
my $dpath = "$data/suite.$isuite";
if (!open C, "<", $dpath) {
$!==ENOENT or die "$dpath: $!";
printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n";
return $isuite;
}
$!=0; $_ = <C>;
chomp or die "$dpath: $!";
close C;
printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n";
return $_;
}
sub archive_query_dummycat ($$) {
my ($proto,$data) = @_;
canonicalise_suite();
my $dpath = "$data/package.$csuite.$package";
if (!open C, "<", $dpath) {
$!==ENOENT or die "$dpath: $!";
printdebug "dummycat query $csuite $package $dpath ENOENT\n";
return ();
}
my @rows;
while (<C>) {
next if m/^\#/;
next unless m/\S/;
die unless chomp;
printdebug "dummycat query $csuite $package $dpath | $_\n";
my @row = split /\s+/, $_;
@row==2 or die "$dpath: $_ ?";
push @rows, \@row;
}
C->error and die "$dpath: $!";
close C;
return archive_query_prepend_mirror
sort { -version_compare($a->[0],$b->[0]); } @rows;
}
sub file_in_archive_dummycat () { return undef; }
sub package_not_wholly_new_dummycat () { return undef; }
#---------- archive query entrypoints and rest of program ----------
sub canonicalise_suite () {
return if defined $csuite;
fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED';
$csuite = archive_query('canonicalise_suite');
if ($isuite ne $csuite) {
progress f_ "canonical suite name for %s is %s", $isuite, $csuite;
} else {
progress f_ "canonical suite name is %s", $csuite;
}
}
sub get_archive_dsc () {
canonicalise_suite();
my @vsns = archive_query('archive_query');
foreach my $vinfo (@vsns) {
my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
$dscurl = $vsn_dscurl;
$dscdata = url_get($dscurl);
if (!$dscdata) {
$skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
next;
}
if ($digester) {
$digester->reset();
$digester->add($dscdata);
my $got = $digester->hexdigest();
$got eq $digest or
fail f_ "%s has hash %s but archive told us to expect %s",
$dscurl, $got, $digest;
}
parse_dscdata();
my $fmt = getfield $dsc, 'Format';
$format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
f_ "unsupported source format %s, sorry", $fmt;
$dsc_checked = !!$digester;
printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
return;
}
$dsc = undef;
printdebug "get_archive_dsc: nothing in archive, returning undef\n";
}
sub check_for_git ();
sub check_for_git () {
# returns 0 or 1
my $how = access_cfg('git-check');
if ($how eq 'ssh-cmd') {
my @cmd =
(access_cfg_ssh, access_gituserhost(),
access_runeinfo("git-check $package").
" set -e; cd ".access_cfg('git-path').";".
" if test -d $package.git; then echo 1; else echo 0; fi");
my $r= cmdoutput @cmd;
if (defined $r and $r =~ m/^divert (\w+)$/) {
my $divert=$1;
my ($usedistro,) = access_distros();
# NB that if we are pushing, $usedistro will be $distro/push
$instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
$instead_distro =~ s{^/}{ access_basedistro()."/" }e;
progress f_ "diverting to %s (using config for %s)",
$divert, $instead_distro;
return check_for_git();
}
failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
return $r+0;
} elsif ($how eq 'url') {
my $prefix = access_cfg('git-check-url','git-url');
my $suffix = access_cfg('git-check-suffix','git-suffix',
'RETURN-UNDEF') // '.git';
my $url = "$prefix/$package$suffix";
my @cmd = (@curl, qw(-sS -I), $url);
my $result = cmdoutput @cmd;
$result =~ s/^\S+ 200 .*\n\r?\n//;
# curl -sS -I with https_proxy prints
# HTTP/1.0 200 Connection established
$result =~ m/^\S+ (404|200) /s or
fail +(__ "unexpected results from git check query - ").
Dumper($prefix, $result);
my $code = $1;
if ($code eq '404') {
return 0;
} elsif ($code eq '200') {
return 1;
} else {
die;
}
} elsif ($how eq 'true') {
return 1;
} elsif ($how eq 'false') {
return 0;
} else {
badcfg f_ "unknown git-check \`%s'", $how;
}
}
sub create_remote_git_repo () {
my $how = access_cfg('git-create');
if ($how eq 'ssh-cmd') {
runcmd_ordryrun
(access_cfg_ssh, access_gituserhost(),
access_runeinfo("git-create $package").
"set -e; cd ".access_cfg('git-path').";".
" cp -a _template $package.git");
} elsif ($how eq 'true') {
# nothing to do
} else {
badcfg f_ "unknown git-create \`%s'", $how;
}
}
our ($dsc_hash,$lastpush_mergeinput);
our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url);
sub prep_ud () {
dgit_privdir(); # ensures that $dgit_privdir_made is based on $maindir
$playground = fresh_playground 'dgit/unpack';
}
sub mktree_in_ud_here () {
playtree_setup $gitcfgs{local};
}
sub git_write_tree () {
my $tree = cmdoutput @git, qw(write-tree);
$tree =~ m/^\w+$/ or die "$tree ?";
return $tree;
}
sub git_add_write_tree () {
runcmd @git, qw(add -Af .);
return git_write_tree();
}
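# Finds any .git directories embedded under the current directory and
# deletes them, warning about each one on stderr.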
sub remove_stray_gits ($) {
my ($what) = @_;
my @gitscmd = qw(find -name .git -prune -print0);
debugcmd "|",@gitscmd;
open GITS, "-|", @gitscmd or confess "$!";
{
local $/="\0";
while (<GITS>) {
chomp or die;
print STDERR f_ "%s: warning: removing from %s: %s\n",
$us, $what, (messagequote $_);
rmtree $_;
}
}
$!=0; $?=0; close GITS or failedcmd @gitscmd;
}
sub mktree_in_ud_from_only_subdir ($;$) {
my ($what,$raw) = @_;
# changes into the subdir
my (@dirs) = <*/.>;
confess "expected one subdir but found @dirs ?" unless @dirs==1;
$dirs[0] =~ m#^([^/]+)/\.$# or die;
my $dir = $1;
changedir $dir;
remove_stray_gits($what);
mktree_in_ud_here();
if (!$raw) {
my ($format, $fopts) = get_source_format();
if (madformat($format)) {
rmtree '.pc';
}
}
my $tree=git_add_write_tree();
return ($tree,$dir);
}
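# Each entry is [ field name, digest module, constructor call,
# per-file key used in the archive's file_in_archive results ].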
our @files_csum_info_fields =
(['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
['Files', 'Digest::MD5', 'new()', 'md5sum']);
sub dsc_files_info () {
foreach my $csumi (@files_csum_info_fields) {
my ($fname, $module, $method) = @$csumi;
my $field = $dsc->{$fname};
next unless defined $field;
eval "use $module; 1;" or die $@;
my @out;
foreach (split /\n/, $field) {
next unless m/\S/;
m/^(\w+) (\d+) (\S+)$/ or
fail f_ "could not parse .dsc %s line \`%s'", $fname, $_;
my $digester = eval "$module"."->$method;" or die $@;
push @out, {
Hash => $1,
Bytes => $2,
Filename => $3,
Digester => $digester,
};
}
return @out;
}
fail f_ "missing any supported Checksums-* or Files field in %s",
$dsc->get_option('name');
}
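# Returns just the filenames from dsc_files_info().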
sub dsc_files () {
map { $_->{Filename} } dsc_files_info();
}
sub files_compare_inputs (@) {
my $inputs = \@_;
my %record;
my %fchecked;
my $showinputs = sub {
return join "; ", map { $_->get_option('name') } @$inputs;
};
foreach my $in (@$inputs) {
my $expected_files;
my $in_name = $in->get_option('name');
printdebug "files_compare_inputs $in_name\n";
foreach my $csumi (@files_csum_info_fields) {
my ($fname) = @$csumi;
printdebug "files_compare_inputs $in_name $fname\n";
my $field = $in->{$fname};
next unless defined $field;
my @files;
foreach (split /\n/, $field) {
next unless m/\S/;
my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or
fail "could not parse $in_name $fname line \`$_'";
printdebug "files_compare_inputs $in_name $fname $f\n";
push @files, $f;
my $re = \ $record{$f}{$fname};
if (defined $$re) {
$fchecked{$f}{$in_name} = 1;
$$re eq $info or
fail f_
"hash or size of %s varies in %s fields (between: %s)",
$f, $fname, $showinputs->();
} else {
$$re = $info;
}
}
@files = sort @files;
$expected_files //= \@files;
"@$expected_files" eq "@files" or
fail f_ "file list in %s varies between hash fields!",
$in_name;
}
$expected_files or
fail f_ "%s has no files list field(s)", $in_name;
}
printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
if $debuglevel>=2;
grep { keys %$_ == @$inputs-1 } values %fchecked
or fail f_ "no file appears in all file lists (looked in: %s)",
$showinputs->();
}
sub is_orig_file_in_dsc ($$) {
my ($f, $dsc_files_info) = @_;
return 0 if @$dsc_files_info <= 1;
# One file means no origs, and the filename doesn't have a "what
# part of dsc" component. (Consider versions ending `.orig'.)
return 0 unless $f =~ m/\.$orig_f_tail_re$/o;
return 1;
}
# This function determines whether a .changes file is source-only from
# the point of view of dak. Thus, it permits *_source.buildinfo
# files.
#
# It does not, however, permit any other buildinfo files. After a
# source-only upload, the buildds will try to upload files like
# foo_1.2.3_amd64.buildinfo. If the package maintainer included files
# named like this in their (otherwise) source-only upload, the uploads
# of the buildd can be rejected by dak. Fixing the resultant
# situation can require manual intervention. So we block such
# .buildinfo files when the user tells us to perform a source-only
# upload (such as when using the push-source subcommand with the -C
# option, which calls this function).
#
# Note, though, that when dgit is told to prepare a source-only
# upload, such as when subcommands like build-source and push-source
# without -C are used, dgit has a more restrictive notion of
# source-only .changes than dak: such uploads will never include
# *_source.buildinfo files. This is because there is no use for such
# files when using a tool like dgit to produce the source package, as
# dgit ensures the source is identical to git HEAD.
sub test_source_only_changes ($) {
my ($changes) = @_;
foreach my $l (split /\n/, getfield $changes, 'Files') {
$l =~ m/\S+$/ or next;
# \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
print f_ "purportedly source-only changes polluted by %s\n", $&;
return 0;
}
}
return 1;
}
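# Adjusts the .changes file lists to match the archive's existing
# .orig tarballs: origs already present in the archive (with matching
# checksums) are removed from the .changes, and origs listed in the
# .dsc but missing from the .changes are added.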
sub changes_update_origs_from_dsc ($$$$) {
my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
my %changes_f;
printdebug "checking origs needed ($upstreamvsn)...\n";
$_ = getfield $changes, 'Files';
m/^\w+ \d+ (\S+ \S+) \S+$/m or
fail __ "cannot find section/priority from .changes Files field";
my $placementinfo = $1;
my %changed;
printdebug "checking origs needed placement '$placementinfo'...\n";
foreach my $l (split /\n/, getfield $dsc, 'Files') {
$l =~ m/\S+$/ or next;
my $file = $&;
printdebug "origs $file | $l\n";
next unless is_orig_file_of_vsn $file, $upstreamvsn;
printdebug "origs $file is_orig\n";
my $have = archive_query('file_in_archive', $file);
if (!defined $have) {
print STDERR __ <<END;
archive does not support .orig check; hope you used --ch:--sa/-sd if needed
END
next;
}
my $found_same = 0;
my @found_differ;
foreach my $h (@$have) {
my $same = 0;
my @differ;
foreach my $csumi (@files_csum_info_fields) {
my ($fname, $module, $method, $archivefield) = @$csumi;
next unless defined $h->{$archivefield};
$_ = $dsc->{$fname};
next unless defined;
m/^(\w+) .* \Q$file\E$/m or
fail f_ ".dsc %s missing entry for %s", $fname, $file;
if ($h->{$archivefield} eq $1) {
$same++;
} else {
push @differ, f_
"%s: %s (archive) != %s (local .dsc)",
$archivefield, $h->{$archivefield}, $1;
}
}
confess "$file ".Dumper($h)." ?!" if $same && @differ;
$found_same++
if $same;
push @found_differ,
f_ "archive %s: %s", $h->{filename}, join "; ", @differ
if @differ;
}
printdebug "origs $file f.same=$found_same".
" #f._differ=$#found_differ\n";
if (@found_differ && !$found_same) {
fail join "\n",
(f_ "archive contains %s with different checksum", $file),
@found_differ;
}
# Now we edit the changes file to add or remove it
foreach my $csumi (@files_csum_info_fields) {
my ($fname, $module, $method, $archivefield) = @$csumi;
next unless defined $changes->{$fname};
if ($found_same) {
# in archive, delete from .changes if it's there
$changed{$file} = "removed" if
$changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m;
} elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) {
# not in archive, but it's here in the .changes
} else {
my $dsc_data = getfield $dsc, $fname;
$dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
my $extra = $1;
$extra =~ s/ \d+ /$&$placementinfo /
or confess "$fname $extra >$dsc_data< ?"
if $fname eq 'Files';
$changes->{$fname} .= "\n". $extra;
$changed{$file} = "added";
}
}
}
if (%changed) {
foreach my $file (keys %changed) {
progress f_
"edited .changes for archive .orig contents: %s %s",
$changed{$file}, $file;
}
my $chtmp = "$changesfile.tmp";
$changes->save($chtmp);
if (act_local()) {
rename $chtmp,$changesfile or die "$changesfile $!";
} else {
progress f_ "[new .changes left in %s]", $changesfile;
}
} else {
progress f_ "%s already has appropriate .orig(s) (if any)",
$changesfile;
}
}
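# Converts a changelog stanza's Maintainer and Date fields into a git
# author/committer line ("Name <email> <epoch> <timezone>"); in list
# context returns the components captured by $git_authline_re.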
sub clogp_authline ($) {
my ($clogp) = @_;
my $author = getfield $clogp, 'Maintainer';
if ($author =~ m/^[^"\@]+\,/) {
# single entry Maintainer field with unquoted comma
$author = ($& =~ y/,//rd).$'; # strip the comma
}
# git wants a single author; any remaining commas in $author
# are by now preceded by @ (or "). It seems safer to punt on
# "..." for now rather than attempting to dequote or something.
$author =~ s#,.*##ms unless $author =~ m/"/;
my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
my $authline = "$author $date";
$authline =~ m/$git_authline_re/o or
fail f_ "unexpected commit author line format \`%s'".
" (was generated from changelog Maintainer field)",
$authline;
return ($1,$2,$3) if wantarray;
return $authline;
}
sub vendor_patches_distro ($$) {
my ($checkdistro, $what) = @_;
return unless defined $checkdistro;
my $series = "debian/patches/\L$checkdistro\E.series";
printdebug "checking for vendor-specific $series ($what)\n";
if (!open SERIES, "<", $series) {
confess "$series $!" unless $!==ENOENT;
return;
}
while (<SERIES>) {
next unless m/\S/;
next if m/^\s+\#/;
print STDERR __ <<END;
Unfortunately, this source package uses a feature of dpkg-source where
the same source package unpacks to different source code on different
distros.  dgit cannot safely operate on such packages on affected
distros, because the meaning of source packages is not stable.

Please ask the distro/maintainer to remove the distro-specific series
files and use a different technique (if necessary, uploading actually
different packages, if different distros are supposed to have
different code).
END
fail f_ "vendor-specific %s (for %s)", $series, $what;
}
SERIES->error and confess "$!";
close SERIES;
}
sub check_for_vendor_patches () {
# This dpkg-source feature doesn't seem to be documented anywhere!
# But it can be found in the changelog (reformatted):
# commit 4fa01b70df1dc4458daee306cfa1f987b69da58c
# Author: Raphael Hertzog
# Date: Sun Oct 3 09:36:48 2010 +0200
# dpkg-source: correctly create .pc/.quilt_series with alternate
# series files
#
# If you have debian/patches/ubuntu.series and you were
# unpacking the source package on ubuntu, quilt was still
# directed to debian/patches/series instead of
# debian/patches/ubuntu.series.
#
# debian/changelog | 3 +++
# scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++-
# 2 files changed, 6 insertions(+), 1 deletion(-)
use Dpkg::Vendor;
vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
__ "Dpkg::Vendor \`current vendor'");
vendor_patches_distro(access_basedistro(),
__ "(base) distro being accessed");
vendor_patches_distro(access_nomdistro(),
__ "(nominal) distro being accessed");
}
sub check_bpd_exists () {
stat $buildproductsdir
or fail f_ "build-products-dir %s is not accessible: %s\n",
$buildproductsdir, $!;
}
sub dotdot_bpd_transfer_origs ($$$) {
my ($bpd_abs, $upstreamversion, $wanted) = @_;
# checks is_orig_file_of_vsn and, if it matches,
# calls $wanted->($leaf) and expects a boolish result
return if $buildproductsdir eq '..';
my $warned;
my $dotdot = $maindir;
$dotdot =~ s{/[^/]+$}{};
opendir DD, $dotdot or fail "opendir .. ($dotdot): $!";
while ($!=0, defined(my $leaf = readdir DD)) {
{
local ($debuglevel) = $debuglevel-1;
printdebug "DD_BPD $leaf ?\n";
}
next unless is_orig_file_of_vsn $leaf, $upstreamversion;
next unless $wanted->($leaf);
next if lstat "$bpd_abs/$leaf";
print STDERR f_
"%s: found orig(s) in .. missing from build-products-dir, transferring:\n",
$us
unless $warned++;
$! == &ENOENT or fail f_
"check orig file %s in bpd %s: %s", $leaf, $bpd_abs, $!;
lstat "$dotdot/$leaf" or fail f_
"check orig file %s in ..: %s", $leaf, $!;
if (-l _) {
stat "$dotdot/$leaf" or fail f_
"check target of orig symlink %s in ..: %s", $leaf, $!;
my $ltarget = readlink "$dotdot/$leaf" or
die "readlink $dotdot/$leaf: $!";
if ($ltarget !~ m{^/}) {
$ltarget = "$dotdot/$ltarget";
}
symlink $ltarget, "$bpd_abs/$leaf"
or die "$ltarget $bpd_abs $leaf: $!";
print STDERR f_
"%s: cloned orig symlink from ..: %s\n",
$us, $leaf;
} elsif (link "$dotdot/$leaf", "$bpd_abs/$leaf") {
print STDERR f_
"%s: hardlinked orig from ..: %s\n",
$us, $leaf;
} elsif ($! != EXDEV) {
fail f_ "failed to make %s a hardlink to %s: %s",
"$bpd_abs/$leaf", "$dotdot/$leaf", $!;
} else {
symlink "$bpd_abs/$leaf", "$dotdot/$leaf"
or die "$bpd_abs $dotdot $leaf $!";
print STDERR f_
"%s: symlinked orig from .. on other filesystem: %s\n",
$us, $leaf;
}
}
die "$dotdot; $!" if $!;
closedir DD;
}
sub import_tarball_tartrees ($$) {
my ($upstreamv, $dfi) = @_;
# cwd should be the playground
# We unpack and record the orig tarballs first, so that we only
# need disk space for one private copy of the unpacked source.
# But we can't make them into commits until we have the metadata
# from the debian/changelog, so we record the tree objects now and
# make them into commits later.
my @tartrees;
my $orig_f_base = srcfn $upstreamv, '';
foreach my $fi (@$dfi) {
# We actually import, and record as a commit, every tarball
# (unless there is only one file, in which case there seems
# little point.)
my $f = $fi->{Filename};
printdebug "import considering $f ";
(printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/;
(printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o;
my $compr_ext = $1;
my ($orig_f_part) =
$f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/;
printdebug "Y ", (join ' ', map { $_//"(none)" }
$compr_ext, $orig_f_part
), "\n";
my $path = $fi->{Path} // $f;
my $input = new IO::File $f, '<' or die "$f $!";
my $compr_pid;
my @compr_cmd;
if (defined $compr_ext) {
my $cname =
Dpkg::Compression::compression_guess_from_filename $f;
fail "Dpkg::Compression cannot handle file $f in source package"
if defined $compr_ext && !defined $cname;
my $compr_proc =
new Dpkg::Compression::Process compression => $cname;
@compr_cmd = $compr_proc->get_uncompress_cmdline();
my $compr_fh = new IO::Handle;
my $compr_pid = open $compr_fh, "-|" // confess "$!";
if (!$compr_pid) {
open STDIN, "<&", $input or confess "$!";
exec @compr_cmd;
die "dgit (child): exec $compr_cmd[0]: $!\n";
}
$input = $compr_fh;
}
rmtree "_unpack-tar";
mkdir "_unpack-tar" or confess "$!";
my @tarcmd = qw(tar -x -f -
--no-same-owner --no-same-permissions
--no-acls --no-xattrs --no-selinux);
my $tar_pid = fork // confess "$!";
if (!$tar_pid) {
chdir "_unpack-tar" or confess "$!";
open STDIN, "<&", $input or confess "$!";
exec @tarcmd;
die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!;
}
$!=0; (waitpid $tar_pid, 0) == $tar_pid or confess "$!";
!$? or failedcmd @tarcmd;
close $input or
(@compr_cmd ? ($?==SIGPIPE || failedcmd @compr_cmd)
: confess "$!");
# finally, we have the results in "tarball", but maybe
# with the wrong permissions
runcmd qw(chmod -R +rwX _unpack-tar);
changedir "_unpack-tar";
remove_stray_gits($f);
mktree_in_ud_here();
my ($tree) = git_add_write_tree();
my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
$tree = $1;
printdebug "one subtree $1\n";
} else {
printdebug "multiple subtrees\n";
}
changedir "..";
rmtree "_unpack-tar";
my $ent = [ $f, $tree ];
push @tartrees, {
Orig => !!$orig_f_part,
Sort => (!$orig_f_part ? 2 :
$orig_f_part =~ m/-/g ? 1 :
0),
F => $f,
Tree => $tree,
};
}
@tartrees = sort {
# put any without "_" first (spec is not clear whether files
# are always in the usual order). Tarballs without "_" are
# the main orig or the debian tarball.
$a->{Sort} <=> $b->{Sort} or
$a->{F} cmp $b->{F}
} @tartrees;
@tartrees;
}
sub import_tarball_commits ($$) {
my ($tartrees, $upstreamv) = @_;
# cwd should be a playtree which has a relevant debian/changelog
# fills in $tt->{Commit} for each one
my $any_orig = grep { $_->{Orig} } @$tartrees;
my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
my $clogp;
my $r1clogp;
printdebug "import clog search...\n";
parsechangelog_loop \@clogcmd, (__ "package changelog"), sub {
my ($thisstanza, $desc) = @_;
no warnings qw(exiting);
$clogp //= $thisstanza;
printdebug "import clog $thisstanza->{version} $desc...\n";
last if !$any_orig; # we don't need $r1clogp
# We look for the first (most recent) changelog entry whose
# version number is lower than the upstream version of this
# package. Then the last (least recent) previous changelog
# entry is treated as the one which introduced this upstream
# version and used for the synthetic commits for the upstream
# tarballs.
# One might think that a more sophisticated algorithm would be
# necessary. But: we do not want to scan the whole changelog
# file. Stopping when we see an earlier version, which
# necessarily then is an earlier upstream version, is the only
# realistic way to do that. Then, either the earliest
# changelog entry we have seen so far is indeed the earliest
# upload of this upstream version; or there are only changelog
# entries relating to later upstream versions (which is not
# possible unless the changelog and .dsc disagree about the
# version). Then it remains to choose between the physically
# last entry in the file, and the one with the lowest version
# number. If these are not the same, we guess that the
# versions were created in a non-monotonic order rather than
# that the changelog entries have been misordered.
printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
last if version_compare($thisstanza->{version}, $upstreamv) < 0;
$r1clogp = $thisstanza;
printdebug "import clog $r1clogp->{version} becomes r1\n";
};
$clogp or fail __ "package changelog has no entries!";
my $authline = clogp_authline $clogp;
my $changes = getfield $clogp, 'Changes';
$changes =~ s/^\n//; # Changes: \n
my $cversion = getfield $clogp, 'Version';
if (@$tartrees) {
$r1clogp //= $clogp; # maybe there's only one entry;
my $r1authline = clogp_authline $r1clogp;
# Strictly, r1authline might now be wrong if it's going to be
# unused because !$any_orig. Whatever.
printdebug "import tartrees authline $authline\n";
printdebug "import tartrees r1authline $r1authline\n";
foreach my $tt (@$tartrees) {
printdebug "import tartree $tt->{F} $tt->{Tree}\n";
my $mbody = f_ "Import %s", $tt->{F};
$tt->{Commit} = hash_commit_text($tt->{Orig} ? <<END_O : <<END_T);
tree $tt->{Tree}
author $r1authline
committer $r1authline

$mbody

[dgit import orig $tt->{F}]
END_O
tree $tt->{Tree}
author $authline
committer $authline

$mbody

[dgit import tarball $package $cversion $tt->{F}]
END_T
}
}
return ($authline, $clogp, $changes);
}
sub generate_commits_from_dsc () {
# See big comment in fetch_from_archive, below.
# See also README.dsc-import.
prep_ud();
changedir $playground;
my $bpd_abs = bpd_abs();
my $upstreamv = upstreamversion $dsc->{version};
my @dfi = dsc_files_info();
dotdot_bpd_transfer_origs $bpd_abs, $upstreamv,
sub { grep { $_->{Filename} eq $_[0] } @dfi };
foreach my $fi (@dfi) {
my $f = $fi->{Filename};
die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
my $upper_f = "$bpd_abs/$f";
printdebug "considering reusing $f: ";
if (link_ltarget "$upper_f,fetch", $f) {
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!;
} elsif (link_ltarget $upper_f, $f) {
printdebug "linked.\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!;
} else {
printdebug "absent.\n";
}
my $refetched;
complete_file_from_dsc('.', $fi, \$refetched)
or next;
printdebug "considering saving $f: ";
if (rename_link_xf 1, $f, $upper_f) {
printdebug "linked.\n";
} elsif ((printdebug "($@) "),
$! != EEXIST) {
fail f_ "saving %s: %s", "$buildproductsdir/$f", $@;
} elsif (!$refetched) {
printdebug "no need.\n";
} elsif (rename_link_xf 1, $f, "$upper_f,fetch") {
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($@) "),
$! != EEXIST) {
fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $@;
} else {
printdebug "cannot.\n";
}
}
my @tartrees;
@tartrees = import_tarball_tartrees($upstreamv, \@dfi)
unless @dfi == 1; # only one file in .dsc
my $dscfn = "$package.dsc";
my $treeimporthow = 'package';
open D, ">", $dscfn or die "$dscfn: $!";
print D $dscdata or die "$dscfn: $!";
close D or die "$dscfn: $!";
my @cmd = qw(dpkg-source);
push @cmd, '--no-check' if $dsc_checked;
if (madformat $dsc->{format}) {
push @cmd, '--skip-patches';
$treeimporthow = 'unpatched';
}
push @cmd, qw(-x --), $dscfn;
runcmd @cmd;
my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package");
if (madformat $dsc->{format}) {
check_for_vendor_patches();
}
my $dappliedtree;
if (madformat $dsc->{format}) {
my @pcmd = qw(dpkg-source --before-build .);
runcmd shell_cmd 'exec >/dev/null', @pcmd;
rmtree '.pc';
$dappliedtree = git_add_write_tree();
}
my ($authline, $clogp, $changes) =
import_tarball_commits(\@tartrees, $upstreamv);
my $cversion = getfield $clogp, 'Version';
printdebug "import main commit\n";
open C, ">../commit.tmp" or confess "$!";
print C <<END or confess "$!";
tree $tree
END
print C <<END or confess "$!" foreach @tartrees;
parent $_->{Commit}
END
print C <<END or confess "$!";
author $authline
committer $authline

$changes

[dgit import $treeimporthow $package $cversion]
END
close C or confess "$!";
my $rawimport_hash = hash_commit qw(../commit.tmp);
if (madformat $dsc->{format}) {
printdebug "import apply patches...\n";
# regularise the state of the working tree so that
# the checkout of $rawimport_hash works nicely.
my $dappliedcommit = hash_commit_text(</dev/null 2>>../../gbp-pq-output', @showcmd;
debugcmd "+",@realcmd;
if (system @realcmd) {
die f_ "%s failed: %s\n",
+(shellquote @showcmd),
failedcmd_waitstatus();
}
my $gapplied = git_rev_parse('HEAD');
my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
$gappliedtree eq $dappliedtree or
fail f_ < $rawimport_hash,
Info => __ "Import of source package",
};
my @output = ($rawimport_mergeinput);
if ($lastpush_mergeinput) {
my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput);
my $oversion = getfield $oldclogp, 'Version';
my $vcmp =
version_compare($oversion, $cversion);
if ($vcmp < 0) {
@output = ($rawimport_mergeinput, $lastpush_mergeinput,
{ ReverseParents => 1,
Message => (f_ < 0) {
print STDERR f_ <{Filename}"
# and will set $$refetched=1 if it did so (or tried to).
my $f = $fi->{Filename};
my $tf = "$dstdir/$f";
my $downloaded = 0;
my $got;
my $checkhash = sub {
open F, "<", "$tf" or die "$tf: $!";
$fi->{Digester}->reset();
$fi->{Digester}->addfile(*F);
F->error and confess "$!";
$got = $fi->{Digester}->hexdigest();
return $got eq $fi->{Hash};
};
if (stat_exists $tf) {
if ($checkhash->()) {
progress f_ "using existing %s", $f;
return 1;
}
if (!$refetched) {
fail f_ "file %s has hash %s but .dsc demands hash %s".
" (perhaps you should delete this file?)",
$f, $got, $fi->{Hash};
}
progress f_ "need to fetch correct version of %s", $f;
unlink $tf or die "$tf $!";
$$refetched = 1;
} else {
printdebug "$tf does not exist, need to fetch\n";
}
my $furl = $dscurl;
$furl =~ s{/[^/]+$}{};
$furl .= "/$f";
die "$f ?" unless $f =~ m/^\Q${package}\E_/;
die "$f ?" if $f =~ m#/#;
runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl";
return 0 if !act_local();
$checkhash->() or
fail f_ "file %s has hash %s but .dsc demands hash %s".
" (got wrong file from archive!)",
$f, $got, $fi->{Hash};
return 1;
}
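# Makes sure each orig tarball named in the .dsc is present (and
# correct) in the build products directory, fetching any that are
# missing.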
sub ensure_we_have_orig () {
my @dfi = dsc_files_info();
foreach my $fi (@dfi) {
my $f = $fi->{Filename};
next unless is_orig_file_in_dsc($f, \@dfi);
complete_file_from_dsc($buildproductsdir, $fi)
or next;
}
}
#---------- git fetch ----------
sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); }
sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); }
# We fetch some parts of lrfetchrefs/*. Ideally we delete these
# locally fetched refs because they have unhelpful names and clutter
# up gitk etc. So we track whether we have "used up" head ref (ie,
# whether we have made another local ref which refers to this object).
#
# (If we deleted them unconditionally, then we might end up
# re-fetching the same git objects each time dgit fetch was run.)
#
# So, each use of lrfetchrefs needs to be accompanied by arrangements
# in git_fetch_us to fetch the refs in question, and possibly a call
# to lrfetchref_used.
our (%lrfetchrefs_f, %lrfetchrefs_d);
# $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid
sub lrfetchref_used ($) {
my ($fullrefname) = @_;
my $objid = $lrfetchrefs_f{$fullrefname};
$lrfetchrefs_d{$fullrefname} = $objid if defined $objid;
}
sub git_lrfetch_sane {
my ($url, $supplementary, @specs) = @_;
# Make a 'refs/'.lrfetchrefs.'/*' be just like on server,
# at least as regards @specs. Also leave the results in
# %lrfetchrefs_f, and arrange for lrfetchref_used to be
# able to clean these up.
#
# With $supplementary==1, @specs must not contain wildcards
# and we add to our previous fetches (non-atomically).
# This is rather miserable:
# When git fetch --prune is passed a fetchspec ending with a *,
# it does a plausible thing. If there is no * then:
# - it matches subpaths too, even if the supplied refspec
# starts refs, and behaves completely madly if the source
# has refs/refs/something. (See, for example, Debian #NNNN.)
# - if there is no matching remote ref, it bombs out the whole
# fetch.
# We want to fetch a fixed ref, and we don't know in advance
# if it exists, so this is not suitable.
#
# Our workaround is to use git ls-remote. git ls-remote has its
# own quirks. Notably, it has the absurd multi-tail-matching
# behaviour: git ls-remote R refs/foo can report refs/foo AND
# refs/refs/foo etc.
#
# Also, we want an idempotent snapshot, but we have to make two
# calls to the remote: one to git ls-remote and one to git fetch. The
# solution is to use git ls-remote to obtain a target state, and
# git fetch to try to generate it. If we don't manage to generate
# the target state, we try again.
printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n";
my $specre = join '|', map {
my $x = $_;
$x =~ s/\W/\\$&/g;
my $wildcard = $x =~ s/\\\*$/.*/;
die if $wildcard && $supplementary;
"(?:refs/$x)";
} @specs;
printdebug "git_lrfetch_sane specre=$specre\n";
my $wanted_rref = sub {
local ($_) = @_;
return m/^(?:$specre)$/;
};
my $fetch_iteration = 0;
FETCH_ITERATION:
for (;;) {
printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
if (++$fetch_iteration > 10) {
fail __ "too many iterations trying to get sane fetch!";
}
my @look = map { "refs/$_" } @specs;
my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look);
debugcmd "|",@lcmd;
my %wantr;
open GITLS, "-|", @lcmd or confess "$!";
while (<GITLS>) {
printdebug "=> ", $_;
m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
my ($objid,$rrefname) = ($1,$2);
if (!$wanted_rref->($rrefname)) {
print STDERR f_ <($rrefname)) {
printdebug <';
my $want = $wantr{$rrefname};
next if $got eq $want;
if (!defined $objgot{$want}) {
fail __ <{Clogp} exists and returns it
my ($mi) = @_;
$mi->{Clogp} = commit_getclogp($mi->{Commit});
}
sub mergeinfo_version ($) {
return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
}
sub fetch_from_archive_record_1 ($) {
my ($hash) = @_;
runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash;
cmdoutput @git, qw(log -n2), $hash;
# ... gives git a chance to complain if our commit is malformed
}
sub fetch_from_archive_record_2 ($) {
my ($hash) = @_;
my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash);
if (act_local()) {
cmdoutput @upd_cmd;
} else {
dryrun_report @upd_cmd;
}
}
sub parse_dsc_field_def_dsc_distro () {
$dsc_distro //= cfg qw(dgit.default.old-dsc-distro
dgit.default.distro);
}
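# Parses the Dgit field (if any) from the .dsc, setting $dsc_hash,
# $dsc_distro, $dsc_hint_tag and $dsc_hint_url as appropriate.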
sub parse_dsc_field ($$) {
my ($dsc, $what) = @_;
my $f;
foreach my $field (@ourdscfield) {
$f = $dsc->{$field};
last if defined $f;
}
if (!defined $f) {
progress f_ "%s: NO git hash", $what;
parse_dsc_field_def_dsc_distro();
} elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url)
= $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) {
progress f_ "%s: specified git info (%s)", $what, $dsc_distro;
$dsc_hint_tag = [ $dsc_hint_tag ];
} elsif ($f =~ m/^\w+\s*$/) {
$dsc_hash = $&;
parse_dsc_field_def_dsc_distro();
$dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'),
$dsc_distro ];
progress f_ "%s: specified git hash", $what;
} else {
fail f_ "%s: invalid Dgit info", $what;
}
}
sub resolve_dsc_field_commit ($$) {
my ($already_distro, $already_mapref) = @_;
return unless defined $dsc_hash;
my $mapref =
defined $already_mapref &&
($already_distro eq $dsc_distro || !$chase_dsc_distro)
? $already_mapref : undef;
my $do_fetch;
$do_fetch = sub {
my ($what, @fetch) = @_;
local $idistro = $dsc_distro;
my $lrf = lrfetchrefs;
if (!$chase_dsc_distro) {
progress f_ "not chasing .dsc distro %s: not fetching %s",
$dsc_distro, $what;
return 0;
}
progress f_ ".dsc names distro %s: fetching %s", $dsc_distro, $what;
my $url = access_giturl();
if (!defined $url) {
defined $dsc_hint_url or fail f_ <((__ "rewrite map"), $rewritemap) or return;
$mapref = $lrf.'/'.$rewritemap;
}
my $rewritemapdata = git_cat_file $mapref.':map';
if (defined $rewritemapdata
&& $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
progress __
"server's git history rewrite map contains a relevant entry!";
$dsc_hash = $1;
if (defined $dsc_hash) {
progress __ "using rewritten git hash in place of .dsc value";
} else {
progress __ "server data says .dsc hash is to be disregarded";
}
}
}
if (!defined git_cat_file $dsc_hash) {
my @tags = map { "tags/".$_ } @$dsc_hint_tag;
my $lrf = $do_fetch->((__ "additional commits"), @tags) &&
defined git_cat_file $dsc_hash
or fail f_ < $lastpush_hash,
Info => (__ "dgit suite branch on dgit git server"),
};
my $lastfetch_hash = git_get_ref(lrref());
printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n";
my $lastfetch_mergeinput = $lastfetch_hash && {
Commit => $lastfetch_hash,
Info => (__ "dgit client's archive history view"),
};
my $dsc_mergeinput = $dsc_hash && {
Commit => $dsc_hash,
Info => (__ "Dgit field in .dsc from archive"),
};
my $cwd = getcwd();
my $del_lrfetchrefs = sub {
changedir $cwd;
my $gur;
printdebug "del_lrfetchrefs...\n";
foreach my $fullrefname (sort keys %lrfetchrefs_d) {
my $objid = $lrfetchrefs_d{$fullrefname};
printdebug "del_lrfetchrefs: $objid $fullrefname\n";
if (!$gur) {
$gur ||= new IO::Handle;
open $gur, "|-", qw(git update-ref --stdin) or confess "$!";
}
printf $gur "delete %s %s\n", $fullrefname, $objid;
}
if ($gur) {
close $gur or failedcmd "git update-ref delete lrfetchrefs";
}
};
if (defined $dsc_hash) {
ensure_we_have_orig();
if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
@mergeinputs = $dsc_mergeinput
} elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
print STDERR f_ <{Commit};
$h and is_fast_fwd($lastfetch_hash, $h);
# If true, one of the existing parents of this commit
# is a descendant of the $lastfetch_hash, so we'll
# be ff from that automatically.
} @mergeinputs
) {
# Otherwise:
push @mergeinputs, $lastfetch_mergeinput;
}
printdebug "fetch mergeinfos:\n";
foreach my $mi (@mergeinputs) {
if ($mi->{Info}) {
printdebug " commit $mi->{Commit} $mi->{Info}\n";
} else {
printdebug sprintf " ReverseParents=%d Message=%s",
$mi->{ReverseParents}, $mi->{Message};
}
}
my $compat_info= pop @mergeinputs
if $mergeinputs[$#mergeinputs]{Message};
@mergeinputs = grep { defined $_->{Commit} } @mergeinputs;
my $hash;
if (@mergeinputs > 1) {
# here we go, then:
my $tree_commit = $mergeinputs[0]{Commit};
my $tree = get_tree_of_commit $tree_commit;;
# We use the changelog author of the package in question as the
# author of this pseudo-merge. This is (roughly) correct if
# this commit is simply representing a non-dgit upload.
# (Roughly because it does not record sponsorship - but we
# don't have sponsorship info because that's in the .changes,
# which isn't in the archive.)
#
# But, it might be that we are representing archive history
# updates (including in-archive copies). These are not really
# the responsibility of the person who created the .dsc, but
# there is no-one whose name we should better use. (The
# author of the .dsc-named commit is clearly worse.)
my $useclogp = mergeinfo_getclogp $mergeinputs[0];
my $author = clogp_authline $useclogp;
my $cversion = getfield $useclogp, 'Version';
my $mcf = dgit_privdir()."/mergecommit";
open MC, ">", $mcf or die "$mcf $!";
print MC <<END or confess "$!";
tree $tree
END
my @parents = grep { $_->{Commit} } @mergeinputs;
@parents = reverse @parents if $compat_info->{ReverseParents};
print MC <<END or confess "$!" foreach @parents;
parent $_->{Commit}
END
print MC <<END or confess "$!";
author $author
committer $author

END
if (defined $compat_info->{Message}) {
print MC $compat_info->{Message} or confess "$!";
} else {
print MC f_ <<END, $package, $cversion, $csuite or confess "$!";
Record %s (%s) in archive suite %s

Record that
END
my $message_add_info = sub {
my ($mi) = @_;
my $mversion = mergeinfo_version $mi;
printf MC "  %-20s %s\n", $mversion, $mi->{Info}
or confess "$!";
};
$message_add_info->($mergeinputs[0]);
print MC __ <<END or confess "$!";
should be treated as descended from
END
$message_add_info->($_) foreach @mergeinputs[1..$#mergeinputs];
}
close MC or confess "$!";
$hash = hash_commit $mcf;
} else {
$hash = $mergeinputs[0]{Commit};
}
printdebug "fetch hash=$hash\n";
my $chkff = sub {
my ($lasth, $what) = @_;
return unless $lasth;
confess "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
};
$chkff->($lastpush_hash, __ 'dgit repo server tip (last push)')
if $lastpush_hash;
$chkff->($lastfetch_hash, __ 'local tracking tip (last fetch)');
fetch_from_archive_record_1($hash);
if (defined $skew_warning_vsn) {
printdebug "SKEW CHECK WANT $skew_warning_vsn\n";
my $gotclogp = commit_getclogp($hash);
my $got_vsn = getfield $gotclogp, 'Version';
printdebug "SKEW CHECK GOT $got_vsn\n";
if (version_compare($got_vsn, $skew_warning_vsn) < 0) {
print STDERR f_ <", "$attrs.new" or die "$attrs.new $!";
if (!open ATTRS, "<", $attrs) {
$!==ENOENT or die "$attrs: $!";
} else {
while (<ATTRS>) {
chomp;
next if m{^debian/changelog\s};
print NATTRS $_, "\n" or confess "$!";
}
ATTRS->error and confess "$!";
close ATTRS;
}
print NATTRS "debian/changelog merge=$driver\n" or confess "$!";
close NATTRS;
set_local_git_config "$cb.name", __ 'debian/changelog merge driver';
set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A';
rename "$attrs.new", "$attrs" or die "$attrs: $!";
}
sub setup_useremail (;$) {
my ($always) = @_;
return unless $always || access_cfg_bool(1, 'setup-useremail');
my $setup = sub {
my ($k, $envvar) = @_;
my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar};
return unless defined $v;
set_local_git_config "user.$k", $v;
};
$setup->('email', 'DEBEMAIL');
$setup->('name', 'DEBFULLNAME');
}
sub ensure_setup_existing_tree () {
my $k = "remote.$remotename.skipdefaultupdate";
my $c = git_get_config $k;
return if defined $c;
set_local_git_config $k, 'true';
}
sub open_main_gitattrs () {
confess 'internal error no maindir' unless defined $maindir;
my $gai = new IO::File "$maindir_gitcommon/info/attributes"
or $!==ENOENT
or die "open $maindir_gitcommon/info/attributes: $!";
return $gai;
}
our $gitattrs_ourmacro_re = qr{^\[attr\]dgit-defuse-attrs\s};
sub is_gitattrs_setup () {
# return values:
# trueish
# 1: gitattributes set up and should be left alone
# falseish
# 0: there is a dgit-defuse-attrs but it needs fixing
# undef: there is none
my $gai = open_main_gitattrs();
return 0 unless $gai;
while (<$gai>) {
next unless m{$gitattrs_ourmacro_re};
return 1 if m{\s-working-tree-encoding\s};
printdebug "is_gitattrs_setup: found old macro\n";
return 0;
}
$gai->error and confess "$!";
printdebug "is_gitattrs_setup: found nothing\n";
return undef;
}
sub setup_gitattrs (;$) {
my ($always) = @_;
return unless $always || access_cfg_bool(1, 'setup-gitattributes');
my $already = is_gitattrs_setup();
if ($already) {
progress __ < $af.new" or confess "$!";
print GAO <) {
if (m{$gitattrs_ourmacro_re}) {
die unless defined $already;
$_ = $new;
}
chomp;
print GAO $_, "\n" or confess "$!";
}
$gai->error and confess "$!";
}
close GAO or confess "$!";
rename "$af.new", "$af" or fail f_ "install %s: %s", $af, $!;
}
sub setup_new_tree () {
setup_mergechangelogs();
setup_useremail();
setup_gitattrs();
}
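# Warns (on stderr) if $treeish contains any non-empty .gitattributes
# files; does nothing if dgit's dgit-defuse-attrs setup is already in
# place.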
sub check_gitattrs ($$) {
my ($treeish, $what) = @_;
return if is_gitattrs_setup;
local $/="\0";
my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:");
debugcmd "|",@cmd;
my $gafl = new IO::File;
open $gafl, "-|", @cmd or confess "$!";
while (<$gafl>) {
chomp or die;
s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die;
next if $1 == 0;
next unless m{(?:^|/)\.gitattributes$};
# oh dear, found one
print STDERR f_ <(), and returns undef
# in parent, returns canonical suite name for $tsuite
my $canonsuitefh = IO::File::new_tmpfile;
my $pid = fork // confess "$!";
if (!$pid) {
forkcheck_setup();
$isuite = $tsuite;
$us .= " [$isuite]";
$debugprefix .= " ";
progress f_ "fetching %s...", $tsuite;
canonicalise_suite();
print $canonsuitefh $csuite, "\n" or confess "$!";
close $canonsuitefh or confess "$!";
$fn->();
return undef;
}
waitpid $pid,0 == $pid or confess "$!";
fail f_ "failed to obtain %s: %s", $tsuite, waitstatusmsg()
if $? && $?!=256*4;
seek $canonsuitefh,0,0 or confess "$!";
local $csuite = <$canonsuitefh>;
confess "$!" unless defined $csuite && chomp $csuite;
if ($? == 256*4) {
printdebug "multisuite $tsuite missing\n";
return $csuite;
}
printdebug "multisuite $tsuite ok (canon=$csuite)\n";
push @$mergeinputs, {
Ref => lrref,
Info => $csuite,
};
return $csuite;
}
sub fork_for_multisuite ($) {
my ($before_fetch_merge) = @_;
# if nothing unusual, just returns ''
#
# if multisuite:
# returns 0 to caller in child, to do first of the specified suites
# in child, $csuite is not yet set
#
# returns 1 to caller in parent, to finish up anything needed after
# in parent, $csuite is set to canonicalised portmanteau
my $org_isuite = $isuite;
my @suites = split /\,/, $isuite;
return '' unless @suites > 1;
printdebug "fork_for_multisuite: @suites\n";
my @mergeinputs;
my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
sub { });
return 0 unless defined $cbasesuite;
fail f_ "package %s missing in (base suite) %s", $package, $cbasesuite
unless @mergeinputs;
my @csuites = ($cbasesuite);
$before_fetch_merge->();
foreach my $tsuite (@suites[1..$#suites]) {
$tsuite =~ s/^-/$cbasesuite-/;
my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
sub {
@end = ();
fetch_one();
finish 0;
});
$csubsuite =~ s/^\Q$cbasesuite\E-/-/;
push @csuites, $csubsuite;
}
foreach my $mi (@mergeinputs) {
my $ref = git_get_ref $mi->{Ref};
die "$mi->{Ref} ?" unless length $ref;
$mi->{Commit} = $ref;
}
$csuite = join ",", @csuites;
my $previous = git_get_ref lrref;
if ($previous) {
unshift @mergeinputs, {
Commit => $previous,
Info => (__ "local combined tracking branch"),
Warning => (__
"archive seems to have rewound: local tracking branch is ahead!"),
};
}
foreach my $ix (0..$#mergeinputs) {
$mergeinputs[$ix]{Index} = $ix;
}
@mergeinputs = sort {
-version_compare(mergeinfo_version $a,
mergeinfo_version $b) # highest version first
or
$a->{Index} <=> $b->{Index}; # earliest in spec first
} @mergeinputs;
my @needed;
NEEDED:
foreach my $mi (@mergeinputs) {
printdebug "multisuite merge check $mi->{Info}\n";
foreach my $previous (@needed) {
next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
printdebug "multisuite merge un-needed $previous->{Info}\n";
next NEEDED;
}
push @needed, $mi;
printdebug "multisuite merge this-needed\n";
$mi->{Character} = '+';
}
$needed[0]{Character} = '*';
my $output = $needed[0]{Commit};
if (@needed > 1) {
printdebug "multisuite merge nontrivial\n";
my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
my $commit = "tree $tree\n";
my $msg = f_ "Combine archive branches %s [dgit]\n\n".
"Input branches:\n",
$csuite;
foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
printdebug "multisuite merge include $mi->{Info}\n";
$mi->{Character} //= ' ';
$commit .= "parent $mi->{Commit}\n";
$msg .= sprintf " %s %-25s %s\n",
$mi->{Character},
(mergeinfo_version $mi),
$mi->{Info};
}
my $authline = clogp_authline mergeinfo_getclogp $needed[0];
$msg .= __ "\nKey\n".
" * marks the highest version branch, which we choose to use\n".
" + marks each branch which was not already an ancestor\n\n";
$msg .=
"[dgit multi-suite $csuite]\n";
$commit .=
"author $authline\n".
"committer $authline\n\n";
$output = hash_commit_text $commit.$msg;
printdebug "multisuite merge generated $output\n";
}
fetch_from_archive_record_1($output);
fetch_from_archive_record_2($output);
progress f_ "calculated combined tracking suite %s", $csuite;
return 1;
}
sub clone_set_head () {
open H, "> .git/HEAD" or confess "$!";
print H "ref: ".lref()."\n" or confess "$!";
close H or confess "$!";
}
sub clone_finish ($) {
my ($dstdir) = @_;
runcmd @git, qw(reset --hard), lrref();
runcmd qw(bash -ec), <<'END';
set -o pipefail
git ls-tree -r --name-only -z HEAD | \
xargs -0r touch -h -r . --
END
printdone f_ "ready for work in %s", $dstdir;
}
sub clone ($) {
# in multisuite, returns twice!
# once in parent after first suite fetched,
# and then again in child after everything is finished
my ($dstdir) = @_;
badusage __ "dry run makes no sense with clone" unless act_local();
my $multi_fetched = fork_for_multisuite(sub {
printdebug "multi clone before fetch merge\n";
changedir $dstdir;
record_maindir();
});
if ($multi_fetched) {
printdebug "multi clone after fetch merge\n";
clone_set_head();
clone_finish($dstdir);
return;
}
printdebug "clone main body\n";
mkdir $dstdir or fail f_ "create \`%s': %s", $dstdir, $!;
changedir $dstdir;
check_bpd_exists();
canonicalise_suite();
my $hasgit = check_for_git();
runcmd @git, qw(init -q);
record_maindir();
setup_new_tree();
clone_set_head();
my $giturl = access_giturl(1);
if (defined $giturl) {
runcmd @git, qw(remote add), 'origin', $giturl;
}
if ($hasgit) {
progress __ "fetching existing git history";
git_fetch_us();
runcmd_ordryrun_local @git, qw(fetch origin);
} else {
progress __ "starting new git history";
}
fetch_from_archive() or no_such_package;
my $vcsgiturl = $dsc->{'Vcs-Git'};
if (length $vcsgiturl) {
$vcsgiturl =~ s/\s+-b\s+\S+//g;
runcmd @git, qw(remote add vcs-git), $vcsgiturl;
}
clone_finish($dstdir);
}
sub fetch_one () {
canonicalise_suite();
if (check_for_git()) {
git_fetch_us();
}
fetch_from_archive() or no_such_package();
my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'};
if (length $vcsgiturl and
(grep { $csuite eq $_ }
split /\;/,
cfg 'dgit.vcs-git.suites')) {
my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF';
if (defined $current && $current ne $vcsgiturl) {
print STDERR f_ <) {
next if m/^\s*\#/;
next unless m/\S/;
s/\s+$//; # ignore missing final newline
if (m/\s*\=\s*/) {
my ($k, $v) = ($`, $'); #');
$v =~ s/^"(.*)"$/$1/;
$options{$k} = $v;
} else {
$options{$_} = 1;
}
}
F->error and confess "$!";
close F;
} else {
confess "$!" unless $!==&ENOENT;
}
if (!open F, "debian/source/format") {
confess "$!" unless $!==&ENOENT;
return '';
}
$_ = <F>;
F->error and confess "$!";
chomp;
return ($_, \%options);
}
sub madformat_wantfixup ($) {
my ($format) = @_;
return 0 unless $format eq '3.0 (quilt)';
our $quilt_mode_warned;
if ($quilt_mode eq 'nocheck') {
progress f_ "Not doing any fixup of \`%s'".
" due to --no-quilt-fixup or --quilt=nocheck", $format
unless $quilt_mode_warned++;
return 0;
}
progress f_ "Format \`%s', need to check/update patch stack", $format
unless $quilt_mode_warned++;
return 1;
}
sub maybe_split_brain_save ($$$) {
my ($headref, $dgitview, $msg) = @_;
# => message fragment "$saved" describing disposition of $dgitview
# (used inside parens, in the English texts)
my $save = $internal_object_save{'dgit-view'};
return f_ "commit id %s", $dgitview unless defined $save;
my @cmd = (shell_cmd 'cd "$1"; shift', $maindir,
git_update_ref_cmd
"dgit --dgit-view-save $msg HEAD=$headref",
$save, $dgitview);
runcmd @cmd;
return f_ "and left in %s", $save;
}
# An "infopair" is a tuple [ $thing, $what ]
# (often $thing is a commit hash; $what is a description)
sub infopair_cond_equal ($$) {
my ($x,$y) = @_;
$x->[0] eq $y->[0] or fail <<END;
$x->[1] ($x->[0]) not equal to $y->[1] ($y->[0])
END
};
sub infopair_lrf_tag_lookup ($$) {
my ($tagnames, $what) = @_;
# $tagname may be an array ref
my @tagnames = ref $tagnames ? @$tagnames : ($tagnames);
printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n";
foreach my $tagname (@tagnames) {
my $lrefname = lrfetchrefs."/tags/$tagname";
my $tagobj = $lrfetchrefs_f{$lrefname};
next unless defined $tagobj;
printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n";
return [ git_rev_parse($tagobj), $what ];
}
fail @tagnames==1 ? (f_ <<END, $tagnames[0], $what)
Did not find the tag %s for the %s
END
: (f_ <<END, "@tagnames", $what);
Did not find any of the tags %s for the %s
END
}
sub infopair_cond_ff ($$) {
my ($anc,$desc) = @_;
is_fast_fwd($anc->[0], $desc->[0]) or
fail f_ <<END, $anc->[1], $anc->[0], $desc->[1], $desc->[0];
%s (%s) .. %s (%s) is not fast forward
END
};
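# Checks that the version we intend to overwrite matches the version
# actually in the archive (and, for a bare --overwrite, that the
# changelog mentions that archive version); returns the archive
# version as an infopair.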
sub pseudomerge_version_check ($$) {
my ($clogp, $archive_hash) = @_;
my $arch_clogp = commit_getclogp $archive_hash;
my $i_arch_v = [ (getfield $arch_clogp, 'Version'),
__ 'version currently in archive' ];
if (defined $overwrite_version) {
if (length $overwrite_version) {
infopair_cond_equal([ $overwrite_version,
'--overwrite= version' ],
$i_arch_v);
} else {
my $v = $i_arch_v->[0];
progress f_
"Checking package changelog for archive version %s ...", $v;
my $cd;
eval {
my @xa = ("-f$v", "-t$v");
my $vclogp = parsechangelog @xa;
my $gf = sub {
my ($fn) = @_;
[ (getfield $vclogp, $fn),
(f_ "%s field from dpkg-parsechangelog %s",
$fn, "@xa") ];
};
my $cv = $gf->('Version');
infopair_cond_equal($i_arch_v, $cv);
$cd = $gf->('Distribution');
};
if ($@) {
$@ =~ s/^\n//s;
$@ =~ s/^dgit: //gm;
fail "$@".
f_ "Perhaps debian/changelog does not mention %s ?", $v;
}
fail f_ <<END, $cd->[1], $cd->[0], $v
%s is %s
Your tree seems to be based on earlier (not uploaded) %s.
END
if $cd->[0] =~ m/UNRELEASED/;
}
}
printdebug "pseudomerge_version_check i_arch_v @$i_arch_v\n";
return $i_arch_v;
}
sub pseudomerge_hash_commit ($$$$ $$) {
my ($clogp, $dgitview, $archive_hash, $i_arch_v,
$msg_cmd, $msg_msg) = @_;
progress f_ "Declaring that HEAD includes all changes in %s...",
$i_arch_v->[0];
my $tree = cmdoutput qw(git rev-parse), "${dgitview}:";
my $authline = clogp_authline $clogp;
chomp $msg_msg;
$msg_cmd .=
!defined $overwrite_version ? ""
: !length $overwrite_version ? " --overwrite"
: " --overwrite=".$overwrite_version;
# Contributing parent is the first parent - that makes
# git rev-list --first-parent DTRT.
my $pmf = dgit_privdir()."/pseudomerge";
open MC, ">", $pmf or die "$pmf $!";
print MC <<END or confess "$!";
tree $tree
parent $dgitview
parent $archive_hash
author $authline
committer $authline

$msg_msg

[$msg_cmd]
END
close MC or confess "$!";
return hash_commit($pmf);
}
sub splitbrain_pseudomerge ($$$$) {
my ($clogp, $maintview, $dgitview, $archive_hash) = @_;
# => $merged_dgitview
printdebug "splitbrain_pseudomerge...\n";
#
# We: debian/PREVIOUS HEAD($maintview)
# expect: o ----------------- o
# \ \
# o o
# a/d/PREVIOUS $dgitview
# $archive_hash \
# If so, \ \
# we do: `------------------ o
# this: $dgitview'
#
return $dgitview unless defined $archive_hash;
return $dgitview if deliberately_not_fast_forward();
printdebug "splitbrain_pseudomerge...\n";
my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash);
if (!defined $overwrite_version) {
progress __ "Checking that HEAD includes all changes in archive...";
}
return $dgitview if is_fast_fwd $archive_hash, $dgitview;
if (defined $overwrite_version) {
} elsif (!eval {
my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_nomdistro;
my $i_dep14 = infopair_lrf_tag_lookup($t_dep14,
__ "maintainer view tag");
my $t_dgit = debiantag_new $i_arch_v->[0], access_nomdistro;
my $i_dgit = infopair_lrf_tag_lookup($t_dgit, __ "dgit view tag");
my $i_archive = [ $archive_hash, __ "current archive contents" ];
printdebug "splitbrain_pseudomerge i_archive @$i_archive\n";
infopair_cond_equal($i_dgit, $i_archive);
infopair_cond_ff($i_dep14, $i_dgit);
infopair_cond_ff($i_dep14, [ $maintview, 'HEAD' ]);
1;
}) {
$@ =~ s/^\n//; chomp $@;
print STDERR <[0];
my $r = pseudomerge_hash_commit
$clogp, $dgitview, $archive_hash, $i_arch_v,
"dgit --quilt=$quilt_mode",
(defined $overwrite_version
? f_ "Declare fast forward from %s\n", $arch_v
: f_ "Make fast forward from %s\n", $arch_v);
maybe_split_brain_save $maintview, $r, "pseudomerge";
progress f_ "Made pseudo-merge of %s into dgit view.", $arch_v;
return $r;
}
sub plain_overwrite_pseudomerge ($$$) {
my ($clogp, $head, $archive_hash) = @_;
printdebug "plain_overwrite_pseudomerge...";
my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash);
return $head if is_fast_fwd $archive_hash, $head;
my $m = f_ "Declare fast forward from %s", $i_arch_v->[0];
my $r = pseudomerge_hash_commit
$clogp, $head, $archive_hash, $i_arch_v,
"dgit", $m;
runcmd git_update_ref_cmd $m, 'HEAD', $r, $head;
progress f_ "Make pseudo-merge of %s into your HEAD.", $i_arch_v->[0];
return $r;
}
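# Parses the saved dpkg-parsechangelog output, checks that it is
# consistent with any -p option, and returns ($clogp, $cversion,
# $dscfn).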
sub push_parse_changelog ($) {
my ($clogpfn) = @_;
my $clogp = Dpkg::Control::Hash->new();
$clogp->load($clogpfn) or die;
my $clogpackage = getfield $clogp, 'Source';
$package //= $clogpackage;
fail f_ "-p specified %s but changelog specified %s",
$package, $clogpackage
unless $package eq $clogpackage;
my $cversion = getfield $clogp, 'Version';
if (!$we_are_initiator) {
# rpush initiator can't do this because it doesn't have $isuite yet
my $tag = debiantag_new($cversion, access_nomdistro);
runcmd @git, qw(check-ref-format), $tag;
}
my $dscfn = dscfn($cversion);
return ($clogp, $cversion, $dscfn);
}
sub push_parse_dsc ($$$) {
my ($dscfn,$dscfnwhat, $cversion) = @_;
$dsc = parsecontrol($dscfn,$dscfnwhat);
my $dversion = getfield $dsc, 'Version';
my $dscpackage = getfield $dsc, 'Source';
($dscpackage eq $package && $dversion eq $cversion) or
fail f_ "%s is for %s %s but debian/changelog is for %s %s",
$dscfn, $dscpackage, $dversion,
$package, $cversion;
}
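# Works out which git tags this push should create: always one for the
# dgit view, plus a maintainer-view-format tag for the separate
# maintainer view (or, unless dodep14tag is 'no', for the dgit head).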
sub push_tagwants ($$$$) {
my ($cversion, $dgithead, $maintviewhead, $tfbase) = @_;
my @tagwants;
push @tagwants, {
TagFn => \&debiantag_new,
Objid => $dgithead,
TfSuffix => '',
View => 'dgit',
};
if (defined $maintviewhead) {
push @tagwants, {
TagFn => \&debiantag_maintview,
Objid => $maintviewhead,
TfSuffix => '-maintview',
View => 'maint',
};
} elsif ($dodep14tag ne 'no') {
push @tagwants, {
TagFn => \&debiantag_maintview,
Objid => $dgithead,
TfSuffix => '-dgit',
View => 'dgit',
};
};
foreach my $tw (@tagwants) {
$tw->{Tag} = $tw->{TagFn}($cversion, access_nomdistro);
$tw->{Tfn} = sub { $tfbase.$tw->{TfSuffix}.$_[0]; };
}
printdebug 'push_tagwants: ', Dumper(\@_, \@tagwants);
return @tagwants;
}
sub push_mktags ($$ $$ $) {
my ($clogp,$dscfn,
$changesfile,$changesfilewhat,
$tagwants) = @_;
die unless $tagwants->[0]{View} eq 'dgit';
my $declaredistro = access_nomdistro();
my $reader_giturl = do { local $access_forpush=0; access_giturl(); };
$dsc->{$ourdscfield[0]} = join " ",
$tagwants->[0]{Objid}, $declaredistro, $tagwants->[0]{Tag},
$reader_giturl;
$dsc->save("$dscfn.tmp") or confess "$!";
my $changes = parsecontrol($changesfile,$changesfilewhat);
foreach my $field (qw(Source Distribution Version)) {
$changes->{$field} eq $clogp->{$field} or
fail f_ "changes field %s \`%s' does not match changelog \`%s'",
$field, $changes->{$field}, $clogp->{$field};
}
my $cversion = getfield $clogp, 'Version';
my $clogsuite = getfield $clogp, 'Distribution';
# We make the git tag by hand because (a) that makes it easier
# to control the "tagger" (b) we can do remote signing
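# For reference, a raw git tag object, as fed to `git hash-object -w
# -t tag' further down, consists of header lines
#     object <commit id>
#     type commit
#     tag <tag name>
#     tagger <name> <email> <time> <tz>
# followed by a blank line and the tag message; the $mktag closure
# below writes such an object into $tfn->('.tmp') and, when signing,
# appends the armored detached signature.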
my $authline = clogp_authline $clogp;
my $delibs = join(" ", "",@deliberatelies);
my $mktag = sub {
my ($tw) = @_;
my $tfn = $tw->{Tfn};
my $head = $tw->{Objid};
my $tag = $tw->{Tag};
open TO, '>', $tfn->('.tmp') or confess "$!";
print TO <{View} eq 'dgit') {
print TO f_ <{View} eq 'maint') {
print TO f_ <('.tmp');
if ($sign) {
if (!defined $keyid) {
$keyid = access_cfg('keyid','RETURN-UNDEF');
}
if (!defined $keyid) {
$keyid = getfield $clogp, 'Maintainer';
}
unlink $tfn->('.tmp.asc') or $!==&ENOENT or confess "$!";
my @sign_cmd = (@gpg, qw(--detach-sign --armor));
push @sign_cmd, qw(-u),$keyid if defined $keyid;
push @sign_cmd, $tfn->('.tmp');
runcmd_ordryrun @sign_cmd;
if (act_scary()) {
$tagobjfn = $tfn->('.signed.tmp');
runcmd shell_cmd "exec >$tagobjfn", qw(cat --),
$tfn->('.tmp'), $tfn->('.tmp.asc');
}
}
return $tagobjfn;
};
my @r = map { $mktag->($_); } @$tagwants;
return @r;
}
sub sign_changes ($) {
my ($changesfile) = @_;
if ($sign) {
my @debsign_cmd = @debsign;
push @debsign_cmd, "-k$keyid" if defined $keyid;
push @debsign_cmd, "-p$gpg[0]" if $gpg[0] ne 'gpg';
push @debsign_cmd, $changesfile;
runcmd_ordryrun @debsign_cmd;
}
}
sub dopush () {
printdebug "actually entering push\n";
supplementary_message(__ <<'END');
Push failed, while checking state of the archive.
You can retry the push, after fixing the problem, if you like.
END
if (check_for_git()) {
git_fetch_us();
}
my $archive_hash = fetch_from_archive();
if (!$archive_hash) {
$new_package or
fail __ "package appears to be new in this suite;".
" if this is intentional, use --new";
}
supplementary_message(__ <<'END');
Push failed, while preparing your push.
You can retry the push, after fixing the problem, if you like.
END
prep_ud();
access_giturl(); # check that success is vaguely likely
rpush_handle_protovsn_bothends() if $we_are_initiator;
my $clogpfn = dgit_privdir()."/changelog.822.tmp";
runcmd shell_cmd "exec >$clogpfn", qw(dpkg-parsechangelog);
responder_send_file('parsed-changelog', $clogpfn);
my ($clogp, $cversion, $dscfn) =
push_parse_changelog("$clogpfn");
my $dscpath = "$buildproductsdir/$dscfn";
stat_exists $dscpath or
fail f_ "looked for .dsc %s, but %s; maybe you forgot to build",
$dscpath, $!;
responder_send_file('dsc', $dscpath);
push_parse_dsc($dscpath, $dscfn, $cversion);
my $format = getfield $dsc, 'Format';
my $symref = git_get_symref();
my $actualhead = git_rev_parse('HEAD');
if (branch_is_gdr_unstitched_ff($symref, $actualhead, $archive_hash)) {
if (quiltmode_splitting()) {
my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $actualhead);
fail f_ <{Version};
if (madformat_wantfixup($format)) {
# user might not have used dgit build, so maybe do this now:
if (do_split_brain()) {
changedir $playground;
my $cachekey;
($dgithead, $cachekey) =
quilt_check_splitbrain_cache($actualhead, $upstreamversion);
$dgithead or fail f_
"--quilt=%s but no cached dgit view:
perhaps HEAD changed since dgit build[-source] ?",
$quilt_mode;
}
if (!do_split_brain()) {
# In split brain mode, do not attempt to incorporate dirty
# stuff from the user's working tree. That would be mad.
commit_quilty_patch();
}
}
if (do_split_brain()) {
$made_split_brain = 1;
$dgithead = splitbrain_pseudomerge($clogp,
$actualhead, $dgithead,
$archive_hash);
$maintviewhead = $actualhead;
changedir $maindir;
prep_ud(); # so _only_subdir() works, below
}
if (defined $overwrite_version && !defined $maintviewhead
&& $archive_hash) {
$dgithead = plain_overwrite_pseudomerge($clogp,
$dgithead,
$archive_hash);
}
check_not_dirty();
my $forceflag = '';
if ($archive_hash) {
if (is_fast_fwd($archive_hash, $dgithead)) {
# ok
} elsif (deliberately_not_fast_forward) {
$forceflag = '+';
} else {
fail __ "dgit push: HEAD is not a descendant".
" of the archive's version.\n".
"To overwrite the archive's contents,".
" pass --overwrite[=VERSION].\n".
"To rewind history, if permitted by the archive,".
" use --deliberately-not-fast-forward.";
}
}
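# Note: $forceflag is prefixed to every refspec in @pushrefs further
# down, so '+' here requests a non-fast-forward update of the remote
# refs, exactly as a leading '+' does in any git refspec.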
confess unless !!$made_split_brain == do_split_brain();
changedir $playground;
progress f_ "checking that %s corresponds to HEAD", $dscfn;
runcmd qw(dpkg-source -x --),
$dscpath =~ m#^/# ? $dscpath : "$maindir/$dscpath";
my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
check_for_vendor_patches() if madformat($dsc->{format});
changedir $maindir;
my @diffcmd = (@git, qw(diff --quiet), $tree, $dgithead);
debugcmd "+",@diffcmd;
$!=0; $?=-1;
my $r = system @diffcmd;
if ($r) {
if ($r==256) {
my $referent = $made_split_brain ? $dgithead : 'HEAD';
my $diffs = cmdoutput @git, qw(diff --stat), $tree, $dgithead;
my @mode_changes;
my $raw = cmdoutput @git,
qw(diff --no-renames -z -r --raw), $tree, $dgithead;
my $changed;
foreach (split /\0/, $raw) {
if (defined $changed) {
push @mode_changes, "$changed: $_\n" if $changed;
$changed = undef;
next;
} elsif (m/^:0+ 0+ /) {
$changed = '';
} elsif (m/^:(?:10*)?(\d+) (?:10*)?(\d+) /) {
$changed = "Mode change from $1 to $2"
} else {
die "$_ ?";
}
}
if (@mode_changes) {
fail +(f_ <{Files} =~ m{\.deb$}m;
my $sourceonlypolicy = access_cfg 'source-only-uploads';
if ($sourceonlypolicy eq 'ok') {
} elsif ($sourceonlypolicy eq 'always') {
forceable_fail [qw(uploading-binaries)],
__ "uploading binaries, although distro policy is source only"
if $hasdebs;
} elsif ($sourceonlypolicy eq 'never') {
forceable_fail [qw(uploading-source-only)],
__ "source-only upload, although distro policy requires .debs"
if !$hasdebs;
} elsif ($sourceonlypolicy eq 'not-wholly-new') {
forceable_fail [qw(uploading-source-only)],
f_ "source-only upload, even though package is entirely NEW\n".
"(this is contrary to policy in %s)",
access_nomdistro()
if !$hasdebs
&& $new_package
&& !(archive_query('package_not_wholly_new', $package) // 1);
} else {
badcfg f_ "unknown source-only-uploads policy \`%s'",
$sourceonlypolicy;
}
# Perhaps adjust .dsc to contain right set of origs
changes_update_origs_from_dsc($dsc, $changes, $upstreamversion,
$changesfile)
unless forceing [qw(changes-origs-exactly)];
# Checks complete, we're going to try and go ahead:
responder_send_file('changes',$changesfile);
responder_send_command("param head $dgithead");
responder_send_command("param csuite $csuite");
responder_send_command("param isuite $isuite");
responder_send_command("param tagformat new"); # needed in $protovsn==4
if (defined $maintviewhead) {
responder_send_command("param maint-view $maintviewhead");
}
# Perhaps send buildinfo(s) for signing
my $changes_files = getfield $changes, 'Files';
my @buildinfos = ($changes_files =~ m/ .* (\S+\.buildinfo)$/mg);
foreach my $bi (@buildinfos) {
responder_send_command("param buildinfo-filename $bi");
responder_send_file('buildinfo', "$buildproductsdir/$bi");
}
if (deliberately_not_fast_forward) {
git_for_each_ref(lrfetchrefs, sub {
my ($objid,$objtype,$lrfetchrefname,$reftail) = @_;
my $rrefname= substr($lrfetchrefname, length(lrfetchrefs) + 1);
responder_send_command("previously $rrefname=$objid");
$previously{$rrefname} = $objid;
});
}
my @tagwants = push_tagwants($cversion, $dgithead, $maintviewhead,
dgit_privdir()."/tag");
my @tagobjfns;
supplementary_message(__ <<'END');
Push failed, while signing the tag.
You can retry the push, after fixing the problem, if you like.
END
# If we manage to sign but fail to record it anywhere, it's fine.
if ($we_are_responder) {
@tagobjfns = map { $_->{Tfn}('.signed-tmp') } @tagwants;
responder_receive_files('signed-tag', @tagobjfns);
} else {
@tagobjfns = push_mktags($clogp,$dscpath,
$changesfile,$changesfile,
\@tagwants);
}
supplementary_message(__ <<'END');
Push failed, *after* signing the tag.
If you want to try again, you should use a new version number.
END
pairwise { $a->{TagObjFn} = $b } @tagwants, @tagobjfns;
foreach my $tw (@tagwants) {
my $tag = $tw->{Tag};
my $tagobjfn = $tw->{TagObjFn};
my $tag_obj_hash =
cmdoutput @git, qw(hash-object -w -t tag), $tagobjfn;
runcmd_ordryrun @git, qw(verify-tag), $tag_obj_hash;
runcmd_ordryrun_local
@git, qw(update-ref), "refs/tags/$tag", $tag_obj_hash;
}
supplementary_message(__ <<'END');
Push failed, while updating the remote git repository - see messages above.
If you want to try again, you should use a new version number.
END
if (!check_for_git()) {
create_remote_git_repo();
}
my @pushrefs = $forceflag.$dgithead.":".rrref();
foreach my $tw (@tagwants) {
push @pushrefs, $forceflag."refs/tags/$tw->{Tag}";
}
runcmd_ordryrun @git,
qw(-c push.followTags=false push), access_giturl(), @pushrefs;
runcmd_ordryrun git_update_ref_cmd 'dgit push', lrref(), $dgithead;
supplementary_message(__ <<'END');
Push failed, while obtaining signatures on the .changes and .dsc.
If it was just that the signature failed, you may try again by using
debsign by hand to sign the changes file (see the command dgit tried,
above), and then dput that changes file to complete the upload.
If you need to change the package, you must use a new version number.
END
if ($we_are_responder) {
my $dryrunsuffix = act_local() ? "" : ".tmp";
my @rfiles = ($dscpath, $changesfile);
push @rfiles, map { "$buildproductsdir/$_" } @buildinfos;
responder_receive_files('signed-dsc-changes',
map { "$_$dryrunsuffix" } @rfiles);
} else {
if (act_local()) {
rename "$dscpath.tmp",$dscpath or die "$dscfn $!";
} else {
progress f_ "[new .dsc left in %s.tmp]", $dscpath;
}
sign_changes $changesfile;
}
supplementary_message(f_ <&STDOUT" or confess "$!";
autoflush PO 1;
open STDOUT, ">&STDERR" or confess "$!";
autoflush STDOUT 1;
$vsnwant //= 1;
($protovsn) = grep {
$vsnwant =~ m{^(?:.*,)?$_(?:,.*)?$}
} @rpushprotovsn_support;
fail f_ "build host has dgit rpush protocol versions %s".
" but invocation host has %s",
(join ",", @rpushprotovsn_support), $vsnwant
unless defined $protovsn;
changedir $dir;
}
sub cmd_remote_push_build_host {
responder_send_command("dgit-remote-push-ready $protovsn");
&cmd_push;
}
sub pre_remote_push_responder { pre_remote_push_build_host(); }
sub cmd_remote_push_responder { cmd_remote_push_build_host(); }
# ... for compatibility with proto vsn.1 dgit (just so that user gets
# a good error message)
sub rpush_handle_protovsn_bothends () {
}
our $i_tmp;
sub i_cleanup {
local ($@, $?);
my $report = i_child_report();
if (defined $report) {
printdebug "($report)\n";
} elsif ($i_child_pid) {
printdebug "(killing build host child $i_child_pid)\n";
kill 15, $i_child_pid;
}
if (defined $i_tmp && !defined $initiator_tempdir) {
changedir "/";
eval { rmtree $i_tmp; };
}
}
END {
return unless forkcheck_mainprocess();
i_cleanup();
}
sub i_method {
my ($base,$selector,@args) = @_;
$selector =~ s/\-/_/g;
{ no strict qw(refs); &{"${base}_${selector}"}(@args); }
}
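# For example, i_method "i_resp", "supplementary-message", $args maps
# the protocol keyword onto i_resp_supplementary_message and calls it;
# this soft-reference dispatch is why `no strict refs' is needed, and
# why the handlers below follow the i_resp_* / i_localname_* /
# i_file_* / i_want_* naming pattern.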
sub pre_rpush () {
not_necessarily_a_tree();
}
sub cmd_rpush {
my $host = nextarg;
my $dir;
if ($host =~ m/^((?:[^][]|\[[^][]*\])*)\:/) {
$host = $1;
$dir = $'; #';
} else {
$dir = nextarg;
}
$dir =~ s{^-}{./-};
my @rargs = ($dir);
push @rargs, join ",", @rpushprotovsn_support;
my @rdgit;
push @rdgit, @dgit;
push @rdgit, @ropts;
push @rdgit, qw(remote-push-build-host), (scalar @rargs), @rargs;
push @rdgit, @ARGV;
my @cmd = (@ssh, $host, shellquote @rdgit);
debugcmd "+",@cmd;
$we_are_initiator=1;
if (defined $initiator_tempdir) {
rmtree $initiator_tempdir;
mkdir $initiator_tempdir, 0700
or fail f_ "create %s: %s", $initiator_tempdir, $!;
$i_tmp = $initiator_tempdir;
} else {
$i_tmp = tempdir();
}
$i_child_pid = open2(\*RO, \*RI, @cmd);
changedir $i_tmp;
($protovsn) = initiator_expect { m/^dgit-remote-push-ready (\S+)/ };
die "$protovsn ?" unless grep { $_ eq $protovsn } @rpushprotovsn_support;
for (;;) {
my ($icmd,$iargs) = initiator_expect {
m/^(\S+)(?: (.*))?$/;
($1,$2);
};
i_method "i_resp", $icmd, $iargs;
}
}
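# Summary of the initiator loop above: the build host sends one
# command per line - progress, supplementary-message, file, param,
# previously, want, complete - and the corresponding i_resp_* handler
# below reacts; `want' is answered by streaming the requested files
# back over RI, terminated by "files-end".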
sub i_resp_progress ($) {
my ($rhs) = @_;
my $msg = protocol_read_bytes \*RO, $rhs;
progress $msg;
}
sub i_resp_supplementary_message ($) {
my ($rhs) = @_;
$supplementary_message = protocol_read_bytes \*RO, $rhs;
}
sub i_resp_complete {
my $pid = $i_child_pid;
$i_child_pid = undef; # prevents killing some other process with same pid
printdebug "waiting for build host child $pid...\n";
my $got = waitpid $pid, 0;
confess "$!" unless $got == $pid;
fail f_ "build host child failed: %s", waitstatusmsg() if $?;
i_cleanup();
printdebug __ "all done\n";
finish 0;
}
sub i_resp_file ($) {
my ($keyword) = @_;
my $localname = i_method "i_localname", $keyword;
my $localpath = "$i_tmp/$localname";
stat_exists $localpath and
badproto \*RO, f_ "file %s (%s) twice", $keyword, $localpath;
protocol_receive_file \*RO, $localpath;
i_method "i_file", $keyword;
}
our %i_param;
sub i_resp_param ($) {
$_[0] =~ m/^(\S+) (.*)$/ or badproto \*RO, __ "bad param spec";
$i_param{$1} = $2;
}
sub i_resp_previously ($) {
$_[0] =~ m#^(refs/tags/\S+)=(\w+)$#
or badproto \*RO, __ "bad previously spec";
my $r = system qw(git check-ref-format), $1;
confess "bad previously ref spec ($r)" if $r;
$previously{$1} = $2;
}
our %i_wanted;
sub i_resp_want ($) {
my ($keyword) = @_;
die "$keyword ?" if $i_wanted{$keyword}++;
defined $i_param{'csuite'} or badproto \*RO, "premature desire, no csuite";
$isuite = $i_param{'isuite'} // $i_param{'csuite'};
die unless $isuite =~ m/^$suite_re$/;
pushing();
rpush_handle_protovsn_bothends();
my @localpaths = i_method "i_want", $keyword;
printdebug "[[ $keyword @localpaths\n";
foreach my $localpath (@localpaths) {
protocol_send_file \*RI, $localpath;
}
print RI "files-end\n" or confess "$!";
}
our ($i_clogp, $i_version, $i_dscfn, $i_changesfn, @i_buildinfos);
sub i_localname_parsed_changelog {
return "remote-changelog.822";
}
sub i_file_parsed_changelog {
($i_clogp, $i_version, $i_dscfn) =
push_parse_changelog "$i_tmp/remote-changelog.822";
die if $i_dscfn =~ m#/|^\W#;
}
sub i_localname_dsc {
defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)";
return $i_dscfn;
}
sub i_file_dsc { }
sub i_localname_buildinfo ($) {
my $bi = $i_param{'buildinfo-filename'};
defined $bi or badproto \*RO, "buildinfo before filename";
defined $i_changesfn or badproto \*RO, "buildinfo before changes";
$bi =~ m{^\Q$package\E_[!-.0-~]*\.buildinfo$}s
or badproto \*RO, "improper buildinfo filename";
return $&;
}
sub i_file_buildinfo {
my $bi = $i_param{'buildinfo-filename'};
my $bd = parsecontrol "$i_tmp/$bi", $bi;
my $ch = parsecontrol "$i_tmp/$i_changesfn", 'changes';
if (!forceing [qw(buildinfo-changes-mismatch)]) {
files_compare_inputs($bd, $ch);
(getfield $bd, $_) eq (getfield $ch, $_) or
fail f_ "buildinfo mismatch in field %s", $_
foreach qw(Source Version);
!defined $bd->{$_} or
fail f_ "buildinfo contains forbidden field %s", $_
foreach qw(Changes Changed-by Distribution);
}
push @i_buildinfos, $bi;
delete $i_param{'buildinfo-filename'};
}
sub i_localname_changes {
defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)";
$i_changesfn = $i_dscfn;
$i_changesfn =~ s/\.dsc$/_dgit.changes/ or die;
return $i_changesfn;
}
sub i_file_changes { }
sub i_want_signed_tag {
printdebug Dumper(\%i_param, $i_dscfn);
defined $i_param{'head'} && defined $i_dscfn && defined $i_clogp
&& defined $i_param{'csuite'}
or badproto \*RO, "premature desire for signed-tag";
my $head = $i_param{'head'};
die if $head =~ m/[^0-9a-f]/ || $head !~ m/^../;
my $maintview = $i_param{'maint-view'};
die if defined $maintview && $maintview =~ m/[^0-9a-f]/;
if ($protovsn == 4) {
my $p = $i_param{'tagformat'} // '';
$p eq 'new'
or badproto \*RO, "tag format mismatch: $p vs. new";
}
die unless $i_param{'csuite'} =~ m/^$suite_re$/;
$csuite = $&;
push_parse_dsc $i_dscfn, 'remote dsc', $i_version;
my @tagwants = push_tagwants $i_version, $head, $maintview, "tag";
return
push_mktags $i_clogp, $i_dscfn,
$i_changesfn, (__ 'remote changes file'),
\@tagwants;
}
sub i_want_signed_dsc_changes {
rename "$i_dscfn.tmp","$i_dscfn" or die "$i_dscfn $!";
sign_changes $i_changesfn;
return ($i_dscfn, $i_changesfn, @i_buildinfos);
}
#---------- building etc. ----------
our $version;
our $sourcechanges;
our $dscfn;
#----- `3.0 (quilt)' handling -----
our $fakeeditorenv = 'DGIT_FAKE_EDITOR_QUILT';
sub quiltify_dpkg_commit ($$$;$) {
my ($patchname,$author,$msg, $xinfo) = @_;
$xinfo //= '';
mkpath '.git/dgit'; # we are in playtree
my $descfn = ".git/dgit/quilt-description.tmp";
open O, '>', $descfn or confess "$descfn: $!";
$msg =~ s/\n+/\n\n/;
print O <{$fn}
# is set for each modified .gitignore filename $fn
# if $unrepres is defined, it is an array ref to which is appended
# a list of unrepresentable changes (e.g. removals of upstream
# files), as messages
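# For reference, the input parsed below: when $unrepres is wanted,
# --name-only is omitted and diff-tree emits its raw format, two
# NUL-separated fields per change, e.g.
#     :100644 100755 <oldsha> <newsha> M    then    some/upstream/file
# which is why the loop first captures the two mode words into
# @lmodes and picks the filename up on the following iteration.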
local $/=undef;
my @cmd = (@git, qw(diff-tree -z --no-renames));
push @cmd, qw(--name-only) unless $unrepres;
push @cmd, qw(-r) if $finegrained || $unrepres;
push @cmd, $x, $y;
my $diffs= cmdoutput @cmd;
my $r = 0;
my @lmodes;
foreach my $f (split /\0/, $diffs) {
if ($unrepres && !@lmodes) {
@lmodes = $f =~ m/^\:(\w+) (\w+) \w+ \w+ / or die "$f ?";
next;
}
my ($oldmode,$newmode) = @lmodes;
@lmodes = ();
next if $f =~ m#^debian(?:/.*)?$#s;
if ($unrepres) {
eval {
die __ "not a plain file or symlink\n"
unless $newmode =~ m/^(?:10|12)\d{4}$/ ||
$oldmode =~ m/^(?:10|12)\d{4}$/;
if ($oldmode =~ m/[^0]/ &&
$newmode =~ m/[^0]/) {
# both old and new files exist
die __ "mode or type changed\n" if $oldmode ne $newmode;
die __ "modified symlink\n" unless $newmode =~ m/^10/;
} elsif ($oldmode =~ m/[^0]/) {
# deletion
die __ "deletion of symlink\n"
unless $oldmode =~ m/^10/;
} else {
# creation
die __ "creation with non-default mode\n"
unless $newmode =~ m/^100644$/ or
$newmode =~ m/^120000$/;
}
};
if ($@) {
local $/="\n"; chomp $@;
push @$unrepres, [ $f, "$@ ($oldmode->$newmode)" ];
}
}
my $isignore = $f =~ m#^(?:.*/)?\.gitignore$#s;
$r |= $isignore ? 02 : 01;
$ignorenamesr->{$f}=1 if $ignorenamesr && $isignore;
}
printdebug "quiltify_trees_differ $x $y => $r\n";
return $r;
}
sub quiltify_tree_sentinelfiles ($) {
# lists the `sentinel' files present in the tree
my ($x) = @_;
my $r = cmdoutput @git, qw(ls-tree --name-only), $x,
qw(-- debian/rules debian/control);
$r =~ s/\n/,/g;
return $r;
}
sub quiltify_splitting ($$$$$$$) {
my ($clogp, $unapplied, $headref, $oldtiptree, $diffbits,
$editedignores, $cachekey) = @_;
my $gitignore_special = 1;
if ($quilt_mode !~ m/gbp|dpm|baredebian/) {
# treat .gitignore just like any other upstream file
$diffbits = { %$diffbits };
$_ = !!$_ foreach values %$diffbits;
$gitignore_special = 0;
}
# We would like any commits we generate to be reproducible
my @authline = clogp_authline($clogp);
local $ENV{GIT_COMMITTER_NAME} = $authline[0];
local $ENV{GIT_COMMITTER_EMAIL} = $authline[1];
local $ENV{GIT_COMMITTER_DATE} = $authline[2];
local $ENV{GIT_AUTHOR_NAME} = $authline[0];
local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
local $ENV{GIT_AUTHOR_DATE} = $authline[2];
confess unless do_split_brain();
my $fulldiffhint = sub {
my ($x,$y) = @_;
my $cmd = "git diff $x $y -- :/ ':!debian'";
$cmd .= " ':!/.gitignore' ':!*/.gitignore'" if $gitignore_special;
return f_ "\nFor full diff showing the problem(s), type:\n %s\n",
$cmd;
};
if ($quilt_mode =~ m/gbp|unapplied|baredebian/ &&
($diffbits->{O2H} & 01)) {
my $msg = f_
"--quilt=%s specified, implying patches-unapplied git tree\n".
" but git tree differs from orig in upstream files.",
$quilt_mode;
$msg .= $fulldiffhint->($unapplied, 'HEAD');
if (!stat_exists "debian/patches" and $quilt_mode !~ m/baredebian/) {
$msg .= __
"\n ... debian/patches is missing; perhaps this is a patch queue branch?";
}
fail $msg;
}
if ($quilt_mode =~ m/dpm/ &&
($diffbits->{H2A} & 01)) {
fail +(f_ <($oldtiptree,'HEAD');
--quilt=%s specified, implying patches-applied git tree
but git tree differs from result of applying debian/patches to upstream
END
}
if ($quilt_mode =~ m/baredebian/) {
# We need to construct a merge which has upstream files from
# upstream and debian/ files from HEAD.
read_tree_upstream $quilt_upstream_commitish, 1, $headref;
my $version = getfield $clogp, 'Version';
my $upsversion = upstreamversion $version;
my $merge = make_commit
[ $headref, $quilt_upstream_commitish ],
[ +(f_ <{O2A} & 01)) { # some patches
progress __ "dgit view: creating patches-applied version using gbp pq";
runcmd shell_cmd 'exec >/dev/null', gbp_pq, qw(import);
# gbp pq import creates a fresh branch; push back to dgit-view
runcmd @git, qw(update-ref refs/heads/dgit-view HEAD);
runcmd @git, qw(checkout -q dgit-view);
}
if ($quilt_mode =~ m/gbp|dpm/ &&
($diffbits->{O2A} & 02)) {
fail f_ <{O2H} & 02) && # user has modified .gitignore
!($diffbits->{O2A} & 02)) { # patches do not change .gitignore
progress __
"dgit view: creating patch to represent .gitignore changes";
ensuredir "debian/patches";
my $gipatch = "debian/patches/auto-gitignore";
open GIPATCH, ">>", "$gipatch" or confess "$gipatch: $!";
stat GIPATCH or confess "$gipatch: $!";
fail f_ "%s already exists; but want to create it".
" to record .gitignore changes",
$gipatch
if (stat _)[7];
print GIPATCH +(__ <>$gipatch", @git, qw(diff),
$unapplied, $headref, "--", sort keys %$editedignores;
open SERIES, "+>>", "debian/patches/series" or confess "$!";
defined seek SERIES, -1, 2 or $!==EINVAL or confess "$!";
my $newline;
defined read SERIES, $newline, 1 or confess "$!";
print SERIES "\n" or confess "$!" unless $newline eq "\n";
print SERIES "auto-gitignore\n" or confess "$!";
close SERIES or die $!;
runcmd @git, qw(add -f -- debian/patches/series), $gipatch;
commit_admin +(__ < $git_commit_id,
# Child => $c, # or undef if P=T
# Whynot => $reason_edge_PC_unsuitable, # in @nots only
# Nontrivial => true iff $p..$c has relevant changes
# };
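# Summary of the search below: starting from $target we walk
# breadth-first back through history looking for a commit whose tree
# (ignoring debian/) equals $oldtiptree, i.e. the patches-applied
# tree. Edges are rejected if they touch debian/patches, .pc or
# debian/source/format, if the debian/rules+debian/control sentinels
# change, if the commit is a root, or if it is a merge with more than
# one nontrivial parent; $max_work bounds the whole search.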
my @todo;
my @nots;
my $sref_S;
my $max_work=100;
my %considered; # saves being exponential on some weird graphs
my $t_sentinels = quiltify_tree_sentinelfiles $target;
my $not = sub {
my ($search,$whynot) = @_;
printdebug " search NOT $search->{Commit} $whynot\n";
$search->{Whynot} = $whynot;
push @nots, $search;
no warnings qw(exiting);
next;
};
push @todo, {
Commit => $target,
};
while (@todo) {
my $c = shift @todo;
next if $considered{$c->{Commit}}++;
$not->($c, __ "maximum search space exceeded") if --$max_work <= 0;
printdebug "quiltify investigate $c->{Commit}\n";
# are we done?
if (!quiltify_trees_differ $c->{Commit}, $oldtiptree) {
printdebug " search finished hooray!\n";
$sref_S = $c;
last;
}
quiltify_nofix_bail " $c->{Commit}", " (tree object $oldtiptree)";
if ($quilt_mode eq 'smash') {
printdebug " search quitting smash\n";
last;
}
my $c_sentinels = quiltify_tree_sentinelfiles $c->{Commit};
$not->($c, f_ "has %s not %s", $c_sentinels, $t_sentinels)
if $c_sentinels ne $t_sentinels;
my $commitdata = cmdoutput @git, qw(cat-file commit), $c->{Commit};
$commitdata =~ m/\n\n/;
$commitdata = $`; # keep only the commit header (prematch)
my @parents = ($commitdata =~ m/^parent (\w+)$/gm);
@parents = map { { Commit => $_, Child => $c } } @parents;
$not->($c, __ "root commit") if !@parents;
foreach my $p (@parents) {
$p->{Nontrivial}= quiltify_trees_differ $p->{Commit},$c->{Commit};
}
my $ndiffers = grep { $_->{Nontrivial} } @parents;
$not->($c, f_ "merge (%s nontrivial parents)", $ndiffers)
if $ndiffers > 1;
foreach my $p (@parents) {
printdebug "considering C=$c->{Commit} P=$p->{Commit}\n";
my @cmd= (@git, qw(diff-tree -r --name-only),
$p->{Commit},$c->{Commit},
qw(-- debian/patches .pc debian/source/format));
my $patchstackchange = cmdoutput @cmd;
if (length $patchstackchange) {
$patchstackchange =~ s/\n/,/g;
$not->($p, f_ "changed %s", $patchstackchange);
}
printdebug " search queue P=$p->{Commit} ",
($p->{Nontrivial} ? "NT" : "triv"),"\n";
push @todo, $p;
}
}
if (!$sref_S) {
printdebug "quiltify want to smash\n";
my $abbrev = sub {
my $x = $_[0]{Commit};
$x =~ s/(.*?[0-9a-z]{8})[0-9a-z]*$/$1/;
return $x;
};
if ($quilt_mode eq 'linear') {
print STDERR f_
"\n%s: error: quilt fixup cannot be linear. Stopped at:\n",
$us;
my $all_gdr = !!@nots;
foreach my $notp (@nots) {
my $c = $notp->{Child};
my $cprange = $abbrev->($notp);
$cprange .= "..".$abbrev->($c) if $c;
print STDERR f_ "%s: %s: %s\n",
$us, $cprange, $notp->{Whynot};
$all_gdr &&= $notp->{Child} &&
(git_cat_file $notp->{Child}{Commit}, 'commit')
=~ m{^\[git-debrebase(?! split[: ]).*\]$}m;
}
print STDERR "\n";
$failsuggestion =
[ grep { $_->[0] ne 'quilt-mode' } @$failsuggestion ]
if $all_gdr;
print STDERR "$us: $_->[1]\n" foreach @$failsuggestion;
fail __
"quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n";
} elsif ($quilt_mode eq 'smash') {
} elsif ($quilt_mode eq 'auto') {
progress __ "quilt fixup cannot be linear, smashing...";
} else {
confess "$quilt_mode ?";
}
my $time = $ENV{'GIT_COMMITTER_DATE'} || time;
$time =~ s/\s.*//; # trim timezone from GIT_COMMITTER_DATE
my $ncommits = 3;
my $msg = cmdoutput @git, qw(log), "-n$ncommits";
quiltify_dpkg_commit "auto-$version-$target-$time",
(getfield $clogp, 'Maintainer'),
(f_ "Automatically generated patch (%s)\n".
"Last (up to) %s git changes, FYI:\n\n",
$clogp->{Version}, $ncommits).
$msg;
return;
}
progress __ "quiltify linearisation planning successful, executing...";
for (my $p = $sref_S;
my $c = $p->{Child};
$p = $p->{Child}) {
printdebug "quiltify traverse $p->{Commit}..$c->{Commit}\n";
next unless $p->{Nontrivial};
my $cc = $c->{Commit};
my $commitdata = cmdoutput @git, qw(cat-file commit), $cc;
$commitdata =~ m/\n\n/ or die "$c ?";
$commitdata = $`;
my $msg = $'; #';
$commitdata =~ m/^author (.*) \d+ [-+0-9]+$/m or die "$cc ?";
my $author = $1;
my $commitdate = cmdoutput
@git, qw(log -n1 --pretty=format:%aD), $cc;
$msg =~ s/^(.*)\n*/$1\n/ or die "$cc $msg ?";
my $strip_nls = sub { $msg =~ s/\n+$//; $msg .= "\n"; };
$strip_nls->();
my $title = $1;
my $patchname;
my $patchdir;
my $gbp_check_suitable = sub {
$_ = shift;
my ($what) = @_;
eval {
die __ "contains unexpected slashes\n" if m{//} || m{/$};
die __ "contains leading punctuation\n" if m{^\W} || m{/\W};
die __ "contains bad character(s)\n" if m{[^-a-z0-9_.+=~/]}i;
die __ "is series file\n" if m{$series_filename_re}o;
die __ "too long\n" if length > 200;
};
return $_ unless $@;
print STDERR f_
"quiltifying commit %s: ignoring/dropping Gbp-Pq %s: %s",
$cc, $what, $@;
return undef;
};
if ($msg =~ s/^ (?: gbp(?:-pq)? : \s* name \s+ |
gbp-pq-name: \s* )
(\S+) \s* \n //ixm) {
$patchname = $gbp_check_suitable->($1, 'Name');
}
if ($msg =~ s/^ (?: gbp(?:-pq)? : \s* topic \s+ |
gbp-pq-topic: \s* )
(\S+) \s* \n //ixm) {
$patchdir = $gbp_check_suitable->($1, 'Topic');
}
$strip_nls->();
if (!defined $patchname) {
$patchname = $title;
$patchname =~ s/[.:]$//;
use Text::Iconv;
eval {
my $converter = new Text::Iconv qw(UTF-8 ASCII//TRANSLIT);
my $translitname = $converter->convert($patchname);
die unless defined $translitname;
$patchname = $translitname;
};
print STDERR
+(f_ "dgit: patch title transliteration error: %s", $@)
if $@;
$patchname =~ y/ A-Z/-a-z/;
$patchname =~ y/-a-z0-9_.+=~//cd;
$patchname =~ s/^\W/x-$&/;
$patchname = substr($patchname,0,40);
$patchname .= ".patch";
}
if (!defined $patchdir) {
$patchdir = '';
}
if (length $patchdir) {
$patchname = "$patchdir/$patchname";
}
if ($patchname =~ m{^(.*)/}) {
mkpath "debian/patches/$1";
}
my $index;
for ($index='';
stat "debian/patches/$patchname$index";
$index++) { }
$!==ENOENT or confess "$patchname$index $!";
runcmd @git, qw(checkout -q), $cc;
# We use the tip's changelog so that dpkg-source doesn't
# produce complaining messages from dpkg-parsechangelog. None
# of the information dpkg-source gets from the changelog is
# actually relevant - it gets put into the original message
# which dpkg-source provides to our stunt editor, and then
# overwritten.
runcmd @git, qw(checkout -q), $target, qw(debian/changelog);
quiltify_dpkg_commit "$patchname$index", $author, $msg,
"Date: $commitdate\n".
"X-Dgit-Generated: $clogp->{Version} $cc\n";
runcmd @git, qw(checkout -q), $cc, qw(debian/changelog);
}
}
sub build_maybe_quilt_fixup () {
my ($format,$fopts) = get_source_format;
return unless madformat_wantfixup $format;
# sigh
check_for_vendor_patches();
my $clogp = parsechangelog();
my $headref = git_rev_parse('HEAD');
my $symref = git_get_symref();
my $upstreamversion = upstreamversion $version;
prep_ud();
changedir $playground;
my $splitbrain_cachekey;
if (do_split_brain()) {
my $cachehit;
($cachehit, $splitbrain_cachekey) =
quilt_check_splitbrain_cache($headref, $upstreamversion);
if ($cachehit) {
changedir $maindir;
return;
}
}
unpack_playtree_need_cd_work($headref);
if (do_split_brain()) {
runcmd @git, qw(checkout -q -b dgit-view);
# so long as work is not deleted, its current branch will
# remain dgit-view, rather than master, so subsequent calls to
# unpack_playtree_need_cd_work
# will DTRT, resetting dgit-view.
confess if $made_split_brain;
$made_split_brain = 1;
}
chdir '..';
if ($fopts->{'single-debian-patch'}) {
fail f_
"quilt mode %s does not make sense (or is not supported) with single-debian-patch",
$quilt_mode
if quiltmode_splitting();
quilt_fixup_singlepatch($clogp, $headref, $upstreamversion);
} else {
quilt_fixup_multipatch($clogp, $headref, $upstreamversion,
$splitbrain_cachekey);
}
if (do_split_brain()) {
my $dgitview = git_rev_parse 'HEAD';
changedir $maindir;
reflog_cache_insert "refs/$splitbraincache",
$splitbrain_cachekey, $dgitview;
changedir "$playground/work";
my $saved = maybe_split_brain_save $headref, $dgitview, __ "converted";
progress f_ "dgit view: created (%s)", $saved;
}
changedir $maindir;
runcmd_ordryrun_local
@git, qw(pull --ff-only -q), "$playground/work", qw(master);
}
sub build_check_quilt_splitbrain () {
build_maybe_quilt_fixup();
}
sub unpack_playtree_need_cd_work ($) {
my ($headref) = @_;
# prep_ud() must have been called already.
if (!chdir "work") {
# Check in the filesystem because sometimes we run prep_ud
# in between multiple calls to unpack_playtree_need_cd_work.
confess "$!" unless $!==ENOENT;
mkdir "work" or confess "$!";
changedir "work";
mktree_in_ud_here();
}
runcmd @git, qw(reset -q --hard), $headref;
}
sub unpack_playtree_linkorigs ($$) {
my ($upstreamversion, $fn) = @_;
# calls $fn->($leafname);
my $bpd_abs = bpd_abs();
dotdot_bpd_transfer_origs $bpd_abs, $upstreamversion, sub { 1 };
opendir QFD, $bpd_abs or fail "buildproductsdir: $bpd_abs: $!";
while ($!=0, defined(my $leaf = readdir QFD)) {
my $f = bpd_abs()."/".$leaf;
{
local ($debuglevel) = $debuglevel-1;
printdebug "QF linkorigs bpd $leaf, $f ?\n";
}
next unless is_orig_file_of_vsn $leaf, $upstreamversion;
printdebug "QF linkorigs $leaf, $f Y\n";
link_ltarget $f, $leaf or die "$leaf $!";
$fn->($leaf);
}
die "$buildproductsdir: $!" if $!;
closedir QFD;
}
sub quilt_fixup_delete_pc () {
runcmd @git, qw(rm -rqf .pc);
commit_admin +(__ <' or confess "$!";
print $fakedsc <addfile($fh);
print $fakedsc " ".$md->hexdigest." $size $leaf\n" or confess "$!";
};
unpack_playtree_linkorigs($upstreamversion, $dscaddfile);
my @files=qw(debian/source/format debian/rules
debian/control debian/changelog);
foreach my $maybe (qw(debian/patches debian/source/options
debian/tests/control)) {
next unless stat_exists "$maindir/$maybe";
push @files, $maybe;
}
my $debtar= srcfn $fakeversion,'.debian.tar.gz';
runcmd qw(env GZIP=-1n tar -zcf), "./$debtar", qw(-C), $maindir, @files;
$dscaddfile->($debtar);
close $fakedsc or confess "$!";
}
sub quilt_fakedsc2unapplied ($$) {
my ($headref, $upstreamversion) = @_;
# must be run in the playground
# quilt_need_fake_dsc must have been called
quilt_need_fake_dsc($upstreamversion);
runcmd qw(sh -ec),
'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null';
my $fakexdir= $package.'-'.(stripepoch $upstreamversion);
rename $fakexdir, "fake" or die "$fakexdir $!";
changedir 'fake';
remove_stray_gits(__ "source package");
mktree_in_ud_here();
rmtree '.pc';
rmtree 'debian'; # git checkout commitish paths does not delete!
runcmd @git, qw(checkout -f), $headref, qw(-- debian);
my $unapplied=git_add_write_tree();
printdebug "fake orig tree object $unapplied\n";
return $unapplied;
}
sub quilt_check_splitbrain_cache ($$) {
my ($headref, $upstreamversion) = @_;
# Called only if we are in (potentially) split brain mode.
# Called in playground.
# Computes the cache key and looks in the cache.
# Returns ($dgit_view_commitid, $cachekey) or (undef, $cachekey)
quilt_need_fake_dsc($upstreamversion);
my $splitbrain_cachekey;
progress f_
"dgit: split brain (separate dgit view) may be needed (--quilt=%s).",
$quilt_mode;
# we look in the reflog of dgit-intern/quilt-cache
# we look for an entry whose message is the key for the cache lookup
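# The key is a single line assembled just below, roughly
#     dgit <dgit version> <upstream version> <quilt mode> <head>
#          <upstream commitish or -> <hash of fake.dsc> <hash of dgit's own source>
# so changing any of those inputs, or upgrading dgit itself,
# invalidates the cached dgit view.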
my @cachekey = (qw(dgit), $our_version);
push @cachekey, $upstreamversion;
push @cachekey, $quilt_mode;
push @cachekey, $headref;
push @cachekey, $quilt_upstream_commitish // '-';
push @cachekey, hashfile('fake.dsc');
my $srcshash = Digest::SHA->new(256);
my %sfs = ( %INC, '$0(dgit)' => $0 );
foreach my $sfk (sort keys %sfs) {
next unless $sfk =~ m/^\$0\b/ || $sfk =~ m{^Debian/Dgit\b};
$srcshash->add($sfk," ");
$srcshash->add(hashfile($sfs{$sfk}));
$srcshash->add("\n");
}
push @cachekey, $srcshash->hexdigest();
$splitbrain_cachekey = "@cachekey";
printdebug "splitbrain cachekey $splitbrain_cachekey\n";
my $cachehit = reflog_cache_lookup
"refs/$splitbraincache", $splitbrain_cachekey;
if ($cachehit) {
unpack_playtree_need_cd_work($headref);
my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit";
if ($cachehit ne $headref) {
progress f_ "dgit view: found cached (%s)", $saved;
runcmd @git, qw(checkout -q -b dgit-view), $cachehit;
$made_split_brain = 1;
return ($cachehit, $splitbrain_cachekey);
}
progress __ "dgit view: found cached, no changes required";
return ($headref, $splitbrain_cachekey);
}
printdebug "splitbrain cache miss\n";
return (undef, $splitbrain_cachekey);
}
sub quilt_fixup_multipatch ($$$$) {
my ($clogp, $headref, $upstreamversion, $splitbrain_cachekey) = @_;
progress f_ "examining quilt state (multiple patches, %s mode)",
$quilt_mode;
# Our objective is:
# - honour any existing .pc in case it has any strangeness
# - determine the git commit corresponding to the tip of
# the patch stack (if there is one)
# - if there is such a git commit, convert each subsequent
# git commit into a quilt patch with dpkg-source --commit
# - otherwise convert all the differences in the tree into
# a single git commit
#
# To do this we:
# Our git tree doesn't necessarily contain .pc. (Some versions of
# dgit would include the .pc in the git tree.) If there isn't
# one, we need to generate one by unpacking the patches that we
# have.
#
# We first look for a .pc in the git tree. If there is one, we
# will use it. (This is not the normal case.)
#
# Otherwise need to regenerate .pc so that dpkg-source --commit
# can work. We do this as follows:
# 1. Collect all relevant .orig from parent directory
# 2. Generate a debian.tar.gz out of
# debian/{patches,rules,source/format,source/options}
# 3. Generate a fake .dsc containing just these fields:
# Format Source Version Files
# 4. Extract the fake .dsc
# Now the fake .dsc has a .pc directory.
# (In fact we do this in every case, because in future we will
# want to search for a good base commit for generating patches.)
#
# Then we can actually do the dpkg-source --commit
# 1. Make a new working tree with the same object
# store as our main tree and check out the main
# tree's HEAD.
# 2. Copy .pc from the fake's extraction, if necessary
# 3. Run dpkg-source --commit
# 4. If the result has changes to debian/, then
# - git add them
# - git add .pc if we had a .pc in-tree
# - git commit
# 5. If we had a .pc in-tree, delete it, and git commit
# 6. Back in the main tree, fast forward to the new HEAD
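# (Illustrative sketch, not the literal output: the fake .dsc of
# step 3 above looks roughly like
#     Format: 3.0 (quilt)
#     Source: <package>
#     Version: <version>
#     Files:
#      <md5> <size> <each orig tarball>
#      <md5> <size> <generated .debian.tar.gz>
# which is just enough for the `dpkg-source --skip-patches -x'
# extraction in quilt_fakedsc2unapplied to succeed.)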
# Another situation we may have to cope with is gbp-style
# patches-unapplied trees.
#
# We would want to detect these, so we know to escape into
# quilt_fixup_gbp. However, this is in general not possible.
# Consider a package with one patch which the dgit user reverts
# (with git revert or the moral equivalent).
#
# That is indistinguishable in contents from a patches-unapplied
# tree. And looking at the history to distinguish them is not
# useful because the user might have made a confusing-looking git
# history structure (which ought to produce an error if dgit can't
# cope, not a silent reintroduction of an unwanted patch).
#
# So gbp users will have to pass an option. But we can usually
# detect their failure to do so: if the tree is not a clean
# patches-applied tree, quilt linearisation fails, but if the tree
# _is_ a clean patches-unapplied tree, we can suggest that maybe
# they want --quilt=unapplied.
#
# To help detect this, when we are extracting the fake dsc, we
# first extract it with --skip-patches, and then apply the patches
# afterwards with dpkg-source --before-build. That lets us save a
# tree object corresponding to .origs.
if ($quilt_mode eq 'linear'
&& branch_is_gdr($headref)) {
# This is much faster. It also makes patches that gdr
# likes better for future updates without laundering.
#
# However, it can fail in some cases where we would
# succeed: if there are existing patches, which correspond
# to a prefix of the branch, but are not in gbp/gdr
# format, gdr will fail (exiting status 7), but we might
# be able to figure out where to start linearising. That
# will be slower so hopefully there's not much to do.
unpack_playtree_need_cd_work $headref;
my @cmd = (@git_debrebase,
qw(--noop-ok -funclean-mixed -funclean-ordering
make-patches --quiet-would-amend));
# We tolerate some snags that gdr wouldn't, by default.
if (act_local()) {
debugcmd "+",@cmd;
$!=0; $?=-1;
failedcmd @cmd
if system @cmd
and not ($? == 7*256 or
$? == -1 && $!==ENOENT);
} else {
dryrun_report @cmd;
}
$headref = git_rev_parse('HEAD');
chdir '..';
}
my $unapplied=quilt_fakedsc2unapplied($headref, $upstreamversion);
ensuredir '.pc';
my @bbcmd = (qw(sh -ec), 'exec dpkg-source --before-build . >/dev/null');
$!=0; $?=-1;
if (system @bbcmd) {
failedcmd @bbcmd if $? < 0;
fail __ < quiltify_trees_differ($unapplied,$uheadref, 1,
\%editedignores, \@unrepres),
H2A => quiltify_trees_differ($uheadref, $oldtiptree,1),
O2A => quiltify_trees_differ($unapplied,$oldtiptree,1),
};
my @dl;
foreach my $bits (qw(01 02)) {
foreach my $v (qw(O2H O2A H2A)) {
push @dl, ($diffbits->{$v} & $bits) ? '##' : '==';
}
}
printdebug "differences \@dl @dl.\n";
progress f_
"%s: base trees orig=%.20s o+d/p=%.20s",
$us, $unapplied, $oldtiptree;
# TRANSLATORS: Try to keep this ascii-art layout right. The 0s in
# %9.00009s will be ignored and are there to make the format the
# same length (9 characters) as the output it generates. If you
# change the value 9, your translation of "upstream" must fit into
# the new length, and you should change the number of 0s. Do
# not reduce it below 4 as HEAD has to fit too.
progress f_
"%s: quilt differences: src: %s orig %s gitignores: %s orig %s\n".
"%s: quilt differences: %9.00009s %s o+d/p %9.00009s %s o+d/p",
$us, $dl[0], $dl[1], $dl[3], $dl[4],
$us, $uhead_whatshort, $dl[2], $uhead_whatshort, $dl[5];
if (@unrepres && $quilt_mode !~ m/baredebian/) {
# With baredebian, even if the upstream commitish has this
# problem, we don't want to print this message, as nothing
# is going to try to make a patch out of it anyway.
print STDERR f_ "dgit: cannot represent change: %s: %s\n",
$_->[1], $_->[0]
foreach @unrepres;
forceable_fail [qw(unrepresentable)], __ <{O2H} & $diffbits->{O2A})) {
push @failsuggestion, [ 'unapplied', __
"This might be a patches-unapplied branch." ];
} elsif (!($diffbits->{H2A} & $diffbits->{O2A})) {
push @failsuggestion, [ 'applied', __
"This might be a patches-applied branch." ];
}
push @failsuggestion, [ 'quilt-mode', __
"Maybe you need one of --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?" ];
push @failsuggestion, [ 'gitattrs', __
"Warning: Tree has .gitattributes. See GITATTRIBUTES in dgit(7)." ]
if stat_exists '.gitattributes';
push @failsuggestion, [ 'origs', __
"Maybe orig tarball(s) are not identical to git representation?" ]
unless $onlydebian && $quilt_mode !~ m/baredebian/;
# ^ in that case, we didn't really look properly
if (quiltmode_splitting()) {
quiltify_splitting($clogp, $unapplied, $headref, $oldtiptree,
$diffbits, \%editedignores,
$splitbrain_cachekey);
return;
}
progress f_ "starting quiltify (multiple patches, %s mode)", $quilt_mode;
quiltify($clogp,$headref,$oldtiptree,\@failsuggestion);
runcmd @git, qw(checkout -q), (qw(master dgit-view)[do_split_brain()]);
if (!open P, '>>', ".pc/applied-patches") {
$!==&ENOENT or confess "$!";
} else {
close P;
}
commit_quilty_patch();
if ($mustdeletepc) {
quilt_fixup_delete_pc();
}
}
sub quilt_fixup_editor () {
my $descfn = $ENV{$fakeeditorenv};
my $editing = $ARGV[$#ARGV];
open I1, '<', $descfn or confess "$descfn: $!";
open I2, '<', $editing or confess "$editing: $!";
unlink $editing or confess "$editing: $!";
open O, '>', $editing or confess "$editing: $!";
while (<I1>) { print O or confess "$!"; } I1->error and confess "$!";
my $copying = 0;
while (<I2>) {
$copying ||= m/^\-\-\- /;
next unless $copying;
print O or confess "$!";
}
I2->error and confess "$!";
close O or die $!;
finish 0;
}
sub maybe_apply_patches_dirtily () {
return unless $quilt_mode =~ m/gbp|unapplied|baredebian/;
print STDERR __ <[0] } @vsns;
@vsns = sort { -version_compare($a, $b) } @vsns;
$changes_since_version = $vsns[0];
progress f_ "changelog will contain changes since %s", $vsns[0];
} else {
$changes_since_version = '_';
progress __ "package seems new, not specifying -v";
}
}
if ($changes_since_version ne '_') {
return ("-v$changes_since_version");
} else {
return ();
}
}
sub changesopts () {
return (changesopts_initial(), changesopts_version());
}
sub massage_dbp_args ($;$) {
my ($cmd,$xargs) = @_;
# Since we split the source build out so we can do strange things
# to it, massage the arguments to dpkg-buildpackage so that the
# main build doesn't build source (or add an argument to stop it
# building source by default).
debugcmd '#massaging#', @$cmd if $debuglevel>1;
# -nc has the side effect of specifying -b if nothing else specified
# and some combinations of -S, -b, et al, are errors, rather than
# later simply overriding earlier. So we need to:
# - search the command line for these options
# - pick the last one
# - perhaps add our own as a default
# - perhaps adjust it to the corresponding non-source-building version
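# Worked example (illustrative): a plain `dpkg-buildpackage -us -uc'
# keeps the default $dmode of -F, which is translated to -b and gains
# -nc, and WANTSRC_SOURCE|WANTSRC_BUILDER is returned so dgit builds
# the source itself; an explicit -b stays a binary-only build and
# only WANTSRC_BUILDER is returned.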
my $dmode = '-F';
foreach my $l ($cmd, $xargs) {
next unless $l;
@$l = grep { !(m/^-[SgGFABb]$|^--build=/s and $dmode=$_) } @$l;
}
push @$cmd, '-nc';
#print STDERR "MASS1 ",Dumper($cmd, $xargs, $dmode);
my $r = WANTSRC_BUILDER;
printdebug "massage split $dmode.\n";
if ($dmode =~ s/^--build=//) {
$r = 0;
my @d = split /,/, $dmode;
$r |= WANTSRC_SOURCE if grep { s/^full$/binary/ } @d;
$r |= WANTSRC_SOURCE if grep { s/^source$// } @d;
$r |= WANTSRC_BUILDER if grep { m/./ } @d;
fail __ "Wanted to build nothing!" unless $r;
$dmode = '--build='. join ',', grep m/./, @d;
} else {
$r =
$dmode =~ m/[S]/ ? WANTSRC_SOURCE :
$dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER :
$dmode =~ m/[ABb]/ ? WANTSRC_BUILDER :
confess "$dmode ?";
}
printdebug "massage done $r $dmode.\n";
push @$cmd, $dmode;
#print STDERR "MASS2 ",Dumper($cmd, $xargs, $r);
return $r;
}
sub in_bpd (&) {
my ($fn) = @_;
my $wasdir = must_getcwd();
changedir $buildproductsdir;
$fn->();
changedir $wasdir;
}
# this sub must run with CWD=$buildproductsdir (eg in in_bpd)
sub postbuild_mergechanges ($) {
my ($msg_if_onlyone) = @_;
# If there is only one .changes file, fail with $msg_if_onlyone,
# or if that is undef, be a no-op.
# Returns the changes file to report to the user.
my $pat = changespat $version;
my @changesfiles = grep { !m/_multi\.changes/ } glob $pat;
@changesfiles = sort {
($b =~ m/_source\.changes$/ <=> $a =~ m/_source\.changes$/)
or $a cmp $b
} @changesfiles;
my $result;
if (@changesfiles==1) {
fail +(f_ < !$includedirty
return !$includedirty;
}
sub build_source {
$sourcechanges = changespat $version,'source';
if (act_local()) {
unlink "$buildproductsdir/$sourcechanges" or $!==ENOENT
or fail f_ "remove %s: %s", $sourcechanges, $!;
}
# confess unless !!$made_split_brain == do_split_brain();
my @cmd = (@dpkgsource, qw(-b --));
my $leafdir;
if (building_source_in_playtree()) {
$leafdir = 'work';
my $headref = git_rev_parse('HEAD');
# If we are in split brain, there is already a playtree with
# the thing we should package into a .dsc (thanks to quilt
# fixup). If not, make a playtree
prep_ud() unless $made_split_brain;
changedir $playground;
unless ($made_split_brain) {
my $upstreamversion = upstreamversion $version;
unpack_playtree_linkorigs($upstreamversion, sub { });
unpack_playtree_need_cd_work($headref);
changedir '..';
}
} else {
$leafdir = basename $maindir;
if ($buildproductsdir ne '..') {
# Well, we are going to run dpkg-source -b which consumes
# origs from .. and generates output there. To make this
# work when the bpd is not .. , we would have to (i) link
# origs from bpd to .. , (ii) check for files that
# dpkg-source -b would/might overwrite, and afterwards
# (iii) move all the outputs back to the bpd (iv) except
# for the origs which should be deleted from .. if they
# weren't there beforehand. And if there is an error and
# we don't run to completion we would necessarily leave a
# mess. This is too much. The real way to fix this
# is for dpkg-source to have bpd support.
confess unless $includedirty;
fail __
"--include-dirty not supported with --build-products-dir, sorry";
}
changedir '..';
}
runcmd_ordryrun_local @cmd, $leafdir;
changedir $leafdir;
runcmd_ordryrun_local qw(sh -ec),
'exec >../$1; shift; exec "$@"','x', $sourcechanges,
@dpkggenchanges, qw(-S), changesopts();
changedir '..';
printdebug "moving $dscfn, $sourcechanges, etc. to ".bpd_abs()."\n";
$dsc = parsecontrol($dscfn, "source package");
my $mv = sub {
my ($why, $l) = @_;
printdebug " renaming ($why) $l\n";
rename_link_xf 0, "$l", bpd_abs()."/$l"
or fail f_ "put in place new built file (%s): %s", $l, $@;
};
foreach my $l (split /\n/, getfield $dsc, 'Files') {
$l =~ m/\S+$/ or next;
$mv->('Files', $&);
}
$mv->('dsc', $dscfn);
$mv->('changes', $sourcechanges);
changedir $maindir;
}
sub cmd_build_source {
badusage __ "build-source takes no additional arguments" if @ARGV;
build_prep(WANTSRC_SOURCE);
build_source();
maybe_unapply_patches_again();
printdone f_ "source built, results in %s and %s",
$dscfn, $sourcechanges;
}
sub cmd_push_source {
prep_push();
fail __
"dgit push-source: --include-dirty/--ignore-dirty does not make ".
"sense with push-source!"
if $includedirty;
build_check_quilt_splitbrain();
if ($changesfile) {
my $changes = parsecontrol("$buildproductsdir/$changesfile",
__ "source changes file");
unless (test_source_only_changes($changes)) {
fail __ "user-specified changes file is not source-only";
}
} else {
# Building a source package is very fast, so just do it
build_source();
confess "er, patches are applied dirtily but shouldn't be.."
if $patches_applied_dirtily;
$changesfile = $sourcechanges;
}
dopush();
}
sub binary_builder {
my ($bbuilder, $pbmc_msg, @args) = @_;
build_prep(WANTSRC_SOURCE);
build_source();
midbuild_checkchanges();
in_bpd {
if (act_local()) {
stat_exists $dscfn or fail f_
"%s (in build products dir): %s", $dscfn, $!;
stat_exists $sourcechanges or fail f_
"%s (in build products dir): %s", $sourcechanges, $!;
}
runcmd_ordryrun_local @$bbuilder, @args;
};
maybe_unapply_patches_again();
in_bpd {
postbuild_mergechanges($pbmc_msg);
};
}
sub cmd_sbuild {
build_prep_early();
binary_builder(\@sbuild, (__ <; };
D->error and fail f_ "read %s: %s", $dscfn, $!;
close C;
# we don't normally need this so import it here
use Dpkg::Source::Package;
my $dp = new Dpkg::Source::Package filename => $dscfn,
require_valid_signature => $needsig;
{
local $SIG{__WARN__} = sub {
print STDERR $_[0];
return unless $needsig;
fail __ "import-dsc signature check failed";
};
if (!$dp->is_signed()) {
warn f_ "%s: warning: importing unsigned .dsc\n", $us;
} else {
my $r = $dp->check_signature();
confess "->check_signature => $r" if $needsig && $r;
}
}
parse_dscdata();
$package = getfield $dsc, 'Source';
parse_dsc_field($dsc, __ "Dgit metadata in .dsc")
unless forceing [qw(import-dsc-with-dgit-field)];
parse_dsc_field_def_dsc_distro();
$isuite = 'DGIT-IMPORT-DSC';
$idistro //= $dsc_distro;
notpushing();
if (defined $dsc_hash) {
progress __
"dgit: import-dsc of .dsc with Dgit field, using git hash";
resolve_dsc_field_commit undef, undef;
}
if (defined $dsc_hash) {
my @cmd = (qw(sh -ec),
"echo $dsc_hash | git cat-file --batch-check");
my $objgot = cmdoutput @cmd;
if ($objgot =~ m#^\w+ missing\b#) {
fail f_ < 0) {
progress __ "Not fast forward, forced update.";
} else {
fail f_ "Not fast forward to %s", $dsc_hash;
}
}
import_dsc_result $dstbranch, $dsc_hash,
"dgit import-dsc (Dgit): $info",
f_ "updated git ref %s", $dstbranch;
return 0;
}
fail f_ <{Filename};
# We transfer all the pieces of the dsc to the bpd, not just
# origs. This is by analogy with dgit fetch, which wants to
# keep them somewhere to avoid downloading them again.
# We make symlinks, though. If the user wants copies, then
# they can copy the parts of the dsc to the bpd using dcmd,
# or something.
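# Worked example (hypothetical paths): running in /home/me/pkg with
# --build-products-dir=bp and a .dsc given as ../foo_1.0-1.dsc, the
# piece foo_1.0.orig.tar.xz ends up as the symlink
#     bp/foo_1.0.orig.tar.xz -> ../../foo_1.0.orig.tar.xz
# via the File::Spec->abs2rel call below.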
my $here = "$buildproductsdir/$f";
if (lstat $here) {
if (stat $here) {
next;
}
fail f_ "lstat %s works but stat gives %s !", $here, $!;
}
fail f_ "stat %s: %s", $here, $! unless $! == ENOENT;
printdebug "not in bpd, $f ...\n";
# $f does not exist in bpd, we need to transfer it
my $there = $dscfn;
$there =~ s{[^/]+$}{$f} or confess "$there ?";
# $there is file we want, relative to user's cwd, or abs
printdebug "not in bpd, $f, test $there ...\n";
stat $there or fail f_
"import %s requires %s, but: %s", $dscfn, $there, $!;
if ($there =~ m#^(?:\./+)?\.\./+#) {
# $there is relative to user's cwd
my $there_from_parent = $';
if ($buildproductsdir !~ m{^/}) {
# abs2rel, despite its name, can take two relative paths
$there = File::Spec->abs2rel($there,$buildproductsdir);
# now $there is relative to bpd, great
printdebug "not in bpd, $f, abs2rel, $there ...\n";
} else {
$there = (dirname $maindir)."/$there_from_parent";
# now $there is absolute
printdebug "not in bpd, $f, rel2rel, $there ...\n";
}
} elsif ($there =~ m#^/#) {
# $there is absolute already
printdebug "not in bpd, $f, abs, $there ...\n";
} else {
fail f_
"cannot import %s which seems to be inside working tree!",
$dscfn;
}
symlink $there, $here or fail f_
"symlink %s to %s: %s", $there, $here, $!;
progress f_ "made symlink %s -> %s", $here, $there;
# print STDERR Dumper($fi);
}
my @mergeinputs = generate_commits_from_dsc();
die unless @mergeinputs == 1;
my $newhash = $mergeinputs[0]{Commit};
if ($oldhash) {
if ($force > 0) {
progress __
"Import, forced update - synthetic orphan git history.";
} elsif ($force < 0) {
progress __ "Import, merging.";
my $tree = cmdoutput @git, qw(rev-parse), "$newhash:";
my $version = getfield $dsc, 'Version';
my $clogp = commit_getclogp $newhash;
my $authline = clogp_authline $clogp;
$newhash = hash_commit_text <",@cmd;
exec @cmd or fail f_ "exec curl: %s\n", $!;
}
sub repos_server_url () {
$package = '_dgit-repos-server';
local $access_forpush = 1;
local $isuite = 'DGIT-REPOS-SERVER';
my $url = access_giturl();
}
sub pre_clone_dgit_repos_server () {
not_necessarily_a_tree();
}
sub cmd_clone_dgit_repos_server {
badusage __ "need destination argument" unless @ARGV==1;
my ($destdir) = @ARGV;
my $url = repos_server_url();
my @cmd = (@git, qw(clone), $url, $destdir);
debugcmd ">",@cmd;
exec @cmd or fail f_ "exec git clone: %s\n", $!;
}
sub pre_print_dgit_repos_server_source_url () {
not_necessarily_a_tree();
}
sub cmd_print_dgit_repos_server_source_url {
badusage __
"no arguments allowed to dgit print-dgit-repos-server-source-url"
if @ARGV;
my $url = repos_server_url();
print $url, "\n" or confess "$!";
}
sub pre_print_dpkg_source_ignores {
not_necessarily_a_tree();
}
sub cmd_print_dpkg_source_ignores {
badusage __
"no arguments allowed to dgit print-dpkg-source-ignores"
if @ARGV;
print "@dpkg_source_ignores\n" or confess "$!";
}
sub cmd_setup_mergechangelogs {
badusage __ "no arguments allowed to dgit setup-mergechangelogs"
if @ARGV;
local $isuite = 'DGIT-SETUP-TREE';
setup_mergechangelogs(1);
}
sub cmd_setup_useremail {
badusage __ "no arguments allowed to dgit setup-useremail" if @ARGV;
local $isuite = 'DGIT-SETUP-TREE';
setup_useremail(1);
}
sub cmd_setup_gitattributes {
badusage __ "no arguments allowed to dgit setup-gitattributes" if @ARGV;
local $isuite = 'DGIT-SETUP-TREE';
setup_gitattrs(1);
}
sub cmd_setup_new_tree {
badusage __ "no arguments allowed to dgit setup-tree" if @ARGV;
local $isuite = 'DGIT-SETUP-TREE';
setup_new_tree();
}
#---------- argument parsing and main program ----------
sub cmd_version {
print "dgit version $our_version\n" or confess "$!";
finish 0;
}
our (%valopts_long, %valopts_short);
our (%funcopts_long);
our @rvalopts;
our (@modeopt_cfgs);
sub defvalopt ($$$$) {
my ($long,$short,$val_re,$how) = @_;
my $oi = { Long => $long, Short => $short, Re => $val_re, How => $how };
$valopts_long{$long} = $oi;
$valopts_short{$short} = $oi;
# $how subref should:
# do whatever assignment or thing it likes with $_[0]
# if the option should not be passed on to remote, @rvalopts=()
# or $how can be a scalar ref, meaning simply assign the value
}
defvalopt '--since-version', '-v', '[^_]+|_', \$changes_since_version;
defvalopt '--distro', '-d', '.+', \$idistro;
defvalopt '', '-k', '.+', \$keyid;
defvalopt '--existing-package','', '.*', \$existing_package;
defvalopt '--build-products-dir','','.*', \$buildproductsdir;
defvalopt '--clean', '', $cleanmode_re, \$cleanmode;
defvalopt '--package', '-p', $package_re, \$package;
defvalopt '--quilt', '', $quilt_modes_re, \$quilt_mode;
defvalopt '', '-C', '.+', sub {
($changesfile) = (@_);
if ($changesfile =~ s#^(.*)/##) {
$buildproductsdir = $1;
}
};
defvalopt '--initiator-tempdir','','.*', sub {
($initiator_tempdir) = (@_);
$initiator_tempdir =~ m#^/# or
badusage __ "--initiator-tempdir must be used to specify an".
" absolute, not relative, directory."
};
sub defoptmodes ($@) {
my ($varref, $cfgkey, $default, %optmap) = @_;
my %permit;
while (my ($opt,$val) = each %optmap) {
$funcopts_long{$opt} = sub { $$varref = $val; };
$permit{$val} = $val;
}
push @modeopt_cfgs, {
Var => $varref,
Key => $cfgkey,
Default => $default,
Vals => \%permit
};
}
defoptmodes \$dodep14tag, qw( dep14tag want
--dep14tag want
--no-dep14tag no
--always-dep14tag always );
sub parseopts () {
my $om;
if (defined $ENV{'DGIT_SSH'}) {
@ssh = string_to_ssh $ENV{'DGIT_SSH'};
} elsif (defined $ENV{'GIT_SSH'}) {
@ssh = ($ENV{'GIT_SSH'});
}
my $oi;
my $val;
my $valopt = sub {
my ($what) = @_;
@rvalopts = ($_);
if (!defined $val) {
badusage f_ "%s needs a value", $what unless @ARGV;
$val = shift @ARGV;
push @rvalopts, $val;
}
badusage f_ "bad value \`%s' for %s", $val, $what unless
$val =~ m/^$oi->{Re}$(?!\n)/s;
my $how = $oi->{How};
if (ref($how) eq 'SCALAR') {
$$how = $val;
} else {
$how->($val);
}
push @ropts, @rvalopts;
};
while (@ARGV) {
last unless $ARGV[0] =~ m/^-/;
$_ = shift @ARGV;
last if m/^--?$/;
if (m/^--/) {
if (m/^--dry-run$/) {
push @ropts, $_;
$dryrun_level=2;
} elsif (m/^--damp-run$/) {
push @ropts, $_;
$dryrun_level=1;
} elsif (m/^--no-sign$/) {
push @ropts, $_;
$sign=0;
} elsif (m/^--help$/) {
cmd_help();
} elsif (m/^--version$/) {
cmd_version();
} elsif (m/^--new$/) {
push @ropts, $_;
$new_package=1;
} elsif (m/^--([-0-9a-z]+)=(.+)/s &&
($om = $opts_opt_map{$1}) &&
length $om->[0]) {
push @ropts, $_;
$om->[0] = $2;
} elsif (m/^--([-0-9a-z]+):(.*)/s &&
!$opts_opt_cmdonly{$1} &&
($om = $opts_opt_map{$1})) {
push @ropts, $_;
push @$om, $2;
} elsif (m/^--([-0-9a-z]+)\!:(.*)/s &&
!$opts_opt_cmdonly{$1} &&
($om = $opts_opt_map{$1})) {
push @ropts, $_;
my $cmd = shift @$om;
@$om = ($cmd, grep { $_ ne $2 } @$om);
} elsif (m/^--($quilt_options_re)$/s) {
push @ropts, "--quilt=$1";
$quilt_mode = $1;
} elsif (m/^--(?:ignore|include)-dirty$/s) {
push @ropts, $_;
$includedirty = 1;
} elsif (m/^--no-quilt-fixup$/s) {
push @ropts, $_;
$quilt_mode = 'nocheck';
} elsif (m/^--no-rm-on-error$/s) {
push @ropts, $_;
$rmonerror = 0;
} elsif (m/^--no-chase-dsc-distro$/s) {
push @ropts, $_;
$chase_dsc_distro = 0;
} elsif (m/^--overwrite$/s) {
push @ropts, $_;
$overwrite_version = '';
} elsif (m/^--split-(?:view|brain)$/s) {
push @ropts, $_;
$splitview_mode = 'always';
} elsif (m/^--split-(?:view|brain)=($splitview_modes_re)$/s) {
push @ropts, $_;
$splitview_mode = $1;
} elsif (m/^--overwrite=(.+)$/s) {
push @ropts, $_;
$overwrite_version = $1;
} elsif (m/^--delayed=(\d+)$/s) {
push @ropts, $_;
push @dput, $_;
} elsif (m/^--upstream-commitish=(.+)$/s) {
push @ropts, $_;
$quilt_upstream_commitish = $1;
} elsif (m/^--save-(dgit-view)=(.+)$/s ||
m/^--(dgit-view)-save=(.+)$/s
) {
my ($k,$v) = ($1,$2);
push @ropts, $_;
$v =~ s#^(?!refs/)#refs/heads/#;
$internal_object_save{$k} = $v;
} elsif (m/^--(no-)?rm-old-changes$/s) {
push @ropts, $_;
$rmchanges = !$1;
} elsif (m/^--deliberately-($deliberately_re)$/s) {
push @ropts, $_;
push @deliberatelies, $&;
} elsif (m/^--force-(.*)/ && defined $forceopts{$1}) {
push @ropts, $&;
$forceopts{$1} = 1;
$_='';
} elsif (m/^--force-/) {
print STDERR
f_ "%s: warning: ignoring unknown force option %s\n",
$us, $_;
$_='';
} elsif (m/^--config-lookup-explode=(.+)$/s) {
# undocumented, for testing
push @ropts, $_;
$gitcfgs{cmdline}{$1} = 'CONFIG-LOOKUP-EXPLODE';
# ^ it's supposed to be an array ref
} elsif (m/^(--[-0-9a-z]+)(=|$)/ && ($oi = $valopts_long{$1})) {
$val = $2 ? $' : undef; #';
$valopt->($oi->{Long});
} elsif ($funcopts_long{$_}) {
push @ropts, $_;
$funcopts_long{$_}();
} else {
badusage f_ "unknown long option \`%s'", $_;
}
} else {
while (m/^-./s) {
if (s/^-n/-/) {
push @ropts, $&;
$dryrun_level=2;
} elsif (s/^-L/-/) {
push @ropts, $&;
$dryrun_level=1;
} elsif (s/^-h/-/) {
cmd_help();
} elsif (s/^-D/-/) {
push @ropts, $&;
$debuglevel++;
enabledebug();
} elsif (s/^-N/-/) {
push @ropts, $&;
$new_package=1;
} elsif (m/^-m/) {
push @ropts, $&;
push @changesopts, $_;
$_ = '';
} elsif (s/^-wn$//s) {
push @ropts, $&;
$cleanmode = 'none';
} elsif (s/^-wg(f?)(a?)$//s) {
push @ropts, $&;
$cleanmode = 'git';
$cleanmode .= '-ff' if $1;
$cleanmode .= ',always' if $2;
} elsif (s/^-wd(d?)([na]?)$//s) {
push @ropts, $&;
$cleanmode = 'dpkg-source';
$cleanmode .= '-d' if $1;
$cleanmode .= ',no-check' if $2 eq 'n';
$cleanmode .= ',all-check' if $2 eq 'a';
} elsif (s/^-wc$//s) {
push @ropts, $&;
$cleanmode = 'check';
} elsif (s/^-wci$//s) {
push @ropts, $&;
$cleanmode = 'check,ignores';
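# Summary of the -w clean-mode shorthands handled above (derived from
# the branches themselves): -wn => none; -wg => git, -wgf => git-ff,
# with a trailing "a" adding ",always"; -wd => dpkg-source, -wdd =>
# dpkg-source-d, with a trailing "n" adding ",no-check" or "a" adding
# ",all-check"; -wc => check; -wci => check,ignores.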
} elsif (s/^-c([^=]*)\=(.*)$//s) {
push @git, '-c', $&;
$gitcfgs{cmdline}{$1} = [ $2 ];
} elsif (s/^-c([^=]+)$//s) {
push @git, '-c', $&;
$gitcfgs{cmdline}{$1} = [ 'true' ];
} elsif (m/^-[a-zA-Z]/ && ($oi = $valopts_short{$&})) {
$val = $'; #';
$val = undef unless length $val;
$valopt->($oi->{Short});
$_ = '';
} else {
badusage f_ "unknown short option \`%s'", $_;
}
}
}
}
}
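# Illustrative invocation (hypothetical): "dgit -nD --quilt=smash build"
# is parsed by parseopts() as dry-run level 2 (-n), one extra debug
# level (-D) and quilt mode "smash", leaving "build" in @ARGV for the
# command dispatch at the bottom of the file.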
sub check_env_sanity () {
my $blocked = new POSIX::SigSet;
sigprocmask SIG_UNBLOCK, $blocked, $blocked or confess "$!";
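# A freshly constructed POSIX::SigSet is empty, so the SIG_UNBLOCK call
# above changes nothing; it is used only to read the current blocked
# set back into $blocked for the checks below.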
eval {
foreach my $name (qw(PIPE CHLD)) {
my $signame = "SIG$name";
my $signum = eval "POSIX::$signame" // die;
die f_ "%s is set to something other than SIG_DFL\n",
$signame
if defined $SIG{$name} and $SIG{$name} ne 'DEFAULT';
$blocked->ismember($signum) and
die f_ "%s is blocked\n", $signame;
}
};
return unless $@;
chomp $@;
fail f_ <<END, $@;
On entry to dgit, %s
This is a bug produced by something in your execution environment.
Giving up.
END
}
sub parseopts_late_defaults () {
foreach my $k (keys %opts_opt_map) {
my $om = $opts_opt_map{$k};
my $v = access_cfg("cmd-$k", 'RETURN-UNDEF');
if (defined $v) {
badcfg f_ "cannot set command for %s", $k
unless length $om->[0];
$om->[0] = $v;
}
foreach my $c (access_cfg_cfgs("opts-$k")) {
my @vl =
map { $_ ? @$_ : () }
map { $gitcfgs{$_}{$c} }
reverse @gitcfgsources;
printdebug "CL $c ", (join " ", map { shellquote } @vl),
"\n" if $debuglevel >= 4;
next unless @vl;
badcfg f_ "cannot configure options for %s", $k
if $opts_opt_cmdonly{$k};
my $insertpos = $opts_cfg_insertpos{$k};
@$om = ( @$om[0..$insertpos-1],
@vl,
@$om[$insertpos..$#$om] );
}
}
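# Hypothetical example (assuming "dpkg-buildpackage" is one of the
# %opts_opt_map commands and the usual dgit-distro.*.opts-* config
# naming produced by access_cfg_cfgs): a git config entry
# dgit-distro.debian.opts-dpkg-buildpackage = -j8 would be picked up
# here and spliced into that command's argument list at its recorded
# $opts_cfg_insertpos position.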
if (!defined $rmchanges) {
local $access_forpush;
$rmchanges = access_cfg_bool(0, 'rm-old-changes');
}
if (!defined $quilt_mode) {
local $access_forpush;
$quilt_mode = cfg('dgit.force.quilt-mode', 'RETURN-UNDEF')
// access_cfg('quilt-mode', 'RETURN-UNDEF')
// 'linear';
$quilt_mode =~ m/^($quilt_modes_re)$/
or badcfg f_ "unknown quilt-mode \`%s'", $quilt_mode;
$quilt_mode = $1;
}
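# Precedence for the quilt mode, as implemented above: an explicit
# --quilt=... or bare quilt-mode option wins (it leaves $quilt_mode
# already defined), then dgit.force.quilt-mode, then the distro's
# quilt-mode access config, then the built-in default "linear".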
foreach my $moc (@modeopt_cfgs) {
local $access_forpush;
my $vr = $moc->{Var};
next if defined $$vr;
$$vr = access_cfg($moc->{Key}, 'RETURN-UNDEF') // $moc->{Default};
my $v = $moc->{Vals}{$$vr};
badcfg f_ "unknown %s setting \`%s'", $moc->{Key}, $$vr
unless defined $v;
$$vr = $v;
}
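# For example, the dep14tag mode registered via defoptmodes above is
# resolved here: if no --*dep14tag option was given, $dodep14tag is
# taken from the "dep14tag" access config key, defaulting to "want",
# and rejected unless it is one of the permitted values.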
{
local $access_forpush;
default_from_access_cfg(\$cleanmode, 'clean-mode', 'dpkg-source',
$cleanmode_re);
}
$buildproductsdir //= access_cfg('build-products-dir', 'RETURN-UNDEF');
$buildproductsdir //= '..';
$bpd_glob = $buildproductsdir;
$bpd_glob =~ s#[][\\{}*?~]#\\$&#g;
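# This backslash-escapes glob metacharacters so $bpd_glob can be used
# safely in glob patterns: the default ".." is unchanged, while a
# hypothetical directory like "build[1]" would become "build\[1\]".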
}
setlocale(LC_MESSAGES, "");
textdomain("dgit");
if ($ENV{$fakeeditorenv}) {
git_slurp_config();
quilt_fixup_editor();
}
parseopts();
check_env_sanity();
print STDERR __ "DRY RUN ONLY\n" if $dryrun_level > 1;
print STDERR __ "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n"
if $dryrun_level == 1;
if (!@ARGV) {
print STDERR __ $helpmsg or confess "$!";
finish 8;
}
$cmd = $subcommand = shift @ARGV;
$cmd =~ y/-/_/;
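# Hyphens in the subcommand map to underscores, so e.g. "dgit
# build-source" dispatches via pre_build_source (if such a sub is
# defined) and cmd_build_source, both looked up in the main package
# symbol table below.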
my $pre_fn = ${*::}{"pre_$cmd"};
$pre_fn->() if $pre_fn;
if ($invoked_in_git_tree) {
changedir_git_toplevel();
record_maindir();
}
git_slurp_config();
my $fn = ${*::}{"cmd_$cmd"};
$fn or badusage f_ "unknown operation %s", $cmd;
$fn->();
finish 0;