use Digest::MD5;
use List::Util qw(any);
use List::MoreUtils qw(pairwise);
+use Text::Glob qw(match_glob);
+use Fcntl qw(:DEFAULT :flock);
use Carp;
use Debian::Dgit;
our $overwrite_version; # undef: not specified; '': check changelog
our $quilt_mode;
our $quilt_modes_re = 'linear|smash|auto|nofix|nocheck|gbp|dpm|unapplied';
+our $dodep14tag;
+our $dodep14tag_re = 'want|no|always';
+our $split_brain_save;
our $we_are_responder;
our $initiator_tempdir;
our $patches_applied_dirtily = 00;
our %forceopts = map { $_=>0 }
qw(unrepresentable unsupported-source-format
- dsc-changes-mismatch
+ dsc-changes-mismatch changes-origs-exactly
import-gitapply-absurd
- import-gitapply-no-absurd);
+ import-gitapply-no-absurd
+ import-dsc-with-dgit-field);
our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");
our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$';
our $splitbraincache = 'dgit-intern/quilt-cache';
+our $rewritemap = 'dgit-rewrite/map';
our (@git) = qw(git);
our (@dget) = qw(dget);
our (@sbuild) = qw(sbuild);
our (@ssh) = 'ssh';
our (@dgit) = qw(dgit);
+our (@aptget) = qw(apt-get);
+our (@aptcache) = qw(apt-cache);
our (@dpkgbuildpackage) = qw(dpkg-buildpackage -i\.git/ -I.git);
our (@dpkgsource) = qw(dpkg-source -i\.git/ -I.git);
our (@dpkggenchanges) = qw(dpkg-genchanges);
'ssh' => \@ssh,
'dgit' => \@dgit,
'git' => \@git,
+ 'apt-get' => \@aptget,
+ 'apt-cache' => \@aptcache,
'dpkg-source' => \@dpkgsource,
'dpkg-buildpackage' => \@dpkgbuildpackage,
'dpkg-genchanges' => \@dpkggenchanges,
sub debiantag_maintview ($$) {
my ($v,$distro) = @_;
- $v =~ y/~:/_%/;
- return "$distro/$v";
+ return "$distro/".dep14_version_mangle $v;
}
sub madformat ($) { $_[0] eq '3.0 (quilt)' }
return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes";
}
+# upstreamversion ($version)
+# Strip the Debian revision (everything after the last hyphen) from a
+# version string, yielding the upstream version.  A native version
+# (no hyphen) is returned unchanged.
+sub upstreamversion ($) {
+    my ($vsn) = @_;
+    $vsn =~ s/-[^-]+$//;
+    return $vsn;
+}
+
our $us = 'dgit';
initdebug('');
sub printdone {
if (!$dryrun_level) {
- progress "dgit ok: @_";
+ progress "$us ok: @_";
} else {
progress "would be ok: @_ (but dry run only)";
}
our $td = $ENV{DGIT_TEST_DUMMY_DIR} || "DGIT_TEST_DUMMY_DIR-unset";
our %defcfg = ('dgit.default.distro' => 'debian',
+ 'dgit-suite.*-security.distro' => 'debian-security',
'dgit.default.username' => '',
'dgit.default.archive-query-default-component' => 'main',
'dgit.default.ssh' => 'ssh',
'dgit.default.archive-query' => 'madison:',
'dgit.default.sshpsql-dbname' => 'service=projectb',
+ 'dgit.default.aptget-components' => 'main',
'dgit.default.dgit-tag-format' => 'new,old,maint',
# old means "repo server accepts pushes with old dgit tags"
# new means "repo server accepts pushes with new dgit tags"
'dgit-distro.debian.git-url-suffix' => '',
'dgit-distro.debian.upload-host' => 'ftp-master', # for dput
'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/',
+ 'dgit-distro.debian-security.archive-query' => 'aptget:',
+ 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/',
+ 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#',
+ 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#',
+ 'dgit-distro.debian-security.nominal-distro' => 'debian',
'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*',
'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/',
'dgit-distro.ubuntu.git-check' => 'false',
'dgit-distro.test-dummy.git-url' => "$td/git",
'dgit-distro.test-dummy.git-host' => "git",
'dgit-distro.test-dummy.git-path' => "$td/git",
- 'dgit-distro.test-dummy.archive-query' => "ftpmasterapi:",
+ 'dgit-distro.test-dummy.archive-query' => "dummycatapi:",
'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/",
'dgit-distro.test-dummy.mirror' => "file://$td/mirror/",
'dgit-distro.test-dummy.upload-host' => 'test-dummy',
if (defined $idistro) {
return $idistro;
} else {
- return cfg("dgit-suite.$isuite.distro",
- "dgit.default.distro");
+ my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF');
+ return $def if defined $def;
+ foreach my $src (@gitcfgsources, 'internal') {
+ my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src};
+ next unless $kl;
+ foreach my $k (keys %$kl) {
+ next unless $k =~ m#^dgit-suite\.(.*)\.distro$#;
+ my $dpat = $1;
+ next unless match_glob $dpat, $isuite;
+ return $kl->{$k};
+ }
+ }
+ return cfg("dgit.default.distro");
}
}
+# Return the "nominal" distro name: the base distro unless the config
+# key dgit-distro.BASE.nominal-distro overrides it (eg debian-security
+# nominates plain "debian", per the %defcfg entries above).
+sub access_nomdistro () {
+    my $base = access_basedistro();
+    return cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
+}
+
sub access_quirk () {
# returns (quirk name, distro to use instead or undef, quirk-specific info)
my $basedistro = access_basedistro();
unshift @l, $instead_distro;
@l = grep { defined } @l;
+ push @l, access_nomdistro();
+
if (access_forpush()) {
@l = map { ("$_/push", $_) } @l;
}
}
sub parsecontrol {
- my ($file, $desc) = @_;
+ my ($file, $desc, $allowsigned) = @_;
my $fh = new IO::Handle;
open $fh, '<', $file or die "$file: $!";
- my $c = parsecontrolfh($fh,$desc);
+ my $c = parsecontrolfh($fh,$desc,$allowsigned);
$fh->error and die $!;
close $fh;
return $c;
return $d;
}
+# Parse the already-fetched .dsc text in the global $dscdata into the
+# global $dsc.  The trailing 1 to parsecontrolfh tolerates an OpenPGP
+# signed .dsc.
+sub parse_dscdata () {
+    my $dscfh = new IO::File \$dscdata, '<' or die $!;
+    printdebug Dumper($dscdata) if $debuglevel>1;
+    $dsc = parsecontrolfh($dscfh,$dscurl,1);
+    printdebug Dumper($dsc) if $debuglevel>1;
+}
+
our %rmad;
-sub archive_query ($) {
- my ($method) = @_;
+sub archive_query ($;@) {
+ my ($method) = shift @_;
+ fail "this operation does not support multiple comma-separated suites"
+ if $isuite =~ m/,/;
my $query = access_cfg('archive-query','RETURN-UNDEF');
$query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
my $proto = $1;
my $data = $'; #';
- { no strict qw(refs); &{"${method}_${proto}"}($proto,$data); }
+ { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); }
+}
+
+# archive_query_prepend_mirror (@rows)
+# Each row is [ $version, $subpath, ... ]; turn the subpath into a full
+# URL by prepending the configured mirror, preserving any trailing
+# elements (eg digester and digest in sshpsql rows).
+sub archive_query_prepend_mirror {
+    my $m = access_cfg('mirror');
+    return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_;
+}
sub pool_dsc_subpath ($$) {
return "/pool/$component/$prefix/$package/".dscfn($vsn);
}
+# cfg_apply_map ($varref, $what, $mapspec)
+# Apply a config-supplied mapping operator (eg `s#-security$#/updates#',
+# see the aptget-suite-map entries above) to $$varref in place.
+# No-op when $mapspec is unset or empty.
+sub cfg_apply_map ($$$) {
+    my ($varref, $what, $mapspec) = @_;
+    return unless $mapspec;
+
+    printdebug "config $what EVAL{ $mapspec; }\n";
+    $_ = $$varref;
+    # NOTE(review): string eval of configuration-supplied Perl; the
+    # config is trusted, but never route untrusted data through here.
+    eval "package Dgit::Config; $mapspec;";
+    die $@ if $@;
+    $$varref = $_;
+}
+
#---------- `ftpmasterapi' archive query method (nascent) ----------
sub archive_api_query_cmd ($) {
return @cmd;
}
-sub api_query ($$) {
+sub api_query ($$;$) {
use JSON;
- my ($data, $subpath) = @_;
+ my ($data, $subpath, $ok404) = @_;
badcfg "ftpmasterapi archive query method takes no data part"
if length $data;
my @cmd = archive_api_query_cmd($subpath);
fail "curl failed to print 3-digit HTTP code";
}
my $code = $&;
+ return undef if $code eq '404' && $ok404;
fail "fetch of $url gave HTTP code $code"
unless $url =~ m#^file://# or $code =~ m/^2/;
return decode_json($json);
}
-sub canonicalise_suite_ftpmasterapi () {
+sub canonicalise_suite_ftpmasterapi {
my ($proto,$data) = @_;
my $suites = api_query($data, 'suites');
my @matched;
return $cn;
}
-sub archive_query_ftpmasterapi () {
+sub archive_query_ftpmasterapi {
my ($proto,$data) = @_;
my $info = api_query($data, "dsc_in_suite/$isuite/$package");
my @rows;
if length $@;
}
@rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
- return @rows;
+ return archive_query_prepend_mirror @rows;
+}
+
+# file_in_archive_ftpmasterapi ($proto, $data, $filename)
+# Ask the ftpmaster API whether $filename occurs anywhere in the
+# archive pool.  Returns the decoded JSON response, or undef when the
+# endpoint is absent (HTTP 404 is tolerated via api_query's $ok404).
+sub file_in_archive_ftpmasterapi {
+    my ($proto,$data,$filename) = @_;
+    my $pat = $filename;
+    $pat =~ s/_/\\_/g;
+    $pat = "%/$pat";
+    # urlencode anything outside the API's safe character set
+    $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge;
+    my $info = api_query($data, "file_in_archive/$pat", 1);
+    # Previously relied on the implicit return of the last statement
+    # (a `my' assignment); make the return value explicit.
+    return $info;
+}
+
+#---------- `aptget' archive query method ----------
+
+our $aptget_base;
+our $aptget_releasefile;
+our $aptget_configpath;
+
+# Command prefixes which run apt-get/apt-cache against our private
+# apt.conf (written by aptget_prep).
+sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; }
+sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; }
+
+# Expire cached files which have not been read for 30 days.
+sub aptget_cache_clean {
+    runcmd_ordryrun_local qw(sh -ec),
+	'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --',
+	'x', $aptget_base;
+}
+
+# Take an exclusive flock on $aptget_base/lock.  The (deliberately
+# global, bareword) filehandle keeps the fd - and hence the lock -
+# held for the rest of the process's lifetime.
+sub aptget_lock_acquire () {
+    my $lockfile = "$aptget_base/lock";
+    open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
+    flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
+}
+
+# aptget_prep ($data)
+# Prepare a private apt environment under ~/.cache/dgit/aptget: write
+# an apt.conf and sources.list for the distro mirror and suite (after
+# applying any aptget-suite-map), take the lock, expire old cache
+# entries, and run `apt-get update'.  Records which Release file the
+# update fetched in $aptget_releasefile.  Idempotent per process
+# (returns immediately once $aptget_base is set).
+sub aptget_prep ($) {
+    my ($data) = @_;
+    return if defined $aptget_base;
+
+    badcfg "aptget archive query method takes no data part"
+	if length $data;
+
+    # The XDG Base Directory spec calls this XDG_CACHE_HOME; honour
+    # that, keeping the previously-used XDG_CACHE_DIR as a fallback
+    # for compatibility.
+    my $cache = $ENV{XDG_CACHE_HOME} // $ENV{XDG_CACHE_DIR}
+	// "$ENV{HOME}/.cache";
+
+    ensuredir $cache;
+    ensuredir "$cache/dgit";
+    my $cachekey =
+	access_cfg('aptget-cachekey','RETURN-UNDEF')
+	// access_nomdistro();
+
+    $aptget_base = "$cache/dgit/aptget";
+    ensuredir $aptget_base;
+
+    my $quoted_base = $aptget_base;
+    # apt.conf(5) provides no quoting mechanism, so refuse paths whose
+    # characters would corrupt the generated configuration.
+    die "$quoted_base contains bad chars, cannot continue"
+	if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
+
+    aptget_lock_acquire();
+
+    aptget_cache_clean();
+
+    $aptget_configpath = "$aptget_base/apt.conf#$cachekey";
+    my $sourceslist = "source.list#$cachekey";
+
+    my $aptsuites = $isuite;
+    cfg_apply_map(\$aptsuites, 'suite map',
+		  access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
+
+    open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
+    printf SRCS "deb-src %s %s %s\n",
+	access_cfg('mirror'),
+	$aptsuites,
+	access_cfg('aptget-components')
+	or die $!;
+    # close now (checked) so the data is on disk before apt reads it
+    close SRCS or die $!;
+
+    ensuredir "$aptget_base/cache";
+    ensuredir "$aptget_base/lists";
+
+    open CONF, ">", $aptget_configpath or die $!;
+    print CONF <<END;
+Debug::NoLocking "true";
+APT::Get::List-Cleanup "false";
+#clear APT::Update::Post-Invoke-Success;
+Dir::Etc::SourceList "$quoted_base/$sourceslist";
+Dir::State::Lists "$quoted_base/lists";
+Dir::Etc::preferences "$quoted_base/preferences";
+Dir::Cache::srcpkgcache "$quoted_base/cache/srcs#$cachekey";
+Dir::Cache::pkgcache "$quoted_base/cache/pkgs#$cachekey";
+END
+
+    # Point every other path apt might touch into our private area too.
+    foreach my $key (qw(
+			Dir::Cache
+			Dir::State
+			Dir::Cache::Archives
+			Dir::Etc::SourceParts
+			Dir::Etc::preferencesparts
+		      )) {
+	ensuredir "$aptget_base/$key";
+	print CONF "$key \"$quoted_base/$key\";\n" or die $!;
+    };
+    # flush before apt-get runs with this configuration
+    close CONF or die $!;
+
+    # Backdate the atimes of any existing Release files so that, below,
+    # we can tell which one(s) this `apt-get update' actually touched.
+    my $oldatime = (time // die $!) - 1;
+    foreach my $oldlist (<$aptget_base/lists/*Release>) {
+	next unless stat_exists $oldlist;
+	my ($mtime) = (stat _)[9];
+	utime $oldatime, $mtime, $oldlist or die "$oldlist $!";
+    }
+
+    runcmd_ordryrun_local aptget_aptget(), qw(update);
+
+    my @releasefiles;
+    foreach my $oldlist (<$aptget_base/lists/*Release>) {
+	next unless stat_exists $oldlist;
+	my ($atime) = (stat _)[8];
+	next if $atime == $oldatime;
+	push @releasefiles, $oldlist;
+    }
+    # Prefer InRelease when both forms were fetched.
+    my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
+    @releasefiles = @inreleasefiles if @inreleasefiles;
+    die "apt updated wrong number of Release files (@releasefiles), erk"
+	unless @releasefiles == 1;
+
+    ($aptget_releasefile) = @releasefiles;
+}
+
+# canonicalise_suite_aptget ($proto, $data)
+# Canonicalise $isuite from the Codename/Suite fields of the Release
+# file fetched by aptget_prep, applying the configured reverse suite
+# map (aptget-suite-rmap).  Falls back to $isuite itself when the
+# Release file carries neither field.
+sub canonicalise_suite_aptget {
+    my ($proto,$data) = @_;
+    aptget_prep($data);
+
+    my $release = parsecontrol $aptget_releasefile, "Release file", 1;
+
+    foreach my $name (qw(Codename Suite)) {
+	my $val = $release->{$name};
+	if (defined $val) {
+	    printdebug "release file $name: $val\n";
+	    # reject suite names which could break later interpolation
+	    $val =~ m/^$suite_re$/o or fail
+		"Release file ($aptget_releasefile) specifies intolerable $name";
+	    cfg_apply_map(\$val, 'suite rmap',
+			  access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
+	    return $val
+	}
+    }
+    return $isuite;
+}
+
+# archive_query_aptget ($proto, $data)
+# Fetch the .dsc for $package via `apt-get --download-only source' and
+# return a single row [ version, file:// URL ]; returns the empty list
+# when the package is not in the suite at all.
+sub archive_query_aptget {
+    my ($proto,$data) = @_;
+    aptget_prep($data);
+
+    # start from an empty download area so we know any .dsc is ours
+    ensuredir "$aptget_base/source";
+    foreach my $old (<$aptget_base/source/*.dsc>) {
+	unlink $old or die "$old: $!";
+    }
+
+    my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package;
+    return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi;
+    # avoids apt-get source failing with ambiguous error code
+
+    runcmd_ordryrun_local
+	shell_cmd 'cd "$1"/source; shift', $aptget_base,
+	aptget_aptget(), qw(--download-only --only-source source), $package;
+
+    my @dscs = <$aptget_base/source/*.dsc>;
+    fail "apt-get source did not produce a .dsc" unless @dscs;
+    fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
+
+    my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
+
+    # build a file:// URL, escaping everything except path separators
+    use URI::Escape;
+    my $uri = "file://". uri_escape $dscs[0];
+    $uri =~ s{\%2f}{/}gi;
+    return [ (getfield $pre_dsc, 'Version'), $uri ];
+}
+
+#---------- `dummyapicat' archive query method ----------
+
+sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
+sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
+
+# file_in_archive_dummycatapi ($proto, $data, $filename)
+# Test-suite analogue of file_in_archive_ftpmasterapi: search the
+# local file:// mirror tree for $filename and return a list of
+# { sha256sum, filename } hashes (possibly empty).
+sub file_in_archive_dummycatapi ($$$) {
+    my ($proto,$data,$filename) = @_;
+    my $mirror = access_cfg('mirror');
+    $mirror =~ s#^file://#/# or die "$mirror ?";
+    my @out;
+    my @cmd = (qw(sh -ec), '
+	cd "$1"
+	find -name "$2" -print0 |
+	xargs -0r sha256sum
+    ', qw(x), $mirror, $filename);
+    debugcmd "-|", @cmd;
+    open FIA, "-|", @cmd or die $!;
+    while (<FIA>) {
+	chomp or die;
+	printdebug "| $_\n";
+	m/^(\w+)  (\S+)$/ or die "$_ ?";
+	push @out, { sha256sum => $1, filename => $2 };
+    }
+    $!=0; close FIA or die failedcmd @cmd;
+    return \@out;
+}
#---------- `madison' archive query method ----------
sub archive_query_madison {
- return map { [ @$_[0..1] ] } madison_get_parse(@_);
+ return archive_query_prepend_mirror
+ map { [ @$_[0..1] ] } madison_get_parse(@_);
}
sub madison_get_parse {
return $r[0][2];
}
+sub file_in_archive_madison { return undef; }
+
#---------- `sshpsql' archive query method ----------
sub sshpsql ($$$) {
my ($vsn,$component,$filename,$sha256sum) = @$_;
[ $vsn, "/pool/$component/$filename",$digester,$sha256sum ];
} @rows;
- return @rows;
+ return archive_query_prepend_mirror @rows;
}
sub canonicalise_suite_sshpsql ($$) {
return $rows[0];
}
+sub file_in_archive_sshpsql ($$$) { return undef; }
+
#---------- `dummycat' archive query method ----------
sub canonicalise_suite_dummycat ($$) {
}
C->error and die "$dpath: $!";
close C;
- return sort { -version_compare($a->[0],$b->[0]); } @rows;
+ return archive_query_prepend_mirror
+ sort { -version_compare($a->[0],$b->[0]); } @rows;
}
+sub file_in_archive_dummycat () { return undef; }
+
#---------- tag format handling ----------
sub access_cfg_tagformats () {
split /\,/, access_cfg('dgit-tag-format');
}
+# True (1) iff the server accepts both the `new' and `maint' tag
+# formats, which split-brain operation requires; 0 otherwise.
+sub access_cfg_tagformats_can_splitbrain () {
+    my %supported = map { $_ => 1 } access_cfg_tagformats;
+    return 0 if grep { !$supported{$_} } qw(new maint);
+    return 1;
+}
+
sub need_tagformat ($$) {
my ($fmt, $why) = @_;
fail "need to use tag format $fmt ($why) but also need".
$csuite = archive_query('canonicalise_suite');
if ($isuite ne $csuite) {
progress "canonical suite name for $isuite is $csuite";
+ } else {
+ progress "canonical suite name is $csuite";
}
}
canonicalise_suite();
my @vsns = archive_query('archive_query');
foreach my $vinfo (@vsns) {
- my ($vsn,$subpath,$digester,$digest) = @$vinfo;
- $dscurl = access_cfg('mirror').$subpath;
+ my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
+ $dscurl = $vsn_dscurl;
$dscdata = url_get($dscurl);
if (!$dscdata) {
$skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
fail "$dscurl has hash $got but".
" archive told us to expect $digest";
}
- my $dscfh = new IO::File \$dscdata, '<' or die $!;
- printdebug Dumper($dscdata) if $debuglevel>1;
- $dsc = parsecontrolfh($dscfh,$dscurl,1);
- printdebug Dumper($dsc) if $debuglevel>1;
+ parse_dscdata();
my $fmt = getfield $dsc, 'Format';
$format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
"unsupported source format $fmt, sorry";
return $tree;
}
-sub remove_stray_gits () {
+# Stage everything in the current working tree (including deletions,
+# -Af) and return the resulting git tree object id.
+sub git_add_write_tree () {
+    runcmd @git, qw(add -Af .);
+    return git_write_tree();
+}
+
+sub remove_stray_gits ($) {
+ my ($what) = @_;
my @gitscmd = qw(find -name .git -prune -print0);
debugcmd "|",@gitscmd;
open GITS, "-|", @gitscmd or die $!;
local $/="\0";
while (<GITS>) {
chomp or die;
- print STDERR "$us: warning: removing from source package: ",
+ print STDERR "$us: warning: removing from $what: ",
(messagequote $_), "\n";
rmtree $_;
}
$!=0; $?=0; close GITS or failedcmd @gitscmd;
}
-sub mktree_in_ud_from_only_subdir (;$) {
- my ($raw) = @_;
+sub mktree_in_ud_from_only_subdir ($;$) {
+ my ($what,$raw) = @_;
# changes into the subdir
my (@dirs) = <*/.>;
my $dir = $1;
changedir $dir;
- remove_stray_gits();
+ remove_stray_gits($what);
mktree_in_ud_here();
if (!$raw) {
my ($format, $fopts) = get_source_format();
}
}
- runcmd @git, qw(add -Af);
- my $tree=git_write_tree();
+ my $tree=git_add_write_tree();
return ($tree,$dir);
}
our @files_csum_info_fields =
- (['Checksums-Sha256','Digest::SHA', 'new(256)'],
- ['Checksums-Sha1', 'Digest::SHA', 'new(1)'],
- ['Files', 'Digest::MD5', 'new()']);
+ (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'],
+ ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'],
+ ['Files', 'Digest::MD5', 'new()', 'md5sum']);
sub dsc_files_info () {
foreach my $csumi (@files_csum_info_fields) {
return 1;
}
+# changes_update_origs_from_dsc ($dsc, $changes, $upstreamvsn, $changesfile)
+# Edit the .changes so its set of .orig files matches what the archive
+# needs: for each orig listed in the .dsc, ask the archive whether it
+# already has it (archive_query 'file_in_archive'); remove origs the
+# archive already holds, add origs it lacks, and fail if the archive
+# holds a same-named orig with different checksums.  Rewrites
+# $changesfile in place (via .tmp + rename) when acting locally.
+sub changes_update_origs_from_dsc ($$$$) {
+    my ($dsc, $changes, $upstreamvsn, $changesfile) = @_;
+    my %changes_f; # NOTE(review): appears unused in this sub
+    printdebug "checking origs needed ($upstreamvsn)...\n";
+    $_ = getfield $changes, 'Files';
+    # grab "section priority" from any existing Files entry, for reuse
+    # when we add lines below
+    m/^\w+ \d+ (\S+ \S+) \S+$/m or
+	fail "cannot find section/priority from .changes Files field";
+    my $placementinfo = $1;
+    my %changed;
+    printdebug "checking origs needed placement '$placementinfo'...\n";
+    foreach my $l (split /\n/, getfield $dsc, 'Files') {
+	$l =~ m/\S+$/ or next;
+	my $file = $&;
+	printdebug "origs $file | $l\n";
+	next unless is_orig_file_of_vsn $file, $upstreamvsn;
+	printdebug "origs $file is_orig\n";
+	my $have = archive_query('file_in_archive', $file);
+	if (!defined $have) {
+	    # archive query method cannot answer; give up gracefully
+	    print STDERR <<END;
+archive does not support .orig check; hope you used --ch:--sa/-sd if needed
+END
+	    return;
+	}
+	my $found_same = 0;
+	my @found_differ;
+	printdebug "origs $file \$#\$have=$#$have\n";
+	foreach my $h (@$have) {
+	    my $same = 0;
+	    my @differ;
+	    # compare every checksum field the archive reported against
+	    # the corresponding .dsc field
+	    foreach my $csumi (@files_csum_info_fields) {
+		my ($fname, $module, $method, $archivefield) = @$csumi;
+		next unless defined $h->{$archivefield};
+		$_ = $dsc->{$fname};
+		next unless defined;
+		m/^(\w+) .* \Q$file\E$/m or
+		    fail ".dsc $fname missing entry for $file";
+		if ($h->{$archivefield} eq $1) {
+		    $same++;
+		} else {
+		    push @differ,
+ "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
+		}
+	    }
+	    # a single archive file cannot both match and mismatch
+	    die "$file ".Dumper($h)." ?!" if $same && @differ;
+	    $found_same++
+		if $same;
+	    push @found_differ, "archive $h->(unknown): ".join "; ", @differ
+		if @differ;
+	}
+	printdebug "origs $file f.same=$found_same".
+	    " #f._differ=$#found_differ\n";
+	if (@found_differ && !$found_same) {
+	    fail join "\n",
+		"archive contains $file with different checksum",
+		@found_differ;
+	}
+	# Now we edit the changes file to add or remove it
+	foreach my $csumi (@files_csum_info_fields) {
+	    my ($fname, $module, $method, $archivefield) = @$csumi;
+	    next unless defined $changes->{$fname};
+	    if ($found_same) {
+		# in archive, delete from .changes if it's there
+		$changed{$file} = "removed" if
+		    $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
+	    } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
+		# not in archive, but it's here in the .changes
+	    } else {
+		# not in archive and not in .changes: copy the entry
+		# over from the .dsc (Files lines also need the
+		# section/priority inserting)
+		my $dsc_data = getfield $dsc, $fname;
+		$dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
+		my $extra = $1;
+		$extra =~ s/ \d+ /$&$placementinfo /
+		    or die "$fname $extra >$dsc_data< ?"
+		    if $fname eq 'Files';
+		$changes->{$fname} .= "\n". $extra;
+		$changed{$file} = "added";
+	    }
+	}
+    }
+    if (%changed) {
+	foreach my $file (keys %changed) {
+	    progress sprintf
+		"edited .changes for archive .orig contents: %s %s",
+		$changed{$file}, $file;
+	}
+	# write-then-rename so the .changes is never half-written
+	my $chtmp = "$changesfile.tmp";
+	$changes->save($chtmp);
+	if (act_local()) {
+	    rename $chtmp,$changesfile or die "$changesfile $!";
+	} else {
+	    progress "[new .changes left in $changesfile]";
+	}
+    } else {
+	progress "$changesfile already has appropriate .orig(s) (if any)";
+    }
+}
+
sub make_commit ($) {
my ($file) = @_;
return cmdoutput @git, qw(hash-object -w -t commit), $file;
vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
"Dpkg::Vendor \`current vendor'");
vendor_patches_distro(access_basedistro(),
- "distro being accessed");
+ "(base) distro being accessed");
+ vendor_patches_distro(access_nomdistro(),
+ "(nominal) distro being accessed");
}
sub generate_commits_from_dsc () {
my $f = $fi->{Filename};
die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
- link_ltarget "../../../$f", $f
+ printdebug "considering linking $f: ";
+
+ link_ltarget "../../../../$f", $f
+ or ((printdebug "($!) "), 0)
or $!==&ENOENT
or die "$f $!";
+ printdebug "linked.\n";
+
complete_file_from_dsc('.', $fi)
or next;
# from the debian/changelog, so we record the tree objects now and
# make them into commits later.
my @tartrees;
- my $upstreamv = $dsc->{version};
- $upstreamv =~ s/-[^-]+$//;
+ my $upstreamv = upstreamversion $dsc->{version};
my $orig_f_base = srcfn $upstreamv, '';
foreach my $fi (@dfi) {
$input = $compr_fh;
}
- rmtree "../unpack-tar";
- mkdir "../unpack-tar" or die $!;
+ rmtree "_unpack-tar";
+ mkdir "_unpack-tar" or die $!;
my @tarcmd = qw(tar -x -f -
--no-same-owner --no-same-permissions
--no-acls --no-xattrs --no-selinux);
my $tar_pid = fork // die $!;
if (!$tar_pid) {
- chdir "../unpack-tar" or die $!;
+ chdir "_unpack-tar" or die $!;
open STDIN, "<&", $input or die $!;
exec @tarcmd;
die "dgit (child): exec $tarcmd[0]: $!";
# finally, we have the results in "tarball", but maybe
# with the wrong permissions
- runcmd qw(chmod -R +rwX ../unpack-tar);
- changedir "../unpack-tar";
- my ($tree) = mktree_in_ud_from_only_subdir(1);
- changedir "../../unpack";
- rmtree "../unpack-tar";
+ runcmd qw(chmod -R +rwX _unpack-tar);
+ changedir "_unpack-tar";
+ remove_stray_gits($f);
+ mktree_in_ud_here();
+
+ my ($tree) = git_add_write_tree();
+ my $tentries = cmdoutput @git, qw(ls-tree -z), $tree;
+ if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) {
+ $tree = $1;
+ printdebug "one subtree $1\n";
+ } else {
+ printdebug "multiple subtrees\n";
+ }
+ changedir "..";
+ rmtree "_unpack-tar";
my $ent = [ $f, $tree ];
push @tartrees, {
push @cmd, qw(-x --), $dscfn;
runcmd @cmd;
- my ($tree,$dir) = mktree_in_ud_from_only_subdir();
+ my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
if (madformat $dsc->{format}) {
check_for_vendor_patches();
}
my @pcmd = qw(dpkg-source --before-build .);
runcmd shell_cmd 'exec >/dev/null', @pcmd;
rmtree '.pc';
- runcmd @git, qw(add -Af);
- $dappliedtree = git_write_tree();
+ $dappliedtree = git_add_write_tree();
}
my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
my $path = $ENV{PATH} or die;
foreach my $use_absurd (qw(0 1)) {
+ runcmd @git, qw(checkout -q unpa);
+ runcmd @git, qw(update-ref -d refs/heads/patch-queue/unpa);
local $ENV{PATH} = $path;
if ($use_absurd) {
chomp $@;
die "only absurd git-apply!\n" if !$use_absurd
&& forceing [qw(import-gitapply-absurd)];
- local $ENV{PATH} = $path if $use_absurd;
+ local $ENV{DGIT_ABSURD_DEBUG} = $debuglevel if $use_absurd;
+ local $ENV{PATH} = $path if $use_absurd;
my @showcmd = (gbp_pq, qw(import));
my @realcmd = shell_cmd
- 'exec >/dev/null 2>../../gbp-pq-output', @showcmd;
+ 'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
debugcmd "+",@realcmd;
if (system @realcmd) {
die +(shellquote @showcmd).
if (stat_exists $tf) {
progress "using existing $f";
} else {
+ printdebug "$tf does not exist, need to fetch\n";
my $furl = $dscurl;
$furl =~ s{/[^/]+$}{};
$furl .= "/$f";
my @specs = deliberately_not_fast_forward ? qw(tags/*) :
map { "tags/$_" }
(quiltmode_splitbrain
- ? (map { $_->('*',access_basedistro) }
+ ? (map { $_->('*',access_nomdistro) }
\&debiantag_new, \&debiantag_maintview)
- : debiantags('*',access_basedistro));
+ : debiantags('*',access_nomdistro));
push @specs, server_branch($csuite);
+ push @specs, $rewritemap;
push @specs, qw(heads/*) if deliberately_not_fast_forward;
# This is rather miserable:
# git fetch to try to generate it. If we don't manage to generate
# the target state, we try again.
+ printdebug "git_fetch_us specs @specs\n";
+
my $specre = join '|', map {
my $x = $_;
$x =~ s/\W/\\$&/g;
my $fetch_iteration = 0;
FETCH_ITERATION:
for (;;) {
+ printdebug "git_fetch_us iteration $fetch_iteration\n";
if (++$fetch_iteration > 10) {
fail "too many iterations trying to get sane fetch!";
}
# OK, now %want is exactly what we want for refs in @specs
my @fspecs = map {
- return () if !m/\*$/ && !exists $wantr{"refs/$_"};
+ !m/\*$/ && !exists $wantr{"refs/$_"} ? () :
"+refs/$_:".lrfetchrefs."/$_";
} @specs;
+ printdebug "git_fetch_us fspecs @fspecs\n";
+
my @fcmd = (@git, qw(fetch -p -n -q), access_giturl(), @fspecs);
runcmd_ordryrun_local @git, qw(fetch -p -n -q), access_giturl(),
@fspecs;
Dumper(\%lrfetchrefs_f);
my %here;
- my @tagpats = debiantags('*',access_basedistro);
+ my @tagpats = debiantags('*',access_nomdistro);
git_for_each_ref([map { "refs/tags/$_" } @tagpats], sub {
my ($objid,$objtype,$fullrefname,$reftail) = @_;
return getfield( (mergeinfo_getclogp $_[0]), 'Version' );
}
+# fetch_from_archive_record_1 ($hash)
+# Record $hash in the DGIT_ARCHIVE ref and sanity-check it.
+sub fetch_from_archive_record_1 ($) {
+    my ($hash) = @_;
+    runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
+	    'DGIT_ARCHIVE', $hash;
+    cmdoutput @git, qw(log -n2), $hash;
+    # ... gives git a chance to complain if our commit is malformed
+}
+
+# fetch_from_archive_record_2 ($hash)
+# Update the local remote-tracking ref lrref() to $hash (honouring
+# dry-run mode).
+sub fetch_from_archive_record_2 ($) {
+    my ($hash) = @_;
+    my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
+    if (act_local()) {
+	cmdoutput @upd_cmd;
+    } else {
+	dryrun_report @upd_cmd;
+    }
+}
+
sub fetch_from_archive () {
ensure_setup_existing_tree();
progress "no version available from the archive";
}
+ my $rewritemapdata = git_cat_file lrfetchrefs."/".$rewritemap.':map';
+ if (defined $rewritemapdata
+ && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) {
+ progress "server's git history rewrite map contains a relevant entry!";
+ $dsc_hash = $1;
+ if (defined $dsc_hash) {
+ progress "using rewritten git hash in place of .dsc value";
+ } else {
+ progress "server data says .dsc hash is to be disregarded";
+ }
+ }
+
# If the archive's .dsc has a Dgit field, there are three
# relevant git commitids we need to choose between and/or merge
# together:
};
if (defined $dsc_hash) {
- fail "missing remote git history even though dsc has hash -".
- " could not find ref ".rref()." at ".access_giturl()
- unless $lastpush_hash;
ensure_we_have_orig();
- if ($dsc_hash eq $lastpush_hash) {
+ if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
@mergeinputs = $dsc_mergeinput
} elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
print STDERR <<END or die $!;
die "$lasth $hash $what ?" unless is_fast_fwd($lasth, $hash);
};
- $chkff->($lastpush_hash, 'dgit repo server tip (last push)');
+ $chkff->($lastpush_hash, 'dgit repo server tip (last push)')
+ if $lastpush_hash;
$chkff->($lastfetch_hash, 'local tracking tip (last fetch)');
- runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
- 'DGIT_ARCHIVE', $hash;
- cmdoutput @git, qw(log -n2), $hash;
- # ... gives git a chance to complain if our commit is malformed
+ fetch_from_archive_record_1($hash);
if (defined $skew_warning_vsn) {
mkpath '.git/dgit';
}
if ($lastfetch_hash ne $hash) {
- my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
- if (act_local()) {
- cmdoutput @upd_cmd;
- } else {
- dryrun_report @upd_cmd;
- }
+ fetch_from_archive_record_2($hash);
}
lrfetchref_used lrfetchref();
setup_useremail();
}
+sub multisuite_suite_child ($$$) {
+    my ($tsuite, $merginputs, $fn) = @_;
+    # in child, sets things up, calls $fn->(), and returns undef
+    # in parent, returns canonical suite name for $tsuite
+    # The canonicalised suite name travels from child to parent via an
+    # anonymous temp file which is shared across the fork.
+    my $canonsuitefh = IO::File::new_tmpfile;
+    my $pid = fork // die $!;
+    if (!$pid) {
+        $isuite = $tsuite;
+        $us .= " [$isuite]";
+        $debugprefix .= " ";
+        progress "fetching $tsuite...";
+        canonicalise_suite();
+        print $canonsuitefh $csuite, "\n" or die $!;
+        close $canonsuitefh or die $!;
+        $fn->();
+        return undef;
+    }
+    # Fix: parenthesise the waitpid call.  The previous spelling
+    # `waitpid $pid,0 == $pid' parsed as waitpid($pid, 0==$pid), so the
+    # comparison never happened and a waitpid failure (-1, which is
+    # truthy) went undetected.
+    waitpid($pid,0) == $pid or die $!;
+    # exit status 4 (256*4) from the child means "package not in suite"
+    # and is handled below rather than being fatal.
+    fail "failed to obtain $tsuite: ".waitstatusmsg() if $? && $?!=256*4;
+    seek $canonsuitefh,0,0 or die $!;
+    # `local' is deliberate: $csuite is a package global which lrref()
+    # reads when we record the merge input below; it must revert to the
+    # caller's value when this sub returns.
+    local $csuite = <$canonsuitefh>;
+    die $! unless defined $csuite && chomp $csuite;
+    if ($? == 256*4) {
+        printdebug "multisuite $tsuite missing\n";
+        return $csuite;
+    }
+    printdebug "multisuite $tsuite ok (canon=$csuite)\n";
+    push @$merginputs, {
+        Ref => lrref,
+        Info => $csuite,
+    };
+    return $csuite;
+}
+
+sub fork_for_multisuite ($) {
+    my ($before_fetch_merge) = @_;
+    # if nothing unusual, just returns ''
+    #
+    # if multisuite:
+    #    returns 0 to caller in child, to do first of the specified suites
+    #    in child, $csuite is not yet set
+    #
+    #    returns 1 to caller in parent, to finish up anything needed after
+    #    in parent, $csuite is set to canonicalised portmanteau
+
+    my $org_isuite = $isuite;
+    my @suites = split /\,/, $isuite;
+    return '' unless @suites > 1;
+    printdebug "fork_for_multisuite: @suites\n";
+
+    my @mergeinputs;
+
+    # Fetch the first (base) suite in a child; the parent learns its
+    # canonical name.  If we ARE the child we return 0 here.
+    my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs,
+                                            sub { });
+    return 0 unless defined $cbasesuite;
+
+    fail "package $package missing in (base suite) $cbasesuite"
+        unless @mergeinputs;
+
+    my @csuites = ($cbasesuite);
+
+    $before_fetch_merge->();
+
+    # Fetch each remaining suite in its own child process.
+    foreach my $tsuite (@suites[1..$#suites]) {
+        my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
+                                               sub {
+            @end = ();
+            fetch();
+            exit 0;
+        });
+        # XXX: collect the ref here
+
+        # Abbreviate eg "stretch-backports" to "-backports" when the
+        # subsidiary suite extends the base suite's name.
+        $csubsuite =~ s/^\Q$cbasesuite\E-/-/;
+        push @csuites, $csubsuite;
+    }
+
+    # Resolve each recorded tracking ref to a commit id.
+    foreach my $mi (@mergeinputs) {
+        my $ref = git_get_ref $mi->{Ref};
+        die "$mi->{Ref} ?" unless length $ref;
+        $mi->{Commit} = $ref;
+    }
+
+    $csuite = join ",", @csuites;
+
+    # If we previously had a combined branch, include it as a merge
+    # input so apparent archive rewinds are detected, not discarded.
+    my $previous = git_get_ref lrref;
+    if ($previous) {
+        unshift @mergeinputs, {
+            Commit => $previous,
+            Info => "local combined tracking branch",
+            Warning =>
+ "archive seems to have rewound: local tracking branch is ahead!",
+        };
+    }
+
+    foreach my $ix (0..$#mergeinputs) {
+        $mergeinputs[$ix]{Index} = $ix;
+    }
+
+    # Best candidate first: highest version, ties broken by the order
+    # the user listed the suites.
+    @mergeinputs = sort {
+        -version_compare(mergeinfo_version $a,
+                         mergeinfo_version $b) # highest version first
+            or
+        $a->{Index} <=> $b->{Index}; # earliest in spec first
+    } @mergeinputs;
+
+    my @needed;
+
+  NEEDED:
+    foreach my $mi (@mergeinputs) {
+        printdebug "multisuite merge check $mi->{Info}\n";
+        # Discard inputs which are already ancestors of one we keep.
+        foreach my $previous (@needed) {
+            next unless is_fast_fwd $mi->{Commit}, $previous->{Commit};
+            printdebug "multisuite merge un-needed $previous->{Info}\n";
+            next NEEDED;
+        }
+        push @needed, $mi;
+        printdebug "multisuite merge this-needed\n";
+        $mi->{Character} = '+';
+    }
+
+    $needed[0]{Character} = '*';
+
+    my $output = $needed[0]{Commit};
+
+    if (@needed > 1) {
+        # Several independent heads: synthesise an octopus merge commit
+        # (raw commit object text fed to make_commit_text), using the
+        # tree of the best branch and all inputs as parents.
+        printdebug "multisuite merge nontrivial\n";
+        my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':';
+
+        my $commit = "tree $tree\n";
+        my $msg = "Combine archive branches $csuite [dgit]\n\n".
+            "Input branches:\n";
+
+        foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) {
+            printdebug "multisuite merge include $mi->{Info}\n";
+            $mi->{Character} //= ' ';
+            $commit .= "parent $mi->{Commit}\n";
+            $msg .= sprintf " %s %-25s %s\n",
+                $mi->{Character},
+                (mergeinfo_version $mi),
+                $mi->{Info};
+        }
+        my $authline = clogp_authline mergeinfo_getclogp $needed[0];
+        $msg .= "\nKey\n".
+            " * marks the highest version branch, which choose to use\n".
+            " + marks each branch which was not already an ancestor\n\n".
+            "[dgit multi-suite $csuite]\n";
+        $commit .=
+            "author $authline\n".
+            "committer $authline\n\n";
+        $output = make_commit_text $commit.$msg;
+        printdebug "multisuite merge generated $output\n";
+    }
+
+    fetch_from_archive_record_1($output);
+    fetch_from_archive_record_2($output);
+
+    progress "calculated combined tracking suite $csuite";
+
+    return 1;
+}
+
+sub clone_set_head () {
+    # Point HEAD at the local suite branch we are about to populate.
+    # NOTE(review): bareword handle + 2-arg open kept byte-for-byte as
+    # factored out of clone (see the removed lines there).
+    open H, "> .git/HEAD" or die $!;
+    print H "ref: ".lref()."\n" or die $!;
+    close H or die $!;
+}
+sub clone_finish ($) {
+    my ($dstdir) = @_;
+    # Check out the fetched branch, then stamp every checked-out file
+    # (without following symlinks, -h) with the mtime of the top-level
+    # directory -- presumably to give the fresh tree coherent
+    # timestamps; TODO confirm rationale.
+    runcmd @git, qw(reset --hard), lrref();
+    runcmd qw(bash -ec), <<'END';
+        set -o pipefail
+        git ls-tree -r --name-only -z HEAD | \
+        xargs -0r touch -h -r . --
+END
+    printdone "ready for work in $dstdir";
+}
+
 sub clone ($) {
     my ($dstdir) = @_;
-    canonicalise_suite();
     badusage "dry run makes no sense with clone" unless act_local();
+
+    # Multisuite ("suite1,suite2") requests: the fetching happens in
+    # child processes inside fork_for_multisuite; the parent only has
+    # to set HEAD and check out, then exits.
+    my $multi_fetched = fork_for_multisuite(sub {
+        printdebug "multi clone before fetch merge\n";
+        changedir $dstdir;
+    });
+    if ($multi_fetched) {
+        printdebug "multi clone after fetch merge\n";
+        clone_set_head();
+        clone_finish($dstdir);
+        exit 0;
+    }
+    # Single suite, or we are the multisuite child handling the first
+    # suite: do an ordinary clone.
+    printdebug "clone main body\n";
+
+    canonicalise_suite();
     my $hasgit = check_for_git();
     mkdir $dstdir or fail "create \`$dstdir': $!";
     changedir $dstdir;
     runcmd @git, qw(init -q);
+    clone_set_head();
     my $giturl = access_giturl(1);
     if (defined $giturl) {
-        open H, "> .git/HEAD" or die $!;
-        print H "ref: ".lref()."\n" or die $!;
-        close H or die $!;
         runcmd @git, qw(remote add), 'origin', $giturl;
     }
     if ($hasgit) {
         runcmd @git, qw(remote add vcs-git), $vcsgiturl;
     }
     setup_new_tree();
-    runcmd @git, qw(reset --hard), lrref();
-    printdone "ready for work in $dstdir";
+    clone_finish($dstdir);
 }
 sub fetch () {
+    # Canonicalisation used to happen in the callers; now done here so
+    # multisuite children canonicalise their own suite.
+    canonicalise_suite();
     if (check_for_git()) {
         git_fetch_us();
     }
 }
 sub pull () {
-    fetch();
+    # Multisuite: children do the fetching; the parent (returns 1)
+    # skips straight to the merge, and each child (returns 0) stops
+    # after its own fetch.
+    my $multi_fetched = fork_for_multisuite(sub { });
+    fetch() unless $multi_fetched; # parent
+    return if $multi_fetched eq '0'; # child
     runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
         lrref();
     printdone "fetched to ".lrref()." and merged into HEAD";
     return 1;
 }
+sub maybe_split_brain_save ($$$) {
+    my ($headref, $dgitview, $msg) = @_;
+    # => message fragment "$saved" describing disposition of $dgitview
+    # If the user asked for --dgit-view-save=REF, record the dgit view
+    # commit there.  The update-ref runs after `cd ../../../..' --
+    # i.e. from the user's real tree, four levels above the playground
+    # this is normally called from.
+    return "commit id $dgitview" unless defined $split_brain_save;
+    my @cmd = (shell_cmd "cd ../../../..",
+               @git, qw(update-ref -m),
+               "dgit --dgit-view-save $msg HEAD=$headref",
+               $split_brain_save, $dgitview);
+    runcmd @cmd;
+    return "and left in $split_brain_save";
+}
+
# An "infopair" is a tuple [ $thing, $what ]
# (often $thing is a commit hash; $what is a description)
parent $dgitview
parent $archive_hash
author $authline
-commiter $authline
+committer $authline
$msg_msg
# this: $dgitview'
#
+ return $dgitview unless defined $archive_hash;
+
printdebug "splitbrain_pseudomerge...\n";
my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash);
- return $dgitview unless defined $archive_hash;
-
if (!defined $overwrite_version) {
progress "Checking that HEAD inciudes all changes in archive...";
}
if (defined $overwrite_version) {
} elsif (!eval {
- my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_basedistro;
+ my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_nomdistro;
my $i_dep14 = infopair_lrf_tag_lookup($t_dep14, "maintainer view tag");
- my $t_dgit = debiantag_new $i_arch_v->[0], access_basedistro;
+ my $t_dgit = debiantag_new $i_arch_v->[0], access_nomdistro;
my $i_dgit = infopair_lrf_tag_lookup($t_dgit, "dgit view tag");
my $i_archive = [ $archive_hash, "current archive contents" ];
Make fast forward from $i_arch_v->[0]
END_MAKEFF
+ maybe_split_brain_save $maintview, $r, "pseudomerge";
+
progress "Made pseudo-merge of $i_arch_v->[0] into dgit view.";
return $r;
}
my $clogp = Dpkg::Control::Hash->new();
$clogp->load($clogpfn) or die;
- $package = getfield $clogp, 'Source';
+ my $clogpackage = getfield $clogp, 'Source';
+ $package //= $clogpackage;
+ fail "-p specified $package but changelog specified $clogpackage"
+ unless $package eq $clogpackage;
my $cversion = getfield $clogp, 'Version';
- my $tag = debiantag($cversion, access_basedistro);
+ my $tag = debiantag($cversion, access_nomdistro);
runcmd @git, qw(check-ref-format), $tag;
my $dscfn = dscfn($cversion);
TfSuffix => '-maintview',
View => 'maint',
};
- }
+ } elsif ($dodep14tag eq 'no' ? 0
+ : $dodep14tag eq 'want' ? access_cfg_tagformats_can_splitbrain
+ : $dodep14tag eq 'always'
+ ? (access_cfg_tagformats_can_splitbrain or fail <<END)
+--dep14tag-always (or equivalent in config) means server must support
+ both "new" and "maint" tag formats, but config says it doesn't.
+END
+ : die "$dodep14tag ?") {
+ push @tagwants, {
+ TagFn => \&debiantag_maintview,
+ Objid => $dgithead,
+ TfSuffix => '-dgit',
+ View => 'dgit',
+ };
+ };
foreach my $tw (@tagwants) {
- $tw->{Tag} = $tw->{TagFn}($cversion, access_basedistro);
+ $tw->{Tag} = $tw->{TagFn}($cversion, access_nomdistro);
$tw->{Tfn} = sub { $tfbase.$tw->{TfSuffix}.$_[0]; };
}
printdebug 'push_tagwants: ', Dumper(\@_, \@tagwants);
# to control the "tagger" (b) we can do remote signing
my $authline = clogp_authline $clogp;
my $delibs = join(" ", "",@deliberatelies);
- my $declaredistro = access_basedistro();
+ my $declaredistro = access_nomdistro();
my $mktag = sub {
my ($tw) = @_;
my $dscpath = "$buildproductsdir/$dscfn";
stat_exists $dscpath or
- fail "looked for .dsc $dscfn, but $!;".
+ fail "looked for .dsc $dscpath, but $!;".
" maybe you forgot to build";
responder_send_file('dsc', $dscpath);
my $dgithead = $actualhead;
my $maintviewhead = undef;
+ my $upstreamversion = upstreamversion $clogp->{Version};
+
if (madformat_wantfixup($format)) {
# user might have not used dgit build, so maybe do this now:
if (quiltmode_splitbrain()) {
- my $upstreamversion = $clogp->{Version};
- $upstreamversion =~ s/-[^-]*$//;
changedir $ud;
quilt_make_fake_dsc($upstreamversion);
my $cachekey;
progress "checking that $dscfn corresponds to HEAD";
runcmd qw(dpkg-source -x --),
$dscpath =~ m#^/# ? $dscpath : "../../../$dscpath";
- my ($tree,$dir) = mktree_in_ud_from_only_subdir();
+ my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
check_for_vendor_patches() if madformat($dsc->{format});
changedir '../../../..';
my @diffcmd = (@git, qw(diff --quiet), $tree, $dgithead);
# Check that changes and .dsc agree enough
$changesfile =~ m{[^/]*$};
- files_compare_inputs($dsc, parsecontrol($changesfile,$&))
+ my $changes = parsecontrol($changesfile,$&);
+ files_compare_inputs($dsc, $changes)
unless forceing [qw(dsc-changes-mismatch)];
+ # Perhaps adjust .dsc to contain right set of origs
+ changes_update_origs_from_dsc($dsc, $changes, $upstreamversion,
+ $changesfile)
+ unless forceing [qw(changes-origs-exactly)];
+
# Checks complete, we're going to try and go ahead:
responder_send_file('changes',$changesfile);
runcmd_ordryrun @git, qw(update-ref -m), 'dgit push', lrref(), $dgithead;
supplementary_message(<<'END');
-Push failed, after updating the remote git repository.
-If you want to try again, you must use a new version number.
+Push failed, while obtaining signatures on the .changes and .dsc.
+If it was just that the signature failed, you may try again by using
+debsign by hand to sign the changes
+ $changesfile
+and then dput to complete the upload.
+If you need to change the package, you must use a new version number.
END
if ($we_are_responder) {
my $dryrunsuffix = act_local() ? "" : ".tmp";
return if $!==&ENOENT;
die "chdir $cwd_remove: $!";
}
+ printdebug "clone rmonerror removing $dstdir\n";
if (stat $dstdir) {
rmtree($dstdir) or die "remove $dstdir: $!\n";
} elsif (grep { $! == $_ }
$package = getfield $sourcep, 'Source';
}
if (@ARGV==0) {
-# $isuite = branchsuite(); # this doesn't work because dak hates canons
+ $isuite = branchsuite();
if (!$isuite) {
my $clogp = parsechangelog();
$isuite = getfield $clogp, 'Distribution';
}
- canonicalise_suite();
- progress "fetching from suite $csuite";
} elsif (@ARGV==1) {
($isuite) = @ARGV;
- canonicalise_suite();
} else {
badusage "incorrect arguments to dgit fetch or dgit pull";
}
 sub cmd_fetch {
     parseopts();
     fetchpullargs();
+    # Multisuite: all fetching already happened in child processes, so
+    # both the parent (1) and any child (0 is false here only for the
+    # non-multisuite '' case) are done.
+    my $multi_fetched = fork_for_multisuite(sub { });
+    exit 0 if $multi_fetched;
     fetch();
 }
 sub cmd_pull {
     parseopts();
     fetchpullargs();
+    # In split-brain quilt modes HEAD is rewritten into a separate dgit
+    # view; merging the archive branch back would be wrong, so refuse.
+    if (quiltmode_splitbrain()) {
+        my ($format, $fopts) = get_source_format();
+        madformat($format) and fail <<END
+dgit pull not yet supported in split view mode (--quilt=$quilt_mode)
+END
+    }
     pull();
 }
local $ENV{'EDITOR'} = cmdoutput qw(realpath --), $0;
local $ENV{'VISUAL'} = $ENV{'EDITOR'};
local $ENV{$fakeeditorenv} = cmdoutput qw(realpath --), $descfn;
- runcmd @dpkgsource, qw(--commit .), $patchname;
+ runcmd @dpkgsource, qw(--commit --include-removal .), $patchname;
}
}
if ($unrepres) {
eval {
- die "deleted\n" unless $newmode =~ m/[^0]/;
- die "not a plain file\n" unless $newmode =~ m/^10\d{4}$/;
- if ($oldmode =~ m/[^0]/) {
+ die "not a plain file\n"
+ unless $newmode =~ m/^10\d{4}$/ ||
+ $oldmode =~ m/^10\d{4}$/;
+ if ($oldmode =~ m/[^0]/ &&
+ $newmode =~ m/[^0]/) {
die "mode changed\n" if $oldmode ne $newmode;
} else {
- die "non-default mode\n" unless $newmode =~ m/^100644$/;
+ die "non-default mode\n"
+ unless $newmode =~ m/^100644$/ ||
+ $oldmode =~ m/^100644$/;
}
};
if ($@) {
local $/="\n"; chomp $@;
- push @$unrepres, [ $f, $@ ];
+ push @$unrepres, [ $f, "$@ ($oldmode->$newmode)" ];
}
}
runcmd @git, qw(update-ref -m), $cachekey, "refs/$splitbraincache",
$dgitview;
- progress "dgit view: created (commit id $dgitview)";
-
changedir '.git/dgit/unpack/work';
+
+ my $saved = maybe_split_brain_save $headref, $dgitview, "converted";
+ progress "dgit view: created ($saved)";
}
sub quiltify ($$$$) {
check_for_vendor_patches();
if (quiltmode_splitbrain) {
- foreach my $needtf (qw(new maint)) {
- next if grep { $_ eq $needtf } access_cfg_tagformats;
- fail <<END
+ fail <<END unless access_cfg_tagformats_can_splitbrain;
quilt mode $quilt_mode requires split view so server needs to support
both "new" and "maint" tag formats, but config says it doesn't.
END
- }
}
my $clogp = parsechangelog();
prep_ud();
changedir $ud;
- my $upstreamversion=$version;
- $upstreamversion =~ s/-[^-]*$//;
+ my $upstreamversion = upstreamversion $version;
if ($fopts->{'single-debian-patch'}) {
quilt_fixup_singlepatch($clogp, $headref, $upstreamversion);
my $cachehit = $1;
quilt_fixup_mkwork($headref);
+ my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit";
if ($cachehit ne $headref) {
- progress "dgit view: found cached (commit id $cachehit)";
+ progress "dgit view: found cached ($saved)";
runcmd @git, qw(checkout -q -b dgit-view), $cachehit;
$split_brain = 1;
return ($cachehit, $splitbrain_cachekey);
changedir 'fake';
- remove_stray_gits();
+ remove_stray_gits("source package");
mktree_in_ud_here();
rmtree '.pc';
- runcmd @git, qw(add -Af .);
- my $unapplied=git_write_tree();
+ my $unapplied=git_add_write_tree();
printdebug "fake orig tree object $unapplied\n";
ensuredir '.pc';
changedir '../fake';
rmtree '.pc';
- runcmd @git, qw(add -Af .);
- my $oldtiptree=git_write_tree();
+ my $oldtiptree=git_add_write_tree();
printdebug "fake o+d/p tree object $unapplied\n";
changedir '../work';
maybe_unapply_patches_again();
}
-sub build_prep () {
+sub build_prep_early () {
+    # Idempotent first half of build_prep: establish package identity
+    # from debian/changelog and check the tree is clean.  Guard makes
+    # repeat calls (eg from cmd_gbp_build then build_prep) harmless.
+    our $build_prep_early_done //= 0;
+    return if $build_prep_early_done++;
     notpushing();
+    # NOTE(review): --package/-p is now a settable option elsewhere in
+    # this change; confirm this badusage is still the intended policy.
     badusage "-p is not allowed when building" if defined $package;
-    check_not_dirty();
-    clean_tree();
     my $clogp = parsechangelog();
     $isuite = getfield $clogp, 'Distribution';
     $package = getfield $clogp, 'Source';
     $version = getfield $clogp, 'Version';
+    check_not_dirty();
+}
+
+sub build_prep () {
+ build_prep_early();
+ clean_tree();
build_maybe_quilt_fixup();
if ($rmchanges) {
my $pat = changespat $version;
printdone "build successful, results in $result\n" or die $!;
}
+sub midbuild_checkchanges () {
+    # After a source-only build, fail if stray non-source .changes
+    # files matching this version are lying about in the parent dir --
+    # they would make the final results ambiguous.  Irrelevant when
+    # --rm-old-changes is in force.
+    my $pat = changespat $version;
+    return if $rmchanges;
+    my @unwanted = map { s#^\.\./##; $_; } glob "../$pat";
+    @unwanted = grep { $_ ne changespat $version,'source' } @unwanted;
+    fail <<END
+changes files other than source matching $pat already present; building would result in ambiguity about the intended results.
+Suggest you delete @unwanted.
+END
+        if @unwanted;
+}
+
+sub midbuild_checkchanges_vanilla ($) {
+    my ($wantsrc) = @_;
+    # Only relevant when a separate source-only build step occurred
+    # ($wantsrc == 1); other build styles do their own checking.
+    midbuild_checkchanges() if $wantsrc == 1;
+}
+
sub postbuild_mergechanges_vanilla ($) {
my ($wantsrc) = @_;
if ($wantsrc == 1) {
my $wantsrc = massage_dbp_args \@dbp;
if ($wantsrc > 0) {
build_source();
+ midbuild_checkchanges_vanilla $wantsrc;
} else {
build_prep();
}
}
sub cmd_gbp_build {
+ build_prep_early();
+
+ # gbp can make .origs out of thin air. In my tests it does this
+ # even for a 1.0 format package, with no origs present. So I
+ # guess it keys off just the version number. We don't know
+ # exactly what .origs ought to exist, but let's assume that we
+ # should run gbp if: the version has an upstream part and the main
+ # orig is absent.
+ my $upstreamversion = upstreamversion $version;
+ my $origfnpat = srcfn $upstreamversion, '.orig.tar.*';
+ my $gbp_make_orig = $version =~ m/-/ && !(() = glob "../$origfnpat");
+
+ if ($gbp_make_orig) {
+ clean_tree();
+ $cleanmode = 'none'; # don't do it again
+ $need_split_build_invocation = 1;
+ }
+
my @dbp = @dpkgbuildpackage;
my $wantsrc = massage_dbp_args \@dbp, \@ARGV;
push @cmd, (qw(-us -uc --git-no-sign-tags), "--git-builder=@dbp");
+ if ($gbp_make_orig) {
+ ensuredir '.git/dgit';
+ my $ok = '.git/dgit/origs-gen-ok';
+ unlink $ok or $!==&ENOENT or die $!;
+ my @origs_cmd = @cmd;
+ push @origs_cmd, qw(--git-cleaner=true);
+ push @origs_cmd, "--git-prebuild=touch $ok .git/dgit/no-such-dir/ok";
+ push @origs_cmd, @ARGV;
+ if (act_local()) {
+ debugcmd @origs_cmd;
+ system @origs_cmd;
+ do { local $!; stat_exists $ok; }
+ or failedcmd @origs_cmd;
+ } else {
+ dryrun_report @origs_cmd;
+ }
+ }
+
if ($wantsrc > 0) {
build_source();
+ midbuild_checkchanges_vanilla $wantsrc;
} else {
if (!$clean_using_builder) {
push @cmd, '--git-cleaner=true';
sub cmd_sbuild {
build_source();
- my $pat = changespat $version;
- if (!$rmchanges) {
- my @unwanted = map { s#^\.\./##; $_; } glob "../$pat";
- @unwanted = grep { $_ ne changespat $version,'source' } @unwanted;
- fail <<END
-changes files other than source matching $pat already present; building would result in ambiguity about the intended results.
-Suggest you delete @unwanted.
-END
- if @unwanted;
- }
+ midbuild_checkchanges();
in_parent {
if (act_local()) {
stat_exists $dscfn or fail "$dscfn (in parent directory): $!";
build_maybe_quilt_fixup();
}
+sub cmd_import_dsc {
+    # Import a .dsc (possibly with a Dgit field) into git branch
+    # BRANCH.  "+BRANCH" forces overwrite; "..BRANCH" merges onto
+    # existing history.
+    my $needsig = 0;
+
+    # Parse import-dsc's own sub-options, before positional arguments.
+    while (@ARGV) {
+        last unless $ARGV[0] =~ m/^-/;
+        $_ = shift @ARGV;
+        last if m/^--?$/;
+        if (m/^--require-valid-signature$/) {
+            $needsig = 1;
+        } else {
+            badusage "unknown dgit import-dsc sub-option \`$_'";
+        }
+    }
+
+    badusage "usage: dgit import-dsc .../PATH/TO/.DSC BRANCH" unless @ARGV==2;
+    my ($dscfn, $dstbranch) = @ARGV;
+
+    badusage "dry run makes no sense with import-dsc" unless act_local();
+
+    my $force = $dstbranch =~ s/^\+// ? +1 :
+        $dstbranch =~ s/^\.\.// ? -1 :
+        0;
+    my $info = $force ? " $&" : '';
+    $info = "$dscfn$info";
+
+    my $specbranch = $dstbranch;
+    $dstbranch = "refs/heads/$dstbranch" unless $dstbranch =~ m#^refs/#;
+    $dstbranch = cmdoutput @git, qw(check-ref-format --normalize), $dstbranch;
+
+    # Refuse to update the currently checked-out branch.
+    my @symcmd = (@git, qw(symbolic-ref -q HEAD));
+    my $chead = cmdoutput_errok @symcmd;
+    defined $chead or $?==256 or failedcmd @symcmd;
+
+    fail "$dstbranch is checked out - will not update it"
+        if defined $chead and $chead eq $dstbranch;
+
+    my $oldhash = git_get_ref $dstbranch;
+
+    open D, "<", $dscfn or fail "open import .dsc ($dscfn): $!";
+    $dscdata = do { local $/ = undef; <D>; };
+    D->error and fail "read $dscfn: $!";
+    # Fix: was `close C;' -- C was never opened; the handle is D.
+    close D;
+
+    # we don't normally need this so import it here
+    use Dpkg::Source::Package;
+    # (direct method call rather than indirect-object `new Class ...')
+    my $dp = Dpkg::Source::Package->new(filename => $dscfn,
+        require_valid_signature => $needsig);
+    {
+        # Dpkg::Source::Package reports signature trouble via warn;
+        # promote that to a failure when --require-valid-signature.
+        local $SIG{__WARN__} = sub {
+            print STDERR $_[0];
+            return unless $needsig;
+            fail "import-dsc signature check failed";
+        };
+        if (!$dp->is_signed()) {
+            warn "$us: warning: importing unsigned .dsc\n";
+        } else {
+            my $r = $dp->check_signature();
+            die "->check_signature => $r" if $needsig && $r;
+        }
+    }
+
+    parse_dscdata();
+
+    # If the .dsc carries a Dgit field we can (unless forced off) use
+    # the referenced commit directly instead of re-importing.
+    my $dgit_commit = $dsc->{$ourdscfield[0]};
+    if (defined $dgit_commit
+        && !forceing [qw(import-dsc-with-dgit-field)]) {
+        $dgit_commit =~ m/\w+/ or fail "invalid hash in .dsc";
+        $dgit_commit = $&;
+        progress "dgit: import-dsc of .dsc with Dgit field, using git hash";
+        my @cmd = (qw(sh -ec),
+                   "echo $dgit_commit | git cat-file --batch-check");
+        my $objgot = cmdoutput @cmd;
+        if ($objgot =~ m#^\w+ missing\b#) {
+            fail <<END
+.dsc contains Dgit field referring to object $dgit_commit
+Your git tree does not have that object. Try `git fetch' from a
+plausible server (browse.dgit.d.o? alioth?), and try the import-dsc again.
+END
+        }
+        if ($oldhash && !is_fast_fwd $oldhash, $dgit_commit) {
+            if ($force > 0) {
+                progress "Not fast forward, forced update.";
+            } else {
+                fail "Not fast forward to $dgit_commit";
+            }
+        }
+        @cmd = (@git, qw(update-ref -m), "dgit import-dsc (Dgit): $info",
+                $dstbranch, $dgit_commit);
+        runcmd @cmd;
+        progress "dgit: import-dsc updated git ref $dstbranch";
+        return 0;
+    }
+
+    fail <<END
+Branch $dstbranch already exists
+Specify ..$specbranch for a pseudo-merge, binding in existing history
+Specify  +$specbranch to overwrite, discarding existing history
+END
+        if $oldhash && !$force;
+
+    # Symlink the .dsc's component files (origs etc.) into the parent
+    # directory where generate_commits_from_dsc expects them.
+    $package = getfield $dsc, 'Source';
+    my @dfi = dsc_files_info();
+    foreach my $fi (@dfi) {
+        my $f = $fi->{Filename};
+        my $here = "../$f";
+        next if lstat $here;
+        fail "stat $here: $!" unless $! == ENOENT;
+        my $there = $dscfn;
+        if ($dscfn =~ m#^(?:\./+)?\.\./+#) {
+            $there = $';
+        } elsif ($dscfn =~ m#^/#) {
+            $there = $dscfn;
+        } else {
+            fail "cannot import $dscfn which seems to be inside working tree!";
+        }
+        $there =~ s#/+[^/]+$## or
+            fail "cannot import $dscfn which seems to not have a basename";
+        $there .= "/$f";
+        symlink $there, $here or fail "symlink $there to $here: $!";
+        progress "made symlink $here -> $there";
+# print STDERR Dumper($fi);
+    }
+    my @mergeinputs = generate_commits_from_dsc();
+    die unless @mergeinputs == 1;
+
+    my $newhash = $mergeinputs[0]{Commit};
+
+    if ($oldhash) {
+        if ($force > 0) {
+            progress "Import, forced update - synthetic orphan git history.";
+        } elsif ($force < 0) {
+            # "..BRANCH": bind the existing history in via a merge
+            # commit whose tree is exactly the imported package.
+            progress "Import, merging.";
+            my $tree = cmdoutput @git, qw(rev-parse), "$newhash:";
+            my $version = getfield $dsc, 'Version';
+            my $clogp = commit_getclogp $newhash;
+            my $authline = clogp_authline $clogp;
+            $newhash = make_commit_text <<END;
+tree $tree
+parent $newhash
+parent $oldhash
+author $authline
+committer $authline
+
+Merge $package ($version) import into $dstbranch
+END
+        } else {
+            die; # caught earlier
+        }
+    }
+
+    my @cmd = (@git, qw(update-ref -m), "dgit import-dsc: $info",
+               $dstbranch, $newhash);
+    runcmd @cmd;
+    # Fix: message previously read "results are in in git ref".
+    progress "dgit: import-dsc results are in git ref $dstbranch";
+}
+
sub cmd_archive_api_query {
badusage "need only 1 subpath argument" unless @ARGV==1;
my ($subpath) = @ARGV;
defvalopt '--existing-package','', '.*', \$existing_package;
defvalopt '--build-products-dir','','.*', \$buildproductsdir;
defvalopt '--clean', '', $cleanmode_re, \$cleanmode;
+defvalopt '--package', '-p', $package_re, \$package;
defvalopt '--quilt', '', $quilt_modes_re, \$quilt_mode;
defvalopt '', '-C', '.+', sub {
} elsif (m/^--overwrite=(.+)$/s) {
push @ropts, $_;
$overwrite_version = $1;
+ } elsif (m/^--dep14tag$/s) {
+ push @ropts, $_;
+ $dodep14tag= 'want';
+ } elsif (m/^--no-dep14tag$/s) {
+ push @ropts, $_;
+ $dodep14tag= 'no';
+ } elsif (m/^--always-dep14tag$/s) {
+ push @ropts, $_;
+ $dodep14tag= 'always';
+ } elsif (m/^--delayed=(\d+)$/s) {
+ push @ropts, $_;
+ push @dput, $_;
+ } elsif (m/^--dgit-view-save=(.+)$/s) {
+ push @ropts, $_;
+ $split_brain_save = $1;
+ $split_brain_save =~ s#^(?!refs/)#refs/heads/#;
} elsif (m/^--(no-)?rm-old-changes$/s) {
push @ropts, $_;
$rmchanges = !$1;
$quilt_mode = $1;
}
+if (!defined $dodep14tag) {
+ local $access_forpush;
+ $dodep14tag = access_cfg('dep14tag', 'RETURN-UNDEF') // 'want';
+ $dodep14tag =~ m/^($dodep14tag_re)$/
+ or badcfg "unknown dep14tag setting \`$dodep14tag'";
+ $dodep14tag = $1;
+}
+
$need_split_build_invocation ||= quiltmode_splitbrain();
if (!defined $cleanmode) {