our $sign = 1;
our $dryrun_level = 0;
our $changesfile;
-our $buildproductsdir = '..';
+our $buildproductsdir;
+our $bpd_glob;
our $new_package = 0;
our $ignoredirty = 0;
our $rmonerror = 1;
our %forceopts = map { $_=>0 }
qw(unrepresentable unsupported-source-format
dsc-changes-mismatch changes-origs-exactly
+ uploading-binaries uploading-source-only
import-gitapply-absurd
import-gitapply-no-absurd
import-dsc-with-dgit-field);
return $tagformatfn->($v, $distro);
}
-sub debiantag_maintview ($$) {
- my ($v,$distro) = @_;
- return "$distro/".dep14_version_mangle $v;
-}
-
sub madformat ($) { $_[0] eq '3.0 (quilt)' }
sub lbranch () { return "$branchprefix/$csuite"; }
our $dgit_privdir_made //= ensure_a_playground 'dgit';
}
+sub bpd_abs () {
+ # Return the build products directory as an absolute path:
+ # $buildproductsdir as-is if already absolute, otherwise resolved
+ # relative to $maindir.
+ my $r = $buildproductsdir;
+ $r = "$maindir/$r" unless $r =~ m{^/};
+ # Explicit return: without it, when $r is already absolute the sub
+ # would implicitly return the value of the m{^/} match (1), not $r.
+ return $r;
+}
+
sub branch_gdr_info ($$) {
my ($symref, $head) = @_;
my ($status, $msg, $current, $ffq_prev, $gdrlast) =
}
}
-sub shell_cmd {
- my ($first_shell, @cmd) = @_;
- return qw(sh -ec), $first_shell.'; exec "$@"', 'x', @cmd;
-}
-
our $helpmsg = <<END;
main usages:
dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
'dgit.default.sshpsql-dbname' => 'service=projectb',
'dgit.default.aptget-components' => 'main',
'dgit.default.dgit-tag-format' => 'new,old,maint',
+ 'dgit.default.source-only-uploads' => 'ok',
'dgit.dsc-url-proto-ok.http' => 'true',
'dgit.dsc-url-proto-ok.https' => 'true',
'dgit.dsc-url-proto-ok.git' => 'true',
+ 'dgit.vcs-git.suites' => 'sid', # ;-separated
'dgit.default.dsc-url-proto-ok' => 'false',
# old means "repo server accepts pushes with old dgit tags"
# new means "repo server accepts pushes with new dgit tags"
'dgit-distro.debian.git-check' => 'url',
'dgit-distro.debian.git-check-suffix' => '/info/refs',
'dgit-distro.debian.new-private-pushers' => 't',
+ 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new',
'dgit-distro.debian/push.git-url' => '',
'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org',
'dgit-distro.debian/push.git-user-force' => 'dgit',
return "$url/$package$suffix";
}
-sub parsecontrolfh ($$;$) {
- my ($fh, $desc, $allowsigned) = @_;
- our $dpkgcontrolhash_noissigned;
- my $c;
- for (;;) {
- my %opts = ('name' => $desc);
- $opts{allow_pgp}= $allowsigned || !$dpkgcontrolhash_noissigned;
- $c = Dpkg::Control::Hash->new(%opts);
- $c->parse($fh,$desc) or die "parsing of $desc failed";
- last if $allowsigned;
- last if $dpkgcontrolhash_noissigned;
- my $issigned= $c->get_option('is_pgp_signed');
- if (!defined $issigned) {
- $dpkgcontrolhash_noissigned= 1;
- seek $fh, 0,0 or die "seek $desc: $!";
- } elsif ($issigned) {
- fail "control file $desc is (already) PGP-signed. ".
- " Note that dgit push needs to modify the .dsc and then".
- " do the signature itself";
- } else {
- last;
- }
- }
- return $c;
-}
-
-sub parsecontrol {
- my ($file, $desc, $allowsigned) = @_;
- my $fh = new IO::Handle;
- open $fh, '<', $file or die "$file: $!";
- my $c = parsecontrolfh($fh,$desc,$allowsigned);
- $fh->error and die $!;
- close $fh;
- return $c;
-}
-
-sub getfield ($$) {
- my ($dctrl,$field) = @_;
- my $v = $dctrl->{$field};
- return $v if defined $v;
- fail "missing field $field in ".$dctrl->get_option('name');
-}
-
-sub parsechangelog {
- my $c = Dpkg::Control::Hash->new(name => 'parsed changelog');
- my $p = new IO::Handle;
- my @cmd = (qw(dpkg-parsechangelog), @_);
- open $p, '-|', @cmd or die $!;
- $c->parse($p);
- $?=0; $!=0; close $p or failedcmd @cmd;
- return $c;
-}
-
sub commit_getclogp ($) {
# Returns the parsed changelog hashref for a particular commit
my ($objid) = @_;
my $info = api_query($data, "file_in_archive/$pat", 1);
}
+sub package_not_wholly_new_ftpmasterapi {
+ # `ftpmasterapi' archive-query method for the source-only-uploads
+ # `not-wholly-new' policy check: truthy iff the archive already
+ # knows some version of $pkg.
+ my ($proto,$data,$pkg) = @_;
+ # api_query presumably decodes the JSON response into an arrayref
+ # of madison entries -- TODO confirm against api_query's definition.
+ my $info = api_query($data,"madison?package=${pkg}&f=json");
+ # boolify: any entry at all means the package is not wholly new
+ return !!@$info;
+}
+
#---------- `aptget' archive query method ----------
our $aptget_base;
}
my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
@releasefiles = @inreleasefiles if @inreleasefiles;
- die "apt updated wrong number of Release files (@releasefiles), erk"
+ if (!@releasefiles) {
+ fail <<END;
+apt seemed not to update dgit's cached Release files for $isuite.
+(Perhaps $cache
+ is on a filesystem mounted `noatime'; if so, please use `relatime'.)
+END
+ }
+ die "apt updated too many Release files (@releasefiles), erk"
unless @releasefiles == 1;
($aptget_releasefile) = @releasefiles;
}
sub file_in_archive_aptget () { return undef; }
+sub package_not_wholly_new_aptget () { return undef; }
#---------- `dummyapicat' archive query method ----------
sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
-sub file_in_archive_dummycatapi ($$$) {
- my ($proto,$data,$filename) = @_;
+sub dummycatapi_run_in_mirror ($@) {
+ # runs $fn with FIA open onto rune
+ my ($rune, $argl, $fn) = @_;
+
my $mirror = access_cfg('mirror');
$mirror =~ s#^file://#/# or die "$mirror ?";
- my @out;
- my @cmd = (qw(sh -ec), '
- cd "$1"
- find -name "$2" -print0 |
- xargs -0r sha256sum
- ', qw(x), $mirror, $filename);
+ my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune,
+ qw(x), $mirror, @$argl);
debugcmd "-|", @cmd;
open FIA, "-|", @cmd or die $!;
- while (<FIA>) {
- chomp or die;
- printdebug "| $_\n";
- m/^(\w+) (\S+)$/ or die "$_ ?";
- push @out, { sha256sum => $1, filename => $2 };
- }
- close FIA or die failedcmd @cmd;
+ my $r = $fn->();
+ close FIA or ($!==0 && $?==141) or die failedcmd @cmd;
+ return $r;
+}
+
+sub file_in_archive_dummycatapi ($$$) {
+ my ($proto,$data,$filename) = @_;
+ my @out;
+ dummycatapi_run_in_mirror '
+ find -name "$1" -print0 |
+ xargs -0r sha256sum
+ ', [$filename], sub {
+ while (<FIA>) {
+ chomp or die;
+ printdebug "| $_\n";
+ m/^(\w+) (\S+)$/ or die "$_ ?";
+ push @out, { sha256sum => $1, filename => $2 };
+ }
+ };
return \@out;
}
+sub package_not_wholly_new_dummycatapi {
+ # `dummycatapi' (test-suite) archive-query method: truthy iff any
+ # ${pkg}_*.dsc exists anywhere in the mirror tree, ie some version
+ # of the package is already present.
+ my ($proto,$data,$pkg) = @_;
+ dummycatapi_run_in_mirror "
+ find -name ${pkg}_*.dsc
+ ", [], sub {
+ # slurp find's entire output from FIA (opened by the helper);
+ # any output at all means a .dsc was found
+ local $/ = undef;
+ !!<FIA>;
+ };
+}
+
#---------- `madison' archive query method ----------
sub archive_query_madison {
}
sub file_in_archive_madison { return undef; }
+sub package_not_wholly_new_madison { return undef; }
#---------- `sshpsql' archive query method ----------
}
sub file_in_archive_sshpsql ($$$) { return undef; }
+sub package_not_wholly_new_sshpsql ($$$) { return undef; }
#---------- `dummycat' archive query method ----------
}
sub file_in_archive_dummycat () { return undef; }
+sub package_not_wholly_new_dummycat () { return undef; }
#---------- tag format handling ----------
if ($found_same) {
# in archive, delete from .changes if it's there
$changed{$file} = "removed" if
- $changes->{$fname} =~ s/^.* \Q$file\E$(?:)\n//m;
- } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)\n/m) {
+ $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m;
+ } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) {
# not in archive, but it's here in the .changes
} else {
my $dsc_data = getfield $dsc, $fname;
- $dsc_data =~ m/^(.* \Q$file\E$)\n/m or die "$dsc_data $file ?";
+ $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
my $extra = $1;
$extra =~ s/ \d+ /$&$placementinfo /
or die "$fname $extra >$dsc_data< ?"
foreach my $fi (@dfi) {
my $f = $fi->{Filename};
die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#;
- my $upper_f = "$maindir/../$f";
+ my $upper_f = (bpd_abs()."/$f");
printdebug "considering reusing $f: ";
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
- fail "accessing ../$f,fetch: $!";
+ fail "accessing $buildproductsdir/$f,fetch: $!";
} elsif (link_ltarget $upper_f, $f) {
printdebug "linked.\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
- fail "accessing ../$f: $!";
+ fail "accessing $buildproductsdir/$f: $!";
} else {
printdebug "absent.\n";
}
printdebug "linked.\n";
} elsif ((printdebug "($!) "),
$! != EEXIST) {
- fail "saving ../$f: $!";
+ fail "saving $buildproductsdir/$f: $!";
} elsif (!$refetched) {
printdebug "no need.\n";
} elsif (link $f, "$upper_f,fetch") {
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($!) "),
$! != EEXIST) {
- fail "saving ../$f,fetch: $!";
+ fail "saving $buildproductsdir/$f,fetch: $!";
} else {
printdebug "cannot.\n";
}
}
my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all);
- debugcmd "|",@clogcmd;
- open CLOGS, "-|", @clogcmd or die $!;
-
my $clogp;
my $r1clogp;
printdebug "import clog search...\n";
+ parsechangelog_loop \@clogcmd, "package changelog", sub {
+ my ($thisstanza, $desc) = @_;
+ no warnings qw(exiting);
- for (;;) {
- my $stanzatext = do { local $/=""; <CLOGS>; };
- printdebug "import clogp ".Dumper($stanzatext) if $debuglevel>1;
- last if !defined $stanzatext;
-
- my $desc = "package changelog, entry no.$.";
- open my $stanzafh, "<", \$stanzatext or die;
- my $thisstanza = parsecontrolfh $stanzafh, $desc, 1;
$clogp //= $thisstanza;
printdebug "import clog $thisstanza->{version} $desc...\n";
# version). Then it remains to choose between the physically
# last entry in the file, and the one with the lowest version
# number. If these are not the same, we guess that the
- # versions were created in a non-monotic order rather than
+ # versions were created in a non-monotonic order rather than
# that the changelog entries have been misordered.
printdebug "import clog $thisstanza->{version} vs $upstreamv...\n";
$r1clogp = $thisstanza;
printdebug "import clog $r1clogp->{version} becomes r1\n";
- }
- die $! if CLOGS->error;
- close CLOGS or $?==SIGPIPE or failedcmd @clogcmd;
+ };
$clogp or fail "package changelog has no entries!";
foreach my $fi (@dfi) {
my $f = $fi->{Filename};
next unless is_orig_file_in_dsc($f, \@dfi);
- complete_file_from_dsc('..', $fi)
+ complete_file_from_dsc($buildproductsdir, $fi)
or next;
}
}
my $want = $wantr{$rrefname};
next if $got eq $want;
if (!defined $objgot{$want}) {
+ fail <<END unless act_local();
+--dry-run specified but we actually wanted the results of git fetch,
+so this is not going to work. Try running dgit fetch first,
+or using --damp-run instead of --dry-run.
+END
print STDERR <<END;
warning: git ls-remote suggests we want $lrefname
warning: and it should refer to $want
sub fetch_from_archive_record_1 ($) {
my ($hash) = @_;
- runcmd @git, qw(update-ref -m), "dgit fetch $csuite",
- 'DGIT_ARCHIVE', $hash;
+ runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash;
cmdoutput @git, qw(log -n2), $hash;
# ... gives git a chance to complain if our commit is malformed
}
sub fetch_from_archive_record_2 ($) {
my ($hash) = @_;
- my @upd_cmd = (@git, qw(update-ref -m), 'dgit fetch', lrref(), $hash);
+ my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash);
if (act_local()) {
cmdoutput @upd_cmd;
} else {
my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs,
sub {
@end = ();
- fetch();
+ fetch_one();
finish 0;
});
# xxx collecte the ref here
clone_finish($dstdir);
}
-sub fetch () {
+sub fetch_one () {
canonicalise_suite();
if (check_for_git()) {
git_fetch_us();
}
fetch_from_archive() or no_such_package();
+
+ my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'};
+ if (length $vcsgiturl and
+ (grep { $csuite eq $_ }
+ split /\;/,
+ cfg 'dgit.vcs-git.suites')) {
+ my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF';
+ if (defined $current && $current ne $vcsgiturl) {
+ print STDERR <<END;
+FYI: Vcs-Git in $csuite has different url to your vcs-git remote.
+ Your vcs-git remote url may be out of date. Use dgit update-vcs-git ?
+END
+ }
+ }
printdone "fetched into ".lrref();
}
-sub pull () {
+sub dofetch () {
my $multi_fetched = fork_for_multisuite(sub { });
- fetch() unless $multi_fetched; # parent
- return if $multi_fetched eq '0'; # child
+ fetch_one() unless $multi_fetched; # parent
+ finish 0 if $multi_fetched eq '0'; # child
+}
+
+sub pull () {
+ dofetch();
runcmd_ordryrun_local @git, qw(merge -m),"Merge from $csuite [dgit]",
lrref();
printdone "fetched to ".lrref()." and merged into HEAD";
}
sub commit_quilty_patch () {
- my $output = cmdoutput @git, qw(status --porcelain);
+ my $output = cmdoutput @git, qw(status --ignored --porcelain);
my %adds;
foreach my $l (split /\n/, $output) {
next unless $l =~ m/\S/;
- if ($l =~ m{^(?:\?\?| M) (.pc|debian/patches)}) {
+ if ($l =~ m{^(?:[?!][?!]| [MADRC]) (.pc|debian/patches)}) {
$adds{$1}++;
}
}
# => message fragment "$saved" describing disposition of $dgitview
return "commit id $dgitview" unless defined $split_brain_save;
my @cmd = (shell_cmd 'cd "$1"; shift', $maindir,
- @git, qw(update-ref -m),
+ git_update_ref_cmd
"dgit --dgit-view-save $msg HEAD=$headref",
$split_brain_save, $dgitview);
runcmd @cmd;
infopair_cond_ff($i_dep14, [ $maintview, 'HEAD' ]);
1;
}) {
+ $@ =~ s/^\n//; chomp $@;
print STDERR <<END;
-$us: check failed (maybe --overwrite is needed, consult documentation)
+$@
+| Not fast forward; maybe --overwrite is needed ? Please see dgit(1).
END
- die "$@";
+ finish -1;
}
my $r = pseudomerge_make_commit
$clogp, $head, $archive_hash, $i_arch_v,
"dgit", $m;
- runcmd @git, qw(update-ref -m), $m, 'HEAD', $r, $head;
+ runcmd git_update_ref_cmd $m, 'HEAD', $r, $head;
progress "Make pseudo-merge of $i_arch_v->[0] into your HEAD.";
return $r;
}
}
- if (defined $overwrite_version && !defined $maintviewhead) {
+ if (defined $overwrite_version && !defined $maintviewhead
+ && $archive_hash) {
$dgithead = plain_overwrite_pseudomerge($clogp,
$dgithead,
$archive_hash);
files_compare_inputs($dsc, $changes)
unless forceing [qw(dsc-changes-mismatch)];
+ # Check whether this is a source only upload
+ my $hasdebs = $changes->{Files} =~ m{\.deb$}m;
+ my $sourceonlypolicy = access_cfg 'source-only-uploads';
+ if ($sourceonlypolicy eq 'ok') {
+ } elsif ($sourceonlypolicy eq 'always') {
+ forceable_fail [qw(uploading-binaries)],
+ "uploading binaries, although distro policy is source only"
+ if $hasdebs;
+ } elsif ($sourceonlypolicy eq 'never') {
+ forceable_fail [qw(uploading-source-only)],
+ "source-only upload, although distro policy requires .debs"
+ if !$hasdebs;
+ } elsif ($sourceonlypolicy eq 'not-wholly-new') {
+ forceable_fail [qw(uploading-source-only)],
+ "source-only upload, even though package is entirely NEW\n".
+ "(this is contrary to policy in ".(access_nomdistro()).")"
+ if !$hasdebs
+ && $new_package
+ && !(archive_query('package_not_wholly_new', $package) // 1);
+ } else {
+ badcfg "unknown source-only-uploads policy \`$sourceonlypolicy'";
+ }
+
# Perhaps adjust .dsc to contain right set of origs
changes_update_origs_from_dsc($dsc, $changes, $upstreamversion,
$changesfile)
runcmd_ordryrun @git,
qw(-c push.followTags=false push), access_giturl(), @pushrefs;
- runcmd_ordryrun @git, qw(update-ref -m), 'dgit push', lrref(), $dgithead;
+ runcmd_ordryrun git_update_ref_cmd 'dgit push', lrref(), $dgithead;
supplementary_message(<<'END');
Push failed, while obtaining signatures on the .changes and .dsc.
}
}
-sub fetchpullargs () {
+sub package_from_d_control () {
if (!defined $package) {
my $sourcep = parsecontrol('debian/control','debian/control');
$package = getfield $sourcep, 'Source';
}
+}
+
+sub fetchpullargs () {
+ package_from_d_control();
if (@ARGV==0) {
$isuite = branchsuite();
if (!$isuite) {
sub cmd_fetch {
parseopts();
fetchpullargs();
- my $multi_fetched = fork_for_multisuite(sub { });
- finish 0 if $multi_fetched;
- fetch();
+ dofetch();
}
sub cmd_pull {
pull();
}
+sub cmd_checkout {
+ # dgit checkout SUITE
+ # Ensure a local branch for SUITE exists -- creating it from the
+ # remote-tracking ref, fetching first if necessary -- then check
+ # it out.
+ parseopts();
+ package_from_d_control();
+ @ARGV==1 or badusage "dgit checkout needs a suite argument";
+ ($isuite) = @ARGV;
+ notpushing();
+
+ # Two passes: first try the suite name exactly as given ($canon=0);
+ # if that finds nothing locally, canonicalise the suite name and
+ # try again ($canon=1), fetching from the archive if needed.
+ foreach my $canon (qw(0 1)) {
+ if (!$canon) {
+ $csuite= $isuite;
+ } else {
+ undef $csuite;
+ canonicalise_suite();
+ }
+ if (length git_get_ref lref()) {
+ # local branch already exists, yay
+ last;
+ }
+ if (!length git_get_ref lrref()) {
+ if (!$canon) {
+ # nope -- retry with the canonicalised suite name
+ next;
+ }
+ dofetch();
+ }
+ # now lrref exists; create the local branch from it
+ runcmd (@git, qw(update-ref), lref(), lrref(), '');
+ last;
+ }
+ # label the checkout in the reflog as a dgit operation
+ local $ENV{GIT_REFLOG_ACTION} = git_reflog_action_msg
+ "dgit checkout $isuite";
+ runcmd (@git, qw(checkout), lbranch());
+}
+
+sub cmd_update_vcs_git () {
+ # dgit update-vcs-git [SUITE|.] [--|-] [GIT FETCH OPTIONS...]
+ # Set up (or update) the `vcs-git' remote from the package's
+ # Vcs-Git field, then -- unless suppressed with `-' -- fetch it.
+ my $specsuite;
+ if (@ARGV==0 || $ARGV[0] =~ m/^-/) {
+ # no suite argument: default to the first configured suite
+ ($specsuite,) = split /\;/, cfg 'dgit.vcs-git.suites';
+ } else {
+ ($specsuite) = (@ARGV);
+ shift @ARGV;
+ }
+ my $dofetch=1;
+ if (@ARGV) {
+ if ($ARGV[0] eq '-') {
+ # `-' means: configure the remote but do not fetch
+ $dofetch = 0;
+ } elsif ($ARGV[0] eq '--') {
+ # `--' is just a separator before git fetch options; drop it.
+ # (Was a duplicated, unreachable `eq '-'' test, and a bare
+ # `shift' which would have shifted @_ rather than @ARGV.)
+ shift @ARGV;
+ }
+ }
+
+ package_from_d_control();
+ my $ctrl;
+ if ($specsuite eq '.') {
+ # `.' means: use the working tree's debian/control
+ $ctrl = parsecontrol 'debian/control', 'debian/control';
+ } else {
+ $isuite = $specsuite;
+ get_archive_dsc();
+ $ctrl = $dsc;
+ }
+ my $url = getfield $ctrl, 'Vcs-Git';
+
+ my @cmd;
+ my $orgurl = cfg 'remote.vcs-git.url', 'RETURN-UNDEF';
+ if (!defined $orgurl) {
+ print STDERR "setting up vcs-git: $url\n";
+ @cmd = (@git, qw(remote add vcs-git), $url);
+ } elsif ($orgurl eq $url) {
+ print STDERR "vcs git already configured: $url\n";
+ } else {
+ print STDERR "changing vcs-git url to: $url\n";
+ @cmd = (@git, qw(remote set-url vcs-git), $url);
+ }
+ runcmd_ordryrun_local @cmd;
+ if ($dofetch) {
+ # remaining @ARGV are passed through to git fetch
+ print "fetching (@ARGV)\n";
+ runcmd_ordryrun_local @git, qw(fetch vcs-git), @ARGV;
+ }
+}
+
sub prep_push () {
parseopts();
build_or_push_prep_early();
}
}
-sub quiltify_splitbrain ($$$$$$) {
- my ($clogp, $unapplied, $headref, $diffbits,
+sub quiltify_splitbrain ($$$$$$$) {
+ my ($clogp, $unapplied, $headref, $oldtiptree, $diffbits,
$editedignores, $cachekey) = @_;
+ my $gitignore_special = 1;
if ($quilt_mode !~ m/gbp|dpm/) {
# treat .gitignore just like any other upstream file
$diffbits = { %$diffbits };
$_ = !!$_ foreach values %$diffbits;
+ $gitignore_special = 0;
}
# We would like any commits we generate to be reproducible
my @authline = clogp_authline($clogp);
local $ENV{GIT_AUTHOR_EMAIL} = $authline[1];
local $ENV{GIT_AUTHOR_DATE} = $authline[2];
+ my $fulldiffhint = sub {
+ my ($x,$y) = @_;
+ my $cmd = "git diff $x $y -- :/ ':!debian'";
+ $cmd .= " ':!/.gitignore' ':!*/.gitignore'" if $gitignore_special;
+ return "\nFor full diff showing the problem(s), type:\n $cmd\n";
+ };
+
if ($quilt_mode =~ m/gbp|unapplied/ &&
($diffbits->{O2H} & 01)) {
my $msg =
"--quilt=$quilt_mode specified, implying patches-unapplied git tree\n".
" but git tree differs from orig in upstream files.";
+ $msg .= $fulldiffhint->($unapplied, 'HEAD');
if (!stat_exists "debian/patches") {
$msg .=
"\n ... debian/patches is missing; perhaps this is a patch queue branch?";
}
if ($quilt_mode =~ m/dpm/ &&
($diffbits->{H2A} & 01)) {
- fail <<END;
+ fail <<END. $fulldiffhint->($oldtiptree,'HEAD');
--quilt=$quilt_mode specified, implying patches-applied git tree
but git tree differs from result of applying debian/patches to upstream
END
}
if ($quilt_mode =~ m/gbp|dpm/ &&
($diffbits->{O2A} & 02)) {
- fail <<END
+ fail <<END;
--quilt=$quilt_mode specified, implying that HEAD is for use with a
tool which does not create patches for changes to upstream
.gitignores: but, such patches exist in debian/patches.
print SERIES "\n" or die $! unless $newline eq "\n";
print SERIES "auto-gitignore\n" or die $!;
close SERIES or die $!;
- runcmd @git, qw(add -- debian/patches/series), $gipatch;
+ runcmd @git, qw(add -f -- debian/patches/series), $gipatch;
commit_admin <<END
Commit patch to update .gitignore
return $s;
};
if ($quilt_mode eq 'linear') {
- print STDERR "$us: quilt fixup cannot be linear. Stopped at:\n";
+ print STDERR "\n$us: error: quilt fixup cannot be linear. Stopped at:\n";
foreach my $notp (@nots) {
print STDERR "$us: ", $reportnot->($notp), "\n";
}
print STDERR "$us: $_\n" foreach @$failsuggestion;
- fail "quilt fixup naive history linearisation failed.\n".
+ fail
+ "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n".
"Use dpkg-source --commit by hand; or, --quilt=smash for one ugly patch";
} elsif ($quilt_mode eq 'smash') {
} elsif ($quilt_mode eq 'auto') {
make-patches --quiet-would-amend));
# We tolerate soe snags that gdr wouldn't, by default.
if (act_local()) {
+ debugcmd "+",@cmd;
$!=0; $?=-1;
- failedcmd @cmd if system @cmd and $?!=7;
+ failedcmd @cmd if system @cmd and $?!=7*256;
} else {
dryrun_report @cmd;
}
@git, qw(pull --ff-only -q), "$playground/work", qw(master);
}
-sub quilt_fixup_mkwork ($) {
+sub unpack_playtree_mkwork ($) {
my ($headref) = @_;
mkdir "work" or die $!;
runcmd @git, qw(reset -q --hard), $headref;
}
-sub quilt_fixup_linkorigs ($$) {
+sub unpack_playtree_linkorigs ($$) {
my ($upstreamversion, $fn) = @_;
# calls $fn->($leafname);
- foreach my $f (<$maindir/../*>) { #/){
- my $b=$f; $b =~ s{.*/}{};
+ opendir QFD, bpd_abs();
+ while ($!=0, defined(my $b = readdir QFD)) {
+ my $f = bpd_abs()."/".$b;
{
local ($debuglevel) = $debuglevel-1;
printdebug "QF linkorigs $b, $f ?\n";
link_ltarget $f, $b or die "$b $!";
$fn->($b);
}
+ die "$buildproductsdir: $!" if $!;
+ closedir QFD;
}
sub quilt_fixup_delete_pc () {
# get it to generate debian/patches/debian-changes, it is
# necessary to build the source package.
- quilt_fixup_linkorigs($upstreamversion, sub { });
- quilt_fixup_mkwork($headref);
+ unpack_playtree_linkorigs($upstreamversion, sub { });
+ unpack_playtree_mkwork($headref);
rmtree("debian/patches");
print $fakedsc " ".$md->hexdigest." $size $b\n" or die $!;
};
- quilt_fixup_linkorigs($upstreamversion, $dscaddfile);
+ unpack_playtree_linkorigs($upstreamversion, $dscaddfile);
my @files=qw(debian/source/format debian/rules
debian/control debian/changelog);
next unless m/^(\w+) (\S.*\S)$/ && $2 eq $splitbrain_cachekey;
my $cachehit = $1;
- quilt_fixup_mkwork($headref);
+ unpack_playtree_mkwork($headref);
my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit";
if ($cachehit ne $headref) {
progress "dgit view: found cached ($saved)";
failed to apply your git tree's patch stack (from debian/patches/) to
the corresponding upstream tarball(s). Your source tree and .orig
are probably too inconsistent. dgit can only fix up certain kinds of
- anomaly (depending on the quilt mode). See --quilt= in dgit(1).
+ anomaly (depending on the quilt mode). Please see --quilt= in dgit(1).
END
}
changedir '..';
- quilt_fixup_mkwork($headref);
+ unpack_playtree_mkwork($headref);
my $mustdeletepc=0;
if (stat_exists ".pc") {
" --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?";
if (quiltmode_splitbrain()) {
- quiltify_splitbrain($clogp, $unapplied, $headref,
+ quiltify_splitbrain($clogp, $unapplied, $headref, $oldtiptree,
$diffbits, \%editedignores,
$splitbrain_cachekey);
return;
sub changesopts_version () {
if (!defined $changes_since_version) {
- my @vsns = archive_query('archive_query');
- my @quirk = access_quirk();
- if ($quirk[0] eq 'backports') {
- local $isuite = $quirk[2];
- local $csuite;
- canonicalise_suite();
- push @vsns, archive_query('archive_query');
+ my @vsns;
+ unless (eval {
+ @vsns = archive_query('archive_query');
+ my @quirk = access_quirk();
+ if ($quirk[0] eq 'backports') {
+ local $isuite = $quirk[2];
+ local $csuite;
+ canonicalise_suite();
+ push @vsns, archive_query('archive_query');
+ }
+ 1;
+ }) {
+ print STDERR $@;
+ fail
+ "archive query failed (queried because --since-version not specified)";
}
if (@vsns) {
@vsns = map { $_->[0] } @vsns;
return $r;
}
-sub in_parent (&) {
+sub in_bpd (&) {
my ($fn) = @_;
my $wasdir = must_getcwd();
- changedir "..";
+ changedir $buildproductsdir;
$fn->();
changedir $wasdir;
}
-sub postbuild_mergechanges ($) { # must run with CWD=.. (eg in in_parent)
+# this sub must run with CWD=$buildproductsdir (eg in in_bpd)
+sub postbuild_mergechanges ($) {
my ($msg_if_onlyone) = @_;
# If there is only one .changes file, fail with $msg_if_onlyone,
# or if that is undef, be a no-op.
sub midbuild_checkchanges () {
my $pat = changespat $version;
return if $rmchanges;
- my @unwanted = map { s#^\.\./##; $_; } glob "../$pat";
+ my @unwanted = map { s#.*/##; $_; } glob "$bpd_glob/$pat";
@unwanted = grep { $_ ne changespat $version,'source' } @unwanted;
fail <<END
changes files other than source matching $pat already present; building would result in ambiguity about the intended results.
sub postbuild_mergechanges_vanilla ($) {
my ($wantsrc) = @_;
if ($wantsrc == 1) {
- in_parent {
+ in_bpd {
postbuild_mergechanges(undef);
};
} else {
# orig is absent.
my $upstreamversion = upstreamversion $version;
my $origfnpat = srcfn $upstreamversion, '.orig.tar.*';
- my $gbp_make_orig = $version =~ m/-/ && !(() = glob "../$origfnpat");
+ my $gbp_make_orig = $version =~ m/-/ && !(() = glob "$bpd_glob/$origfnpat");
if ($gbp_make_orig) {
clean_tree();
build_prep();
$sourcechanges = changespat $version,'source';
if (act_local()) {
- unlink "../$sourcechanges" or $!==ENOENT
+ unlink "$buildproductsdir/$sourcechanges" or $!==ENOENT
or fail "remove $sourcechanges: $!";
}
$dscfn = dscfn($version);
($f =~ m/\.debian\.tar(?:\.\w+)$/ &&
$f eq srcfn($version, $&));
printdebug "source copy, found $f - renaming\n";
- rename "$playground/$f", "../$f" or $!==ENOENT
+ rename "$playground/$f", "$buildproductsdir/$f" or $!==ENOENT
or fail "put in place new source file ($f): $!";
}
} else {
}
runcmd_ordryrun_local qw(sh -ec),
'exec >$1; shift; exec "$@"','x',
- "../$sourcechanges",
+ "$buildproductsdir/$sourcechanges",
@dpkggenchanges, qw(-S), changesopts();
}
sub cmd_sbuild {
build_source();
midbuild_checkchanges();
- in_parent {
+ in_bpd {
if (act_local()) {
stat_exists $dscfn or fail "$dscfn (in parent directory): $!";
stat_exists $sourcechanges
runcmd_ordryrun_local @sbuild, qw(-d), $isuite, @ARGV, $dscfn;
};
maybe_unapply_patches_again();
- in_parent {
+ in_bpd {
postbuild_mergechanges(<<END);
perhaps you need to pass -A ? (sbuild's default is to build only
arch-specific binaries; dgit 1.4 used to override that.)
sub import_dsc_result {
my ($dstref, $newhash, $what_log, $what_msg) = @_;
- my @cmd = (@git, qw(update-ref -m), $what_log, $dstref, $newhash);
+ my @cmd = (git_update_ref_cmd $what_log, $dstref, $newhash);
runcmd @cmd;
check_gitattrs($newhash, "source tree");
my @dfi = dsc_files_info();
foreach my $fi (@dfi) {
my $f = $fi->{Filename};
- my $here = "../$f";
+ my $here = "$buildproductsdir/$f";
if (lstat $here) {
next if stat $here;
fail "lstat $here works but stat gives $! !";
badcfg "unknown clean-mode \`$cleanmode'" unless
$cleanmode =~ m/^($cleanmode_re)$(?!\n)/s;
}
+
+ $buildproductsdir //= access_cfg('build-products-dir', 'RETURN-UNDEF');
+ $buildproductsdir //= '..';
+ $bpd_glob = $buildproductsdir;
+ $bpd_glob =~ s#[][\\{}*?~]#\\$&#g;
}
if ($ENV{$fakeeditorenv}) {