X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ianmdlvl/git?p=dgit.git;a=blobdiff_plain;f=dgit;h=4804c9bd055f39880a28676ce5ec8bac62099cd2;hp=696978b2ee7abb53f26fa198652666b1e18c2e4d;hb=524d221c7e3b70db546be01d4929a1e9a56e14b7;hpb=b0879c98de6356234edf1365c6553a694725d040

diff --git a/dgit b/dgit
index 696978b2..4804c9bd 100755
--- a/dgit
+++ b/dgit
@@ -30,7 +30,7 @@ setup_sigwarn();
 
 use IO::Handle;
 use Data::Dumper;
-use LWP::UserAgent;
+use WWW::Curl::Easy;
 use Dpkg::Control::Hash;
 use File::Path;
 use File::Spec;
@@ -46,6 +46,7 @@ use Digest::SHA;
 use Digest::MD5;
 use List::MoreUtils qw(pairwise);
 use Text::Glob qw(match_glob);
+use Text::CSV;
 use Fcntl qw(:DEFAULT :flock);
 use Carp;
 
@@ -101,6 +102,7 @@ our %forceopts = map { $_=>0 }
     qw(unrepresentable unsupported-source-format
        dsc-changes-mismatch changes-origs-exactly
        uploading-binaries uploading-source-only
+       reusing-version
        import-gitapply-absurd
        import-gitapply-no-absurd
        import-dsc-with-dgit-field);
@@ -639,20 +641,6 @@ sub progress {
 
 our $ua;
 
-sub url_get {
-    if (!$ua) {
-        $ua = LWP::UserAgent->new();
-        $ua->env_proxy;
-    }
-    my $what = $_[$#_];
-    progress "downloading $what...";
-    my $r = $ua->get(@_) or confess "$!";
-    return undef if $r->code == 404;
-    $r->is_success or fail f_ "failed to fetch %s: %s",
-        $what, $r->status_line;
-    return $r->decoded_content(charset => 'none');
-}
-
 our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn);
 
 sub act_local () { return $dryrun_level <= 1; }
@@ -881,6 +869,22 @@ sub access_basedistro__noalias () {
                 return $kl->{$k};
             }
         }
+        foreach my $csvf (</usr/share/distro-info/*.csv>) {
+            my $csv_distro =
+                $csvf =~ m{/(\w+)\.csv$} ? $1 : do {
+                printdebug "skipping $csvf\n";
+                next;
+            };
+            my $csv = Text::CSV->new({ binary => 1, auto_diag => 2 }) or die;
+            my $fh = new IO::File $csvf, "<:encoding(utf8)"
+                or die "open $csvf: $!";
+            while (my $cols = $csv->getline($fh)) {
+                next unless $cols->[2] eq $isuite;
+                return $csv_distro;
+            }
+            die "$csvf $!" if $fh->error;
+            close $fh;
+        }
         return cfg("dgit.default.distro");
     }
 }
@@ -1192,12 +1196,11 @@ sub cfg_apply_map ($$$) {
     $$varref = $_;
 }
 
-#---------- `ftpmasterapi' archive query method (nascent) ----------
-
-sub archive_api_query_curl ($) {
-    my ($url) = @_;
-
-    use WWW::Curl::Easy;
+sub url_fetch ($;@) {
+    my ($url, %xopts) = @_;
+    # Ok404 => 1 means give undef for 404
+    # AccessBase => 'archive-query' (eg)
+    # CurlOpts => { key => value }
 
     my $curl = WWW::Curl::Easy->new;
     my $setopt = sub {
@@ -1206,38 +1209,53 @@ sub archive_api_query_curl ($) {
         confess "$k $v ".$curl->strerror($x)." ?"
             if $x;
     };
-    my $response_body;
+    my $response_body = '';
+    $setopt->(CURLOPT_FOLLOWLOCATION, 1);
     $setopt->(CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTPS|CURLPROTO_HTTP);
     $setopt->(CURLOPT_URL, $url);
+    $setopt->(CURLOPT_NOSIGNAL, 1);
     $setopt->(CURLOPT_WRITEDATA, \$response_body);
 
-    if ($url =~ m#^https://([-.0-9a-z]+)/#) {
-        foreach my $k (qw(archive-query-tls-key
-                          archive-query-tls-curl-ca-args)) {
+    my $xcurlopts = $xopts{CurlOpts} // { };
+    keys %$xcurlopts;
+    while (my ($k,$v) = each %$xcurlopts) { $setopt->($k,$v); }
+
+    if ($xopts{AccessBase} && $url =~ m#^https://([-.0-9a-z]+)/#) {
+        foreach my $k ("$xopts{AccessBase}-tls-key",
+                       "$xopts{AccessBase}-tls-curl-ca-args") {
             fail "config option $k is obsolete and no longer supported"
                 if defined access_cfg($k, 'RETURN-UNDEF');
         }
     }
 
-    printdebug "archive api query: fetching $url...\n";
+    printdebug "query: fetching $url...\n";
+
+    local $SIG{PIPE} = 'IGNORE';
     my $x = $curl->perform();
     fail f_ "fetch of %s failed (%s): %s",
         $url, $curl->strerror($x), $curl->errbuf
         if $x;
 
-    return $curl->getinfo(CURLINFO_HTTP_CODE), $response_body;
+    my $code = $curl->getinfo(CURLINFO_HTTP_CODE);
+    if ($code eq '404' && $xopts{Ok404}) { return undef; }
+
+    fail f_ "fetch of %s gave HTTP code %s", $url, $code
+        unless $url =~ m#^file://# or $code =~ m/^2/;
+
+    confess unless defined $response_body;
+    return $response_body;
 }
 
+#---------- `ftpmasterapi' archive query method (nascent) ----------
+
 sub api_query_raw ($;$) {
     my ($subpath, $ok404) = @_;
     my $url = access_cfg('archive-query-url');
     $url .= $subpath;
 
-    my ($code,$json) = archive_api_query_curl($url);
-    return undef if $code eq '404' && $ok404;
-    fail f_ "fetch of %s gave HTTP code %s", $url, $code
-        unless $url =~ m#^file://# or $code =~ m/^2/;
-    return $json;
+    return url_fetch $url,
+        Ok404 => $ok404,
+        AccessBase => 'archive-query';
 }
 
 sub api_query ($$;$) {
@@ -1737,7 +1755,7 @@ sub get_archive_dsc () {
     foreach my $vinfo (@vsns) {
         my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo;
         $dscurl = $vsn_dscurl;
-        $dscdata = url_get($dscurl);
+        $dscdata = url_fetch($dscurl, Ok404 => 1 );
         if (!$dscdata) {
             $skew_warning_vsn = $vsn if !defined $skew_warning_vsn;
             next;
@@ -1791,22 +1809,13 @@ sub check_for_git () {
         my $suffix = access_cfg('git-check-suffix','git-suffix',
                                 'RETURN-UNDEF') // '.git';
         my $url = "$prefix/$package$suffix";
-        my @cmd = (@curl, qw(-sS -I), $url);
-        my $result = cmdoutput @cmd;
-        $result =~ s/^\S+ 200 .*\n\r?\n//;
-        # curl -sS -I with https_proxy prints
-        # HTTP/1.0 200 Connection established
-        $result =~ m/^\S+ (404|200) /s or
-            fail +(__ "unexpected results from git check query - ").
-                Dumper($prefix, $result);
-        my $code = $1;
-        if ($code eq '404') {
-            return 0;
-        } elsif ($code eq '200') {
-            return 1;
-        } else {
-            die;
-        }
+        my $result = url_fetch $url,
+            CurlOpts => { CURLOPT_NOBODY() => 1 },
+            Ok404 => 1,
+            AccessBase => 'git-check';
+        $result = defined $result;
+        printdebug "dgit-repos check_for_git => $result.\n";
+        return $result;
     } elsif ($how eq 'true') {
         return 1;
     } elsif ($how eq 'false') {
@@ -2025,7 +2034,7 @@ sub test_source_only_changes ($) {
     foreach my $l (split /\n/, getfield $changes, 'Files') {
         $l =~ m/\S+$/ or next;
         # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
-        unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
+        unless ($& =~ m/(?:\.dsc|\.diff\.gz|$tarball_f_ext_re|_source\.buildinfo)$/) {
            print f_ "purportedly source-only changes polluted by %s\n", $&;
            return 0;
        }
@@ -3858,6 +3867,16 @@ END
     printdone f_ "ready for work in %s", $dstdir;
 }
 
+sub vcs_git_url_of_ctrl ($) {
+    my ($ctrl) = @_;
+    my $vcsgiturl = $ctrl->{'Vcs-Git'};
+    if (length $vcsgiturl) {
+        $vcsgiturl =~ s/\s+-b\s+\S+//g;
+        $vcsgiturl =~ s/\s+\[[^][]*\]//g;
+    }
+    return $vcsgiturl;
+}
+
 sub clone ($) {
     # in multisuite, returns twice!
     # once in parent after first suite fetched,
@@ -3896,9 +3915,8 @@ sub clone ($) {
         progress __ "starting new git history";
     }
     fetch_from_archive() or no_such_package;
-    my $vcsgiturl = $dsc->{'Vcs-Git'};
+    my $vcsgiturl = vcs_git_url_of_ctrl $dsc;
     if (length $vcsgiturl) {
-        $vcsgiturl =~ s/\s+-b\s+\S+//g;
         runcmd @git, qw(remote add vcs-git), $vcsgiturl;
     }
     clone_finish($dstdir);
@@ -4034,6 +4052,7 @@ sub get_source_format () {
     }
     $_ = <F>;
     F->error and confess "$!";
+    close F;
     chomp;
     return ($_, \%options);
 }
@@ -4584,6 +4603,20 @@ END
 
     confess unless !!$made_split_brain == do_split_brain();
 
+    my $tagname = debiantag_new $cversion, access_nomdistro();
+    if (!(forceing[qw(reusing-version)]) && git_get_ref "refs/tags/$tagname") {
+        supplementary_message '';
+        print STDERR f_ <