CMake URL tests: Add special handling for some FTP URLs

Kornel Benko 2016-01-05 17:27:49 +01:00
parent fb11ac511f
commit 7b4064bbcb
3 changed files with 72 additions and 8 deletions


@@ -25,7 +25,7 @@ sub check_http_url($$$$);
 sub check_ftp_dir_entry($$);
 sub check_ftp_url($$$$);
 sub check_unknown_url($$$$);
-sub check_url($);
+sub check_url($$);
 ################
 sub check_http_url($$$$)
@@ -104,6 +104,50 @@ sub check_ftp_dir_entry($$)
   return(1,$isdir);
 }
 
+sub check_ftp2_url($$$$)
+{
+  my ($protocol, $host, $path, $file) = @_;
+  my $checkentry = 1;
+  print "\nhost $host\n";
+  print "path $path\n";
+  print "file $file\n";
+  my $url = "$protocol://$host";
+  $path =~ s/\/$//;
+  if (defined($file)) {
+    $url = "$url/$path/$file";
+  }
+  else {
+    $url = "$url/$path/.";
+  }
+  print "curl $url, file = $file\n";
+  my %listfiles = ();
+  if (open(FFTP, "curl --anyauth -l $url|")) {
+    while (my $l = <FFTP>) {
+      chomp($l);
+      $listfiles{$l} = 1;
+    }
+    close(FFTP);
+  }
+  if (%listfiles) {
+    if (! defined($file)) {
+      return(0, "OK");
+    }
+    elsif (defined($listfiles{$file})) {
+      return(0, "OK");
+    }
+    elsif (defined($listfiles{"ftpinfo.txt"})) {
+      return(0, "Probably a directory");
+    }
+    else {
+      return(1, "Not found");
+    }
+  }
+  else {
+    return(1, "Error");
+  }
+}
+
 sub check_ftp_url($$$$)
 {
   use Net::FTP;
@@ -200,9 +244,9 @@ sub check_unknown_url($$$$)
 #
 # Main entry
-sub check_url($)
+sub check_url($$)
 {
-  my($url) = @_;
+  my($url,$use_curl) = @_;
   my $file = undef;
   my ($protocol,$host,$path);
@@ -232,7 +276,12 @@ sub check_url($)
   }
   elsif ($protocol eq "ftp") {
     my $message;
-    ($res, $message) = check_ftp_url($protocol, $host, $path, $file);
+    if ($use_curl) {
+      ($res, $message) = check_ftp2_url($protocol, $host, $path, $file);
+    }
+    else {
+      ($res, $message) = check_ftp_url($protocol, $host, $path, $file);
+    }
     return $res;
   }
   else {
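Note: the curl-based check added above amounts to asking curl for a name-only FTP
listing of the parent directory and looking the requested file up in it. A minimal
standalone sketch of that idea (not part of the commit; the URL and file name are
examples only):

#!/usr/bin/perl -w
use strict;

# Example inputs; substitute any FTP directory and file name.
my $dir_url = "ftp://ftp.edpsciences.org/pub/aa/.";
my $wanted  = "readme.html";

# Name-only listing, same curl options as check_ftp2_url ("--anyauth -l").
my %listing;
if (open(my $fftp, "-|", "curl", "--anyauth", "--list-only", $dir_url)) {
  while (my $l = <$fftp>) {
    chomp($l);
    $listing{$l} = 1;
  }
  close($fftp);
}
print defined($listing{$wanted}) ? "found\n" : "not found\n";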


@@ -14,4 +14,5 @@ http://www.photogrammetry.ethz.ch/tarasp_workshop/isprs.cls
 # The following ftp url is correct, but
 # ftp commands like 'dir', 'get' do not work.
 # We get a timeout. Reading with firefox is OK.
-ftp://ftp.edpsciences.org/pub/aa/readme.html
+# Read with curl works too, added a flag
+UseCurl ftp://ftp.edpsciences.org/pub/aa/readme.html
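The "UseCurl" prefix introduced above is stripped and recorded per URL by readUrls()
in the change below. A small standalone illustration of that parsing, reusing the same
substitution as the patch (not part of the commit):

use strict;

my $l = "UseCurl ftp://ftp.edpsciences.org/pub/aa/readme.html";
my $use_curl = 0;
if ($l =~ s/^\s*UseCurl\s*//) {   # same substitution as in readUrls()
  $use_curl = 1;
}
# prints: use_curl=1, url=ftp://ftp.edpsciences.org/pub/aa/readme.html
print "use_curl=$use_curl, url=$l\n";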


@@ -103,6 +103,8 @@ for my $arg (@ARGV) {
 }
 
 my @urls = sort keys %URLS, keys %extraURLS;
+# Tests
+#my @urls = ("ftp://ftp.edpsciences.org/pub/aa/readme.html", "ftp://ftp.springer.de/pub/tex/latex/compsc/proc/author");
 my $errorcount = 0;
 my $URLScount = 0;
@@ -112,7 +114,15 @@ for my $u (@urls) {
     $ignoredURLS{$u}->{count} += 1;
     next;
   }
-  next if (defined($knownToRegisterURLS{$u}));
+  my $use_curl = 0;
+  if (defined($knownToRegisterURLS{$u})) {
+    if ($knownToRegisterURLS{$u}->{use_curl}) {
+      $use_curl = 1;
+    }
+    else {
+      next;
+    }
+  }
   if (defined($selectedURLS{$u})) {
     ${selectedURLS}{$u}->{count} += 1;
   }
@@ -121,7 +131,7 @@ for my $u (@urls) {
   print "Checking '$u': ";
   my ($res, $prnt, $outSum);
   try {
-    $res = check_url($u);
+    $res = check_url($u, $use_curl);
     if ($res) {
       print "Failed\n";
       $prnt = "";
@@ -224,8 +234,12 @@ sub readUrls($\%)
     $l =~ s/\s*\#.*$//; # remove comment
     $l = &replaceSpecialChar($l);
     next if ($l eq "");
+    my $use_curl = 0;
+    if ($l =~ s/^\s*UseCurl\s*//) {
+      $use_curl = 1;
+    }
     if (! defined($rUrls->{$l} )) {
-      $rUrls->{$l} = {$file => $line, count => 1};
+      $rUrls->{$l} = {$file => $line, count => 1, use_curl => $use_curl};
     }
   }
   close(ULIST);
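Taken together, an entry flagged with "UseCurl" in the URL list ends up routed to the
curl-based check. A hedged end-to-end sketch, assuming the hash layout produced by the
patched readUrls() and the two-argument check_url(); the module name CheckURL is an
assumption, since the diff does not show the checker's file or package name:

use strict;
use CheckURL;   # assumed module/package name for the file patched first above

# Shape produced by readUrls(): url => { <listfile> => <lineno>, count => 1, use_curl => 0|1 }
my %URLS = (
  "ftp://ftp.edpsciences.org/pub/aa/readme.html" => { count => 1, use_curl => 1 },
);

for my $u (sort keys %URLS) {
  my $use_curl = $URLS{$u}->{use_curl} ? 1 : 0;
  # With $use_curl set, check_url() dispatches to check_ftp2_url() instead of Net::FTP.
  my $res = check_url($u, $use_curl);
  print "Checking '$u': ", ($res ? "Failed" : "OK"), "\n";
}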