#!/usr/bin/perl -w
-#
+# $Id$
-$commits_list = "pld-cvs-commit\@pld-linux.org";
+use IPC::Run qw(run);
+
+$commits_list = "pld-cvs-commit\@lists.pld-linux.org";
$spool_dir = "./spool";
$copy_dir = "src"; # relative to ftp root
$no_url_dir = "./upload";
$df_server = "distfiles.pld-linux.org";
-$df_scp = "plddist\@$df_server:ftp";
+$df_scp = "plddist\@ep09.pld-linux.org:ftp";
+$user_agent = "PLD/distfiles";
@md5 = ();
-%url = ();
+@url = ();
$problems = "";
$normal_out = "";
$requester = "";
$fetched_count = 0;
$force_reply = 0;
$req_login = "nobody";
+$spec = "";
@files = ();
# try lookup some file in spool, exit if it cannot be done
}
# read file from spool, and try unlink it. if cannot unlink -- exit
-# sets $requester (email), $problems, @md5 (arrays of md5's)
-# and %url (map from md5 to urls)
+# sets $requester (email), $problems, @md5 (arrays of md5's)
+# and @url (map from md5 to urls)
sub read_spool_file()
{
open(F, "< $file") || exit 0;
$requester = <F>;
chomp $requester;
- $requester =~ /^[a-zA-Z_\-0-9\@\.]+$/
+ $requester =~ /^[a-zA-Z_0-9@.-]+$/
or die "$file: evil requester: $requester";
$req_login = "";
$requester =~ /^([^@]+)\@/ and $req_login = $1;
- $req_login =~ /^[a-z0-9A-Z_]+$/ or die "$file: evil requester $requester";
+ $req_login =~ /^[a-z0-9A-Z_.]+$/ or die "$file: evil requester $requester";
+	$spec = <F>;
+	chomp $spec; # strip newline so $spec cannot break the mail Subject header
+	$spec =~ s/\.spec$//;
my $flags = <F>;
$force_reply++ if ($flags =~ /force-reply/);
-
+
while (<F>) {
if (/^ERROR/) {
s/^ERROR: //;
$problems .= $_;
next;
}
- /^([a-f0-9]{32})\s+((ftp|http|no-url|no-url-copy):\/\/([=\?a-z0-9A-Z:\+\~\.\-\/_]|\%[0-9])+)\s*$/
- or die "$file: corrupted";
- push @md5, $1;
- $url{$1} = $2;
- /\/$/ and die "$file: cannot fetch dir";
+ if (/^([a-f0-9]{32})\s+((ftp|http|https|no-url|no-url-copy):\/\/([=\@\?a-z0-9A-Z:\+\~\.,\-\/_]|\%[0-9])+)\s*$/) {
+ if (/\/$/) {
+				$problems .= "$file: cannot fetch dir\n";
+ } else {
+ push @md5, $1;
+ push @url, $2;
+ }
+ } else {
+			$problems .= "FILE: $file: corrupted\n";
+ }
}
close(F);
while (<E>) {
$oops .= $_;
}
- close(E);
+ $oops .= "\nThe command has exited with a non-zero status."
+ unless (close (E));
$problems .= "scp problems: $cmd:\n$oops\n"
if ($oops ne "");
return ($oops ne "");
my ($md5, $url, $local_copy) = @_;
my $bn = basename($url);
-
+
if ($local_copy ne "$tmp_dir/$md5/$bn") {
if (system("mv -f \"$local_copy\" \"$tmp_dir/$md5/$bn\"")) {
$problems .= "FATAL: cannot move $local_copy to $tmp_dir\n";
}
$local_copy = "$tmp_dir/$md5/$bn";
}
-
+
if (open(D, "> $tmp_dir/$md5/$bn.desc")) {
print D "URL: $url\n";
print D "Login: $req_login\n";
my $dir = by_md5($md5, $url);
$dir =~ s|/[^/]+/[^/]+$||;
if (copy_to_df("$tmp_dir/$md5/", $dir) == 0) {
- $normal_out .=
+ $normal_out .=
"STORED: $url\n" .
"\t$md5 " . basename($url) . "\n" .
"\tSize: " . (-s $local_copy) . " bytes\n";
sub make_src_symlink($$)
{
my ($md5, $url) = @_;
-
+
return unless ($url =~ /^no-url/);
-
+
my $b = basename($url);
if (open(S, "> $tmp_dir/$b.link")) {
if ($url =~ /^no-url-copy/) {
sub md5($)
{
my $file = shift;
- my $md5 = `md5sum "$file" 2>/dev/null`;
+ my $in = "";
+ my $md5 = "";
+ my $err = "";
+ my @cmd = ("md5sum", $file);
+
+ run \@cmd, \$in, \$md5, \$err;
+ if ($err ne "") {
+ chomp($err);
+ $problems .= "FATAL: " . $err . "\n";
+ return "error";
+ }
+ chomp $md5;
$md5 =~ /^([a-f0-9]{32})/ and $md5 = $1;
return $md5;
}
sub handle_no_url($$)
{
my ($md5, $url) = @_;
-
- $url =~ m|://([^/]+)| or die "corrupted! (no-url)";
+
+ unless ($url =~ m#://([^/]+)#) {
+		$problems .= "$url: corrupted! (no-url)\n";
+ return;
+ }
my $basename = $1;
my $file = "$no_url_dir/$req_login/$basename";
my $all_out = "";
my $bn = basename($url);
my $local = "$tmp_dir/$md5/$bn";
- my $cmd = "wget -nv -O $local \"$url\"";
- my $cmd2 = "wget -nv --passive-ftp -O $local \"$url\"";
+ my $cmd = "wget -nv --no-check-certificate --user-agent=$user_agent -O $local \"$url\"";
+ my $cmd2 = "wget -nv --no-check-certificate --user-agent=$user_agent --passive-ftp -O $local \"$url\"";
push @files, $bn;
if (got_on_distfiles($md5, $url)) {
- $normal_out .=
+ $normal_out .=
"ALREADY GOT: $url\n" .
"\t$md5 " . basename($url) . "\n";
make_src_symlink($md5, $url);
return;
}
- mkdir("$tmp_dir/$md5") or die;
-
+ mkdir("$tmp_dir/$md5");
+
if ($url =~ /^no-url/) {
handle_no_url($md5, $url);
return;
}
-
+
open(W, "$cmd 2>&1 |");
while (<W>) {
$all_out .= $_;
if ($out ne "") {
$problems .= "$cmd:\n$out\n\n";
}
- if (-f $local && -s $local > 0 && $url =~ /^ftp:/) {
+ if (-f $local && -s $local == 0 && $url =~ /^ftp:/) {
+ $out = "";
open(W, "$cmd2 2>&1 |");
while (<W>) {
- $all_out .= $_;
+ $all_out .= "\n\t\t$_";
/URL:.*\s+\-\>\s+.*/ and next;
$out .= $_;
}
$problems .= "$cmd:\n$out\n\n";
}
}
- if (-f $local && -s $local > 0) {
+ if (-r $local && -s $local > 0) {
my $computed_md5 = md5($local);
if ($computed_md5 ne $md5) {
$problems .= "FATAL: $url md5 mismatch, needed $md5, got $computed_md5\n";
} else {
my $testcmd = "file \"$local\" |";
my $testres = "";
- if ($url =~ /^http:/ && $local =~ /\.(tar\.(bz2|gz)|tgz|zip|jar|xpi)$/) {
+ if ($url =~ /^(http|https):/ && $local =~ /\.(tar\.(bz2|gz)|tgz|zip|jar|xpi)$/) {
open(T, $testcmd) or die;
$testres = <T>;
close(T);
move_file($md5, $url, $local);
}
}
+ } elsif (-f $local && -s $local > 0) {
+ $problems .= "FATAL: $url ($md5) was not fetched ($cmd: $all_out): file is not readable\n";
} else {
$problems .= "FATAL: $url ($md5) was not fetched ($cmd: $all_out)\n";
}
sub fetch_files()
{
$problems .= "\n\n" if ($problems ne "");
- foreach $md5 (@md5) {
- fetch_file($md5, $url{$md5});
+ foreach $i (0..$#md5) {
+ fetch_file($md5[$i], $url[$i]);
}
}
splice(@files, 10, @files - 10, "...")
if (@files > 10);
-
- print EMAIL
+
+ print EMAIL
"From: $req_login <$requester>
To: $commits_list
Cc: $requester
-Subject: DISTFILES: ${marker}@{files}
+Subject: DISTFILES: ${spec}: ${marker}@{files}
Message-ID: <$$." . time . "\@distfiles.pld-linux.org>
X-distfiles-program: file-fetcher.pl
X-distfiles-version: " . '$Id$' . "