#!@PERL_BINARY@
+# Copyright (C) 2000, HELM Team.
+#
+# This file is part of HELM, an Hypertextual, Electronic
+# Library of Mathematics, developed at the Computer Science
+# Department, University of Bologna, Italy.
+#
+# HELM is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# HELM is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with HELM; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# For details, see the HELM World-Wide-Web page,
+# http://cs.unibo.it/helm/.
+
+# Getter version string; @VERSION@ is substituted at build time.
+my $VERSION = "@VERSION@";
+
# First of all, let's load HELM configuration
use Env;
-my $HELM_LIBRARY_DIR = $ENV{"HELM_LIBRARY_DIR"};
+my $HELM_LIB_DIR = $ENV{"HELM_LIB_DIR"};
# this should be the only fixed constant
-my $DEFAULT_HELM_LIBRARY_DIR = "@DEFAULT_HELM_LIBRARY_DIR@";
-if (defined ($HELM_LIBRARY_DIR)) {
- $HELM_LIBRARY_PATH = $HELM_LIBRARY_DIR."/configuration.pl";
+my $DEFAULT_HELM_LIB_DIR = "@HELM_LIB_DIR@";
+# NOTE: $HELM_LIB_PATH is deliberately a package global (no "my"):
+# it is consumed by the "require" below.
+if (defined ($HELM_LIB_DIR)) {
+ $HELM_LIB_PATH = $HELM_LIB_DIR."/configuration.pl";
} else {
- $HELM_LIBRARY_PATH = $DEFAULT_HELM_LIBRARY_DIR."/configuration.pl";
+ $HELM_LIB_PATH = $DEFAULT_HELM_LIB_DIR."/configuration.pl";
}
+
+# <ZACK>: TODO temporary, move this setting to configuration file
+# set the cache mode, may be "gzipped" or "normal"
+my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} || 'gzipped';
+if (($cachemode ne 'gzipped') and ($cachemode ne 'normal')) {
+ # NOTE(review): the concatenated message below lacks a space and prints
+ # "must be'normal'" -- confirm whether that is intended.
+ die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be".
+ "'normal' or 'gzipped'\n";
+}
+# </ZACK>
+
# next require defines: $helm_dir, $html_link, $dtd_dir, $uris_dbm
-require $HELM_LIBRARY_PATH;
+require $HELM_LIB_PATH;
+
+# Let's override the configuration file
+# (environment variables win over values set in configuration.pl)
+$style_dir = $ENV{"HELM_STYLE_DIR"} if (defined ($ENV{"HELM_STYLE_DIR"}));
+$dtd_dir = $ENV{"HELM_DTD_DIR"} if (defined ($ENV{"HELM_DTD_DIR"}));
use HTTP::Daemon;
use HTTP::Status;
use HTTP::Request;
use LWP::UserAgent;
use DB_File;
+use Compress::Zlib;
#CSC: mancano i controlli sulle condizioni di errore di molte funzioni
#CSC: ==> non e' robusto
#CSC: altra roba da sistemare segnata con CSC
+# (English gloss of the CSC notes above: error conditions of many functions
+# are not checked => not robust; other items to fix are marked with CSC)
my $d = new HTTP::Daemon LocalPort => 8081;
+my $myownurl = $d->url;
+
+# Let's patch the returned URL
+$myownurl =~ s/\/$//; # chop the final slash
+my $myownport = $myownurl;
+# split "http://host:port" into host ($myownurl) and port ($myownport)
+$myownport =~ s/http:\/\/(.*):(.*)/$2/;
+$myownurl =~ s/http:\/\/(.*):(.*)/$1/;
+# canonicalize the host part: gethostbyname's first return value is the
+# official host name (see perlfunc)
+($myownurl) = gethostbyname($myownurl);
+$myownurl = "http://".$myownurl.":".$myownport;
+
+# read-only view of the uri -> url map (rebuilt by mk_urls_of_uris)
tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664);
-print "Please contact me at: <URL:", $d->url, ">\n";
+print "Please contact me at: <URL:", $myownurl."/", ">\n";
print "helm_dir: $helm_dir\n";
+print "style_dir: $style_dir\n";
print "dtd_dir: $dtd_dir\n";
print "urls_of_uris.db: $uris_dbm.db\n";
+print "cache mode: $cachemode\n";
+
$SIG{CHLD} = "IGNORE"; # do not accumulate defunct processes
$SIG{USR1} = \&update; # sent by the child to make the parent update
while (my $c = $d->accept) {
print "\nRequest: ".$r->url."\n\n";
my $http_method = $r->method;
my $http_path = $r->url->path;
+ my $http_query = $r->url->query;
if ($http_method eq 'GET' and $http_path eq "/getciconly") {
# finds the uri, url and filename
my $cicfilename = $cicuri;
$cicfilename =~ s/cic:(.*)/$1/;
$cicfilename =~ s/theory:(.*)/$1/;
- $cicfilename = $helm_dir.$cicfilename.".xml";
my $cicurl = $map{$cicuri};
+ my $extension;
+ if ($cicurl =~ /\.xml$/) { # non gzipped file
+ $extension = ".xml";
+ } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file
+ $extension = ".xml.gz";
+ } else { # error: unknown extension
+ die "unexpected extension in url: $cicurl, might be '.xml'".
+ "or '.xml.gz'";
+ }
+ $cicfilename = $helm_dir.$cicfilename.$extension;
+
if (!defined($cicurl)) {
print "\nNOT FOUND!!!!!\n";
$c->send_error(RC_NOT_FOUND)
$cicfilename =~ s/theory:(.*)/$1/;
$cicfilename = $helm_dir.$cicfilename;
- my $typesfilename = $cicfilename.".types.xml" if $typesuri;
- my $annfilename = $cicfilename.$annsuffix.".xml" if $annuri;
- $cicfilename .= ".xml";
-
my $cicurl = $map{$cicuri};
- my $typesurl = $map{$typesuri} if $typesuri;
- my $annurl = $map{$annuri} if $annuri;
+ my $typesurl = $map{$typesuri} if (defined($typesuri));
+ my $annurl = $map{$annuri} if (defined($annuri));
+ my ($cicext, $typesext, $annext);
+ if ($cicurl =~ /\.xml$/) { # normal file
+ $cicext = ".xml";
+ } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file
+ $cicext = ".xml.gz";
+ } else {
+ die "unexpected extension in url: $cicurl;".
+ "might be '.xml' or '.xml.gz'";
+ }
+ if (defined($typesuri)) { # extension selection for types file
+ if ($typesurl =~ /\.xml$/) { # normal file
+ $typesext = ".types.xml";
+ } elsif ($typesurl =~ /\.xml\.gz$/) { # gzipped file
+ $typesext = ".types.xml.gz";
+ } else {
+ die "unexpected extension in url: $typesurl;".
+ "might be '.xml' or '.xml.gz'";
+ }
+ }
+ if (defined($annuri)) { # extension selection for annotation file
+ if ($annurl =~ /\.xml$/) { # normal file
+ $annext = ".xml";
+ } elsif ($annurl =~ /\.xml\.gz$/) { # gzipped file
+ $annext = ".xml.gz";
+ } else {
+ die "unexpected extension in url: $annurl".
+ "might be '.xml' or '.xml.gz'";
+ }
+ }
+ my $typesfilename = $cicfilename.$typesext if $typesuri;
+ my $annfilename = $cicfilename.$annsuffix.$annext if $annuri;
+ $cicfilename .= $cicext;
if (!defined($cicurl) ||
(!defined($typesurl) && $typesuri) ||
print "DTD: $inputuri ==> ($filename)\n";
if (stat($filename)) {
print "Using local copy\n";
- open(FD, $filename);
+ open(FD, $filename) or die "Cannot open $filename\n";
$cont = "";
- while(<FD>) { $cont .= $_; }
+ while(<FD>) {
+ # Vary bad heuristic for substituion of absolute URLs
+ # for relative ones
+ s/ENTITY (.*) SYSTEM\s+"/ENTITY $1 SYSTEM "$myownurl\/getdtd?uri=/g;
+ $cont .= $_;
+ }
close(FD);
answer($c,$cont);
} else {
die "Could not find DTD!";
}
+ } elsif ($http_method eq 'GET' and $http_path eq "/getstyleconf") {
+ my $filename = $inputuri;
+ $filename = $style_dir."/config/".$filename;
+ if (stat($filename)) {
+ open(FD, $filename) or die "Cannot open $filename\n";
+ $cont = "";
+ while(<FD>) {
+ s/DOCTYPE (.*) SYSTEM\s+"/DOCTYPE $1 SYSTEM "$myownurl\/getstyleconf?uri=/g;
+ $cont .= $_;
+ }
+ close(FD);
+ answer($c,$cont);
+ } else {
+ die "Could not find Style Configuration File!";
+ }
+ } elsif ($http_method eq 'GET' and $http_path eq "/getxslt") {
+ my $filename = $inputuri;
+ $filename = $style_dir."/".$filename;
+ print "XSLT: $inputuri ==> ($filename)\n";
+ if (stat($filename)) {
+ print "Using local copy\n";
+ open(FD, $filename) or die "Cannot open $filename\n";
+ $cont = "";
+ while(<FD>) {
+ # Vary bad heuristic for substituion of absolute URLs
+ # for relative ones
+ s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ;
+ s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ;
+ $cont .= $_;
+ }
+ close(FD);
+ answer($c,$cont);
+ } else {
+ die "Could not find XSLT!";
+ }
} elsif ($http_method eq 'GET' and $http_path eq "/conf") {
my $quoted_html_link = $html_link;
$quoted_html_link =~ s/&/&/g;
$cont = "<?xml version=\"1.0\"?><html_link>$quoted_html_link</html_link>";
answer($c,$cont);
} elsif ($http_method eq 'GET' and $http_path eq "/update") {
- print "Update requested...";
- update();
- kill(USR1,getppid());
+ # rebuild urls_of_uris.db
+ print "Update requested...\n";
+ mk_urls_of_uris();
+ kill(USR1,getppid()); # signal changes to parent
print " done\n";
answer($c,"<html><body><h1>Update done</h1></body></html>");
+ } elsif ($http_method eq 'GET' and $http_path eq "/ls") {
+ # send back keys that begin with a given uri
+ my $baseuri = $http_query;
+ $baseuri =~ s/^.*baseuri=(.*)&.*$/$1/;
+ chop $baseuri if ($baseuri =~ /.*\/$/); # remove trailing "/"
+ my $outype = $http_query; # output type, might be 'txt' or 'xml'
+ $outype =~ s/^.*&type=(.*)$/$1/;
+ if (($outype ne 'txt') and ($outype ne 'xml')) { # invalid out type
+ print "Invalid output type specified: $outype\n";
+ answer($c,"<html><body><h1>Invalid output type, may be ".
+ "\"txt\" or \"xml\"</h1></body></html>");
+ } else { # valid output type
+ print "BASEURI $baseuri, TYPE $outype\n";
+ my $key;
+ $cont = "";
+ $cont .= "<urilist>\n" if ($outype eq "xml");
+ foreach (keys(%map)) { # search for uri that begin with $baseuri
+ if ($_ =~ /^$baseuri\//) {
+ $cont .= "<uri>" if ($outype eq "xml");
+ $cont .= $_;
+ $cont .= "\n" if ($outype eq "txt");
+ $cont .= "</uri>\n" if ($outype eq "xml");
+ }
+ }
+ $cont .= "</urilist>" if ($outype eq "xml");
+ answer($c,$cont);
+ }
+ } elsif ($http_method eq 'GET' and $http_path eq "/version") {
+ print "Version requested!";
+ answer($c,"<html><body><h1>HTTP Getter Version ".
+ $VERSION."</h1></body></html>");
} else {
- print "\nINVALID REQUEST!!!!!\n";
+ print "\n";
+ print "INVALID REQUEST!!!!!\n";
+ print "(PATH: ",$http_path,", ";
+ print "QUERY: ",$http_query,")\n";
$c->send_error(RC_FORBIDDEN)
}
print "\nRequest solved: ".$r->url."\n\n";
$cont .= $data;
}
+# gunzip: deflate the gzip'ed file $filename and return its content.
+# Dies when the file cannot be opened or the stream ends prematurely.
+sub gunzip {
+ my ($filename) = @_;
+
+ print "deflating $filename ...\n";
+ my $gzh = gzopen($filename, "r")
+ or die "Cannot open gzip'ed file $filename: $gzerrno";
+
+ my $chunk;
+ my $content = "";
+ # accumulate decompressed chunks until gzread signals EOF (0) or error (<0)
+ $content .= $chunk while $gzh->gzread($chunk) > 0;
+
+ # a clean gzip stream must end in Z_STREAM_END; anything else is an error
+ die "Error while reading : $gzerrno\n" if $gzerrno != Z_STREAM_END ;
+ $gzh->gzclose();
+
+ return $content;
+}
+
+# gzip: compress the string in $cont and write it, gzip-format, to $filename.
+# Dies if the destination cannot be opened or the write fails.
+sub gzip { # gzip the content argument and save it to filename argument
+ my ($cont, $filename) = @_;
+
+ # BUGFIX: the previous "my ($gz, $cont);" redeclared $cont, shadowing the
+ # content argument with undef, so an empty stream was compressed and the
+ # cached file was useless.  Declare only the handle here.
+ my $gz;
+
+ $gz = gzopen($filename, "w")
+ or die "Cannot gzopen for writing file $filename: $gzerrno";
+ $gz->gzwrite($cont) or die "error writing: $gzerrno\n" ;
+ $gz->gzclose();
+}
+
sub download
{
 my ($remove_headers,$str,$url,$filename) = @_;
+ # download: return the content of the resource labelled $str, fetching it
+ # from $url into the local cache file $filename unless a cached copy
+ # (plain or .gz) already exists.  Honours the global $cachemode
+ # ("normal" keeps .xml on disk, "gzipped" keeps .xml.gz).  When
+ # $remove_headers is true the XML prolog and DOCTYPE are stripped;
+ # otherwise absolute helm DTD SYSTEM ids are rewritten to this getter.
- $cont = ""; # modified by side-effect by the callback function
- if (stat($filename)) {
- print "Using local copy for the $str file\n";
- open(FD, $filename);
- while(<FD>) { $cont .= $_; }
- close(FD);
+ my ($gz, $buffer);
+
+ my $resourcetype; # retrieve mode: "normal" (.xml) or "gzipped" (.xml.gz)
+ if ($filename =~ /\.xml$/) { # set retrieve mode
+ $resourcetype = "normal";
+ } elsif ($filename =~ /\.xml\.gz$/) {
+ $resourcetype = "gzipped";
 } else {
- print "Downloading the $str file\n";
- $ua = LWP::UserAgent->new;
- $request = HTTP::Request->new(GET => "$url");
- $response = $ua->request($request, \&callback);
+ # NOTE(review): message should probably read "'.xml' or '.xml.gz'"
+ die "Unsupported download extension, might be '.gz' or '.xml'\n";
+ }
+ my $basefname = $filename;
+ $basefname =~ s/\.gz$//; # get base resource name removing trailing .gz
+ $cont = ""; # modified by side-effect by the callback function
+
+ # probe the cache for either flavour of the resource
+ my $localfname="";
+ if (stat($basefname)) {
+ $localfname=$basefname;
+ } elsif (stat($basefname.".gz")) {
+ $localfname=$basefname.".gz";
+ }
+ if ($localfname ne "") { # we already have local copy of requested file
+ # check both possible cache entry: gzipped or normal
+ print "Using local copy for the $str file\n";
+ if ($localfname =~ /\.xml\.gz$/) { # deflating cached file and return it
+ $cont = gunzip($localfname);
+ } elsif ($localfname =~ /\.xml$/) { # just return cached file
+ open(FD, $localfname) or die "Cannot open $localfname";
+ while(<FD>) { $cont .= $_; }
+ close(FD);
+ } else { # error
+ die "Internal error: unexpected file name $localfname,"
+ ."must end with '.gz' or '.xml.gz'\n";
+ }
+ } else { # download file from net
+ print "Downloading the $str file\n"; # download file
+ my $ua = LWP::UserAgent->new;
+ my $request = HTTP::Request->new(GET => "$url");
+ my $response = $ua->request($request, \&callback);
- print "Storing the $str file\n";
- mkdirs($filename);
- open(FD, ">".$filename);
- print FD $cont;
- close(FD);
+ # cache retrieved file to disk
+# <ZACK/> TODO: inefficient, I haven't yet understood how to deflate
+# an in-memory gzipped file without calling "gzopen"
+ print "Storing the $str file\n";
+ mkdirs($filename);
+ # write to a .tmp file first, then convert/rename it below
+ open(FD, ">".$filename.".tmp") or die "Cannot open $filename.tmp\n";
+ print FD $cont;
+ close(FD);
+
+ # handle cache conversion normal->gzipped or gzipped->normal as user choice
+ if (($cachemode eq 'normal') and ($resourcetype eq 'normal')) {
+ # cache the file as is
+ rename "$filename.tmp", $filename;
+ } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'gzipped')) {
+ # cache the file as is
+ # and update the $cont variable with deflated content
+ rename "$filename.tmp", $filename;
+ $cont = gunzip($filename);
+ } elsif (($cachemode eq 'normal') and ($resourcetype eq 'gzipped')) {
+ # deflate cache entry
+ # and update $cont
+ open(FD, "> $basefname") or die "cannot open $basefname\n";
+ $cont = gunzip($filename.".tmp");
+ print FD $cont;
+ close(FD);
+ unlink "$filename.tmp"; # delete old gzipped file
+ } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'normal')) {
+ # compress cache entry
+ gzip($cont, $basefname.".gz");
+ unlink "$filename.tmp"; # delete old uncompressed file
+ } else {
+ die "Internal error, unsopported cachemode, resourcetype couple\n";
+ }
+ # $cont now contains uncompressed data
+
 }
 if ($remove_headers) {
 $cont =~ s/<\?xml [^?]*\?>//sg;
 $cont =~ s/<!DOCTYPE [^>]*>//sg;
+ } else {
+ # rewrite absolute helm DTD SYSTEM ids so clients fetch DTDs via /getdtd
+ $cont =~ s/DOCTYPE (.*) SYSTEM\s+"http:\/\/www.cs.unibo.it\/helm\/dtd\//DOCTYPE $1 SYSTEM "$myownurl\/getdtd?uri=/g;
 }
 return $cont;
}
$c->send_response($res);
}
+# helm_wget: fetch $URL via HTTP and save the body to "$prefix/<basename>".
+# Used to retrieve the resource index from each server; relies on the
+# global $cont, which the shared &callback accumulates into.
+sub helm_wget {
+ my ($prefix, $URL) = @_;
+
+ $cont = ""; # filled as a side effect by &callback
+ my $agent = LWP::UserAgent->new;
+ my $req = HTTP::Request->new(GET => "$URL");
+ my $resp = $agent->request($req, \&callback);
+
+ # the last path component of the URL becomes the local file name
+ my $filename = (split "/", $URL)[-1];
+ open (TEMP, "> $prefix/$filename")
+ || die "Cannot open temporary file: $prefix/$filename\n";
+ print TEMP $cont;
+ close TEMP;
+}
+
sub update {
+# re-open the uri -> url map read-only so this process picks up a freshly
+# rebuilt urls_of_uris db (invoked directly and as the SIGUSR1 handler)
 untie %map;
 tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664);
}
+
+# mk_urls_of_uris: rebuild the $uris_dbm.db map (uri -> url) from scratch by
+# downloading the resource index of every server listed in $servers_file,
+# then re-tie the global read-only %map to the new db.
+sub mk_urls_of_uris {
+#rebuild $uris_dbm.db fetching resource indexes from servers
+ my (
+ $server, $idxfile, $uri, $url, $comp, $line,
+ @servers,
+ %urls_of_uris
+ );
+
+ untie %map;
+ if (stat $uris_dbm.".db") { # remove old db file
+ unlink($uris_dbm.".db") or
+ die "cannot unlink old db file: $uris_dbm.db\n";
+ }
+ tie(%urls_of_uris, 'DB_File', $uris_dbm.".db", O_RDWR|O_CREAT, 0664);
+
+ open (SRVS, "< $servers_file") or
+ die "cannot open servers file: $servers_file\n";
+ @servers = <SRVS>;
+ close (SRVS);
+ # pop walks the list bottom-up, so entries from earlier-listed servers
+ # overwrite later ones -- presumably earlier = higher priority (verify)
+ while ($server = pop @servers) { # cycle over servers in reverse order
+ print "processing server: $server ...\n";
+ chomp $server;
+ helm_wget($tmp_dir, $server."/".$indexname); # fetch index into $tmp_dir
+ $idxfile = $tmp_dir."/".$indexname;
+ open (INDEX, "< $idxfile") or
+ die "cannot open temporary index file: $idxfile\n";
+ while ($line = <INDEX>) { #parse index and add entry to urls_of_uris
+ chomp $line;
+ # each index line: "<uri> <compression-flag>" separated by whitespace
+ ($uri,$comp) = split /[ \t]+/, $line;
+ # build url:
+ if ($comp =~ /gz/) {
+ $url = $uri . ".xml" . ".gz";
+ } else {
+ $url = $uri . ".xml";
+ }
+ # replace the "cic:"/"theory:" scheme with the server base URL
+ $url =~ s/cic:/$server/;
+ $url =~ s/theory:/$server/;
+ $urls_of_uris{$uri} = $url;
+ }
+ close INDEX;
+ die "cannot unlink temporary file: $idxfile\n"
+ if (unlink $idxfile) != 1;
+ }
+
+ untie(%urls_of_uris);
+ tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664);
+}
+