# For details, see the HELM World-Wide-Web page,
# http://cs.unibo.it/helm/.
+#use strict;
+
my $VERSION = "@VERSION@";
+# various variables
+my ($HELM_LIB_PATH);
+my $cgi_dir = "@HELM_CGI_DIR@";
+my (%xml_map, %rdf_map, %xslt_map);
+
# First of all, let's load HELM configuration
use Env;
my $HELM_LIB_DIR = $ENV{"HELM_LIB_DIR"};
$HELM_LIB_PATH = $DEFAULT_HELM_LIB_DIR."/configuration.pl";
}
-# Let's override the configuration file
-$style_dir = $ENV{"HELM_STYLE_DIR"} if (defined ($ENV{"HELM_STYLE_DIR"}));
-$dtd_dir = $ENV{"HELM_DTD_DIR"} if (defined ($ENV{"HELM_DTD_DIR"}));
+# next require defines: $helm_dir, $html_link, $dtd_dir, $uris_dbm, $indexname
+require $HELM_LIB_PATH;
-# <ZACK>: TODO temporary, move this setting to configuration file
-# set the cache mode, may be "gzipped" or "normal"
-my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} || 'gzipped';
+# TEMP: TODO put these vars in configuration file configuration.xml
+# <move_to_conf_file>
+my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} ||
+ 'gzipped';
if (($cachemode ne 'gzipped') and ($cachemode ne 'normal')) {
die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be".
"'normal' or 'gzipped'\n";
}
-# </ZACK>
-# next require defines: $helm_dir, $html_link, $dtd_dir, $uris_dbm
-require $HELM_LIB_PATH;
+my $helm_rdf_dir = $ENV{'HTTP_GETTER_RDF_DIR'} ||
+ "/usr/local/helm/rdf_library";
+my $rdf_dbm = $ENV{'HTTP_GETTER_RDF_DBM'} ||
+ "/usr/local/helm/rdf_urls_of_uris.db";
+my $xslt_dbm = $ENV{'HTTP_GETTER_XSLT_DBM'} ||
+ "/usr/local/helm/xslt_urls_of_uris.db";
+my $rdf_indexname = $ENV{'HTTP_GETTER_RDF_INDEXNAME'} ||
+ "rdf_index.txt";
+my $xslt_indexname = $ENV{'HTTP_GETTER_XSLT_INDEXNAME'} ||
+ "xslt_index.txt";
+$servers_file = $ENV{'HTTP_GETTER_SERVERS_FILE'} || $servers_file;
+$uris_dbm = $ENV{'HTTP_GETTER_URIS_DBM'} || $uris_dbm;
+
+# </move_to_conf_file>
+
+# Let's override the configuration file
+$style_dir = $ENV{"HELM_STYLE_DIR"} if (defined ($ENV{"HELM_STYLE_DIR"}));
+$dtd_dir = $ENV{"HELM_DTD_DIR"} if (defined ($ENV{"HELM_DTD_DIR"}));
use HTTP::Daemon;
use HTTP::Status;
use LWP::UserAgent;
use DB_File;
use Compress::Zlib;
+use CGI;
+use URI;
+use URI::Escape;
#CSC: mancano i controlli sulle condizioni di errore di molte funzioni
#CSC: ==> non e' robusto
#CSC: altra roba da sistemare segnata con CSC
-my $d = new HTTP::Daemon LocalPort => 8081;
+# Create the listening daemon on the fixed getter port.
+my $d = new HTTP::Daemon LocalPort => 8081
+  or die "Error: port 8081 not available.";
my $myownurl = $d->url;
# Let's patch the returned URL
+# NOTE(review): gethostbyname() is called on the full URL string returned by
+# $d->url, and $myownport is not assigned anywhere in the visible hunks --
+# presumably both come from context lines omitted by the diff; verify
+# against the complete file.
($myownurl) = gethostbyname($myownurl);
$myownurl = "http://".$myownurl.":".$myownport;
-tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664);
+# Open the three persistent uri->url maps: xml documents, rdf metadata and
+# xslt stylesheets.
+# NOTE(review): the tie() return values are not checked -- a missing or
+# unreadable db file fails silently here; confirm update_dbs() is expected
+# to create them on first run.
+tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664); # open dbs
+tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664);
+tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664);
+
+# Startup banner: print the effective configuration to stdout.
+print "\n";
+print "HTTP Getter $VERSION\n"; # print hello information
print "Please contact me at: <URL:", $myownurl."/", ">\n";
+print "\n";
+print "servers_file: $servers_file\n";
print "helm_dir: $helm_dir\n";
+print "helm_rdf_dir: $helm_rdf_dir\n";
print "style_dir: $style_dir\n";
print "dtd_dir: $dtd_dir\n";
-print "urls_of_uris.db: $uris_dbm.db\n";
+print "urls_of_uris db: $uris_dbm\n";
+print "rdf db: $rdf_dbm\n";
+print "xslt db: $xslt_dbm\n";
print "cache mode: $cachemode\n";
+print "indexname: $indexname\n";
+print "rdf_indexname: $rdf_indexname\n";
+print "xslt_indexname: $xslt_indexname\n";
+print "\n";
$SIG{CHLD} = "IGNORE"; # do not accumulate defunct processes
$SIG{USR1} = \&update; # sent by the child to make the parent update
+
+# Main accept loop: one forked child per incoming connection.
while (my $c = $d->accept) {
+# NOTE(review): fork() returns undef on failure and "undef == 0" is true,
+# so a failed fork makes the *parent* run the child branch. Consider
+# "defined(my $pid = fork()) and $pid == 0" -- TODO confirm intent.
if (fork() == 0) {
while (my $r = $c->get_request) {
#CSC: mancano i controlli di sicurezza
+# (CSC comment above, translated: "security checks are missing")
my $http_method = $r->method;
my $http_path = $r->url->path;
+      # Decode the query string and pull the requested uri out of it via
+      # CGI.pm instead of the old hand-rolled regex.
+      my $http_query = uri_unescape($r->url->query);
+      my $cgi = new CGI("$http_query");
+      my $inputuri = $cgi->param('uri');
+      print "\nRequest: ".$r->url."\n\n";
- if ($http_method eq 'GET' and $http_path eq "/getciconly") {
- # finds the uri, url and filename
- my $cicuri = $inputuri;
-
- my $cicfilename = $cicuri;
- $cicfilename =~ s/cic:(.*)/$1/;
- $cicfilename =~ s/theory:(.*)/$1/;
-
- my $cicurl = $map{$cicuri};
- my $extension;
- if ($cicurl =~ /\.xml$/) { # non gzipped file
- $extension = ".xml";
- } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file
- $extension = ".xml.gz";
- } else { # error: unknown extension
- die "unexpected extension in url: $cicurl, might be '.xml'".
- "or '.xml.gz'";
- }
- $cicfilename = $helm_dir.$cicfilename.$extension;
+ print "\nUnescaped query: ".$http_query."\n";
- if (!defined($cicurl)) {
- print "\nNOT FOUND!!!!!\n";
- $c->send_error(RC_NOT_FOUND)
- } else {
- print_request("cic",$cicuri,$cicurl,$cicfilename);
-
- # Retrieves the file
- my $ciccontent = download(0,"cic",$cicurl,$cicfilename);
+      # "getxml" works with rdf uris
+      if ($http_method eq 'GET' and $http_path eq "/getxml") {
+        # finds the uri, url and filename
-        # Answering the client
-        answer($c,$ciccontent);
+        # Optional query params: format ("normal" | "gz", default "") and
+        # patch_dtd ("yes" | "no", default "yes").  Invalid values kill
+        # this child via die().
+        my $answerformat = $cgi->param('format');
+        my $patch_dtd = $cgi->param('patch_dtd');
+        $answerformat = "" if (not defined($answerformat));
+        $patch_dtd = "yes" if (not defined($patch_dtd));
+        if (($answerformat ne "gz") and ($answerformat ne "normal")
+            and ($answerformat ne "")) {
+          die "Wrong output format: $answerformat, must be 'normal' ".
+              "or 'gz'\n";
}
- } elsif ($http_method eq 'GET' and $http_path eq "/get") {
- # finds the uris, urls and filenames
- my $cicuri = $inputuri,
- $typesuri = $inputuri,
- $annuri = $inputuri;
- my $annsuffix;
- if ($inputuri =~ /\.types$/) {
- $cicuri =~ s/(.*)\.types$/$1/;
- undef($annuri);
- } elsif ($inputuri =~ /\.types\.ann$/) {
- $cicuri =~ s/(.*)\.types\.ann$/$1/;
- $typesuri =~ s/(.*)\.ann$/$1/;
- $annsuffix = ".types.ann";
- } elsif ($inputuri =~ /\.ann$/) {
- $cicuri =~ s/(.*)\.ann$/$1/;
- undef($typesuri);
- $annsuffix = ".ann";
- } else {
- undef($typesuri);
- undef($annuri);
+ if (($patch_dtd ne "yes") and ($patch_dtd ne "no")
+ and ($patch_dtd ne "")) {
+ die "Wrong param, patch_dtd must be 'yes' or 'no'\n";
}
- my $cicfilename = $cicuri;
- $cicfilename =~ s/cic:(.*)/$1/;
- $cicfilename =~ s/theory:(.*)/$1/;
- $cicfilename = $helm_dir.$cicfilename;
-
- my $cicurl = $map{$cicuri};
- my $typesurl = $map{$typesuri} if (defined($typesuri));
- my $annurl = $map{$annuri} if (defined($annuri));
- my ($cicext, $typesext, $annext);
- if ($cicurl =~ /\.xml$/) { # normal file
- $cicext = ".xml";
- } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file
- $cicext = ".xml.gz";
- } else {
- die "unexpected extension in url: $cicurl;".
- "might be '.xml' or '.xml.gz'";
- }
- if (defined($typesuri)) { # extension selection for types file
- if ($typesurl =~ /\.xml$/) { # normal file
- $typesext = ".types.xml";
- } elsif ($typesurl =~ /\.xml\.gz$/) { # gzipped file
- $typesext = ".types.xml.gz";
- } else {
- die "unexpected extension in url: $typesurl;".
- "might be '.xml' or '.xml.gz'";
- }
- }
- if (defined($annuri)) { # extension selection for annotation file
- if ($annurl =~ /\.xml$/) { # normal file
- $annext = ".xml";
- } elsif ($annurl =~ /\.xml\.gz$/) { # gzipped file
- $annext = ".xml.gz";
- } else {
- die "unexpected extension in url: $annurl".
- "might be '.xml' or '.xml.gz'";
- }
+        # Map the uri onto a cache file name.
+        my $filename = $inputuri;
+        my $prefix;
+        if (not isRdfUri($inputuri)) { # standard cic: or theory: uri
+          $filename =~ s/^cic:(.*)/$1/;
+          $filename =~ s/^theory:(.*)/$1/;
+        } else { # rdf uri
+          # The "helm:rdf/<<...>>//cic:" prefix is kept, but quoted
+          # (so that a "helm:rdf/<<...>>//cic:" dir is created)
+          $prefix = $filename;
+          $filename =~ s/^(.*)\/\/cic:(.*)/$2/;
+          $filename =~ s/^(.*)\/\/theory:(.*)/$2/;
+          $prefix =~ s/\Q$filename\E//;
+          # quote: "_" -> "__" first, then "/" -> "_" (order matters for
+          # the quoting to stay unambiguous)
+          $prefix =~ s/_/__/g;
+          $prefix =~ s/\//_/g;
+          $filename = $prefix.$filename;
}
- my $typesfilename = $cicfilename.$typesext if $typesuri;
- my $annfilename = $cicfilename.$annsuffix.$annext if $annuri;
- $cicfilename .= $cicext;
-
- if (!defined($cicurl) ||
- (!defined($typesurl) && $typesuri) ||
- (!defined($annuri) && $annuri))
- {
- print "\nNOT FOUND!!!!!\n";
- $c->send_error(RC_NOT_FOUND)
- } else {
- print_request("cic",$cicuri,$cicurl,$cicfilename);
- print_request("types",$typesuri,$typesurl,$typesfilename)
- if ($typesuri);
- print_request("ann",$annuri,$annurl,$annfilename)
- if ($annuri);
-
- # Retrieves the files
- my $ciccontent = download(1,"cic",$cicurl,$cicfilename);
- my $typescontent =
- download(1,"types",$typesurl,$typesfilename) if ($typesuri);
- my $anncontent =
- download(1,"ann",$annurl,$annfilename) if ($annuri);
-
- # Merging the files together
-
- my $merged = <<EOT;
-<?xml version="1.0" encoding="UTF-8"?>
-<cicxml uri="$cicuri">
-$ciccontent
-$typescontent
-$anncontent
-</cicxml>
-EOT
-
- # Answering the client
- answer($c,$merged);
+          my $url = resolve ($inputuri); # resolve uri in url
+          if (not defined($url)) { # uri not found in uri2url map
+            # BUGFIX: this branch used to die() *before* send_error(), so
+            # the statement was unreachable and the client never received
+            # the 404 -- the child just dropped the connection.  Log and
+            # answer RC_NOT_FOUND instead (matches the old /getciconly
+            # behaviour).
+            print "NOT FOUND: uri \"$inputuri\" can't be resolved\n";
+            $c->send_error(RC_NOT_FOUND)
+          } else { # uri found and mapped to url
+            my $extension; # file extension
+            if ($url =~ /\.xml$/) { # non gzipped file
+              $extension = ".xml";
+            } elsif ($url =~ /\.xml\.gz$/) { # gzipped file
+              $extension = ".xml.gz";
+            } else { # error: unknown extension
+              die "unexpected extension in url: $url, might be '.xml'".
+                  "or '.xml.gz'";
+            }
+
+            if (not isRdfUri ($inputuri)) { # save in uri std cache dir
+              $filename = $helm_dir.$filename.$extension;
+            } else { # save in rdf metadata cache dir
+              $filename = $helm_rdf_dir."/".$filename.$extension;
+            }
+
+            print_request($inputuri,$url,$filename);
+
+            # Retrieves the file
+            my $ciccontent = download($patch_dtd,$url,$filename,$answerformat);
+
+            if ($answerformat eq "normal") { # answer the client in text/xml
+              answer($c,$ciccontent,"text/xml","");
+            } else { # answer the client in text/xml, gzip encoding
+              answer($c,$ciccontent,"text/xml","x-gzip");
+            }
+          }
- } elsif ($http_method eq 'GET' and $http_path eq "/getdtd") {
+
+      # "/register" does not work with rdf uris
+      } elsif ($http_method eq 'GET' and $http_path eq "/register") {
+        # Map uri -> url persistently and invalidate any cached copy.
+        my $inputurl = $cgi->param('url');
+        print "Register requested...\n";
+        $xml_map{$inputuri}=$inputurl;
+
+        # Now let's clean the cache
+        my $cicfilename = $inputuri;
+        $cicfilename =~ s/cic:(.*)/$1/;
+        $cicfilename =~ s/theory:(.*)/$1/;
+
+        print "Unlinking ".$helm_dir.$cicfilename.".xml[.gz]\n";
+        unlink ($helm_dir.$cicfilename.".xml");
+        unlink ($helm_dir.$cicfilename.".xml.gz");
+
+        # NOTE(review): bareword USR1 only works because "use strict" is
+        # disabled; the quoted form 'USR1' would be safer.
+        kill(USR1,getppid()); # signal changes to parent
+        # NOTE(review): %xml_map is untied here but never re-tied in this
+        # child, so further requests on the same keep-alive connection see
+        # an untied hash until the USR1-driven update() runs -- confirm
+        # this is intended.
+        untie %xml_map;
+        print "done.\n";
+        html_nice_answer($c,"Register done");
+
+      # "/resolve" works with rdf uri
+      } elsif ($http_method eq 'GET' and $http_path eq "/resolve") {
+        #my $outputurl = $xml_map{$inputuri};
+        my $outputurl = resolve($inputuri);
+        $outputurl = "" if (not defined($outputurl));
+        # Answer <url value="..."/> or <unresolved/> as a tiny xml document.
+        $cont = "<?xml version=\"1.0\" ?>\n\n";
+        if ($outputurl eq "") {
+          $cont .= "<unresolved />\n";
+        } else {
+          $cont .= "<url value=\"$outputurl\" />\n";
+        }
+        answer($c,$cont,"text/xml","");
+
+ } elsif ($http_method eq 'GET' and $http_path eq "/getdtd") {
+
my $filename = $inputuri;
$filename = $dtd_dir."/".$filename;
print "DTD: $inputuri ==> ($filename)\n";
$cont .= $_;
}
close(FD);
- answer($c,$cont);
+ answer($c,$cont,"text/xml","");
} else {
die "Could not find DTD!";
}
- } elsif ($http_method eq 'GET' and $http_path eq "/getstyleconf") {
- my $filename = $inputuri;
- $filename = $style_dir."/config/".$filename;
- if (stat($filename)) {
- open(FD, $filename) or die "Cannot open $filename\n";
- $cont = "";
- while(<FD>) {
- s/DOCTYPE (.*) SYSTEM\s+"/DOCTYPE $1 SYSTEM "$myownurl\/getstyleconf?uri=/g;
- $cont .= $_;
- }
- close(FD);
- answer($c,$cont);
- } else {
- die "Could not find Style Configuration File!";
- }
+
} elsif ($http_method eq 'GET' and $http_path eq "/getxslt") {
- my $filename = $inputuri;
- $filename = $style_dir."/".$filename;
- print "XSLT: $inputuri ==> ($filename)\n";
- if (stat($filename)) {
- print "Using local copy\n";
- open(FD, $filename) or die "Cannot open $filename\n";
- $cont = "";
- while(<FD>) {
- # Vary bad heuristic for substituion of absolute URLs
- # for relative ones
- s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ;
- s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ;
- $cont .= $_;
- }
- close(FD);
- answer($c,$cont);
- } else {
- die "Could not find XSLT!";
- }
- } elsif ($http_method eq 'GET' and $http_path eq "/conf") {
- my $quoted_html_link = $html_link;
- $quoted_html_link =~ s/&/&/g;
- $quoted_html_link =~ s/</</g;
- $quoted_html_link =~ s/>/>/g;
- $quoted_html_link =~ s/'/'/g;
- $quoted_html_link =~ s/"/"/g;
- print "\nConfiguration requested, returned #$quoted_html_link#\n";
- $cont = "<?xml version=\"1.0\"?><html_link>$quoted_html_link</html_link>";
- answer($c,$cont);
+
+        # NOTE(review): $url is a package global here (no "my", strict is
+        # off); confirm no interference with other users of $url.
+        $url = $xslt_map{$inputuri};
+        print "Downloading XSLT ...\n";
+        helm_wget($tmp_dir, $url);
+        print "XSLT downloaded!\n";
+        # keep only the basename of the uri for the temp file name
+        $inputuri =~ s/.*\///;
+        my $filename = $tmp_dir . "/" . $inputuri;
+        my $cont;
+        die "Could not find XSLT!\n" unless stat($filename);
+        open(FD, $filename) or die "Can't open xslt local copy: '$filename'\n";
+        while(<FD>) {
+          # rewrite import/include hrefs so that nested stylesheets are
+          # fetched back through this getter
+          s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g;
+          s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g;
+          $cont .= $_;
+        }
+        close(FD);
+        answer($c, $cont, "text/xml", "");
+        unlink $filename or
+          print "Can't unlink XSLT temp file '$filename'";
+
+# <old_version>
+# my $filename = $inputuri;
+# $filename = $style_dir."/".$filename;
+# print "XSLT: $inputuri ==> ($filename)\n";
+# if (stat($filename)) {
+# print "Using local copy\n";
+# open(FD, $filename) or die "Cannot open $filename\n";
+# $cont = "";
+# while(<FD>) {
+# # Vary bad heuristic for substituion of absolute URLs
+# # for relative ones
+# s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ;
+# s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ;
+# $cont .= $_;
+# }
+# close(FD);
+# answer($c,$cont,"text/xml","");
+# } else {
+# die "Could not find XSLT!";
+# }
+# </old_version>
+
+      # "/update" works with rdf uri
} elsif ($http_method eq 'GET' and $http_path eq "/update") {
-        print "Update requested...";
-        update();
-        kill(USR1,getppid());
+        # rebuild urls_of_uris db and rdf uris db
+        print "Update requested...\n";
+        update_dbs();
+        kill(USR1,getppid()); # signal changes to parent
print " done\n";
-        answer($c,"<html><body><h1>Update done</h1></body></html>");
-      } elsif ($http_method eq 'GET' and $http_path eq "/version") {
-        print "Version requested!";
-        answer($c,"<html><body><h1>HTTP Getter Version ".$VERSION."</h1></body></html>");
-      } else {
-        print "\nINVALID REQUEST!!!!!\n";
+        html_nice_answer($c,"Update done");
+
+      # "/getalluris" does not work with rdf uris
+      } elsif ($http_method eq 'GET' and $http_path eq "/getalluris") {
+        # send back all the keys in xml
+        # BUGFIX: the old log line interpolated $baseuri and $outype, which
+        # are declared only in the later "/ls" branch and are always
+        # uninitialized here.
+        print "getalluris request\n";
+        $cont = getalluris();
+        answer($c,$cont,"text/xml","");
+
+      # "/getallrdfuris"
+      } elsif ($http_method eq 'GET' and $http_path eq "/getallrdfuris") {
+        # send back all the keys in xml
+        my $class = $cgi->param('class');
+        # BUGFIX: same uninitialized $baseuri/$outype interpolation as above.
+        print "getallrdfuris request, CLASS $class\n";
+        $cont = getallrdfuris($class);
+        answer($c,$cont,"text/xml","");
+
+      } elsif ($http_method eq 'GET' and $http_path eq "/ls") {
+        # send back keys that begin with a given uri
+        my ($uritype,$uripattern,$uriprefix);
+        my $baseuri = $cgi->param('baseuri');
+        chop $baseuri if ($baseuri =~ /.*\/$/); # remove trailing "/"
+        # output type, might be 'txt' or 'xml':
+        my $outype = $cgi->param('format');
+        # strip the "cic:"/"theory:" scheme, keep the path part
+        $uripattern = $baseuri;
+        $uripattern =~ s/^.*:(.*)/$1/;
+        if ($baseuri =~ /^cic:/) {
+          $uritype = "cic";
+        } elsif ($baseuri =~ /^theory:/) {
+          $uritype = "theory";
+        } else {
+          $uritype = "invalid";
+        }
+        if ($uritype ne "invalid") { # uri is valid
+          if (($outype ne 'txt') and ($outype ne 'xml')) { # invalid out type
+            print "Invalid output type specified: $outype\n";
+            html_nice_answer($c,"Invalid output type, must be ".
+              "'txt' or 'xml'");
+          } else { # valid output
+            print "BASEURI $baseuri, FORMAT $outype\n";
+            $cont = finduris($uritype,$uripattern,$outype);
+            if ($outype eq 'txt') {
+              answer($c,$cont,"text/plain","");
+            } elsif ($outype eq 'xml') {
+              answer($c,$cont,"text/xml","");
+            } else {
+              die "Internal error, exiting!";
+            }
+          }
+        } else { # invalid uri
+          print "Invalid uri: $baseuri, may begin with 'cic:', ".
+            "'theory:' or '*:'\n";
+          html_nice_answer($c,"Invalid uri , must begin with ".
+            "'cic:' or 'theory:'");
+        }
+
+      } elsif ($http_method eq 'GET' and $http_path eq "/help") {
+        # help request
+        print "Help requested!\n";
+        html_nice_answer($c,"HTTP Getter Version: $VERSION");
+
+      } elsif ($http_method eq 'GET' and $http_path eq "/getempty") {
+        # return an empty, but valid, xml document
+        my $emptyxml = <<EOXML;
+<?xml version="1.0"?>
+<!DOCTYPE empty [
+  <!ELEMENT empty EMPTY>
+]>
+<empty />
+EOXML
+        answer($c, $emptyxml, "", "");
+
+      } elsif ($http_method eq 'GET' and $http_path =~ /\.cgi$/) {
+        # cgi handling
+        print "CGI requested!\n";
+        # SECURITY FIX: $http_path is interpolated into backticks below, so
+        # it must contain only shell-safe characters.  The old check
+        # /^\/[^\/]*\.cgi$/ forbade slashes but let shell metacharacters
+        # (e.g. "/x;rm -rf ~.cgi") straight through to the shell.
+        if ($http_path !~ /^\/[\w.-]+\.cgi$/ or $http_path =~ /\.\./) {
+          html_nice_answer($c,"Invalid CGI name: $http_path, ".
+          "you can't request CGI that contain a slash in the name\n");
+        } elsif (stat "$cgi_dir"."$http_path") {
+          if (not -x "$cgi_dir/$http_path") {
+            html_nice_answer($c,"CGI $http_path found but not ".
+            "executable\n");
+          } else { # exec CGI and anser back its output
+            # run the CGI with a minimal environment, CGI-style query in
+            # QUERY_STRING, then restore our own environment
+            my %old_ENV = %ENV;
+            %ENV = ();
+            $ENV{'QUERY_STRING'} = $http_query;
+            my $cgi_output = `$cgi_dir/$http_path`;
+            answer($c,$cgi_output,"","");
+            %ENV = %old_ENV;
+          }
+        } else {
+          html_nice_answer($c,"CGI '$http_path' not found ".
+          "in CGI dir '$cgi_dir'");
+        }
+
+      } else { # unsupported request
+        print "\n";
+        print "INVALID REQUEST!!!!!\n";
+        print "(PATH: ",$http_path,", ";
+        print "QUERY: ",$http_query,")\n";
$c->send_error(RC_FORBIDDEN)
}
+
print "\nRequest solved: ".$r->url."\n\n";
}
+
+# NOTE(review): no explicit exit(0) for the forked child is visible in this
+# hunk -- presumably it follows in context lines omitted by the diff;
+# without it the child would fall back into the accept loop. Verify.
$c->close;
undef($c);
print "\nCONNECTION CLOSED\n\n";
#================================
+sub isRdfUri { # return true if the uri is an rdf uri, false otherwise
+# typical rdf uri:
+# helm:rdf/cic:www.cs.unibo.it/helm/rdf/foo_schema//cic:\
+# /Coq/Init/Logic/True_rec.con.types
+#
+# the format is "helm:rdf/<metadata_tree>:<metadata_scheme>//<xml_file_uri>"
+#
+  my ($uri) = @_;
+  # the pattern is unanchored at the end: anything starting with
+  # "helm:rdf" and containing a ":" followed later by "//" matches
+  if ($uri =~ /^helm:rdf(.*):(.*)\/\/(.*)/) {
+    return 1;
+  } else {
+    return 0;
+  }
+}
+
+sub resolve { # resolve an uri in a url, work both with standard cic: or theory:
+  # uris and rdf uris
+  # Returns the url from the rdf db for rdf uris, from the xml db
+  # otherwise; returns undef when the uri is unknown.
+  # NOTE(review): the per-request debug prints below are very chatty for a
+  # routine called on every request -- consider a $debug guard.
+  print "RESOLVE subroutine\n";
+  my ($uri) = @_;
+  print "GIVEN URI: \"$uri\"\n";
+  if (isRdfUri ($uri)) { # rdf uri, resolve using rdf db
+    print "IS A RDF URI\n";
+    print "I WILL RETURN '$rdf_map{$uri}'\n";
+    return ($rdf_map{$uri});
+  } else { # standard cic: or theory: uri, resolve using std uri db
+    print "IS NOT A RDF URI\n";
+    print "I WILL RETURN '$xml_map{$uri}'\n";
+    return ($xml_map{$uri});
+  }
+}
+
+sub getalluris { # get all the keys whose prefix is cic
+# Returns an xml <alluris> document listing every cic: uri in %xml_map,
+# excluding ".types" entries.
+  my $content = "";
+  my ($uri);
+  my $debug=1; # for debug
+
+  $content .= '<?xml version="1.0" encoding="ISO-8859-1"?>' . "\n";
+  $content .= "<!DOCTYPE alluris SYSTEM ";
+  $content .= "\"$myownurl/getdtd?uri=alluris.dtd\">" . "\n\n";
+  $content .= "<alluris>\n";
+  foreach $uri (sort (keys(%xml_map))) { # select matching uris
+    # BUGFIX: the dot was unescaped (/.types$/), which wrongly excluded any
+    # uri merely ending in "types" (the "." matched an arbitrary character).
+    if ($uri =~ /^cic:/ && not $uri =~ /\.types$/) {
+      print "GETALLURI: $uri\n" if defined($debug);
+      $content .= "\t<uri value=\"$uri\"/>\n";
+    }
+  }
+  $content .= "</alluris>\n";
+  return $content;
+}
+
+sub getallrdfuris {
+# Returns an xml <allrdfuris> document listing the rdf uris of the given
+# class ("forward" or "backward"); any other class yields an empty list.
+  my $class = $_[0];
+  my $content = "";
+  my ($uri);
+  my $debug=1; # for debug
+
+  $content .= '<?xml version="1.0" encoding="ISO-8859-1"?>' . "\n";
+  $content .= "<!DOCTYPE allrdfuris SYSTEM ";
+  $content .= "\"$myownurl/getdtd?uri=alluris.dtd\">" . "\n\n";
+  $content .= "<allrdfuris>\n";
+  foreach $uri (sort (keys(%rdf_map))) {
+    # && binds tighter than ||, so this groups as
+    # (forward && match) || (backward && match), as intended.
+    # NOTE(review): the dots in the host name are unescaped, so they match
+    # any character -- harmless here but worth tightening.
+    if ($class eq "forward" &&
+        $uri =~ /^helm:rdf:www.cs.unibo.it\/helm\/rdf\/forward/
+          ||
+        $class eq "backward" &&
+        $uri =~ /^helm:rdf:www.cs.unibo.it\/helm\/rdf\/backward/) {
+      print "GETALLRDFURI: $uri\n" if defined($debug);
+      $content .= "\t<uri value=\"$uri\"/>\n";
+    }
+  }
+  $content .= "</allrdfuris>\n";
+  return $content;
+}
+
+sub finduris { # find uris for cic and theory trees generation
+# Scans %xml_map for entries under "$uritype:$uripattern" and renders the
+# matching sub-directories and objects (with .types/.ann flags) either as
+# plain text ("txt") or as an xml <ls> document ("xml").
+  my ($uritype,$uripattern,$format) = @_;
+  my $content = "";
+  my ($uri,$localpart,$basepart,$dirname,$suffix,$flags,$key);
+  my (@itemz,@already_pushed_dir);
+  my (%objects,%dirs); # map uris to suffixes' flags
+  my $debug=1; # for debug
+
+  print "FINDURIS, uritype: $uritype, uripattern: $uripattern, ".
+        "format: $format\n\n" if defined($debug);
+
+  if (($uritype eq "cic") or ($uritype eq "theory")) {
+    # get info only of one type: cic or theory
+    foreach (keys(%xml_map)) { # select matching uris
+      $uri = $_;
+      # SECURITY/ROBUSTNESS FIX: $uripattern comes from the client's
+      # "baseuri" query parameter; quote it with \Q...\E so regex
+      # metacharacters in the request cannot break (or abuse) the match.
+      if ($uri =~ /^$uritype:\Q$uripattern\E(\/|$|\.)/) {
+        if ($uri =~ /^$uritype:\Q$uripattern\E\//) { # directory match
+          $localpart = $uri;
+          $localpart =~ s/^$uritype:\Q$uripattern\E\/(.*)/$1/;
+        } elsif ($uri =~ /^$uritype:\Q$uripattern\E($|\.)/) { # file match
+          $localpart = $uri;
+          $localpart =~ s/^.*\/([^\/]*)/$1/;
+        } else {
+          die "Internal error, seems that requested match is none of ".
+              "directory match or file match";
+        }
+        print "LOCALPART: $localpart\n" if defined($debug);
+
+        if ($localpart =~ /^[^\/]*$/) { # no slash, an OBJECT
+          $basepart = $localpart;
+          $basepart =~ s/^([^.]*\.[^.]*)(\.types)?(\.ann)?/$1/;
+                                                # remove exts .types or
+                                                # .types.ann
+          $flags = $objects{$basepart}; # get old flags
+          if ($localpart =~ /\.ann$/) {
+            $flags = add_flag("ann","YES",$flags);
+          } else {
+            $flags = add_flag("ann","NO",$flags);
+          }
+          if ($localpart =~ /\.types$/) {
+            $flags = add_flag("types","YES",$flags);
+          } elsif ($localpart =~ /\.types\.ann$/) {
+            $flags = add_flag("types","ANN",$flags);
+          } else {
+            $flags = add_flag("types","NO",$flags);
+          }
+          $objects{$basepart} = $flags; # save new flags
+        } else { # exists at least one slash, a DIR
+          ($dirname) = split (/\//, $localpart);
+          $dirs{$dirname} = ""; # no flags requirement for dir
+        }
+      }
+    }
+  } else {
+    die "getter internal error: unsupported uritype: \"$uritype\"";
+  }
+  # now generate OUTPUT:
+  # output will be saved in $content
+  if ($format eq "txt") { # TXT output
+    foreach $key (sort(keys %dirs)) {
+      $content .= "dir, " . $key . "\n";
+    }
+    foreach $key (sort(keys %objects)) {
+      $content .= "object, $key, " . $objects{$key} . "\n";
+    }
+  } elsif ($format eq "xml") { # XML output
+    $content .= '<?xml version="1.0" encoding="ISO-8859-1"?>' . "\n";
+    $content .= "<!DOCTYPE ls SYSTEM ";
+    $content .= "\"$myownurl/getdtd?uri=ls.dtd\">" . "\n\n";
+    $content .= "<ls>\n";
+    foreach $key (sort(keys %dirs)) {
+      $content .= "\t<section>$key</section>\n";
+    }
+    foreach $key (sort(keys %objects)) {
+      $content .= "\t<object name=\"$key\">\n";
+      $flags = $objects{$key};
+      $flags =~ s/^<(.*)>$/$1/;
+      my ($annflag,$typesflag) = split /,/,$flags;
+      $content .= "\t\t<ann value=\"$annflag\" />\n";
+      $content .= "\t\t<types value=\"$typesflag\" />\n";
+      $content .= "\t</object>\n";
+    }
+    $content .= "</ls>\n";
+  } else { # may not enter this branch
+    die "Getter internal error: invalid format received by finduris sub";
+  }
+  return $content;
+}
+
+sub add_flag {
+# manage string like: "<ann_flag,type_flag>"
+# "ann_flag" may be one of "ann_YES", "ann_NO"
+# "type_flag" may be one of "types_NO", "types_YES", "types_ANN"
+# when adding a flag the max between the current flag and the new flag
+# is taken, the orders are ann_NO < ann_YES and types_NO < types_YES <
+# types_ANN
+  my ($flagtype,$newflag,$str) = @_;
+  # an empty flag string starts from the neutral "<,>" pair
+  $str = "<,>" if ($str eq "");
+  ($str =~ s/^<(.*,.*)>$/$1/) or die "Internal error: ".
+    "wrong string format for flag adding in $str";
+  my ($annflag,$typeflag) = split /,/,$str;
+  if ($flagtype eq "ann") { # ANN flag handling
+    if ($newflag eq "YES") {
+      $annflag = "YES";
+    } elsif ($newflag eq "NO") {
+      $annflag = "NO" unless ($annflag eq "YES");
+    } else {
+      die "Internal error: annflag must be \"YES\" or \"NO\"";
+    }
+  } elsif ($flagtype eq "types") { # TYPES flag handling
+    if ($newflag eq "ANN") {
+      $typeflag = "ANN";
+    } elsif ($newflag eq "YES") {
+      $typeflag = "YES" unless ($typeflag eq "ANN");
+    } elsif ($newflag eq "NO") {
+      $typeflag = "NO"
+        unless (($typeflag eq "ANN") or ($typeflag eq "YES"));
+    } else {
+      die "Internal error: typeflag must be \"YES\", \"NO\" or \"ANN\"";
+    }
+  } else {
+    die "Internal error: unsupported flagtype \"$flagtype\"";
+  }
+  # NOTE(review): the sub relies on the value of this assignment being the
+  # implicit return value; an explicit "return" would be clearer.
+  $str = "<$annflag,$typeflag>";
+}
#CSC: Too much powerful: creates even /home, /home/users/, ...
#CSC: Does not raise errors if could not create dirs/files
sub print_request
{
-  my ($str,$uri,$url,$filename) = @_;
-  print $str."uri: $uri\n";
-  print $str."url: $url\n";
-  print $str."filename: $filename\n\n";
-}
-
-sub callback
-{
- my ($data) = @_;
- $cont .= $data;
+  # Log one download request (uri, resolved url, cache file) to stdout.
+  my ($uri,$url,$filename) = @_;
+  print "<download_request>\n";
+  print "uri: $uri\n";
+  print "url: $url\n";
+  print "filename: $filename\n\n";
+  print "</download_request>\n";
}
sub gunzip { # gunzip a file and return the deflated content
sub gzip { # gzip the content argument and save it to filename argument
my ($cont, $filename) = @_;
+  # NOTE(review): $debug below is the (normally undefined) global, since the
+  # local "my $debug" is commented out -- the trace prints are off by default.
+  #my $debug=1; # debug only
-  my ($gz, $cont);
-
-  $gz = gzopen($filename, "w")
+  print "gzopening $filename ...\n" if (defined($debug));
+  my $gz = gzopen($filename, "w")
or die "Cannot gzopen for writing file $filename: $gzerrno";
+  print "gzwriting ...\n" if (defined($debug));
+  $gz->gzwrite($cont) or die "error writing: $gzerrno, exiting!\n";
+  print "gzclosing ...\n" if (defined($debug));
$gz->gzclose();
}
-sub download
-{
- my ($remove_headers,$str,$url,$filename) = @_;
+sub download {
+ my ($patch_dtd,$url,$filename,$format) = @_;
my ($gz, $buffer);
+# print "DOWNLOAD subs receives url: \"$url\"\n";
+# print "DOWNLOAD subs receives filename: \"$filename\"\n";
+
my $resourcetype; # retrieve mode: "normal" (.xml) or "gzipped" (.xml.gz)
if ($filename =~ /\.xml$/) { # set retrieve mode
$resourcetype = "normal";
}
if ($localfname ne "") { # we already have local copy of requested file
# check both possible cache entry: gzipped or normal
- print "Using local copy for the $str file\n";
+ print "Using local copy.\n";
if ($localfname =~ /\.xml\.gz$/) { # deflating cached file and return it
$cont = gunzip($localfname);
} elsif ($localfname =~ /\.xml$/) { # just return cached file
."must end with '.gz' or '.xml.gz'\n";
}
} else { # download file from net
- print "Downloading the $str file\n"; # download file
- $ua = LWP::UserAgent->new;
- $request = HTTP::Request->new(GET => "$url");
- $response = $ua->request($request, \&callback);
-
- # cache retrieved file to disk
-# <ZACK/> TODO: inefficent, I haven't yet undestood how to deflate
-# in memory gzipped file, without call "gzopen"
- print "Storing the $str file\n";
- mkdirs($filename);
- open(FD, ">".$filename.".tmp") or die "Cannot open $filename.tmp\n";
- print FD $cont;
- close(FD);
-
- # handle cache conversion normal->gzipped or gzipped->normal as user choice
- if (($cachemode eq 'normal') and ($resourcetype eq 'normal')) {
- # cache the file as is
- rename "$filename.tmp", $filename;
- } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'gzipped')) {
- # cache the file as is
- # and update the $cont variabile with deflated content
- rename "$filename.tmp", $filename;
- $cont = gunzip($filename);
- } elsif (($cachemode eq 'normal') and ($resourcetype eq 'gzipped')) {
- # deflate cache entry
- # and update $cont
- open(FD, "> $basefname") or die "cannot open $basefname\n";
- $cont = gunzip($filename.".tmp");
- print FD $cont;
- close(FD);
- unlink "$filename.tmp"; # delete old gzipped file
- } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'normal')) {
- # compress cache entry
- gzip($cont, $basefname.".gz");
- unlink "$filename.tmp"; # delete old uncompressed file
- } else {
- die "Internal error, unsopported cachemode, resourcetype couple\n";
- }
- # $cont now contained uncompressed data
+ print "Downloading file ...\n"; # download file
+ my $ua = LWP::UserAgent->new;
+ my $request = HTTP::Request->new(GET => "$url");
+ my $response = $ua->request($request, \&callback);
+
+ # cache retrieved file to disk
+ # <ZACK/> TODO: inefficent, I haven't yet undestood how to deflate
+ # in memory gzipped file, without call "gzopen"
+ print "Storing file ...\n";
+# print "Making dirs ...\n";
+ mkdirs($filename);
+# print "Opening tmp file for writing ...\n";
+ open(FD, ">".$filename.".tmp") or die "Cannot open $filename.tmp\n";
+# print "Writing on tmp file ...\n";
+ print FD $cont;
+# print "Closing tmp file ...\n";
+ close(FD);
+ # handle cache conversion normal->gzipped or gzipped->normal as user choice
+ print "cachemode:$cachemode, resourcetype:$resourcetype\n"
+ if (defined($debug));
+ if (($cachemode eq 'normal') and ($resourcetype eq 'normal')) {
+ # cache the file as is
+ rename "$filename.tmp", $filename;
+ } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'gzipped')) {
+ # cache the file as is
+ # and update the $cont variabile with deflated content
+ rename "$filename.tmp", $filename;
+ $cont = gunzip($filename);
+ } elsif (($cachemode eq 'normal') and ($resourcetype eq 'gzipped')) {
+ # deflate cache entry
+ # and update $cont
+ open(FD, "> $basefname") or die "cannot open $basefname\n";
+ $cont = gunzip($filename.".tmp");
+ print FD $cont;
+ close(FD);
+ unlink "$filename.tmp"; # delete old gzipped file
+ } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'normal')) {
+ # compress cache entry
+ print "gzipping ...\n" if (defined($debug));
+ gzip($cont, $basefname.".gz");
+ unlink "$filename.tmp"; # delete old uncompressed file
+ } else {
+ die "Internal error, unsopported cachemode, resourcetype couple\n";
+ }
+ # $cont now contained uncompressed data
}
- if ($remove_headers) {
- $cont =~ s/<\?xml [^?]*\?>//sg;
- $cont =~ s/<!DOCTYPE [^>]*>//sg;
+ if ($patch_dtd eq "yes") {
+ $cont =~ s/DOCTYPE (.*) SYSTEM\s+"http:\/\/www.cs.unibo.it\/helm\/dtd\//DOCTYPE $1 SYSTEM "$myownurl\/getdtd?uri=/g;
}
+  # When the client asked for gz output, re-compress $cont via a temp file.
+  if ($format eq "gz") {
+    gzip($cont,"$basefname.tmp");
+    # BUGFIX: the error message named "$filename.tmp" while the file being
+    # opened is "$basefname.tmp" -- misleading when diagnosing failures.
+    open (TMP, "< $basefname.tmp")
+      or die "Can't open tempfile: $basefname.tmp, exiting!\n";
+    $cont = "";
+    while(<TMP>) {
+      $cont .= $_;
+    }
+    close TMP;
+    unlink ($basefname . ".tmp") or
+      die "cannot unlink temporary file: $basefname.tmp\n";
+  }
+
return $cont;
}
-sub answer
-{
- my ($c,$cont) = @_;
+sub answer {
+# answer the client setting content, Content-Type and Content-Encoding
+# of the answer; empty $contype/$contenc omit the corresponding header.
+# Cache-Control/Pragma/Expires always disable client-side caching.
+  my ($c,$cont,$contype,$contenc) = @_;
my $res = new HTTP::Response;
$res->content($cont);
+  $res->push_header("Content-Type" => $contype)
+    unless ($contype eq "");
+  $res->push_header("Content-Encoding" => $contenc)
+    unless ($contenc eq "");
+  $res->push_header("Cache-Control" => "no-cache");
+  $res->push_header("Pragma" => "no-cache");
+  $res->push_header("Expires" => "0");
$c->send_response($res);
}
+sub html_nice_answer {
+# answer the client with a nice html document wrapping $content in an <h1>
+  my ($c,$content) = @_;
+  $content = "<html><body><h1>$content</h1></body></html>";
+  answer($c,$content,"text/html","");
+}
+
+sub callback {
+# LWP chunk callback: accumulates downloaded data into the global $cont
+# (callers must reset $cont before starting a request)
+  my ($data) = @_;
+  $cont .= $data;
+}
+
+sub helm_wget {
+# retrieve a file from an url and write it to a temp dir
+# used for retrieve resource index from servers
+  $cont = ""; # reset the global accumulator filled by callback()
+  my ($prefix, $URL) = @_;
+  my $ua = LWP::UserAgent->new;
+  my $request = HTTP::Request->new(GET => "$URL");
+  my $response = $ua->request($request, \&callback);
+  # ROBUSTNESS: $response used to be ignored, so a dead server silently
+  # left an empty/partial file behind; keep the best-effort behaviour but
+  # leave a trace in the log.
+  print "Warning: GET $URL failed: ".$response->status_line."\n"
+    unless $response->is_success;
+  my ($filename) = reverse (split "/", $URL); # get filename part of the URL
+  open (TEMP, "> $prefix/$filename")
+    || die "Cannot open temporary file: $prefix/$filename\n";
+  print TEMP $cont;
+  close TEMP;
+}
+
+# NOTE(review): update() is installed as the SIGUSR1 handler; doing
+# untie/tie (DB_File I/O) inside a signal handler is not async-signal-safe
+# in general -- confirm this is acceptable here.
sub update {
-  untie %map;
- tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664);
+# retie dbs
+  untie %xml_map;
+  tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664);
+  untie %rdf_map;
+  tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664);
+  untie %xslt_map;
+  tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664);
}
+
+sub update_dbs {
+# rebuild dbs fetching resource indexes from servers.
+# Rebuild urls_of_uris db (1), rdf db (2) and xslt db (3)
+# BUGFIX: $xslt_idxfile was used below without being declared; harmless
+# only because "use strict" is disabled.  Also dropped the unused outer
+# $line (each parsing loop declares its own lexical $line).
+  my (
+    $server, $idxfile, $rdf_idxfile, $xslt_idxfile, $uri, $url, $comp,
+    @servers,
+    %urls_of_uris, %rdf, %xslt # local proxy of dbs
+  );
+
+  untie %xml_map;
+  untie %rdf_map;
+  untie %xslt_map;
+  foreach my $file ($uris_dbm, $rdf_dbm, $xslt_dbm) { # remove old db file
+    if (stat $file) { # remove if exists
+      unlink ($file) or die "can't unlink old db file: $file\n";
+    }
+  }
+  tie(%urls_of_uris, 'DB_File', $uris_dbm, O_RDWR|O_CREAT, 0664);
+  tie(%rdf, 'DB_File', $rdf_dbm, O_RDWR|O_CREAT, 0664);
+  tie(%xslt, 'DB_File', $xslt_dbm, O_RDWR|O_CREAT, 0664);
+
+  open (SRVS, "< $servers_file") or
+    die "cannot open servers file: $servers_file\n";
+  @servers = <SRVS>; # read all servers
+  close (SRVS);
+
+  # cycle on servers in _reverse_ order so that entries from servers listed
+  # first overwrite entries from servers listed later
+  while ($server = pop @servers) {
+    print "processing server: $server ...\n";
+    chomp $server;
+    helm_wget($tmp_dir, $server."/".$indexname); # get index
+    helm_wget($tmp_dir, $server."/".$rdf_indexname); # get rdf index
+    helm_wget($tmp_dir, $server."/".$xslt_indexname); # get xslt index
+    $idxfile = $tmp_dir."/".$indexname;
+    $rdf_idxfile = $tmp_dir."/".$rdf_indexname;
+    $xslt_idxfile = $tmp_dir . "/" . $xslt_indexname;
+
+    # (1) REBUILD URLS_OF_URIS DB
+    open (INDEX, "< $idxfile") or
+      die "cannot open temporary index file: $idxfile\n";
+    while (my $line = <INDEX>) { # parse index and add entry to urls_of_uris db
+      chomp $line;
+      ($uri,$comp) = split /[ \t]+/, $line;
+      # build url:
+      if ($comp =~ /gz/) {
+        $url = $uri . ".xml" . ".gz";
+      } else {
+        $url = $uri . ".xml";
+      }
+      $url =~ s/cic:/$server/;
+      $url =~ s/theory:/$server/;
+      $urls_of_uris{$uri} = $url;
+    }
+
+    # (2) REBUILD RDF DB
+    open (RDF_INDEX, "< $rdf_idxfile") or
+      die "cannot open temporary RDF index file: $rdf_idxfile\n";
+    while (my $line = <RDF_INDEX>) { # parse index and add entry to rdf db
+      chomp $line;
+      ($uri,$comp) = split /[ \t]+/, $line; # comp is "gz" or nothing
+      # build url:
+      if ($comp =~ /gz/) {
+        $url = $uri . ".xml" . ".gz";
+      } else {
+        $url = $uri . ".xml";
+      }
+      $url =~ s/^helm:rdf(.*)\/\/cic:/$server/;
+      $url =~ s/^helm:rdf(.*)\/\/theory:/$server/;
+      $rdf{$uri} = $url;
+    }
+
+    # (3) REBUILD XSLT DB
+    open (XSLT_INDEX, "< $xslt_idxfile") or
+      die "cannot open temporary XSLT index file: $xslt_idxfile\n";
+    while (my $line = <XSLT_INDEX>) { # parse index and add entry to xslt db
+      chomp $line;
+      my $stylesheet = $line;
+      my $url = $server . "/" . $stylesheet;
+      $xslt{$stylesheet} = $url;
+    }
+
+    close INDEX; # close indexes
+    close RDF_INDEX;
+    close XSLT_INDEX;
+
+    # remove temp files
+    foreach my $file ($idxfile, $rdf_idxfile, $xslt_idxfile) {
+      print "cannot unlink temporary file: $file\n" if (unlink $file) != 1;
+    }
+
+  }
+
+  untie(%urls_of_uris); # untie local proxies
+  untie(%rdf);
+  untie(%xslt);
+  tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664); # retie global ones
+  tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664);
+  tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664);
+
+} # update_dbs
+
+# vim modline: do not remove!
+# vim: set ft=perl:
+