X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter.pl.in;h=e9d1f4a3e04609137ad68b8db5658759be797f81;hb=4e683370099ea7aaea877bc345ca8821938d49ed;hp=9c71fce076a1e4cd6e69f95f2713e269c1fd5d3d;hpb=9b03d3f69b91921f10e764d1a465880248c14e55;p=helm.git diff --git a/helm/http_getter/http_getter.pl.in b/helm/http_getter/http_getter.pl.in index 9c71fce07..e9d1f4a3e 100755 --- a/helm/http_getter/http_getter.pl.in +++ b/helm/http_getter/http_getter.pl.in @@ -30,7 +30,7 @@ my $VERSION = "@VERSION@"; # various variables my ($HELM_LIB_PATH); my $cgi_dir = "@HELM_CGI_DIR@"; -my (%map, %rdf_map); +my (%xml_map, %rdf_map, %xslt_map); # First of all, let's load HELM configuration use Env; @@ -43,23 +43,28 @@ if (defined ($HELM_LIB_DIR)) { $HELM_LIB_PATH = $DEFAULT_HELM_LIB_DIR."/configuration.pl"; } -# : TODO temporary, move this setting to configuration file -# set the cache mode, may be "gzipped" or "normal" -my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} || 'gzipped'; -if (($cachemode ne 'gzipped') and ($cachemode ne 'normal')) { - die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be". - "'normal' or 'gzipped'\n"; -} -# - # next require defines: $helm_dir, $html_link, $dtd_dir, $uris_dbm, $indexname require $HELM_LIB_PATH; # TEMP: TODO put these vars in configuration file configuration.xml # -$helm_rdf_dir = "/usr/local/helm/rdf_library"; -$rdf_dbm = "/usr/local/helm/rdf_urls_of_uris"; -$rdf_indexname = "rdf_index.txt"; +my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} || + 'gzipped'; +if (($cachemode ne 'gzipped') and ($cachemode ne 'normal')) { + die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be". + "'normal' or 'gzipped'\n"; +} + +$helm_rdf_dir = $ENV{'HTTP_GETTER_RDF_DIR'} || $helm_rdf_dir; +$rdf_dbm = $ENV{'HTTP_GETTER_RDF_DBM'} || $rdf_dbm; +$xslt_dbm = $ENV{'HTTP_GETTER_XSLT_DBM'} || $xslt_dbm; +$rdf_indexname = $ENV{'HTTP_GETTER_RDF_INDEXNAME'} || $rdf_indexname; +$xslt_indexname = $ENV{'HTTP_GETTER_XSLT_INDEXNAME'} || $xslt_indexname; +$servers_file = $ENV{'HTTP_GETTER_SERVERS_FILE'} || $servers_file; +$uris_dbm = $ENV{'HTTP_GETTER_URIS_DBM'} || $uris_dbm; +$dtdbaseurl = $ENV{'HTTP_GETTER_DTD_BASE_URL'} || $dtdbaseurl; +$getterport = $ENV{'HTTP_GETTER_PORT'} || $getterport; + # # Let's override the configuration file @@ -73,13 +78,15 @@ use LWP::UserAgent; use DB_File; use Compress::Zlib; use CGI; +use URI; use URI::Escape; #CSC: mancano i controlli sulle condizioni di errore di molte funzioni #CSC: ==> non e' robusto #CSC: altra roba da sistemare segnata con CSC -my $d = new HTTP::Daemon LocalPort => 8081; +my $d = new HTTP::Daemon LocalPort => $getterport + or die "Error: port $getterport not available."; my $myownurl = $d->url; # Let's patch the returned URL @@ -90,22 +97,28 @@ $myownurl =~ s/http:\/\/(.*):(.*)/$1/; ($myownurl) = gethostbyname($myownurl); $myownurl = "http://".$myownurl.":".$myownport; -tie(%map, 'DB_File', $uris_dbm.".db", O_RDWR, 0664); # open dbs -tie(%rdf_map, 'DB_File', $rdf_dbm.".db", O_RDWR, 0664); +tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664); # open dbs +tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664); +tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664); print "\n"; print "HTTP Getter $VERSION\n"; # print hello information print "Please contact me at: \n"; print "\n"; +print "servers_file: $servers_file\n"; print "helm_dir: $helm_dir\n"; print "helm_rdf_dir: $helm_rdf_dir\n"; print "style_dir: $style_dir\n"; print "dtd_dir: $dtd_dir\n"; -print "urls_of_uris db: $uris_dbm.db\n"; -print "rdf 
db: $rdf_dbm.db\n"; +print "urls_of_uris db: $uris_dbm\n"; +print "rdf db: $rdf_dbm\n"; +print "xslt db: $xslt_dbm\n"; print "cache mode: $cachemode\n"; print "indexname: $indexname\n"; print "rdf_indexname: $rdf_indexname\n"; +print "xslt_indexname: $xslt_indexname\n"; +print "dtdbaseurl: $dtdbaseurl\n"; +print "getterport: $getterport\n"; print "\n"; $SIG{CHLD} = "IGNORE"; # do not accumulate defunct processes @@ -144,12 +157,20 @@ while (my $c = $d->accept) { } my $filename = $inputuri; + my $prefix; if (not isRdfUri($inputuri)) { # standad cic: or theory: uri $filename =~ s/^cic:(.*)/$1/; $filename =~ s/^theory:(.*)/$1/; } else { # rdf uri + # The "helm:rdf/<<...>>//cic:" prefix is kept, but quoted + # (so that a "helm:rdf/<<...>>//cic:" dir is created + $prefix = $filename; $filename =~ s/^(.*)\/\/cic:(.*)/$2/; $filename =~ s/^(.*)\/\/theory:(.*)/$2/; + $prefix =~ s/\Q$filename\E//; + $prefix =~ s/_/__/g; + $prefix =~ s/\//_/g; + $filename = $prefix.$filename; } my $url = resolve ($inputuri); # resolve uri in url @@ -170,7 +191,7 @@ while (my $c = $d->accept) { if (not isRdfUri ($inputuri)) { # save in uri std cache dir $filename = $helm_dir.$filename.$extension; } else { # save in rdf metadata cache dir - $filename = $helm_rdf_dir.$filename.$extension; + $filename = $helm_rdf_dir."/".$filename.$extension; } print_request($inputuri,$url,$filename); @@ -189,7 +210,7 @@ while (my $c = $d->accept) { } elsif ($http_method eq 'GET' and $http_path eq "/register") { my $inputurl = $cgi->param('url'); print "Register requested...\n"; - $map{$inputuri}=$inputurl; + $xml_map{$inputuri}=$inputurl; # Now let's clean the cache my $cicfilename = $inputuri; @@ -201,13 +222,13 @@ while (my $c = $d->accept) { unlink ($helm_dir.$cicfilename.".xml.gz"); kill(USR1,getppid()); # signal changes to parent - untie %map; + untie %xml_map; print "done.\n"; html_nice_answer($c,"Register done"); # "/resolve" works with rdf uri } elsif ($http_method eq 'GET' and $http_path eq "/resolve") { - #my $outputurl = $map{$inputuri}; + #my $outputurl = $xml_map{$inputuri}; my $outputurl = resolve($inputuri); $outputurl = "" if (not defined($outputurl)); $cont = "\n\n"; @@ -219,6 +240,7 @@ while (my $c = $d->accept) { answer($c,$cont,"text/xml",""); } elsif ($http_method eq 'GET' and $http_path eq "/getdtd") { + my $filename = $inputuri; $filename = $dtd_dir."/".$filename; print "DTD: $inputuri ==> ($filename)\n"; @@ -238,42 +260,48 @@ while (my $c = $d->accept) { die "Could not find DTD!"; } - } elsif ($http_method eq 'GET' and $http_path eq "/getstyleconf") { - my $filename = $inputuri; - $filename = $style_dir."/config/".$filename; - if (stat($filename)) { - open(FD, $filename) or die "Cannot open $filename\n"; - $cont = ""; - while() { - s/DOCTYPE (.*) SYSTEM\s+"/DOCTYPE $1 SYSTEM "$myownurl\/getstyleconf?uri=/g; - $cont .= $_; - } - close(FD); - answer($c,$cont,"text/plain",""); - } else { - die "Could not find Style Configuration File!"; - } - } elsif ($http_method eq 'GET' and $http_path eq "/getxslt") { - my $filename = $inputuri; - $filename = $style_dir."/".$filename; - print "XSLT: $inputuri ==> ($filename)\n"; - if (stat($filename)) { - print "Using local copy\n"; - open(FD, $filename) or die "Cannot open $filename\n"; - $cont = ""; - while() { - # Vary bad heuristic for substituion of absolute URLs - # for relative ones - s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ; - s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ; - $cont .= $_; - } - close(FD); - 
answer($c,$cont,"text/xml",""); - } else { - die "Could not find XSLT!"; - } + + $url = $xslt_map{$inputuri}; + print "Downloading XSLT ...\n"; + helm_wget($tmp_dir, $url); + print "XSLT downloaded!\n"; + $inputuri =~ s/.*\///; + my $filename = $tmp_dir . "/" . $inputuri; + my $cont; + die "Could not find XSLT!\n" unless stat($filename); + open(FD, $filename) or die "Can't open xslt local copy: '$filename'\n"; + while() { + s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g; + s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g; + $cont .= $_; + } + close(FD); + answer($c, $cont, "text/xml", ""); + unlink $filename or + print "Can't unlink XSLT temp file '$filename'"; + +# +# my $filename = $inputuri; +# $filename = $style_dir."/".$filename; +# print "XSLT: $inputuri ==> ($filename)\n"; +# if (stat($filename)) { +# print "Using local copy\n"; +# open(FD, $filename) or die "Cannot open $filename\n"; +# $cont = ""; +# while() { +# # Vary bad heuristic for substituion of absolute URLs +# # for relative ones +# s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ; +# s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ; +# $cont .= $_; +# } +# close(FD); +# answer($c,$cont,"text/xml",""); +# } else { +# die "Could not find XSLT!"; +# } +# # "/update" works with rdf uri } elsif ($http_method eq 'GET' and $http_path eq "/update") { @@ -284,7 +312,21 @@ while (my $c = $d->accept) { print " done\n"; html_nice_answer($c,"Update done"); - # "/ls" does not work with rdf uris + # "/getalluris" does not work with rdf uris + } elsif ($http_method eq 'GET' and $http_path eq "/getalluris") { + # send back all the keys in xml + print "BASEURI $baseuri, FORMAT $outype\n"; + $cont = getalluris(); + answer($c,$cont,"text/xml",""); + + # "/getallrdfuris" + } elsif ($http_method eq 'GET' and $http_path eq "/getallrdfuris") { + # send back all the keys in xml + my $class = $cgi->param('class'); + print "BASEURI $baseuri, FORMAT $outype, CLASS $class\n"; + $cont = getallrdfuris($class); + answer($c,$cont,"text/xml",""); + } elsif ($http_method eq 'GET' and $http_path eq "/ls") { # send back keys that begin with a given uri my ($uritype,$uripattern,$uriprefix); @@ -329,6 +371,17 @@ while (my $c = $d->accept) { print "Help requested!\n"; html_nice_answer($c,"HTTP Getter Version: $VERSION"); + } elsif ($http_method eq 'GET' and $http_path eq "/getempty") { + # return an empty, but valid, xml document + my $emptyxml = < + +]> + +EOXML + answer($c, $emptyxml, "", ""); + } elsif ($http_method eq 'GET' and $http_path =~ /\.cgi$/) { # cgi handling print "CGI requested!\n"; @@ -398,11 +451,54 @@ sub resolve { # resolve an uri in a url, work both with standard cic: or theory: return ($rdf_map{$uri}); } else { # standard cic: or theory: uri, resolve using std uri db print "IS NOT A RDF URI\n"; - print "I WILL RETURN '$map{$uri}'\n"; - return ($map{$uri}); + print "I WILL RETURN '$xml_map{$uri}'\n"; + return ($xml_map{$uri}); } } +sub getalluris { # get all the keys whose prefix is cic + my $content = ""; + my ($uri); + my $debug=1; # for debug + + $content .= '' . "\n"; + $content .= "" . "\n\n"; + $content .= "\n"; + foreach $uri (sort (keys(%xml_map))) { # select matching uris + if ($uri =~ /^cic:/ && not $uri =~ /.types$/) { + print "GETALLURI: $uri\n" if defined($debug); + $content .= "\t\n"; + } + } + $content .= "\n"; + return $content; +} + +sub getallrdfuris { + my $class = $_[0]; + my $content = ""; + my ($uri); + my $debug=1; # for debug + + $content .= '' . 
"\n"; + $content .= "" . "\n\n"; + $content .= "\n"; + foreach $uri (sort (keys(%rdf_map))) { + if ($class eq "forward" && + $uri =~ /^helm:rdf:www.cs.unibo.it\/helm\/rdf\/forward/ + || + $class eq "backward" && + $uri =~ /^helm:rdf:www.cs.unibo.it\/helm\/rdf\/backward/) { + print "GETALLRDFURI: $uri\n" if defined($debug); + $content .= "\t\n"; + } + } + $content .= "\n"; + return $content; +} + sub finduris { # find uris for cic and theory trees generation my ($uritype,$uripattern,$format) = @_; my $content = ""; @@ -416,7 +512,7 @@ sub finduris { # find uris for cic and theory trees generation if (($uritype eq "cic") or ($uritype eq "theory")) { # get info only of one type: cic or theory - foreach (keys(%map)) { # select matching uris + foreach (keys(%xml_map)) { # select matching uris $uri = $_; if ($uri =~ /^$uritype:$uripattern(\/|$|\.)/) { if ($uri =~ /^$uritype:$uripattern\//) { # directory match @@ -433,9 +529,9 @@ sub finduris { # find uris for cic and theory trees generation if ($localpart =~ /^[^\/]*$/) { # no slash, an OBJECT $basepart = $localpart; - $basepart =~ s/^([^.]*\.[^.]*)(\.types)?(\.ann)?/$1/; - # remove exts .types or - # .types.ann + $basepart =~ s/^([^.]*\.[^.]*)((\.body)|(\.types))?(\.ann)?/$1/; + # remove exts .types, .body, + # .types.ann or .body.ann $flags = $objects{$basepart}; # get old flags if ($localpart =~ /\.ann$/) { $flags = add_flag("ann","YES",$flags); @@ -449,6 +545,13 @@ sub finduris { # find uris for cic and theory trees generation } else { $flags = add_flag("types","NO",$flags); } + if ($localpart =~ /\.body$/) { + $flags = add_flag("body","YES",$flags); + } elsif ($localpart =~ /\.body\.ann$/) { + $flags = add_flag("body","ANN",$flags); + } else { + $flags = add_flag("body","NO",$flags); + } $objects{$basepart} = $flags; # save new flags } else { # exists at least one slash, a DIR ($dirname) = split (/\//, $localpart); @@ -480,9 +583,10 @@ sub finduris { # find uris for cic and theory trees generation $content .= "\t\n"; $flags = $objects{$key}; $flags =~ s/^<(.*)>$/$1/; - my ($annflag,$typesflag) = split /,/,$flags; + my ($annflag,$typesflag,$bodyflag) = split /,/,$flags; $content .= "\t\t\n"; $content .= "\t\t\n"; + $content .= "\t\t\n"; $content .= "\t\n"; } $content .= "\n"; @@ -493,17 +597,18 @@ sub finduris { # find uris for cic and theory trees generation } sub add_flag { -# manage string like: "" +# manage string like: "" # "ann_flag" may be one of "ann_YES", "ann_NO" # "type_flag" may be one of "types_NO", "types_YES", "types_ANN" +# "body_flag" may be one of "body_NO", "body_YES", "body_ANN" # when adding a flag the max between the current flag and the new flag -# is taken, the orders are ann_NO < ann_YES and types_NO < types_YES < -# types_ANN +# is taken, the orders are ann_NO < ann_YES, types_NO < types_YES < +# types_ANN and body_NO < body_YES < body_ANN my ($flagtype,$newflag,$str) = @_; - $str = "<,>" if ($str eq ""); - ($str =~ s/^<(.*,.*)>$/$1/) or die "Internal error: ". + $str = "<,,>" if ($str eq ""); + ($str =~ s/^<(.*,.*,.*)>$/$1/) or die "Internal error: ". 
"wrong string format for flag adding in $str"; - my ($annflag,$typeflag) = split /,/,$str; + my ($annflag,$typeflag,$bodyflag) = split /,/,$str; if ($flagtype eq "ann") { # ANN flag handling if ($newflag eq "YES") { $annflag = "YES"; @@ -523,10 +628,21 @@ sub add_flag { } else { die "Internal error: typeflag must be \"YES\", \"NO\" or \"ANN\""; } + } elsif ($flagtype eq "body") { # BODY flag handling + if ($newflag eq "ANN") { + $bodyflag = "ANN"; + } elsif ($newflag eq "YES") { + $bodyflag = "YES" unless ($bodyflag eq "ANN"); + } elsif ($newflag eq "NO") { + $bodyflag = "NO" + unless (($bodyflag eq "ANN") or ($bodyflag eq "YES")); + } else { + die "Internal error: typeflag must be \"YES\", \"NO\" or \"ANN\""; + } } else { die "Internal error: unsupported flagtype \"$flagtype\""; } - $str = "<$annflag,$typeflag>"; + $str = "<$annflag,$typeflag,$bodyflag>"; } #CSC: Too much powerful: creates even /home, /home/users/, ... @@ -553,12 +669,6 @@ sub print_request print "\n"; } -sub callback -{ - my ($data) = @_; - $cont .= $data; -} - sub gunzip { # gunzip a file and return the deflated content my ($filename) = @_; @@ -595,7 +705,7 @@ sub download { my ($gz, $buffer); # print "DOWNLOAD subs receives url: \"$url\"\n"; -$ print "DOWNLOAD subs receives filename: \"$filename\"\n"; +# print "DOWNLOAD subs receives filename: \"$filename\"\n"; my $resourcetype; # retrieve mode: "normal" (.xml) or "gzipped" (.xml.gz) if ($filename =~ /\.xml$/) { # set retrieve mode @@ -677,7 +787,7 @@ $ print "DOWNLOAD subs receives filename: \"$filename\"\n"; # $cont now contained uncompressed data } if ($patch_dtd eq "yes") { - $cont =~ s/DOCTYPE (.*) SYSTEM\s+"http:\/\/www.cs.unibo.it\/helm\/dtd\//DOCTYPE $1 SYSTEM "$myownurl\/getdtd?uri=/g; + $cont =~ s/DOCTYPE (.*) SYSTEM\s+"$dtdbaseurl\//DOCTYPE $1 SYSTEM "$myownurl\/getdtd?uri=/g; } if ($format eq "gz") { gzip($cont,"$basefname.tmp"); @@ -709,6 +819,7 @@ sub answer { $res->push_header("Pragma" => "no-cache"); $res->push_header("Expires" => "0"); $c->send_response($res); + $c->force_last_request(); } sub html_nice_answer { @@ -718,9 +829,14 @@ sub html_nice_answer { answer($c,$content,"text/html",""); } +sub callback { + my ($data) = @_; + $cont .= $data; +} + sub helm_wget { -#retrieve a file from an url and write it to a temp dir -#used for retrieve resource index from servers +# retrieve a file from an url and write it to a temp dir +# used for retrieve resource index from servers $cont = ""; my ($prefix, $URL) = @_; my $ua = LWP::UserAgent->new; @@ -734,30 +850,35 @@ sub helm_wget { } sub update { - untie %map; - tie(%map, 'DB_File', $uris_dbm.".db", O_RDWR, 0664); +# retie dbs + untie %xml_map; + tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664); untie %rdf_map; - tie(%rdf_map, 'DB_File', $rdf_dbm.".db", O_RDWR, 0664); + tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664); + untie %xslt_map; + tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664); } sub update_dbs { # rebuild dbs fetching resource indexes from servers. 
-# Rebuild urls_of_uris db (1) and rdf db (2)
+# Rebuild urls_of_uris db (1), rdf db (2) and xslt db (3)
   my (
     $server, $idxfile, $rdf_idxfile, $uri, $url, $comp, $line,
     @servers,
-    %urls_of_uris, %rdf # local proxy of $map and $rdf_map
+    %urls_of_uris, %rdf, %xslt # local proxy of dbs
   );
 
-  untie %map;
+  untie %xml_map;
   untie %rdf_map;
-  foreach my $file ($uris_dbm, $rdf_dbm) { # remove old db file
-    if (stat $file.".db") { # remove if exists
-      unlink ($file.".db") or die "can't unlink old db file: $file.db\n";
+  untie %xslt_map;
+  foreach my $file ($uris_dbm, $rdf_dbm, $xslt_dbm) { # remove old db file
+    if (stat $file) { # remove if exists
+      unlink ($file) or die "can't unlink old db file: $file\n";
     }
   }
-  tie(%urls_of_uris, 'DB_File', $uris_dbm.".db", O_RDWR|O_CREAT, 0664);
-  tie(%rdf, 'DB_File', $rdf_dbm.".db", O_RDWR|O_CREAT, 0664);
+  tie(%urls_of_uris, 'DB_File', $uris_dbm, O_RDWR|O_CREAT, 0664);
+  tie(%rdf, 'DB_File', $rdf_dbm, O_RDWR|O_CREAT, 0664);
+  tie(%xslt, 'DB_File', $xslt_dbm, O_RDWR|O_CREAT, 0664);
 
   open (SRVS, "< $servers_file") or
     die "cannot open servers file: $servers_file\n";
@@ -769,12 +890,15 @@ sub update_dbs {
     chomp $server;
     helm_wget($tmp_dir, $server."/".$indexname); # get index
     helm_wget($tmp_dir, $server."/".$rdf_indexname); # get rdf index
+    helm_wget($tmp_dir, $server."/".$xslt_indexname); # get xslt index
     $idxfile = $tmp_dir."/".$indexname;
     $rdf_idxfile = $tmp_dir."/".$rdf_indexname;
+    $xslt_idxfile = $tmp_dir . "/" . $xslt_indexname;
 
-    open (INDEX, "< $idxfile") or # (1) REBUILD URLS_OF_URIS DB
+    # (1) REBUILD URLS_OF_URIS DB
+    open (INDEX, "< $idxfile") or
       die "cannot open temporary index file: $idxfile\n";
-    while ($line = <INDEX>) { # parse index and add entry to urls_of_uris db
+    while (my $line = <INDEX>) { # parse index and add entry to urls_of_uris db
      chomp $line;
      ($uri,$comp) = split /[ \t]+/, $line;
      # build url:
@@ -788,9 +912,10 @@ sub update_dbs {
       $urls_of_uris{$uri} = $url;
     }
 
-    open (RDF_INDEX, "< $rdf_idxfile") or # (2) REBUILD RDF DB
-      die "cannot open temporary rdf index file: $rdf_idxfile\n";
-    while ($line = <RDF_INDEX>) { # parse index and add entry to rdf db
+    # (2) REBUILD RDF DB
+    open (RDF_INDEX, "< $rdf_idxfile") or
+      die "cannot open temporary RDF index file: $rdf_idxfile\n";
+    while (my $line = <RDF_INDEX>) { # parse index and add entry to rdf db
      chomp $line;
      ($uri,$comp) = split /[ \t]+/, $line; # comp is "gz" or nothing
      # build url:
@@ -804,18 +929,36 @@ sub update_dbs {
       $rdf{$uri} = $url;
     }
 
+    # (3) REBUILD XSLT DB
+    open (XSLT_INDEX, "< $xslt_idxfile") or
+      die "cannot open temporary XSLT index file: $xslt_idxfile\n";
+    while (my $line = <XSLT_INDEX>) { # parse index and add entry to xslt db
+      chomp $line;
+      my $stylesheet = $line;
+      my $url = $server . "/" .
$stylesheet; + $xslt{$stylesheet} = $url; + } + close INDEX; # close indexes close RDF_INDEX; - die "cannot unlink temporary file: $idxfile\n" # remove temp files - if (unlink $idxfile) != 1; - die "cannot unlink temporary file: $rdf_idxfile\n" - if (unlink $rdf_idxfile) != 1; + close XSLT_INDEX; + + # remove temp files + foreach my $file ($idxfile, $rdf_idxfile, $xslt_idxfile) { + print "cannot unlink temporary file: $file\n" if (unlink $file) != 1; + } } untie(%urls_of_uris); # untie local proxies untie(%rdf); - tie(%map, 'DB_File', $uris_dbm.".db", O_RDWR, 0664); # retie global ones - tie(%rdf_map, 'DB_File', $rdf_dbm.".db", O_RDWR, 0664); + untie(%xslt); + tie(%xml_map, 'DB_File', $uris_dbm, O_RDWR, 0664); # retie global ones + tie(%rdf_map, 'DB_File', $rdf_dbm, O_RDWR, 0664); + tie(%xslt_map, 'DB_File', $xslt_dbm, O_RDWR, 0664); + } # update_dbs +# vim modline: do not remove! +# vim: set ft=perl: +
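
Illustrative sketch (not part of the patch above): the finduris/add_flag changes extend the per-object flag string from "<ann,types>" to "<ann,types,body>", and for each slot the maximum of the old and new value is kept, in the order NO < YES < ANN (the ann slot only ever sees NO or YES). The standalone Perl below mimics that accumulation on a few invented local-part names, purely to show the encoding that add_flag stores; the real names come from the urls_of_uris db.

#!/usr/bin/perl -w
# Standalone sketch of the <ann,types,body> flag accumulation performed by
# finduris()/add_flag().  The local-part names below are invented examples.
use strict;

my %order = (NO => 0, YES => 1, ANN => 2);   # NO < YES < ANN

sub merge_flag {                             # keep the maximum of old and new value
  my ($old, $new) = @_;
  return $order{$new} > $order{$old} ? $new : $old;
}

my @localparts = ("nat.ind", "nat.ind.types", "plus.con.body.ann");
my %objects;                                 # basename => [ann, types, body]

foreach my $localpart (@localparts) {
  (my $base = $localpart) =~ s/^([^.]*\.[^.]*).*$/$1/;   # strip .types/.body/.ann exts
  my $flags = $objects{$base} || ["NO", "NO", "NO"];
  $flags->[0] = merge_flag($flags->[0],
    $localpart =~ /\.ann$/ ? "YES" : "NO");
  $flags->[1] = merge_flag($flags->[1],
    $localpart =~ /\.types\.ann$/ ? "ANN" : $localpart =~ /\.types$/ ? "YES" : "NO");
  $flags->[2] = merge_flag($flags->[2],
    $localpart =~ /\.body\.ann$/ ? "ANN" : $localpart =~ /\.body$/ ? "YES" : "NO");
  $objects{$base} = $flags;
}

foreach my $base (sort keys %objects) {
  my ($ann, $types, $body) = @{$objects{$base}};
  print "$base -> <$ann,$types,$body>\n";    # same "<...>" format add_flag stores
}
# prints:
#   nat.ind -> <NO,YES,NO>
#   plus.con -> <YES,NO,ANN>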
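
Illustrative sketch (not part of the patch above): for RDF URIs the patch now keeps the "helm:rdf/<<...>>//cic:" prefix in the cache filename but quotes it (every "_" is doubled, then every "/" becomes "_"), so the whole prefix collapses into a single directory component under $helm_rdf_dir. The snippet below replays that transformation; the input URI is invented for illustration.

#!/usr/bin/perl -w
# Replays the RDF-URI -> cache-filename quoting added in the /getxml handler.
use strict;

my $inputuri = "helm:rdf:www.cs.unibo.it/helm/rdf/forward//cic:/Coq/Init/Peano/plus.con";

my $filename = $inputuri;
my $prefix   = $filename;
$filename =~ s/^(.*)\/\/cic:(.*)/$2/;      # keep only the part after //cic: (or //theory:)
$filename =~ s/^(.*)\/\/theory:(.*)/$2/;
$prefix   =~ s/\Q$filename\E//;            # what is left is the rdf prefix
$prefix   =~ s/_/__/g;                     # quote underscores first,
$prefix   =~ s/\//_/g;                     # then turn slashes into underscores
print $prefix.$filename, "\n";
# prints: helm:rdf:www.cs.unibo.it_helm_rdf_forward__cic:/Coq/Init/Peano/plus.con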