X-Git-Url: http://matita.cs.unibo.it/gitweb/?a=blobdiff_plain;f=helm%2Fhttp_getter%2Fhttp_getter.pl.in;h=e73d9f9878689b039352916e90b29a0abcfcd303;hb=b564f0671c133928e372e352d972a7663b6a9136;hp=43c7ff0790ec3750ce8858c6a54e972071a269c9;hpb=985d664bf27762902b93a2158e755aeabe6ee27f;p=helm.git diff --git a/helm/http_getter/http_getter.pl.in b/helm/http_getter/http_getter.pl.in index 43c7ff079..e73d9f987 100755 --- a/helm/http_getter/http_getter.pl.in +++ b/helm/http_getter/http_getter.pl.in @@ -23,6 +23,8 @@ # For details, see the HELM World-Wide-Web page, # http://cs.unibo.it/helm/. +my $VERSION = "@VERSION@"; + # First of all, let's load HELM configuration use Env; my $HELM_LIB_DIR = $ENV{"HELM_LIB_DIR"}; @@ -38,28 +40,46 @@ if (defined ($HELM_LIB_DIR)) { # set the cache mode, may be "gzipped" or "normal" my $cachemode = $ENV{'HTTP_GETTER_CACHE_MODE'} || 'gzipped'; if (($cachemode ne 'gzipped') and ($cachemode ne 'normal')) { - die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be 'normal' or 'gzipped'\n"; + die "Invalid HTTP_GETTER_CACHE_MODE environment variable, must be". + "'normal' or 'gzipped'\n"; } # # next require defines: $helm_dir, $html_link, $dtd_dir, $uris_dbm require $HELM_LIB_PATH; +# Let's override the configuration file +$style_dir = $ENV{"HELM_STYLE_DIR"} if (defined ($ENV{"HELM_STYLE_DIR"})); +$dtd_dir = $ENV{"HELM_DTD_DIR"} if (defined ($ENV{"HELM_DTD_DIR"})); + use HTTP::Daemon; use HTTP::Status; use HTTP::Request; use LWP::UserAgent; use DB_File; use Compress::Zlib; +use CGI; +use URI::Escape; #CSC: mancano i controlli sulle condizioni di errore di molte funzioni #CSC: ==> non e' robusto #CSC: altra roba da sistemare segnata con CSC my $d = new HTTP::Daemon LocalPort => 8081; +my $myownurl = $d->url; + +# Let's patch the returned URL +$myownurl =~ s/\/$//; # chop the final slash +my $myownport = $myownurl; +$myownport =~ s/http:\/\/(.*):(.*)/$2/; +$myownurl =~ s/http:\/\/(.*):(.*)/$1/; +($myownurl) = gethostbyname($myownurl); +$myownurl = "http://".$myownurl.":".$myownport; + tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664); -print "Please contact me at: url, ">\n"; +print "Please contact me at: \n"; print "helm_dir: $helm_dir\n"; +print "style_dir: $style_dir\n"; print "dtd_dir: $dtd_dir\n"; print "urls_of_uris.db: $uris_dbm.db\n"; print "cache mode: $cachemode\n"; @@ -71,11 +91,15 @@ while (my $c = $d->accept) { while (my $r = $c->get_request) { #CSC: mancano i controlli di sicurezza - my $inputuri = $r->url; + my $inputuri = uri_unescape($r->url); $inputuri =~ s/^[^?]*\?uri=(.*)/$1/; print "\nRequest: ".$r->url."\n\n"; my $http_method = $r->method; my $http_path = $r->url->path; + my $http_query = uri_unescape($r->url->query); + my $cgi = new CGI("$http_query"); + + print "\nUnescaped query: ".$http_query."\n"; if ($http_method eq 'GET' and $http_path eq "/getciconly") { # finds the uri, url and filename @@ -84,21 +108,19 @@ while (my $c = $d->accept) { my $cicfilename = $cicuri; $cicfilename =~ s/cic:(.*)/$1/; $cicfilename =~ s/theory:(.*)/$1/; -# $cicfilename = $helm_dir.$cicfilename.".xml"; -# + my $cicurl = $map{$cicuri}; - my $extension; - if ($cicurl =~ /\.xml$/) { # non gzipped file - $extension = ".xml"; - } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file - $extension = ".xml.gz"; - } else { # error: unknown extension - die "unexpected extension in url: $cicurl, might be '.xml' or '.xml.gz'"; - } + my $extension; + if ($cicurl =~ /\.xml$/) { # non gzipped file + $extension = ".xml"; + } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file + 
$extension = ".xml.gz"; + } else { # error: unknown extension + die "unexpected extension in url: $cicurl, might be '.xml'". + "or '.xml.gz'"; + } $cicfilename = $helm_dir.$cicfilename.$extension; - #my $cicurl = $map{$cicuri}; -# if (!defined($cicurl)) { print "\nNOT FOUND!!!!!\n"; $c->send_error(RC_NOT_FOUND) @@ -138,45 +160,41 @@ while (my $c = $d->accept) { $cicfilename =~ s/theory:(.*)/$1/; $cicfilename = $helm_dir.$cicfilename; -# my $typesfilename = $cicfilename.".types.xml" if $typesuri; -# my $annfilename = $cicfilename.$annsuffix.".xml" if $annuri; -# $cicfilename .= ".xml"; - -# my $cicurl = $map{$cicuri}; my $typesurl = $map{$typesuri} if (defined($typesuri)); my $annurl = $map{$annuri} if (defined($annuri)); - my ($cicext, $typesext, $annext); - if ($cicurl =~ /\.xml$/) { # normal file - $cicext = ".xml"; - } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file - $cicext = ".xml.gz"; - } else { - die "unexpected extension in url: $cicurl; might be '.xml' or '.xml.gz'"; - } - if (defined($typesuri)) { # extension selection for types file - if ($typesurl =~ /\.xml$/) { # normal file - $typesext = ".types.xml"; - } elsif ($typesurl =~ /\.xml\.gz$/) { # gzipped file - $typesext = ".types.xml.gz"; - } else { - die "unexpected extension in url: $typesurl; might be '.xml' or '.xml.gz'"; - } - } - if (defined($annuri)) { # extension selection for annotation file - if ($annurl =~ /\.xml$/) { # normal file - $annext = ".xml"; - } elsif ($annurl =~ /\.xml\.gz$/) { # gzipped file - $annext = ".xml.gz"; - } else { - die "unexpected extension in url: $annurl might be '.xml' or '.xml.gz'"; - } - } + my ($cicext, $typesext, $annext); + if ($cicurl =~ /\.xml$/) { # normal file + $cicext = ".xml"; + } elsif ($cicurl =~ /\.xml\.gz$/) { # gzipped file + $cicext = ".xml.gz"; + } else { + die "unexpected extension in url: $cicurl;". + "might be '.xml' or '.xml.gz'"; + } + if (defined($typesuri)) { # extension selection for types file + if ($typesurl =~ /\.xml$/) { # normal file + $typesext = ".types.xml"; + } elsif ($typesurl =~ /\.xml\.gz$/) { # gzipped file + $typesext = ".types.xml.gz"; + } else { + die "unexpected extension in url: $typesurl;". + "might be '.xml' or '.xml.gz'"; + } + } + if (defined($annuri)) { # extension selection for annotation file + if ($annurl =~ /\.xml$/) { # normal file + $annext = ".xml"; + } elsif ($annurl =~ /\.xml\.gz$/) { # gzipped file + $annext = ".xml.gz"; + } else { + die "unexpected extension in url: $annurl". 
+ "might be '.xml' or '.xml.gz'"; + } + } my $typesfilename = $cicfilename.$typesext if $typesuri; my $annfilename = $cicfilename.$annsuffix.$annext if $annuri; $cicfilename .= $cicext; -# - if (!defined($cicurl) || (!defined($typesurl) && $typesuri) || @@ -221,12 +239,52 @@ EOT print "Using local copy\n"; open(FD, $filename) or die "Cannot open $filename\n"; $cont = ""; - while() { $cont .= $_; } + while() { + # Vary bad heuristic for substituion of absolute URLs + # for relative ones + s/ENTITY (.*) SYSTEM\s+"/ENTITY $1 SYSTEM "$myownurl\/getdtd?uri=/g; + $cont .= $_; + } close(FD); answer($c,$cont); } else { die "Could not find DTD!"; } + } elsif ($http_method eq 'GET' and $http_path eq "/getstyleconf") { + my $filename = $inputuri; + $filename = $style_dir."/config/".$filename; + if (stat($filename)) { + open(FD, $filename) or die "Cannot open $filename\n"; + $cont = ""; + while() { + s/DOCTYPE (.*) SYSTEM\s+"/DOCTYPE $1 SYSTEM "$myownurl\/getstyleconf?uri=/g; + $cont .= $_; + } + close(FD); + answer($c,$cont); + } else { + die "Could not find Style Configuration File!"; + } + } elsif ($http_method eq 'GET' and $http_path eq "/getxslt") { + my $filename = $inputuri; + $filename = $style_dir."/".$filename; + print "XSLT: $inputuri ==> ($filename)\n"; + if (stat($filename)) { + print "Using local copy\n"; + open(FD, $filename) or die "Cannot open $filename\n"; + $cont = ""; + while() { + # Vary bad heuristic for substituion of absolute URLs + # for relative ones + s/xsl:import\s+href="/xsl:import href="$myownurl\/getxslt?uri=/g ; + s/xsl:include\s+href="/xsl:include href="$myownurl\/getxslt?uri=/g ; + $cont .= $_; + } + close(FD); + answer($c,$cont); + } else { + die "Could not find XSLT!"; + } } elsif ($http_method eq 'GET' and $http_path eq "/conf") { my $quoted_html_link = $html_link; $quoted_html_link =~ s/&/&/g; @@ -238,13 +296,52 @@ EOT $cont = "$quoted_html_link"; answer($c,$cont); } elsif ($http_method eq 'GET' and $http_path eq "/update") { - print "Update requested..."; - update(); - kill(USR1,getppid()); + # rebuild urls_of_uris.db + print "Update requested...\n"; + mk_urls_of_uris(); + kill(USR1,getppid()); # signal changes to parent print " done\n"; answer($c,"
<html><body><h1>Update done</h1></body></html>
"); + } elsif ($http_method eq 'GET' and $http_path eq "/ls") { + # send back keys that begin with a given uri + my ($uritype,$uripattern,$uriprefix); + my $baseuri = $cgi->param('baseuri'); + chop $baseuri if ($baseuri =~ /.*\/$/); # remove trailing "/" + my $outype = $cgi->param('format'); # output type, might be 'txt' or 'xml' + $uripattern = $baseuri; + $uripattern =~ s/^.*:(.*)/$1/; + if ($baseuri =~ /^cic:/) { + $uritype = "cic"; + } elsif ($baseuri =~ /^theory:/) { + $uritype = "theory"; + } else { + $uritype = "invalid"; + } + if ($uritype ne "invalid") { # uri is valid + if (($outype ne 'txt') and ($outype ne 'xml')) { # invalid out type + print "Invalid output type specified: $outype\n"; + answer($c,"
<html><body><h1>Invalid output type, may be ". + "\"txt\" or \"xml\"</h1></body></html>
"); + } else { # valid output + print "BASEURI $baseuri, FORMAT $outype\n"; + $cont = finduris($uritype,$uripattern,$outype); + answer($c,$cont); + } + } else { # invalid uri + print "Invalid uri: $baseuri, may begin with 'cic:', ". + "'theory:' or '*:'\n"; + answer($c,"
<html><body><h1>Invalid uri , may begin with ". + "\"cic:\", \"theory:\" or \"*:\"</h1></body></html>
"); + } + } elsif ($http_method eq 'GET' and $http_path eq "/version") { + print "Version requested!"; + answer($c,"
<html><body><h1>HTTP Getter Version ". + $VERSION."</h1></body></html>
"); } else { - print "\nINVALID REQUEST!!!!!\n"; + print "\n"; + print "INVALID REQUEST!!!!!\n"; + print "(PATH: ",$http_path,", "; + print "QUERY: ",$http_query,")\n"; $c->send_error(RC_FORBIDDEN) } print "\nRequest solved: ".$r->url."\n\n"; @@ -258,6 +355,117 @@ EOT #================================ +sub finduris { # find uris for cic and theory trees generation + my ($uritype,$uripattern,$format) = @_; + my $content = ""; + my ($uri,$localpart,$basepart,$dirname,$suffix,$flags,$key); + my (@itemz,@already_pushed_dir); + my (%objects,%dirs); # map uris to suffixes' flags + + print "FINDURIS, uritype: $uritype, uripattern: $uripattern, ". + "format: $format\n\n"; + + if (($uritype eq "cic") or ($uritype eq "theory")) { + # get info only of one type: cic or theory + foreach (keys(%map)) { # select matching uris + $uri = $_; + if ($uri =~ /^$uritype:$uripattern\//) { + $localpart = $uri; + $localpart =~ s/^$uritype:$uripattern\/(.*)/$1/; + + if ($localpart =~ /^[^\/]*$/) { # no slash, an OBJECT + $basepart = $localpart; + $basepart =~ s/^(.*)\.types(\.ann)?/$1/; # remove exts .types or + # .types.ann + $flags = $objects{$basepart}; # get old flags + if ($localpart =~ /\.ann$/) { + $flags = add_flag("ann","YES",$flags); + } else { + $flags = add_flag("ann","NO",$flags); + } + if ($localpart =~ /\.types$/) { + $flags = add_flag("types","YES",$flags); + } elsif ($localpart =~ /\.types\.ann$/) { + $flags = add_flag("types","ANN",$flags); + } else { + $flags = add_flag("types","NO",$flags); + } + $objects{$basepart} = $flags; # save new flags + } else { # exists at least one slash, a DIR + ($dirname) = split (/\//, $localpart); + $dirs{$dirname} = ""; # no flags requirement for dir + } + } + } + } else { + die "getter internal error: unsupported uritype: \"$uritype\""; + } + # now generate OUTPUT: + # output will be saved in $content + if ($format eq "txt") { + foreach $key (sort(keys %dirs)) { + $content .= "dir, " . $key . "\n"; + } + foreach $key (sort(keys %objects)) { + $content .= "object, $key, " . $objects{$key} . "\n"; + } + } elsif ($format eq "xml") { + $content .= "\n"; + foreach $key (sort(keys %dirs)) { + $content .= "\t
<dir>$key</dir>
\n"; + } + foreach $key (sort(keys %objects)) { + $content .= "\t<object name=\"$key\">\n"; + $flags = $objects{$key}; + $flags =~ s/^<(.*)>$/$1/; + my ($annflag,$typesflag) = split /,/,$flags; + $content .= "\t\t<ann value=\"$annflag\" />\n"; + $content .= "\t\t<types value=\"$typesflag\" />\n"; + $content .= "\t</object>\n"; + } + $content .= "</ls>
\n"; + } else { # may not enter this branch + die "Getter internal error: invalid format received by finduris sub"; + } + return $content; +} + +sub add_flag { +# manage string like: "" +# "ann_flag" may be one of "ann_YES", "ann_NO" +# "type_flag" may be one of "types_NO", "types_YES", "types_ANN" +# when adding a flag the max between the current flag and the new flag +# is taken, the orders are ann_NO < ann_YES and types_NO < types_YES < +# types_ANN + my ($flagtype,$newflag,$str) = @_; + $str = "<,>" if ($str eq ""); + ($str =~ s/^<(.*,.*)>$/$1/) or die "Internal error: ". + "wrong string format for flag adding in $str"; + my ($annflag,$typeflag) = split /,/,$str; + if ($flagtype eq "ann") { # ANN flag handling + if ($newflag eq "YES") { + $annflag = "YES"; + } elsif ($newflag eq "NO") { + $annflag = "NO" unless ($annflag eq "YES"); + } else { + die "Internal error: annflag must be \"YES\" or \"NO\""; + } + } elsif ($flagtype eq "types") { # TYPES flag handling + if ($newflag eq "ANN") { + $typeflag = "ANN"; + } elsif ($newflag eq "YES") { + $typeflag = "YES" unless ($typeflag eq "ANN"); + } elsif ($newflag eq "NO") { + $typeflag = "NO" + unless (($typeflag eq "ANN") or ($typeflag eq "YES")); + } else { + die "Internal error: typeflag must be \"YES\", \"NO\" or \"ANN\""; + } + } else { + die "Internal error: unsupported flagtype \"$flagtype\""; + } + $str = "<$annflag,$typeflag>"; +} #CSC: Too much powerful: creates even /home, /home/users/, ... #CSC: Does not raise errors if could not create dirs/files @@ -293,7 +501,8 @@ sub gunzip { # gunzip a file and return the deflated content my ($gz, $buffer, $cont); print "deflating $filename ...\n"; - $gz = gzopen($filename, "r") or die "Cannot open gzip'ed file $filename: $gzerrno"; + $gz = gzopen($filename, "r") + or die "Cannot open gzip'ed file $filename: $gzerrno"; $cont = ""; while ( $gz->gzread($buffer) > 0 ) { $cont .= $buffer; @@ -309,7 +518,8 @@ sub gzip { # gzip the content argument and save it to filename argument my ($gz, $cont); - $gz = gzopen($filename, "w") or die "Cannot gzopen for writing file $filename: $gzerrno"; + $gz = gzopen($filename, "w") + or die "Cannot gzopen for writing file $filename: $gzerrno"; $gz->gzwrite($cont) or die "error writing: $gzerrno\n" ; $gz->gzclose(); } @@ -317,7 +527,6 @@ sub gzip { # gzip the content argument and save it to filename argument sub download { my ($remove_headers,$str,$url,$filename) = @_; -# my ($gz, $buffer); my $resourcetype; # retrieve mode: "normal" (.xml) or "gzipped" (.xml.gz) @@ -329,8 +538,7 @@ sub download die "Unsupported download extension, might be '.gz' or '.xml'\n"; } my $basefname = $filename; - $basefname =~ s/\.gz$//; # get base resource name removing trailing .gz -# + $basefname =~ s/\.gz$//; # get base resource name removing trailing .gz $cont = ""; # modified by side-effect by the callback function my $localfname=""; @@ -339,63 +547,66 @@ sub download } elsif (stat($basefname.".gz")) { $localfname=$basefname.".gz"; } - if ($localfname ne "") { # we already have local copy of requested file - # check both possible cache entry: gzipped or normal - print "Using local copy for the $str file\n"; -# - if ($localfname =~ /\.xml\.gz$/) { # deflating cached file and return it - $cont = gunzip($localfname); - } elsif ($localfname =~ /\.xml$/) { # just return cached file - open(FD, $localfname) or die "Cannot open $localfname"; - while() { $cont .= $_; } - close(FD); - } else { # error - die "Internal error: unexpected file name $localfname, must end with '.gz' or 
'.xml.gz'\n"; - } -# - } else { # download file from net - print "Downloading the $str file\n"; # download file - $ua = LWP::UserAgent->new; - $request = HTTP::Request->new(GET => "$url"); - $response = $ua->request($request, \&callback); + if ($localfname ne "") { # we already have local copy of requested file + # check both possible cache entry: gzipped or normal + print "Using local copy for the $str file\n"; + if ($localfname =~ /\.xml\.gz$/) { # deflating cached file and return it + $cont = gunzip($localfname); + } elsif ($localfname =~ /\.xml$/) { # just return cached file + open(FD, $localfname) or die "Cannot open $localfname"; + while() { $cont .= $_; } + close(FD); + } else { # error + die "Internal error: unexpected file name $localfname," + ."must end with '.gz' or '.xml.gz'\n"; + } + } else { # download file from net + print "Downloading the $str file\n"; # download file + my $ua = LWP::UserAgent->new; + my $request = HTTP::Request->new(GET => "$url"); + my $response = $ua->request($request, \&callback); - # cache retrieved file to disk -# TODO: inefficent, I haven't yet undestood how to deflate in memory gzipped file, -# without call "gzopen" -# - print "Storing the $str file\n"; - mkdirs($filename); - open(FD, ">".$filename.".tmp") or die "Cannot open $filename.tmp\n"; - print FD $cont; - close(FD); - - # handle cache conversion normal->gzipped or gzipped->normal as user choice - if (($cachemode eq 'normal') and ($resourcetype eq 'normal')) { # cache the file as is - rename "$filename.tmp", $filename; - } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'gzipped')) { # cache the file as is - # and update the $cont variabile with deflated content - rename "$filename.tmp", $filename; - $cont = gunzip($filename); - } elsif (($cachemode eq 'normal') and ($resourcetype eq 'gzipped')) { # deflate cache entry - # and update $cont - open(FD, "> $basefname") or die "cannot open $basefname\n"; - $cont = gunzip($filename.".tmp"); - print FD $cont; - close(FD); - unlink "$filename.tmp"; # delete old gzipped file - } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'normal')) { # compress cache entry - gzip($cont, $basefname.".gz"); - unlink "$filename.tmp"; # delete old uncompressed file - } else { - die "Internal error, unsopported cachemode, resourcetype couple\n"; - } - # $cont now contained uncompressed data + # cache retrieved file to disk +# TODO: inefficent, I haven't yet undestood how to deflate +# in memory gzipped file, without call "gzopen" + print "Storing the $str file\n"; + mkdirs($filename); + open(FD, ">".$filename.".tmp") or die "Cannot open $filename.tmp\n"; + print FD $cont; + close(FD); + + # handle cache conversion normal->gzipped or gzipped->normal as user choice + if (($cachemode eq 'normal') and ($resourcetype eq 'normal')) { + # cache the file as is + rename "$filename.tmp", $filename; + } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'gzipped')) { + # cache the file as is + # and update the $cont variabile with deflated content + rename "$filename.tmp", $filename; + $cont = gunzip($filename); + } elsif (($cachemode eq 'normal') and ($resourcetype eq 'gzipped')) { + # deflate cache entry + # and update $cont + open(FD, "> $basefname") or die "cannot open $basefname\n"; + $cont = gunzip($filename.".tmp"); + print FD $cont; + close(FD); + unlink "$filename.tmp"; # delete old gzipped file + } elsif (($cachemode eq 'gzipped') and ($resourcetype eq 'normal')) { + # compress cache entry + gzip($cont, $basefname.".gz"); + unlink "$filename.tmp"; # 
delete old uncompressed file + } else { + die "Internal error, unsopported cachemode, resourcetype couple\n"; + } + # $cont now contained uncompressed data -# } if ($remove_headers) { $cont =~ s/<\?xml [^?]*\?>//sg; $cont =~ s/]*>//sg; + } else { + $cont =~ s/DOCTYPE (.*) SYSTEM\s+"http:\/\/www.cs.unibo.it\/helm\/dtd\//DOCTYPE $1 SYSTEM "$myownurl\/getdtd?uri=/g; } return $cont; } @@ -408,7 +619,71 @@ sub answer $c->send_response($res); } +sub helm_wget { +#retrieve a file from an url and write it to a temp dir +#used for retrieve resource index from servers + $cont = ""; + my ($prefix, $URL) = @_; + my $ua = LWP::UserAgent->new; + my $request = HTTP::Request->new(GET => "$URL"); + my $response = $ua->request($request, \&callback); + my ($filename) = reverse (split "/", $URL); # get filename part of the URL + open (TEMP, "> $prefix/$filename") + || die "Cannot open temporary file: $prefix/$filename\n"; + print TEMP $cont; + close TEMP; +} + sub update { untie %map; tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664); } + +sub mk_urls_of_uris { +#rebuild $uris_dbm.db fetching resource indexes from servers + my ( + $server, $idxfile, $uri, $url, $comp, $line, + @servers, + %urls_of_uris + ); + + untie %map; + if (stat $uris_dbm.".db") { # remove old db file + unlink($uris_dbm.".db") or + die "cannot unlink old db file: $uris_dbm.db\n"; + } + tie(%urls_of_uris, 'DB_File', $uris_dbm.".db", O_RDWR|O_CREAT, 0664); + + open (SRVS, "< $servers_file") or + die "cannot open servers file: $servers_file\n"; + @servers = ; + close (SRVS); + while ($server = pop @servers) { #cicle on servers in reverse order + print "processing server: $server ...\n"; + chomp $server; + helm_wget($tmp_dir, $server."/".$indexname); #get index + $idxfile = $tmp_dir."/".$indexname; + open (INDEX, "< $idxfile") or + die "cannot open temporary index file: $idxfile\n"; + while ($line = ) { #parse index and add entry to urls_of_uris + chomp $line; + ($uri,$comp) = split /[ \t]+/, $line; + # build url: + if ($comp =~ /gz/) { + $url = $uri . ".xml" . ".gz"; + } else { + $url = $uri . ".xml"; + } + $url =~ s/cic:/$server/; + $url =~ s/theory:/$server/; + $urls_of_uris{$uri} = $url; + } + close INDEX; + die "cannot unlink temporary file: $idxfile\n" + if (unlink $idxfile) != 1; + } + + untie(%urls_of_uris); + tie(%map, 'DB_File', $uris_dbm.".db", O_RDONLY, 0664); +} +
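
Illustrative client sketch, not part of the patch above: it shows how the /version and /ls services introduced by this revision could be queried from Perl, assuming a getter instance listening on the default port 8081 hard-coded in this script (the hostname is a placeholder); "baseuri" and "format" are the CGI parameters read by the /ls handler.

#!/usr/bin/perl
# Illustrative only: a tiny client for the services added in this revision.
# Assumes the getter runs on localhost:8081 (the port hard-coded above).
use strict;
use LWP::UserAgent;
use URI::Escape;

my $getter = "http://localhost:8081";   # placeholder host, default port
my $ua = LWP::UserAgent->new;

# /version returns an HTML page with the getter version string.
my $version = $ua->get("$getter/version");
print $version->content, "\n" if $version->is_success;

# /ls lists directories and objects below a base URI; 'format' may be
# "txt" or "xml", as checked by the handler above.
my $ls = $ua->get("$getter/ls?baseuri=" . uri_escape("cic:") . "&format=txt");
print $ls->content if $ls->is_success;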