function defaultConverter($file, $dataset, $setup = array())
{
  cecho("Importing dataset: " . cecho($setup["datasetURI"], 'UNDERSCORE', TRUE) . "\n\n", 'CYAN');

  /* We have to split the file. The procedure is simple:

       (1) we index the big file into a temporary Virtuoso graph
       (2) we get slices of sliceSize records to index at a time
       (3) we index the record slices using the CRUD: Create/Update/Delete web services
       (4) we delete the temporary graph
  */

  $importDataset = rtrim($setup["datasetURI"], '/') . '/import';

  if(isset($dataset['forceReloadSolrIndex']) && strtolower($dataset['forceReloadSolrIndex']) == 'true')
  {
    $importDataset = $dataset['datasetURI'];
  }

  // Create a connection to the triple store
  $data_ini = parse_ini_file(WebService::$data_ini . "data.ini", TRUE);

  $db = new DBVirtuoso($data_ini["triplestore"]["username"], $data_ini["triplestore"]["password"],
                       $data_ini["triplestore"]["dsn"], $data_ini["triplestore"]["host"]);

  // Check if the dataset exists; if it doesn't, we try to create it
  $datasetRead = new DatasetReadQuery($setup["targetStructWSF"]);

  $datasetRead->excludeMeta()
              ->uri($setup["datasetURI"])
              ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

  if(!$datasetRead->isSuccessful())
  {
    if($datasetRead->error->id == 'WS-DATASET-READ-304')
    {
      // Not existing, so we create it
      $datasetCreate = new DatasetCreateQuery($setup["targetStructWSF"]);

      $datasetCreate->creator(isset($dataset['creator']) ? $dataset['creator'] : '')
                    ->uri($dataset["datasetURI"])
                    ->description(isset($dataset['description']) ? $dataset['description'] : '')
                    ->title(isset($dataset['title']) ? $dataset['title'] : '')
                    ->globalPermissions(new CRUDPermission(FALSE, TRUE, FALSE, FALSE))
                    ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

      if(!$datasetCreate->isSuccessful())
      {
        $debugFile = md5(microtime()) . '.error';
        file_put_contents('/tmp/' . $debugFile, var_export($datasetCreate, TRUE));

        @cecho('Can\'t create the dataset. ' . $datasetCreate->getStatusMessage() .
               $datasetCreate->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');

        exit(1);
      }
      else
      {
        cecho('Dataset not existing, creating it: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');
      }
    }
  }

  if(isset($dataset['forceReloadSolrIndex']) && strtolower($dataset['forceReloadSolrIndex']) == 'true')
  {
    cecho('Reloading dataset in Solr: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');
  }

  // If we want to reload the dataset, we first delete it in structWSF
  if(isset($dataset['forceReload']) && strtolower($dataset['forceReload']) == 'true')
  {
    cecho('Reloading dataset: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');

    // First we get information about the dataset (creator, title, description, etc.)
    $datasetRead = new DatasetReadQuery($setup["targetStructWSF"]);

    $datasetRead->excludeMeta()
                ->uri($setup["datasetURI"])
                ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

    if(!$datasetRead->isSuccessful())
    {
      $debugFile = md5(microtime()) . '.error';
      file_put_contents('/tmp/' . $debugFile, var_export($datasetRead, TRUE));

      @cecho('Can\'t read the dataset for reloading it. ' . $datasetRead->getStatusMessage() .
             $datasetRead->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');

      exit(1);
    }
    else
    {
      cecho('Dataset description read: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');

      $datasetRecord = $datasetRead->getResultset()->getResultset();
      $datasetRecord = $datasetRecord['unspecified'][$setup["datasetURI"]];

      // Then we delete it
      $datasetDelete = new DatasetDeleteQuery($setup["targetStructWSF"]);

      $datasetDelete->uri($setup["datasetURI"])
                    ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

      if(!$datasetDelete->isSuccessful())
      {
        $debugFile = md5(microtime()) . '.error';
        file_put_contents('/tmp/' . $debugFile, var_export($datasetDelete, TRUE));

        @cecho('Can\'t delete the dataset for reloading it. ' . $datasetDelete->getStatusMessage() .
               $datasetDelete->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');

        exit(1);
      }
      else
      {
        cecho('Dataset deleted: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');

        // Finally we re-create it
        $datasetCreate = new DatasetCreateQuery($setup["targetStructWSF"]);

        $datasetCreate->creator($datasetRecord[Namespaces::$dcterms . 'creator'][0]['uri'])
                      ->uri($setup["datasetURI"])
                      ->description($datasetRecord['description'])
                      ->title($datasetRecord['prefLabel'])
                      ->globalPermissions(new CRUDPermission(FALSE, TRUE, FALSE, FALSE))
                      ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

        if(!$datasetCreate->isSuccessful())
        {
          $debugFile = md5(microtime()) . '.error';
          file_put_contents('/tmp/' . $debugFile, var_export($datasetCreate, TRUE));

          @cecho('Can\'t create the dataset for reloading it. ' . $datasetCreate->getStatusMessage() .
                 $datasetCreate->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');

          exit(1);
        }
        else
        {
          cecho('Dataset re-created: ' . $dataset["datasetURI"] . "\n", 'MAGENTA');
        }
      }
    }

    echo "\n";
  }

  // Start by deleting the import graph that may have been left over.
  if(!isset($dataset['forceReloadSolrIndex']) || strtolower($dataset['forceReloadSolrIndex']) == 'false')
  {
    $sqlQuery = "sparql clear graph <" . $importDataset . ">";

    $resultset = $db->query($sqlQuery);

    if(odbc_error())
    {
      cecho("Error: can't delete the graph used for importing the file [" . odbc_errormsg() . "]\n", 'RED');
      return;
    }

    unset($resultset);

    // Import the big file into Virtuoso
    if(stripos($file, ".n3") !== FALSE)
    {
      $sqlQuery = "DB.DBA.TTLP_MT(file_to_string_output('" . $file . "'),'" . $importDataset . "','" . $importDataset . "')";
    }
    else
    {
      $sqlQuery = "DB.DBA.RDF_LOAD_RDFXML_MT(file_to_string_output('" . $file . "'),'" . $importDataset . "','" . $importDataset . "')";
    }

    $resultset = $db->query($sqlQuery);

    if(odbc_error())
    {
      cecho("Error: can't import the file: {$file}, into the triple store [" . odbc_errormsg() . "]\n", 'RED');
      return;
    }

    unset($resultset);
  }

  // Count the number of records to import
  $sparqlQuery = "
    select count(distinct ?s) as ?nb from <" . $importDataset . ">
    where
    {
      ?s a ?o .
    }";

  $resultset = $db->query($db->build_sparql_query($sparqlQuery, array('nb'), FALSE));

  $nb = odbc_result($resultset, 1);

  unset($resultset);

  $nbRecordsDone = 0;

  while($nbRecordsDone < $nb && $nb > 0)
  {
    // Create slices of records
    $sparqlQuery = "
      select ?s ?p ?o (DATATYPE(?o)) as ?otype (LANG(?o)) as ?olang
      where
      {
        {
          select distinct ?s from <" . $importDataset . ">
          where
          {
            ?s a ?type.
          }
          limit " . $setup["sliceSize"] . "
          offset " . $nbRecordsDone . "
        }

        ?s ?p ?o
      }";

    $crudCreates = '';
    $crudUpdates = '';
    $crudDeletes = array();

    $rdfDocumentN3 = "";

    $start = microtime_float();

    $currentSubject = "";
    $subjectDescription = "";

    $data_ini = parse_ini_file(WebService::$data_ini . "data.ini", TRUE);

    $ch = curl_init();

    curl_setopt($ch, CURLOPT_URL, $data_ini['triplestore']['host'] . ":" . $data_ini['triplestore']['port'] . "/sparql/");
    curl_setopt($ch, CURLOPT_HTTPHEADER, array("Accept: application/sparql-results+xml"));
    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_POST, 1);
    curl_setopt($ch, CURLOPT_POSTFIELDS, "default-graph-uri=" . urlencode($importDataset) . "&query=" . urlencode($sparqlQuery) .
                                         "&format=" . urlencode("application/sparql-results+xml") . "&debug=on");
    curl_setopt($ch, CURLOPT_HEADER, TRUE);

    $xml_data = curl_exec($ch);

    if($xml_data === FALSE)
    {
      // The SPARQL endpoint couldn't be queried; report the cURL error for this slice.
      cecho("Error: can't query the SPARQL endpoint [" . curl_error($ch) . "]\n", 'RED');
    }

    // Split the HTTP headers from the SPARQL resultset returned by the endpoint
    $header = substr($xml_data, 0, strpos($xml_data, "\r\n\r\n"));
    $data = substr($xml_data, strpos($xml_data, "\r\n\r\n") + 4, strlen($xml_data) - (strpos($xml_data, "\r\n\r\n") - 4));

    curl_close($ch);

    $resultset = new SimpleXMLElement($data);

    $crudAction = "create";

    foreach($resultset->results->result as $result)
    {
      $s = "";
      $p = "";
      $o = "";
      $olang = "";
      $otype = "";

      foreach($result->binding as $binding)
      {
        switch((string) $binding["name"])
        {
          case "s":
            $s = (string) $binding->uri;
          break;

          case "p":
            $p = (string) $binding->uri;
          break;

          case "o":
            if($binding->uri)
            {
              $o = (string) $binding->uri;
            }
            else
            {
              $o = (string) $binding->literal;
            }
          break;

          case "olang":
            $olang = (string) $binding->literal;
          break;

          case "otype":
            $otype = (string) $binding->uri;
          break;
        }
      }

      if($s != $currentSubject)
      {
        // The subject changed, so commit the previous subject's description to the proper CRUD buffer
        switch(strtolower($crudAction))
        {
          case "update":
            $crudUpdates .= $subjectDescription;
          break;

          case "delete":
            array_push($crudDeletes, $currentSubject);
          break;

          case "create":
          default:
            $crudCreates .= $subjectDescription;
          break;
        }

        $subjectDescription = "";
        $crudAction = "create";
        $currentSubject = $s;
      }

      // Check to see if a "crudAction" property/value has been defined for this record. If not,
      // then we simply consider it as "create"
      if($p != "http://purl.org/ontology/wsf#crudAction")
      {
        if($otype != "" || $olang != "")
        {
          if($olang != "")
          {
            $subjectDescription .= "<{$s}> <{$p}> \"\"\"" . n3Encode($o) . "\"\"\"@{$olang} .\n";
          }
          elseif($otype != 'http://www.w3.org/2001/XMLSchema#string')
          {
            $subjectDescription .= "<{$s}> <{$p}> \"\"\"" . n3Encode($o) . "\"\"\"^^<{$otype}>.\n";
          }
          else
          {
            $subjectDescription .= "<{$s}> <{$p}> \"\"\"" . n3Encode($o) . "\"\"\" .\n";
          }
        }
        else
        {
          $subjectDescription .= "<{$s}> <{$p}> <{$o}> .\n";
        }
      }
      else
      {
        switch(strtolower($o))
        {
          case "update":
            $crudAction = "update";
          break;

          case "delete":
            $crudAction = "delete";
          break;

          case "create":
          default:
            $crudAction = "create";
          break;
        }
      }
    }

    // Add the last record that got processed above
    switch(strtolower($crudAction))
    {
      case "update":
        $crudUpdates .= $subjectDescription;
      break;

      case "delete":
        array_push($crudDeletes, $currentSubject);
      break;

      case "create":
      default:
        $crudCreates .= $subjectDescription;
      break;
    }

    $end = microtime_float();

    cecho('Create N3 file(s): ' . round($end - $start, 3) . ' seconds' . "\n", 'WHITE');

    unset($resultset);

    if($crudCreates != "")
    {
      $crudCreates = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n\n" . $crudCreates;

      $start = microtime_float();

      $crudCreate = new CrudCreateQuery($dataset["targetStructWSF"]);

      $crudCreate->dataset($dataset["datasetURI"])
                 ->documentMimeIsRdfN3()
                 ->document($crudCreates)
                 ->registeredIp('self');

      if(isset($dataset['forceReloadSolrIndex']) && strtolower($dataset['forceReloadSolrIndex']) == 'true')
      {
        $crudCreate->enableSearchIndexationMode();
      }
      else
      {
        $crudCreate->enableFullIndexationMode();
      }

      $crudCreate->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

      if(!$crudCreate->isSuccessful())
      {
        $debugFile = md5(microtime()) . '.error';
        file_put_contents('/tmp/' . $debugFile, var_export($crudCreate, TRUE));

        @cecho('Can\'t commit (CRUD Create) a slice to the target dataset. ' . $crudCreate->getStatusMessage() .
               $crudCreate->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');
      }

      $end = microtime_float();

      if(isset($dataset['forceReloadSolrIndex']) && strtolower($dataset['forceReloadSolrIndex']) == 'true')
      {
        cecho('Records created in Solr: ' . round($end - $start, 3) . ' seconds' . "\n", 'WHITE');
      }
      else
      {
        cecho('Records created in Virtuoso & Solr: ' . round($end - $start, 3) . ' seconds' . "\n", 'WHITE');
      }

      unset($wsq);
    }

    if($crudUpdates != "")
    {
      $crudUpdates = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n\n" . $crudUpdates;

      $start = microtime_float();

      $crudUpdate = new CrudUpdateQuery($dataset["targetStructWSF"]);

      $crudUpdate->dataset($dataset["datasetURI"])
                 ->documentMimeIsRdfN3()
                 ->document($crudUpdates)
                 ->registeredIp('self')
                 ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

      if(!$crudUpdate->isSuccessful())
      {
        $debugFile = md5(microtime()) . '.error';
        file_put_contents('/tmp/' . $debugFile, var_export($crudUpdate, TRUE));

        @cecho('Can\'t commit (CRUD Updates) a slice to the target dataset. ' . $crudUpdate->getStatusMessage() .
               $crudUpdate->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');
      }

      $end = microtime_float();

      cecho('Records updated: ' . round($end - $start, 3) . ' seconds' . "\n", 'WHITE');

      unset($wsq);
    }

    if(count($crudDeletes) > 0)
    {
      $start = microtime_float();

      foreach($crudDeletes as $uri)
      {
        $crudDelete = new CrudDeleteQuery($dataset["targetStructWSF"]);

        $crudDelete->dataset($setup["datasetURI"])
                   ->uri($uri)
                   ->registeredIp('self')
                   ->send(isset($dataset['targetStructWSFQueryExtension']) ? new $dataset['targetStructWSFQueryExtension']() : NULL);

        if(!$crudDelete->isSuccessful())
        {
          $debugFile = md5(microtime()) . '.error';
          file_put_contents('/tmp/' . $debugFile, var_export($crudDelete, TRUE));

          @cecho('Can\'t commit (CRUD Delete) a record to the target dataset. ' . $crudDelete->getStatusMessage() .
                 $crudDelete->getStatusMessageDescription() . "\nDebug file: /tmp/{$debugFile}\n", 'RED');
        }
      }

      $end = microtime_float();

      cecho('Records deleted: ' . round($end - $start, 3) . ' seconds' . "\n", 'WHITE');

      unset($wsq);
    }

    $nbRecordsDone += $setup["sliceSize"];

    cecho("{$nbRecordsDone}/{$nb} records for file: {$file}\n", 'WHITE');
  }

  // Now check which properties and types are used in this dataset, verify which ones exist
  // in the loaded ontologies, and report the ones that are not defined there.
  if(!isset($dataset['forceReloadSolrIndex']) || strtolower($dataset['forceReloadSolrIndex']) == 'false')
  {
    $usedProperties = array();
    $usedTypes = array();

    // Get used properties
    $sparqlQuery = "
      select distinct ?p from <" . $importDataset . ">
      where
      {
        ?s ?p ?o .
      }";

    $data_ini = parse_ini_file(WebService::$data_ini . "data.ini", TRUE);

    $ch = curl_init();

    curl_setopt($ch, CURLOPT_URL, $data_ini['triplestore']['host'] . ":" . $data_ini['triplestore']['port'] . "/sparql/");
    curl_setopt($ch, CURLOPT_HTTPHEADER, array("Accept: application/sparql-results+xml"));
    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_POST, 1);
    curl_setopt($ch, CURLOPT_POSTFIELDS, "default-graph-uri=" . urlencode($importDataset) . "&query=" . urlencode($sparqlQuery) .
                                         "&format=" . urlencode("application/sparql-results+xml") . "&debug=on");
    curl_setopt($ch, CURLOPT_HEADER, TRUE);

    $xml_data = curl_exec($ch);

    $header = substr($xml_data, 0, strpos($xml_data, "\r\n\r\n"));
    $data = substr($xml_data, strpos($xml_data, "\r\n\r\n") + 4, strlen($xml_data) - (strpos($xml_data, "\r\n\r\n") - 4));

    curl_close($ch);

    $resultset = new SimpleXMLElement($data);

    foreach($resultset->results->result as $result)
    {
      foreach($result->binding as $binding)
      {
        switch((string) $binding["name"])
        {
          case "p":
            $p = (string) $binding->uri;

            if(!in_array($p, $usedProperties))
            {
              array_push($usedProperties, $p);
            }
          break;
        }
      }
    }

    // Get used types
    $sparqlQuery = "
      select distinct ?o from <" . $importDataset . ">
      where
      {
        ?s a ?o .
      }";

    $data_ini = parse_ini_file(WebService::$data_ini . "data.ini", TRUE);

    $ch = curl_init();

    curl_setopt($ch, CURLOPT_URL, $data_ini['triplestore']['host'] . ":" . $data_ini['triplestore']['port'] . "/sparql/");
    curl_setopt($ch, CURLOPT_HTTPHEADER, array("Accept: application/sparql-results+xml"));
    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_POST, 1);
    curl_setopt($ch, CURLOPT_POSTFIELDS, "default-graph-uri=" . urlencode($importDataset) . "&query=" . urlencode($sparqlQuery) .
                                         "&format=" . urlencode("application/sparql-results+xml") . "&debug=on");
    curl_setopt($ch, CURLOPT_HEADER, TRUE);

    $xml_data = curl_exec($ch);

    $header = substr($xml_data, 0, strpos($xml_data, "\r\n\r\n"));
    $data = substr($xml_data, strpos($xml_data, "\r\n\r\n") + 4, strlen($xml_data) - (strpos($xml_data, "\r\n\r\n") - 4));

    curl_close($ch);

    $resultset = new SimpleXMLElement($data);

    foreach($resultset->results->result as $result)
    {
      foreach($result->binding as $binding)
      {
        switch((string) $binding["name"])
        {
          case "o":
            $o = (string) $binding->uri;

            if(!in_array($o, $usedTypes))
            {
              array_push($usedTypes, $o);
            }
          break;
        }
      }
    }

    // Now check to make sure that all the predicates and types are in the ontological structure.
    $undefinedPredicates = array();
    $undefinedTypes = array();

    $filename = $setup["ontologiesStructureFiles"] . 'classHierarchySerialized.srz';

    $f = fopen($filename, "r");
    $classHierarchy = fread($f, filesize($filename));
    $classHierarchy = unserialize($classHierarchy);
    fclose($f);

    $filename = $setup["ontologiesStructureFiles"] . 'propertyHierarchySerialized.srz';

    $f = fopen($filename, "r");
    $propertyHierarchy = fread($f, filesize($filename));
    $propertyHierarchy = unserialize($propertyHierarchy);
    fclose($f);

    foreach($usedProperties as $usedPredicate)
    {
      $found = FALSE;

      foreach($propertyHierarchy->properties as $property)
      {
        if($property->name == $usedPredicate)
        {
          $found = TRUE;
          break;
        }
      }

      if($found === FALSE)
      {
        array_push($undefinedPredicates, $usedPredicate);
      }
    }

    foreach($usedTypes as $type)
    {
      $found = FALSE;

      foreach($classHierarchy->classes as $class)
      {
        if($class->name == $type)
        {
          $found = TRUE;
          break;
        }
      }

      if($found === FALSE)
      {
        array_push($undefinedTypes, $type);
      }
    }

    // Strip the path and the 3-character file extension (e.g. ".n3") to name the log files
    $filename = substr($file, strrpos($file, "/") + 1);
    $filename = substr($filename, 0, strlen($filename) - 3);

    file_put_contents($setup["missingVocabulary"] . $filename . ".undefined.types.log", implode("\n", $undefinedTypes));
    file_put_contents($setup["missingVocabulary"] . $filename . ".undefined.predicates.log", implode("\n", $undefinedPredicates));

    // Now delete the graph we used to import the file
    $sqlQuery = "sparql clear graph <" . $importDataset . ">";

    $resultset = $db->query($sqlQuery);

    if(odbc_error())
    {
      cecho("Error: can't delete the graph used for importing the file [" . odbc_errormsg() . "]\n", 'RED');
      return;
    }

    unset($resultset);
  }

  $db->close();

  echo "\n";
}
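// Example usage (a minimal sketch, not part of the original tool): one way defaultConverter()
// could be invoked. All paths, URIs and values below are hypothetical placeholders; the array
// keys are the ones the function reads above.
$setup = array(
  "datasetURI"               => "http://localhost/wsf/datasets/my-dataset/", // hypothetical target dataset URI
  "targetStructWSF"          => "http://localhost/ws/",                      // hypothetical structWSF endpoint
  "sliceSize"                => 100,                                         // records committed per CRUD slice
  "ontologiesStructureFiles" => "/data/ontologies/structure/",               // folder with the *Serialized.srz files
  "missingVocabulary"        => "/tmp/missing-vocabulary/"                   // folder for the *.undefined.*.log files
);

$dataset = array(
  "datasetURI"           => $setup["datasetURI"],
  "targetStructWSF"      => $setup["targetStructWSF"],
  "title"                => "My dataset",
  "description"          => "Records imported with the default converter",
  "creator"              => "http://localhost/people/admin/",                // hypothetical creator URI
  "forceReload"          => "false",
  "forceReloadSolrIndex" => "false"
);

defaultConverter("/data/to-import/my-dataset.n3", $dataset, $setup);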
include_once "../../SplClassLoader.php";

use StructuredDynamics\structwsf\ws\framework\DBVirtuoso;
use StructuredDynamics\structwsf\ws\framework\WebService;

/* Set this variable to TRUE if you want to make your statistics available publicly.
   If this variable is set to TRUE, it means that anybody will be able to get statistics
   about your structWSF instance by querying the script at this address:

     http://[your-web-site]/ws/statisticsBroker.php

   If you don't want to, you can simply remove this file from your web server. */
$enableStatisticsBroadcast = FALSE;

if($enableStatisticsBroadcast)
{
  $data_ini = parse_ini_file(WebService::$data_ini . "data.ini", TRUE);

  $db = new DBVirtuoso($data_ini["triplestore"]["username"], $data_ini["triplestore"]["password"],
                       $data_ini["triplestore"]["dsn"], $data_ini["triplestore"]["host"]);

  // Count the number of records indexed across all the datasets of the instance
  $sparql = "
    select count(?record) as ?nb_records
    from <" . $data_ini["datasets"]["wsf_graph"] . "datasets/>
    where
    {
      ?dataset a <http://rdfs.org/ns/void#Dataset>.

      graph ?dataset
      {
        ?record a ?type.
      }
    }";

  $resultset = @$db->query($db->build_sparql_query(str_replace(array("\n", "\r", "\t"), " ", $sparql), array(), FALSE));

  $nbRecords = "0";

  if(!odbc_error())
  {
    $nbRecords = odbc_result($resultset, 1);
  }

  // Count the number of datasets hosted on the instance
  $sparql = "
    select count(?dataset) as ?nb_datasets
    from <" . $data_ini["datasets"]["wsf_graph"] . "datasets/>
    where
    {
      ?dataset a <http://rdfs.org/ns/void#Dataset>.
    }";

  $resultset = @$db->query($db->build_sparql_query(str_replace(array("\n", "\r", "\t"), " ", $sparql), array(), FALSE));

  $nbDatasets = "0";

  if(!odbc_error())
  {
    $nbDatasets = odbc_result($resultset, 1);
  }

  // Count the number of triples saved across all the datasets of the instance
  $sparql = "
    select count(?o) as ?nb_triples
    from <" . $data_ini["datasets"]["wsf_graph"] . "datasets/>
    where
    {
      ?dataset a <http://rdfs.org/ns/void#Dataset>.

      graph ?dataset
      {
        ?record ?p ?o.
      }
    }";

  $resultset = @$db->query($db->build_sparql_query(str_replace(array("\n", "\r", "\t"), " ", $sparql), array(), FALSE));

  $nbTriples = "0";
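  // ------------------------------------------------------------------------------------------
  // Example (hypothetical, for illustration only): once $enableStatisticsBroadcast is set to
  // TRUE and this script is deployed, any HTTP client can fetch the statistics at the address
  // mentioned in the comment above. The URL below is a placeholder, and the response format
  // depends on how this script serializes $nbRecords, $nbDatasets and $nbTriples.
  //
  //   $stats = file_get_contents('http://[your-web-site]/ws/statisticsBroker.php');
  //   echo $stats;
  // ------------------------------------------------------------------------------------------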
  break;

  case "create_tracker_ws":
    $rdf = "@prefix wsf: <http://purl.org/ontology/wsf#> .
            @prefix void: <http://rdfs.org/ns/void#> .
            @prefix dcterms: <http://purl.org/dc/terms/> .
            @prefix foaf: <http://xmlns.com/foaf/0.1/> .
            @prefix owl: <http://www.w3.org/2002/07/owl#> .
            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
            @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .

            <{$server_address}/wsf/> wsf:hasWebService <{$server_address}/wsf/ws/tracker/create/> .

            <{$server_address}/wsf/ws/tracker/create/> rdf:type wsf:WebService ;
              dcterms:title \"Tracker Create web service\" ;
              wsf:endpoint \"\"\"{$server_address}/ws/tracker/create/\"\"\";
              wsf:hasCrudUsage <{$server_address}/wsf/usage/tracker/create/> .

            <{$server_address}/wsf/usage/tracker/create/> rdf:type wsf:CrudUsage ;
              wsf:create \"True\" ;
              wsf:read \"False\" ;
              wsf:update \"False\" ;
              wsf:delete \"False\" .

            <{$server_address}/wsf/access/5b2b633495a58612b63724ef71729ea6102> rdf:type wsf:Access ;
              dcterms:description \"\"\"This access is used to enable the authentication registrar web service to register new web services to the WSF\"\"\";
              wsf:registeredIP \"{$wsf_local_ip}\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/tracker/create/> ;
              wsf:datasetAccess <{$server_address}/wsf/track/> .";
  break;

  case "create_world_accessible_ontology":
    $rdf = "@prefix wsf: <http://purl.org/ontology/wsf#> .
            @prefix void: <http://rdfs.org/ns/void#> .
            @prefix dcterms: <http://purl.org/dc/terms/> .
            @prefix foaf: <http://xmlns.com/foaf/0.1/> .
            @prefix owl: <http://www.w3.org/2002/07/owl#> .
            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
            @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .

            <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/ontology/0.0.0.0/") . "> rdf:type wsf:Access ;
              wsf:registeredIP \"0.0.0.0\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/create/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/delete/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/read/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/update/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .";
  break;

  case "create_user_full_access":
    $rdf = "@prefix wsf: <http://purl.org/ontology/wsf#> .
            @prefix void: <http://rdfs.org/ns/void#> .
            @prefix dcterms: <http://purl.org/dc/terms/> .
            @prefix foaf: <http://xmlns.com/foaf/0.1/> .
            @prefix owl: <http://www.w3.org/2002/07/owl#> .
            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
            @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .

            <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/" . $user_address) . "> rdf:type wsf:Access ;
              wsf:registeredIP \"{$user_address}\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/auth/lister/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/auth/registrar/ws/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/auth/registrar/access/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/create/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/read/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/delete/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/update/> ;
              wsf:datasetAccess <{$server_address}/wsf/> .

            <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/ontologies/" . $user_address) . "> rdf:type wsf:Access ;
              wsf:registeredIP \"{$user_address}\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/create/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/delete/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/read/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/update/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .

            <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/datasets/" . $user_address) . "> rdf:type wsf:Access ;
              wsf:registeredIP \"{$user_address}\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/create/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/read/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/delete/> ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/dataset/update/> ;
              wsf:datasetAccess <{$server_address}/wsf/datasets/> .

            <{$server_address}/wsf/ws/auth/validator/> rdf:type wsf:AuthenticationWebService ;
              wsf:hasAccess <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/datasets/" . $user_address) . "> ;
              wsf:hasAccess <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/ontologies/" . $user_address) . "> ;
              wsf:hasAccess <{$server_address}/wsf/access/" . md5("{$server_address}/wsf/" . $user_address) . "> .";
  break;

  case "update_wsf_for_ontology_ws":
    $rdf = "@prefix wsf: <http://purl.org/ontology/wsf#> .
            @prefix void: <http://rdfs.org/ns/void#> .
            @prefix dcterms: <http://purl.org/dc/terms/> .
            @prefix foaf: <http://xmlns.com/foaf/0.1/> .
            @prefix owl: <http://www.w3.org/2002/07/owl#> .
            @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
            @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
            @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .

            <{$server_address}/wsf/> rdf:type wsf:WebServiceFramework ;
              wsf:hasWebService <{$server_address}/wsf/ws/ontology/create/> ;
              wsf:hasWebService <{$server_address}/wsf/ws/ontology/delete/> ;
              wsf:hasWebService <{$server_address}/wsf/ws/ontology/read/> ;
              wsf:hasWebService <{$server_address}/wsf/ws/ontology/update/>.

            <{$server_address}/wsf/access/459f32962858ffa9677a27c4612cb875> rdf:type wsf:Access ;
              dcterms:description \"\"\"This access is used to enable the admin of the WSF to generate and manage ontologies of the WSF\"\"\";
              wsf:registeredIP \"127.0.0.1\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/create/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .

            <{$server_address}/wsf/access/459f32962858ffa9677a27c4612cb876> rdf:type wsf:Access ;
              dcterms:description \"\"\"This access is used to enable the admin of the WSF to generate and manage ontologies of the WSF\"\"\";
              wsf:registeredIP \"127.0.0.1\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/delete/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .

            <{$server_address}/wsf/access/459f32962858ffa9677a27c4612cb877> rdf:type wsf:Access ;
              dcterms:description \"\"\"This access is used to enable the admin of the WSF to generate and manage ontologies of the WSF\"\"\";
              wsf:registeredIP \"127.0.0.1\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/read/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .

            <{$server_address}/wsf/access/459f32962858ffa9677a27c4612cb878> rdf:type wsf:Access ;
              dcterms:description \"\"\"This access is used to enable the admin of the WSF to generate and manage ontologies of the WSF\"\"\";
              wsf:registeredIP \"127.0.0.1\" ;
              wsf:create \"True\" ;
              wsf:read \"True\" ;
              wsf:update \"True\" ;
              wsf:delete \"True\" ;
              wsf:webServiceAccess <{$server_address}/wsf/ws/ontology/update/> ;
              wsf:datasetAccess <{$server_address}/wsf/ontologies/> .

            <{$server_address}/wsf/ws/ontology/create/> rdf:type wsf:WebService ;
              dcterms:title \"Ontology Create web service\" ;
              wsf:endpoint \"\"\"{$server_address}/ws/ontology/create/\"\"\";
              wsf:hasCrudUsage <{$server_address}/wsf/usage/ontology/create/> .

            <{$server_address}/wsf/usage/ontology/create/> rdf:type wsf:CrudUsage ;
              wsf:create \"True\" ;
              wsf:read \"False\" ;
              wsf:update \"False\" ;
              wsf:delete \"False\" .

            <{$server_address}/wsf/ws/ontology/read/> rdf:type wsf:WebService ;
              dcterms:title \"Ontology Read web service\" ;
              wsf:endpoint \"\"\"{$server_address}/ws/ontology/read/\"\"\";
              wsf:hasCrudUsage <{$server_address}/wsf/usage/ontology/read/> .

            <{$server_address}/wsf/usage/ontology/read/> rdf:type wsf:CrudUsage ;
              wsf:create \"False\" ;
              wsf:read \"True\" ;
              wsf:update \"False\" ;
              wsf:delete \"False\" .

            <{$server_address}/wsf/ws/ontology/update/> rdf:type wsf:WebService ;
              dcterms:title \"Ontology Update web service\" ;
              wsf:endpoint \"\"\"{$server_address}/ws/ontology/update/\"\"\";
              wsf:hasCrudUsage <{$server_address}/wsf/usage/ontology/update/> .

            <{$server_address}/wsf/usage/ontology/update/> rdf:type wsf:CrudUsage ;
              wsf:create \"False\" ;
              wsf:read \"False\" ;
              wsf:update \"True\" ;
              wsf:delete \"False\" .

            <{$server_address}/wsf/ws/ontology/delete/> rdf:type wsf:WebService ;
              dcterms:title \"Ontology Delete web service\" ;
              wsf:endpoint \"\"\"{$server_address}/ws/ontology/delete/\"\"\";
              wsf:hasCrudUsage <{$server_address}/wsf/usage/ontology/delete/> .

            <{$server_address}/wsf/usage/ontology/delete/> rdf:type wsf:CrudUsage ;
              wsf:create \"False\" ;
              wsf:read \"False\" ;
              wsf:update \"False\" ;
              wsf:delete \"True\" .";
  break;

  case "reset":
    $db = new DBVirtuoso($username, $password, $dsn, $host);

    $query = "exst('select * from (sparql clear graph <{$server_address}/wsf/ontologies/>) sub')";
    $db->query($query);

    $query = "exst('select * from (sparql clear graph <{$server_address}/wsf/datasets/>) sub')";
    $db->query($query);

    $query = "exst('select * from (sparql clear graph <{$server_address}/wsf/>) sub')";
    $db->query($query);

    $db->close();

    echo "WSF instance reset";

    return;
  break;
}

$db = new DBVirtuoso($username, $password, $dsn, $host);

// Load the N3 description built above into the WSF graph (single quotes are escaped for the SQL string literal)
$db->query("DB.DBA.TTLP_MT('" . preg_replace("/\\\\*'/", "\\\\'", $rdf) . "', '{$server_address}/wsf/', '{$server_address}/wsf/')");

$db->close();

echo "Done";

//@}
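// Example configuration (a minimal sketch, not part of the original installer): the switch above
// assumes the following variables are defined earlier in the script. The values shown here are
// hypothetical placeholders for a local Virtuoso/structWSF install; adjust them to your setup.
/*
$server_address = "http://localhost";   // base URL of the WSF instance
$user_address   = "127.0.0.1";          // IP registered for full user access
$wsf_local_ip   = "127.0.0.1";          // local IP used by the WSF web services
$username       = "dba";                // Virtuoso username (hypothetical)
$password       = "dba";                // Virtuoso password (hypothetical)
$dsn            = "structwsf";          // ODBC DSN of the Virtuoso instance (hypothetical)
$host           = "localhost";          // Virtuoso host
*/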