Example #1
 private function importOntology()
 {
     $tmp = sys_get_temp_dir() . '/OntoKiWiQueue/';
     if (file_exists($tmp)) {
         $this->recursiveRemoveDirectory($tmp);
     }
     mkdir($tmp);
     if (array_key_exists('OntoKiWi', $GLOBALS['wgDebugLogGroups'])) {
         $log = $GLOBALS['wgDebugLogGroups']['OntoKiWi'];
     } elseif ($GLOBALS['wgDebugLogFile'] && $GLOBALS['wgDebugLogFile'] != '') {
         $log = $GLOBALS['wgDebugLogFile'];
     } else {
         $log = sys_get_temp_dir() . '/mediawikiimportfromtext.log';
     }
     $request = $this->getRequest();
     $options = $request->getValues();
     $ontAbbr = $options['ontology_abbrv'];
     $graph = $options['ontology_graph_url'];
     $fullname = $options['ontology_fullname'];
     $id = strtolower($ontAbbr);
     $sql = new SQLStore(wfGetDB(DB_MASTER));
     $status = $sql->insertOntology($id, $options);
     if ($status) {
         wfDebugLog('OntoKiWi', sprintf('OKW\\Special\\ImportOntology: valid ontology: queued pages will be stored in %s', $tmp));
         $ontology = new OntologyData($ontAbbr);
         $rdf = $ontology->getRDF();
         $this->importObjectProperty($id, $options, $sql, $rdf, $graph);
         $this->importAnnotationProperty($id, $options, $sql, $rdf, $graph);
         $ontID = $sql->getOntologyID($ontAbbr);
         $annotationMagic = $sql->getAnnotationMagicWords($ontID);
         $objectMagic = $sql->getObjectMagicWords($ontID);
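         // Lookup tables mapping magic words, IRIs, and IDs to display names for Manchester-syntax rendering.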
         $objects = array();
         foreach ($objectMagic as $magic => $object) {
             $objects[$magic] = $magic;
             $objects[$object['iri']] = $magic;
             $objects[$object['id']] = $magic;
         }
         $operations = array();
         foreach ($GLOBALS['okwRDFConfig']['restriction']['operation'] as $operation => $operationIRI) {
             $operations[$operationIRI] = $operation;
             $operations[$operation] = $operation;
         }
         $types = array();
         foreach ($GLOBALS['okwRDFConfig']['restriction']['type'] as $type => $typeIRI) {
             $types[$typeIRI] = $type;
             $types[$type] = $type;
         }
         $count = $rdf->countAllClass($graph);
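         // Very large ontologies: scrape class IRIs straight from the OWL/XML source instead of enumerating them via SPARQL.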
         if ($count >= 10000) {
             $source = file_get_contents($options['source']);
             preg_match_all('/xmlns:([\\w]*)[\\s]?=[\\s]?"([^"]*)"[\\s]?/', $source, $matches, PREG_SET_ORDER);
             $prefix = array();
             foreach ($matches as $match) {
                 $prefix[$match[1]] = $match[2];
             }
             if (preg_match_all('/[\\s]?<owl:Class[\\s]?rdf:about[\\s]?=[\\s]?"(&([\\w]*);)?([^"]*)"[\\s]?[\\/]?>/', $source, $matches, PREG_SET_ORDER)) {
                 $classes = array();
                 foreach ($matches as $match) {
                     if ($match[1] != '' && $match[2] != '') {
                         $classes[] = $prefix[$match[2]] . $match[3];
                     } else {
                         $classes[] = $match[3];
                     }
                 }
             } else {
                 $sql->deleteOntology($id);
                 return array(self::EXCESSIVE_CLASS, null);
             }
         } elseif ($count == 0) {
             $sql->deleteOntology($id);
             return array(self::NO_CLASS_FOUND, null);
         } else {
             $classes = $rdf->getAllClass($graph);
         }
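         // Queue the ontology's category page plus one page of wikitext per class; a background maintenance job creates them later.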
         $filename = "Category:{$ontAbbr}";
         file_put_contents($tmp . $filename, $fullname);
         $output = array();
         foreach ($classes as $index => $class) {
             if ($class == $GLOBALS['okwRDFConfig']['Thing']) {
                 continue;
             }
             $term = $ontology->parseTermByIRI($class);
             $id = $term->id;
             $filename = "{$ontAbbr}:{$id}";
             if (!OntologyValidator::isValidTitleText($filename)) {
                 throw new MWException("Unable to process term: {$id}. Please check the correctness of the Ontology");
             }
             $related = $ontology->parseTermRelated($term);
             $wikiText = "[[Category:{$ontAbbr}]]";
             $title = Title::newFromText($filename);
             if ($title->exists()) {
                 continue;
             }
             $output[$class] = $term->label . " ({$ontAbbr}:{$id})";
             $annotations = array();
             foreach ($annotationMagic as $name => $value) {
                 if (array_key_exists($value['iri'], $related)) {
                     $annotations[$value['iri']] = $rdf->getObject($graph, $term->iri, $value['iri']);
                 }
             }
             list($wikiText, $annotations) = AnnotationParser::reformatWikiText($wikiText, $annotations);
             $axiomData = $rdf->getAxiom($graph, $term->iri);
             $axioms = array();
             foreach ($axiomData['subclassof'] as $data) {
                 $axiom = array();
                 $axiom['type'] = 'subclassof';
                 $axiom['text'] = ManchesterSyntaxHandler::writeRecursiveManchester($data, array_merge($objects, $operations, $types));
                 $axioms[] = $axiom;
             }
             foreach ($axiomData['equivalent'] as $data) {
                 $axiom = array();
                 $axiom['type'] = 'equivalent';
                 $axiom['text'] = ManchesterSyntaxHandler::writeRecursiveManchester($data, array_merge($objects, $operations, $types));
                 $axioms[] = $axiom;
             }
             list($wikiText, $axioms) = AxiomParser::reformatWikiText($ontAbbr, $wikiText, $axioms, true);
             $supClasses = array_keys($rdf->getSupClass($graph, $term->iri));
             if (empty($supClasses)) {
                 $supClasses = array($GLOBALS['okwRDFConfig']['Thing']);
             }
             list($wikiText, $supClasses) = HierarchyParser::reformatWikiText($ontAbbr, $wikiText, $supClasses);
             $common = array('label' => $term->label); // rebuilt per term so values from a previous iteration do not carry over
             list($wikiText, $common) = CommonParser::reformatWikiText($wikiText, $common);
             file_put_contents($tmp . $filename, $wikiText);
         }
         wfDebugLog('OntoKiWi', 'OKW\\Special\\ImportOntology: ontology SPARQL query completes, pages will be created using maintenance scripts in the background');
         $cmd = "( cd {$tmp} && for file in *; do php " . $GLOBALS['IP'] . "/maintenance/edit.php -u bot \$file < \$file; done && rm -R {$tmp} ) > {$log} 2>&1 &";
         exec($cmd, $output, $return);
         return array(self::SUCCESS, $output);
     } else {
         return array(self::INVALID_SPARQL, null);
     }
 }
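For reference, the large-ontology branch above (taken when the class count reaches 10000) can be exercised on its own. The sketch below is a minimal, standalone illustration: it reuses the two regular expressions from importOntology() on a made-up two-class OWL/XML snippet, first mapping xmlns prefixes to base IRIs and then expanding every owl:Class rdf:about value into a full class IRI. Everything except the regular expressions is hypothetical.

    // Standalone sketch of the regex fallback; the sample document and IRIs are invented.
    $source = '<rdf:RDF xmlns:obo="http://purl.obolibrary.org/obo/" xmlns:owl="http://www.w3.org/2002/07/owl#">'
        . '<owl:Class rdf:about="&obo;EXAMPLE_0000001"/>'
        . '<owl:Class rdf:about="http://example.org/onto#Term2"/>'
        . '</rdf:RDF>';

    // Map each xmlns prefix to its base IRI.
    preg_match_all('/xmlns:([\\w]*)[\\s]?=[\\s]?"([^"]*)"[\\s]?/', $source, $matches, PREG_SET_ORDER);
    $prefix = array();
    foreach ($matches as $match) {
        $prefix[$match[1]] = $match[2];
    }

    // Collect every owl:Class declaration, expanding entity-style references such as &obo;.
    $classes = array();
    if (preg_match_all('/[\\s]?<owl:Class[\\s]?rdf:about[\\s]?=[\\s]?"(&([\\w]*);)?([^"]*)"[\\s]?[\\/]?>/', $source, $matches, PREG_SET_ORDER)) {
        foreach ($matches as $match) {
            $classes[] = ($match[1] != '' && $match[2] != '') ? $prefix[$match[2]] . $match[3] : $match[3];
        }
    }
    // $classes now holds the expanded IRIs:
    //   http://purl.obolibrary.org/obo/EXAMPLE_0000001
    //   http://example.org/onto#Term2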
Example #2
    public static function reformatWikiText($ontAbbr, $wikiText, $validAxioms = null, $newWiki = false)
    {
        preg_match_all('/{{\\s*[#]?Axiom\\s*:[\\s]*[^|]([^}]*)}}/', $wikiText, $matches, PREG_SET_ORDER);
        $options = array();
        $valids = array();
        $invalids = array();
        $text = ''; // initialized here so the final concatenation works even when no axiom block is emitted
        if (!empty($matches) || !is_null($validAxioms)) {
            $ontology = new OntologyData($ontAbbr);
            $sql = new SQLStore(wfGetDB(DB_SLAVE));
            $magics = $sql->getObjectMagicWords($ontAbbr);
            $objects = array();
            foreach ($magics as $magic => $object) {
                $objects[$magic] = $object['iri'];
                $objects[$object['iri']] = $object['iri'];
                $objects[$object['id']] = $object['iri'];
            }
            $operations = $GLOBALS['okwRDFConfig']['restriction']['operation'];
            $types = $GLOBALS['okwRDFConfig']['restriction']['type'];
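            // Extract the parameters of each {{ #Axiom: ... }} block already present in the wikitext.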
            foreach ($matches as $match) {
                preg_match_all('/[\\s]*[|]([^|]*)/', $match[1], $params, PREG_PATTERN_ORDER);
                list($option, $valid, $invalid) = self::extractAxiom($params[1], $ontology, $objects, $operations, $types, $newWiki);
                $options = array_merge($options, $option);
                $valids = array_merge($valids, $valid);
                $invalids = array_merge($invalids, $invalid);
            }
        }
        if (!is_null($validAxioms)) {
            $valids = array();
            $output = array();
            foreach ($validAxioms as $value) {
                $index = uniqid();
                $options[$index][] = $value['type'];
                $options[$index][] = $value['text'];
                list($valid, $data) = ManchesterSyntaxHandler::parseRecursiveManchester(true, $value['text'], $ontology, $objects, $operations, $types, $newWiki);
                if ($valid) {
                    $valids[$index]['type'] = $value['type'];
                    $valids[$index]['text'] = $value['text'];
                    $valids[$index]['data'] = $data;
                } else {
                    $invalids[$index] = self::ERROR_INVALID_SYNTAX;
                }
            }
        } else {
            $output = array();
            foreach ($valids as $axiom) {
                $output[$axiom['type']][] = $axiom['data'];
            }
        }
        #TODO: Duplication checking
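        // Rebuild a normalized {{ #Axiom: ... }} block: one "| type = expression" line per valid axiom, plus flagged lines for invalid ones.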
        if (!empty($valids) || !empty($invalids)) {
            $text = <<<END
{{ #Axiom: <!-- Auto formatted ontology axiom wikitext -->
END;
            foreach ($valids as $index => $axiom) {
                $type = $axiom['type'];
                $value = $axiom['text'];
                $text .= <<<END

| {$type} = {$value}
END;
            }
            foreach ($invalids as $index => $error) {
                $msg = self::getErrorMessage($error);
                if (sizeof($options[$index]) == 1) {
                    $param = $options[$index][0];
                    $text .= <<<END

| {$msg} {$param}
END;
                } else {
                    $name = $options[$index][0];
                    $value = $options[$index][1];
                    $text .= <<<END

| {$msg} {$name} = {$value}
END;
                }
            }
            $text .= <<<END

}}

END;
        }
        $text .= preg_replace('/([\\s]?{{\\s*[#]?Axiom\\s*:[\\s]*[^|][^}]*}}[\\s]?)/', '', $wikiText);
        return array($text, $output);
    }
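As a usage note, this is presumably the AxiomParser::reformatWikiText() method that Example #1 calls with newWiki set to true and a prebuilt axiom list. The hypothetical call below uses invented term identifiers and assumes both expressions resolve against the ontology's registered object properties; the output shape follows the heredoc templates above.

    // Hypothetical call pattern; the ontology abbreviation, IDs, and axiom text are illustrative only.
    $axioms = array(
        array('type' => 'subclassof', 'text' => "'part of' some ONT:0000001"),
        array('type' => 'equivalent', 'text' => "ONT:0000002 and ('has part' some ONT:0000003)"),
    );
    list($wikiText, $parsed) = AxiomParser::reformatWikiText('ONT', '[[Category:ONT]]', $axioms, true);
    // If both expressions parse as valid Manchester syntax, $wikiText begins with an
    // auto-formatted block of the form:
    //
    //   {{ #Axiom: <!-- Auto formatted ontology axiom wikitext -->
    //   | subclassof = 'part of' some ONT:0000001
    //   | equivalent = ONT:0000002 and ('has part' some ONT:0000003)
    //   }}
    //
    // followed by the original wikitext with any pre-existing {{ #Axiom: ... }} markup stripped.
    // Note that when a prebuilt axiom list is supplied, the second return value ($parsed) stays empty.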