Example #1
 // Fetch the current wikitext of $page (or of revision $revid when given).
 // Retries up to five times, returns false if the page is missing, and stores
 // the revision timestamp in $this->ecTimestamp when edit-conflict detection is requested.
 public function getpage($page, $revid = null, $detectEditConflict = false)
 {
     $append = '';
     if ($revid != null) {
         $append = '&rvstartid=' . $revid;
     }
     for ($i = 0; $i < 5; $i++) {
         $x = parent::query('?action=query&format=json&prop=revisions&titles=' . urlencode($page) . '&rvlimit=1&rvprop=content|timestamp' . $append);
         if (isset($x['query']['pages'])) {
             foreach ($x['query']['pages'] as $ret) {
                 if (isset($ret['revisions'][0]['*'])) {
                     if ($detectEditConflict) {
                         $this->ecTimestamp = $ret['revisions'][0]['timestamp'];
                     }
                     return $ret['revisions'][0]['*'];
                 } elseif (isset($ret['missing'])) {
                     return false;
                 }
             }
         }
         sleep(1);
     }
     return $x;
 }
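A minimal usage sketch for the method above, assuming the wikipedia class from botclasses.php is loaded and that the ecTimestamp property it sets is readable from outside the class (the page title here is just an illustration):
require_once 'botclasses.php';
$wiki = new wikipedia();
// Fetch the live wikitext and record the revision timestamp for a later edit-conflict check.
$text = $wiki->getpage('Wikipedia:Sandbox', null, true);
if ($text === false) {
    echo "Page does not exist.\n";
} else {
    echo "Fetched " . strlen($text) . " bytes, revision timestamp " . $wiki->ecTimestamp . "\n";
}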
Example #2
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *   
 *  Developers (add yourself here if you worked on the code):
 *    James Hare - [[User:Harej]] - Wrote everything
 **/
ini_set("display_errors", 1);
error_reporting(E_ALL ^ E_NOTICE);
$botuser = '******';
require_once 'botclasses.php';
// Botclasses.php was written by User:Chris_G and is available under the GNU General Public License
require_once 'harejpass.php';
echo "Logging in...";
$objwiki = new wikipedia();
$objwiki->login($botuser, $botpass);
echo " done.\n";
function nbprocess($pagearray, $color, $submissionpage)
{
    global $objwiki;
    $noticeboardlisting = "";
    foreach ($pagearray as $topic => $page) {
        $raw = $objwiki->query("?action=parse&page=" . urlencode($page) . "&prop=sections&format=json");
        $sections = array();
        $seccount = 0;
        for ($i = 0; $i < count($raw["parse"]["sections"]); $i++) {
            if ($raw["parse"]["sections"][$i]["level"] == 2) {
                $sections[$seccount] = $raw["parse"]["sections"][$i]["line"];
                $seccount++;
            }
Example #3
<?php

set_include_path(get_include_path() . PATH_SEPARATOR . $_SERVER['DOCUMENT_ROOT']);
include_once 'wikipediaapi/botclasses.php';
//$start = new http();
$lifeobj = new wikipedia();
$page = 'Yoga';
$wwords = $lifeobj->getpage($page); // capture the returned wikitext before echoing it
echo $wwords;
Example #4
    if (!array_key_exists('entities', $ret)) {
        $gCachedWikidataLabels[$id] = array();
        return array();
    }
    foreach ($ret['entities'] as $entity) {
        if (!array_key_exists('labels', $entity)) {
            continue;
        }
        foreach ($entity['labels'] as $label) {
            $labels[] = $label['value'];
        }
    }
    $gCachedWikidataLabels[$id] = $labels;
    return $labels;
}
$bot = new wikipedia($gImportAPIURL);
if (count($gImportPageList) > 0) {
    $members = $gImportPageList;
} elseif ($gImportCategoryName == '') {
    $members = $bot->whatusethetemplate($gImportTemplateName);
} else {
    $members = $bot->categorymembers("Category:" . $gImportCategoryName, true);
}
$members = array_unique($members);
foreach ($members as $i => $member) {
    // Get rid of pages not in the main namespace - categories,
    // templates, etc.
    if (strpos($member, ':') !== false) {
        unset($members[$i]);
    }
}
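The main-namespace filter above can also be written with array_filter; this sketch keeps the same assumption that any title containing a colon belongs to a non-article namespace:
$members = array_filter($members, function ($member) {
    // Categories, templates, etc. carry a namespace prefix ending in ':'.
    return strpos($member, ':') === false;
});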
Example #5
function callwikipedia($page, $objid)
{
    //  1. Make sure the URL is a Wikipedia URL.
    //  2. Extract the page name (the last path segment after "wiki/").
    //  3. Pass the page name to the Wikipedia API.
    //  4. Prepare the top words.
    global $db;
    $lifeobj = new wikipedia();
    $wdefwords = $lifeobj->getpage($page, $revid = null);
    //print_r($wdefwords);
    $cleandef = stripclean($wdefwords);
    defininitionwords($objid, $cleandef);
    specialwords();
    defweightedw($objid);
    echo 'finished';
    $sdate = time();
    //  also need to create a new lifestyle average start score of 1
    $db->query = "INSERT INTO " . RSSDATA . ".lifestyleaverage (date, idlifestart, postratio, avglife) VALUES ('{$sdate}', '{$objid}', '1', '1' ) ";
    // execute the INSERT query
    $resultavgs = mysql_query($db->query) or die("Error in query: {$db->query}. " . mysql_error());
}
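The mysql_* functions used above have been removed since PHP 7; a rough equivalent of the insert using PDO prepared statements (a sketch that assumes an already-open $pdo connection and the same RSSDATA constant) could look like this:
$stmt = $pdo->prepare(
    "INSERT INTO " . RSSDATA . ".lifestyleaverage (date, idlifestart, postratio, avglife) "
    . "VALUES (:date, :id, '1', '1')"
);
// Binding :date and :id avoids interpolating $objid straight into the SQL string.
$stmt->execute([':date' => time(), ':id' => $objid]);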
Example #6
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *   
 *  Developers (add yourself here if you worked on the code):
 *    James Hare - [[User:Harej]] - Wrote everything
 **/
ini_set("display_errors", 1);
error_reporting(E_ALL ^ E_NOTICE);
$botuser = '******';
require_once 'botclasses.php';
// Botclasses.php was written by User:Chris_G and is available under the GNU General Public License
require_once 'harejpass.php';
echo "Logging in...";
$objwiki = new wikipedia();
$objwiki->login($botuser, $botpass);
echo " done.\n";
$relists = array("M" => "", "O" => "", "B" => "", "S" => "", "W" => "", "G" => "", "T" => "", "F" => "", "P" => "", "I" => "", "?" => "", "U" => "");
echo "Checking category members...";
$transcludes = $objwiki->categorymembers("Category:Relisted AfD debates");
echo " done.\n";
for ($i = 0; $i < count($transcludes); $i++) {
    preg_match("/(Wikipedia:Articles for deletion)\\/(?!Log)/", $transcludes[$i], $m);
    echo "Retrieving {$transcludes[$i]} contents... \n";
    $contents = $objwiki->getpage($transcludes[$i]);
    if (!empty($m[0])) {
        preg_match("/Please do not modify it/", $contents, $p);
        if (!empty($p[0])) {
            $contents = str_replace("{{#ifeq:{{FULLPAGENAME}}|" . $transcludes[$i] . "|[[Category:Relisted AfD debates|{{SUBPAGENAME}}]]|}}", "", $contents);
            // backwards compatibility