Example #1
0
 function setup_database()
 {
     global $wgVersion, $wgDBmwschema, $wgDBts2schema, $wgDBport, $wgDBuser;
     // Make sure that we can write to the correct schema
     // If not, Postgres will happily and silently go to the next search_path item
     $ctest = "mediawiki_test_table";
     $safeschema = $this->quote_ident($wgDBmwschema);
     if ($this->tableExists($ctest, $wgDBmwschema)) {
         $this->doQuery("DROP TABLE {$safeschema}.{$ctest}");
     }
     $SQL = "CREATE TABLE {$safeschema}.{$ctest}(a int)";
     $olde = error_reporting(0);
     $res = $this->doQuery($SQL);
     error_reporting($olde);
     if (!$res) {
         print "<b>FAILED</b>. Make sure that the user \"" . htmlspecialchars($wgDBuser) . "\" can write to the schema \"" . htmlspecialchars($wgDBmwschema) . "\"</li>\n";
         dieout("</ul>");
     }
     $this->doQuery("DROP TABLE {$safeschema}.{$ctest}");
     $res = dbsource("../maintenance/postgres/tables.sql", $this);
     ## Update version information
     $mwv = $this->addQuotes($wgVersion);
     $pgv = $this->addQuotes($this->getServerVersion());
     $pgu = $this->addQuotes($this->mUser);
     $mws = $this->addQuotes($wgDBmwschema);
     $tss = $this->addQuotes($wgDBts2schema);
     $pgp = $this->addQuotes($wgDBport);
     $dbn = $this->addQuotes($this->mDBname);
     $ctype = $this->addQuotes(pg_fetch_result($this->doQuery("SHOW lc_ctype"), 0, 0));
     $SQL = "UPDATE mediawiki_version SET mw_version={$mwv}, pg_version={$pgv}, pg_user={$pgu}, " . "mw_schema = {$mws}, ts2_schema = {$tss}, pg_port={$pgp}, pg_dbname={$dbn}, " . "ctype = {$ctype} " . "WHERE type = 'Creation'";
     $this->query($SQL);
     ## Avoid the non-standard "REPLACE INTO" syntax
     $f = fopen("../maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     ## We simply assume it is already empty as we have just created it
     $SQL = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$SQL} {$matches['1']},{$matches['2']})");
     }
     print " (table interwiki successfully populated)...\n";
     $this->doQuery("COMMIT");
 }
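The interwiki loop above avoids MySQL's non-standard REPLACE INTO by regex-matching each VALUES tuple out of maintenance/interwiki.sql and re-issuing it as a plain INSERT. A standalone sketch of that parsing step, run against a hypothetical sample line (the real file's rows will differ):

<?php
// Hypothetical line in the shape the regex above expects: a quoted tuple ending in the iw_local digit.
$line = "('wikipedia','http://en.wikipedia.org/wiki/\$1',1),";

$prefix = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
if (preg_match('/^\s*(\(.+?),(\d)\)/', $line, $matches)) {
    // $matches[1] is "(prefix,url" and $matches[2] is the iw_local flag;
    // gluing them back together yields one portable INSERT statement.
    echo $prefix . $matches[1] . ',' . $matches[2] . ")\n";
}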
Example #2
0
        $localSettings = "<" . "?php{$endl}{$local}";
        // Fix up a common line-ending problem (due to CVS on Windows)
        $localSettings = str_replace("\r\n", "\n", $localSettings);
        $f = fopen("LocalSettings.php", 'xt');
        if ($f == false) {
            print "</li>\n";
            dieout("<p>Couldn't write out LocalSettings.php. Check that the directory permissions are correct and that there isn't already a file of that name here...</p>\n" . "<p>Here's the file that would have been written; try to paste it into place manually:</p>\n" . "<pre>\n" . htmlspecialchars($localSettings) . "</pre>\n");
        }
        if (fwrite($f, $localSettings)) {
            fclose($f);
            print "<hr/>\n";
            writeSuccessMessage();
            print "</li>\n";
        } else {
            fclose($f);
            dieout("<p class='error'>An error occurred while writing the config/LocalSettings.php file. Check user rights and disk space, then try again.</p></li>\n");
        }
    } while (false);
}
print "</ul>\n";
$mainListOpened = false;
if (count($errs)) {
    /* Display options form */
    if ($conf->posted) {
        echo "<p class='error-top'>Something's not quite right yet; make sure everything below is filled out correctly.</p>\n";
    }
    ?>

<form action="<?php 
    echo defined('MW_INSTALL_PHP5_EXT') ? 'index.php5' : 'index.php';
    ?>
Example #3
0
function importTweets($p)
{
    global $twitterApi, $db, $config, $access, $search;
    $p = trim($p);
    if (!$twitterApi->validateUserParam($p)) {
        return false;
    }
    $maxCount = 200;
    $tweets = array();
    $sinceID = 0;
    $maxID = 0;
    echo l("Importing:\n");
    // Do we already have tweets?
    $pd = $twitterApi->getUserParam($p);
    if ($pd['name'] == "screen_name") {
        $uid = $twitterApi->getUserId($pd['value']);
        $screenname = $pd['value'];
    } else {
        $uid = $pd['value'];
        $screenname = $twitterApi->getScreenName($pd['value']);
    }
    $tiQ = $db->query("SELECT `tweetid` FROM `" . DTP . "tweets` WHERE `userid` = '" . $db->s($uid) . "' ORDER BY `id` DESC LIMIT 1");
    if ($db->numRows($tiQ) > 0) {
        $ti = $db->fetch($tiQ);
        $sinceID = $ti['tweetid'];
    }
    echo l("User ID: " . $uid . "\n");
    // Find total number of tweets
    $total = totalTweets($p);
    if ($total > 3200) {
        $total = 3200; // Due to current Twitter limitation
    }
    $pages = ceil($total / $maxCount);
    echo l("Total tweets: <strong>" . $total . "</strong>, Approx. page total: <strong>" . $pages . "</strong>\n");
    if ($sinceID) {
        echo l("Newest tweet I've got: <strong>" . $sinceID . "</strong>\n");
    }
    // Retrieve tweets
    do {
        // Determine path to Twitter timeline resource
        $path = "1/statuses/user_timeline.json?" . $p . "&include_rts=true&include_entities=true&count=" . $maxCount . ($sinceID ? "&since_id=" . $sinceID : "") . ($maxID ? "&max_id=" . $maxID : "");
        // Announce
        echo l("Retrieving page <strong>#" . ($i + 1) . "</strong>: <span class=\"address\">" . ls($path) . "</span>\n");
        // Get data
        $data = $twitterApi->query($path);
        // Drop out on connection error
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        // Start parsing
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> new tweets on this page\n");
        if (!empty($data)) {
            echo l("<ul>");
            foreach ($data as $i => $tweet) {
                // Shield against duplicate tweet from max_id
                if (!IS64BIT && $i == 0 && $maxID == $tweet->id_str) {
                    unset($data[0]);
                    continue;
                }
                // List tweet
                echo l("<li>" . $tweet->id_str . " " . $tweet->created_at . "</li>\n");
                // Create tweet element and add to list
                $tweets[] = $twitterApi->transformTweet($tweet);
                // Determine new max_id
                $maxID = $tweet->id_str;
                // Subtracting 1 from max_id to prevent duplicate, but only if we support 64-bit integer handling
                if (IS64BIT) {
                    $maxID = (int) $tweet->id - 1;
                }
            }
            echo l("</ul>");
        }
        /*
        if (count($data) < ($maxCount - 50)) {
            echo l("We've reached last page\n");
            break;
        }
        */
    } while (!empty($data));
    if (count($tweets) > 0) {
        // Ascending sort, oldest first
        $tweets = array_reverse($tweets);
        echo l("<strong>All tweets collected. Reconnecting to DB...</strong>\n");
        // The DB connection sometimes times out while tweets are loading; reconnect to work around that
        $db->reconnect();
        echo l("Inserting into DB...\n");
        $error = false;
        foreach ($tweets as $tweet) {
            $q = $db->query($twitterApi->insertQuery($tweet));
            if (!$q) {
                dieout(l(bad("DATABASE ERROR: " . $db->error())));
            }
            $text = $tweet['text'];
            $te = $tweet['extra'];
            if (is_string($te)) {
                $te = @unserialize($tweet['extra']);
            }
            if (is_array($te)) {
                // Because retweets might get cut off otherwise
                $text = array_key_exists("rt", $te) && !empty($te['rt']) && !empty($te['rt']['screenname']) && !empty($te['rt']['text']) ? "RT @" . $te['rt']['screenname'] . ": " . $te['rt']['text'] : $tweet['text'];
            }
            $search->index($db->insertID(), $text);
        }
        echo !$error ? l(good("Done!\n")) : "";
    } else {
        echo l(bad("Nothing to insert.\n"));
    }
    // Checking personal favorites -- scanning all
    echo l("\n<strong>Syncing favourites...</strong>\n");
    // Resetting these
    $favs = array();
    $maxID = 0;
    $sinceID = 0;
    do {
        $path = "1/favorites.json?" . $p . "&count=" . $maxCount . ($maxID ? "&max_id=" . $maxID : "");
        echo l("Retrieving page <strong>#" . ($i + 1) . "</strong>: <span class=\"address\">" . ls($path) . "</span>\n");
        $data = $twitterApi->query($path);
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> total favorite tweets on this page\n");
        if (!empty($data)) {
            echo l("<ul>");
            foreach ($data as $tweet) {
                if (!IS64BIT && $i == 0 && $maxID == $tweet->id_str) {
                    unset($data[0]);
                    continue;
                }
                if ($tweet->user->id_str == $uid) {
                    echo l("<li>" . $tweet->id_str . " " . $tweet->created_at . "</li>\n");
                    $favs[] = $maxID = $tweet->id_str;
                    if (IS64BIT) {
                        $maxID = (int) $tweet->id - 1;
                    }
                }
            }
            echo l("</ul>");
        }
        echo l("<strong>" . count($favs) . "</strong> favorite own tweets so far\n");
        //if(count($data) < ($maxCount - 50)){ break; } // We've reached last page
    } while (!empty($data));
    // Blank all favorites
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '0'");
    // Insert favorites into DB
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '1' WHERE `tweetid` IN ('" . implode("', '", $favs) . "')");
    echo l(good("Updated favorites!"));
}
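The timeline loop above pages backwards with a max_id cursor: on 64-bit PHP the next cursor is the last tweet's numeric id minus one, while 32-bit builds keep the string id_str and instead drop the first tweet of the next page when it repeats the cursor. A minimal sketch of just that cursor rule, assuming an IS64BIT constant like the one the function tests and hypothetical tweet objects:

<?php
// Assumption: the importer defines IS64BIT elsewhere; define it here for a standalone run.
if (!defined('IS64BIT')) {
    define('IS64BIT', PHP_INT_SIZE >= 8);
}

// Compute the max_id to request for the next (older) page, given the tweets just received.
function nextMaxId(array $page)
{
    $last = end($page);
    if ($last === false) {
        return 0; // empty page: nothing further to request
    }
    if (IS64BIT) {
        // Subtract 1 so the boundary tweet is not fetched again.
        return (int) $last->id - 1;
    }
    // 32-bit PHP cannot safely do arithmetic on a 64-bit id, so keep the string id
    // and rely on the duplicate check at the top of the next page instead.
    return $last->id_str;
}

// Hypothetical page of two tweets, newest first (as the Twitter API returns them).
$page = array(
    (object) array('id' => 105, 'id_str' => '105'),
    (object) array('id' => 100, 'id_str' => '100'),
);
var_dump(nextMaxId($page)); // int(99) on 64-bit PHP, string "100" otherwise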
Example #4
0
 function setup_database()
 {
     global $wgVersion, $wgDBmwschema, $wgDBts2schema, $wgDBport;
     dbsource("../maintenance/postgres/tables.sql", $this);
     ## Update version information
     $mwv = $this->addQuotes($wgVersion);
     $pgv = $this->addQuotes($this->getServerVersion());
     $pgu = $this->addQuotes($this->mUser);
     $mws = $this->addQuotes($wgDBmwschema);
     $tss = $this->addQuotes($wgDBts2schema);
     $pgp = $this->addQuotes($wgDBport);
     $dbn = $this->addQuotes($this->mDBname);
     $ctype = pg_fetch_result($this->doQuery("SHOW lc_ctype"), 0, 0);
     $SQL = "UPDATE mediawiki_version SET mw_version={$mwv}, pg_version={$pgv}, pg_user={$pgu}, " . "mw_schema = {$mws}, ts2_schema = {$tss}, pg_port={$pgp}, pg_dbname={$dbn}, " . "ctype = '{$ctype}' " . "WHERE type = 'Creation'";
     $this->query($SQL);
     ## Avoid the non-standard "REPLACE INTO" syntax
     $f = fopen("../maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     ## We simply assume it is already empty as we have just created it
     $SQL = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$SQL} {$matches['1']},{$matches['2']})");
     }
     print " (table interwiki successfully populated)...\n";
 }
Example #5
0
 function update_interwiki()
 {
     ## Avoid the non-standard "REPLACE INTO" syntax
     ## Called by config/index.php
     $f = fopen("../maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     ## We simply assume it is already empty as we have just created it
     $SQL = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         if (!preg_match("/^\\s*(\\(.+?),(\\d)\\)/", $line, $matches)) {
             continue;
         }
      $yesno = $matches[2]; ## ? "'true'" : "'false'";
         $this->query("{$SQL} {$matches['1']},{$matches['2']})");
     }
     print " (table interwiki successfully populated)...\n";
 }
Example #6
0
 /**
  * Called by the installer script
  * - this is the same way as DatabasePostgresql.php; MySQL reads in tables.sql and interwiki.sql using dbsource (which calls db->sourceFile)
  */
 public function setup_database()
 {
     global $IP, $wgDBTableOptions;
     $wgDBTableOptions = '';
     $mysql_tmpl = "{$IP}/maintenance/tables.sql";
     $mysql_iw = "{$IP}/maintenance/interwiki.sql";
     $mssql_tmpl = "{$IP}/maintenance/mssql/tables.sql";
     # Make an MSSQL template file if it doesn't exist (based on the same one MySQL uses to create a new wiki db)
     if (!file_exists($mssql_tmpl)) {
         # todo: make this conditional again
          $sql = file_get_contents($mysql_tmpl);
          $sql = preg_replace('/^\\s*--.*?$/m', '', $sql); # strip comments
          $sql = preg_replace('/^\\s*(UNIQUE )?(INDEX|KEY|FULLTEXT).+?$/m', '', $sql); # These indexes should be created with a CREATE INDEX query
          $sql = preg_replace('/(\\sKEY) [^\\(]+\\(/is', '$1 (', $sql); # "KEY foo (foo)" should just be "KEY (foo)"
          $sql = preg_replace('/(varchar\\([0-9]+\\))\\s+binary/i', '$1', $sql); # "varchar(n) binary" cannot be followed by "binary"
          $sql = preg_replace('/(var)?binary\\(([0-9]+)\\)/ie', '"varchar(".strlen(pow(2,$2)).")"', $sql); # use varchar(chars) not binary(bits)
          $sql = preg_replace('/ (var)?binary/i', ' varchar', $sql); # use varchar not binary
          $sql = preg_replace('/(varchar\\([0-9]+\\)(?! N))/', '$1 NULL', $sql); # MSSQL complains if NULL is put into a varchar
          #$sql = preg_replace('/ binary/i',' varchar',$sql); # MSSQL binary's can't be assigned with strings, so use varchar's instead
          #$sql = preg_replace('/(binary\([0-9]+\) (NOT NULL )?default) [\'"].*?[\'"]/i','$1 0',$sql); # binary default cannot be string
          $sql = preg_replace('/[a-z]*(blob|text)([ ,])/i', 'text$2', $sql); # no BLOB types in MSSQL
          $sql = preg_replace('/\\).+?;/', ');', $sql); # remove all table options
          $sql = preg_replace('/ (un)?signed/i', '', $sql);
          $sql = preg_replace('/ENUM\\(.+?\\)/', 'TEXT', $sql); # Make ENUM's into TEXT's
          $sql = str_replace(' bool ', ' bit ', $sql);
          $sql = str_replace('auto_increment', 'IDENTITY(1,1)', $sql);
          #$sql = preg_replace('/NOT NULL(?! IDENTITY)/', 'NULL', $sql); # Allow NULL's for non IDENTITY columns
          # Tidy up and write file
          $sql = preg_replace('/,\\s*\\)/s', "\n)", $sql); # Remove spurious commas left after INDEX removals
          $sql = preg_replace('/^\\s*^/m', '', $sql); # Remove empty lines
          $sql = preg_replace('/;$/m', ";\n", $sql); # Separate each statement with an empty line
          file_put_contents($mssql_tmpl, $sql);
     }
     # Parse the MSSQL template replacing inline variables such as /*$wgDBprefix*/
     $err = $this->sourceFile($mssql_tmpl);
     if ($err !== true) {
         $this->reportQueryError($err, 0, $sql, __FUNCTION__);
     }
     # Use DatabasePostgres's code to populate interwiki from MySQL template
     $f = fopen($mysql_iw, 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     $sql = "INSERT INTO {$this->mTablePrefix}interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$sql} {$matches['1']},{$matches['2']})");
     }
 }
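The template generation above is nothing more than a chain of regex rewrites over the MySQL tables.sql text. A reduced sketch of the same idea on a tiny hypothetical CREATE TABLE, reusing a few of the substitutions shown above (the real pipeline applies many more and writes the result out to the MSSQL template file):

<?php
// Hypothetical MySQL-flavoured DDL fragment standing in for tables.sql.
$sql = "CREATE TABLE /*\$wgDBprefix*/example (\n"
     . "  ex_id int unsigned NOT NULL auto_increment,\n"
     . "  ex_text mediumblob NOT NULL,\n"
     . "  PRIMARY KEY (ex_id),\n"
     . "  KEY ex_text (ex_text)\n"
     . ") TYPE=InnoDB, MAX_ROWS=1000;\n";

// A few of the rewrites used when deriving the MSSQL template above.
$sql = preg_replace('/^\s*(UNIQUE )?(INDEX|KEY|FULLTEXT).+?$/m', '', $sql); // drop the secondary KEY line
$sql = preg_replace('/[a-z]*(blob|text)([ ,])/i', 'text$2', $sql);          // no BLOB types
$sql = preg_replace('/\).+?;/', ');', $sql);                                // strip the table options
$sql = preg_replace('/ (un)?signed/i', '', $sql);
$sql = str_replace('auto_increment', 'IDENTITY(1,1)', $sql);
$sql = preg_replace('/,\s*\)/s', "\n)", $sql);                              // remove the comma left behind by the KEY removal

echo $sql;

On this sample the output is a clean CREATE TABLE with IDENTITY(1,1), no index line and no table options.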
Example #7
0
            $titleobj = Title::newFromText(wfMsgNoDB("mainpage"));
            $article = new Article($titleobj);
            $newid = $article->insertOn($wgDatabase);
            $revision = new Revision(array('page' => $newid, 'text' => wfMsg('mainpagetext') . "\n\n" . wfMsg('mainpagedocfooter'), 'comment' => '', 'user' => 0, 'user_text' => 'MediaWiki default'));
            $revid = $revision->insertOn($wgDatabase);
            $article->updateRevisionOn($wgDatabase, $revision);
        }
        /* Write out the config file now that all is well */
        print "<li style=\"list-style: none\">\n";
        print "<p>Creating LocalSettings.php...</p>\n\n";
        $localSettings = "<" . "?php{$endl}{$local}{$endl}?" . ">\r\n";
        // Fix up a common line-ending problem (due to CVS on Windows)
        $localSettings = str_replace("\r\n", "\n", $localSettings);
        $f = fopen("LocalSettings.php", 'xt');
        if ($f == false) {
            dieout("<p>Couldn't write out LocalSettings.php. Check that the directory permissions are correct and that there isn't already a file of that name here...</p>\n" . "<p>Here's the file that would have been written; try to paste it into place manually:</p>\n" . "<pre>\n" . htmlspecialchars($localSettings) . "</pre>\n");
        }
        if (fwrite($f, $localSettings)) {
            fclose($f);
            writeSuccessMessage();
        } else {
            fclose($f);
            die("<p class='error'>An error occurred while writing the config/LocalSettings.php file. Check user rights and disk space, then try again.</p>\n");
        }
        print "</li>\n";
    } while (false);
}
?>
</ul>
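Both installer fragments above open LocalSettings.php with fopen() mode 'xt': 'x' is exclusive create, so an existing file makes the open fail instead of being overwritten, and the failure branch can dump the would-be contents for manual pasting; 't' is the Windows text-translation flag. A minimal sketch of that write pattern with hypothetical contents:

<?php
// Hypothetical settings text and target path.
$localSettings = "<?php\n\$wgSitename = 'Example';\n";
$target = 'LocalSettings.php';

// 'x' refuses to clobber an existing file; 't' only affects line endings on Windows.
$f = @fopen($target, 'xt');
if ($f === false) {
    // Mirror the installer's fallback: show the file so it can be pasted in manually.
    echo "Could not create $target; paste this in yourself:\n";
    echo htmlspecialchars($localSettings);
} elseif (fwrite($f, $localSettings) !== false) {
    fclose($f);
    echo "Wrote $target\n";
} else {
    fclose($f);
    echo "Write to $target failed; check permissions and disk space.\n";
}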

Example #8
0
function importTweets($p)
{
    global $twitterApi, $db, $config, $access, $search;
    $p = trim($p);
    if (!$twitterApi->validateUserParam($p)) {
        return false;
    }
    $tweets = array();
    echo l("Importing:\n");
    // Do we already have tweets?
    $pd = $twitterApi->getUserParam($p);
    if ($pd['name'] == "screen_name") {
        $uid = $twitterApi->getUserId($pd['value']);
        $screenname = $pd['value'];
    } else {
        $uid = $pd['value'];
        $screenname = $twitterApi->getScreenName($pd['value']);
    }
    $tiQ = $db->query("SELECT `tweetid` FROM `" . DTP . "tweets` WHERE `userid` = '" . $db->s($uid) . "' ORDER BY `id` DESC LIMIT 1");
    if ($db->numRows($tiQ) > 0) {
        $ti = $db->fetch($tiQ);
        $sinceID = $ti['tweetid'];
    }
    echo l("User ID: " . $uid . "\n");
    $loadedArchives = is_readable('loadarchivelog.txt') ? file('loadarchivelog.txt') : array();
    // go through every file in archive folder
    foreach (glob(dirname(__FILE__) . '/../archive/[0-9][0-9][0-9][0-9]_[0-1][0-9].js') as $filename) {
        if (in_array(basename($filename) . PHP_EOL, $loadedArchives)) {
            echo l("Found in archivelog -> Skipping file\n");
            continue;
        }
        $data = loadArchiveFile($filename);
        if (!is_array($data)) {
            dieout(l(bad("Error: Could not parse JSON ")));
        }
        // Start parsing
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> tweets in this file\n");
        if (!empty($data)) {
            echo l("<ul>");
            foreach ($data as $i => $tweet) {
                // List tweet
                echo l("<li>" . $tweet->id_str . " " . $tweet->created_at . "</li>\n");
                // Create tweet element and add to list
                $tweets[] = $twitterApi->transformTweet(normalizeTweet($tweet));
            }
            echo l("</ul>");
            // Ascending sort, oldest first
            $tweets = array_reverse($tweets);
            // The DB connection sometimes times out while tweets are loading; reconnect to work around that
            $db->reconnect();
            foreach ($tweets as $tweet) {
                $q = $db->query($twitterApi->insertQuery($tweet));
                if (!$q) {
                    dieout(l(bad("DATABASE ERROR: " . $db->error())));
                }
                $text = $tweet['text'];
                $te = $tweet['extra'];
                if (is_string($te)) {
                    $te = @unserialize($tweet['extra']);
                }
                if (is_array($te)) {
                    // Because retweets might get cut off otherwise
                    $text = array_key_exists("rt", $te) && !empty($te['rt']) && !empty($te['rt']['screenname']) && !empty($te['rt']['text']) ? "RT @" . $te['rt']['screenname'] . ": " . $te['rt']['text'] : $tweet['text'];
                }
                $search->index($db->insertID(), $text);
            }
        }
        // reset tweets array
        $tweets = array();
        file_put_contents('loadarchivelog.txt', basename($filename) . PHP_EOL, FILE_APPEND);
    }
}
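The archive importer above keeps one basename per line in loadarchivelog.txt and skips any file already listed; note the PHP_EOL appended on both the write and the in_array() lookup, since file() keeps each line's newline. A standalone sketch of that bookkeeping with hypothetical paths:

<?php
// Hypothetical log of already-processed archive files, one basename per line.
$logFile = 'loadarchivelog.txt';
$loaded  = is_readable($logFile) ? file($logFile) : array();

// Hypothetical archive files to consider.
$archives = array('/tmp/archive/2012_01.js', '/tmp/archive/2012_02.js');

foreach ($archives as $filename) {
    // file() keeps the trailing newline on each entry, so compare with PHP_EOL appended.
    if (in_array(basename($filename) . PHP_EOL, $loaded)) {
        echo "Skipping " . basename($filename) . " (already imported)\n";
        continue;
    }
    echo "Would import " . basename($filename) . "\n";
    // Record it so the next run skips this file.
    file_put_contents($logFile, basename($filename) . PHP_EOL, FILE_APPEND);
}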
Example #9
0
 /**
  * Called by the installer script (when modified according to the MediaWikiLite installation instructions)
  * - this is the same way PostgreSQL works; MySQL reads in tables.sql and interwiki.sql using dbsource (which calls db->sourceFile)
  */
 public function setup_database()
 {
     global $IP, $wgSQLiteDataDir, $wgDBTableOptions;
     $wgDBTableOptions = '';
     # Process common MySQL/SQLite table definitions
     $err = $this->sourceFile("{$IP}/maintenance/tables.sql");
     if ($err !== true) {
         $this->reportQueryError($err, 0, $sql, __FUNCTION__);
         exit(1);
     }
     # Use DatabasePostgres's code to populate interwiki from MySQL template
     $f = fopen("{$IP}/maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     $sql = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$sql} {$matches['1']},{$matches['2']})");
     }
 }
Example #10
0
 function setup_database()
 {
     global $wgVersion, $wgDBmwschema, $wgDBts2schema, $wgDBport, $wgDBuser;
     $res = $this->sourceFile("../maintenance/ora/tables.sql");
     if ($res === true) {
         print " done.</li>\n";
     } else {
         print " <b>FAILED</b></li>\n";
         dieout(htmlspecialchars($res));
     }
     // Avoid the non-standard "REPLACE INTO" syntax
     echo "<li>Populating interwiki table</li>\n";
     $f = fopen("../maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("Could not find the interwiki.sql file");
     }
      // Populate interwiki the same way the Postgres installer does
     $SQL = "INSERT INTO " . $this->tableName('interwiki') . " (iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$SQL} {$matches['1']},{$matches['2']})");
     }
     echo "<li>Table interwiki successfully populated</li>\n";
 }
Example #11
0
 /**
  * Called by the installer script (when modified according to the MediaWikiLite installation instructions)
  * - this is the same way PostgreSQL works; MySQL reads in tables.sql and interwiki.sql using dbsource (which calls db->sourceFile)
  */
 public function setup_database()
 {
     global $IP;
     # Process common MySQL/SQLite table definitions
     $err = $this->sourceFile("{$IP}/maintenance/tables.sql");
     if ($err !== true) {
         echo " <b>FAILED</b></li>";
         dieout(htmlspecialchars($err));
     }
     echo " done.</li>";
     # Use DatabasePostgres's code to populate interwiki from MySQL template
     $f = fopen("{$IP}/maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("Could not find the interwiki.sql file.");
     }
     $sql = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$sql} {$matches['1']},{$matches['2']})");
     }
 }
Example #12
0
function importTweets($p)
{
    global $twitterApi, $db, $config, $access, $search;
    $p = trim($p);
    if (!$twitterApi->validateUserParam($p)) {
        return false;
    }
    $maxCount = 200;
    $tweets = array();
    $sinceID = 0;
    $maxID = 0;
    echo l("Importing:\n");
    // Do we already have tweets?
    $pd = $twitterApi->getUserParam($p);
    if ($pd['name'] == "screen_name") {
        $uid = $twitterApi->getUserId($pd['value']);
        $screenname = $pd['value'];
    } else {
        $uid = $pd['value'];
        $screenname = $twitterApi->getScreenName($pd['value']);
    }
    $tiQ = $db->query("SELECT `tweetid` FROM `" . DTP . "tweets` WHERE `userid` = '" . $db->s($uid) . "' ORDER BY `tweetid` DESC LIMIT 1");
    if ($db->numRows($tiQ) > 0) {
        $ti = $db->fetch($tiQ);
        $sinceID = $ti['tweetid'];
    }
    echo l("User ID: " . $uid . "\n");
    // Find total number of tweets
    $total = totalTweets($p);
    if ($total > 3200) {
        $total = 3200; // Due to current Twitter bug
    }
    $pages = ceil($total / $maxCount);
    echo l("Total tweets: <strong>" . $total . "</strong>, Pages: <strong>" . $pages . "</strong>\n");
    // Retrieve tweets
    for ($i = 0; $i < $pages; $i++) {
        $path = "1/statuses/user_timeline.json?" . $p . "&include_rts=true&count=" . $maxCount . ($sinceID > 0 ? "&since_id=" . $sinceID : "") . ($maxID > 0 ? "&max_id=" . $maxID : "");
        echo l("Retrieving page <strong>#" . ($i + 1) . "</strong>: <span class=\"address\">" . ls($path) . "</span>\n");
        $data = $twitterApi->query($path);
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> new tweets on this page\n");
        if (!$data) {
            break; // No more tweets
        }
        echo l("<ul>");
        foreach ($data as $tweet) {
            echo l("<li>" . $tweet->id . " " . $tweet->created_at . "</li>\n");
            $tweets[] = $twitterApi->transformTweet($tweet);
            $maxID = (double) ((double) $tweet->id - 1);
        }
        echo l("</ul>");
        if (count($data) < $maxCount - 50) {
            echo l("We've reached last page\n");
            break;
        }
    }
    if (count($tweets) > 0) {
        // Ascending sort, oldest first
        $tweets = array_reverse($tweets);
        echo l("<strong>All tweets collected. Reconnecting to DB...</strong>\n");
        // The DB connection sometimes times out while tweets are loading; reconnect to work around that
        $db->reconnect();
        echo l("Inserting into DB...\n");
        $error = false;
        foreach ($tweets as $tweet) {
            $q = $db->query($twitterApi->insertQuery($tweet));
            if (!$q) {
                dieout(l(bad("DATABASE ERROR: " . $db->error())));
            }
            $text = $tweet['text'];
            $te = $tweet['extra'];
            if (is_string($te)) {
                $te = @unserialize($tweet['extra']);
            }
            if (is_array($te)) {
                // Because retweets might get cut off otherwise
                $text = array_key_exists("rt", $te) && !empty($te['rt']) && !empty($te['rt']['screenname']) && !empty($te['rt']['text']) ? "RT @" . $te['rt']['screenname'] . ": " . $te['rt']['text'] : $tweet['text'];
            }
            $search->index($db->insertID(), $text);
        }
        echo !$error ? l(good("Done!\n")) : "";
    } else {
        echo l(bad("Nothing to insert.\n"));
    }
    // Checking personal favorites -- scanning all
    echo l("\n<strong>Syncing favourites...</strong>\n");
    $pages = ceil($total / $maxCount);
    // Resetting these
    $sinceID = 0;
    $maxID = 0;
    $favs = array();
    for ($i = 0; $i < $pages; $i++) {
        $path = "1/favorites.json?" . $p . "&count=" . $maxCount . ($i > 0 ? "&page=" . $i : "");
        echo l("Retrieving page <strong>#" . ($i + 1) . "</strong>: <span class=\"address\">" . ls($path) . "</span>\n");
        $data = $twitterApi->query($path);
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> total favorite tweets on this page\n");
        if (!$data) {
            break; // No more tweets
        }
        echo l("<ul>");
        foreach ($data as $tweet) {
            if ($tweet->user->id == $uid) {
                echo l("<li>" . $tweet->id . " " . $tweet->created_at . "</li>\n");
                $favs[] = $tweet->id . "";
            }
        }
        echo l("</ul>");
        if (count($data) > 0) {
            echo l("<strong>" . count($favs) . "</strong> favorite own tweets on this page\n");
        }
        if (count($data) < $maxCount - 50) {
            break; // We've reached last page
        }
    }
    // Blank all favorites
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '0'");
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '1' WHERE `tweetid` IN ('" . implode("', '", $favs) . "')");
    echo l(good("Updated favorites!"));
}
Example #13
0
 function setup_database()
 {
     global $wgDBuser;
     // Make sure that we can write to the correct schema
     $ctest = "mediawiki_test_table";
     if ($this->tableExists($ctest)) {
         $this->doQuery("DROP TABLE {$ctest}");
     }
     $SQL = "CREATE TABLE {$ctest} (a int)";
     $res = $this->doQuery($SQL);
     if (!$res) {
         print "<b>FAILED</b>. Make sure that the user " . htmlspecialchars($wgDBuser) . " can write to the database</li>\n";
         dieout();
     }
     $this->doQuery("DROP TABLE {$ctest}");
     $res = $this->sourceFile("../maintenance/mssql/tables.sql");
     if ($res !== true) {
         echo " <b>FAILED</b></li>";
         dieout(htmlspecialchars($res));
     }
     # Avoid the non-standard "REPLACE INTO" syntax
     $f = fopen("../maintenance/interwiki.sql", 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     # We simply assume it is already empty as we have just created it
     $SQL = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$SQL} {$matches['1']},{$matches['2']})");
     }
     print " (table interwiki successfully populated)...\n";
     $this->commit();
 }
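Examples #1 and #13 probe write access up front by creating and dropping a throwaway table before sourcing the real schema, so a permissions problem produces one clear FAILED message instead of a cascade of broken CREATE TABLE statements. A minimal sketch of the same probe using PDO with an in-memory SQLite database as a stand-in (the installers go through their own Database wrappers):

<?php
// Hypothetical connection; the real installers use their Database classes instead of PDO.
$pdo = new PDO('sqlite::memory:');
$pdo->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_SILENT);

$probe = 'mediawiki_test_table';

// Drop any leftover probe table, then try to create and remove a fresh one.
$pdo->exec("DROP TABLE IF EXISTS $probe");
$created = $pdo->exec("CREATE TABLE $probe (a int)");
if ($created === false) {
    die("FAILED: the configured user cannot create tables in this database/schema.\n");
}
$pdo->exec("DROP TABLE $probe");
echo "Write probe succeeded; safe to source the full schema.\n";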
Example #14
0
 /**
  * Called by the installer script (when modified according to the MediaWikiLite installation instructions)
  * - this is the same way PostgreSQL works; MySQL reads in tables.sql and interwiki.sql using dbsource (which calls db->sourceFile)
  */
 public function setup_database()
 {
     global $IP, $wgSQLiteDataDir, $wgDBTableOptions;
     $wgDBTableOptions = '';
     $mysql_tmpl = "{$IP}/maintenance/tables.sql";
     $mysql_iw = "{$IP}/maintenance/interwiki.sql";
     $sqlite_tmpl = "{$IP}/maintenance/sqlite/tables.sql";
     # Make an SQLite template file if it doesn't exist (based on the same one MySQL uses to create a new wiki db)
     if (!file_exists($sqlite_tmpl)) {
          $sql = file_get_contents($mysql_tmpl);
          $sql = preg_replace('/^\\s*--.*?$/m', '', $sql); # strip comments
          $sql = preg_replace('/^\\s*(UNIQUE)?\\s*(PRIMARY)?\\s*KEY.+?$/m', '', $sql);
          $sql = preg_replace('/^\\s*(UNIQUE )?INDEX.+?$/m', '', $sql); # These indexes should be created with a CREATE INDEX query
          $sql = preg_replace('/^\\s*FULLTEXT.+?$/m', '', $sql); # Full text indexes
          $sql = preg_replace('/ENUM\\(.+?\\)/', 'TEXT', $sql); # Make ENUM's into TEXT's
          $sql = preg_replace('/binary\\(\\d+\\)/', 'BLOB', $sql);
          $sql = preg_replace('/(TYPE|MAX_ROWS|AVG_ROW_LENGTH)=\\w+/', '', $sql);
          $sql = preg_replace('/,\\s*\\)/s', ')', $sql); # removing previous items may leave a trailing comma
          $sql = str_replace('binary', '', $sql);
          $sql = str_replace('auto_increment', 'PRIMARY KEY AUTOINCREMENT', $sql);
          $sql = str_replace(' unsigned', '', $sql);
          $sql = str_replace(' int ', ' INTEGER ', $sql);
          $sql = str_replace('NOT NULL', '', $sql);
          # Tidy up and write file
          $sql = preg_replace('/^\\s*^/m', '', $sql); # Remove empty lines
          $sql = preg_replace('/;$/m', ";\n", $sql); # Separate each statement with an empty line
          file_put_contents($sqlite_tmpl, $sql);
     }
     # Parse the SQLite template replacing inline variables such as /*$wgDBprefix*/
     $err = $this->sourceFile($sqlite_tmpl);
     if ($err !== true) {
         $this->reportQueryError($err, 0, $sql, __FUNCTION__);
     }
     # Use DatabasePostgres's code to populate interwiki from MySQL template
     $f = fopen($mysql_iw, 'r');
     if ($f == false) {
         dieout("<li>Could not find the interwiki.sql file");
     }
     $sql = "INSERT INTO interwiki(iw_prefix,iw_url,iw_local) VALUES ";
     while (!feof($f)) {
         $line = fgets($f, 1024);
         $matches = array();
         if (!preg_match('/^\\s*(\\(.+?),(\\d)\\)/', $line, $matches)) {
             continue;
         }
         $this->query("{$sql} {$matches['1']},{$matches['2']})");
     }
 }
Example #15
0
function importTweets($p)
{
    global $twitterApi, $db, $config, $access, $search;
    $p = trim($p);
    if (!$twitterApi->validateUserParam($p)) {
        return false;
    }
    $maxCount = 200;
    $tweets = array();
    $sinceID = 0;
    $maxID = 0;
    // Check for authentication
    if (!isset($config['consumer_key']) || !isset($config['consumer_secret'])) {
        die("Consumer key and secret not found. These are required for authentication to Twitter. \n" . "Please point your browser to the authorize.php file to configure these.\n");
    }
    list($userparam, $uservalue) = explode('=', $p);
    echo l("Importing:\n");
    // Do we already have tweets?
    $pd = $twitterApi->getUserParam($p);
    if ($pd['name'] == "screen_name") {
        $uid = $twitterApi->getUserId($pd['value']);
        $screenname = $pd['value'];
    } else {
        $uid = $pd['value'];
        $screenname = $twitterApi->getScreenName($pd['value']);
    }
    $tiQ = $db->query("SELECT `tweetid` FROM `" . DTP . "tweets` WHERE `userid` = '" . $db->s($uid) . "' ORDER BY `time` DESC LIMIT 1");
    if ($db->numRows($tiQ) > 0) {
        $ti = $db->fetch($tiQ);
        $sinceID = $ti['tweetid'];
    }
    echo l("User ID: " . $uid . "\n");
    // Find total number of tweets
    $total = totalTweets($p);
    if (is_numeric($total)) {
        if ($total > 3200) {
            $total = 3200; // Due to current Twitter limitation
        }
        $pages = ceil($total / $maxCount);
        echo l("Total tweets: <strong>" . $total . "</strong>, Approx. page total: <strong>" . $pages . "</strong>\n");
    }
    if ($sinceID) {
        echo l("Newest tweet I've got: <strong>" . $sinceID . "</strong>\n");
    }
    $page = 1;
    // Retrieve tweets
    do {
        // Announce
        echo l("Retrieving page <strong>#" . $page . "</strong>:\n");
        // Get data
        $params = array($userparam => $uservalue, 'include_rts' => true, 'include_entities' => true, 'count' => $maxCount);
        if ($sinceID) {
            $params['since_id'] = $sinceID;
        }
        if ($maxID) {
            $params['max_id'] = $maxID;
        }
        $data = $twitterApi->query('statuses/user_timeline', $params);
        // Drop out on connection error
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        // Start parsing
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> new tweets on this page\n");
        if (!empty($data)) {
            echo l("<ul>");
            foreach ($data as $i => $tweet) {
                // First, let's check if an API error occurred
                if (is_array($tweet) && is_object($tweet[0]) && property_exists($tweet[0], 'message')) {
                    dieout(l(bad('A Twitter API error occurred: ' . $tweet[0]->message)));
                }
                // Shield against duplicate tweet from max_id
                if (!IS64BIT && $i == 0 && $maxID == $tweet->id_str) {
                    unset($data[0]);
                    continue;
                }
                // List tweet
                echo l("<li>" . $tweet->id_str . " " . $tweet->created_at . "</li>\n");
                // Create tweet element and add to list
                $tweets[] = $twitterApi->transformTweet($tweet);
                // Determine new max_id
                $maxID = $tweet->id_str;
                // Subtracting 1 from max_id to prevent duplicate, but only if we support 64-bit integer handling
                if (IS64BIT) {
                    $maxID = (int) $tweet->id - 1;
                }
            }
            echo l("</ul>");
        }
        $page++;
    } while (!empty($data));
    if (count($tweets) > 0) {
        // Ascending sort, oldest first
        $tweets = array_reverse($tweets);
        echo l("<strong>All tweets collected. Reconnecting to DB...</strong>\n");
        // The DB connection sometimes times out while tweets are loading; reconnect to work around that
        $db->reconnect();
        echo l("Inserting into DB...\n");
        $error = false;
        foreach ($tweets as $tweet) {
            $q = $db->query($twitterApi->insertQuery($tweet));
            if (!$q) {
                dieout(l(bad("DATABASE ERROR: " . $db->error())));
            }
            $text = $tweet['text'];
            $te = $tweet['extra'];
            if (is_string($te)) {
                $te = @unserialize($tweet['extra']);
            }
            if (is_array($te)) {
                // Because retweets might get cut off otherwise
                $text = array_key_exists("rt", $te) && !empty($te['rt']) && !empty($te['rt']['screenname']) && !empty($te['rt']['text']) ? "RT @" . $te['rt']['screenname'] . ": " . $te['rt']['text'] : $tweet['text'];
            }
            $search->index($db->insertID(), $text);
        }
        echo !$error ? l(good("Done!\n")) : "";
    } else {
        echo l(bad("Nothing to insert.\n"));
    }
    // Checking personal favorites -- scanning all
    echo l("\n<strong>Syncing favourites...</strong>\n");
    // Resetting these
    $favs = array();
    $maxID = 0;
    $sinceID = 0;
    $page = 1;
    do {
        echo l("Retrieving page <strong>#" . $page . "</strong>:\n");
        $params = array($userparam => $uservalue, 'count' => $maxCount);
        if ($maxID) {
            $params['max_id'] = $maxID;
        }
        $data = $twitterApi->query('favorites/list', $params);
        if (is_array($data) && $data[0] === false) {
            dieout(l(bad("Error: " . $data[1] . "/" . $data[2])));
        }
        echo l("<strong>" . ($data ? count($data) : 0) . "</strong> total favorite tweets on this page\n");
        if (!empty($data)) {
            echo l("<ul>");
            foreach ($data as $i => $tweet) {
                // First, let's check if an API error occurred
                if (is_array($tweet) && is_object($tweet[0]) && property_exists($tweet[0], 'message')) {
                    dieout(l(bad('A Twitter API error occurred: ' . $tweet[0]->message)));
                }
                if (!IS64BIT && $i == 0 && $maxID == $tweet->id_str) {
                    unset($data[0]);
                    continue;
                }
                if ($tweet->user->id_str == $uid) {
                    echo l("<li>" . $tweet->id_str . " " . $tweet->created_at . "</li>\n");
                    $favs[] = $tweet->id_str;
                }
                $maxID = $tweet->id_str;
                if (IS64BIT) {
                    $maxID = (int) $tweet->id - 1;
                }
            }
            echo l("</ul>");
        }
        echo l("<strong>" . count($favs) . "</strong> favorite own tweets so far\n");
        $page++;
    } while (!empty($data));
    // Blank all favorites
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '0'");
    // Insert favorites into DB
    $db->query("UPDATE `" . DTP . "tweets` SET `favorite` = '1' WHERE `tweetid` IN ('" . implode("', '", $favs) . "')");
    echo l(good("Updated favorites!"));
}
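Every importTweets() variant above rebuilds the favorite flag the same way once the ids are collected: blank the column for all rows, then set it again for the collected ids in a single IN (...) update. A reduced sketch of that rebuild against an in-memory SQLite table, using bound placeholders instead of the quoted implode() the originals build (table name and data here are hypothetical):

<?php
// Hypothetical collected favorite tweet ids (strings, as in the examples).
$favs = array('100', '105', '112');

// Hypothetical in-memory table standing in for the real DTP-prefixed tweets table.
$pdo = new PDO('sqlite::memory:');
$pdo->exec("CREATE TABLE tweets (tweetid TEXT, favorite INTEGER DEFAULT 0)");
foreach (array('100', '101', '105') as $id) {
    $pdo->exec("INSERT INTO tweets (tweetid) VALUES ('$id')");
}

// Blank all favorites, then mark the collected ones in one statement.
$pdo->exec("UPDATE tweets SET favorite = 0");
$placeholders = implode(',', array_fill(0, count($favs), '?'));
$stmt = $pdo->prepare("UPDATE tweets SET favorite = 1 WHERE tweetid IN ($placeholders)");
$stmt->execute($favs);

echo "Marked " . $stmt->rowCount() . " favorites\n"; // 2 of the 3 ids exist in this sample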