Example #1
function madlib_get_entry($file)
{
    // Load the current revision of the page; if it is missing or
    // empty, fall back to returning the page name itself.
    $page = @wiki_load($file, 'Current');
    if (!$page || !$page['body']) {
        return $file;
    }
    // Pick a random line from the page body, retrying a bounded
    // number of times to skip blank lines.
    $lines = explode("\n", $page['body']);
    $line = '';
    $count = 0;
    while (!$line && ++$count < 10) {
        $line = $lines[array_rand($lines)];
    }
    return htmlentities($line);
}
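A minimal usage sketch (the page name "Nouns" is hypothetical, and wiki_load is assumed to be provided by the surrounding wiki code):

// Print a random line from the "Nouns" page, already HTML-escaped;
// if the page is empty or missing, the page name itself is printed.
echo madlib_get_entry('Nouns');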
Example #2
// Save $data as the new current revision of $pagename. Returns true on
// success (and also after silently swallowing a blacklisted post),
// false if the RCS store fails.
function wiki_save_page($pagename, $data, $logmessage = 'No message', $headers = array(), $noindex = false)
{
    global $WIKI_REPOSITORY, $WIKI_PAGEDIR, $currentver, $revisions;
    // Refuse edits from blacklisted hosts: entries containing a "/"
    // are address blocks, anything else is matched as an exact IP.
    $blacklist = file(WIKI_BLACKLIST);
    foreach ($blacklist as $b) {
        $b = trim($b);
        if ((preg_match('!/!', $b) && ip_in_block($_SERVER['REMOTE_ADDR'], $b)) || $_SERVER['REMOTE_ADDR'] == $b) {
            // Report the attempt, stall the spammer, and pretend the save succeeded.
            mail('*****@*****.**', "Wiki Spam Post from " . $_SERVER['REMOTE_ADDR'], "Page: {$pagename}\nData:\n{$data}\n\nLog Message: {$logmessage}\n");
            sleep(60);
            return true;
        }
    }
    // Set the title without clobbering any headers the caller passed in.
    $headers['title'] = $pagename;
    if ($noindex) {
        // Ask crawlers to skip this page via the robots exclusion list.
        if (!succeeds(robots_exclude("*", $_SERVER['SCRIPT_NAME'] . "/{$WIKI_REPOSITORY}" . $pagename))) {
            die("Robots exclude error: " . errmsg());
        }
        $headers['robots'] = 'NoIndex';
    }
    // Merge in headers from the previous revision, letting the values
    // set above take precedence.
    if ($previous = wiki_load($pagename, 'Current')) {
        $headers = array_merge(mime_parse_header($previous['header']), $headers);
    }
    // Normalize line endings and store the page through RCS.
    $body = mime_make_header($headers) . "\n\n" . str_replace("\r\n", "\n", $data);
    $log = (is_logged_in() ? "by " . $_SERVER['REMOTE_ADDR'] . "\n" : "") . $logmessage;
    if (succeeds(rcs_store($WIKI_PAGEDIR . $pagename, $body, $log))) {
        $revisions = array_keys(rcs_get_revisions($WIKI_PAGEDIR . $pagename));
        $currentver = array_shift($revisions);
        // FIXME: rcs_version_inc is a hack -- should check to see
        // what revision came up. Some pages get saved with no
        // changes, see...
        //     if ($revisions[2] == $srcversion and $srcversion) {
        return true;
    } else {
        return false;
    }
}
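A minimal usage sketch (the page name, body text, and log message are hypothetical; the $WIKI_PAGEDIR and $WIKI_REPOSITORY globals plus the rcs_*, wiki_load, robots_exclude, and succeeds helpers are assumed to be configured by the surrounding wiki code):

// Save a test edit to the SandBox page and keep it out of search indexes.
// On success, the function leaves the new revision id in the global $currentver.
if (wiki_save_page('SandBox', "Hello, wiki.\n", 'testing the save path', array(), true)) {
    echo "Current revision is now {$currentver}\n";
} else {
    echo "RCS store failed\n";
}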