Example #1
function ViewpageProps($name, $pagestore)
{
    global $dbi, $showpagesource, $datetimeformat, $FieldSeparator;
    $pagehash = RetrievePage($dbi, $name, $pagestore);
    if ($pagehash == -1) {
        $table = sprintf(gettext("Page name '%s' is not in the database"), $name) . "\n";
    } else {
        $table = "<table border=1 bgcolor=white>\n";
        while (list($key, $val) = each($pagehash)) {
            if ($key > 0 || !$key) {
                // $key is a numeric array index, not a page property; skip it
                continue;
            }
            if (gettype($val) == "array" && $showpagesource == "on") {
                $val = implode("{$FieldSeparator}#BR#{$FieldSeparator}\n", $val);
                $val = htmlspecialchars($val);
                $val = str_replace("{$FieldSeparator}#BR#{$FieldSeparator}", "<br>", $val);
            } elseif ($key == 'lastmodified' || $key == 'created') {
                $val = date($datetimeformat, $val);
            } else {
                $val = htmlspecialchars($val);
            }
            $table .= "<tr><td>{$key}</td><td>{$val}</td></tr>\n";
        }
        $table .= "</table>";
    }
    return $table;
}
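A minimal usage sketch, assuming the PhpWiki globals ($dbi, $WikiPageStore, and so on) have already been initialized by the including script; the page name is only illustrative:
// Hypothetical call: render the stored properties of FrontPage as an HTML table.
print ViewpageProps('FrontPage', $WikiPageStore);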
Example #2
/**
 * The main() function which generates a zip archive of a PhpWiki.
 *
 * If $include_archive is false, only the current version of each page
 * is included in the zip file; otherwise all archived versions are
 * included as well.
 */
function MakeWikiZip($include_archive = false)
{
    global $dbi, $WikiPageStore, $ArchivePageStore;
    $pages = GetAllWikiPageNames($dbi);
    $zipname = "wiki.zip";
    if ($include_archive) {
        $zipname = "wikidb.zip";
    }
    $zip = new ZipWriter("Created by PhpWiki", $zipname);
    for (reset($pages); $pagename = current($pages); next($pages)) {
        set_time_limit(30); // reset the execution-time watchdog for each page
        $pagehash = RetrievePage($dbi, $pagename, $WikiPageStore);
        if (!is_array($pagehash)) {
            continue;
        }
        if ($include_archive) {
            $oldpagehash = RetrievePage($dbi, $pagename, $ArchivePageStore);
        } else {
            $oldpagehash = false;
        }
        $attrib = array('mtime' => $pagehash['lastmodified'], 'is_ascii' => 1);
        if (($pagehash['flags'] & FLAG_PAGE_LOCKED) != 0) {
            $attrib['write_protected'] = 1;
        }
        $content = MailifyPage($pagehash, $oldpagehash);
        $zip->addRegularFile(encode_pagename_for_wikizip($pagehash['pagename']), $content, $attrib);
    }
    $zip->finish();
}
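Following the docblock above, a minimal sketch of the two ways this dump might be invoked (assuming the calling admin script has already initialized $dbi and the page-store globals):
// Current versions only (produces wiki.zip)
MakeWikiZip();
// Current plus archived versions (produces wikidb.zip)
MakeWikiZip(true);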
Example #3
function SavePage($dbi, $page, $source)
{
    global $WikiPageStore;
    $pagename = $page['pagename'];
    $version = $page['version'];
    if (is_array($current = RetrievePage($dbi, $pagename, $WikiPageStore))) {
        if ($version <= $current['version']) {
            $page['version'] = $current['version'] + 1;
            $version = $page['version'] . " [was {$version}]";
        }
        SaveCopyToArchive($dbi, $pagename, $current);
    }
    printf(gettext("Inserting page %s, version %s from %s"), "<b>" . htmlspecialchars($pagename) . "</b>", $version, $source);
    print "<br>\n";
    flush();
    InsertPage($dbi, $pagename, $page);
}
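For illustration only, a hypothetical page hash (field names inferred from the other examples in this listing; the real edit handler assembles it from the submitted form) could be saved like this:
// Hypothetical page hash; field names follow the other examples here.
$page = array(
    'pagename'     => 'SandBox',
    'version'      => 2,
    'author'       => $remoteuser,
    'lastmodified' => time(),
    'content'      => array('Describe SandBox here.'),
);
// If version 2 or newer is already stored, SavePage bumps the version
// and archives the previous copy before inserting.
SavePage($dbi, $page, "the edit form");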
Example #4
<!-- $Id: lockpage.php,v 1.1 2004/09/28 21:48:44 gcasse Exp $ -->
<?php 
if (isset($lock)) {
    $page = $lock;
} elseif (isset($unlock)) {
    $page = $unlock;
}
$argv[0] = $page; // necessary for displaying the page afterwards
$pagename = rawurldecode($page);
$pagehash = RetrievePage($dbi, $pagename, $WikiPageStore);
if (!is_array($pagehash)) {
    ExitWiki("Unknown page '" . htmlspecialchars($pagename) . "'\n");
}
if (isset($lock)) {
    $pagehash['flags'] |= FLAG_PAGE_LOCKED;
    InsertPage($dbi, $pagename, $pagehash);
    // echo htmlspecialchars($page) . " locked\n";
} elseif (isset($unlock)) {
    $pagehash['flags'] &= ~FLAG_PAGE_LOCKED;
    InsertPage($dbi, $pagename, $pagehash);
    // echo htmlspecialchars($page) . " unlocked\n";
}
Example #5
function SetWikiPageLinks($dbi, $pagename, $linklist)
{
    $cache = array();
    // Phase 1: fetch the relevant pairs from 'wikilinks' into $cache
    // ---------------------------------------------------------------
    // first the info for $pagename
    $linkinfo = RetrievePage($dbi, $pagename, 'wikilinks');
    if (is_array($linkinfo)) {
        // page exists?
        $cache[$pagename] = $linkinfo;
    } else {
        // create info for page
        $cache[$pagename] = array('fromlinks' => array(), 'tolinks' => array());
        // look up pages that link to $pagename
        $pname = dbmfirstkey($dbi['wikilinks']);
        while ($pname) {
            $linkinfo = RetrievePage($dbi, $pname, 'wikilinks');
            if ($linkinfo['tolinks'][$pagename]) {
                $cache[$pagename]['fromlinks'][$pname] = 1;
            }
            $pname = dbmnextkey($dbi['wikilinks'], $pname);
        }
    }
    // then the info for the pages that $pagename used to point to
    $oldTolinks = $cache[$pagename]['tolinks'];
    reset($oldTolinks);
    while (list($link, $dummy) = each($oldTolinks)) {
        $linkinfo = RetrievePage($dbi, $link, 'wikilinks');
        if (is_array($linkinfo)) {
            $cache[$link] = $linkinfo;
        }
    }
    // finally the info for the pages that $pagename will point to
    reset($linklist);
    while (list($link, $dummy) = each($linklist)) {
        $linkinfo = RetrievePage($dbi, $link, 'wikilinks');
        if (is_array($linkinfo)) {
            $cache[$link] = $linkinfo;
        }
    }
    // Phase 2: delete the old links
    // ---------------------------------------------------------------
    // delete the old tolinks for $pagename
    // $cache[$pagename]['tolinks'] = array();
    // (overwritten anyway in Phase 3)
    // remove $pagename from the fromlinks of pages in $oldTolinks
    reset($oldTolinks);
    while (list($oldTolink, $dummy) = each($oldTolinks)) {
        if ($cache[$oldTolink]) {
            // links to existing page?
            $oldFromlinks = $cache[$oldTolink]['fromlinks'];
            $cache[$oldTolink]['fromlinks'] = array();
            // erase fromlinks
            reset($oldFromlinks);
            // rebuild the fromlinks without $pagename
            while (list($fromlink, $dummy) = each($oldFromlinks)) {
                if ($fromlink != $pagename) {
                    $cache[$oldTolink]['fromlinks'][$fromlink] = 1;
                }
            }
        }
    }
    // Phase 3: add the new links
    // ---------------------------------------------------------------
    // set the new tolinks for $pagename
    $cache[$pagename]['tolinks'] = $linklist;
    // add $pagename to the fromlinks of pages in $linklist
    reset($linklist);
    while (list($link, $dummy) = each($linklist)) {
        if ($cache[$link]) {
            // existing page?
            $cache[$link]['fromlinks'][$pagename] = 1;
        }
    }
    // Phase 4: write $cache back to 'wikilinks'
    // ---------------------------------------------------------------
    reset($cache);
    while (list($link, $fromAndTolinks) = each($cache)) {
        InsertPage($dbi, $link, $fromAndTolinks, 'wikilinks');
    }
}
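A sketch of how this link index might be updated after a page is saved; as the loops above show, $linklist is an associative array keyed by the names of the pages that $pagename now links to (the page names below are only examples):
// Hypothetical call: record that FrontPage now links to these two pages.
// Keys are the target page names; the values are just presence markers.
SetWikiPageLinks($dbi, 'FrontPage', array('RecentChanges' => 1, 'SandBox' => 1));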
Example #6
$directory = $dumpserial;
$pages = GetAllWikiPagenames($dbi);
// see if we can access the directory the user wants us to use
if (!file_exists($directory)) {
    if (!mkdir($directory, 0755)) {
        ExitWiki("Cannot create directory '{$directory}'<br>\n");
    } else {
        $html = "Created directory '{$directory}' for the page dump...<br>\n";
    }
} else {
    $html = "Using directory '{$directory}'<br>\n";
}
$numpages = count($pages);
for ($x = 0; $x < $numpages; $x++) {
    $pagename = htmlspecialchars($pages[$x]);
    $filename = preg_replace('/^\\./', '%2e', rawurlencode($pages[$x]));
    $html .= "<br>{$pagename} ... ";
    if ($pagename != $filename) {
        $html .= "<small>saved as {$filename}</small> ... ";
    }
    $data = serialize(RetrievePage($dbi, $pages[$x], $WikiPageStore));
    if ($fd = fopen("{$directory}/{$filename}", "w")) {
        $num = fwrite($fd, $data, strlen($data));
        $html .= "<small>{$num} bytes written</small>\n";
    } else {
        ExitWiki("<b>couldn't open file '{$directory}/{$filename}' for writing</b>\n");
    }
}
$html .= "<p><b>Dump complete.</b>";
GeneratePage('MESSAGE', $html, 'Dump serialized pages', 0);
ExitWiki('');
Example #7
    function _diff_header($xbeg, $xlen, $ybeg, $ylen)
    {
        $xlen = $xlen == 1 ? '' : ",{$xlen}";
        $ylen = $ylen == 1 ? '' : ",{$ylen}";
        return "@@ -{$xbeg}{$xlen} +{$ybeg}{$ylen} @@";
    }
}
/////////////////////////////////////////////////////////////////
if ($diff) {
    if (get_magic_quotes_gpc()) {
        $diff = stripslashes($diff);
    }
    $pagename = $diff;
    $wiki = RetrievePage($dbi, $pagename, $WikiPageStore);
    //  $dba = OpenDataBase($ArchivePageStore);
    $archive = RetrievePage($dbi, $pagename, $ArchivePageStore);
    $aLang = getLang();
    $html = '<div>';
    $aText = $aLang == "en" ? "Current page:" : "Page courante :";
    $html .= "\n<dl><dt>{$aText}</dt>";
    if (is_array($wiki)) {
        $html .= "\n<dd>";
        $html .= sprintf(gettext("Version %s"), $wiki['version']);
        $html .= "\n</dd><dd>";
        $aText = $aLang == "en" ? "Last modified on %s" : "Dernière modification : %s";
        $html .= sprintf($aText, date($datetimeformat, $wiki['lastmodified']));
        $html .= "\n</dd><dd>";
        $aText = $aLang == "en" ? "By %s" : "Par %s";
        $html .= sprintf($aText, $wiki['author']);
        $html .= "</dd>";
    } else {
Example #8
    ExitWiki(gettext("No page name passed into editpage!"));
}
if (is_array($pagehash)) {
    if ($pagehash['flags'] & FLAG_PAGE_LOCKED && !defined('WIKI_ADMIN')) {
        $html = "<p>";
        $html .= gettext("This page has been locked by the administrator and cannot be edited.");
        $html .= "\n<p>";
        $html .= gettext("Sorry for the inconvenience.");
        $html .= "\n";
        GeneratePage('MESSAGE', $html, sprintf(gettext("Problem while editing %s"), $pagename), 0);
        ExitWiki("");
    }
    $textarea = htmlspecialchars(implode("\n", $pagehash["content"]));
    if (isset($copy)) {
        // $cdbi = OpenDataBase($WikiPageStore);
        $currentpage = RetrievePage($dbi, $pagename, $WikiPageStore);
        $pagehash["version"] = $currentpage["version"];
    } elseif ($pagehash["version"] > 1) {
        if (IsInArchive($dbi, $pagename)) {
            $pagehash["copy"] = 1;
        }
    }
} else {
    if (preg_match("/^{$WikiNameRegexp}\$/", $pagename)) {
        $newpage = $pagename;
    } else {
        $newpage = "[{$pagename}]";
    }
    $textarea = htmlspecialchars(sprintf(gettext("Describe %s here."), $newpage));
    unset($pagehash);
    $pagehash["version"] = 0;
Example #9
function BackLinkSearchNextMatch($dbi, &$pos)
{
    global $WikiPageStore;
    while (list($key, $page) = each($pos['data'])) {
        $pagedata = RetrievePage($dbi, $page, $WikiPageStore);
        if (!is_array($pagedata)) {
            printf(gettext("%s: bad data<br>\n"), htmlspecialchars($page));
            continue;
        }
        while (list($i, $line) = each($pagedata['content'])) {
            if (preg_match($pos['search'], $line)) {
                return $page;
            }
        }
    }
    return 0;
}
Example #10
function FullSearchNextMatch($dbi, $res)
{
    global $WikiPageStore;
    if ($row = msql_fetch_row($res)) {
        return RetrievePage($dbi, $row[0], $WikiPageStore);
    } else {
        return 0;
    }
}
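Because the function returns a page hash, or 0 once the result set is exhausted, it is naturally consumed in a loop. A minimal sketch, assuming $res is the mSQL result handle produced by the search query that is not shown in this example:
// Walk the search results; each iteration yields a full page hash.
while ($pagehash = FullSearchNextMatch($dbi, $res)) {
    print htmlspecialchars($pagehash['pagename']) . "<br>\n";
}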
Example #11
function UpdateRecentChanges($dbi, $pagename, $isnewpage)
{
    global $remoteuser; // this is set in the config
    global $dateformat;
    global $WikiPageStore;
    $recentchanges = RetrievePage($dbi, gettext("RecentChanges"), $WikiPageStore);
    // this shouldn't be necessary, since PhpWiki loads
    // default pages if this is a new baby Wiki
    if ($recentchanges == -1) {
        $recentchanges = array();
    }
    $now = time();
    $today = date($dateformat, $now);
    if (date($dateformat, $recentchanges['lastmodified']) != $today) {
        $isNewDay = TRUE;
        $recentchanges['lastmodified'] = $now;
    } else {
        $isNewDay = FALSE;
    }
    $numlines = sizeof($recentchanges['content']);
    $newpage = array();
    $k = 0;
    // scroll through the page to the first date and break
    // dates are marked with "____" at the beginning of the line
    for ($i = 0; $i < $numlines; $i++) {
        if (preg_match("/^____/", $recentchanges['content'][$i])) {
            break;
        } else {
            $newpage[$k++] = $recentchanges['content'][$i];
        }
    }
    // if it's a new date, insert it
    $newpage[$k++] = $isNewDay ? "____{$today}" : $recentchanges['content'][$i++];
    // add the updated page's name to the array
    if ($isnewpage) {
        $newpage[$k++] = "\n \n* [{$pagename}] (new) ..... {$remoteuser}";
    } else {
        $diffurl = "phpwiki:?diff=" . rawurlencode($pagename);
        $newpage[$k++] = "\n \n* [{$pagename}] ([diff|{$diffurl}]) ..... {$remoteuser}";
    }
    if ($isNewDay) {
        $newpage[$k++] = "\r";
    }
    // copy the rest of the page into the new array
    // and skip previous entry for $pagename
    $pagename = preg_quote($pagename);
    for (; $i < $numlines; $i++) {
        if (!preg_match("|\\[{$pagename}\\]|", $recentchanges['content'][$i])) {
            $newpage[$k++] = $recentchanges['content'][$i];
        }
    }
    // copy the new page back into recentchanges, skipping empty days
    $numlines = sizeof($newpage);
    $recentchanges['content'] = array();
    $k = 0;
    for ($i = 0; $i < $numlines; $i++) {
        if ($i != $numlines - 1 && preg_match("/^____/", $newpage[$i]) && preg_match("/^[\r\n]*\$/", $newpage[$i + 1])) {
            $i++;
        } else {
            $recentchanges['content'][$k++] = $newpage[$i];
        }
    }
    InsertPage($dbi, gettext("RecentChanges"), $recentchanges);
}