Code Example #1
File: cache_use.test.php Project: richmahn/Door43
 /**
  * @group flaky
  */
 function test_confnocaching()
 {
     global $conf;
     $conf['cachetime'] = -1; // disables renderer caching
     $this->assertFalse($this->cache->useCache());
     $this->assertNotEmpty($this->cache->_nocache);
 }
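Beyond $conf['cachetime'], useCache() also accepts explicit dependencies. A minimal sketch, assuming a bootstrapped DokuWiki environment; the page id and the extra dependency file are illustrative:

// Sketch only: assumes DokuWiki's inc/init.php has been loaded, so
// cache_renderer, wikiFN() and p_cached_output() are available.
$id    = 'wiki:syntax'; // illustrative page id
$cache = new cache_renderer($id, wikiFN($id), 'xhtml');

$depends = array(
    'age'   => 3600,                           // treat the cache as stale after an hour
    'files' => array(DOKU_CONF . 'local.php'), // expire when the local config changes
);

if ($cache->useCache($depends)) {
    echo $cache->retrieveCache();
} else {
    echo p_cached_output(wikiFN($id), 'xhtml', $id);
}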
Code Example #2
 function test_staleness()
 {
     global $ID;
     $ID = 'stale';
     $file = wikiFN($ID);
     # Prepare test page
     saveWikiText($ID, 'Fresh', 'Created');
     # Create stale cache
     $cache = new cache_renderer($ID, $file, 'xhtml');
     $cache->storeCache('Stale');
     $stale = $cache->retrieveCache();
     # Make the cache file no newer than the wiki page so it counts as stale
     $time = filemtime($file);
     touch($cache->cache, $time);
     # Make the test
     $fresh = p_cached_output($file, 'xhtml', $ID);
     $this->assertNotEquals($fresh, $stale, 'Stale cache failed to expire');
 }
 /**
  * @group slow
  */
 function test_cache_handling()
 {
     $testid = 'wiki:bar:test';
     saveWikiText($testid, '[[wiki:foo:]]', 'Test setup');
     idx_addPage($testid);
     saveWikiText('wiki:foo:start', 'bar', 'Test setup');
     idx_addPage('wiki:foo:start');
     sleep(1); // wait in order to make sure that conditions with < give the right result
     p_wiki_xhtml($testid); // populate cache
     $cache = new cache_renderer($testid, wikiFN($testid), 'xhtml');
     $this->assertTrue($cache->useCache());
     /** @var helper_plugin_move_op $move */
     $move = plugin_load('helper', 'move_op');
     $this->assertTrue($move->movePage('wiki:foo:start', 'wiki:foo2:start'));
     $cache = new cache_renderer($testid, wikiFN($testid), 'xhtml');
     $this->assertFalse($cache->useCache());
 }
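test_cache_handling() passes because the move plugin hooks DokuWiki's PARSER_CACHE_USE event, which lets a plugin veto or extend a cache check. A hedged sketch of such a hook (the plugin class and the dependency file name are illustrative, not taken from the move plugin itself):

class action_plugin_example_cache extends DokuWiki_Action_Plugin
{
    public function register(Doku_Event_Handler $controller)
    {
        $controller->register_hook('PARSER_CACHE_USE', 'BEFORE', $this, 'handleCacheUse');
    }

    public function handleCacheUse(Doku_Event $event, $param)
    {
        /** @var cache_parser $cache */
        $cache = $event->data;
        if ($cache->mode != 'xhtml') return;

        // Illustrative dependency file maintained by the plugin; once it is newer
        // than the cache file, useCache() returns false and the page is re-rendered.
        $cache->depends['files'][] = DOKU_CONF . 'moved_pages.conf';
    }
}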
Code Example #4
File: parserutils.php Project: nextghost/dokuwiki
/**
 * returns the metadata of a page
 *
 * @param string $id The id of the page the metadata should be returned from
 * @param string $key The key of the metadata value that shall be read (by default everything) - separate hierarchies by " " like "date created"
 * @param int $render If the page should be rendered - possible values:
 *     METADATA_DONT_RENDER, METADATA_RENDER_USING_SIMPLE_CACHE, METADATA_RENDER_USING_CACHE
 *     METADATA_RENDER_UNLIMITED (also combined with the previous two options),
 *     default: METADATA_RENDER_USING_CACHE
 * @return mixed The requested metadata fields
 *
 * @author Esther Brunner <*****@*****.**>
 * @author Michael Hamann <*****@*****.**>
 */
function p_get_metadata($id, $key = '', $render = METADATA_RENDER_USING_CACHE)
{
    global $ID;
    static $render_count = 0;
    // track pages that have already been rendered in order to avoid rendering the same page
    // again
    static $rendered_pages = array();
    // cache the current page
    // Benchmarking shows the current page's metadata is generally the only page metadata
    // accessed several times. This may catch a few other pages, but that shouldn't be an issue.
    $cache = $ID == $id;
    $meta = p_read_metadata($id, $cache);
    if (!is_numeric($render)) {
        if ($render) {
            $render = METADATA_RENDER_USING_SIMPLE_CACHE;
        } else {
            $render = METADATA_DONT_RENDER;
        }
    }
    // prevent recursive calls in the cache
    static $recursion = false;
    if (!$recursion && $render != METADATA_DONT_RENDER && !isset($rendered_pages[$id]) && page_exists($id)) {
        $recursion = true;
        $cachefile = new cache_renderer($id, wikiFN($id), 'metadata');
        $do_render = false;
        if ($render & METADATA_RENDER_UNLIMITED || $render_count < P_GET_METADATA_RENDER_LIMIT) {
            if ($render & METADATA_RENDER_USING_SIMPLE_CACHE) {
                $pagefn = wikiFN($id);
                $metafn = metaFN($id, '.meta');
                if (!@file_exists($metafn) || @filemtime($pagefn) > @filemtime($cachefile->cache)) {
                    $do_render = true;
                }
            } elseif (!$cachefile->useCache()) {
                $do_render = true;
            }
        }
        if ($do_render) {
            ++$render_count;
            $rendered_pages[$id] = true;
            $old_meta = $meta;
            $meta = p_render_metadata($id, $meta);
            // only update the file when the metadata has been changed
            if ($meta == $old_meta || p_save_metadata($id, $meta)) {
                // store a timestamp in order to make sure that the cachefile is touched
                $cachefile->storeCache(time());
            } elseif ($meta != $old_meta) {
                msg('Unable to save metadata file. Hint: disk full; file permissions; safe_mode setting.', -1);
            }
        }
        $recursion = false;
    }
    $val = $meta['current'];
    // filter by $key
    foreach (preg_split('/\\s+/', $key, 2, PREG_SPLIT_NO_EMPTY) as $cur_key) {
        if (!isset($val[$cur_key])) {
            return null;
        }
        $val = $val[$cur_key];
    }
    return $val;
}
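A short usage sketch for this signature (the page id and keys are illustrative):

// Everything for the current page, rendered through the cache when necessary
$all = p_get_metadata($ID);

// A nested value; hierarchy levels are separated by a space
$created = p_get_metadata($ID, 'date created', METADATA_DONT_RENDER);

// Combine flags to ignore the per-request render limit
$title = p_get_metadata($ID, 'title', METADATA_RENDER_USING_CACHE | METADATA_RENDER_UNLIMITED);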
Code Example #5
File: parserutils.php Project: Harvie/dokuwiki
/**
 * Returns the given file parsed into the requested output format
 *
 * @author Andreas Gohr <*****@*****.**>
 * @author Chris Smith <*****@*****.**>
 */
function p_cached_output($file, $format = 'xhtml', $id = '')
{
    global $conf;
    $cache = new cache_renderer($id, $file, $format);
    if ($cache->useCache()) {
        $parsed = $cache->retrieveCache(false);
        if ($conf['allowdebug'] && $format == 'xhtml') {
            $parsed .= "\n<!-- cachefile {$cache->cache} used -->\n";
        }
    } else {
        $parsed = p_render($format, p_cached_instructions($file, false, $id), $info);
        if ($info['cache']) {
            $cache->storeCache($parsed); // save cachefile
            if ($conf['allowdebug'] && $format == 'xhtml') {
                $parsed .= "\n<!-- no cachefile used, but created {$cache->cache} -->\n";
            }
        } else {
            $cache->removeCache(); // try to delete cachefile
            if ($conf['allowdebug'] && $format == 'xhtml') {
                $parsed .= "\n<!-- no cachefile used, caching forbidden -->\n";
            }
        }
    }
    return $parsed;
}
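Typical call site, roughly how DokuWiki's own p_wiki_xhtml() wrapper uses it (sketch; the page id is illustrative):

$id   = 'wiki:syntax'; // illustrative page id
$html = p_cached_output(wikiFN($id), 'xhtml', $id);

// Passing $id matters: cache_renderer uses it to check metadata-based
// expiry for that page, not just the mtime of the file.
echo $html;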
Code Example #6
File: common.php Project: nextghost/dokuwiki
/**
 * Saves a wikitext by calling io_writeWikiPage.
 * Also directs changelog and attic updates.
 *
 * @author Andreas Gohr <*****@*****.**>
 * @author Ben Coburn <*****@*****.**>
 */
function saveWikiText($id, $text, $summary, $minor = false)
{
    /* Note to developers:
         This code is subtle and delicate. Test the behavior of
         the attic and changelog with dokuwiki and external edits
         after any changes. External edits change the wiki page
         directly without using php or dokuwiki.
       */
    global $conf;
    global $lang;
    global $REV;
    // ignore if no changes were made
    if ($text == rawWiki($id, '')) {
        return;
    }
    $file = wikiFN($id);
    $old = @filemtime($file); // from page
    $wasRemoved = trim($text) == ''; // check for empty or whitespace only
    $wasCreated = !@file_exists($file);
    $wasReverted = $REV == true;
    $newRev = false;
    $oldRev = getRevisions($id, -1, 1, 1024); // from changelog
    $oldRev = (int) (empty($oldRev) ? 0 : $oldRev[0]);
    if (!@file_exists(wikiFN($id, $old)) && @file_exists($file) && $old >= $oldRev) {
        // add old revision to the attic if missing
        saveOldRevision($id);
        // add a changelog entry if this edit came from outside dokuwiki
        if ($old > $oldRev) {
            addLogEntry($old, $id, DOKU_CHANGE_TYPE_EDIT, $lang['external_edit'], '', array('ExternalEdit' => true));
            // remove soon to be stale instructions
            $cache = new cache_instructions($id, $file);
            $cache->removeCache();
        }
    }
    if ($wasRemoved) {
        // Send "update" event with empty data, so plugins can react to page deletion
        $data = array(array($file, '', false), getNS($id), noNS($id), false);
        trigger_event('IO_WIKIPAGE_WRITE', $data);
        // pre-save deleted revision
        @touch($file);
        clearstatcache();
        $newRev = saveOldRevision($id);
        // remove empty file
        @unlink($file);
        // don't remove old meta info as it should be saved, plugins can use IO_WIKIPAGE_WRITE for removing their metadata...
        // purge non-persistent meta data
        p_purge_metadata($id);
        $del = true;
        // autoset summary on deletion
        if (empty($summary)) {
            $summary = $lang['deleted'];
        }
        // remove empty namespaces
        io_sweepNS($id, 'datadir');
        io_sweepNS($id, 'mediadir');
    } else {
        // save file (namespace dir is created in io_writeWikiPage)
        io_writeWikiPage($file, $text, $id);
        // pre-save the revision, to keep the attic in sync
        $newRev = saveOldRevision($id);
        $del = false;
    }
    // select changelog line type
    $extra = '';
    $type = DOKU_CHANGE_TYPE_EDIT;
    if ($wasReverted) {
        $type = DOKU_CHANGE_TYPE_REVERT;
        $extra = $REV;
    } elseif ($wasCreated) {
        $type = DOKU_CHANGE_TYPE_CREATE;
    } elseif ($wasRemoved) {
        $type = DOKU_CHANGE_TYPE_DELETE;
    } elseif ($minor && $conf['useacl'] && $_SERVER['REMOTE_USER']) {
        $type = DOKU_CHANGE_TYPE_MINOR_EDIT; // minor edits only for logged in users
    }
    addLogEntry($newRev, $id, $type, $summary, $extra);
    // send notify mails
    notify($id, 'admin', $old, $summary, $minor);
    notify($id, 'subscribers', $old, $summary, $minor);
    // update the purgefile (timestamp of the last time anything within the wiki was changed)
    io_saveFile($conf['cachedir'] . '/purgefile', time());
    // if useheading is enabled, purge the cache of all linking pages
    if (useHeading('content')) {
        $pages = ft_backlinks($id);
        foreach ($pages as $page) {
            $cache = new cache_renderer($page, wikiFN($page), 'xhtml');
            $cache->removeCache();
        }
    }
}
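Hedged usage sketch (page id and summaries are illustrative); note that saving empty or whitespace-only text deletes the page:

// create or update a page
saveWikiText('playground:example', "====== Example ======\n\nSome text.", 'create example page');

// minor edit (only recorded as minor for logged-in users when ACLs are enabled)
saveWikiText('playground:example', "====== Example ======\n\nSome more text.", 'small wording fix', true);

// empty text removes the page and sweeps now-empty namespaces
saveWikiText('playground:example', '', 'remove example page');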
Code Example #7
File: common.php Project: splitbrain/dokuwiki
/**
 * Saves a wikitext by calling io_writeWikiPage.
 * Also directs changelog and attic updates.
 *
 * @author Andreas Gohr <*****@*****.**>
 * @author Ben Coburn <*****@*****.**>
 *
 * @param string $id       page id
 * @param string $text     wikitext being saved
 * @param string $summary  summary of text update
 * @param bool   $minor    mark this saved version as minor update
 */
function saveWikiText($id, $text, $summary, $minor = false)
{
    /* Note to developers:
         This code is subtle and delicate. Test the behavior of
         the attic and changelog with dokuwiki and external edits
         after any changes. External edits change the wiki page
         directly without using php or dokuwiki.
       */
    global $conf;
    global $lang;
    global $REV;
    /* @var Input $INPUT */
    global $INPUT;
    // prepare data for event
    $svdta = array();
    $svdta['id'] = $id;
    $svdta['file'] = wikiFN($id);
    $svdta['revertFrom'] = $REV;
    $svdta['oldRevision'] = @filemtime($svdta['file']);
    $svdta['newRevision'] = 0;
    $svdta['newContent'] = $text;
    $svdta['oldContent'] = rawWiki($id);
    $svdta['summary'] = $summary;
    $svdta['contentChanged'] = $svdta['newContent'] != $svdta['oldContent'];
    $svdta['changeInfo'] = '';
    $svdta['changeType'] = DOKU_CHANGE_TYPE_EDIT;
    $svdta['sizechange'] = null;
    // select changelog line type
    if ($REV) {
        $svdta['changeType'] = DOKU_CHANGE_TYPE_REVERT;
        $svdta['changeInfo'] = $REV;
    } elseif (!file_exists($svdta['file'])) {
        $svdta['changeType'] = DOKU_CHANGE_TYPE_CREATE;
    } elseif (trim($text) == '') {
        // empty or whitespace only content deletes
        $svdta['changeType'] = DOKU_CHANGE_TYPE_DELETE;
        // autoset summary on deletion
        if (blank($svdta['summary'])) {
            $svdta['summary'] = $lang['deleted'];
        }
    } elseif ($minor && $conf['useacl'] && $INPUT->server->str('REMOTE_USER')) {
        // minor edits only for logged in users
        $svdta['changeType'] = DOKU_CHANGE_TYPE_MINOR_EDIT;
    }
    $event = new Doku_Event('COMMON_WIKIPAGE_SAVE', $svdta);
    if (!$event->advise_before()) {
        return;
    }
    // if the content has not been changed, no save happens (plugins may override this)
    if (!$svdta['contentChanged']) {
        return;
    }
    detectExternalEdit($id);
    if ($svdta['changeType'] == DOKU_CHANGE_TYPE_CREATE || ($svdta['changeType'] == DOKU_CHANGE_TYPE_REVERT && !file_exists($svdta['file']))) {
        $filesize_old = 0;
    } else {
        $filesize_old = filesize($svdta['file']);
    }
    if ($svdta['changeType'] == DOKU_CHANGE_TYPE_DELETE) {
        // Send "update" event with empty data, so plugins can react to page deletion
        $data = array(array($svdta['file'], '', false), getNS($id), noNS($id), false);
        trigger_event('IO_WIKIPAGE_WRITE', $data);
        // pre-save deleted revision
        @touch($svdta['file']);
        clearstatcache();
        $svdta['newRevision'] = saveOldRevision($id);
        // remove empty file
        @unlink($svdta['file']);
        $filesize_new = 0;
        // don't remove old meta info as it should be saved, plugins can use IO_WIKIPAGE_WRITE for removing their metadata...
        // purge non-persistent meta data
        p_purge_metadata($id);
        // remove empty namespaces
        io_sweepNS($id, 'datadir');
        io_sweepNS($id, 'mediadir');
    } else {
        // save file (namespace dir is created in io_writeWikiPage)
        io_writeWikiPage($svdta['file'], $svdta['newContent'], $id);
        // pre-save the revision, to keep the attic in sync
        $svdta['newRevision'] = saveOldRevision($id);
        $filesize_new = filesize($svdta['file']);
    }
    $svdta['sizechange'] = $filesize_new - $filesize_old;
    $event->advise_after();
    addLogEntry($svdta['newRevision'], $svdta['id'], $svdta['changeType'], $svdta['summary'], $svdta['changeInfo'], null, $svdta['sizechange']);
    // send notify mails
    notify($svdta['id'], 'admin', $svdta['oldRevision'], $svdta['summary'], $minor);
    notify($svdta['id'], 'subscribers', $svdta['oldRevision'], $svdta['summary'], $minor);
    // update the purgefile (timestamp of the last time anything within the wiki was changed)
    io_saveFile($conf['cachedir'] . '/purgefile', time());
    // if useheading is enabled, purge the cache of all linking pages
    if (useHeading('content')) {
        $pages = ft_backlinks($id, true);
        foreach ($pages as $page) {
            $cache = new cache_renderer($page, wikiFN($page), 'xhtml');
            $cache->removeCache();
        }
    }
}
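The newer variant wraps the save in a COMMON_WIKIPAGE_SAVE event, so a plugin can inspect the prepared $svdta array or veto the save in a BEFORE handler. A hedged sketch (the class name and the protected namespace are illustrative):

class action_plugin_example_savehook extends DokuWiki_Action_Plugin
{
    public function register(Doku_Event_Handler $controller)
    {
        $controller->register_hook('COMMON_WIKIPAGE_SAVE', 'BEFORE', $this, 'handleSave');
    }

    public function handleSave(Doku_Event $event, $param)
    {
        $data = $event->data; // the $svdta array prepared in saveWikiText()

        // e.g. refuse deletions below an illustrative "policy" namespace
        if ($data['changeType'] == DOKU_CHANGE_TYPE_DELETE && getNS($data['id']) == 'policy') {
            msg('Pages in the policy namespace cannot be deleted here.', -1);
            $event->preventDefault(); // makes advise_before() return false, so saveWikiText() bails out
        }
    }
}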
Code Example #8
File: parserutils.php Project: ryankask/dokuwiki
/**
 * returns the metadata of a page
 *
 * @param string $id The id of the page the metadata should be returned from
 * @param string $key The key of the metadata value that shall be read (by default everything) - separate hierarchies by " " like "date created"
 * @param boolean $render If the page should be rendered when the cache can't be used - default true
 * @return mixed The requested metadata fields
 *
 * @author Esther Brunner <*****@*****.**>
 * @author Michael Hamann <*****@*****.**>
 */
function p_get_metadata($id, $key = '', $render = true)
{
    global $ID;
    // cache the current page
    // Benchmarking shows the current page's metadata is generally the only page metadata
    // accessed several times. This may catch a few other pages, but that shouldn't be an issue.
    $cache = $ID == $id;
    $meta = p_read_metadata($id, $cache);
    // prevent recursive calls in the cache
    static $recursion = false;
    if (!$recursion && $render) {
        $recursion = true;
        $cachefile = new cache_renderer($id, wikiFN($id), 'metadata');
        if (page_exists($id) && !$cachefile->useCache()) {
            $old_meta = $meta;
            $meta = p_render_metadata($id, $meta);
            // only update the file when the metadata has been changed
            if ($meta == $old_meta || p_save_metadata($id, $meta)) {
                // store a timestamp in order to make sure that the cachefile is touched
                $cachefile->storeCache(time());
            } else {
                msg('Unable to save metadata file. Hint: disk full; file permissions; safe_mode setting.', -1);
            }
        }
        $recursion = false;
    }
    $val = $meta['current'];
    // filter by $key
    foreach (preg_split('/\\s+/', $key, 2, PREG_SPLIT_NO_EMPTY) as $cur_key) {
        if (!isset($val[$cur_key])) {
            return null;
        }
        $val = $val[$cur_key];
    }
    return $val;
}
Code Example #9
 private function sync($id)
 {
     global $ID;
     // save $ID
     $save_ID = $ID;
     $pages_path = DOKU_DATA . 'pages/' . implode('/', explode(':', $this->ID)) . '/';
     $path = $pages_path . $id . '.txt';
     $ID = $this->ID . ":" . $id;
     // clear cache
     $cache = new cache_renderer($ID, $path, 'metadata');
     $cache->removeCache();
     $cache = new cache_renderer($ID, $path, 'xhtml');
     $cache->removeCache();
     $cache = new cache_instructions($ID, $path);
     $cache->removeCache();
     p_cached_output($path, 'metadata', $ID);
     // restore $ID
     $ID = $save_ID;
 }
Code Example #10
File: action.php Project: houshuang/folders2web
 function _custom_delete_page($id, $summary)
 {
     global $ID, $INFO, $conf;
     // mark as nonexistent to prevent indexerWebBug
     if ($id == $ID) {
         $INFO['exists'] = 0;
     }
     // delete page, meta and attic
     $file = wikiFN($id);
     $old = @filemtime($file); // from page
     if (file_exists($file)) {
         unlink($file);
     }
     $opts['oldname'] = $this->_FN(noNS($id));
     $opts['oldns'] = $this->_FN(getNS($id));
     if ($opts['oldns']) {
         $opts['oldns'] .= '/';
     }
     $this->_locate_filepairs($opts, 'metadir', '/^' . $opts['oldname'] . '\\.(?!mlist)\\w*?$/');
     $this->_locate_filepairs($opts, 'olddir', '/^' . $opts['oldname'] . '\\.\\d{10}\\.txt(\\.gz|\\.bz2)?$/');
     $this->_apply_deletes($opts);
     io_sweepNS($id, 'datadir');
     io_sweepNS($id, 'metadir');
     io_sweepNS($id, 'olddir');
     // send notify mails
     notify($id, 'admin', $old, $summary);
     notify($id, 'subscribers', $old, $summary);
     // update the purgefile (timestamp of the last time anything within the wiki was changed)
     io_saveFile($conf['cachedir'] . '/purgefile', time());
     // if useheading is enabled, purge the cache of all linking pages
     if (useHeading('content')) {
         $pages = ft_backlinks($id);
         foreach ($pages as $page) {
             $cache = new cache_renderer($page, wikiFN($page), 'xhtml');
             $cache->removeCache();
         }
     }
 }
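The backlink purge at the end of Code Examples #6, #7 and #10 repeats the same pattern; as a sketch it could be factored into a small helper (the function name is illustrative):

/**
 * Remove the cached xhtml of every page linking to $id, so headline-based
 * links are re-rendered when $conf['useheading'] is enabled.
 */
function example_purge_backlink_caches($id)
{
    if (!useHeading('content')) return;

    foreach (ft_backlinks($id) as $page) {
        $cache = new cache_renderer($page, wikiFN($page), 'xhtml');
        $cache->removeCache();
    }
}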