// NOTE(review): this chunk begins INSIDE a block comment whose `/*` opener is
// above the visible region — everything up to the first `*/` below is
// commented-out dictionary-selection code, preserved as found.
/*
if ($_REQUEST['dictionary'] != "") { $GLOBALS['dict'] = $_REQUEST['dictionary']; } } */

/* // leave out the dicrtionary support for the moment
if ($_REQUEST['init'] == 1) {
    // don't put spaces between these as js is going to tokenize them up
    echo "<div id='HA-spellcheck-dictionaries'>en,en_GB,en_US,en_CA,sv_SE,de_DE,pt_PT</div>";
} */

// Undo magic_quotes_gpc escaping on every request parameter so the text to
// spell-check is unescaped.
// NOTE(review): get_magic_quotes_gpc() was removed in PHP 8.0 — this fragment
// assumes a PHP <= 7.x runtime; confirm before upgrading.
if (get_magic_quotes_gpc()) {
    foreach ($_REQUEST as $k => $v) {
        $_REQUEST["{$k}"] = stripslashes($v);
    }
}

// Load the spell-check SAX handler and the PEAR HTML SAX parser.
// (DIRNAME works because PHP function names are case-insensitive.)
require_once DIRNAME(__FILE__) . '/spell_parser.inc';
require_once 'XML/XML_HTMLSax.php';

// Wire Spell_Parser up as the SAX event target.
// NOTE(review): $GLOBALS['dict'] is presumably initialised above this chunk
// (the commented-out code suggests a default dictionary) — TODO confirm.
$handler = new Spell_Parser();
$handler->setLanguage($GLOBALS['dict']);
$parser = new XML_HTMLSax();
$parser->set_object($handler);
$parser->set_element_handler('openHandler', 'closeHandler');
$parser->set_data_handler('dataHandler');

// Parse the POSTed HTML content; output is presumably emitted by the
// Spell_Parser handlers as a side effect of parsing — TODO confirm.
$string_to_parse = stripslashes($_POST['content']);
$parser->parse($string_to_parse);
?>
<body>
</html>
/**
 * Get PukiWiki Page List via http using cmd=filelist
 *
 * @access public
 * @param string $url    PukiWiki URL (cmd=filelist)
 * @param string $filter Preg pattern body (delimiters added here); keep only
 *                       page names matching it. NULL = no filtering.
 * @param string $except Preg pattern body (delimiters added here); drop page
 *                       names matching it. NULL = no exclusion.
 * @return array
 *   Page list whose each element has keys 'href', 'page', 'file'.
 *   FALSE if HTTP GET failed.
 * @uses PKWKFilelistHandler
 * @uses PEAR XML/XML_HTMLSax.php
 * @uses $GLOBALS['ADMINPASS']
 * @uses $GLOBALS['USERNAME']
 * @uses $GLOBALS['USERPASS']
 */
function &pkwk_get_existpages($url, $filter = NULL, $except = NULL)
{
    // Guard against URLs without a query string ($parsed['query'] / 'cmd'
    // would otherwise raise undefined-index notices).
    $parsed  = parse_url($url);
    $queries = array();
    if (isset($parsed['query'])) {
        parse_str($parsed['query'], $queries);
    }
    $cmd = isset($queries['cmd']) ? $queries['cmd'] : '';

    if ($cmd == 'filelist' && $GLOBALS['ADMINPASS'] != '') {
        // cmd=filelist requires the admin password: POST it
        require_once 'HTTP/Request.php';
        $req = new HTTP_Request($url);
        $req->setMethod(HTTP_REQUEST_METHOD_POST);
        $req->addPostData('pass', $GLOBALS['ADMINPASS']);
        $req->setBasicAuth($GLOBALS['USERNAME'], $GLOBALS['USERPASS']);
        if (PEAR::isError($req->sendRequest())) {
            // Return via a variable: returning a literal from a by-reference
            // function raises a notice.
            $result = FALSE;
            return $result;
        }
        $html = $req->getResponseBody();
    } else {
        $html = http_get_contents($url, $GLOBALS['USERNAME'], $GLOBALS['USERPASS']);
        if ($html === FALSE) {
            $result = FALSE;
            return $result;
        }
    }

    // Parse the fetched HTML; PKWKFilelistHandler accumulates page entries
    // into $handler->pages.
    require_once 'XML/XML_HTMLSax.php';
    $parser  = new XML_HTMLSax();
    $handler = new PKWKFilelistHandler();
    $parser->set_object($handler);
    $parser->set_element_handler('openHandler', 'closeHandler');
    $parser->set_data_handler('dataHandler');
    $parser->parse($html);

    // Keep only page names matching $filter (only '/' is escaped: the caller
    // passes a regex body, so other metacharacters are intentionally live).
    if ($filter !== NULL) {
        $pregfilter = '/' . str_replace('/', '\\/', $filter) . '/';
        foreach ($handler->pages as $i => $page) {
            if (!preg_match($pregfilter, $page['page'])) {
                unset($handler->pages[$i]);
            }
        }
    }

    // Drop page names matching $except.
    if ($except !== NULL) {
        $pregexcept = '/' . str_replace('/', '\\/', $except) . '/';
        foreach ($handler->pages as $i => $page) {
            if (preg_match($pregexcept, $page['page'])) {
                unset($handler->pages[$i]);
            }
        }
    }

    // Non-filelist listings carry no file name; derive it from the page name.
    if ($cmd != 'filelist') {
        foreach ($handler->pages as $i => $page) {
            $handler->pages[$i]['file'] = get_wikifilename($page['page']);
        }
    }

    // Unique by page name, keeping the first occurrence of each.
    // BUGFIX: the previous code rebuilt a 0-indexed name list and then indexed
    // back into $handler->pages — but the unset() calls above leave holes in
    // $handler->pages' key sequence, so that re-index fetched wrong or missing
    // entries whenever a filter removed anything. A single seen-set pass keeps
    // the key correspondence intact.
    $pages = array();
    $seen  = array();
    foreach ($handler->pages as $page) {
        if (!isset($seen[$page['page']])) {
            $seen[$page['page']] = TRUE;
            $pages[] = $page;
        }
    }
    return $pages;
}