// Markup handler for the (:section ...:) directive: closes any open sections
// and emits the opening <section> element for the new one.
function doSection ($attrstr) {
	$attrstr = html_entity_decode($attrstr);
	$attr = new Attributes($attrstr);
	$ret = '';  // default return value; preface handling below is currently disabled
	$preface = $attr->getAttrib('preface');
	if ($preface !== false) {
/*		flush_section(0);
		if ($preface == 'start')
			$ret = "<abstract>";
		elseif ($preface == 'end')
			$ret = "</abstract>"; */
	}
	else {
		global $current_section;
		$level = $attr->getAttrib('level');
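		// flush_section() closes any sections still open at this depth or deeper
		// before the new <section> element is opened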
		$ret = flush_section($level) . "<section";
		$current_section = $level;
		$label = trim($attr->getAttrib('label'));
		if ($label != '') {
			$ret .= ' id="'.substitute_umlauts($label).'"';
			$ret .= ' wikipage="'.substitute_umlauts($label).'"';
		}
		$title = $attr->getAttrib('title');
		$ret .= "><title>".xmlencode($title)."</title>";
	}
	return $ret;
}
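	// Build the XML document for a wiki trail: frontmatter directives taken from
	// the trail page's preface, followed by one <section> per page on the trail,
	// wrapped in a single <article> root element.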
	function convertPagesToXML ($pageinfo) {		
		$xml = "<!-- Frontmatter -->\n";			
		if ($pageinfo['preface'] != '') {
			// extract title and author information
			message("processing trail page ".$this->pagename, 'start');
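			// strip wiki directives out of the preface and convert each one to XML:
			// first block directives (:name ...:) ... (:endname:), then the remaining
			// single directives such as (:title ...:)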

			while (preg_match('/(\(:(\w+)(\s+.+?)?:\).*?\(:end\2:\)\s*)/s', $pageinfo['preface'], $m)) {
				$xml .= preg_replace('#^\s*<p>(.*)</p>\s*$#s', '$1', MarkupToHTML($this->pagename, trim($m[1])));
				$pageinfo['preface'] = str_replace($m[1], '', $pageinfo['preface']);
			}
			
			while (preg_match('/(\(:\w+(\s+.+?)?:\))\s*/s', $pageinfo['preface'], $m)) {
				$xml .= preg_replace('#^\s*<p>(.*)</p>\s*$#s', '$1', MarkupToHTML($this->pagename, trim($m[1])));
				$pageinfo['preface'] = str_replace($m[1], '', $pageinfo['preface']);
			}

			message('', 'end');
		}
		unset($pageinfo['preface']);
			
		foreach ($pageinfo as $pi) {
			global $WikiDir;
			$pagefile = $WikiDir->pagefile($pi['pagename']);
			$cachefile = $this->outputDir($pi['pagename'])."/cache.xml";
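			// per-page XML cache file; reading it back is disabled below, but it is
			// rewritten whenever an existing page is converted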
			$msg = "processing page ".htmlentities($pi['pagename']);
			if (0) { // @@
				message("$msg from cache", 'start');
				$localxml = file_get_contents($cachefile);
			}
			else {
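				// regenerate the page's XML from the wiki source instead of the cache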
				global $ProcessedPages;
				$empty = file_exists($pagefile) ? '' : ' (page empty)';
				message($msg.$empty, 'start');
				$ProcessedPages[] = $pi['pagename'];
			
				list($group) = explode('.', $pi['pagename']);
				$page = ReadPage($pi['pagename']);
				$text = trim($page['text']);
				$text = preg_replace('/^!+.*?\\n/', '', $text);  // remove title header from wiki page
				$text = html_entity_decode($text);
				$title = $pi['title'] ? $pi['title'] : $pi['pagename'];
				$sectcmd = "(:section level=$pi[depth] title=\"$title\" label=".Keep($pi['pagename']).":)";
				$text = "{$sectcmd}\n$text";  // ...then prepend a section directive named after the wiki page

				$text = str_replace(chr(128), '€', $text);  // the Windows-1252 "€" character sits at code point 128

				$localxml = MarkupToHTML($pi['pagename'], $text);  // generate XML of current wiki page
				if (file_exists($pagefile)) {            // does wikipage exist?
					recursive_mkdir(dirname($cachefile));
					file_put_contents($cachefile, $localxml);
				}
			}
			$xml .= $localxml;
			message('', 'end');
		}
		// close all open section elements
		$xml .= flush_section(0);
		
		$xml = "<article page-width='210mm' page-height='297mm' margin='2cm'>$xml</article>";
		return $xml;
	}