/* NOTE(review): fragment — the leading `} } }` close scopes opened before this
   view, and the trailing foreach bodies continue past it. Kept token-for-token
   identical; only comments and line breaks added. */
if (isset($kwargs['serialize']['xml'])) { XmlSerializer::clean('cache', 'xml'); } } }
// Collect the [namespace, file] pairs to parse, and open the persistent caches.
$files = dojo_get_files($args);
$nodes = new Freezer('cache', 'nodes');
$resources = new Freezer('cache', 'resources');
print "=== PARSING FILES ===\n"; flush();
foreach ($files as $set) {
    list($namespace, $file) = $set;
    // Record each namespace the first time it is seen.
    if (!$namespaces[$namespace]) { $namespaces[$namespace] = true; }
    // Skip files whose mtime matches the value cached in $resources (unchanged since last run).
    $ctime = dojo_get_file_time($namespace, $file);
    if ($ctime == $resources->open($namespace . '%' . $file, null)) { continue; }
    // Progress line: file path plus current memory usage in KB.
    printf("%-100s %6s KB\n", $namespace . '/' . $file, number_format(memory_get_usage() / 1024)); flush();
    $contents = dojo_get_contents($namespace, $file);
    // Pull the meta keys ('#provides', '#resource', '#requires') out of the
    // parsed result before iterating over the remaining symbol entries.
    $provides = $contents['#provides']; unset($contents['#provides']);
    $resource = $contents['#resource']; unset($contents['#resource']);
    $requires = $contents['#requires']; unset($contents['#requires']);
    foreach ($contents as $var => $content) {
        foreach ($content as $key_key => $key_value) {
            $key_type = 'undefined';
function dojo_get_contents_cache($namespace, $file, $forceNew = false) {
    // summary:
    //   A filemtime-checking/caching shim over dojo_get_contents().
    //   Parsing the XML takes ~80000ms vs ~7000ms from cache; pass true as
    //   the third param to force the cache entry to be rebuilt.
    // returns: the parsed-contents array from dojo_get_contents()
    $mtime = dojo_get_file_time($namespace, $file);
    // The cache key embeds the mtime, so a changed file misses naturally.
    // NOTE(review): entries for older mtimes are never purged and will
    // accumulate in ./cache — presumably cleaned elsewhere; verify.
    $cfile = './cache/' . md5($namespace . $file) . '.' . $mtime;
    if (!$forceNew && file_exists($cfile)) {
        $cache = file_get_contents($cfile);
        if ($cache !== false) {
            $data = unserialize($cache);
            // Fix: validate the round-trip — unserialize() returns false on a
            // truncated/corrupt cache file, and the original returned that
            // false as if it were parsed data. (A literal serialized `false`
            // cannot occur here: dojo_get_contents() yields an array.)
            if ($data !== false) {
                return $data;
            }
        }
        // Unreadable or corrupt cache: fall through and re-parse the file.
    }
    // Parse the file. The @ is deliberate best-effort suppression from the
    // original; kept so a noisy parse still returns whatever data it can.
    $data = @dojo_get_contents($namespace, $file);
    // Fix: the original called fputs()/fclose() on an unchecked handle, which
    // warns and writes nothing when ./cache is missing or unwritable. Caching
    // is an optimization, so on failure we still return the parsed data.
    $fp = fopen($cfile, 'w+');
    if ($fp !== false) {
        fwrite($fp, serialize($data));
        fclose($fp);
    }
    return $data;
}