/**
 * Insert or update wiki pages for metavid streams selected by $mode.
 *
 * Modes:
 *  - 'all'                : every stream marked in_sync in the metavid db
 *  - 'files'              : streams with some transcode present (the column is
 *                           spelled `trascoded` in the db — kept as-is)
 *  - 'all_in_wiki'        : streams already listed in this wiki's mv_streams table
 *  - 'all_sync_past_date' : in_sync streams after the --date argument (m/d/Y)
 *  - anything else        : a single stream matched by $stream_name
 *
 * For each stream it ensures the Stream: page exists (adding it if missing,
 * otherwise refreshing its semantic description), then runs attribute, image,
 * file, text and speech-annotation processing unless the matching skip*
 * option is set.
 *
 * @param string $mode        selection mode (see above)
 * @param string $stream_name stream name, used when $mode is not a known batch mode
 */
function do_stream_insert($mode, $stream_name = '') {
    global $mvgIP, $MVStreams, $options, $args, $wgDBname;
    $dbr = wfGetDB(DB_SLAVE);
    if ($mode == 'all') {
        $sql = "SELECT * FROM `metavid`.`streams` WHERE `sync_status`='in_sync'";
    } elseif ($mode == 'files') {
        // NOTE(review): `trascoded` (sic) presumably matches the actual column
        // spelling in the metavid db — confirm before "fixing" the typo.
        $sql = "SELECT * FROM `metavid`.`streams` WHERE `trascoded` != 'none'";
    } elseif ($mode == 'all_in_wiki') {
        $sql = "SELECT `metavid`.`streams`.* FROM `{$wgDBname}`.`mv_streams` " .
            "LEFT JOIN `metavid`.`streams` ON (`{$wgDBname}`.`mv_streams`.`name` = `metavid`.`streams`.`name`) ";
    } elseif ($mode == 'all_sync_past_date') {
        print "doing all after: " . $args[$options['date']] . "\n";
        list($month, $day, $year) = explode('/', $args[$options['date']]);
        $date_time = mktime(0, 0, 0, $month, $day, $year);
        $sql = "SELECT * FROM `metavid`.`streams` WHERE `sync_status`= 'in_sync' AND `adj_start_time` > {$date_time}";
    } else {
        // NOTE(review): $stream_name comes from the maintenance command line and is
        // interpolated directly; consider $dbr->addQuotes() if it may contain quotes.
        $sql = "SELECT * FROM `metavid`.`streams` WHERE `name` LIKE '{$stream_name}'";
    }
    $res = $dbr->query($sql);
    if ($dbr->numRows($res) == 0) {
        die('could not find stream: ' . $stream_name . "\n");
    }
    // load all stream rows:
    $streams = array(); // FIX: initialise before appending (avoids notice / fragile implicit array)
    while ($row = $dbr->fetchObject($res)) {
        $streams[] = $row;
    }
    print "working on " . count($streams) . ' streams' . "\n";
    foreach ($streams as $stream) {
        print "on stream {$stream->name} \n";
        $force = isset($options['force']) ? true : false;
        // init the stream
        $MVStreams[$stream->name] = new MV_Stream($stream);
        // check if the stream has already been added to the wiki (if not add it)
        $mvTitle = new MV_Title('Stream:' . $stream->name);
        if (!$mvTitle->doesStreamExist()) {
            do_add_stream($mvTitle, $stream);
            echo "stream " . $mvTitle->getStreamName() . " added \n";
        } else {
            do_update_wiki_page($stream->name, mv_semantic_stream_desc($mvTitle, $stream), MV_NS_STREAM, $force);
            // FIX: $updated was interpolated below but never assigned (its
            // assignment was commented out), yielding an undefined-variable
            // notice and an empty suffix in the message.
            $updated = ' updated';
            echo "stream " . $mvTitle->getStreamName() .
                " already present $updated\n";
        }
        if ($mode != 'all_in_wiki') {
            // add duration and start_time attr
            do_stream_attr_check($stream);
        }
        // do insert/copy all media images
        if (!isset($options['skipimage'])) {
            do_process_images($stream, $force);
            print "done with images\n";
        }
        if (!isset($options['skipfiles'])) {
            // check for files (make sure they match with metavid db values)
            do_stream_file_check($stream);
        }
        if (!isset($options['skiptext'])) {
            // process all stream text:
            do_process_text($stream, $force);
        }
        if (!isset($options['skipSpeechMeta'])) {
            // do annotative track for continuous speeches
            do_annotate_speeches($stream, $force);
        }
    }
}
/**
 * Regenerate the semantic stream description on Stream: wiki pages with the
 * archive.org flag set, preserving any [[Category:...]] links already present
 * on the page, then force-save the result.
 *
 * @param string $stream_name optional single stream name; empty processes
 *                            up to the first 5000 rows of mv_streams
 */
function run_archive_org_update($stream_name = '') {
    // first get all the streams:
    include_once 'metavid2mvWiki.inc.php';
    // FIX: use DB_SLAVE/DB_MASTER for consistency with do_stream_insert();
    // the old DB_READ/DB_WRITE constant names are obsolete and may be
    // undefined in the target MediaWiki, which would be a fatal error here.
    $dbr = wfGetDB(DB_SLAVE);
    // NOTE(review): $dbw is never used in this function; kept in case opening
    // the write connection is relied on elsewhere — verify and drop if not.
    $dbw = wfGetDB(DB_MASTER);
    if ($stream_name != '') {
        $sql = "SELECT * FROM `mv_streams` WHERE `name`='{$stream_name}' LIMIT 1";
    } else {
        $sql = "SELECT * FROM `mv_streams` LIMIT 0, 5000";
    }
    $result = $dbr->query($sql);
    while ($stream = $dbr->fetchObject($result)) {
        // get the wiki page:
        $streamTitle = Title::newFromText($stream->name, MV_NS_STREAM);
        $mArticle = new Article($streamTitle);
        $mvTitle = new MV_Title($stream->name);
        $stream->archive_org = true;
        $out = mv_semantic_stream_desc($mvTitle, $stream);
        if (trim($out) != '') {
            // collect all the existing categories so they survive the rewrite:
            $wtext = $mArticle->getContent();
            preg_match_all('/Category\\:([^\\]]*)/', $wtext, $matches);
            if (isset($matches[1])) {
                foreach ($matches[1] as $category) {
                    $out .= "\n[[Category:{$category}]]";
                }
            }
            // now that we kept categories, force update the page:
            do_update_wiki_page($streamTitle, $out, MV_NS_STREAM, $force = true);
        }
    }
}