/**
 * Reset image feedback for a newline-separated list of article URLs posted
 * in the 'if_urls' request field.
 *
 * URLs that resolve to existing articles have their rows removed from the
 * image_feedback table; URLs that do not resolve are echoed back to the
 * client. Output is written to $wgOut as a body-only HTML fragment.
 */
private function resetUrls() {
	global $wgRequest, $wgOut;

	$aids = array();        // article IDs resolved from valid URLs (was uninitialized)
	$invalidUrls = array(); // input lines that did not resolve to an article

	$urls = preg_split("@\n@", trim($wgRequest->getVal('if_urls')));
	foreach ($urls as $url) {
		if (!empty($url)) {
			$t = WikiPhoto::getArticleTitle($url);
			if ($t && $t->exists()) {
				$aids[] = $t->getArticleId();
			} else {
				$invalidUrls[] = $url;
			}
		}
	}

	$numUrls = sizeof($aids);
	if ($numUrls) {
		$dbw = wfGetDB(DB_MASTER);
		// IDs come from Title::getArticleId() and are integers; cast
		// defensively anyway before splicing them into the IN clause.
		$aidsList = "(" . implode(",", array_map('intval', $aids)) . ")";
		$dbw->delete('image_feedback', array("ii_img_page_id IN {$aidsList}"), __METHOD__);
	}

	// Fixed: the original interpolated a possibly-unset variable (or an
	// array) into the output string; build the message explicitly.
	$invalid = '';
	if (sizeof($invalidUrls)) {
		$invalid = "These input urls are invalid:<br><br>" . implode("<br>", $invalidUrls);
	}

	$wgOut->setArticleBodyOnly(true);
	$wgOut->addHtml("{$numUrls} reset.{$invalid}");
}
/**
 * Parse the input field into an array of URLs and Title objects.
 *
 * Each returned entry carries 'url', 'title' and 'id'; depending on $type
 * it also carries either 'desc' (article meta description, for 'descs') or
 * 'page-title' (generated title, or an error marker if generation failed).
 */
private static function parseURLlist($type, $pageList) {
	$results = array();
	foreach (preg_split('@[\r\n]+@', $pageList) as $line) {
		$line = trim($line);
		if (empty($line)) {
			continue;
		}
		$title = WikiPhoto::getArticleTitle($line);
		if (!$title || !$title->exists()) {
			continue;
		}
		$entry = array(
			'url' => $line,
			'title' => $title,
			'id' => $title->getArticleId(),
		);
		if ('descs' == $type) {
			$meta = new ArticleMetaInfo($title);
			$entry['desc'] = $meta->getDescription();
		} else {
			$tt = TitleTests::newFromTitle($title);
			$entry['page-title'] = $tt ? $tt->getTitle() : '<i>error generating title</i>';
		}
		$results[] = $entry;
	}
	return $results;
}
/**
 * Parse the input field into an array of URLs and Title objects.
 *
 * Each non-empty line becomes array('url' => ..., 'title' => ...); the
 * title is looked up without an existence check on the URL-decoded line.
 */
private static function parseURLlist($pageList) {
	$parsed = array();
	foreach (preg_split('@[\r\n]+@', $pageList) as $line) {
		$line = trim($line);
		if (empty($line)) {
			continue;
		}
		$parsed[] = array(
			'url' => $line,
			'title' => WikiPhoto::getArticleTitleNoCheck(urldecode($line)),
		);
	}
	return $parsed;
}
/**
 * Place uploaded videos and/or photos ("hybrid media") into the Steps
 * section of an article and save the resulting wikitext.
 *
 * Cuts the Steps section out with a token, asks placeHybridMediaInSteps()
 * to assign per-step placement tokens, validates each media item's size
 * and colour profile (accumulating warnings), then swaps every token for
 * the final {{whvid}} / [[Image:...]] tag and writes the article back.
 *
 * @param int    $articleID article page ID
 * @param string $creator   uploading user name
 * @param array  $videoList video entries (name/width/height/mediawikiName/...)
 * @param array  $photoList photo entries (same shape)
 * @return array ($err, $warning, $url, $numSteps, $replaced)
 */
public function processHybridMedia($articleID, $creator, $videoList, $photoList) {
	$err = '';
	$warning = ''; // fixed: was appended to below without ever being initialized
	$numSteps = 0;
	$replaced = 0;

	$vidBrTag = self::BRTAG_TO_VID ? self::BRTAG : '';
	$imgBrTag = self::BRTAG_TO_IMG ? self::BRTAG : '';

	self::d("processHybridMedia parse out steps section replacing it with a token, leaving the above and below wikitext intact");

	// parse out steps section replacing it with a token, leaving
	// the above and below wikitext intact
	list($text, $url, $title) = $this->getArticleDetails($articleID);
	if (!$text || !$title) {
		$err = 'Could not find article ID ' . $articleID;
	}
	self::d("getArticleDetails: err:" . $err);

	if (!$err) {
		list($text, $steps, $stepsToken) = $this->cutStepsSection($text);
		if (!$stepsToken) {
			if (preg_match('@^(\s|\n)*#redirect@i', $text)) {
				$err = 'Could not parse Steps section out of article -- article text is #REDIRECT';
			} else {
				$err = 'Could not parse Steps section out of article';
			}
		}
	}

	$hybridMediaList = null;

	// try to place videos into wikitext, using tokens as placeholders.
	if (!$err) {
		$userIsScreenshotter = $this->isCreatorKnownScreenShotter($creator);
		list($err, $hybridMediaList) = $this->placeHybridMediaInSteps($articleID, $title, $videoList, $photoList, $text, $steps, $numSteps, $replaced, $userIsScreenshotter);
	}

	// detect if no photos and videos were to be processed
	if (!$err) {
		if (count($videoList) == 0 && count($photoList) == 0) {
			$err = 'No photos and videos to process';
		}
	}

	// Fixed: initialized unconditionally because it is consulted after the
	// placement block even when that block is skipped; the original read
	// the undefined constant photoList (always truthy) here, which also
	// enlarged images on video-only articles.
	$isAllPhotoLandscape = count($photoList) > 0;

	// replace the tokens within the video or image tag
	if (!$err && $hybridMediaList && count($hybridMediaList) > 0) {
		$isAllLandscape = true;
		$hadColourProblems = false;
		$hadSizeProblems = false;
		// NOTE(review): this overwrites the isCreatorKnownScreenShotter()
		// result computed above, so size warnings are always logged --
		// preserved as-is, but confirm it is intentional.
		$userIsScreenshotter = false;

		$text = str_replace($stepsToken, $steps, $text);

		foreach ($hybridMediaList as &$media) {
			$video = $media['video'];
			if ($video) {
				// video related validation: choose width by orientation
				if (!empty($video['width']) && !empty($video['height']) && $video['width'] > $video['height']) {
					$sizeParam = WikiVisualTranscoder::VIDEO_LANDSCAPE_WIDTH;
				} else {
					$sizeParam = WikiVisualTranscoder::VIDEO_PORTRAIT_WIDTH;
					// Log first portrait video
					if (!$isAllLandscape) {
						$warning .= "portrait:{$video['name']}\n";
					}
					$isAllLandscape = false;
				}
				// Log pixel width issues
				if (!$userIsScreenshotter && !$hadSizeProblems && !empty($video['width']) && $video['width'] < WikiVisualTranscoder::VIDEO_WARNING_MIN_WIDTH) {
					$warning .= "size:{$video['width']}px:{$video['name']}\n";
					$hadSizeProblems = true;
				}
			}

			$image = $media['photo'];
			if ($image) {
				// choose image display width by orientation
				if (!empty($image['width']) && !empty($image['height']) && $image['width'] > $image['height']) {
					$sizeParam = WikiVisualTranscoder::IMAGE_LANDSCAPE_WIDTH;
				} else {
					$sizeParam = WikiVisualTranscoder::IMAGE_PORTRAIT_WIDTH;
					// Log first portrait image
					if (!$isAllPhotoLandscape) {
						$warning .= "portrait:{$image['name']}\n";
					}
					$isAllPhotoLandscape = false;
				}
				// Detect colour profile issues
				if (!$hadColourProblems && !empty($image['filename'])) {
					$exifProfile = WikiPhoto::getExifColourProfile($image['filename']);
					if ($exifProfile && WikiPhoto::isBadWebColourProfile($exifProfile)) {
						$warning .= "colour:{$exifProfile}:{$image['name']}\n";
						$hadColourProblems = true;
					}
				}
				// Log pixel width issues; flag images over the hard maximum.
				// Fixed: this check appeared twice -- the first copy set
				// $hadSizeProblems and thereby masked the max-dimension
				// branch of the second. The merged check behaves like the
				// pair did for the under-minimum case.
				if (!$userIsScreenshotter && !$hadSizeProblems && !empty($image['width'])) {
					if ($image['width'] < WikiVisualTranscoder::WARNING_MIN_WIDTH) {
						$warning .= "size:{$image['width']}px:{$image['name']}\n";
						$hadSizeProblems = true;
					} else {
						$maxImgDimen = $image['width'] > $image['height'] ? $image['width'] : $image['height'];
						if ($maxImgDimen > WikiVisualTranscoder::ERROR_MAX_IMG_DIMEN) {
							$err .= "size:{$image['width']}px > max size " . WikiVisualTranscoder::ERROR_MAX_IMG_DIMEN . "px:{$image['name']}\n";
							$hadSizeProblems = true;
						}
					}
				}
			}

			// Fixed: interpolating the arrays directly caused
			// array-to-string conversion notices.
			self::d("video=" . json_encode($video) . ", image=" . json_encode($image));

			// swap each placement token for the final wikitext media tag
			$mediaTag = null;
			if ($video && !$image) { // video only
				$mediaTag = $vidBrTag . '{{whvid|' . $video['mediawikiName'] . '|' . $video['previewMediawikiName'] . '}}';
				$text = str_replace($video['token'], $mediaTag, $text);
			} elseif (!$video && $image) { // image only
				$mediaTag = $imgBrTag . '[[Image:' . $image['mediawikiName'] . '|center|' . $sizeParam . ']]';
				$text = str_replace($image['token'], $mediaTag, $text);
			} elseif ($video && $image) { // hybrid
				$mediaTag = $vidBrTag . '{{whvid|' . $video['mediawikiName'] . '|' . $video['previewMediawikiName'] . '|' . $image['mediawikiName'] . '}}';
				$text = str_replace($video['token'], $mediaTag, $text);
			}
		}
		unset($media); // fixed: dangling reference left by foreach-by-reference
	}

	// remove certain templates from start of wikitext
	if (!$err) {
		$templates = array('illustrations', 'pictures', 'screenshots', 'stub');
		$text = $this->removeTemplates($text, $templates);
	}

	// write wikitext and add/update wikivideo row
	if (!$err) {
		$err = $this->saveArticleText($articleID, $text);
	}

	// try to enlarge the uploaded photos of certain users
	if (!$err) {
		// now we want to ALWAYS enlarge the images for articles with ALL Landscape
		if ($isAllPhotoLandscape) {
			Wikitext::enlargeImages($title, true, AdminEnlargeImages::DEFAULT_CENTER_PIXELS);
		}
	}

	$numPhotos = $photoList ? count($photoList) : 0;
	$numVideos = $videoList ? count($videoList) : 0; // fixed: tested $photoList
	self::i("processed wikitext: {$creator} {$articleID} {$url} " . "photos=" . $numPhotos . ", " . "videos=" . $numVideos . " {$err}");
	return array($err, $warning, $url, $numSteps, $replaced);
}
/**
 * Entry point for the WikihowHomepageAdmin special page.
 *
 * Access control: blocked users, anonymous users, and anyone not in the
 * 'staff' group get an error page. Handles three request flavors:
 *   - ?delete=delete_<id>: AJAX delete of a homepage image row (body-only)
 *   - POST with updateActive: re-save which images are active and their order
 *   - any other POST: upload a new homepage image for an existing article
 * Finally renders the admin page with its JS/CSS assets and the form.
 */
function execute($par) {
	global $wgOut, $wgUser, $wgRequest;
	global $wgUseAjax, $wgAjaxUploadDestCheck, $wgAjaxLicensePreview;

	// permission gates: must be a logged-in, unblocked staff member
	if ($wgUser->isBlocked()) {
		$wgOut->blockedPage();
		return;
	}
	if ($wgUser->getID() == 0) {
		$wgOut->setRobotpolicy('noindex,nofollow');
		$wgOut->showErrorPage('nosuchspecialpage', 'nospecialpagetext');
		return;
	}
	if (!in_array('staff', $wgUser->getGroups())) {
		$wgOut->setRobotpolicy('noindex,nofollow');
		$wgOut->showErrorPage('nosuchspecialpage', 'nospecialpagetext');
		return;
	}

	$this->errorFile = "";
	$this->errorTitle = "";

	// AJAX-style delete request: respond with just the HTML fragment
	if ($wgRequest->getVal('delete')) {
		$wgOut->setArticleBodyOnly(true);
		$hpid = str_replace('delete_', '', $wgRequest->getVal('delete'));
		$html = self::deleteHPImage($hpid);
		$wgOut->addHTML($html);
		return;
	}

	$this->postSuccessful = true;
	if ($wgRequest->wasPosted()) {
		if ($wgRequest->getVal("updateActive")) {
			$dbw = wfGetDB(DB_MASTER);
			// first clear them all
			// NOTE(review): '*' is passed where update() takes conditions
			// and __METHOD__ where it takes options -- presumably intended
			// as "all rows"; verify against the DB abstraction layer.
			$dbw->update(WikihowHomepageAdmin::HP_TABLE, array('hp_active' => 0, 'hp_order' => 0), '*', __METHOD__);
			// then mark the submitted images active, in submitted order
			$images = $wgRequest->getArray("hp_images");
			$count = 1;
			foreach ($images as $image) {
				if (!$image) {
					continue;
				}
				$dbw->update(WikihowHomepageAdmin::HP_TABLE, array('hp_active' => 1, 'hp_order' => $count), array('hp_id' => $image));
				$count++;
			}
		} else {
			// new homepage image upload: the target article must exist
			$title = WikiPhoto::getArticleTitleNoCheck($wgRequest->getVal('articleName'));
			if (!$title->exists()) {
				$this->postSuccessful = false;
				$this->errorTitle = "* That article does not exist.";
			}
			if ($this->postSuccessful) { //keep going
				$imageTitle = Title::newFromText($wgRequest->getVal('wpDestFile'), NS_IMAGE);
				$file = new LocalFile($imageTitle, RepoGroup::singleton()->getLocalRepo());
				$file->upload($wgRequest->getFileTempName('wpUploadFile'), '', '');
				// a zero size is treated as an upload failure
				$filesize = $file->getSize();
				if ($filesize > 0) {
					$dbw = wfGetDB(DB_MASTER);
					$dbw->insert(WikihowHomepageAdmin::HP_TABLE, array('hp_page' => $title->getArticleID(), 'hp_image' => $imageTitle->getArticleID()));
					// protect the uploaded image so only sysops can edit/move it
					$article = new Article($imageTitle);
					$limit = array();
					$limit['move'] = "sysop";
					$limit['edit'] = "sysop";
					$protectResult = $article->updateRestrictions($limit, "Used on homepage");
				} else {
					$this->postSuccessful = false;
					$this->errorFile = "* We encountered an error uploading that file.";
				}
			}
		}
	}

	// page chrome: ajax feature flags plus the admin JS/CSS assets
	$useAjaxDestCheck = $wgUseAjax && $wgAjaxUploadDestCheck;
	$useAjaxLicensePreview = $wgUseAjax && $wgAjaxLicensePreview;
	$adc = wfBoolToStr($useAjaxDestCheck);
	$alp = wfBoolToStr($useAjaxLicensePreview);
	$wgOut->setPageTitle('WikiHow Homepage Admin');
	$wgOut->addScript("<script type=\"text/javascript\">\nwgAjaxUploadDestCheck = {$adc};\nwgAjaxLicensePreview = {$alp};\n</script>");
	$wgOut->addScript(HtmlSnips::makeUrlTags('js', array('jquery-ui-1.8.custom.min.js'), 'extensions/wikihow/common/ui/js', false));
	$wgOut->addScript(HtmlSnips::makeUrlTags('js', array('wikihowhomepageadmin.js'), 'extensions/wikihow/homepage', false));
	$wgOut->addScript(HtmlSnips::makeUrlTags('css', array('wikihowhomepageadmin.css'), 'extensions/wikihow/homepage', false));
	$wgOut->addScript(HtmlSnips::makeUrlTags('js', array('upload.js'), 'skins/common', false));
	$this->displayHomepageData();
	$this->displayForm();
}
<?php
/**
 * Maintenance script: strip the {{nointroimg}} template from the intro of
 * every article that contains it, saving each edit as the MiscBot user.
 */
require_once 'commandLine.inc';

$dbw = wfGetDB(DB_MASTER);
$wgUser = User::newFromName('MiscBot');

$count = 0;
foreach (WikiPhoto::getAllPages($dbw) as $page) {
	$rev = Revision::loadFromPageId($dbw, $page['id']);
	if (!$rev) {
		continue;
	}
	$wikitext = $rev->getText();
	$intro = Wikitext::getIntro($wikitext);
	if (strpos($intro, "{{nointroimg}}") === false) {
		continue;
	}
	// rebuild the intro without the template and splice it back in
	$intro = str_replace("{{nointroimg}}", "", $intro);
	$wikitext = Wikitext::replaceIntro($wikitext, $intro, true);
	$title = Title::newFromID($page['id']);
	print "Removing from: " . $title . "\n";
	$article = new Article($title);
	$article->doEdit($wikitext, "Removing nointroimg template");
	$count++;
}
print "Deleted from {$count} articles\n";
/**
 * Resolve a newline-separated blob of article URLs into article IDs.
 *
 * Note: $urls is taken by reference and is replaced with the exploded
 * array of URL lines as a side effect (existing caller contract).
 */
function getIdsFromUrls(&$urls) {
	$ids = array();
	$urls = explode("\n", trim($urls));
	foreach ($urls as $line) {
		$title = WikiPhoto::getArticleTitle($line);
		if ($title && $title->exists()) {
			$ids[] = $title->getArticleId();
		}
	}
	return $ids;
}
/**
 * Execute special page. Only available to wikihow staff.
 *
 * On POST: reads a newline-separated URL list from 'pages-list', resolves
 * each URL to an article ID, checks whether the article body has steps
 * images, and returns an HTML results table wrapped in JSON.
 * Otherwise renders the lookup form.
 */
function execute() {
	global $wgRequest, $wgOut, $wgUser, $wgLang;

	if ($wgUser->isBlocked()) {
		$wgOut->setRobotpolicy('noindex,nofollow');
		$wgOut->errorpage('nosuchspecialpage', 'nospecialpagetext');
		return;
	}

	if ($wgRequest->wasPosted()) {
		$dbr = wfGetDB(DB_SLAVE);
		$pageList = $wgRequest->getVal('pages-list', '');
		$wgOut->setArticleBodyOnly(true);

		$urls = array(); // fixed: was used uninitialized when the list was empty
		foreach (preg_split('@[\r\n]+@', $pageList) as $url) {
			$url = trim($url);
			if (!empty($url)) {
				$id = WikiPhoto::getArticleID($url);
				$images = '';
				if (!empty($id)) {
					$hasNoImages = WikiPhoto::articleBodyHasNoImages($dbr, $id);
					$images = $hasNoImages ? 'no' : 'yes';
				}
				$urls[] = array('url' => $url, 'id' => $id, 'images' => $images);
			}
		}

		$html = '<style>.tres tr:nth-child(even) {background: #ccc;}</style>';
		$html .= '<table class="tres"><tr><th width="450px">URL</th><th>ID</th><th>Has steps images?</th></tr>';
		foreach ($urls as $row) {
			// fixed: user-supplied URLs were echoed into href/text unescaped (XSS)
			$safeUrl = htmlspecialchars($row['url'], ENT_QUOTES, 'UTF-8');
			$html .= "<tr><td><a href='{$safeUrl}'>{$safeUrl}</a></td><td>{$row['id']}</td><td>{$row['images']}</td></tr>";
		}
		$html .= '</table>';

		$result = array('result' => $html);
		print json_encode($result);
		return;
	}

	$wgOut->setHTMLTitle('Admin - Lookup Pages - wikiHow');
	$tmpl = self::getGuts('AdminLookupPages');
	$wgOut->addHTML($tmpl);
}
<?php
/**
 * List all articles within a top level category (which includes its sub-
 * categories).
 *
 * Usage: php listAllCategoryArticles.php Category-Name
 *
 * Output: one article URL per line on stdout.
 */
require_once 'commandLine.inc';

if (count($argv) < 1) {
	print "usage: php listAllCategoryArticles.php <category-name-encoded>\n";
	print "  example of category name: Hobbies-and-Crafts\n";
	exit;
}

$dbr = wfGetDB(DB_SLAVE);
$topLevel = $argv[0];

// get the category and all sub-categories, de-duplicated
$cats = WikiPhoto::getAllSubcats($dbr, $topLevel);
$cats[] = $topLevel;
sort($cats);
$cats = array_unique($cats);

// print the URL of every page in every category
// (fixed: removed dead locals $file and $pages -- this script only prints,
// it never wrote a CSV or de-duplicated by page id)
foreach ($cats as $cat) {
	$results = WikiPhoto::getPages($dbr, $cat);
	foreach ($results as $result) {
		print WikiPhoto::BASE_URL . $result['key'] . "\n";
	}
}
/**
 * Process images on S3 instead of from the images web server dir
 *
 * Walks the per-article upload bundles found in the S3 bucket, skips
 * anything already processed (unless newer files were uploaded or a retry
 * is flagged), pulls and unzips each article's zip into a staging dir, and
 * hands the extracted image files to self::processImages(). Outcomes are
 * recorded via dbSetArticleProcessed() and a per-article line is printed.
 */
private static function processS3Images() {
	$s3 = new S3(WH_AWS_WIKIPHOTO_ACCESS_KEY, WH_AWS_WIKIPHOTO_SECRET_KEY);

	// Debugging aid (disabled): cache/reload the S3 listing via a local file.
	//$file = '/tmp/whp';
	//if (!file_exists($file)) {
	$articles = self::getS3Articles($s3, self::AWS_BUCKET);
	$processed = self::dbGetArticlesUpdatedAll();
	//$out = yaml_emit(array($articles, $processed));
	//file_put_contents($file, $out);
	//} else {
	//list($articles, $processed) = yaml_parse(file_get_contents($file));
	//}

	// process all articles
	foreach ($articles as $id => $details) {
		// when a debug article ID is set, only that article is considered
		$debug = self::$debugArticleID;
		if ($debug && $debug != $id) {
			continue;
		}
		// listing-level error for this article: record it once, then skip
		if (@$details['err']) {
			if (!$processed[$id]) {
				self::dbSetArticleProcessed($id, $details['user'], $details['err'], '', '', 0, 0, 0);
			}
			continue;
		}
		// if article needs to be processed again because new files were
		// uploaded, but article has already been processed, we should
		// just flag as a retry attempt
		if (!$debug && isset($processed[$id]) && !$processed[$id]['retry'] && $processed[$id]['processed'] < $details['time']) {
			if ($details['time'] >= self::REPROCESS_EPOCH) {
				$processed[$id]['retry'] = 1;
				$processed[$id]['error'] = '';
			} else {
				// don't reprocess stuff from before a certain point in time
				continue;
			}
		}
		// if this article was already processed, and nothing about its
		// images has changes, and it's not set to be retried, don't
		// process it again
		if (!$debug && isset($processed[$id]) && !$processed[$id]['retry'] && $processed[$id]['processed'] > $details['time']) {
			continue;
		}
		// if article is not on Wikiphoto article exclude list
		if (WikiPhoto::checkExcludeList($id)) {
			$err = 'Article was found on Wikiphoto EXCLUDE list';
			self::dbSetArticleProcessed($id, $details['user'], $err, '', '', 0, 0, 0);
			continue;
		}
		// pull zip file into staging area
		$stageDir = '';
		$imageList = array();
		if ($details['zip']) {
			$prefix = $details['user'] . '/';
			$zipFile = $id . '.zip';
			$files = array($zipFile);
			list($err, $stageDir) = self::pullFiles($id, $s3, $prefix, $files);
			if (!$err) {
				list($err, $files) = self::unzip($stageDir, $zipFile);
			}
			if (!$err) {
				// build the image list from the unzipped file paths
				foreach ($files as $file) {
					$imageList[] = array('name' => basename($file), 'filename' => $file);
				}
			}
		} else {
			// no zip -- ignore
			continue;
		}
		if (!$err && in_array($id, self::$excludeArticles)) {
			$err = 'Forced skipping this article because there was an repeated error when processing it';
		}
		if (!$err) {
			// @ suppresses the undefined-index notice when no warning exists
			$warning = @$details['warning'];
			list($err, $title) = self::processImages($id, $details['user'], $imageList, $warning);
		} else {
			self::dbSetArticleProcessed($id, $details['user'], $err, '', '', 0, 0, 0);
		}
		if ($stageDir) {
			self::safeCleanupDir($stageDir);
		}
		// NOTE(review): $title is only assigned on the success path above,
		// so on an error iteration this can read a stale value from a
		// previous iteration -- confirm intended.
		$titleStr = $title ? ' (' . $title->getText() . ')' : '';
		$errStr = $err ? ' err=' . $err : '';
		$imageCount = count($imageList);
		print date('Y/M/d H:i') . " processed: {$details['user']}/{$id}{$titleStr} images={$imageCount}{$errStr}\n";
	}
}
// NOTE(review): this chunk is a fragment -- $cats, $dbr, $numArticles and
// $file are defined above this excerpt, and the final else-branch's closing
// brace lies beyond it.

// de-duplicate the category list
sort($cats);
$cats = array_unique($cats);

// get all pages
$pages = array();
foreach ($cats as $cat) {
	$results = WikiPhoto::getPages($dbr, $cat);
	// make results unique based on page_id
	foreach ($results as $result) {
		$pages[$result['id']] = $result;
	}
}
$pages = array_values($pages);

// pick pages at random until $numArticles with no step images are found
shuffle($pages);
$lines = array();
foreach ($pages as $page) {
	if (WikiPhoto::articleBodyHasNoImages($dbr, $page['id'])) {
		$lines[] = array(WikiPhoto::BASE_URL . "{$page['key']}", $page['id']);
		if (count($lines) >= $numArticles) {
			break;
		}
	}
}

// write the (url, id) pairs as CSV
$fp = fopen($file, 'w');
if ($fp) {
	foreach ($lines as $line) {
		fputcsv($fp, $line);
	}
	fclose($fp);
	print "output is in {$file}\n";
} else {
	print "unable to open file {$file}\n";
<?php
/**
 * Maintenance script: write a CSV of every article that has video,
 * one (url, page-id) row per article.
 *
 * Output file: all-articles-with-video.csv
 * (NOTE(review): the original header described a random no-step-photos
 * sampler, which does not match what this code does -- docs corrected.)
 */
require_once 'commandLine.inc';

$file = 'all-articles-with-video.csv';
$dbr = wfGetDB(DB_SLAVE);

// collect (url, id) for every page that has video
$lines = array();
foreach (WikiPhoto::getAllPages($dbr) as $page) {
	if (!WikiPhoto::articleHasVideo($dbr, $page['id'])) {
		continue;
	}
	$lines[] = array(WikiPhoto::BASE_URL . "{$page['key']}", $page['id']);
}

// dump the rows as CSV
$fp = fopen($file, 'w');
if (!$fp) {
	print "unable to open file {$file}\n";
} else {
	foreach ($lines as $line) {
		fputcsv($fp, $line);
	}
	fclose($fp);
	print "output is in {$file}\n";
}
/**
 * Process images on S3 instead of from the images web server dir
 *
 * Iterates over the per-article upload bundles found in the S3 bucket,
 * skipping anything already processed (unless newer files were uploaded or
 * a retry is flagged). For each remaining article it pulls and unzips the
 * article's zip, splits the contents into photo and video lists, and routes
 * them either to the image transcoder (photos only) or the mp4 transcoder
 * (any videos present). Status transitions are recorded through
 * dbSetArticleProcessed(). Honors self::$exitAfterNumArticles in debug mode.
 */
private function processS3Media() {
	$s3 = new S3(WH_AWS_WIKIVISUAL_ACCESS_KEY, WH_AWS_WIKIVISUAL_SECRET_KEY);

	// Debugging aid (disabled): cache/reload the S3 listing via a local file.
	// $file = '/tmp/whp';
	// if (!file_exists($file)) {
	$articles = $this->getS3Articles($s3, self::AWS_BUCKET);
	$processed = $this->dbGetArticlesUpdatedAll();
	// $out = yaml_emit(array($articles, $processed));
	// file_put_contents($file, $out);
	// } else {
	// list($articles, $processed) = yaml_parse(file_get_contents($file));
	// }

	// process all articles
	$articlesProcessed = 0;
	foreach ($articles as $id => $details) {
		// when a debug article ID is set, only that article is considered
		$debug = self::$debugArticleID;
		if ($debug && $debug != $id) {
			continue;
		}
		// listing-level error for this article: record it once, then skip
		if (@$details['err']) {
			if (!$processed[$id]) {
				self::dbSetArticleProcessed($id, $details['user'], $details['err'], '', '', 0, 0, 0, 0, self::STATUS_ERROR, 0, '');
			}
			continue;
		}
		// if article needs to be processed again because new files were
		// uploaded, but article has already been processed, we should
		// just flag as a retry attempt
		if (!$debug && isset($processed[$id]) && !$processed[$id]['retry'] && $processed[$id]['processed'] < $details['time']) {
			if ($details['time'] >= self::REPROCESS_EPOCH) {
				$processed[$id]['retry'] = 1;
				$processed[$id]['error'] = '';
			} else {
				self::d("don't reprocess stuff from before a certain point in time: Article id :" . $id);
				// don't reprocess stuff from before a certain point in time
				continue;
			}
		}
		// if this article was already processed, and nothing about its
		// images has changes, and it's not set to be retried, don't
		// process it again
		if (!$debug && isset($processed[$id]) && !$processed[$id]['retry'] && $processed[$id]['processed'] > $details['time']) {
			self::d("if this article was already processed, and nothing about its images has changes, and it's not set to be retried, don't process it again:" . $id . ", processed[id]['processed']=" . $processed[$id]['processed'] . " > details['time']=" . $details['time']);
			continue;
		}
		// if article is not on Wikiphoto article exclude list
		if (WikiPhoto::checkExcludeList($id)) {
			$err = 'Article was found on Wikiphoto EXCLUDE list';
			self::dbSetArticleProcessed($id, $details['user'], $err, '', '', 0, 0, 0, 0, self::STATUS_ERROR, 0, '');
			continue;
		}
		// pull zip file into staging area
		$stageDir = '';
		$photoList = array();
		$videoList = array();
		if ($details['zip']) {
			$prefix = $details['user'] . '/';
			$zipFile = $id . '.zip';
			$files = array($zipFile);
			list($err, $stageDir) = $this->pullFiles($id, $s3, $prefix, $files);
			if (!$err) {
				list($err, $files) = $this->unzip($stageDir, $zipFile);
			}
			if (!$err) {
				// partition the unzipped files into photos vs videos
				list($photoList, $videoList) = self::splitSrcMediaFileList($files);
			}
		} else {
			// no zip -- ignore
			continue;
		}
		if (!$err && in_array($id, self::$excludeArticles)) {
			$err = 'Forced skipping this article because there was an repeated error when processing it';
		}
		self::d("PhotoList size " . count($photoList) . ", VideoList size " . count($videoList) . " err={$err}");
		$isHybridMedia = false;
		$photoCnt = 0;
		$vidCnt = 0;
		if (!$err) {
			// @ suppresses the undefined-index notice when no warning exists
			$warning = @$details['warning'];
			$photoCnt = count($photoList);
			$vidCnt = count($videoList);
			self::dbSetArticleProcessed($id, $details['user'], $err, $warning, '', $vidCnt, $photoCnt, 0, 0, self::STATUS_PROCESSING_UPLOADS, 0, $stageDir);
			$isHybridMedia = $photoCnt > 0 && $vidCnt > 0;
			self::d("isHybridMedia={$isHybridMedia}");
			//start processing uploads
			if ($photoCnt > 0 && $vidCnt <= 0) {
				// photos only: run the image transcoder directly
				list($err, $title, $warning, $url, $photoCnt, $replaced) = $this->imageTranscoder->processMedia($id, $details['user'], $photoList, $warning, $isHybridMedia);
				$this->updateArticleStatusPhotoProcessed($id, $err, $warning, $url, $photoCnt, $replaced, true);
			} else {
				if (!$err && $vidCnt > 0) {
					// any videos present: hand off to the mp4 transcoder
					self::d("Processing mp4Transcoder->processMedia");
					list($err, $url, $status) = $this->mp4Transcoder->processMedia($id, $details['user'], $videoList, $warning, $isHybridMedia);
					$this->updateArticleStatusVideoTranscoding($id, $err, $warning, $url, $status);
				}
			}
			$articlesProcessed++;
		} else {
			self::dbSetArticleProcessed($id, $details['user'], $err, '', '', 0, 0, 0, 0, self::STATUS_ERROR, 0, '');
		}
		//don't cleanup if isHybridMedia is present and zip file contains images.
		if (!empty($stageDir) && $isHybridMedia === false) {
			self::safeCleanupDir($stageDir);
		}
		// NOTE(review): $title is only assigned on the photos-only path, so
		// this can read a stale/unset value on other paths -- confirm.
		$titleStr = $title ? ' (' . $title->getText() . ')' : '';
		$errStr = $err ? ', err=' . $err : '';
		$mediaCount = count($files);
		self::i("processed: {$details['user']}/{$id}{$titleStr} original mediaFilesCount={$mediaCount} {$errStr}");
		// debug-mode early exit after a fixed number of articles
		if (self::$DEBUG !== false && self::$exitAfterNumArticles > 0 && $articlesProcessed >= self::$exitAfterNumArticles) {
			self::d("articlesProcessed {$articlesProcessed} >= self::\$exitAfterNumArticles " . self::$exitAfterNumArticles . ", hence stopping further processing of articles if there are any.");
			break;
		}
	}
}
<?php
/*
 * Opens a CSV file and adds page IDs to the file. Part of the WikiPhoto
 * project.
 *
 * Input CSV column 0 is an article URL; the script writes column 1 = page
 * ID and column 2 = 1/0 whether the article body has images (blank when
 * the URL does not resolve).
 */
require_once 'commandLine.inc';

if (count($argv) < 2) {
	print "usage: php wikiphotoAddPageIDs.php infile.csv outfile.csv\n";
	exit;
}

$infile = $argv[0];
$outfile = $argv[1];
$in = fopen($infile, 'r');
$out = fopen($outfile, 'w');
if (!$in || !$out) {
	print "error: opening a file\n";
	exit;
}

$dbr = wfGetDB(DB_SLAVE);
while (($data = fgetcsv($in)) !== false) {
	$id = WikiPhoto::getArticleID($data[0]);
	$data[1] = $id;
	$data[2] = '';
	if (!empty($id)) {
		$hasNoImages = WikiPhoto::articleBodyHasNoImages($dbr, $id);
		$images = intval(!$hasNoImages);
		$data[2] = $images;
	}
	fputcsv($out, $data);
}

// fixed: both handles were leaked; close them explicitly
fclose($in);
fclose($out);