/**
 * Perform one cron 'tick' of crawl processing.
 *
 * Has limits of both how many urls to crawl (via the robot's queue
 * processing) and a soft time limit on total crawl time (maxcrontime).
 *
 * @param bool $verbose if true, the robot reports progress as it crawls.
 * @return void
 */
function local_linkchecker_robot_crawl($verbose = false)
{
    global $CFG, $DB;

    $robot = new \local_linkchecker_robot\robot\crawler();
    $config = $robot::get_config();
    $crawlstart = $config->crawlstart;
    $crawlend = $config->crawlend;

    // Check if a crawl is in progress, otherwise start a new one.
    // A crawl is considered finished (or never started) when the start time
    // is missing or is not later than the last recorded end time.
    if (!$crawlstart || $crawlstart <= $crawlend) {
        $start = time();
        set_config('crawlstart', $start, 'local_linkchecker_robot');
        // Push the seed url into the crawl queue to kick off the new crawl.
        $robot->mark_for_crawl($CFG->wwwroot . '/', $config->seedurl);

        // Create a new history record for this crawl.
        $history = new stdClass();
        $history->startcrawl = $start;
        $history->urls = 0;
        $history->links = 0;
        $history->broken = 0;
        $history->oversize = 0;
        $history->cronticks = 0;
        $history->id = $DB->insert_record('linkchecker_history', $history);
    } else {
        $history = $DB->get_record('linkchecker_history', array('startcrawl' => $crawlstart));
        if (!$history) {
            // $DB->get_record() returns false when no row matches (eg the
            // table was truncated or the crawl predates history tracking).
            // Recreate the record so the stats updates below do not fail.
            $history = new stdClass();
            $history->startcrawl = $crawlstart;
            $history->urls = 0;
            $history->links = 0;
            $history->broken = 0;
            $history->oversize = 0;
            $history->cronticks = 0;
            $history->id = $DB->insert_record('linkchecker_history', $history);
        }
    }

    // While we are not exceeding the maxcron time, and the queue is not empty
    // find the next url in the queue and crawl it.
    // If the queue is empty then mark the crawl as ended.
    $cronstart = time();
    $cronstop = $cronstart + $config->maxcrontime;
    $hasmore = true;
    $hastime = true;
    while ($hasmore && $hastime) {
        $hasmore = $robot->process_queue($verbose);
        $hastime = time() < $cronstop;
        // Record the heartbeat so admins can see the crawler is alive.
        set_config('crawltick', time(), 'local_linkchecker_robot');
    }

    if ($hastime) {
        // Time left over, which means the queue is empty!
        // Mark the crawl as ended.
        $history->endcrawl = time();
        set_config('crawlend', time(), 'local_linkchecker_robot');
    }

    // Persist the cumulative crawl statistics for this crawl.
    $history->urls = $robot->get_processed();
    $history->links = $robot->get_num_links();
    $history->broken = $robot->get_num_broken_urls();
    $history->oversize = $robot->get_num_oversize();
    $history->cronticks++;
    $DB->update_record('linkchecker_history', $history);
}
// Only site administrators may view the crawler status page.
require_capability('moodle/site:config', context_system::instance());
admin_externalpage_setup('local_linkchecker_robot_status');
echo $OUTPUT->header();

$action = optional_param('action', '', PARAM_ALPHANUMEXT);

$robot = new \local_linkchecker_robot\robot\crawler();
$config = $robot::get_config();
if ($action == 'makebot') {
    $botuser = $robot->auto_create_bot();
}

// Gather the current crawl state and statistics for display.
$crawlstart = $config->crawlstart;
$crawlend = $config->crawlend;
$crawltick = $config->crawltick;
$boterror = $robot->is_bot_valid();
$queuesize = $robot->get_queue_size();
$recent = $robot->get_processed();
$numlinks = $robot->get_num_links();
$oldqueuesize = $robot->get_old_queue_size();
$numurlsbroken = $robot->get_num_broken_urls();
$numpageswithurlsbroken = $robot->get_pages_withbroken_links();
$oversize = $robot->get_num_oversize();

// Estimate progress as processed / (processed + remaining). An empty queue
// means the crawl is complete. If the old queue size is zero then use the
// current queue size as the remaining estimate; otherwise take the larger
// of the two queue figures.
if ($queuesize == 0) {
    $progress = 1;
} else {
    $remaining = ($oldqueuesize == 0) ? $queuesize : max($oldqueuesize, $queuesize);
    $progress = $recent / ($recent + $remaining);
}
$duration = time() - $crawlstart;