/**
 * Inserts the access log information into the database.
 *
 * @param array  $alogs       Access log lines
 * @param array  $fromServer  The sender of these access logs
 * @param string $parseString Log format string for the parser
 *
 * @throws \Kassner\LogParser\FormatException
 */
public function insertAccessLogging(array $alogs, $fromServer, $parseString)
{
    try {
        if (count($alogs) === 0) {
            return;
        }

        $parser = new \Kassner\LogParser\LogParser();
        $parser->setFormat($parseString);

        // The Reader is assumed to be GeoIp2\Database\Reader and
        // parse_user_agent() a user-agent parsing helper. Open the
        // GeoLite2 database once instead of once per line.
        $city = new Reader(database_path() . '/GeoLite2-City.mmdb');

        $toBeInserted = [];
        foreach ($alogs as $line) {
            $userSpec = $parser->parse($line);

            // Skip requests from the local machine (IPv6 loopback).
            if ($userSpec->host === '::1') {
                continue;
            }

            $userSpec->device     = parse_user_agent($userSpec->HeaderUserAgent);
            $geoRecord            = $city->city($userSpec->host);
            $userSpec->fromServer = $fromServer;
            $userSpec->location   = [
                'city'    => $geoRecord->city->name,
                'country' => $geoRecord->country->name,
            ];
            $userSpec->createdAt  = time();

            $toBeInserted[] = $userSpec;
        }

        if (count($toBeInserted) > 0) {
            $this->mongoCollectionForAccess->batchInsert($toBeInserted);
        }
    } catch (\Exception $e) {
        error_log(print_r($e, true));
    }
}
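/*
 * A minimal calling sketch, not part of the original source. It assumes a
 * hypothetical $reporter object exposing the method above, an Apache
 * combined-format access log, and an array server label; all of these are
 * placeholders to adapt.
 */
$lines = file('/var/log/apache2/access.log', FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);

// %{User-Agent}i is exposed by the parser as ->HeaderUserAgent, which is the
// property insertAccessLogging() reads for the device lookup.
$format = '%h %l %u %t "%r" %>s %O "%{Referer}i" "%{User-Agent}i"';

$reporter->insertAccessLogging($lines, ['name' => 'web-01'], $format);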
/**
 * Perform command.
 */
public function perform()
{
    $entry = [];

    try {
        $parser = new \Kassner\LogParser\LogParser();
        $parser->setFormat('%h %l %u %t "%r" %>s %O "%{Referer}i" "%{User-Agent}i"');

        $lines = file('/var/log/apache2/access.log', FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);

        // Open the GeoLite2 databases once instead of once per line.
        $city    = new Reader(__DIR__ . '/../Database/GeoLite2-City.mmdb');
        $country = new Reader(__DIR__ . '/../Database/GeoLite2-Country.mmdb');

        foreach ($lines as $line) {
            $userSpec           = $parser->parse($line);
            $userSpec->device   = parse_user_agent($userSpec->HeaderUserAgent);
            $userSpec->location = [
                'city'    => $city->city($userSpec->host)->city->name,
                'country' => $country->country($userSpec->host)->country->name,
            ];

            $entry[] = $userSpec;
        }

        // The log is truncated once its lines have been read into memory.
        file_put_contents('/var/log/apache2/access.log', '');
    } catch (\Exception $e) {
        file_put_contents('/var/log/apache2/access.log', '');
    }

    if (count($entry) > 0) {
        AccessReport::odmCollection()->batchInsert($entry);
        print "wrote " . count($entry) . " records";
    } else {
        print 0;
    }
}
/**
 * Insert raw Varnish data into the database.
 *
 * @param string $log Raw log contents
 *
 * @throws \Kassner\LogParser\FormatException
 */
public function importDataFromLogFile($log)
{
    $parser = new \Kassner\LogParser\LogParser();
    $parser->setFormat('%h %l %u %t \\"%r\\" %>s %b \\"%{Referer}i\\" \\"%{User-agent}i\\"');

    $lines = explode("\n", $log);
    foreach ($lines as $line) {
        if (strlen(trim($line)) === 0) {
            continue;
        }

        $entry = $parser->parse($line);

        // %r is "METHOD /request/uri PROTOCOL"; keep only the URI part.
        $requestURIParts = explode(' ', $entry->request);

        $logLine = new VarnishLog();
        $logLine->setHost($entry->host);
        $logLine->setReferer($entry->HeaderReferer);
        $logLine->setRequestUri($requestURIParts[1]);
        $logLine->setStatus($entry->status);
        $logLine->setUserAgent($entry->HeaderUseragent);

        $this->em->persist($logLine);
        $this->em->flush();
    }
}
/**
 * Excellent PHP file tailing.
 * Taken from: http://stackoverflow.com/questions/6451232/reading-large-files-from-end
 *
 * Modified to exclude given HTTP status codes.
 *
 * @param string  $path           The log filename
 * @param integer $line_count     Number of lines to return
 * @param string  $log_format     Log format string for the parser
 * @param string  $exclude_status Comma-separated HTTP status codes to skip
 * @param integer $block_size     Bytes to read per backwards step
 *
 * @return array|false Array of parsed log objects, or false on failure
 */
public static function last_log_lines($path, $line_count, $log_format, $exclude_status, $block_size = 512)
{
    if (!file_exists($path)) {
        return false;
    }

    // Store parsed lines here
    $lines = array();

    // We will always have a fragment of a non-complete line;
    // keep it here until we have the next entire line.
    $leftover = "";

    // Remove whitespace and empty values
    $exclude_status = preg_replace('/\s+/', '', $exclude_status);
    $exclude_array  = array_filter(explode(",", $exclude_status), 'strlen');

    // For parsing logs with common log format
    $parser = new \Kassner\LogParser\LogParser($log_format);

    $fh = fopen($path, 'r');
    if (!$fh) {
        return false;
    }

    // Go to the end of the file
    fseek($fh, 0, SEEK_END);

    do {
        // Need to know whether we can actually go back $block_size bytes
        $can_read = $block_size;
        if (ftell($fh) < $block_size) {
            $can_read = ftell($fh);
        }

        // Go back as many bytes as we can, read them into $data,
        // then move the file pointer back to where we were.
        fseek($fh, -$can_read, SEEK_CUR);
        $data = fread($fh, $can_read);
        $data .= $leftover;
        fseek($fh, -$can_read, SEEK_CUR);

        // Split lines by \n, then reverse them. The last piece is most
        // likely not a complete line, so it is not added directly but
        // prepended to the data read on the next pass.
        $split_data = array_reverse(explode("\n", $data));
        $new_lines  = array_slice($split_data, 0, -1);

        $parsed_lines = array();

        // Check conditions on new lines
        foreach ($new_lines as $line) {
            if ($line == '') {
                continue;
            }

            // LogParser FormatException is handled by the calling routine
            $log_entry = $parser->parse($line);

            // Keep the entry only if its status code is not excluded
            if (!in_array($log_entry->status, $exclude_array)) {
                $parsed_lines[] = $log_entry;
            }
        }

        $lines    = array_merge($lines, $parsed_lines);
        $leftover = $split_data[count($split_data) - 1];
    } while (count($lines) < $line_count && ftell($fh) != 0);

    if (ftell($fh) == 0 && $leftover !== '') {
        // We reached the start of the file; parse its first line too
        $first_entry = $parser->parse($leftover);
        if (!in_array($first_entry->status, $exclude_array)) {
            $lines[] = $first_entry;
        }
    }

    fclose($fh);

    // Usually we will have read too many lines; correct that here.
    return array_reverse(array_slice($lines, 0, $line_count));
}
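/*
 * A minimal calling sketch, not part of the original source. It assumes the
 * method above lives on a hypothetical LogTailer class, that the log uses the
 * Apache combined format, and that 200/304 responses should be excluded; the
 * path, class name, and format are placeholders to adapt.
 */
$format = '%h %l %u %t "%r" %>s %O "%{Referer}i" "%{User-Agent}i"';

try {
    $entries = LogTailer::last_log_lines('/var/log/apache2/access.log', 100, $format, '200, 304');
} catch (\Kassner\LogParser\FormatException $e) {
    // As noted in the method, malformed lines bubble up to the caller.
    $entries = false;
}

if ($entries !== false) {
    foreach ($entries as $entry) {
        // Parsed objects expose %h as ->host, %>s as ->status, %r as ->request.
        printf("%s %s %s\n", $entry->host, $entry->status, $entry->request);
    }
}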