/**
 * Flush the in-memory per-minute hit counters held in $GLOBALS["MAIN"]
 * (shape: [ "Y-m-d H:i:00" => [ category => hits ] ]) to the InfluxDB
 * "hypercache" measurement, tagged with the local proxy hostname.
 *
 * The bucket for the *current* minute is skipped because it is still being
 * filled; every flushed bucket is removed from the global array.
 *
 * FIX: the original iterated with each(), which was removed in PHP 8.0 and
 * caused a fatal error. foreach iterates over a copy of the array, so
 * unsetting flushed buckets from $GLOBALS["MAIN"] inside the loop is safe.
 *
 * @return void
 */
function dump_main() {
    $date = date("Y-m-d H:i:00");
    $influx = new influx();
    foreach ($GLOBALS["MAIN"] as $xdate => $array) {
        // Current minute still accumulating -> do not flush it yet.
        if ($xdate == $date) {
            continue;
        }
        foreach ($array as $category => $count) {
            $zArray = array();
            $zArray["tags"]["category"] = $category;
            $zArray["fields"]["hits"] = intval($count);
            $zArray["tags"]["proxyname"] = $GLOBALS["MYHOSTNAME"];
            squid_watchdog_events("Influx -> {$xdate}/{$date}: {$category}: {$count}");
            $influx->insert("hypercache", $zArray);
        }
        // Bucket flushed: drop it so it is not sent twice.
        unset($GLOBALS["MAIN"][$xdate]);
    }
    reset($GLOBALS["MAIN"]);
}
/**
 * Build the hourly bandwidth widget: query InfluxDB for the per-5-minute
 * sums of MAIN_SIZE since the top of the current hour and emit the HTML/JS
 * container that lazy-loads the chart.
 *
 * Does nothing when statistics are disabled (SquidPerformance > 1).
 *
 * FIXES:
 *  - $xdata/$ydata/$f1/$f2 were never initialized; count(null) is a
 *    TypeError in PHP 8 and implode(null) warns.
 *  - InfluxQL requires the WHERE clause *before* GROUP BY; the original
 *    query ("... group by time(5m) where time > N") was syntactically
 *    invalid. Also added the "s" epoch-seconds suffix used by every other
 *    time comparison in this file ($now is a Unix timestamp in seconds).
 *
 * @return void  Echoes the widget HTML.
 */
function start() {
    $sock = new sockets();
    $SquidPerformance = intval($sock->GET_INFO("SquidPerformance"));
    if ($SquidPerformance > 1) {
        if ($GLOBALS["VERBOSE"]) {
            echo "Squid Performance disabled</br>";
        }
        return;
    }
    $influx = new influx();
    $now = strtotime(date("Y-m-d H:00:00"));
    // WHERE must precede GROUP BY in InfluxQL; "s" marks epoch seconds.
    $sql = "select sum(SIZE) as size from MAIN_SIZE where time > {$now}s group by time(5m)";
    $main = $influx->QUERY_SQL($sql);
    $xdata = array();
    $ydata = array();
    $f1 = array();
    $f2 = array();
    foreach ($main as $row) {
        $time = $row->time;
        $min = date("i", $time);
        $size = $row->size / 1024;
        if ($GLOBALS["VERBOSE"]) {
            echo "{$min} -> {$size}<bR>\n";
        }
        $size = $size / 1024; // KB -> MB
        $xdata[] = $min;
        $ydata[] = $size;
    }
    $page = CurrentPageName();
    $time = time();
    if (count($xdata) > 1) {
        krsort($xdata);
        krsort($ydata);
        $array["xdata"] = $xdata;
        $array["ydata"] = $ydata;
        $data = urlencode(base64_encode(serialize($array)));
        $f1[] = "<div style='width:665px;height:240px' id='SQUIDRTT-{$time}-2'></div>";
        $f2[] = "function FDeux{$time}(){\n\t\t\tLoadjsSilent('{$page}?rtt-hour=yes&container=SQUIDRTT-{$time}-2&data={$data}',false);\n\t\t}\n\t\tsetTimeout(\"FDeux{$time}()\",500);";
    }
    $html = @implode("\n", $f1) . "<script>" . @implode("\n", $f2) . "</script>";
    echo $html;
}
/**
 * Flexigrid JSON data provider: reads proxy access_log rows from InfluxDB,
 * aggregates them per minute + per (time,MAC,IP,user,site) key, and echoes
 * the grid rows (time, site, category, MAC, IP, user, requests, size) as
 * JSON, newest minute first.
 *
 * FIXES:
 *  - Aggregation bug: the "already seen" branch read/wrote
 *    $MAIN[$MD5][...] instead of $MAIN[$CURTIME][$MD5][...], so RQS and
 *    SIZE were never actually accumulated (they were re-seeded from an
 *    undefined bucket each time).
 *  - $search_query and $MAIN were used without initialization (notices,
 *    and krsort(null) is fatal in PHP 8).
 *  - each() loops (removed in PHP 8.0) converted to foreach.
 *
 * NOTE(review): $_POST["rp"] and the search terms are interpolated into the
 * InfluxQL string unescaped — confirm upstream sanitization.
 *
 * @return void  Echoes JSON and returns.
 */
function list1() {
    $page = CurrentPageName();
    $tpl = new templates();
    $influx = new influx();
    $q = new mysql_squid_builder();
    $USER_FIELD = $_GET["user"];
    $search = $_GET["search"];
    if ($search == null) {
        $search = "*";
    }
    $from = strtotime("{$_GET["date1"]} {$_GET["time1"]}");
    if (!isset($_GET["date1"])) {
        $from = strtotime("-1 hour");
    }
    $search_query = null; // FIX: was interpolated below without initialization
    if (isset($_POST["qtype"])) {
        if ($_POST["query"] != null) {
            $search = str_replace("*", ".*", $_POST["query"]);
            $search_query = "AND {$_POST["qtype"]} =~ /{$search}/";
        }
    }
    $to = strtotime("{$_GET["date2"]} {$_GET["time2"]}");
    $md5_table = md5("{$from}{$to}{$USER_FIELD}");
    $_SESSION["SQUID_STATS_MEMBER_SEARCH"] = $search;
    $Maxlines = $_GET["Maxlines"];
    $_SESSION["SQUID_STATS_MAX_LINES"] = $Maxlines;
    if ($search != "*") {
        //$search_query="AND (SITE =~ /$search/ or $USER_FIELD =~ /$search/)";
    }
    $totext = "and time < {$to}s";
    if (!isset($_POST["rp"])) {
        $_POST["rp"] = 100;
    }
    if ($from == $to) {
        $totext = null;
    }
    $totext = null; // upper bound deliberately disabled (kept from original)
    $sql = "SELECT * from access_log WHERE time > {$from}s {$totext} {$search_query} ORDER BY ASC LIMIT {$_POST["rp"]}";
    $main = $influx->QUERY_SQL($sql);
    $data = array();
    $data['page'] = 1;
    $data['total'] = 0;
    $data['rows'] = array();
    $c = 0;
    $fontsize = "18px";
    $color = null;
    $ipClass = new IP();
    $MAIN = array();
    // Pass 1: bucket rows by minute, de-duplicated on (time,MAC,IP,user,site).
    foreach ($main as $row) {
        $USER = trim($row->USERID);
        if ($row->SIZE == 0) {
            continue;
        }
        $time = date("H:i:00", InfluxToTime($row->time));
        $CURTIME = strtotime($time);
        $CATEGORY = $row->CATEGORY;
        $SITE = $row->SITE;
        $RQS = $row->RQS;
        $MAC_link = null;
        $MAC = $row->MAC;
        $IPADDR = $row->IPADDR;
        $SIZE = $row->SIZE;
        $MD5 = md5("{$time}{$MAC}{$IPADDR}{$USER}{$SITE}");
        if (!isset($MAIN[$CURTIME][$MD5])) {
            $MAIN[$CURTIME][$MD5]["TIME"] = $time;
            $MAIN[$CURTIME][$MD5]["RQS"] = $RQS;
            $MAIN[$CURTIME][$MD5]["MAC"] = $MAC;
            $MAIN[$CURTIME][$MD5]["IPADDR"] = $IPADDR;
            $MAIN[$CURTIME][$MD5]["USER"] = $USER;
            $MAIN[$CURTIME][$MD5]["SIZE"] = $SIZE;
            $MAIN[$CURTIME][$MD5]["SITE"] = $SITE;
            $MAIN[$CURTIME][$MD5]["CATEGORY"] = $CATEGORY;
        } else {
            // FIX: accumulate on the same bucket ($MAIN[$CURTIME][$MD5]),
            // not the non-existent $MAIN[$MD5].
            $MAIN[$CURTIME][$MD5]["RQS"] = $MAIN[$CURTIME][$MD5]["RQS"] + $RQS;
            $MAIN[$CURTIME][$MD5]["SIZE"] = $MAIN[$CURTIME][$MD5]["SIZE"] + $SIZE;
            $MAIN[$CURTIME][$MD5]["CATEGORY"] = $CATEGORY;
        }
    }
    krsort($MAIN); // newest minute first
    // Pass 2: render the aggregated buckets into grid rows.
    foreach ($MAIN as $curtime => $array0) {
        foreach ($array0 as $MDKey => $array) {
            $USER = trim($array["USER"]);
            if ($color == null) {
                $color = "#F2F0F1";
            } else {
                $color = null;
            }
            $time = $array["TIME"];
            $RQS = $array["RQS"];
            $MAC_link = null;
            $SIZE = $array["SIZE"];
            if ($SIZE > 1024) {
                $size = FormatBytes($SIZE / 1024);
            } else {
                $size = "{$SIZE} Bytes";
            }
            $MAC = $array["MAC"];
            $IPADDR = $array["IPADDR"];
            $CATEGORY = $array["CATEGORY"];
            $SITE = $array["SITE"];
            $RQS = FormatNumber($RQS);
            $c++;
            if ($ipClass->IsvalidMAC($MAC)) {
                $MAC_link = "<a href=\"javascript:blur();\"\n\t\t\tOnClick=\"javascript:Loadjs('squid.nodes.php?node-infos-js=yes&MAC=" . urlencode($MAC) . "');\"\n\t\t\tstyle='font-size:{$fontsize};text-decoration:underline'>\t\t\n\t\t\t";
            }
            if ($ipClass->isValid($SITE)) {
                $SITE = "<a href=\"https://db-ip.com/{$SITE}\" style='text-decoration:underline;color:black' target=_new>{$SITE}</a>";
            }
            if ($c > $_POST["rp"]) {
                break;
            }
            $data['rows'][] = array('id' => $c, 'cell' => array(
                "<span style='font-size:{$fontsize}'>{$time}</a></span>",
                "<span style='font-size:{$fontsize}'>{$SITE}</a></span>",
                "<span style='font-size:{$fontsize}'>{$CATEGORY}</a></span>",
                "<span style='font-size:{$fontsize}'>{$MAC_link}{$MAC}</a></span>",
                "<span style='font-size:{$fontsize}'>{$IPADDR}</a></span>",
                "<span style='font-size:{$fontsize}'>{$USER}</a></span>",
                "<span style='font-size:{$fontsize}'>{$RQS}</a></span>",
                "<span style='font-size:{$fontsize}'>{$size}</a></span>"));
        }
    }
    $data['total'] = $c;
    echo json_encode($data);
    return;
}
/**
 * Report builder (per-member websites): pull raw access_log rows from
 * InfluxDB for the requested period, aggregate SIZE/RQS per hour and per
 * (site,user,IP,MAC) key, append a CSV copy to $GLOBALS["CSV1"], and bulk
 * insert the aggregates into a temporary MySQL table `{$md5}user`.
 *
 * FIXES:
 *  - each() loops (removed in PHP 8.0) converted to foreach.
 *  - $srg, $Z and $MAIN_ARRAY were appended to / counted without being
 *    initialized (count(null) is fatal in PHP 8).
 *
 * @param array  $ligne Task row; $ligne["params"] is a serialized array with
 *                      FROM/TO/INTERVAL/USER/searchsites/searchuser keys.
 * @param string $md5   Report identifier, used as the table-name prefix.
 * @return bool  true on success, false on empty data or SQL error
 *               (temporary tables are dropped via REMOVE_TABLES on error).
 */
function GRAB_DATAS($ligne, $md5) {
    $GLOBALS["zMD5"] = $md5;
    $params = unserialize($ligne["params"]);
    $influx = new influx();
    $from = InfluxQueryFromUTC($params["FROM"]);
    $to = InfluxQueryFromUTC($params["TO"]);
    $interval = $params["INTERVAL"];
    $USER_FIELD = $params["USER"];
    $md5_table = md5(__FUNCTION__ . "." . "{$from}{$to}");
    $searchsites = trim($params["searchsites"]);
    $searchuser = trim($params["searchuser"]);
    $searchsites_sql = null;
    $searchuser_sql = null;
    if ($searchsites == "*") {
        $searchsites = null;
    }
    if ($searchuser == "*") {
        $searchuser = null;
    }
    if ($searchsites != null) {
        $searchsites_sql = str_replace("*", ".*", $searchsites);
        $searchsites_sql = " AND FAMILYSITE =~ /{$searchsites_sql}/";
    }
    if ($searchuser != null) {
        $searchuser_sql = str_replace("*", ".*", $searchuser);
        $searchuser_sql = " AND {$USER_FIELD} =~ /{$searchuser_sql}/";
    }
    // Select the two identity columns that are NOT the chosen user field.
    $SRF["USERID"] = true;
    $SRF["IPADDR"] = true;
    $SRF["MAC"] = true;
    unset($SRF[$USER_FIELD]);
    $srg = array();
    foreach ($SRF as $A => $P) {
        $srg[] = $A;
    }
    $users_fiels = @implode(",", $srg);
    $Z = array();
    $Z[] = "SELECT SIZE,RQS,FAMILYSITE,{$USER_FIELD},{$users_fiels} FROM access_log";
    $Z[] = "WHERE (time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "')";
    if ($searchsites_sql != null) {
        $Z[] = "{$searchsites_sql}";
    }
    if ($searchuser_sql != null) {
        $Z[] = "{$searchuser_sql}";
    }
    $sql = @implode(" ", $Z);
    echo "{$sql}\n";
    build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
    $main = $influx->QUERY_SQL($sql);
    $GLOBALS["CSV1"][] = array("date", "website", "member", "ipaddr", "mac", "SizeBytes", "SizeText", "hits");
    $MAIN_ARRAY = array();
    foreach ($main as $row) {
        $time = InfluxToTime($row->time);
        $USER = $row->USERID;
        $IPADDR = $row->IPADDR;
        $MAC = $row->MAC;
        $SIZE = intval($row->SIZE);
        $RQS = intval($row->RQS);
        $FAMILYSITE = $row->FAMILYSITE;
        if (trim($FAMILYSITE) == null) {
            continue;
        }
        if (trim($IPADDR) == null) {
            continue;
        }
        $HOURLY = date("Y-m-d H:00:00", $time);
        $MDKey = md5("{$FAMILYSITE}{$USER}{$IPADDR}{$MAC}");
        if ($SIZE == 0) {
            continue;
        }
        $TIME_TEXT = date("Y-m-d H:i:s", $time);
        $SizeText = FormatBytes($SIZE / 1024, true);
        $GLOBALS["CSV1"][] = array($TIME_TEXT, $FAMILYSITE, $USER, $IPADDR, $MAC, $SIZE, $SizeText, $RQS);
        if (!isset($MAIN_ARRAY[$HOURLY][$MDKey])) {
            $MAIN_ARRAY[$HOURLY][$MDKey]["FAMILYSITE"] = $FAMILYSITE;
            $MAIN_ARRAY[$HOURLY][$MDKey]["USER"] = $USER;
            $MAIN_ARRAY[$HOURLY][$MDKey]["MAC"] = $MAC;
            $MAIN_ARRAY[$HOURLY][$MDKey]["IPADDR"] = $IPADDR;
            $MAIN_ARRAY[$HOURLY][$MDKey]["RQS"] = $RQS;
            $MAIN_ARRAY[$HOURLY][$MDKey]["SIZE"] = $SIZE;
        } else {
            $MAIN_ARRAY[$HOURLY][$MDKey]["RQS"] = $MAIN_ARRAY[$HOURLY][$MDKey]["RQS"] + $RQS;
            $MAIN_ARRAY[$HOURLY][$MDKey]["SIZE"] = $MAIN_ARRAY[$HOURLY][$MDKey]["SIZE"] + $SIZE;
        }
    }
    if (count($MAIN_ARRAY) == 0) {
        echo "MAIN_ARRAY is null....\n";
        return false;
    }
    echo "MAIN_ARRAY (1) = " . count($MAIN_ARRAY) . "\n";
    build_progress("{step} {insert_data}: MySQL engine, {please_wait}", 8);
    $f = array();
    $sql = "CREATE TABLE IF NOT EXISTS `{$md5}user` (\n\t`USERID` VARCHAR(128),\n\t`MAC` VARCHAR(90),\n\t`IPADDR` VARCHAR(90),\n\t`familysite` VARCHAR(128),\n\t`zDate` DATETIME,\n\t`size` INT UNSIGNED NOT NULL DEFAULT 1,\n\t`hits` INT UNSIGNED NOT NULL DEFAULT 1,\n\tKEY `USERID`(`USERID`),\n\tKEY `MAC`(`MAC`),\n\tKEY `IPADDR`(`IPADDR`),\n\tKEY `hits`(`hits`),\n\tKEY `familysite`(`familysite`),\n\tKEY `size`(`size`)\n\t) ENGINE = MYISAM;";
    $q = new mysql_squid_builder();
    $q->QUERY_SQL($sql);
    if (!$q->ok) {
        echo $q->mysql_error;
        REMOVE_TABLES($md5);
        return false;
    }
    $c = 0;
    // Bulk insert in batches of 500 value tuples.
    foreach ($MAIN_ARRAY as $TIME => $SUBARRAY) {
        foreach ($SUBARRAY as $MDKey => $array) {
            $USER = $array["USER"];
            $HITS = $array["RQS"];
            $SIZE = $array["SIZE"];
            $MAC = $array["MAC"];
            $IPADDR = $array["IPADDR"];
            $FAMILYSITE = $array["FAMILYSITE"];
            if (trim($FAMILYSITE) == null) {
                continue;
            }
            if (trim($IPADDR) == null) {
                continue;
            }
            $c++;
            $SIZE_LOGS = $SIZE;
            $f[] = "('{$TIME}','{$FAMILYSITE}','{$USER}','{$MAC}','{$IPADDR}','{$SIZE}','{$HITS}')";
            echo "('{$TIME}','{$FAMILYSITE}','{$USER}','{$SIZE_LOGS}','{$HITS}')\n";
            if (count($f) > 500) {
                $q->QUERY_SQL("INSERT IGNORE INTO `{$md5}user` (zDate,familysite,USERID,MAC,IPADDR,size,hits) VALUES " . @implode(",", $f));
                if (!$q->ok) {
                    echo $q->mysql_error;
                    REMOVE_TABLES($md5);
                    return false;
                }
                $f = array();
            }
        }
    }
    if (count($f) > 0) {
        $q->QUERY_SQL("INSERT IGNORE INTO `{$md5}user` (zDate,familysite,USERID,MAC,IPADDR,size,hits) VALUES " . @implode(",", $f));
        $f = array();
    }
    echo "{$c} items inserted to MySQL\n";
    return true;
}
/**
 * Debug helper: run an arbitrary InfluxQL query with client verbosity
 * forced on, dump every returned row (time, size, full var_dump), then
 * print the current local epoch for reference.
 *
 * @param string $sql InfluxQL query to execute.
 * @return void  Output goes to stdout.
 */
function query_influx($sql) {
    // Force maximum verbosity in the influx client before querying.
    $GLOBALS["VERBOSE"] = true;
    $GLOBALS["DEBUG_INFLUX_VERBOSE"] = true;
    $client = new influx();
    $client->ROOT_DUMP_ALL_DATABASES();
    foreach ($client->QUERY_SQL($sql) as $record) {
        echo "TIME: " . date("Y-m-d H:i:s", $record->time) . "\n";
        echo "SIZE: {$record->size}\n";
        var_dump($record, $record->time);
    }
    echo "today is " . strtotime(date("Y-m-d H:i:s")) . "\n";
}
/**
 * SMTP flow report builder: aggregate the PostgreSQL "smtpstats" table per
 * hour/sender/recipient (optionally restricted to the domains of an LDAP
 * organizational unit and/or a mail-address search pattern), and materialize
 * the result into a temporary table "{$md5}report".
 *
 * FIXES:
 *  - $SSEARCH was missing its closing parenthesis
 *    ("WHERE ( (a) OR (b)"), producing invalid SQL whenever a search term
 *    was supplied; it was also used uninitialized when no search was given.
 *  - The index on (mailfrom,mailto,reason) referenced a "reason" column the
 *    temp table does not have, so index creation always failed.
 *  - each() loop (removed in PHP 8.0) converted to foreach; $FDOMS/$FDOMS2
 *    initialized.
 *
 * @param array  $ligne Task row with serialized "params", "ou", "uid".
 * @param string $md5   Report identifier / table-name prefix.
 * @return bool  true when rows were inserted, false otherwise (temp table
 *               dropped on failure or empty result).
 */
function GRAB_DATAS($ligne, $md5) {
    $GLOBALS["zMD5"] = $md5;
    $params = unserialize($ligne["params"]);
    $influx = new influx();
    // Sanitize the time window: anything before 2008 is treated as unset.
    $mintime = strtotime("2008-01-01 00:00:00");
    $params["TO"] = intval($params["TO"]);
    $params["FROM"] = abs(intval($params["FROM"]));
    if ($params["FROM"] < $mintime) {
        $params["FROM"] = strtotime(date("Y-m-d 00:00:00"));
    }
    $params["TO"] = intval($params["TO"]);
    if ($params["TO"] < $mintime) {
        $params["TO"] = time();
    }
    $influx = new influx();
    $ou = $ligne["ou"];
    $uid = $ligne["uid"];
    $to = $params["TO"];
    $from = $params["FROM"];
    $interval = $params["INTERVAL"];
    $user = $params["USER"];
    $md5_table = "{$md5}report";
    $search = $params["searchuser"];
    echo "SMTP Flow: FROM {$from} to {$to} {$interval} organization:{$ou} user:{$user} {$search}\n";
    $table = "smtpstats";
    //zdate,mailfrom,domainfrom,mailto,domainto,subject,size,spamscore,spamreport,disclaimer,backuped,infected,filtered,whitelisted,compressed,stripped
    $sqlSource = null;
    $SSEARCH = null; // FIX: was interpolated below without initialization
    $TimeGroup = "date_trunc('hour', zdate) as zdate";
    $TimeGroupBy = "date_trunc('hour', zdate)";
    $distance = $influx->DistanceHour($from, $to);
    echo "Distance: {$distance} hours\n";
    $FilterDate = "(zdate >='" . date("Y-m-d H:i:s", $from) . "' and zdate <= '" . date("Y-m-d H:i:s", $to) . "')";
    if ($search == "*") {
        $search = null;
    }
    if ($search != null) {
        $search = str_replace("*", ".*", $search);
        // FIX: closing parenthesis was missing -> invalid SQL.
        $SSEARCH = "WHERE ((mailfrom ~* '{$search}') OR (mailto ~* '{$search}'))";
    }
    if ($ou != null) {
        // Restrict to mail domains owned by the organizational unit.
        $ldap = new clladp();
        $domains = $ldap->hash_get_domains_ou($ou);
        $FDOMS = array();
        $FDOMS2 = array();
        foreach ($domains as $domain => $MAIN) {
            $domain = trim(strtolower($domain));
            if ($domain == null) {
                continue;
            }
            echo "Domain: {$domain}\n";
            $FDOMS[] = "domainfrom ='{$domain}'";
            $FDOMS2[] = "domainto ='{$domain}'";
        }
        $imploded1 = @implode(" OR ", $FDOMS);
        $imploded2 = @implode(" OR ", $FDOMS2);
        $sqlSource = "(select count(*) as hits,sum(size) as size,mailfrom,domainfrom,mailto,domainto,{$TimeGroup} FROM smtpstats WHERE {$FilterDate} AND (({$imploded1}) OR ({$imploded2})) GROUP BY mailfrom,domainfrom,mailto,domainto,{$TimeGroupBy} ORDER BY {$TimeGroupBy} ) as t";
        $sqlSource = "select * FROM {$sqlSource} {$SSEARCH}";
    }
    if ($sqlSource == null) {
        $sqlSource = "(select count(*) as hits,sum(size) as size,mailfrom,domainfrom,mailto,domainto,{$TimeGroup} FROM smtpstats WHERE {$FilterDate} GROUP BY mailfrom,domainfrom,mailto,domainto,{$TimeGroupBy} ORDER BY {$TimeGroupBy} ) as t";
        $sqlSource = "select * FROM {$sqlSource} {$SSEARCH}";
    }
    $sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\"\n\t(zdate timestamp,\n\tmailfrom VARCHAR(128),\n\tmailto VARCHAR(128),\n\tdomainfrom VARCHAR(128),\n\tdomainto VARCHAR(128),\t\n\tsize BIGINT,\n\thits BIGINT)";
    echo "TEMP:\n{$sql}\n";
    $q = new postgres_sql();
    $q->QUERY_SQL($sql);
    if (!$q->ok) {
        echo "***************\n{$q->mysql_error}\n***************\n";
        return false;
    }
    $q->QUERY_SQL("TRUNCATE TABLE \"{$md5}report\"");
    $q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
    // FIX: the temp table has no "reason" column; the old index DDL failed.
    $q->QUERY_SQL("create index mailfrom{$md5}report on \"{$md5}report\"(mailfrom,mailto);");
    $sql = "INSERT INTO \"{$md5}report\" (hits,size,mailfrom,domainfrom,mailto,domainto,zdate) {$sqlSource}";
    echo "{$sql}\n";
    build_progress("{step} {waiting_data}: BigData 
engine, {please_wait}", 6);
    $postgres = new postgres_sql();
    $results = $postgres->QUERY_SQL($sql);
    if (!$postgres->ok) {
        echo "ERROR.....\n";
        echo "***************\n{$postgres->mysql_error}\n***************\n";
        $q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
        return false;
    }
    $sql = "SELECT COUNT(*) AS tcount FROM \"{$md5}report\"";
    $ligne = pg_fetch_assoc($postgres->QUERY_SQL($sql));
    $total = intval($ligne["tcount"]);
    echo "Members {$total} items inserted to PostGreSQL\n";
    if ($total == 0) {
        $q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
        return false;
    }
    return true;
}
#!/usr/bin/php -q <?php include_once dirname(__FILE__) . "/ressources/class.influx.inc"; $GLOBALS["DEBUG"] = false; //ini_set('html_errors',0);ini_set('display_errors', 1);ini_set('error_reporting', E_ALL);ini_set('error_prepend_string','');ini_set('error_append_string',''); error_reporting(0); $GLOBALS["MYPID"] = getmypid(); if (preg_match("#--gpid=([0-9]+)#", @implode(" ", $argv), $re)) { $GLOBALS["GPID"] = $re[1]; WLOG("Starting Group Number:{$GLOBALS["GPID"]}"); LOADRULES($GLOBALS["GPID"]); } WLOG("Starting PID:{$GLOBALS["MYPID"]}"); WLOG("SSL : Starting SNI certificate verification.. ARGV=[" . @implode(" ", $argv) . "]"); $q = new influx(); $c = 0; $DCOUNT = 0; while (!feof(STDIN)) { $data = trim(fgets(STDIN)); if ($data == null) { continue; } $c++; $DCOUNT++; $array["tags"]["website"] = $data; $array["fields"]["RQS"] = 1; $q->insert("sni_certs", $array); $result = isMatches($data); if ($c > 500) { WLOG("{$DCOUNT} requests..."); $c = 0;
/**
 * Flush aggregated Apache access statistics to the active statistics
 * backend: PostgreSQL ("ArticaStats" binary present) or InfluxDB otherwise.
 * On injection failure the payload is saved under .../works/failed so it
 * can be replayed; on success a copy is kept under .../works/backup.
 *
 * FIXES:
 *  - Postgres failure path wrote @implode("\n", $sql) where $sql is a
 *    string: implode() on a non-array returns null, so the failure backup
 *    file was always empty (silent data loss). Now writes $sql directly.
 *  - each() loop (removed in PHP 8.0) converted to foreach.
 *  - $zArray is re-initialized per row so no key can leak between rows.
 *
 * @param array $MAIN Aggregates keyed by md5: SIZE, RQS, SITENAME, DATE,
 *                    INFLUX_TIME, IPADDR, HTTP_CODE per entry.
 * @return bool false only on a PostgreSQL injection failure; true otherwise.
 */
function DUMP_FULL($MAIN) {
    $backupdir = "/home/apache/artica-stats/works/backup";
    $failedPath = "/home/apache/artica-stats/works/failed";
    @mkdir($backupdir, 0755, true);
    @mkdir($failedPath, 0755, true);
    $PROXYNAME = $GLOBALS["MYHOSTNAME_PROXY"];
    $AS_POSTGRES = false;
    $suffix = "influx";
    if (is_file("/usr/local/ArticaStats/bin/postgres")) {
        $AS_POSTGRES = true;
        $suffix = "postgres";
    }
    $prefix = "(zDate,IPADDR,SITENAME,HTTP_CODE,RQS,SIZE,PROXYNAME)";
    $q = new influx();
    $FINAL = array();
    foreach ($MAIN as $MD5 => $ARRAY) {
        $SIZE = $ARRAY["SIZE"];
        $RQS = $ARRAY["RQS"];
        $SITENAME = $ARRAY["SITENAME"];
        $DATE = $ARRAY["DATE"];
        $INFLUX_TIME = $ARRAY["INFLUX_TIME"];
        $IPADDR = $ARRAY["IPADDR"];
        $HTTP_CODE = $ARRAY["HTTP_CODE"];
        $zDate = $ARRAY["DATE"];
        if ($AS_POSTGRES) {
            $FINAL[] = "('{$zDate}','{$IPADDR}','{$SITENAME}','{$HTTP_CODE}','{$RQS}','{$SIZE}','{$PROXYNAME}')";
            continue;
        }
        $zArray = array();
        $zArray["precision"] = "s";
        $zArray["time"] = $INFLUX_TIME;
        $zArray["fields"]["RQS"] = $RQS;
        $zArray["fields"]["SIZE"] = $SIZE;
        $zArray["fields"]["HTTP_CODE"] = $HTTP_CODE;
        $zArray["tags"]["SITENAME"] = $SITENAME;
        $zArray["tags"]["IPADDR"] = $IPADDR;
        $line = $q->prepare("apache_size", $zArray);
        $FINAL[] = $line;
    }
    if (count($FINAL) > 0) {
        $backupfile = "{$backupdir}/apache." . time() . ".{$suffix}.log";
        $failedPath = "{$failedPath}/apache." . time() . ".{$suffix}.log";
        if ($AS_POSTGRES) {
            $sql = "INSERT INTO apache_size {$prefix} VALUES " . @implode(",", $FINAL);
            $q = new postgres_sql();
            $q->QUERY_SQL($sql);
            if (!$q->ok) {
                events("INJECTION Failed: backup to {$failedPath} ({$q->curl_error})");
                // FIX: $sql is a string; implode() on it wrote an empty file.
                @file_put_contents($failedPath, $sql);
                return false;
            }
        }
        if (!$AS_POSTGRES) {
            if (!$q->bulk_inject($FINAL)) {
                apache_admin_mysql(0, "INJECTION Failed ({$q->curl_error})", ": backup to {$failedPath}", __FILE__, __LINE__);
                @file_put_contents($failedPath, @implode("\n", $FINAL));
                sleep(1);
                return true;
            }
        }
        events("INJECTION Success: backup to {$backupfile}");
        @file_put_contents($backupfile, @implode("\n", $FINAL));
        $FINAL = array();
    }
    sleep(1);
    return true;
}
/**
 * Flexigrid JSON provider: list all InfluxDB databases with their table
 * count, formatted size, a delete action, and a "(current)" marker on the
 * database matching $db->systemid. Emits json_error_show("no data") when
 * no database exists.
 *
 * FIX: each() loop over the database list (removed in PHP 8.0) converted
 * to foreach.
 *
 * @return void  Echoes JSON.
 */
function search() {
    $tpl = new templates();
    $MyPage = CurrentPageName();
    $sock = new sockets();
    $q = new mysql();
    $t = $_GET["t"];
    $search = '%';
    $page = 1;
    $FORCE_FILTER = null;
    $total = 0;
    $table = "last_boot";
    $data = array();
    $data['page'] = $page;
    $data['total'] = 0;
    $data['rows'] = array();
    $fontsize = 20;
    $style = "style='font-size:20px'";
    $c = 0;
    $tpl = new templates();
    $curr = $tpl->javascript_parse_text("{current}");
    $tables = $tpl->javascript_parse_text("{tables}");
    $db = new influx();
    $DBS = $db->ROOT_DUMP_ALL_DATABASES();
    foreach ($DBS as $database => $size) {
        $c++;
        $current = null;
        $ms5 = md5($database);
        $TABLES = $db->LIST_TABLES($database);
        $CountOfTables = count($TABLES);
        $delete = imgsimple("delete-32.png", null, "Loadjs('{$MyPage}?delete-db-js={$database}')");
        $color = "black";
        if ($database == $db->systemid) {
            $current = " ({$curr})";
        }
        $size = FormatBytes(intval($size) / 1024);
        $data['rows'][] = array('id' => $ms5, 'cell' => array(
            "<span {$style}>{$database}{$current} {$CountOfTables} {$tables}</a></span>",
            "<center>{$delete}</a></center>"));
    }
    if ($c == 0) {
        json_error_show("no data");
    }
    $data['total'] = $c;
    echo json_encode($data);
}
/**
 * Serialize buffered web-filter events into Influx line protocol ("webfilter"
 * measurement) and bulk-inject them. On failure the payload is written to
 * the webfilter-failed directory for later replay; on success a copy is kept
 * in webfilter-backup.
 *
 * FIXES:
 *  - $FINAL was never initialized: count(null) is a TypeError in PHP 8 when
 *    $MEM is empty.
 *  - each() loop (removed in PHP 8.0) converted to foreach; $array is
 *    re-initialized per entry so keys cannot leak between events.
 *
 * @param array $MEM Buffered events keyed by md5; each entry carries TIME,
 *                   ZDATE, RQS, uid, category, rulename, public_ip,
 *                   blocktype, why, hostname, website, client.
 * @return bool false when the bulk injection failed, true otherwise.
 */
function UFDB_LOG_HOURLY_DUMP($MEM) {
    events("Dumping " . count($MEM) . " entries");
    $q = new influx();
    $FINAL = array();
    foreach ($MEM as $KEYMD5 => $subarray) {
        $array = array();
        $array["precision"] = "s";
        $array["time"] = $subarray["TIME"];
        $array["tags"]["uid"] = $subarray["uid"];
        $array["fields"]["TIME"] = $subarray["ZDATE"];
        $array["fields"]["RQS"] = $subarray["RQS"];
        $array["tags"]["category"] = $subarray["category"];
        $array["tags"]["rulename"] = $subarray["rulename"];
        $array["tags"]["public_ip"] = $subarray["public_ip"];
        $array["tags"]["blocktype"] = $subarray["blocktype"];
        $array["tags"]["why"] = $subarray["why"];
        $array["tags"]["hostname"] = $subarray["hostname"];
        $array["tags"]["website"] = $subarray["website"];
        $array["tags"]["client"] = $subarray["client"];
        $line = $q->prepare("webfilter", $array);
        if ($GLOBALS["VERBOSE"]) {
            echo "{$line}\n";
        }
        $FINAL[] = $line;
    }
    if (count($FINAL) > 0) {
        $backupdir = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-backup";
        $faildir = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-failed";
        @mkdir($faildir, 0755, true);
        @mkdir($backupdir, 0755, true);
        $backupfile = "{$backupdir}/" . time() . ".influx.log";
        $failedPath = "{$faildir}/" . time() . ".influx.log";
        if (!$q->bulk_inject($FINAL)) {
            events("INJECTION Failed: backup to {$failedPath} ({$q->curl_error})");
            @file_put_contents($failedPath, @implode("\n", $FINAL));
            return false;
        }
        events("INJECTION Success: backup to {$backupfile}");
        @file_put_contents($backupfile, @implode("\n", $FINAL));
        $FINAL = array();
    }
    return true;
}
/**
 * Incrementally parse a Squid access-log file and feed the parsed entries
 * into the in-memory aggregation caches (CachedSizeMem / CachedUserMem),
 * then dump those caches and bulk-import the resulting
 * /home/artica/import-temp/{md5}.working.log file into InfluxDB.
 *
 * Resumable: the byte offset of the last processed position is persisted in
 * "{$filepath}.last" every ~50 lines and used with fseek() on the next run.
 * If a previous run already produced a working.log for this file's md5 and
 * no .last marker exists, the function only retries that pending injection.
 *
 * Progress/housekeeping counters (all reset per run):
 *   $c - total lines read (drives the percentage shown via mysql_progress),
 *   $d - lines since the last offset checkpoint (flushed every 50),
 *   $e - lines since the last progress-row update (flushed every 500),
 *   $prc/$prc_text - current and last-reported integer percentage.
 *
 * NOTE(review): progress reporting in the $e>500 branch uses $internal_load,
 * which is only assigned inside the 10%<prc<99% branch — it can be undefined
 * on early iterations. filesize() on the working.log also assumes the dump
 * step created it. Left as-is: the control flow is too order-dependent to
 * restructure safely without tests.
 *
 * @param string      $filepath Path of the access log to import.
 * @param string|null $md5file  Optional job id used for mysql_progress()
 *                              status rows; null disables progress updates.
 * @return bool true when the working.log was injected into InfluxDB
 *              (or a pending one was re-injected), false on open/inject failure.
 */
function ExplodeFile($filepath, $md5file = null) { $unix = new unix(); $LastScannLine = 0; $GLOBALS["MYSQL_CATZ"] = new mysql_catz(); $GLOBALS["SQUID_FAMILY_CLASS"] = new squid_familysite(); if (!isset($GLOBALS["MYHOSTNAME"])) { $unix = new unix(); $GLOBALS["MYHOSTNAME"] = $unix->hostname_g(); } $GLOBALS["SEQUENCE"] = md5_file($filepath); if (!is_file("{$filepath}.last")) { if (is_file("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log")) { $influx = new influx(); if ($influx->files_inject("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log")) { @unlink("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log"); return true; } } } $handle = @fopen($filepath, "r"); if (!$handle) { echo "Fopen failed on {$filepath}\n"; if ($md5file != null) { mysql_progress($md5file, 100, 3, "Fopen {failed} on {$filepath}"); } return false; } $countlines = 0; if ($md5file != null) { $countlines = $unix->COUNT_LINES_OF_FILE($filepath); if ($md5file != null) { mysql_progress($md5file, 10, 0, "Parsing {$countlines}"); } } if (is_file("{$filepath}.last")) { $LastScannLine = intval(@file_get_contents("{$filepath}.last")); } $c = 0; $d = 0; $e = 0; $prc = 0; $prc_text = 0; $mysql_first_time = 0; if ($LastScannLine > 0) { fseek($handle, $LastScannLine, SEEK_SET); } while (!feof($handle)) { $c++; $d++; $e++; if ($countlines > 0) { $prc = $c / $countlines; $prc = round($prc * 100); if (!isset($GLOBALS["LAST_PRC"])) { if ($GLOBALS["PROGRESS"]) { echo "{$prc}%\n"; } $GLOBALS["LAST_PRC"] = $prc; } else { if ($GLOBALS["LAST_PRC"] != $prc) { if ($GLOBALS["PROGRESS"]) { echo "{$prc}%\n"; } $GLOBALS["LAST_PRC"] = $prc; } } if ($prc > 10) { if ($prc < 99) { if ($prc > $prc_text) { $array_load = sys_getloadavg(); $internal_load = $array_load[0]; $mem = round(memory_get_usage() / 1024 / 1000, 2); $prc_design = FormatNumber($c) . "/" . FormatNumber($countlines); if ($md5file != null) { mysql_progress($md5file, $prc, 1, "{parsing} {$prc_design} {load}:{$internal_load} {memory}:{$mem}MB"); } $prc_text = $prc; } } } } if ($d > 50) { $iSeek = ftell($handle); @file_put_contents("{$filepath}.last", $iSeek); if ($GLOBALS["VERBOSE"]) { $prc_design = FormatNumber($c) . "/" . FormatNumber($countlines); echo "{$prc}% {$prc_design}\n"; } $d = 0; } if ($e > 500) { $mem = round(memory_get_usage() / 1024 / 1000, 2); $prc_design = FormatNumber($c) . "/" . FormatNumber($countlines); if ($md5file != null) { mysql_progress($md5file, $prc, 1, "{parsing} {$prc_design} {load}:{$internal_load} {memory}:{$mem}MB"); } $e = 0; } $buffer = trim(fgets($handle)); if ($buffer == null) { continue; } $array = parseAccessLine($buffer); if (count($array) == 0) { continue; } if ($mysql_first_time == 0) { if (date("Y", $array["TIME"]) > 2001) { $mysql_first_time = $array["TIME"]; mysql_first_time($md5file, $mysql_first_time); } } CachedSizeMem($array["TIME"], $array["CACHED"], $array["SIZE"]); if (intval($array["SIZE"]) == 0) { if ($GLOBALS["VERBOSE"]) { echo "Size = 0 " . __LINE__ . "\n"; } } CachedUserMem($array["TIME"], $array["SITENAME"], $array["SIZE"], null, $array["UID"], $array["IPADDR"], $array["CATEGORY"], $array["FAMILYSITE"]); } @unlink("{$filepath}.last"); mysql_last_time($md5file, $array["TIME"]); CachedUserMem_dump(); CachedSizeMem_dump(); $influx = new influx(); $size = filesize("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log"); $size = $size / 1024; $size = $size / 1024; echo "Importing {$size}MB of data....\n"; if (!$influx->files_inject("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log")) { @unlink("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log"); return false; } @unlink("/home/artica/import-temp/{$GLOBALS["SEQUENCE"]}.working.log"); return true; }
/**
 * Web-filter report builder: aggregate the PostgreSQL "webfilter" table per
 * hour/rule/category/host/site/client for the requested period, and
 * materialize the result into a temporary table "{$md5}report".
 *
 * FIXES:
 *  - each() loops (removed in PHP 8.0) converted to foreach.
 *  - $srg and $Z were appended to without being initialized.
 *
 * NOTE(review): $SSEARCH is built twice when a user search is given; the
 * second assignment (on the "client" column) deliberately overrides the
 * first, since the webfilter table is filtered on "client". Left as-is.
 *
 * @param array  $ligne Task row; $ligne["params"] is a serialized array with
 *                      FROM/TO/INTERVAL/USER/searchsites/searchuser/categories.
 * @param string $md5   Report identifier / table-name prefix.
 * @return bool  true when rows were inserted, false otherwise (temp table
 *               dropped on failure or empty result).
 */
function GRAB_DATAS($ligne, $md5) {
    $GLOBALS["zMD5"] = $md5;
    $params = unserialize($ligne["params"]);
    $params["TO"] = intval($params["TO"]);
    $params["FROM"] = abs(intval($params["FROM"]));
    foreach ($params as $A => $P) {
        echo "Params {$A}......: {$P}\n";
    }
    echo "Date To......: {$params["TO"]}\n";
    $influx = new influx();
    // Sanitize the time window: anything before 2008 is treated as unset.
    $mintime = strtotime("2008-01-01 00:00:00");
    $params["TO"] = intval($params["TO"]);
    $params["FROM"] = abs(intval($params["FROM"]));
    echo "Date From....: {$params["FROM"]} <> {$mintime}\n";
    if ($params["FROM"] < $mintime) {
        echo "Date From....: {$params["FROM"]} < {$mintime} !!!\n";
        $params["FROM"] = strtotime(date("Y-m-d 00:00:00"));
    }
    $params["TO"] = intval($params["TO"]);
    if ($params["TO"] < $mintime) {
        $params["TO"] = time();
    }
    echo "Date From....: {$params["FROM"]}\n";
    $from = InfluxQueryFromUTC($params["FROM"]);
    $to = InfluxQueryFromUTC($params["TO"]);
    $interval = $params["INTERVAL"];
    $USER_FIELD = $params["USER"];
    echo "LIMIT........: {$mintime}\n";
    echo "Date From....: {$params["FROM"]}/{$from}/" . date("Y-m-d H:i:s", $from) . "\n";
    echo "Date To......: {$params["TO"]}/{$to}/" . date("Y-m-d H:i:s", $to) . "\n";
    echo "USER_FIELD...: {$USER_FIELD}\n";
    $SSEARCH = null;
    $searchsites = trim($params["searchsites"]);
    $searchuser = trim($params["searchuser"]);
    $categories = trim($params["categories"]);
    $searchsites_sql = null;
    $searchuser_sql = null;
    if ($categories == "*") {
        $categories = null;
    }
    if ($searchuser == "*") {
        $searchuser = null;
    }
    if ($searchuser != null) {
        $searchuser_sql = str_replace(".", "\\.", $searchuser);
        $searchuser_sql = str_replace("*", ".*", $searchuser_sql);
        if ($searchuser_sql != null) {
            $SSEARCH = " ({$USER_FIELD} ~* '{$searchuser_sql}') AND ";
        }
    }
    $SRF["USERID"] = true;
    $SRF["IPADDR"] = true;
    $SRF["MAC"] = true;
    unset($SRF[$USER_FIELD]);
    $srg = array();
    foreach ($SRF as $A => $P) {
        $srg[] = $A;
    }
    $users_fiels = @implode(",", $srg);
    if ($searchuser != null) {
        $searchuser = str_replace(".", "\\.", $searchuser);
        $searchuser = str_replace("*", ".*", $searchuser);
        // Overrides the $USER_FIELD filter above: webfilter stores "client".
        $SSEARCH = " (client ~* '{$searchuser}') AND ";
    }
    $q = new mysql_squid_builder();
    $TimeGroup = "date_trunc('hour', zdate)";
    $distance = $influx->DistanceHour($from, $to);
    echo "Distance: {$distance} hours\n";
    $sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\" (zDate timestamp,\n\twebsite VARCHAR(128),\n\tcategory VARCHAR(64),\n\trulename VARCHAR(128),\n\thostname VARCHAR(128),\n\tclient VARCHAR(128),\n\trqs BIGINT)";
    $q = new postgres_sql();
    $q->QUERY_SQL($sql);
    if (!$q->ok) {
        echo "***************\n{$q->mysql_error}\n***************\n";
        return false;
    }
    $q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
    $q->QUERY_SQL("create index website{$md5}report on \"{$md5}report\"(website);");
    $q->QUERY_SQL("create index hostname{$md5}report on \"{$md5}report\"(hostname);");
    $q->QUERY_SQL("create index client{$md5}report on \"{$md5}report\"(client);");
    $q->QUERY_SQL("TRUNCATE TABLE \"{$md5}report\"");
    $Z = array();
    $Z[] = "SELECT SUM(RQS) AS RQS,{$TimeGroup} as zdate,rulename,category,hostname,website,client FROM webfilter";
    $Z[] = "WHERE {$SSEARCH}(zdate >='" . date("Y-m-d H:i:s", $from) . "'";
    $Z[] = "and zdate <= '" . date("Y-m-d H:i:s", $to) . "')";
    $Z[] = "GROUP BY {$TimeGroup},rulename,category,hostname,website,client";
    $sql = @implode(" ", $Z);
    echo "{$sql}\n";
    build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
    $sql = "INSERT INTO \"{$md5}report\" (rqs,zdate,rulename,category,hostname,website,client) {$sql}";
    $postgres = new postgres_sql();
    $results = $postgres->QUERY_SQL($sql);
    if (!$postgres->ok) {
        echo $postgres->mysql_error . "\n";
        return false;
    }
    $ligne = pg_fetch_assoc($q->QUERY_SQL("SELECT COUNT(*) as tcount FROM \"{$md5}report\""));
    if (!$q->ok) {
        echo "***************\nERROR {$q->mysql_error}\n***************\n";
        $q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
        return false;
    }
    $c = $ligne["tcount"];
    if ($c == 0) {
        echo "No data....\n";
        $q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
        return false;
    }
    echo "{$c} items inserted to PostgreSQL\n";
    $MAIN_ARRAY = array();
    return true;
}
/**
 * Per-user bandwidth report: sum access_log SIZE per user (the field named
 * by $params["USER"]) over the requested period, append the totals to
 * $GLOBALS["CSV1"], and bulk insert them into the temporary MySQL table
 * `{$md5}user`.
 *
 * FIXES:
 *  - The "{$c} items inserted" counter was reset to 0 at the top of every
 *    loop iteration and never incremented, so it always reported 0; it is
 *    now initialized once and incremented per row.
 *  - each() loop (removed in PHP 8.0) converted to foreach; $SSEARCH and
 *    $MAIN_ARRAY initialized before use.
 *
 * @param array  $ligne Task row; $ligne["params"] is a serialized array with
 *                      FROM/TO/INTERVAL/USER/SEARCH keys.
 * @param string $md5   Report identifier / table-name prefix.
 * @return bool  true on success, false on empty data or SQL error
 *               (temporary tables are dropped via REMOVE_TABLES on error).
 */
function GRAB_DATAS($ligne, $md5) {
    $GLOBALS["zMD5"] = $md5;
    $params = unserialize($ligne["params"]);
    $influx = new influx();
    $to = InfluxQueryFromUTC($params["TO"]);
    $from = InfluxQueryFromUTC($params["FROM"]);
    $interval = $params["INTERVAL"];
    $user = $params["USER"];
    $md5_table = "{$md5}sites";
    $search = $params["SEARCH"];
    $USER_FIELD = $params["USER"];
    echo "FLOW: FROM {$from} to {$to} {$interval} user:{$user} {$search}\n";
    $SSEARCH = null;
    if ($search == "*") {
        $search = null;
    }
    if ($search != null) {
        $search = str_replace("*", ".*", $search);
        $SSEARCH = " AND ({$USER_FIELD}=~ /{$search}/)";
    }
    $sql = "SELECT {$user},SIZE FROM access_log WHERE (time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "'){$SSEARCH}";
    echo "{$sql}\n";
    build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
    $main = $influx->QUERY_SQL($sql);
    $MAIN_ARRAY = array();
    foreach ($main as $row) {
        $time = InfluxToTime($row->time);
        $SIZE = intval($row->SIZE);
        $USER = $row->{$USER_FIELD};
        if ($SIZE == 0) {
            continue;
        }
        if (!isset($MAIN_ARRAY[$USER])) {
            $MAIN_ARRAY[$USER] = $SIZE;
        } else {
            $MAIN_ARRAY[$USER] = $MAIN_ARRAY[$USER] + $SIZE;
        }
    }
    if (count($MAIN_ARRAY) == 0) {
        echo "MAIN_ARRAY is null....\n";
        return false;
    }
    echo "MAIN_ARRAY (1) = " . count($MAIN_ARRAY) . "\n";
    build_progress("{step} {insert_data}: MySQL engine, {please_wait}", 8);
    $f = array();
    $GLOBALS["CSV1"][] = array("member", "SizeBytes");
    $sql = "CREATE TABLE IF NOT EXISTS `{$md5}user` \n\t(`user` VARCHAR(128),`size` INT UNSIGNED NOT NULL DEFAULT 1,\n\tKEY `user`(`user`),\n\tKEY `size`(`size`)\n\t) ENGINE = MYISAM;";
    $q = new mysql_squid_builder();
    $q->QUERY_SQL($sql);
    if (!$q->ok) {
        echo $q->mysql_error;
        REMOVE_TABLES($md5);
        return false;
    }
    $c = 0; // FIX: was reset inside the loop and never incremented
    foreach ($MAIN_ARRAY as $USER => $SIZE) {
        $c++;
        $f[] = "('{$USER}','{$SIZE}')";
        if ($GLOBALS["VERBOSE"]) {
            echo "{$USER} -> {$SIZE}\n";
        }
        $GLOBALS["CSV1"][] = array($USER, $SIZE);
        if (count($f) > 500) {
            $q->QUERY_SQL("INSERT IGNORE INTO `{$md5}user` (user,size) VALUES " . @implode(",", $f));
            if (!$q->ok) {
                echo $q->mysql_error;
                REMOVE_TABLES($md5);
                return false;
            }
            $f = array();
        }
    }
    if (count($f) > 0) {
        $q->QUERY_SQL("INSERT IGNORE INTO `{$md5}user` (user,size) VALUES " . @implode(",", $f));
        $f = array();
    }
    echo "Members {$c} items inserted to MySQL\n";
    return true;
}
/**
 * Main hourly statistics pass: guards against concurrent runs, refreshes
 * dashboard counters, snapshots SNI certificate hit counts, builds per-interface
 * RX/TX graph caches from InfluxDB, and triggers the various sub-reports.
 *
 * Side effects only (files under $GLOBALS["BASEDIR"], MySQL tables, progress
 * events); no return value. Terminates (die) when statistics are disabled.
 */
function parse() {
	$TimeFile = "/etc/artica-postfix/pids/exec.squid.interface-size.php.time";
	$pidfile = "/etc/artica-postfix/pids/exec.squid.interface-size.php.pid";
	$unix = new unix();
	$sock = new sockets();
	$EnableKerbAuth = intval($sock->GET_INFO("EnableKerbAuth"));
	$ActiveDirectoryEmergency = intval($sock->GET_INFO("ActiveDirectoryEmergency"));
	if ($ActiveDirectoryEmergency == 1) {
		$EnableKerbAuth = 0;
	}
	// Single-instance guard: abort if a previous run is younger than 14 minutes,
	// otherwise kill it and take over.
	$pid = @file_get_contents($pidfile);
	if ($pid < 100) {
		$pid = null;
	}
	if ($unix->process_exists($pid, basename(__FILE__))) {
		$timepid = $unix->PROCCESS_TIME_MIN($pid);
		if ($GLOBALS["VERBOSE"]) {
			echo "{$pid} already executed since {$timepid}Mn\n";
		}
		if (!$GLOBALS["FORCE"]) {
			if ($timepid < 14) {
				return;
			}
			unix_system_kill_force($pid);
		}
	}
	@file_put_contents($pidfile, getmypid());
	// Throttle: do not run more often than every 14 minutes unless forced/verbose.
	if (!$GLOBALS["FORCE"]) {
		if (!$GLOBALS["VERBOSE"]) {
			$time = $unix->file_time_min($TimeFile);
			if ($time < 14) {
				echo "Current {$time}Mn, require at least 14mn\n";
				return;
			}
		}
	}
	@unlink($TimeFile);
	@file_put_contents($TimeFile, time());
	$sock = new sockets();
	$SquidPerformance = intval($sock->GET_INFO("SquidPerformance"));
	events("Proxy performance set to {$SquidPerformance}");
	build_progress("{refresh_dashboard_values}", 10);
	system_values();
	$php = $unix->LOCATE_PHP5_BIN();
	build_progress("{refresh_dashboard_values}", 11);
	// Watchdog counter: severity-0 events over the last 48 hours.
	$dateint = InfluxQueryFromUTC(strtotime("-48 hours"));
	$date = date("Y-m-d H:00:00", $dateint);
	$qSimple = new mysql();
	$sql = "SELECT COUNT(ID) as tcount FROM squid_admin_mysql WHERE severity=0 AND zDate>'{$date}'";
	$ligne = mysql_fetch_array($qSimple->QUERY_SQL($sql, "artica_events"));
	@file_put_contents("{$GLOBALS["BASEDIR"]}/WATCHDOG_COUNT_EVENTS", $ligne["tcount"]);
	@chmod("{$GLOBALS["BASEDIR"]}/WATCHDOG_COUNT_EVENTS", 0777);
	if ($SquidPerformance > 1) {
		// Statistics disabled: remove the hourly cron and stop here.
		if (is_file("/etc/cron.d/artica-stats-hourly")) {
			@unlink("/etc/cron.d/artica-stats-hourly");
			system("/etc/init.d/cron reload");
		}
		build_progress("{statistics_are_disabled}", 110);
		die;
	}
	if (!is_file("/etc/cron.d/artica-stats-hourly")) {
		@unlink("/etc/cron.d/artica-stats-hourly");
	}
	@mkdir("/usr/share/artica-postfix/ressources/interface-cache", 0755, true);
	$t1 = time();
	$q = new mysql_squid_builder();
	$tables[] = "dashboard_size_day";
	$tables[] = "dashboard_countwebsite_day";
	$tables[] = "dashboard_countuser_day";
	$tables[] = "dashboard_user_day";
	$tables[] = "dashboard_notcached";
	$tables[] = "dashboard_cached";
	$tables[] = "dashboard_blocked_day";
	// FIX: the loop previously iterated an undefined $array with each(); iterate
	// the $tables list just built (each() is also removed in PHP 8).
	foreach ($tables as $table) {
		if (!$q->TABLE_EXISTS($table)) {
			events("Table: {$table} is not yet ready...");
			continue;
		}
		$NUM = $q->COUNT_ROWS($table);
		events("Table: {$table} {$NUM} rows");
	}
	build_progress("{calculate_cache_rate}", 12);
	CachedOrNot();
	squidhour_clean();
	$t1 = time();
	$influx = new influx();
	$now = InfluxQueryFromUTC(strtotime("-24 hour"));
	build_progress("{refresh_dashboard_values}", 13);
	// SNI certificates snapshot: refreshed at most once per hour.
	$COUNT_DE_SNI_CERTS_TIME = 1000000;
	$COUNT_DE_SNI_CERTS = "{$GLOBALS["BASEDIR"]}/COUNT_DE_SNI_CERTS";
	if (is_file($COUNT_DE_SNI_CERTS)) {
		$COUNT_DE_SNI_CERTS_TIME = $unix->file_time_min($COUNT_DE_SNI_CERTS);
	}
	if ($GLOBALS["VERBOSE"]) {
		echo "COUNT_DE_SNI_CERTS_TIME = {$COUNT_DE_SNI_CERTS_TIME}\n";
		$COUNT_DE_SNI_CERTS_TIME = 999999;
	}
	if ($COUNT_DE_SNI_CERTS_TIME > 60) {
		$sql = "SELECT COUNT(website) as tcount,website FROM sni_certs WHERE time > {$now}s GROUP BY time(24h),website";
		$f = array();
		echo $sql . "\n";
		$main = $influx->QUERY_SQL($sql);
		foreach ($main as $row) {
			$website = $row->website;
			$count = intval($row->tcount);
			if ($GLOBALS["VERBOSE"]) {
				echo "SNI: {$website} -> {$count}\n";
			}
			$f[] = "('{$count}','{$website}')";
		}
		if (count($f) > 0) {
			$q = new mysql_squid_builder();
			$q->QUERY_SQL("CREATE TABLE IF NOT EXISTS sni_certs (`hits` BIGINT UNSIGNED, `websites` VARCHAR(128) NOT NULL PRIMARY KEY, KEY `hits` (`hits`) ) ENGINE=MYISAM");
			$q->QUERY_SQL("TRUNCATE TABLE sni_certs");
			$q->QUERY_SQL("INSERT IGNORE INTO sni_certs (hits,websites) VALUES " . @implode(",", $f));
			@unlink($COUNT_DE_SNI_CERTS);
			@file_put_contents($COUNT_DE_SNI_CERTS, count($f));
		}
	}
	build_progress("{refresh_dashboard_values}", 14);
	$NETS = $unix->NETWORK_ALL_INTERFACES();
	// Per-interface RX/TX graph caches over the last 24h (10-minute buckets);
	// graphs with fewer than 2 points are removed.
	foreach ($NETS as $Interface => $array) {
		$sql = "SELECT SUM(RX) as size FROM ethrxtx WHERE time > {$now}s AND ETH='{$Interface}' GROUP BY time(10m) ORDER BY ASC";
		if ($GLOBALS["VERBOSE"]) {
			echo "\n*****\n{$sql}\n******\n";
		}
		$MAIN = array();
		$xdata = array();
		$ydata = array();
		$main = $influx->QUERY_SQL($sql);
		foreach ($main as $row) {
			$time = InfluxToTime($row->time);
			$min = date("H:i", $time);
			$size = intval($row->size) / 1024;
			if ($GLOBALS["VERBOSE"]) {
				echo "({$time}): ethrxtx {$Interface}:RX: {$min} -> {$size}\n";
			}
			$size = $size / 1024;
			if (round($size) == 0) {
				continue;
			}
			$xdata[] = $min;
			$ydata[] = round($size);
		}
		$MAIN["xdata"] = $xdata;
		$MAIN["ydata"] = $ydata;
		@file_put_contents("{$GLOBALS["BASEDIR"]}/FLUX_{$Interface}_RX", serialize($MAIN));
		if (count($xdata) < 2) {
			@unlink("{$GLOBALS["BASEDIR"]}/FLUX_{$Interface}_RX");
		}
		$sql = "SELECT SUM(TX) as size FROM ethrxtx WHERE time > {$now}s AND ETH='{$Interface}' GROUP BY time(10m) ORDER BY ASC";
		$MAIN = array();
		$xdata = array();
		$ydata = array();
		build_progress("{refresh_dashboard_values}", 15);
		$main = $influx->QUERY_SQL($sql);
		foreach ($main as $row) {
			$time = InfluxToTime($row->time);
			$min = date("H:i", $time);
			$size = intval($row->size) / 1024;
			$size = $size / 1024;
			if ($size == 0) {
				continue;
			}
			if ($GLOBALS["VERBOSE"]) {
				echo "ethrxtx {$Interface}:TX: {$min} -> {$size}\n";
			}
			$xdata[] = $min;
			$ydata[] = round($size);
		}
		$MAIN["xdata"] = $xdata;
		$MAIN["ydata"] = $ydata;
		@file_put_contents("{$GLOBALS["BASEDIR"]}/FLUX_{$Interface}_TX", serialize($MAIN));
		if (count($xdata) < 2) {
			@unlink("{$GLOBALS["BASEDIR"]}/FLUX_{$Interface}_TX");
		}
	}
	build_progress("{cleaning_databases}", 16);
	squidhour_clean();
	build_progress("{refresh_dashboard_values}", 17);
	FLUX_RQS();
	build_progress("{refresh_dashboard_values}", 18);
	build_progress("{refresh_dashboard_values}", 19);
	USERAGENTS();
	build_progress("{calculate_dates}", 20);
	MAX_MIN();
	backup_size();
	build_progress("{refresh_dashboard_values}", 21);
	WEBFILTERING();
	build_progress("{refresh_dashboard_values}", 22);
	$f = array();
	// Count enabled transparent Tproxy ports for the dashboard.
	$q = new mysql_squid_builder();
	$ligne = @mysql_fetch_array($q->QUERY_SQL("SELECT COUNT(*) as tcount FROM proxy_ports WHERE enabled=1 AND transparent=1 AND Tproxy=1"));
	if ($q->ok) {
		@file_put_contents("{$GLOBALS["BASEDIR"]}/COUNT_DE_TRANSPARENT", intval($ligne["tcount"]));
	}
	build_progress("{refresh_dashboard_values}", 51);
	$MAIN = array();
	$xdata = array();
	$ydata = array();
	$f = array();
	// Total size (MB) of configured caches; "Cachenull" entries excluded.
	$SquidCacheLevel = $sock->GET_INFO("SquidCacheLevel");
	if (!is_numeric($SquidCacheLevel)) {
		$SquidCacheLevel = 4;
	}
	if ($SquidCacheLevel == 0) {
		@file_put_contents("{$GLOBALS["BASEDIR"]}/COUNT_DE_CACHES", 0);
	}
	build_progress("{refresh_dashboard_values}", 52);
	$q = new mysql();
	$sql = "SELECT cache_size,cache_type FROM squid_caches_center WHERE remove=0";
	$xsize = 0;
	$results = $q->QUERY_SQL($sql, "artica_backup");
	while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) {
		$cache_size = $ligne["cache_size"];
		$cache_type = $ligne["cache_type"];
		if ($cache_type == "Cachenull") {
			continue;
		}
		$xsize = $xsize + $cache_size;
	}
	if ($GLOBALS["VERBOSE"]) {
		echo "COUNT_DE_CACHES: {$xsize}MB\n";
	}
	@file_put_contents("{$GLOBALS["BASEDIR"]}/COUNT_DE_CACHES", $xsize);
	if ($GLOBALS["PROGRESS"]) {
		build_progress("{refresh_dashboard_values}", 90);
		system("{$php} /usr/share/artica-postfix/exec.status.php --all --verbose");
	}
	build_progress("{refresh_dashboard_values} {done}", 100);
}
/**
 * Rebuild the DNS-performance dashboard tables (day and month) from the
 * dnsperfs time series, and publish the average percentage via sockets.
 * Runs at most once per hour unless $GLOBALS["FORCE"] is set.
 *
 * @param bool $nopid Unused here; kept for interface compatibility.
 */
function GenerateGraph($nopid = false) {
	if (!is_file("/etc/artica-postfix/settings/Daemons/EnableDNSPerfs")) {
		@file_put_contents("/etc/artica-postfix/settings/Daemons/EnableDNSPerfs", 1);
	}
	$unix = new unix();
	$pidtime = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".time";
	$time = $unix->file_time_min($pidtime);
	// Throttle: one run per hour unless forced.
	if (!$GLOBALS["FORCE"]) {
		if ($time < 60) {
			return;
		}
	}
	@unlink($pidtime);
	@file_put_contents($pidtime, time());
	$q = new mysql();
	$EnableDNSPerfs = intval(@file_get_contents("/etc/artica-postfix/settings/Daemons/EnableDNSPerfs"));
	if ($EnableDNSPerfs == 0) {
		// Feature disabled: drop the dashboard table and stop the whole script.
		$q->QUERY_SQL("DROP TABLE dashboard_dnsperf_day", "artica_events");
		die;
	}
	$q = new mysql();
	$q->QUERY_SQL("DROP TABLE dashboard_dnsperf_day", "artica_events");
	$q = new postgres_sql();
	$q->QUERY_SQL("DROP TABLE dashboard_dnsperf_day");
	$q->QUERY_SQL("CREATE TABLE IF NOT EXISTS dashboard_dnsperf_day (time timestamp,DNS VARCHAR(128), percent FLOAT(5), response FLOAT(5) )");
	$q->create_index("dashboard_dnsperf_day", "ikey", array("time", "dns", "percent", "response"));
	$hostname = $unix->hostname_g();
	$now = strtotime("-24 hour");
	$q = new postgres_sql();
	$sql = "SELECT AVG(PERCENT) as PERCENT,AVG(RESPONSE) as RESPONSE,DNS FROM dnsperfs where PROXYNAME='{$hostname}' and time > {$now}s GROUP BY DNS,time(10m) ORDER BY ASC";
	$results = $q->QUERY_SQL($sql);
	// FIX: $f was previously used uninitialized before count($f).
	$f = array();
	while ($ligne = @pg_fetch_assoc($results)) {
		// NOTE(review): "zDate" is not among the selected columns above, so
		// $zDate is likely always null here — confirm the intended alias.
		$zDate = $ligne["zDate"];
		$PERCENT = $ligne["PERCENT"];
		if (!is_numeric($PERCENT)) {
			continue;
		}
		$RESPONSE = $ligne["RESPONSE"];
		$DNS = $ligne["DNS"];
		$f[] = "('{$zDate}','{$DNS}','{$RESPONSE}','{$PERCENT}')";
	}
	if (count($f) > 0) {
		print_r($f);
		$q->QUERY_SQL("INSERT INTO dashboard_dnsperf_day (time,dns,response,percent) \n\t\t\t\tVALUES " . @implode(",", $f), "artica_events");
	}
	// Monthly aggregation: daily means over the last 30 days from InfluxDB.
	$hostname = $unix->hostname_g();
	$now = date("Y-m-d H:i:s", strtotime("-30 day"));
	$influx = new influx();
	$sql = "SELECT MEAN(PERCENT) as PERCENT,MEAN(RESPONSE) as RESPONSE,\n\tDNS FROM dnsperfs where PROXYNAME='{$hostname}' and time > '{$now}' GROUP BY DNS,time(1d) ORDER BY ASC";
	$main = $influx->QUERY_SQL($sql);
	$f = array();
	foreach ($main as $row) {
		$time = InfluxToTime($row->time);
		if (!is_numeric($row->PERCENT)) {
			continue;
		}
		$PERCENT = $row->PERCENT;
		$RESPONSE = $row->RESPONSE;
		$DNS = $row->DNS;
		$zDate = date("Y-m-d", $time);
		$f[] = "('{$zDate}','{$DNS}','{$RESPONSE}','{$PERCENT}')";
	}
	$sql = "SELECT AVG(PERCENT) as PERCENT FROM dashboard_dnsperf_day";
	// NOTE(review): $q is a postgres_sql object here yet the row is fetched with
	// mysql_fetch_array — looks inconsistent; verify which backend owns this table.
	$ligne = @mysql_fetch_array($q->QUERY_SQL($sql, "artica_events"));
	$sock = new sockets();
	$sock->SET_INFO("DashBoardDNSPerfsStats", $ligne["PERCENT"]);
	$q->QUERY_SQL("DROP TABLE dashboard_dnsperf_month", "artica_events");
	// FIX: the CREATE TABLE statement was missing its closing parenthesis.
	$q->QUERY_SQL("CREATE TABLE IF NOT EXISTS dashboard_dnsperf_month (`TIME` timestamp,`DNS` VARCHAR(128),`PERCENT` FLOAT(5),`RESPONSE` FLOAT(5))");
	$q->create_index("dashboard_dnsperf_month", "ikey", array("time", "dns", "percent", "response"));
	if (count($f) > 0) {
		print_r($f);
		$q->QUERY_SQL("INSERT INTO dashboard_dnsperf_month (`TIME`,`DNS`,`RESPONSE`,`PERCENT`)\n\t\t\t\tVALUES " . @implode(",", $f), "artica_events");
	}
}
/**
 * Build two MySQL working tables for a report: `{$md5}sites` (traffic per
 * website family per hour) and `{$md5}users` (traffic per user field value),
 * both fed from InfluxDB access_log over the report's FROM..TO window.
 *
 * @param array  $ligne Report row; $ligne["params"] is a serialized parameter array.
 * @param string $md5   Report identifier used to name the MySQL working tables.
 * @return bool  true on success, false when no data or a MySQL error occurred.
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	$influx = new influx();
	$from = InfluxQueryFromUTC($params["FROM"]);
	$to = InfluxQueryFromUTC($params["TO"]);
	$interval = $params["INTERVAL"];
	$user = $params["USER"];
	$md5_table = "{$md5}sites";
	echo "FLOW: FROM {$from} to {$to} {$interval} user:{$user}\n";
	$sql = "SELECT SIZE,FAMILYSITE FROM access_log WHERE time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "'";
	echo "{$sql}\n";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	$main = $influx->QUERY_SQL($sql);
	// Aggregate bytes per hour bucket and website family, skipping zero-byte rows.
	$MAIN_ARRAY = array();
	foreach ($main as $row) {
		$time = InfluxToTime($row->time);
		$SIZE = intval($row->SIZE);
		$FAMILYSITE = $row->FAMILYSITE;
		$Hour = date("Y-m-d H:00:00", $time);
		if ($SIZE == 0) {
			continue;
		}
		if (!isset($MAIN_ARRAY[$Hour][$FAMILYSITE])) {
			$MAIN_ARRAY[$Hour][$FAMILYSITE]["SIZE"] = $SIZE;
		} else {
			$MAIN_ARRAY[$Hour][$FAMILYSITE]["SIZE"] = $MAIN_ARRAY[$Hour][$FAMILYSITE]["SIZE"] + $SIZE;
		}
	}
	if (count($MAIN_ARRAY) == 0) {
		echo "MAIN_ARRAY is null....\n";
		return false;
	}
	echo "MAIN_ARRAY (1) = " . count($MAIN_ARRAY) . "\n";
	build_progress("{step} {insert_data}: MySQL engine, {please_wait}", 8);
	$f = array();
	$GLOBALS["CSV1"][] = array("Date", "Websites", "SizeBytes");
	$sql = "CREATE TABLE IF NOT EXISTS `{$md5}sites` \n\t(`zDate` DATETIME,`familysite` VARCHAR(128),`size` INT UNSIGNED NOT NULL DEFAULT 1,\n\tKEY `familysite`(`familysite`),\n\tKEY `zDate`(`zDate`),\n\tKEY `size`(`size`)\n\t) ENGINE = MYISAM;";
	$q = new mysql_squid_builder();
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo $q->mysql_error;
		REMOVE_TABLES($md5);
		return false;
	}
	// FIX: each() is removed in PHP 8, and $c was reset to 0 inside the inner
	// loop so "Websites {$c} items" always reported 0.
	$c = 0;
	foreach ($MAIN_ARRAY as $curhour => $array) {
		foreach ($array as $FAMILYSITE => $Tarray) {
			$SIZE = $Tarray["SIZE"];
			$c++;
			$f[] = "('{$curhour}','{$FAMILYSITE}','{$SIZE}')";
			$GLOBALS["CSV1"][] = array($curhour, $FAMILYSITE, $SIZE);
			// Flush in batches of ~500 rows to keep INSERT statements small.
			if (count($f) > 500) {
				$q->QUERY_SQL("INSERT IGNORE INTO `{$md5}sites` (zDate,familysite,size) VALUES " . @implode(",", $f));
				if (!$q->ok) {
					echo $q->mysql_error;
					REMOVE_TABLES($md5);
					return false;
				}
				$f = array();
			}
		}
	}
	if (count($f) > 0) {
		$q->QUERY_SQL("INSERT IGNORE INTO `{$md5}sites` (zDate,familysite,size) VALUES " . @implode(",", $f));
		$f = array();
	}
	echo "Websites {$c} items inserted to MySQL\n";
	// Second pass: per-user aggregation into `{$md5}users`.
	$sql = "CREATE TABLE IF NOT EXISTS `{$md5}users`\n\t(`user` VARCHAR(128),`size` INT UNSIGNED NOT NULL DEFAULT 1, KEY `user`(`user`), KEY `size`(`size`)\n\t) ENGINE = MYISAM;";
	$q = new mysql_squid_builder();
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo $q->mysql_error;
		REMOVE_TABLES($md5);
		return false;
	}
	$sql = "SELECT SIZE,{$user} FROM access_log WHERE time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "'";
	echo "{$sql}\n";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 8);
	$main = $influx->QUERY_SQL($sql);
	$MAIN_ARRAY = array();
	$c = 0;
	foreach ($main as $row) {
		$SIZE = intval($row->SIZE);
		$USER = $row->{$user};
		if ($SIZE == 0) {
			continue;
		}
		if (!isset($MAIN_ARRAY[$USER])) {
			$MAIN_ARRAY[$USER] = $SIZE;
		} else {
			$MAIN_ARRAY[$USER] = $MAIN_ARRAY[$USER] + $SIZE;
		}
	}
	echo "MAIN_ARRAY (2) = " . count($MAIN_ARRAY) . "\n";
	$c = 0;
	$GLOBALS["CSV2"][] = array("member", "SizeBytes");
	foreach ($MAIN_ARRAY as $USER => $SIZE) {
		$GLOBALS["CSV2"][] = array($USER, $SIZE);
		$f[] = "('{$USER}','{$SIZE}')";
		$c++;
		if (count($f) > 500) {
			$q->QUERY_SQL("INSERT IGNORE INTO `{$md5}users` (user,size) VALUES " . @implode(",", $f));
			$f = array();
		}
	}
	if (count($f) > 0) {
		$q->QUERY_SQL("INSERT IGNORE INTO `{$md5}users` (user,size) VALUES " . @implode(",", $f));
		$f = array();
	}
	echo "Members {$c} items inserted to MySQL\n";
	return true;
}
/**
 * Render the "proxy follower" HTML view: aggregates the 5 minutes of
 * access_log entries preceding the newest record, grouped per hour bucket and
 * per (user, IP, MAC), then echoes an HTML table plus an auto-refresh script.
 */
function xqueries() {
	$page = CurrentPageName();
	$tpl = new templates();
	$influx = new influx();
	// Locate the most recent access_log entry; the view covers the 5 minutes before it.
	$sql = "SELECT MAX(ZDATE) AS MAX FROM access_log";
	$main = $influx->QUERY_SQL($sql);
	$MAX = $main[0]->MAX;
	$LastEntry = $tpl->time_to_date($MAX, true);
	if ($GLOBALS["VERBOSE"]) {
		echo "<p style='color:blue'>{$MAX} -> {$LastEntry}</p>";
	}
	$from_gmt = $tpl->time_to_date($MAX - 300, true);
	$from = QueryToUTC($MAX - 300);
	$fromTime = date("Y-m-d H:i:s", $from);
	$ToTime = date("Y-m-d H:i:s", QueryToUTC($MAX));
	$sql = "SELECT * from access_log WHERE time > '{$fromTime}' AND time < '{$ToTime}'";
	//echo "<hr>$sql</HR>";
	$main = null;
	$influx2 = new influx();
	$QUERY2 = $influx2->QUERY_SQL($sql);
	$color = null;
	$ipClass = new IP();
	$q = new mysql_squid_builder();
	$c = 0;
	$D = 0;
	// FIX: $MAIN/$html were never initialized (PHP notices, and each() — removed
	// in PHP 8 — was later called on the possibly-undefined $MAIN).
	$MAIN = array();
	$html = array();
	foreach ($QUERY2 as $row) {
		$USER = trim($row->USERID);
		$IPADDR = trim($row->IPADDR);
		$MAC = trim($row->MAC);
		if ($row->SIZE == 0) {
			continue;
		}
		// Skip numeric-only user IDs.
		if (is_numeric($USER)) {
			continue;
		}
		$RQS = $row->RQS;
		$time = InfluxToTime($row->time);
		$DATEKEY = date("H:00", $time);
		$KEYMD5 = md5("{$USER}{$IPADDR}{$MAC}");
		$c = $c + $RQS;
		$D = $D + $row->SIZE;
		if (!isset($MAIN[$DATEKEY][$KEYMD5])) {
			$MAIN[$DATEKEY][$KEYMD5]["USER"] = $USER;
			$MAIN[$DATEKEY][$KEYMD5]["IPADDR"] = $IPADDR;
			$MAIN[$DATEKEY][$KEYMD5]["MAC"] = $MAC;
			$MAIN[$DATEKEY][$KEYMD5]["SIZE"] = $row->SIZE;
			$MAIN[$DATEKEY][$KEYMD5]["RQS"] = $RQS;
		} else {
			$MAIN[$DATEKEY][$KEYMD5]["SIZE"] = $MAIN[$DATEKEY][$KEYMD5]["SIZE"] + $row->SIZE;
			$MAIN[$DATEKEY][$KEYMD5]["RQS"] = $MAIN[$DATEKEY][$KEYMD5]["RQS"] + $RQS;
		}
	}
	$D = FormatBytes($D / 1024);
	$requests = $tpl->javascript_parse_text("{requests}");
	$last_entry_on = $tpl->javascript_parse_text("{last_entry_on}");
	$since = $tpl->_ENGINE_parse_body("{since}");
	$html[] = "\n\t\n\t<div style='width:98%' class=form>\n\t<div style='margin-top:5px;font-size:16px;text-align:right;margin-bottom:15px;font-weight:bold'>\n\t\t{$since} 5mn ({$c} {$requests} / {$D}) UTC:" . $tpl->time_to_date($from, true) . " - GMT {$from_gmt} / {$last_entry_on}: {$LastEntry}</div>";
	$html[] = "\n\t\t\n\t<table style='width:100%'>";
	$html[] = $tpl->_ENGINE_parse_body("<tr>\n\t\t\t<th style='font-size:18px'>{time}</th>\n\t\t\t<th style='font-size:18px'>{MAC}</th>\n\t\t\t<th style='font-size:18px'>{ipaddr}</th>\n\t\t\t<th style='font-size:18px'>{uid}</th>\n\t\t\t<th style='font-size:18px'>{requests}</th>\n\t\t\t<th style='font-size:18px'>{size}</th>\n\t\t\t</tr>\n\t\t\t");
	foreach ($MAIN as $time => $SUBARRAY) {
		foreach ($SUBARRAY as $KEYMD5 => $BIGARRAY) {
			// Alternate row background color between grey and default.
			if ($color == null) {
				$color = "#F2F0F1";
			} else {
				$color = null;
			}
			$MAC = $BIGARRAY["MAC"];
			$RQS = $BIGARRAY["RQS"];
			$SIZE = $BIGARRAY["SIZE"];
			$USER = $BIGARRAY["USER"];
			$IPADDR = $BIGARRAY["IPADDR"];
			$MAC_link = null;
			if ($SIZE > 1024) {
				$size = FormatBytes($SIZE / 1024);
			} else {
				$size = "{$SIZE}Bytes";
			}
			$RQS = FormatNumber($RQS);
			if ($ipClass->IsvalidMAC($MAC)) {
				$MAC_link = "<a href=\"javascript:blur();\"\n\t\t\tOnClick=\"javascript:Loadjs('squid.nodes.php?node-infos-js=yes&MAC=" . urlencode($MAC) . "');\"\n\t\t\tstyle='font-size:16px;text-decoration:underline;font-weight:bold'>\t\t\n\t\t\t";
				// Fall back to the MAC-to-uid mapping when no user is recorded.
				if (trim($USER) == null) {
					$USER = $q->MacToUid($MAC);
				}
			}
			$html[] = "<tr style='background-color:{$color}'>";
			$html[] = "<td style='font-size:16px;width:50px;padding:10px;font-weight:bold'>{$time}</td>";
			$html[] = "<td style='font-size:16px;width:50px;padding:10px;font-weight:bold'>{$MAC_link}{$MAC}</a></td>";
			$html[] = "<td style='font-size:16px;width:50px;padding:10px;font-weight:bold'>{$IPADDR}</td>";
			$html[] = "<td style='font-size:16px;width:50px;padding:10px;font-weight:bold'>{$USER}</td>";
			$html[] = "<td style='font-size:16px;width:50px;text-align:right;padding:10px' nowrap>{$RQS}</td>";
			$html[] = "<td style='font-size:16px;width:50px;text-align:right;padding:10px' nowrap>{$size}</td>";
			$html[] = "</tr>";
		}
	}
	$html[] = "</table>";
	$html[] = "</div>";
	$html[] = "\n\t<script>\n\t\tfunction FollowerRefresh(){\n\t\t\tif(!document.getElementById('ZRTRQUESTS_COMPTER')){ return;}\n\t\t\tvar compter=parseInt(document.getElementById('ZRTRQUESTS_COMPTER').value);\n\t\t\tif(compter<10){\n\t\t\t\tcompter=compter+1;\n\t\t\t\tdocument.getElementById('ZRTRQUESTS_COMPTER').value=compter;\n\t\t\t\tsetTimeout(\"FollowerRefresh()\",1000);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t\n\t\t\tdocument.getElementById('ZRTRQUESTS_COMPTER').value=0;\n\t\t\tif(!document.getElementById('proxy-follower-table')){ return;}\n\t\t\tLoadAjaxSilent('proxy-follower-table','{$page}?follow=yes&t={$_GET["t"]}');\n\t\t}\n\t\t\t\n\t\t\t\n\tsetTimeout(\"FollowerRefresh()\",1000);\n\t</script>";
	echo @implode("\n", $html);
}
/**
 * Build a PostgreSQL working table `{$md5}report` with per-hour sums from the
 * maillog table, filtered by the report's FROM..TO window and optional search.
 *
 * @param array  $ligne Report row; $ligne["params"] is a serialized parameter array.
 * @param string $md5   Report identifier used to name the PostgreSQL working table.
 * @return bool  true when rows were inserted, false on error or empty result.
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	$influx = new influx();
	// Sanitize the time window: anything before 2008 is considered garbage and
	// is replaced by "today 00:00" (FROM) / "now" (TO).
	$mintime = strtotime("2008-01-01 00:00:00");
	$params["TO"] = intval($params["TO"]);
	$params["FROM"] = abs(intval($params["FROM"]));
	if ($params["FROM"] < $mintime) {
		$params["FROM"] = strtotime(date("Y-m-d 00:00:00"));
	}
	$params["TO"] = intval($params["TO"]);
	if ($params["TO"] < $mintime) {
		$params["TO"] = time();
	}
	$influx = new influx();
	$to = $params["TO"];
	$from = $params["FROM"];
	$interval = $params["INTERVAL"];
	$user = $params["USER"];
	$md5_table = "{$md5}report";
	$search = $params["SEARCH"];
	$USER_FIELD = strtolower($params["USER"]);
	echo "FLOW: FROM {$from} to {$to} {$interval} user:{$user} {$search}\n";
	if ($search == "*") {
		$search = null;
	}
	// FIX: $SSEARCH was previously undefined when no search filter was given.
	$SSEARCH = null;
	if ($search != null) {
		// Convert shell-style wildcards to a regex for the Postgres "~*" operator.
		// NOTE(review): $search is interpolated into SQL unescaped — injection
		// risk if the stored search string can be attacker-controlled.
		$search = str_replace("*", ".*", $search);
		$SSEARCH = " ({$USER_FIELD} ~* '{$search}') AND ";
	}
	$sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\"\n\t(zDate timestamp,\n\t{$USER_FIELD} VARCHAR(128),\n\tsize BIGINT)";
	echo "TEMP:\n{$sql}\n";
	$q = new postgres_sql();
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo "***************\n{$q->mysql_error}\n***************\n";
		return false;
	}
	$TimeGroup = "date_trunc('hour', zdate) as zdate";
	$distance = $influx->DistanceHour($from, $to);
	echo "Distance: {$distance} hours\n";
	$FilterDate = "(zdate >='" . date("Y-m-d H:i:s", $from) . "' and zdate <= '" . date("Y-m-d H:i:s", $to) . "')";
	$sql = "SELECT SUM(size) as size,{$TimeGroup},{$USER_FIELD} FROM maillog\n\tWHERE {$SSEARCH} {$FilterDate}\n\tGROUP BY zdate, {$USER_FIELD}";
	$q->QUERY_SQL("TRUNCATE TABLE \"{$md5}report\"");
	$q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
	$q->QUERY_SQL("create index {$USER_FIELD}{$md5}report on \"{$md5}report\"({$USER_FIELD});");
	// Fill the working table directly with an INSERT ... SELECT.
	$sql = "INSERT INTO \"{$md5}report\" (size,zdate,{$USER_FIELD}) {$sql}";
	echo "{$sql}\n";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	$postgres = new postgres_sql();
	$results = $postgres->QUERY_SQL($sql);
	if (!$postgres->ok) {
		echo "ERROR.....\n";
		echo "***************\n{$postgres->mysql_error}\n***************\n";
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	$sql = "SELECT COUNT(*) AS tcount FROM \"{$md5}report\"";
	$ligne = pg_fetch_assoc($postgres->QUERY_SQL($sql));
	$total = intval($ligne["tcount"]);
	echo "Members {$total} items inserted to PostGreSQL\n";
	if ($total == 0) {
		// An empty report table is useless: clean it up and report failure.
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	return true;
}
/**
 * Flush accumulated web-filter events to the statistics backend.
 * Targets PostgreSQL when the ArticaStats binary exists, otherwise builds
 * InfluxDB line-protocol entries and bulk-injects them. A copy of the payload
 * is always written to a backup file; on failure it goes to the failed dir.
 *
 * @param array $MEM Map of event-md5 => event fields (website, category, ...).
 * @return bool true on success (or nothing to do), false on injection failure.
 */
function UFDB_LOG_HOURLY_DUMP($MEM) {
	$AS_POSTGRES = false;
	$suffix = "influx";
	if (is_file("/usr/local/ArticaStats/bin/postgres")) {
		$AS_POSTGRES = true;
		$suffix = "postgres";
	}
	events("Dumping " . count($MEM) . " entries");
	$q = new influx();
	$PROXYNAME = $GLOBALS["MYHOSTNAME_PROXY"];
	$prefix_sql = "(zDate,website,category,rulename,public_ip,blocktype,why,hostname,client,PROXYNAME,rqs)";
	// FIX: $FINAL was never initialized; each() is removed in PHP 8.
	$FINAL = array();
	foreach ($MEM as $KEYMD5 => $subarray) {
		$website = $subarray["website"];
		$category = $subarray["category"];
		$rulename = $subarray["rulename"];
		$public_ip = $subarray["public_ip"];
		$blocktype = $subarray["blocktype"];
		$why = $subarray["why"];
		$hostname = $subarray["hostname"];
		$client = $subarray["client"];
		$zDate = date("Y-m-d H:i:s", $subarray["ZDATE"]);
		$RQS = $subarray["RQS"];
		if ($AS_POSTGRES) {
			// PostgreSQL path: accumulate VALUES tuples for one bulk INSERT.
			$FINAL[] = "('{$zDate}','{$website}','{$category}','{$rulename}','{$public_ip}','{$blocktype}','{$why}','{$hostname}','{$client}','{$PROXYNAME}','{$RQS}')";
			continue;
		}
		// Influx path: build one measurement entry per event.
		$array = array();
		$array["precision"] = "s";
		$array["time"] = $subarray["TIME"];
		$array["tags"]["uid"] = $subarray["uid"];
		$array["fields"]["TIME"] = $subarray["ZDATE"];
		$array["fields"]["RQS"] = $subarray["RQS"];
		$array["tags"]["category"] = $subarray["category"];
		$array["tags"]["rulename"] = $subarray["rulename"];
		$array["tags"]["public_ip"] = $subarray["public_ip"];
		$array["tags"]["blocktype"] = $subarray["blocktype"];
		$array["tags"]["why"] = $subarray["why"];
		$array["tags"]["hostname"] = $subarray["hostname"];
		$array["tags"]["website"] = $subarray["website"];
		$array["tags"]["client"] = $subarray["client"];
		$line = $q->prepare("webfilter", $array);
		if ($GLOBALS["VERBOSE"]) {
			echo "{$line}\n";
		}
		$FINAL[] = $line;
	}
	if (count($FINAL) > 0) {
		$backupdir = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-backup";
		$faildir = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-failed";
		@mkdir($faildir, 0755, true);
		@mkdir($backupdir, 0755, true);
		$backupfile = "{$backupdir}/" . time() . ".{$suffix}.log";
		$failedPath = "{$faildir}/" . time() . ".{$suffix}.log";
		if ($AS_POSTGRES) {
			$sql = "INSERT INTO webfilter {$prefix_sql} VALUES " . @implode(",", $FINAL);
			$q = new postgres_sql();
			$q->QUERY_SQL($sql);
			if (!$q->ok) {
				events("WEBFILTERING INJECTION Failed: backup to {$failedPath} ({$q->mysql_error})");
				// FIX: implode("\n", $sql) on a string was invalid and wrote
				// nothing useful; store the failed statement itself.
				@file_put_contents($failedPath, $sql);
				return false;
			}
		}
		if (!$AS_POSTGRES) {
			if (!$q->bulk_inject($FINAL)) {
				events("WEBFILTERING INJECTION Failed: backup to {$failedPath} ({$q->curl_error})");
				@file_put_contents($failedPath, @implode("\n", $FINAL));
				return false;
			}
		}
		events("WEBFILTERING INJECTION Success: backup to {$backupfile}");
		@file_put_contents($backupfile, @implode("\n", $FINAL));
		$FINAL = array();
	}
	return true;
}
/**
 * Build serialized graph caches for CPU, load average and memory usage:
 * one set over the last 24 hours (10-minute means, *H.db files) and one over
 * the last 7 days / 168 hours (hourly means). Data comes from the Influx
 * SYSTEM series for this host; files are skipped when fewer than 2 points.
 */
function cpustats() {
	$xdata = array();
	$ydata = array();
	// FIX: the load-average and memory series were never initialized before the
	// first accumulation loop (PHP notices on first append).
	$ydataL = array();
	$ydataM = array();
	$unix = new unix();
	$hostname = $unix->hostname_g();
	$filecache = dirname(__FILE__) . "/ressources/logs/web/cpustatsH.db";
	$filecache_load = dirname(__FILE__) . "/ressources/logs/web/INTERFACE_LOAD_AVGH.db";
	$filecache_mem = dirname(__FILE__) . "/ressources/logs/web/INTERFACE_LOAD_AVG2H.db";
	$now = InfluxQueryFromUTC(strtotime("-24 hour"));
	$influx = new influx();
	$sql = "SELECT MEAN(CPU_STATS) as cpu,MEAN(LOAD_AVG) as load,MEAN(MEM_STATS) as memory FROM SYSTEM where proxyname='{$hostname}' and time > {$now}s GROUP BY time(10m) ORDER BY ASC";
	if ($GLOBALS["VERBOSE"]) {
		echo "{$sql}\n";
	}
	$main = $influx->QUERY_SQL($sql);
	foreach ($main as $row) {
		$time = InfluxToTime($row->time);
		// Skip buckets with no data (non-numeric means).
		if (!is_numeric($row->cpu)) {
			continue;
		}
		if (!is_numeric($row->load)) {
			continue;
		}
		$min = date("l H:i", $time) . "mn";
		$xdata[] = $min;
		$ydata[] = round($row->cpu, 2);
		$ydataL[] = round($row->load, 2);
		$ydataM[] = round($row->memory, 2);
		if ($GLOBALS["VERBOSE"]) {
			echo "{$min} -> {$row->cpu} | {$row->load} | {$row->memory}\n";
		}
	}
	if (count($xdata) > 1) {
		$ARRAY = array($xdata, $ydata);
		$ARRAYL = array($xdata, $ydataL);
		$ARRAYM = array($xdata, $ydataM);
		if ($GLOBALS["VERBOSE"]) {
			echo "-> {$filecache}\n";
		}
		@file_put_contents($filecache, serialize($ARRAY));
		@file_put_contents($filecache_load, serialize($ARRAYL));
		@file_put_contents($filecache_mem, serialize($ARRAYM));
		@chmod($filecache, 0755);
		@chmod($filecache_load, 0755);
		@chmod($filecache_mem, 0755);
	}
	// Second pass: weekly view (168 hours, 1-hour buckets).
	$xdata = array();
	$ydata = array();
	$ydataL = array();
	$ydataM = array();
	$filecache = dirname(__FILE__) . "/ressources/logs/web/cpustats.db";
	$filecache_load = dirname(__FILE__) . "/ressources/logs/web/INTERFACE_LOAD_AVG.db";
	$filecache_mem = dirname(__FILE__) . "/ressources/logs/web/INTERFACE_LOAD_AVG2.db";
	$now = InfluxQueryFromUTC(strtotime("-168 hour"));
	$influx = new influx();
	$sql = "SELECT MEAN(CPU_STATS) as cpu,MEAN(LOAD_AVG) as load,MEAN(MEM_STATS) as memory FROM SYSTEM where proxyname='{$hostname}' and time > {$now}s GROUP BY time(1h) ORDER BY ASC";
	if ($GLOBALS["VERBOSE"]) {
		echo "{$sql}\n";
	}
	$main = $influx->QUERY_SQL($sql);
	foreach ($main as $row) {
		$time = InfluxToTime($row->time);
		if (!is_numeric($row->cpu)) {
			continue;
		}
		if (!is_numeric($row->load)) {
			continue;
		}
		$min = date("l H:00", $time);
		$xdata[] = $min;
		$ydata[] = round($row->cpu, 2);
		$ydataL[] = round($row->load, 2);
		// Memory is divided by 1024 in the weekly view (unlike the 24h view).
		$ydataM[] = round($row->memory / 1024, 2);
		if ($GLOBALS["VERBOSE"]) {
			echo "{$min} -> {$row->cpu} | {$row->load} | {$row->memory}\n";
		}
	}
	if (count($xdata) > 1) {
		$ARRAY = array($xdata, $ydata);
		$ARRAYL = array($xdata, $ydataL);
		$ARRAYM = array($xdata, $ydataM);
		if ($GLOBALS["VERBOSE"]) {
			echo "-> {$filecache}\n";
		}
		@file_put_contents($filecache, serialize($ARRAY));
		@file_put_contents($filecache_load, serialize($ARRAYL));
		@file_put_contents($filecache_mem, serialize($ARRAYM));
		@chmod($filecache, 0755);
		@chmod($filecache_load, 0755);
		@chmod($filecache_mem, 0755);
	}
}
/**
 * Build a PostgreSQL working table `{$md5}report` for a category report:
 * sums size/rqs from access_log (or the month/year rollup tables depending on
 * the window length) grouped per time bucket, site, category, MAC, IP and user.
 *
 * @param array  $ligne Report row; $ligne["params"] is a serialized parameter array.
 * @param string $md5   Report identifier used to name the PostgreSQL working table.
 * @return bool  true when rows were inserted, false on error or empty result.
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	// FIX: each() is removed in PHP 8.
	foreach ($params as $num => $val) {
		echo "{$num}........: {$val}\n";
	}
	$influx = new influx();
	// Sanitize the time window: anything before 2008 is considered garbage and
	// is replaced by "today 00:00" (FROM) / "now" (TO).
	$mintime = strtotime("2008-01-01 00:00:00");
	$params["TO"] = intval($params["TO"]);
	$params["FROM"] = abs(intval($params["FROM"]));
	if ($params["FROM"] < $mintime) {
		$params["FROM"] = strtotime(date("Y-m-d 00:00:00"));
	}
	$params["TO"] = intval($params["TO"]);
	if ($params["TO"] < $mintime) {
		$params["TO"] = time();
	}
	$influx = new influx();
	$from = $params["FROM"];
	$to = $params["TO"];
	$interval = $params["INTERVAL"];
	$USER_FIELD = strtolower($params["USER"]);
	$SEARCH = $params["SEARCH"];
	$sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\"\n\t(zdate timestamp,\n\tmac macaddr,\n\tipaddr INET,\n\tuserid VARCHAR(64) NULL,\n\tcategory VARCHAR(64) NULL,\n\tfamilysite VARCHAR(128) NULL,\n\tsize BIGINT,\n\trqs BIGINT)";
	$q = new postgres_sql();
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo "********** FAILED **********\n";
		echo $q->mysql_error . "\n";
		build_progress("{step} {insert_data}: PostreSQL engine, {failed}", 110);
		return false;
	}
	$q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
	$q->QUERY_SQL("create index familysite{$md5}report on \"{$md5}report\"(familysite);");
	$q->QUERY_SQL("create index user{$md5}report on \"{$md5}report\"(ipaddr,userid,familysite);");
	$TimeGroup = "zdate";
	$distance = $influx->DistanceHour($from, $to);
	echo "Distance: {$distance} hours\n";
	// Windows longer than 4 hours are bucketed per hour.
	if ($distance > 4) {
		$TimeGroup = "date_trunc('hour', zdate) as zdate";
	}
	// This report always filters on the category column.
	$USER_FIELD = "category";
	if ($SEARCH == "unknown") {
		$SEARCH = null;
	}
	// NOTE(review): $SEARCH is interpolated into SQL unescaped — injection risk
	// if the stored search string can be attacker-controlled.
	$sqlA = array();
	$sqlA[] = "SELECT SUM(size) as size, SUM(rqs) as RQS,{$TimeGroup},FAMILYSITE, CATEGORY, MAC, IPADDR, USERID FROM access_log";
	$sqlA[] = "WHERE {$USER_FIELD}='{$SEARCH}' and (zDate >'" . date("Y-m-d H:i:s", $from) . "'";
	$sqlA[] = "and zDate < '" . date("Y-m-d H:i:s", $to) . "')";
	$sqlA[] = "GROUP BY zdate, FAMILYSITE, CATEGORY, MAC, IPADDR, USERID";
	// Wider windows switch to the pre-aggregated month/year rollup tables.
	if ($distance > 23) {
		$sqlA = array();
		echo "Distance: {$distance} hours: Use the Month table\n";
		$sqlA[] = "SELECT SUM(SIZE) as size, SUM(RQS) as rqs,zdate,familysite, category, mac, ipaddr, userid FROM access_month";
		$sqlA[] = "WHERE {$USER_FIELD}='{$SEARCH}' and (zDate >='" . date("Y-m-d H:i:s", $from) . "'";
		$sqlA[] = "and zDate <= '" . date("Y-m-d H:i:s", $to) . "')";
		$sqlA[] = "GROUP BY zdate, FAMILYSITE, CATEGORY, MAC, IPADDR, USERID";
	}
	if ($distance > 720) {
		$sqlA = array();
		echo "Distance: {$distance} hours: Use the Year table\n";
		$sqlA[] = "SELECT SUM(SIZE) as size, SUM(RQS) as rqs,zdate,familysite, category, mac, ipaddr, userid FROM access_year";
		$sqlA[] = "WHERE {$USER_FIELD}='{$SEARCH}' and (zDate >='" . date("Y-m-d H:i:s", $from) . "'";
		$sqlA[] = "and zDate <= '" . date("Y-m-d H:i:s", $to) . "')";
		$sqlA[] = "GROUP BY zdate, FAMILYSITE, CATEGORY, MAC, IPADDR, USERID";
	}
	$sql = @implode(" ", $sqlA);
	// Fill the working table directly with an INSERT ... SELECT.
	$sql = "INSERT INTO \"{$md5}report\" (size,rqs,zdate,familysite, category, mac, ipaddr, userid) {$sql}";
	echo "{$sql}\n";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		// FIX: the error path referenced an undefined $postgres object; the
		// query ran on $q, so report $q's error.
		echo "***************\n{$q->mysql_error}\n***************\n";
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	$sql = "SELECT COUNT(*) AS tcount FROM \"{$md5}report\"";
	$ligne = pg_fetch_assoc($q->QUERY_SQL($sql));
	$total = intval($ligne["tcount"]);
	echo "Member {$total} items inserted to PostGreSQL\n";
	if ($total == 0) {
		// An empty report table is useless: clean it up and report failure.
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	return true;
}
/**
 * Snapshots the last seven days of per-interface RX/TX counters from InfluxDB
 * into the MySQL table RXTX_WEEK (database "artica_events").
 *
 * The existing snapshot is truncated and rebuilt on every run; the loopback
 * interface is skipped and nothing is written when InfluxDB returns no rows.
 */
function start_week() {
	$unix = new unix();
	$hostname = $unix->hostname_g();
	$now = InfluxQueryFromUTC(strtotime("-7 day"));
	$today = date("Y-m-d", $now) . " 00:00:00";
	$sql = "SELECT SUM(TX) as TX, SUM(RX) as RX,ETH FROM ethrxtx WHERE proxyname='{$hostname}' AND time >'{$today}' group by time(4h),ETH";
	$influx = new influx();
	echo "{$sql}\n";
	$results = $influx->QUERY_SQL($sql);
	$c = 0;
	$tuples = array();
	// Build one VALUES tuple per 4-hour sample, ignoring the loopback interface.
	foreach ($results as $record) {
		$sampleTime = date("Y-m-d H:i:s", InfluxToTime($record->time));
		$iface = $record->ETH;
		if ($iface == "lo") { continue; }
		$rx = $record->RX;
		$tx = $record->TX;
		$tuples[] = "('{$sampleTime}','{$iface}','{$rx}','{$tx}')";
	}
	// Nothing to store: leave the previous snapshot untouched.
	if (count($tuples) == 0) { return; }
	$mysql = new mysql();
	// Wipe the previous week's snapshot before re-inserting.
	if ($mysql->TABLE_EXISTS("RXTX_WEEK", "artica_events")) {
		$mysql->QUERY_SQL("TRUNCATE TABLE `RXTX_WEEK`", "artica_events");
	}
	$sql = "CREATE TABLE IF NOT EXISTS `RXTX_WEEK`\n\t(`ZDATE` DATETIME,\n\t`RX` INT UNSIGNED NOT NULL DEFAULT 1,\n\t`TX` INT UNSIGNED NOT NULL DEFAULT 1,\n\t`ETH` VARCHAR(60),\n\tKEY `ZDATE`(`ZDATE`),\n\tKEY `RX`(`RX`),\n\tKEY `TX`(`TX`),\n\tKEY `ETH`(`ETH`) ) ENGINE = MYISAM;";
	$mysql->QUERY_SQL($sql, "artica_events");
	if (!$mysql->ok) { return; }
	$mysql->QUERY_SQL("INSERT IGNORE INTO RXTX_WEEK (ZDATE,ETH,RX,TX) VALUES " . @implode(",", $tuples), "artica_events");
}
/**
 * Builds the per-report PostgreSQL working table "{$md5}report" and fills it
 * with Suricata IDS events (hourly-aggregated hit counts grouped by source IP,
 * destination IP/port, protocol, severity and signature) for the time range
 * carried in $ligne["params"].
 *
 * @param array  $ligne row holding a serialized "params" array
 *                      (FROM, TO, INTERVAL, USER, searchsites, searchuser)
 * @param string $md5   report identifier; used to name the working table
 * @return bool         true when at least one row landed in the table
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	$influx = new influx();
	// Timestamps older than 2008 are treated as invalid and replaced:
	// FROM falls back to today's midnight, TO falls back to "now".
	$mintime = strtotime("2008-01-01 00:00:00");
	$params["TO"] = intval($params["TO"]);
	$params["FROM"] = abs(intval($params["FROM"]));
	if ($params["FROM"] < $mintime) { $params["FROM"] = strtotime(date("Y-m-d 00:00:00")); }
	$params["TO"] = intval($params["TO"]);
	if ($params["TO"] < $mintime) { $params["TO"] = time(); }
	$influx = new influx();
	$from = $params["FROM"];
	$to = $params["TO"];
	$interval = $params["INTERVAL"];
	$USER_FIELD = $params["USER"];
	$md5_table = md5(__FUNCTION__ . "." . "{$from}{$to}");
	// NOTE(review): $searchsites / $searchuser / $SSEARCH are normalized below
	// but never used by the query in this function — apparently dead code.
	$searchsites = trim($params["searchsites"]);
	$searchuser = trim($params["searchuser"]);
	$searchsites_sql = null;
	$searchuser_sql = null;
	if ($searchsites == "*") { $searchsites = null; }
	if ($searchuser == "*") { $searchuser = null; }
	$SSEARCH = array();
	$distance = $influx->DistanceHour($from, $to);
	echo "Distance: {$distance} hours\n";
	// Events are always truncated to the hour regardless of range width.
	$TimeGroup = "date_trunc('hour', zdate) as zdate";
	$SQLA[] = "SELECT SUM(xcount) as xcount,{$TimeGroup},src_ip,dst_ip,dst_port,proto,severity,signature FROM suricata_events";
	$SQLA[] = "WHERE";
	$SQLA[] = "(zdate >='" . date("Y-m-d H:i:s", $from) . "' and zdate <= '" . date("Y-m-d H:i:s", $to) . "')";
	$SQLA[] = "GROUP BY zdate, src_ip,dst_ip,dst_port,proto,severity,signature";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	// NOTE(review): $hostname is computed but never used below — confirm.
	$unix = new unix();
	$hostname = $unix->hostname_g();
	$sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\" (\n\t\tzDate timestamp,\n\t\tsrc_ip inet,\n\t\tdst_ip inet,\n\t\tdst_port smallint NOT NULL,\n\t\tproto varchar(10) NOT NULL,\n\t\tseverity smallint NOT NULL,\n\t\tsignature BIGINT,\n\t\txcount BIGINT )";
	$q = new postgres_sql();
	$q->QUERY_SQL($sql);
	if (!$q->ok) { echo "***************\n{$q->mysql_error}\n***************\n"; return false; }
	$q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
	$q->QUERY_SQL("create index src_ip{$md5}report on \"{$md5}report\"(src_ip);");
	$q->QUERY_SQL("create index dst_ip{$md5}report on \"{$md5}report\"(dst_ip);");
	// Start from an empty working table even if it already existed.
	$q->QUERY_SQL("TRUNCATE TABLE \"{$md5}report\"");
	// Single round-trip: INSERT ... SELECT straight into the working table.
	$sql = @implode(" ", $SQLA);
	$sql = "INSERT INTO \"{$md5}report\" (xcount,zdate,src_ip,dst_ip,dst_port,proto,severity,signature) {$sql}";
	echo "***************\n{$sql}\n*****************\n";
	$q->QUERY_SQL($sql);
	if (!$q->ok) { echo "***************\nERROR {$q->mysql_error}\n***************\n"; $q->QUERY_SQL("DROP TABLE \"{$md5}report\""); return false; }
	$ligne = pg_fetch_assoc($q->QUERY_SQL("SELECT COUNT(*) as tcount FROM \"{$md5}report\""));
	if (!$q->ok) { echo "***************\nERROR {$q->mysql_error}\n***************\n"; $q->QUERY_SQL("DROP TABLE \"{$md5}report\""); return false; }
	// An empty report is useless: drop the working table and signal failure.
	$c = $ligne["tcount"];
	if ($c == 0) { echo "No data....\n"; $q->QUERY_SQL("DROP TABLE \"{$md5}report\""); return false; }
	echo "{$c} items inserted to PostgreSQL\n";
	$MAIN_ARRAY = array();
	return true;
}
/**
 * Parses a single line of ufdbGuard (web-filtering daemon) log output and
 * reacts to it.
 *
 * Processing order matters:
 *  1. empty-line and duplicate-line suppression (md5 cache, capped at 1000);
 *  2. a long list of substring filters dropping known-noise messages;
 *  3. regex handlers for error/alert signatures: raise admin events
 *     (squid_admin_mysql), repair or recreate databases, tune the
 *     UfdbGuardThreads setting, restart/reload the ufdb service via shell_exec;
 *  4. "BLOCK" lines, decoded and stored into InfluxDB via insert_ufdb().
 * Lines matching nothing are reported through events("Not filtered: ...").
 *
 * @param string $buffer one raw log line
 * @return null|void null for empty input; no meaningful return otherwise
 */
function Parseline($buffer) { $buffer = trim($buffer); if ($buffer == null) { return null; }
	// Duplicate suppression: lines already seen are ignored; the md5 cache is
	// flushed once it grows past 1000 entries.
	$mdbuff = md5($buffer); if (isset($GLOBALS['MDBUFF'][$mdbuff])) { return; } $GLOBALS['MDBUFF'][$mdbuff] = true; if (count($GLOBALS['MDBUFF']) > 1000) { $GLOBALS['MDBUFF'] = array(); }
	// ---- Noise filters: known informational/startup/debug messages are dropped.
	// strpos(...) > 0 means "substring present, but not at position 0".
	if (strpos($buffer, "] PASS ") > 0) { return; } if (strpos($buffer, "UFDBinitHTTPSchecker") > 0) { return; } if (strpos($buffer, "IP socket port") > 0) { return; } if (strpos($buffer, "listening on interface") > 0) { return; } if (strpos($buffer, "yielding") > 0) { return; } if (strpos($buffer, "system:") > 0) { return; }
	if (strpos($buffer, "URL verification threads and") > 0) { return; } if (strpos($buffer, "worker threads") > 0) { return; } if (strpos($buffer, "license status") > 0) { return; } if (strpos($buffer, "redirect-fatal-error") > 0) { return; } if (strpos($buffer, "using OpenSSL library") > 0) { return; }
	if (strpos($buffer, "CA certificates are") > 0) { return; } if (strpos($buffer, "Failure to load the CA database") > 0) { return; } if (strpos($buffer, "CA file is") > 0) { return; } if (strpos($buffer, "ufdbHandleAlarmForTimeEvents") > 0) { return; } if (strpos($buffer, "Changing daemon status") > 0) { return; } if (strpos($buffer, "UFDBchangeStatus") > 0) { return; }
	if (strpos($buffer, "url-lookup-delay-during-database-reload") > 0) { return; } if (strpos($buffer, "url-lookup-result-during-database-reload") > 0) { return; } if (strpos($buffer, "url-lookup-result-when-fatal-error") > 0) { return; } if (strpos($buffer, "no http-server") > 0) { return; } if (strpos($buffer, "upload-stats") > 0) { return; }
	if (strpos($buffer, "analyse-uncategorised-urls") > 0) { return; } if (strpos($buffer, "redirect-loading-database") > 0) { return; } if (strpos($buffer, "ufdb-expression-debug") > 0) { return; } if (strpos($buffer, "ufdb-debug-filter") > 0) { return; } if (strpos($buffer, "database status: up to date") > 0) { return; }
	if (strpos($buffer, "ufdbGenTable should be called with 
the") > 0) { return; } if (strpos($buffer, "is deprecated and ignored") > 0) { return; } if (strpos($buffer, "init domainlist") > 0) { return; } if (strpos($buffer, "is empty !") > 0) { return; } if (strpos($buffer, "init expressionlist") > 0) { return; }
	if (strpos($buffer, "is optimised to one expression") > 0) { return; } if (strpos($buffer, "be analysed since there is no proper database") > 0) { return; } if (strpos($buffer, "REDIRECT 302") > 0) { return; } if (strpos($buffer, "close fd") > 0) { return; } if (strpos($buffer, ": open fd ") > 0) { return; }
	if (strpos($buffer, "acl {") > 0) { return; } if (strpos($buffer, "URL verifications") > 0) { return; } if (strpos($buffer, "must be part of the security") > 0) { return; } if (strpos($buffer, "}") > 0) { return; } if (strpos($buffer, "finished retrieving") > 0) { return; } if (strpos($buffer, "loading URL table from") > 0) { return; }
	if (strpos($buffer, "] option") > 0) { return; } if (strpos($buffer, "{") > 0) { return; } if (strpos($buffer, "] category \"") > 0) { return; } if (strpos($buffer, "] domainlist \"") > 0) { return; } if (strpos($buffer, "] pass ") > 0) { return; } if (strpos($buffer, "] safe-search") > 0) { return; }
	if (strpos($buffer, "configuration file") > 0) { return; } if (strpos($buffer, "refreshdomainlist") > 0) { return; } if (strpos($buffer, "software suite is free and Open Source Software") > 0) { return; } if (strpos($buffer, "by URLfilterDB") > 0) { return; } if (strpos($buffer, "] configuration status") > 0) { return; }
	if (strpos($buffer, 'expressionlist "') > 0) { return; } if (strpos($buffer, 'is newer than') > 0) { return; } if (strpos($buffer, 'source "') > 0) { return; } if (strpos($buffer, 'youtube-edufilter-id') > 0) { return; } if (trim($buffer) == null) { return; }
	if (strpos($buffer, 'max-logfile-size') > 0) { return; } if (strpos($buffer, 'check-proxy-tunnels') > 0) { return; } if (strpos($buffer, 'seconds to allow worker') > 0) { return; } if (strpos($buffer, '] 
loading URL category') > 0) { return; } if (preg_match("#\\] REDIR\\s+#", $buffer)) { return; } if (strpos($buffer, 'execdomainlist for') > 0) { return; } if (strpos($buffer, 'dynamic_domainlist_updater_main') > 0) { return; }
	// ---- Error/alert handlers (order-dependent). ----
	// Connection queue full: raise the thread count by 5 (capped at 128) and
	// force-restart the service; rate-limited through IfFileTime (5 min).
	if (preg_match("#FATAL ERROR: connection queue is full#", $buffer)) { $TimeFile = "/etc/artica-postfix/pids/webfiltering-connection.queue.full"; if (!IfFileTime($TimeFile, 5)) { return; } $Threads = intval(@file_get_contents("/etc/artica-postfix/settings/Daemons/UfdbGuardThreads")); $ThreadNew = $Threads + 5; if ($ThreadNew > 128) { $ThreadNew = 128; } squid_admin_mysql(0, "Webfiltering Service connection queue is full increase Threads from {$Threads} to {$ThreadNew} [action=restart]", $buffer, __FILE__, __LINE__); @file_put_contents("/etc/artica-postfix/settings/Daemons/UfdbGuardThreads", $ThreadNew); shell_exec("{$GLOBALS["nohup"]} /etc/init.d/ufdb restart --force >/dev/null 2>&1 &"); return; }
	// Lifecycle notifications (reload / stop / start) — notify only.
	if (stripos(" {$buffer}", "HUP signal received to reload the configuration") > 0) { squid_admin_mysql(1, "Webfiltering Service was reloaded - reloading databases [action=notify]", $buffer, __FILE__, __LINE__); events_ufdb_exec("Webfiltering Service was reloaded, wait 15 seconds"); return; }
	if (stripos(" {$buffer}", "ufdbGuard daemon stopped") > 0) { squid_admin_mysql(1, "Webfiltering Service was stopped [action=notify]", $buffer, __FILE__, __LINE__); events_ufdb_exec("Webfiltering Service was stopped, wait 15 seconds"); return; }
	if (stripos(" {$buffer}", 'Changing daemon status to "started"') > 0) { squid_admin_mysql(1, "Webfiltering Service was started [action=notify]", $buffer, __FILE__, __LINE__); events_ufdb_exec("Webfiltering Service was started, wait 15 seconds"); return; }
	// Daemon crash (signal 11) / error status: launch emergency scripts,
	// rate-limited to once per 5 minutes.
	if (preg_match("#thread socket-handler caught signal 11#", $buffer, $re)) { $TimeFile = "/etc/artica-postfix/pids/webfiltering-emergency"; if (!IfFileTime($TimeFile, 5)) { return; } squid_admin_mysql(0, "Webfiltering crash [action=Webfiltering Emergency]", $buffer, __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.squid.urgency.remove.php --ufdb-on >/dev/null 2>&1 &"); return; }
	if (preg_match("#Changing daemon status to \"error\"#", $buffer, $re)) { $TimeFile = "/etc/artica-postfix/pids/webfiltering-emergency"; if (!IfFileTime($TimeFile, 5)) { return; } squid_admin_mysql(0, "Webfiltering service error [action=Webfiltering Emergency]", $buffer, __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.ufdb.emergency.php --ufdb-on >/dev/null 2>&1 &"); return; }
	// Broken configuration file: force a restart of the tail/exec helper.
	if (preg_match("#FATAL ERROR: cannot open configuration file\\s+\\/etc\\/squid3\\/ufdbGuard\\.conf#i", $buffer, $re)) { squid_admin_mysql(0, "Webfiltering error, Open Configuration File failed [action=restart service]", $buffer, __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.ufdb.php --restart --force --ufdbtail --fatal-error >/dev/null 2>&1 &"); return; }
	// Database read failures: emergency script or full rebuild.
	if (preg_match("#FATAL.*?read failed on \"(.+?)\".*?Bad address#i", $buffer, $re)) { squid_admin_mysql(0, "Webfiltering service error on database: {$re[1]} [action=Webfiltering Emergency]", $buffer, __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.ufdb.emergency.php --ufdb-on >/dev/null 2>&1 &"); return; }
	if (preg_match("#FATAL ERROR: cannot read from.*?No such file or directory#", $buffer, $re)) { squid_admin_mysql(0, "Webfiltering error: a database is missing [action=reconfigure]", $buffer, __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.squidguard.php --build --force >/dev/null 2>&1 &"); return; }
	// No ACLs defined at all: trigger a full rebuild of the configuration.
	if (preg_match("#There are no sources and there is no default ACL#i", $buffer)) { events("Seems not to be defined -> build compilation."); xsyslog("{reconfigure} ufdb service..."); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} 
/usr/share/artica-postfix/exec.squidguard.php --build --force >/dev/null 2>&1 &"); return; }
	// PID file not writable: recreate the directory with squid ownership.
	if (preg_match("#ERROR: cannot write to PID file\\s+(.+)#i", $buffer, $re)) { xsyslog("Apply permissions on {$re[1]}"); $pidfile = $re[1]; $pidpath = dirname($pidfile); @mkdir($pidpath, 0755, true); @chown($pidpath, "squid"); @chmod($pidpath, 0755); return; }
	if (preg_match("#\\] Changing daemon status to.*?error#", $buffer, $re)) { squid_admin_mysql(0, "Fatal! Webfilter daemon is turned to error", $buffer, __FILE__, __LINE__); return; }
	if (preg_match("#\\] Changing daemon status to.*?terminated#", $buffer, $re)) { squid_admin_mysql(1, "Webfilter daemon is turned to OFF", $buffer, __FILE__, __LINE__); return; }
	// Out-of-memory on execdomainlist: disable the feature and rebuild.
	if (preg_match("#can't execute command of execdomainlist.*?popen failed: Cannot allocate memory#", $buffer, $re)) { @file_put_contents("/etc/artica-postfix/settings/Daemons/UfdbExecDomainList", 0); squid_admin_mysql(0, "Not Enough memory to use execdomainlist feature [action=reconfigure]", "{$buffer}\nexecdomainlist feature will be disabled..", __FILE__, __LINE__); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.squidguard.php --build --force >/dev/null 2>&1 &"); return; }
	// Corrupted table: delete the database directory and rebuild.
	if (preg_match('#FATAL ERROR: table "(.+?)"\\s+could not be parsed.*?error code = [0-9]+#', $buffer, $re)) { $direname = dirname($re[1]); squid_admin_mysql(0, "Database {$direname} corrupted", $buffer . "\nReconfigure ufdb service after removing {$direname}...", __FILE__, __LINE__); events("Webfiltering engine error on {$direname}"); if (!is_dir($direname)) { return; } shell_exec("{$GLOBALS["SBIN_RM"]} -rf {$direname} >/dev/null 2>&1"); xsyslog("{reconfigure} ufdb service after removing {$direname}..."); shell_exec("{$GLOBALS["nohup"]} {$GLOBALS["PHP5_BIN"]} /usr/share/artica-postfix/exec.squidguard.php --build --force >/dev/null 2>&1 &"); return; }
	// Generic BLOCK-FATAL: reload the service, rate-limited to 10 minutes.
	if (preg_match("#BLOCK-FATAL\\s+#", $buffer, $re)) { $TimeFile = "/etc/artica-postfix/pids/UFDB_BLOCK_FATAL"; if (!IfFileTime($TimeFile, 10)) { return; } events("Webfiltering engine error, reload service"); events_ufdb_exec("service was restarted, {$buffer}"); squid_admin_mysql(0, "Fatal, Web filtering engine error", $buffer . "\nThe service will be reloaded", __FILE__, __LINE__); xsyslog("Reloading ufdb service..."); shell_exec("{$GLOBALS["nohup"]} /etc/init.d/ufdb reload >/dev/null 2>&1 &"); return; }
	// NOTE(review): this second "connection queue is full" handler is
	// unreachable — the first handler above matches the same pattern and
	// always returns. Confirm which of the two policies (+5 vs +1) is wanted.
	if (preg_match("#FATAL ERROR: connection queue is full#", $buffer, $re)) { $TimeFile = "/etc/artica-postfix/pids/UFDB_QUEUE_IS_FULL"; $Threads = @file_get_contents("/etc/artica-postfix/settings/Daemons/UfdbGuardThreads"); if (!is_numeric($Threads)) { $Threads = 48; } $Threads = $Threads + 1; if ($Threads > 140) { $Threads = 140; } @file_put_contents("/etc/artica-postfix/settings/Daemons/UfdbGuardThreads", $Threads); if (!IfFileTime($TimeFile, 2)) { return; } squid_admin_mysql(0, "Fatal, Web filtering connection queue is full", $buffer . "\nThe service will be restarted and threads are increased to {$Threads}", __FILE__, __LINE__); xsyslog("Restarting ufdb service after connection queue is full..."); shell_exec("{$GLOBALS["nohup"]} /etc/init.d/ufdb restart >/dev/null 2>&1 &"); return; }
	// Unparsable table (error code 14 variant): notify and email only.
	if (preg_match('#FATAL\\*\\s+table\\s+"(.+?)"\\s+could not be parsed.+?14#', $buffer, $re)) { events("Table on {$re[1]} crashed"); squid_admin_mysql(0, "Database {$re[1]} corrupted", $buffer, __FILE__, __LINE__); ufdbguard_admin_events("Table on {$re[1]} crashed\n{$buffer}", __FUNCTION__, __FILE__, __LINE__, "ufdbguard-service"); events_ufdb_exec("{$buffer}"); $GLOBALS["CLASS_UNIX"]->send_email_events("ufdbguard: {$re[1]} could not be parsed", "Ufdbguard claim: {$buffer}\n\n\t\tYou need to compile this database", "proxy"); return; }
	// Socket already bound: restart the service.
	if (preg_match("#FATAL ERROR: cannot bind daemon socket: Address already in use#", $buffer)) { events_ufdb_exec("ERROR DETECTED : {$buffer} `cannot bind daemon socket`"); squid_admin_mysql(1, "Fatal ERROR: cannot bind daemon socket: Address already in use [action=restart]", $buffer, __FILE__, __LINE__); ufdbguard_admin_events("Fatal ERROR: cannot bind daemon socket: Address already in use", __FUNCTION__, __FILE__, __LINE__, "ufdbguard-service"); xsyslog("Restarting ufdb service..."); shell_exec("{$GLOBALS["nohup"]} /etc/init.d/ufdb restart >/dev/null 2>&1 &"); return; }
	// Missing database variants: attempt recovery via recover_a_database().
	if (preg_match('#\\] FATAL ERROR: cannot read from "(.+?)".*?No such file or directory#', $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} missing", $buffer, __FILE__, __LINE__); events("cannot read '{$re[1]}' -> \"{$buffer}\""); squid_admin_mysql(2, "Web filtering issue on {$re[1]}", "Launch recover_a_database()", __FILE__, __LINE__); recover_a_database($re[1]); return; }
	if (preg_match('#\\*FATAL.+? 
cannot read from "(.+?)".+?: No such file or directory#', $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} missing", $buffer, __FILE__, __LINE__); events("cannot read '{$re[1]}' -> \"{$buffer}\""); squid_admin_mysql(2, "Web filtering issue on {$re[1]}", "Launch recover_a_database()", __FILE__, __LINE__); recover_a_database($re[1]); return; }
	if (preg_match('#\\*FATAL\\*\\s+cannot read from\\s+"(.+?)"#', $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} missing", $buffer, __FILE__, __LINE__); events("Problem on {$re[1]}"); events_ufdb_exec("{$buffer}"); squid_admin_mysql(2, "Web filtering issue on {$re[1]}", "Launch recover_a_database()", __FILE__, __LINE__); recover_a_database($re[1]); $GLOBALS["CLASS_UNIX"]->send_email_events("ufdbguard: {$re[1]} Not compiled..", "Ufdbguard claim: {$buffer}\nYou need to compile your databases"); return; }
	// Missing .ufdb file: create an empty placeholder and email an alert.
	// NOTE(review): mkdir(..., 666, ...) uses DECIMAL 666 (= octal 01232),
	// almost certainly 0666 was intended — confirm before changing.
	if (preg_match("#\\*FATAL\\*\\s+cannot read from\\s+\"(.+?)\\.ufdb\".+?No such file or directory#", $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} missing", $buffer . "\n Problem on {$re[1]}\n\nYou need to compile your databases", __FILE__, __LINE__); events("UFDB database missing : Problem on {$re[1]}"); if (!is_file($re[1])) { @mkdir(dirname($re[1]), 666, true); shell_exec("/bin/touch {$re[1]}"); } $GLOBALS["CLASS_UNIX"]->send_email_events("ufdbguard: {$re[1]} Not compiled..", "Ufdbguard claim: {$buffer}\nYou need to compile your databases", "ufdbguard-service"); return; }
	// Worker thread crash: restart the daemon.
	// NOTE(review): no "return" here — execution deliberately(?) falls through
	// to the handlers below; confirm this is intended.
	if (preg_match("#thread worker-[0-1]+.+?caught signal\\s+[0-1]+#", $buffer, $re)) { squid_admin_mysql(0, "Webfiltering Daemon as crashed - Start a new one", $buffer, __FILE__, __LINE__); $GLOBALS["CLASS_UNIX"]->send_email_events("ufdbguard: crashed", "Ufdbguard claim: {$buffer}\n", "proxy"); shell_exec("/etc/init.d/ufdb start &"); }
	// Expression list unreadable: fix ownership.
	if (preg_match("#\\*FATAL\\*\\s+expression list\\s+(.+?): Permission denied#", $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} permission denied", $buffer . "\nProblem on '{$re[1]}' -> chown squid:squid", __FILE__, __LINE__); events("UFDB expression permission issue : Problem on '{$re[1]}' -> chown squid:squid"); shell_exec("{$GLOBALS["chown"]} -R squid:squid " . dirname($re[1])); return; }
	// Expression list missing: create an empty file and ask for a reload.
	if (preg_match("#\\*FATAL.+?expression list\\s+(.+?):\\s+No such file or directory#", $buffer, $re)) { squid_admin_mysql(0, "Database {$re[1]} missing", $buffer . "\nProblem on '{$re[1]}' -> Try to repair", __FILE__, __LINE__); events("Expression list: Problem on {$re[1]} -> \"{$buffer}\""); events("Creating directory " . dirname($re[1])); @mkdir(dirname($re[1]), 0755, true); events("Creating empty file '" . $re[1] . "'"); @file_put_contents($re[1], "\n"); events("ufdbguard tail: Service will be reloaded"); $GLOBALS["CLASS_UNIX"]->send_email_events(basename(__FILE__) . ":Service ufdb will be reloaded ", "Cause:{$buffer}", "ufdbguard-service"); squid_admin_mysql(2, "Ask to reload the Web filtering service", "Cause:{$buffer}"); ufdbguard_admin_events("ufdbguard tail: Service will be reloaded", __FUNCTION__, __FILE__, __LINE__, "watchdog"); shell_exec("{$GLOBALS["RELOADCMD"]} --function==" . __FUNCTION__ . " --line=" . __LINE__ . " " . "--filename=" . basename(__FILE__) . " >/dev/null 2>&1 &"); return; }
	// Empty domain list: intentionally ignored (previous notifications kept
	// commented out below).
	if (preg_match("#database table \\/var\\/lib\\/squidguard\\/(.+?)\\/domains\\s+is empty#", $buffer, $re)) { //ufdbguard_admin_events("Database {$re[1]} as no datas, you should recompile your databases",__FUNCTION__,__FILE__,__LINE__,"ufdbguard-service");
		//$GLOBALS["CLASS_UNIX"]->send_email_events("ufdbguard: {$re[1]} database is empty, please compile your databases","Ufdbguard claim: $buffer\nYou need to compile your databases","proxy");
		return; }
	// Successful reload of configuration/databases: notify with the version.
	if (preg_match("#the new configuration and database are loaded for ufdbguardd ([0-9\\.]+)#", $buffer, $re)) { squid_admin_mysql(2, "Web Filtering engine service v{$re[1]} has reloaded new configuration and databases", ""); $GLOBALS["CLASS_UNIX"]->send_email_events("UfdbGuard v{$re[1]} has reloaded new configuration and databases", null, "ufdbguard-service"); return; }
	// Periodic statistics lines: swallowed (notification kept commented out).
	if (preg_match("#statistics:(.+)#", $buffer, $re)) { if (preg_match("#blocked ([0-9]+) times#", $re[1], $ri)) { if ($ri[1] > 0) { //squid_admin_mysql(2, "{$re[1]}","");
	} } return; }
	// ---- BLOCK lines, long form: "BLOCK user ip rule category scheme://site...myip=publicip".
	// Decoded, normalized (strip path/port/"www.", resolve numeric hosts) and
	// written to InfluxDB; optionally also to the SMTP notification queue.
	if (preg_match("#BLOCK (.*?)\\s+(.+?)\\s+(.+?)\\s+(.+?)\\s+(|http|https|ftp|ftps)://(.+?)myip=(.+)\$#", $buffer, $re)) { $user = trim($re[1]); $local_ip = $re[2]; $rulename = $re[3]; $category = $re[4]; $www = $re[6]; $public_ip = $re[7]; //events("BLOCK[".__LINE__."]: $user/$local_ip - $www");
		if (strpos($www, "/") > 0) { $tb = explode("/", $www); $www = $tb[0]; } if (preg_match("#^www\\.(.+)#", $www, $re)) { $www = $re[1]; } if (preg_match("#([0-9]+)\\.addr#", $www)) { $www = long2ip($re[1]); } if (preg_match("#^([0-9\\.]+)#", $local_ip, $re)) { $local_ip = $re[1]; } $date = time(); $table = date('Ymd') . "_blocked"; $category = CategoryCodeToCatName($category); if ($user == "-") { $user = null; } $MAC = $GLOBALS["CLASS_UNIX"]->IpToMac($local_ip); $time = time(); if (preg_match("#^[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+\$#", $www)) { $public_ip = $www; $www = $GLOBALS["CLASS_UNIX"]->IpToHostname($www); } $Clienthostname = $GLOBALS["CLASS_UNIX"]->IpToHostname($local_ip); if ($Clienthostname == null) { $Clienthostname = $local_ip; } paranoidmode($local_ip, $www); $q = new influx(); if ($GLOBALS["UfdbguardSMTPNotifs"]["BLOCK_NOTIFS"] == 1) { events("Write notif"); $line_notif = date("H:i:s") . " [{$www}]: blocked domain: User: {$user}/{$local_ip}/{$Clienthostname}, Category: {$category}, Rule: {$rulename}"; $q->insert_ufdb_notif($line_notif); } if ($GLOBALS["SQUID_PERFORMANCE"] > 2) { return; } $line = "{$time}:::{$user}:::{$category}:::{$rulename}:::{$public_ip}:::blocked domain:::blocked domain:::{$Clienthostname}:::{$www}:::{$local_ip}"; $q->insert_ufdb($line); return; }
	// ---- BLOCK lines, short form: URI parsed with parse_url(); otherwise the
	// same normalization/insert path as above, plus per-site counters.
	if (preg_match("#BLOCK\\s+(.*?)\\s+(.+?)\\s+(.*?)\\s+(.+?)\\s+(.+?)\\s+[A-Z]+#", $buffer, $re)) { $date = time(); $user = trim($re[1]); $local_ip = $re[2]; $rulename = $re[3]; $category = $re[4]; $uri = $re[5]; //events("BLOCK[".__LINE__."]: $user/$local_ip - $www Notif:{$GLOBALS["UfdbguardSMTPNotifs"]["BLOCK_NOTIFS"]}");
		if (preg_match("#^([0-9\\.]+)#", $local_ip, $re)) { $local_ip = $re[1]; } $time = time(); $array = parse_url($uri); $www = $array["host"]; if (strpos($www, ":") > 0) { $t = explode(":", $www); $www = $t[0]; } if (preg_match("#([0-9]+)\\.addr#", $www)) { $www = long2ip($re[1]); } $category = CategoryCodeToCatName($category); $MAC = $GLOBALS["CLASS_UNIX"]->IpToMac($local_ip); if (preg_match("#^[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+\$#", $www)) { $public_ip = $www; $www = $GLOBALS["CLASS_UNIX"]->IpToHostname($www); } else { $public_ip = HostnameToIp($www); } if (preg_match("#^www\\.(.+)#", $www, $re)) { $www = $re[1]; } $Clienthostname = $GLOBALS["CLASS_UNIX"]->IpToHostname($local_ip); if ($Clienthostname == null) { $Clienthostname = $local_ip; } if ($user == "-") { $user = null; } CreateCounter($www, $local_ip, $user, $category); paranoidmode($local_ip, $www); $q = new influx(); if ($GLOBALS["UfdbguardSMTPNotifs"]["BLOCK_NOTIFS"] == 1) { $line_notif = date("H:i:s") . " [{$www}]: blocked domain: User: {$user}/{$local_ip}/{$Clienthostname}, Category: {$category}, Rule: {$rulename}"; $q->insert_ufdb_notif($line_notif); } if ($GLOBALS["SQUID_PERFORMANCE"] > 2) { return; } $q = new influx(); $line = "{$time}:::{$user}:::{$category}:::{$rulename}:::{$public_ip}:::blocked domain:::blocked domain:::{$Clienthostname}:::{$www}:::{$local_ip}"; $q->insert_ufdb($line); return; }
	// Nothing matched at all: log the raw line for later analysis.
	events("Not filtered: {$buffer}"); }
} if ($unix->isIPAddress($ComputerName)) { $ipaddr = $ComputerName; $ComputerName = $unix->IpToHostname($ipaddr); } else { $ipaddr = gethostbyname($ComputerName); } if (trim($InfectedFileName) == null) { $InfectedFileName = $InfectedPath; } $MAC = $unix->IpToMac($ipaddr); $public_ip = $unix->IpToHostname($www); $user = $ipaddr; $ipaddr = gethostbyaddr($ipaddr); $time = time(); $q = new influx(); $line = "{$time}:::{$user}:::Infected:::Kaspersky-Antivirus:::{$public_ip}:::Security issue:::THREAT {$VirusName} DETECTED:::{$Clienthostname}:::{$www}:::{$local_ip}"; $q->insert_ufdb($line); } else { events("{$InfectedPath} -> no match", __FUNCTION__, __FILE__, __LINE__); } $sock = new sockets(); $sock->getFrameWork("system.php?parse-blocked=yes"); $SquidAutoblock = $sock->GET_INFO("SquidAutoblock"); events("SquidAutoblock={$SquidAutoblock}", __FUNCTION__, __FILE__, __LINE__); if ($sock->GET_INFO("SquidAutoblock") == 1) { $InfectedPath = str_replace(basename($InfectedPath), "", $InfectedPath); $sql = "INSERT INTO squid_block(uri,task_type,zDate)\n\tVALUES('{$InfectedPath}','autoblock {$VirusName}',NOW());"; $q->QUERY_SQL($sql, "artica_backup"); $sock->getFrameWork("cmd.php?squidnewbee=yes"); }
/**
 * Aggregates the last 4 hours of user-agent statistics from InfluxDB and
 * snapshots them into the MySQL table USERAGENTS4H.
 *
 * Rows are deduplicated on the (UID, user-agent, MAC) triplet, accumulating
 * sizes and request counts. The function is a no-op when the
 * "UserAgentsStatistics" setting is disabled.
 *
 * @return void
 */
function USERAGENTS() {
	echo __FUNCTION__ . "\n";
	$now = InfluxQueryFromUTC(strtotime("-4 hour"));
	$influx = new influx();
	$sock = new sockets();
	// Feature toggle: do nothing when user-agent statistics are disabled.
	$UserAgentsStatistics = intval($sock->GET_INFO("UserAgentsStatistics"));
	if ($UserAgentsStatistics == 0) { return; }
	$sql = "SELECT MAC,RQS,SIZE,UID,USERAGENT FROM useragents WHERE time>{$now}s";
	echo __FUNCTION__ . ": QUERY\n";
	$main = $influx->QUERY_SQL($sql);
	echo __FUNCTION__ . ": PARSING\n";
	// BUGFIX: accumulators are now initialized; they were previously
	// auto-vivified, and count() on an undefined variable is fatal on PHP 8
	// when InfluxDB returns no rows.
	$TMAIN = array();
	$f = array();
	foreach ($main as $row) {
		$SIZE = intval($row->SIZE);
		$RQS = intval($row->RQS);
		$UID = $row->UID;
		$MAC = $row->MAC;
		$USERAGENT = $row->USERAGENT;
		// Rows with neither a MAC address nor a user id cannot be attributed.
		if ($MAC == null) { if ($UID == null) { continue; } }
		// Deduplicate on (UID, user-agent, MAC); accumulate size and hits.
		$md5 = md5("{$UID}{$USERAGENT}{$MAC}");
		if (!isset($TMAIN[$md5])) {
			$TMAIN[$md5]["UID"] = $UID;
			$TMAIN[$md5]["USERAGENT"] = $USERAGENT;
			$TMAIN[$md5]["MAC"] = $MAC;
			$TMAIN[$md5]["RQS"] = $RQS;
			$TMAIN[$md5]["SIZE"] = $SIZE;
		} else {
			$TMAIN[$md5]["SIZE"] = $TMAIN[$md5]["SIZE"] + $SIZE;
			$TMAIN[$md5]["RQS"] = $TMAIN[$md5]["RQS"] + $RQS;
		}
	}
	// BUGFIX: each() was deprecated in PHP 7.2 and removed in PHP 8.
	foreach ($TMAIN as $md5 => $array) {
		$USERAGENT = trim($array["USERAGENT"]);
		$MAC = $array["MAC"];
		$RQS = $array["RQS"];
		$SIZE = $array["SIZE"];
		$UID = $array["UID"];
		$USERAGENT = mysql_escape_string2($USERAGENT);
		if ($GLOBALS["VERBOSE"]) { echo "('{$USERAGENT}','{$SIZE}','{$RQS}','{$MAC}','{$UID}')\n"; }
		$f[] = "('{$USERAGENT}','{$SIZE}','{$RQS}','{$MAC}','{$UID}')";
	}
	if (count($f) > 0) {
		$q = new mysql_squid_builder();
		$q->QUERY_SQL("CREATE TABLE IF NOT EXISTS USERAGENTS4H (\n\t\t\t\t`hits` BIGINT UNSIGNED, \n\t\t\t\t`size` BIGINT UNSIGNED,\n\t\t\t\t`USERAGENT` VARCHAR(128) NOT NULL ,\n\t\t\t\t`UID` VARCHAR(128) NOT NULL,\n\t\t\t\t`MAC` VARCHAR(128) NOT NULL,\n\t\t\t\tKEY `hits` (`hits`), \n\t\t\t\tKEY `size` (`size`),\n\t\t\t\tKEY `UID` (`UID`),\n\t\t\t\tKEY `MAC` (`MAC`),\n\t\t\t\tKEY `USERAGENT` (`USERAGENT`)\n\t\t\t\t) ENGINE=MYISAM");
		// Full refresh: wipe the 4-hour snapshot before re-inserting.
		$q->QUERY_SQL("TRUNCATE TABLE USERAGENTS4H");
		$q->QUERY_SQL("INSERT IGNORE INTO USERAGENTS4H (USERAGENT,size,hits,MAC,UID) VALUES " . @implode(",", $f));
	}
}
/**
 * Extracts detailed web-access rows from InfluxDB for the requested time range
 * and bulk-inserts them (batches of 500 tuples) into the MySQL working table
 * `tmp_{$md5}user`. Every row is also appended to $GLOBALS["CSV1"] for the
 * CSV export built alongside the report.
 *
 * @param array  $ligne row holding a serialized "params" array
 *                      (FROM, TO, INTERVAL, optional USER)
 * @param string $md5   report identifier; used to name the working table
 * @return bool         true when at least one row was processed
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	$influx = new influx();
	$from = InfluxQueryFromUTC($params["FROM"]);
	$to = InfluxQueryFromUTC($params["TO"]);
	$interval = $params["INTERVAL"];
	$q = new mysql_squid_builder();
	// Start from a clean working table.
	$q->QUERY_SQL("DROP TABLE `tmp_{$md5}user`");
	$sql = "CREATE TABLE IF NOT EXISTS `tmp_{$md5}user`\n\t(`ZDATE` DATETIME,\n\t`SIZE` INT UNSIGNED NOT NULL DEFAULT 1,\n\t`RQS` INT UNSIGNED NOT NULL DEFAULT 1,\n\t`CATEGORY` VARCHAR(60),\n\t`FAMILYSITE` VARCHAR(128),\n\t`USERID` VARCHAR(60),\n\t`IPADDR` VARCHAR(60),\n\t`MAC` VARCHAR(60),\n\tKEY `ZDATE`(`ZDATE`),\n\tKEY `CATEGORY`(`CATEGORY`),\n\tKEY `FAMILYSITE`(`FAMILYSITE`),\n\tKEY `USERID`(`USERID`),\n\tKEY `IPADDR`(`IPADDR`),\n\tKEY `MAC`(`MAC`)) ENGINE = MYISAM;";
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo "********** FAILED **********\n";
		echo $q->mysql_error . "\n";
		build_progress("{step} {insert_data}: MySQL engine, {failed}", 110);
		return false;
	}
	$FIELDS["MAC"] = "MAC";
	$FIELDS["IPADDR"] = "IPADDR";
	$FIELDS["USERID"] = "USERID";
	// BUGFIX: $FINAL_FIELDS is now always populated (it was built with each(),
	// removed in PHP 8, and only when $params["USER"] was set; when USER was
	// absent the fallback query below produced malformed SQL with a dangling
	// comma: "SELECT SIZE,FAMILYSITE,RQS, FROM access_log ...").
	$FINAL_FIELDS = array();
	foreach ($FIELDS as $field => $size) { $FINAL_FIELDS[] = $field; }
	$sql = "SELECT SIZE,FAMILYSITE,RQS,CATEGORY,MAC,IPADDR,USERID FROM access_log WHERE (time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "')";
	if (isset($params["USER"])) {
		$sql = "SELECT SIZE,FAMILYSITE,RQS,CATEGORY," . @implode(",", $FINAL_FIELDS) . " FROM access_log WHERE (time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "')";
	}
	echo "{$sql}\n";
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	// CSV header row for the export built alongside the SQL inserts.
	$GLOBALS["CSV1"][] = array("date", "website", "uid", "ipaddr", "mac", "SizeBytes", "SizeText", "hits");
	$main = $influx->QUERY_SQL($sql);
	echo "MAIN(1): " . count($main) . " items\n";
	// Fallback: retry without the CATEGORY column when almost nothing came back.
	if (count($main) < 2) {
		$sql = "SELECT SIZE,FAMILYSITE,RQS," . @implode(",", $FINAL_FIELDS) . " FROM access_log WHERE (time >'" . date("Y-m-d H:i:s", $from) . "' and time < '" . date("Y-m-d H:i:s", $to) . "')";
		$main = $influx->QUERY_SQL($sql);
		echo "MAIN(2): " . count($main) . " items\n";
	}
	$c = 0;
	$f = array();
	foreach ($main as $row) {
		$time = InfluxToTime($row->time);
		$SIZE = intval($row->SIZE);
		// Zero-byte rows carry no useful accounting data.
		if ($SIZE == 0) { continue; }
		$RQS = intval($row->RQS);
		$CATEGORY = mysql_escape_string2($row->CATEGORY);
		$FAMILYSITE = mysql_escape_string2($row->FAMILYSITE);
		$MAC = mysql_escape_string2($row->MAC);
		$IPADDR = mysql_escape_string2($row->IPADDR);
		$USERID = mysql_escape_string2($row->USERID);
		// Rows are bucketed per hour in the working table.
		$DATE = date("Y-m-d H:00:00", $time);
		$f[] = "('{$DATE}','{$SIZE}','{$RQS}','{$CATEGORY}','{$FAMILYSITE}','{$USERID}','{$IPADDR}','{$MAC}')";
		$SIZE_TEXT = FormatBytes($SIZE / 1024);
		$GLOBALS["CSV1"][] = array($DATE, $FAMILYSITE, $USERID, $IPADDR, $MAC, $SIZE, $SIZE_TEXT, $RQS);
		$c++;
		// Flush every 500 tuples to keep single INSERT statements small.
		if (count($f) > 500) {
			$q->QUERY_SQL("INSERT IGNORE INTO `tmp_{$md5}user` (`ZDATE`,`SIZE`,`RQS`,`CATEGORY`,`FAMILYSITE`,`USERID`,`IPADDR`,`MAC`)\n\t\t\tVALUES " . @implode(",", $f));
			$f = array();
			if (!$q->ok) {
				echo "********** FAILED **********\n";
				echo $q->mysql_error . "\n";
				build_progress("{step} {insert_data}: MySQL engine, {failed}", 110);
				return false;
			}
		}
	}
	// Flush the remaining tuples.
	if (count($f) > 0) {
		$q->QUERY_SQL("INSERT IGNORE INTO `tmp_{$md5}user` (`ZDATE`,`SIZE`,`RQS`,`CATEGORY`,`FAMILYSITE`,`USERID`,`IPADDR`,`MAC`) VALUES " . @implode(",", $f));
		if (!$q->ok) {
			echo "********** FAILED **********\n";
			echo $q->mysql_error . "\n";
			build_progress("{step} {insert_data}: MySQL engine, {failed}", 110);
			return false;
		}
	}
	if ($c == 0) { echo "MAIN_ARRAY is null....\n"; return false; }
	return true;
}
/**
 * PostgreSQL variant: aggregate access_log/access_month/access_year rows into
 * a per-report table "{$md5}report" (size, familysite, zdate, "user").
 *
 * NOTE(review): this redeclares GRAB_DATAS() already defined above in this
 * file; loading both definitions is a fatal "cannot redeclare" error in PHP.
 * Only one of the two can be active — confirm which engine this build uses.
 *
 * @param array  $ligne Report definition row; "params" is a serialized array
 *                      with FROM/TO timestamps, INTERVAL, USER (group field)
 *                      and SEARCH (wildcard pattern).
 * @param string $md5   Report token; names the report table.
 * @return bool true when at least one aggregated row was produced.
 */
function GRAB_DATAS($ligne, $md5) {
	$GLOBALS["zMD5"] = $md5;
	$params = unserialize($ligne["params"]);
	$influx = new influx();
	// Sanitize the period: anything before 2008 is considered garbage and
	// replaced (FROM -> today 00:00, TO -> now).
	$mintime = strtotime("2008-01-01 00:00:00");
	$params["TO"] = intval($params["TO"]);
	$params["FROM"] = abs(intval($params["FROM"]));
	if ($params["FROM"] < $mintime) {
		$params["FROM"] = strtotime(date("Y-m-d 00:00:00"));
	}
	if ($params["TO"] < $mintime) {
		$params["TO"] = time();
	}
	$from = $params["FROM"];
	$to = $params["TO"];
	$interval = $params["INTERVAL"];
	$user = strtolower($params["USER"]);
	$search = trim($params["SEARCH"]);
	if ($search == "*") {
		$search = null;
	}
	$md5_table = "{$md5}sites";
	$SSEARCH = null;
	echo "FLOW: FROM {$from} to {$to} {$interval} user:{$user} search:{$search}\n";
	if ($search != null) {
		// Translate shell-style "*" wildcards to a POSIX regex.
		// NOTE(review): $search and $user are interpolated into SQL without
		// escaping — safe only if params never carry untrusted input; verify.
		$search = str_replace("*", ".*", $search);
		$SSEARCH = " (\"{$user}\" ~* '{$search}') AND ";
	}
	if ($user == "ipaddr") {
		$ip = new IP();
		// Optional ">" / "<" prefix inverts into the matching inet operator.
		$operator = null;
		if (substr($search, 0, 1) == ">") {
			$operator = "<";
			$search = substr($search, 1, strlen($search));
		}
		if (substr($search, 0, 1) == "<") {
			$operator = ">";
			$search = substr($search, 1, strlen($search));
		}
		if (preg_match("#[0-9\\.]+\\/[0-9]+#", $search)) {
			// CIDR notation: containment test.
			$SSEARCH = " ( inet '{$search}' >> ipaddr) AND ";
		}
		if (preg_match("#^[0-9\\.]+\$#", $search)) {
			$SSEARCH = " ( inet '{$search}' {$operator}= ipaddr) AND ";
		}
	}
	$sql = "CREATE TABLE IF NOT EXISTS \"{$md5}report\" (zDate timestamp, familysite VARCHAR(128), \"user\" VARCHAR(128), size BIGINT)";
	$q = new postgres_sql();
	$q->QUERY_SQL($sql);
	echo $sql . "\n";
	if (!$q->ok) {
		echo "***************\n{$q->mysql_error}\n***************\n";
		return false;
	}
	$q->QUERY_SQL("create index zdate{$md5}report on \"{$md5}report\"(zdate);");
	$q->QUERY_SQL("create index familysite{$md5}report on \"{$md5}report\"(familysite);");
	$distance = $influx->DistanceHour($from, $to);
	echo "Distance: {$distance} hours\n";
	// FIX: $TimeGroup was undefined for periods <= 4 hours, producing a
	// broken SELECT list ("...familysite,,\"user\""); default to raw zdate.
	$TimeGroup = "zdate";
	if ($distance > 4) {
		// Coarsen to hourly buckets for longer periods.
		$TimeGroup = "date_trunc('hour', zdate) as zdate";
	}
	$sql = "SELECT SUM(SIZE) as size,familysite,{$TimeGroup},\"{$user}\"\n\t\tFROM access_log WHERE {$SSEARCH}zdate >'" . date("Y-m-d H:i:s", $from) . "' \n\t\tand zdate < '" . date("Y-m-d H:i:s", $to) . "' GROUP BY zdate,familysite,\"{$user}\"";
	if ($distance > 23) {
		echo "Distance: {$distance} hours: Use the Month table\n";
		$sql = "SELECT SUM(SIZE) as size,familysite,zdate,\"{$user}\"\n\t\tFROM access_month WHERE {$SSEARCH}zdate >='" . date("Y-m-d H:i:s", $from) . "'\n\t\tand zdate <= '" . date("Y-m-d H:i:s", $to) . "' GROUP BY zdate,familysite,\"{$user}\"";
	}
	if ($distance > 720) {
		echo "Distance: {$distance} hours: Use the Year table\n";
		$sql = "SELECT SUM(SIZE) as size,familysite,zdate,\"{$user}\"\n\t\tFROM access_year WHERE {$SSEARCH}zdate >='" . date("Y-m-d H:i:s", $from) . "'\n\t\tand zdate <= '" . date("Y-m-d H:i:s", $to) . "' GROUP BY zdate,familysite,\"{$user}\"";
	}
	$q->QUERY_SQL("TRUNCATE TABLE \"{$md5}report\"");
	build_progress("{step} {waiting_data}: BigData engine, (websites) {please_wait}", 6);
	// Aggregate straight into the report table (INSERT ... SELECT).
	$sql = "INSERT INTO \"{$md5}report\" (size,familysite,zdate,\"user\") {$sql}";
	echo "***************\n{$sql}\n*****************\n";
	$q->QUERY_SQL($sql);
	if (!$q->ok) {
		echo "***************\nERROR {$q->mysql_error}\n***************\n";
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	$ligne = pg_fetch_assoc($q->QUERY_SQL("SELECT COUNT(*) as tcount FROM \"{$md5}report\""));
	if (!$q->ok) {
		echo "***************\nERROR {$q->mysql_error}\n***************\n";
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	$c = $ligne["tcount"];
	if ($c == 0) {
		echo "\n\n\n!!! No data....!!!\n\n\n";
		$q->QUERY_SQL("DROP TABLE \"{$md5}report\"");
		return false;
	}
	echo "{$c} items inserted to PostgreSQL\n";
	return true;
}
/**
 * Render a Highcharts graph of server memory consumption (GB) for the
 * period/interval given in $_GET (date1/time1, date2/time2, interval,
 * container). Raw samples come from InfluxDB (SYSTEM.MEM_STATS); they are
 * re-averaged per bucket through a temporary MySQL table, then echoed as a
 * chart. Side effects: stores the requested period in $_SESSION.
 *
 * @return void (echoes JavaScript/HTML)
 */
function memstats() {
	$time = time();
	$page = CurrentPageName();
	$influx = new influx();
	$q = new mysql();
	// Remember the requested window for the UI.
	$_SESSION["SQUID_STATS_MEM_DATE1"] = $_GET["date1"];
	$_SESSION["SQUID_STATS_MEM_TIME1"] = $_GET["time1"];
	$_SESSION["SQUID_STATS_MEM_DATE2"] = $_GET["date2"];
	$_SESSION["SQUID_STATS_MEM_TIME2"] = $_GET["time2"];
	$from = strtotime("{$_GET["date1"]} {$_GET["time1"]}");
	$to = strtotime("{$_GET["date2"]} {$_GET["time2"]}");
	$md5 = md5("{$_GET["interval"]}{$from}{$to}");
	$sql = "SELECT MEM_STATS FROM SYSTEM WHERE time > {$from}s and time < {$to}s GROUP BY time({$_GET["interval"]})";
	$main = $influx->QUERY_SQL($sql);
	echo "// {$sql}";
	// Bucket label format per supported interval.
	$per["1m"] = "Y-m-d H:i:00";
	$per["5m"] = "Y-m-d H:i:00";
	$per["10m"] = "Y-m-d H:i:00";
	$per["15m"] = "Y-m-d H:i:00";
	$per["30m"] = "Y-m-d H:i:00";
	$per["1h"] = "Y-m-d H:00";
	$per["1d"] = "Y-m-d";
	// FIX: guard against unknown interval values (undefined-index before)
	// and hoist the lookup out of the loop.
	$DateFormat = isset($per[$_GET["interval"]]) ? $per[$_GET["interval"]] : "Y-m-d H:i:00";
	$f = array(); // FIX: was undefined when Influx returned no rows
	foreach ($main as $row) {
		$time = $row->time;
		$min = date($DateFormat, $time);
		$f[] = "('{$min}','{$row->MEM_STATS}')";
	}
	$temptable = "tmp_{$md5}";
	$sql = "CREATE TABLE IF NOT EXISTS `{$temptable}` (\n\t`zDate` DATETIME PRIMARY KEY,\n\t`size` INT UNSIGNED NOT NULL DEFAULT 1,\n\tKEY `size`(`size`)\n\t) ENGINE = MYISAM;";
	$q->QUERY_SQL($sql, "artica_backup");
	if (!$q->ok) {
		echo "//{$q->mysql_error}\n";
	}
	// FIX: only run the INSERT when there are values; an empty VALUES list
	// was always a MySQL syntax error.
	if (count($f) > 0) {
		$q->QUERY_SQL("INSERT IGNORE INTO `{$temptable}` (`zDate`,`size`) VALUES " . @implode(",", $f), "artica_backup");
	}
	$xdata = array();
	$ydata = array();
	$results = $q->QUERY_SQL("SELECT AVG(size) as MEM_STATS,zDate FROM {$temptable} GROUP BY zDate ORDER BY zDate", "artica_backup");
	while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) {
		$time = strtotime($ligne["zDate"]);
		$xdata[] = date("H:i", $time);
		$ydata[] = $ligne["MEM_STATS"] / 1024; // KB -> MB? NOTE(review): unit chain (÷1024, chart says GB) — verify
	}
	$q->QUERY_SQL("DROP TABLE {$temptable}", "artica_backup");
	$page = CurrentPageName();
	$time = time();
	$title = "{server_memory_consumption} (GB)";
	$timetext = $_GET["interval"];
	$highcharts = new highcharts();
	$highcharts->container = $_GET["container"];
	$highcharts->xAxis = $xdata;
	$highcharts->Title = $title;
	$highcharts->TitleFontSize = "14px";
	$highcharts->AxisFontsize = "12px";
	$highcharts->yAxisTtitle = "GB";
	$highcharts->xAxis_labels = true;
	$highcharts->LegendSuffix = "GB";
	$highcharts->xAxisTtitle = $timetext;
	$highcharts->datas = array("{size}" => $ydata);
	echo $highcharts->BuildChart();
}
/**
 * Flexigrid-style JSON feed: last hour of access_log rows for one node,
 * selected by $_GET["ipaddr"] (validated IP) or $_GET["MAC"] (validated MAC).
 * Emits {page, total, rows:[{id, cell:[time, site, requests, size]}]}.
 *
 * @return void (echoes JSON)
 */
function node_infos_realtime_list() {
	$page = CurrentPageName();
	$tpl = new templates();
	$sock = new sockets();
	$influx = new influx();
	$from = strtotime('-1 hour');
	if (isset($_POST['page'])) {
		$page = $_POST['page'];
	}
	// FIX: $rp (rows-per-page) was undefined when $_POST['rp'] is absent,
	// producing "LIMIT " (invalid SQL); default to 100.
	$rp = 100;
	if (isset($_POST['rp'])) {
		$rp = $_POST['rp'];
	}
	// Build the node filter; both inputs are validated by the IP helper
	// before being interpolated into the query.
	$ip = new IP();
	$FORCE_FILTER = null;
	if ($ip->isIPAddress($_GET["ipaddr"])) {
		$FORCE_FILTER = " IPADDR='{$_GET["ipaddr"]}'";
	}
	if ($ip->IsvalidMAC($_GET["MAC"])) {
		$FORCE_FILTER = " MAC='{$_GET["MAC"]}'";
	}
	// FIX: with no valid ipaddr/MAC the old query read
	// "where time >Ns and ORDER BY" (invalid SQL); only add the AND clause
	// when a filter exists.
	$where = "time >{$from}s";
	if ($FORCE_FILTER != null) {
		$where = "{$where} and {$FORCE_FILTER}";
	}
	$sql = "SELECT * FROM access_log where {$where} ORDER BY time DESC LIMIT {$rp}";
	$data = array();
	$data['page'] = $page;
	$data['total'] = 0;
	$data['rows'] = array();
	$main = $influx->QUERY_SQL($sql);
	$c = 0;
	foreach ($main as $row) {
		$color = "black";
		$uri = $row->SITE;
		$date = date("H:i:s", InfluxToTime($row->time));
		$size = $row->SIZE;
		$rqs = $row->RQS;
		// Stable-ish row id derived from the full row content.
		$md = md5(serialize($row));
		$c++;
		$spanON = "<span style='color:{$color};font-size:16px'>";
		$spanOFF = "</span>";
		$size = FormatBytes($size / 1024);
		$data['rows'][] = array('id' => $md, 'cell' => array("{$spanON}{$date}{$spanOFF}", "{$spanON}{$uri}{$spanOFF}", "{$spanON}{$rqs}{$spanOFF}", "{$spanON}{$size}{$spanOFF}"));
	}
	$data['total'] = $c;
	echo json_encode($data);
}