function build() { $webappFile = null; $tomcatDir = "/var/lib/tomcat6/webapps"; if (!is_dir($tomcatDir)) { echo "Starting......: " . date("H:i:s") . " C.A.S server failed `{$tomcatDir}` no such directory...\n"; return; } $unix = new unix(); $dirfiles = $unix->DirFiles("/usr/share/cas-server/modules", "cas-server-webapp-.*?\\.war"); echo "Starting......: " . date("H:i:s") . " C.A.S server checking libraries...\n"; echo "Starting......: " . date("H:i:s") . " C.A.S server TomCat webapps `{$tomcatDir}`\n"; while (list($num, $line) = each($dirfiles)) { $webappFile = "/usr/share/cas-server/modules/{$num}"; } if ($webappFile == null) { echo "Starting......: " . date("H:i:s") . " C.A.S server failed to retrieve cas-server-webapp war file\n"; return; } if (is_file("{$tomcatDir}/cas.war")) { @unlink("{$tomcatDir}/cas.war"); } echo "Starting......: " . date("H:i:s") . " C.A.S server installing {$webappFile} into {$tomcatDir}..\n"; @link($webappFile, "{$tomcatDir}/cas.war"); writesettings(); maven2(); tomcat_config(); log4jxml(); if (is_file("/etc/init.d/tomcat6")) { echo "Starting......: " . date("H:i:s") . " C.A.S server restarting tomcat server...\n"; shell_exec("/etc/init.d/tomcat6 restart >/dev/null 2>&1"); } WEB_INF_deployerConfigContext(); log4jxml(); }
function ScanQueue() { $users = new usersMenus(); $GLOBALS["SAMBA_INSTALLED"] = $users->SAMBA_INSTALLED; $unix = new unix(); $path = "/var/log/artica-postfix/xapian"; $SartOn = time(); $files = $unix->DirFiles($path); if (count($files) == 0) { return; } cpulimitProcessName("omindex"); while (list($num, $file) = each($files)) { $toScan = "{$path}/{$file}"; if (ScanFile($toScan)) { @unlink($toScan); } } $SartOff = time(); $time = distanceOfTimeInWords($SartOn, $SartOff); $countdir = count($GLOBALS["DIRS"]); cpulimitProcessNameKill("omindex"); $echo = "InstantSearch {items}: {skipped}: {$GLOBALS["SKIPPED"]} {files}<br>{indexed}: {$GLOBALS["INDEXED"]} {files}<br>{duration}:{$time}"; if ($GLOBALS["INDEXED"] > 0) { @file_put_contents("/usr/share/artica-postfix/ressources/logs/xapian.results", $echo); @chmod("/usr/share/artica-postfix/ressources/logs/xapian.results", 0777); } echo $echo . "\n"; }
function cron() { $unix = new unix(); $files = $unix->DirFiles("/etc/cron.d"); $php5 = $unix->LOCATE_PHP5_BIN(); $sql = "SELECT CronSchedule,ID FROM imapsync"; $q = new mysql(); $results = $q->QUERY_SQL($sql, "artica_backup"); if (!$q->ok) { return null; } while (list($index, $line) = each($files)) { if ($index == null) { continue; } if (preg_match("#^imapsync-#", $index)) { @unlink("/etc/cron.d/{$index}"); } } $sql = "SELECT CronSchedule,ID FROM imapsync"; $q = new mysql(); $results = $q->QUERY_SQL($sql, "artica_backup"); while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) { if (trim($ligne["CronSchedule"]) == null) { continue; } $f[] = "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/X11R6/bin:/usr/share/artica-postfix/bin"; $f[] = "MAILTO=\"\""; $f[] = "{$ligne["CronSchedule"]} root {$php5} " . __FILE__ . " --sync {$ligne["ID"]}"; $f[] = ""; @file_put_contents("/etc/cron.d/imapsync-{$ligne["ID"]}", implode("\n", $f)); @chmod("/etc/cron.d/imapsync-{$ligne["ID"]}", 0600); unset($f); } }
function schedules() { $unix = new unix(); $files = $unix->DirFiles("/etc/cron.d"); $cron = new cron_macros(); $php5 = $unix->LOCATE_PHP5_BIN(); while (list($index, $line) = each($files)) { if ($index == null) { continue; } if (preg_match("#^LdapImport-#", $index)) { @unlink("/etc/cron.d/{$index}"); } } $sql = "SELECT * FROM ldap_ou_import WHERE enabled=1"; $q = new mysql(); $results = $q->QUERY_SQL($sql, "artica_backup"); while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) { if (trim($ligne["ScheduleMin"]) == null) { continue; } $schedule = $cron->cron_defined_macros[$ligne["ScheduleMin"]]; $f[] = "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/X11R6/bin:/usr/share/artica-postfix/bin"; $f[] = "MAILTO=\"\""; $f[] = "{$schedule} root {$php5} " . __FILE__ . " --import {$ligne["ID"]} >/dev/null 2>&1"; $f[] = ""; @file_put_contents("/etc/cron.d/LdapImport-{$ligne["ID"]}", implode("\n", $f)); @chmod("/etc/cron.d/LdapImport-{$ligne["ID"]}", 0600); unset($f); } }
function reset2() { system("clear"); echo "Remove databases\n"; $q = new mysql(); echo "Remove database settings\n"; $q->DELETE_DATABASE("artica_backup"); echo "Remove database events\n"; $q->DELETE_DATABASE("artica_events"); echo "Remove database Proxy\n"; $q = new mysql_squid_builder(); $q->QUERY_SQL("DROP DATABASE `squidlogs`"); echo "Remove Artica settings Proxy\n"; $unix = new unix(); $files = $unix->DirFiles("/etc/artica-postfix/settings/Daemons"); while (list($filename, $value) = each($files)) { $fulename = "/etc/artica-postfix/settings/Daemons/{$filename}"; echo "Removing {$filename}\n"; @unlink($fulename); } @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/ProxyUseArticaDB", 1); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/StatsPerfsSquidAnswered", 1); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/CacheManagement2", 1); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/EnablePHPFPM", 0); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/EnableArticaFrontEndToNGninx", 0); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/EnableArticaFrontEndToApache", 1); @file_put_contents("/root/build/etc/artica-postfix/settings/Daemons/EnableNginx", 0); echo "Restarting Web Console...\n"; system('/etc/init.d/artica-webconsole restart'); system("clear"); echo "All data has been erased..\n"; echo "Type Enter key to exit\n"; $answer = trim(strtolower(fgets(STDIN))); die; }
function sigtool() { $unix = new unix(); $sigtool = $unix->find_program("sigtool"); if (strlen($sigtool) < 5) { die; } if (is_file("/usr/share/artica-postfix/ressources/interface-cache/ClamAVBases")) { $ttim = $unix->file_time_min("/usr/share/artica-postfix/ressources/interface-cache/ClamAVBases"); if ($ttim < 30) { return; } } $baseDir = "/var/lib/clamav"; $patterns = array(); $MAIN = array(); $patnz = $unix->DirFiles($baseDir, "\\.(cvd|cld|hdb|ign2|ndb)\$"); while (list($path, $none) = each($patnz)) { $patterns[basename($path)] = true; } while (list($pattern, $none) = each($patterns)) { if (!is_file("{$baseDir}/{$pattern}")) { continue; } $results = array(); exec("{$sigtool} --info={$baseDir}/{$pattern} 2>&1", $results); while (list($index, $line) = each($results)) { if (preg_match("#Build time:\\s+(.+)#", $line, $re)) { $time = strtotime($re[1]); $MAIN[$pattern]["zDate"] = date("Y-m-d H:i:s", $time); continue; } if (preg_match("#Version:\\s+([0-9]+)#", $line, $re)) { $MAIN[$pattern]["version"] = $re[1]; continue; } if (preg_match("#Signatures:\\s+([0-9]+)#", $line, $re)) { $MAIN[$pattern]["signatures"] = $re[1]; continue; } } if (!isset($MAIN[$pattern]["zDate"])) { $time = filemtime("{$baseDir}/{$pattern}"); $MAIN[$pattern]["zDate"] = date("Y-m-d H:i:s", $time); if (!isset($MAIN[$pattern]["version"])) { $MAIN[$pattern]["version"] = date("YmdHi", $time); } } if (!isset($MAIN[$pattern]["signatures"])) { $MAIN[$pattern]["signatures"] = $unix->COUNT_LINES_OF_FILE("{$baseDir}/{$pattern}"); } } if (count($MAIN) == 0) { return; } @file_put_contents("/usr/share/artica-postfix/ressources/interface-cache/ClamAVBases", serialize($MAIN)); }
function parse() { $TimeFile = "/etc/artica-postfix/pids/exec.squid.stats.quota-week.parser.php.time"; $pidfile = "/etc/artica-postfix/pids/exec.squid.stats.quota-week.parser.php.pid"; $unix = new unix(); $pid = $unix->get_pid_from_file($pidfile); if ($unix->process_exists($pid, basename(__FILE__))) { $timepid = $unix->PROCCESS_TIME_MIN($pid); if ($GLOBALS["VERBOSE"]) { echo "{$pid} already executed since {$timepid}Mn\n"; } if ($timepid < 14) { return; } $kill = $unix->find_program("kill"); unix_system_kill_force($pid); } @file_put_contents($pidfile, getmypid()); $sock = new sockets(); $SquidPerformance = intval($sock->GET_INFO("SquidPerformance")); $time = $unix->file_time_min($TimeFile); if (!$GLOBALS["FORCE"]) { if ($time < 1440) { return; } } @unlink($TimeFile); @file_put_contents($TimeFile, time()); $f = $unix->DirFiles("/var/log/squid", "[0-9]+_QUOTASIZE\\.db"); $export_path = "/home/artica/squid/dbExport"; @mkdir($export_path, 0755, true); while (list($filename, $none) = each($f)) { preg_match("#([0-9]+)_#", $filename, $re); $xdate = $re[1]; echo "{$filename} ( {$xdate} )\n"; if ($SquidPerformance > 1) { if (!@copy("/var/log/squid/{$filename}", "{$export_path}/{$filename}")) { continue; } @unlink("/var/log/squid/{$filename}"); continue; } if (!parse_file("/var/log/squid/{$filename}", $xdate)) { continue; } if (!@copy("/var/log/squid/{$filename}", "{$export_path}/{$filename}")) { continue; } @unlink("/var/log/squid/{$filename}"); } }
function purge_bysquid() { $unix = new unix(); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $pid = @file_get_contents($pidfile); if ($pid < 100) { $pid = null; } if ($unix->process_exists($pid, basename(__FILE__))) { $timepid = $unix->PROCCESS_TIME_MIN($pid); ufdbguard_admin_events("Already executed pid {$pid} since {$timepid}", __FUNCTION__, __FILE__, __LINE__, "purge"); if ($GLOBALS["VERBOSE"]) { echo "Already executed pid {$pid}\n"; } return; } @file_put_contents($pidfile, getmypid()); $sock = new sockets(); $users = new usersMenus(); $rm = $unix->find_program("rm"); $df = $unix->find_program("df"); $DF_RESULTS[] = "Scanning Artica directories in /var/log\ncurrent status:"; exec("{$df} -i /var/log 2>&1", $DF_RESULTS); $DF_RESULTS[] = ""; exec("{$df} -h /var/log 2>&1", $DF_RESULTS); $dirs = $unix->DirFiles("/var/log/artica-postfix"); while (list($directory, $b) = each($dirs)) { $DF_RESULTS[] = ""; $DF_RESULTS[] = ""; $DF_RESULTS[] = date("Y-m-d H:i:s") . " Removing content of {$directory}"; $DF_RESULTS[] = date("Y-m-d H:i:s") . " {$directory} Before:"; $DF_RESULTS[] = ""; exec("{$df} -i {$directory} 2>&1", $DF_RESULTS); $DF_RESULTS[] = ""; exec("{$df} -h {$directory} 2>&1", $DF_RESULTS); shell_exec("{$rm} -rf {$directory}/* 2>&1"); $DF_RESULTS[] = date("Y-m-d H:i:s") . " {$directory} After removing content:"; exec("{$df} -i {$directory} 2>&1", $DF_RESULTS); $DF_RESULTS[] = ""; exec("{$df} -h {$directory} 2>&1", $DF_RESULTS); $DF_RESULTS[] = ""; } squid_admin_mysql(0, "Log partition cleaning report", @implode("\n", $DF_RESULTS), __FILE__, __LINE__); }
function restore_all() { $unix = new unix(); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $pid = @file_get_contents($pidfile); if ($pid < 100) { $pid = null; } if ($unix->process_exists($pid, basename(__FILE__))) { $timepid = $unix->PROCCESS_TIME_MIN($pid); ufdbguard_admin_events("Already executed pid {$pid} since {$timepid}", __FUNCTION__, __FILE__, __LINE__, "reports"); if ($GLOBALS["VERBOSE"]) { echo "Already executed pid {$pid}\n"; } return; } @file_put_contents($pidfile, getmypid()); $sock = new sockets(); $ArticaProxyStatisticsRestoreFolder = $sock->GET_INFO("ArticaProxyStatisticsRestoreFolder"); if ($ArticaProxyStatisticsRestoreFolder == null) { $ArticaProxyStatisticsRestoreFolder = "/home/artica/squid/backup-statistics-restore"; } if (!is_dir($ArticaProxyStatisticsRestoreFolder)) { ufdbguard_admin_events("{$ArticaProxyStatisticsRestoreFolder} no such directory", __FUNCTION__, __FILE__, __LINE__, "reports"); } $SUCC = 0; $FAI = 0; $t = time(); $files = $unix->DirFiles($ArticaProxyStatisticsRestoreFolder); while (list($srf, $line) = each($files)) { $fullfilename = "{$ArticaProxyStatisticsRestoreFolder}/{$srf}"; if (restore($fullfilename, true)) { $SUCC++; } else { $FAI++; } } $took = $unix->distanceOfTimeInWords($t, time(), true); ufdbguard_admin_events("{$SUCC} restored backup(s), {$FAI} failed, took {$took}", __FUNCTION__, __FILE__, __LINE__, "reports"); ScanDays(); }
function ScanQueue() { $unix = new unix(); $GLOBALS["omindex"] = $unix->find_program("omindex"); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $pid = $unix->get_pid_from_file($pidfile); if ($unix->process_exists($pid)) { writelogs("Already running instance pid:{$pid}", __FUNCTION__, __FILE__, __LINE__); die; } @file_put_contents($pidfile, getmypid()); $users = new usersMenus(); $GLOBALS["SAMBA_INSTALLED"] = $users->SAMBA_INSTALLED; $path = "{$GLOBALS["ARTICALOGDIR"]}/xapian"; $SartOn = time(); $files = $unix->DirFiles($path); if (count($files) == 0) { return; } cpulimitProcessName("omindex"); while (list($num, $file) = each($files)) { $toScan = "{$path}/{$file}"; if (ScanFile($toScan)) { @unlink($toScan); } } $SartOff = time(); $time = distanceOfTimeInWords($SartOn, $SartOff); $countdir = count($GLOBALS["DIRS"]); cpulimitProcessNameKill("omindex"); $echo = "InstantSearch {items}: {skipped}: {$GLOBALS["SKIPPED"]} {files}<br>{indexed}: {$GLOBALS["INDEXED"]} {files}<br>{duration}:{$time}"; if ($GLOBALS["INDEXED"] > 0) { @file_put_contents("/usr/share/artica-postfix/ressources/logs/xapian.results", $echo); @chmod("/usr/share/artica-postfix/ressources/logs/xapian.results", 0777); } echo $echo . "\n"; }
function clean_events() { $q = new mysql(); $unix = new unix(); $rm = $unix->find_program("rm"); $nohup = $unix->find_program("nohup"); $TABLES = $q->LIST_TABLES_EVENTS_SYSTEM(); while (list($tablename, $line) = each($TABLES)) { echo "DROP {$tablename}\n"; $q->QUERY_SQL("DROP TABLE `{$tablename}`", "artica_events"); } $datadir = $unix->MYSQL_DATA_DIR(); shell_exec("{$rm} -f {$datadir}/artica_events/*.BAK"); if (is_dir("{$datadir}/syslogstore")) { $q->DELETE_DATABASE("syslogstore"); shell_exec("{$rm} -f {$datadir}/syslogstore/*.BAK"); } $files = $unix->DirFiles("{$datadir}/artica_events", "TaskSq[0-9]+\\.MYI"); while (list($file, $line) = each($files)) { $file = str_replace(".MYI", "", $file); $q->QUERY_SQL("DROP TABLE `{$file}`", "artica_events"); } $q->QUERY_SQL("TRUNCATE TABLE `nmap_events`", "artica_events"); $q->QUERY_SQL("TRUNCATE TABLE `nmap_events`", "artica_events"); $q->QUERY_SQL("TRUNCATE TABLE `avgreports`", "artica_events"); $q->QUERY_SQL("TRUNCATE TABLE `events`", "artica_events"); $q->QUERY_SQL("TRUNCATE TABLE `dhcpd_logs`", "artica_events"); $q->QUERY_SQL("TRUNCATE TABLE `update_events`", "artica_events"); shell_exec("{$nohup} /etc/init.d/mysql restart --framework=" . __FILE__ . " >/dev/null 2>&1 &"); shell_exec($unix->LOCATE_PHP5_BIN() . " /usr/share/artica-postfix/exec.mysql.start.php --engines --verbose --framework=" . __FILE__ . " 2>&1 &"); }
function ParseLangs() { $unix = new unix(); $dirs = $unix->dirdir("/root/squid-lang"); while (list($dirpath, $value) = each($dirs)) { $lang = basename($dirpath); $files = $unix->DirFiles($dirpath); while (list($filename, $value) = each($files)) { if (strpos($filename, ".") > 0) { continue; } $array[$lang][$filename] = ParseLangs_content("{$dirpath}/{$filename}"); } @file_put_contents("/usr/share/artica-postfix/ressources/databases/squid.default.templates.db", serialize($array)); } }
function ParseRetranslatorLogs() { $unix = new unix(); if ($unix->PIDOF("/usr/share/artica-postfix/bin/retranslator.bin") > 0) { return; } $dir = "/var/log/kretranslator"; if (!is_dir($dir)) { return null; } $unix = new unix(); $files = $unix->DirFiles($dir); while (list($num, $file) = each($files)) { if (!preg_match("#retranslator-([0-9\\-]+)_([0-9]+)-([0-9]+)-([0-9]+).debug#", $file, $re)) { continue; } $date = "{$re[1]} {$re[2]}:{$re[3]}:{$re[4]}"; $NumberofFilesUpdated = NumberofRestransFilesUpdated("{$dir}/{$file}"); if ($NumberofFilesUpdated[0] > 0) { $subject = "Kaspersky Retranslator: {$NumberofFilesUpdated[0]} files updated ({$NumberofFilesUpdated[1]})"; send_email_events($subject, @file_get_contents("{$dir}/{$file}"), "KASPERSKY_UPDATES", $date); @unlink("{$dir}/{$file}"); continue; } @unlink("{$dir}/{$file}"); } }
function UFDB_LOG_HOURLY_BACKUP() { $unix = new unix(); $sourcefile = "/home/ufdb/relatime-events/ACCESS_LOG"; $Workpath = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-work"; $backupdir = "{$GLOBALS["LogFileDeamonLogDir"]}/webfilter-backup"; @mkdir($Workpath, 0755, true); @mkdir($backupdir, 0755, true); if (is_file($sourcefile)) { $workfile = $Workpath . "/" . time() . ".log"; if (is_file($workfile)) { return; } if (!@copy($sourcefile, "{$workfile}")) { return; } @unlink($sourcefile); } $files = $unix->DirFiles($Workpath); while (list($basename, $subarray) = each($files)) { events("WEBFILTERING Scanning {$Workpath}/{$basename}"); UFDB_LOG_HOURLY_SCAN("{$Workpath}/{$basename}"); } }
function Dir_Files() { $queryregex = null; if (isset($_GET["queryregex"])) { if ($_GET["queryregex"] != null) { $queryregex = base64_decode($_GET["queryregex"]); } } $path = base64_decode($_GET["Dir-Files"]); $path = utf8_encode($path); writelogs_framework("{$path} ({$queryregex})", __FUNCTION__, __FILE__, __LINE__); $unix = new unix(); $array = $unix->DirFiles($path, $queryregex); writelogs_framework("{$path}=" . count($array) . " files", __FUNCTION__, __FILE__, __LINE__); echo "<articadatascgi>" . base64_encode(serialize($array)) . "</articadatascgi>"; }
function repair_tables() { $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $timefile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".time"; $pid = @file_get_contents($pidfile); $unix = new unix(); if ($unix->process_exists($pid, basename(__FILE__))) { events_tail("Already executed pid {$pid}"); if ($GLOBALS["VERBOSE"]) { echo "Already executed pid {$pid}\n"; } return; } $files = $unix->DirFiles("/usr/share/artica-postfix/ressources/logs/categorize-tables"); $php5 = $unix->LOCATE_PHP5_BIN(); while (list($none, $tablename) = each($files)) { $filePath = "/usr/share/artica-postfix/ressources/logs/categorize-tables/{$tablename}"; if (!is_file($filePath)) { @unlink($filePath); continue; } $ARRAY = unserialize(@file_get_contents($filePath)); if (!is_array($ARRAY)) { @unlink($filePath); continue; } $PID = $ARRAY["PID"]; $CUR = $ARRAY["CURRENT"]; $MAX = $ARRAY["MAX"]; if ($CUR == $MAX) { @unlink($filePath); continue; } if ($unix->process_exists($PID)) { continue; } categorize_tables_events("Ask to schedule table Current:{$CUR}/{$MAX}", null, $tablename, 1); $unix->THREAD_COMMAND_SET("{$php5} " . __FILE__ . " --table {$tablename}"); } }
function logrotatelogs($nopid = false) { $unix = new unix(); $sock = new sockets(); if ($nopid) { $pidpath = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $pid = @file_get_contents($pidpath); if ($unix->process_exists($pid)) { $pidtime = $unix->PROCCESS_TIME_MIN($pid); system_admin_events(basename(__FILE__) . ":: " . __FUNCTION__ . " Already process {$pid} running since {$pidtime} Mn.. Aborting", __FUNCTION__, __FILE__, __LINE__); return; } @file_put_contents($pidpath, getmypid()); } $echo = $unix->find_program("echo"); $LogsRotateDeleteSize = $sock->GET_INFO("LogsRotateDeleteSize"); if (!is_numeric($LogsRotateDeleteSize)) { $LogsRotateDeleteSize = 5000; } include_once dirname(__FILE__) . "/ressources/class.mysql.syslog.inc"; if ($GLOBALS["VERBOSE"]) { echo __FUNCTION__ . " line:" . __LINE__ . "\n"; } $q = new mysql_syslog(); if ($q->COUNT_ROWS("logrotate") == 0) { $q->CheckDefaults(); } $ARRAY = array(); $sql = "SELECT RotateFiles FROM logrotate WHERE enabled=1"; $results = $q->QUERY_SQL($sql); if (!$q->ok) { echo $q->mysql_error; } while ($ligne = mysql_fetch_assoc($results)) { $filepath = $ligne["RotateFiles"]; if (strpos($filepath, "*") > 0) { if ($GLOBALS["VERBOSE"]) { echo __FUNCTION__ . ":: Scanning {$filepath} line:" . __LINE__ . "\n"; } foreach (glob($filepath) as $filename) { $size = $unix->file_size($filename); $size = $size / 1024; $size = round($size / 1000, 2); $ARRAY[$filename] = $size; } } else { if (is_file($filepath)) { $size = $unix->file_size($filepath); $size = $size / 1024; $size = round($size / 1000, 2); $ARRAY[$filepath] = $size; } if (is_dir($filepath)) { $f = $unix->DirFiles($filepath); while (list($num, $filename) = each($f)) { $subfile = "{$filepath}/{$filename}"; $size = $unix->file_size($subfile); $size = $size / 1024; $size = round($size / 1000, 2); $ARRAY[$subfile] = $size; } } } } $f = $unix->DirFiles("/var/log"); while (list($num, $filename) = each($f)) { $filepath = "/var/log/{$filename}"; $size = $unix->file_size($filepath); $size = $size / 1024; $size = round($size / 1000, 2); $ARRAY[$filepath] = $size; } $f = $unix->DirFiles("/var/log/artica-postfix"); while (list($num, $filename) = each($f)) { $filepath = "/var/log/artica-postfix/{$filename}"; $size = $unix->file_size($filepath); $size = $size / 1024; $size = round($size / 1000, 2); $ARRAY[$filepath] = $size; } $restart = false; while (list($filepath, $sizeM) = each($ARRAY)) { if ($sizeM > $LogsRotateDeleteSize) { shell_exec("{$echo} \"\" >{$filepath}"); $restart = true; $unix->send_email_events("{$filepath} was cleaned ({$sizeM}M)", "It exceeds the maximal size {$LogsRotateDeleteSize}M", "system"); } } if ($restart) { shell_exec("/etc/init.d/syslog restart"); shell_exec("/etc/init.d/artica-syslog restart"); shell_exec("/etc/init.d/auth-tail restart"); shell_exec("/etc/init.d/postfix-logger restart"); } }
function ScanBackup() { $q = new mysql(); $InFluxBackupDatabaseDir = @file_get_contents("/etc/artica-postfix/settings/Daemons/InFluxBackupDatabaseDir"); if ($InFluxBackupDatabaseDir == null) { $InFluxBackupDatabaseDir = "/home/artica/influx/backup"; } $PostGresBackupMaxContainers = @file_get_contents("/etc/artica-postfix/settings/Daemons/PostGresBackupMaxContainers"); $sql = "CREATE TABLE IF NOT EXISTS `postgres_backups` (\n\t\t\t\t`filename` VARCHAR( 90 ),\n\t\t\t\t`filepath` VARCHAR( 250 ),\n\t\t\t\t`filesize` BIGINT UNSIGNED,\n\t\t\t\t`filetime` BIGINT UNSIGNED,\n\t\t\t\t PRIMARY KEY (`filepath`),\n\t\t\t\t KEY `filesize` (`filesize`),\n\t\t\t\t KEY `filetime` (`filetime`)\n\t\t\t\t) ENGINE=MYISAM;"; $q->QUERY_SQL($sql, "artica_backup"); $unix = new unix(); $patnz = $unix->DirFiles($InFluxBackupDatabaseDir, "\\.gz\$"); $q->QUERY_SQL("TRUNCATE TABLE postgres_backups", "artica_backup"); $ARRAY = array(); while (list($filepath, $none) = each($patnz)) { $filepath = "{$InFluxBackupDatabaseDir}/{$filepath}"; $filename = basename($filepath); $filesize = @filesize($filepath); $filetime = filemtime($filepath); $ARRAY[$filepath] = $filesize; $q->QUERY_SQL("INSERT IGNORE INTO postgres_backups (`filename`,`filepath`,`filesize`,`filetime`) VALUES ('{$filename}','{$filepath}','{$filesize}','{$filetime}')", "artica_backup"); } @file_put_contents("/etc/artica-postfix/settings/Daemons/InfluxDBRestoreArray", serialize($ARRAY)); $ligne = mysql_fetch_array($q->QUERY_SQL("SELECT COUNT(*) as tcount FROM postgres_backups", "artica_backup")); $containers = $ligne["tcount"]; echo "Containers:{$containers}\n"; if ($containers > $PostGresBackupMaxContainers) { $ContainersToDelete = $containers - $PostGresBackupMaxContainers; $results = $q->QUERY_SQL("SELECT filepath FROM postgres_backups ORDER BY filetime LIMIT 0,{$ContainersToDelete}", "artica_backup"); $c = 0; while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) { $c++; if ($c == $PostGresBackupMaxContainers) { break; } if ($c > $PostGresBackupMaxContainers) { break; } @unlink($ligne["filepath"]); $q->QUERY_SQL("DELETE FROM postgres_backups WHERE filepath='{$ligne["filepath"]}'", "artica_backup"); } } }
function Dir_Files() { $path = base64_decode($_GET["Dir-Files"]); writelogs_framework("{$path}", __FUNCTION__, __FILE__, __LINE__); $unix = new unix(); $array = $unix->DirFiles($path); writelogs_framework("{$path}=" . count($array) . " files", __FUNCTION__, __FILE__, __LINE__); echo "<articadatascgi>" . base64_encode(serialize($array)) . "</articadatascgi>"; }
function HyperCacheLogs() { HyperCacheSizeLog("/usr/share/squid3/HyperCacheSizeLog.db"); $q = new mysql_squid_builder(); $unix = new unix(); $files = $unix->DirFiles("/usr/share/squid3", "[0-9]+_HyperCacheSizeLog\\.db"); while (list($num, $filename) = each($files)) { $filepath = "/usr/share/squid3/{$filename}"; if (!is_file($filepath)) { continue; } $time = $q->TIME_FROM_DAY_TABLE($filename); echo "{$filepath} {$time} " . date("Y-m-d") . "\n"; } }
function buildcron() { $unix = new unix(); $path = "/etc/cron.d"; $sql = "SELECT * FROM backup_schedules ORDER BY ID DESC"; $q = new mysql(); $results = $q->QUERY_SQL($sql, "artica_backup"); if (!$q->ok) { return null; } $files = $unix->DirFiles("/etc/cron.d"); while (list($num, $filename) = each($files)) { if (preg_match("#artica-backup-([0-9]+)\$#", $filename)) { echo "Starting......: " . date("H:i:s") . " Backup remove {$filename}\n"; @unlink("{$path}/{$filename}"); } } $f = array(); while ($ligne = @mysql_fetch_array($results, MYSQL_ASSOC)) { $schedule = $ligne["schedule"]; echo "Starting......: " . date("H:i:s") . " Backup {$schedule}\n"; $f[] = "{$schedule} " . $unix->LOCATE_PHP5_BIN() . " " . __FILE__ . " {$ligne["ID"]} >/dev/null 2>&1"; } @file_put_contents("/etc/artica-postfix/backup.tasks", @implode("\n", $f)); if (!$GLOBALS["NO_RELOAD"]) { system("/etc/init.d/artica-postfix restart daemon"); } }
function PushToRepo_alls() { $WORKING_DIR = "/home/working_toulouse_databases"; $WORKING_DOWNLOAD = "{$WORKING_DIR}/dowloads"; $OUTPUTDIR = "{$WORKING_DIR}/uploads"; $unix = new unix(); $FILES = $unix->DirFiles($OUTPUTDIR); while (list($filename, $category) = each($FILES)) { $srcfile = "{$OUTPUTDIR}/{$filename}"; PushToRepo($srcfile); } }
function clean_tmd() { $unix = new unix(); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . ".MAIN.pid"; $pidfileTime = "/etc/artica-postfix/pids/exec.mysql.clean.php.clean_tmd.time"; $pid = $unix->get_pid_from_file($pidfile); if ($unix->process_exists($pid, basename(__FILE__))) { system_admin_events("Already process {$pid} exists", __FUNCTION__, __FILE__, __LINE__, "clean"); die; } $timeExec = $unix->file_time_min($pidfileTime); if ($timeExec < 240) { return; } @unlink($pidfileTime); @file_put_contents($pidfileTime, time()); @file_put_contents($pidfile, getmypid()); $SIZES = 0; $Dirs = $unix->dirdir("/var/lib/mysql"); while (list($directory, $none) = each($Dirs)) { $Files = $unix->DirFiles($directory, "\\.[0-9]+\\.TMD\$"); while (list($filename, $none) = each($Files)) { $fullpath = "{$directory}/{$filename}"; if ($unix->file_time_min($fullpath) < 240) { continue; } $SIZES = $SIZES + @filesize($fullpath); @unlink($fullpath); } $Files = $unix->DirFiles($directory, "\\.TMD-[0-9]+\$"); while (list($filename, $none) = each($Files)) { $fullpath = "{$directory}/{$filename}"; if ($unix->file_time_min($fullpath) < 240) { continue; } $SIZES = $SIZES + @filesize($fullpath); @unlink($fullpath); } } if (is_dir("/opt/squidsql/data")) { $Dirs = $unix->dirdir("/opt/squidsql/data"); while (list($directory, $none) = each($Dirs)) { $Files = $unix->DirFiles($directory, "\\.[0-9]+\\.TMD\$"); while (list($filename, $none) = each($Files)) { $fullpath = "{$directory}/{$filename}"; if ($unix->file_time_min($fullpath) < 240) { continue; } $SIZES = $SIZES + @filesize($fullpath); @unlink($fullpath); } $Files = $unix->DirFiles($directory, "\\.TMD-[0-9]+\$"); while (list($filename, $none) = each($Files)) { $fullpath = "{$directory}/{$filename}"; if ($unix->file_time_min($fullpath) < 240) { continue; } $SIZES = $SIZES + @filesize($fullpath); @unlink($fullpath); } } } }
function ScanThis($Directory = null) { if ($Directory == null) { return; } if (!is_dir($Directory)) { return null; } if (isset($GLOBALS["ALREADY_SCANNED"][$Directory])) { return null; } if ($GLOBALS["VERBOSE"]) { echo "About \"{$Directory}\"\n"; } $GLOBALS["ALREADY_SCANNED"][$Directory] = true; if (is_link($Directory)) { $Directory = @readlink($Directory); } $unix = new unix(); $dirs = $unix->dirdir($Directory); if (count($dirs) > 0) { while (list($directoryPath, $value) = each($dirs)) { if ($GLOBALS["VERBOSE"]) { echo "Rescan \"{$directoryPath}\"\n"; } ScanThis($directoryPath); } } $files = $unix->DirFiles($Directory); if (count($files) == 0) { return; } $FILES_ARRAY_SQL = array(); while (list($filename, $value) = each($files)) { $filepath = "{$Directory}/{$filename}"; $filetime = 0; $filesize = round(@filesize($filepath) / 1024, 2); if ($filetime == 0) { if (preg_match("#\\.([0-9]+)\\.[a-z]+\$#", $filename, $re)) { $filetime = $re[1]; } } if ($filetime == 0) { if (preg_match("#-([0-9]+)\\.[a-z]+\$#", $filename, $re)) { $filetime = $re[1]; } } if ($filetime == 0) { if (preg_match("#\\.log([0-9]+)\\.[a-z]+\$#", $filename, $re)) { $filetime = $re[1]; } } if ($filetime == 0) { if (preg_match("#-([0-9]+)-([0-9]+)-([0-9]+)-([0-9]+)\\.#", $filename, $re)) { if (strlen($re[4]) == 1) { $re[4] = "0{$re[4]}"; } $strdate = "{$re[1]}-{$re[2]}-{$re[3]} {$re[4]}:00:00"; $filetime = strtotime($strdate); } } if ($filetime == 0) { $filetime = filemtime($filepath); } $filedate = date("Y-m-d H:i:s", $filetime); $FILES_ARRAY_SQL[] = "('" . mysql_escape_string2($filepath) . "','{$filesize}','{$filedate}')"; if ($GLOBALS["VERBOSE"]) { echo "{$filedate} - {$filepath}: {$filesize}\n"; } } if (count($FILES_ARRAY_SQL) > 0) { $q = new mysql(); $q->QUERY_SQL("INSERT IGNORE INTO sysstorestatus \n\t\t\t\t(`filepath`,`filesize`,`zDate`) VALUES " . @implode(",", $FILES_ARRAY_SQL), "artica_events"); } }
function CompileLangs($language) { if (trim($language) == null) { return; } $base = "/usr/share/artica-postfix/ressources/language/{$language}"; $sock = new sockets(); $pattern = '#<([a-zA-Z0-9\\_\\-\\s\\.]+)>(.+?)<\\/([a-zA-Z0-9\\_\\-\\s\\.]+)>#is'; $unix = new unix(); $files = $unix->DirFiles($base); while (list($num, $val) = each($files)) { $datas = @file_get_contents("{$base}/{$val}"); if (preg_match_all($pattern, $datas, $reg)) { while (list($index, $word) = each($reg[1])) { $langs[$word] = $reg[2][$index]; } } } echo "writing /usr/share/artica-postfix/ressources/language/{$language}.db " . count($langs) . " words\n"; file_put_contents("/usr/share/artica-postfix/ressources/language/{$language}.db", serialize($langs)); }
function upload_mysql_zip($filename) { $unix = new unix(); $unzip = $unix->find_program("unzip"); $filepath = dirname(__FILE__) . "/ressources/conf/upload/{$filename}"; if (!is_file($filepath)) { build_progress_upload("{$filepath}, no such file", 110); echo "{$filepath}, no such file\n"; return false; } if (!is_file($unzip)) { build_progress_upload("Unzip, no such binary", 110); echo "Unzip, no such binary\n"; @unlink($filepath); return false; } $time = time(); $temp = $unix->TEMP_DIR() . "/{$time}"; @mkdir("{$temp}", 0755, true); build_progress_upload("Uncompress {$filepath}", 15); $tmpunzip = $unix->TEMP_DIR() . "/unzip-{$time}.txt"; $cmd = "{$unzip} -j -o {$filepath} -d {$temp}/ >{$tmpunzip} 2>&1"; echo $cmd . "\n"; shell_exec($cmd); @unlink($filepath); $c = 0; $filescan = $unix->DirFiles($temp); if (count($filescan) == 0) { echo @file_get_contents($tmpunzip); @unlink($tmpunzip); build_progress_upload("{uncompress} {failed}", 110); return; } @unlink($tmpunzip); while (list($num, $filename) = each($filescan)) { $filepath = "{$temp}/{$filename}"; if (!is_file($filepath)) { build_progress_upload("{$filepath} no such file", 16); continue; } if (upload_mysql($filepath, true, false, true)) { $c++; } } if ($c == 0) { build_progress_upload("{failed}", 110); return; } $rm = $unix->find_program("rm"); build_progress_upload("remove directory", 90); shell_exec("{$rm} -rf {$temp}"); build_progress_upload("{done}", 100); }
function awstats() { $sock = new sockets(); $unix = new unix(); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $pidTime = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".time"; if ($unix->file_time_min($pidTime) < 60) { return; } $pid = $unix->get_pid_from_file($pidfile); if ($unix->process_exists($pid, basename(__FILE__))) { $time = $unix->PROCCESS_TIME_MIN($pid); return; } @file_put_contents($pidfile, getmypid()); @unlink($pidTime); @file_put_contents($pidTime, time()); $sock = new sockets(); $EnableNginxStats = $sock->GET_INFO("EnableNginxStats"); if (!is_numeric($EnableNginxStats)) { $EnableNginxStats = 0; } if ($EnableNginxStats == 1) { return; } include_once dirname(__FILE__) . "/ressources/class.awstats.inc"; include_once dirname(__FILE__) . "/ressources/class.mysql.syslogs.inc"; $awstats_bin = $unix->LOCATE_AWSTATS_BIN(); $nice = EXEC_NICE(); $perl = $unix->find_program("perl"); $awstats_buildstaticpages = $unix->LOCATE_AWSTATS_BUILDSTATICPAGES_BIN(); if ($GLOBALS["VERBOSE"]) { echo "awstats......: {$awstats_bin}\n"; echo "statics Pages: {$awstats_buildstaticpages}\n"; echo "Nice.........: {$nice}\n"; echo "perl.........: {$perl}\n"; } if (!is_file($awstats_buildstaticpages)) { echo "buildstaticpages no such binary...\n"; return; } $sock = new sockets(); $kill = $unix->find_program("kill"); $NginxWorkLogsDir = $sock->GET_INFO("NginxWorkLogsDir"); if ($NginxWorkLogsDir == null) { $NginxWorkLogsDir = "/home/nginx/logsWork"; } $sys = new mysql_storelogs(); $files = $unix->DirFiles($NginxWorkLogsDir, "-([0-9\\-]+)\\.log"); while (list($filename, $line) = each($files)) { if (!preg_match("#^(.+?)-[0-9]+-[0-9]+-[0-9]+-[0-9]+\\.log\$#", $filename, $re)) { if ($GLOBALS["VERBOSE"]) { echo "{$filename}, skip\n"; } continue; } if ($GLOBALS["VERBOSE"]) { echo "{$filename}, domain:{$re[1]}\n"; } $servername = $re[1]; $GLOBALS["nice"] = $nice; $aw = new awstats($servername); $aw->set_LogFile("{$NginxWorkLogsDir}/{$filename}"); $aw->set_LogType("W"); $aw->set_LogFormat(1); $config = $aw->buildconf(); $SOURCE_FILE_PATH = "{$NginxWorkLogsDir}/{$filename}"; $configlength = strlen($config); if ($configlength < 10) { if ($GLOBALS["VERBOSE"]) { echo "configuration file length failed {$configlength} bytes, aborting {$servername}\n"; } return; } @file_put_contents("/etc/awstats/awstats.{$servername}.conf", $config); @chmod("/etc/awstats/awstats.{$servername}.conf", 0644); $Lang = $aw->GET("Lang"); if ($Lang == null) { $Lang = "auto"; } @mkdir("/var/tmp/awstats/{$servername}", 0755, true); $t1 = time(); $cmd = "{$nice}{$perl} {$awstats_buildstaticpages} -config={$servername} -update -lang={$Lang} -awstatsprog={$awstats_bin} -dir=/var/tmp/awstats/{$servername} -LogFile=\"{$SOURCE_FILE_PATH}\" 2>&1"; if ($GLOBALS["VERBOSE"]) { echo $cmd . "\n"; } shell_exec($cmd); $filedate = date('Y-m-d H:i:s', filemtime($SOURCE_FILE_PATH)); if (!awstats_import_sql($servername)) { continue; } $sys->ROTATE_TOMYSQL($SOURCE_FILE_PATH, $filedate); } }
function rotate() { $unix = new unix(); $pidfile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".pid"; $TimeFile = "/etc/artica-postfix/pids/" . basename(__FILE__) . "." . __FUNCTION__ . ".time"; if ($GLOBALS["VERBOSE"]) { echo "TimeFile={$TimeFile}\n"; } $pid = $unix->get_pid_from_file($pidfile); if ($unix->process_exists($pid, basename(__FILE__))) { $time = $unix->PROCCESS_TIME_MIN($pid); if ($GLOBALS["OUTPUT"]) { echo "Starting......: " . date("H:i:s") . " [INIT]: {$GLOBALS["TITLENAME"]} Already Artica task running PID {$pid} since {$time}mn\n"; } return; } @file_put_contents($pidfile, getmypid()); $xtime = $unix->file_time_min($TimeFile); if (!$GLOBALS['VERBOSE']) { if ($xtime < 5) { return; } } @unlink($TimeFile); @file_put_contents($TimeFile, time()); $q = new mysql(); $echo = $unix->find_program("echo"); if (!$q->DATABASE_EXISTS("bwmng")) { $q->CREATE_DATABASE("bwmng"); } if (!$q->DATABASE_EXISTS("bwmng", true)) { return; } @copy("/home/artica/bwm-ng/interfaces.csv", "/home/artica/bwm-ng/interfaces.csv." . time()); @unlink("/home/artica/bwm-ng/interfaces.csv"); $files = $unix->DirFiles("/home/artica/bwm-ng"); if (system_is_overloaded(__FILE__)) { if ($GLOBALS["VERBOSE"]) { echo "OVERLOADED !!!!\n"; } return; } $array_eths = array(); $array_total = array(); while (list($filename, $notused) = each($files)) { if ($filename == "interfaces.csv") { continue; } $filepath = "/home/artica/bwm-ng/{$filename}"; $filetime = $unix->file_time_min($filepath); if ($filetime > 60) { @unlink($filepath); continue; } if ($GLOBALS["VERBOSE"]) { echo "Open {$filepath} {$filetime}mn\n"; } $row = 1; if (($handle = fopen($filepath, "r")) !== FALSE) { while (($data = fgetcsv($handle, 1000, ";")) !== FALSE) { $num = count($data); if ($num == 0) { continue; } $row++; $uniq_key = md5(serialize($data)); $Unix_Timestamp = $data[0]; if (!is_numeric($Unix_Timestamp)) { continue; } $Interface_Name = $data[1]; if (trim($Interface_Name) == null) { print_r($data); continue; } $BytesOut = intval($data[2]); $BytesIn = intval($data[3]); $BytesTotal = $data[4]; $PacketsOut = $data[5]; $PacketsIn = $data[6]; $PacketsTotal = $data[7]; if ($BytesOut == 0 && $BytesIn == 0) { continue; } $Date = date("Y-m-d H:i:s", $Unix_Timestamp); $tableT = date("YmdH", $Unix_Timestamp) . "_bwmrt"; if ($Interface_Name == "total") { $array_total[$tableT][] = "('{$uniq_key}','{$Date}','{$BytesOut}','{$BytesIn}')"; continue; } $table = date("YmdH", $Unix_Timestamp) . "_bwmrh"; $array_eths[$table][] = "('{$uniq_key}','{$Interface_Name}','{$Date}','{$BytesOut}','{$BytesIn}')"; } fclose($handle); if (system_is_overloaded(__FILE__)) { break; } } if ($GLOBALS["VERBOSE"]) { echo "{$filepath} CLOSED: " . count($array_eths) . " eths, " . count($array_total) . " total\n"; } if (array_to_interfaces($array_eths)) { if (array_to_total($array_total)) { if ($GLOBALS["VERBOSE"]) { echo "{$filepath} > DELETE\n"; } @unlink($filepath); } } else { @unlink($filepath); } $array_eths = array(); $array_total = array(); } restart(true); if (system_is_overloaded(__FILE__)) { if ($GLOBALS["VERBOSE"]) { echo "OVERLOADED !!!!\n"; } return; } build_days(); build_current_time(); }
function restore_scan_dir() { $content_dir = $_GET["restore-scandir"]; $unix = new unix(); writelogs_framework("Scanning {$content_dir}", __FUNCTION__, __FILE__, __LINE__); $files = $unix->DirFiles($content_dir); while (list($num, $val) = each($files)) { writelogs_framework("Found {$content_dir}/{$num}", __FUNCTION__, __FILE__, __LINE__); $ARRAY["{$content_dir}/{$num}"] = @filesize("{$content_dir}/{$num}"); } @file_put_contents("/etc/artica-postfix/settings/Daemons/InfluxDBRestoreArray", serialize($ARRAY)); }
function META_MASTER_UFDBTABLES($force = false) { $sock = new sockets(); $unix = new unix(); $EnableArticaMetaServer = intval($sock->GET_INFO("EnableArticaMetaServer")); if ($EnableArticaMetaServer == 0) { return; } $ArticaMetaStorage = $sock->GET_INFO("ArticaMetaStorage"); if ($ArticaMetaStorage == null) { $ArticaMetaStorage = "/home/artica-meta"; } @mkdir("{$ArticaMetaStorage}/nightlys", 0755, true); @mkdir("{$ArticaMetaStorage}/releases", 0755, true); @mkdir("{$ArticaMetaStorage}/webfiltering", 0755, true); $srcdir = $GLOBALS["WORKDIR_LOCAL"]; $destfile = "{$ArticaMetaStorage}/webfiltering/ufdbartica.tgz"; $destdir = "{$ArticaMetaStorage}/webfiltering/ufdbartica"; META_MASTER_UFDBTABLES_ufdbartica_txt(); $rm = $unix->find_program("rm"); if (is_file($destfile)) { $timeFile = "/etc/artica-postfix/pids/exec.artica-meta-server.php.checkufdb.time"; $time = $unix->file_time_min($timeFile); if ($time < 1440) { return; } } $tar = $unix->find_program("tar"); $split = $unix->find_program("split"); @unlink($destfile); chdir($srcdir); shell_exec("{$tar} czf {$destfile} *"); if (is_dir($destdir)) { shell_exec("{$rm} -rf {$destdir}"); } @mkdir("{$destdir}", 0755, true); chdir("{$destdir}"); system("cd {$destdir}"); @copy($destfile, "{$destdir}/ufdbartica.tgz"); echo "Split...ufdbartica.tgz\n"; shell_exec("{$split} -b 1m -d ufdbartica.tgz ufdbartica.tgz."); @unlink("{$destdir}/ufdbartica.tgz"); $files = $unix->DirFiles("{$destdir}"); while (list($num, $ligne) = each($files)) { $Splited_md5 = md5_file("{$destdir}/{$num}"); $ARRAY["{$num}"] = $Splited_md5; } @file_put_contents("{$destdir}/ufdbartica.txt", serialize($ARRAY)); @unlink("{$ArticaMetaStorage}/webfiltering/ufdbartica.txt"); @unlink("{$ArticaMetaStorage}/webfiltering/ARTICAUFDB_LAST_DOWNLOAD"); @copy("/etc/artica-postfix/ARTICAUFDB_LAST_DOWNLOAD", "{$ArticaMetaStorage}/webfiltering/ARTICAUFDB_LAST_DOWNLOAD"); @file_put_contents("{$ArticaMetaStorage}/webfiltering/ufdbartica.txt", time()); if (is_file("/etc/artica-postfix/artica-webfilter-db-index.txt")) { @unlink("{$ArticaMetaStorage}/webfiltering/index.txt"); @copy("/etc/artica-postfix/artica-webfilter-db-index.txt", "{$ArticaMetaStorage}/webfiltering/index.txt"); } if (is_file("/etc/artica-postfix/ufdbcounts.txt")) { @unlink("{$ArticaMetaStorage}/webfiltering/ufdbcounts.txt"); @copy("/etc/artica-postfix/ufdbcounts.txt", "{$ArticaMetaStorage}/webfiltering/ufdbcounts.txt"); } calculate_categorized_websites(true); artica_update_event(2, "Artica Webfiltering databases: Success update Artica Meta webfiltering repository", @implode("\n", $GLOBALS["EVENTS"]), __FILE__, __LINE__); meta_admin_mysql(2, "Success update webfiltering repository with Webfiltering databases", null, __FILE__, __LINE__); }