/**
 * Delete a temporary file, running /bin/rm as the mysql user via sudo.
 *
 * @param string $Filename Path of the temp file to remove.
 * @return void Failures are logged via NeobitsLogError(), never thrown.
 */
public function EraseTmpFile($Filename) {
    // escapeshellarg() quotes the path so spaces and shell metacharacters in
    // $Filename cannot break the command or inject additional shell syntax
    // (the original interpolated the raw filename straight into the command).
    $Command = "/usr/bin/sudo -u mysql /bin/rm -f " . escapeshellarg($Filename);
    $Output = array();
    exec($Command, $Output, $Retvar);
    if ($Retvar != 0) {
        NeobitsLogError("Error in command {$Command}");
    }
}
/**
 * Write $content to <tmp-dir>/$dir1/$dir2/$filename, creating the two
 * subdirectory levels as needed.
 *
 * @param string $content  Bytes to write.
 * @param string $dir1     First-level subdirectory under the configured tmp dir.
 * @param string $dir2     Second-level subdirectory.
 * @param string $filename Target file name.
 * @return string|false Directory path (with trailing slash) on success,
 *                      FALSE on any failure; errors go to NeobitsLogError().
 */
function save_tmp_file($content, $dir1, $dir2, $filename) {
    // NOTE(review): $dir1/$dir2/$filename are interpolated into the path as-is.
    // If any of them can carry user input, callers must sanitize against
    // "../" traversal — TODO confirm call sites.
    if (!file_exists(Config::getTmpDir())) {
        NeobitsLogError("no tmp dir!");
        return FALSE;
    }
    $csv_tmp = Config::getTmpDir() . "/" . $dir1;
    if (!file_exists($csv_tmp)) {
        // is_dir() re-check tolerates a concurrent process winning the
        // mkdir() race between the file_exists() test and the mkdir() call.
        if (!mkdir($csv_tmp) && !is_dir($csv_tmp)) {
            NeobitsLogError("cannot create csv dir {$csv_tmp}");
            return FALSE;
        }
    }
    if (!file_exists($csv_tmp . "/" . $dir2)) {
        if (!mkdir($csv_tmp . "/" . $dir2) && !is_dir($csv_tmp . "/" . $dir2)) {
            NeobitsLogError("cannot create csv dir {$csv_tmp}" . "/" . $dir2);
            return FALSE;
        }
    }
    $csv_dir = $csv_tmp . "/" . $dir2 . "/";
    $csv_file = $filename;
    // The original ignored fopen()/fwrite() failures and reported success
    // even when nothing was written; check each step explicitly.
    $fp = fopen($csv_dir . $csv_file, "w");
    if ($fp === FALSE) {
        NeobitsLogError("cannot open csv file {$csv_dir}{$csv_file}");
        return FALSE;
    }
    $written = fwrite($fp, $content);
    fclose($fp);
    if ($written === FALSE || $written < strlen($content)) {
        NeobitsLogError("short write to csv file {$csv_dir}{$csv_file}");
        return FALSE;
    }
    return $csv_dir;
}
/**
 * Build and populate the raw-feed MySQL staging table from the first feed
 * file in $this->FileSpecs, then (optionally) compute duplicate UPC values.
 *
 * Steps:
 *  1. Scan the CSV once to find the longest value per column.
 *  2. Drop/recreate $this->TableName with one varchar column per field,
 *     sized to the longest observed value.
 *  3. Bulk-load the file with LOAD DATA LOCAL INFILE.
 *  4. If $this->UPCFieldName is set, shell out to the csvfix tool to extract
 *     duplicate UPCs into $this->DuplicateUPCs.
 *
 * Fatal problems are reported via trigger_error(..., E_USER_ERROR).
 */
private function CreateMySQLRawTable() {
    // We need to know the largest field size for each input field so we can create the table accordingly
    // NOTE(review): fopen() result is not checked; a missing feed file would
    // make fgetcsv() fail — presumably upstream code guarantees the file
    // exists. TODO confirm.
    $input = fopen($this->FileSpecs[0]["FileName"], "r");
    $largestFieldSize = array();
    // Seed every column at width 1 so varchar(0) is never generated.
    for ($i = 0; $i < count($this->FileSpecs[0]["Fields"]); $i++) {
        $largestFieldSize[$i] = 1;
    }
    while ($fileRecord = fgetcsv($input)) {
        foreach ($fileRecord as $Key => $Value) {
            $fieldLength = strlen($Value);
            // A record with more columns than the spec declares indexes past
            // the seeded array — log the full context for diagnosis.
            if (empty($largestFieldSize[$Key])) {
                NeobitsLogError("WRONG FEED FIELD COUNT, key: {$Key}\nfileRecord: " . print_r($fileRecord, true) . "largestFieldSize: " . print_r($largestFieldSize, true) . "this FileSpecs[0] " . print_r($this->FileSpecs[0], true));
            }
            if ($fieldLength > $largestFieldSize[$Key]) {
                $largestFieldSize[$Key] = $fieldLength;
            }
        }
    }
    fclose($input);
    // Now, we can build our MySQL table
    $sqlStmt = "drop table if exists " . $this->TableName;
    $this->FeedsDbObj->Query($sqlStmt);
    $sqlStmt = "";
    // "Fields" appears to map column name => 1-based field number (hence the
    // $Value - 1 index into the 0-based size array) — TODO confirm.
    foreach ($this->FileSpecs[0]["Fields"] as $Name => $Value) {
        $fieldSize = $largestFieldSize[$Value - 1];
        $sqlStmt .= ", {$Name} varchar({$fieldSize})";
    }
    // substr(..., 2) drops the leading ", " from the accumulated column list.
    $sqlStmt = "create table {$this->TableName} (" . substr($sqlStmt, 2) . ")";
    $this->FeedsDbObj->Query($sqlStmt);
    // Load the data into the MySQL raw feed table
    $sqlStmt = "load data local infile '{$this->FileSpecs[0]['FileName']}' into table {$this->TableName} fields terminated by ',' optionally enclosed by '\"' escaped by '\\\\' lines terminated by '\\n'";
    $this->FeedsDbObj->Query($sqlStmt);
    // Create our list of duplicate UPC numbers for later use, if we can
    if (!is_null($this->UPCFieldName)) {
        if (isset($this->FileSpecs[0]["Fields"][$this->UPCFieldName])) {
            $upcFieldNumber = $this->FileSpecs[0]["Fields"][$this->UPCFieldName];
            // Temp output file is registered in FilesToPurge so it gets
            // cleaned up later.
            $uniqueName = tempnam(Config::getTmpDir(), "{$this->PartyID}-");
            $this->FilesToPurge[$uniqueName] = NULL;
            // Pipeline: sort the feed by the UPC column, keep only duplicated
            // values, then de-dupe that list into $uniqueName.
            // NOTE(review): file name and temp name are interpolated into the
            // shell command unescaped — safe only if both paths are fully
            // controlled by this process. TODO confirm.
            $command = Config::CSVFix . " order -f {$upcFieldNumber} {$this->FileSpecs[0]['FileName']} | " . Config::CSVFix . " unique -f 1 -d | " . Config::CSVFix . " unique -f 1 -o {$uniqueName}";
            $output = array();
            $status = 0;
            $error = exec($command, $output, $status);
            if ($status != 0) {
                trigger_error("Finding duplicate UPCs failed on command {$command}", E_USER_ERROR);
            }
            // One UPC per line; strip quoting and whitespace from each entry.
            $this->DuplicateUPCs = file($uniqueName);
            if ($this->DuplicateUPCs === FALSE) {
                trigger_error("Reading duplicate UPCs failed on file {$uniqueName}", E_USER_ERROR);
            }
            foreach ($this->DuplicateUPCs as $Key => $Value) {
                $this->DuplicateUPCs[$Key] = str_replace('"', "", trim($Value));
            }
        } else {
            trigger_error("Could not determine UPC field number", E_USER_ERROR);
        }
    }
}