/**
 * Recover phptimestore feeds from a source directory into a target directory:
 * copies each layer's data file and rebuilds the .tsdb meta file.
 *
 * @param array $engine_properties ['source' => string, 'target' => string] directory paths (with trailing slash)
 * @return array List of recovered feed ids
 */
function phptimestore_recover($engine_properties)
{
    $source = $engine_properties['source'];
    $target = $engine_properties['target'];

    // 1) Identify feeds in folder. scandir() output is assumed to begin with
    // "." and ".." (indexes 0 and 1), hence the loop starting at 2.
    $files = scandir($source);
    $feeds = array();
    for ($i = 2; $i < count($files); $i++) {
        $filename_parts = explode("_", $files[$i]);
        $feedid = (int) $filename_parts[0];
        if ($feedid > 0 && !in_array($feedid, $feeds)) {
            $feeds[] = $feedid;
        }
    }

    foreach ($feeds as $id) {
        $npoints = array();
        print $id . "\n";

        // 2) Copy every existing data layer. Filenames are the feed id
        // zero-padded to 16 chars, e.g. 0000000000000001_0_.dat.
        for ($l = 0; $l < 10; $l++) {
            $name = str_pad($id, 16, '0', STR_PAD_LEFT);
            if (file_exists($source . $name . "_" . $l . "_.dat")) {
                print "Copying data file layer {$id} {$l}\n";
                copy_data(array('sourcefile' => $source . $name . "_" . $l . "_.dat", 'targetfile' => $target . $name . "_" . $l . "_.dat", 'bytelength' => 4));
                // BUGFIX: clearstatcache() takes (bool $clear_realpath_cache,
                // string $filename). Passing the filename as the first (bool)
                // argument never cleared the cache for that file, so the
                // filesize() call below could see a stale size.
                clearstatcache(true, $target . $name . "_" . $l . "_.dat");
                $npoints[] = filesize($target . $name . "_" . $l . "_.dat") / 4.0;
            }
        }

        // 3) Read the source meta file (<16-zero-padded-id>.tsdb).
        $feedname = str_pad($id, 16, '0', STR_PAD_LEFT) . ".tsdb";
        $meta = new stdClass();
        $metafile = fopen($source . $feedname, 'rb');
        fseek($metafile, 8);
        $d = fread($metafile, 8);
        $tmp = unpack("h*", $d); // no longer used
        $tmp = unpack("I", fread($metafile, 4));
        $meta->nmetrics = $tmp[1];
        $tmp = unpack("I", fread($metafile, 4)); // no longer used
        // 8 bytes are consumed but only the first 4 are unpacked -- the start
        // field presumably occupies 8 bytes in the source format; the upper
        // half is discarded. NOTE(review): verify against the timestore spec.
        $tmp = unpack("I", fread($metafile, 8));
        $meta->start = $tmp[1];
        $tmp = unpack("I", fread($metafile, 4));
        $meta->interval = $tmp[1];
        fclose($metafile);

        // 4) Write a fresh meta file into the target directory.
        $metafile = fopen($target . $feedname, 'wb');
        fwrite($metafile, pack("I", 0));
        fwrite($metafile, pack("I", 0));
        fwrite($metafile, pack("h*", strrev(str_pad(0, 16, '0', STR_PAD_LEFT)))); // 8 zero bytes (id placeholder)
        fwrite($metafile, pack("I", $meta->nmetrics));
        fwrite($metafile, pack("I", 0)); // Legacy
        fwrite($metafile, pack("I", $meta->start));
        fwrite($metafile, pack("I", 0));
        fwrite($metafile, pack("I", $meta->interval));
        fclose($metafile);
    }
    return $feeds;
}
/**
 * Recover phpfina feeds: copy each feed's .dat file from the source to the
 * target directory and rebuild its .meta file from the source meta data.
 *
 * @param array $engine_properties ['source' => string, 'target' => string] directory paths (with trailing slash)
 * @return array Recovered feed ids
 */
function phpfina_recover($engine_properties)
{
    $source = $engine_properties['source'];
    $target = $engine_properties['target'];

    // Collect the unique positive feed ids from the source directory
    // listing, skipping the "." and ".." entries at indexes 0 and 1.
    $entries = scandir($source);
    $feeds = array();
    $total = count($entries);
    for ($n = 2; $n < $total; $n++) {
        $parts = explode(".", $entries[$n]);
        $fid = (int) $parts[0];
        if ($fid > 0 && !in_array($fid, $feeds)) {
            $feeds[] = $fid;
        }
    }

    foreach ($feeds as $id) {
        print "Copying feed {$id}: \n";

        // Copy the raw data file (4 bytes per value).
        copy_data(array('sourcefile' => $source . $id . ".dat", 'targetfile' => $target . $id . ".dat", 'bytelength' => 4));
        clearstatcache();

        // A non-integral point count indicates a truncated or corrupt copy.
        $npoints = filesize($target . $id . ".dat") / 4.0;
        if ((int) $npoints != $npoints) {
            print "filesize error\n";
        }

        // Read interval and start_time from the source meta file, skipping
        // the first 8 bytes of the header.
        $meta = new stdClass();
        $fh = fopen($source . $id . ".meta", 'rb');
        fseek($fh, 8);
        $word = unpack("I", fread($fh, 4));
        $meta->interval = $word[1];
        $word = unpack("I", fread($fh, 4));
        $meta->start_time = $word[1];
        fclose($fh);

        // Sanity-check the recovered values.
        if ($meta->start_time == 0) {
            print "Feed start time error!\n";
        }
        if ($meta->interval < 5) {
            print "Feed interval error!\n";
        }

        // Write the rebuilt meta file into the target directory.
        $fh = fopen($target . $id . ".meta", 'wb');
        fwrite($fh, pack("I", 0));
        fwrite($fh, pack("I", 0));
        fwrite($fh, pack("I", $meta->interval));
        fwrite($fh, pack("I", $meta->start_time));
        fclose($fh);
    }

    return $feeds;
}
/**
 * Recover phptimeseries feeds by copying each feed_<id>.MYD data file
 * (9 bytes per record) from the source directory to the target directory.
 *
 * @param array $engine_properties ['source' => string, 'target' => string] directory paths (with trailing slash)
 * @return array Recovered feed ids
 */
function phptimeseries_recover($engine_properties)
{
    $source = $engine_properties['source'];
    $target = $engine_properties['target'];

    // Filenames look like feed_<id>.MYD: strip the extension, then take the
    // numeric part after the underscore. Skip "." and ".." (indexes 0, 1).
    $entries = scandir($source);
    $feeds = array();
    $total = count($entries);
    for ($n = 2; $n < $total; $n++) {
        $base = explode(".", $entries[$n]);
        $pieces = explode("_", $base[0]);
        $fid = (int) $pieces[1];
        if ($fid > 0 && !in_array($fid, $feeds)) {
            $feeds[] = $fid;
        }
    }

    foreach ($feeds as $id) {
        print "Copying feed {$id}: \n";
        copy_data(array('sourcefile' => $source . "feed_" . $id . ".MYD", 'targetfile' => $target . "feed_" . $id . ".MYD", 'bytelength' => 9));
    }

    return $feeds;
}
// fetch meta data and build phpfina meta file.
// update mysql and redis entries
// NOTE(review): flat migration script that copies each phpfiwa feed's
// layer-0 data file into phpfina format. This chunk is truncated mid-loop;
// the loop closings and the mysql/redis update continue beyond the visible
// source, so the fragment is documented but left otherwise untouched.
require "common.php";
define('EMONCMS_EXEC', 1);
// assumes emoncms is installed at this path -- TODO confirm
chdir("/var/www/emoncms");
require "process_settings.php";
// Credentials presumably come from process_settings.php; connection errors
// are silenced with @ and never checked -- NOTE(review): verify.
$mysqli = @new mysqli($server, $username, $password, $database);
// engine=6 is presumably the phpfiwa engine id -- verify against the
// emoncms engine enumeration.
$result = $mysqli->query("SELECT * FROM feeds WHERE `engine`=6");
$sourcedir = "/var/lib/phpfiwa/";
$targetdir = "/var/lib/phpfina/";
while ($row = $result->fetch_array()) {
    print $row['id'] . " " . $row['name'] . "\n";
    $id = $row['id'];
    // Only layer 0 is migrated (<id>_0.dat -> <id>.dat).
    $sourcefile = $sourcedir . $id . "_0.dat";
    $targetfile = $targetdir . $id . ".dat";
    copy_data(array('sourcefile' => $sourcefile, 'targetfile' => $targetfile, 'bytelength' => 4));
    // Parse the phpfiwa meta file. Unlike phpfiwa_recover() elsewhere in
    // this file, no fseek() is done here, so the first 4-byte word is read
    // and discarded instead.
    $meta = new stdClass();
    $metafile = fopen($sourcedir . $id . ".meta", 'rb');
    $tmp = unpack("I", fread($metafile, 4)); // first word, value discarded
    $tmp = unpack("I", fread($metafile, 4));
    $meta->start_time = $tmp[1];
    $tmp = unpack("I", fread($metafile, 4));
    $meta->nlayers = $tmp[1];
    $meta->npoints = array();
    // Skip nlayers per-layer entries (values discarded).
    for ($i = 0; $i < $meta->nlayers; $i++) {
        $tmp = unpack("I", fread($metafile, 4));
    }
    // Read the per-layer interval values.
    $meta->interval = array();
    for ($i = 0; $i < $meta->nlayers; $i++) {
        $tmp = unpack("I", fread($metafile, 4));
        $meta->interval[$i] = $tmp[1];
<?php
require 'alias_functions.php';

// NOTE(review): this script reads $_SESSION but never calls session_start();
// presumably alias_functions.php starts the session -- verify.

// Verify the passkey obtained from the confirmation link.
// Guard against a missing GET parameter to avoid an undefined-index notice.
$passkey = isset($_GET['passkey']) ? $_GET['passkey'] : '';
$result = match_confirm_code($passkey);

// Check whether the user has already been verified.
if ($result) {
    $email = $_SESSION['verified_email'];
    $verified = check_email("registered_users.txt", $email);
}

// Retrieve data from the temporary users file if the passkey verified.
if ($result) {
    if ($verified) {
        // BUGFIX: corrected "Your have" -> "You have" in the user-facing message.
        echo "You have already requested for an alias. You will receive your alias shortly.";
    } else {
        // SECURITY: escape the email before echoing it into HTML to prevent
        // reflected XSS via a crafted session value.
        echo "Your E-mail address (" . htmlspecialchars($email, ENT_QUOTES, 'UTF-8') . ") has been verified. You will receive your alias shortly.";
        copy_data($email);
    }
} else {
    echo "Wrong Confirmation Code.";
}
/**
 * Copy a column and return the new column_id of the copy.
 *
 * Runs inside a transaction. Optionally copies the associated code group
 * (re-pointing the copy at the equivalent level of the new group) and/or the
 * column's data rows.
 *
 * @param int $column_id The column_id to copy
 * @param bool $copy_data Whether to copy the data that exists in the column or not
 * @param bool $copy_code_group Whether to copy the associated code group (if any) or not
 * @param bool|int $column_group_id The column group to assign to
 * @param bool|int $column_multi_group_id The column multi group to assign to
 * @return int The new column_id
 */
function copy_column($column_id, $copy_data = false, $copy_code_group = false, $column_group_id = 'NULL', $column_multi_group_id = 'NULL')
{
    global $db;
    // SECURITY: every value interpolated into SQL below must be a plain
    // integer or the literal string NULL -- coerce them up front so a
    // crafted argument cannot inject SQL.
    $column_id = (int) $column_id;
    if ($column_group_id !== 'NULL') {
        $column_group_id = (int) $column_group_id;
    }
    if ($column_multi_group_id !== 'NULL') {
        $column_multi_group_id = (int) $column_multi_group_id;
    }
    $db->StartTrans();
    // Default: the INSERT..SELECT re-uses the source row's own
    // code_level_id column (interpolated as a column name, not a value).
    $code_level_id = 'code_level_id';
    if ($copy_code_group) {
        // Look up the source column's code group and level.
        $sql = "SELECT c.code_level_id, cl.code_group_id, cl.level\r\n\t\t\tFROM `column` as c, code_level as cl\r\n\t\t\tWHERE c.column_id = '{$column_id}'\r\n\t\t\tAND cl.code_level_id = c.code_level_id";
        $rs = $db->GetRow($sql);
        if (!empty($rs)) {
            $cgi = (int) $rs['code_group_id'];
            // NOTE(review): level is assumed to be numeric in code_level;
            // it is interpolated quoted, as in the original.
            $level = $rs['level'];
            // Duplicate the code group, then find the copied level that
            // matches the source column's level.
            $ncgi = (int) copy_code_group($cgi);
            $sql = "SELECT code_level_id\r\n\t\t\t\tFROM code_level\r\n\t\t\t\tWHERE level = '{$level}'\r\n\t\t\t\tAND code_group_id = '{$ncgi}'";
            $cl = $db->GetRow($sql);
            if (!empty($cl)) {
                $code_level_id = (int) $cl['code_level_id'];
            }
        }
    }
    // Duplicate the column row. Name/description are prefixed with
    // translated "COPY" markers; startpos, in_input and sortorder reset to 0.
    $sql = "INSERT INTO `column` (`column_id`, `data_id`, `column_group_id`, `column_multi_group_id`, `name`, `description`, `startpos`, `width`, `type`, `in_input`, `sortorder`, `code_level_id`, `reference_column_group_id`)\r\n\t\tSELECT column_id, data_id, {$column_group_id}, {$column_multi_group_id}, CONCAT('" . T_("COPY") . "', name), CONCAT('" . T_("Copy of: ") . "', description), 0, width, type, 0, 0, {$code_level_id}, reference_column_group_id\r\n\t\tFROM `column`\r\n\t\tWHERE column_id = '{$column_id}'";
    $db->Execute($sql);
    $ncolumn_id = $db->Insert_ID();
    if ($copy_data) {
        copy_data($column_id, $ncolumn_id);
    }
    $db->CompleteTrans();
    return $ncolumn_id;
}
/**
 * Recover phpfiwa feeds: copy every data layer file from the source to the
 * target directory and rebuild the .meta file from the source meta data.
 *
 * @param array $engine_properties ['source' => string, 'target' => string] directory paths (with trailing slash)
 * @return array Recovered feed ids
 */
function phpfiwa_recover($engine_properties)
{
    $source = $engine_properties['source'];
    $target = $engine_properties['target'];

    // 1) Identify feeds in folder. scandir() output is assumed to begin with
    // "." and ".." (indexes 0 and 1), hence the loop starting at 2.
    $files = scandir($source);
    $feeds = array();
    for ($i = 2; $i < count($files); $i++) {
        $filename_parts = explode(".", $files[$i]);
        $feedid = (int) $filename_parts[0];
        if ($feedid > 0 && !in_array($feedid, $feeds)) {
            $feeds[] = $feedid;
        }
    }

    foreach ($feeds as $id) {
        $npoints = array();
        print $id . "\n";

        // 2) Copy every existing data layer file (<id>_<layer>.dat).
        for ($l = 0; $l < 10; $l++) {
            if (file_exists($source . $id . "_" . $l . ".dat")) {
                print "Copying data file layer {$id} {$l}\n";
                copy_data(array('sourcefile' => $source . $id . "_" . $l . ".dat", 'targetfile' => $target . $id . "_" . $l . ".dat", 'bytelength' => 4));
                // BUGFIX: clearstatcache() takes (bool $clear_realpath_cache,
                // string $filename). Passing the filename as the first (bool)
                // argument never cleared the cache for that file, so the
                // filesize() call below could see a stale size.
                clearstatcache(true, $target . $id . "_" . $l . ".dat");
                $npoints[] = filesize($target . $id . "_" . $l . ".dat") / 4.0;
            }
        }

        // A non-integral point count indicates a truncated or corrupt copy.
        foreach ($npoints as $np) {
            if ((int) $np != $np) {
                print "filesize error\n";
            }
        }

        // 3) Read the source meta: after a 4-byte skip come start_time and
        // nlayers, then nlayers words that are skipped, then nlayers
        // per-layer interval values.
        $meta = new stdClass();
        $metafile = fopen($source . $id . ".meta", 'rb');
        fseek($metafile, 4);
        $tmp = unpack("I", fread($metafile, 4));
        $meta->start_time = $tmp[1];
        $tmp = unpack("I", fread($metafile, 4));
        $meta->nlayers = $tmp[1];
        for ($i = 0; $i < $meta->nlayers; $i++) {
            $tmp = unpack("I", fread($metafile, 4)); // skipped entry, value discarded
        }
        $meta->interval = array();
        for ($i = 0; $i < $meta->nlayers; $i++) {
            $tmp = unpack("I", fread($metafile, 4));
            $meta->interval[$i] = $tmp[1];
        }
        fclose($metafile);

        // Sanity checks on the recovered meta values.
        if ($meta->start_time == 0) {
            print "Feed start time error!\n";
        }
        if ($meta->interval[0] < 5) {
            print "Feed interval error!" . $meta->interval[0] . "\n";
        }

        // 4) Write the target meta file: zero word, start_time, nlayers,
        // nlayers zero placeholders, then the interval values.
        $metafile = fopen($target . $id . ".meta", 'wb');
        fwrite($metafile, pack("I", 0));
        fwrite($metafile, pack("I", $meta->start_time));
        fwrite($metafile, pack("I", $meta->nlayers));
        foreach ($meta->interval as $n) {
            fwrite($metafile, pack("I", 0));
        } // Legacy
        foreach ($meta->interval as $d) {
            fwrite($metafile, pack("I", $d));
        }
        fclose($metafile);
    }
    return $feeds;
}