protected function loadFromDirectory(AbstractMetaModel $metamodel, array $filters = NULL, $path, $namespace, $level = 0) {
    // Recursively scans $path for *.json metadata files and loads each one into
    // the meta model. Sub-folder names extend the current name space.
    // Returns the number of files processed in this directory and all sub-directories.
    $processedFileCount = 0;

    $directoryHandle = opendir($path);
    if ($directoryHandle !== FALSE) {
        $logIndent = str_pad('', $level * 4);

        while (($entryName = readdir($directoryHandle)) !== FALSE) {
            if (is_dir($path . DIRECTORY_SEPARATOR . $entryName)) {
                // descend into sub-directories, skipping '.', '..' and hidden folders
                if ($entryName[0] != '.') {
                    $subFolder = DIRECTORY_SEPARATOR . $entryName;
                    // the sub-folder name extends the current name space (or starts one)
                    $nestedNameSpace = isset($namespace) ? NameSpaceHelper::addNameSpace($namespace, $entryName) : $entryName;
                    LogHelper::log_debug(t("{$logIndent}Scanning '@folderName' ...", array('@folderName' => $subFolder)));
                    $processedFileCount += $this->loadFromDirectory($metamodel, $filters, $path . $subFolder, $nestedNameSpace, $level + 1);
                }
            }
            elseif ($this->fileNameEndsWithJson($entryName)) {
                LogHelper::log_debug(t("{$logIndent}Processing '@filename' ...", array('@filename' => $entryName)));
                $this->loadFromFile($metamodel, $filters, $namespace, $path . DIRECTORY_SEPARATOR, $entryName);
                $processedFileCount++;
            }
        }

        closedir($directoryHandle);
    }

    return $processedFileCount;
}
protected function loadFromDirectory(AbstractMetaModel $metamodel, array $filters = NULL, $path, $namespace, $level = 0) {
    // Recursively loads *.json metadata files from $path. The name space here is
    // fixed by the first folder level: once defined it is inherited unchanged by
    // all sub-folders regardless of depth.
    // Returns the number of files processed.
    $loadedFileCount = 0;

    $dirHandle = opendir($path);
    if ($dirHandle !== FALSE) {
        $indentation = str_pad('', $level * 4);

        while (($entry = readdir($dirHandle)) !== FALSE) {
            if (is_dir($path . DIRECTORY_SEPARATOR . $entry)) {
                // skip '.', '..' and hidden directories
                if ($entry[0] != '.') {
                    $subFolder = DIRECTORY_SEPARATOR . $entry;
                    // once name space is defined we do not change it:
                    // it is the same for all sub-folders regardless of depth
                    $effectiveNameSpace = isset($namespace) ? $namespace : $entry;
                    LogHelper::log_debug(t("{$indentation}Scanning '@folderName' ...", array('@folderName' => $subFolder)));
                    $loadedFileCount += $this->loadFromDirectory($metamodel, $filters, $path . $subFolder, $effectiveNameSpace, $level + 1);
                }
            }
            elseif ($this->fileNameEndsWithJson($entry)) {
                LogHelper::log_debug(t("{$indentation}Processing '@filename' ...", array('@filename' => $entry)));
                $this->loadFromFile($metamodel, $filters, $namespace, $path . DIRECTORY_SEPARATOR, $entry);
                $loadedFileCount++;
            }
        }

        closedir($dirHandle);
    }

    return $loadedFileCount;
}
public function executeDatasetUpdateOperations(DataControllerCallContext $callcontext, DatasetMetaData $dataset, array $operations) {
    // Wraps the requested storage operations into a single update request
    // and delegates execution to the data source structure handler.
    $updateRequest = new UpdateDatasetStorageRequest($dataset->name);
    $updateRequest->addOperations($operations);
    LogHelper::log_debug($updateRequest);

    $this->datasourceStructureHandler->updateDatasetStorage($callcontext, $updateRequest);
}
public function dropDimensionStorage(DataControllerCallContext $callcontext, DataSourceStructureHandler $datasourceStructureHandler, DatasetMetaData $logicalDataset, $columnName) {
    // Drops the lookup dataset that backs this dimension column, then lets the
    // parent implementation finish the remaining clean-up.
    $lookupDatasetName = StarSchemaNamingConvention::getAttributeRelatedName($logicalDataset->name, $columnName);

    $dropRequest = new DatasetStorageRequest($lookupDatasetName);
    LogHelper::log_debug($dropRequest);
    $datasourceStructureHandler->dropDatasetStorage($callcontext, $dropRequest);

    parent::dropDimensionStorage($callcontext, $datasourceStructureHandler, $logicalDataset, $columnName);
}
public function getValues(array $names) {
    // Delegates the bulk look-up to the parent cache implementation and logs
    // which entries were requested, which were actually retrieved, and how
    // long the retrieval took (with a cache hit/miss summary).
    $timeStart = microtime(TRUE);
    $values = parent::getValues($names);

    $requestedCount = count($names);
    $retrievedCount = count($values);

    LogHelper::log_debug(t(
        '[@cacheType] Requested entries: @entryNames',
        array('@cacheType' => $this->getCacheType(), '@entryNames' => ArrayHelper::printArray(array_values($names), ', ', TRUE, FALSE))));

    // 'ALL' when every requested entry came back, otherwise list what we got
    $retrievedEntryNames = ($requestedCount == $retrievedCount)
        ? 'ALL'
        : (isset($values) ? ArrayHelper::printArray(array_keys($values), ', ', TRUE, FALSE) : 'NONE');
    LogHelper::log_debug(t(
        '[@cacheType] Retrieved entries: @entryNames',
        array('@cacheType' => $this->getCacheType(), '@entryNames' => $retrievedEntryNames)));

    if (!isset($values)) {
        $hitFlag = 'Cache NOT hit';
    }
    elseif ($requestedCount == $retrievedCount) {
        $hitFlag = 'Cache HIT';
    }
    else {
        $hitFlag = "Cache hit for ONLY {$retrievedCount} entries out of {$requestedCount}";
    }
    LogHelper::log_info(t(
        '[@cacheType] Execution time for retrieving @entryCount entries is !executionTime. @successFlag',
        array(
            '@cacheType' => $this->getCacheType(),
            '@entryCount' => $requestedCount,
            '!executionTime' => ExecutionPerformanceHelper::formatExecutionTime($timeStart),
            '@successFlag' => $hitFlag)));

    return $values;
}
public function dropDatabase($datasourceName) {
    // Normalizes the data source name, verifies the caller's structure
    // permission, and delegates the drop to the matching structure handler.
    $datasourceName = StringHelper::trim($datasourceName);
    $this->checkDataSourceStructurePermission($datasourceName);

    $callcontext = $this->prepareCallContext();

    $dropRequest = new DropDatabaseRequest($datasourceName);
    LogHelper::log_debug($dropRequest);

    $structureHandler = $this->getDataSourceStructureHandler($datasourceName);
    $structureHandler->dropDatabase($callcontext, $dropRequest);
}
/**
 * Reads a field value from a Drupal node object.
 *
 * @param object $node node whose field is read; must have 'language' set
 * @param string $fieldName machine name of the field on the node
 * @param int|null $index delta of the value to read; NULL returns ALL values
 *   as an array keyed by delta
 * @param string $storageSuffixName storage column inside the field item
 *   (typically 'value')
 * @param bool $required when TRUE, throws if no value was found
 * @return mixed trimmed value, array of trimmed values (when $index is NULL),
 *   or NULL when absent and not required
 * @throws IllegalArgumentException when the node has no language, or the
 *   field is required but unset
 */
function get_node_field_value($node, $fieldName, $index = 0, $storageSuffixName = 'value', $required = FALSE) {
    $value = NULL;

    // field values are keyed by language; without it we cannot address them
    if (!isset($node->language)) {
        LogHelper::log_debug($node);
        throw new IllegalArgumentException(t(
            '%fieldName@index field is not accessible because language is not set for the node: %nodeId',
            array(
                '%nodeId' => $node->nid,
                '%fieldName' => $fieldName,
                '@index' => ((!isset($index) || ($index == 0)) ? '' : t('[%index]', array('%index' => $index))))));
    }

    $fieldValue = isset($node->$fieldName) ? $node->$fieldName : NULL;
    if (isset($fieldValue[$node->language])) {
        $fieldLocalizedValues = $fieldValue[$node->language];
        if (isset($index)) {
            // accessing individual value
            if (isset($fieldLocalizedValues[$index][$storageSuffixName])) {
                $value = StringHelper::trim($fieldLocalizedValues[$index][$storageSuffixName]);
            }
        }
        else {
            // we need to return an array of values
            foreach ($fieldLocalizedValues as $i => $fieldLocalizedValue) {
                $v = isset($fieldLocalizedValue[$storageSuffixName]) ? $fieldLocalizedValue[$storageSuffixName] : NULL;
                // FIX: the original condition was inverted (!isset($v)), which
                // "trimmed" only NULLs and left present values untrimmed.
                // Trim values that are set, mirroring the single-value branch above.
                if (isset($v)) {
                    $v = StringHelper::trim($v);
                }
                $value[$i] = $v;
            }
        }
    }

    if ($required && !isset($value)) {
        LogHelper::log_debug($node);
        throw new IllegalArgumentException(t(
            '%fieldName@index field has not been set for the node: %nodeId',
            array(
                '%nodeId' => $node->nid,
                '%fieldName' => $fieldName,
                '@index' => ((!isset($index) || ($index == 0)) ? '' : t('[%index]', array('%index' => $index))))));
    }

    return $value;
}
public function detectSourceType(DatasetMetaData $dataset) {
    // Returns the explicitly configured source type when present; otherwise
    // derives it from the 'source' property: a value without spaces is treated
    // as a table name, anything else as an SQL statement.
    if (isset($dataset->sourceType)) {
        return $dataset->sourceType;
    }

    if (isset($dataset->source)) {
        $trimmedSource = trim($dataset->source);
        // a single token (no space) can only be a table name
        if (strpos($trimmedSource, ' ') === FALSE) {
            return TableDatasetSourceTypeHandler::SOURCE_TYPE;
        }
        return SQLDatasetSourceTypeHandler::SOURCE_TYPE;
    }

    LogHelper::log_debug($dataset);
    throw new IllegalArgumentException(t(
        'Could not detect type of source for the dataset: %datasetName',
        array('%datasetName' => $dataset->publicName)));
}
protected function castValueImpl($value) {
    // Casts $value to an array: arrays pass through unchanged, objects are
    // converted to their public properties, and anything else is treated as
    // a JSON document to be decoded into an associative array.
    if (is_array($value)) {
        return $value;
    }

    if (is_object($value)) {
        return get_object_vars($value);
    }

    $decoded = json_decode($value, TRUE);
    if (!isset($decoded)) {
        LogHelper::log_debug($value);
        throw new IllegalArgumentException(t('Incorrect value of type ARRAY'));
    }

    return $decoded;
}
protected function castValueImpl($value) {
    // Casts $value to an object: objects pass through unchanged, arrays are
    // cast to stdClass, and anything else is treated as a JSON document to
    // be decoded into an object.
    if (is_object($value)) {
        return $value;
    }

    if (is_array($value)) {
        return (object) $value;
    }

    $decoded = json_decode($value);
    if (!isset($decoded)) {
        LogHelper::log_debug($value);
        throw new IllegalArgumentException(t('Incorrect value of type OBJECT'));
    }

    return $decoded;
}
/**
 * Creates the physical storage for a new dataset and walks registered
 * observers through the BEFORE/AFTER stages of dataset and column creation.
 *
 * Note the observer protocol: each notification is delivered as two full
 * passes over the observer list (all BEFORE callbacks, then all AFTER
 * callbacks) rather than BEFORE+AFTER per observer.
 *
 * @param DataControllerCallContext $callcontext
 * @param DatasetMetaData $newDataset
 * @param DatasetStorageObserver[] $observers
 */
protected function createDatasetStorage(DataControllerCallContext $callcontext, DatasetMetaData $newDataset, array $observers = NULL) {
    // creating physical storage for the dataset
    $request = new DatasetStorageRequest($newDataset->name);
    LogHelper::log_debug($request);
    $this->datasourceStructureHandler->createDatasetStorage($callcontext, $request);
    if (isset($observers)) {
        // notifying that storage for the dataset had been created
        foreach ($observers as $observer) {
            $observer->registerDataset($callcontext, $newDataset, DatasetStorageObserver::STAGE__BEFORE);
        }
        foreach ($observers as $observer) {
            $observer->registerDataset($callcontext, $newDataset, DatasetStorageObserver::STAGE__AFTER);
        }
        // notifying that columns have already been created
        foreach ($newDataset->getColumns(FALSE) as $column) {
            foreach ($observers as $observer) {
                $observer->registerColumn($callcontext, $newDataset, $column->name, DatasetStorageObserver::STAGE__BEFORE);
            }
            foreach ($observers as $observer) {
                $observer->registerColumn($callcontext, $newDataset, $column->name, DatasetStorageObserver::STAGE__AFTER);
            }
            // columns which are used but have no storage yet get storage created
            // by the observers; the persistence flag is flipped BETWEEN the
            // BEFORE and AFTER passes — AFTER observers see STORAGE_CREATED
            if ($column->isUsed() && ($column->persistence == ColumnMetaData::PERSISTENCE__NO_STORAGE)) {
                foreach ($observers as $observer) {
                    $observer->createColumnStorage(
                        $callcontext, $this->datasourceStructureHandler, $newDataset, $column->name, DatasetStorageObserver::STAGE__BEFORE);
                }
                $column->persistence = ColumnMetaData::PERSISTENCE__STORAGE_CREATED;
                foreach ($observers as $observer) {
                    $observer->createColumnStorage(
                        $callcontext, $this->datasourceStructureHandler, $newDataset, $column->name, DatasetStorageObserver::STAGE__AFTER);
                }
            }
        }
    }
}
/**
 * Truncates the physical storage of a dataset and of every column that has
 * dedicated storage, notifying observers before any data is removed.
 *
 * @param DataControllerCallContext $callcontext
 * @param DatasetMetaData $dataset
 * @param DatasetStorageObserver[] $observers
 */
protected function truncateDatasetStorage(DataControllerCallContext $callcontext, DatasetMetaData $dataset, array $observers = NULL) {
    // let observers react before any data is removed
    if (isset($observers)) {
        foreach ($observers as $observer) {
            $observer->truncateDatasetStorage($callcontext, $dataset);
        }
    }

    // truncating physical storage of the dataset itself
    $truncateRequest = new DatasetStorageRequest($dataset->name);
    LogHelper::log_debug($truncateRequest);
    $this->datasourceStructureHandler->truncateDatasetStorage($callcontext, $truncateRequest);

    // truncating physical storage of the dataset columns (observer-driven)
    if (isset($observers)) {
        foreach ($dataset->getColumns(FALSE) as $column) {
            // only columns with dedicated physical storage need truncation
            if ($column->persistence != ColumnMetaData::PERSISTENCE__STORAGE_CREATED) {
                continue;
            }
            foreach ($observers as $observer) {
                $observer->truncateColumnStorage($callcontext, $this->datasourceStructureHandler, $dataset, $column->name);
            }
        }
    }
}
protected function executeCubeCountRequest(DataQueryControllerCubeRequest $request) {
    // Translates the controller-level cube request into a count request and
    // executes it through the query handler registered for the cube.
    $callcontext = $this->prepareCallContext();

    $requestPreparer = new DataSourceCubeQueryRequestPreparer();
    $countRequest = $requestPreparer->prepareCubeCountRequest($request);
    $this->prepareCubeRequestMetaData($countRequest);

    // the formatter (explicit or default) may adjust the prepared request
    $formatter = isset($request->resultFormatter) ? $request->resultFormatter : $this->getDefaultResultFormatter();
    $formatter->adjustCubeCountRequest($callcontext, $countRequest);

    LogHelper::log_debug($request);

    $cubeName = $countRequest->getCubeName();
    $queryHandler = $this->getDataSourceQueryHandlerByCubeName($cubeName);

    return $queryHandler->countCubeRecords($callcontext, $countRequest, $formatter);
}
/**
 * Registers a cube definition in the meta model.
 *
 * Validation rules enforced here:
 *  - the cube must have a name (also checked as a valid alias);
 *  - a temporary cube silently replaces an existing cube with the same name,
 *    while a duplicate permanent cube is an error;
 *  - at most one permanent cube may exist per facts dataset.
 *
 * Also normalizes dimensions: a dimension without an explicit attribute
 * column defaults to a column named after the dimension itself.
 *
 * @param CubeMetaData $cube cube to register
 * @throws IllegalArgumentException on any of the validation failures above
 */
public function registerCube(CubeMetaData $cube) {
    $this->checkAssemblingStarted();
    if (!isset($cube->name)) {
        LogHelper::log_debug($cube);
        throw new IllegalArgumentException(t('Cube name has not been defined'));
    }
    $cubeName = $cube->name;
    NameSpaceHelper::checkAlias($cubeName);
    $existingCube = $this->findCube($cubeName, TRUE);
    if (isset($existingCube)) {
        if ($cube->isTemporary()) {
            // a temporary cube replaces the previously registered one
            $this->unregisterCube($cubeName);
        }
        else {
            LogHelper::log_debug($existingCube);
            LogHelper::log_debug($cube);
            throw new IllegalArgumentException(t(
                'Cube with name %cubeName has already been defined',
                array('%cubeName' => $cube->publicName)));
        }
    }
    if (!$cube->isTemporary()) {
        // we support only one cube per dataset
        $cube2 = $this->findCubeByDatasetName($cube->factsDatasetName);
        if (isset($cube2)) {
            LogHelper::log_debug($cube2);
            LogHelper::log_debug($cube);
            throw new IllegalArgumentException(t(
                'Found several cubes for %datasetName dataset: [%cubeName1, %cubeName2]',
                array('%datasetName' => $cube->factsDatasetName, '%cubeName1' => $cube->publicName, '%cubeName2' => $cube2->publicName)));
        }
    }
    // fixing cube properties:
    // dimensions without an explicit attribute column default to their own name
    if (isset($cube->dimensions)) {
        foreach ($cube->dimensions as $dimension) {
            if (!isset($dimension->attributeColumnName)) {
                $dimension->attributeColumnName = $dimension->name;
            }
        }
    }
    $this->cubes[$cubeName] = $cube;
}
protected function errorDimensionNotFound($dimensionName) {
    // Logs the names of all used dimensions for diagnostics, then fails with
    // an exception naming the missing dimension and this cube.
    $usedDimensionNames = NULL;
    if (isset($this->dimensions)) {
        foreach ($this->dimensions as $dimension) {
            if (!$dimension->isUsed()) {
                continue;
            }
            $usedDimensionNames[] = $dimension->name;
        }
    }

    LogHelper::log_debug(t('Available dimensions:'));
    LogHelper::log_debug($usedDimensionNames);

    throw new IllegalArgumentException(t(
        '%dimensionName dimension is not registered in %cubeName cube',
        array('%dimensionName' => $dimensionName, '%cubeName' => $this->publicName)));
}
/**
 * Sends an HTTP request to the database server over a pooled keep-alive
 * socket and returns a transmission response object.
 *
 * Returned object fields:
 *  - status: TRUE when a parsable response body was received, FALSE otherwise
 *  - error:  translated error message (set only when status is FALSE)
 *  - result: decoded response body (set only when status is TRUE)
 *
 * @param DataSourceMetaData $datasource connection parameters (host, port, credentials)
 * @param object $serverRequest request descriptor: url, and optionally
 *   body, method, authorizationRequired
 * @return object transmission response as described above
 * @throws Exception when the server responds with an unparsable body
 */
protected function transmitData(DataSourceMetaData $datasource, $serverRequest) {
    // FIX: the response holder was previously used without initialization,
    // relying on implicit stdClass creation from an undefined variable
    // (a warning historically; an error in PHP 8).
    $transmissionResponse = new stdClass();
    $transmissionResponse->status = FALSE;

    $requestURL = $serverRequest->url;
    $requestBody = isset($serverRequest->body) ? $serverRequest->body : NULL;
    $lengthRequestBody = isset($requestBody) ? strlen($requestBody) : 0;

    $httpVersion = '1.1';
    // default method depends on the presence of a request body
    $httpMethod = isset($serverRequest->method) ? $serverRequest->method : (isset($requestBody) ? 'POST' : 'GET');

    LogHelper::log_info(t('Request: @httpMethod @requestURL', array('@httpMethod' => $httpMethod, '@requestURL' => $requestURL)));
    if (isset($requestBody)) {
        LogHelper::log_debug($requestBody);
    }

    $timeStart = microtime(TRUE);

    // checking / opening connection to the database: sockets are pooled per server key
    $serverKey = $this->prepareServerKey($datasource);
    $socket = isset($this->sockets[$serverKey]) ? $this->sockets[$serverKey] : NULL;
    if (!isset($socket)) {
        $socket = fsockopen($datasource->host, $datasource->port, $errno, $errstr);
        if ($socket === FALSE) {
            $transmissionResponse->error = t(
                "Could not connect to the database server: @errorCode-'@errorMessage'",
                array('@errorCode' => $errno, '@errorMessage' => $errstr));
            return $transmissionResponse;
        }
        else {
            $this->sockets[$serverKey] = $socket;
        }
    }

    // preparing a request
    $request = "$httpMethod $requestURL HTTP/$httpVersion\r\nHost: $datasource->host\r\n";
    if (isset($serverRequest->authorizationRequired) && $serverRequest->authorizationRequired) {
        $request .= 'Authorization: Basic ' . base64_encode($datasource->username . ':' . $datasource->password) . "\r\n";
    }
    $request .= "Connection: Keep-Alive\r\n";
    if (isset($requestBody)) {
        $request .= "Content-Type: application/json\r\n";
        $request .= 'Content-Length: ' . $lengthRequestBody . "\r\n\r\n";
        $request .= $requestBody;
    }

    // sending the request
    $bytesWritten = fwrite($socket, $request . "\r\n");
    if ($bytesWritten === FALSE) {
        $transmissionResponse->error = t('Could not submit the request to the database server');
        return $transmissionResponse;
    }

    // processing response headers
    $headers = NULL;
    while (TRUE) {
        $header = fgets($socket);
        if ($header === FALSE) {
            $transmissionResponse->error = t('Could not read the response header');
            return $transmissionResponse;
        }
        else {
            $header = trim($header);
            if (strlen($header) === 0) {
                if (isset($headers)) {
                    // it is a delimiter between response header and body
                    break;
                }
                else {
                    // empty lines before response header. Should not happen
                }
            }
            // processing only headers with values (':' is default delimiter)
            if (strpos($header, ':') !== FALSE) {
                list($key, $value) = explode(':', $header, 2);
                $headers[strtolower(trim($key))] = trim($value);
            }
        }
    }

    // reading response body (if any)
    $responseBody = '';
    if (isset($headers['transfer-encoding']) && ($headers['transfer-encoding'] == 'chunked')) {
        // chunked response support
        do {
            $bytesToRead = 0;
            $line = fgets($socket);
            if ($line === FALSE) {
                $transmissionResponse->error = t('Could not read the response chunk size');
                return $transmissionResponse;
            }
            $line = rtrim($line);
            // chunk header: hexadecimal size, optionally followed by ';extension'
            if (preg_match('(^([0-9a-f]+)(?:;.*)?$)', $line, $match)) {
                $bytesToRead = hexdec($match[1]);
                $bytesLeft = $bytesToRead;
                while ($bytesLeft > 0) {
                    // '+ 2' lets the chunk's trailing CRLF be consumed along with the data
                    $read = fread($socket, $bytesLeft + 2);
                    if ($read === FALSE) {
                        $transmissionResponse->error = t(
                            'Could not read the whole response. @bytesLeft bytes left',
                            array('@bytesLeft' => $bytesLeft));
                        return $transmissionResponse;
                    }
                    else {
                        $responseBody .= $read;
                        $bytesLeft -= strlen($read);
                    }
                }
            }
        } while ($bytesToRead > 0);
    }
    else {
        // Non-chunked response support
        $bytesToRead = (isset($headers['content-length']) ? (int) $headers['content-length'] : NULL);
        while (!isset($bytesToRead) || ($bytesToRead > 0)) {
            $read = isset($bytesToRead) ? fgets($socket, $bytesToRead + 1) : fgets($socket);
            if ($read === FALSE) {
                if (isset($bytesToRead) && ($bytesToRead > 0)) {
                    // FIX: the message previously contained the literal text
                    // '$bytesToRead' (single-quoted) instead of the '@bytesToRead'
                    // placeholder, so the remaining byte count never rendered
                    $transmissionResponse->error = t(
                        'Could not read the whole response. @bytesToRead bytes left',
                        array('@bytesToRead' => $bytesToRead));
                    return $transmissionResponse;
                }
                else {
                    break;
                }
            }
            else {
                $responseBody .= $read;
                if (isset($bytesToRead)) {
                    $bytesToRead -= strlen($read);
                }
            }
        }
    }

    LogHelper::log_info(t('Database execution time: !executionTime', array('!executionTime' => ExecutionPerformanceHelper::formatExecutionTime($timeStart))));

    $responseBodyObject = $this->converterJson2PHP->convert($responseBody);
    if (!isset($responseBodyObject)) {
        $error = t('Database server did not provide parsable response body');
        LogHelper::log_error($error);
        LogHelper::log_error(t('Response headers:'));
        LogHelper::log_error($headers);
        LogHelper::log_error(t('Response body:'));
        LogHelper::log_error($responseBody);
        throw new Exception($error);
    }

    $transmissionResponse->status = TRUE;
    $transmissionResponse->result = $responseBodyObject;

    return $transmissionResponse;
}
/**
 * Menu callback: exports a dashboard node as a PDF.
 *
 * Builds a wkhtmltopdf-style command line pointing back at the dashboard's
 * own render URL, runs it via passthru(), and streams the generated PDF to
 * the browser as a file download. Never returns on success (exit()); returns
 * a Drupal menu status constant on failure.
 *
 * @param object $dashboardNode dashboard node being exported
 * @return int MENU_NOT_FOUND or MENU_ACCESS_DENIED on failure (exits on success)
 */
function gd_dashboard_page_export ( $dashboardNode ) {
    // check to see if export is allowed
    if ( !gd_dashboard_get_setting('export') ) {
        LogHelper::log_notice('Exporting dashboards disabled globally.');
        return MENU_NOT_FOUND;
    }

    if ( !gd_dashboard_access_view($dashboardNode) ) {
        return MENU_ACCESS_DENIED;
    }

    $exporterPath = gd_dashboard_get_setting('export_tool_path');

    $arguments = array();
    // NOTE(review): argument values are escaped below with escapeshellcmd(),
    // not escapeshellarg(); combined with the manual single-quote wrapping here
    // the quotes themselves get escaped. The title and cookie values are
    // user-influenced input reaching the shell — verify the quoting against the
    // actual export-tool invocation; escapeshellarg() per value would be safer.
    $arguments[] = array ( 'name' => '--title', 'value' => '\''.$dashboardNode->title.'\'' );

    // choose the callback URL variant matching the caller's access mode
    $callbackURL = GOVDASH_HOST;
    if ( user_is_logged_in() || isset($_GET['oauth_consumer_key']) ) {
        $callbackURL .= '/dashboards';
    } else if ( gd_dashboard_is_public($dashboardNode) ) {
        $callbackURL .= '/public/dashboards';
    } else {
        LogHelper::log_notice('Dashboard was requested anonymously but is not public. Requested: '.$dashboardNode->nid);
        return MENU_NOT_FOUND;
    }

    // forward the current query string (minus Drupal's 'q') plus export flags
    $params = $_GET;
    unset($params['q']);
    $params['export-view'] = true;
    $params['id'] = $dashboardNode->nid;
    // NOTE(review): passing null as http_build_query()'s numeric_prefix is
    // deprecated as of PHP 8.1 — '' is the equivalent non-deprecated value
    $callbackURL .= '?'.http_build_query($params,null,'&');

    // replay the browser's cookies so the export tool renders as this session
    // (skipped for OAuth-signed requests)
    if ( !isset($_GET['oauth_consumer_key']) ) {
        foreach ($_COOKIE as $key => $value) {
            $arguments[] = array( 'name' => '--cookie', 'value' => '\'' . $key . '\' \'' . $value . '\'' );
        }
    }

    $arguments[] = array ( 'name' => '--user-style-sheet', 'value' => dirname(__FILE__) . '/css/export.css' );
    $arguments[] = array ( 'name' => '--javascript-delay', 'value' => '5000' );
    $arguments[] = array ( 'name' => '--page-size', 'value' => 'Letter' );
    $arguments[] = array ( 'name' => '--header-html', 'value' => DRUPAL_ROOT.gd_dashboard_get_setting('export_header_path') );
    $arguments[] = array ( 'name' => '--footer-html', 'value' => DRUPAL_ROOT.gd_dashboard_get_setting('export_footer_path') );
    $arguments[] = '--print-media-type';

    // assemble the final command line
    $command = $exporterPath;
    foreach ( $arguments as $arg ) {
        if ( is_array($arg) ) {
            $command .= ' ' . $arg['name'] . ' ' . escapeshellcmd($arg['value']);
        } else {
            $command .= ' '.escapeshellcmd($arg);
        }
    }

    // url input
    $command .= ' ' . escapeshellcmd($callbackURL);

    // pdf output
    $command .= ' -';

    // stderr getting logged or tossed to black hole by default
    $command .= ' 2>'.escapeshellcmd(gd_dashboard_get_setting('export_log_path'));

    // keep oauth token out of logs.
    if ( !isset($_GET['oauth_consumer_key']) ) {
        LogHelper::log_debug($command);
    }

    // generate filename title
    $filename = str_replace(' ','_',trim($dashboardNode->title));
    $filename .= '__'.date('Ymd');

    // buffer the tool's stdout so Content-Length can be computed afterwards
    ob_start();
    header("Pragma: public");
    header("Expires: 0");
    header("Cache-Control: must-revalidate, post-check=0, pre-check=0");
    header("Cache-Control: private",false);
    header('Content-Description: File Transfer');
    // non-CGI SAPIs get the stacked force-download content types
    if (strpos(php_sapi_name(), 'cgi') === false) {
        header('Content-Type: application/force-download');
        header('Content-Type: application/octet-stream', false);
        header('Content-Type: application/download', false);
        header('Content-Type: application/pdf', false);
    } else {
        header('Content-Type: application/pdf');
    }
    header('Content-Disposition: attachment; filename="' . $filename . '.pdf"');
    header('Content-Transfer-Encoding: binary');
    passthru($command,$error);
    if ( !isset($_SERVER['HTTP_ACCEPT_ENCODING']) || empty($_SERVER['HTTP_ACCEPT_ENCODING']) ) {
        // the content length may vary if the server is using compression
        header('Content-Length: '.ob_get_length());
    }
    if ( $error ) {
        // discard buffered partial output and report failure
        header_remove();
        ob_get_clean();
        gd_error_handler('Dashboard export failed to execute wkhtmltopdf successfully.');
        return MENU_NOT_FOUND;
    }
    ob_end_flush();
    exit();
}
/**
 * Script to send notification emails for completed data-feed jobs.
 *
 * Bootstraps Drupal, fetches the pending-email queue and mails each recipient
 * a download notification; successfully mailed jobs are marked as notified.
 */
require_once DRUPAL_ROOT . '/includes/bootstrap.inc';
drupal_bootstrap(DRUPAL_BOOTSTRAP_FULL);

try {
    $completedJobRequests = QueueUtil::getPendingEmailsInfo();
    LogHelper::log_debug($completedJobRequests);
} catch (Exception $claimException) {
    // FIX: the original message interpolated {$logId}, which is undefined in
    // this scope and would itself trigger a notice
    LogHelper::log_debug('Error while fetching job from queue: ' . $claimException);
    return;
}

foreach ($completedJobRequests as $request) {
    LogHelper::log_info($request);
    try {
        global $conf;
        $dir = variable_get('file_public_path', 'sites/default/files') . '/' . $conf['check_book']['data_feeds']['output_file_dir'];
        $file = $dir . '/' . $request['filename'];
        // download links expire one week after the job's end time
        $params = array(
            "download_url" => $file,
            "download_url_compressed" => $file . '.zip',
            "expiration_date" => date('d-M-Y', $request['end_time'] + 3600 * 24 * 7),
            "contact_email" => $request['contact_email'],
            "tracking_num" => $request['token']);
        LogHelper::log_debug($params);
        $response = drupal_mail('checkbook_datafeeds', "download_notification", $request['contact_email'], null, $params);
        LogHelper::log_debug($response);
        if ($response['result']) {
            QueueUtil::updateJobRequestEmailStatus($request['rid']);
        }
    } catch (Exception $claimException) {
        // FIX: the original concatenated the $params array directly into the
        // message (rendering as 'Array' or raising an error); render it
        // explicitly and guard against $params being unset when the exception
        // occurred before it was built
        LogHelper::log_debug('Error while Sending Email Notification: ' . $claimException
            . ' params: ' . print_r(isset($params) ? $params : NULL, TRUE));
        return;
    }
}
protected function executeCubeCountRequest(DataQueryControllerCubeRequest $request) {
    // Translates the controller-level cube request into a count request,
    // consults the query cache when the data source supports it, and falls
    // back to executing the request chain on a cache miss.
    $environment_metamodel = data_controller_get_environment_metamodel();
    $metamodel = data_controller_get_metamodel();

    $callcontext = $this->prepareCallContext();

    $preparer = new DataSourceCubeQueryRequestPreparer();
    $countRequest = $preparer->prepareCountRequest($request);
    $this->prepareCubeRequestMetaData($countRequest);

    // an explicitly provided formatter may adjust the prepared request
    if (isset($request->resultFormatter)) {
        $request->resultFormatter->adjustCubeCountRequest($callcontext, $countRequest);
    }
    LogHelper::log_debug($countRequest);

    $dataset = $metamodel->getDataset($request->datasetName);
    $datasource = $environment_metamodel->getDataSource($dataset->datasourceName);

    // cache look-up first (when supported by this data source)
    $cacheSupported = $this->isCacheSupported($datasource);
    $cacheProxy = $cacheSupported ? new DataQueryControllerCacheProxy($countRequest) : NULL;
    list($data, $cacheHit) = isset($cacheProxy) ? $cacheProxy->getCachedResult() : array(NULL, FALSE);

    if (!$cacheHit) {
        $data = RequestChainFactory::getInstance()->initializeChain()->countCubeRecords(
            $this->lookupDataSourceHandler($datasource->type), $callcontext, $countRequest);
        if ($cacheSupported) {
            $cacheProxy->cacheResult($data);
        }
    }

    return $data;
}
/**
 * Post-authentication hook for ADFS single sign-on.
 *
 * Maps the authenticated identity's ADFS attributes onto a Drupal account:
 * resolves roles and a default data source from group claims, enforces
 * required group membership, then updates the matching account (or creates
 * one when auto-create is enabled) and finalizes the Drupal login.
 *
 * Unauthorized/blocked cases redirect to 'forbidden' via drupal_goto(),
 * which presumably ends the request — TODO confirm it never returns here.
 */
public function postAuthenticate() {
    if ($this->disabled) return;
    $attributes = $this->getIdentity();
    \LogHelper::log_debug('ADFS Attributes');
    \LogHelper::log_debug($attributes);
    if ( $attributes ) {
        global $user;
        $roles = array();
        $r = user_roles(true);
        // look up an existing account by the ADFS e-mail claim
        $db_user = db_select('users')
            ->fields('users', array('uid'))
            ->condition('name', db_like($attributes[ADFS_EMAIL_SCHEMA][0]), 'LIKE')
            ->range(0, 1)
            ->execute()
            ->fetchField();
        if (isset($attributes[ADFS_GROUP_SCHEMA])) {
            $groups = $attributes[ADFS_GROUP_SCHEMA];
            // the first group with a data source mapping wins
            $defaultDatasource = null;
            foreach ($groups as $group) {
                if (isset($this->roleMappings[$group])) {
                    foreach ($this->roleMappings[$group] as $role) {
                        $roles[array_search($role, $r)] = TRUE;
                    }
                }
                if (!isset($defaultDatasource) && isset($this->dsMappings[$group])) {
                    $defaultDatasource = $this->dsMappings[$group][0];
                }
            }
            // membership in every required group is mandatory
            foreach ($this->requiredGroups as $requiredGroup) {
                if (!in_array($requiredGroup, $groups)) {
                    drupal_goto('forbidden');
                }
            }
        }
        // NOTE(review): $defaultDatasource is only initialized inside the group
        // branch above; the isset() guards below keep this safe when the
        // identity carries no group claims
        if (isset($defaultDatasource)) {
            // translate the mapped public name into the internal data source name
            $datasources = gd_datasource_get_all();
            foreach ($datasources as $ds) {
                if ($ds->publicName == $defaultDatasource) {
                    $defaultDatasource = $ds->name;
                    break;
                }
            }
        }
        // Load user if it exists
        if ((bool) $db_user) {
            $u = user_load($db_user);
            // If user is blocked
            if ($u->status == 0) {
                drupal_goto('forbidden');
            }
            // preserve roles the account already holds
            foreach ($u->roles as $role) {
                if (in_array($role, $r)) {
                    $roles[array_search($role, $r)] = TRUE;
                }
            }
            // Keep user roles the same. Sync the first and last name from ADFS
            $info = array(
                'roles' => $roles,
                'mail' => $attributes[ADFS_EMAIL_SCHEMA][0],
                'field_gd_user_first_name' => array( LANGUAGE_NONE => array( 0 => array( 'value' => $attributes[ADFS_COMMON_NAME_SCHEMA][0] ) ) ),
                'field_gd_user_last_name' => array( LANGUAGE_NONE => array( 0 => array( 'value' => $attributes[ADFS_SURNAME_SCHEMA][0] ) ) ) );
            $user = user_save($u, $info);
        } else if ($this->autoCreate) {
            // Always give new users the authenticated user role
            $roles[array_search('authenticated user', $r)] = TRUE;
            $info = array(
                'name' => $attributes[ADFS_EMAIL_SCHEMA][0],
                'pass' => user_password(),
                'mail' => $attributes[ADFS_EMAIL_SCHEMA][0],
                'status' => 1,
                'roles' => $roles,
                'field_gd_user_first_name' => array( LANGUAGE_NONE => array( 0 => array( 'value' => $attributes[ADFS_COMMON_NAME_SCHEMA][0] ) ) ),
                'field_gd_user_last_name' => array( LANGUAGE_NONE => array( 0 => array( 'value' => $attributes[ADFS_SURNAME_SCHEMA][0] ) ) ) );
            $user = user_save(drupal_anonymous_user(), $info);
        } else {
            // account unknown and auto-create disabled: reject the login
            $message = t('Unauthorized account: @email', array('@email' => $attributes[ADFS_EMAIL_SCHEMA][0]));
            \LogHelper::log_error($message);
            drupal_goto('forbidden');
        }
        // $info is set on both surviving branches above (the else branch
        // redirects away); finalize the Drupal session for the saved account
        user_login_finalize($info);
        if (isset($defaultDatasource)) {
            gd_datasource_set_active($defaultDatasource);
        }
    }
}
public function getColumnTable($columnName, $visibleOnly = FALSE) {
    // Resolves the table that owns $columnName. Unlike findColumnTable(),
    // failing to resolve the column is treated as an error here.
    $resolvedTable = $this->findColumnTable($columnName, $visibleOnly);
    if (isset($resolvedTable)) {
        return $resolvedTable;
    }

    LogHelper::log_debug($this);
    throw new IllegalArgumentException(t('Could not identify %columnName column in this statement', array('%columnName' => $columnName)));
}
public function executeQuery(DataControllerCallContext $callcontext, DataSourceMetaData $datasource, $sql) {
    // Executes the SQL statement through the callback proxy and returns all
    // fetched records, logging the record count (and the records at debug level).
    $callbackProxy = new __SQLDataSourceHandler__QueryExecutionCallbackProxy($this->prepareQueryStatementExecutionCallbackInstance());
    $records = $this->executeQueryStatement($callcontext, $datasource, $sql, $callbackProxy);

    $recordCount = count($records);
    LogHelper::log_info(t('Processed @count record(s)', array('@count' => $recordCount)));
    LogHelper::log_debug($records);

    return $records;
}
final public function formatRecords(array $records = NULL) {
    // Reformats each record column-by-column via the subclass hooks
    // (start / setRecordColumnValue / registerRecord / finish).
    // Returns NULL when the input is NULL.
    if (!isset($records)) {
        return NULL;
    }

    LogHelper::log_debug(t("Using '!formatterClassName' to reformat result", array('!formatterClassName' => get_class($this))));

    $reformattedRecords = NULL;
    $this->start();
    foreach ($records as $sourceRecord) {
        $reformattedRecord = NULL;
        foreach ($sourceRecord as $columnName => $columnValue) {
            $this->setRecordColumnValue($reformattedRecord, $columnName, $columnValue);
        }
        // registerRecord() may consume the record itself; otherwise append it
        if (!$this->registerRecord($reformattedRecords, $reformattedRecord)) {
            $reformattedRecords[] = $reformattedRecord;
        }
    }
    $this->finish($reformattedRecords);

    return $reformattedRecords;
}
/**
 * Walks the meta model's references to build the chain of nested
 * ReferenceLink objects needed to resolve $referencePath starting from
 * $link's dataset. Recurses into itself for any unprocessed tail of the
 * path. Registers visited reference names in $callcontext to avoid
 * re-processing (see GOVDB-1313 note inside).
 *
 * @param ReferenceCallContext $callcontext carries reference/dataset name stacks
 * @param MetaModel $metamodel source of datasets and references
 * @param ReferenceLink $link link for the dataset the path starts from
 * @param string $referencePath reference path to resolve
 * @throws UnsupportedOperationException when the path lacks a dataset name
 */
protected function prepareDatasetSequence(ReferenceCallContext $callcontext, MetaModel $metamodel, ReferenceLink $link, $referencePath) {
    // Last two parts of the path are treated as <dataset>.<column>.
    $referenceParts = ReferencePathHelper::splitReference($referencePath);
    $parentColumnName = array_pop($referenceParts);
    $parentDatasetName = array_pop($referenceParts);
    $referencedColumnName = $referencedDatasetName = NULL;
    $leftReferencePartCount = count($referenceParts);
    $nestedReferencePath = $referencePath;
    if ($link->dataset->name == $parentDatasetName) {
        // dataset is there and it is link's dataset
        if ($leftReferencePartCount == 0) {
            return;
        }
        // assembling new reference path
        $nestedReferencePath = ReferencePathHelper::assembleReferenceParts($referenceParts);
        $referencedColumnName = array_pop($referenceParts);
        $referencedDatasetName = array_pop($referenceParts);
        if (!isset($referencedDatasetName)) {
            throw new UnsupportedOperationException(t(
                'Dataset name is not set in the reference path: %referencePath',
                array('%referencePath' => $referencePath)));
        }
    } elseif (isset($parentDatasetName)) {
        // Path's dataset is not the link's dataset: treat the tail as the target.
        $referencedColumnName = $parentColumnName;
        $referencedDatasetName = $parentDatasetName;
        $parentColumnName = $parentDatasetName = NULL;
    } else {
        if ($leftReferencePartCount > 0) {
            throw new UnsupportedOperationException(t(
                'Dataset name is not set in the reference path: %referencePath',
                array('%referencePath' => $referencePath)));
        }
        // it means that we just point to column in link's dataset
        return;
    }
    // checking if there any references which could be used to find required dataset
    $references = $metamodel->findReferencesByDatasetName($link->dataset->name);
    if (!isset($references)) {
        return;
    }
    // maximum number of columns in direct references
    $directReferencePointColumnCount = NULL;
    // checking if there is any reference which directly link to referenced dataset
    $parentReferencePointIndex4References = $directReferencePointIndex4References = $transitionalReferencePointIndexes4References = NULL;
    foreach ($references as $reference) {
        $referencePointColumnCount = $reference->getPointColumnCount();
        // checking if the reference can be used to link with other datasets
        $parentReferencePointIndex4Reference = $directReferencePointIndex4Reference = $transitionalReferencePointIndexes4Reference = NULL;
        for ($referencePointColumnIndex = 0; $referencePointColumnIndex < $referencePointColumnCount; $referencePointColumnIndex++) {
            $parentReferencePointIndex = $directReferencePointIndex = $transitionalReferencePointIndexes = NULL;
            // Classify each reference point as parent, direct, or transitional
            // with respect to the current column index.
            foreach ($reference->points as $referencePointIndex => $referencePoint) {
                $datasetName = $referencePoint->columns[$referencePointColumnIndex]->datasetName;
                if (($link->dataset->name == $datasetName)
                        && (!isset($parentColumnName) || ($parentColumnName == $referencePoint->columns[$referencePointColumnIndex]->columnName))) {
                    if (isset($parentReferencePointIndex)) {
                        // Found several possible ways to start a join from the referring dataset.
                        // That happens because we do not have parent column name and possible join is ambiguous
                        // We cannot use this way to proceed
                        continue 3;
                    } else {
                        $parentReferencePointIndex = $referencePointIndex;
                    }
                } elseif ($datasetName == $referencedDatasetName) {
                    if (isset($directReferencePointIndex)) {
                        // found several possible ways to join with the referenced dataset
                        continue 3;
                    } else {
                        $directReferencePointIndex[$referencePointIndex] = TRUE;
                    }
                } else {
                    $transitionalReferencePointIndexes[$referencePointIndex] = FALSE;
                }
            }
            // this reference cannot be used because none of the reference points linked with parent dataset
            if (!isset($parentReferencePointIndex)) {
                continue 2;
            }
            if (isset($directReferencePointIndex)) {
                // if we have direct reference we do not care about indirect ones
                $transitionalReferencePointIndexes = NULL;
            } else {
                // there is no direct or indirect ways. This reference is useless :)
                if (!isset($transitionalReferencePointIndexes)) {
                    continue 2;
                }
            }
            $parentReferencePointIndex4Reference[$referencePointColumnIndex] = $parentReferencePointIndex;
            if (isset($directReferencePointIndex)) {
                $directReferencePointIndex4Reference[$referencePointColumnIndex] = $directReferencePointIndex;
            }
            if (isset($transitionalReferencePointIndexes)) {
                $transitionalReferencePointIndexes4Reference[$referencePointColumnIndex] = $transitionalReferencePointIndexes;
            }
        }
        // we support only direct references between datasets
        // in this case we have direct reference based on some columns only. Rest columns are connected indirectly
        if (isset($directReferencePointIndex4Reference) && isset($transitionalReferencePointIndexes4Reference)) {
            continue;
        }
        $parentReferencePointIndex4References[$reference->name] = $parentReferencePointIndex4Reference;
        if (isset($directReferencePointIndex4Reference)) {
            $directReferencePointIndex4References[$reference->name] = $directReferencePointIndex4Reference;
            $directReferencePointColumnCount = MathHelper::max($directReferencePointColumnCount, $referencePointColumnCount);
        }
        if (isset($transitionalReferencePointIndexes4Reference)) {
            $transitionalReferencePointIndexes4References[$reference->name] = $transitionalReferencePointIndexes4Reference;
        }
    }
    // we could use none of the selected references
    if (!isset($parentReferencePointIndex4References)) {
        return;
    }
    // removing all useless direct and indirect references if there is a direct way
    if (isset($directReferencePointColumnCount)) {
        foreach ($parentReferencePointIndex4References as $referenceName => $parentReferencePointIndex4Reference) {
            $referencePointColumnCount = count($parentReferencePointIndex4Reference);
            if (isset($directReferencePointIndex4References[$referenceName])) {
                // we preserve only direct ways with maximum number of columns
                if ($referencePointColumnCount == $directReferencePointColumnCount) {
                    continue;
                }
            } else {
                // we preserve only indirect ways with more columns than in direct way
                if ($referencePointColumnCount > $directReferencePointColumnCount) {
                    continue;
                }
            }
            unset($parentReferencePointIndex4References[$referenceName]);
            unset($directReferencePointIndex4References[$referenceName]);
            unset($transitionalReferencePointIndexes4References[$referenceName]);
        }
    }
    // Build (or reuse) nested links for each surviving reference.
    foreach ($parentReferencePointIndex4References as $referenceName => $parentReferencePointIndex4Reference) {
        $reference = $references[ArrayHelper::search($references, 'name', $referenceName)];
        $referencePointColumnCount = $reference->getPointColumnCount();
        $referencePointIndexes4Reference = isset($directReferencePointIndex4References[$referenceName])
            ? $directReferencePointIndex4References[$referenceName] : NULL;
        $isDirectReference = isset($referencePointIndexes4Reference);
        if (!$isDirectReference) {
            // 01/09/2014 reference can be reused only for direct references
            if (isset($callcontext->referenceNameStack[$referenceName])) {
                continue;
            }
            $referencePointIndexes4Reference = isset($transitionalReferencePointIndexes4References[$referenceName])
                ? $transitionalReferencePointIndexes4References[$referenceName] : NULL;
        }
        // registering the reference in a stack to avoid excessive calls
        // registration was moved here because we could have date[->month->quarter->year] and year columns in one dataset
        // if references related to date and year are registered before we start to process individual references
        // we will end up with nested links for date->month->quarter->year which do not contain a reference to year
        // which leads to GOVDB-1313 issue
        $callcontext->referenceNameStack[$reference->name] = TRUE;
        // preparing dataset names for each reference point
        $referencePointDatasetNames = NULL;
        for ($referencePointColumnIndex = 0; $referencePointColumnIndex < $referencePointColumnCount; $referencePointColumnIndex++) {
            foreach ($referencePointIndexes4Reference[$referencePointColumnIndex] as $referencePointIndex => $directReferencePointFlag) {
                $referencePointColumn = $reference->points[$referencePointIndex]->columns[$referencePointColumnIndex];
                $datasetName = $referencePointColumn->datasetName;
                // it is expected that dataset name is the same for all columns in a reference point
                if (isset($referencePointDatasetNames[$referencePointIndex])) {
                    if ($referencePointDatasetNames[$referencePointIndex] != $datasetName) {
                        // Dataset name is not the same for all columns for the reference point
                        $referencePointDatasetNames[$referencePointIndex] = FALSE;
                    }
                } else {
                    $referencePointDatasetNames[$referencePointIndex] = $datasetName;
                }
            }
        }
        // removing all reference points which we cannot support now
        foreach ($referencePointDatasetNames as $referencePointIndex => $datasetName) {
            if ($datasetName === FALSE) {
                unset($referencePointDatasetNames[$referencePointIndex]);
            }
        }
        // if nothing left there is not need to proceed
        if (count($referencePointDatasetNames) == 0) {
            continue;
        }
        // preparing list of parent column names
        $parentColumnNames = NULL;
        for ($referencePointColumnIndex = 0; $referencePointColumnIndex < $referencePointColumnCount; $referencePointColumnIndex++) {
            $parentReferencePointIndex = $parentReferencePointIndex4Reference[$referencePointColumnIndex];
            $parentReferencePointColumnName = $reference->points[$parentReferencePointIndex]->columns[$referencePointColumnIndex]->columnName;
            $parentColumnNames[$referencePointColumnIndex] = $parentReferencePointColumnName;
        }
        // Indirect paths get a cloned context so their dataset stack does not
        // leak into sibling branches.
        $referenceCallContext = $isDirectReference ? $callcontext : (clone $callcontext);
        // adding all indirect datasets in stack to prevent recursive calls
        if (!$isDirectReference) {
            foreach ($referencePointDatasetNames as $referencePointIndex => $datasetName) {
                if (isset($referenceCallContext->datasetNameStack[$datasetName])) {
                    unset($referencePointDatasetNames[$referencePointIndex]);
                } else {
                    $referenceCallContext->datasetNameStack[$datasetName] = TRUE;
                }
            }
        }
        foreach ($referencePointDatasetNames as $referencePointIndex => $datasetName) {
            // looking for existing link
            $referencedLink = $link->findNestedLinkByDatasetNameAndParentColumnNames($datasetName, $parentColumnNames);
            if (!isset($referencedLink)) {
                $dataset = $metamodel->getDataset($datasetName);
                $referencedLink = new ReferenceLink($dataset);
                foreach ($parentColumnNames as $referencePointColumnIndex => $parentReferencePointColumnName) {
                    $referencePointColumn = $reference->points[$referencePointIndex]->columns[$referencePointColumnIndex];
                    $referencedLink->linkColumnWithParent($referencePointColumnIndex, $parentReferencePointColumnName, $referencePointColumn->columnName);
                }
                $link->registerNestedLink($referencedLink);
            }
            ArrayHelper::addUniqueValue($referencedLink->referenceNames, $referenceName);
            // marking the link as required for the branch so it will not be deleted by the optimizer later
            if ($isDirectReference) {
                $referencedLink->required = TRUE;
            }
            // because this reference path is not processed completely we need to continue scanning this branch
            if (isset($nestedReferencePath)) {
                LogHelper::log_debug(t(
                    'Checking sub-path[linkId: @linkId; stackSize: @stackSize]: @referencePath',
                    array('@linkId' => $referencedLink->linkId, '@stackSize' => count($callcontext->referenceNameStack), '@referencePath' => $referencePath)));
                $referencePointCallContext = clone $referenceCallContext;
                $this->prepareDatasetSequence($referencePointCallContext, $metamodel, $referencedLink, $nestedReferencePath);
            }
        }
    }
}
/**
 * Runs a Hive SQL statement through WebHCat (Templeton), polls the job
 * until completion, reads the result file from WebHDFS, parses it as
 * tab-delimited data with a header row, and hands the resulting
 * HiveStatement to the callback.
 *
 * @param DataSourceHandler $handler data source handler (unused here directly)
 * @param DataControllerCallContext $callcontext current call context
 * @param mixed $connection connection holding the datasource metadata
 * @param string $sql Hive SQL statement
 * @param __SQLDataSourceHandler__AbstractQueryCallbackProxy $callbackInstance
 *   receives the parsed statement
 * @return mixed whatever the callback returns
 * @throws IllegalStateException on missing job id or non-zero exit value
 * @throws UnsupportedOperationException for unmapped column data types
 */
public function execute(
        DataSourceHandler $handler, DataControllerCallContext $callcontext, $connection, $sql,
        __SQLDataSourceHandler__AbstractQueryCallbackProxy $callbackInstance) {
    $datasource = $connection->datasource;
    // Unique per-call folder where Templeton stores job output.
    $outputFolder = uniqid('dp', TRUE);
    $webhcatProxy = new WebHCat_CURLProxy($datasource);
    // executing the sql statement
    $statementHandler = $webhcatProxy->initializeHandler(
        'POST', '/templeton/v1/hive',
        array('execute' => $sql, 'statusdir' => $outputFolder, 'define=hive.cli.print.header' => 'true'));
    $executor = new SingleCURLHandlerExecutor($statementHandler);
    $responseJob = $executor->execute();
    // ... and preparing job identifier
    if (!isset($responseJob['id'])) {
        LogHelper::log_debug($responseJob);
        throw new IllegalStateException(t(
            'Job ID is not available: %error',
            array('%error' => (isset($responseJob['info']['stderr']) ? $responseJob['info']['stderr']: 'error message is not provided'))));
    }
    $jobId = $responseJob['id'];
    // waiting for the execution job to complete
    // NOTE(review): this polling loop has no timeout — a job stuck in the
    // queue blocks the request indefinitely; consider a bounded retry count.
    $jobHandler = $webhcatProxy->initializeHandler('GET', "/templeton/v1/queue/$jobId");
    $executor = new SingleCURLHandlerExecutor($jobHandler);
    $responseJobStatus = NULL;
    while (TRUE) {
        $responseJobStatus = $executor->execute();
        if ($responseJobStatus['completed'] == 'done') {
            break;
        }
        // poll once per second
        usleep(1000000);
    }
    if ($responseJobStatus['exitValue'] != 0) {
        throw new IllegalStateException(t(
            '%resourceId execution completed unsuccessfully: %errorCode',
            array('%resourceId' => $jobHandler->resourceId, '%errorCode' => $responseJobStatus['exitValue'])));
    }
    $webhdfsProxy = new WebHDFS_CURLProxy($datasource);
    // reading result of the execution
    // NOTE(review): this read loop is also unbounded — it assumes stdout
    // eventually appears; confirm that is guaranteed after a 'done' status.
    $data = NULL;
    while (TRUE) {
        $resultHandler = $webhdfsProxy->initializeHandler(
            'GET', "/webhdfs/v1/user/{$datasource->username}/$outputFolder/stdout", array('op' => 'OPEN'));
        $executor = new SingleCURLHandlerExecutor($resultHandler);
        $data = $executor->execute();
        // the file should contain at least column names
        if (isset($data)) {
            break;
        }
        // it looks like the result is empty. That happens because the file is not flushed yet
        usleep(10000);
    }
    // deleting the output folder. We do not need it any more
    $resultHandler = $webhdfsProxy->initializeHandler(
        'DELETE', "/webhdfs/v1/user/{$datasource->username}/$outputFolder",
        array('op' => 'DELETE', 'recursive' => 'true'));
    $executor = new SingleCURLHandlerExecutor($resultHandler);
    $executor->execute();
    // parsing data
    $parsedDataProvider = new SampleDataPreparer(FALSE);
    $parser = new DelimiterDataParser("\t");
    $parser->isHeaderPresent = TRUE;
    $parser->parse(
        new StreamDataProvider($data),
        array(
            new ColumnNamePreparer(),
            new ColumnPublicNamePreparer(),
            new ColumnTypeAutoDetector(),
            $parsedDataProvider));
    // calculating column database type
    foreach ($parser->metadata->getColumns() as $column) {
        $databaseType = NULL;
        switch ($column->type->applicationType) {
            case StringDataTypeHandler::DATA_TYPE:
                $databaseType = 'string';
                break;
            case IntegerDataTypeHandler::DATA_TYPE:
                $databaseType = 'int';
                break;
            default:
                throw new UnsupportedOperationException(t(
                    'Cannot provide data type mapping for %columnName column: %datatype',
                    array('%columnName' => $column->name, '%datatype' => $column->type->applicationType)));
        }
        $column->type->databaseType = $databaseType;
    }
    $statement = new HiveStatement($parser->metadata, $parsedDataProvider->records);
    return $callbackInstance->callback($callcontext, $connection, $statement);
}
/**
 * Validates a cube definition against the registered datasets: the cube's
 * source dataset must exist, the first level of each dimension must declare
 * a 'sourceColumnName', every level's dataset must resolve, and each level
 * gets a 'key' derived from its dataset's key column when not set.
 *
 * @param CubeMetaData $cube cube definition to validate (levels may be
 *   mutated: missing 'key' attributes are filled in)
 * @throws IllegalStateException on any unresolved dataset or missing attribute
 */
protected function validateCube(CubeMetaData $cube) {
    $cubeDatasetName = $cube->sourceDatasetName;
    if (!isset($this->datasets[$cubeDatasetName])) {
        LogHelper::log_error($this->datasets);
        throw new IllegalStateException(t("Source dataset '@datasetName' for cube '@cubeName' cannot be resolved", array('@datasetName' => $cubeDatasetName, '@cubeName' => $cube->publicName)));
    }
    if (isset($cube->dimensions)) {
        foreach ($cube->dimensions as $dimension) {
            foreach ($dimension->levels as $levelIndex => $level) {
                // first level should have a reference to source key
                if ($levelIndex == 0 && !isset($level->sourceColumnName)) {
                    LogHelper::log_error($dimension);
                    throw new IllegalStateException(t("First level (@levelName) in '@cubeName' cube in '@dimensionName' dimension should have a reference to source dataset ('sourceColumnName' attribute)", array('@cubeName' => $cube->publicName, '@dimensionName' => $dimension->publicName, '@levelName' => $level->publicName)));
                }
                // Levels without their own dataset need no further checks.
                if (!isset($level->datasetName)) {
                    continue;
                }
                $datasetName = $level->datasetName;
                $dataset = $this->findDataset($datasetName);
                if (!isset($dataset)) {
                    LogHelper::log_error($this->datasets);
                    throw new IllegalStateException(t("Dataset '@datasetName' for cube '@cubeName' dimension '@dimensionName' level '@levelName' cannot be resolved", array('@datasetName' => $datasetName, '@cubeName' => $cube->publicName, '@dimensionName' => $dimension->publicName, '@levelName' => $level->publicName)));
                }
                // FIXME remove the following functionality
                // setting key field for each level
                if (!isset($level->key)) {
                    $keyColumn = $dataset->findKeyColumn();
                    if (isset($keyColumn)) {
                        $level->key = $keyColumn->name;
                    } else {
                        LogHelper::log_debug($dataset);
                        LogHelper::log_error($dimension);
                        throw new IllegalStateException(t("Could not identify 'key' attribute to access '@datasetName' dataset records for '@levelName' level of '@dimensionName' dimension of '@cubeName' cube", array('@datasetName' => $dataset->publicName, '@cubeName' => $cube->publicName, '@dimensionName' => $dimension->publicName, '@levelName' => $level->publicName)));
                    }
                }
            }
        }
    }
}
/**
 * Queries a script-based dataset and casts every returned value to the
 * application type declared for its column.
 *
 * @param DataControllerCallContext $callcontext current call context
 * @param DatasetQueryRequest $request query request carrying the dataset name
 * @return array|NULL records returned by the script, values type-cast
 */
public function queryDataset(DataControllerCallContext $callcontext, DatasetQueryRequest $request) {
    $name = $request->getDatasetName();
    LogHelper::log_info(t('Querying script-based dataset: @datasetName', array('@datasetName' => $name)));

    $dataset = data_controller_get_metamodel()->getDataset($name);

    // Serialize the UI request into parameters the script function understands.
    $requestSerializer = new DatasetQueryUIRequestSerializer();
    $rows = $this->executeScriptFunction($dataset, 'queryDataset', $requestSerializer->serialize($request));
    LogHelper::log_info(t('Received @count records(s)', array('@count' => count($rows))));

    // Cast each value to its column's application type, caching one
    // type handler per column across all rows.
    if (isset($rows)) {
        $handlerCache = NULL;
        foreach ($rows as $rowIndex => $row) {
            foreach ($row as $column => $value) {
                if (!isset($handlerCache[$column])) {
                    $applicationType = $dataset->getColumn($column)->type->applicationType;
                    $handlerCache[$column] = DataTypeFactory::getInstance()->getHandler($applicationType);
                }
                $rows[$rowIndex][$column] = $handlerCache[$column]->castValue($value);
            }
        }
    }

    LogHelper::log_debug($rows);
    return $rows;
}
/**
 * Collects the transfer content for a finished cURL handler, optionally
 * runs it through the handler's output formatter, and releases the handler.
 *
 * @param CURLHandler $handler finished handler to harvest and release
 * @return mixed|NULL formatted output, or NULL when no formatter is set
 * @throws Exception re-thrown from the formatter after logging raw output
 */
public function processResponse(CURLHandler $handler) {
    $rawContent = curl_multi_getcontent($handler->ch);

    $hasFormatter = isset($handler->outputFormatter);
    if ($hasFormatter) {
        try {
            $rawContent = $handler->outputFormatter->format($handler->resourceId, $rawContent);
        } catch (Exception $e) {
            // Preserve the unformatted payload in the log before propagating.
            LogHelper::log_debug(new PreservedTextMessage($rawContent));
            throw $e;
        }
    }

    $this->releaseHandler($handler);

    // Content is returned only when a formatter was configured.
    return $hasFormatter ? $rawContent : NULL;
}
/**
 * Opens a cache envelope, implementing a stale-while-revalidate protocol
 * with a cooperative lock entry:
 *  - fresh data is returned immediately;
 *  - stale data is returned while another party holds the refresh lock
 *    (or when this thread exhausted its lock budget);
 *  - with no data and a foreign lock, waits a bounded time for the value;
 *  - otherwise acquires the lock and returns NULL so the caller regenerates.
 *
 * @param string $cacheEntryName name of the cache entry
 * @param CacheEntryEnvelope|NULL $envelope previously loaded envelope, if any
 * @param array|NULL $options options passed to getSourceDataAsOfDateTime()
 * @return mixed cached data, or NULL when the caller must (re)generate it
 */
protected function openEnvelope($cacheEntryName, $envelope, array $options = NULL) {
    if (isset($envelope)) {
        $sourceDataAsOfDateTime = $this->getSourceDataAsOfDateTime($options);
        if ($envelope->isEnvelopeStale($sourceDataAsOfDateTime)) {
            // data source was refreshed after the envelope was created
            LogHelper::log_debug(t(
                'Forced envelope refresh for the cache entry name: @cacheEntryName',
                array('@cacheEntryName' => $cacheEntryName)));
            $envelope = NULL;
        }
    }
    // found unexpired data
    if (isset($envelope) && !$envelope->isDataStale()) {
        return $envelope->data;
    }
    $cacheLockEntryName = $this->assembleCacheLockEntryName($cacheEntryName);
    $isLockPresent = $this->isEntryPresentImpl($cacheLockEntryName);
    if ($isLockPresent) {
        if (isset($envelope)) {
            // found expired data which is in process of refreshing - returning stale data
            LogHelper::log_notice(t(
                'Using stale data for the cache entry name: @cacheEntryName',
                array('@cacheEntryName' => $cacheEntryName)));
            return $envelope->data;
        } else {
            // there is no data, but someone started to generate it ... waiting ... for limited time
            // Sleep slice in microseconds: total wait budget split over the checks.
            $lockWaitTime = 1000000 * self::$LOCK_WAIT_TIME / self::$LOCK_CHECK_COUNT;
            // going into sleep mode and hope the lock is released
            for ($i = 0; $i < self::$LOCK_CHECK_COUNT; $i++) {
                usleep($lockWaitTime);
                $envelopes = $this->loadValuesImpl(array($cacheEntryName, $cacheLockEntryName));
                // checking if value present now
                if (isset($envelopes[$cacheEntryName])) {
                    return $envelopes[$cacheEntryName]->data;
                }
                // checking if the lock is still present
                if (isset($envelopes[$cacheLockEntryName])) {
                    continue;
                }
                // the lock disappeared. There is nothing can be done now
                break;
            }
        }
    } else {
        if (isset($envelope)) {
            if (self::$currentThreadLockCount >= self::$LOCK_LIMIT_PER_THREAD) {
                // this thread done enough. From now on returning stale data for this thread
                LogHelper::log_notice(t(
                    'Using stale data for the cache entry name (lock limit reached): @cacheEntryName',
                    array('@cacheEntryName' => $cacheEntryName)));
                return $envelope->data;
            }
        }
        // preparing lock
        $lock = new CacheEntryLock();
        // calculating lock expiration time
        // The lock must not expire before the waiters' wait budget runs out.
        $lockExpirationTime = self::$LOCK_EXPIRATION_TIME;
        if ($lockExpirationTime < self::$LOCK_WAIT_TIME) {
            $lockExpirationTime = self::$LOCK_WAIT_TIME;
        }
        // setting the lock and ignoring if the operation was successful or not
        // NOTE(review): check-then-store is not atomic here — two threads can
        // both see "no lock" and both store one; presumably acceptable
        // (duplicate regeneration), but verify against the backing store.
        $this->storeValuesImpl(
            array($cacheLockEntryName => new CacheEntryEnvelope($lock)),
            $lockExpirationTime);
        self::$currentThreadLockCount++;
    }
    return NULL;
}
/**
 * Executes the SQL statement with a result formatter attached and logs
 * timing and record counts.
 *
 * @param DataControllerCallContext $callcontext current call context
 * @param DataSourceMetaData $datasource data source to run the query against
 * @param string $sql SQL statement text
 * @param ResultFormatter $resultFormatter formatter applied to the result set
 * @return array|NULL records produced by the statement execution
 */
public function executeQuery(DataControllerCallContext $callcontext, DataSourceMetaData $datasource, $sql, ResultFormatter $resultFormatter) {
    $startedAt = microtime(TRUE);

    $callbackProxy = new __SQLDataSourceHandler__QueryExecutionCallbackProxy(
        $this->prepareQueryStatementExecutionCallbackInstance(), $resultFormatter);
    $resultRecords = $this->executeQueryStatement($callcontext, $datasource, $sql, $callbackProxy);

    LogHelper::log_info(t('Database execution time: !executionTime', array('!executionTime' => ExecutionPerformanceHelper::formatExecutionTime($startedAt))));
    LogHelper::log_info(t('Processed @count record(s)', array('@count' => count($resultRecords))));
    LogHelper::log_debug($resultRecords);

    return $resultRecords;
}