protected function execute(ConduitAPIRequest $request) {
  $viewer = $request->getUser();

  $hash = $request->getValue('contentHash');
  $name = $request->getValue('name');
  $view_policy = $request->getValue('viewPolicy');
  $length = $request->getValue('contentLength');

  $properties = array(
    'name' => $name,
    'authorPHID' => $viewer->getPHID(),
    'viewPolicy' => $view_policy,
    'isExplicitUpload' => true,
  );

  $ttl = $request->getValue('deleteAfterEpoch');
  if ($ttl) {
    $properties['ttl'] = $ttl;
  }

  $file = null;
  if ($hash) {
    $file = PhabricatorFile::newFileFromContentHash($hash, $properties);
  }

  if ($hash && !$file) {
    $chunked_hash = PhabricatorChunkedFileStorageEngine::getChunkedHash(
      $viewer,
      $hash);
    $file = id(new PhabricatorFileQuery())
      ->setViewer($viewer)
      ->withContentHashes(array($chunked_hash))
      ->executeOne();
  }

  if (strlen($name) && !$hash && !$file) {
    if ($length > PhabricatorFileStorageEngine::getChunkThreshold()) {
      // If we don't have a hash, but this file is large enough to store in
      // chunks and thus may be resumable, try to find a partially uploaded
      // file by the same author with the same name and same length. This
      // allows us to resume uploads in Javascript where we can't efficiently
      // compute file hashes.
      $file = id(new PhabricatorFileQuery())
        ->setViewer($viewer)
        ->withAuthorPHIDs(array($viewer->getPHID()))
        ->withNames(array($name))
        ->withLengthBetween($length, $length)
        ->withIsPartial(true)
        ->setLimit(1)
        ->executeOne();
    }
  }

  if ($file) {
    return array(
      'upload' => (bool)$file->getIsPartial(),
      'filePHID' => $file->getPHID(),
    );
  }

  // If there are any non-chunk engines which this file can fit into,
  // just tell the client to upload the file.
  $engines = PhabricatorFileStorageEngine::loadStorageEngines($length);
  if ($engines) {
    return array(
      'upload' => true,
      'filePHID' => null,
    );
  }

  // Otherwise, this is a large file and we want to perform a chunked
  // upload if we have a chunk engine available.
  $chunk_engines = PhabricatorFileStorageEngine::loadWritableChunkEngines();
  if ($chunk_engines) {
    $chunk_properties = $properties;

    if ($hash) {
      $chunk_properties += array(
        'chunkedHash' => $chunked_hash,
      );
    }

    $chunk_engine = head($chunk_engines);
    $file = $chunk_engine->allocateChunks($length, $chunk_properties);

    return array(
      'upload' => true,
      'filePHID' => $file->getPHID(),
    );
  }

  // None of the storage engines can accept this file.
  if (PhabricatorFileStorageEngine::loadWritableEngines()) {
    $error = pht(
      'Unable to upload file: this file is too large for any '.
      'configured storage engine.');
  } else {
    $error = pht(
      'Unable to upload file: the server is not configured with any '.
      'writable storage engines.');
  }

  return array(
    'upload' => false,
    'filePHID' => null,
    'error' => $error,
  );
}
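
// A minimal sketch of how a client might drive the allocation step above
// before sending any bytes. This is illustrative only: it assumes
// arcanist's ConduitClient with setConduitToken() and
// callMethodSynchronous(), and the URI, token, $data, and $content_hash
// values are placeholders. It branches only on the 'upload', 'filePHID',
// and 'error' fields the method returns.
$conduit = new ConduitClient('https://phabricator.example.com/api/');
$conduit->setConduitToken('<api-token>');

$result = $conduit->callMethodSynchronous(
  'file.allocate',
  array(
    'name' => 'backup.tar.gz',
    'contentLength' => strlen($data),
    'contentHash' => $content_hash,
  ));

if (!$result['upload']) {
  if ($result['filePHID']) {
    // The server already has this content; nothing to upload.
  } else {
    // No writable engine can take the file; $result['error'] says why.
  }
} else if ($result['filePHID']) {
  // A chunked (and possibly partial) file was allocated: upload the missing
  // chunks against this PHID (for example with file.querychunks and
  // file.uploadchunk).
} else {
  // Small enough for a non-chunk engine: upload the whole body in one
  // request.
}
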
private static function buildFromFileData($data, array $params = array()) {
  if (isset($params['storageEngines'])) {
    $engines = $params['storageEngines'];
  } else {
    $size = strlen($data);
    $engines = PhabricatorFileStorageEngine::loadStorageEngines($size);

    if (!$engines) {
      throw new Exception(
        pht(
          'No configured storage engine can store this file. See '.
          '"Configuring File Storage" in the documentation for '.
          'information on configuring storage engines.'));
    }
  }

  assert_instances_of($engines, 'PhabricatorFileStorageEngine');
  if (!$engines) {
    throw new Exception(pht('No valid storage engines are available!'));
  }

  $file = self::initializeNewFile();

  $data_handle = null;
  $engine_identifier = null;
  $exceptions = array();
  foreach ($engines as $engine) {
    $engine_class = get_class($engine);
    try {
      list($engine_identifier, $data_handle) = $file->writeToEngine(
        $engine,
        $data,
        $params);

      // We stored the file somewhere so stop trying to write it to other
      // places.
      break;
    } catch (PhabricatorFileStorageConfigurationException $ex) {
      // If an engine is outright misconfigured (or misimplemented), raise
      // that immediately since it probably needs attention.
      throw $ex;
    } catch (Exception $ex) {
      phlog($ex);

      // If an engine doesn't work, keep trying all the other valid engines
      // in case something else works.
      $exceptions[$engine_class] = $ex;
    }
  }

  if (!$data_handle) {
    throw new PhutilAggregateException(
      pht('All storage engines failed to write file:'),
      $exceptions);
  }

  $file->setByteSize(strlen($data));
  $file->setContentHash(self::hashFileContent($data));

  $file->setStorageEngine($engine_identifier);
  $file->setStorageHandle($data_handle);

  // TODO: This is probably YAGNI, but allows for us to do encryption or
  // compression later if we want.
  $file->setStorageFormat(self::STORAGE_FORMAT_RAW);

  $file->readPropertiesFromParameters($params);

  if (!$file->getMimeType()) {
    $tmp = new TempFile();
    Filesystem::writeFile($tmp, $data);
    $file->setMimeType(Filesystem::getMimeType($tmp));
  }

  try {
    $file->updateDimensions(false);
  } catch (Exception $ex) {
    // Do nothing
  }

  $file->save();

  return $file;
}
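
// This builder is private. A hedged sketch of how callers typically reach
// it, assuming the public PhabricatorFile::newFromFileData() constructor
// forwards $data and $params here; the parameter values are placeholders.
// The variant that follows replaces the hard-coded STORAGE_FORMAT_RAW with
// a selectable storage format.
$file = PhabricatorFile::newFromFileData(
  $data,
  array(
    'name' => 'example.txt',
    'authorPHID' => $viewer->getPHID(),
    'viewPolicy' => PhabricatorPolicies::POLICY_USER,
    'isExplicitUpload' => true,
  ));
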
private static function buildFromFileData($data, array $params = array()) {
  if (isset($params['storageEngines'])) {
    $engines = $params['storageEngines'];
  } else {
    $size = strlen($data);
    $engines = PhabricatorFileStorageEngine::loadStorageEngines($size);

    if (!$engines) {
      throw new Exception(
        pht(
          'No configured storage engine can store this file. See '.
          '"Configuring File Storage" in the documentation for '.
          'information on configuring storage engines.'));
    }
  }

  assert_instances_of($engines, 'PhabricatorFileStorageEngine');
  if (!$engines) {
    throw new Exception(pht('No valid storage engines are available!'));
  }

  $file = self::initializeNewFile();

  // If an AES-256 key is configured in the keyring, encrypt new files at
  // rest by default; otherwise, fall back to raw storage.
  $aes_type = PhabricatorFileAES256StorageFormat::FORMATKEY;
  $has_aes = PhabricatorKeyring::getDefaultKeyName($aes_type);
  if ($has_aes !== null) {
    $default_key = PhabricatorFileAES256StorageFormat::FORMATKEY;
  } else {
    $default_key = PhabricatorFileRawStorageFormat::FORMATKEY;
  }

  $key = idx($params, 'format', $default_key);

  // Callers can pass in an object explicitly instead of a key. This is
  // primarily useful for unit tests.
  if ($key instanceof PhabricatorFileStorageFormat) {
    $format = clone $key;
  } else {
    $format = clone PhabricatorFileStorageFormat::requireFormat($key);
  }

  $format->setFile($file);

  $properties = $format->newStorageProperties();
  $file->setStorageFormat($format->getStorageFormatKey());
  $file->setStorageProperties($properties);

  $data_handle = null;
  $engine_identifier = null;
  $exceptions = array();
  foreach ($engines as $engine) {
    $engine_class = get_class($engine);
    try {
      list($engine_identifier, $data_handle) = $file->writeToEngine(
        $engine,
        $data,
        $params);

      // We stored the file somewhere so stop trying to write it to other
      // places.
      break;
    } catch (PhabricatorFileStorageConfigurationException $ex) {
      // If an engine is outright misconfigured (or misimplemented), raise
      // that immediately since it probably needs attention.
      throw $ex;
    } catch (Exception $ex) {
      phlog($ex);

      // If an engine doesn't work, keep trying all the other valid engines
      // in case something else works.
      $exceptions[$engine_class] = $ex;
    }
  }

  if (!$data_handle) {
    throw new PhutilAggregateException(
      pht('All storage engines failed to write file:'),
      $exceptions);
  }

  $file->setByteSize(strlen($data));
  $file->setContentHash(self::hashFileContent($data));

  $file->setStorageEngine($engine_identifier);
  $file->setStorageHandle($data_handle);

  $file->readPropertiesFromParameters($params);

  if (!$file->getMimeType()) {
    $tmp = new TempFile();
    Filesystem::writeFile($tmp, $data);
    $file->setMimeType(Filesystem::getMimeType($tmp));
  }

  try {
    $file->updateDimensions(false);
  } catch (Exception $ex) {
    // Do nothing
  }

  $file->save();

  return $file;
}
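
// A hedged sketch of the two ways the 'format' parameter read above can be
// supplied, assuming it is passed through a public constructor such as
// PhabricatorFile::newFromFileData(); names and data are placeholders.

// By key: request AES-256 at-rest encryption explicitly. This assumes an
// AES-256 key has already been registered with PhabricatorKeyring, since
// the format needs a key from the keyring to encrypt new data.
$file = PhabricatorFile::newFromFileData(
  $data,
  array(
    'name' => 'secret.bin',
    'format' => PhabricatorFileAES256StorageFormat::FORMATKEY,
  ));

// By object, mainly for unit tests: the builder clones the instance before
// using it, so the caller's object is not modified.
$format = new PhabricatorFileRawStorageFormat();
$file = PhabricatorFile::newFromFileData(
  $data,
  array(
    'name' => 'plain.txt',
    'format' => $format,
  ));
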