public function rename($from, $to)
{
    $fromUrl = parse_url($from);
    $repoId = $fromUrl["host"];
    $repoObject = ConfService::getRepositoryById($repoId);
    $isViPR = $repoObject->getOption("IS_VIPR");
    $isDir = false;
    if ($isViPR === true) {
        if (is_dir($from . "/")) {
            $from .= '/';
            $to .= '/';
            $isDir = true;
        }
    }
    if ($isDir === true || is_dir($from)) {
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Renaming dir {$from} to {$to}");
        require_once "aws.phar";

        $fromUrl = parse_url($from);
        $repoId = $fromUrl["host"];
        $repoObject = ConfService::getRepositoryById($repoId);
        if (!isset($repoObject)) {
            $e = new Exception("Cannot find repository with id " . $repoId);
            self::$lastException = $e;
            throw $e;
        }
        // Get a client
        $options = array(
            'key'    => $repoObject->getOption("API_KEY"),
            'secret' => $repoObject->getOption("SECRET_KEY"),
        );
        $baseURL = $repoObject->getOption("STORAGE_URL");
        if (!empty($baseURL)) {
            $options["base_url"] = $baseURL;
        } else {
            $options["region"] = $repoObject->getOption("REGION");
        }
        $proxy = $repoObject->getOption("PROXY");
        if (!empty($proxy)) {
            $options['request.options'] = array('proxy' => $proxy);
        }
        $s3Client = S3Client::factory($options);
        $bucket = $repoObject->getOption("CONTAINER");
        $basePath = $repoObject->getOption("PATH");
        $fromKeyname = trim(str_replace("//", "/", $basePath . parse_url($from, PHP_URL_PATH)), '/');
        $toKeyname = trim(str_replace("//", "/", $basePath . parse_url($to, PHP_URL_PATH)), '/');
        if ($isViPR) {
            $toKeyname .= '/';
            $parts = explode('/', $bucket);
            $bucket = $parts[0];
            if (isset($parts[1])) {
                $fromKeyname = $parts[1] . "/" . $fromKeyname;
            }
        }

        // Perform a batch of CopyObject operations.
        $batch = array();
        $iterator = $s3Client->getIterator('ListObjects', array(
            'Bucket' => $bucket,
            'Prefix' => $fromKeyname . "/",
        ));
        $toDelete = array();
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Got iterator looking for prefix " . $fromKeyname . "/ , and toKeyName=" . $toKeyname);
        foreach ($iterator as $object) {
            $currentFrom = $object['Key'];
            $currentTo = $toKeyname . substr($currentFrom, strlen($fromKeyname));
            if ($isViPR) {
                if (isset($parts[1])) {
                    $currentTo = $parts[1] . "/" . $currentTo;
                }
            }
            AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Should move one object " . $currentFrom . " to new key :" . $currentTo);
            $batch[] = $s3Client->getCommand('CopyObject', array(
                'Bucket'     => $bucket,
                'Key'        => "{$currentTo}",
                'CopySource' => "{$bucket}/" . rawurlencode($currentFrom),
            ));
            $toDelete[] = $currentFrom;
        }
        try {
            AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Execute batch on " . count($batch) . " objects");
            $successful = $s3Client->execute($batch);
            $failed = array();
            $iterator->rewind();
            $clear = new \Aws\S3\Model\ClearBucket($s3Client, $bucket);
            $clear->setIterator($iterator);
            $clear->clear();
        } catch (\Guzzle\Service\Exception\CommandTransferException $e) {
            $successful = $e->getSuccessfulCommands();
            $failed = $e->getFailedCommands();
        }
        if (count($failed)) {
            foreach ($failed as $c) {
                // $c is a Aws\S3\Command\S3Command
                AJXP_Logger::error("S3Wrapper", __FUNCTION__, "Error while copying: " . $c->getOperation()->getServiceDescription());
            }
            self::$lastException = new Exception("Failed moving folder: " . count($failed));
            return false;
        }
        return true;
    } else {
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Execute standard rename on " . $from . " to " . $to);
        return parent::rename($from, $to);
    }
}
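S3 has no server-side rename, which is why the directory branch above issues one CopyObject command per key and then deletes the originals. The same copy-then-delete pattern for a single object with the v2 SDK is sketched below; the bucket name, keys, and credentials are hypothetical placeholders, not values taken from the plugin configuration.

// Minimal copy-then-delete sketch (AWS SDK for PHP v2).
// All literals below are placeholders for illustration only.
require_once "aws.phar";

$s3 = \Aws\S3\S3Client::factory(array(
    'key'    => 'EXAMPLE_API_KEY',
    'secret' => 'EXAMPLE_SECRET_KEY',
    'region' => 'us-east-1',
));

// Copy the object under its new key...
$s3->copyObject(array(
    'Bucket'     => 'example-bucket',
    'Key'        => 'projects/new-name/report.txt',
    'CopySource' => 'example-bucket/' . rawurlencode('projects/old-name/report.txt'),
));

// ...then remove the original to complete the "rename".
$s3->deleteObject(array(
    'Bucket' => 'example-bucket',
    'Key'    => 'projects/old-name/report.txt',
));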
/**
 * Remove file
 *
 * @param string $path file path
 * @return Guzzle\Service\Resource\Model
 **/
protected function _unlink($path)
{
    $clear = new \Aws\S3\Model\ClearBucket($this->s3, $this->bucket);
    $iterator = $this->s3->getIterator('ListObjects', array(
        'Bucket' => $this->bucket,
        'Prefix' => $this->pathToKey($path),
    ));
    $clear->setIterator($iterator);
    $clear->clear();
    return $this->s3->deleteObject(array(
        "Bucket" => $this->bucket,
        "Key"    => $this->pathToKey($path),
    ));
}
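The method above relies on a pathToKey() helper that is not shown here. A minimal sketch of what such a helper could look like follows, assuming it simply joins a configured root prefix with the driver path and normalises slashes; the property name $this->rootPath and the whole implementation are assumptions for illustration, not the driver's actual code.

// Hypothetical helper sketch: maps a driver path to an S3 object key.
protected function pathToKey($path)
{
    // Prefix the configured root path (assumed property) to the relative path.
    $key = $this->rootPath . '/' . $path;
    // Collapse duplicate separators and strip leading/trailing slashes,
    // since S3 object keys must not start with "/".
    $key = preg_replace('#/+#', '/', $key);
    return trim($key, '/');
}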
/**
 * @inheritdoc
 */
public function rename($from, $to)
{
    $fromUrl = parse_url($from);
    $repoId = $fromUrl["host"];
    $repoObject = ConfService::getRepositoryById($repoId);
    $isViPR = $repoObject->getOption("IS_VIPR");
    $isDir = false;
    if ($isViPR === true) {
        if (is_dir($from . "/")) {
            $from .= '/';
            $to .= '/';
            $isDir = true;
        }
    }
    if ($isDir === true || is_dir($from)) {
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Renaming dir {$from} to {$to}");
        require_once "aws-v2.phar";

        $fromUrl = parse_url($from);
        $repoId = $fromUrl["host"];
        $repoObject = ConfService::getRepositoryById($repoId);
        if (!isset($repoObject)) {
            $e = new Exception("Cannot find repository with id " . $repoId);
            self::$lastException = $e;
            throw $e;
        }
        $s3Client = self::getClientForRepository($repoObject, false);
        $bucket = $repoObject->getOption("CONTAINER");
        $basePath = $repoObject->getOption("PATH");
        $fromKeyname = trim(str_replace("//", "/", $basePath . parse_url($from, PHP_URL_PATH)), '/');
        $toKeyname = trim(str_replace("//", "/", $basePath . parse_url($to, PHP_URL_PATH)), '/');
        if ($isViPR) {
            $toKeyname .= '/';
            $parts = explode('/', $bucket);
            $bucket = $parts[0];
            if (isset($parts[1])) {
                $fromKeyname = $parts[1] . "/" . $fromKeyname;
            }
        }

        // Perform a batch of CopyObject operations.
        $batch = array();
        $failed = array();
        $iterator = $s3Client->getIterator('ListObjects', array(
            'Bucket' => $bucket,
            'Prefix' => $fromKeyname . "/",
        ));
        $toDelete = array();
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Got iterator looking for prefix " . $fromKeyname . "/ , and toKeyName=" . $toKeyname);
        foreach ($iterator as $object) {
            $currentFrom = $object['Key'];
            $currentTo = $toKeyname . substr($currentFrom, strlen($fromKeyname));
            if ($isViPR) {
                if (isset($parts[1])) {
                    $currentTo = $parts[1] . "/" . $currentTo;
                }
            }
            AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Should move one object " . $currentFrom . " to new key :" . $currentTo);
            $batch[] = $s3Client->getCommand('CopyObject', array(
                'Bucket'     => $bucket,
                'Key'        => "{$currentTo}",
                'CopySource' => "{$bucket}/" . rawurlencode($currentFrom),
            ));
            $toDelete[] = $currentFrom;
        }
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Execute batch on " . count($batch) . " objects");
        ConfService::getConfStorageImpl()->_loadPluginConfig("access.s3", $globalOptions);
        $sdkVersion = $globalOptions["SDK_VERSION"];
        if ($sdkVersion === "v3") {
            foreach ($batch as $command) {
                $successful = $s3Client->execute($command);
            }
            // List without the trailing "/" on $fromKeyname so the folder
            // placeholder object itself is included in the batch delete.
            $clear = \Aws\S3\BatchDelete::fromIterator(
                $s3Client,
                $bucket,
                $s3Client->getIterator('ListObjects', array('Bucket' => $bucket, 'Prefix' => $fromKeyname))
            );
            $clear->delete();
        } else {
            try {
                $successful = $s3Client->execute($batch);
                $clear = new \Aws\S3\Model\ClearBucket($s3Client, $bucket);
                $iterator->rewind();
                $clear->setIterator($iterator);
                $clear->clear();
                $failed = array();
            } catch (\Guzzle\Service\Exception\CommandTransferException $e) {
                $successful = $e->getSuccessfulCommands();
                $failed = $e->getFailedCommands();
            }
        }
        if (count($failed)) {
            foreach ($failed as $c) {
                // $c is an Aws\S3\Command\S3Command
                AJXP_Logger::error("S3Wrapper", __FUNCTION__, "Error while copying: " . $c->getOperation()->getServiceDescription());
            }
            self::$lastException = new Exception("Failed moving folder: " . count($failed));
            return false;
        }
        return true;
    } else {
        AJXP_Logger::debug(__CLASS__, __FUNCTION__, "S3 Execute standard rename on " . $from . " to " . $to);
        return parent::rename($from, $to);
    }
}
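The SDK v3 branch above cleans up the source folder with a prefix-based batch delete. That step can be exercised on its own as sketched below; the region, credentials, bucket, and prefix are hypothetical placeholders, and the v3 autoloader is assumed to be already loaded rather than pulled in via a phar.

// Minimal SDK v3 prefix-delete sketch. All literals are placeholders
// for illustration only, not values from the repository options.
$s3v3 = new \Aws\S3\S3Client(array(
    'version'     => 'latest',
    'region'      => 'us-east-1',
    'credentials' => array('key' => 'EXAMPLE_KEY', 'secret' => 'EXAMPLE_SECRET'),
));

// Iterate every object whose key starts with the old folder prefix
// and delete them in batches.
$objects = $s3v3->getIterator('ListObjects', array(
    'Bucket' => 'example-bucket',
    'Prefix' => 'projects/old-name',
));
\Aws\S3\BatchDelete::fromIterator($s3v3, 'example-bucket', $objects)->delete();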