/**
 * Returns a recursive directory iterator that yields absolute filenames.
 *
 * This iterator is not broken like PHP's built-in DirectoryIterator (which
 * will read the first file from a stream wrapper, then rewind, then read
 * it again).
 *
 * @param string   $path    Path to traverse (e.g., s3://bucket/key, /tmp)
 * @param resource $context Stream context options.
 *
 * @return \Generator Yields absolute filenames.
 */
public static function recursiveDirIterator($path, $context = null)
{
    $invalid = ['.' => true, '..' => true];
    $pathLen = strlen($path) + 1;
    $iterator = self::dirIterator($path, $context);
    $queue = [];

    do {
        while ($iterator->valid()) {
            $file = $iterator->current();
            $iterator->next();
            if (isset($invalid[basename($file)])) {
                continue;
            }
            $fullPath = "{$path}/{$file}";
            yield $fullPath;
            if (is_dir($fullPath)) {
                $queue[] = $iterator;
                $iterator = t\to_iter(
                    self::dirIterator($fullPath, $context),
                    t\map(function ($file) use ($fullPath, $pathLen) {
                        return substr("{$fullPath}/{$file}", $pathLen);
                    })
                );
                continue;
            }
        }
        $iterator = array_pop($queue);
    } while ($iterator);
}
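
// Usage sketch, assuming this method lives on a class such as FilesystemApi
// (the class name and path below are illustrative, not confirmed by this file):
//
//     foreach (FilesystemApi::recursiveDirIterator('/tmp/uploads') as $path) {
//         // $path is an absolute path such as "/tmp/uploads/photos/cat.jpg";
//         // directories are yielded as well as regular files.
//         echo $path, PHP_EOL;
//     }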
/**
 * Upload the source to S3 using multipart upload operations.
 *
 * @param int           $concurrency Number of parts that the Uploader will
 *     upload concurrently (in parallel). This defaults to 1. You may need
 *     to do some experimenting to find the optimal concurrency value for
 *     your system, but using 20-25 usually yields decent results.
 * @param callable|null $before Callback to execute before each upload.
 *     This callback will receive a PreparedEvent object as its argument.
 *
 * @return Result The result of the CompleteMultipartUpload operation.
 * @throws \LogicException if the upload is already complete or aborted.
 * @throws MultipartUploadException if an upload operation fails.
 */
public function upload($concurrency = 1, callable $before = null)
{
    // Ensure the upload is in a valid state for uploading parts.
    if (!$this->state->isInitiated()) {
        $this->initiate();
    } elseif ($this->state->isCompleted()) {
        throw new \LogicException('The upload has been completed.');
    } elseif ($this->state->isAborted()) {
        throw new \LogicException('This upload has been aborted.');
    }

    // Create an iterator that will yield UploadPart commands for each part.
    $commands = t\to_iter($this->parts, t\map(function (array $partData) {
        return $this->createCommand('upload', $partData);
    }));

    // Execute the commands in parallel and process results. This collects
    // unhandled errors along the way and throws an exception at the end
    // that contains the state and a list of the failed parts.
    $errors = [];
    $this->client->executeAll($commands, [
        'pool_size' => $concurrency,
        'prepared'  => $before,
        'process'   => [
            'fn' => function (ProcessEvent $event) use (&$errors) {
                $command = $event->getCommand();
                $partNumber = $command[$this->config['part']['param']];
                if ($ex = $event->getException()) {
                    $errors[$partNumber] = $ex->getMessage();
                } else {
                    unset($errors[$partNumber]);
                    $this->config['fn']['result']($command, $event->getResult());
                }
            },
            'priority' => 'last',
        ],
    ]);

    if ($errors) {
        throw new MultipartUploadException($this->state, $errors);
    }

    return $this->complete();
}
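
// Usage sketch, assuming $uploader is an instance of this uploader class that
// has already been constructed with a client, a source stream, and a
// bucket/key (variable names here are illustrative):
//
//     try {
//         $result = $uploader->upload(20); // ~20-25 concurrent part uploads
//     } catch (MultipartUploadException $e) {
//         // The exception carries the upload state and the failed parts,
//         // which can be used to retry or resume the upload.
//     }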
/**
 * Creates an iterator that yields Commands from each filename.
 *
 * @param \Iterator $iter Iterator to wrap.
 *
 * @return \Iterator
 */
private function wrapIterator(\Iterator $iter)
{
    $comp = [];

    // Send files over the multipart upload (MUP) threshold as separate
    // operations and filter them out of this iterator.
    if ($this->destScheme == 's3' && $this->sourceScheme == 'file') {
        $comp[] = t\filter(function ($file) {
            if ($this->sourceScheme == 'file'
                && filesize($file) >= $this->mup_threshold
            ) {
                $this->mup($file);
                return false;
            }
            // Filter out "/" files stored on S3 as buckets.
            return substr($file, -1, 1) != '/';
        });
    }

    $comp[] = t\map($this->getTransferFunction($this->sourceScheme, $this->destScheme));

    return t\to_iter($iter, call_user_func_array('transducers\\comp', $comp));
}
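
// Usage sketch: wrapIterator() is private, so it would only be called from
// inside the owning class, e.g. to turn the filenames produced by the
// recursive iterator above into per-file transfer commands. Property names
// such as $this->source and $this->context are illustrative assumptions:
//
//     $commands = $this->wrapIterator(
//         self::recursiveDirIterator($this->source, $this->context)
//     );
//     // $commands now yields one Command per filename, ready to be executed.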