summaryrefslogtreecommitdiff
path: root/vendor/aws/aws-sdk-php/src/Glacier
diff options
context:
space:
mode:
authorAndrew Dolgov <[email protected]>2022-11-23 21:14:33 +0300
committerAndrew Dolgov <[email protected]>2022-11-23 21:14:33 +0300
commit0c8af4992cb0f7589dcafaad65ada12753c64594 (patch)
tree18e83d068c3e7dd2499331de977782b382279396 /vendor/aws/aws-sdk-php/src/Glacier
initial
Diffstat (limited to 'vendor/aws/aws-sdk-php/src/Glacier')
-rw-r--r--vendor/aws/aws-sdk-php/src/Glacier/Exception/GlacierException.php9
-rw-r--r--vendor/aws/aws-sdk-php/src/Glacier/GlacierClient.php251
-rw-r--r--vendor/aws/aws-sdk-php/src/Glacier/MultipartUploader.php284
-rw-r--r--vendor/aws/aws-sdk-php/src/Glacier/TreeHash.php118
4 files changed, 662 insertions, 0 deletions
diff --git a/vendor/aws/aws-sdk-php/src/Glacier/Exception/GlacierException.php b/vendor/aws/aws-sdk-php/src/Glacier/Exception/GlacierException.php
new file mode 100644
index 0000000..ff78c0b
--- /dev/null
+++ b/vendor/aws/aws-sdk-php/src/Glacier/Exception/GlacierException.php
@@ -0,0 +1,9 @@
+<?php
+namespace Aws\Glacier\Exception;
+
+use Aws\Exception\AwsException;
+
/**
 * Exception thrown when an error occurs while interacting with the
 * Amazon Glacier service.
 */
class GlacierException extends AwsException
{
}
diff --git a/vendor/aws/aws-sdk-php/src/Glacier/GlacierClient.php b/vendor/aws/aws-sdk-php/src/Glacier/GlacierClient.php
new file mode 100644
index 0000000..a9a335c
--- /dev/null
+++ b/vendor/aws/aws-sdk-php/src/Glacier/GlacierClient.php
@@ -0,0 +1,251 @@
+<?php
+namespace Aws\Glacier;
+
+use Aws\Api\ApiProvider;
+use Aws\Api\DocModel;
+use Aws\Api\Service;
+use Aws\AwsClient;
+use Aws\CommandInterface;
+use Aws\Exception\CouldNotCreateChecksumException;
+use Aws\HashingStream;
+use Aws\Middleware;
+use Aws\PhpHash;
+use Psr\Http\Message\RequestInterface;
+
/**
 * This client is used to interact with the **Amazon Glacier** service.
 *
 * @method \Aws\Result abortMultipartUpload(array $args = [])
 * @method \GuzzleHttp\Promise\Promise abortMultipartUploadAsync(array $args = [])
 * @method \Aws\Result abortVaultLock(array $args = [])
 * @method \GuzzleHttp\Promise\Promise abortVaultLockAsync(array $args = [])
 * @method \Aws\Result addTagsToVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise addTagsToVaultAsync(array $args = [])
 * @method \Aws\Result completeMultipartUpload(array $args = [])
 * @method \GuzzleHttp\Promise\Promise completeMultipartUploadAsync(array $args = [])
 * @method \Aws\Result completeVaultLock(array $args = [])
 * @method \GuzzleHttp\Promise\Promise completeVaultLockAsync(array $args = [])
 * @method \Aws\Result createVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise createVaultAsync(array $args = [])
 * @method \Aws\Result deleteArchive(array $args = [])
 * @method \GuzzleHttp\Promise\Promise deleteArchiveAsync(array $args = [])
 * @method \Aws\Result deleteVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise deleteVaultAsync(array $args = [])
 * @method \Aws\Result deleteVaultAccessPolicy(array $args = [])
 * @method \GuzzleHttp\Promise\Promise deleteVaultAccessPolicyAsync(array $args = [])
 * @method \Aws\Result deleteVaultNotifications(array $args = [])
 * @method \GuzzleHttp\Promise\Promise deleteVaultNotificationsAsync(array $args = [])
 * @method \Aws\Result describeJob(array $args = [])
 * @method \GuzzleHttp\Promise\Promise describeJobAsync(array $args = [])
 * @method \Aws\Result describeVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise describeVaultAsync(array $args = [])
 * @method \Aws\Result getDataRetrievalPolicy(array $args = [])
 * @method \GuzzleHttp\Promise\Promise getDataRetrievalPolicyAsync(array $args = [])
 * @method \Aws\Result getJobOutput(array $args = [])
 * @method \GuzzleHttp\Promise\Promise getJobOutputAsync(array $args = [])
 * @method \Aws\Result getVaultAccessPolicy(array $args = [])
 * @method \GuzzleHttp\Promise\Promise getVaultAccessPolicyAsync(array $args = [])
 * @method \Aws\Result getVaultLock(array $args = [])
 * @method \GuzzleHttp\Promise\Promise getVaultLockAsync(array $args = [])
 * @method \Aws\Result getVaultNotifications(array $args = [])
 * @method \GuzzleHttp\Promise\Promise getVaultNotificationsAsync(array $args = [])
 * @method \Aws\Result initiateJob(array $args = [])
 * @method \GuzzleHttp\Promise\Promise initiateJobAsync(array $args = [])
 * @method \Aws\Result initiateMultipartUpload(array $args = [])
 * @method \GuzzleHttp\Promise\Promise initiateMultipartUploadAsync(array $args = [])
 * @method \Aws\Result initiateVaultLock(array $args = [])
 * @method \GuzzleHttp\Promise\Promise initiateVaultLockAsync(array $args = [])
 * @method \Aws\Result listJobs(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listJobsAsync(array $args = [])
 * @method \Aws\Result listMultipartUploads(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listMultipartUploadsAsync(array $args = [])
 * @method \Aws\Result listParts(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listPartsAsync(array $args = [])
 * @method \Aws\Result listProvisionedCapacity(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listProvisionedCapacityAsync(array $args = [])
 * @method \Aws\Result listTagsForVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listTagsForVaultAsync(array $args = [])
 * @method \Aws\Result listVaults(array $args = [])
 * @method \GuzzleHttp\Promise\Promise listVaultsAsync(array $args = [])
 * @method \Aws\Result purchaseProvisionedCapacity(array $args = [])
 * @method \GuzzleHttp\Promise\Promise purchaseProvisionedCapacityAsync(array $args = [])
 * @method \Aws\Result removeTagsFromVault(array $args = [])
 * @method \GuzzleHttp\Promise\Promise removeTagsFromVaultAsync(array $args = [])
 * @method \Aws\Result setDataRetrievalPolicy(array $args = [])
 * @method \GuzzleHttp\Promise\Promise setDataRetrievalPolicyAsync(array $args = [])
 * @method \Aws\Result setVaultAccessPolicy(array $args = [])
 * @method \GuzzleHttp\Promise\Promise setVaultAccessPolicyAsync(array $args = [])
 * @method \Aws\Result setVaultNotifications(array $args = [])
 * @method \GuzzleHttp\Promise\Promise setVaultNotificationsAsync(array $args = [])
 * @method \Aws\Result uploadArchive(array $args = [])
 * @method \GuzzleHttp\Promise\Promise uploadArchiveAsync(array $args = [])
 * @method \Aws\Result uploadMultipartPart(array $args = [])
 * @method \GuzzleHttp\Promise\Promise uploadMultipartPartAsync(array $args = [])
 */
class GlacierClient extends AwsClient
{
    public function __construct(array $args)
    {
        parent::__construct($args);

        // Set up Glacier-specific middleware: API version header, upload
        // checksums, content-type detection for uploads, and support for
        // the SDK-added "sourceFile" parameter.
        $stack = $this->getHandlerList();
        $stack->appendBuild($this->getApiVersionMiddleware(), 'glacier.api_version');
        $stack->appendBuild($this->getChecksumsMiddleware(), 'glacier.checksum');
        $stack->appendBuild(
            Middleware::contentType(['UploadArchive', 'UploadPart']),
            'glacier.content_type'
        );
        $stack->appendInit(
            Middleware::sourceFile($this->getApi(), 'body', 'sourceFile'),
            'glacier.source_file'
        );
    }

    /**
     * {@inheritdoc}
     *
     * Sets the default accountId to "-" for all operations.
     */
    public function getCommand($name, array $args = [])
    {
        return parent::getCommand($name, $args + ['accountId' => '-']);
    }

    /**
     * Creates a middleware that updates a command with the content and tree
     * hash headers for upload operations.
     *
     * For UploadArchive and UploadMultipartPart, reads the (seekable) request
     * body once to compute any missing SHA-256 tree hash and linear hash,
     * then rewinds the body and sets the corresponding headers.
     *
     * @return callable
     * @throws CouldNotCreateChecksumException if the body is not seekable.
     */
    private function getChecksumsMiddleware()
    {
        return function (callable $handler) {
            return function (
                CommandInterface $command,
                RequestInterface $request = null
            ) use ($handler) {
                // Accept "ContentSHA256" with a lowercase "c" to match other Glacier params.
                if (!$command['ContentSHA256'] && $command['contentSHA256']) {
                    $command['ContentSHA256'] = $command['contentSHA256'];
                    unset($command['contentSHA256']);
                }

                // If uploading, then make sure checksums are added.
                $name = $command->getName();
                if (($name === 'UploadArchive' || $name === 'UploadMultipartPart')
                    && (!$command['checksum'] || !$command['ContentSHA256'])
                ) {
                    $body = $request->getBody();
                    if (!$body->isSeekable()) {
                        throw new CouldNotCreateChecksumException('sha256');
                    }

                    // Add a tree hash if not provided. The hash callback
                    // replaces $request (captured by reference) once the
                    // stream has been fully read below.
                    if (!$command['checksum']) {
                        $body = new HashingStream(
                            $body, new TreeHash(),
                            function ($result) use (&$request) {
                                $request = $request->withHeader(
                                    'x-amz-sha256-tree-hash',
                                    bin2hex($result)
                                );
                            }
                        );
                    }

                    // Add a linear content hash if not provided.
                    if (!$command['ContentSHA256']) {
                        $body = new HashingStream(
                            $body, new PhpHash('sha256'),
                            function ($result) use ($command) {
                                $command['ContentSHA256'] = bin2hex($result);
                            }
                        );
                    }

                    // Read the stream in order to calculate the hashes.
                    while (!$body->eof()) {
                        $body->read(1048576);
                    }
                    $body->seek(0);
                }

                // Set the content hash header if a value is in the command.
                if ($command['ContentSHA256']) {
                    $request = $request->withHeader(
                        'x-amz-content-sha256',
                        $command['ContentSHA256']
                    );
                }

                return $handler($command, $request);
            };
        };
    }

    /**
     * Creates a middleware that adds the API version header for all requests.
     *
     * @return callable
     */
    private function getApiVersionMiddleware()
    {
        return function (callable $handler) {
            return function (
                CommandInterface $command,
                RequestInterface $request = null
            ) use ($handler) {
                return $handler($command, $request->withHeader(
                    'x-amz-glacier-version',
                    $this->getApi()->getMetadata('apiVersion')
                ));
            };
        };
    }

    /**
     * Applies SDK-specific customizations to the Glacier API and doc models:
     * adds the "sourceFile" and "contentSHA256" convenience parameters, and
     * makes "accountId" optional everywhere (it defaults to "-").
     *
     * @internal
     * @codeCoverageIgnore
     */
    public static function applyDocFilters(array $api, array $docs)
    {
        // Add the SourceFile parameter.
        $docs['shapes']['SourceFile']['base'] = 'The path to a file on disk to use instead of the body parameter.';
        $api['shapes']['SourceFile'] = ['type' => 'string'];
        $api['shapes']['UploadArchiveInput']['members']['sourceFile'] = ['shape' => 'SourceFile'];
        $api['shapes']['UploadMultipartPartInput']['members']['sourceFile'] = ['shape' => 'SourceFile'];

        // Add the ContentSHA256 parameter.
        $docs['shapes']['ContentSHA256']['base'] = 'A SHA256 hash of the content of the request body';
        $api['shapes']['ContentSHA256'] = ['type' => 'string'];
        $api['shapes']['UploadArchiveInput']['members']['contentSHA256'] = ['shape' => 'ContentSHA256'];
        $api['shapes']['UploadMultipartPartInput']['members']['contentSHA256'] = ['shape' => 'ContentSHA256'];

        // Add information about "checksum" and "ContentSHA256" being optional.
        $optional = '<div class="alert alert-info">The SDK will compute this value '
            . 'for you on your behalf if it is not supplied.</div>';
        $docs['shapes']['checksum']['append'] = $optional;
        $docs['shapes']['ContentSHA256']['append'] = $optional;

        // Make "accountId" optional for all operations. array_search()
        // returns false when the member is absent; unset($arr[false]) would
        // otherwise remove index 0, so guard the result explicitly.
        foreach ($api['operations'] as $operation) {
            $inputShape =& $api['shapes'][$operation['input']['shape']];
            if (isset($inputShape['required'])) {
                $accountIdIndex = array_search('accountId', $inputShape['required'], true);
                if ($accountIdIndex !== false) {
                    unset($inputShape['required'][$accountIdIndex]);
                }
            }
        }
        // Add information about the default value for "accountId".
        $optional = '<div class="alert alert-info">The SDK will set this value to "-" by default.</div>';
        foreach ($docs['shapes']['string']['refs'] as $name => &$ref) {
            // Compare against false: strpos() returns 0 (falsy) for a match
            // at the start of the string, which a bare truthiness check skips.
            if (strpos($name, 'accountId') !== false) {
                $ref .= $optional;
            }
        }

        return [
            new Service($api, ApiProvider::defaultProvider()),
            new DocModel($docs)
        ];
    }
}
diff --git a/vendor/aws/aws-sdk-php/src/Glacier/MultipartUploader.php b/vendor/aws/aws-sdk-php/src/Glacier/MultipartUploader.php
new file mode 100644
index 0000000..b282fc3
--- /dev/null
+++ b/vendor/aws/aws-sdk-php/src/Glacier/MultipartUploader.php
@@ -0,0 +1,284 @@
+<?php
+namespace Aws\Glacier;
+
+use Aws\CommandInterface;
+use Aws\HashingStream;
+use Aws\Multipart\AbstractUploader;
+use Aws\Multipart\UploadState;
+use Aws\PhpHash;
+use Aws\ResultInterface;
+use GuzzleHttp\Psr7;
+use Psr\Http\Message\StreamInterface as Stream;
+
/**
 * Encapsulates the execution of a multipart upload to Glacier.
 */
class MultipartUploader extends AbstractUploader
{
    // Smallest part size accepted (1 MiB); also the default part size.
    const PART_MIN_SIZE = 1048576;

    // Valid Glacier part sizes: powers of 2, from 1 MB up to 4 GB.
    private static $validPartSizes = [
        1048576, // 1 MB
        2097152, // 2 MB
        4194304, // 4 MB
        8388608, // 8 MB
        16777216, // 16 MB
        33554432, // 32 MB
        67108864, // 64 MB
        134217728, // 128 MB
        268435456, // 256 MB
        536870912, // 512 MB
        1073741824, // 1 GB
        2147483648, // 2 GB
        4294967296, // 4 GB
    ];

    /**
     * Creates an UploadState object for a multipart upload by querying the
     * service for the specified upload's information.
     *
     * @param GlacierClient $client GlacierClient object to use.
     * @param string $vaultName Vault name for the multipart upload.
     * @param string $uploadId Upload ID for the multipart upload.
     * @param string $accountId Account ID for the multipart upload.
     *
     * @return UploadState
     */
    public static function getStateFromService(
        GlacierClient $client,
        $vaultName,
        $uploadId,
        $accountId = '-'
    ) {
        $state = new UploadState([
            'accountId' => $accountId,
            'vaultName' => $vaultName,
            'uploadId' => $uploadId,
        ]);

        foreach ($client->getPaginator('ListParts', $state->getId()) as $result) {
            // Get the part size from the first part in the first result.
            if (!$state->getPartSize()) {
                $state->setPartSize($result['PartSizeInBytes']);
            }
            // Mark all the parts returned by ListParts as uploaded.
            foreach ($result['Parts'] as $part) {
                list($rangeIndex, $rangeSize) = self::parseRange(
                    $part['RangeInBytes'],
                    $state->getPartSize()
                );
                $state->markPartAsUploaded($rangeIndex, [
                    'size' => $rangeSize,
                    'checksum' => $part['SHA256TreeHash'],
                ]);
            }
        }

        $state->setStatus(UploadState::INITIATED);

        return $state;
    }

    /**
     * Creates a multipart upload for a Glacier archive.
     *
     * The valid configuration options are as follows:
     *
     * - account_id: (string, default=string('-')) Account ID for the archive
     *   being uploaded, if different from the account making the request.
     * - archive_description: (string) Description of the archive.
     * - before_complete: (callable) Callback to invoke before the
     *   `CompleteMultipartUpload` operation. The callback should have a
     *   function signature like `function (Aws\Command $command) {...}`.
     * - before_initiate: (callable) Callback to invoke before the
     *   `InitiateMultipartUpload` operation. The callback should have a
     *   function signature like `function (Aws\Command $command) {...}`.
     * - before_upload: (callable) Callback to invoke before any
     *   `UploadMultipartPart` operations. The callback should have a function
     *   signature like `function (Aws\Command $command) {...}`.
     * - concurrency: (int, default=int(3)) Maximum number of concurrent
     *   `UploadMultipartPart` operations allowed during the multipart upload.
     * - part_size: (int, default=int(1048576)) Part size, in bytes, to use when
     *   doing a multipart upload. This must be between 1 MB and 4 GB, and must
     *   be a power of 2 (in megabytes).
     * - prepare_data_source: (callable) Callback to invoke before starting the
     *   multipart upload workflow. The callback should have a function
     *   signature like `function () {...}`.
     * - state: (Aws\Multipart\UploadState) An object that represents the state
     *   of the multipart upload and that is used to resume a previous upload.
     *   When this option is provided, the `account_id`, `key`, and `part_size`
     *   options are ignored.
     * - vault_name: (string, required) Vault name to use for the archive being
     *   uploaded.
     *
     * @param GlacierClient $client Client used for the upload.
     * @param mixed $source Source of the data to upload.
     * @param array $config Configuration used to perform the upload.
     */
    public function __construct(GlacierClient $client, $source, array $config = [])
    {
        parent::__construct($client, $source, $config + [
            'account_id' => '-',
            'vault_name' => null,
        ]);
    }

    /**
     * Describes the Glacier operation names and identifier parameters used
     * by the generic multipart workflow in the base uploader.
     *
     * @return array
     */
    protected function loadUploadWorkflowInfo()
    {
        return [
            'command' => [
                'initiate' => 'InitiateMultipartUpload',
                'upload' => 'UploadMultipartPart',
                'complete' => 'CompleteMultipartUpload',
            ],
            'id' => [
                'account_id' => 'accountId',
                'vault_name' => 'vaultName',
                'upload_id' => 'uploadId',
            ],
            'part_num' => 'range',
        ];
    }

    /**
     * Resolves and validates the part size from the configuration.
     *
     * @return int Validated part size in bytes.
     * @throws \InvalidArgumentException if the size is not a valid Glacier
     *         part size (power of 2 between 1 MB and 4 GB).
     */
    protected function determinePartSize()
    {
        // Make sure the part size is set.
        $partSize = $this->config['part_size'] ?: self::PART_MIN_SIZE;

        // Ensure that the part size is valid.
        if (!in_array($partSize, self::$validPartSizes)) {
            throw new \InvalidArgumentException('The part_size must be a power '
                . 'of 2, in megabytes, such that 1 MB <= PART_SIZE <= 4 GB.');
        }

        return $partSize;
    }

    /**
     * Creates the command parameters for a single upload part, computing the
     * tree and linear hashes of the part's data as a side effect of reading.
     *
     * @param bool $seekable Whether the source stream is seekable.
     * @param int  $number   Part number (the byte range is derived from the
     *                       current stream position instead).
     *
     * @return array|false Part data, or false when the part body is empty.
     */
    protected function createPart($seekable, $number)
    {
        $data = [];
        $firstByte = $this->source->tell();

        // Read from the source to create the body stream. This also
        // calculates the linear and tree hashes as the data is read.
        if ($seekable) {
            // Case 1: Stream is seekable, can make stream from new handle.
            $body = Psr7\Utils::tryFopen($this->source->getMetadata('uri'), 'r');
            $body = $this->limitPartStream(Psr7\Utils::streamFor($body));
            // Create another stream decorated with hashing streams and read
            // through it, so we can get the hash values for the part.
            $decoratedBody = $this->decorateWithHashes($body, $data);
            while (!$decoratedBody->eof()) $decoratedBody->read(1048576);
            // Seek the original source forward to the end of the range.
            $this->source->seek($this->source->tell() + $body->getSize());
        } else {
            // Case 2: Stream is not seekable, must store part in temp stream.
            $source = $this->limitPartStream($this->source);
            $source = $this->decorateWithHashes($source, $data);
            $body = Psr7\Utils::streamFor();
            Psr7\Utils::copyToStream($source, $body);
        }

        // Do not create a part if the body size is zero.
        if ($body->getSize() === 0) {
            return false;
        }

        $body->seek(0);
        $data['body'] = $body;
        $lastByte = $this->source->tell() - 1;
        // Glacier range format, e.g. "bytes 0-1048575/*".
        $data['range'] = "bytes {$firstByte}-{$lastByte}/*";

        return $data;
    }

    /**
     * Records a successfully uploaded part in the upload state using the
     * byte range and checksum from the executed command.
     *
     * @param CommandInterface $command Executed UploadMultipartPart command.
     * @param ResultInterface  $result  Result of the command (unused here).
     */
    protected function handleResult(CommandInterface $command, ResultInterface $result)
    {
        list($rangeIndex, $rangeSize) = $this->parseRange(
            $command['range'],
            $this->state->getPartSize()
        );

        $this->state->markPartAsUploaded($rangeIndex, [
            'size' => $rangeSize,
            'checksum' => $command['checksum']
        ]);
    }

    /**
     * Builds the extra parameters for InitiateMultipartUpload.
     *
     * @return array
     */
    protected function getInitiateParams()
    {
        $params = ['partSize' => $this->state->getPartSize()];
        if (isset($this->config['archive_description'])) {
            $params['archiveDescription'] = $this->config['archive_description'];
        }

        return $params;
    }

    /**
     * Builds the parameters for CompleteMultipartUpload: total archive size
     * and the root tree hash derived from the per-part tree hashes.
     *
     * @return array
     */
    protected function getCompleteParams()
    {
        $treeHash = new TreeHash();
        $archiveSize = 0;
        foreach ($this->state->getUploadedParts() as $part) {
            $archiveSize += $part['size'];
            $treeHash->addChecksum($part['checksum']);
        }

        return [
            'archiveSize' => $archiveSize,
            'checksum' => bin2hex($treeHash->complete()),
        ];
    }

    /**
     * Decorates a stream with a tree AND linear sha256 hashing stream.
     *
     * The hash results are written into $data (by reference) under the
     * 'checksum' (hex tree hash) and 'ContentSHA256' (hex linear hash) keys
     * once the decorated stream has been fully read.
     *
     * @param Stream $stream Stream to decorate.
     * @param array $data Data bag that results are injected into.
     *
     * @return Stream
     */
    private function decorateWithHashes(Stream $stream, array &$data)
    {
        // Make sure that a tree hash is calculated.
        $stream = new HashingStream($stream, new TreeHash(),
            function ($result) use (&$data) {
                $data['checksum'] = bin2hex($result);
            }
        );

        // Make sure that a linear SHA256 hash is calculated.
        $stream = new HashingStream($stream, new PhpHash('sha256'),
            function ($result) use (&$data) {
                $data['ContentSHA256'] = bin2hex($result);
            }
        );

        return $stream;
    }

    /**
     * Parses a Glacier range string into a size and part number.
     *
     * @param string $range Glacier range string (e.g., "bytes 5-5000/*")
     * @param int $partSize The chosen part size
     *
     * @return array
     */
    private static function parseRange($range, $partSize)
    {
        // Strip away the prefix and suffix.
        if (strpos($range, 'bytes') !== false) {
            $range = substr($range, 6, -2);
        }

        // Split that range into its parts.
        list($firstByte, $lastByte) = explode('-', $range);

        // Calculate and return range index and range size
        return [
            intval($firstByte / $partSize) + 1,
            $lastByte - $firstByte + 1,
        ];
    }
}
diff --git a/vendor/aws/aws-sdk-php/src/Glacier/TreeHash.php b/vendor/aws/aws-sdk-php/src/Glacier/TreeHash.php
new file mode 100644
index 0000000..9289725
--- /dev/null
+++ b/vendor/aws/aws-sdk-php/src/Glacier/TreeHash.php
@@ -0,0 +1,118 @@
+<?php
+namespace Aws\Glacier;
+
+use Aws\HashInterface;
+
/**
 * Encapsulates the creation of an Amazon Glacier tree hash from streamed
 * data: data is chunked into 1 MiB leaves, each leaf is hashed, and the
 * leaf hashes are pairwise combined up to a single root hash.
 */
class TreeHash implements HashInterface
{
    // Leaf chunk size (1 MiB).
    const MB = 1048576;

    // SHA-256 of the empty string, used as the root when no data was added.
    const EMPTY_HASH = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855';

    /** @var string Algorithm used for hashing. */
    private $algorithm;

    /** @var string Buffered data that has not yet been hashed. */
    private $buffer;

    /** @var array Binary checksums from which the tree hash is derived. */
    private $checksums = [];

    /** @var string Resulting hash in binary form. */
    private $hash;

    /**
     * @param string $algorithm Hash algorithm name accepted by hash()
     *                          (defaults to 'sha256', as Glacier requires).
     */
    public function __construct($algorithm = 'sha256')
    {
        $this->algorithm = $algorithm;
        $this->reset();
    }

    /**
     * {@inheritdoc}
     * @throws \LogicException if the root tree hash is already calculated
     */
    public function update($data)
    {
        // Error if hash is already calculated.
        if ($this->hash) {
            throw new \LogicException('You may not add more data to a '
                . 'complete tree hash.');
        }

        // Buffer incoming data.
        $this->buffer .= $data;

        // When there is more than a MB of data, create a checksum.
        while (strlen($this->buffer) >= self::MB) {
            $data = substr($this->buffer, 0, self::MB);
            $this->buffer = substr($this->buffer, self::MB) ?: '';
            $this->checksums[] = hash($this->algorithm, $data, true);
        }

        return $this;
    }

    /**
     * Add a checksum to the tree hash directly
     *
     * @param string $checksum The checksum to add
     * @param bool $inBinaryForm TRUE if checksum is in binary form
     *
     * @return self
     * @throws \LogicException if the root tree hash is already calculated
     */
    public function addChecksum($checksum, $inBinaryForm = false)
    {
        // Error if hash is already calculated
        if ($this->hash) {
            throw new \LogicException('You may not add more checksums to a '
                . 'complete tree hash.');
        }

        // Convert the checksum to binary form if necessary
        $this->checksums[] = $inBinaryForm ? $checksum : hex2bin($checksum);

        return $this;
    }

    /**
     * Finalizes the tree hash and returns the root checksum.
     *
     * After the first call, the result is cached and no further data or
     * checksums may be added (update()/addChecksum() will throw).
     *
     * @return string Root tree hash in binary form.
     */
    public function complete()
    {
        if (!$this->hash) {
            // Hash any remaining buffered data (the final, possibly short leaf).
            if (strlen($this->buffer) > 0) {
                $this->checksums[] = hash($this->algorithm, $this->buffer, true);
                $this->buffer = '';
            }

            // If no data was ever added, use the hash of the empty string.
            if (!$this->checksums) {
                $this->checksums[] = hex2bin(self::EMPTY_HASH);
            }

            // Combine checksums pairwise, level by level, until one root
            // remains; an odd leftover node is carried up unchanged.
            $hashes = $this->checksums;
            while (count($hashes) > 1) {
                $sets = array_chunk($hashes, 2);
                $hashes = [];
                foreach ($sets as $set) {
                    $hashes[] = (count($set) === 1)
                        ? $set[0]
                        : hash($this->algorithm, $set[0] . $set[1], true);
                }
            }

            $this->hash = $hashes[0];
        }

        return $this->hash;
    }

    /**
     * Clears all state so the instance can compute a new tree hash.
     */
    public function reset()
    {
        $this->hash = null;
        $this->checksums = [];
        $this->buffer = '';
    }
}