Commit aef27f09 authored by Jeremy Mikola

Merge pull request #57

parents 298624b9 67c450b9
<?php
namespace MongoDB\Exception;
class GridFSCorruptFileException extends \MongoDB\Driver\Exception\RuntimeException implements Exception
{
}
<?php
namespace MongoDB\Exception;
class GridFSFileNotFoundException extends \MongoDB\Driver\Exception\RuntimeException implements Exception
{
public function __construct($filename, $namespace)
{
parent::__construct(sprintf('Unable to find file "%s" in namespace "%s"', $filename, $namespace));
}
}
<?php
namespace MongoDB\GridFS;
use MongoDB\BSON\ObjectId;
use MongoDB\Driver\Cursor;
use MongoDB\Driver\Manager;
use MongoDB\Driver\ReadPreference;
use MongoDB\Driver\WriteConcern;
use MongoDB\Exception\GridFSFileNotFoundException;
use MongoDB\Exception\InvalidArgumentTypeException;
use MongoDB\Operation\Find;
/**
* Bucket provides a public API for interacting with the GridFS files and chunks
* collections.
*
* @api
*/
class Bucket
{
private static $streamWrapper;
private $collectionsWrapper;
private $databaseName;
private $options;
/**
* Constructs a GridFS bucket.
*
* Supported options:
*
* * bucketName (string): The bucket name, which will be used as a prefix
* for the files and chunks collections. Defaults to "fs".
*
* * chunkSizeBytes (integer): The chunk size in bytes. Defaults to
* 261120 (i.e. 255 KiB).
*
* * readPreference (MongoDB\Driver\ReadPreference): Read preference.
*
* * writeConcern (MongoDB\Driver\WriteConcern): Write concern.
*
* @param Manager $manager Manager instance from the driver
* @param string $databaseName Database name
* @param array $options Bucket options
* @throws InvalidArgumentTypeException
*/
public function __construct(Manager $manager, $databaseName, array $options = [])
{
$options += [
'bucketName' => 'fs',
'chunkSizeBytes' => 261120,
];
if (isset($options['bucketName']) && ! is_string($options['bucketName'])) {
throw new InvalidArgumentTypeException('"bucketName" option', $options['bucketName'], 'string');
}
if (isset($options['chunkSizeBytes']) && ! is_integer($options['chunkSizeBytes'])) {
throw new InvalidArgumentTypeException('"chunkSizeBytes" option', $options['chunkSizeBytes'], 'integer');
}
if (isset($options['readPreference']) && ! $options['readPreference'] instanceof ReadPreference) {
throw new InvalidArgumentTypeException('"readPreference" option', $options['readPreference'], 'MongoDB\Driver\ReadPreference');
}
if (isset($options['writeConcern']) && ! $options['writeConcern'] instanceof WriteConcern) {
throw new InvalidArgumentTypeException('"writeConcern" option', $options['writeConcern'], 'MongoDB\Driver\WriteConcern');
}
$this->databaseName = (string) $databaseName;
$this->options = $options;
$collectionOptions = array_intersect_key($options, ['readPreference' => 1, 'writeConcern' => 1]);
$this->collectionsWrapper = new GridFSCollectionsWrapper($manager, $databaseName, $options['bucketName'], $collectionOptions);
$this->registerStreamWrapper($manager);
}
/**
* Delete a file from the GridFS bucket.
*
* If the files collection document is not found, this method will still
* attempt to delete orphaned chunks.
*
* @param ObjectId $id ObjectId of the file
* @throws GridFSFileNotFoundException
*/
public function delete(ObjectId $id)
{
$file = $this->collectionsWrapper->getFilesCollection()->findOne(['_id' => $id]);
$this->collectionsWrapper->getFilesCollection()->deleteOne(['_id' => $id]);
$this->collectionsWrapper->getChunksCollection()->deleteMany(['files_id' => $id]);
if ($file === null) {
throw new GridFSFileNotFoundException($id, $this->collectionsWrapper->getFilesCollection()->getNameSpace());
}
}
/**
* Writes the contents of a GridFS file to a writable stream.
*
* @param ObjectId $id ObjectId of the file
* @param resource $destination Writable Stream
* @throws GridFSFileNotFoundException
*/
public function downloadToStream(ObjectId $id, $destination)
{
$file = $this->collectionsWrapper->getFilesCollection()->findOne(
['_id' => $id],
['typeMap' => ['root' => 'stdClass']]
);
if ($file === null) {
throw new GridFSFileNotFoundException($id, $this->collectionsWrapper->getFilesCollection()->getNameSpace());
}
$gridFsStream = new GridFSDownload($this->collectionsWrapper, $file);
$gridFsStream->downloadToStream($destination);
}
/**
* Writes the contents of a GridFS file, which is selected by name and
* revision, to a writable stream.
*
* Supported options:
*
* * revision (integer): Which revision (i.e. documents with the same
* filename and different uploadDate) of the file to retrieve. Defaults
* to -1 (i.e. the most recent revision).
*
* Revision numbers are defined as follows:
*
* * 0 = the original stored file
* * 1 = the first revision
* * 2 = the second revision
* * etc…
* * -2 = the second most recent revision
* * -1 = the most recent revision
*
* @param string $filename File name
* @param resource $destination Writable Stream
* @param array $options Download options
* @throws GridFSFileNotFoundException
*/
public function downloadToStreamByName($filename, $destination, array $options = [])
{
$options += ['revision' => -1];
$file = $this->findFileRevision($filename, $options['revision']);
$gridFsStream = new GridFSDownload($this->collectionsWrapper, $file);
$gridFsStream->downloadToStream($destination);
}
/**
* Drops the files and chunks collections associated with this GridFS bucket.
*
*/
public function drop()
{
$this->collectionsWrapper->dropCollections();
}
/**
* Find files from the GridFS bucket's files collection.
*
* @see Find::__construct() for supported options
* @param array|object $filter Query by which to filter documents
* @param array $options Additional options
* @return Cursor
*/
public function find($filter, array $options = [])
{
return $this->collectionsWrapper->getFilesCollection()->find($filter, $options);
}
public function getCollectionsWrapper()
{
return $this->collectionsWrapper;
}
public function getDatabaseName()
{
return $this->databaseName;
}
/**
* Gets the ID of the GridFS file associated with a stream.
*
* @param resource $stream GridFS stream
* @return mixed
*/
public function getIdFromStream($stream)
{
$metadata = stream_get_meta_data($stream);
if ($metadata['wrapper_data'] instanceof StreamWrapper) {
return $metadata['wrapper_data']->getId();
}
return;
}
/**
* Opens a readable stream for reading a GridFS file.
*
* @param ObjectId $id ObjectId of the file
* @return resource
* @throws GridFSFileNotFoundException
*/
public function openDownloadStream(ObjectId $id)
{
$file = $this->collectionsWrapper->getFilesCollection()->findOne(
['_id' => $id],
['typeMap' => ['root' => 'stdClass']]
);
if ($file === null) {
throw new GridFSFileNotFoundException($id, $this->collectionsWrapper->getFilesCollection()->getNameSpace());
}
return $this->openDownloadStreamByFile($file);
}
/**
* Opens a readable stream to read a GridFS file, which is selected
* by name and revision.
*
* Supported options:
*
* * revision (integer): Which revision (i.e. documents with the same
* filename and different uploadDate) of the file to retrieve. Defaults
* to -1 (i.e. the most recent revision).
*
* Revision numbers are defined as follows:
*
* * 0 = the original stored file
* * 1 = the first revision
* * 2 = the second revision
* * etc…
* * -2 = the second most recent revision
* * -1 = the most recent revision
*
* @param string $filename File name
* @param array $options Download options
* @return resource
* @throws GridFSFileNotFoundException
*/
public function openDownloadStreamByName($filename, array $options = [])
{
$options += ['revision' => -1];
$file = $this->findFileRevision($filename, $options['revision']);
return $this->openDownloadStreamByFile($file);
}
/**
* Opens a writable stream for writing a GridFS file.
*
* Supported options:
*
* * chunkSizeBytes (integer): The chunk size in bytes. Defaults to the
* bucket's chunk size.
*
* @param string $filename File name
* @param array $options Stream options
* @return resource
*/
public function openUploadStream($filename, array $options = [])
{
$options += ['chunkSizeBytes' => $this->options['chunkSizeBytes']];
$streamOptions = [
'collectionsWrapper' => $this->collectionsWrapper,
'uploadOptions' => $options,
];
$context = stream_context_create(['gridfs' => $streamOptions]);
return fopen(sprintf('gridfs://%s/%s', $this->databaseName, $filename), 'w', false, $context);
}
/**
* Renames the GridFS file with the specified ID.
*
* @param ObjectId $id ID of the file to rename
* @param string $newFilename New file name
* @throws GridFSFileNotFoundException
*/
public function rename(ObjectId $id, $newFilename)
{
$filesCollection = $this->collectionsWrapper->getFilesCollection();
$result = $filesCollection->updateOne(['_id' => $id], ['$set' => ['filename' => $newFilename]]);
if ($result->getModifiedCount() == 0) {
throw new GridFSFileNotFoundException($id, $this->collectionsWrapper->getFilesCollection()->getNameSpace());
}
}
/**
* Writes the contents of a readable stream to a GridFS file.
*
* Supported options:
*
* * chunkSizeBytes (integer): The chunk size in bytes. Defaults to the
* bucket's chunk size.
*
* @param string $filename File name
* @param resource $source Readable stream
* @param array $options Stream options
* @return ObjectId
*/
public function uploadFromStream($filename, $source, array $options = [])
{
$options += ['chunkSizeBytes' => $this->options['chunkSizeBytes']];
$gridFsStream = new GridFSUpload($this->collectionsWrapper, $filename, $options);
return $gridFsStream->uploadFromStream($source);
}
private function findFileRevision($filename, $revision)
{
if ($revision < 0) {
$skip = abs($revision) - 1;
$sortOrder = -1;
} else {
$skip = $revision;
$sortOrder = 1;
}
$filesCollection = $this->collectionsWrapper->getFilesCollection();
$file = $filesCollection->findOne(
['filename' => $filename],
[
'skip' => $skip,
'sort' => ['uploadDate' => $sortOrder],
'typeMap' => ['root' => 'stdClass'],
]
);
if ($file === null) {
throw new GridFSFileNotFoundException($filename, $filesCollection->getNameSpace());
}
return $file;
}
private function openDownloadStreamByFile($file)
{
$options = [
'collectionsWrapper' => $this->collectionsWrapper,
'file' => $file,
];
$context = stream_context_create(['gridfs' => $options]);
return fopen(sprintf('gridfs://%s/%s', $this->databaseName, $file->filename), 'r', false, $context);
}
private function registerStreamWrapper(Manager $manager)
{
if (isset(self::$streamWrapper)) {
return;
}
self::$streamWrapper = new StreamWrapper();
self::$streamWrapper->register($manager);
}
}
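For context, here is a minimal usage sketch of the Bucket API defined above. It is an editorial example rather than part of this change set; the connection string, database name, filename, and file contents are placeholders.
<?php
// Editorial example (not part of this commit): exercising Bucket's public API.
use MongoDB\Driver\Manager;
use MongoDB\GridFS\Bucket;

$manager = new Manager('mongodb://127.0.0.1:27017'); // placeholder connection string
$bucket = new Bucket($manager, 'test');              // "test" database, default "fs" bucket

// Upload from an in-memory stream; uploadFromStream() returns the new file's ObjectId.
$source = fopen('php://temp', 'w+');
fwrite($source, 'hello gridfs');
rewind($source);
$id = $bucket->uploadFromStream('example.txt', $source);

// Download the most recent revision by name (revision -1 is the default).
$destination = fopen('php://temp', 'w+');
$bucket->downloadToStreamByName('example.txt', $destination, ['revision' => -1]);
rewind($destination);
echo stream_get_contents($destination); // "hello gridfs"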
<?php
namespace MongoDB\GridFS;
use MongoDB\Collection;
use MongoDB\Driver\Manager;
use MongoDB\Driver\ReadPreference;
use MongoDB\Driver\WriteConcern;
use MongoDB\Exception\InvalidArgumentTypeException;
/**
* GridFSCollectionsWrapper abstracts the GridFS files and chunks collections.
*
* @internal
*/
class GridFSCollectionsWrapper
{
private $chunksCollection;
private $ensuredIndexes = false;
private $filesCollection;
/**
* Constructs a GridFS collection wrapper.
*
* @see Collection::__construct() for supported options
* @param Manager $manager Manager instance from the driver
* @param string $databaseName Database name
* @param string $bucketName Bucket name
* @param array $collectionOptions Collection options
* @throws InvalidArgumentException
*/
public function __construct(Manager $manager, $databaseName, $bucketName, array $collectionOptions = [])
{
$this->filesCollection = new Collection($manager, sprintf('%s.%s.files', $databaseName, $bucketName), $collectionOptions);
$this->chunksCollection = new Collection($manager, sprintf('%s.%s.chunks', $databaseName, $bucketName), $collectionOptions);
}
public function dropCollections()
{
$this->filesCollection->drop();
$this->chunksCollection->drop();
}
public function getChunksCollection()
{
return $this->chunksCollection;
}
public function getFilesCollection()
{
return $this->filesCollection;
}
public function insertChunk($chunk)
{
$this->ensureIndexes();
$this->chunksCollection->insertOne($chunk);
}
public function insertFile($file)
{
$this->ensureIndexes();
$this->filesCollection->insertOne($file);
}
private function ensureChunksIndex()
{
foreach ($this->chunksCollection->listIndexes() as $index) {
if ($index->isUnique() && $index->getKey() === ['files_id' => 1, 'n' => 1]) {
return;
}
}
$this->chunksCollection->createIndex(['files_id' => 1, 'n' => 1], ['unique' => true]);
}
private function ensureFilesIndex()
{
foreach ($this->filesCollection->listIndexes() as $index) {
if ($index->getKey() === ['filename' => 1, 'uploadDate' => 1]) {
return;
}
}
$this->filesCollection->createIndex(['filename' => 1, 'uploadDate' => 1]);
}
private function ensureIndexes()
{
if ($this->ensuredIndexes) {
return;
}
if ( ! $this->isFilesCollectionEmpty()) {
return;
}
$this->ensureFilesIndex();
$this->ensureChunksIndex();
$this->ensuredIndexes = true;
}
private function isFilesCollectionEmpty()
{
return null === $this->filesCollection->findOne([], [
'readPreference' => new ReadPreference(ReadPreference::RP_PRIMARY),
'projection' => ['_id' => 1],
]);
}
}
<?php
namespace MongoDB\GridFS;
use MongoDB\Driver\Exception\Exception;
use MongoDB\Exception\GridFSCorruptFileException;
use stdClass;
/**
* GridFSDownload abstracts the process of reading a GridFS file.
*
* @internal
*/
class GridFSDownload
{
private $buffer;
private $bufferEmpty = true;
private $bufferFresh = true;
private $bytesSeen = 0;
private $chunkOffset = 0;
private $chunksIterator;
private $collectionsWrapper;
private $file;
private $firstCheck = true;
private $iteratorEmpty = false;
private $numChunks;
/**
* Constructs a GridFS download stream.
*
* @param GridFSCollectionsWrapper $collectionsWrapper GridFS collections wrapper
* @param stdClass $file GridFS file document
* @throws GridFSCorruptFileException
*/
public function __construct(GridFSCollectionsWrapper $collectionsWrapper, stdClass $file)
{
$this->collectionsWrapper = $collectionsWrapper;
$this->file = $file;
try {
$cursor = $this->collectionsWrapper->getChunksCollection()->find(
['files_id' => $this->file->_id],
['sort' => ['n' => 1]]
);
} catch (Exception $e) {
// TODO: Why do we replace a driver exception with GridFSCorruptFileException here?
throw new GridFSCorruptFileException();
}
$this->chunksIterator = new \IteratorIterator($cursor);
$this->numChunks = ($file->length >= 0) ? ceil($file->length / $file->chunkSize) : 0;
$this->buffer = fopen('php://temp', 'w+');
}
public function close()
{
fclose($this->buffer);
}
public function downloadNumBytes($numToRead)
{
$output = "";
if ($this->bufferFresh) {
rewind($this->buffer);
$this->bufferFresh = false;
}
// TODO: Should we be checking for fread errors here?
$output = fread($this->buffer, $numToRead);
if (strlen($output) == $numToRead) {
return $output;
}
fclose($this->buffer);
$this->buffer = fopen("php://temp", "w+");
$this->bufferFresh = true;
$this->bufferEmpty = true;
$bytesLeft = $numToRead - strlen($output);
while (strlen($output) < $numToRead && $this->advanceChunks()) {
$bytesLeft = $numToRead - strlen($output);
$output .= substr($this->chunksIterator->current()->data->getData(), 0, $bytesLeft);
}
if ( ! $this->iteratorEmpty && $this->file->length > 0 && $bytesLeft < strlen($this->chunksIterator->current()->data->getData())) {
fwrite($this->buffer, substr($this->chunksIterator->current()->data->getData(), $bytesLeft));
$this->bufferEmpty = false;
}
return $output;
}
public function downloadToStream($destination)
{
while ($this->advanceChunks()) {
// TODO: Should we be checking for fwrite errors here?
fwrite($destination, $this->chunksIterator->current()->data->getData());
}
}
public function getFile()
{
return $this->file;
}
public function getId()
{
return $this->file->_id;
}
public function getSize()
{
return $this->file->length;
}
public function isEOF()
{
return ($this->iteratorEmpty && $this->bufferEmpty);
}
private function advanceChunks()
{
if ($this->chunkOffset >= $this->numChunks) {
$this->iteratorEmpty = true;
return false;
}
if ($this->firstCheck) {
$this->chunksIterator->rewind();
$this->firstCheck = false;
} else {
$this->chunksIterator->next();
}
if ( ! $this->chunksIterator->valid()) {
throw new GridFSCorruptFileException();
}
if ($this->chunksIterator->current()->n != $this->chunkOffset) {
throw new GridFSCorruptFileException();
}
$actualChunkSize = strlen($this->chunksIterator->current()->data->getData());
$expectedChunkSize = ($this->chunkOffset == $this->numChunks - 1)
? ($this->file->length - $this->bytesSeen)
: $this->file->chunkSize;
if ($actualChunkSize != $expectedChunkSize) {
throw new GridFSCorruptFileException();
}
$this->bytesSeen += $actualChunkSize;
$this->chunkOffset++;
return true;
}
}
<?php
namespace MongoDB\GridFS;
use MongoDB\BSON\Binary;
use MongoDB\BSON\ObjectId;
use MongoDB\BSON\UTCDateTime;
use MongoDB\Driver\Exception\Exception;
use MongoDB\Exception\InvalidArgumentTypeException;
/**
* GridFSUpload abstracts the process of writing a GridFS file.
*
* @internal
*/
class GridFSUpload
{
private $buffer;
private $bufferLength = 0;
private $chunkOffset = 0;
private $chunkSize;
private $collectionsWrapper;
private $ctx;
private $file;
private $indexChecker;
private $isClosed = false;
private $length = 0;
/**
* Constructs a GridFS upload stream.
*
* Supported options:
*
* * aliases (array of strings): DEPRECATED An array of aliases.
* Applications wishing to store aliases should add an aliases field to
* the metadata document instead.
*
* * chunkSizeBytes (integer): The chunk size in bytes. Defaults to
* 261120 (i.e. 255 KiB).
*
* * contentType (string): DEPRECATED content type to be stored with the
* file. This information should now be added to the metadata.
*
* * metadata (document): User data for the "metadata" field of the files
* collection document.
*
* @param GridFSCollectionsWrapper $collectionsWrapper GridFS collections wrapper
* @param string $filename File name
* @param array $options Upload options
* @throws InvalidArgumentTypeException
*/
public function __construct(GridFSCollectionsWrapper $collectionsWrapper, $filename, array $options = [])
{
$options += ['chunkSizeBytes' => 261120];
if (isset($options['aliases']) && ! \MongoDB\is_string_array($options['aliases'])) {
throw new InvalidArgumentTypeException('"aliases" option', $options['aliases'], 'array of strings');
}
if (isset($options['contentType']) && ! is_string($options['contentType'])) {
throw new InvalidArgumentTypeException('"contentType" option', $options['contentType'], 'string');
}
if (isset($options['metadata']) && ! is_array($options['metadata']) && ! is_object($options['metadata'])) {
throw new InvalidArgumentTypeException('"metadata" option', $options['metadata'], 'array or object');
}
$this->chunkSize = $options['chunkSizeBytes'];
$this->collectionsWrapper = $collectionsWrapper;
$this->buffer = fopen('php://temp', 'w+');
$this->ctx = hash_init('md5');
$this->file = [
'_id' => new ObjectId(),
'chunkSize' => $this->chunkSize,
'filename' => (string) $filename,
'uploadDate' => $this->createUploadDate(),
] + array_intersect_key($options, ['aliases' => 1, 'contentType' => 1, 'metadata' => 1]);
}
/**
* Closes an active stream and flushes all buffered data to GridFS.
*/
public function close()
{
if ($this->isClosed) {
// TODO: Should this be an error condition? e.g. BadMethodCallException
return;
}
rewind($this->buffer);
$cached = stream_get_contents($this->buffer);
if (strlen($cached) > 0) {
$this->insertChunk($cached);
}
fclose($this->buffer);
$this->fileCollectionInsert();
$this->isClosed = true;
}
public function getChunkSize()
{
return $this->chunkSize;
}
public function getFile()
{
return $this->file;
}
public function getId()
{
return $this->file['_id'];
}
public function getLength()
{
return $this->length;
}
public function getSize()
{
return $this->length;
}
/**
* Inserts binary data into GridFS via chunks.
*
* Data will be buffered internally until chunkSizeBytes are accumulated, at
* which point a chunk's worth of data will be inserted and the buffer
* reset.
*
* @param string $toWrite Binary data to write
* @return int
*/
public function insertChunks($toWrite)
{
if ($this->isClosed) {
// TODO: Should this be an error condition? e.g. BadMethodCallException
return;
}
$readBytes = 0;
while ($readBytes != strlen($toWrite)) {
$addToBuffer = substr($toWrite, $readBytes, $this->chunkSize - $this->bufferLength);
fwrite($this->buffer, $addToBuffer);
$readBytes += strlen($addToBuffer);
$this->bufferLength += strlen($addToBuffer);
if ($this->bufferLength == $this->chunkSize) {
rewind($this->buffer);
$this->insertChunk(stream_get_contents($this->buffer));
ftruncate($this->buffer, 0);
$this->bufferLength = 0;
}
}
return $readBytes;
}
public function isEOF()
{
return $this->isClosed;
}
/**
* Writes the contents of a readable stream to a GridFS file.
*
* @param resource $source Readable stream
* @return ObjectId
*/
public function uploadFromStream($source)
{
if ( ! is_resource($source) || get_resource_type($source) != "stream") {
throw new InvalidArgumentTypeException('$source', $source, 'resource');
}
$streamMetadata = stream_get_meta_data($source);
while ($data = $this->readChunk($source)) {
$this->insertChunk($data);
}
return $this->fileCollectionInsert();
}
private function abort()
{
$this->collectionsWrapper->getChunksCollection()->deleteMany(['files_id' => $this->file['_id']]);
$this->collectionsWrapper->getFilesCollection()->deleteOne(['_id' => $this->file['_id']]);
$this->isClosed = true;
}
// From: http://stackoverflow.com/questions/3656713/how-to-get-current-time-in-milliseconds-in-php
private function createUploadDate()
{
$parts = explode(' ', microtime());
$milliseconds = sprintf('%d%03d', $parts[1], $parts[0] * 1000);
return new UTCDateTime($milliseconds);
}
private function fileCollectionInsert()
{
if ($this->isClosed) {
// TODO: Should this be an error condition? e.g. BadMethodCallException
return;
}
$md5 = hash_final($this->ctx);
$this->file['length'] = $this->length;
$this->file['md5'] = $md5;
$this->collectionsWrapper->insertFile($this->file);
return $this->file['_id'];
}
private function insertChunk($data)
{
if ($this->isClosed) {
// TODO: Should this be an error condition? e.g. BadMethodCallException
return;
}
$toUpload = [
'files_id' => $this->file['_id'],
'n' => $this->chunkOffset,
'data' => new Binary($data, Binary::TYPE_GENERIC),
];
hash_update($this->ctx, $data);
$this->collectionsWrapper->insertChunk($toUpload);
$this->length += strlen($data);
$this->chunkOffset++;
}
private function readChunk($source)
{
try {
$data = fread($source, $this->chunkSize);
} catch (Exception $e) {
$this->abort();
throw $e;
}
return $data;
}
}
<?php
namespace MongoDB\GridFS;
/**
* Stream wrapper for reading and writing a GridFS file.
*
* @internal
* @see Bucket::openUploadStream()
* @see Bucket::openDownloadStream()
*/
class StreamWrapper
{
public $context;
private $collectionsWrapper;
private $gridFSStream;
private $id;
private $identifier;
private $mode;
public function getId()
{
return $this->id;
}
public function openReadStream()
{
$context = stream_context_get_options($this->context);
$this->gridFSStream = new GridFSDownload($this->collectionsWrapper, $context['gridfs']['file']);
$this->id = $this->gridFSStream->getId();
return true;
}
public function openWriteStream()
{
$context = stream_context_get_options($this->context);
$options = $context['gridfs']['uploadOptions'];
$this->gridFSStream = new GridFSUpload($this->collectionsWrapper, $this->identifier, $options);
$this->id = $this->gridFSStream->getId();
return true;
}
/**
* Register the GridFS stream wrapper.
*/
public static function register()
{
if (in_array('gridfs', stream_get_wrappers())) {
stream_wrapper_unregister('gridfs');
}
stream_wrapper_register('gridfs', get_called_class(), \STREAM_IS_URL);
}
public function stream_close()
{
$this->gridFSStream->close();
}
public function stream_eof()
{
return $this->gridFSStream->isEOF();
}
public function stream_open($path, $mode, $options, &$openedPath)
{
$this->initProtocol($path);
$context = stream_context_get_options($this->context);
$this->collectionsWrapper = $context['gridfs']['collectionsWrapper'];
$this->mode = $mode;
switch ($this->mode) {
case 'r': return $this->openReadStream();
case 'w': return $this->openWriteStream();
default: return false;
}
}
public function stream_read($count)
{
return $this->gridFSStream->downloadNumBytes($count);
}
public function stream_stat()
{
$stat = $this->getStatTemplate();
$stat[7] = $stat['size'] = $this->gridFSStream->getSize();
return $stat;
}
public function stream_write($data)
{
$this->gridFSStream->insertChunks($data);
return strlen($data);
}
/**
* Gets a URL stat template with default values
* from https://github.com/aws/aws-sdk-php/blob/master/src/S3/StreamWrapper.php
* @return array
*/
private function getStatTemplate()
{
return [
0 => 0, 'dev' => 0,
1 => 0, 'ino' => 0,
2 => 0, 'mode' => 0,
3 => 0, 'nlink' => 0,
4 => 0, 'uid' => 0,
5 => 0, 'gid' => 0,
6 => -1, 'rdev' => -1,
7 => 0, 'size' => 0,
8 => 0, 'atime' => 0,
9 => 0, 'mtime' => 0,
10 => 0, 'ctime' => 0,
11 => -1, 'blksize' => -1,
12 => -1, 'blocks' => -1,
];
}
private function initProtocol($path)
{
$parsed_path = parse_url($path);
$this->identifier = substr($parsed_path['path'], 1);
}
}
...@@ -140,3 +140,16 @@ function server_supports_feature(Server $server, $feature)
return ($minWireVersion <= $feature && $maxWireVersion >= $feature);
}
function is_string_array($input) {
if (!is_array($input)){
return false;
}
foreach($input as $item) {
if (!is_string($item)) {
return false;
}
}
return true;
}
<?php
namespace MongoDB\Tests\GridFS;
use MongoDB\GridFS;
/**
* Functional tests for the Bucket class.
*/
class BucketFunctionalTest extends FunctionalTestCase
{
/**
* @expectedException MongoDB\Exception\InvalidArgumentTypeException
* @dataProvider provideInvalidConstructorOptions
*/
public function testConstructorOptionTypeChecks(array $options)
{
new \MongoDB\GridFS\Bucket($this->manager, $this->getDatabaseName(), $options);
}
public function provideInvalidConstructorOptions()
{
$options = [];
$invalidBucketNames = [123, 3.14, true, [], new \stdClass];
$invalidChunkSizes = ['foo', 3.14, true, [], new \stdClass];
foreach ($this->getInvalidReadPreferenceValues() as $value) {
$options[][] = ['readPreference' => $value];
}
foreach ($this->getInvalidWriteConcernValues() as $value) {
$options[][] = ['writeConcern' => $value];
}
foreach ($invalidBucketNames as $value) {
$options[][] = ['bucketName' => $value];
}
foreach ($invalidChunkSizes as $value) {
$options[][] = ['chunkSizeBytes' => $value];
}
return $options;
}
public function testGetDatabaseName()
{
$this->assertEquals($this->getDatabaseName(), $this->bucket->getDatabaseName());
}
public function testBasicOperations()
{
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("hello world"));
$contents = stream_get_contents($this->bucket->openDownloadStream($id));
$this->assertEquals("hello world", $contents);
$this->assertEquals(1, $this->bucket->getCollectionsWrapper()->getFilesCollection()->count());
$this->assertEquals(1, $this->bucket->getCollectionsWrapper()->getChunksCollection()->count());
$this->bucket->delete($id);
$error=null;
try{
$this->bucket->openDownloadStream($id);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$fileNotFound = '\MongoDB\Exception\GridFSFileNotFoundException';
$this->assertTrue($error instanceof $fileNotFound);
$this->assertEquals(0, $this->bucket->getCollectionsWrapper()->getFilesCollection()->count());
$this->assertEquals(0, $this->bucket->getCollectionsWrapper()->getChunksCollection()->count());
}
public function testMultiChunkDelete()
{
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("hello"), ['chunkSizeBytes'=>1]);
$this->assertEquals(1, $this->bucket->getCollectionsWrapper()->getFilesCollection()->count());
$this->assertEquals(5, $this->bucket->getCollectionsWrapper()->getChunksCollection()->count());
$this->bucket->delete($id);
$this->assertEquals(0, $this->bucket->getCollectionsWrapper()->getFilesCollection()->count());
$this->assertEquals(0, $this->bucket->getCollectionsWrapper()->getChunksCollection()->count());
}
public function testEmptyFile()
{
$id = $this->bucket->uploadFromStream("test_filename",$this->generateStream(""));
$contents = stream_get_contents($this->bucket->openDownloadStream($id));
$this->assertEquals("", $contents);
$this->assertEquals(1, $this->bucket->getCollectionsWrapper()->getFilesCollection()->count());
$this->assertEquals(0, $this->bucket->getCollectionsWrapper()->getChunksCollection()->count());
$raw = $this->bucket->getCollectionsWrapper()->getFilesCollection()->findOne();
$this->assertEquals(0, $raw->length);
$this->assertEquals($id, $raw->_id);
$this->assertTrue($raw->uploadDate instanceof \MongoDB\BSON\UTCDateTime);
$this->assertEquals(255 * 1024, $raw->chunkSize);
$this->assertTrue(is_string($raw->md5));
}
public function testCorruptChunk()
{
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("foobar"));
$this->collectionsWrapper->getChunksCollection()->updateOne(['files_id' => $id],
['$set' => ['data' => new \MongoDB\BSON\Binary('foo', \MongoDB\BSON\Binary::TYPE_GENERIC)]]);
$error = null;
try{
$download = $this->bucket->openDownloadStream($id);
stream_get_contents($download);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$corruptFileError = '\MongoDB\Exception\GridFSCorruptFileException';
$this->assertTrue($error instanceof $corruptFileError);
}
public function testErrorsOnMissingChunk()
{
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("hello world,abcdefghijklmnopqrstuv123456789"), ["chunkSizeBytes" => 1]);
$this->collectionsWrapper->getChunksCollection()->deleteOne(['files_id' => $id, 'n' => 7]);
$error = null;
try{
$download = $this->bucket->openDownloadStream($id);
stream_get_contents($download);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$corruptFileError = '\MongoDB\Exception\GridFSCorruptFileException';
$this->assertTrue($error instanceof $corruptFileError);
}
public function testUploadEnsureIndexes()
{
$chunks = $this->bucket->getCollectionsWrapper()->getChunksCollection();
$files = $this->bucket->getCollectionsWrapper()->getFilesCollection();
$this->bucket->uploadFromStream("filename", $this->generateStream("junk"));
$chunksIndexed = false;
foreach($chunks->listIndexes() as $index) {
$chunksIndexed = $chunksIndexed || ($index->isUnique() && $index->getKey() === ['files_id' => 1, 'n' => 1]);
}
$this->assertTrue($chunksIndexed);
$filesIndexed = false;
foreach($files->listIndexes() as $index) {
$filesIndexed = $filesIndexed || ($index->getKey() === ['filename' => 1, 'uploadDate' => 1]);
}
$this->assertTrue($filesIndexed);
}
public function testGetLastVersion()
{
$idOne = $this->bucket->uploadFromStream("test",$this->generateStream("foo"));
$streamTwo = $this->bucket->openUploadStream("test");
fwrite($streamTwo, "bar");
//echo "Calling FSTAT\n";
//$stat = fstat($streamTwo);
$idTwo = $this->bucket->getIdFromStream($streamTwo);
fclose($streamTwo);
$idThree = $this->bucket->uploadFromStream("test",$this->generateStream("baz"));
$this->assertEquals("baz", stream_get_contents($this->bucket->openDownloadStreamByName("test")));
$this->bucket->delete($idThree);
$this->assertEquals("bar", stream_get_contents($this->bucket->openDownloadStreamByName("test")));
$this->bucket->delete($idTwo);
$this->assertEquals("foo", stream_get_contents($this->bucket->openDownloadStreamByName("test")));
$this->bucket->delete($idOne);
$error = null;
try{
$this->bucket->openDownloadStreamByName("test");
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$fileNotFound = '\MongoDB\Exception\GridFSFileNotFoundException';
$this->assertTrue($error instanceof $fileNotFound);
}
public function testGetVersion()
{
$this->bucket->uploadFromStream("test",$this->generateStream("foo"));
$this->bucket->uploadFromStream("test",$this->generateStream("bar"));
$this->bucket->uploadFromStream("test",$this->generateStream("baz"));
$this->assertEquals("foo", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => 0])));
$this->assertEquals("bar", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => 1])));
$this->assertEquals("baz", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => 2])));
$this->assertEquals("baz", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => -1])));
$this->assertEquals("bar", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => -2])));
$this->assertEquals("foo", stream_get_contents($this->bucket->openDownloadStreamByName("test", ['revision' => -3])));
$fileNotFound = '\MongoDB\Exception\GridFSFileNotFoundException';
$error = null;
try{
$this->bucket->openDownloadStreamByName("test", ['revision' => 3]);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$this->assertTrue($error instanceof $fileNotFound);
$error = null;
try{
$this->bucket->openDownloadStreamByName("test", ['revision' => -4]);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$this->assertTrue($error instanceof $fileNotFound);
}
public function testGridfsFind()
{
$this->bucket->uploadFromStream("two",$this->generateStream("test2"));
usleep(5000);
$this->bucket->uploadFromStream("two",$this->generateStream("test2+"));
usleep(5000);
$this->bucket->uploadFromStream("one",$this->generateStream("test1"));
usleep(5000);
$this->bucket->uploadFromStream("two",$this->generateStream("test2++"));
$cursor = $this->bucket->find(["filename" => "two"]);
$count = count($cursor->toArray());
$this->assertEquals(3, $count);
$cursor = $this->bucket->find([]);
$count = count($cursor->toArray());
$this->assertEquals(4, $count);
$cursor = $this->bucket->find([], ["noCursorTimeout"=>false, "sort"=>["uploadDate"=> -1], "skip"=>1, "limit"=>2]);
$outputs = ["test1", "test2+"];
$i=0;
foreach($cursor as $file){
$contents = stream_get_contents($this->bucket->openDownloadStream($file->_id));
$this->assertEquals($outputs[$i], $contents);
$i++;
}
}
public function testGridInNonIntChunksize()
{
$id = $this->bucket->uploadFromStream("f",$this->generateStream("data"));
$this->bucket->getCollectionsWrapper()->getFilesCollection()->updateOne(["filename"=>"f"],
['$set'=> ['chunkSize' => 100.00]]);
$this->assertEquals("data", stream_get_contents($this->bucket->openDownloadStream($id)));
}
public function testBigInsert()
{
for ($tmpStream = tmpfile(), $i = 0; $i < 20; $i++) {
fwrite($tmpStream, str_repeat('a', 1048576));
}
fseek($tmpStream, 0);
$this->bucket->uploadFromStream("BigInsertTest", $tmpStream);
fclose($tmpStream);
}
public function testGetIdFromStream()
{
$upload = $this->bucket->openUploadStream("test");
$id = $this->bucket->getIdFromStream($upload);
fclose($upload);
$this->assertTrue($id instanceof \MongoDB\BSON\ObjectId);
$download = $this->bucket->openDownloadStream($id);
$id=null;
$id = $this->bucket->getIdFromStream($download);
fclose($download);
$this->assertTrue($id instanceof \MongoDB\BSON\ObjectId);
}
public function testRename()
{
$id = $this->bucket->uploadFromStream("first_name", $this->generateStream("testing"));
$this->assertEquals("testing", stream_get_contents($this->bucket->openDownloadStream($id)));
$this->bucket->rename($id, "second_name");
$error = null;
try{
$this->bucket->openDownloadStreamByName("first_name");
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
$fileNotFound = '\MongoDB\Exception\GridFSFileNotFoundException';
$this->assertTrue($error instanceof $fileNotFound);
$this->assertEquals("testing", stream_get_contents($this->bucket->openDownloadStreamByName("second_name")));
}
public function testDrop()
{
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("hello world"));
$this->bucket->drop();
$id = $this->bucket->uploadFromStream("test_filename", $this->generateStream("hello world"));
$this->assertEquals(1, $this->collectionsWrapper->getFilesCollection()->count());
}
/**
*@dataProvider provideInsertChunks
*/
public function testProvidedMultipleReads($data)
{
$upload = $this->bucket->openUploadStream("test", ["chunkSizeBytes"=>rand(1, 5)]);
fwrite($upload,$data);
$id = $this->bucket->getIdFromStream($upload);
fclose($upload);
$download = $this->bucket->openDownloadStream($id);
$readPos = 0;
while($readPos < strlen($data)){
$numToRead = rand(1, strlen($data) - $readPos);
$expected = substr($data, $readPos, $numToRead);
$actual = fread($download, $numToRead);
$this->assertEquals($expected,$actual);
$readPos+= $numToRead;
}
$actual = fread($download, 5);
$expected = "";
$this->assertEquals($expected,$actual);
fclose($download);
}
private function generateStream($input)
{
$stream = fopen('php://temp', 'w+');
fwrite($stream, $input);
rewind($stream);
return $stream;
}
}
<?php
namespace MongoDB\Tests\GridFS;
use MongoDB\GridFS;
use MongoDB\Collection;
use MongoDB\Tests\FunctionalTestCase as BaseFunctionalTestCase;
/**
* Base class for GridFS functional tests.
*/
abstract class FunctionalTestCase extends BaseFunctionalTestCase
{
protected $bucket;
protected $collectionsWrapper;
public function setUp()
{
parent::setUp();
foreach(['fs.files', 'fs.chunks'] as $collection){
$col = new Collection($this->manager, sprintf("%s.%s",$this->getDatabaseName(), $collection));
$col->drop();
}
$this->bucket = new \MongoDB\GridFS\Bucket($this->manager, $this->getDatabaseName());
$this->collectionsWrapper = $this->bucket->getCollectionsWrapper();
}
public function tearDown()
{
foreach(['fs.files', 'fs.chunks'] as $collection){
$col = new Collection($this->manager, sprintf("%s.%s",$this->getDatabaseName(), $collection));
$col->drop();
}
if ($this->hasFailed()) {
return;
}
}
public function provideInsertChunks()
{
$testArgs[][] = "hello world";
$testArgs[][] = "1234567890";
$testArgs[][] = "~!@#$%^&*()_+";
for($j=0; $j<30; $j++){
$randomTest = "";
for($i=0; $i<100; $i++){
$randomTest .= chr(rand(0, 255));
}
$testArgs[][] = $randomTest;
}
$utf8="";
for($i=0; $i<256; $i++){
$utf8 .= chr($i);
}
$testArgs[][]=$utf8;
return $testArgs;
}
}
<?php
namespace MongoDB\Tests\GridFS;
use MongoDB\GridFS;
/**
* Functional tests for the GridFSUpload and GridFSDownload streams.
*/
class GridFSStreamTest extends FunctionalTestCase
{
public function testBasic()
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$upload->insertChunks("hello world");
$id = $upload->getId();
$upload->close();
$this->assertEquals(1, $this->collectionsWrapper->getFilesCollection()->count());
$this->assertEquals(1, $this->collectionsWrapper->getChunksCollection()->count());
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id"=>$id]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$stream = fopen('php://temp', 'w+');
$download->downloadToStream($stream);
rewind($stream);
$contents = stream_get_contents($stream);
$this->assertEquals("hello world", $contents);
fclose($stream);
// Make sure it's still there!
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$stream = fopen('php://temp', 'w+');
$download->downloadToStream($stream);
rewind($stream);
$contents = stream_get_contents($stream);
$this->assertEquals("hello world", $contents);
fclose($stream);
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$id = $upload->getId();
$upload->close();
$this->assertEquals(2, $this->collectionsWrapper->getFilesCollection()->count());
$this->assertEquals(1, $this->collectionsWrapper->getChunksCollection()->count());
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id"=>$id]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$stream = fopen('php://temp', 'w+');
$download->downloadToStream($stream);
rewind($stream);
$contents = stream_get_contents($stream);
$this->assertEquals("", $contents);
}
public function testMd5()
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$upload->insertChunks("hello world\n");
$id = $upload->getId();
$upload->close();
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id"=>$id]);
$this->assertEquals("6f5902ac237024bdd0c176cb93063dc4", $file->md5);
}
public function testUploadDefaultOpts()
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$this->assertTrue($upload->getId() instanceof \MongoDB\BSON\ObjectId);
$this->assertTrue($upload->getFile()["uploadDate"] instanceof \MongoDB\BSON\UTCDateTime);
$this->assertEquals($upload->getFile()["filename"], "test");
$this->assertEquals($upload->getLength(),0);
$this->assertTrue(!isset($upload->getFile()["contentType"]));
$this->assertTrue(!isset($upload->getFile()["aliases"]));
$this->assertTrue(!isset($upload->getFile()["metadata"]));
$this->assertEquals(255 * 1024, $upload->getChunkSize());
}
public function testUploadCustomOpts()
{
$options = ["chunkSizeBytes" => 1,
"contentType" => "text/html",
"aliases" => ["foo", "bar"],
"metadata" => ["foo" => 1, "bar" => 2]
];
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test", $options);
$this->assertEquals($upload->getChunkSize(), 1);
$this->assertEquals($upload->getFile()["contentType"], "text/html");
$this->assertEquals($upload->getFile()["aliases"], ["foo", "bar"]);
$this->assertEquals($upload->getFile()["metadata"], ["foo" => 1, "bar" => 2]);
}
public function testDownloadDefaultOpts()
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$upload->close();
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id" => $upload->getId()]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$download->close();
$this->assertEquals($upload->getId(), $download->getId());
$this->assertEquals(0, $download->getFile()->length);
$this->assertTrue(!isset($download->getFile()->contentType));
$this->assertTrue(!isset($download->getFile()->aliases));
$this->assertTrue(!isset($download->getFile()->metadata));
$this->assertTrue($download->getFile()->uploadDate instanceof \MongoDB\BSON\UTCDateTime);
$this->assertEquals(255 * 1024, $download->getFile()->chunkSize);
$this->assertEquals("d41d8cd98f00b204e9800998ecf8427e", $download->getFile()->md5);
}
public function testDownloadCustomOpts()
{
$options = ["chunkSizeBytes" => 1000,
"contentType" => "text/html",
"aliases" => ["foo", "bar"],
"metadata" => ["foo" => 1, "bar" => 2]
];
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test", $options);
$upload->insertChunks("hello world");
$upload->close();
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id" => $upload->getId()]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$this->assertEquals("test", $download->getFile()->filename);
$this->assertEquals($upload->getId(), $download->getId());
$this->assertEquals(11, $download->getFile()->length);
$this->assertEquals("text/html", $download->getFile()->contentType);
$this->assertEquals(1000, $download->getFile()->chunkSize);
$this->assertEquals(["foo", "bar"], $download->getFile()->aliases);
$this->assertEquals(["foo"=> 1, "bar"=> 2], (array) $download->getFile()->metadata);
$this->assertEquals("5eb63bbbe01eeed093cb22bb8f5acdc3", $download->getFile()->md5);
}
/**
*@dataProvider provideInsertChunks
*/
public function testInsertChunks($data)
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$upload->insertChunks($data);
$upload->close();
$stream = $this->bucket->openDownloadStream($upload->getId());
$this->assertEquals($data, stream_get_contents($stream));
}
public function testMultiChunkFile()
{
$toUpload="";
for($i=0; $i<255*1024+1000; $i++){
$toUpload .= "a";
}
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test");
$upload->insertChunks($toUpload);
$upload->close();
$this->assertEquals(1, $this->collectionsWrapper->getFilesCollection()->count());
$this->assertEquals(2, $this->collectionsWrapper->getChunksCollection()->count());
$download = $this->bucket->openDownloadStream($upload->getId());
$this->assertEquals($toUpload, stream_get_contents($download));
}
/**
*@dataProvider provideInsertChunks
*/
public function testSmallChunks($data)
{
$options = ["chunkSizeBytes"=>1];
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test", $options);
$upload->insertChunks($data);
$upload->close();
$this->assertEquals(strlen($data), $this->collectionsWrapper->getChunksCollection()->count());
$this->assertEquals(1, $this->collectionsWrapper->getFilesCollection()->count());
$stream = $this->bucket->openDownloadStream($upload->getId());
$this->assertEquals($data, stream_get_contents($stream));
}
public function testMultipleReads()
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test", ["chunkSizeBytes"=>3]);
$upload->insertChunks("hello world");
$upload->close();
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id"=>$upload->getId()]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$this->assertEquals("he", $download->downloadNumBytes(2));
$this->assertEquals("ll", $download->downloadNumBytes(2));
$this->assertEquals("o ", $download->downloadNumBytes(2));
$this->assertEquals("wo", $download->downloadNumBytes(2));
$this->assertEquals("rl", $download->downloadNumBytes(2));
$this->assertEquals("d", $download->downloadNumBytes(2));
$this->assertEquals("", $download->downloadNumBytes(2));
$this->assertEquals("", $download->downloadNumBytes(2));
$download->close();
}
/**
*@dataProvider provideInsertChunks
*/
public function testProvidedMultipleReads($data)
{
$upload = new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper, "test", ["chunkSizeBytes"=>rand(1, 5)]);
$upload->insertChunks($data);
$upload->close();
$file = $this->collectionsWrapper->getFilesCollection()->findOne(["_id"=>$upload->getId()]);
$download = new \MongoDB\GridFS\GridFSDownload($this->collectionsWrapper, $file);
$readPos = 0;
while($readPos < strlen($data)){
$numToRead = rand(1, strlen($data) - $readPos);
$expected = substr($data, $readPos, $numToRead);
$actual = $download->downloadNumBytes($numToRead);
$this->assertEquals($expected,$actual);
$readPos+= $numToRead;
}
$actual = $download->downloadNumBytes(5);
$expected = "";
$this->assertEquals($expected,$actual);
$download->close();
}
/**
* @expectedException \MongoDB\Exception\InvalidArgumentTypeException
* @dataProvider provideInvalidUploadConstructorOptions
*/
public function testUploadConstructorOptionTypeChecks(array $options)
{
new \MongoDB\GridFS\GridFSUpload($this->collectionsWrapper,"test", $options);
}
public function provideInvalidUploadConstructorOptions()
{
$options = [];
$invalidContentType = [123, 3.14, true, [], new \stdClass];
$invalidAliases = ['foo', 3.14, true, [12, 34], new \stdClass];
$invalidMetadata = ['foo', 3.14, true];
foreach ($invalidContentType as $value) {
$options[][] = ['contentType' => $value];
}
foreach ($invalidAliases as $value) {
$options[][] = ['aliases' => $value];
}
foreach ($invalidMetadata as $value) {
$options[][] = ['metadata' => $value];
}
return $options;
}
}
GridFS Tests
============
The YAML and JSON files in this directory are platform-independent tests
meant to exercise a driver's implementation of GridFS.
Converting to JSON
==================
The tests are written in YAML because it is easier for humans to write
and read, and because YAML supports a standard comment format. Each test
is also provided in JSON format because in some languages it is easier
to parse JSON than YAML.
If you modify any test, you should modify the YAML file and then
regenerate the JSON file from it.
One way to convert the files is to use an online converter. I used:
http://www.json2yaml.com/
It's advertised as a JSON-to-YAML converter, but it works in either direction.
Note: the yaml2json utility from npm is not capable of converting these YAML tests
because it doesn't implement the full YAML spec.
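If an offline conversion is preferred, a small script along the following lines can regenerate the JSON from the YAML. This is only a sketch: it assumes the PECL yaml extension (yaml_parse_file) is installed, which this repository does not require.
<?php
// Hypothetical YAML-to-JSON conversion helper, assuming ext-yaml is available.
$yamlFile = $argv[1];                        // e.g. delete.yml
$data = yaml_parse_file($yamlFile);          // parse the YAML test file
$jsonFile = preg_replace('/\.ya?ml$/', '.json', $yamlFile);
file_put_contents(
    $jsonFile,
    json_encode($data, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES) . "\n"
);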
Format
======
Each test file has two top level sections:
1. data
2. tests
The data section defines the initial contents of the files and chunks
collections for all tests in that file.
The tests section defines the tests to be run. The format of the tests
section will vary slightly depending on what tests are being defined.
In general, they will have the following sections:
1. description
2. arrange
3. act
4. assert
The arrange section, if present, defines changes to be made to the
initial contents of the files and chunks collections (as defined by
the data section) before this particular test is run. These changes
are described in the form of write commands that can be sent directly
to MongoDB.
The act section defines what operation (with which arguments) should
be performed.
The assert section defines what should be true at the end of the test.
This includes checking the return value of the operation, as well as
checking the expected contents of the files and chunks collections. The
expected contents of the files and chunks collections are described
in the form of write commands that modify collections named
expected.files and expected.chunks. Before running these commands,
load the initial files and chunks documents into the expected.files
and expected.chunks collections and then run the commands. At that point
you can assert that fs.files and expected.files are the same, and that
expected.chunks and fs.chunks are the same.
For operations that are expected to succeed the assert section contains
a "result" element describing the expected result. For operations
that are expected to fail the assert section contains an "error"
element describing the expected failure.
The "result" element is either the expected result when it is possible to
know the result in advance, or it is the special value "&result"
which means that we expect a result (not a failure) but the actual
value of the result could be anything. The notation "&result" is
modeled after YAML syntax for defining an anchor, and the
result value may be referenced later in the assert section as
"*result".
Another special notation in the assert section is "*actual", which
is used when the value of a field cannot be known in advance of the
test, so the assert logic should accept whatever the actual value
ended up being.
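As an illustration only, a driver's test runner for the delete tests might follow the arrange/act/assert flow like this; seedCollections(), runCommands(), performOperation(), and compareWithExpected() are hypothetical helpers standing in for harness code.
<?php
// Hypothetical runner sketch for the delete tests; helper functions are assumed.
$spec = json_decode(file_get_contents('delete.json'), true);

foreach ($spec['tests'] as $test) {
    seedCollections($spec['data']);                    // reset fs.files and fs.chunks to "data"
    if (isset($test['arrange'])) {
        runCommands($test['arrange']['data']);         // apply per-test write commands
    }
    $error = null;
    try {
        performOperation($test['act']);                // run the operation under test
    } catch (Exception $e) {
        $error = $e;
    }
    if (isset($test['assert']['error'])) {
        assert($error !== null);                       // expected failure (e.g. "FileNotFound")
    } else {
        assert($error === null);
    }
    if (isset($test['assert']['data'])) {
        compareWithExpected($test['assert']['data']);  // compare fs.* with expected.* collections
    }
}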
{
"data": {
"files": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"length": 0,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"filename": "length-0",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"length": 0,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"filename": "length-0-with-empty-chunk",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"length": 2,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "c700ed4fdb1d27055aa3faa2c2432283",
"filename": "length-2",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"length": 8,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "dd254cdc958e53abaa67da9f797125f5",
"filename": "length-8",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
}
],
"chunks": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"files_id": {
"$oid": "000000000000000000000002"
},
"n": 0,
"data": {
"$hex": ""
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"files_id": {
"$oid": "000000000000000000000003"
},
"n": 0,
"data": {
"$hex": "1122"
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"files_id": {
"$oid": "000000000000000000000004"
},
"n": 0,
"data": {
"$hex": "11223344"
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"files_id": {
"$oid": "000000000000000000000004"
},
"n": 1,
"data": {
"$hex": "55667788"
}
}
]
},
"tests": [
{
"description": "Delete when length is 0",
"act": {
"operation": "delete",
"arguments": {
"id": {
"$oid": "000000000000000000000001"
}
}
},
"assert": {
"result": "void",
"data": [
{
"delete": "expected.files",
"deletes": [
{
"q": {
"_id": {
"$oid": "000000000000000000000001"
}
},
"limit": 1
}
]
}
]
}
},
{
"description": "Delete when length is 0 and there is one extra empty chunk",
"act": {
"operation": "delete",
"arguments": {
"id": {
"$oid": "000000000000000000000002"
}
}
},
"assert": {
"result": "void",
"data": [
{
"delete": "expected.files",
"deletes": [
{
"q": {
"_id": {
"$oid": "000000000000000000000002"
}
},
"limit": 1
}
]
},
{
"delete": "expected.chunks",
"deletes": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000002"
}
},
"limit": 0
}
]
}
]
}
},
{
"description": "Delete when length is 8",
"act": {
"operation": "delete",
"arguments": {
"id": {
"$oid": "000000000000000000000004"
}
}
},
"assert": {
"result": "void",
"data": [
{
"delete": "expected.files",
"deletes": [
{
"q": {
"_id": {
"$oid": "000000000000000000000004"
}
},
"limit": 1
}
]
},
{
"delete": "expected.chunks",
"deletes": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000004"
}
},
"limit": 0
}
]
}
]
}
},
{
"description": "Delete when files entry does not exist",
"act": {
"operation": "delete",
"arguments": {
"id": {
"$oid": "000000000000000000000000"
}
}
},
"assert": {
"error": "FileNotFound"
}
},
{
"description": "Delete when files entry does not exist and there are orphaned chunks",
"arrange": {
"data": [
{
"delete": "fs.files",
"deletes": [
{
"q": {
"_id": {
"$oid": "000000000000000000000004"
}
},
"limit": 1
}
]
}
]
},
"act": {
"operation": "delete",
"arguments": {
"id": {
"$oid": "000000000000000000000004"
}
}
},
"assert": {
"error": "FileNotFound",
"data": [
{
"delete": "expected.files",
"deletes": [
{
"q": {
"_id": {
"$oid": "000000000000000000000004"
}
},
"limit": 1
}
]
},
{
"delete": "expected.chunks",
"deletes": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000004"
}
},
"limit": 0
}
]
}
]
}
}
]
}
\ No newline at end of file
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0-with-empty-chunk"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 2
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "c700ed4fdb1d27055aa3faa2c2432283"
filename: "length-2"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 8
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "dd254cdc958e53abaa67da9f797125f5"
filename: "length-8"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "1122" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 1, data : { $hex : "55667788" } }
tests:
-
description: "Delete when length is 0"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000001" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000001" } }, limit : 1 }
] }
-
description: "Delete when length is 0 and there is one extra empty chunk"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000002" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000002" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000002" } }, limit : 0 }
] }
-
description: "Delete when length is 8"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000004" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000004" } }, limit : 0 }
] }
-
description: "Delete when files entry does not exist"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000000" }
assert:
error: "FileNotFound"
-
description: "Delete when files entry does not exist and there are orphaned chunks"
arrange:
data:
-
{ delete : "fs.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000004" }
assert:
error: "FileNotFound"
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000004" } }, limit : 0 }
] }
{
"data": {
"files": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"length": 0,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"filename": "length-0",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"length": 0,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"filename": "length-0-with-empty-chunk",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"length": 2,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "c700ed4fdb1d27055aa3faa2c2432283",
"filename": "length-2",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"length": 8,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "dd254cdc958e53abaa67da9f797125f5",
"filename": "length-8",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000005"
},
"length": 10,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "57d83cd477bfb1ccd975ab33d827a92b",
"filename": "length-10",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
}
],
"chunks": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"files_id": {
"$oid": "000000000000000000000002"
},
"n": 0,
"data": {
"$hex": ""
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"files_id": {
"$oid": "000000000000000000000003"
},
"n": 0,
"data": {
"$hex": "1122"
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"files_id": {
"$oid": "000000000000000000000004"
},
"n": 0,
"data": {
"$hex": "11223344"
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"files_id": {
"$oid": "000000000000000000000004"
},
"n": 1,
"data": {
"$hex": "55667788"
}
},
{
"_id": {
"$oid": "000000000000000000000005"
},
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 0,
"data": {
"$hex": "11223344"
}
},
{
"_id": {
"$oid": "000000000000000000000006"
},
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 1,
"data": {
"$hex": "55667788"
}
},
{
"_id": {
"$oid": "000000000000000000000007"
},
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 2,
"data": {
"$hex": "99aa"
}
}
]
},
"tests": [
{
"description": "Download when length is zero",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000001"
},
"options": {
}
}
},
"assert": {
"result": {
"$hex": ""
}
}
},
{
"description": "Download when length is zero and there is one empty chunk",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000002"
},
"options": {
}
}
},
"assert": {
"result": {
"$hex": ""
}
}
},
{
"description": "Download when there is one chunk",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000003"
},
"options": {
}
}
},
"assert": {
"result": {
"$hex": "1122"
}
}
},
{
"description": "Download when there are two chunks",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000004"
},
"options": {
}
}
},
"assert": {
"result": {
"$hex": "1122334455667788"
}
}
},
{
"description": "Download when there are three chunks",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000005"
},
"options": {
}
}
},
"assert": {
"result": {
"$hex": "112233445566778899aa"
}
}
},
{
"description": "Download when files entry does not exist",
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000000"
},
"options": {
}
}
},
"assert": {
"error": "FileNotFound"
}
},
{
"description": "Download when an intermediate chunk is missing",
"arrange": {
"data": [
{
"delete": "fs.chunks",
"deletes": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 1
},
"limit": 1
}
]
}
]
},
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000005"
}
}
},
"assert": {
"error": "ChunkIsMissing"
}
},
{
"description": "Download when final chunk is missing",
"arrange": {
"data": [
{
"delete": "fs.chunks",
"deletes": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 1
},
"limit": 1
}
]
}
]
},
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000005"
}
}
},
"assert": {
"error": "ChunkIsMissing"
}
},
{
"description": "Download when an intermediate chunk is the wrong size",
"arrange": {
"data": [
{
"update": "fs.chunks",
"updates": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 1
},
"u": {
"$set": {
"data": {
"$hex": "556677"
}
}
}
},
{
"q": {
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 2
},
"u": {
"$set": {
"data": {
"$hex": "8899aa"
}
}
}
}
]
}
]
},
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000005"
}
}
},
"assert": {
"error": "ChunkIsWrongSize"
}
},
{
"description": "Download when final chunk is the wrong size",
"arrange": {
"data": [
{
"update": "fs.chunks",
"updates": [
{
"q": {
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 2
},
"u": {
"$set": {
"data": {
"$hex": "99"
}
}
}
}
]
}
]
},
"act": {
"operation": "download",
"arguments": {
"id": {
"$oid": "000000000000000000000005"
}
}
},
"assert": {
"error": "ChunkIsWrongSize"
}
}
]
}
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0-with-empty-chunk"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 2
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "c700ed4fdb1d27055aa3faa2c2432283"
filename: "length-2"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 8
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "dd254cdc958e53abaa67da9f797125f5"
filename: "length-8"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000005" }
length: 10
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "57d83cd477bfb1ccd975ab33d827a92b"
filename: "length-10"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "1122" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 1, data : { $hex : "55667788" } }
- { _id : { "$oid" : "000000000000000000000005" }, files_id : { "$oid" : "000000000000000000000005" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000006" }, files_id : { "$oid" : "000000000000000000000005" }, n : 1, data : { $hex : "55667788" } }
- { _id : { "$oid" : "000000000000000000000007" }, files_id : { "$oid" : "000000000000000000000005" }, n : 2, data : { $hex : "99aa" } }
tests:
-
description: "Download when length is zero"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000001" }
options: { }
assert:
result: { $hex : "" }
-
description: "Download when length is zero and there is one empty chunk"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000002" }
options: { }
assert:
result: { $hex : "" }
-
description: "Download when there is one chunk"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000003" }
options: { }
assert:
result: { $hex : "1122" }
-
description: "Download when there are two chunks"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000004" }
options: { }
assert:
result: { $hex : "1122334455667788" }
-
description: "Download when there are three chunks"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
options: { }
assert:
result: { $hex : "112233445566778899aa" }
-
description: "Download when files entry does not exist"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000000" }
options: { }
assert:
error: "FileNotFound"
-
description: "Download when an intermediate chunk is missing"
arrange:
data:
-
{ delete : "fs.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 1 }, limit : 1 }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsMissing"
-
description: "Download when final chunk is missing"
arrange:
data:
-
{ delete : "fs.chunks", deletes : [
          { q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, limit : 1 }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsMissing"
-
description: "Download when an intermediate chunk is the wrong size"
arrange:
data:
-
{ update : "fs.chunks", updates : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 1 }, u : { $set : { data : { $hex : "556677" } } } },
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, u : { $set : { data : { $hex : "8899aa" } } } }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsWrongSize"
-
description: "Download when final chunk is the wrong size"
arrange:
data:
-
{ update : "fs.chunks", updates : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, u : { $set : { data : { $hex : "99" } } } }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsWrongSize"
{
"data": {
"files": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"length": 1,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-01T00:00:00.000Z"
},
"md5": "47ed733b8d10be225eceba344d533586",
"filename": "abc",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"length": 1,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-02T00:00:00.000Z"
},
"md5": "b15835f133ff2e27c7cb28117bfae8f4",
"filename": "abc",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"length": 1,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-03T00:00:00.000Z"
},
"md5": "eccbc87e4b5ce2fe28308fd9f2a7baf3",
"filename": "abc",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"length": 1,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-04T00:00:00.000Z"
},
"md5": "f623e75af30e62bbd73d6df5b50bb7b5",
"filename": "abc",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
},
{
"_id": {
"$oid": "000000000000000000000005"
},
"length": 1,
"chunkSize": 4,
"uploadDate": {
"$date": "1970-01-05T00:00:00.000Z"
},
"md5": "4c614360da93c0a041b22e537de151eb",
"filename": "abc",
"contentType": "application/octet-stream",
"aliases": [
],
"metadata": {
}
}
],
"chunks": [
{
"_id": {
"$oid": "000000000000000000000001"
},
"files_id": {
"$oid": "000000000000000000000001"
},
"n": 0,
"data": {
"$hex": "11"
}
},
{
"_id": {
"$oid": "000000000000000000000002"
},
"files_id": {
"$oid": "000000000000000000000002"
},
"n": 0,
"data": {
"$hex": "22"
}
},
{
"_id": {
"$oid": "000000000000000000000003"
},
"files_id": {
"$oid": "000000000000000000000003"
},
"n": 0,
"data": {
"$hex": "33"
}
},
{
"_id": {
"$oid": "000000000000000000000004"
},
"files_id": {
"$oid": "000000000000000000000004"
},
"n": 0,
"data": {
"$hex": "44"
}
},
{
"_id": {
"$oid": "000000000000000000000005"
},
"files_id": {
"$oid": "000000000000000000000005"
},
"n": 0,
"data": {
"$hex": "55"
}
}
]
},
"tests": [
{
"description": "Download_by_name when revision is 0",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "abc",
"options": {
"revision": 0
}
}
},
"assert": {
"result": {
"$hex": "11"
}
}
},
{
"description": "Download_by_name when revision is 1",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "abc",
"options": {
"revision": 1
}
}
},
"assert": {
"result": {
"$hex": "22"
}
}
},
{
"description": "Download_by_name when revision is -2",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "abc",
"options": {
"revision": -2
}
}
},
"assert": {
"result": {
"$hex": "44"
}
}
},
{
"description": "Download_by_name when revision is -1",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "abc",
"options": {
"revision": -1
}
}
},
"assert": {
"result": {
"$hex": "55"
}
}
},
{
"description": "Download_by_name when files entry does not exist",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "xyz"
}
},
"assert": {
"error": "FileNotFound"
}
},
{
"description": "Download_by_name when revision does not exist",
"act": {
"operation": "download_by_name",
"arguments": {
"filename": "abc",
"options": {
"revision": 999
}
}
},
"assert": {
"error": "RevisionNotFound"
}
}
]
}
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "47ed733b8d10be225eceba344d533586"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-02T00:00:00.000Z" }
md5: "b15835f133ff2e27c7cb28117bfae8f4"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-03T00:00:00.000Z" }
md5: "eccbc87e4b5ce2fe28308fd9f2a7baf3"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-04T00:00:00.000Z" }
md5: "f623e75af30e62bbd73d6df5b50bb7b5"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000005" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-05T00:00:00.000Z" }
md5: "4c614360da93c0a041b22e537de151eb"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000001" }, n : 0, data : { $hex : "11" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "22" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "33" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "44" } }
- { _id : { "$oid" : "000000000000000000000005" }, files_id : { "$oid" : "000000000000000000000005" }, n : 0, data : { $hex : "55" } }
tests:
-
description: "Download_by_name when revision is 0"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 0 }
assert:
result: { $hex : "11" }
-
description: "Download_by_name when revision is 1"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 1 }
assert:
result: { $hex : "22" }
-
description: "Download_by_name when revision is -2"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : -2 }
assert:
result: { $hex : "44" }
-
description: "Download_by_name when revision is -1"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : -1 }
assert:
result: { $hex : "55" }
-
description: "Download_by_name when files entry does not exist"
act:
operation: download_by_name
arguments:
filename: "xyz"
assert:
error: "FileNotFound"
-
description: "Download_by_name when revision does not exist"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 999 }
assert:
error: "RevisionNotFound"
{
"data": {
"files": [
],
"chunks": [
]
},
"tests": [
{
"description": "Upload when length is 0",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": ""
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 0,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "d41d8cd98f00b204e9800998ecf8427e",
"filename": "filename"
}
]
}
]
}
},
{
"description": "Upload when length is 1",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "11"
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 1,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "47ed733b8d10be225eceba344d533586",
"filename": "filename"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11"
}
}
]
}
]
}
},
{
"description": "Upload when length is 3",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "112233"
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 3,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "bafae3a174ab91fc70db7a6aa50f4f52",
"filename": "filename"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "112233"
}
}
]
}
]
}
},
{
"description": "Upload when length is 4",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "11223344"
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 4,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "7e7c77cff5705d1f7574a25ef6662117",
"filename": "filename"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11223344"
}
}
]
}
]
}
},
{
"description": "Upload when length is 5",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "1122334455"
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 5,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "283d4fea5dded59cf837d3047328f5af",
"filename": "filename"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11223344"
}
},
{
"_id": "*actual",
"files_id": "*result",
"n": 1,
"data": {
"$hex": "55"
}
}
]
}
]
}
},
{
"description": "Upload when length is 8",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "1122334455667788"
},
"options": {
"chunkSizeBytes": 4
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 8,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "dd254cdc958e53abaa67da9f797125f5",
"filename": "filename"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11223344"
}
},
{
"_id": "*actual",
"files_id": "*result",
"n": 1,
"data": {
"$hex": "55667788"
}
}
]
}
]
}
},
{
"description": "Upload when contentType is provided",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "11"
},
"options": {
"chunkSizeBytes": 4,
"contentType": "image/jpeg"
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 1,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "47ed733b8d10be225eceba344d533586",
"filename": "filename",
"contentType": "image/jpeg"
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11"
}
}
]
}
]
}
},
{
"description": "Upload when metadata is provided",
"act": {
"operation": "upload",
"arguments": {
"filename": "filename",
"source": {
"$hex": "11"
},
"options": {
"chunkSizeBytes": 4,
"metadata": {
"x": 1
}
}
}
},
"assert": {
"result": "&result",
"data": [
{
"insert": "expected.files",
"documents": [
{
"_id": "*result",
"length": 1,
"chunkSize": 4,
"uploadDate": "*actual",
"md5": "47ed733b8d10be225eceba344d533586",
"filename": "filename",
"metadata": {
"x": 1
}
}
]
},
{
"insert": "expected.chunks",
"documents": [
{
"_id": "*actual",
"files_id": "*result",
"n": 0,
"data": {
"$hex": "11"
}
}
]
}
]
}
}
]
}
data:
files: []
chunks: []
tests:
-
description: "Upload when length is 0"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 0, chunkSize : 4, uploadDate : "*actual", md5 : "d41d8cd98f00b204e9800998ecf8427e", filename : "filename" }
] }
-
description: "Upload when length is 1"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
-
description: "Upload when length is 3"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "112233" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 3, chunkSize : 4, uploadDate : "*actual", md5 : "bafae3a174ab91fc70db7a6aa50f4f52", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "112233" } }
] }
-
description: "Upload when length is 4"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11223344" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 4, chunkSize : 4, uploadDate : "*actual", md5 : "7e7c77cff5705d1f7574a25ef6662117", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } }
] }
-
description: "Upload when length is 5"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "1122334455" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 5, chunkSize : 4, uploadDate : "*actual", md5 : "283d4fea5dded59cf837d3047328f5af", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } },
{ _id : "*actual", files_id : "*result", n : 1, data : { $hex : "55" } }
] }
-
description: "Upload when length is 8"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "1122334455667788" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 8, chunkSize : 4, uploadDate : "*actual", md5 : "dd254cdc958e53abaa67da9f797125f5", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } },
{ _id : "*actual", files_id : "*result", n : 1, data : { $hex : "55667788" } }
] }
-
description: "Upload when contentType is provided"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options: { chunkSizeBytes : 4, contentType : "image/jpeg" }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename", contentType : "image/jpeg" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
-
description: "Upload when metadata is provided"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options:
chunkSizeBytes: 4
metadata: { x : 1 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename", metadata : { x : 1 } }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
<?php
namespace MongoDB\Tests\GridFS;
use MongoDB\GridFS;
use MongoDB\Collection;
use MongoDB\BSON\ObjectId;
use MongoDB\BSON\Binary;
use MongoDB\Exception;
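/**
 * Functional tests that execute the GridFS specification test files against
 * the Bucket API.
 */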
class SpecificationTests extends FunctionalTestCase
{
    private $bucket;
    private $commands;
    private $collections;
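    /**
     * Registers callbacks that replay the spec files' data commands (insert,
     * update, delete) against a given collection.
     */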
public function setUp()
{
parent::setUp();
        $this->commands = array(
            'insert' => function($col, $docs) {
                $col->insertMany($docs['documents']);
            },
            'update' => function($col, $docs) {
                foreach ($docs['updates'] as $update) {
                    $col->updateMany($update['q'], $update['u']);
                }
            },
            'delete' => function($col, $docs) {
                foreach ($docs['deletes'] as $delete) {
                    $col->deleteMany($delete['q']);
                }
            }
        );
}
/**
     * @dataProvider provideSpecificationTests
*/
public function testSpecificationTests($testJson)
{
foreach ($testJson['tests'] as $test) {
$this->initializeDatabases($testJson['data'], $test);
            if (isset($test['act']['arguments']['options'])) {
                $options = $test['act']['arguments']['options'];
            } else {
                $options = [];
            }
            $this->bucket = new \MongoDB\GridFS\Bucket($this->manager, $this->getDatabaseName(), $this->fixTypes($options, false));
$func = $test['act']['operation'] . "Command";
$error = null;
try {
$result = $this->$func($test['act']['arguments']);
            } catch (\MongoDB\Exception\Exception $e) {
$error = $e;
}
$errors = ['FileNotFound' => '\MongoDB\Exception\GridFSFileNotFoundException',
'ChunkIsMissing' => '\MongoDB\Exception\GridFSCorruptFileException',
'ExtraChunk' => '\MongoDB\Exception\GridFSCorruptFileException',
'ChunkIsWrongSize' => '\MongoDB\Exception\GridFSCorruptFileException',
'RevisionNotFound' => '\MongoDB\Exception\GridFSFileNotFoundException'
];
if (!isset($test['assert']['error'])) {
$this->assertNull($error);
} else {
$shouldError = $test['assert']['error'];
$this->assertTrue($error instanceof $errors[$shouldError]);
}
            if (isset($test['assert']['result'])) {
                $testResult = $test['assert']['result'];
                // "&result" binds the assertion to the actual return value; "void" expects no return value
                if ($testResult == '&result') {
                    $test['assert']['result'] = $result;
                }
                if ($testResult == "void") {
                    $test['assert']['result'] = null;
                }
                $fixedAssertFalse = $this->fixTypes($test['assert'], false);
                $this->assertEquals($fixedAssertFalse['result'], $result);
            }
            $fixedAssertTrue = $this->fixTypes($test['assert'], true);
            if (isset($test['assert']['data'])) {
                $this->runCommands($fixedAssertTrue['data'], $result);
                $this->collectionsEqual($this->collections['expected.files'], $this->bucket->getCollectionsWrapper()->getFilesCollection());
                if (isset($this->collections['expected.chunks'])) {
                    $this->collectionsEqual($this->collections['expected.chunks'], $this->bucket->getCollectionsWrapper()->getChunksCollection());
                }
            }
}
}
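    /**
     * Provides the decoded contents of each JSON spec file found in the
     * Specification/tests directory as a separate data set.
     */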
public function provideSpecificationTests()
{
        $testPath = __DIR__ . '/Specification/tests/*.json';
        $testArgs = [];
        foreach (glob($testPath) as $filename) {
$fileContents = file_get_contents($filename);
$testJson = json_decode($fileContents, true);
$testArgs[][] = $testJson;
}
return $testArgs;
}
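    /**
     * Recursively converts spec placeholders into driver types: "$hex" values
     * become binary strings (optionally wrapped in a BSON Binary) and "$oid"
     * values become ObjectIds. Entries bound to "*actual" are removed so that
     * server-generated values are not compared literally.
     */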
public function fixTypes($testJson, $makeBinary)
{
$result = $testJson;
        foreach ($result as $key => $value) {
            if (is_array($value) && isset($value['$hex'])) {
                $result[$key] = hex2bin($value['$hex']);
                if ($makeBinary) {
                    $result[$key] = new \MongoDB\BSON\Binary($result[$key], \MongoDB\BSON\Binary::TYPE_GENERIC);
                }
            } elseif (is_array($value) && isset($value['$oid'])) {
                $result[$key] = new \MongoDB\BSON\ObjectId((string) $value['$oid']);
            } elseif (is_array($value)) {
                $result[$key] = $this->fixTypes($result[$key], $makeBinary);
            } elseif (is_string($value) && $value == '*actual') {
                unset($result[$key]);
            }
        }
return $result;
}
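    /**
     * Asserts that two collections contain the same documents, ignoring _id
     * and uploadDate values.
     */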
public function collectionsEqual($col1, $col2)
{
$docs1 = $this->filterDoc($col1, true);
$docs2 = $this->filterDoc($col2, true);
$this->assertSameDocuments($docs1, $docs2);
}
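    /**
     * Returns a collection's documents with _id (optionally) and uploadDate
     * stripped so they can be compared.
     */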
public function filterDoc($collection, $ignoreId)
{
$output = [];
$documents = $collection->find();
        foreach ($documents as $doc) {
            if ($ignoreId) {
                unset($doc->_id);
            }
            if (isset($doc->uploadDate)) {
                // $this->assertTrue($doc->uploadDate instanceof DateTime);
                unset($doc->uploadDate);
            }
            $output[] = $doc;
}
return $output;
}
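    /**
     * Replays the expected-data commands from a test's "assert" section,
     * substituting "*result" placeholders with the actual operation result,
     * and records the resulting collections for later comparison.
     */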
public function runCommands($cmds, $result)
{
        foreach ($cmds as $cmd) {
            foreach ($cmd as $key => $value) {
                if (isset($this->commands[$key])) {
                    $collectionName = $value;
                    if (isset($cmd['documents'])) {
                        foreach ($cmd['documents'] as $docIndex => $doc) {
                            foreach ($doc as $docKey => $docVal) {
                                // Bind "*result" placeholders to the actual operation result
                                if (is_string($docVal) && $docVal == '*result') {
                                    $doc[$docKey] = $result;
                                }
                            }
                            $cmd['documents'][$docIndex] = $doc;
                        }
                    }
$collection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), $collectionName));
$this->commands[$key]($collection, $this->fixTypes($cmd, true));
$this->collections[$collectionName] = $collection;
}
}
}
}
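    /**
     * Drops the fs.* and expected.* collections, seeds them from the spec
     * fixture data, and applies any "arrange" commands defined by the test.
     */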
public function initializeDatabases($data, $test)
{
$collectionsToDrop = ['fs.files','fs.chunks','expected.files','expected.chunks'];
$data = $this->fixTypes($data, true);
foreach ($collectionsToDrop as $collectionName) {
$collection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), $collectionName));
$collection->drop();
}
if (isset($data['files']) && count($data['files']) > 0) {
$filesCollection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), "fs.files"));
$filesCollection->insertMany($data['files']);
$expectedFilesCollection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), "expected.files"));
$expectedFilesCollection->insertMany($data['files']);
$this->collections['expected.files'] = $expectedFilesCollection;
}
if (isset($data['chunks']) && count($data['chunks']) > 0) {
$chunksCollection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), "fs.chunks"));
$chunksCollection->insertMany($data['chunks']);
$expectedChunksCollection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), "expected.chunks"));
$expectedChunksCollection->insertMany($data['chunks']);
$this->collections['expected.chunks'] = $expectedChunksCollection;
}
        if (isset($test['arrange'])) {
            foreach ($test['arrange']['data'] as $cmd) {
                foreach ($cmd as $key => $value) {
                    if (isset($this->commands[$key])) {
                        $collection = new Collection($this->manager, sprintf("%s.%s", $this->getDatabaseName(), $cmd[$key]));
                        $this->commands[$key]($collection, $this->fixTypes($cmd, true));
}
}
}
}
}
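    /**
     * Maps the spec "upload" operation onto Bucket::uploadFromStream(), using
     * a php://temp stream as the source.
     */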
public function uploadCommand($args)
{
$args = $this->fixTypes($args, false);
$stream = fopen('php://temp', 'w+');
fwrite($stream, $args['source']);
rewind($stream);
$result = $this->bucket->uploadFromStream($args['filename'], $stream, $args['options']);
fclose($stream);
return $result;
}
    public function downloadCommand($args)
{
$args = $this->fixTypes($args, false);
$streamWrapper = new \MongoDB\GridFS\StreamWrapper();
$streamWrapper->register($this->manager);
$stream = fopen('php://temp', 'w+');
$this->bucket->downloadToStream($args['id'], $stream);
rewind($stream);
$result = stream_get_contents($stream);
fclose($stream);
return $result;
}
    public function deleteCommand($args)
{
$args = $this->fixTypes($args, false);
$this->bucket->delete($args['id']);
}
    public function download_by_nameCommand($args)
{
$args = $this->fixTypes($args, false);
$streamWrapper = new \MongoDB\GridFS\StreamWrapper();
$streamWrapper->register($this->manager);
$stream = fopen('php://temp', 'w+');
        if (isset($args['options']['revision'])) {
$this->bucket->downloadToStreamByName($args['filename'], $stream, $args['options']['revision']);
} else {
$this->bucket->downloadToStreamByName($args['filename'], $stream);
}
rewind($stream);
$result = stream_get_contents($stream);
fclose($stream);
return $result;
}
}