Commit 3cf99441 authored by Jeremy Mikola's avatar Jeremy Mikola

Rewrite harness for GridFS spec tests

parent 583d65ab
<?php
namespace MongoDB\Tests\GridFS;
use MongoDB\Collection;
use MongoDB\BSON\Binary;
use MongoDB\BSON\ObjectId;
use MongoDB\BSON\UTCDateTime;
use MongoDB\Exception\RuntimeException;
use MongoDB\Operation\BulkWrite;
use DateTime;
use IteratorIterator;
use LogicException;
use MultipleIterator;
/**
* GridFS spec functional tests.
*
* @see https://github.com/mongodb/specifications/tree/master/source/gridfs/tests
*/
class SpecFunctionalTest extends FunctionalTestCase
{
// Mirror collections ("expected.files"/"expected.chunks") used to build the
// expected database state that actual collections are compared against.
private $expectedChunksCollection;
private $expectedFilesCollection;
public function setUp()
{
parent::setUp();
// Drop the "expected" collections so each test starts from a clean slate.
$this->expectedFilesCollection = new Collection($this->manager, $this->getDatabaseName(), 'expected.files');
$this->expectedFilesCollection->drop();
$this->expectedChunksCollection = new Collection($this->manager, $this->getDatabaseName(), 'expected.chunks');
$this->expectedChunksCollection->drop();
}
/**
 * Executes a single GridFS spec test: optional "arrange" modifications,
 * then the "act" operation, then the "assert" checks.
 *
 * @dataProvider provideSpecificationTests
 * @param array $initialData Initial documents for the files and chunks collections
 * @param array $test        Decoded spec test document
 */
public function testSpecification(array $initialData, array $test)
{
// Rename the test case so failures report the spec test's description.
$this->setName(str_replace(' ', '_', $test['description']));
$this->initializeData($initialData);
if (isset($test['arrange'])) {
foreach ($test['arrange']['data'] as $dataModification) {
$this->executeDataModification($dataModification);
}
}
try {
$result = $this->executeAct($test['act']);
} catch (RuntimeException $e) {
// A possibly-expected failure; executeAssert() matches the exception
// against the "assert.error" name.
$result = $e;
}
if (isset($test['assert'])) {
$this->executeAssert($test['assert'], $result);
}
}
/**
 * Data provider: yields one [data, test] argument pair for each test in
 * each JSON spec file under spec-tests/.
 *
 * @return array
 */
public function provideSpecificationTests()
{
$testArgs = [];
foreach (glob(__DIR__ . '/spec-tests/*.json') as $filename) {
$json = json_decode(file_get_contents($filename), true);
foreach ($json['tests'] as $test) {
$testArgs[] = [$json['data'], $test];
}
}
return $testArgs;
}
/**
 * Assert that the collections contain equivalent documents.
 *
 * This method will resolve references within the expected collection's
 * documents before comparing documents. Occurrences of "*result" in the
 * expected collection's documents will be replaced with the actual result.
 * Occurrences of "*actual" in the expected collection's documents will be
 * replaced with the corresponding value in the actual collection's document
 * being compared.
 *
 * @param Collection $expectedCollection
 * @param Collection $actualCollection
 * @param mixed $actualResult
 */
private function assertEquivalentCollections($expectedCollection, $actualCollection, $actualResult)
{
// Iterate both collections in lockstep, comparing documents pairwise in
// natural order.
$mi = new MultipleIterator;
$mi->attachIterator(new IteratorIterator($expectedCollection->find()));
$mi->attachIterator(new IteratorIterator($actualCollection->find()));
foreach ($mi as $documents) {
list($expectedDocument, $actualDocument) = $documents;
// Replace "*result" placeholders with the act operation's actual result.
array_walk($expectedDocument, function(&$value) use ($actualResult) {
if ($value === '*result') {
$value = $actualResult;
}
});
// Replace "*actual_<oid>" placeholders (executeAssert() appends an
// ObjectId suffix to "*actual") with the corresponding field from the
// actual document being compared.
array_walk($expectedDocument, function(&$value, $key) use ($actualDocument) {
if ( ! is_string($value)) {
return;
}
if ( ! strncmp($value, '*actual_', 8)) {
$value = $actualDocument[$key];
}
});
$this->assertSameDocument($expectedDocument, $actualDocument);
}
}
/**
 * Convert encoded types in the array and return the modified array.
 *
 * Nested arrays with "$oid" and "$date" keys will be converted to ObjectID
 * and UTCDateTime instances, respectively. Nested arrays with "$hex" keys
 * will be converted to a string or Binary object.
 *
 * @param array $data
 * @param boolean $createBinary If true, convert "$hex" values to a Binary
 * @return array
 */
private function convertTypes(array $data, $createBinary = true)
{
/* array_walk_recursive() only visits leaf nodes within the array, so we
 * need to manually recurse.
 */
array_walk($data, function(&$value) use ($createBinary) {
if ( ! is_array($value)) {
return;
}
if (isset($value['$oid'])) {
$value = new ObjectId($value['$oid']);
return;
}
if (isset($value['$hex'])) {
$value = $createBinary
? new Binary(hex2bin($value['$hex']), Binary::TYPE_GENERIC)
: hex2bin($value['$hex']);
return;
}
if (isset($value['$date'])) {
// TODO: This is necessary until PHPC-536 is implemented
$milliseconds = floor((new DateTime($value['$date']))->format('U.u') * 1000);
$value = new UTCDateTime($milliseconds);
return;
}
// Plain nested array: recurse to convert its own nested values.
$value = $this->convertTypes($value, $createBinary);
});
return $data;
}
/**
 * Executes an "act" block by dispatching to the corresponding Bucket method.
 *
 * @param array $act
 * @return mixed
 * @throws LogicException if the operation is unsupported
 */
private function executeAct(array $act)
{
// Binary conversion is disabled since Bucket methods expect raw strings.
$act = $this->convertTypes($act, false);
switch ($act['operation']) {
case 'delete':
return $this->bucket->delete($act['arguments']['id']);
case 'download':
return stream_get_contents($this->bucket->openDownloadStream($act['arguments']['id']));
case 'download_by_name':
return stream_get_contents($this->bucket->openDownloadStreamByName(
$act['arguments']['filename'],
isset($act['arguments']['options']) ? $act['arguments']['options'] : []
));
case 'upload':
return $this->bucket->uploadFromStream(
$act['arguments']['filename'],
$this->createStream($act['arguments']['source']),
isset($act['arguments']['options']) ? $act['arguments']['options'] : []
);
default:
throw new LogicException('Unsupported act: ' . $act['operation']);
}
}
/**
 * Executes an "assert" block: checks an expected error and/or result, then
 * applies any "data" modifications to the expected collections and compares
 * them with the actual collections.
 *
 * @param array $assert
 * @param mixed $actualResult Operation result, or the caught exception
 * @return mixed
 * @throws \MongoDB\GridFS\Exception\FileNotFoundException
 * @throws LogicException if the operation is unsupported
 */
private function executeAssert(array $assert, $actualResult)
{
if (isset($assert['error'])) {
$this->assertInstanceOf($this->getExceptionClassForError($assert['error']), $actualResult);
}
if (isset($assert['result'])) {
$this->executeAssertResult($assert['result'], $actualResult);
}
if ( ! isset($assert['data'])) {
return;
}
/* Since "*actual" may be used for an expected document's "_id", append
 * a unique value to avoid duplicate key exceptions.
 */
array_walk_recursive($assert['data'], function(&$value) {
if ($value === '*actual') {
$value .= '_' . new ObjectId;
}
});
foreach ($assert['data'] as $dataModification) {
$this->executeDataModification($dataModification);
}
$this->assertEquivalentCollections($this->expectedFilesCollection, $this->filesCollection, $actualResult);
$this->assertEquivalentCollections($this->expectedChunksCollection, $this->chunksCollection, $actualResult);
}
/**
 * Executes the "result" section of an "assert" block.
 *
 * @param mixed $expectedResult
 * @param mixed $actualResult
 * @throws LogicException if the result assertion is unsupported
 */
private function executeAssertResult($expectedResult, $actualResult)
{
if ($expectedResult === 'void') {
return $this->assertNull($actualResult);
}
if ($expectedResult === '&result') {
// Do nothing; assertEquivalentCollections() will handle this
return;
}
if (isset($expectedResult['$hex'])) {
return $this->assertSame(hex2bin($expectedResult['$hex']), $actualResult);
}
throw new LogicException('Unsupported result assertion: ' . var_export($expectedResult, true));
}
/**
 * Executes a data modification from an "arrange" or "assert" block.
 *
 * @param array $dataModification
 * @return mixed
 * @throws LogicException if the operation or collection is unsupported
 */
private function executeDataModification(array $dataModification)
{
/* The modification's first key/value pair identifies the operation type
 * and target collection (e.g. ["insert" => "expected.files", ...]), so
 * only one loop iteration is needed.
 */
foreach ($dataModification as $type => $collectionName) {
break;
}
if ( ! in_array($collectionName, ['fs.files', 'fs.chunks', 'expected.files', 'expected.chunks'])) {
throw new LogicException('Unsupported collection: ' . $collectionName);
}
$dataModification = $this->convertTypes($dataModification);
// Translate the write command into BulkWrite operations.
$operations = [];
switch ($type) {
case 'delete':
foreach ($dataModification['deletes'] as $delete) {
$operations[] = [ ($delete['limit'] === 1 ? 'deleteOne' : 'deleteMany') => [ $delete['q'] ] ];
}
break;
case 'insert':
foreach ($dataModification['documents'] as $document) {
$operations[] = [ 'insertOne' => [ $document ] ];
}
break;
case 'update':
foreach ($dataModification['updates'] as $update) {
$operations[] = [ 'updateOne' => [ $update['q'], $update['u'] ] ];
}
break;
default:
throw new LogicException('Unsupported arrangement: ' . $type);
}
$bulk = new BulkWrite($this->getDatabaseName(), $collectionName, $operations);
return $bulk->execute($this->getPrimaryServer());
}
/**
 * Returns the exception class for the "error" section of an "assert" block.
 *
 * @param string $error
 * @return string
 * @throws LogicException if the error is unsupported
 */
private function getExceptionClassForError($error)
{
switch ($error) {
case 'FileNotFound':
case 'RevisionNotFound':
return 'MongoDB\GridFS\Exception\FileNotFoundException';
case 'ChunkIsMissing':
case 'ChunkIsWrongSize':
return 'MongoDB\GridFS\Exception\CorruptFileException';
default:
throw new LogicException('Unsupported error: ' . $error);
}
}
/**
 * Initializes data in the files and chunks collections.
 *
 * The same documents are inserted into both the actual (fs.*) and expected
 * (expected.*) collections so assertions start from identical state.
 *
 * @param array $data
 */
private function initializeData(array $data)
{
$data = $this->convertTypes($data);
if ( ! empty($data['files'])) {
$this->filesCollection->insertMany($data['files']);
$this->expectedFilesCollection->insertMany($data['files']);
}
if ( ! empty($data['chunks'])) {
$this->chunksCollection->insertMany($data['chunks']);
$this->expectedChunksCollection->insertMany($data['chunks']);
}
}
}
GridFS Tests
============
The YAML and JSON files in this directory are platform-independent tests
meant to exercise a driver's implementation of GridFS.
Converting to JSON
==================
The tests are written in YAML because it is easier for humans to write
and read, and because YAML supports a standard comment format. Each test
is also provided in JSON format because in some languages it is easier
to parse JSON than YAML.
If you modify any test, you should modify the YAML file and then
regenerate the JSON file from it.
One way to convert the files is to use an online converter, such as:
http://www.json2yaml.com/
It's advertised as a JSON to YAML converter but it can be used in either direction.
Note: the yaml2json utility from npm is not capable of converting these YAML tests
because it doesn't implement the full YAML spec.
Format
======
Each test file has two top level sections:
1. data
2. tests
The data section defines the initial contents of the files and chunks
collections for all tests in that file.
The tests section defines the tests to be run. The format of the tests
section will vary slightly depending on what tests are being defined.
In general, they will have the following sections:
1. description
2. arrange
3. act
4. assert
The arrange section, if present, defines changes to be made to the
initial contents of the files and chunks collections (as defined by
the data section) before this particular test is run. These changes
are described in the form of write commands that can be sent directly
to MongoDB.
The act section defines what operation (with which arguments) should
be performed.
The assert section defines what should be true at the end of the test.
This includes checking the return value of the operation, as well as
checking the expected contents of the files and chunks collections. The
expected contents of the files and chunks collections are described
in the form of write commands that modify collections named
expected.files and expected.chunks. Before running these commands,
load the initial files and chunks documents into the expected.files
and expected.chunks collections and then run the commands. At that point
you can assert that fs.files and expected.files are the same, and that
expected.chunks and fs.chunks are the same.
For operations that are expected to succeed the assert section contains
a "result" element describing the expected result. For operations
that are expected to fail the assert section contains an "error"
element describing the expected failure.
The "result" element is either the expected result when it is possible to
know the result in advance, or it is the special value "&result"
which means that we expect a result (not a failure) but the actual
value of the result could be anything. The notation "&result" is
modeled after YAML syntax for defining an anchor, and the
result value may be referenced later in the assert section as
"*result".
Another special notation in the assert section is "*actual", which
is used when the value of a field cannot be known in advance of the
test, so the assert logic should accept whatever the actual value
ended up being.
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0-with-empty-chunk"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 2
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "c700ed4fdb1d27055aa3faa2c2432283"
filename: "length-2"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 8
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "dd254cdc958e53abaa67da9f797125f5"
filename: "length-8"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "1122" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 1, data : { $hex : "55667788" } }
tests:
-
description: "Delete when length is 0"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000001" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000001" } }, limit : 1 }
] }
-
description: "Delete when length is 0 and there is one extra empty chunk"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000002" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000002" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000002" } }, limit : 0 }
] }
-
description: "Delete when length is 8"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000004" }
assert:
result: void
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000004" } }, limit : 0 }
] }
-
description: "Delete when files entry does not exist"
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000000" }
assert:
error: "FileNotFound"
-
description: "Delete when files entry does not exist and there are orphaned chunks"
arrange:
data:
-
{ delete : "fs.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
act:
operation: delete
arguments:
id: { "$oid" : "000000000000000000000004" }
assert:
error: "FileNotFound"
data:
-
{ delete : "expected.files", deletes : [
{ q : { _id : { "$oid" : "000000000000000000000004" } }, limit : 1 }
] }
-
{ delete : "expected.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000004" } }, limit : 0 }
] }
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 0
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "d41d8cd98f00b204e9800998ecf8427e"
filename: "length-0-with-empty-chunk"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 2
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "c700ed4fdb1d27055aa3faa2c2432283"
filename: "length-2"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 8
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "dd254cdc958e53abaa67da9f797125f5"
filename: "length-8"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000005" }
length: 10
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "57d83cd477bfb1ccd975ab33d827a92b"
filename: "length-10"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "1122" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 1, data : { $hex : "55667788" } }
- { _id : { "$oid" : "000000000000000000000005" }, files_id : { "$oid" : "000000000000000000000005" }, n : 0, data : { $hex : "11223344" } }
- { _id : { "$oid" : "000000000000000000000006" }, files_id : { "$oid" : "000000000000000000000005" }, n : 1, data : { $hex : "55667788" } }
- { _id : { "$oid" : "000000000000000000000007" }, files_id : { "$oid" : "000000000000000000000005" }, n : 2, data : { $hex : "99aa" } }
tests:
-
description: "Download when length is zero"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000001" }
options: { }
assert:
result: { $hex : "" }
-
description: "Download when length is zero and there is one empty chunk"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000002" }
options: { }
assert:
result: { $hex : "" }
-
description: "Download when there is one chunk"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000003" }
options: { }
assert:
result: { $hex : "1122" }
-
description: "Download when there are two chunks"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000004" }
options: { }
assert:
result: { $hex : "1122334455667788" }
-
description: "Download when there are three chunks"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
options: { }
assert:
result: { $hex : "112233445566778899aa" }
-
description: "Download when files entry does not exist"
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000000" }
options: { }
assert:
error: "FileNotFound"
-
description: "Download when an intermediate chunk is missing"
arrange:
data:
-
{ delete : "fs.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 1 }, limit : 1 }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsMissing"
-
description: "Download when final chunk is missing"
arrange:
data:
-
{ delete : "fs.chunks", deletes : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, limit : 1 }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsMissing"
-
description: "Download when an intermediate chunk is the wrong size"
arrange:
data:
-
{ update : "fs.chunks", updates : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 1 }, u : { $set : { data : { $hex : "556677" } } } },
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, u : { $set : { data : { $hex : "8899aa" } } } }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsWrongSize"
-
description: "Download when final chunk is the wrong size"
arrange:
data:
-
{ update : "fs.chunks", updates : [
{ q : { files_id : { "$oid" : "000000000000000000000005" }, n : 2 }, u : { $set : { data : { $hex : "99" } } } }
] }
act:
operation: download
arguments:
id: { "$oid" : "000000000000000000000005" }
assert:
error: "ChunkIsWrongSize"
data:
files:
-
_id: { "$oid" : "000000000000000000000001" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-01T00:00:00.000Z" }
md5: "47ed733b8d10be225eceba344d533586"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000002" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-02T00:00:00.000Z" }
md5: "b15835f133ff2e27c7cb28117bfae8f4"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000003" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-03T00:00:00.000Z" }
md5: "eccbc87e4b5ce2fe28308fd9f2a7baf3"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000004" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-04T00:00:00.000Z" }
md5: "f623e75af30e62bbd73d6df5b50bb7b5"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
-
_id: { "$oid" : "000000000000000000000005" }
length: 1
chunkSize: 4
uploadDate: { "$date" : "1970-01-05T00:00:00.000Z" }
md5: "4c614360da93c0a041b22e537de151eb"
filename: "abc"
contentType: "application/octet-stream"
aliases: []
metadata: {}
chunks:
- { _id : { "$oid" : "000000000000000000000001" }, files_id : { "$oid" : "000000000000000000000001" }, n : 0, data : { $hex : "11" } }
- { _id : { "$oid" : "000000000000000000000002" }, files_id : { "$oid" : "000000000000000000000002" }, n : 0, data : { $hex : "22" } }
- { _id : { "$oid" : "000000000000000000000003" }, files_id : { "$oid" : "000000000000000000000003" }, n : 0, data : { $hex : "33" } }
- { _id : { "$oid" : "000000000000000000000004" }, files_id : { "$oid" : "000000000000000000000004" }, n : 0, data : { $hex : "44" } }
- { _id : { "$oid" : "000000000000000000000005" }, files_id : { "$oid" : "000000000000000000000005" }, n : 0, data : { $hex : "55" } }
tests:
-
description: "Download_by_name when revision is 0"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 0 }
assert:
result: { $hex : "11" }
-
description: "Download_by_name when revision is 1"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 1 }
assert:
result: { $hex : "22" }
-
description: "Download_by_name when revision is -2"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : -2 }
assert:
result: { $hex : "44" }
-
description: "Download_by_name when revision is -1"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : -1 }
assert:
result: { $hex : "55" }
-
description: "Download_by_name when files entry does not exist"
act:
operation: download_by_name
arguments:
filename: "xyz"
assert:
error: "FileNotFound"
-
description: "Download_by_name when revision does not exist"
act:
operation: download_by_name
arguments:
filename: "abc"
options: { revision : 999 }
assert:
error: "RevisionNotFound"
data:
files: []
chunks: []
tests:
-
description: "Upload when length is 0"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 0, chunkSize : 4, uploadDate : "*actual", md5 : "d41d8cd98f00b204e9800998ecf8427e", filename : "filename" }
] }
-
description: "Upload when length is 1"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
-
description: "Upload when length is 3"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "112233" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 3, chunkSize : 4, uploadDate : "*actual", md5 : "bafae3a174ab91fc70db7a6aa50f4f52", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "112233" } }
] }
-
description: "Upload when length is 4"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11223344" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 4, chunkSize : 4, uploadDate : "*actual", md5 : "7e7c77cff5705d1f7574a25ef6662117", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } }
] }
-
description: "Upload when length is 5"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "1122334455" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 5, chunkSize : 4, uploadDate : "*actual", md5 : "283d4fea5dded59cf837d3047328f5af", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } },
{ _id : "*actual", files_id : "*result", n : 1, data : { $hex : "55" } }
] }
-
description: "Upload when length is 8"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "1122334455667788" }
options: { chunkSizeBytes : 4 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 8, chunkSize : 4, uploadDate : "*actual", md5 : "dd254cdc958e53abaa67da9f797125f5", filename : "filename" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11223344" } },
{ _id : "*actual", files_id : "*result", n : 1, data : { $hex : "55667788" } }
] }
-
description: "Upload when contentType is provided"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options: { chunkSizeBytes : 4, contentType : "image/jpeg" }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename", contentType : "image/jpeg" }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
-
description: "Upload when metadata is provided"
act:
operation: upload
arguments:
filename: "filename"
source: { $hex : "11" }
options:
chunkSizeBytes: 4
metadata: { x : 1 }
assert:
result: "&result"
data:
-
{ insert : "expected.files", documents : [
{ _id : "*result", length : 1, chunkSize : 4, uploadDate : "*actual", md5 : "47ed733b8d10be225eceba344d533586", filename : "filename", metadata : { x : 1 } }
] }
-
{ insert : "expected.chunks", documents : [
{ _id : "*actual", files_id : "*result", n : 0, data : { $hex : "11" } }
] }
<?php
namespace MongoDB\Tests\GridFS;
use \MongoDB\GridFS;
use \MongoDB\Collection;
use \MongoDB\BSON\ObjectId;
use \MongoDB\BSON\Binary;
use \MongoDB\Exception;
/**
 * Legacy harness for the GridFS spec tests.
 *
 * Runs each JSON spec file by replaying its "arrange" commands, dispatching
 * the "act" operation to a Bucket, and comparing the resulting fs.* contents
 * against the expected.* collections built from the "assert" commands.
 */
class SpecificationTests extends FunctionalTestCase
{
// Map of write-command name => closure that executes it against a Collection.
private $commands;
// Collections touched while running commands, keyed by collection name.
private $collections;
public function setUp()
{
parent::setUp();
$this->commands = array(
'insert' => function($col, $docs) {
$col->insertMany($docs['documents']);},
'update' => function($col, $docs) {
foreach($docs['updates'] as $update) {
$col->updateMany($update['q'], $update['u']);
}
},
'delete' => function($col, $docs){
foreach($docs['deletes'] as $delete){
$col->deleteMany($delete['q']);
}
}
);
}
/**
 * Runs every test contained in one spec file: initialize collections,
 * perform the "act" operation, then verify errors, results, and the
 * expected collection contents.
 *
 *@dataProvider provideSpecificationTests
 * @param array $testJson Decoded spec file ("data" and "tests" sections)
 */
public function testSpecificationTests($testJson)
{
foreach ($testJson['tests'] as $test) {
$this->initializeDatabases($testJson['data'], $test);
if(isset($test['act']['arguments']['options'])){
$options = $test['act']['arguments']['options'];
} else {
$options =[];
}
$this->bucket = new \MongoDB\GridFS\Bucket($this->manager, $this->getDatabaseName(), $this->fixTypes($options,false));
// Dispatch to the "<operation>Command" method defined below.
$func = $test['act']['operation'] . "Command";
$error = null;
try {
$result = $this->$func($test['act']['arguments']);
} catch(\MongoDB\Exception\Exception $e) {
$error = $e;
}
// Map spec error names to driver exception classes.
$errors = ['FileNotFound' => '\MongoDB\GridFS\Exception\FileNotFoundException',
'ChunkIsMissing' => '\MongoDB\GridFS\Exception\CorruptFileException',
'ExtraChunk' => '\MongoDB\GridFS\Exception\CorruptFileException',
'ChunkIsWrongSize' => '\MongoDB\GridFS\Exception\CorruptFileException',
'RevisionNotFound' => '\MongoDB\GridFS\Exception\FileNotFoundException'
];
if (!isset($test['assert']['error'])) {
$this->assertNull($error);
} else {
// NOTE(review): assertInstanceOf would give a clearer failure
// message than assertTrue(instanceof).
$shouldError = $test['assert']['error'];
$this->assertTrue($error instanceof $errors[$shouldError]);
}
if (isset($test['assert']['result'])) {
$testResult = $test['assert']['result'];
// "&result" means any successful result; "void" means null.
if ($testResult == '&result') {
$test['assert']['result'] = $result;
}
if ($testResult == "void") {
$test['assert']['result'] = null;
}
$fixedAssertFalse = $this->fixTypes($test['assert'], false);
// NOTE(review): PHPUnit's assertEquals signature is
// ($expected, $actual); the arguments here are reversed, which
// only affects failure-message wording, not the comparison.
$this->assertEquals($result, $fixedAssertFalse['result']);
}
$fixedAssertTrue = $this->fixTypes($test['assert'], true);
if (isset($test['assert']['data'])) {
$this->runCommands($fixedAssertTrue['data'], $result);
$this->collectionsEqual($this->collections['expected.files'],$this->bucket->getCollectionWrapper()->getFilesCollection());
if(isset($this->collections['expected.chunks'])) {
$this->collectionsEqual($this->collections['expected.chunks'],$this->bucket->getCollectionWrapper()->getChunksCollection());
}
}
}
}
/**
 * Data provider: one argument set per JSON spec file.
 *
 * @return array
 */
public function provideSpecificationTests()
{
$testPath= __DIR__.'/Specification/tests/*.json';
$testArgs = [];
foreach(glob($testPath) as $filename) {
$fileContents = file_get_contents($filename);
$testJson = json_decode($fileContents, true);
$testArgs[][] = $testJson;
}
return $testArgs;
}
/**
 * Recursively converts spec-encoded values: "$hex" to a binary string (or
 * Binary object when $makeBinary is true) and "$oid" to an ObjectId.
 * String values equal to "*actual" are removed entirely, since their real
 * values cannot be known in advance.
 *
 * @param mixed   $testJson   Array (or scalar) to convert
 * @param boolean $makeBinary Whether "$hex" values become Binary objects
 * @return mixed Converted copy of $testJson
 */
public function fixTypes($testJson, $makeBinary)
{
$result = $testJson;
foreach($result as $key =>$value) {
if (is_array($value) && isset($value['$hex'])) {
$result[$key] = hex2bin($value['$hex']);
if($makeBinary) {
$result[$key] = new \MongoDB\BSON\Binary($result[$key], \MongoDB\BSON\Binary::TYPE_GENERIC);
}
} else if (is_array($value) && isset($value['$oid'])) {
$result[$key] = new \MongoDB\BSON\ObjectId("".$value['$oid']);
} else if (is_array($value)) {
$result[$key] = $this->fixTypes($result[$key], $makeBinary);
} else if(is_string($value) && $value == '*actual') {
unset($result[$key]);
}
}
return $result;
}
/**
 * Asserts that two collections hold the same documents, ignoring fields
 * that cannot be predicted (_id and uploadDate).
 *
 * @param Collection $col1
 * @param Collection $col2
 */
public function collectionsEqual($col1, $col2)
{
$docs1 = $this->filterDoc($col1, true);
$docs2 = $this->filterDoc($col2, true);
$this->assertSameDocuments($docs1, $docs2);
}
/**
 * Returns all documents from a collection with unpredictable fields
 * (optionally _id, and always uploadDate) stripped for comparison.
 *
 * @param Collection $collection
 * @param boolean    $ignoreId Whether to strip _id from each document
 * @return array
 */
public function filterDoc($collection, $ignoreId)
{
$output = [];
$documents = $collection->find();
foreach($documents as $doc){
if ($ignoreId) {
unset($doc->_id);
}
if(isset($doc->uploadDate)) {
// uploadDate is generated by the driver and cannot be predicted.
// $this->assertTrue($doc->uploadDate instanceof DateTime);
unset($doc->uploadDate);
}
$output [] = $doc;
}
return $output;
}
/**
 * Executes a list of spec write commands, first replacing "*result"
 * placeholders in insert documents with the actual operation result.
 *
 * @param array $cmds   Commands from an "assert.data" section
 * @param mixed $result Result of the "act" operation
 */
public function runCommands($cmds, $result)
{
foreach($cmds as $cmd){
foreach($cmd as $key => $value) {
if(isset($this->commands[$key])) {
$cmdName = $key;
$collectionName = $value;
if(isset($cmd['documents'])){
foreach($cmd['documents'] as $docIndex => $doc) {
foreach($doc as $docKey => $docVal){
if(is_string($docVal)) {
if($docVal == '*result') {
$doc[$docKey] = $result;
}
}
}
$cmd['documents'][$docIndex] = $doc;
}
}
$collection = new Collection($this->manager, $this->getDatabaseName(), $collectionName);
$this->commands[$key]($collection, $this->fixTypes($cmd, true));
// Remember the collection so assertions can compare against it.
$this->collections[$collectionName] = $collection;
}
}
}
}
/**
 * Drops all GridFS-related collections, loads the spec file's initial
 * "data" into both fs.* and expected.* collections, and applies any
 * "arrange" commands for the current test.
 *
 * @param array $data Initial "files" and "chunks" documents
 * @param array $test Current test (may contain an "arrange" section)
 */
public function initializeDatabases($data, $test)
{
$collectionsToDrop = ['fs.files','fs.chunks','expected.files','expected.chunks'];
$data = $this->fixTypes($data, true);
foreach ($collectionsToDrop as $collectionName) {
$collection = new Collection($this->manager, $this->getDatabaseName(), $collectionName);
$collection->drop();
}
if (isset($data['files']) && count($data['files']) > 0) {
$filesCollection = new Collection($this->manager, $this->getDatabaseName(), "fs.files");
$filesCollection->insertMany($data['files']);
$expectedFilesCollection = new Collection($this->manager, $this->getDatabaseName(), "expected.files");
$expectedFilesCollection->insertMany($data['files']);
$this->collections['expected.files'] = $expectedFilesCollection;
}
if (isset($data['chunks']) && count($data['chunks']) > 0) {
$chunksCollection = new Collection($this->manager, $this->getDatabaseName(), "fs.chunks");
$chunksCollection->insertMany($data['chunks']);
$expectedChunksCollection = new Collection($this->manager, $this->getDatabaseName(), "expected.chunks");
$expectedChunksCollection->insertMany($data['chunks']);
$this->collections['expected.chunks'] = $expectedChunksCollection;
}
if(isset($test['arrange'])) {
foreach($test['arrange']['data'] as $cmd) {
foreach($cmd as $key => $value) {
if(isset($this->commands[$key])) {
$collection = new Collection($this->manager, $this->getDatabaseName(), $cmd[$key]);
$this->commands[$key]($collection,$this->fixTypes($cmd, true));
}
}
}
}
}
/**
 * Handles the "upload" act: writes the source bytes to a temp stream and
 * uploads it through the bucket.
 *
 * @param array $args Act arguments (filename, source, options)
 * @return mixed Id of the uploaded file
 */
public function uploadCommand($args)
{
$args = $this->fixTypes($args, false);
$stream = fopen('php://temp', 'w+');
fwrite($stream, $args['source']);
rewind($stream);
$result = $this->bucket->uploadFromStream($args['filename'], $stream, $args['options']);
fclose($stream);
return $result;
}
/**
 * Handles the "download" act: downloads a file by id and returns its bytes.
 *
 * @param array $args Act arguments (id)
 * @return string Downloaded file contents
 */
function downloadCommand($args)
{
$args = $this->fixTypes($args, false);
$stream = fopen('php://temp', 'w+');
$this->bucket->downloadToStream($args['id'], $stream);
rewind($stream);
$result = stream_get_contents($stream);
fclose($stream);
return $result;
}
/**
 * Handles the "delete" act: removes a file (and its chunks) by id.
 *
 * @param array $args Act arguments (id)
 */
function deleteCommand($args)
{
$args = $this->fixTypes($args, false);
$this->bucket->delete($args['id']);
}
/**
 * Handles the "download_by_name" act: downloads a file by filename,
 * optionally selecting a revision via options, and returns its bytes.
 *
 * @param array $args Act arguments (filename, optional options)
 * @return string Downloaded file contents
 */
function download_by_nameCommand($args)
{
$args = $this->fixTypes($args, false);
$stream = fopen('php://temp', 'w+');
if(isset($args['options'])) {
$this->bucket->downloadToStreamByName($args['filename'], $stream, $args['options']);
} else {
$this->bucket->downloadToStreamByName($args['filename'], $stream);
}
rewind($stream);
$result = stream_get_contents($stream);
fclose($stream);
return $result;
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment