Add some tests. Fiddle around with AWS stuff

This commit is contained in:
Matt Godbolt
2018-08-11 11:24:17 -05:00
parent b538ab04f0
commit d33afe157c
3 changed files with 251 additions and 39 deletions

View File

@@ -63,33 +63,40 @@ const instanceD = {
]
};
AWS.mock('EC2', 'describeInstances', {
Reservations: [
{
Instances: [
instanceA,
instanceB,
instanceC,
instanceD
function setup() {
beforeEach(() => {
AWS.mock('EC2', 'describeInstances', {
Reservations: [
{
Instances: [
instanceA,
instanceB,
instanceC,
instanceD
]
}
]
}
]
});
});
AWS.mock('SSM', 'getParametersByPath', {
Parameters: [
{
Name: '/compiler-explorer/configValue',
Value: 'fromAws'
},
{
Name: '/compiler-explorer/onlyOnAws',
Value: 'bibble'
}
]
});
AWS.mock('SSM', 'getParametersByPath', {
Parameters: [
{
Name: '/compiler-explorer/configValue',
Value: 'fromAws'
},
{
Name: '/compiler-explorer/onlyOnAws',
Value: 'bibble'
}
]
});
});
afterEach(() => AWS.restore());
}
describe('AWS instance fetcher tests', () => {
setup();
it('Fetches Bob', () => {
const fakeProps = {
region: 'not-a-region',
@@ -112,6 +119,7 @@ describe('AWS instance fetcher tests', () => {
});
describe('AWS config tests', () => {
setup();
it('Doesn\'t fetch unless region is configured', () => {
const fakeProps = {
region: '',

View File

@@ -187,29 +187,41 @@ describe('On disk caches', () => {
});
const S3FS = {};
AWS.mock('S3', 'getObject', (params, callback) => {
params.Bucket.should.equal("test.bucket");
const result = S3FS[params.Key];
if (!result) {
const error = new Error("Not found");
error.code = "NoSuchKey";
callback(error);
} else {
callback(null, {Body: result});
}
});
AWS.mock('S3', 'putObject', (params, callback) => {
params.Bucket.should.equal("test.bucket");
S3FS[params.Key] = params.Body;
callback(null, {});
});
function setup() {
beforeEach(() => {
AWS.mock('S3', 'getObject', (params, callback) => {
params.Bucket.should.equal("test.bucket");
const result = S3FS[params.Key];
if (!result) {
const error = new Error("Not found");
error.code = "NoSuchKey";
callback(error);
} else {
callback(null, {Body: result});
}
});
AWS.mock('S3', 'putObject', (params, callback) => {
params.Bucket.should.equal("test.bucket");
S3FS[params.Key] = params.Body;
callback(null, {});
});
});
afterEach(() => {
AWS.restore();
});
}
describe('S3 tests', () => {
setup();
basicTests(() => new S3Cache('test.bucket', 'cache', 'uk-north-1'));
// BE VERY CAREFUL - the below can be used with sufficient permissions to test on prod (With mocks off)...
// basicTests(() => new S3Cache('storage.godbolt.org', 'cache', 'us-east-1'));
});
describe('Config tests', () => {
setup();
it('should create null cache on empty config', () => {
const cache = FromConfig.create("");
cache.constructor.should.eql(NullCache);

View File

@@ -0,0 +1,192 @@
// Copyright (c) 2018, Compiler Explorer Authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
const chai = require('chai'),
chaiAsPromised = require("chai-as-promised"),
properties = require('../../lib/properties'),
s3s = require('../../lib/storage/storage-s3'),
AWS = require('aws-sdk-mock');
chai.use(chaiAsPromised);
const should = chai.should();
// NB!!! Anything using mocked AWS calls needs to be initialised in the `it(...)` block! If you initialise it in the
// `describe()` top level code then it won't be mocked in time. We only mock and de-mock before/after else we end up
// fighting over the global AWS mocking stuff. I hate mocha...there's probably a better way...
function mockerise(service, method) {
const handlers = [];
beforeEach(() => {
console.log(`Mockerising ${service}/${method}`);
AWS.mock(service, method, (q, callback) => {
const qh = handlers.shift();
should.exist(qh);
try {
callback(null, qh(q));
} catch (e) {
callback(e, null);
}
});
});
afterEach(() => {
console.log(`Democking ${service}/${method}`);
AWS.restore(service, method);
});
return handlers;
}
////////////////
describe('Find unique subhash tests', () => {
const dynamoDbQueryHandlers = mockerise('DynamoDB', 'query');
const compilerProps = properties.fakeProps({});
const awsProps = properties.fakeProps({
region: 'not-a-region',
storageBucket: 'bucket',
storagePrefix: 'prefix',
storageDynamoTable: 'table'
});
it('works when empty', () => {
const storage = new s3s(compilerProps, awsProps);
dynamoDbQueryHandlers.push((q) => {
q.TableName.should.equal('table');
return {};
});
return storage.findUniqueSubhash("ABCDEFGHIJKLMNOPQRSTUV").should.eventually.deep.equal(
{
alreadyPresent: false,
prefix: "ABCDEF",
uniqueSubHash: "ABCDEF"
}
);
});
it('works when not empty', () => {
const storage = new s3s(compilerProps, awsProps);
dynamoDbQueryHandlers.push(() => {
return {
Items: [
{
full_hash: {S: 'ZZVZT'},
unique_subhash: {S: 'ZZVZT'}
}
]
};
});
return storage.findUniqueSubhash("ABCDEFGHIJKLMNOPQRSTUV").should.eventually.deep.equal(
{
alreadyPresent: false,
prefix: "ABCDEF",
uniqueSubHash: "ABCDEF"
}
);
});
it('works when there\' a collision', () => {
const storage = new s3s(compilerProps, awsProps);
dynamoDbQueryHandlers.push(() => {
return {
Items: [
{
full_hash: {S: 'ABCDEFZZ'},
unique_subhash: {S: 'ABCDEF'}
}
]
};
});
return storage.findUniqueSubhash("ABCDEFGHIJKLMNOPQRSTUV").should.eventually.deep.equal(
{
alreadyPresent: false,
prefix: "ABCDEF",
uniqueSubHash: "ABCDEFG"
}
);
});
it('finds an existing match', () => {
const storage = new s3s(compilerProps, awsProps);
dynamoDbQueryHandlers.push(() => {
return {
Items: [
{
full_hash: {S: 'ABCDEFGHIJKLMNOPQRSTUV'},
unique_subhash: {S: 'ABCDEF'}
}
]
};
});
return storage.findUniqueSubhash("ABCDEFGHIJKLMNOPQRSTUV").should.eventually.deep.equal(
{
alreadyPresent: true,
prefix: "ABCDEF",
uniqueSubHash: "ABCDEF"
}
);
});
});
describe('Stores to s3', () => {
const dynamoDbPutItemHandlers = mockerise('DynamoDB', 'putItem');
const s3PutObjectHandlers = mockerise('S3', 'putObject');
const compilerProps = properties.fakeProps({});
const awsProps = properties.fakeProps({
region: 'not-a-region',
storageBucket: 'bucket',
storagePrefix: 'prefix',
storageDynamoTable: 'table'
});
it('and works ok', () => {
const storage = new s3s(compilerProps, awsProps);
const object = {
prefix: "ABCDEF",
uniqueSubHash: "ABCDEFG",
fullHash: "ABCDEFGHIJKLMNOP",
config: "yo"
};
const ran = {s3: false, dynamo: false};
s3PutObjectHandlers.push((q) => {
console.log(q);
q.Bucket.should.equal('bucket');
q.Key.should.equal('prefix/ABCDEFGHIJKLMNOP');
q.Body.should.equal('yo');
ran.s3 = true;
return {};
});
dynamoDbPutItemHandlers.push((q) => {
q.TableName.should.equals('table');
q.Item.should.deep.equals({
prefix: {S: 'ABCDEF'},
unique_subhash: {S: 'ABCDEFG'},
full_hash: {S: 'ABCDEFGHIJKLMNOP'},
clicks: {N: '0'}
});
ran.dynamo = true;
return {};
});
return storage.storeItem(object).then(() => {
ran.should.deep.equal({s3: true, dynamo: true});
});
});
});