Mirror of https://github.com/compiler-explorer/compiler-explorer.git, synced 2025-12-27 05:53:49 -05:00
Vitest (#6219)
Port to vitest. Port everything to TypeScript. Remove chai, mocha and chai-as-promised. Add some docs.
@@ -21,7 +21,6 @@ extends:
- plugin:@typescript-eslint/recommended
- plugin:import/typescript
env:
mocha: true
node: true
es6: true
rules:
.idea/prettier.xml (generated)
@@ -1,6 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PrettierConfiguration">
<option name="myConfigurationMode" value="AUTOMATIC" />
<option name="myRunOnSave" value="true" />
<option name="myRunOnReformat" value="true" />
</component>
</project>
.idea/runConfigurations/All_Mocha_Tests.xml (generated)
@@ -1,15 +0,0 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="All Mocha Tests" type="mocha-javascript-test-runner">
<node-interpreter>/opt/compiler-explorer/node/bin/node</node-interpreter>
<node-options>-r esm</node-options>
<mocha-package>$PROJECT_DIR$/node_modules/mocha</mocha-package>
<working-directory>$PROJECT_DIR$</working-directory>
<pass-parent-env>true</pass-parent-env>
<ui>bdd</ui>
<extra-mocha-options>--recursive</extra-mocha-options>
<test-kind>DIRECTORY</test-kind>
<test-directory>$PROJECT_DIR$/test</test-directory>
<recursive>false</recursive>
<method v="2" />
</configuration>
</component>
@@ -1,2 +0,0 @@
recursive: true
ignore: test/filter-tests.js
@@ -1 +0,0 @@
recursive: true
@@ -1,6 +1,3 @@
// eslint-disable-next-line node/no-unpublished-import
import {it} from 'mocha';

import {assertNoConsoleOutput, stubConsoleOutput} from '../support/utils';

const PANE_DATA_MAP = {
@@ -39,7 +36,8 @@ describe('Individual pane testing', () => {
});
});

afterEach('Ensure no output in console', () => {
afterEach(() => {
// Ensure no output in console
return cy.window().then(win => {
assertNoConsoleOutput();
});
@@ -104,7 +102,7 @@ describe('Known good state test', () => {
);
});

afterEach('Ensure no output in console', () => {
afterEach(() => {
return cy.window().then(win => {
assertNoConsoleOutput();
});
@@ -3,11 +3,10 @@
"compilerOptions": {
/* Module resolution */
"target": "esnext",
"module": "commonjs",
"moduleResolution": "classic",
"module": "es2015",
"moduleResolution": "bundler",
/* Code generation */
"typeRoots": ["../node_modules/@types"],
"types": ["mocha", "chai", "chai-http"],
/* https://github.com/cypress-io/cypress/issues/26203#issuecomment-1571861397 */
"sourceMap": false
}
docs/VitestCribSheet.md (new file)
@@ -0,0 +1,19 @@
## `vitest` crib sheet

We just moved to a new testing framework, `vitest`. Here are some notes to help you get started.

### Running tests

- `npm test` will run the tests.
- `npm run test:watch` will run the watcher, which runs all the tests, then watches for changes and re-runs the
  affected tests. This is much quicker, as a lot of the work is cached.
- `npm run test:watch <path>` will do the same for just that path.

### Writing tests

In general, use `describe`, `it` and `expect` as you would with jest. Import these all from `vitest`. If you need to
test async things, use `await expect(someAsyncThing()).resolves.toEqual(someValue)`.

The `expect` API is pretty rich and supports a lot of matchers, like `expect(x).toContain("moo")` or
`expect(y).not.toHaveProperty("badger")`. You can see the full list in the
[vitest documentation](https://vitest.dev/api/expect.html).
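To make the crib sheet concrete, here is a minimal sketch of a test file written against the API described above. The helpers `add` and `fetchGreeting` are hypothetical, invented purely for illustration; they are not part of the Compiler Explorer codebase.

```ts
import {describe, expect, it} from 'vitest';

// Hypothetical helpers, used only to demonstrate the vitest API.
function add(a: number, b: number): number {
    return a + b;
}

async function fetchGreeting(name: string): Promise<string> {
    return `hello, ${name}`;
}

describe('example suite', () => {
    it('uses plain matchers for synchronous code', () => {
        expect(add(2, 2)).toEqual(4);
        expect([1, 2, 3]).toContain(2);
        expect({moo: true}).not.toHaveProperty('badger');
    });

    it('awaits async expectations with .resolves', async () => {
        await expect(fetchGreeting('badger')).resolves.toEqual('hello, badger');
    });
});
```

Running `npm run test:watch test/example.ts` (assuming the file were saved under `test/`) would pick it up and re-run it on every change.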
@@ -83,7 +83,7 @@ export class PascalDemangler extends BaseDemangler {
];
}

protected shouldIgnoreSymbol(text: string) {
public shouldIgnoreSymbol(text: string) {
for (const k in this.ignoredsymbols) {
if (text.startsWith(this.ignoredsymbols[k])) {
return true;
@@ -93,7 +93,7 @@ export class PascalDemangler extends BaseDemangler {
return false;
}

protected composeReadableMethodSignature(unitname, classname, methodname, params) {
public composeReadableMethodSignature(unitname, classname, methodname, params) {
let signature = '';

if (classname !== '') signature = classname.toLowerCase() + '.';
@@ -104,7 +104,7 @@ export class PascalDemangler extends BaseDemangler {
return signature;
}

protected demangle(text) {
public demangle(text) {
if (!text.endsWith(':')) return false;
if (this.shouldIgnoreSymbol(text)) return false;

@@ -201,11 +201,11 @@ export class PascalDemangler extends BaseDemangler {
return unmangled;
}

protected addDemangleToCache(text) {
public addDemangleToCache(text) {
this.demangle(text);
}

protected demangleIfNeeded(text) {
public demangleIfNeeded(text) {
if (text.includes('$')) {
if (this.shouldIgnoreSymbol(text)) {
return text;
@@ -58,7 +58,7 @@ export class FormattingHandler {
const result = await exec.execute(exe, [versionArgument], {});
const match = result.stdout.match(versionRegExp);
const formatterClass = getFormatterTypeByKey(type);
const styleList = this.ceProps<string>(`formatter.${formatterName}.styles`);
const styleList = this.ceProps<string>(`formatter.${formatterName}.styles`, '');
const styles = styleList === '' ? [] : styleList.split(':');
// If there is an explicit version, grab it. Otherwise try to filter the output
const version = hasExplicitVersion
@@ -169,7 +169,7 @@ export class AsmParser extends AsmRegex implements IAsmParser {
return inVLIWpacket;
}

hasOpcode(line, inNvccCode, inVLIWpacket?) {
hasOpcode(line, inNvccCode?, inVLIWpacket?) {
// Remove any leading label definition...
const match = line.match(this.labelDef);
if (match) {
package-lock.json (generated) — diff suppressed because it is too large.
package.json
@@ -89,19 +89,15 @@
"yaml": "^2.4.0"
},
"devDependencies": {
"@babel/preset-typescript": "^7.23.3",
"@smithy/util-stream": "^2.1.3",
"@types/body-parser": "^1.19.5",
"@types/bootstrap": "^5.2.10",
"@types/chai": "^4.3.12",
"@types/chai-as-promised": "^7.1.8",
"@types/express": "^4.17.21",
"@types/file-saver": "^2.0.7",
"@types/fs-extra": "^11.0.4",
"@types/http-proxy": "^1.17.14",
"@types/jquery": "^3.5.29",
"@types/js-cookie": "^3.0.6",
"@types/mocha": "^10.0.6",
"@types/node-targz": "^0.2.4",
"@types/nopt": "^3.0.32",
"@types/request": "^2.48.12",
@@ -111,12 +107,10 @@
"@types/webpack-env": "^1.18.4",
"@typescript-eslint/eslint-plugin": "^7.1.1",
"@typescript-eslint/parser": "^7.1.1",
"@vitest/coverage-v8": "^1.3.1",
"approvals": "^6.2.4",
"aws-sdk-client-mock": "^3.0.1",
"c8": "^9.1.0",
"chai": "^4.4.1",
"chai-as-promised": "^7.1.1",
"chai-http": "^4.4.0",
"cheerio": "^1.0.0-rc.12",
"css-loader": "^6.10.0",
"css-minimizer-webpack-plugin": "^6.0.0",
@@ -137,7 +131,6 @@
"file-loader": "^6.2.0",
"lint-staged": "^15.2.2",
"mini-css-extract-plugin": "^2.8.1",
"mocha": "^10.3.0",
"mock-fs": "^5.2.0",
"monaco-editor-webpack-plugin": "^7.1.0",
"nock": "^13.5.4",
@@ -146,10 +139,12 @@
"sass-loader": "^14.1.1",
"sinon": "^17.0.1",
"source-map-loader": "^5.0.0",
"supertest": "^6.3.4",
"supervisor": "^0.12.0",
"terser-webpack-plugin": "^5.3.10",
"ts-loader": "^9.5.1",
"typescript": "^5.3.3",
"vitest": "^1.3.1",
"webpack": "^5.90.3",
"webpack-cli": "^5.1.4",
"webpack-dev-middleware": "^7.0.0",
@@ -168,13 +163,14 @@
},
"scripts": {
"ci-lint": "eslint --format github .",
"ci-test": "c8 npm run test",
"ci-test": "vitest run --coverage",
"cypress": "cypress run",
"lint": "eslint --max-warnings=0 . --fix",
"lint-check": "eslint --max-warnings=0 . && prettier --check .",
"lint-files": "eslint --max-warnings=0",
"test": "node --no-warnings=ExperimentalWarning --loader ts-node/esm ./node_modules/mocha/bin/mocha.js -b 'test/**/*.ts' 'test/**/*.js'",
"test-min": "node --no-warnings=ExperimentalWarning --loader ts-node/esm ./node_modules/mocha/bin/mocha.js -b --config .mocharc-min.yml",
"test": "vitest run",
"test:watch": "vitest",
"test-min": "vitest run --exclude test/filter-tests.ts",
"fix": "npm run lint && npm run format && npm run ts-check",
"check": "npm run ts-check && npm run lint-check && npm run test-min -- --reporter min",
"dev": "cross-env NODE_ENV=DEV node --no-warnings=ExperimentalWarning --loader ts-node/esm app.ts",
@@ -22,13 +22,6 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

// This file is not force-required: it must be loaded by mocha to get access
// to `before`.
import {suppressConsoleLog} from '../lib/logger.js';

if (typeof before === 'function') {
// this hook will run once before any tests are executed
before(() => {
suppressConsoleLog();
});
}
suppressConsoleLog();
@@ -1,36 +0,0 @@
// Copyright (c) 2020, Compiler Explorer Authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

// This file is force-required by mocha config. That ensure it's loaded always.
// It's force included so that even if you run a single test (e.g. from an IDE),
// this configuration is applied.
import chai from 'chai';
import chaiAsPromised from 'chai-as-promised';
import chaiHttp from 'chai-http';
import deepEqualInAnyOrder from 'deep-equal-in-any-order';

chai.should();
chai.use(chaiAsPromised);
chai.use(chaiHttp);
chai.use(deepEqualInAnyOrder);
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {CompilationEnvironment} from '../lib/compilation-env.js';
import {AnalysisTool, LLVMmcaTool} from '../lib/compilers/index.js';

@@ -40,7 +42,7 @@ describe('LLVM-mca tool definition', () => {
let ce: CompilationEnvironment;
let a: LLVMmcaTool;

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({languages});
const info = makeFakeCompilerInfo({
remote: {
@@ -55,28 +57,22 @@ describe('LLVM-mca tool definition', () => {

it('should have most filters disabled', () => {
if (shouldExist(a)) {
a.getInfo().disabledFilters.should.be.deep.equal([
'labels',
'directives',
'commentOnly',
'trim',
'debugCalls',
]);
expect(a.getInfo().disabledFilters).toEqual(['labels', 'directives', 'commentOnly', 'trim', 'debugCalls']);
}
});

it('should default to most filters off', () => {
const filters = a.getDefaultFilters();
filters.intel.should.equal(true);
filters.commentOnly.should.equal(false);
filters.directives.should.equal(false);
filters.labels.should.equal(false);
filters.optOutput.should.equal(false);
filters.debugCalls.should.equal(false);
expect(filters.intel).toBe(true);
expect(filters.commentOnly).toBe(false);
expect(filters.directives).toBe(false);
expect(filters.labels).toBe(false);
expect(filters.optOutput).toBe(false);
expect(filters.debugCalls).toBe(false);
});

it('should not support objdump', () => {
a.supportsObjdump().should.equal(false);
expect(a.supportsObjdump()).toBe(false);
});

it('should support "-o output-file" by default', () => {
@@ -87,7 +83,7 @@ describe('LLVM-mca tool definition', () => {
}),
'output.txt',
);
opts.should.be.deep.equal(['-o', 'output.txt']);
expect(opts).toEqual(['-o', 'output.txt']);
});

it('should split if disabledFilters is a string', () => {
@@ -100,6 +96,6 @@ describe('LLVM-mca tool definition', () => {
lang: 'analysis',
disabledFilters: 'labels,directives,debugCalls' as any,
});
new AnalysisTool(info, ce).getInfo().disabledFilters.should.deep.equal(['labels', 'directives', 'debugCalls']);
expect(new AnalysisTool(info, ce).getInfo().disabledFilters).toEqual(['labels', 'directives', 'debugCalls']);
});
});
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {CompilationEnvironment} from '../lib/compilation-env.js';
import {Dex2OatCompiler} from '../lib/compilers/index.js';
import * as utils from '../lib/utils.js';
@@ -47,37 +49,37 @@ const androidKotlinInfo = {
lang: languages.androidKotlin.id,
} as unknown as CompilerInfo;

describe('dex2oat', function () {
describe('dex2oat', () => {
let env: CompilationEnvironment;

before(() => {
beforeAll(() => {
env = makeCompilationEnvironment({languages});
});

describe('android-java', () => {
it('Should not crash on instantiation', function () {
it('Should not crash on instantiation', () => {
new Dex2OatCompiler(androidJavaInfo, env);
});

it('Output is shown as-is if full output mode is enabled', function () {
it('Output is shown as-is if full output mode is enabled', () => {
return testParse(androidJavaInfo, 'test/android/java', true);
});

it('Output is parsed and formatted if full output mode is disabled', function () {
it('Output is parsed and formatted if full output mode is disabled', () => {
return testParse(androidJavaInfo, 'test/android/java', false);
});
});

describe('android-kotlin', () => {
it('Should not crash on instantiation', function () {
it('Should not crash on instantiation', () => {
new Dex2OatCompiler(androidKotlinInfo, env);
});

it('Output is shown as-is if full output mode is enabled', function () {
it('Output is shown as-is if full output mode is enabled', () => {
return testParse(androidKotlinInfo, 'test/android/kotlin', true);
});

it('Output is parsed and formatted if full output mode is disabled', function () {
it('Output is parsed and formatted if full output mode is disabled', () => {
return testParse(androidKotlinInfo, 'test/android/kotlin', false);
});
});
@@ -92,7 +94,7 @@ describe('dex2oat', function () {
asm,
};
const processed = await compiler.processAsm(objdumpResult);
processed.should.have.property('asm');
expect(processed).toHaveProperty('asm');
const actualSegments = (processed as {asm: ParsedAsmResultLine[]}).asm;

// fullOutput results in no processing, with the entire oatdump text
@@ -107,6 +109,6 @@ describe('dex2oat', function () {
};
});

actualSegments.should.deep.equal(expectedSegments);
expect(actualSegments).toEqual(expectedSegments);
}
});
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {Filter} from '../static/ansi-to-html.js';

describe('ansi-to-html', () => {
@@ -33,59 +35,59 @@ describe('ansi-to-html', () => {
};
it('Should leave non-ansi colours alone', () => {
const filter = new Filter(filterOpts);
filter.toHtml('I am a boring old string').should.equal('I am a boring old string');
expect(filter.toHtml('I am a boring old string')).toEqual('I am a boring old string');
});
it('Should handle simple cases', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[38;5;99mTest').should.equal('<span style="color:#875fff">Test</span>');
expect(filter.toHtml('\x1B[38;5;99mTest')).toEqual('<span style="color:#875fff">Test</span>');
});
it('Should handle nasty edge cases', () => {
const filter = new Filter(filterOpts);
// See #1666, this used to cause catastrophic backtracking.
filter
.toHtml(
expect(
filter.toHtml(
'\x1B[38;5;9999999999999999999999999999999999999999999999999999999999999999999999999999999' +
'99999999999999999999"mTest',
)
.should.equal(
'5;9999999999999999999999999999999999999999999999999999999999999' +
'99999999999999999999999999999999999999"mTest',
);
),
).toEqual(
'5;9999999999999999999999999999999999999999999999999999999999999' +
'99999999999999999999999999999999999999"mTest',
);
});

// With thanks to https://github.com/rburns/ansi-to-html/pull/84/files
it('renders xterm foreground 256 sequences', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[38;5;196mhello').should.equal('<span style="color:#ff0000">hello</span>');
expect(filter.toHtml('\x1B[38;5;196mhello')).toEqual('<span style="color:#ff0000">hello</span>');
});
it('renders xterm background 256 sequences', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[48;5;196mhello').should.equal('<span style="background-color:#ff0000">hello</span>');
expect(filter.toHtml('\x1B[48;5;196mhello')).toEqual('<span style="background-color:#ff0000">hello</span>');
});

it('should ignore reverse video', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[7mhello').should.equal('hello');
expect(filter.toHtml('\x1B[7mhello')).toEqual('hello');
});

// tests for #3659
it('should stream', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[38;5;99mfoo');
filter.toHtml('bar').should.equal('<span style="color:#875fff">bar</span>');
expect(filter.toHtml('bar')).toEqual('<span style="color:#875fff">bar</span>');
});
it('should handle stream reset', () => {
const filter = new Filter(filterOpts);
filter.toHtml('\x1B[38;5;99mfoo');
filter.reset();
filter.toHtml('bar').should.equal('bar');
expect(filter.toHtml('bar')).toEqual('bar');
});

// rgb test
it('should process rgb colors', () => {
const filter = new Filter(filterOpts);
filter
.toHtml('\x1B[38;2;57;170;243mfoo\x1B[48;2;100;100;100mbar')
.should.equal('<span style="color:#39aaf3">foo<span style="background-color:#646464">bar</span></span>');
expect(filter.toHtml('\x1B[38;2;57;170;243mfoo\x1B[48;2;100;100;100mbar')).toEqual(
'<span style="color:#39aaf3">foo<span style="background-color:#646464">bar</span></span>',
);
});
});
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {unwrap} from '../lib/assert.js';
import {VcAsmParser} from '../lib/parsers/asm-parser-vc.js';
import {AsmParserZ88dk} from '../lib/parsers/asm-parser-z88dk.js';
@@ -48,7 +50,7 @@ describe('ASM CL parser', () => {
debugCalls: false,
});

result.asm.should.deep.equal([
expect(result.asm).toEqual([
{
source: null,
text: '<Compilation failed>',
@@ -60,36 +62,37 @@ describe('ASM CL parser', () => {
describe('ASM regex base class', () => {
it('should leave unfiltered lines alone', () => {
const line = ' this is a line';
AsmRegex.filterAsmLine(line, makeFakeParseFiltersAndOutputOptions({})).should.equal(line);
expect(AsmRegex.filterAsmLine(line, makeFakeParseFiltersAndOutputOptions({}))).toEqual(line);
});
it('should use up internal whitespace when asked', () => {
AsmRegex.filterAsmLine(
' this is a line',
makeFakeParseFiltersAndOutputOptions({trim: true}),
).should.equal(' this is a line');
AsmRegex.filterAsmLine('this is a line', makeFakeParseFiltersAndOutputOptions({trim: true})).should.equal(
'this is a line',
);
expect(
AsmRegex.filterAsmLine(' this is a line', makeFakeParseFiltersAndOutputOptions({trim: true})),
).toEqual(' this is a line');
expect(
AsmRegex.filterAsmLine('this is a line', makeFakeParseFiltersAndOutputOptions({trim: true})),
).toEqual('this is a line');
});
it('should keep whitespace in strings', () => {
AsmRegex.filterAsmLine(
'equs "this string"',
makeFakeParseFiltersAndOutputOptions({trim: true}),
).should.equal('equs "this string"');
AsmRegex.filterAsmLine(
' equs "this string"',
makeFakeParseFiltersAndOutputOptions({trim: true}),
).should.equal(' equs "this string"');
AsmRegex.filterAsmLine(
'equs "this \\" string \\""',
makeFakeParseFiltersAndOutputOptions({trim: true}),
).should.equal('equs "this \\" string \\""');
expect(
AsmRegex.filterAsmLine('equs "this string"', makeFakeParseFiltersAndOutputOptions({trim: true})),
).toEqual('equs "this string"');
expect(
AsmRegex.filterAsmLine(
' equs "this string"',
makeFakeParseFiltersAndOutputOptions({trim: true}),
),
).toEqual(' equs "this string"');
expect(
AsmRegex.filterAsmLine(
'equs "this \\" string \\""',
makeFakeParseFiltersAndOutputOptions({trim: true}),
),
).toEqual('equs "this \\" string \\""');
});
it('should not get upset by mismatched strings', () => {
AsmRegex.filterAsmLine(
'a "string \'yeah',
makeFakeParseFiltersAndOutputOptions({trim: true}),
).should.equal('a "string \'yeah');
expect(
AsmRegex.filterAsmLine('a "string \'yeah', makeFakeParseFiltersAndOutputOptions({trim: true})),
).toEqual('a "string \'yeah');
});
});

@@ -97,7 +100,7 @@ describe('ASM parser base class', () => {
let parser;
const filters = {};

before(() => {
beforeAll(() => {
parser = new AsmParser();
});

@@ -139,10 +142,10 @@ main: # @main
const mov1_line = output.asm.find(line => line.text.trim().startsWith('mov1'));
const call_line = output.asm.find(line => line.text.trim().startsWith('call'));
const mov4_line = output.asm.find(line => line.text.trim().startsWith('mov4'));
push_line.source.should.not.have.ownProperty('column');
mov1_line.source.should.not.have.ownProperty('column');
call_line.source.column.should.equal(20);
mov4_line.source.column.should.equal(9);
expect(push_line.source).not.toHaveProperty('column');
expect(mov1_line.source).not.toHaveProperty('column');
expect(call_line.source.column).toEqual(20);
expect(mov4_line.source.column).toEqual(9);
});

it('should parse line numbers when a column is not specified', () => {
@@ -167,8 +170,8 @@ main:
`;
const output = parser.process(asm, filters);
const pushq_line = output.asm.find(line => line.text.trim().startsWith('pushq'));
pushq_line.source.should.not.have.ownProperty('column');
pushq_line.source.line.should.equal(2);
expect(pushq_line.source).not.toHaveProperty('column');
expect(pushq_line.source.line).toEqual(2);
});
});

@@ -176,7 +179,7 @@ describe('ASM parser', () => {
let parser: AsmParser;
const filters = {};

before(() => {
beforeAll(() => {
parser = new AsmParser();
});

@@ -197,7 +200,7 @@ ${' '.repeat(65530)}x
ret
`;
const output = parser.process(asm, filters);
parseInt(unwrap(output.parsingTime)).should.be.lessThan(500); // reported as ms, generous timeout for ci runner
expect(parseInt(unwrap(output.parsingTime))).toBeLessThan(500); // reported as ms, generous timeout for ci runner
});
});

@@ -205,7 +208,7 @@ describe('ASM parser z88dk', () => {
let parser: AsmParserZ88dk;
const filters = {};

before(() => {
beforeAll(() => {
parser = new AsmParserZ88dk(undefined as any);
});

@@ -226,6 +229,6 @@ ${' '.repeat(65530)}x
ret
`;
const output = parser.process(asm, filters);
parseInt(unwrap(output.parsingTime)).should.be.lessThan(500); // reported as ms, generous timeout for ci runner
expect(parseInt(unwrap(output.parsingTime))).toBeLessThan(500); // reported as ms, generous timeout for ci runner
});
});
@@ -26,6 +26,7 @@ import './utils.js';
import {DescribeInstancesCommand, EC2, Instance} from '@aws-sdk/client-ec2';
import {GetParametersCommand, SSM} from '@aws-sdk/client-ssm';
import {mockClient} from 'aws-sdk-client-mock';
import {beforeEach, describe, expect, it} from 'vitest';

import * as aws from '../lib/aws.js';

@@ -74,24 +75,24 @@ describe('AWS instance fetcher tests', () => {
],
});
});
it('Fetches Bob', () => {
it('Fetches Bob', async () => {
const fakeProps = {
region: 'not-a-region',
tagKey: 'Name',
tagValue: 'Bob',
};
const fetcher = new aws.InstanceFetcher(prop => fakeProps[prop]);
return fetcher.getInstances().should.eventually.deep.equal([instanceC]);
await expect(fetcher.getInstances()).resolves.toEqual([instanceC]);
});

it('Ignores sleeping nodes', () => {
it('Ignores sleeping nodes', async () => {
const fakeProps = {
region: 'not-a-region',
tagKey: 'Name',
tagValue: 'Alice',
};
const fetcher = new aws.InstanceFetcher(prop => fakeProps[prop]);
return fetcher.getInstances().should.eventually.deep.equal([instanceA, instanceD]);
await expect(fetcher.getInstances()).resolves.toEqual([instanceA, instanceD]);
});
});

@@ -112,30 +113,24 @@ describe('AWS config tests', () => {
],
});
});
it("Doesn't fetch unless region is configured", () => {
it("Doesn't fetch unless region is configured", async () => {
const fakeProps = {
region: '',
configValue: 'fromConfigFile',
};
return aws
.initConfig(prop => fakeProps[prop])
.then(() => {
aws.getConfig('configValue').should.equal('fromConfigFile');
});
await aws.initConfig(prop => fakeProps[prop]);
expect(aws.getConfig('configValue')).toEqual('fromConfigFile');
});

it('Gets results from SSM, falling back to config if needed', () => {
it('Gets results from SSM, falling back to config if needed', async () => {
const fakeProps = {
region: 'a non-empty region',
configValue: 'fromConfigFile',
notInAmazon: 'yay',
};
return aws
.initConfig(prop => fakeProps[prop])
.then(() => {
aws.getConfig('configValue').should.equal('fromAws');
aws.getConfig('onlyOnAws').should.equal('bibble');
aws.getConfig('notInAmazon').should.equal('yay');
});
await aws.initConfig(prop => fakeProps[prop]);
expect(aws.getConfig('configValue')).toEqual('fromAws');
expect(aws.getConfig('onlyOnAws')).toEqual('bibble');
expect(aws.getConfig('notInAmazon')).toEqual('yay');
});
});
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {BaseCompiler} from '../lib/base-compiler.js';
import {BuildEnvSetupBase} from '../lib/buildenvsetup/index.js';
import {CompilationEnvironment} from '../lib/compilation-env.js';
@@ -35,7 +37,6 @@ import {
makeFakeCompilerInfo,
makeFakeParseFiltersAndOutputOptions,
path,
should,
shouldExist,
} from './utils.js';

@@ -43,7 +44,7 @@ const languages = {
'c++': {id: 'c++'},
} as const;

describe('Basic compiler invariants', function () {
describe('Basic compiler invariants', () => {
let ce: CompilationEnvironment;
let compiler: BaseCompiler;

@@ -58,25 +59,25 @@ describe('Basic compiler invariants', function () {
ldPath: [],
};

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({languages});
compiler = new BaseCompiler(info as CompilerInfo, ce);
});

it('should recognize when optOutput has been request', () => {
compiler.optOutputRequested(['please', 'recognize', '-fsave-optimization-record']).should.equal(true);
compiler.optOutputRequested(['please', "don't", 'recognize']).should.equal(false);
expect(compiler.optOutputRequested(['please', 'recognize', '-fsave-optimization-record'])).toBe(true);
expect(compiler.optOutputRequested(['please', "don't", 'recognize'])).toBe(false);
});
it('should allow comments next to includes (Bug #874)', () => {
should.equal(compiler.checkSource('#include <cmath> // std::(sin, cos, ...)'), null);
expect(compiler.checkSource('#include <cmath> // std::(sin, cos, ...)')).toBeNull();
const badSource = compiler.checkSource('#include </dev/null..> //Muehehehe');
if (shouldExist(badSource)) {
badSource.should.equal('<stdin>:1:1: no absolute or relative includes please');
expect(badSource).toEqual('<stdin>:1:1: no absolute or relative includes please');
}
});
it('should not warn of path-likes outside C++ includes (Bug #3045)', () => {
function testIncludeG(text: string) {
should.equal(compiler.checkSource(text), null);
expect(compiler.checkSource(text)).toBeNull();
}
testIncludeG('#include <iostream>');
testIncludeG('#include <iostream> // <..>');
@@ -86,7 +87,7 @@ describe('Basic compiler invariants', function () {
});
it('should not allow path C++ includes', () => {
function testIncludeNotG(text: string) {
should.equal(compiler.checkSource(text), '<stdin>:1:1: no absolute or relative includes please');
expect(compiler.checkSource(text)).toEqual('<stdin>:1:1: no absolute or relative includes please');
}
testIncludeNotG('#include <./.bashrc>');
testIncludeNotG('#include </dev/null> // <..>');
@@ -97,11 +98,11 @@ describe('Basic compiler invariants', function () {
const newConfig: Partial<CompilerInfo> = {...info, explicitVersion: '123'};
const forcedVersionCompiler = new BaseCompiler(newConfig as CompilerInfo, ce);
const result = await forcedVersionCompiler.getVersion();
result.stdout.should.deep.equal(['123']);
expect(result.stdout).toEqual(['123']);
});
});

describe('Compiler execution', function () {
describe('Compiler execution', () => {
let ce: CompilationEnvironment;
let compiler: BaseCompiler;
let compilerNoExec: BaseCompiler;
@@ -156,7 +157,7 @@ describe('Compiler execution', function () {
options: '--hello-abc -I"/opt/some thing 1.0/include"',
});

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({languages});
compiler = new BaseCompiler(executingCompilerInfo, ce);
win32compiler = new Win32Compiler(win32CompilerInfo, ce);
@@ -168,7 +169,7 @@ describe('Compiler execution', function () {
function stubOutCallToExec(execStub, compiler, content, result, nthCall) {
execStub.onCall(nthCall || 0).callsFake((compiler, args) => {
const minusO = args.indexOf('-o');
minusO.should.be.gte(0);
expect(minusO).toBeGreaterThanOrEqual(0);
const output = args[minusO + 1];
// Maybe we should mock out the FS too; but that requires a lot more work.
fs.writeFileSync(output, content);
@@ -194,7 +195,7 @@ describe('Compiler execution', function () {
libraries,
[],
);
args.should.deep.equal([
expect(args).toEqual([
'-g',
'-o',
'example.s',
@@ -223,7 +224,7 @@ describe('Compiler execution', function () {
libraries,
[],
);
win32args.should.deep.equal([
expect(win32args).toEqual([
'/nologo',
'/FA',
'/c',
@@ -239,19 +240,19 @@ describe('Compiler execution', function () {

it('buildenv should handle spaces correctly', () => {
const buildenv = new BuildEnvSetupBase(executingCompilerInfo, ce);
buildenv.getCompilerArch().should.equal('magic 8bit');
expect(buildenv.getCompilerArch()).toEqual('magic 8bit');
});

it('buildenv compiler without target/march', () => {
const buildenv = new BuildEnvSetupBase(noExecuteSupportCompilerInfo, ce);
buildenv.getCompilerArch().should.equal(false);
buildenv.compilerSupportsX86.should.equal(true);
expect(buildenv.getCompilerArch()).toBe(false);
expect(buildenv.compilerSupportsX86).toBe(true);
});

it('buildenv compiler without target/march but with options', () => {
const buildenv = new BuildEnvSetupBase(someOptionsCompilerInfo, ce);
buildenv.getCompilerArch().should.equal(false);
buildenv.compilerSupportsX86.should.equal(true);
expect(buildenv.getCompilerArch()).toBe(false);
expect(buildenv.compilerSupportsX86).toBe(true);
});

it('compiler overrides should be sanitized', () => {
@@ -281,10 +282,10 @@ describe('Compiler execution', function () {

compiler.applyOverridesToExecOptions(execOptions, sanitized);

Object.keys(execOptions.env).should.include('SOMEVAR');
execOptions.env['SOMEVAR'].should.equal('123');
Object.keys(execOptions.env).should.not.include('LD_PRELOAD');
Object.keys(execOptions.env).should.not.include('ABC$#%@6@5');
expect(execOptions.env).toHaveProperty('SOMEVAR');
expect(execOptions.env['SOMEVAR']).toEqual('123');
expect(execOptions.env).not.toHaveProperty('LD_PRELOAD');
expect(execOptions.env).not.toHaveProperty('ABC$#%@6@5');
});

// it('should compile', async () => {
@@ -632,8 +633,7 @@ Args: []
const dirPath = await compiler.newTempDir();
const optPath = path.join(dirPath, 'temp.out');
await fs.writeFile(optPath, test);
const a = await compiler.processOptOutput(optPath);
a.should.deep.equal([
expect(await compiler.processOptOutput(optPath)).toEqual([
{
Args: [],
DebugLoc: {Column: 21, File: 'example.cpp', Line: 4},
@@ -648,81 +648,39 @@ Args: []

it('should normalize extra file path', () => {
const withDemangler = {...noExecuteSupportCompilerInfo, demangler: 'demangler-exe', demanglerType: 'cpp'};
const compiler = new BaseCompiler(withDemangler, ce);
const compiler = new BaseCompiler(withDemangler, ce) as any; // to get to the protected...
if (process.platform === 'win32') {
(compiler as any)
.getExtraFilepath('c:/tmp/somefolder', 'test.h')
.should.equal('c:\\tmp\\somefolder\\test.h');
expect(compiler.getExtraFilepath('c:/tmp/somefolder', 'test.h')).toEqual('c:\\tmp\\somefolder\\test.h');
} else {
(compiler as any).getExtraFilepath('/tmp/somefolder', 'test.h').should.equal('/tmp/somefolder/test.h');
expect(compiler.getExtraFilepath('/tmp/somefolder', 'test.h')).toEqual('/tmp/somefolder/test.h');
}

try {
(compiler as any).getExtraFilepath('/tmp/somefolder', '../test.h');
throw 'Should throw exception 1';
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
}
expect(() => compiler.getExtraFilepath('/tmp/somefolder', '../test.h')).toThrow(Error);
expect(() => compiler.getExtraFilepath('/tmp/somefolder', './../test.h')).toThrow(Error);

try {
(compiler as any).getExtraFilepath('/tmp/somefolder', './../test.h');
throw 'Should throw exception 2';
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
}

try {
(compiler as any)
.getExtraFilepath('/tmp/somefolder', '/tmp/someotherfolder/test.h')
.should.equal('/tmp/somefolder/tmp/someotherfolder/test.h');
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
}

try {
(compiler as any).getExtraFilepath('/tmp/somefolder', '\\test.h').should.equal('/tmp/somefolder/test.h');
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
}

try {
(compiler as any).getExtraFilepath('/tmp/somefolder', 'test_hello/../../etc/passwd');
throw 'Should throw exception 5';
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
}
expect(compiler.getExtraFilepath('/tmp/somefolder', '/tmp/someotherfolder/test.h')).toEqual(
'/tmp/somefolder/tmp/someotherfolder/test.h',
);

if (process.platform === 'win32') {
(compiler as any)
.getExtraFilepath('c:/tmp/somefolder', 'test.txt')
.should.equal('c:\\tmp\\somefolder\\test.txt');
} else {
(compiler as any).getExtraFilepath('/tmp/somefolder', 'test.txt').should.equal('/tmp/somefolder/test.txt');
expect(compiler.getExtraFilepath('/tmp/somefolder', '\\test.h')).toEqual('/tmp/somefolder/test.h');
}

try {
(compiler as any)
.getExtraFilepath('/tmp/somefolder', 'subfolder/hello.h')
.should.equal('/tmp/somefolder/subfolder/hello.h');
} catch (error) {
if (!(error instanceof Error)) {
throw error;
}
expect(() => compiler.getExtraFilepath('/tmp/somefolder', 'test_hello/../../etc/passwd')).toThrow(Error);

if (process.platform === 'win32') {
expect(compiler.getExtraFilepath('c:/tmp/somefolder', 'test.txt')).toEqual('c:\\tmp\\somefolder\\test.txt');
} else {
expect(compiler.getExtraFilepath('/tmp/somefolder', 'test.txt')).toEqual('/tmp/somefolder/test.txt');
}

expect(compiler.getExtraFilepath('/tmp/somefolder', 'subfolder/hello.h')).toEqual(
'/tmp/somefolder/subfolder/hello.h',
);
});
});

describe('getDefaultExecOptions', function () {
describe('getDefaultExecOptions', () => {
let ce: CompilationEnvironment;

const noExecuteSupportCompilerInfo = makeFakeCompilerInfo({
@@ -737,7 +695,7 @@ describe('getDefaultExecOptions', function () {
extraPath: ['/tmp/p1', '/tmp/p2'],
});

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({
languages,
props: {
@@ -750,9 +708,9 @@ describe('getDefaultExecOptions', function () {
it('Have all the paths', () => {
const compiler = new BaseCompiler(noExecuteSupportCompilerInfo, ce);
const options = compiler.getDefaultExecOptions();
Object.keys(options.env).should.include('PATH');
expect(options.env).toHaveProperty('PATH');

const paths = options.env.PATH.split(path.delimiter);
paths.should.deep.equal(['/usr/local/ninja', '/tmp/p1', '/tmp/p2']);
expect(paths).toEqual(['/usr/local/ninja', '/tmp/p1', '/tmp/p2']);
});
});
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {BaseFormatter} from '../lib/formatters/base.js';

class Formatter extends BaseFormatter {}
@@ -35,8 +37,8 @@ describe('Basic formatter functionality', () => {
type: 'foofmt',
version: 'foobar-format 1.0.0',
});
fmt.isValidStyle('foostyle').should.equal(false);
fmt.formatterInfo.styles.should.deep.equal([]);
expect(fmt.isValidStyle('foostyle')).toBe(false);
expect(fmt.formatterInfo.styles).toEqual([]);
});

it('should return an array of args for formatters with styles', () => {
@@ -47,7 +49,7 @@ describe('Basic formatter functionality', () => {
type: 'foofmt',
version: 'foobar-format 1.0.0',
});
fmt.isValidStyle('foostyle').should.equal(true);
fmt.formatterInfo.styles.should.deep.equal(['foostyle']);
expect(fmt.isValidStyle('foostyle')).toBe(true);
expect(fmt.formatterInfo.styles).toEqual(['foostyle']);
});
});
@@ -27,6 +27,7 @@ import {Readable} from 'stream';
|
||||
import {GetObjectCommand, NoSuchKey, PutObjectCommand, S3} from '@aws-sdk/client-s3';
|
||||
import {sdkStreamMixin} from '@smithy/util-stream';
|
||||
import {AwsClientStub, mockClient} from 'aws-sdk-client-mock';
|
||||
import {beforeEach, describe, expect, it} from 'vitest';
|
||||
|
||||
import {BaseCache} from '../lib/cache/base.js';
|
||||
import {createCacheFromConfig} from '../lib/cache/from-config.js';
|
||||
@@ -36,55 +37,38 @@ import {NullCache} from '../lib/cache/null.js';
|
||||
import {OnDiskCache} from '../lib/cache/on-disk.js';
|
||||
import {S3Cache} from '../lib/cache/s3.js';
|
||||
|
||||
import {fs, newTempDir, path, shouldExist} from './utils.js';
|
||||
import {fs, newTempDir, path} from './utils.js';
|
||||
|
||||
function basicTests(factory: () => BaseCache) {
|
||||
it('should start empty', () => {
|
||||
it('should start empty', async () => {
|
||||
const cache = factory();
|
||||
cache.stats().should.eql({hits: 0, puts: 0, gets: 0});
|
||||
return cache
|
||||
.get('not a key')
|
||||
.should.eventually.contain({hit: false})
|
||||
.then(x => {
|
||||
cache.stats().should.eql({hits: 0, puts: 0, gets: 1});
|
||||
return x;
|
||||
});
|
||||
expect(cache.stats()).toEqual({hits: 0, puts: 0, gets: 0});
|
||||
await expect(cache.get('not a key')).resolves.toHaveProperty('hit', false);
|
||||
expect(cache.stats()).toEqual({hits: 0, puts: 0, gets: 1});
|
||||
});
|
||||
|
||||
it('should store and retrieve strings', () => {
|
||||
it('should store and retrieve strings', async () => {
|
||||
const cache = factory();
|
||||
return cache
|
||||
.put('a key', 'a value', 'bob')
|
||||
.then(() => {
|
||||
cache.stats().should.eql({hits: 0, puts: 1, gets: 0});
|
||||
return cache.get('a key').should.eventually.eql({
|
||||
hit: true,
|
||||
data: Buffer.from('a value'),
|
||||
});
|
||||
})
|
||||
.then(x => {
|
||||
cache.stats().should.eql({hits: 1, puts: 1, gets: 1});
|
||||
return x;
|
||||
});
|
||||
await cache.put('a key', 'a value', 'bob');
|
||||
expect(cache.stats()).toEqual({hits: 0, puts: 1, gets: 0});
|
||||
await expect(cache.get('a key')).resolves.toEqual({
|
||||
hit: true,
|
||||
data: Buffer.from('a value'),
|
||||
});
|
||||
expect(cache.stats()).toEqual({hits: 1, puts: 1, gets: 1});
|
||||
});
|
||||
|
||||
it('should store and retrieve binary buffers', () => {
|
||||
it('should store and retrieve binary buffers', async () => {
|
||||
const cache = factory();
|
||||
const buffer = Buffer.alloc(2 * 1024);
|
||||
buffer.fill('@');
|
||||
return cache
|
||||
.put('a key', buffer, 'bob')
|
||||
.then(() => {
|
||||
cache.stats().should.eql({hits: 0, puts: 1, gets: 0});
|
||||
return cache.get('a key').should.eventually.eql({
|
||||
hit: true,
|
||||
data: buffer,
|
||||
});
|
||||
})
|
||||
.then(x => {
|
||||
cache.stats().should.eql({hits: 1, puts: 1, gets: 1});
|
||||
return x;
|
||||
});
|
||||
await cache.put('a key', buffer, 'bob');
|
||||
expect(cache.stats()).toEqual({hits: 0, puts: 1, gets: 0});
|
||||
await expect(cache.get('a key')).resolves.toEqual({
|
||||
hit: true,
|
||||
data: buffer,
|
||||
});
|
||||
expect(cache.stats()).toEqual({hits: 1, puts: 1, gets: 1});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -92,26 +76,21 @@ describe('In-memory caches', () => {
|
||||
basicTests(() => new InMemoryCache('test', 10));
|
||||
it('should give extra stats', () => {
|
||||
const cache = new InMemoryCache('test', 1);
|
||||
cache
|
||||
.statString()
|
||||
.should.equal('0 puts; 0 gets, 0 hits, 0 misses (0.00%), LRU has 0 item(s) totalling 0 bytes');
|
||||
expect(cache.statString()).toEqual(
|
||||
'0 puts; 0 gets, 0 hits, 0 misses (0.00%), LRU has 0 item(s) totalling 0 bytes',
|
||||
);
|
||||
});
|
||||
|
||||
it('should evict old objects', () => {
|
||||
it('should evict old objects', async () => {
|
||||
const cache = new InMemoryCache('test', 1);
|
||||
return cache
|
||||
.put('a key', 'a value', 'bob')
|
||||
.then(() => {
|
||||
const promises: Promise<void>[] = [];
|
||||
const oneK = ''.padEnd(1024);
|
||||
for (let i = 0; i < 1024; i++) {
|
||||
promises.push(cache.put(`key${i}`, oneK));
|
||||
}
|
||||
return Promise.all(promises);
|
||||
})
|
||||
.then(() => {
|
||||
return cache.get('a key').should.eventually.contain({hit: false});
|
||||
});
|
||||
await cache.put('a key', 'a value', 'bob');
|
||||
const promises: Promise<void>[] = [];
|
||||
const oneK = ''.padEnd(1024);
|
||||
for (let i = 0; i < 1024; i++) {
|
||||
promises.push(cache.put(`key${i}`, oneK));
|
||||
}
|
||||
await Promise.all(promises);
|
||||
await expect(cache.get('a key')).resolves.toHaveProperty('hit', false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -126,17 +105,14 @@ describe('Multi caches', () => {
|
||||
),
|
||||
);
|
||||
|
||||
it('should write through', () => {
|
||||
it('should write through', async () => {
|
||||
const subCache1 = new InMemoryCache('test', 1);
|
||||
const subCache2 = new InMemoryCache('test', 1);
|
||||
const cache = new MultiCache('test', subCache1, subCache2);
|
||||
return cache.put('a key', 'a value', 'bob').then(() => {
|
||||
return Promise.all([
|
||||
cache.get('a key').should.eventually.eql({hit: true, data: Buffer.from('a value')}),
|
||||
subCache1.get('a key').should.eventually.eql({hit: true, data: Buffer.from('a value')}),
|
||||
subCache2.get('a key').should.eventually.eql({hit: true, data: Buffer.from('a value')}),
|
||||
]);
|
||||
});
|
||||
await cache.put('a key', 'a value', 'bob');
|
||||
await expect(cache.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('a value')});
|
||||
await expect(subCache1.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('a value')});
|
||||
await expect(subCache2.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('a value')});
|
||||
});
|
||||
|
||||
it('services from the first cache hit', async () => {
|
||||
@@ -146,51 +122,44 @@ describe('Multi caches', () => {
|
||||
await subCache1.put('a key', 'cache1');
|
||||
await subCache2.put('a key', 'cache2');
|
||||
const cache = new MultiCache('test', subCache1, subCache2);
|
||||
await cache.get('a key').should.eventually.eql({hit: true, data: Buffer.from('cache1')});
|
||||
await expect(cache.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('cache1')});
|
||||
|
||||
subCache1.hits.should.equal(1);
|
||||
subCache1.gets.should.equal(1);
|
||||
subCache2.hits.should.equal(0);
|
||||
subCache2.gets.should.equal(0);
|
||||
expect(subCache1.hits).toEqual(1);
|
||||
expect(subCache1.gets).toEqual(1);
|
||||
expect(subCache2.hits).toEqual(0);
|
||||
expect(subCache2.gets).toEqual(0);
|
||||
|
||||
await subCache1.get('a key').should.eventually.eql({hit: true, data: Buffer.from('cache1')});
|
||||
await subCache2.get('a key').should.eventually.eql({hit: true, data: Buffer.from('cache2')});
|
||||
await expect(subCache1.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('cache1')});
|
||||
await expect(subCache2.get('a key')).resolves.toEqual({hit: true, data: Buffer.from('cache2')});
|
||||
});
|
||||
});
|
||||
|
||||
describe('On disk caches', () => {
|
||||
basicTests(() => new OnDiskCache('test', newTempDir(), 10));
|
||||
it('should evict old objects', () => {
|
||||
it('should evict old objects', async () => {
|
||||
const tempDir = newTempDir();
|
||||
const cache = new OnDiskCache('test', tempDir, 1);
|
||||
return cache
|
||||
.put('a key', 'a value', 'bob')
|
||||
.then(() => {
|
||||
const promises: Promise<void>[] = [];
|
||||
const oneHundredK = ''.padEnd(1024 * 100);
|
||||
for (let i = 0; i < 12; i++) {
|
||||
promises.push(cache.put(`key${i}`, oneHundredK));
|
||||
}
|
||||
return Promise.all(promises);
|
||||
})
|
||||
.then(() => {
|
||||
return cache.get('a key').should.eventually.contain({hit: false});
|
||||
});
|
||||
await cache.put('a key', 'a value', 'bob');
|
||||
const promises: Promise<void>[] = [];
|
||||
const oneHundredK = ''.padEnd(1024 * 100);
|
||||
for (let i = 0; i < 12; i++) {
|
||||
promises.push(cache.put(`key${i}`, oneHundredK));
|
||||
}
|
||||
await Promise.all(promises);
|
||||
await expect(cache.get('a key')).resolves.toHaveProperty('hit', false);
|
||||
});
|
||||
|
||||
it('should handle existing data', () => {
|
||||
it('should handle existing data', async () => {
|
||||
const tempDir = newTempDir();
|
||||
fs.writeFileSync(path.join(tempDir, 'abcdef'), 'this is abcdef');
|
||||
fs.mkdirSync(path.join(tempDir, 'path'));
|
||||
fs.writeFileSync(path.join(tempDir, 'path', 'test'), 'this is path/test');
|
||||
const cache = new OnDiskCache('test', tempDir, 1);
|
||||
return Promise.all([
|
||||
cache.get('abcdef').should.eventually.eql({hit: true, data: Buffer.from('this is abcdef')}),
|
||||
cache.get(path.join('path', 'test')).should.eventually.eql({
hit: true,
data: Buffer.from('this is path/test'),
}),
]);
await expect(cache.get('abcdef')).resolves.toEqual({hit: true, data: Buffer.from('this is abcdef')});
await expect(cache.get(path.join('path', 'test'))).resolves.toEqual({
hit: true,
data: Buffer.from('this is path/test'),
});
});

// MRG ideally handle the case of pre-populated stuff overflowing the size
@@ -223,23 +192,16 @@ describe('S3 tests', () => {
});
basicTests(() => new S3Cache('test', 'test.bucket', 'cache', 'uk-north-1'));

it('should correctly handle errors', () => {
it('should correctly handle errors', async () => {
mockS3.on(GetObjectCommand, {Bucket: 'test.bucket'}).rejects('Some s3 error');
let err: Error | null = null;
let err: Error = new Error('not an error');
const cache = new S3Cache('test', 'test.bucket', 'cache', 'uk-north-1', (e: Error, op: string) => {
err = e;
op.should.equal('read');
expect(op).toEqual('read');
});
return cache
.get('doesntmatter')
.should.eventually.contain({hit: false})
.then(x => {
cache.stats().should.eql({hits: 0, puts: 0, gets: 1});
if (shouldExist(err)) {
err.toString().should.equal('Error: Some s3 error');
}
return x;
});
await expect(cache.get('doesntmatter')).resolves.toHaveProperty('hit', false);
expect(cache.stats()).toEqual({hits: 0, puts: 0, gets: 1});
expect(err.toString()).toEqual('Error: Some s3 error');
});

// BE VERY CAREFUL - the below can be used with sufficient permissions to test on prod (With mocks off)...
@@ -254,39 +216,39 @@ describe('Config tests', () => {
});
it('should create null cache on empty config', () => {
const cache = createCacheFromConfig('name', '');
cache.constructor.should.eql(NullCache);
cache.cacheName.should.eql('name');
expect(cache.constructor).toEqual(NullCache);
expect(cache.cacheName).toEqual('name');
});
it('should throw on bad types', () => {
(() => createCacheFromConfig('test', 'InMemory')).should.throw();
(() => createCacheFromConfig('test', 'NotAType()')).should.throw();
expect(() => createCacheFromConfig('test', 'InMemory')).toThrow();
expect(() => createCacheFromConfig('test', 'NotAType()')).toThrow();
});
it('should create in memory caches', () => {
const cache = createCacheFromConfig<InMemoryCache>('test', 'InMemory(123)');
cache.constructor.should.eql(InMemoryCache);
cache.cacheMb.should.equal(123);
(() => createCacheFromConfig('test', 'InMemory()')).should.throw();
(() => createCacheFromConfig('test', 'InMemory(argh)')).should.throw();
(() => createCacheFromConfig('test', 'InMemory(123,yibble)')).should.throw();
expect(cache.constructor).toEqual(InMemoryCache);
expect(cache.cacheMb).toEqual(123);
expect(() => createCacheFromConfig('test', 'InMemory()')).toThrow();
expect(() => createCacheFromConfig('test', 'InMemory(argh)')).toThrow();
expect(() => createCacheFromConfig('test', 'InMemory(123,yibble)')).toThrow();
});
it('should create on disk caches', () => {
const tempDir = newTempDir();
const cache = createCacheFromConfig<OnDiskCache>('test', `OnDisk(${tempDir},456)`);
cache.constructor.should.eql(OnDiskCache);
cache.path.should.equal(tempDir);
cache.cacheMb.should.equal(456);
(() => createCacheFromConfig('test', 'OnDisk()')).should.throw();
(() => createCacheFromConfig('test', 'OnDisk(argh,yibble)')).should.throw();
(() => createCacheFromConfig('test', 'OnDisk(/tmp/moo,456,blah)')).should.throw();
expect(cache.constructor).toEqual(OnDiskCache);
expect(cache.path).toEqual(tempDir);
expect(cache.cacheMb).toEqual(456);
expect(() => createCacheFromConfig('test', 'OnDisk()')).toThrow();
expect(() => createCacheFromConfig('test', 'OnDisk(argh,yibble)')).toThrow();
expect(() => createCacheFromConfig('test', 'OnDisk(/tmp/moo,456,blah)')).toThrow();
});
it('should create S3 caches', () => {
const cache = createCacheFromConfig<S3Cache>('test', `S3(test.bucket,cache,uk-north-1)`);
cache.constructor.should.eql(S3Cache);
cache.path.should.equal('cache');
cache.region.should.equal('uk-north-1');
(() => createCacheFromConfig('test', 'S3()')).should.throw();
(() => createCacheFromConfig('test', 'S3(argh,yibble)')).should.throw();
(() => createCacheFromConfig('test', 'S3(/tmp/moo,456,blah,nork)')).should.throw();
expect(cache.constructor).toEqual(S3Cache);
expect(cache.path).toEqual('cache');
expect(cache.region).toEqual('uk-north-1');
expect(() => createCacheFromConfig('test', 'S3()')).toThrow();
expect(() => createCacheFromConfig('test', 'S3(argh,yibble)')).toThrow();
expect(() => createCacheFromConfig('test', 'S3(/tmp/moo,456,blah,nork)')).toThrow();
});
it('should create multi caches', () => {
const tempDir = newTempDir();
@@ -294,15 +256,15 @@ describe('Config tests', () => {
'multi',
`InMemory(123);OnDisk(${tempDir},456);S3(test.bucket,cache,uk-north-1)`,
);
cache.constructor.should.eql(MultiCache);
expect(cache.constructor).toEqual(MultiCache);

const upstream: BaseCache[] = (cache as any).upstream; // This isn't pretty. upstream is private.
upstream.length.should.equal(3);
upstream[0].constructor.should.eql(InMemoryCache);
upstream[1].constructor.should.eql(OnDiskCache);
upstream[2].constructor.should.eql(S3Cache);
upstream[0].cacheName.should.eql('multi');
upstream[1].cacheName.should.eql('multi');
upstream[2].cacheName.should.eql('multi');
expect(upstream.length).toEqual(3);
expect(upstream[0].constructor).toEqual(InMemoryCache);
expect(upstream[1].constructor).toEqual(OnDiskCache);
expect(upstream[2].constructor).toEqual(S3Cache);
expect(upstream[0].cacheName).toEqual('multi');
expect(upstream[1].cacheName).toEqual('multi');
expect(upstream[2].cacheName).toEqual('multi');
});
});
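The hunks above swap chai / chai-as-promised chains (`.should.eventually.eql(...)`, `(() => ...).should.throw()`) for vitest's `expect`. Below is a minimal, self-contained sketch of those two patterns; the `FakeCache` class is illustrative only, not the real S3 or config cache.

```ts
// Sketch only: a stand-in cache, not the project's S3Cache/createCacheFromConfig.
import {describe, expect, it} from 'vitest';

class FakeCache {
    private store = new Map<string, string>();
    async put(key: string, value: string) {
        this.store.set(key, value);
    }
    async get(key: string) {
        return {hit: this.store.has(key), data: this.store.get(key)};
    }
}

describe('assertion patterns', () => {
    it('awaits promises instead of chaining should.eventually', async () => {
        const cache = new FakeCache();
        await cache.put('abcdef', 'this is abcdef');
        await expect(cache.get('abcdef')).resolves.toEqual({hit: true, data: 'this is abcdef'});
    });
    it('uses expect(fn).toThrow() in place of should.throw()', () => {
        expect(() => JSON.parse('not json')).toThrow();
    });
});
```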
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import * as cfg from '../lib/cfg/cfg.js';

import {fs, makeFakeCompilerInfo, path, resolvePathFromTestRoot} from './utils.js';
@@ -36,7 +38,7 @@ async function DoCfgTest(cfgArg, filename, isLlvmIr = false) {
contents.asm,
isLlvmIr,
);
structure.should.deep.equal(contents.cfg);
expect(structure).toEqual(contents.cfg);
}

describe('Cfg test cases', () => {

@@ -25,15 +25,15 @@
import fs from 'fs';
import path from 'path';

import {assert} from 'chai';
import {describe, expect, it} from 'vitest';

import {languages} from '../lib/languages.js';

const img_dir = path.resolve('views/resources/logos');

function checkImage(logo) {
function checkImage(logo: string) {
const logoPath = path.join(img_dir, logo);
assert.isTrue(fs.existsSync(logoPath), `${logoPath} logo missing`);
expect(fs.existsSync(logoPath)).toBe(true);
}

describe('Language logo check', () => {

@@ -23,6 +23,7 @@
// POSSIBILITY OF SUCH DAMAGE.

import _ from 'underscore';
import {afterAll, beforeAll, describe, expect, it} from 'vitest';

import {unwrap} from '../lib/assert.js';
import {languages} from '../lib/languages.js';
@@ -34,12 +35,12 @@ describe('Live site checks', () => {
let ceProps;
let compilerProps;

before(() => {
beforeAll(() => {
properties.initialize('etc/config/', ['amazon']);
ceProps = properties.propsFor('compiler-explorer');
compilerProps = new properties.CompilerProps(languages, ceProps);
});
after(() => {
afterAll(() => {
properties.reset();
});

@@ -65,9 +66,7 @@ describe('Live site checks', () => {
differences[lang] = difference;
}
});
differences.should.be.eql(
{},
'One or more defined libraries are not listed on their corresponding language libs property array',
);
// One or more defined libraries are not listed on their corresponding language libs property array
expect(differences).toEqual({});
});
});
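The live-site hunk above replaces mocha's implicit `before`/`after` globals with vitest's explicitly imported `beforeAll`/`afterAll`. A small sketch of that lifecycle, using a made-up `connection` resource rather than the real properties setup:

```ts
// Sketch only: vitest has no implicit test globals by default, so hooks are imported.
import {afterAll, beforeAll, describe, expect, it} from 'vitest';

let connection: {open: boolean} | null = null;

describe('lifecycle hooks', () => {
    beforeAll(() => {
        // runs once before the suite, like mocha's before()
        connection = {open: true};
    });
    afterAll(() => {
        // runs once after the suite, like mocha's after()
        connection = null;
    });
    it('sees the state set up in beforeAll', () => {
        expect(connection).toEqual({open: true});
    });
});
```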
@@ -22,34 +22,38 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {addDigitSeparator, escapeHTML} from '../shared/common-utils.js';

describe('HTML Escape Test Cases', () => {
it('should prevent basic injection', () => {
escapeHTML("<script>alert('hi');</script>").should.equal(`<script>alert('hi');</script>`);
expect(escapeHTML("<script>alert('hi');</script>")).toEqual(
`<script>alert('hi');</script>`,
);
});
it('should prevent tag injection', () => {
escapeHTML('\'"`>').should.equal(`'"`>`);
expect(escapeHTML('\'"`>')).toEqual(`'"`>`);
});
});

describe('digit separator', () => {
it('handles short numbers', () => {
addDigitSeparator('42', '_', 3).should.equal('42');
expect(addDigitSeparator('42', '_', 3)).toEqual('42');
});
it('handles long numbers', () => {
addDigitSeparator('1234', '_', 3).should.equal('1_234');
addDigitSeparator('123456789', "'", 3).should.equal("123'456'789");
addDigitSeparator('1234567890', "'", 3).should.equal("1'234'567'890");
expect(addDigitSeparator('1234', '_', 3)).toEqual('1_234');
expect(addDigitSeparator('123456789', "'", 3)).toEqual("123'456'789");
expect(addDigitSeparator('1234567890', "'", 3)).toEqual("1'234'567'890");
});
it('handles hex numbers', () => {
addDigitSeparator('AABBCCDD12345678', '_', 4).should.equal('AABB_CCDD_1234_5678');
addDigitSeparator('01AABBCCDD12345678', '_', 4).should.equal('01_AABB_CCDD_1234_5678');
expect(addDigitSeparator('AABBCCDD12345678', '_', 4)).toEqual('AABB_CCDD_1234_5678');
expect(addDigitSeparator('01AABBCCDD12345678', '_', 4)).toEqual('01_AABB_CCDD_1234_5678');
});
it('handles negative numbers', () => {
addDigitSeparator('-42', '_', 3).should.equal('-42');
addDigitSeparator('-420', '_', 3).should.equal('-420');
addDigitSeparator('-4200', '_', 3).should.equal('-4_200');
addDigitSeparator('-123456789', '_', 3).should.equal('-123_456_789');
expect(addDigitSeparator('-42', '_', 3)).toEqual('-42');
expect(addDigitSeparator('-420', '_', 3)).toEqual('-420');
expect(addDigitSeparator('-4200', '_', 3)).toEqual('-4_200');
expect(addDigitSeparator('-123456789', '_', 3)).toEqual('-123_456_789');
});
});

@@ -23,6 +23,8 @@
// POSSIBILITY OF SUCH DAMAGE.

import './utils.js';
import {beforeAll, describe, expect, it} from 'vitest';

import {CompilationEnvironment} from '../lib/compilation-env.js';
import {CompilerProps, fakeProps} from '../lib/properties.js';

@@ -35,41 +37,35 @@ const props = {
describe('Compilation environment', () => {
let compilerProps;

before(() => {
beforeAll(() => {
compilerProps = new CompilerProps({}, fakeProps(props));
});

it('Should cache by default', () => {
it('Should cache by default', async () => {
// TODO: Work will need to be done here when CompilationEnvironment's constructor is typed better
const ce = new CompilationEnvironment(compilerProps, undefined, undefined);
return ce
.cacheGet('foo')
.should.eventually.equal(null)
.then(() => ce.cachePut('foo', {res: 'bar'}, undefined))
.then(() => ce.cacheGet('foo').should.eventually.eql({res: 'bar'}))
.then(() => ce.cacheGet('baz').should.eventually.equal(null));
await expect(ce.cacheGet('foo')).resolves.toBeNull();
await ce.cachePut('foo', {res: 'bar'}, undefined);
await expect(ce.cacheGet('foo')).resolves.toEqual({res: 'bar'});
await expect(ce.cacheGet('baz')).resolves.toBeNull();
});
it('Should cache when asked', () => {
it('Should cache when asked', async () => {
const ce = new CompilationEnvironment(compilerProps, undefined, true);
return ce
.cacheGet('foo')
.should.eventually.equal(null)
.then(() => ce.cachePut('foo', {res: 'bar'}, undefined))
.then(() => ce.cacheGet('foo').should.eventually.eql({res: 'bar'}));
await expect(ce.cacheGet('foo')).resolves.toBeNull();
await ce.cachePut('foo', {res: 'bar'}, undefined);
await expect(ce.cacheGet('foo')).resolves.toEqual({res: 'bar'});
});
it("Shouldn't cache when asked", () => {
it("Shouldn't cache when asked", async () => {
// TODO: Work will need to be done here when CompilationEnvironment's constructor is typed better
const ce = new CompilationEnvironment(compilerProps, undefined, false);
return ce
.cacheGet('foo')
.should.eventually.equal(null)
.then(() => ce.cachePut('foo', {res: 'bar'}, undefined))
.then(() => ce.cacheGet('foo').should.eventually.equal(null));
await expect(ce.cacheGet('foo')).resolves.toBeNull();
await ce.cachePut('foo', {res: 'bar'}, undefined);
await expect(ce.cacheGet('foo')).resolves.toBeNull();
});
it('Should filter bad options', () => {
// TODO: Work will need to be done here when CompilationEnvironment's constructor is typed better
const ce = new CompilationEnvironment(compilerProps, undefined, undefined);
ce.findBadOptions(['-O3', '-flto']).should.be.empty;
ce.findBadOptions(['-O3', '-plugin']).should.eql(['-plugin']);
expect(ce.findBadOptions(['-O3', '-flto'])).toEqual([]);
expect(ce.findBadOptions(['-O3', '-plugin'])).toEqual(['-plugin']);
});
});
@@ -23,6 +23,8 @@
// POSSIBILITY OF SUCH DAMAGE.

import './utils.js';
import {beforeAll, describe, expect, it} from 'vitest';

import {CompilerFinder} from '../lib/compiler-finder.js';
import {ClientOptionsHandler} from '../lib/options-handler.js';
import * as properties from '../lib/properties.js';
@@ -87,7 +89,7 @@ const supportsLibrariesOptions = {
supportsLibraries: 'fmt:catch2.2101',
};

describe('Compiler-finder', function () {
describe('Compiler-finder', () => {
let compilerProps: properties.CompilerProps;

let noOptionsAtAllProps: properties.CompilerProps;
@@ -98,7 +100,7 @@ describe('Compiler-finder', function () {

let optionsHandler: ClientOptionsHandler;

before(() => {
beforeAll(() => {
compilerProps = new properties.CompilerProps(languages, properties.fakeProps(props));

noOptionsAtAllProps = new properties.CompilerProps(languages, properties.fakeProps(noOptionsAtAll));
@@ -118,7 +120,7 @@ describe('Compiler-finder', function () {
} as unknown as ClientOptionsHandler;
});

it('should not hang for undefined groups (Bug #860)', () => {
it('should not hang for undefined groups (Bug #860)', async () => {
const finder = new CompilerFinder(
{} as any,
compilerProps,
@@ -126,7 +128,7 @@ describe('Compiler-finder', function () {
{} as any,
optionsHandler,
);
return finder.getCompilers().should.eventually.have.lengthOf(1);
await expect(finder.getCompilers()).resolves.toHaveLength(1);
});

it('should behave properly if no options are provided at all', async () => {
@@ -138,7 +140,7 @@ describe('Compiler-finder', function () {
optionsHandler,
);
const compilers = await finder.getCompilers();
compilers[0].options.should.equal('');
expect(compilers[0].options).toEqual('');
});

it('should behave properly if no base options are provided', async () => {
@@ -150,7 +152,7 @@ describe('Compiler-finder', function () {
optionsHandler,
);
const compilers = await finder.getCompilers();
compilers[0].options.should.equal('bar');
expect(compilers[0].options).toEqual('bar');
});

it('should behave properly if only base options are provided', async () => {
@@ -162,7 +164,7 @@ describe('Compiler-finder', function () {
optionsHandler,
);
const compilers = await finder.getCompilers();
compilers[0].options.should.equal('foo');
expect(compilers[0].options).toEqual('foo');
});

it('should behave properly if both options are provided', async () => {
@@ -174,7 +176,7 @@ describe('Compiler-finder', function () {
optionsHandler,
);
const compilers = await finder.getCompilers();
compilers[0].options.should.equal('foo bar');
expect(compilers[0].options).toEqual('foo bar');
});

it('should be able to filter libraries', async () => {
@@ -187,6 +189,6 @@ describe('Compiler-finder', function () {
);
const compilers = await finder.getCompilers();
const libsArr = compilers[0].libsArr;
libsArr.should.deep.equal(['fmt', 'catch2.2101']);
expect(libsArr).toEqual(['fmt', 'catch2.2101']);
});
});
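The compiler-finder tests above assert on promises either through `resolves` or by `await`ing first and checking the value. A short sketch of both forms, with a hypothetical `findAll()` standing in for `CompilerFinder.getCompilers()`:

```ts
// Sketch only: findAll() is a made-up stand-in, not the real CompilerFinder API.
import {describe, expect, it} from 'vitest';

async function findAll(): Promise<string[]> {
    return ['g++ 13.2'];
}

describe('promise assertions', () => {
    it('checks length via resolves', async () => {
        await expect(findAll()).resolves.toHaveLength(1);
    });
    it('or awaits first and asserts on the value', async () => {
        const compilers = await findAll();
        expect(compilers[0]).toEqual('g++ 13.2');
    });
});
```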
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {CompilerArguments} from '../../lib/compiler-arguments.js';
import {
BaseParser,
@@ -33,21 +35,14 @@ import {
VCParser,
} from '../../lib/compilers/argument-parsers.js';
import {FakeCompiler} from '../../lib/compilers/fake-for-test.js';
import {makeCompilationEnvironment, should} from '../utils.js';

const languages = {
'c++': {id: 'c++'},
};

let env;

function makeCompiler(stdout, stderr, code) {
if (env === undefined) {
env = makeCompilationEnvironment({languages});
}

function makeCompiler(stdout?: string, stderr?: string, code?: number) {
if (code === undefined) code = 0;
const compiler = new FakeCompiler({lang: languages['c++'].id, remote: true}, env);
const compiler = new FakeCompiler({lang: languages['c++'].id, remote: true}) as any;
compiler.exec = () => Promise.resolve({code: code, stdout: stdout || '', stderr: stderr || ''});
compiler.execCompilerCached = compiler.exec;
compiler.possibleArguments = new CompilerArguments('g82');
@@ -57,156 +52,129 @@ function makeCompiler(stdout, stderr, code) {
describe('option parser', () => {
it('should do nothing for the base parser', () => {
const compiler = makeCompiler();
return BaseParser.parse(compiler).should.deep.equals(compiler);
expect(BaseParser.parse(compiler)).toEqual(compiler);
});
it('should handle empty options', () => {
return BaseParser.getOptions(makeCompiler()).should.eventually.deep.equals({});
it('should handle empty options', async () => {
await expect(BaseParser.getOptions(makeCompiler(), '')).resolves.toEqual({});
});
it('should parse single-dash options', () => {
return BaseParser.getOptions(makeCompiler('-foo\n')).should.eventually.deep.equals({
it('should parse single-dash options', async () => {
await expect(BaseParser.getOptions(makeCompiler('-foo\n'), '')).resolves.toEqual({
'-foo': {
description: '',
timesused: 0,
},
});
});
it('should parse double-dash options', () => {
return BaseParser.getOptions(makeCompiler('--foo\n')).should.eventually.deep.equals({
it('should parse double-dash options', async () => {
await expect(BaseParser.getOptions(makeCompiler('--foo\n'), '')).resolves.toEqual({
'--foo': {
description: '',
timesused: 0,
},
});
});
it('should parse stderr options', () => {
return BaseParser.getOptions(makeCompiler('', '--bar=monkey\n')).should.eventually.deep.equals({
it('should parse stderr options', async () => {
await expect(BaseParser.getOptions(makeCompiler('', '--bar=monkey\n'), '')).resolves.toEqual({
'--bar=monkey': {
description: '',
timesused: 0,
},
});
});
it('handles non-option text', () => {
return BaseParser.getOptions(
makeCompiler('-foo=123\nthis is a fish\n-badger=123'),
).should.eventually.deep.equals({
'-foo=123': {description: 'this is a fish', timesused: 0},
'-badger=123': {description: '', timesused: 0},
});
it('handles non-option text', async () => {
await expect(BaseParser.getOptions(makeCompiler('-foo=123\nthis is a fish\n-badger=123'), '')).resolves.toEqual(
{
'-foo=123': {description: 'this is a fish', timesused: 0},
'-badger=123': {description: '', timesused: 0},
},
);
});
it('should ignore if errors occur', () => {
return BaseParser.getOptions(makeCompiler('--foo\n', '--bar\n', 1)).should.eventually.deep.equals({});
it('should ignore if errors occur', async () => {
await expect(BaseParser.getOptions(makeCompiler('--foo\n', '--bar\n', 1), '')).resolves.toEqual({});
});
});

describe('gcc parser', () => {
it('should handle empty options', async () => {
const result = await GCCParser.parse(makeCompiler());
should.not.exist(result.compiler.supportsGccDump);
result.compiler.options.should.equals('');
expect(result.compiler).not.toHaveProperty('supportsGccDump');
expect(result.compiler.options).toEqual('');
});
it('should handle options', () => {
return GCCParser.parse(
makeCompiler('-masm=intel\n-fdiagnostics-color=[blah]\n-fdump-tree-all'),
).should.eventually.satisfy(result => {
return Promise.all([
result.compiler.supportsGccDump.should.equals(true),
result.compiler.supportsIntel.should.equals(true),
result.compiler.intelAsm.should.equals('-masm=intel'),
result.compiler.options.should.equals('-fdiagnostics-color=always'),
]);
});
it('should handle options', async () => {
const result = await GCCParser.parse(makeCompiler('-masm=intel\n-fdiagnostics-color=[blah]\n-fdump-tree-all'));
expect(result.compiler.supportsGccDump).toBe(true);
expect(result.compiler.supportsIntel).toBe(true);
expect(result.compiler.intelAsm).toEqual('-masm=intel');
expect(result.compiler.options).toEqual('-fdiagnostics-color=always');
});
it('should handle undefined options', () => {
return GCCParser.parse(makeCompiler('-fdiagnostics-color=[blah]')).should.eventually.satisfy(result => {
return Promise.all([result.compiler.options.should.equals('-fdiagnostics-color=always')]);
});
it('should handle undefined options', async () => {
const result = await GCCParser.parse(makeCompiler('-fdiagnostics-color=[blah]'));
expect(result.compiler.options).toEqual('-fdiagnostics-color=always');
});
});

describe('clang parser', () => {
it('should handle empty options', () => {
return ClangParser.parse(makeCompiler()).should.eventually.satisfy(result => {
return Promise.all([result.compiler.options.should.equals('')]);
});
it('should handle empty options', async () => {
const result = await ClangParser.parse(makeCompiler());
expect(result.compiler.options).toEqual('');
});
it('should handle options', () => {
return ClangParser.parse(
it('should handle options', async () => {
const result = await ClangParser.parse(
makeCompiler(' -fno-crash-diagnostics\n -fsave-optimization-record\n -fcolor-diagnostics'),
).should.eventually.satisfy(result => {
return Promise.all([
result.compiler.supportsOptOutput.should.equals(true),
result.compiler.optArg.should.equals('-fsave-optimization-record'),

result.compiler.options.should.include('-fcolor-diagnostics'),
result.compiler.options.should.include('-fno-crash-diagnostics'),
result.compiler.options.should.not.include('-fsave-optimization-record'),
]);
});
);
expect(result.compiler.supportsOptOutput).toBe(true);
expect(result.compiler.optArg).toEqual('-fsave-optimization-record');
expect(result.compiler.options).toContain('-fcolor-diagnostics');
expect(result.compiler.options).toContain('-fno-crash-diagnostics');
expect(result.compiler.options).not.toContain('-fsave-optimization-record');
});
});

describe('pascal parser', () => {
it('should handle empty options', () => {
return PascalParser.parse(makeCompiler()).should.eventually.satisfy(result => {
return Promise.all([result.compiler.options.should.equals('')]);
});
it('should handle empty options', async () => {
const result = await PascalParser.parse(makeCompiler());
expect(result.compiler.options).toEqual('');
});
});

describe('popular compiler arguments', () => {
let compiler;

before(() => {
beforeAll(() => {
compiler = makeCompiler(
' -fsave-optimization-record\n -x\n -g\n -fcolor-diagnostics\n -O<number> Optimization level\n -std=<c++11,c++14,c++17z>',
);
});

it('should return 5 arguments', () => {
return ClangParser.parse(compiler).then(compiler => {
return compiler.should.satisfy(compiler => {
return Promise.all([
compiler.possibleArguments.getPopularArguments().should.deep.equal({
'-O<number>': {description: 'Optimization level', timesused: 0},
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-g': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
}),
]);
});
it('should return 5 arguments', async () => {
const result = await ClangParser.parse(compiler);
expect(result.possibleArguments.getPopularArguments()).toEqual({
'-O<number>': {description: 'Optimization level', timesused: 0},
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-g': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
});
});

it('should return arguments except the ones excluded', () => {
return ClangParser.parse(compiler).then(compiler => {
return compiler.should.satisfy(compiler => {
return Promise.all([
compiler.possibleArguments.getPopularArguments(['-O3', '--hello']).should.deep.equal({
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-g': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
'-std=<c++11,c++14,c++17z>': {description: '', timesused: 0},
}),
]);
});
it('should return arguments except the ones excluded', async () => {
const result = await ClangParser.parse(compiler);
expect(result.possibleArguments.getPopularArguments(['-O3', '--hello'])).toEqual({
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-g': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
'-std=<c++11,c++14,c++17z>': {description: '', timesused: 0},
});
});

it('should be able to exclude special params with assignments', () => {
return ClangParser.parse(compiler).then(compiler => {
return compiler.should.satisfy(compiler => {
return Promise.all([
compiler.possibleArguments.getPopularArguments(['-std=c++14', '-g', '--hello']).should.deep.equal({
'-O<number>': {description: 'Optimization level', timesused: 0},
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
}),
]);
});
it('should be able to exclude special params with assignments', async () => {
const result = await ClangParser.parse(compiler);
expect(result.possibleArguments.getPopularArguments(['-std=c++14', '-g', '--hello'])).toEqual({
'-O<number>': {description: 'Optimization level', timesused: 0},
'-fcolor-diagnostics': {description: '', timesused: 0},
'-fsave-optimization-record': {description: '', timesused: 0},
'-x': {description: '', timesused: 0},
});
});
});
@@ -224,7 +192,7 @@ describe('VC argument parser', () => {
' /etc Etcetera',
];
const stdvers = VCParser.extractPossibleStdvers(lines);
stdvers.should.deep.equal([
expect(stdvers).toEqual([
{
name: 'c++14: ISO/IEC 14882:2014 (default)',
value: 'c++14',
@@ -257,7 +225,7 @@ describe('ICC argument parser', () => {
'-etc',
];
const stdvers = ICCParser.extractPossibleStdvers(lines);
stdvers.should.deep.equal([
expect(stdvers).toEqual([
{
name: 'c99: conforms to ISO/IEC 9899:1999 standard for C programs',
value: 'c99',
@@ -291,7 +259,7 @@ describe('TableGen argument parser', () => {
' --no-warn-on-unused-template-args - Disable...',
];
const actions = TableGenParser.extractPossibleActions(lines);
actions.should.deep.equal([
expect(actions).toEqual([
{name: 'gen-attrs: Generate attributes', value: '--gen-attrs'},
{name: 'print-detailed-records: Print full details...', value: '--print-detailed-records'},
{name: 'gen-x86-mnemonic-tables: Generate X86...', value: '--gen-x86-mnemonic-tables'},
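The parser hunks above drop the `.should.eventually.satisfy(...)` wrappers around a `Promise.all` of assertions in favour of a single `await` followed by ordinary `expect` calls. A sketch of that shape, with an illustrative `parseOptions()` rather than the real argument parsers:

```ts
// Sketch only: parseOptions() is a toy parser, not the project's BaseParser/ClangParser.
import {describe, expect, it} from 'vitest';

async function parseOptions(stdout: string): Promise<Record<string, {description: string}>> {
    const options: Record<string, {description: string}> = {};
    for (const line of stdout.split('\n')) {
        // treat anything starting with - or -- as an option name
        const match = line.trim().match(/^(--?\S+)/);
        if (match) options[match[1]] = {description: ''};
    }
    return options;
}

describe('awaited assertions', () => {
    it('asserts on each field after a single await', async () => {
        const result = await parseOptions('-foo\n--bar=monkey\n');
        expect(result).toHaveProperty('-foo');
        expect(result['--bar=monkey']).toEqual({description: ''});
    });
});
```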
@@ -22,10 +22,10 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {ClangCompiler} from '../../lib/compilers/index.js';
import {chai, makeCompilationEnvironment} from '../utils.js';
import {describe, expect, it} from 'vitest';

const expect = chai.expect;
import {ClangCompiler} from '../../lib/compilers/index.js';
import {makeCompilationEnvironment} from '../utils.js';

describe('clang tests', () => {
const languages = {'c++': {id: 'c++'}};
@@ -38,7 +38,7 @@ describe('clang tests', () => {
};

describe('device code...', async () => {
const clang = new ClangCompiler(info, makeCompilationEnvironment({languages}));
const clang = new ClangCompiler(info as any, makeCompilationEnvironment({languages}));
it('Should return null for non-device code', async () => {
expect(await clang.splitDeviceCode('')).to.be.null;
expect(await clang.splitDeviceCode('mov eax, 00h\nadd r0, r0, #1\n')).to.be.null;
@@ -22,12 +22,14 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {HookCompiler} from '../../lib/compilers/index.js';
import {makeCompilationEnvironment} from '../utils.js';

describe('Hook compiler', () => {
it('should return correct key', () => {
HookCompiler.key.should.equal('hook');
expect(HookCompiler.key).toEqual('hook');
});

const info = {
@@ -36,20 +38,20 @@ describe('Hook compiler', () => {
lang: 'hook',
};
const languages = {hook: {id: 'hook'}};
const hook = new HookCompiler(info, makeCompilationEnvironment({languages}));
const hook = new HookCompiler(info as any, makeCompilationEnvironment({languages}));

it('should return correct options for filter', () => {
hook.optionsForFilter().should.deep.equal(['--dump']);
expect(hook.optionsForFilter(undefined as unknown as any)).toEqual(['--dump']);
});

it('should return correct output filename', () => {
const dirPath = '/tmp';
hook.getOutputFilename(dirPath).should.equal('/tmp/example.out');
expect(hook.getOutputFilename(dirPath)).toEqual('/tmp/example.out');
});

it('should correctly add hook_home to the env', () => {
hook.addHookHome(undefined).should.deep.equal({HOOK_HOME: '/opt/hook'});
hook.addHookHome({moo: 'moo'}).should.deep.equal({moo: 'moo', HOOK_HOME: '/opt/hook'});
expect(hook.addHookHome(undefined)).toEqual({HOOK_HOME: '/opt/hook'});
expect(hook.addHookHome({moo: 'moo'})).toEqual({moo: 'moo', HOOK_HOME: '/opt/hook'});
});

it('should process and return correct bytecode result', async () => {
@@ -144,6 +146,6 @@ describe('Hook compiler', () => {
const filters = {trim: false};
const result = await hook.processAsm({asm: asm}, filters, null);
delete result.parsingTime;
result.should.deep.equal(expected);
expect(result).toEqual(expected);
});
});
@@ -22,25 +22,27 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import * as csp from '../lib/csp.js';

describe('CSP', () => {
it('Should work in the godbolt.org domain for every field', () => {
for (const value of Object.keys(csp.data)) {
csp.data[value].should.include.members(['https://*.godbolt.org', "'self'"]);
expect(csp.data[value]).toEqual(expect.arrayContaining(['https://*.godbolt.org', "'self'"]));
}
});
it('Should work in the compiler-explorer domain for every field', () => {
for (const value of Object.keys(csp.data)) {
csp.data[value].should.include.members(['https://*.compiler-explorer.com', "'self'"]);
expect(csp.data[value]).toEqual(expect.arrayContaining(['https://*.compiler-explorer.com', "'self'"]));
}
});
it('Should work in a localhost environment for every field', () => {
for (const value of Object.keys(csp.data)) {
csp.data[value].should.include.members(['localhost:*', "'self'"]);
expect(csp.data[value]).toEqual(expect.arrayContaining(['localhost:*', "'self'"]));
}
});
it('Should be a valid policy', () => {
csp.policy.should.be.a('string');
expect(csp.policy).toEqual(expect.any(String));
});
});
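The CSP hunk above leans on vitest's asymmetric matchers (`expect.arrayContaining`, `expect.any`) where chai used `include.members` and `.should.be.a('string')`. A tiny sketch with a made-up policy object:

```ts
// Sketch only: a toy policy object, not the project's csp module.
import {describe, expect, it} from 'vitest';

const policy = {
    'default-src': ["'self'", 'https://*.godbolt.org', 'localhost:*'],
};

describe('asymmetric matchers', () => {
    it('checks membership with expect.arrayContaining', () => {
        expect(policy['default-src']).toEqual(expect.arrayContaining(["'self'", 'localhost:*']));
    });
    it('checks types with expect.any', () => {
        expect(JSON.stringify(policy)).toEqual(expect.any(String));
    });
});
```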

@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {DMDCompiler} from '../lib/compilers/dmd.js';
import {LDCCompiler} from '../lib/compilers/ldc.js';
import {LanguageKey} from '../types/languages.interfaces.js';
@@ -44,24 +46,24 @@ describe('D', () => {
lang: languages.d.id,
};

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({languages});
});

it('LDC should not allow -run parameter', () => {
const compiler = new LDCCompiler(makeFakeCompilerInfo(info), ce);
compiler.filterUserOptions(['hello', '-run', '--something']).should.deep.equal(['hello', '--something']);
expect(compiler.filterUserOptions(['hello', '-run', '--something'])).toEqual(['hello', '--something']);
});
it('DMD should not allow -run parameter', () => {
const compiler = new DMDCompiler(makeFakeCompilerInfo(info), ce);
compiler.filterUserOptions(['hello', '-run', '--something']).should.deep.equal(['hello', '--something']);
expect(compiler.filterUserOptions(['hello', '-run', '--something'])).toEqual(['hello', '--something']);
});

it('LDC supports AST output since version 1.4.0', () => {
const compiler = new LDCCompiler(makeFakeCompilerInfo(info), ce);
compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.3.0)').should.equal(false);
compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.4.0)').should.equal(true);
compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.8.0git-d54d25b-dirty)').should.equal(true);
compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.10.0)').should.equal(true);
expect(compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.3.0)')).toEqual(false);
expect(compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.4.0)')).toEqual(true);
expect(compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.8.0git-d54d25b-dirty)')).toEqual(true);
expect(compiler.couldSupportASTDump('LDC - the LLVM D compiler (1.10.0)')).toEqual(true);
});
});

@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {unwrap} from '../lib/assert.js';
import {BaseCompiler} from '../lib/base-compiler.js';
import {CompilationEnvironment} from '../lib/compilation-env.js';
@@ -32,7 +34,7 @@ import * as properties from '../lib/properties.js';
import {SymbolStore} from '../lib/symbol-store.js';
import * as utils from '../lib/utils.js';

import {chai, fs, makeFakeCompilerInfo, path, resolvePathFromTestRoot} from './utils.js';
import {fs, makeFakeCompilerInfo, path, resolvePathFromTestRoot} from './utils.js';

const cppfiltpath = 'c++filt';

@@ -67,8 +69,8 @@ const catchCppfiltNonexistence = err => {
}
};

describe('Basic demangling', function () {
it('One line of asm', function () {
describe('Basic demangling', () => {
it('One line of asm', () => {
const result = {
asm: [{text: 'Hello, World!'}],
};
@@ -77,12 +79,12 @@ describe('Basic demangling', function () {

return Promise.all([
demangler.process(result).then(output => {
output.asm[0].text.should.equal('Hello, World!');
expect(output.asm[0].text).toEqual('Hello, World!');
}),
]);
});

it('One label and some asm', function () {
it('One label and some asm', () => {
const result = {asm: [{text: '_Z6squarei:'}, {text: ' ret'}]};

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);
@@ -91,14 +93,14 @@ describe('Basic demangling', function () {
demangler
.process(result)
.then(output => {
output.asm[0].text.should.equal('square(int):');
output.asm[1].text.should.equal(' ret');
expect(output.asm[0].text).toEqual('square(int):');
expect(output.asm[1].text).toEqual(' ret');
})
.catch(catchCppfiltNonexistence),
]);
});

it('One label and use of a label', function () {
it('One label and use of a label', () => {
const result = {asm: [{text: '_Z6squarei:'}, {text: ' mov eax, $_Z6squarei'}]};

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);
@@ -107,14 +109,14 @@ describe('Basic demangling', function () {
demangler
.process(result)
.then(output => {
output.asm[0].text.should.equal('square(int):');
output.asm[1].text.should.equal(' mov eax, $square(int)');
expect(output.asm[0].text).toEqual('square(int):');
expect(output.asm[1].text).toEqual(' mov eax, $square(int)');
})
.catch(catchCppfiltNonexistence),
]);
});

it('Two destructors', function () {
it('Two destructors', () => {
const result = {
asm: [
{text: '_ZN6NormalD0Ev:'},
@@ -134,14 +136,14 @@ describe('Basic demangling', function () {
return demangler
.process(result)
.then(output => {
output.asm[0].text.should.equal('Normal::~Normal() [deleting destructor]:');
output.asm[1].text.should.equal(' callq operator delete(void*)');
output.asm[6].text.should.equal(' jmp operator delete(void*, unsigned long)');
expect(output.asm[0].text).toEqual('Normal::~Normal() [deleting destructor]:');
expect(output.asm[1].text).toEqual(' callq operator delete(void*)');
expect(output.asm[6].text).toEqual(' jmp operator delete(void*, unsigned long)');
})
.catch(catchCppfiltNonexistence);
});

it('Should ignore comments (CL)', function () {
it('Should ignore comments (CL)', () => {
const result = {asm: [{text: ' call ??3@YAXPEAX_K@Z ; operator delete'}]};

const demangler = new DummyWin32Demangler(cppfiltpath, new DummyCompiler());
@@ -150,10 +152,10 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.win32RawSymbols;
unwrap(output).should.deep.equal(['??3@YAXPEAX_K@Z']);
expect(unwrap(output)).toEqual(['??3@YAXPEAX_K@Z']);
});

it('Should ignore comments (CPP)', function () {
it('Should ignore comments (CPP)', () => {
const result = {asm: [{text: ' call hello ; operator delete'}]};

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);
@@ -163,7 +165,7 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.othersymbols.listSymbols();
output.should.deep.equal(['hello']);
expect(output).toEqual(['hello']);
});

it('Should also support ARM branch instructions', () => {
@@ -176,7 +178,7 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.othersymbols.listSymbols();
output.should.deep.equal(['_ZN3FooC1Ev']);
expect(output).toEqual(['_ZN3FooC1Ev']);
});

it('Should NOT handle undecorated labels', () => {
@@ -188,10 +190,10 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.win32RawSymbols;
output?.should.deep.equal([]);
expect(output).toEqual([]);
});

it('Should ignore comments after jmps', function () {
it('Should ignore comments after jmps', () => {
const result = {asm: [{text: ' jmp _Z1fP6mytype # TAILCALL'}]};

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);
@@ -201,10 +203,10 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.othersymbols.listSymbols();
output.should.deep.equal(['_Z1fP6mytype']);
expect(output).toEqual(['_Z1fP6mytype']);
});

it('Should still work with normal jmps', function () {
it('Should still work with normal jmps', () => {
const result = {asm: [{text: ' jmp _Z1fP6mytype'}]};

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);
@@ -214,10 +216,10 @@ describe('Basic demangling', function () {
demangler.collectLabels();

const output = demangler.othersymbols.listSymbols();
output.should.deep.equal(['_Z1fP6mytype']);
expect(output).toEqual(['_Z1fP6mytype']);
});

it('Should support CUDA PTX', function () {
it('Should support CUDA PTX', () => {
const result = {
asm: [
{text: ' .visible .entry _Z6squarePii('},
@@ -236,20 +238,20 @@ describe('Basic demangling', function () {
demangler
.process(result)
.then(output => {
output.asm[0].text.should.equal(' .visible .entry square(int*, int)(');
output.asm[1].text.should.equal(' .param .u64 square(int*, int)_param_0,');
output.asm[2].text.should.equal(' ld.param.u64 %rd1, [square(int*, int)_param_0];');
output.asm[3].text.should.equal(' .func (.param .b32 func_retval0) cube(int*, int)(');
output.asm[4].text.should.equal('.global .attribute(.managed) .align 4 .b8 ns::mymanaged[16];');
output.asm[5].text.should.equal('.global .texref ns::texRef;');
output.asm[6].text.should.equal('.const .align 8 .u64 ns::mystr = generic($str);');
expect(output.asm[0].text).toEqual(' .visible .entry square(int*, int)(');
expect(output.asm[1].text).toEqual(' .param .u64 square(int*, int)_param_0,');
expect(output.asm[2].text).toEqual(' ld.param.u64 %rd1, [square(int*, int)_param_0];');
expect(output.asm[3].text).toEqual(' .func (.param .b32 func_retval0) cube(int*, int)(');
expect(output.asm[4].text).toEqual('.global .attribute(.managed) .align 4 .b8 ns::mymanaged[16];');
expect(output.asm[5].text).toEqual('.global .texref ns::texRef;');
expect(output.asm[6].text).toEqual('.const .align 8 .u64 ns::mystr = generic($str);');
})
.catch(catchCppfiltNonexistence),
]);
});
});

async function readResultFile(filename) {
async function readResultFile(filename: string) {
const data = await fs.readFile(filename);
const asm = utils.splitLines(data.toString()).map(line => {
return {text: line};
@@ -258,46 +260,40 @@ async function readResultFile(filename) {
return {asm};
}

async function DoDemangleTest(filename) {
async function DoDemangleTest(filename: string) {
const resultIn = await readResultFile(filename);
const resultOut = await readResultFile(filename + '.demangle');

const demangler = new DummyCppDemangler(cppfiltpath, new DummyCompiler(), ['-n']);

return demangler.process(resultIn).should.eventually.deep.equal(resultOut);
await expect(demangler.process(resultIn)).resolves.toEqual(resultOut);
}

describe('File demangling', () => {
if (process.platform !== 'linux') {
it('Should be skipped', done => {
done();
});
if (process.platform === 'linux') {
describe('File demangling', () => {
const testcasespath = resolvePathFromTestRoot('demangle-cases');

return;
}
/*
* NB: this readdir must *NOT* be async
*
* Mocha calls the function passed to `describe` synchronously
* and expects the test suite to be fully configured upon return.
*
* If you pass an async function to describe and setup test cases
* after an await there is no guarantee they will be found, and
* if they are they will not end up in the expected suite.
*/
const files = fs.readdirSync(testcasespath);

const testcasespath = resolvePathFromTestRoot('demangle-cases');

/*
* NB: this readdir must *NOT* be async
*
* Mocha calls the function passed to `describe` synchronously
* and expects the test suite to be fully configured upon return.
*
* If you pass an async function to describe and setup test cases
* after an await there is no guarantee they will be found, and
* if they are they will not end up in the expected suite.
*/
const files = fs.readdirSync(testcasespath);

for (const filename of files) {
if (filename.endsWith('.asm')) {
it(filename, async () => {
await DoDemangleTest(path.join(testcasespath, filename));
});
for (const filename of files) {
if (filename.endsWith('.asm')) {
it(filename, async () => {
await DoDemangleTest(path.join(testcasespath, filename));
});
}
}
}
});
});
}

describe('Demangler prefix tree', () => {
const replacements = new PrefixTree([]);
@@ -305,38 +301,40 @@ describe('Demangler prefix tree', () => {
replacements.add('aa', 'long_a');
replacements.add('aa_shouldnotmatch', 'ERROR');
it('should replace a short match', () => {
replacements.replaceAll('a').should.eq('short_a');
expect(replacements.replaceAll('a')).toEqual('short_a');
});
it('should replace using the longest match', () => {
replacements.replaceAll('aa').should.eq('long_a');
expect(replacements.replaceAll('aa')).toEqual('long_a');
});
it('should replace using both', () => {
replacements.replaceAll('aaa').should.eq('long_ashort_a');
expect(replacements.replaceAll('aaa')).toEqual('long_ashort_a');
});
it('should replace using both', () => {
replacements.replaceAll('a aa a aa').should.eq('short_a long_a short_a long_a');
expect(replacements.replaceAll('a aa a aa')).toEqual('short_a long_a short_a long_a');
});
it('should work with empty replacements', () => {
new PrefixTree([]).replaceAll('Testing 123').should.eq('Testing 123');
expect(new PrefixTree([]).replaceAll('Testing 123')).toEqual('Testing 123');
});
it('should leave unmatching text alone', () => {
replacements
.replaceAll('Some text with none of the first letter of the ordered letter list')
.should.eq('Some text with none of the first letter of the ordered letter list');
expect(replacements.replaceAll('Some text with none of the first letter of the ordered letter list')).toEqual(
'Some text with none of the first letter of the ordered letter list',
);
});
it('should handle a mixture', () => {
replacements.replaceAll('Everyone loves an aardvark').should.eq('Everyone loves short_an long_ardvshort_ark');
expect(replacements.replaceAll('Everyone loves an aardvark')).toEqual(
'Everyone loves short_an long_ardvshort_ark',
);
});
it('should find exact matches', () => {
unwrap(replacements.findExact('a')).should.eq('short_a');
unwrap(replacements.findExact('aa')).should.eq('long_a');
unwrap(replacements.findExact('aa_shouldnotmatch')).should.eq('ERROR');
expect(unwrap(replacements.findExact('a'))).toEqual('short_a');
expect(unwrap(replacements.findExact('aa'))).toEqual('long_a');
expect(unwrap(replacements.findExact('aa_shouldnotmatch'))).toEqual('ERROR');
});
it('should find not find mismatches', () => {
chai.expect(replacements.findExact('aaa')).to.be.null;
chai.expect(replacements.findExact(' aa')).to.be.null;
chai.expect(replacements.findExact(' a')).to.be.null;
chai.expect(replacements.findExact('Oh noes')).to.be.null;
chai.expect(replacements.findExact('')).to.be.null;
expect(replacements.findExact('aaa')).toBeNull();
expect(replacements.findExact(' aa')).toBeNull();
expect(replacements.findExact(' a')).toBeNull();
expect(replacements.findExact('Oh noes')).toBeNull();
expect(replacements.findExact('')).toBeNull();
});
});
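The file-demangling suite above is now registered only on Linux by wrapping the whole `describe` in a platform check, instead of emitting a placeholder test on other platforms. A minimal sketch of that gating pattern:

```ts
// Sketch only: illustrates conditional suite registration, not the real demangle tests.
import {describe, expect, it} from 'vitest';

if (process.platform === 'linux') {
    describe('linux-only checks', () => {
        it('can rely on linux tools being present', () => {
            expect(process.platform).toEqual('linux');
        });
    });
}
```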

@@ -24,114 +24,57 @@

import path from 'path';

import {assert} from '../lib/assert.js';
import {afterAll, beforeAll, describe, expect, it} from 'vitest';

import * as exec from '../lib/exec.js';
import * as props from '../lib/properties.js';
import {UnprocessedExecResult} from '../types/execution/execution.interfaces.js';

import {chai} from './utils.js';

const expect = chai.expect;

function testExecOutput(x: Partial<UnprocessedExecResult>) {
// Work around chai not being able to deepEquals with a function
x.filenameTransform!.should.be.a('function');
async function testExecOutput(execPromise: Promise<Partial<UnprocessedExecResult>>) {
const x = await execPromise;
expect(x.filenameTransform).toBeInstanceOf(Function);
delete x.filenameTransform;
delete x.execTime;
return x;
}

describe('Execution tests', () => {
describe('Execution tests', async () => {
if (process.platform === 'win32') {
// win32
describe('Executes external commands', () => {
// note: we use powershell, since echo is a builtin, and false doesn't exist
it('supports output', () => {
return exec
.execute('powershell', ['-Command', 'echo "hello world"'], {})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 0,
okToCache: true,
stderr: '',
stdout: 'hello world\r\n',
truncated: false,
timedOut: false,
});
});
it('limits output', () => {
return exec
.execute('powershell', ['-Command', 'echo "A very very very very very long string"'], {
maxOutput: 10,
})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 0,
okToCache: true,
stderr: '',
stdout: 'A very ver\n[Truncated]',
truncated: true,
timedOut: false,
});
});
it('handles failing commands', () => {
return exec
.execute('powershell', ['-Command', 'function Fail { exit 1 }; Fail'], {})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 1,
okToCache: true,
stderr: '',
stdout: '',
truncated: false,
timedOut: false,
});
});
it('handles timouts', () => {
return exec
.execute('powershell', ['-Command', '"sleep 5"'], {timeoutMs: 10})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 1,
okToCache: false,
stderr: '\nKilled - processing time exceeded\n',
stdout: '',
truncated: false,
timedOut: true,
});
});
it('handles missing executables', () => {
return exec.execute('__not_a_command__', [], {}).should.be.rejectedWith('ENOENT');
});
});
} else {
// POSIX
describe('Executes external commands', () => {
it('supports output', () => {
return exec.execute('echo', ['hello', 'world'], {}).then(testExecOutput).should.eventually.deep.equals({
it('supports output', async () => {
await expect(
testExecOutput(exec.execute('powershell', ['-Command', 'echo "hello world"'], {})),
).resolves.toEqual({
code: 0,
okToCache: true,
stderr: '',
stdout: 'hello world\n',
stdout: 'hello world\r\n',
truncated: false,
timedOut: false,
});
});
it('limits output', () => {
return exec
.execute('echo', ['A very very very very very long string'], {maxOutput: 22})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 0,
okToCache: true,
stderr: '',
stdout: 'A very ver\n[Truncated]',
truncated: true,
timedOut: false,
});
it('limits output', async () => {
await expect(
testExecOutput(
exec.execute('powershell', ['-Command', 'echo "A very very very very very long string"'], {
maxOutput: 10,
}),
),
).resolves.toEqual({
code: 0,
okToCache: true,
stderr: '',
stdout: 'A very ver\n[Truncated]',
truncated: true,
timedOut: false,
});
});
it('handles failing commands', () => {
return exec.execute('false', [], {}).then(testExecOutput).should.eventually.deep.equals({
it('handles failing commands', async () => {
await expect(
testExecOutput(exec.execute('powershell', ['-Command', 'function Fail { exit 1 }; Fail'], {})),
).resolves.toEqual({
code: 1,
okToCache: true,
stderr: '',
@@ -140,43 +83,88 @@ describe('Execution tests', () => {
timedOut: false,
});
});
it('handles timouts', () => {
return exec
.execute('sleep', ['5'], {timeoutMs: 10})
.then(testExecOutput)
.should.eventually.deep.equals({
code: -1,
okToCache: false,
stderr: '\nKilled - processing time exceeded\n',
stdout: '',
truncated: false,
timedOut: true,
});
it('handles timouts', async () => {
await expect(
testExecOutput(exec.execute('powershell', ['-Command', '"sleep 5"'], {timeoutMs: 10})),
).resolves.toEqual({
code: 1,
okToCache: false,
stderr: '\nKilled - processing time exceeded\n',
stdout: '',
truncated: false,
timedOut: true,
});
});
it('handles missing executables', () => {
return exec.execute('__not_a_command__', [], {}).should.be.rejectedWith('ENOENT');
it('handles missing executables', async () => {
await expect(exec.execute('__not_a_command__', [], {})).rejects.toThrow('ENOENT');
});
it('handles input', () => {
return exec
.execute('cat', [], {input: 'this is stdin'})
.then(testExecOutput)
.should.eventually.deep.equals({
code: 0,
okToCache: true,
stderr: '',
stdout: 'this is stdin',
truncated: false,
timedOut: false,
});
});
} else {
// POSIX
describe('Executes external commands', () => {
it('supports output', async () => {
await expect(testExecOutput(exec.execute('echo', ['hello', 'world'], {}))).resolves.toEqual({
code: 0,
okToCache: true,
stderr: '',
stdout: 'hello world\n',
truncated: false,
timedOut: false,
});
});
it('limits output', async () => {
return expect(
testExecOutput(exec.execute('echo', ['A very very very very very long string'], {maxOutput: 22})),
).resolves.toEqual({
code: 0,
okToCache: true,
stderr: '',
stdout: 'A very ver\n[Truncated]',
truncated: true,
timedOut: false,
});
});
it('handles failing commands', async () => {
await expect(testExecOutput(exec.execute('false', [], {}))).resolves.toEqual({
code: 1,
okToCache: true,
stderr: '',
stdout: '',
truncated: false,
timedOut: false,
});
});
it('handles timouts', async () => {
await expect(testExecOutput(exec.execute('sleep', ['5'], {timeoutMs: 10}))).resolves.toEqual({
code: -1,
okToCache: false,
stderr: '\nKilled - processing time exceeded\n',
stdout: '',
truncated: false,
timedOut: true,
});
});
it('handles missing executables', async () => {
await expect(exec.execute('__not_a_command__', [], {})).rejects.toThrow('ENOENT');
});
it('handles input', async () => {
await expect(testExecOutput(exec.execute('cat', [], {input: 'this is stdin'}))).resolves.toEqual({
code: 0,
okToCache: true,
stderr: '',
stdout: 'this is stdin',
truncated: false,
timedOut: false,
});
});
});
}

describe('nsjail unit tests', () => {
before(() => {
beforeAll(() => {
props.initialize(path.resolve('./test/test-properties/execution'), ['test']);
});
after(() => {
afterAll(() => {
props.reset();
});
it('should handle simple cases', () => {
@@ -186,7 +174,7 @@ describe('Execution tests', () => {
['1', '2', '3'],
{},
);
args.should.deep.equals([
expect(args).toEqual([
'--config',
exec.getNsJailCfgFilePath('sandbox'),
'--env=HOME=/app',
@@ -196,7 +184,7 @@ describe('Execution tests', () => {
'2',
'3',
]);
options.should.deep.equals({});
expect(options).toEqual({});
expect(filenameTransform).to.be.undefined;
});
it('should pass through options', () => {
@@ -204,10 +192,10 @@ describe('Execution tests', () => {
timeoutMs: 42,
maxOutput: -1,
}).options;
options.should.deep.equals({timeoutMs: 42, maxOutput: -1});
expect(options).toEqual({timeoutMs: 42, maxOutput: -1});
});
it('should not pass through unknown configs', () => {
expect(() => exec.getNsJailOptions('custom-config', '/path/to/compiler', ['1', '2', '3'], {})).to.throw();
expect(() => exec.getNsJailOptions('custom-config', '/path/to/compiler', ['1', '2', '3'], {})).toThrow();
});
it('should remap paths when using customCwd', () => {
const {args, options, filenameTransform} = exec.getNsJailOptions(
@@ -216,7 +204,7 @@ describe('Execution tests', () => {
['/some/custom/cwd/file', '/not/custom/file'],
{customCwd: '/some/custom/cwd'},
);
args.should.deep.equals([
expect(args).toEqual([
'--config',
exec.getNsJailCfgFilePath('sandbox'),
'--cwd',
@@ -229,42 +217,43 @@ describe('Execution tests', () => {
'/app/file',
'/not/custom/file',
]);
options.should.deep.equals({});
expect(filenameTransform).to.not.be.undefined;
assert(filenameTransform);
filenameTransform('moo').should.equal('moo');
filenameTransform('/some/custom/cwd/file').should.equal('/app/file');
expect(options).toEqual({});
expect(filenameTransform).toBeTruthy();
if (filenameTransform) {
expect(filenameTransform('moo')).toEqual('moo');
expect(filenameTransform('/some/custom/cwd/file')).toEqual('/app/file');
}
});
it('should handle timeouts', () => {
const args = exec.getNsJailOptions('sandbox', '/path/to/compiler', [], {timeoutMs: 1234}).args;
args.should.include('--time_limit=2');
expect(args).toContain('--time_limit=2');
});
it('should handle linker paths', () => {
const {args, options} = exec.getNsJailOptions('sandbox', '/path/to/compiler', [], {
ldPath: ['/a/lib/path', '/b/lib2'],
});
options.should.deep.equals({});
expect(options).toEqual({});
if (process.platform === 'win32') {
args.should.include('--env=LD_LIBRARY_PATH=/a/lib/path;/b/lib2');
|
||||
expect(args).toContain('--env=LD_LIBRARY_PATH=/a/lib/path;/b/lib2');
|
||||
} else {
|
||||
args.should.include('--env=LD_LIBRARY_PATH=/a/lib/path:/b/lib2');
|
||||
expect(args).toContain('--env=LD_LIBRARY_PATH=/a/lib/path:/b/lib2');
|
||||
}
|
||||
});
|
||||
it('should handle envs', () => {
|
||||
const {args, options} = exec.getNsJailOptions('sandbox', '/path/to/compiler', [], {
|
||||
env: {ENV1: '1', ENV2: '2'},
|
||||
});
|
||||
options.should.deep.equals({});
|
||||
args.should.include('--env=ENV1=1');
|
||||
args.should.include('--env=ENV2=2');
|
||||
expect(options).toEqual({});
|
||||
expect(args).toContain('--env=ENV1=1');
|
||||
expect(args).toContain('--env=ENV2=2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('cewrapper unit tests', () => {
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
props.initialize(path.resolve('./test/test-properties/execution'), ['test']);
|
||||
});
|
||||
after(() => {
|
||||
afterAll(() => {
|
||||
props.reset();
|
||||
});
|
||||
it('passed as arguments', () => {
|
||||
@@ -276,29 +265,29 @@ describe('Execution tests', () => {
|
||||
},
|
||||
});
|
||||
|
||||
options.args.should.deep.equals([
|
||||
expect(options.args).toEqual([
|
||||
'--config=' + path.resolve('etc/cewrapper/user-execution.json'),
|
||||
'--time_limit=1',
|
||||
'/path/to/something',
|
||||
'--help',
|
||||
]);
|
||||
options.options.should.deep.equals({timeoutMs: 42, maxOutput: -1, env: {TEST: 'Hello, World!'}});
|
||||
expect(options.options).toEqual({timeoutMs: 42, maxOutput: -1, env: {TEST: 'Hello, World!'}});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Subdirectory execution', () => {
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
props.initialize(path.resolve('./test/test-properties/execution'), ['test']);
|
||||
});
|
||||
after(() => {
|
||||
afterAll(() => {
|
||||
props.reset();
|
||||
});
|
||||
|
||||
it('Normal situation without customCwd', () => {
|
||||
const {args, options} = exec.getSandboxNsjailOptions('/tmp/hellow/output.s', [], {});
|
||||
|
||||
options.should.deep.equals({});
|
||||
args.should.deep.equals([
|
||||
expect(options).toEqual({});
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -316,8 +305,8 @@ describe('Execution tests', () => {
|
||||
customCwd: '/tmp/hellow',
|
||||
});
|
||||
|
||||
options.should.deep.equals({});
|
||||
args.should.deep.equals([
|
||||
expect(options).toEqual({});
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -335,8 +324,8 @@ describe('Execution tests', () => {
|
||||
env: {SOME_DOTNET_THING: '/tmp/hellow/dotnet'},
|
||||
});
|
||||
|
||||
options.should.deep.equals({});
|
||||
args.should.deep.equals([
|
||||
expect(options).toEqual({});
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -356,8 +345,8 @@ describe('Execution tests', () => {
|
||||
env: {CXX_FLAGS: '-L/usr/lib -L/tmp/hellow/curl/lib -L/tmp/hellow/fmt/lib'},
|
||||
});
|
||||
|
||||
options.should.deep.equals({});
|
||||
args.should.deep.equals([
|
||||
expect(options).toEqual({});
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -377,8 +366,8 @@ describe('Execution tests', () => {
|
||||
ldPath: ['/usr/lib', '', '/tmp/hellow/lib'],
|
||||
});
|
||||
|
||||
options.should.deep.equals({});
|
||||
args.should.deep.equals([
|
||||
expect(options).toEqual({});
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -397,9 +386,9 @@ describe('Execution tests', () => {
|
||||
customCwd: '/tmp/hellow',
|
||||
});
|
||||
|
||||
options.should.deep.equals({});
|
||||
expect(options).toEqual({});
|
||||
if (process.platform !== 'win32') {
|
||||
args.should.deep.equals([
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/sandbox.cfg',
|
||||
'--cwd',
|
||||
@@ -419,11 +408,11 @@ describe('Execution tests', () => {
|
||||
appHome: '/tmp/hellow',
|
||||
});
|
||||
|
||||
options.should.deep.equals({
|
||||
expect(options).toEqual({
|
||||
appHome: '/tmp/hellow',
|
||||
});
|
||||
if (process.platform !== 'win32') {
|
||||
args.should.deep.equals([
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/execute.cfg',
|
||||
'--cwd',
|
||||
@@ -457,11 +446,11 @@ describe('Execution tests', () => {
|
||||
},
|
||||
);
|
||||
|
||||
options.should.deep.equals({
|
||||
expect(options).toEqual({
|
||||
appHome: '/tmp/hellow',
|
||||
});
|
||||
if (process.platform !== 'win32') {
|
||||
args.should.deep.equals([
|
||||
expect(args).toEqual([
|
||||
'--config',
|
||||
'etc/nsjail/execute.cfg',
|
||||
'--cwd',

@@ -24,25 +24,27 @@

import path from 'path';

import approvals from 'approvals';
import {configure, verifyAsJSON} from 'approvals';
import type {ApprovalFailureReporter} from 'approvals/lib/Core/ApprovalFailureReporter.js';
import {beforeAll, describe, expect, it} from 'vitest';

import {CC65AsmParser} from '../lib/parsers/asm-parser-cc65.js';
import {AsmEWAVRParser} from '../lib/parsers/asm-parser-ewavr.js';
import {SassAsmParser} from '../lib/parsers/asm-parser-sass.js';
import {VcAsmParser} from '../lib/parsers/asm-parser-vc.js';
import {AsmParser} from '../lib/parsers/asm-parser.js';
import {fakeProps} from '../lib/properties.js';
import {ParseFiltersAndOutputOptions} from '../types/features/filters.interfaces.js';

import {fs, resolvePathFromTestRoot} from './utils.js';

approvals.mocha(resolvePathFromTestRoot('filters-cases'));

function processAsm(filename, filters) {
|
||||
function processAsm(filename: string, filters: ParseFiltersAndOutputOptions) {
|
||||
const file = fs.readFileSync(filename, 'utf8');
|
||||
let parser;
|
||||
let parser: AsmParser;
|
||||
if (file.includes('Microsoft')) parser = new VcAsmParser();
|
||||
else if (filename.includes('sass-')) parser = new SassAsmParser();
|
||||
else if (filename.includes('cc65-')) parser = new CC65AsmParser();
|
||||
else if (filename.includes('ewarm-')) parser = new AsmEWAVRParser();
|
||||
else if (filename.includes('cc65-')) parser = new CC65AsmParser(fakeProps({}));
|
||||
else if (filename.includes('ewarm-')) parser = new AsmEWAVRParser(fakeProps({}));
|
||||
else {
|
||||
parser = new AsmParser();
|
||||
parser.binaryHideFuncRe =
|
||||
@@ -65,36 +67,55 @@ const optionsOverride = {
|
||||
errorOnStaleApprovedFiles: process.platform !== 'win32',
|
||||
};
|
||||
|
||||
function testFilter(filename, suffix, filters) {
|
||||
function testFilter(filename: string, suffix: string, filters: ParseFiltersAndOutputOptions) {
|
||||
const testName = path.basename(filename + suffix);
|
||||
it(testName, () => {
|
||||
const result = processAsm(filename, filters);
|
||||
delete result.parsingTime;
|
||||
delete result.filteredCount;
|
||||
approvals.verifyAsJSON(casesRoot, testName, result, optionsOverride);
|
||||
}).timeout(10000); // Bump the timeout a bit so that we don't fail for slow cases
|
||||
it(
|
||||
testName,
|
||||
() => {
|
||||
const result = processAsm(filename, filters);
|
||||
delete result.parsingTime;
|
||||
delete result.filteredCount;
|
||||
verifyAsJSON(casesRoot, testName, result, optionsOverride);
|
||||
},
|
||||
{timeout: 10000},
|
||||
); // Bump the timeout a bit so that we don't fail for slow cases
|
||||
}

class VitestReporter implements ApprovalFailureReporter {
name: string = 'VitestReporter';

canReportOn() {
return true;
}

report(approvedFilePath: string, receivedFilePath: string) {
const approvedText = fs.readFileSync(approvedFilePath).toString();
const receivedText = fs.readFileSync(receivedFilePath).toString();
expect(receivedText).toBe(approvedText);
}
}

/*
The before() hooks on mocha are for it()s - They don't execute before the describes!
That's sad because then we can't have cases be loaded in a before() for every describe child to see.
*/
describe('Filter test cases', function () {
|
||||
describe('No filters', function () {
|
||||
describe('Filter test cases', () => {
|
||||
beforeAll(() => configure({reporters: [new VitestReporter()]}));
|
||||
describe('No filters', () => {
|
||||
for (const x of cases) testFilter(x, '.none', {});
|
||||
});
|
||||
describe('Directive filters', function () {
|
||||
describe('Directive filters', () => {
|
||||
for (const x of cases) testFilter(x, '.directives', {directives: true});
|
||||
});
|
||||
describe('Directives and labels together', function () {
|
||||
describe('Directives and labels together', () => {
|
||||
for (const x of cases) testFilter(x, '.directives.labels', {directives: true, labels: true});
|
||||
});
|
||||
describe('Directives, labels and comments', function () {
|
||||
describe('Directives, labels and comments', () => {
|
||||
for (const x of cases) {
|
||||
testFilter(x, '.directives.labels.comments', {directives: true, labels: true, commentOnly: true});
|
||||
}
|
||||
});
|
||||
describe('Binary, directives, labels and comments', function () {
|
||||
describe('Binary, directives, labels and comments', () => {
|
||||
if (process.platform !== 'win32') {
|
||||
for (const x of cases) {
|
||||
testFilter(x, '.binary.directives.labels.comments', {
|
||||
@@ -106,7 +127,7 @@ describe('Filter test cases', function () {
|
||||
}
|
||||
}
|
||||
});
|
||||
describe('Binary, directives, labels, comments and library code', function () {
|
||||
describe('Binary, directives, labels, comments and library code', () => {
|
||||
if (process.platform !== 'win32') {
|
||||
for (const x of cases) {
|
||||
if (!x.endsWith('-bin.asm')) continue;
|
||||
@@ -121,7 +142,7 @@ describe('Filter test cases', function () {
|
||||
}
|
||||
}
|
||||
});
|
||||
describe('Binary, directives, labels, comments and library code with dontMaskFilenames', function () {
|
||||
describe('Binary, directives, labels, comments and library code with dontMaskFilenames', () => {
|
||||
if (process.platform !== 'win32') {
|
||||
for (const x of cases) {
|
||||
if (!x.endsWith('-bin.asm')) continue;
|
||||
@@ -137,13 +158,13 @@ describe('Filter test cases', function () {
|
||||
}
|
||||
}
|
||||
});
|
||||
describe('Directives and comments', function () {
|
||||
describe('Directives and comments', () => {
|
||||
for (const x of cases) testFilter(x, '.directives.comments', {directives: true, commentOnly: true});
|
||||
});
|
||||
describe('Directives and library code', function () {
|
||||
describe('Directives and library code', () => {
|
||||
for (const x of cases) testFilter(x, '.directives.library', {directives: true, libraryCode: true});
|
||||
});
|
||||
describe('Directives, labels, comments and library code', function () {
|
||||
describe('Directives, labels, comments and library code', () => {
|
||||
for (const x of cases) {
|
||||
testFilter(x, '.directives.labels.comments.library', {
|
||||
directives: true,
|
||||
@@ -153,7 +174,7 @@ describe('Filter test cases', function () {
|
||||
});
|
||||
}
|
||||
});
|
||||
describe('Directives, labels, comments and library code with dontMaskFilenames', function () {
|
||||
describe('Directives, labels, comments and library code with dontMaskFilenames', () => {
|
||||
for (const x of cases) {
|
||||
testFilter(x, '.directives.labels.comments.library.dontMaskFilenames', {
|
||||
directives: true,
|
||||
@@ -169,21 +190,21 @@ describe('Filter test cases', function () {
|
||||
describe('AsmParser tests', () => {
|
||||
const parser = new AsmParser();
|
||||
it('should identify generic opcodes', () => {
|
||||
parser.hasOpcode(' mov r0, #1').should.be.true;
|
||||
parser.hasOpcode(' ROL A').should.be.true;
|
||||
expect(parser.hasOpcode(' mov r0, #1')).toBe(true);
|
||||
expect(parser.hasOpcode(' ROL A')).toBe(true);
|
||||
});
|
||||
it('should not identify non-opcodes as opcodes', () => {
|
||||
parser.hasOpcode(' ;mov r0, #1').should.be.false;
|
||||
parser.hasOpcode('').should.be.false;
|
||||
parser.hasOpcode('# moose').should.be.false;
|
||||
expect(parser.hasOpcode(' ;mov r0, #1')).toBe(false);
|
||||
expect(parser.hasOpcode('')).toBe(false);
|
||||
expect(parser.hasOpcode('# moose')).toBe(false);
|
||||
});
|
||||
it('should identify llvm opcodes', () => {
|
||||
parser.hasOpcode(' %i1 = phi i32 [ %i2, %.preheader ], [ 0, %bb ]').should.be.true;
|
||||
expect(parser.hasOpcode(' %i1 = phi i32 [ %i2, %.preheader ], [ 0, %bb ]')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('forceApproveAll should be false', () => {
|
||||
it('should have forceApproveAll false', () => {
|
||||
optionsOverride.forceApproveAll.should.be.false;
|
||||
expect(optionsOverride.forceApproveAll).toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {GolangCompiler} from '../lib/compilers/golang.js';
import * as utils from '../lib/utils.js';
import {LanguageKey} from '../types/languages.interfaces.js';
@@ -43,10 +45,10 @@ const info = {
|
||||
lang: languages.go.id,
|
||||
};
|
||||
|
||||
function testGoAsm(basefilename) {
|
||||
async function testGoAsm(baseFilename: string) {
|
||||
const compiler = new GolangCompiler(makeFakeCompilerInfo(info), ce);
|
||||
|
||||
const asmLines = utils.splitLines(fs.readFileSync(basefilename + '.asm').toString());
|
||||
const asmLines = utils.splitLines(fs.readFileSync(baseFilename + '.asm').toString());
|
||||
|
||||
const result = {
|
||||
stderr: asmLines.map(line => {
|
||||
@@ -56,28 +58,25 @@ function testGoAsm(basefilename) {
|
||||
}),
|
||||
};
|
||||
|
||||
return compiler.postProcess(result).then(([output]) => {
|
||||
const expectedOutput = utils.splitLines(fs.readFileSync(basefilename + '.output.asm').toString());
|
||||
|
||||
utils.splitLines(output.asm).should.deep.equal(expectedOutput);
|
||||
|
||||
return output.should.deep.equal({
|
||||
asm: expectedOutput.join('\n'),
|
||||
stdout: [],
|
||||
stderr: [],
|
||||
});
|
||||
const [output] = await compiler.postProcess(result);
|
||||
const expectedOutput = utils.splitLines(fs.readFileSync(baseFilename + '.output.asm').toString());
|
||||
expect(utils.splitLines(output.asm)).toEqual(expectedOutput);
|
||||
expect(output).toEqual({
|
||||
asm: expectedOutput.join('\n'),
|
||||
stdout: [],
|
||||
stderr: [],
|
||||
});
|
||||
}
|
||||
|
||||
describe('GO asm tests', () => {
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
ce = makeCompilationEnvironment({languages});
|
||||
});
|
||||
|
||||
it('Handles unknown line number correctly', () => {
|
||||
return testGoAsm('test/golang/bug-901');
|
||||
it('Handles unknown line number correctly', async () => {
|
||||
await testGoAsm('test/golang/bug-901');
|
||||
});
|
||||
it('Rewrites PC jumps to labels', () => {
|
||||
return testGoAsm('test/golang/labels');
|
||||
it('Rewrites PC jumps to labels', async () => {
|
||||
await testGoAsm('test/golang/labels');
|
||||
});
|
||||
});
|
||||
|

@@ -23,6 +23,7 @@
// POSSIBILITY OF SUCH DAMAGE.

import nock from 'nock';
import {afterAll, describe, expect, it} from 'vitest';

import * as google from '../lib/shortener/google.js';

|
||||
const shortUrl = '/short';
|
||||
|
||||
describe('Google short URL resolver tests', () => {
|
||||
after(() => {
|
||||
afterAll(() => {
|
||||
nock.cleanAll();
|
||||
});
|
||||
|
||||
@@ -39,25 +40,24 @@ describe('Google short URL resolver tests', () => {
|
||||
it('Resolves simple URLs', async () => {
|
||||
nock(googlDomain).head(shortUrl).reply(302, {}, {location: 'http://long.url/'});
|
||||
|
||||
const resp = await resolver.resolve(googlDomain + shortUrl);
|
||||
resp.should.deep.equal({longUrl: 'http://long.url/'});
|
||||
await expect(resolver.resolve(googlDomain + shortUrl)).resolves.toEqual({longUrl: 'http://long.url/'});
|
||||
});
|
||||
|
||||
it('Handles missing long urls', () => {
|
||||
it('Handles missing long urls', async () => {
|
||||
nock(googlDomain).head(shortUrl).reply(404);
|
||||
|
||||
return resolver.resolve(googlDomain + shortUrl).should.be.rejectedWith('Got response 404');
|
||||
await expect(resolver.resolve(googlDomain + shortUrl)).rejects.toThrow('Got response 404');
|
||||
});
|
||||
|
||||
it('Handles missing location header', () => {
|
||||
it('Handles missing location header', async () => {
|
||||
nock(googlDomain).head(shortUrl).reply(302);
|
||||
|
||||
return resolver.resolve(googlDomain + shortUrl).should.be.rejectedWith('Missing location url in undefined');
|
||||
await expect(resolver.resolve(googlDomain + shortUrl)).rejects.toThrow('Missing location url in undefined');
|
||||
});
|
||||
|
||||
it('Handles failed requests', () => {
|
||||
it('Handles failed requests', async () => {
|
||||
nock(googlDomain).head(shortUrl).replyWithError('Something went wrong');
|
||||
|
||||
return resolver.resolve(googlDomain + shortUrl).should.be.rejectedWith('Something went wrong');
|
||||
await expect(resolver.resolve(googlDomain + shortUrl)).rejects.toThrow('Something went wrong');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,297 +0,0 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import express from 'express';
|
||||
|
||||
import {ApiHandler} from '../../lib/handlers/api.js';
|
||||
import {StorageNull} from '../../lib/storage/index.js';
|
||||
import {chai} from '../utils.js';
|
||||
|
||||
const languages = {
|
||||
'c++': {
|
||||
id: 'c++',
|
||||
name: 'C++',
|
||||
monaco: 'cppp',
|
||||
extensions: ['.cpp', '.cxx', '.h', '.hpp', '.hxx', '.c'],
|
||||
},
|
||||
haskell: {
|
||||
id: 'haskell',
|
||||
name: 'Haskell',
|
||||
monaco: 'haskell',
|
||||
extensions: ['.hs', '.haskell'],
|
||||
},
|
||||
pascal: {
|
||||
id: 'pascal',
|
||||
name: 'Pascal',
|
||||
monaco: 'pascal',
|
||||
extensions: ['.pas'],
|
||||
},
|
||||
};
|
||||
const compilers = [
|
||||
{
|
||||
id: 'gcc900',
|
||||
name: 'GCC 9.0.0',
|
||||
lang: 'c++',
|
||||
},
|
||||
{
|
||||
id: 'fpc302',
|
||||
name: 'FPC 3.0.2',
|
||||
lang: 'pascal',
|
||||
},
|
||||
{
|
||||
id: 'clangtrunk',
|
||||
name: 'Clang trunk',
|
||||
lang: 'c++',
|
||||
},
|
||||
];
|
||||
|
||||
const compilersLimitedFields = [
|
||||
{
|
||||
id: 'gcc900',
|
||||
name: 'GCC 9.0.0',
|
||||
},
|
||||
{
|
||||
id: 'fpc302',
|
||||
name: 'FPC 3.0.2',
|
||||
},
|
||||
{
|
||||
id: 'clangtrunk',
|
||||
name: 'Clang trunk',
|
||||
},
|
||||
];
|
||||
|
||||
describe('API handling', () => {
|
||||
let app;
|
||||
|
||||
before(() => {
|
||||
app = express();
|
||||
const apiHandler = new ApiHandler(
|
||||
{
|
||||
handle: res => res.send('compile'),
|
||||
handleCmake: res => res.send('cmake'),
|
||||
handlePopularArguments: res => res.send('ok'),
|
||||
handleOptimizationArguments: res => res.send('ok'),
|
||||
},
|
||||
(key, def) => {
|
||||
switch (key) {
|
||||
case 'formatters': {
|
||||
return 'formatt:badformatt';
|
||||
}
|
||||
case 'formatter.formatt.exe': {
|
||||
return 'echo';
|
||||
}
|
||||
case 'formatter.formatt.version': {
|
||||
return 'Release';
|
||||
}
|
||||
case 'formatter.formatt.name': {
|
||||
return 'FormatT';
|
||||
}
|
||||
default: {
|
||||
return def;
|
||||
}
|
||||
}
|
||||
},
|
||||
new StorageNull('/', {}),
|
||||
'default',
|
||||
);
|
||||
app.use('/api', apiHandler.handle);
|
||||
apiHandler.setCompilers(compilers);
|
||||
apiHandler.setLanguages(languages);
|
||||
});
|
||||
|
||||
it('should respond to plain text compiler requests', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Compiler Name');
|
||||
res.text.should.contain('gcc900');
|
||||
res.text.should.contain('GCC 9.0.0');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON compiler requests', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals(compilers);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON compiler requests with all fields', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers?fields=all')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals(compilers);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON compiler requests with limited fields', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers?fields=id,name')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals(compilersLimitedFields);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON compilers requests with c++ filter', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers/c++')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals([compilers[0], compilers[2]]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON compilers requests with pascal filter', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/compilers/pascal')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals([compilers[1]]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to plain text language requests', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/languages')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Name');
|
||||
res.text.should.contain('c++');
|
||||
res.text.should.contain('pascal');
|
||||
// We should not list languages for which there are no compilers
|
||||
res.text.should.not.contain('Haskell');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
it('should respond to JSON languages requests', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/languages')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals([languages['c++'], languages.pascal]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
// TODO(supergrecko): re-write this test case
|
||||
it.skip('should list the formatters', () => {
|
||||
if (process.platform !== 'win32') {
|
||||
// Expects an executable called echo
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/formats')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals([{name: 'FormatT', version: 'Release'}]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
});
|
||||
it('should not go through with invalid tools', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/api/format/invalid')
|
||||
.set('Accept', 'application/json')
|
||||
.then(res => {
|
||||
res.should.have.status(422);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals({exit: 2, answer: "Unknown format tool 'invalid'"});
|
||||
});
|
||||
});
|
||||
/*
|
||||
it('should not go through with invalid base styles', () => {
|
||||
return chai.request(app)
|
||||
.post('/api/format/formatt')
|
||||
.set('Accept', 'application/json')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({
|
||||
base: "bad-base",
|
||||
source: ""
|
||||
})
|
||||
.then(res => {
|
||||
res.should.have.status(422);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals({exit: 3, answer: "Base style not supported"});
|
||||
});
|
||||
});
|
||||
*/
|
||||
it('should respond to plain site template requests', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.get('/api/siteTemplates')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
187
test/handlers/api-tests.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import bodyParser from 'body-parser';
|
||||
import express from 'express';
|
||||
import request from 'supertest';
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {ApiHandler} from '../../lib/handlers/api.js';
|
||||
import {CompileHandler} from '../../lib/handlers/compile.js';
|
||||
import {CompilerProps, fakeProps} from '../../lib/properties.js';
|
||||
import {StorageNull} from '../../lib/storage/index.js';
|
||||
import {CompilerInfo} from '../../types/compiler.interfaces.js';
|
||||
import {Language, LanguageKey} from '../../types/languages.interfaces.js';
|
||||
import {makeFakeCompilerInfo, makeFakeLanguage} from '../utils.js';
|
||||
|
||||
const languages: Partial<Record<LanguageKey, Language>> = {
|
||||
'c++': makeFakeLanguage({
|
||||
id: 'c++',
|
||||
name: 'C++',
|
||||
monaco: 'cppp',
|
||||
extensions: ['.cpp', '.cxx', '.h', '.hpp', '.hxx', '.c'],
|
||||
}),
|
||||
haskell: makeFakeLanguage({
|
||||
id: 'haskell',
|
||||
name: 'Haskell',
|
||||
monaco: 'haskell',
|
||||
extensions: ['.hs', '.haskell'],
|
||||
}),
|
||||
pascal: makeFakeLanguage({
|
||||
id: 'pascal',
|
||||
name: 'Pascal',
|
||||
monaco: 'pascal',
|
||||
extensions: ['.pas'],
|
||||
}),
|
||||
};
|
||||
const compilers: CompilerInfo[] = [
|
||||
makeFakeCompilerInfo({
|
||||
id: 'gcc900',
|
||||
name: 'GCC 9.0.0',
|
||||
lang: 'c++',
|
||||
}),
|
||||
makeFakeCompilerInfo({
|
||||
id: 'fpc302',
|
||||
name: 'FPC 3.0.2',
|
||||
lang: 'pascal',
|
||||
}),
|
||||
makeFakeCompilerInfo({
|
||||
id: 'clangtrunk',
|
||||
name: 'Clang trunk',
|
||||
lang: 'c++',
|
||||
}),
|
||||
];
|
||||
|
||||
describe('API handling', () => {
|
||||
let app;
|
||||
|
||||
beforeAll(() => {
|
||||
app = express();
|
||||
const apiHandler = new ApiHandler(
|
||||
{
|
||||
handle: res => res.send('compile'),
|
||||
handleCmake: res => res.send('cmake'),
|
||||
handlePopularArguments: res => res.send('ok'),
|
||||
handleOptimizationArguments: res => res.send('ok'),
|
||||
} as unknown as CompileHandler, // TODO(mrg) ideally fake this out or make it a higher-level interface
|
||||
fakeProps({
|
||||
formatters: 'formatt:badformatt',
|
||||
'formatter.formatt.exe': 'echo',
|
||||
'formatter.formatt.type': 'clangformat',
|
||||
'formatter.formatt.version': 'Release',
|
||||
'formatter.formatt.name': 'FormatT',
|
||||
}),
|
||||
new StorageNull('/', new CompilerProps(languages, fakeProps({}))),
|
||||
'default',
|
||||
);
|
||||
app.use(bodyParser.json());
|
||||
app.use('/api', apiHandler.handle);
|
||||
apiHandler.setCompilers(compilers);
|
||||
apiHandler.setLanguages(languages);
|
||||
});
|
||||
|
||||
it('should respond to plain text compiler requests', async () => {
|
||||
const res = await request(app).get('/api/compilers').expect(200).expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Compiler Name');
|
||||
expect(res.text).toContain('gcc900');
|
||||
expect(res.text).toContain('GCC 9.0.0');
|
||||
});
|
||||
it('should respond to JSON compiler requests', async () => {
|
||||
await request(app)
|
||||
.get('/api/compilers')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, compilers);
|
||||
});
|
||||
it('should respond to JSON compiler requests with all fields', async () => {
|
||||
await request(app)
|
||||
.get('/api/compilers?fields=all')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(compilers);
|
||||
});
|
||||
it('should respond to JSON compiler requests with limited fields', async () => {
|
||||
await request(app)
|
||||
.get('/api/compilers?fields=id,name')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(
|
||||
200,
|
||||
compilers.map(c => {
|
||||
return {id: c.id, name: c.name};
|
||||
}),
|
||||
);
|
||||
});
|
||||
it('should respond to JSON compilers requests with c++ filter', async () => {
|
||||
await request(app)
|
||||
.get('/api/compilers/c++')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, [compilers[0], compilers[2]]);
|
||||
});
|
||||
it('should respond to JSON compilers requests with pascal filter', async () => {
|
||||
await request(app)
|
||||
.get('/api/compilers/pascal')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, [compilers[1]]);
|
||||
});
|
||||
it('should respond to plain text language requests', async () => {
|
||||
const res = await request(app).get('/api/languages').expect(200).expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Name');
|
||||
expect(res.text).toContain('c++');
|
||||
expect(res.text).toContain('pascal');
|
||||
// We should not list languages for which there are no compilers
|
||||
expect(res.text).not.toContain('haskell');
|
||||
});
|
||||
it('should respond to JSON languages requests', async () => {
|
||||
await request(app)
|
||||
.get('/api/languages')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, [languages['c++'], languages.pascal]);
|
||||
});
|
||||
|
||||
it('should not go through with invalid tools', async () => {
|
||||
await request(app)
|
||||
.post('/api/format/invalid')
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(422, {exit: 2, answer: "Unknown format tool 'invalid'"});
|
||||
});
|
||||
it('should not go through with invalid base styles', async () => {
|
||||
await request(app)
|
||||
.post('/api/format/formatt')
|
||||
.send({
|
||||
base: 'bad-base',
|
||||
source: 'i am source',
|
||||
})
|
||||
.set('Accept', 'application/json')
|
||||
.expect(422, {exit: 3, answer: "Style 'bad-base' is not supported"})
|
||||
.expect('Content-Type', /json/);
|
||||
});
|
||||
it('should respond to plain site template requests', async () => {
|
||||
await request(app).get('/api/siteTemplates').expect(200).expect('Content-Type', /json/);
|
||||
});
|
||||
});
|
||||
@@ -22,11 +22,11 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {expect} from 'chai';
import express from 'express';
import request from 'supertest';
import {beforeAll, describe, expect, it} from 'vitest';

import {withAssemblyDocumentationProviders} from '../../lib/handlers/assembly-documentation.js';
import {chai} from '../utils.js';

/** Test matrix of architecture to [opcode, tooptip, html, url] */
|
||||
export const TEST_MATRIX: Record<PropertyKey, [string, string, string, string][]> = {
|
||||
@@ -101,7 +101,7 @@ export const TEST_MATRIX: Record<PropertyKey, [string, string, string, string][]
|
||||
describe('Assembly Documentation API', () => {
|
||||
let app: express.Express;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
app = express();
|
||||
const router = express.Router();
|
||||
withAssemblyDocumentationProviders(router);
|
||||
@@ -109,44 +109,44 @@ describe('Assembly Documentation API', () => {
|
||||
});
|
||||
|
||||
it('should return 404 for unknown architecture', async () => {
|
||||
const res = await chai.request(app).get(`/api/asm/not_an_arch/mov`).set('Accept', 'application/json');
|
||||
expect(res).to.have.status(404);
|
||||
expect(res).to.be.json;
|
||||
expect(res.body).to.deep.equal({error: `No documentation for 'not_an_arch'`});
|
||||
await request(app)
|
||||
.get(`/api/asm/not_an_arch/mov`)
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(404, {error: `No documentation for 'not_an_arch'`});
|
||||
});
|
||||
|
||||
for (const [arch, cases] of Object.entries(TEST_MATRIX)) {
|
||||
for (const [opcode, tooltip, html, url] of cases) {
|
||||
it(`should process ${arch} text requests`, async () => {
|
||||
const res = await chai.request(app).get(`/api/asm/${arch}/${opcode}`).set('Accept', 'text/plain');
|
||||
expect(res).to.have.status(200);
|
||||
expect(res).to.be.html;
|
||||
expect(res.text).to.contain(html);
|
||||
const res = await request(app)
|
||||
.get(`/api/asm/${arch}/${opcode}`)
|
||||
.set('Accept', 'text/plain')
|
||||
.expect('Content-Type', /html/)
|
||||
.expect(200);
|
||||
expect(res.text).toContain(html);
|
||||
});
|
||||
|
||||
it(`should process ${arch} json requests`, async () => {
|
||||
const res = await chai.request(app).get(`/api/asm/${arch}/${opcode}`).set('Accept', 'application/json');
|
||||
|
||||
expect(res).to.have.status(200);
|
||||
expect(res).to.be.json;
|
||||
expect(res.body.html).to.contain(html);
|
||||
expect(res.body.tooltip).to.contain(tooltip);
|
||||
expect(res.body.url).to.contain(url);
|
||||
const res = await request(app)
|
||||
.get(`/api/asm/${arch}/${opcode}`)
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.html).toContain(html);
|
||||
expect(res.body.tooltip).toContain(tooltip);
|
||||
expect(res.body.url).toContain(url);
|
||||
});
|
||||
|
||||
it(`should return 404 for ${arch} unknown opcode requests`, async () => {
|
||||
const res = await chai
|
||||
.request(app)
|
||||
await request(app)
|
||||
.get(`/api/asm/${arch}/not_an_opcode`)
|
||||
.set('Accept', 'application/json');
|
||||
expect(res).to.have.status(404);
|
||||
expect(res).to.be.json;
|
||||
expect(res.body).to.deep.equal({error: "Unknown opcode 'NOT_AN_OPCODE'"});
|
||||
.set('Accept', 'application/json')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(404, {error: "Unknown opcode 'NOT_AN_OPCODE'"});
|
||||
});
|
||||
|
||||
it(`should return 406 for ${arch} bad accept type requests`, async () => {
|
||||
const res = await chai.request(app).get(`/api/asm/${arch}/${opcode}`).set('Accept', 'application/pdf');
|
||||
expect(res).to.have.status(406);
|
||||
await request(app).get(`/api/asm/${arch}/${opcode}`).set('Accept', 'application/pdf').expect(406);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,553 +0,0 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import bodyParser from 'body-parser';
|
||||
import express from 'express';
|
||||
|
||||
import {CompileHandler, SetTestMode} from '../../lib/handlers/compile.js';
|
||||
import {fakeProps} from '../../lib/properties.js';
|
||||
import {BypassCache} from '../../types/compilation/compilation.interfaces.js';
|
||||
import {chai, makeCompilationEnvironment} from '../utils.js';
|
||||
|
||||
SetTestMode();
|
||||
|
||||
const languages = {
|
||||
a: {id: 'a', name: 'A lang'},
|
||||
b: {id: 'b', name: 'B lang'},
|
||||
d: {id: 'd', name: 'D lang'},
|
||||
};
|
||||
|
||||
describe('Compiler tests', () => {
|
||||
let app, compileHandler;
|
||||
|
||||
before(() => {
|
||||
const compilationEnvironment = makeCompilationEnvironment({languages});
|
||||
compileHandler = new CompileHandler(compilationEnvironment, fakeProps({}));
|
||||
|
||||
const textParser = bodyParser.text({type: () => true});
|
||||
const formParser = bodyParser.urlencoded({extended: false});
|
||||
|
||||
app = express();
|
||||
app.use(bodyParser.json());
|
||||
|
||||
app.post('/noscript/compile', formParser, compileHandler.handle.bind(compileHandler));
|
||||
app.post('/:compiler/compile', textParser, compileHandler.handle.bind(compileHandler));
|
||||
app.post('/:compiler/cmake', compileHandler.handleCmake.bind(compileHandler));
|
||||
});
|
||||
|
||||
it('throws for unknown compilers', () => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/NOT_A_COMPILER/compile')
|
||||
.then(res => {
|
||||
res.should.have.status(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Noscript API', () => {
|
||||
it('supports compile', () => {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
])
|
||||
.then(() => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/noscript/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('compiler=fake-for-test&source=I am a program')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Something from stdout');
|
||||
res.text.should.contain('Something from stderr');
|
||||
res.text.should.contain('ASMASMASM');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Curl API', () => {
|
||||
it('supports compile', () => {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
])
|
||||
.then(() => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('I am a program /* &compiler=NOT_A_COMPILER&source=etc */')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Something from stdout');
|
||||
res.text.should.contain('Something from stderr');
|
||||
res.text.should.contain('ASMASMASM');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('supports alias compile', () => {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
id: 'newcompilerid',
|
||||
alias: ['oldid1', 'oldid2'],
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
])
|
||||
.then(() => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/oldid1/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('I am a program /* &compiler=NOT_A_COMPILER&source=etc */')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Something from stdout');
|
||||
res.text.should.contain('Something from stderr');
|
||||
res.text.should.contain('ASMASMASM');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('JSON API', () => {
|
||||
it('handles text output', () => {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
])
|
||||
.then(() => {
|
||||
return chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.send({
|
||||
options: {},
|
||||
source: 'I am a program',
|
||||
})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.text;
|
||||
res.text.should.contain('Something from stdout');
|
||||
res.text.should.contain('Something from stderr');
|
||||
res.text.should.contain('ASMASMASM');
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function makeFakeJson(source, options, fakeResult) {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: fakeResult || {},
|
||||
},
|
||||
])
|
||||
.then(() =>
|
||||
chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
function makeFakeWithExtraFilesJson(source, options, files, fakeResult) {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: fakeResult || {},
|
||||
},
|
||||
])
|
||||
.then(() =>
|
||||
chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
files: files || [],
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
function makeFakeCmakeJson(source, options, fakeResult, files) {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: fakeResult || {},
|
||||
},
|
||||
])
|
||||
.then(() =>
|
||||
chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/cmake')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
files: files || [],
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
it('handles JSON output', () => {
|
||||
return makeFakeJson(
|
||||
'I am a program',
|
||||
{},
|
||||
{
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
)
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.should.deep.equals({
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
code: 0,
|
||||
input: {
|
||||
backendOptions: {},
|
||||
filters: {},
|
||||
options: [],
|
||||
source: 'I am a program',
|
||||
},
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('parses options and filters', () => {
|
||||
return makeFakeJson('I am a program', {
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
}).then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals(['-O1', '-monkey', 'badger badger']);
|
||||
res.body.input.filters.should.deep.equals({a: true, b: true, c: true});
|
||||
});
|
||||
});
|
||||
|
||||
it('parses extra files', () => {
|
||||
return makeFakeWithExtraFilesJson(
|
||||
'I am a program',
|
||||
{
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
},
|
||||
[
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
],
|
||||
{},
|
||||
).then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals(['-O1', '-monkey', 'badger badger']);
|
||||
res.body.input.filters.should.deep.equals({a: true, b: true, c: true});
|
||||
res.body.input.files.should.deep.equals([
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('cmakes', () => {
|
||||
return makeFakeCmakeJson(
|
||||
'I am a program',
|
||||
{
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
},
|
||||
{},
|
||||
[
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
],
|
||||
).then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals({
|
||||
backendOptions: {},
|
||||
bypassCache: BypassCache.None,
|
||||
executeParameters: {
|
||||
args: [],
|
||||
runtimeTools: [],
|
||||
stdin: '',
|
||||
},
|
||||
filters: {
|
||||
a: true,
|
||||
b: true,
|
||||
c: true,
|
||||
},
|
||||
libraries: [],
|
||||
options: ['-O1', '-monkey', 'badger badger'],
|
||||
source: 'I am a program',
|
||||
tools: [],
|
||||
});
|
||||
res.body.input.files.should.deep.equals([
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Query API', () => {
|
||||
function makeFakeQuery(source, query, fakeResult) {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: fakeResult || {},
|
||||
},
|
||||
])
|
||||
.then(() =>
|
||||
chai
|
||||
.request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.query(query || {})
|
||||
.set('Accept', 'application/json')
|
||||
.send(source || ''),
|
||||
);
|
||||
}
|
||||
|
||||
it('error on empty request body', () => {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {},
|
||||
},
|
||||
])
|
||||
.then(() => chai.request(app).post('/fake-for-test/compile').set('Accept', 'application/json'))
|
||||
.then(res => {
|
||||
res.should.have.status(500);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('handles filters set directly', () => {
|
||||
return makeFakeQuery('source', {filters: 'a,b,c'})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals([]);
|
||||
res.body.input.filters.should.deep.equals({a: true, b: true, c: true});
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('handles filters added', () => {
|
||||
return makeFakeQuery('source', {filters: 'a', addFilters: 'e,f'})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals([]);
|
||||
res.body.input.filters.should.deep.equals({a: true, e: true, f: true});
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('handles filters removed', () => {
|
||||
return makeFakeQuery('source', {filters: 'a,b,c', removeFilters: 'b,c,d'})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals([]);
|
||||
res.body.input.filters.should.deep.equals({a: true});
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('handles filters added and removed', () => {
|
||||
return makeFakeQuery('source', {filters: 'a,b,c', addFilters: 'c,g,h', removeFilters: 'b,c,d,h'})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.input.options.should.deep.equals([]);
|
||||
res.body.input.filters.should.deep.equals({a: true, g: true});
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multi language', () => {
|
||||
function makeFakeJson(compiler, lang) {
|
||||
return compileHandler
|
||||
.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'a',
|
||||
lang: 'a',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG A'}]},
|
||||
},
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'b',
|
||||
lang: 'b',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG B'}]},
|
||||
},
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'a',
|
||||
lang: 'b',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG B but A'}]},
|
||||
},
|
||||
])
|
||||
.then(() =>
|
||||
chai.request(app).post(`/${compiler}/compile`).set('Accept', 'application/json').send({
|
||||
lang: lang,
|
||||
options: {},
|
||||
source: '',
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
it('finds without language', () => {
|
||||
return makeFakeJson('b', {})
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.asm.should.deep.equals([{text: 'LANG B'}]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('disambiguates by language, choosing A', () => {
|
||||
return makeFakeJson('a', 'a')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.asm.should.deep.equals([{text: 'LANG A'}]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
|
||||
it('disambiguates by language, choosing B', () => {
|
||||
return makeFakeJson('a', 'b')
|
||||
.then(res => {
|
||||
res.should.have.status(200);
|
||||
res.should.be.json;
|
||||
res.body.asm.should.deep.equals([{text: 'LANG B but A'}]);
|
||||
})
|
||||
.catch(err => {
|
||||
throw err;
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
420
test/handlers/compile-tests.ts
Normal file
@@ -0,0 +1,420 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import bodyParser from 'body-parser';
|
||||
import express from 'express';
|
||||
import request from 'supertest';
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {CompileHandler, SetTestMode} from '../../lib/handlers/compile.js';
|
||||
import {fakeProps} from '../../lib/properties.js';
|
||||
import {BypassCache} from '../../types/compilation/compilation.interfaces.js';
|
||||
import {makeCompilationEnvironment} from '../utils.js';
|
||||
|
||||
SetTestMode();
|
||||
|
||||
const languages = {
|
||||
a: {id: 'a', name: 'A lang'},
|
||||
b: {id: 'b', name: 'B lang'},
|
||||
d: {id: 'd', name: 'D lang'},
|
||||
};
|
||||
|
||||
describe('Compiler tests', () => {
|
||||
let app, compileHandler;
|
||||
|
||||
beforeAll(() => {
|
||||
const compilationEnvironment = makeCompilationEnvironment({languages});
|
||||
compileHandler = new CompileHandler(compilationEnvironment, fakeProps({}));
|
||||
|
||||
const textParser = bodyParser.text({type: () => true});
|
||||
const formParser = bodyParser.urlencoded({extended: false});
|
||||
|
||||
app = express();
|
||||
app.use(bodyParser.json());
|
||||
|
||||
app.post('/noscript/compile', formParser, compileHandler.handle.bind(compileHandler));
|
||||
app.post('/:compiler/compile', textParser, compileHandler.handle.bind(compileHandler));
|
||||
app.post('/:compiler/cmake', compileHandler.handleCmake.bind(compileHandler));
|
||||
});
|
||||
|
||||
it('throws for unknown compilers', async () => {
|
||||
await request(app).post('/NOT_A_COMPILER/compile').expect(404);
|
||||
});
|
||||
|
||||
describe('Noscript API', () => {
|
||||
it('supports compile', async () => {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
]);
|
||||
const res = await request(app)
|
||||
.post('/noscript/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('compiler=fake-for-test&source=I am a program')
|
||||
.expect(200)
|
||||
.expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Something from stdout');
|
||||
expect(res.text).toContain('Something from stderr');
|
||||
expect(res.text).toContain('ASMASMASM');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Curl API', () => {
|
||||
it('supports compile', async () => {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
]);
|
||||
const res = await request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('I am a program /* &compiler=NOT_A_COMPILER&source=etc */')
|
||||
.expect(200)
|
||||
.expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Something from stdout');
|
||||
expect(res.text).toContain('Something from stderr');
|
||||
expect(res.text).toContain('ASMASMASM');
|
||||
});
|
||||
|
||||
it('supports alias compile', async () => {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
id: 'newcompilerid',
|
||||
alias: ['oldid1', 'oldid2'],
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
]);
|
||||
const res = await request(app)
|
||||
.post('/oldid1/compile')
|
||||
.set('Content-Type', 'application/x-www-form-urlencoded')
|
||||
.send('I am a program /* &compiler=NOT_A_COMPILER&source=etc */')
|
||||
.expect(200)
|
||||
.expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Something from stdout');
|
||||
expect(res.text).toContain('Something from stderr');
|
||||
expect(res.text).toContain('ASMASMASM');
|
||||
});
|
||||
});
|
||||
|
||||
async function setFakeResult(fakeResult?: any) {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: fakeResult || {},
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
describe('JSON API', () => {
|
||||
it('handles text output', async () => {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
exe: 'fake',
|
||||
fakeResult: {
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
},
|
||||
},
|
||||
]);
|
||||
const res = await request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.send({
|
||||
options: {},
|
||||
source: 'I am a program',
|
||||
})
|
||||
.expect(200)
|
||||
.expect('Content-Type', /text/);
|
||||
expect(res.text).toContain('Something from stdout');
|
||||
expect(res.text).toContain('Something from stderr');
|
||||
expect(res.text).toContain('ASMASMASM');
|
||||
});
|
||||
|
||||
function makeFakeJson(source: string, options?: any) {
|
||||
return request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
});
|
||||
}
|
||||
|
||||
function makeFakeWithExtraFilesJson(source: string, options?: any, files?: any) {
|
||||
return request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
files: files || [],
|
||||
});
|
||||
}
|
||||
|
||||
function makeFakeCmakeJson(source: string, options?: any, files?: any) {
|
||||
return request(app)
|
||||
.post('/fake-for-test/cmake')
|
||||
.set('Accept', 'application/json')
|
||||
.send({
|
||||
options: options || {},
|
||||
source: source || '',
|
||||
files: files || [],
|
||||
});
|
||||
}
|
||||
|
||||
it('handles JSON output', async () => {
|
||||
await setFakeResult({
|
||||
code: 0,
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
});
|
||||
await makeFakeJson('I am a program')
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200, {
|
||||
asm: [{text: 'ASMASMASM'}],
|
||||
code: 0,
|
||||
input: {
|
||||
backendOptions: {},
|
||||
filters: {},
|
||||
options: [],
|
||||
source: 'I am a program',
|
||||
},
|
||||
stderr: [{text: 'Something from stderr'}],
|
||||
stdout: [{text: 'Something from stdout'}],
|
||||
});
|
||||
});
|
||||
|
||||
it('parses options and filters', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeJson('I am a program', {
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
})
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual(['-O1', '-monkey', 'badger badger']);
|
||||
expect(res.body.input.filters).toEqual({a: true, b: true, c: true});
|
||||
});
|
||||
|
||||
it('parses extra files', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeWithExtraFilesJson(
|
||||
'I am a program',
|
||||
{
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
},
|
||||
[
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
],
|
||||
)
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual(['-O1', '-monkey', 'badger badger']);
|
||||
expect(res.body.input.filters).toEqual({a: true, b: true, c: true});
|
||||
expect(res.body.input.files).toEqual([
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('cmakes', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeCmakeJson(
|
||||
'I am a program',
|
||||
{
|
||||
userArguments: '-O1 -monkey "badger badger"',
|
||||
filters: {a: true, b: true, c: true},
|
||||
},
|
||||
[
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
],
|
||||
)
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual({
|
||||
backendOptions: {},
|
||||
bypassCache: BypassCache.None,
|
||||
executeParameters: {
|
||||
args: [],
|
||||
runtimeTools: [],
|
||||
stdin: '',
|
||||
},
|
||||
filters: {
|
||||
a: true,
|
||||
b: true,
|
||||
c: true,
|
||||
},
|
||||
libraries: [],
|
||||
options: ['-O1', '-monkey', 'badger badger'],
|
||||
source: 'I am a program',
|
||||
tools: [],
|
||||
});
|
||||
expect(res.body.input.files).toEqual([
|
||||
{
|
||||
filename: 'myresource.txt',
|
||||
contents: 'Hello, World!\nHow are you?\n',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Query API', () => {
|
||||
function makeFakeQuery(source?: any, query?: any) {
|
||||
return request(app)
|
||||
.post('/fake-for-test/compile')
|
||||
.query(query || {})
|
||||
.set('Accept', 'application/json')
|
||||
.send(source || '');
|
||||
}
|
||||
|
||||
it('error on empty request body', async () => {
|
||||
await setFakeResult();
|
||||
await request(app).post('/fake-for-test/compile').set('Accept', 'application/json').expect(500);
|
||||
});
|
||||
|
||||
it('handles filters set directly', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeQuery('source', {filters: 'a,b,c'}).expect('Content-Type', /json/).expect(200);
|
||||
expect(res.body.input.options).toEqual([]);
|
||||
expect(res.body.input.filters).toEqual({a: true, b: true, c: true});
|
||||
});
|
||||
|
||||
it('handles filters added', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeQuery('source', {filters: 'a', addFilters: 'e,f'})
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual([]);
|
||||
expect(res.body.input.filters).toEqual({a: true, e: true, f: true});
|
||||
});
|
||||
|
||||
it('handles filters removed', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeQuery('source', {filters: 'a,b,c', removeFilters: 'b,c,d'})
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual([]);
|
||||
expect(res.body.input.filters).toEqual({a: true});
|
||||
});
|
||||
|
||||
it('handles filters added and removed', async () => {
|
||||
await setFakeResult();
|
||||
const res = await makeFakeQuery('source', {filters: 'a,b,c', addFilters: 'c,g,h', removeFilters: 'b,c,d,h'})
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200);
|
||||
expect(res.body.input.options).toEqual([]);
|
||||
expect(res.body.input.filters).toEqual({a: true, g: true});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multi language', () => {
|
||||
async function setFakeCompilers() {
|
||||
await compileHandler.setCompilers([
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'a',
|
||||
lang: 'a',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG A'}]},
|
||||
},
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'b',
|
||||
lang: 'b',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG B'}]},
|
||||
},
|
||||
{
|
||||
compilerType: 'fake-for-test',
|
||||
id: 'a',
|
||||
lang: 'b',
|
||||
exe: 'fake',
|
||||
fakeResult: {code: 0, stdout: [], stderr: [], asm: [{text: 'LANG B but A'}]},
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
function makeFakeJson(compiler: string, lang: any) {
|
||||
return request(app).post(`/${compiler}/compile`).set('Accept', 'application/json').send({
|
||||
lang: lang,
|
||||
options: {},
|
||||
source: '',
|
||||
});
|
||||
}
|
||||
|
||||
it('finds without language', async () => {
|
||||
await setFakeCompilers();
|
||||
const res = await makeFakeJson('b', {}).expect('Content-Type', /json/).expect(200);
|
||||
expect(res.body.asm).toEqual([{text: 'LANG B'}]);
|
||||
});
|
||||
|
||||
it('disambiguates by language, choosing A', async () => {
|
||||
await setFakeCompilers();
|
||||
const res = await makeFakeJson('b', 'a').expect('Content-Type', /json/).expect(200);
|
||||
expect(res.body.asm).toEqual([{text: 'LANG B'}]);
|
||||
});
|
||||
|
||||
it('disambiguates by language, choosing B', async () => {
|
||||
await setFakeCompilers();
|
||||
const res = await makeFakeJson('a', 'b');
|
||||
expect(res.body.asm).toEqual([{text: 'LANG B but A'}]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -24,25 +24,24 @@

import express from 'express';
import mockfs from 'mock-fs';
import request from 'supertest';
import {afterAll, beforeAll, beforeEach, describe, expect, it} from 'vitest';

import {CompilationQueue} from '../../lib/compilation-queue.js';
import {HealthCheckHandler} from '../../lib/handlers/health-check.js';
import {chai} from '../utils.js';

describe('Health checks', () => {
    let app;
    let compilationQueue;

    beforeEach(() => {
        compilationQueue = new CompilationQueue(1);
        compilationQueue = new CompilationQueue(1, 0, 0);
        app = express();
        app.use('/hc', new HealthCheckHandler(compilationQueue).handle);
        app.use('/hc', new HealthCheckHandler(compilationQueue, '').handle);
    });

    it('should respond with OK', async () => {
        const res = await chai.request(app).get('/hc');
        res.should.have.status(200);
        res.text.should.be.eql('Everything is awesome');
        await request(app).get('/hc').expect(200, 'Everything is awesome');
    });

    it('should use compilation queue', async () => {
@@ -50,16 +49,16 @@ describe('Health checks', () => {
        compilationQueue._queue.on('active', () => {
            count++;
        });
        await chai.request(app).get('/hc');
        count.should.be.eql(1);
        await request(app).get('/hc');
        expect(count).toEqual(1);
    });
});

describe('Health checks on disk', () => {
    let app;

    before(() => {
        const compilationQueue = new CompilationQueue(1);
    beforeAll(() => {
        const compilationQueue = new CompilationQueue(1, 0, 0);

        app = express();
        app.use('/hc', new HealthCheckHandler(compilationQueue, '/fake/.nonexist').handle);
@@ -72,18 +71,15 @@ describe('Health checks on disk', () => {
        });
    });

    after(() => {
    afterAll(() => {
        mockfs.restore();
    });

    it('should respond with 500 when file not found', async () => {
        const res = await chai.request(app).get('/hc');
        res.should.have.status(500);
        await request(app).get('/hc').expect(500);
    });

    it('should respond with OK and file contents when found', async () => {
        const res = await chai.request(app).get('/hc2');
        res.should.have.status(200);
        res.text.should.be.eql('Everything is fine');
        await request(app).get('/hc2').expect(200, 'Everything is fine');
    });
});
@@ -1,16 +1,16 @@
import {assert} from 'chai';
import {beforeAll, describe, expect, it} from 'vitest';

import {getSiteTemplates, loadSiteTemplates} from '../../lib/handlers/site-templates.js';

describe('Site Templates Backend', () => {
    before(() => {
    beforeAll(() => {
        loadSiteTemplates('etc/config');
    });

    it('should load site templates properly', () => {
        const templates = getSiteTemplates();
        // not super comprehensive
        assert(templates.meta['meta.screenshot_dimentions'] !== undefined);
        assert(Object.entries(templates.templates).length > 0);
        expect(templates.meta).toHaveProperty('meta.screenshot_dimentions');
        expect(Object.entries(templates.templates).length).toBeTruthy();
    });
});

@@ -23,17 +23,19 @@
// POSSIBILITY OF SUCH DAMAGE.

import express from 'express';
import request from 'supertest';
import {describe, expect, it} from 'vitest';

import {SourceHandler} from '../../lib/handlers/source.js';
import {chai} from '../utils.js';

describe('Sources', () => {
    const app = express();
    const handler = new SourceHandler(
        [
            {
                name: 'moose',
                urlpart: 'moose',
                list: () => Promise.resolve({moose: 'pig'}),
                list: async () => [{file: 'file', lang: 'lang', name: 'name'}],
                load: name => Promise.resolve({file: `File called ${name}`}),
            },
        ],
@@ -41,32 +43,18 @@ describe('Sources', () => {
    );
    app.use('/source', handler.handle.bind(handler));

    it('should list', () => {
        return chai
            .request(app)
    it('should list', async () => {
        const res = await request(app)
            .get('/source/moose/list')
            .then(res => {
                res.should.have.status(200);
                res.should.be.json;
                res.body.should.deep.equals({moose: 'pig'});
                res.should.have.header('Yibble', 'boing');
            })
            .catch(function (err) {
                throw err;
            });
            .expect('Content-Type', /json/)
            .expect(200, [{file: 'file', lang: 'lang', name: 'name'}]);
        expect(res.headers['yibble']).toEqual('boing');
    });
    it('should fetch files', () => {
        return chai
            .request(app)
    it('should fetch files', async () => {
        const res = await request(app)
            .get('/source/moose/load/Grunkle')
            .then(res => {
                res.should.have.status(200);
                res.should.be.json;
                res.body.should.deep.equals({file: 'File called Grunkle'});
                res.should.have.header('Yibble', 'boing');
            })
            .catch(function (err) {
                throw err;
            });
            .expect('Content-Type', /json/)
            .expect(200, {file: 'File called Grunkle'});
        expect(res.headers['yibble']).toEqual('boing');
    });
});
@@ -22,33 +22,35 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {InstructionSets} from '../lib/instructionsets.js';

describe('InstructionSets', async () => {
    it('should recognize aarch64 for clang target', async () => {
        const isets = new InstructionSets();

        return isets
            .getCompilerInstructionSetHint('aarch64-linux-gnu', '/opt/compiler-explorer/clang-11.0.1/bin/clang++')
            .should.eventually.equal('aarch64');
        await expect(
            isets.getCompilerInstructionSetHint('aarch64-linux-gnu', '/opt/compiler-explorer/clang-11.0.1/bin/clang++'),
        ).resolves.toEqual('aarch64');
    });

    it('should recognize gcc aarch64 from filepath', async () => {
        const isets = new InstructionSets();

        return isets
            .getCompilerInstructionSetHint(
        await expect(
            isets.getCompilerInstructionSetHint(
                false,
                '/opt/compiler-explorer/arm64/gcc-12.1.0/aarch64-unknown-linux-gnu/bin/aarch64-unknown-linux-gnu-g++',
            )
            .should.eventually.equal('aarch64');
            ),
        ).resolves.toEqual('aarch64');
    });

    it('should default to amd64 when not apparant', async () => {
    it('should default to amd64 when not apparent', async () => {
        const isets = new InstructionSets();

        return isets
            .getCompilerInstructionSetHint(false, '/opt/compiler-explorer/gcc-12.2.0/bin/g++')
            .should.eventually.equal('amd64');
        await expect(
            isets.getCompilerInstructionSetHint(false, '/opt/compiler-explorer/gcc-12.2.0/bin/g++'),
        ).resolves.toEqual('amd64');
    });
});

@@ -22,6 +22,8 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {CompilationEnvironment} from '../lib/compilation-env.js';
|
||||
import {JavaCompiler} from '../lib/compilers/index.js';
|
||||
import * as utils from '../lib/utils.js';
|
||||
@@ -40,67 +42,71 @@ const info = {
|
||||
lang: languages.java.id,
|
||||
} as unknown as CompilerInfo;
|
||||
|
||||
describe('Basic compiler setup', function () {
|
||||
describe('Basic compiler setup', () => {
|
||||
let env: CompilationEnvironment;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
env = makeCompilationEnvironment({languages});
|
||||
});
|
||||
|
||||
it('Should not crash on instantiation', function () {
|
||||
it('Should not crash on instantiation', () => {
|
||||
new JavaCompiler(info, env);
|
||||
});
|
||||
|
||||
it('should ignore second param for getOutputFilename', function () {
|
||||
it('should ignore second param for getOutputFilename', () => {
|
||||
// Because javac produces a class files based on user provided class names,
|
||||
// it's not possible to determine the main class file before compilation/parsing
|
||||
const compiler = new JavaCompiler(info, env);
|
||||
if (process.platform === 'win32') {
|
||||
compiler.getOutputFilename('/tmp/').should.equal('\\tmp\\example.class');
|
||||
expect(compiler.getOutputFilename('/tmp/')).toEqual('\\tmp\\example.class');
|
||||
} else {
|
||||
compiler.getOutputFilename('/tmp/').should.equal('/tmp/example.class');
|
||||
expect(compiler.getOutputFilename('/tmp/')).toEqual('/tmp/example.class');
|
||||
}
|
||||
});
|
||||
|
||||
describe('Forbidden compiler arguments', function () {
|
||||
describe('Forbidden compiler arguments', () => {
|
||||
it('JavaCompiler should not allow -d parameter', () => {
|
||||
const compiler = new JavaCompiler(info, env);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '-d', '--something', '--something-else'])
|
||||
.should.deep.equal(['hello', '--something-else']);
|
||||
compiler.filterUserOptions(['hello', '-d']).should.deep.equal(['hello']);
|
||||
compiler.filterUserOptions(['-d', 'something', 'something-else']).should.deep.equal(['something-else']);
|
||||
expect(compiler.filterUserOptions(['hello', '-d', '--something', '--something-else'])).toEqual([
|
||||
'hello',
|
||||
'--something-else',
|
||||
]);
|
||||
expect(compiler.filterUserOptions(['hello', '-d'])).toEqual(['hello']);
|
||||
expect(compiler.filterUserOptions(['-d', 'something', 'something-else'])).toEqual(['something-else']);
|
||||
});
|
||||
|
||||
it('JavaCompiler should not allow -s parameter', () => {
|
||||
const compiler = new JavaCompiler(info, env);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '-s', '--something', '--something-else'])
|
||||
.should.deep.equal(['hello', '--something-else']);
|
||||
compiler.filterUserOptions(['hello', '-s']).should.deep.equal(['hello']);
|
||||
compiler.filterUserOptions(['-s', 'something', 'something-else']).should.deep.equal(['something-else']);
|
||||
expect(compiler.filterUserOptions(['hello', '-s', '--something', '--something-else'])).toEqual([
|
||||
'hello',
|
||||
'--something-else',
|
||||
]);
|
||||
expect(compiler.filterUserOptions(['hello', '-s'])).toEqual(['hello']);
|
||||
expect(compiler.filterUserOptions(['-s', 'something', 'something-else'])).toEqual(['something-else']);
|
||||
});
|
||||
|
||||
it('JavaCompiler should not allow --source-path parameter', () => {
|
||||
const compiler = new JavaCompiler(info, env);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '--source-path', '--something', '--something-else'])
|
||||
.should.deep.equal(['hello', '--something-else']);
|
||||
compiler.filterUserOptions(['hello', '--source-path']).should.deep.equal(['hello']);
|
||||
compiler
|
||||
.filterUserOptions(['--source-path', 'something', 'something-else'])
|
||||
.should.deep.equal(['something-else']);
|
||||
expect(compiler.filterUserOptions(['hello', '--source-path', '--something', '--something-else'])).toEqual([
|
||||
'hello',
|
||||
'--something-else',
|
||||
]);
|
||||
expect(compiler.filterUserOptions(['hello', '--source-path'])).toEqual(['hello']);
|
||||
expect(compiler.filterUserOptions(['--source-path', 'something', 'something-else'])).toEqual([
|
||||
'something-else',
|
||||
]);
|
||||
});
|
||||
|
||||
it('JavaCompiler should not allow -sourcepath parameter', () => {
|
||||
const compiler = new JavaCompiler(info, env);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '-sourcepath', '--something', '--something-else'])
|
||||
.should.deep.equal(['hello', '--something-else']);
|
||||
compiler.filterUserOptions(['hello', '-sourcepath']).should.deep.equal(['hello']);
|
||||
compiler
|
||||
.filterUserOptions(['-sourcepath', 'something', 'something-else'])
|
||||
.should.deep.equal(['something-else']);
|
||||
expect(compiler.filterUserOptions(['hello', '-sourcepath', '--something', '--something-else'])).toEqual([
|
||||
'hello',
|
||||
'--something-else',
|
||||
]);
|
||||
expect(compiler.filterUserOptions(['hello', '-sourcepath'])).toEqual(['hello']);
|
||||
expect(compiler.filterUserOptions(['-sourcepath', 'something', 'something-else'])).toEqual([
|
||||
'something-else',
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -108,7 +114,7 @@ describe('Basic compiler setup', function () {
|
||||
describe('javap parsing', () => {
|
||||
let compiler: JavaCompiler;
|
||||
let env: CompilationEnvironment;
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
env = makeCompilationEnvironment({languages});
|
||||
compiler = new JavaCompiler(info, env);
|
||||
});
|
||||
@@ -143,9 +149,9 @@ describe('javap parsing', () => {
|
||||
};
|
||||
|
||||
const processed = await compiler.processAsm(result);
|
||||
processed.should.have.property('asm');
|
||||
expect(processed).toHaveProperty('asm');
|
||||
const asmSegments = (processed as {asm: ParsedAsmResultLine[]}).asm;
|
||||
asmSegments.should.deep.equal(expectedSegments);
|
||||
expect(asmSegments).toEqual(expectedSegments);
|
||||
}
|
||||
|
||||
it('should handle errors', async () => {
|
||||
@@ -153,7 +159,7 @@ describe('javap parsing', () => {
|
||||
asm: '<Compilation failed>',
|
||||
};
|
||||
|
||||
(await compiler.processAsm(result)).should.deep.equal([{text: '<Compilation failed>', source: null}]);
|
||||
await expect(compiler.processAsm(result)).resolves.toEqual([{text: '<Compilation failed>', source: null}]);
|
||||
});
|
||||
|
||||
it('Parses simple class with one method', () => {
|
||||
|
||||
@@ -22,22 +22,24 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {languages} from '../lib/languages.js';

import {fs, path, should} from './utils.js';
import {fs, path} from './utils.js';

describe('Language definitions tests', () => {
    it('Has id equal to object key', () => {
        for (const languageKey of Object.keys(languages)) should.equal(languages[languageKey].id, languageKey);
        for (const languageKey of Object.keys(languages)) expect(languages[languageKey].id).toEqual(languageKey);
    });
    it('Has extensions with leading dots', () => {
        for (const languageKey of Object.keys(languages)) should.equal(languages[languageKey].extensions[0][0], '.');
        for (const languageKey of Object.keys(languages)) expect(languages[languageKey].extensions[0][0]).toEqual('.');
    });
    it('Has examples & are initialized', () => {
        for (const languageKey of Object.keys(languages)) {
            const lang = languages[languageKey];
            const example = fs.readFileSync(path.join('examples', lang.id, 'default' + lang.extensions[0]), 'utf8');
            should.equal(example, lang.example);
            expect(example).toEqual(lang.example);
        }
    });
});

@@ -20,6 +20,7 @@
|
||||
import path from 'path';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {BaseCompiler} from '../lib/base-compiler.js';
|
||||
import {BuildEnvSetupBase} from '../lib/buildenvsetup/base.js';
|
||||
@@ -56,7 +57,7 @@ describe('Library directories (c++)', () => {
|
||||
libsArr: ['fmt.10', 'qt.660', 'cpptrace.030'],
|
||||
};
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
ce = makeCompilationEnvironment({languages});
|
||||
compiler = new BaseCompiler(info as CompilerInfo, ce);
|
||||
(compiler as any).buildenvsetup = new BuildEnvSetupBase(info as CompilerInfo, ce);
|
||||
@@ -111,7 +112,7 @@ describe('Library directories (c++)', () => {
|
||||
|
||||
it('should add libpaths and link to libraries', () => {
|
||||
const links = compiler.getSharedLibraryLinks([{id: 'fmt', version: '10'}]);
|
||||
links.should.include('-lfmtd');
|
||||
expect(links).toContain('-lfmtd');
|
||||
|
||||
const fmtpaths = (compiler as any).getSharedLibraryPathsAsArguments(
|
||||
[{id: 'fmt', version: '10'}],
|
||||
@@ -119,7 +120,7 @@ describe('Library directories (c++)', () => {
|
||||
undefined,
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
fmtpaths.should.include('-L./lib');
|
||||
expect(fmtpaths).toContain('-L./lib');
|
||||
|
||||
const qtpaths = (compiler as any).getSharedLibraryPathsAsArguments(
|
||||
[{id: 'qt', version: '660'}],
|
||||
@@ -127,7 +128,7 @@ describe('Library directories (c++)', () => {
|
||||
undefined,
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
qtpaths.should.include('-L/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
expect(qtpaths).toContain('-L/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
});
|
||||
|
||||
it('should add libpaths and link to libraries when using nsjail', () => {
|
||||
@@ -139,7 +140,7 @@ describe('Library directories (c++)', () => {
|
||||
undefined,
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
fmtpaths.should.include('-L/tmp/compiler-explorer-compiler-123/fmt/lib');
|
||||
expect(fmtpaths).toContain('-L/tmp/compiler-explorer-compiler-123/fmt/lib');
|
||||
|
||||
const qtpaths = (compiler as any).getSharedLibraryPathsAsArguments(
|
||||
[{id: 'qt', version: '660'}],
|
||||
@@ -147,7 +148,7 @@ describe('Library directories (c++)', () => {
|
||||
undefined,
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
qtpaths.should.include('-L/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
expect(qtpaths).toContain('-L/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
});
|
||||
|
||||
it('should add extra include paths when using packagedheaders', () => {
|
||||
@@ -157,15 +158,15 @@ describe('Library directories (c++)', () => {
|
||||
[{id: 'fmt', version: '10'}],
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
fmtpaths.should.not.include('-I/tmp/compiler-explorer-compiler-123/fmt/include');
|
||||
fmtpaths.should.include('-I/opt/compiler-explorer/libs/fmt/1.0/include');
|
||||
expect(fmtpaths).not.toContain('-I/tmp/compiler-explorer-compiler-123/fmt/include');
|
||||
expect(fmtpaths).toContain('-I/opt/compiler-explorer/libs/fmt/1.0/include');
|
||||
|
||||
const qtpaths = (compiler as any).getIncludeArguments(
|
||||
[{id: 'qt', version: '660'}],
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
qtpaths.should.include('-I/opt/compiler-explorer/libs/qt/6.6.0/include');
|
||||
qtpaths.should.include('-I/tmp/compiler-explorer-compiler-123/qt/include');
|
||||
expect(qtpaths).toContain('-I/opt/compiler-explorer/libs/qt/6.6.0/include');
|
||||
expect(qtpaths).toContain('-I/tmp/compiler-explorer-compiler-123/qt/include');
|
||||
});
|
||||
|
||||
it('should set LD_LIBRARY_PATH when executing', () => {
|
||||
@@ -175,17 +176,17 @@ describe('Library directories (c++)', () => {
|
||||
[{id: 'qt', version: '660'}],
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
qtpaths.should.include('/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
expect(qtpaths).toContain('/tmp/compiler-explorer-compiler-123/qt/lib');
|
||||
});
|
||||
|
||||
it('should add libpaths and link when statically linking', () => {
|
||||
(compiler as any).executionType = 'nsjail';
|
||||
|
||||
const staticlinks = compiler.getStaticLibraryLinks([{id: 'cpptrace', version: '030'}], []);
|
||||
staticlinks.should.include('-lcpptrace');
|
||||
staticlinks.should.include('-ldwarf');
|
||||
staticlinks.should.include('-ldl');
|
||||
staticlinks.should.include('-lz');
|
||||
expect(staticlinks).toContain('-lcpptrace');
|
||||
expect(staticlinks).toContain('-ldwarf');
|
||||
expect(staticlinks).toContain('-ldl');
|
||||
expect(staticlinks).toContain('-lz');
|
||||
|
||||
const libpaths = (compiler as any).getSharedLibraryPathsAsArguments(
|
||||
[{id: 'cpptrace', version: '030'}],
|
||||
@@ -193,7 +194,7 @@ describe('Library directories (c++)', () => {
|
||||
undefined,
|
||||
'/tmp/compiler-explorer-compiler-123',
|
||||
);
|
||||
libpaths.should.include('-L/tmp/compiler-explorer-compiler-123/cpptrace/lib');
|
||||
expect(libpaths).toContain('-L/tmp/compiler-explorer-compiler-123/cpptrace/lib');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -214,7 +215,7 @@ describe('Library directories (fortran)', () => {
|
||||
libsArr: ['json_fortran.830', 'curl.7831'],
|
||||
};
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
ce = makeCompilationEnvironment({languages});
|
||||
compiler = new FortranCompiler(info as CompilerInfo, ce);
|
||||
(compiler as any).buildenvsetup = new BuildEnvSetupBase(info as CompilerInfo, ce);
|
||||
@@ -262,12 +263,12 @@ describe('Library directories (fortran)', () => {
|
||||
await fs.mkdir(libPath, {recursive: true});
|
||||
|
||||
const libPaths = compiler.getSharedLibraryPaths([{id: 'json_fortran', version: '830'}], dirPath);
|
||||
libPaths.should.include(libPath);
|
||||
expect(libPaths).toContain(libPath);
|
||||
|
||||
const libJsonFilepath = path.join(libPath, 'libjson-fortran.a');
|
||||
|
||||
const failedLinks = compiler.getStaticLibraryLinks([{id: 'json_fortran', version: '830'}], libPaths);
|
||||
failedLinks.should.not.include(libJsonFilepath);
|
||||
expect(failedLinks).not.toContain(libJsonFilepath);
|
||||
});
|
||||
|
||||
it('should add libpaths and link to libraries', async () => {
|
||||
@@ -279,13 +280,13 @@ describe('Library directories (fortran)', () => {
|
||||
const libJsonFilepath = path.join(libPath, 'libjson-fortran.a');
|
||||
|
||||
const libPaths = compiler.getSharedLibraryPaths([{id: 'json_fortran', version: '830'}], dirPath);
|
||||
libPaths.should.include(libPath);
|
||||
expect(libPaths).toContain(libPath);
|
||||
|
||||
await fs.writeFile(libJsonFilepath, 'hello, world!');
|
||||
|
||||
// the file is now here and Should be linked to
|
||||
const links = compiler.getStaticLibraryLinks([{id: 'json_fortran', version: '830'}], libPaths);
|
||||
links.should.include(libJsonFilepath);
|
||||
expect(links).toContain(libJsonFilepath);
|
||||
|
||||
const paths = (compiler as any).getSharedLibraryPathsAsArguments(
|
||||
[{id: 'json_fortran', version: '830'}],
|
||||
@@ -293,7 +294,7 @@ describe('Library directories (fortran)', () => {
|
||||
undefined,
|
||||
dirPath,
|
||||
);
|
||||
paths.should.include('-L' + libPath);
|
||||
expect(paths).toContain('-L' + libPath);
|
||||
});
|
||||
|
||||
it('should add includes for packaged libraries', async () => {
|
||||
@@ -305,8 +306,8 @@ describe('Library directories (fortran)', () => {
|
||||
const cInclude = path.join(dirPath, 'json_fortran/include');
|
||||
|
||||
const paths = (compiler as any).getIncludeArguments([{id: 'json_fortran', version: '830'}], dirPath);
|
||||
paths.should.include('-I' + fortranInclude);
|
||||
paths.should.include('-isystem' + cInclude);
|
||||
expect(paths).toContain('-I' + fortranInclude);
|
||||
expect(paths).toContain('-isystem' + cInclude);
|
||||
});
|
||||
|
||||
it('should add includes for non-packaged C libraries', async () => {
|
||||
@@ -316,6 +317,6 @@ describe('Library directories (fortran)', () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
|
||||
const paths = (compiler as any).getIncludeArguments([{id: 'curl', version: '7831'}], dirPath);
|
||||
paths.should.include('-isystem/opt/compiler-explorer/libs/curl/7.83.1/include');
|
||||
expect(paths).toContain('-isystem/opt/compiler-explorer/libs/curl/7.83.1/include');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -23,12 +23,13 @@
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import cloneDeep from 'lodash.clonedeep';
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {LlvmAstParser} from '../lib/llvm-ast.js';
|
||||
import * as properties from '../lib/properties.js';
|
||||
import * as utils from '../lib/utils.js';
|
||||
|
||||
import {fs, should} from './utils.js';
|
||||
import {fs} from './utils.js';
|
||||
|
||||
const languages = {
|
||||
'c++': {id: 'c++'},
|
||||
@@ -38,7 +39,7 @@ function mockAstOutput(astLines) {
|
||||
return {stdout: astLines.map(l => ({text: l}))};
|
||||
}
|
||||
|
||||
describe('llvm-ast', function () {
|
||||
describe('llvm-ast', () => {
|
||||
let compilerProps;
|
||||
let astParser;
|
||||
let astDump;
|
||||
@@ -46,7 +47,7 @@ describe('llvm-ast', function () {
|
||||
let astDumpWithCTime;
|
||||
let astDumpNestedDecl1346;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
@@ -60,60 +61,61 @@ describe('llvm-ast', function () {
|
||||
it('keeps fewer lines than the original', () => {
|
||||
const origHeight = astDump.length;
|
||||
const processed = astParser.processAst(cloneDeep(compilerOutput));
|
||||
processed.length.should.be.below(origHeight);
|
||||
expect(processed.length).toBeLessThan(origHeight);
|
||||
});
|
||||
|
||||
it('removes invalid slocs', () => {
|
||||
const processed = astParser.processAst(cloneDeep(compilerOutput));
|
||||
astDump.should.match(/<invalid sloc>/);
|
||||
expect(astDump).toMatch(/<invalid sloc>/);
|
||||
const fullText = processed.map(l => l.text).join('\n');
|
||||
fullText.should.not.match(/<invalid sloc>/);
|
||||
expect(fullText).not.toMatch(/<invalid sloc>/);
|
||||
});
|
||||
|
||||
it('keeps reasonable-sized output', () => {
|
||||
astDumpWithCTime.length.should.be.above(200);
|
||||
expect(astDumpWithCTime.length).toBeGreaterThan(200);
|
||||
|
||||
const output = mockAstOutput(astDumpWithCTime);
|
||||
const processed = astParser.processAst(output);
|
||||
processed.length.should.be.below(200);
|
||||
expect(processed.length).toBeLessThan(200);
|
||||
});
|
||||
|
||||
it('links some source lines', () => {
|
||||
should.exist(compilerOutput.stdout.find(l => l.text.match(/col:21, line:4:1/)));
|
||||
should.exist(compilerOutput.stdout.find(l => l.text.match(/line:3:5, col:18/)));
|
||||
expect(compilerOutput.stdout.find(l => l.text.match(/col:21, line:4:1/))).toBeTruthy();
|
||||
expect(compilerOutput.stdout.find(l => l.text.match(/line:3:5, col:18/))).toBeTruthy();
|
||||
const processed = astParser.processAst(cloneDeep(compilerOutput));
|
||||
should.exist(processed.find(l => l.source && 0 < l.source.from.line));
|
||||
processed.find(l => l.text.match(/col:21, line:4:1/)).source.to.line.should.equal(4);
|
||||
processed.find(l => l.text.match(/col:21, line:4:1/)).source.to.col.should.equal(1);
|
||||
processed.find(l => l.text.match(/col:21, line:4:1/)).source.from.col.should.equal(21);
|
||||
processed.find(l => l.text.match(/line:3:5, col:18/)).source.from.line.should.equal(3);
|
||||
processed.find(l => l.text.match(/line:3:5, col:18/)).source.from.col.should.equal(5);
|
||||
processed.find(l => l.text.match(/line:3:5, col:18/)).source.to.line.should.equal(3);
|
||||
processed.find(l => l.text.match(/line:3:5, col:18/)).source.to.col.should.equal(18);
|
||||
expect(processed.find(l => l.source && 0 < l.source.from.line)).toBeTruthy();
|
||||
expect(processed.find(l => l.text.match(/col:21, line:4:1/))).toMatchObject({
|
||||
source: {to: {line: 4, col: 1}, from: {line: 2, col: 21}},
|
||||
});
|
||||
expect(processed.find(l => l.text.match(/line:3:5, col:18/))).toMatchObject({
|
||||
source: {to: {line: 3, col: 18}, from: {line: 3, col: 5}},
|
||||
});
|
||||
// Here "from.line" is inherited from the parent "FunctionDecl <<source>:2:1, line:4:1>"
|
||||
processed.find(l => l.text.match(/CompoundStmt.*<col:21, line:4:1>/)).source.from.line.should.equal(2);
|
||||
expect(processed.find(l => l.text.match(/CompoundStmt.*<col:21, line:4:1>/))).toMatchObject({
|
||||
source: {from: {line: 2}},
|
||||
});
|
||||
});
|
||||
|
||||
it('does not truncate nested declarations', () => {
|
||||
// See https://github.com/compiler-explorer/compiler-explorer/issues/1346
|
||||
const output = mockAstOutput(astDumpNestedDecl1346);
|
||||
const processed = astParser.processAst(output);
|
||||
processed.length.should.be.above(2);
|
||||
should.exist(processed.find(l => l.text.match(/CXXRecordDecl.*struct x/)));
|
||||
should.exist(processed.find(l => l.text.match(/TypedefDecl.*struct x/)));
|
||||
should.exist(processed.find(l => l.text.match(/ElaboratedType/)));
|
||||
should.exist(processed.find(l => l.text.match(/RecordType/)));
|
||||
should.exist(processed.find(l => l.text.match(/CXXRecord/)));
|
||||
expect(processed.length).toBeGreaterThan(2);
|
||||
expect(processed.find(l => l.text.match(/CXXRecordDecl.*struct x/))).toBeTruthy();
|
||||
expect(processed.find(l => l.text.match(/TypedefDecl.*struct x/))).toBeTruthy();
|
||||
expect(processed.find(l => l.text.match(/ElaboratedType/))).toBeTruthy();
|
||||
expect(processed.find(l => l.text.match(/RecordType/))).toBeTruthy();
|
||||
expect(processed.find(l => l.text.match(/CXXRecord/))).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('llvm-ast bug-3849a', function () {
|
||||
describe('llvm-ast bug-3849a', () => {
|
||||
let compilerProps;
|
||||
let astParser;
|
||||
let astDump;
|
||||
let compilerOutput;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
@@ -124,17 +126,17 @@ describe('llvm-ast bug-3849a', function () {
|
||||
|
||||
it('should have more than 2 lines', () => {
|
||||
const processed = astParser.processAst(compilerOutput);
|
||||
processed.length.should.be.above(2);
|
||||
expect(processed.length).toBeGreaterThan(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('llvm-ast bug-3849b', function () {
|
||||
describe('llvm-ast bug-3849b', () => {
|
||||
let compilerProps;
|
||||
let astParser;
|
||||
let astDump;
|
||||
let compilerOutput;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
@@ -145,18 +147,18 @@ describe('llvm-ast bug-3849b', function () {
|
||||
|
||||
it('should have not too many lines', () => {
|
||||
const processed = astParser.processAst(compilerOutput);
|
||||
processed.length.should.be.above(200);
|
||||
processed.length.should.be.below(300);
|
||||
expect(processed.length).toBeGreaterThan(200);
|
||||
expect(processed.length).toBeLessThan(300);
|
||||
});
|
||||
});
|
||||
|
||||
describe('llvm-ast bug-5889', function () {
|
||||
describe('llvm-ast bug-5889', () => {
|
||||
let compilerProps;
|
||||
let astParser;
|
||||
let astDump;
|
||||
let compilerOutput;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
@@ -167,6 +169,6 @@ describe('llvm-ast bug-5889', function () {
|
||||
|
||||
it('should have not too many lines', () => {
|
||||
const processed = astParser.processAst(compilerOutput);
|
||||
processed.length.should.be.below(50);
|
||||
expect(processed.length).toBeLessThan(50);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,31 +22,29 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {LLVMIRDemangler} from '../lib/demangler/llvm.js';
|
||||
import {LlvmIrParser} from '../lib/llvm-ir.js';
|
||||
import * as properties from '../lib/properties.js';
|
||||
|
||||
import {chai} from './utils.js';
|
||||
|
||||
const expect = chai.expect;
|
||||
|
||||
const languages = {
|
||||
'c++': {id: 'c++'},
|
||||
};
|
||||
|
||||
describe('llvm-ir parseMetaNode', function () {
|
||||
describe('llvm-ir parseMetaNode', () => {
|
||||
let llvmIrParser;
|
||||
let compilerProps;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
llvmIrParser = new LlvmIrParser(compilerProps, undefined as unknown as LLVMIRDemangler);
|
||||
});
|
||||
|
||||
it('should parse DILocation node', function () {
|
||||
llvmIrParser.parseMetaNode('!60 = !DILocation(line: 9, column: 15, scope: !58)').should.deep.equal({
|
||||
it('should parse DILocation node', () => {
|
||||
expect(llvmIrParser.parseMetaNode('!60 = !DILocation(line: 9, column: 15, scope: !58)')).toEqual({
|
||||
metaType: 'Location',
|
||||
metaId: '!60',
|
||||
line: '9',
|
||||
@@ -55,57 +53,59 @@ describe('llvm-ir parseMetaNode', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse distinct DILexicalBlock', function () {
|
||||
llvmIrParser
|
||||
.parseMetaNode('!50 = distinct !DILexicalBlock(scope: !44, file: !1, line: 8, column: 5)')
|
||||
.should.deep.equal({
|
||||
metaType: 'LexicalBlock',
|
||||
metaId: '!50',
|
||||
scope: '!44',
|
||||
file: '!1',
|
||||
line: '8',
|
||||
column: '5',
|
||||
});
|
||||
it('should parse distinct DILexicalBlock', () => {
|
||||
expect(
|
||||
llvmIrParser.parseMetaNode('!50 = distinct !DILexicalBlock(scope: !44, file: !1, line: 8, column: 5)'),
|
||||
).toEqual({
|
||||
metaType: 'LexicalBlock',
|
||||
metaId: '!50',
|
||||
scope: '!44',
|
||||
file: '!1',
|
||||
line: '8',
|
||||
column: '5',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse all value types', function () {
|
||||
llvmIrParser
|
||||
.parseMetaNode(
|
||||
it('should parse all value types', () => {
|
||||
expect(
|
||||
llvmIrParser.parseMetaNode(
|
||||
'!44 = distinct !DISubprogram(name: "func<int, int>", ' +
|
||||
'scope: !1, line: 7, isLocal: false, isDefinition: true, flags: ' +
|
||||
'DIFlagPrototyped, ceEmpty: "", ceTest: "a:b\\"c,d")',
|
||||
)
|
||||
.should.deep.equal({
|
||||
metaType: 'Subprogram',
|
||||
metaId: '!44',
|
||||
name: 'func<int, int>',
|
||||
line: '7',
|
||||
scope: '!1',
|
||||
isLocal: 'false',
|
||||
isDefinition: 'true',
|
||||
flags: 'DIFlagPrototyped',
|
||||
ceTest: 'a:b\\"c,d',
|
||||
ceEmpty: '',
|
||||
});
|
||||
),
|
||||
).toEqual({
|
||||
metaType: 'Subprogram',
|
||||
metaId: '!44',
|
||||
name: 'func<int, int>',
|
||||
line: '7',
|
||||
scope: '!1',
|
||||
isLocal: 'false',
|
||||
isDefinition: 'true',
|
||||
flags: 'DIFlagPrototyped',
|
||||
ceTest: 'a:b\\"c,d',
|
||||
ceEmpty: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should parse distinct DILexicalBlock', function () {
|
||||
llvmIrParser
|
||||
.parseMetaNode('!1 = !DIFile(filename: "/tmp/example.cpp", directory: "/home/compiler-explorer")')
|
||||
.should.deep.equal({
|
||||
metaType: 'File',
|
||||
metaId: '!1',
|
||||
filename: '/tmp/example.cpp',
|
||||
directory: '/home/compiler-explorer',
|
||||
});
|
||||
it('should parse distinct DILexicalBlock', () => {
|
||||
expect(
|
||||
llvmIrParser.parseMetaNode(
|
||||
'!1 = !DIFile(filename: "/tmp/example.cpp", directory: "/home/compiler-explorer")',
|
||||
),
|
||||
).toEqual({
|
||||
metaType: 'File',
|
||||
metaId: '!1',
|
||||
filename: '/tmp/example.cpp',
|
||||
directory: '/home/compiler-explorer',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('llvm-ir getSourceLineNumber', function () {
|
||||
describe('llvm-ir getSourceLineNumber', () => {
|
||||
let llvmIrParser;
|
||||
let compilerProps;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
|
||||
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
|
||||
|
||||
@@ -122,31 +122,31 @@ describe('llvm-ir getSourceLineNumber', function () {
|
||||
'!16': {scope: '!42'},
|
||||
};
|
||||
|
||||
it('should return a line number', function () {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!10')).to.equal(10);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!20')).to.equal(20);
|
||||
it('should return a line number', () => {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!10')).toBe(10);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!20')).toBe(20);
|
||||
});
|
||||
|
||||
it('should return the line number of its parent scope', function () {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!11')).to.equal(10);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!12')).to.equal(10);
|
||||
it('should return the line number of its parent scope', () => {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!11')).toBe(10);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!12')).toBe(10);
|
||||
});
|
||||
|
||||
it('should return null on non-existend node', function () {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!16')).to.equal(null);
|
||||
it('should return null on non-existend node', () => {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!16')).toBe(null);
|
||||
});
|
||||
|
||||
it('should return null if no higher scope has a line', function () {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!14')).to.equal(null);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!15')).to.equal(null);
|
||||
it('should return null if no higher scope has a line', () => {
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!14')).toBe(null);
|
||||
expect(llvmIrParser.getSourceLineNumber(debugInfo, '!15')).toBe(null);
});
});

describe('llvm-ir getSourceColumn', function () {
describe('llvm-ir getSourceColumn', () => {
let llvmIrParser;
let compilerProps;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');

@@ -163,32 +163,32 @@ describe('llvm-ir getSourceColumn', function () {
'!16': {scope: '!42'},
};

it('should return a column number', function () {
expect(llvmIrParser.getSourceColumn(debugInfo, '!10')).to.equal(10);
expect(llvmIrParser.getSourceColumn(debugInfo, '!20')).to.equal(20);
it('should return a column number', () => {
expect(llvmIrParser.getSourceColumn(debugInfo, '!10')).toBe(10);
expect(llvmIrParser.getSourceColumn(debugInfo, '!20')).toBe(20);
});

it('should return the column number of its parent scope', function () {
expect(llvmIrParser.getSourceColumn(debugInfo, '!11')).to.equal(10);
expect(llvmIrParser.getSourceColumn(debugInfo, '!12')).to.equal(10);
it('should return the column number of its parent scope', () => {
expect(llvmIrParser.getSourceColumn(debugInfo, '!11')).toBe(10);
expect(llvmIrParser.getSourceColumn(debugInfo, '!12')).toBe(10);
});

it('should return undefined on non-existend node', function () {
expect(llvmIrParser.getSourceColumn(debugInfo, '!16')).to.equal(undefined);
expect(llvmIrParser.getSourceColumn(debugInfo, '!30')).to.equal(undefined);
it('should return undefined on non-existend node', () => {
expect(llvmIrParser.getSourceColumn(debugInfo, '!16')).toBe(undefined);
expect(llvmIrParser.getSourceColumn(debugInfo, '!30')).toBe(undefined);
});

it('should return undefined if no higher scope has a column', function () {
expect(llvmIrParser.getSourceColumn(debugInfo, '!14')).to.equal(undefined);
expect(llvmIrParser.getSourceColumn(debugInfo, '!15')).to.equal(undefined);
it('should return undefined if no higher scope has a column', () => {
expect(llvmIrParser.getSourceColumn(debugInfo, '!14')).toBe(undefined);
expect(llvmIrParser.getSourceColumn(debugInfo, '!15')).toBe(undefined);
});
});

describe('llvm-ir getFileName', function () {
describe('llvm-ir getFileName', () => {
let llvmIrParser;
let compilerProps;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');

@@ -203,22 +203,22 @@ describe('llvm-ir getFileName', function () {
'!13': {scope: '!12'},
};

it('should return a filename', function () {
expect(llvmIrParser.getFileName(debugInfo, '!10')).to.equal('/test.cpp');
expect(llvmIrParser.getFileName(debugInfo, '!11')).to.equal('/test.cpp');
it('should return a filename', () => {
expect(llvmIrParser.getFileName(debugInfo, '!10')).toBe('/test.cpp');
expect(llvmIrParser.getFileName(debugInfo, '!11')).toBe('/test.cpp');
});

it('should return the filename of its parent scope', function () {
expect(llvmIrParser.getFileName(debugInfo, '!12')).to.equal('/test.cpp');
expect(llvmIrParser.getFileName(debugInfo, '!13')).to.equal('/test.cpp');
it('should return the filename of its parent scope', () => {
expect(llvmIrParser.getFileName(debugInfo, '!12')).toBe('/test.cpp');
expect(llvmIrParser.getFileName(debugInfo, '!13')).toBe('/test.cpp');
});

it('should return null on non-existend node', function () {
expect(llvmIrParser.getFileName(debugInfo, '!42')).to.equal(null);
it('should return null on non-existend node', () => {
expect(llvmIrParser.getFileName(debugInfo, '!42')).toBe(null);
});

it('should not return source filename', function () {
expect(llvmIrParser.getFileName(debugInfo, '!20')).to.equal(null);
expect(llvmIrParser.getFileName(debugInfo, '!21')).to.equal(null);
it('should not return source filename', () => {
expect(llvmIrParser.getFileName(debugInfo, '!20')).toBe(null);
expect(llvmIrParser.getFileName(debugInfo, '!21')).toBe(null);
});
});
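The hunks above follow the mechanical pattern used throughout this commit: mocha's `before` becomes vitest's `beforeAll`, and chai's `expect(x).to.equal(y)` becomes `expect(x).toBe(y)`. A minimal sketch of the converted shape, for orientation only — `makeParser` is a hypothetical helper, not something from this diff:

```ts
import {beforeAll, describe, expect, it} from 'vitest';

describe('some parser', () => {
    let parser;

    beforeAll(() => {
        // one-time setup; the vitest equivalent of mocha's before()
        parser = makeParser(); // hypothetical helper
    });

    it('returns null for unknown nodes', () => {
        // toBe() is a strict (Object.is) comparison, matching chai's .to.equal for primitives
        expect(parser.lookup('!42')).toBe(null);
    });
});
```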
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {LLCCompiler} from '../lib/compilers/llc.js';
import {OptCompiler} from '../lib/compilers/opt.js';

@@ -50,69 +52,69 @@ function createCompiler(compiler) {
describe('LLVM IR Compiler', () => {
let compiler;

before(() => {
beforeAll(() => {
compiler = createCompiler(LLCCompiler);
});

it('llc options for at&t assembly', function () {
compiler
.optionsForFilter(
it('llc options for at&t assembly', () => {
expect(
compiler.optionsForFilter(
{
intel: false,
binary: false,
},
'output.s',
)
.should.eql(['-o', 'output.s']);
),
).toEqual(['-o', 'output.s']);
});

it('llc options for intel assembly', function () {
compiler
.optionsForFilter(
it('llc options for intel assembly', () => {
expect(
compiler.optionsForFilter(
{
intel: true,
binary: false,
},
'output.s',
)
.should.eql(['-o', 'output.s', '-x86-asm-syntax=intel']);
),
).toEqual(['-o', 'output.s', '-x86-asm-syntax=intel']);
});

it('llc options for at&t binary', function () {
compiler
.optionsForFilter(
it('llc options for at&t binary', () => {
expect(
compiler.optionsForFilter(
{
intel: false,
binary: true,
},
'output.s',
)
.should.eql(['-o', 'output.s', '-filetype=obj']);
),
).toEqual(['-o', 'output.s', '-filetype=obj']);
});

it('llc options for intel binary', function () {
compiler
.optionsForFilter(
it('llc options for intel binary', () => {
expect(
compiler.optionsForFilter(
{
intel: true,
binary: true,
},
'output.s',
)
.should.eql(['-o', 'output.s', '-filetype=obj']);
),
).toEqual(['-o', 'output.s', '-filetype=obj']);
});

it('opt options', function () {
it('opt options', () => {
const compiler = createCompiler(OptCompiler);

compiler
.optionsForFilter(
expect(
compiler.optionsForFilter(
{
intel: false,
binary: false,
},
'output.s',
)
.should.eql(['-o', 'output.s', '-S']);
),
).toEqual(['-o', 'output.s', '-S']);
});
});
@@ -19,6 +19,7 @@

import * as stream from 'stream';

import {describe, expect, it} from 'vitest';
import {YAMLParseError} from 'yaml';

import {LLVMOptTransformer} from '../lib/llvm-opt-transformer.js';
@@ -53,7 +54,7 @@ Args:
for await (const opt of optStream) {
output.push(opt);
}
output.should.deep.equal([
expect(output).toEqual([
{
Args: [
{
@@ -105,13 +106,15 @@ broken: duplicate key makes this invalid
const readString = new stream.PassThrough();
readString.push(doc);
readString.end();
return (async () => {
const optStream = stream.pipeline(readString, new LLVMOptTransformer(), res => {
return res;
});
for await (const _ of optStream) {
// just consume
}
})().should.be.rejectedWith(YAMLParseError);
await expect(
(async () => {
const optStream = stream.pipeline(readString, new LLVMOptTransformer(), res => {
return res;
});
for await (const _ of optStream) {
// just consume
}
})(),
).rejects.toThrow(YAMLParseError);
});
});
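The transformer hunk above replaces chai-as-promised's `.should.be.rejectedWith(...)` with vitest's built-in async matchers. A rough sketch of both directions, using placeholder promises rather than anything from this diff:

```ts
import {expect, it} from 'vitest';

it('handles fulfilment and rejection', async () => {
    // resolution: await the matcher so a rejection fails the test
    await expect(Promise.resolve(42)).resolves.toBe(42);

    // rejection: pass the expected error class, as done with YAMLParseError above
    await expect(Promise.reject(new TypeError('bad input'))).rejects.toThrow(TypeError);
});
```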
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {LlvmPassDumpParser} from '../lib/parsers/llvm-pass-dump-parser.js';
import * as properties from '../lib/properties.js';

@@ -33,10 +35,10 @@ function deepCopy(obj) {
return JSON.parse(JSON.stringify(obj));
}

describe('llvm-pass-dump-parser filter', function () {
describe('llvm-pass-dump-parser filter', () => {
let llvmPassDumpParser;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
const compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
llvmPassDumpParser = new LlvmPassDumpParser(compilerProps);
@@ -63,20 +65,20 @@ describe('llvm-pass-dump-parser filter', function () {
{ text: ' ret void, !dbg !61' },
];

it('should not filter out dbg metadata', function () {
it('should not filter out dbg metadata', () => {
const options = {filterDebugInfo: false};
// prettier-ignore
llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options)
.should.deep.equal(rawFuncIR);
expect(llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options),
).toEqual(rawFuncIR);
});

it('should filter out dbg metadata too', function () {
it('should filter out dbg metadata too', () => {
const options = {filterDebugInfo: true};
// prettier-ignore
llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options)
.should.deep.equal([
expect(llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options),
).toEqual([
{ text: ' # Machine code for function f(S1&, S2 const&): NoPHIs, TracksLiveness, TiedOpsRewritten' },
{ text: 'define dso_local void @f(S1&, S2 const&)(%struct.S1* noundef nonnull align 8 dereferenceable(16) %s1, %struct.S2* noundef nonnull align 8 dereferenceable(16) %s2) {' },
{ text: 'entry:' },
@@ -93,39 +95,39 @@ describe('llvm-pass-dump-parser filter', function () {
]);
});

it('should filter out instruction metadata and object attribute group, leave debug instructions in place', function () {
it('should filter out instruction metadata and object attribute group, leave debug instructions in place', () => {
// 'hide IR metadata' aims to decrease more visual noise than `hide debug info`
const options = {filterDebugInfo: false, filterIRMetadata: true};
// prettier-ignore
llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options)
.should.deep.equal([
{ text: ' # Machine code for function f(S1&, S2 const&): NoPHIs, TracksLiveness, TiedOpsRewritten' },
{ text: 'define dso_local void @f(S1&, S2 const&)(%struct.S1* noundef nonnull align 8 dereferenceable(16) %s1, %struct.S2* noundef nonnull align 8 dereferenceable(16) %s2) {' },
{ text: 'entry:' },
{ text: ' %s1.addr = alloca %struct.S1*, align 8' },
{ text: ' store %struct.S1* %s1, %struct.S1** %s1.addr, align 8' },
{ text: ' call void @llvm.dbg.declare(metadata %struct.S1** %s1.addr, metadata !30, metadata !DIExpression())' },
{ text: ' call void @llvm.dbg.value(metadata %struct.S1* %s1, metadata !30, metadata !DIExpression())' },
{ text: ' tail call void @llvm.dbg.declare(metadata i16* %p.addr, metadata !24, metadata !DIExpression())' },
{ text: ' tail call void @llvm.dbg.value(metadata i32 0, metadata !20, metadata !DIExpression())' },
{ text: ' DBG_VALUE $rdi, $noreg, !"s1", !DIExpression(), debug-location !32; example.cpp:0 line no:7' },
{ text: ' store %struct.S2* %s2, %struct.S2** %s2.addr, align 8' },
{ text: ' %0 = load %struct.S2*, %struct.S2** %s2.addr, align 8' },
{ text: ' %a = getelementptr inbounds %struct.S2, %struct.S2* %0, i32 0, i32 0' },
{ text: ' %1 = load i64, i64* %t, align 8' },
{ text: ' %2 = load %struct.S1*, %struct.S1** %s1.addr, align 8' },
{ text: ' store i64 %1, i64* %t2, align 8' },
{ text: ' %t3 = getelementptr inbounds %struct.Wrapper2, %struct.Wrapper2* %b, i32 0, i32 0' },
{ text: ' ret void' },
]);
expect(llvmPassDumpParser
.applyIrFilters(deepCopy(rawFuncIR), options),
).toEqual([
{ text: ' # Machine code for function f(S1&, S2 const&): NoPHIs, TracksLiveness, TiedOpsRewritten' },
{ text: 'define dso_local void @f(S1&, S2 const&)(%struct.S1* noundef nonnull align 8 dereferenceable(16) %s1, %struct.S2* noundef nonnull align 8 dereferenceable(16) %s2) {' },
{ text: 'entry:' },
{ text: ' %s1.addr = alloca %struct.S1*, align 8' },
{ text: ' store %struct.S1* %s1, %struct.S1** %s1.addr, align 8' },
{ text: ' call void @llvm.dbg.declare(metadata %struct.S1** %s1.addr, metadata !30, metadata !DIExpression())' },
{ text: ' call void @llvm.dbg.value(metadata %struct.S1* %s1, metadata !30, metadata !DIExpression())' },
{ text: ' tail call void @llvm.dbg.declare(metadata i16* %p.addr, metadata !24, metadata !DIExpression())' },
{ text: ' tail call void @llvm.dbg.value(metadata i32 0, metadata !20, metadata !DIExpression())' },
{ text: ' DBG_VALUE $rdi, $noreg, !"s1", !DIExpression(), debug-location !32; example.cpp:0 line no:7' },
{ text: ' store %struct.S2* %s2, %struct.S2** %s2.addr, align 8' },
{ text: ' %0 = load %struct.S2*, %struct.S2** %s2.addr, align 8' },
{ text: ' %a = getelementptr inbounds %struct.S2, %struct.S2* %0, i32 0, i32 0' },
{ text: ' %1 = load i64, i64* %t, align 8' },
{ text: ' %2 = load %struct.S1*, %struct.S1** %s1.addr, align 8' },
{ text: ' store i64 %1, i64* %t2, align 8' },
{ text: ' %t3 = getelementptr inbounds %struct.Wrapper2, %struct.Wrapper2* %b, i32 0, i32 0' },
{ text: ' ret void' },
]);
});
});
describe('llvm-pass-dump-parser Old style IR Dump header', function () {
describe('llvm-pass-dump-parser Old style IR Dump header', () => {
let llvmPassDumpParser;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
const compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
llvmPassDumpParser = new LlvmPassDumpParser(compilerProps);
@@ -145,12 +147,12 @@ describe('llvm-pass-dump-parser Old style IR Dump header', function () {
{ text: '}' },
];

it('should recognize dump', function () {
it('should recognize dump', () => {
const options = {filterDebugInfo: false};

const brokenDown = llvmPassDumpParser.breakdownOutputIntoPassDumps(deepCopy(rawFuncIR), options);

brokenDown.should.deep.equal([
expect(brokenDown).toEqual([
{
affectedFunction: undefined,
header: 'IR Dump After NoOpModulePass on [module]',
@@ -171,10 +173,10 @@ describe('llvm-pass-dump-parser Old style IR Dump header', function () {
});
});

describe('llvm-pass-dump-parser New style IR Dump header', function () {
describe('llvm-pass-dump-parser New style IR Dump header', () => {
let llvmPassDumpParser;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
const compilerProps = (fakeProps.get as any).bind(fakeProps, 'c++');
llvmPassDumpParser = new LlvmPassDumpParser(compilerProps);
@@ -194,12 +196,12 @@ describe('llvm-pass-dump-parser New style IR Dump header', function () {
{ text: '}' },
];

it('should recognize dump', function () {
it('should recognize dump', () => {
const options = {filterDebugInfo: false};

const brokenDown = llvmPassDumpParser.breakdownOutputIntoPassDumps(deepCopy(rawFuncIR), options);

brokenDown.should.deep.equal([
expect(brokenDown).toEqual([
{
affectedFunction: undefined,
header: 'IR Dump After NoOpModulePass on [module]',
@@ -22,87 +22,85 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {unwrap} from '../lib/assert.js';
|
||||
import {MapFileReaderDelphi} from '../lib/mapfiles/map-file-delphi.js';
|
||||
import {MapFileReaderVS} from '../lib/mapfiles/map-file-vs.js';
|
||||
|
||||
import {chai} from './utils.js';
|
||||
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('Map setup', function () {
|
||||
it('VS-map preferred load address', function () {
|
||||
describe('Map setup', () => {
|
||||
it('VS-map preferred load address', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
reader.preferredLoadAddress.should.equal(0x400000, 'default load address');
|
||||
expect(reader.preferredLoadAddress).toEqual(0x400000);
|
||||
|
||||
reader.tryReadingPreferredAddress(' Preferred load address is 00400000');
|
||||
reader.preferredLoadAddress.should.equal(0x400000);
|
||||
expect(reader.preferredLoadAddress).toEqual(0x400000);
|
||||
|
||||
reader.tryReadingPreferredAddress(' Preferred load address is 00410000');
|
||||
reader.preferredLoadAddress.should.equal(0x410000);
|
||||
expect(reader.preferredLoadAddress).toEqual(0x410000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Code Segments', function () {
|
||||
it('One normal Delphi-Map segment', function () {
|
||||
describe('Code Segments', () => {
|
||||
it('One normal Delphi-Map segment', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingCodeSegmentInfo(' 0001:00002838 00000080 C=CODE S=.text G=(none) M=output ACBP=A9');
|
||||
reader.segments.length.should.equal(1);
|
||||
expect(reader.segments.length).toEqual(1);
|
||||
|
||||
let info = reader.getSegmentInfoByStartingAddress('0001', 0x2838);
|
||||
expect(unwrap(info).unitName).to.equal('output.pas');
|
||||
expect(unwrap(info).unitName).toBe('output.pas');
|
||||
|
||||
info = reader.getSegmentInfoByStartingAddress(undefined, reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(unwrap(info).unitName).to.equal('output.pas');
|
||||
expect(unwrap(info).unitName).toBe('output.pas');
|
||||
|
||||
info = reader.getSegmentInfoByStartingAddress('0001', 0x1234);
|
||||
expect(info, 'Address should not be a Start for any segment').to.be.undefined;
|
||||
|
||||
info = reader.getSegmentInfoAddressIsIn('0001', 0x2838 + 0x10);
|
||||
expect(unwrap(info).unitName).to.equal('output.pas');
|
||||
expect(unwrap(info).unitName).toBe('output.pas');
|
||||
|
||||
info = reader.getSegmentInfoAddressIsIn(undefined, reader.getSegmentOffset('0001') + 0x2838 + 0x10);
|
||||
expect(unwrap(info).unitName).to.equal('output.pas');
|
||||
expect(unwrap(info).unitName).toBe('output.pas');
|
||||
|
||||
info = reader.getSegmentInfoAddressIsIn('0001', reader.getSegmentOffset('0001') + 0x2838 + 0x80 + 1);
|
||||
expect(info, 'Address should not be in any segment').to.be.undefined;
|
||||
|
||||
info = reader.getSegmentInfoByUnitName('output.pas');
|
||||
expect(unwrap(info).unitName).to.equal('output.pas');
|
||||
unwrap(info).addressInt.should.equal(reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(unwrap(info).unitName).toBe('output.pas');
|
||||
expect(unwrap(info).addressInt).toEqual(reader.getSegmentOffset('0001') + 0x2838);
|
||||
});
|
||||
|
||||
it('Not include this segment', function () {
|
||||
it('Not include this segment', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingCodeSegmentInfo(' 0002:000000B0 00000023 C=ICODE S=.itext G=(none) M=output ACBP=A9');
|
||||
reader.segments.length.should.equal(0);
|
||||
expect(reader.segments.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('ICode/IText segments', function () {
|
||||
it('ICode/IText segments', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingCodeSegmentInfo(' 0002:000000B0 00000023 C=ICODE S=.itext G=(none) M=output ACBP=A9');
|
||||
reader.isegments.length.should.equal(1);
|
||||
expect(reader.isegments.length).toEqual(1);
|
||||
});
|
||||
|
||||
it('One normal VS-Map segment', function () {
|
||||
it('One normal VS-Map segment', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
reader.tryReadingCodeSegmentInfo(' 0001:00002838 00000080H .text$mn CODE');
|
||||
reader.segments.length.should.equal(1);
|
||||
expect(reader.segments.length).toEqual(1);
|
||||
|
||||
let info = reader.getSegmentInfoByStartingAddress('0001', 0x2838);
|
||||
unwrap(info).addressInt.should.equal(reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(unwrap(info).addressInt).toEqual(reader.getSegmentOffset('0001') + 0x2838);
|
||||
|
||||
info = reader.getSegmentInfoByStartingAddress(undefined, 0x403838);
|
||||
unwrap(info).addressInt.should.equal(reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(unwrap(info).addressInt).toEqual(reader.getSegmentOffset('0001') + 0x2838);
|
||||
|
||||
info = reader.getSegmentInfoAddressIsIn(undefined, reader.getSegmentOffset('0001') + 0x2838 + 0x10);
|
||||
unwrap(info).addressInt.should.equal(reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(unwrap(info).addressInt).toEqual(reader.getSegmentOffset('0001') + 0x2838);
|
||||
|
||||
info = reader.getSegmentInfoAddressIsIn('0001', reader.getSegmentOffset('0001') + 0x2837);
|
||||
expect(info).to.be.undefined;
|
||||
});
|
||||
|
||||
it('Repair VS-Map code segment info', function () {
|
||||
it('Repair VS-Map code segment info', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
reader.tryReadingCodeSegmentInfo(' 0002:00000000 00004c73H .text$mn CODE');
|
||||
reader.tryReadingNamedAddress(
|
||||
@@ -110,146 +108,148 @@ describe('Code Segments', function () {
|
||||
);
|
||||
|
||||
let info = reader.getSegmentInfoByStartingAddress('0002', 0);
|
||||
expect(unwrap(info).unitName).to.equal('ConsoleApplication1.obj');
|
||||
expect(unwrap(info).unitName).toBe('ConsoleApplication1.obj');
|
||||
|
||||
reader.getSegmentOffset('0002').should.equal(0x411000);
|
||||
expect(reader.getSegmentOffset('0002')).toEqual(0x411000);
|
||||
|
||||
info = reader.getSegmentInfoByStartingAddress(undefined, 0x411000);
|
||||
expect(unwrap(info).unitName).to.equal('ConsoleApplication1.obj');
|
||||
expect(unwrap(info).unitName).toBe('ConsoleApplication1.obj');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Symbol info', function () {
|
||||
it('Delphi-Map symbol test', function () {
|
||||
describe('Symbol info', () => {
|
||||
it('Delphi-Map symbol test', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingNamedAddress(' 0001:00002838 Square');
|
||||
reader.namedAddresses.length.should.equal(1);
|
||||
expect(reader.namedAddresses.length).toEqual(1);
|
||||
|
||||
let info = reader.getSymbolAt('0001', 0x2838);
|
||||
expect(info).to.not.equal(undefined, 'Symbol Square should have been returned 1');
|
||||
expect(unwrap(info).displayName).to.equal('Square');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('Square');
|
||||
|
||||
info = reader.getSymbolAt(undefined, reader.getSegmentOffset('0001') + 0x2838);
|
||||
expect(info).to.not.equal(undefined, 'Symbol Square should have been returned 2');
|
||||
expect(unwrap(info).displayName).to.equal('Square');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('Square');
|
||||
});
|
||||
|
||||
it('Delphi-Map D2009 symbol test', function () {
|
||||
it('Delphi-Map D2009 symbol test', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingNamedAddress(' 0001:00002C4C output.MaxArray');
|
||||
reader.namedAddresses.length.should.equal(1);
|
||||
expect(reader.namedAddresses.length).toEqual(1);
|
||||
|
||||
let info = reader.getSymbolAt('0001', 0x2c4c);
|
||||
expect(info).to.not.equal(undefined, 'Symbol MaxArray should have been returned');
|
||||
expect(unwrap(info).displayName).to.equal('output.MaxArray');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('output.MaxArray');
|
||||
|
||||
//todo should not be undefined
|
||||
info = reader.getSymbolAt(undefined, reader.getSegmentOffset('0001') + 0x2c4c);
|
||||
expect(info).to.not.equal(undefined, 'Symbol MaxArray should have been returned');
|
||||
expect(unwrap(info).displayName).to.equal('output.MaxArray');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('output.MaxArray');
|
||||
});
|
||||
|
||||
it('VS-Map symbol test', function () {
|
||||
it('VS-Map symbol test', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
reader.tryReadingNamedAddress(
|
||||
' 0002:000006b0 ??$__vcrt_va_start_verify_argument_type@QBD@@YAXXZ 004116b0 f i ConsoleApplication1.obj',
|
||||
);
|
||||
reader.namedAddresses.length.should.equal(1);
|
||||
expect(reader.namedAddresses.length).toEqual(1);
|
||||
|
||||
let info = reader.getSymbolAt('0002', 0x6b0);
|
||||
expect(info).to.not.equal(undefined, 'Symbol start_verify_argument should have been returned 1');
|
||||
expect(unwrap(info).displayName).to.equal('??$__vcrt_va_start_verify_argument_type@QBD@@YAXXZ');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('??$__vcrt_va_start_verify_argument_type@QBD@@YAXXZ');
|
||||
|
||||
info = reader.getSymbolAt(undefined, 0x4116b0);
|
||||
expect(info).to.not.equal(undefined, 'Symbol start_verify_argument should have been returned 2');
|
||||
expect(unwrap(info).displayName).to.equal('??$__vcrt_va_start_verify_argument_type@QBD@@YAXXZ');
|
||||
expect(info).not.toBe(undefined);
|
||||
expect(unwrap(info).displayName).toBe('??$__vcrt_va_start_verify_argument_type@QBD@@YAXXZ');
|
||||
});
|
||||
|
||||
it('Delphi-Map Duplication prevention', function () {
|
||||
it('Delphi-Map Duplication prevention', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingNamedAddress(' 0001:00002838 Square');
|
||||
reader.namedAddresses.length.should.equal(1);
|
||||
expect(reader.namedAddresses.length).toEqual(1);
|
||||
|
||||
reader.tryReadingNamedAddress(' 0001:00002838 Square');
|
||||
reader.namedAddresses.length.should.equal(1);
|
||||
expect(reader.namedAddresses.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Delphi-Map Line number info', function () {
|
||||
it('No line', function () {
|
||||
describe('Delphi-Map Line number info', () => {
|
||||
it('No line', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingLineNumbers('').should.equal(false);
|
||||
expect(reader.tryReadingLineNumbers('')).toEqual(false);
|
||||
});
|
||||
|
||||
it('One line', function () {
|
||||
it('One line', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader.tryReadingLineNumbers(' 17 0001:000028A4').should.equal(true);
|
||||
expect(reader.tryReadingLineNumbers(' 17 0001:000028A4')).toEqual(true);
|
||||
|
||||
let lineInfo = reader.getLineInfoByAddress('0001', 0x28a4);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(17);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(17);
|
||||
|
||||
lineInfo = reader.getLineInfoByAddress(undefined, reader.getSegmentOffset('0001') + 0x28a4);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(17);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(17);
|
||||
});
|
||||
|
||||
it('Multiple lines', function () {
|
||||
it('Multiple lines', () => {
|
||||
const reader = new MapFileReaderDelphi('');
|
||||
reader
|
||||
.tryReadingLineNumbers(' 12 0001:00002838 13 0001:0000283B 14 0001:00002854 15 0001:00002858')
|
||||
.should.equal(true);
|
||||
expect(
|
||||
reader.tryReadingLineNumbers(
|
||||
' 12 0001:00002838 13 0001:0000283B 14 0001:00002854 15 0001:00002858',
|
||||
),
|
||||
).toEqual(true);
|
||||
|
||||
let lineInfo = reader.getLineInfoByAddress('0001', 0x2838);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(12);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(12);
|
||||
|
||||
lineInfo = reader.getLineInfoByAddress('0001', 0x2858);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(15);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(15);
|
||||
|
||||
lineInfo = reader.getLineInfoByAddress('0001', 0x2854);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(14);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(14);
|
||||
|
||||
lineInfo = reader.getLineInfoByAddress('0001', 0x283b);
|
||||
expect(unwrap(lineInfo).lineNumber).to.equal(13);
|
||||
expect(unwrap(lineInfo).lineNumber).toBe(13);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Delphi-Map load test', function () {
|
||||
it('Minimal map', function () {
|
||||
describe('Delphi-Map load test', () => {
|
||||
it('Minimal map', () => {
|
||||
const reader = new MapFileReaderDelphi('test/maps/minimal-delphi.map');
|
||||
reader.run();
|
||||
|
||||
reader.segments.length.should.equal(4);
|
||||
reader.lineNumbers.length.should.equal(7);
|
||||
reader.namedAddresses.length.should.equal(11);
|
||||
expect(reader.segments.length).toEqual(4);
|
||||
expect(reader.lineNumbers.length).toEqual(7);
|
||||
expect(reader.namedAddresses.length).toEqual(11);
|
||||
|
||||
let info = reader.getSegmentInfoByUnitName('output.pas');
|
||||
unwrap(info).addressInt.should.equal(reader.getSegmentOffset('0001') + 0x2c4c);
|
||||
expect(unwrap(info).addressInt).toEqual(reader.getSegmentOffset('0001') + 0x2c4c);
|
||||
|
||||
info = reader.getICodeSegmentInfoByUnitName('output.pas');
|
||||
unwrap(info).segment.should.equal('0002');
|
||||
unwrap(info).addressWithoutOffset.should.equal(0xb0);
|
||||
unwrap(info).addressInt.should.equal(0x4040b0);
|
||||
expect(unwrap(info).segment).toEqual('0002');
|
||||
expect(unwrap(info).addressWithoutOffset).toEqual(0xb0);
|
||||
expect(unwrap(info).addressInt).toEqual(0x4040b0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('VS-Map load test', function () {
|
||||
it('Minimal map', function () {
|
||||
describe('VS-Map load test', () => {
|
||||
it('Minimal map', () => {
|
||||
const reader = new MapFileReaderVS('test/maps/minimal-vs15.map');
|
||||
reader.run();
|
||||
|
||||
reader.segments.length.should.equal(1);
|
||||
unwrap(reader.getSegmentInfoByUnitName('ConsoleApplication1.obj')).addressInt.should.equal(0x411000);
|
||||
expect(reader.segments.length).toEqual(1);
|
||||
expect(unwrap(reader.getSegmentInfoByUnitName('ConsoleApplication1.obj')).addressInt).toEqual(0x411000);
|
||||
|
||||
reader.getSegmentOffset('0001').should.equal(0x401000, 'offset 1');
|
||||
reader.getSegmentOffset('0002').should.equal(0x411000, 'offset 2');
|
||||
reader.getSegmentOffset('0003').should.equal(0x416000, 'offset 3');
|
||||
reader.getSegmentOffset('0004').should.equal(0x419000, 'offset 4');
|
||||
reader.getSegmentOffset('0005').should.equal(0x41a000, 'offset 5');
|
||||
reader.getSegmentOffset('0007').should.equal(0x41c000, 'offset 7');
|
||||
expect(reader.getSegmentOffset('0001')).toEqual(0x401000);
|
||||
expect(reader.getSegmentOffset('0002')).toEqual(0x411000);
|
||||
expect(reader.getSegmentOffset('0003')).toEqual(0x416000);
|
||||
expect(reader.getSegmentOffset('0004')).toEqual(0x419000);
|
||||
expect(reader.getSegmentOffset('0005')).toEqual(0x41a000);
|
||||
expect(reader.getSegmentOffset('0007')).toEqual(0x41c000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('VS-Map address checking', function () {
|
||||
it('Normal defined spaces', function () {
|
||||
describe('VS-Map address checking', () => {
|
||||
it('Normal defined spaces', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
|
||||
const mainAddresses = [
|
||||
@@ -257,14 +257,14 @@ describe('VS-Map address checking', function () {
|
||||
{startAddress: 16, startAddressHex: '00000010', endAddress: 255, endAddressHex: '000000FF'},
|
||||
];
|
||||
|
||||
reader.isWithinAddressSpace(mainAddresses, 3, 5).should.equal(true);
|
||||
reader.isWithinAddressSpace(mainAddresses, 10, 5).should.equal(false);
|
||||
reader.isWithinAddressSpace(mainAddresses, 11, 4).should.equal(false);
|
||||
reader.isWithinAddressSpace(mainAddresses, 16, 10).should.equal(true);
|
||||
reader.isWithinAddressSpace(mainAddresses, 32, 10).should.equal(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 3, 5)).toEqual(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 10, 5)).toEqual(false);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 11, 4)).toEqual(false);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 16, 10)).toEqual(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 32, 10)).toEqual(true);
|
||||
});
|
||||
|
||||
it('Overlapping regions', function () {
|
||||
it('Overlapping regions', () => {
|
||||
const reader = new MapFileReaderVS('');
|
||||
|
||||
const mainAddresses = [
|
||||
@@ -272,9 +272,9 @@ describe('VS-Map address checking', function () {
|
||||
{startAddress: 16, startAddressHex: '00000010', endAddress: 255, endAddressHex: '000000FF'},
|
||||
];
|
||||
|
||||
reader.isWithinAddressSpace(mainAddresses, 0, 5).should.equal(true);
|
||||
reader.isWithinAddressSpace(mainAddresses, 11, 5).should.equal(true);
|
||||
reader.isWithinAddressSpace(mainAddresses, 11, 6).should.equal(true);
|
||||
reader.isWithinAddressSpace(mainAddresses, 11, 258).should.equal(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 0, 5)).toEqual(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 11, 5)).toEqual(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 11, 6)).toEqual(true);
|
||||
expect(reader.isWithinAddressSpace(mainAddresses, 11, 258)).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -24,11 +24,13 @@

import path from 'path';

import {beforeAll, describe, expect, it} from 'vitest';

import {unwrap} from '../lib/assert.js';
import {NimCompiler} from '../lib/compilers/nim.js';
import {LanguageKey} from '../types/languages.interfaces.js';

import {makeCompilationEnvironment, makeFakeCompilerInfo, should} from './utils.js';
import {makeCompilationEnvironment, makeFakeCompilerInfo} from './utils.js';

const languages = {
nim: {id: 'nim' as LanguageKey},
@@ -46,21 +48,21 @@ describe('Nim', () => {
lang: languages.nim.id,
};

before(() => {
beforeAll(() => {
ce = makeCompilationEnvironment({languages});
});

it('Nim should not allow --run/-r parameter', () => {
const compiler = new NimCompiler(makeFakeCompilerInfo(info), ce);
compiler.filterUserOptions(['c', '--run', '--something']).should.deep.equal(['c', '--something']);
compiler.filterUserOptions(['cpp', '-r', '--something']).should.deep.equal(['cpp', '--something']);
expect(compiler.filterUserOptions(['c', '--run', '--something'])).toEqual(['c', '--something']);
expect(compiler.filterUserOptions(['cpp', '-r', '--something'])).toEqual(['cpp', '--something']);
});

it('Nim compile to Cpp if not asked otherwise', () => {
const compiler = new NimCompiler(makeFakeCompilerInfo(info), ce);
compiler.filterUserOptions([]).should.deep.equal(['compile']);
compiler.filterUserOptions(['badoption']).should.deep.equal(['compile', 'badoption']);
compiler.filterUserOptions(['js']).should.deep.equal(['js']);
expect(compiler.filterUserOptions([])).toEqual(['compile']);
expect(compiler.filterUserOptions(['badoption'])).toEqual(['compile', 'badoption']);
expect(compiler.filterUserOptions(['js'])).toEqual(['js']);
});

it('test getCacheFile from possible user-options', () => {
@@ -74,10 +76,10 @@ describe('Nim', () => {
};

for (const lang of ['cpp', 'c', 'objc']) {
unwrap(compiler.getCacheFile([lang], input, folder)).should.equal(expected[lang]);
expect(unwrap(compiler.getCacheFile([lang], input, folder))).toEqual(expected[lang]);
}

should.equal(compiler.getCacheFile([], input, folder), null);
should.equal(compiler.getCacheFile(['js'], input, folder), null);
expect(compiler.getCacheFile([], input, folder)).toBeNull();
expect(compiler.getCacheFile(['js'], input, folder)).toBeNull();
});
});
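The Nim hunk above also covers the remaining chai forms: `should.equal(value, null)` maps to `toBeNull()`, and `.should.deep.equal([...])` maps to `toEqual()`, which performs a deep structural comparison. A tiny illustrative sketch with made-up values:

```ts
import {expect, it} from 'vitest';

it('maps the remaining chai forms', () => {
    expect(null).toBeNull(); // was: should.equal(x, null)
    expect(['compile', 'foo']).toEqual(['compile', 'foo']); // was: .should.deep.equal([...])
});
```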
@@ -25,6 +25,7 @@
import {fileURLToPath} from 'url';

import _ from 'underscore';
import {beforeAll, describe, expect, it} from 'vitest';

import {AppDefaultArguments} from '../app.js';
import {BaseCompiler} from '../lib/base-compiler.js';
@@ -35,7 +36,7 @@ import {BaseTool} from '../lib/tooling/base-tool.js';
import {CompilerInfo} from '../types/compiler.interfaces.js';
import {LanguageKey} from '../types/languages.interfaces.js';

import {makeFakeCompilerInfo, should} from './utils.js';
import {makeFakeCompilerInfo} from './utils.js';

const languages = {
fake: {
@@ -165,7 +166,7 @@ describe('Options handler', () => {
} as unknown as ClientOptionsType;
}

before(() => {
beforeAll(() => {
fakeOptionProps = properties.fakeProps(optionsProps);
compilerProps = new properties.CompilerProps(languages, fakeOptionProps);
optionsHandler = new ClientOptionsHandler([], compilerProps, {env: ['dev']} as unknown as AppDefaultArguments);
@@ -185,9 +186,9 @@ describe('Options handler', () => {
it('should always return an array of paths', () => {
const libs = optionsHandler.parseLibraries({fake: optionsProps.libs});
_.each(libs[languages.fake.id]['fakelib'].versions, version => {
Array.isArray(version.path).should.equal(true);
expect(Array.isArray(version.path)).toEqual(true);
});
libs.should.deep.equal({
expect(libs).toEqual({
fake: {
fakelib: {
description: 'Its is a real, fake lib!',
@@ -364,10 +365,8 @@ describe('Options handler', () => {
};
optionsHandler.setCompilers(compilers);
_.each(optionsHandler.get().compilers, compiler => {
should.equal(
compiler['$order'],
expect(compiler['$order']).toEqual(
expectedOrder[(compiler as CompilerInfo).group][(compiler as CompilerInfo).id],
`group: ${(compiler as CompilerInfo).group} id: ${(compiler as CompilerInfo).id}`,
);
});
optionsHandler.setCompilers([]);
@@ -382,10 +381,10 @@ describe('Options handler', () => {
compiler.initialiseLibraries(clientOptions);

const staticlinks = compiler.getStaticLibraryLinks([{id: 'fs', version: 'std'}]);
staticlinks.should.deep.equal(['-lc++fs', '-lrt', '-lpthread']);
expect(staticlinks).toEqual(['-lc++fs', '-lrt', '-lpthread']);

const sharedlinks = compiler.getSharedLibraryLinks([{id: 'fs', version: 'std'}]);
sharedlinks.should.deep.equal([]);
expect(sharedlinks).toEqual([]);
});
it('should sort static libraries', () => {
const libs = optionsHandler.parseLibraries({fake: optionsProps.libs});
@@ -397,13 +396,13 @@ describe('Options handler', () => {
compiler.initialiseLibraries(clientOptions);

let staticlinks = compiler.getSortedStaticLibraries([{id: 'someotherlib', version: 'trunk'}]);
staticlinks.should.deep.equal(['someotherlib', 'c++fs']);
expect(staticlinks).toEqual(['someotherlib', 'c++fs']);

staticlinks = compiler.getSortedStaticLibraries([
{id: 'fs', version: 'std'},
{id: 'someotherlib', version: 'trunk'},
]);
staticlinks.should.deep.equal(['someotherlib', 'c++fs', 'rt', 'pthread']);
expect(staticlinks).toEqual(['someotherlib', 'c++fs', 'rt', 'pthread']);
});
it('library sort special case 1', () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -415,7 +414,7 @@ describe('Options handler', () => {
compiler.initialiseLibraries(clientOptions);

const staticlinks = compiler.getSortedStaticLibraries([{id: 'fs', version: 'std'}]);
staticlinks.should.deep.equal(['fsextra', 'c++fs', 'rt', 'pthread']);
expect(staticlinks).toEqual(['fsextra', 'c++fs', 'rt', 'pthread']);
});
it('library sort special case 2', () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -431,7 +430,7 @@ describe('Options handler', () => {
{id: 'fs', version: 'std'},
{id: 'someotherlib', version: 'trunk'},
]);
staticlinks.should.deep.equal(['yalib', 'someotherlib', 'fsextra', 'c++fs', 'rt', 'pthread']);
expect(staticlinks).toEqual(['yalib', 'someotherlib', 'fsextra', 'c++fs', 'rt', 'pthread']);
});
it('library sort special case 3', () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -447,7 +446,7 @@ describe('Options handler', () => {
{id: 'fs', version: 'std'},
{id: 'someotherlib', version: 'trunk'},
]);
staticlinks.should.deep.equal(['fourthlib', 'yalib', 'someotherlib', 'fsextra', 'c++fs', 'rt', 'pthread']);
expect(staticlinks).toEqual(['fourthlib', 'yalib', 'someotherlib', 'fsextra', 'c++fs', 'rt', 'pthread']);
});
it('filtered library list', () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -461,7 +460,7 @@ describe('Options handler', () => {
compiler.initialiseLibraries(clientOptions);

const libNames = _.keys(compiler.getSupportedLibrariesTest());
libNames.should.deep.equal(['fs', 'someotherlib']);
expect(libNames).toEqual(['fs', 'someotherlib']);
});
it('can detect libraries from options', () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -476,13 +475,13 @@ describe('Options handler', () => {
libraries: [{id: 'ctre', version: 'trunk'}],
options: ['-O3', '--std=c++17', '-lhello'],
};
compiler.tryAutodetectLibraries(obj).should.equal(true);
expect(compiler.tryAutodetectLibraries(obj)).toEqual(true);

obj.libraries.should.deep.equal([
expect(obj.libraries).toEqual([
{id: 'ctre', version: 'trunk'},
{id: 'autolib', version: 'autodetect'},
]);
obj.options.should.deep.equal(['-O3', '--std=c++17']);
expect(obj.options).toEqual(['-O3', '--std=c++17']);
});
it("server-side library alias support (just in case client doesn't support it)", () => {
const libs = moreOptionsHandler.parseLibraries({fake: moreLibProps.libs});
@@ -498,7 +497,7 @@ describe('Options handler', () => {
compiler.initialiseLibraries(clientOptions);

const staticlinks = compiler.getSortedStaticLibraries([{id: 'someotherlib', version: 'master'}]);
staticlinks.should.deep.equal(['someotherlib', 'c++fs']);
expect(staticlinks).toEqual(['someotherlib', 'c++fs']);
});
it('should be able to parse basic tools', () => {
class TestBaseTool extends BaseTool {
@@ -514,7 +513,7 @@ describe('Options handler', () => {
delete tool.env;
});

tools.should.deep.equal({
expect(tools).toEqual({
fake: {
faketool: {
id: 'faketool',
@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {Packager} from '../lib/packager.js';

import {fs, newTempDir, path} from './utils.js';
@@ -30,33 +32,41 @@ function writeTestFile(filepath) {
return fs.writeFile(filepath, '#!/bin/sh\n\necho Hello, world!\n\n');
}

describe('Packager', function () {
it('should be able to package 1 file', async () => {
const pack = new Packager();
describe('Packager', () => {
it(
'should be able to package 1 file',
async () => {
const pack = new Packager();

const dirPath = newTempDir();
await writeTestFile(path.join(dirPath, 'hello.txt'));
const dirPath = newTempDir();
await writeTestFile(path.join(dirPath, 'hello.txt'));

const targzPath = path.join(dirPath, 'package.tgz');
await pack.package(dirPath, targzPath);
const targzPath = path.join(dirPath, 'package.tgz');
await pack.package(dirPath, targzPath);

await fs.exists(targzPath).should.eventually.equal(true);
}).timeout(5000);
await expect(fs.exists(targzPath)).resolves.toBe(true);
},
{timeout: 5000},
);

it('should be able to unpack', async () => {
const pack = new Packager();
it(
'should be able to unpack',
async () => {
const pack = new Packager();

const dirPath = newTempDir();
await writeTestFile(path.join(dirPath, 'hello.txt'));
const dirPath = newTempDir();
await writeTestFile(path.join(dirPath, 'hello.txt'));

const targzPath = path.join(dirPath, 'package.tgz');
await pack.package(dirPath, targzPath);
const targzPath = path.join(dirPath, 'package.tgz');
await pack.package(dirPath, targzPath);

const unpackPath = newTempDir();
const pack2 = new Packager();
await pack2.unpack(targzPath, unpackPath);
const unpackPath = newTempDir();
const pack2 = new Packager();
await pack2.unpack(targzPath, unpackPath);

const unpackedFilepath = path.join(unpackPath, 'hello.txt');
await fs.exists(unpackedFilepath).should.eventually.equal(true);
}).timeout(5000);
const unpackedFilepath = path.join(unpackPath, 'hello.txt');
await expect(fs.exists(unpackedFilepath)).resolves.toBe(true);
},
{timeout: 5000},
);
});
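The Packager hunk above shows how mocha's chained `.timeout(5000)` becomes an options object passed to `it`. A minimal sketch of the same shape, with a placeholder body rather than the real packaging code:

```ts
import {expect, it} from 'vitest';

it(
    'finishes a slow operation',
    async () => {
        // placeholder for real async work such as packaging a directory
        await expect(Promise.resolve(true)).resolves.toBe(true);
    },
    {timeout: 5000}, // per-test timeout, replacing mocha's .timeout(5000)
);
```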
@@ -1,568 +0,0 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import path from 'path';
|
||||
|
||||
import {PascalUtils} from '../lib/compilers/pascal-utils.js';
|
||||
import {PascalWinCompiler} from '../lib/compilers/pascal-win.js';
|
||||
import {FPCCompiler} from '../lib/compilers/pascal.js';
|
||||
import {PascalDemangler} from '../lib/demangler/index.js';
|
||||
import * as utils from '../lib/utils.js';
|
||||
|
||||
import {fs, makeCompilationEnvironment} from './utils.js';
|
||||
|
||||
const languages = {
|
||||
pascal: {id: 'pascal'},
|
||||
};
|
||||
|
||||
describe('Pascal', () => {
|
||||
let compiler;
|
||||
|
||||
before(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new FPCCompiler(info, ce);
|
||||
});
|
||||
|
||||
it('Basic compiler setup', () => {
|
||||
if (process.platform === 'win32') {
|
||||
compiler.getOutputFilename('/tmp/', 'output.pas').should.equal('\\tmp\\output.s');
|
||||
} else {
|
||||
compiler.getOutputFilename('/tmp/', 'output.pas').should.equal('/tmp/output.s');
|
||||
}
|
||||
});
|
||||
|
||||
describe('Pascal signature composer function', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Handle 0 parameter methods', function () {
|
||||
demangler.composeReadableMethodSignature('', '', 'myfunc', '').should.equal('myfunc()');
|
||||
demangler.composeReadableMethodSignature('output', '', 'myfunc', '').should.equal('myfunc()');
|
||||
demangler
|
||||
.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', '')
|
||||
.should.equal('tmyclass.myfunc()');
|
||||
});
|
||||
|
||||
it('Handle 1 parameter methods', function () {
|
||||
demangler.composeReadableMethodSignature('output', '', 'myfunc', 'integer').should.equal('myfunc(integer)');
|
||||
demangler
|
||||
.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', 'integer')
|
||||
.should.equal('tmyclass.myfunc(integer)');
|
||||
});
|
||||
|
||||
it('Handle 2 parameter methods', function () {
|
||||
demangler
|
||||
.composeReadableMethodSignature('output', '', 'myfunc', 'integer,string')
|
||||
.should.equal('myfunc(integer,string)');
|
||||
demangler
|
||||
.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', 'integer,string')
|
||||
.should.equal('tmyclass.myfunc(integer,string)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling FPC 2.6', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should demangle OUTPUT_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE', function () {
|
||||
demangler
|
||||
.demangle('OUTPUT_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE:')
|
||||
.should.equal('maxarray(array_of_double,array_of_double)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_TMYCLASS_$__MYPROC$ANSISTRING', function () {
|
||||
demangler.demangle('OUTPUT_TMYCLASS_$__MYPROC$ANSISTRING:').should.equal('tmyclass.myproc(ansistring)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_TMYCLASS_$__MYFUNC$$ANSISTRING', function () {
|
||||
demangler.demangle('OUTPUT_TMYCLASS_$__MYFUNC$$ANSISTRING:').should.equal('tmyclass.myfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_NOPARAMFUNC$$ANSISTRING', function () {
|
||||
demangler.demangle('OUTPUT_NOPARAMFUNC$$ANSISTRING:').should.equal('noparamfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_NOPARAMPROC', function () {
|
||||
demangler.demangle('OUTPUT_NOPARAMPROC:').should.equal('noparamproc()');
|
||||
});
|
||||
|
||||
it('Should demangle U_OUTPUT_MYGLOBALVAR', function () {
|
||||
demangler.demangle('U_OUTPUT_MYGLOBALVAR:').should.equal('myglobalvar');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_INIT (custom method)', function () {
|
||||
demangler.demangle('OUTPUT_INIT:').should.equal('init()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_init (builtin symbol)', function () {
|
||||
demangler.demangle('OUTPUT_init:').should.equal('unit_initialization');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling FPC 3.2', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should demangle OUTPUT_$$_SQUARE$LONGINT$$LONGINT', function () {
|
||||
demangler.demangle('OUTPUT_$$_SQUARE$LONGINT$$LONGINT:').should.equal('square(longint)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE', function () {
|
||||
demangler
|
||||
.demangle('OUTPUT_$$_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE:')
|
||||
.should.equal('maxarray(array_of_double,array_of_double)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYPROC$ANSISTRING', function () {
|
||||
demangler
|
||||
.demangle('OUTPUT$_$TMYCLASS_$__$$_MYPROC$ANSISTRING:')
|
||||
.should.equal('tmyclass.myproc(ansistring)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$$ANSISTRING', function () {
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$$ANSISTRING:').should.equal('tmyclass.myfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$$INTEGER', function () {
|
||||
demangler
|
||||
.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$$INTEGER:')
|
||||
.should.equal('tmyclass.myfunc(ansistring)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$INTEGER$INTEGER$$INTEGER', function () {
|
||||
demangler
|
||||
.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$INTEGER$INTEGER$$INTEGER:')
|
||||
.should.equal('tmyclass.myfunc(ansistring,integer,integer)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_NOPARAMFUNC$$ANSISTRING', function () {
|
||||
demangler.demangle('OUTPUT_$$_NOPARAMFUNC$$ANSISTRING:').should.equal('noparamfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_NOPARAMPROC', function () {
|
||||
demangler.demangle('OUTPUT_$$_NOPARAMPROC:').should.equal('noparamproc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_INIT', function () {
|
||||
demangler.demangle('OUTPUT_$$_INIT:').should.equal('init()');
|
||||
});
|
||||
|
||||
it('Should demangle U_$OUTPUT_$$_MYGLOBALVAR', function () {
|
||||
demangler.demangle('U_$OUTPUT_$$_MYGLOBALVAR:').should.equal('myglobalvar');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling Fixed Symbols FPC 2.6', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should demangle OUTPUT_finalize_implicit', function () {
|
||||
demangler.demangle('OUTPUT_finalize_implicit:').should.equal('unit_finalization_implicit');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling Fixed Symbols FPC 3.2', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should demangle OUTPUT_$$_init', function () {
|
||||
demangler.demangle('OUTPUT_$$_init:').should.equal('unit_initialization');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize', function () {
|
||||
demangler.demangle('OUTPUT_$$_finalize:').should.equal('unit_finalization');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_init_implicit', function () {
|
||||
demangler.demangle('OUTPUT_$$_init_implicit:').should.equal('unit_initialization_implicit');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize_implicit', function () {
|
||||
demangler.demangle('OUTPUT_$$_finalize_implicit:').should.equal('unit_finalization_implicit');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize_implicit', function () {
|
||||
demangler.demangle('OUTPUT_$$_finalize_implicit:').should.equal('unit_finalization_implicit');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal NOT Demangling certain symbols FPC 2.6', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should NOT demangle VMT_OUTPUT_TMYCLASS', function () {
|
||||
demangler.demangle('VMT_OUTPUT_TMYCLASS:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle RTTI_OUTPUT_TMYCLASS', function () {
|
||||
demangler.demangle('RTTI_OUTPUT_TMYCLASS:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle INIT$_OUTPUT', function () {
|
||||
demangler.demangle('INIT$_OUTPUT:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle FINALIZE$_OUTPUT', function () {
|
||||
demangler.demangle('FINALIZE$_OUTPUT:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DEBUGSTART_OUTPUT', function () {
|
||||
demangler.demangle('DEBUGSTART_OUTPUT:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DBGREF_OUTPUT_THELLO', function () {
|
||||
demangler.demangle('DBGREF_OUTPUT_THELLO:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle non-label', function () {
|
||||
demangler.demangle(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2').should.equal(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal NOT Demangling certain symbols FPC 3.2', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should NOT demangle RTTI_$OUTPUT_$$_TMYCLASS', function () {
|
||||
demangler.demangle('RTTI_$OUTPUT_$$_TMYCLASS:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle .Ld1', function () {
|
||||
demangler.demangle('.Ld1:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle _$OUTPUT$_Ld3 (Same in FPC 2.6 and 3.2)', function () {
|
||||
demangler.demangle('_$OUTPUT$_Ld3:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle INIT$_$OUTPUT', function () {
|
||||
demangler.demangle('INIT$_$OUTPUT:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DEBUGSTART_$OUTPUT', function () {
|
||||
demangler.demangle('DEBUGSTART_$OUTPUT:').should.equal(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DBGREF_$OUTPUT_$$_THELLO', function () {
|
||||
demangler.demangle('DBGREF_$OUTPUT_$$_THELLO:').should.equal(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add, order and demangle inline', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYTEST:');
|
||||
demangler.demangle('U_$OUTPUT_$$_MYGLOBALVAR:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYTEST2:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER:');
|
||||
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2').should.equal(' call tmyclass.mytest2()');
|
||||
demangler.demangleIfNeeded(' movl U_$OUTPUT_$$_MYGLOBALVAR,%eax').should.equal(' movl myglobalvar,%eax');
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2').should.equal(' call tmyclass.mytest2()');
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST').should.equal(' call tmyclass.mytest()');
|
||||
demangler
|
||||
.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING')
|
||||
.should.equal(' call tmyclass.myoverload(ansistring)');
|
||||
demangler
|
||||
.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER')
|
||||
.should.equal(' call tmyclass.myoverload(integer)');
|
||||
|
||||
demangler.demangleIfNeeded('.Le1').should.equal('.Le1');
|
||||
demangler.demangleIfNeeded('_$SomeThing').should.equal('_$SomeThing');
|
||||
});
|
||||
|
||||
describe('Add, order and demangle inline - using addDemangleToCache()', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYTEST:');
|
||||
demangler.addDemangleToCache('U_$OUTPUT_$$_MYGLOBALVAR:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYTEST2:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER:');
|
||||
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2').should.equal(' call tmyclass.mytest2()');
|
||||
demangler.demangleIfNeeded(' movl U_$OUTPUT_$$_MYGLOBALVAR,%eax').should.equal(' movl myglobalvar,%eax');
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2').should.equal(' call tmyclass.mytest2()');
|
||||
demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST').should.equal(' call tmyclass.mytest()');
|
||||
demangler
|
||||
.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING')
|
||||
.should.equal(' call tmyclass.myoverload(ansistring)');
|
||||
demangler
|
||||
.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER')
|
||||
.should.equal(' call tmyclass.myoverload(integer)');
|
||||
|
||||
demangler.demangleIfNeeded('.Le1').should.equal('.Le1');
|
||||
});
|
||||
|
||||
describe('Pascal Ignored Symbols', function () {
|
||||
const demangler = new PascalDemangler();
|
||||
|
||||
it('Should ignore certain labels', function () {
|
||||
demangler.shouldIgnoreSymbol('.Le1').should.equal(true);
|
||||
demangler.shouldIgnoreSymbol('_$SomeThing').should.equal(true);
|
||||
});
|
||||
|
||||
it('Should be able to differentiate between System and User functions', function () {
|
||||
demangler.shouldIgnoreSymbol('RTTI_OUTPUT_MyProperty').should.equal(true);
|
||||
demangler.shouldIgnoreSymbol('Rtti_Output_UserFunction').should.equal(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal ASM line number injection', function () {
|
||||
before(() => {
|
||||
compiler.demanglerClass = PascalDemangler;
|
||||
compiler.demangler = new PascalDemangler(null, compiler);
|
||||
});
|
||||
|
||||
it('Should have line numbering', function () {
|
||||
return new Promise(function (resolve) {
|
||||
fs.readFile('test/pascal/asm-example.s', function (err, buffer) {
|
||||
const asmLines = utils.splitLines(buffer.toString());
|
||||
compiler.preProcessLines(asmLines);
|
||||
|
||||
resolve(
|
||||
Promise.all([
|
||||
asmLines.should.include('# [output.pas]'),
|
||||
asmLines.should.include(' .file 1 "output.pas"'),
|
||||
asmLines.should.include('# [13] Square := num * num + 14;'),
|
||||
asmLines.should.include(' .loc 1 13 0'),
|
||||
asmLines.should.include('.Le0:'),
|
||||
asmLines.should.include(' .cfi_endproc'),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// describe('Pascal objdump filtering', function () {
|
||||
// it('Should filter out most of the runtime', function () {
|
||||
// return new Promise(function (resolve) {
|
||||
// fs.readFile('test/pascal/objdump-example.s', function (err, buffer) {
|
||||
// const output = FPCCompiler.preProcessBinaryAsm(buffer.toString());
|
||||
// resolve(Promise.all([
|
||||
// utils.splitLines(output).length.should.be.below(500),
|
||||
// output.should.not.include('fpc_zeromem():'),
|
||||
// output.should.include('SQUARE():'),
|
||||
// ]));
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
|
||||
describe('Pascal parseOutput', () => {
|
||||
it('should return parsed output', () => {
|
||||
const result = {
|
||||
stdout: 'Hello, world!',
|
||||
stderr: '',
|
||||
};
|
||||
|
||||
compiler.parseOutput(result, '/tmp/path/output.pas', '/tmp/path').should.deep.equal({
|
||||
inputFilename: 'output.pas',
|
||||
stdout: [
|
||||
{
|
||||
text: 'Hello, world!',
|
||||
},
|
||||
],
|
||||
stderr: [],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal filetype detection', () => {
|
||||
const pasUtils = new PascalUtils();
|
||||
const progSource = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
const unitSource = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
it('Should detect simple program', () => {
|
||||
pasUtils.isProgram(progSource).should.equal(true);
|
||||
pasUtils.isProgram(unitSource).should.equal(false);
|
||||
});
|
||||
|
||||
it('Should detect simple unit', () => {
|
||||
pasUtils.isUnit(progSource).should.equal(false);
|
||||
pasUtils.isUnit(unitSource).should.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multifile writing behaviour', function () {
|
||||
let compiler;
|
||||
|
||||
before(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new FPCCompiler(info, ce);
|
||||
});
|
||||
|
||||
it('Original behaviour (old unitname)', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('examples/pascal/default.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'output.pas')),
|
||||
utils.fileExists(path.join(dirPath, 'output.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(false), // note: will be written somewhere else
|
||||
]);
|
||||
});
|
||||
|
||||
it('Original behaviour (just a unit file)', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'example.pas')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(false), // note: will be written somewhere else
|
||||
]);
|
||||
});
|
||||
|
||||
it('Writing program instead of a unit', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'prog.dpr')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(false),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(true),
|
||||
]);
|
||||
});
|
||||
|
||||
it('Writing program with a unit', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [
|
||||
{
|
||||
filename: 'example.pas',
|
||||
contents: '{ hello\n world }',
|
||||
},
|
||||
];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'prog.dpr')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(true),
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multifile writing behaviour Pascal-WIN', function () {
|
||||
let compiler;
|
||||
|
||||
before(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new PascalWinCompiler(info, ce);
|
||||
});
|
||||
|
||||
it('Original behaviour (old unitname)', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('examples/pascal/default.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'output.pas')),
|
||||
utils.fileExists(path.join(dirPath, 'output.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(false), // note: will be written somewhere else
|
||||
]);
|
||||
});
|
||||
|
||||
it('Original behaviour (just a unit file)', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'example.pas')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(false), // note: will be written somewhere else
|
||||
]);
|
||||
});
|
||||
|
||||
it('Writing program instead of a unit', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'prog.dpr')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(false),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(true),
|
||||
]);
|
||||
});
|
||||
|
||||
it('Writing program with a unit', async function () {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [
|
||||
{
|
||||
filename: 'example.pas',
|
||||
contents: '{ hello\n world }',
|
||||
},
|
||||
];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
return Promise.all([
|
||||
writeSummary.inputFilename.should.equal(path.join(dirPath, 'prog.dpr')),
|
||||
utils.fileExists(path.join(dirPath, 'example.pas')).should.eventually.equal(true),
|
||||
utils.fileExists(path.join(dirPath, 'prog.dpr')).should.eventually.equal(true),
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
567
test/pascal-tests.ts
Normal file
@@ -0,0 +1,567 @@
|
||||
// Copyright (c) 2017, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import path from 'path';
|
||||
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {PascalUtils} from '../lib/compilers/pascal-utils.js';
|
||||
import {PascalWinCompiler} from '../lib/compilers/pascal-win.js';
|
||||
import {FPCCompiler} from '../lib/compilers/pascal.js';
|
||||
import {PascalDemangler} from '../lib/demangler/index.js';
|
||||
import * as utils from '../lib/utils.js';
|
||||
|
||||
import {fs, makeCompilationEnvironment} from './utils.js';
|
||||
|
||||
const languages = {
|
||||
pascal: {id: 'pascal'},
|
||||
};
|
||||
|
||||
describe('Pascal', () => {
|
||||
let compiler;
|
||||
|
||||
beforeAll(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new FPCCompiler(info as any, ce);
|
||||
});
|
||||
|
||||
it('Basic compiler setup', () => {
|
||||
if (process.platform === 'win32') {
|
||||
expect(compiler.getOutputFilename('/tmp/', 'output.pas')).toEqual('\\tmp\\output.s');
|
||||
} else {
|
||||
expect(compiler.getOutputFilename('/tmp/', 'output.pas')).toEqual('/tmp/output.s');
|
||||
}
|
||||
});
|
||||
|
||||
describe('Pascal signature composer function', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Handle 0 parameter methods', () => {
|
||||
expect(demangler.composeReadableMethodSignature('', '', 'myfunc', '')).toEqual('myfunc()');
|
||||
expect(demangler.composeReadableMethodSignature('output', '', 'myfunc', '')).toEqual('myfunc()');
|
||||
expect(demangler.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', '')).toEqual(
|
||||
'tmyclass.myfunc()',
|
||||
);
|
||||
});
|
||||
|
||||
it('Handle 1 parameter methods', () => {
|
||||
expect(demangler.composeReadableMethodSignature('output', '', 'myfunc', 'integer')).toEqual(
|
||||
'myfunc(integer)',
|
||||
);
|
||||
expect(demangler.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', 'integer')).toEqual(
|
||||
'tmyclass.myfunc(integer)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Handle 2 parameter methods', () => {
|
||||
expect(demangler.composeReadableMethodSignature('output', '', 'myfunc', 'integer,string')).toEqual(
|
||||
'myfunc(integer,string)',
|
||||
);
|
||||
expect(demangler.composeReadableMethodSignature('output', 'tmyclass', 'myfunc', 'integer,string')).toEqual(
|
||||
'tmyclass.myfunc(integer,string)',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling FPC 2.6', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should demangle OUTPUT_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE', () => {
|
||||
expect(demangler.demangle('OUTPUT_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE:')).toEqual(
|
||||
'maxarray(array_of_double,array_of_double)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_TMYCLASS_$__MYPROC$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT_TMYCLASS_$__MYPROC$ANSISTRING:')).toEqual('tmyclass.myproc(ansistring)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_TMYCLASS_$__MYFUNC$$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT_TMYCLASS_$__MYFUNC$$ANSISTRING:')).toEqual('tmyclass.myfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_NOPARAMFUNC$$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT_NOPARAMFUNC$$ANSISTRING:')).toEqual('noparamfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_NOPARAMPROC', () => {
|
||||
expect(demangler.demangle('OUTPUT_NOPARAMPROC:')).toEqual('noparamproc()');
|
||||
});
|
||||
|
||||
it('Should demangle U_OUTPUT_MYGLOBALVAR', () => {
|
||||
expect(demangler.demangle('U_OUTPUT_MYGLOBALVAR:')).toEqual('myglobalvar');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_INIT (custom method)', () => {
|
||||
expect(demangler.demangle('OUTPUT_INIT:')).toEqual('init()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_init (builtin symbol)', () => {
|
||||
expect(demangler.demangle('OUTPUT_init:')).toEqual('unit_initialization');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling FPC 3.2', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should demangle OUTPUT_$$_SQUARE$LONGINT$$LONGINT', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_SQUARE$LONGINT$$LONGINT:')).toEqual('square(longint)');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_MAXARRAY$array_of_DOUBLE$array_of_DOUBLE:')).toEqual(
|
||||
'maxarray(array_of_double,array_of_double)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYPROC$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYPROC$ANSISTRING:')).toEqual(
|
||||
'tmyclass.myproc(ansistring)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$$ANSISTRING:')).toEqual('tmyclass.myfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$$INTEGER', () => {
|
||||
expect(demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$$INTEGER:')).toEqual(
|
||||
'tmyclass.myfunc(ansistring)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$INTEGER$INTEGER$$INTEGER', () => {
|
||||
expect(demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYFUNC$ANSISTRING$INTEGER$INTEGER$$INTEGER:')).toEqual(
|
||||
'tmyclass.myfunc(ansistring,integer,integer)',
|
||||
);
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_NOPARAMFUNC$$ANSISTRING', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_NOPARAMFUNC$$ANSISTRING:')).toEqual('noparamfunc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_NOPARAMPROC', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_NOPARAMPROC:')).toEqual('noparamproc()');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_INIT', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_INIT:')).toEqual('init()');
|
||||
});
|
||||
|
||||
it('Should demangle U_$OUTPUT_$$_MYGLOBALVAR', () => {
|
||||
expect(demangler.demangle('U_$OUTPUT_$$_MYGLOBALVAR:')).toEqual('myglobalvar');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling Fixed Symbols FPC 2.6', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should demangle OUTPUT_finalize_implicit', () => {
|
||||
expect(demangler.demangle('OUTPUT_finalize_implicit:')).toEqual('unit_finalization_implicit');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Demangling Fixed Symbols FPC 3.2', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should demangle OUTPUT_$$_init', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_init:')).toEqual('unit_initialization');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_finalize:')).toEqual('unit_finalization');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_init_implicit', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_init_implicit:')).toEqual('unit_initialization_implicit');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize_implicit', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_finalize_implicit:')).toEqual('unit_finalization_implicit');
|
||||
});
|
||||
|
||||
it('Should demangle OUTPUT_$$_finalize_implicit', () => {
|
||||
expect(demangler.demangle('OUTPUT_$$_finalize_implicit:')).toEqual('unit_finalization_implicit');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal NOT Demangling certain symbols FPC 2.6', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should NOT demangle VMT_OUTPUT_TMYCLASS', () => {
|
||||
expect(demangler.demangle('VMT_OUTPUT_TMYCLASS:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle RTTI_OUTPUT_TMYCLASS', () => {
|
||||
expect(demangler.demangle('RTTI_OUTPUT_TMYCLASS:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle INIT$_OUTPUT', () => {
|
||||
expect(demangler.demangle('INIT$_OUTPUT:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle FINALIZE$_OUTPUT', () => {
|
||||
expect(demangler.demangle('FINALIZE$_OUTPUT:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DEBUGSTART_OUTPUT', () => {
|
||||
expect(demangler.demangle('DEBUGSTART_OUTPUT:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DBGREF_OUTPUT_THELLO', () => {
|
||||
expect(demangler.demangle('DBGREF_OUTPUT_THELLO:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle non-label', () => {
|
||||
expect(demangler.demangle(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2')).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal NOT Demangling certain symbols FPC 3.2', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should NOT demangle RTTI_$OUTPUT_$$_TMYCLASS', () => {
|
||||
expect(demangler.demangle('RTTI_$OUTPUT_$$_TMYCLASS:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle .Ld1', () => {
|
||||
expect(demangler.demangle('.Ld1:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle _$OUTPUT$_Ld3 (Same in FPC 2.6 and 3.2)', () => {
|
||||
expect(demangler.demangle('_$OUTPUT$_Ld3:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle INIT$_$OUTPUT', () => {
|
||||
expect(demangler.demangle('INIT$_$OUTPUT:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DEBUGSTART_$OUTPUT', () => {
|
||||
expect(demangler.demangle('DEBUGSTART_$OUTPUT:')).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should NOT demangle DBGREF_$OUTPUT_$$_THELLO', () => {
|
||||
expect(demangler.demangle('DBGREF_$OUTPUT_$$_THELLO:')).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add, order and demangle inline', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('should work', () => {
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYTEST:');
|
||||
demangler.demangle('U_$OUTPUT_$$_MYGLOBALVAR:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYTEST2:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING:');
|
||||
demangler.demangle('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER:');
|
||||
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2')).toEqual(
|
||||
' call tmyclass.mytest2()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' movl U_$OUTPUT_$$_MYGLOBALVAR,%eax')).toEqual(
|
||||
' movl myglobalvar,%eax',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2')).toEqual(
|
||||
' call tmyclass.mytest2()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST')).toEqual(
|
||||
' call tmyclass.mytest()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING')).toEqual(
|
||||
' call tmyclass.myoverload(ansistring)',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER')).toEqual(
|
||||
' call tmyclass.myoverload(integer)',
|
||||
);
|
||||
|
||||
expect(demangler.demangleIfNeeded('.Le1')).toEqual('.Le1');
|
||||
expect(demangler.demangleIfNeeded('_$SomeThing')).toEqual('_$SomeThing');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add, order and demangle inline - using addDemangleToCache()', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('should work', () => {
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYTEST:');
|
||||
demangler.addDemangleToCache('U_$OUTPUT_$$_MYGLOBALVAR:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYTEST2:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING:');
|
||||
demangler.addDemangleToCache('OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER:');
|
||||
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2')).toEqual(
|
||||
' call tmyclass.mytest2()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' movl U_$OUTPUT_$$_MYGLOBALVAR,%eax')).toEqual(
|
||||
' movl myglobalvar,%eax',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST2')).toEqual(
|
||||
' call tmyclass.mytest2()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYTEST')).toEqual(
|
||||
' call tmyclass.mytest()',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$ANSISTRING')).toEqual(
|
||||
' call tmyclass.myoverload(ansistring)',
|
||||
);
|
||||
expect(demangler.demangleIfNeeded(' call OUTPUT$_$TMYCLASS_$__$$_MYOVERLOAD$INTEGER')).toEqual(
|
||||
' call tmyclass.myoverload(integer)',
|
||||
);
|
||||
|
||||
expect(demangler.demangleIfNeeded('.Le1')).toEqual('.Le1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal Ignored Symbols', () => {
|
||||
const demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
|
||||
it('Should ignore certain labels', () => {
|
||||
expect(demangler.shouldIgnoreSymbol('.Le1')).toEqual(true);
|
||||
expect(demangler.shouldIgnoreSymbol('_$SomeThing')).toEqual(true);
|
||||
});
|
||||
|
||||
it('Should be able to differentiate between System and User functions', () => {
|
||||
expect(demangler.shouldIgnoreSymbol('RTTI_OUTPUT_MyProperty')).toEqual(true);
|
||||
expect(demangler.shouldIgnoreSymbol('Rtti_Output_UserFunction')).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal ASM line number injection', () => {
|
||||
beforeAll(() => {
|
||||
compiler.demanglerClass = PascalDemangler;
|
||||
compiler.demangler = new PascalDemangler('demangler-exe', compiler);
|
||||
});
|
||||
|
||||
it('Should have line numbering', async () => {
|
||||
const asmLines = utils.splitLines((await fs.readFile('test/pascal/asm-example.s')).toString());
|
||||
compiler.preProcessLines(asmLines);
|
||||
expect(asmLines).toContain('# [output.pas]');
|
||||
expect(asmLines).toContain(' .file 1 "output.pas"');
|
||||
expect(asmLines).toContain('# [13] Square := num * num + 14;');
|
||||
expect(asmLines).toContain(' .loc 1 13 0');
|
||||
expect(asmLines).toContain('.Le0:');
|
||||
expect(asmLines).toContain(' .cfi_endproc');
|
||||
});
|
||||
});
|
||||
|
||||
// describe('Pascal objdump filtering', function () {
|
||||
// it('Should filter out most of the runtime', function () {
|
||||
// return new Promise(function (resolve) {
|
||||
// fs.readFile('test/pascal/objdump-example.s', function (err, buffer) {
|
||||
// const output = FPCCompiler.preProcessBinaryAsm(buffer.toString());
|
||||
// resolve(Promise.all([
|
||||
// utils.splitLines(output).length.should.be.below(500),
|
||||
// output.should.not.include('fpc_zeromem():'),
|
||||
// output.should.include('SQUARE():'),
|
||||
// ]));
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
|
||||
describe('Pascal parseOutput', () => {
|
||||
it('should return parsed output', () => {
|
||||
const result = {
|
||||
stdout: 'Hello, world!',
|
||||
stderr: '',
|
||||
};
|
||||
|
||||
expect(compiler.parseOutput(result, '/tmp/path/output.pas', '/tmp/path')).toEqual({
|
||||
inputFilename: 'output.pas',
|
||||
stdout: [
|
||||
{
|
||||
text: 'Hello, world!',
|
||||
},
|
||||
],
|
||||
stderr: [],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pascal filetype detection', () => {
|
||||
const pasUtils = new PascalUtils();
|
||||
const progSource = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
const unitSource = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
it('Should detect simple program', () => {
|
||||
expect(pasUtils.isProgram(progSource)).toEqual(true);
|
||||
expect(pasUtils.isProgram(unitSource)).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should detect simple unit', () => {
|
||||
expect(pasUtils.isUnit(progSource)).toEqual(false);
|
||||
expect(pasUtils.isUnit(unitSource)).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multifile writing behaviour', () => {
|
||||
let compiler;
|
||||
|
||||
beforeAll(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new FPCCompiler(info as unknown as any, ce);
|
||||
});
|
||||
|
||||
it('Original behaviour (old unitname)', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('examples/pascal/default.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'output.pas'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'output.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(false); // note: will be written somewhere else
|
||||
});
|
||||
|
||||
it('Original behaviour (just a unit file)', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'example.pas'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(false); // note: will be written somewhere else
|
||||
});
|
||||
|
||||
it('Writing program instead of a unit', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'prog.dpr'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(false);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('Writing program with a unit', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [
|
||||
{
|
||||
filename: 'example.pas',
|
||||
contents: '{ hello\n world }',
|
||||
},
|
||||
];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'prog.dpr'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multifile writing behaviour Pascal-WIN', () => {
|
||||
let compiler;
|
||||
|
||||
beforeAll(() => {
|
||||
const ce = makeCompilationEnvironment({languages});
|
||||
const info = {
|
||||
exe: null,
|
||||
remote: true,
|
||||
lang: languages.pascal.id,
|
||||
};
|
||||
|
||||
compiler = new PascalWinCompiler(info as any, ce);
|
||||
});
|
||||
|
||||
it('Original behaviour (old unitname)', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('examples/pascal/default.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'output.pas'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'output.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(false); // note: will be written somewhere else
|
||||
});
|
||||
|
||||
it('Original behaviour (just a unit file)', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/example.pas').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'example.pas'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(false); // note: will be written somewhere else
|
||||
});
|
||||
|
||||
it('Writing program instead of a unit', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'prog.dpr'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(false);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it('Writing program with a unit', async () => {
|
||||
const dirPath = await compiler.newTempDir();
|
||||
const filters = {};
|
||||
const files = [
|
||||
{
|
||||
filename: 'example.pas',
|
||||
contents: '{ hello\n world }',
|
||||
},
|
||||
];
|
||||
const source = fs.readFileSync('test/pascal/prog.dpr').toString('utf8');
|
||||
|
||||
const writeSummary = await compiler.writeAllFiles(dirPath, source, files, filters);
|
||||
|
||||
expect(writeSummary.inputFilename).toEqual(path.join(dirPath, 'prog.dpr'));
|
||||
await expect(utils.fileExists(path.join(dirPath, 'example.pas'))).resolves.toBe(true);
|
||||
await expect(utils.fileExists(path.join(dirPath, 'prog.dpr'))).resolves.toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -22,12 +22,15 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {MapFileReader} from '../lib/mapfiles/map-file.js';
|
||||
import {PELabelReconstructor} from '../lib/pe32-support.js';
|
||||
|
||||
describe('Basic reconstructions', function () {
|
||||
it('No lines', function () {
|
||||
describe('Basic reconstructions', () => {
|
||||
it('No lines', () => {
|
||||
const lines = [];
|
||||
const reconstructor = new PELabelReconstructor(lines, false, false);
|
||||
reconstructor.asmLines.length.should.equal(0);
|
||||
const reconstructor = new PELabelReconstructor(lines, false, new MapFileReader('unused'), false);
|
||||
expect(reconstructor.asmLines.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
116
test/pp-output-cases/filter-tests.ts
generated
@@ -1,116 +0,0 @@
|
||||
// Copyright (c) 2022, Compiler Explorer Authors
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright notice,
|
||||
// this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above copyright
|
||||
// notice, this list of conditions and the following disclaimer in the
|
||||
// documentation and/or other materials provided with the distribution.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
export const cases = [
|
||||
{
|
||||
input: `# 0 "/app/example.cpp"
|
||||
# 1 "/app//"
|
||||
# 0 "<built-in>"
|
||||
# 0 "<command-line>"
|
||||
# 1 "/usr/include/stdc-predef.h" 1 3 4
|
||||
# 0 "<command-line>" 2
|
||||
# 1 "/app/example.cpp"
|
||||
# 1 "/usr/include/assert.h" 1 3 4
|
||||
# 35 "/usr/include/assert.h" 3 4
|
||||
# 1 "/usr/include/features.h" 1 3 4
|
||||
# 461 "/usr/include/features.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 1 3 4
|
||||
# 452 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/bits/wordsize.h" 1 3 4
|
||||
# 453 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 2 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/bits/long-double.h" 1 3 4
|
||||
# 454 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 2 3 4
|
||||
# 462 "/usr/include/features.h" 2 3 4
|
||||
# 485 "/usr/include/features.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 1 3 4
|
||||
# 10 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/gnu/stubs-64.h" 1 3 4
|
||||
# 11 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 2 3 4
|
||||
# 486 "/usr/include/features.h" 2 3 4
|
||||
# 36 "/usr/include/assert.h" 2 3 4
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
extern "C" {
|
||||
|
||||
extern void __assert_fail (const char *__assertion, const char *__file,
|
||||
unsigned int __line, const char *__function)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
extern void __assert_perror_fail (int __errnum, const char *__file,
|
||||
unsigned int __line, const char *__function)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
extern void __assert (const char *__assertion, const char *__file, int __line)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
}
|
||||
# 2 "/app/example.cpp" 2
|
||||
|
||||
#line 11 "C:/WinSdk/Include/10.0.18362.0/ucrt\\assert.h"
|
||||
foo
|
||||
# 2 "<source>" 2
|
||||
bar
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
baz
|
||||
# 2 "<stdin>"
|
||||
biz
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
|
||||
|
||||
# 3 "/app/example.cpp"
|
||||
int main() {
|
||||
|
||||
# 4 "/app/example.cpp"
|
||||
#pragma foo bar
|
||||
# 4 "/app/example.cpp"
|
||||
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
(static_cast <bool> (
|
||||
# 5 "/app/example.cpp"
|
||||
false
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
) ? void (0) : __assert_fail (
|
||||
# 5 "/app/example.cpp"
|
||||
"false"
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
, "/app/example.cpp", 5, __extension__ __PRETTY_FUNCTION__))
|
||||
# 5 "/app/example.cpp"
|
||||
;
|
||||
}`,
|
||||
output: `bar
|
||||
biz
|
||||
int main() {
|
||||
|
||||
#pragma foo bar
|
||||
|
||||
(static_cast <bool> (
|
||||
false
|
||||
) ? void (0) : __assert_fail (
|
||||
"false"
|
||||
, "/app/example.cpp", 5, __extension__ __PRETTY_FUNCTION__))
|
||||
;
|
||||
}`,
|
||||
},
|
||||
];
|
||||
@@ -22,13 +22,13 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {BaseCompiler} from '../lib/base-compiler.js';
|
||||
import {CompilationEnvironment} from '../lib/compilation-env.js';
|
||||
import * as properties from '../lib/properties.js';
|
||||
import {CompilerInfo} from '../types/compiler.interfaces.js';
|
||||
|
||||
import * as filterTests from './pp-output-cases/filter-tests.js';
|
||||
|
||||
//const makeFakeCompilerInfo = (id: string, lang: string, group: string, semver: string, isSemver: boolean) => {
|
||||
const makeFakeCompilerInfo = (id, lang, group, semver, isSemver): Partial<CompilerInfo> => {
|
||||
return {
|
||||
@@ -51,9 +51,102 @@ describe('Preprocessor Output Handling', () => {
|
||||
compilerProps: (() => {}) as unknown as any,
|
||||
};
|
||||
const compiler = new BaseCompiler(compilerInfo as CompilerInfo, env as CompilationEnvironment);
|
||||
for (const testCase of filterTests.cases) {
|
||||
for (const testCase of cases) {
|
||||
const output = compiler.filterPP(testCase.input)[1];
|
||||
output.trim().should.eql(testCase.output.trim());
|
||||
expect(output.trim()).toEqual(testCase.output.trim());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const cases = [
|
||||
{
|
||||
input: `# 0 "/app/example.cpp"
|
||||
# 1 "/app//"
|
||||
# 0 "<built-in>"
|
||||
# 0 "<command-line>"
|
||||
# 1 "/usr/include/stdc-predef.h" 1 3 4
|
||||
# 0 "<command-line>" 2
|
||||
# 1 "/app/example.cpp"
|
||||
# 1 "/usr/include/assert.h" 1 3 4
|
||||
# 35 "/usr/include/assert.h" 3 4
|
||||
# 1 "/usr/include/features.h" 1 3 4
|
||||
# 461 "/usr/include/features.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 1 3 4
|
||||
# 452 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/bits/wordsize.h" 1 3 4
|
||||
# 453 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 2 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/bits/long-double.h" 1 3 4
|
||||
# 454 "/usr/include/x86_64-linux-gnu/sys/cdefs.h" 2 3 4
|
||||
# 462 "/usr/include/features.h" 2 3 4
|
||||
# 485 "/usr/include/features.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 1 3 4
|
||||
# 10 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 3 4
|
||||
# 1 "/usr/include/x86_64-linux-gnu/gnu/stubs-64.h" 1 3 4
|
||||
# 11 "/usr/include/x86_64-linux-gnu/gnu/stubs.h" 2 3 4
|
||||
# 486 "/usr/include/features.h" 2 3 4
|
||||
# 36 "/usr/include/assert.h" 2 3 4
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
extern "C" {
|
||||
|
||||
extern void __assert_fail (const char *__assertion, const char *__file,
|
||||
unsigned int __line, const char *__function)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
extern void __assert_perror_fail (int __errnum, const char *__file,
|
||||
unsigned int __line, const char *__function)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
extern void __assert (const char *__assertion, const char *__file, int __line)
|
||||
throw () __attribute__ ((__noreturn__));
|
||||
|
||||
}
|
||||
# 2 "/app/example.cpp" 2
|
||||
|
||||
#line 11 "C:/WinSdk/Include/10.0.18362.0/ucrt\\assert.h"
|
||||
foo
|
||||
# 2 "<source>" 2
|
||||
bar
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
baz
|
||||
# 2 "<stdin>"
|
||||
biz
|
||||
# 66 "/usr/include/assert.h" 3 4
|
||||
|
||||
|
||||
# 3 "/app/example.cpp"
|
||||
int main() {
|
||||
|
||||
# 4 "/app/example.cpp"
|
||||
#pragma foo bar
|
||||
# 4 "/app/example.cpp"
|
||||
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
(static_cast <bool> (
|
||||
# 5 "/app/example.cpp"
|
||||
false
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
) ? void (0) : __assert_fail (
|
||||
# 5 "/app/example.cpp"
|
||||
"false"
|
||||
# 5 "/app/example.cpp" 3 4
|
||||
, "/app/example.cpp", 5, __extension__ __PRETTY_FUNCTION__))
|
||||
# 5 "/app/example.cpp"
|
||||
;
|
||||
}`,
|
||||
output: `bar
|
||||
biz
|
||||
int main() {
|
||||
|
||||
#pragma foo bar
|
||||
|
||||
(static_cast <bool> (
|
||||
false
|
||||
) ? void (0) : __assert_fail (
|
||||
"false"
|
||||
, "/app/example.cpp", 5, __extension__ __PRETTY_FUNCTION__))
|
||||
;
|
||||
}`,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -22,6 +22,8 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {PPCICompiler} from '../lib/compilers/ppci.js';
|
||||
import {LanguageKey} from '../types/languages.interfaces.js';
|
||||
|
||||
@@ -31,7 +33,7 @@ const languages = {
|
||||
c: {id: 'c' as LanguageKey},
|
||||
};
|
||||
|
||||
describe('PPCI', function () {
|
||||
describe('PPCI', () => {
|
||||
let ce;
|
||||
const info = {
|
||||
exe: '/dev/null',
|
||||
@@ -43,28 +45,30 @@ describe('PPCI', function () {
|
||||
lang: languages.c.id,
|
||||
};
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
ce = makeCompilationEnvironment({languages});
|
||||
});
|
||||
|
||||
it('Should be ok with most arguments', () => {
|
||||
const compiler = new PPCICompiler(makeFakeCompilerInfo(info), ce);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '-help', '--something'])
|
||||
.should.deep.equal(['hello', '-help', '--something']);
|
||||
expect(compiler.filterUserOptions(['hello', '-help', '--something'])).toEqual([
|
||||
'hello',
|
||||
'-help',
|
||||
'--something',
|
||||
]);
|
||||
});
|
||||
|
||||
it('Should be ok with path argument', () => {
|
||||
const compiler = new PPCICompiler(makeFakeCompilerInfo(info), ce);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '--stuff', '/proc/cpuinfo'])
|
||||
.should.deep.equal(['hello', '--stuff', '/proc/cpuinfo']);
|
||||
expect(compiler.filterUserOptions(['hello', '--stuff', '/proc/cpuinfo'])).toEqual([
|
||||
'hello',
|
||||
'--stuff',
|
||||
'/proc/cpuinfo',
|
||||
]);
|
||||
});
|
||||
|
||||
it('Should be Not ok with report arguments', () => {
|
||||
const compiler = new PPCICompiler(makeFakeCompilerInfo(info), ce);
|
||||
compiler
|
||||
.filterUserOptions(['hello', '--report', '--text-report', '--html-report'])
|
||||
.should.deep.equal(['hello']);
|
||||
expect(compiler.filterUserOptions(['hello', '--report', '--text-report', '--html-report'])).toEqual(['hello']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,10 +22,9 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {unwrap} from '../lib/assert.js';
|
||||
import * as properties from '../lib/properties.js';
|
||||
import {afterAll, beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {should} from './utils.js';
|
||||
import * as properties from '../lib/properties.js';
|
||||
|
||||
const languages = {
|
||||
a: {id: 'a'},
|
||||
@@ -34,7 +33,7 @@ const languages = {
|
||||
describe('Properties', () => {
|
||||
let casesProps, overridingProps, compilerProps;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
properties.initialize('test/example-config/', ['test', 'overridden-base', 'overridden-tip']);
|
||||
casesProps = properties.propsFor('cases');
|
||||
overridingProps = properties.propsFor('overwrite');
|
||||
@@ -46,121 +45,121 @@ describe('Properties', () => {
|
||||
);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
afterAll(() => {
|
||||
properties.reset();
|
||||
});
|
||||
|
||||
it('Has working propsFor', () => {
|
||||
should.equal(properties.get('cases', 'exampleProperty'), casesProps('exampleProperty'));
|
||||
expect(properties.get('cases', 'exampleProperty')).toEqual(casesProps('exampleProperty'));
|
||||
});
|
||||
it('Does not find non existent properties when no default is set', () => {
|
||||
should.equal(casesProps('nonexistentProp'), undefined);
|
||||
expect(casesProps('nonexistentProp')).toEqual(undefined);
|
||||
});
|
||||
it('Falls back to default if value not found and default is set', () => {
|
||||
// Randomly generated number...
|
||||
casesProps('nonexistentProp', 4).should.be.equal(4);
|
||||
should.equal(casesProps('nonexistentProp', 4), 4);
|
||||
expect(casesProps('nonexistentProp', 4)).toEqual(4);
|
||||
expect(casesProps('nonexistentProp', 4)).toEqual(4);
|
||||
});
|
||||
it('Handles empty properties as empty strings', () => {
|
||||
should.equal(casesProps('emptyProperty'), '');
|
||||
expect(casesProps('emptyProperty')).toEqual('');
|
||||
});
|
||||
it('Handles bad numbers properties as strings', () => {
|
||||
should.equal(casesProps('001string'), '001');
|
||||
expect(casesProps('001string')).toEqual('001');
|
||||
});
|
||||
it('Handles bad numbers properties as strings', () => {
|
||||
should.equal(casesProps('0985string'), '0985');
|
||||
expect(casesProps('0985string')).toEqual('0985');
|
||||
});
|
||||
it('Ignores commented out properties', () => {
|
||||
should.equal(casesProps('commentedProperty'), undefined);
|
||||
expect(casesProps('commentedProperty')).toBeUndefined();
|
||||
});
|
||||
it('Ignores bad lines', () => {
|
||||
should.equal(casesProps('badLineIfYouSeeThisWithAnErrorItsOk'), undefined);
|
||||
expect(casesProps('badLineIfYouSeeThisWithAnErrorItsOk')).toBeUndefined();
|
||||
});
|
||||
it('Understands positive integers', () => {
|
||||
should.equal(casesProps('numericPropertyPositive'), 42);
|
||||
expect(casesProps('numericPropertyPositive')).toEqual(42);
|
||||
});
|
||||
it('Understands zero as integer', () => {
|
||||
should.equal(casesProps('numericPropertyZero'), 0);
|
||||
expect(casesProps('numericPropertyZero')).toEqual(0);
|
||||
});
|
||||
it('Understands negative integers', () => {
|
||||
should.equal(casesProps('numericPropertyNegative'), -11);
|
||||
expect(casesProps('numericPropertyNegative')).toEqual(-11);
|
||||
});
|
||||
it('Understands positive floats', () => {
|
||||
should.equal(casesProps('floatPropertyPositive'), 3.14);
|
||||
expect(casesProps('floatPropertyPositive')).toEqual(3.14);
|
||||
});
|
||||
it('Understands negative floats', () => {
|
||||
should.equal(casesProps('floatPropertyNegative'), -9000);
|
||||
expect(casesProps('floatPropertyNegative')).toEqual(-9000);
|
||||
});
|
||||
it('Does not understand comma decimal as float', () => {
|
||||
should.equal(casesProps('commaAsDecimalProperty'), '3,14');
|
||||
expect(casesProps('commaAsDecimalProperty')).toEqual('3,14');
|
||||
});
|
||||
it('Does not understand DASH-SPACE-NUMBER as a negative number', () => {
|
||||
should.equal(casesProps('stringPropertyNumberLike'), '- 97');
|
||||
expect(casesProps('stringPropertyNumberLike')).toEqual('- 97');
|
||||
});
|
||||
it('Understands yes as true boolean', () => {
|
||||
should.equal(casesProps('truePropertyYes'), true);
|
||||
expect(casesProps('truePropertyYes')).toBe(true);
|
||||
});
|
||||
it('Understands true as true boolean', () => {
|
||||
should.equal(casesProps('truePropertyTrue'), true);
|
||||
expect(casesProps('truePropertyTrue')).toBe(true);
|
||||
});
|
||||
it('Does not understand Yes as boolean', () => {
|
||||
should.equal(casesProps('stringPropertyYes'), 'Yes');
|
||||
expect(casesProps('stringPropertyYes')).toEqual('Yes');
|
||||
});
|
||||
it('Does not understand True as boolean', () => {
|
||||
should.equal(casesProps('stringPropertyTrue'), 'True');
|
||||
expect(casesProps('stringPropertyTrue')).toEqual('True');
|
||||
});
|
||||
it('Understands no as false boolean', () => {
|
||||
should.equal(casesProps('falsePropertyNo'), false);
|
||||
expect(casesProps('falsePropertyNo')).toBe(false);
|
||||
});
|
||||
it('Understands false as false boolean', () => {
|
||||
should.equal(casesProps('falsePropertyFalse'), false);
|
||||
expect(casesProps('falsePropertyFalse')).toBe(false);
|
||||
});
|
||||
it('Does not understand No as boolean', () => {
|
||||
should.equal(casesProps('stringPropertyNo'), 'No');
|
||||
expect(casesProps('stringPropertyNo')).toEqual('No');
|
||||
});
|
||||
it('Does not understand False as boolean', () => {
|
||||
should.equal(casesProps('stringPropertyFalse'), 'False');
|
||||
expect(casesProps('stringPropertyFalse')).toEqual('False');
|
||||
});
|
||||
it('Should find non overridden properties', () => {
|
||||
should.equal(overridingProps('nonOverriddenProperty'), '.... . .-.. .-.. ---');
|
||||
expect(overridingProps('nonOverriddenProperty')).toEqual('.... . .-.. .-.. ---');
|
||||
});
|
||||
it('Should handle overridden properties', () => {
|
||||
should.equal(overridingProps('overrodeProperty'), 'ACTUALLY USED');
|
||||
expect(overridingProps('overrodeProperty')).toEqual('ACTUALLY USED');
|
||||
});
|
||||
it('Should fall back from overridden', () => {
|
||||
should.equal(overridingProps('localProperty'), 11235813);
|
||||
expect(overridingProps('localProperty')).toEqual(11235813);
|
||||
});
|
||||
it('should have an identity function if none provided', () => {
|
||||
should.equal(compilerProps.get('a', 'foo', '0'), '1');
|
||||
compilerProps.get(languages, 'foo', '0').should.deep.equal({a: '1'});
expect(compilerProps.get('a', 'foo', '0')).toEqual('1');
expect(compilerProps.get(languages, 'foo', '0')).toEqual({a: '1'});
});
it('should return an object of languages if the languages arg is an object itself', () => {
compilerProps.get(languages, 'foo', '0').should.deep.equal({a: '1'});
expect(compilerProps.get(languages, 'foo', '0')).toEqual({a: '1'});
});
it('should return a direct result if the language is an ID', () => {
compilerProps.propsByLangId[languages.a.id] = properties.fakeProps({foo: 'b'});
should.equal(compilerProps.get('a', 'foo', '0'), 'b');
expect(compilerProps.get('a', 'foo', '0')).toEqual('b');
compilerProps.propsByLangId[languages.a.id] = undefined;
});
it('should have backwards compatibility compilerProps behaviour', () => {
should.equal(compilerProps.get('', 'foo', '0'), '1');
expect(compilerProps.get('', 'foo', '0')).toEqual('1');
});
it('should report the default value if an unknown language is used', () => {
should.equal(compilerProps.get('b', 'foo', '0'), '0');
expect(compilerProps.get('b', 'foo', '0')).toEqual('0');
});
it('should not check ceProps for falsey values', () => {
// Set bar to be falsey in the language specific setting.
compilerProps.propsByLangId[languages.a.id] = properties.fakeProps({bar: false});
// Now query it with a default of true. We should see false...
should.equal(compilerProps.get('a', 'bar', true), false);
compilerProps.get(languages, 'bar', true).should.deep.equal({a: false});
expect(compilerProps.get('a', 'bar', true)).toBe(false);
expect(compilerProps.get(languages, 'bar', true)).toEqual({a: false});
compilerProps.propsByLangId[languages.a.id] = undefined;
});
it('should not parse version properties as numbers', () => {
should.equal(casesProps('libs.example.versions.010.version'), '0.10');
expect(casesProps('libs.example.versions.010.version')).toEqual('0.10');
});
it('should not parse semver properties as numbers', () => {
should.equal(casesProps('compiler.example110.semver'), '1.10');
expect(casesProps('compiler.example110.semver')).toEqual('1.10');
});
});

@@ -173,8 +172,8 @@ describe('Properties blob parsing', () => {
'mybool=false\n',
'<test props>',
);
unwrap(props.hello).should.equal('test');
unwrap(props.etc).should.equal(123);
unwrap(props.mybool).should.equal(false);
expect(props.hello).toEqual('test');
expect(props.etc).toEqual(123);
expect(props.mybool).toBe(false);
});
});

@@ -22,6 +22,8 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {beforeAll, describe, expect, it} from 'vitest';

import {RacketPassDumpParser} from '../lib/parsers/racket-pass-dump-parser.js';
import * as properties from '../lib/properties.js';

@@ -33,16 +35,16 @@ function deepCopy(obj) {
return JSON.parse(JSON.stringify(obj));
}

describe('racket-pass-dump-parser', function () {
describe('racket-pass-dump-parser', () => {
let racketPassDumpParser;

before(() => {
beforeAll(() => {
const fakeProps = new properties.CompilerProps(languages, properties.fakeProps({}));
const compilerProps = (fakeProps.get as any).bind(fakeProps, 'racket');
racketPassDumpParser = new RacketPassDumpParser(compilerProps);
});

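// --- illustrative sketch (editor's addition, not part of this patch) ---
// The conversion pattern in the hunks above: vitest provides no implicit
// globals, so describe/it/expect and the lifecycle hooks are imported
// explicitly, mocha's before/after become beforeAll/afterAll, and the
// mocha-style `function () {}` callbacks become arrow functions.
// `FakeParser` is a made-up stand-in, not a class from this repository.
import {beforeAll, describe, expect, it} from 'vitest';

class FakeParser {
    parse(line: string): string[] {
        return line.split(' ');
    }
}

describe('some parser', () => {
    let parser: FakeParser;

    beforeAll(() => {
        // was `before(() => {...})` under mocha
        parser = new FakeParser();
    });

    it('splits on spaces', () => {
        expect(parser.parse('a b')).toEqual(['a', 'b']);
    });
});
// --- end sketch ---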
it('should recognize step', function () {
|
||||
it('should recognize step', () => {
|
||||
// prettier-ignore
|
||||
const output = [
|
||||
{ text: ';; compile-linklet: phase: 0' },
|
||||
@@ -59,7 +61,7 @@ describe('racket-pass-dump-parser', function () {
|
||||
|
||||
const brokenDown = racketPassDumpParser.breakdownOutputIntoPassDumps(deepCopy(output), {});
|
||||
|
||||
brokenDown.should.deep.equal([
|
||||
expect(brokenDown).toEqual([
|
||||
{
|
||||
group: 'module: example, linklet: module, phase: 0',
|
||||
header: 'linklet',
|
||||
@@ -69,7 +71,7 @@ describe('racket-pass-dump-parser', function () {
|
||||
]);
|
||||
});
|
||||
|
||||
it('should recognize pass', function () {
|
||||
it('should recognize pass', () => {
|
||||
// prettier-ignore
|
||||
const output = [
|
||||
{ text: ';; compile-linklet: module: (phases configure-runtime)' },
|
||||
@@ -97,7 +99,7 @@ describe('racket-pass-dump-parser', function () {
|
||||
|
||||
const brokenDown = racketPassDumpParser.breakdownOutputIntoPassDumps(deepCopy(output), {});
|
||||
|
||||
brokenDown.should.deep.equal([
|
||||
expect(brokenDown).toEqual([
|
||||
{
|
||||
group: 'module: (phases configure-runtime), linklet: decl',
|
||||
header: 'cpnanopass',
|
||||
|
||||
@@ -1,4 +1,5 @@
import {unwrap} from '../lib/assert.js';
import {describe, expect, it} from 'vitest';

import * as rison from '../static/rison.js';

// Copied from https://github.com/Nanonid/rison/blob/master/python/rison/tests.py
@@ -42,15 +43,15 @@ describe('Rison test cases', () => {
for (const [r, obj] of Object.entries(py_testcases)) {
it(`Should decode "${r}"`, () => {
// hack to get around "TypeError: Cannot read properties of null (reading 'should')"
({x: rison.decode(r)}).should.deep.equal({x: obj});
expect(rison.decode(r)).toEqual(obj);
});
it(`Should encode ${JSON.stringify(obj)}`, () => {
unwrap(rison.encode(obj)).should.deep.equal(r);
expect(rison.encode(obj)).toEqual(r);
});
}
for (const [obj, r] of Object.entries(encode_testcases)) {
it(`Should encode ${JSON.stringify(obj)}`, () => {
unwrap(rison.encode(obj)).should.deep.equal(r);
expect(rison.encode(obj)).toEqual(r);
});
}
});

@@ -24,58 +24,61 @@

import fs from 'fs';

import {unwrap} from '../lib/assert.js';
import {describe, expect, it} from 'vitest';

import {Sponsor} from '../lib/sponsors.interfaces.js';
import {loadSponsorsFromString, makeIconSets, parse} from '../lib/sponsors.js';

import {resolvePathFromTestRoot, should} from './utils.js';
import {resolvePathFromTestRoot} from './utils.js';

describe('Sponsors', () => {
it('should expand names to objects', () => {
parse('moo').name.should.eq('moo');
expect(parse('moo').name).toEqual('moo');
});
it('should handle just names', () => {
parse({name: 'moo'}).name.should.eq('moo');
expect(parse({name: 'moo'}).name).toEqual('moo');
});
it('should default empty params', () => {
const obj = parse('moo');
should.equal(obj.description, undefined);
should.equal(obj.url, undefined);
obj.onclick.should.eq('');
should.equal(obj.img, undefined);
should.equal(obj.icon, undefined);
should.equal(obj.icon_dark, undefined);
obj.topIconShowEvery.should.eq(0);
obj.displayType.should.eq('Above');
should.equal(obj.statsId, undefined);
obj.style.should.deep.equal({});
expect(obj.description).toBeUndefined();
expect(obj.url).toBeUndefined();
expect(obj.onclick).toEqual('');
expect(obj.img).toBeUndefined();
expect(obj.icon).toBeUndefined();
expect(obj.icon_dark).toBeUndefined();
expect(obj.topIconShowEvery).toEqual(0);
expect(obj.displayType).toEqual('Above');
expect(obj.statsId).toBeUndefined();
expect(obj.style).toEqual({});
});
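// --- illustrative sketch (editor's addition, not part of this patch) ---
// The assertion mapping applied in the sponsor tests above:
//   x.should.eq(v), should.equal(x, v)  ->  expect(x).toEqual(v)
//   x.should.deep.equal(v)              ->  expect(x).toEqual(v)   (toEqual is a deep compare)
//   should.equal(x, undefined)          ->  expect(x).toBeUndefined()
import {expect, it} from 'vitest';

it('value assertion mapping', () => {
    const obj: {name: string; description?: string[]} = {name: 'moo'};
    expect(obj.name).toEqual('moo');
    expect(obj.description).toBeUndefined();
    expect({style: {}}).toEqual({style: {}}); // deep equality, like chai's deep.equal
});
// --- end sketch ---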
it('should make descriptions always one-sized arrays', () => {
|
||||
unwrap(parse({name: 'moo', description: 'desc'}).description).should.deep.eq(['desc']);
|
||||
expect(parse({name: 'moo', description: 'desc'}).description).toEqual(['desc']);
|
||||
});
|
||||
it('should pass through descriptions', () => {
|
||||
unwrap(parse({name: 'moo', description: ['desc1', 'desc2']}).description).should.deep.eq(['desc1', 'desc2']);
|
||||
expect(parse({name: 'moo', description: ['desc1', 'desc2']}).description).toEqual(['desc1', 'desc2']);
|
||||
});
|
||||
it('should pass through icons', () => {
|
||||
unwrap(parse({name: 'bob', icon: 'icon'}).icon).should.eq('icon');
|
||||
expect(parse({name: 'bob', icon: 'icon'}).icon).toEqual('icon');
|
||||
});
|
||||
it('should pick icons over images', () => {
|
||||
unwrap(parse({name: 'bob', img: 'img', icon: 'icon'}).icon).should.eq('icon');
|
||||
expect(parse({name: 'bob', img: 'img', icon: 'icon'}).icon).toEqual('icon');
|
||||
});
|
||||
it('should pick icons if not img', () => {
|
||||
unwrap(parse({name: 'bob', img: 'img'}).icon).should.eq('img');
|
||||
expect(parse({name: 'bob', img: 'img'}).icon).toEqual('img');
|
||||
});
|
||||
it('should pick dark icons if specified', () => {
|
||||
unwrap(parse({name: 'bob', icon: 'icon', icon_dark: 'icon_dark'}).icon_dark).should.eq('icon_dark');
|
||||
expect(parse({name: 'bob', icon: 'icon', icon_dark: 'icon_dark'}).icon_dark).toEqual('icon_dark');
|
||||
});
|
||||
it('should handle styles', () => {
|
||||
parse({name: 'bob', bgColour: 'red'}).style.should.deep.eq({'background-color': 'red'});
|
||||
expect(parse({name: 'bob', bgColour: 'red'}).style).toEqual({'background-color': 'red'});
|
||||
});
|
||||
it('should handle clicks', () => {
|
||||
parse({
|
||||
name: 'bob',
|
||||
url: 'https://some.host/click',
|
||||
}).onclick.should.eq('window.onSponsorClick("https://some.host/click");');
|
||||
expect(
|
||||
parse({
|
||||
name: 'bob',
|
||||
url: 'https://some.host/click',
|
||||
}).onclick,
|
||||
).toEqual('window.onSponsorClick("https://some.host/click");');
|
||||
});
|
||||
|
||||
it('should load a simple example', () => {
|
||||
@@ -96,11 +99,11 @@ levels:
|
||||
- People
|
||||
- Yay
|
||||
`);
|
||||
sample.should.not.be.null;
|
||||
expect(sample).not.toBeNull();
|
||||
const levels = sample.getLevels();
|
||||
levels.length.should.eq(2);
|
||||
levels[0].name.should.eq('Patreon Legends');
|
||||
levels[1].name.should.eq('Patreons');
|
||||
expect(levels.length).toEqual(2);
|
||||
expect(levels[0].name).toEqual('Patreon Legends');
|
||||
expect(levels[1].name).toEqual('Patreons');
|
||||
});
|
||||
|
||||
it('should sort sponsors by name', () => {
|
||||
@@ -115,7 +118,7 @@ levels:
|
||||
- A
|
||||
- B
|
||||
`).getLevels()[0].sponsors;
|
||||
peeps.map(sponsor => sponsor.name).should.deep.equals(['A', 'B', 'C', 'D']);
|
||||
expect(peeps.map(sponsor => sponsor.name)).toEqual(['A', 'B', 'C', 'D']);
|
||||
});
|
||||
it('should sort sponsors by priority then name', () => {
|
||||
const peeps = loadSponsorsFromString(`
|
||||
@@ -131,15 +134,15 @@ levels:
|
||||
- name: B
|
||||
priority: 50
|
||||
`).getLevels()[0].sponsors;
|
||||
peeps
|
||||
.map(sponsor => {
|
||||
expect(
|
||||
peeps.map(sponsor => {
|
||||
return {name: sponsor.name, priority: sponsor.priority};
|
||||
})
|
||||
.should.deep.equals([
|
||||
{name: 'D', priority: 100},
|
||||
{name: 'B', priority: 50},
|
||||
{name: 'C', priority: 50},
|
||||
]);
|
||||
}),
|
||||
).toEqual([
|
||||
{name: 'D', priority: 100},
|
||||
{name: 'B', priority: 50},
|
||||
{name: 'C', priority: 50},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should pick out all the top level icons', () => {
|
||||
@@ -166,7 +169,7 @@ levels:
|
||||
- name: five
|
||||
topIconShowEvery: 3
|
||||
`).getAllTopIcons();
|
||||
icons.map(s => s.name).should.deep.equals(['one', 'four']);
|
||||
expect(icons.map(s => s.name)).toEqual(['one', 'four']);
|
||||
});
|
||||
|
||||
it('should pick icons appropriately when all required every 3', () => {
|
||||
@@ -174,27 +177,27 @@ levels:
|
||||
const sponsor2 = parse({name: 'Sponsor2', topIconShowEvery: 3, icon: '2'});
|
||||
const sponsor3 = parse({name: 'Sponsor3', topIconShowEvery: 3, icon: '3'});
|
||||
const icons = [sponsor1, sponsor2, sponsor3];
|
||||
makeIconSets(icons, 10).should.deep.eq([icons]);
|
||||
makeIconSets(icons, 3).should.deep.eq([icons]);
|
||||
makeIconSets(icons, 2).should.deep.eq([
|
||||
expect(makeIconSets(icons, 10)).toEqual([icons]);
|
||||
expect(makeIconSets(icons, 3)).toEqual([icons]);
|
||||
expect(makeIconSets(icons, 2)).toEqual([
|
||||
[sponsor1, sponsor2],
|
||||
[sponsor1, sponsor3],
|
||||
[sponsor2, sponsor3],
|
||||
]);
|
||||
makeIconSets(icons, 1).should.deep.eq([[sponsor1], [sponsor2], [sponsor3]]);
|
||||
expect(makeIconSets(icons, 1)).toEqual([[sponsor1], [sponsor2], [sponsor3]]);
|
||||
});
|
||||
it('should pick icons appropriately when not required on different schedules', () => {
const sponsor1 = parse({name: 'Sponsor1', topIconShowEvery: 1, icon: '1'});
const sponsor2 = parse({name: 'Sponsor2', topIconShowEvery: 2, icon: '2'});
const sponsor3 = parse({name: 'Sponsor3', topIconShowEvery: 3, icon: '3'});
const icons = [sponsor1, sponsor2, sponsor3];
makeIconSets(icons, 10).should.deep.eq([icons]);
makeIconSets(icons, 3).should.deep.eq([icons]);
makeIconSets(icons, 2).should.deep.eq([
expect(makeIconSets(icons, 10)).toEqual([icons]);
expect(makeIconSets(icons, 3)).toEqual([icons]);
expect(makeIconSets(icons, 2)).toEqual([
[sponsor1, sponsor2],
[sponsor1, sponsor3],
]);
(() => makeIconSets(icons, 1)).should.throw();
expect(() => makeIconSets(icons, 1)).toThrow();
});
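// --- illustrative sketch (editor's addition, not part of this patch) ---
// The exception assertions above translate as
//   (() => f()).should.throw()  ->  expect(() => f()).toThrow()
// The code under test is still wrapped in a callback rather than called
// directly; `boom` is a made-up helper for the example.
import {expect, it} from 'vitest';

it('toThrow mapping', () => {
    const boom = () => {
        throw new Error('not enough icon slots');
    };
    expect(boom).toThrow();
    expect(boom).toThrow('not enough icon slots'); // optionally match the message
});
// --- end sketch ---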
it('should pick icons appropriately with a lot of sponsors on representative schedules', () => {
|
||||
const sponsor1 = parse({name: 'Sponsor1', topIconShowEvery: 1, icon: '1'});
|
||||
@@ -203,12 +206,12 @@ levels:
|
||||
const sponsor4 = parse({name: 'Sponsor4', topIconShowEvery: 3, icon: '3'});
|
||||
const sponsor5 = parse({name: 'Sponsor5', topIconShowEvery: 3, icon: '3'});
|
||||
const icons = [sponsor1, sponsor2, sponsor3, sponsor4, sponsor5];
|
||||
makeIconSets(icons, 10).should.deep.eq([icons]);
|
||||
makeIconSets(icons, 3).should.deep.eq([
|
||||
expect(makeIconSets(icons, 10)).toEqual([icons]);
|
||||
expect(makeIconSets(icons, 3)).toEqual([
|
||||
[sponsor1, sponsor2, sponsor3],
|
||||
[sponsor1, sponsor4, sponsor5],
|
||||
]);
|
||||
(() => makeIconSets(icons, 1)).should.throw();
|
||||
expect(() => makeIconSets(icons, 1)).toThrow();
|
||||
});
|
||||
it('should handle alternating', () => {
|
||||
const sponsor1 = parse({name: 'Sponsor1', topIconShowEvery: 1, icon: '1'});
|
||||
@@ -216,12 +219,12 @@ levels:
|
||||
const sponsor3 = parse({name: 'Sponsor3', topIconShowEvery: 2, icon: '3'});
|
||||
const sponsor4 = parse({name: 'Sponsor4', topIconShowEvery: 2, icon: '4'});
|
||||
const icons = [sponsor1, sponsor2, sponsor3, sponsor4];
|
||||
makeIconSets(icons, 4).should.deep.eq([icons]);
|
||||
makeIconSets(icons, 3).should.deep.eq([
|
||||
expect(makeIconSets(icons, 4)).toEqual([icons]);
|
||||
expect(makeIconSets(icons, 3)).toEqual([
|
||||
[sponsor1, sponsor2, sponsor3],
|
||||
[sponsor1, sponsor2, sponsor4],
|
||||
]);
|
||||
(() => makeIconSets(icons, 2)).should.throw();
|
||||
expect(() => makeIconSets(icons, 2)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -244,11 +247,11 @@ describe('Our specific sponsor file', () => {
|
||||
for (const sponsor of pick) {
|
||||
countBySponsor.set(sponsor, (countBySponsor.get(sponsor) || 0) + 1);
|
||||
}
|
||||
pick.length.should.eq(expectedNumIcons);
|
||||
expect(pick.length).toEqual(expectedNumIcons);
|
||||
}
|
||||
for (const topIcon of sponsors.getAllTopIcons()) {
|
||||
const appearsEvery = countBySponsor.get(topIcon) / numLoads;
|
||||
appearsEvery.should.lte(topIcon.topIconShowEvery);
|
||||
expect(appearsEvery).toBeLessThanOrEqual(topIcon.topIconShowEvery);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -17,6 +17,8 @@
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {parse} from '../lib/stack-usage-transformer.js';
|
||||
|
||||
describe('stack usage transformer', () => {
|
||||
@@ -26,7 +28,7 @@ example.cpp:6:5:int f()\t32\tdynamic
|
||||
example.cpp:7:5:int h()\t64\tdynamic,bounded
|
||||
`;
|
||||
const output = parse(doc);
|
||||
output.should.deep.equal([
|
||||
expect(output).toEqual([
|
||||
{
|
||||
BytesUsed: 16,
|
||||
DebugLoc: {
|
||||
|
||||
@@ -22,6 +22,8 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {ClientStateGoldenifier, ClientStateNormalizer} from '../lib/clientstate-normalizer.js';
|
||||
import {ClientState} from '../lib/clientstate.js';
|
||||
|
||||
@@ -39,7 +41,7 @@ describe('Normalizing clientstate', () => {
|
||||
// note: this trick is to get rid of undefined parameters
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('Should recognize everything and kitchensink as well', () => {
|
||||
@@ -55,7 +57,7 @@ describe('Normalizing clientstate', () => {
|
||||
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('Should support conformanceview', () => {
|
||||
@@ -71,7 +73,7 @@ describe('Normalizing clientstate', () => {
|
||||
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('Should support executors', () => {
|
||||
@@ -85,7 +87,7 @@ describe('Normalizing clientstate', () => {
|
||||
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('Should support newer features', () => {
|
||||
@@ -99,7 +101,7 @@ describe('Normalizing clientstate', () => {
|
||||
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('Allow output without editor id', () => {
|
||||
@@ -113,7 +115,7 @@ describe('Normalizing clientstate', () => {
|
||||
|
||||
const normalized = JSON.parse(JSON.stringify(normalizer.normalized));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -125,8 +127,8 @@ describe('ClientState parsing', () => {
|
||||
],
|
||||
});
|
||||
|
||||
state.sessions[0].compilers.length.should.equal(1);
|
||||
state.sessions[0].executors.length.should.equal(0);
|
||||
expect(state.sessions[0].compilers.length).toEqual(1);
|
||||
expect(state.sessions[0].executors.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('Should work with executor', () => {
|
||||
@@ -146,14 +148,14 @@ describe('ClientState parsing', () => {
|
||||
],
|
||||
});
|
||||
|
||||
state.sessions[0].compilers.length.should.equal(0);
|
||||
state.sessions[0].executors.length.should.equal(1);
|
||||
expect(state.sessions[0].compilers.length).toEqual(0);
|
||||
expect(state.sessions[0].executors.length).toEqual(1);
|
||||
});
|
||||
|
||||
it('Should not contain id-less compilers', () => {
|
||||
const jsonStr = fs.readFileSync('test/state/bug-2231.json', {encoding: 'utf8'});
|
||||
const state = new ClientState(JSON.parse(jsonStr));
|
||||
state.sessions[0].compilers.length.should.equal(1);
|
||||
expect(state.sessions[0].compilers.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -161,7 +163,7 @@ describe('Trees', () => {
|
||||
it('ClientState to GL', () => {
|
||||
const jsonStr = fs.readFileSync('test/state/tree.json', {encoding: 'utf8'});
|
||||
const state = new ClientState(JSON.parse(jsonStr));
|
||||
state.trees.length.should.equal(1);
|
||||
expect(state.trees.length).toEqual(1);
|
||||
|
||||
const gl = new ClientStateGoldenifier();
|
||||
gl.fromClientState(state);
|
||||
@@ -169,7 +171,7 @@ describe('Trees', () => {
|
||||
const golden = JSON.parse(JSON.stringify(gl.golden));
|
||||
|
||||
const resultdata = JSON.parse(fs.readFileSync('test/state/tree.goldenified.json', {encoding: 'utf8'}));
|
||||
golden.should.deep.equal(resultdata);
|
||||
expect(golden).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('GL to ClientState', () => {
|
||||
@@ -183,7 +185,7 @@ describe('Trees', () => {
|
||||
|
||||
const resultdata = JSON.parse(fs.readFileSync('test/state/tree.normalized.json', {encoding: 'utf8'}));
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('GL to ClientState with correct output pane', () => {
|
||||
@@ -199,13 +201,13 @@ describe('Trees', () => {
|
||||
fs.readFileSync('test/state/tree-gl-outputpane.normalized.json', {encoding: 'utf8'}),
|
||||
);
|
||||
|
||||
normalized.should.deep.equal(resultdata);
|
||||
expect(normalized).toEqual(resultdata);
|
||||
});
|
||||
|
||||
it('ClientState to Mobile GL', () => {
|
||||
const jsonStr = fs.readFileSync('test/state/tree-mobile.json', {encoding: 'utf8'});
|
||||
const state = new ClientState(JSON.parse(jsonStr));
|
||||
state.trees.length.should.equal(1);
|
||||
expect(state.trees.length).toEqual(1);
|
||||
|
||||
const gl = new ClientStateGoldenifier();
|
||||
const slides = gl.generatePresentationModeMobileViewerSlides(state);
|
||||
@@ -214,6 +216,6 @@ describe('Trees', () => {
|
||||
//fs.writeFileSync('test/state/tree-mobile.goldenified.json', JSON.stringify(golden));
|
||||
|
||||
const resultdata = JSON.parse(fs.readFileSync('test/state/tree-mobile.goldenified.json', {encoding: 'utf8'}));
|
||||
golden.should.deep.equal(resultdata);
|
||||
expect(golden).toEqual(resultdata);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -28,6 +28,7 @@ import {DynamoDB, GetItemCommand, PutItemCommand, QueryCommand, UpdateItemComman
|
||||
import {GetObjectCommand, PutObjectCommand, S3} from '@aws-sdk/client-s3';
|
||||
import {sdkStreamMixin} from '@smithy/util-stream';
|
||||
import {mockClient} from 'aws-sdk-client-mock';
|
||||
import {beforeEach, describe, expect, it} from 'vitest';
|
||||
|
||||
import * as properties from '../../lib/properties.js';
|
||||
import {StorageS3} from '../../lib/storage/index.js';
|
||||
@@ -47,16 +48,16 @@ describe('Find unique subhash tests', () => {
storagePrefix: 'prefix',
storageDynamoTable: 'table',
});
it('works when empty', () => {
it('works when empty', async () => {
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
mockDynamoDb.on(QueryCommand, {TableName: 'table'}).resolves({});
return storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV').should.eventually.deep.equal({
await expect(storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV')).resolves.toEqual({
alreadyPresent: false,
prefix: 'ABCDEF',
uniqueSubHash: 'ABCDEFGHI',
});
});
it('works when not empty', () => {
|
||||
it('works when not empty', async () => {
|
||||
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
|
||||
mockDynamoDb.on(QueryCommand, {TableName: 'table'}).resolves({
|
||||
Items: [
|
||||
@@ -67,13 +68,13 @@ describe('Find unique subhash tests', () => {
|
||||
],
|
||||
});
|
||||
|
||||
return storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV').should.eventually.deep.equal({
|
||||
await expect(storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV')).resolves.toEqual({
|
||||
alreadyPresent: false,
|
||||
prefix: 'ABCDEF',
|
||||
uniqueSubHash: 'ABCDEFGHI',
|
||||
});
|
||||
});
|
||||
it("works when there's a collision", () => {
|
||||
it("works when there's a collision", async () => {
|
||||
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
|
||||
mockDynamoDb.on(QueryCommand, {TableName: 'table'}).resolves({
|
||||
Items: [
|
||||
@@ -83,13 +84,13 @@ describe('Find unique subhash tests', () => {
|
||||
},
|
||||
],
|
||||
});
|
||||
return storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV').should.eventually.deep.equal({
|
||||
await expect(storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV')).resolves.toEqual({
|
||||
alreadyPresent: false,
|
||||
prefix: 'ABCDEF',
|
||||
uniqueSubHash: 'ABCDEFGHIJ',
|
||||
});
|
||||
});
|
||||
it('finds an existing match', () => {
|
||||
it('finds an existing match', async () => {
|
||||
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
|
||||
mockDynamoDb.on(QueryCommand, {TableName: 'table'}).resolves({
|
||||
Items: [
|
||||
@@ -99,7 +100,7 @@ describe('Find unique subhash tests', () => {
|
||||
},
|
||||
],
|
||||
});
|
||||
return storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV').should.eventually.deep.equal({
|
||||
await expect(storage.findUniqueSubhash('ABCDEFGHIJKLMNOPQRSTUV')).resolves.toEqual({
|
||||
alreadyPresent: true,
|
||||
prefix: 'ABCDEF',
|
||||
uniqueSubHash: 'ABCDEFGHI',
|
||||
@@ -131,15 +132,15 @@ describe('Stores to s3', () => {
|
||||
config: 'yo',
|
||||
};
|
||||
await storage.storeItem(object, {get: () => 'localhost'});
|
||||
mockS3
|
||||
.commandCalls(PutObjectCommand, {
|
||||
expect(
|
||||
mockS3.commandCalls(PutObjectCommand, {
|
||||
Bucket: 'bucket',
|
||||
Key: 'prefix/ABCDEFGHIJKLMNOP',
|
||||
Body: 'yo',
|
||||
})
|
||||
.should.have.lengthOf(1);
|
||||
mockDynamoDb
|
||||
.commandCalls(PutItemCommand, {
|
||||
}),
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
mockDynamoDb.commandCalls(PutItemCommand, {
|
||||
TableName: 'table',
|
||||
Item: {
|
||||
prefix: {S: 'ABCDEF'},
|
||||
@@ -148,8 +149,8 @@ describe('Stores to s3', () => {
|
||||
stats: {M: {clicks: {N: '0'}}},
|
||||
creation_ip: {S: 'localhost'},
|
||||
},
|
||||
})
|
||||
.should.have.lengthOf(1);
|
||||
}),
|
||||
).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -190,12 +191,12 @@ describe('Retrieves from s3', () => {
.resolves({Body: sdkStreamMixin(stream)});

const result = await storage.expandId('ABCDEF');
result.should.deep.equal({config: 'I am a monkey'});
expect(result).toEqual({config: 'I am a monkey'});
});
it('should handle failures', async () => {
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
mockDynamoDb.on(GetItemCommand).resolves({});
return storage.expandId('ABCDEF').should.be.rejectedWith(Error, 'ID ABCDEF not present in links table');
await expect(storage.expandId('ABCDEF')).rejects.toThrow('ID ABCDEF not present in links table');
});
});

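// --- illustrative sketch (editor's addition, not part of this patch) ---
// chai-as-promised assertions in the storage tests above become awaited
// vitest ones, and the tests themselves turn async:
//   p.should.eventually.deep.equal(v)      ->  await expect(p).resolves.toEqual(v)
//   p.should.be.rejectedWith(Error, msg)   ->  await expect(p).rejects.toThrow(msg)
// `fetchConfig` is a made-up async helper for the example.
import {expect, it} from 'vitest';

const fetchConfig = async (id: string) => {
    if (id === 'missing') throw new Error(`ID ${id} not present`);
    return {config: 'ok'};
};

it('async assertion mapping', async () => {
    await expect(fetchConfig('ABCDEF')).resolves.toEqual({config: 'ok'});
    await expect(fetchConfig('missing')).rejects.toThrow('ID missing not present');
});
// --- end sketch ---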
@@ -217,8 +218,8 @@ describe('Updates counts in s3', async () => {
|
||||
it('should increment for simple cases', async () => {
|
||||
const storage = new StorageS3(httpRootDir, compilerProps, awsProps);
|
||||
await storage.incrementViewCount('ABCDEF');
|
||||
mockDynamoDb
|
||||
.commandCalls(UpdateItemCommand, {
|
||||
expect(
|
||||
mockDynamoDb.commandCalls(UpdateItemCommand, {
|
||||
ExpressionAttributeValues: {':inc': {N: '1'}},
|
||||
Key: {
|
||||
prefix: {S: 'ABCDEF'},
|
||||
@@ -227,7 +228,7 @@ describe('Updates counts in s3', async () => {
|
||||
ReturnValues: 'NONE',
|
||||
TableName: 'table',
|
||||
UpdateExpression: 'SET stats.clicks = stats.clicks + :inc',
|
||||
})
|
||||
.should.have.lengthOf(1);
|
||||
}),
|
||||
).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,8 +22,9 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

import {describe, expect, it} from 'vitest';

import {StorageBase} from '../../lib/storage/index.js';
import {should} from '../utils.js';

describe('Hash tests', () => {
// afterEach(() => restore());
@@ -31,14 +32,14 @@ describe('Hash tests', () => {
for (let i = 0; i < 256; ++i) {
const buf = Buffer.of(i);
const as64 = StorageBase.encodeBuffer(buf);
as64.should.not.contain('/');
as64.should.not.contain('+');
expect(as64).not.toContain('/');
expect(as64).not.toContain('+');
}
});
const badResult = 'R0Buttabcdefghio1327698asdhjkJJklQp'.toLowerCase(); // Butt hash, see https://github.com/compiler-explorer/compiler-explorer/issues/1297
it('should detect profanities in hashes', () => {
StorageBase.isCleanText('I am the very model of a major general').should.be.true;
StorageBase.isCleanText(badResult).should.be.false;
expect(StorageBase.isCleanText('I am the very model of a major general')).toBe(true);
expect(StorageBase.isCleanText(badResult)).toBe(false);
});
// it('should avoid profanities and illegible characters in hashes', () => {
// const testCase = {some: 'test'};
@@ -62,6 +63,6 @@ describe('Hash tests', () => {
const testCase = {some: 'test'};
const {config} = StorageBase.getSafeHash(testCase);
const asObj = JSON.parse(config);
should.not.exist(asObj.nonce);
expect(asObj).not.toHaveProperty('nonce');
});
});

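// --- illustrative sketch (editor's addition, not part of this patch) ---
// Negated and existence assertions in the hash tests above map as:
//   s.should.not.contain(c)     ->  expect(s).not.toContain(c)
//   x.should.be.true / .false   ->  expect(x).toBe(true) / expect(x).toBe(false)
//   should.not.exist(o.nonce)   ->  expect(o).not.toHaveProperty('nonce')
import {expect, it} from 'vitest';

it('negation and property mapping', () => {
    expect('ABCDEF').not.toContain('/');
    expect(Number.isInteger(3)).toBe(true);
    expect(JSON.parse('{"some":"test"}')).not.toHaveProperty('nonce');
});
// --- end sketch ---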
@@ -22,67 +22,69 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import './utils.js';
|
||||
import {SymbolStore} from '../lib/symbol-store.js';
|
||||
|
||||
describe('SymbolStore', function () {
|
||||
it('should be empty initially', function () {
|
||||
describe('SymbolStore', () => {
|
||||
it('should be empty initially', () => {
|
||||
const store = new SymbolStore();
|
||||
store.listSymbols().length.should.equal(0);
|
||||
store.listTranslations().length.should.equal(0);
|
||||
expect(store.listSymbols().length).toEqual(0);
|
||||
expect(store.listTranslations().length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should be able to add an item', function () {
|
||||
it('should be able to add an item', () => {
|
||||
const store = new SymbolStore();
|
||||
store.add('test');
|
||||
store.listSymbols().length.should.equal(1);
|
||||
store.listTranslations().length.should.equal(1);
|
||||
expect(store.listSymbols().length).toEqual(1);
|
||||
expect(store.listTranslations().length).toEqual(1);
|
||||
|
||||
store.listSymbols()[0].should.equal('test');
|
||||
expect(store.listSymbols()[0]).toEqual('test');
|
||||
|
||||
const translations = store.listTranslations();
|
||||
translations[0][0].should.equal('test');
|
||||
translations[0][1].should.equal('test');
|
||||
expect(translations[0][0]).toEqual('test');
|
||||
expect(translations[0][1]).toEqual('test');
|
||||
});
|
||||
|
||||
it('should not contain duplicate items', function () {
|
||||
it('should not contain duplicate items', () => {
|
||||
const store = new SymbolStore();
|
||||
store.add('test');
|
||||
store.add('test');
|
||||
store.listSymbols().length.should.equal(1);
|
||||
store.listTranslations().length.should.equal(1);
|
||||
expect(store.listSymbols().length).toEqual(1);
|
||||
expect(store.listTranslations().length).toEqual(1);
|
||||
|
||||
store.listSymbols()[0].should.equal('test');
|
||||
expect(store.listSymbols()[0]).toEqual('test');
|
||||
|
||||
const translations = store.listTranslations();
|
||||
translations[0][0].should.equal('test');
|
||||
translations[0][1].should.equal('test');
|
||||
expect(translations[0][0]).toEqual('test');
|
||||
expect(translations[0][1]).toEqual('test');
|
||||
});
|
||||
|
||||
it('should return a sorted list', function () {
|
||||
it('should return a sorted list', () => {
|
||||
const store = new SymbolStore();
|
||||
store.add('test123');
|
||||
store.add('test123456');
|
||||
store.listSymbols().length.should.equal(2);
|
||||
store.listTranslations().length.should.equal(2);
|
||||
expect(store.listSymbols().length).toEqual(2);
|
||||
expect(store.listTranslations().length).toEqual(2);
|
||||
|
||||
const translations = store.listTranslations();
|
||||
translations[0][0].should.equal('test123456');
|
||||
translations[1][0].should.equal('test123');
|
||||
expect(translations[0][0]).toEqual('test123456');
|
||||
expect(translations[1][0]).toEqual('test123');
|
||||
});
|
||||
|
||||
it('should be able to add an array of items', function () {
|
||||
it('should be able to add an array of items', () => {
|
||||
const store = new SymbolStore();
|
||||
store.addMany(['test123', 'test123456', 'test123']);
|
||||
store.listSymbols().length.should.equal(2);
|
||||
store.listTranslations().length.should.equal(2);
|
||||
expect(store.listSymbols().length).toEqual(2);
|
||||
expect(store.listTranslations().length).toEqual(2);
|
||||
|
||||
const translations = store.listTranslations();
|
||||
translations[0][0].should.equal('test123456');
|
||||
translations[1][0].should.equal('test123');
|
||||
expect(translations[0][0]).toEqual('test123456');
|
||||
expect(translations[1][0]).toEqual('test123');
|
||||
});
|
||||
|
||||
it('should be possible to exclude items in another store', function () {
|
||||
it('should be possible to exclude items in another store', () => {
|
||||
const store1 = new SymbolStore();
|
||||
store1.addMany(['test123', 'test123456', 'test123']);
|
||||
|
||||
@@ -91,11 +93,11 @@ describe('SymbolStore', function () {
|
||||
|
||||
store1.exclude(store2);
|
||||
const translations = store1.listTranslations();
|
||||
translations.length.should.equal(1);
|
||||
translations[0][0].should.equal('test123456');
|
||||
expect(translations.length).toEqual(1);
|
||||
expect(translations[0][0]).toEqual('test123456');
|
||||
});
|
||||
|
||||
it('should be possible to exclude items that partially match', function () {
|
||||
it('should be possible to exclude items that partially match', () => {
|
||||
const store1 = new SymbolStore();
|
||||
store1.addMany(['test123', 'test123456', 'test123']);
|
||||
|
||||
@@ -104,18 +106,18 @@ describe('SymbolStore', function () {
|
||||
|
||||
store1.softExclude(store2);
|
||||
const translations = store1.listTranslations();
|
||||
translations.length.should.equal(1);
|
||||
translations[0][0].should.equal('test123456');
|
||||
expect(translations.length).toEqual(1);
|
||||
expect(translations[0][0]).toEqual('test123456');
|
||||
});
|
||||
|
||||
it('should be able to check contents', function () {
|
||||
it('should be able to check contents', () => {
|
||||
const store = new SymbolStore();
|
||||
store.addMany(['test123', 'test123456', 'test123']);
|
||||
|
||||
store.contains('test123').should.equal(true);
|
||||
store.contains('test123456').should.equal(true);
|
||||
store.contains('test456').should.equal(false);
|
||||
expect(store.contains('test123')).toEqual(true);
|
||||
expect(store.contains('test123456')).toEqual(true);
|
||||
expect(store.contains('test456')).toEqual(false);
|
||||
|
||||
store.listSymbols().length.should.equal(2);
|
||||
expect(store.listSymbols().length).toEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,6 +22,8 @@
|
||||
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
|
||||
import {
|
||||
getToolchainFlagFromOptions,
|
||||
getToolchainPathWithOptionsArr,
|
||||
@@ -49,7 +51,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal([
|
||||
expect(orderedArgs).toEqual([
|
||||
'--gcc-toolchain=/opt/compiler-explorer/gcc-7.2.0',
|
||||
'--gcc-toolchain=/opt/compiler-explorer/gcc-7.2.0',
|
||||
]);
|
||||
@@ -70,7 +72,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal([
|
||||
expect(orderedArgs).toEqual([
|
||||
'--gcc-toolchain=' + path.resolve('/opt/compiler-explorer/gcc-8.0'),
|
||||
'--gcc-toolchain=' + path.resolve('/opt/compiler-explorer/gcc-8.0'),
|
||||
]);
|
||||
@@ -91,7 +93,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal(false);
|
||||
expect(orderedArgs).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should support ICC compilers', () => {
|
||||
@@ -109,7 +111,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal([
|
||||
expect(orderedArgs).toEqual([
|
||||
'--gcc-toolchain=' + path.resolve('/opt/compiler-explorer/gcc-8.2.0'),
|
||||
'--gcc-toolchain=' + path.resolve('/opt/compiler-explorer/gcc-8.2.0'),
|
||||
]);
|
||||
@@ -133,7 +135,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal(false);
|
||||
expect(orderedArgs).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should not support using libc++', () => {
|
||||
@@ -152,7 +154,7 @@ describe('CompilerDropInTool', () => {
|
||||
const sourcefile = 'example.cpp';
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, [], args, sourcefile);
|
||||
orderedArgs.should.deep.equal(false);
|
||||
expect(orderedArgs).toEqual(false);
|
||||
});
|
||||
|
||||
it('Should support library options', () => {
|
||||
@@ -172,7 +174,7 @@ describe('CompilerDropInTool', () => {
|
||||
const libOptions = ['-DMYLIBDEF', '-pthread'];
|
||||
|
||||
const orderedArgs = tool.getOrderedArguments(compilationInfo, includeflags, libOptions, args, sourcefile);
|
||||
orderedArgs.should.deep.equal([
|
||||
expect(orderedArgs).toEqual([
|
||||
'--gcc-toolchain=/opt/compiler-explorer/gcc-8.2.0',
|
||||
'--gcc-toolchain=/opt/compiler-explorer/gcc-8.2.0',
|
||||
'-DMYLIBDEF',
|
||||
@@ -195,12 +197,12 @@ describe('CompilerDropInTool', () => {
|
||||
'/app/example.cpp',
|
||||
];
|
||||
|
||||
hasToolchainArg(options).should.be.true;
|
||||
expect(hasToolchainArg(options)).toBe(true);
|
||||
|
||||
getToolchainFlagFromOptions(options).should.equal('--gcc-toolchain=');
|
||||
expect(getToolchainFlagFromOptions(options)).toEqual('--gcc-toolchain=');
|
||||
|
||||
const newOptions = removeToolchainArg(options);
|
||||
hasToolchainArg(newOptions).should.be.false;
|
||||
expect(hasToolchainArg(newOptions)).toBe(false);
|
||||
});
|
||||
|
||||
it('Should be able to swap toolchain', () => {
|
||||
@@ -220,10 +222,10 @@ describe('CompilerDropInTool', () => {
|
||||
];
|
||||
|
||||
const toolchain = getToolchainPathWithOptionsArr(exe, options);
|
||||
toolchain.should.equals('/opt/compiler-explorer/gcc-12.2.0');
|
||||
expect(toolchain).toEqual('/opt/compiler-explorer/gcc-12.2.0');
|
||||
|
||||
const replacedOptions = replaceToolchainArg(options, '/opt/compiler-explorer/gcc-11.1.0');
|
||||
replacedOptions.should.deep.equal([
|
||||
expect(replacedOptions).toEqual([
|
||||
'-gdwarf-4',
|
||||
'-g',
|
||||
'-o',
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
// POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import express from 'express';
|
||||
import {beforeAll, describe, expect, it} from 'vitest';
|
||||
|
||||
import {RouteAPI} from '../lib/handlers/route-api.js';
|
||||
|
||||
@@ -33,7 +34,7 @@ describe('Basic unfurls', () => {
|
||||
let config;
|
||||
let routeApi;
|
||||
|
||||
before(() => {
|
||||
beforeAll(() => {
|
||||
config = {
|
||||
ceProps: () => {},
|
||||
clientOptionsHandler: {
|
||||
@@ -75,7 +76,7 @@ describe('Basic unfurls', () => {
|
||||
});
|
||||
|
||||
const res = await prom;
|
||||
res.metadata.should.deep.equal({
|
||||
expect(res.metadata).toEqual({
|
||||
ogDescription: '',
|
||||
ogTitle: 'Compiler Explorer',
|
||||
});
|
||||
@@ -103,7 +104,7 @@ describe('Basic unfurls', () => {
|
||||
});
|
||||
|
||||
const res = await prom;
|
||||
res.metadata.should.deep.equal({
|
||||
expect(res.metadata).toEqual({
|
||||
ogDescription:
|
||||
'\ntemplate<typename T>\nconcept TheSameAndAddable = requires(T a, T b) {\n {a+b} -> T;\n};\n\ntemplate<TheSameAndAddable T>\nT sum(T x, T y) {\n return x + y;\n}\n\n#include <string>\n\nint main() {\n int z = 0;\n int w;\n\n return sum(z, w);\n}\n',
|
||||
ogTitle: 'Compiler Explorer - C++',
|
||||
@@ -132,7 +133,7 @@ describe('Basic unfurls', () => {
|
||||
});
|
||||
|
||||
const res = await prom;
|
||||
res.metadata.should.deep.equal({
|
||||
expect(res.metadata).toEqual({
|
||||
ogDescription: 'project(hello)\n\nadd_executable(output.s\n example.cpp\n square.cpp)\n',
|
||||
ogTitle: 'Compiler Explorer - C++',
|
||||
});
|
||||
|
||||
@@ -25,6 +25,7 @@
|
||||
import path from 'path';
|
||||
import {fileURLToPath} from 'url';
|
||||
|
||||
import {describe, expect, it} from 'vitest';
|
||||
import winston from 'winston';
|
||||
|
||||
import {makeLogStream} from '../lib/logger.js';
|
||||
@@ -34,65 +35,65 @@ import {fs} from './utils.js';
|
||||
|
||||
describe('Splits lines', () => {
|
||||
it('handles empty input', () => {
|
||||
utils.splitLines('').should.deep.equals([]);
|
||||
expect(utils.splitLines('')).toEqual([]);
|
||||
});
|
||||
it('handles a single line with no newline', () => {
|
||||
utils.splitLines('A line').should.deep.equals(['A line']);
|
||||
expect(utils.splitLines('A line')).toEqual(['A line']);
|
||||
});
|
||||
it('handles a single line with a newline', () => {
|
||||
utils.splitLines('A line\n').should.deep.equals(['A line']);
|
||||
expect(utils.splitLines('A line\n')).toEqual(['A line']);
|
||||
});
|
||||
it('handles multiple lines', () => {
|
||||
utils.splitLines('A line\nAnother line\n').should.deep.equals(['A line', 'Another line']);
|
||||
expect(utils.splitLines('A line\nAnother line\n')).toEqual(['A line', 'Another line']);
|
||||
});
|
||||
it('handles multiple lines ending on a non-newline', () => {
|
||||
utils.splitLines('A line\nAnother line\nLast line').should.deep.equals(['A line', 'Another line', 'Last line']);
|
||||
expect(utils.splitLines('A line\nAnother line\nLast line')).toEqual(['A line', 'Another line', 'Last line']);
|
||||
});
|
||||
it('handles empty lines', () => {
|
||||
utils.splitLines('A line\n\nA line after an empty').should.deep.equals(['A line', '', 'A line after an empty']);
|
||||
expect(utils.splitLines('A line\n\nA line after an empty')).toEqual(['A line', '', 'A line after an empty']);
|
||||
});
|
||||
it('handles a single empty line', () => {
|
||||
utils.splitLines('\n').should.deep.equals(['']);
|
||||
expect(utils.splitLines('\n')).toEqual(['']);
|
||||
});
|
||||
it('handles multiple empty lines', () => {
|
||||
utils.splitLines('\n\n\n').should.deep.equals(['', '', '']);
|
||||
expect(utils.splitLines('\n\n\n')).toEqual(['', '', '']);
|
||||
});
|
||||
it('handles \\r\\n lines', () => {
|
||||
utils.splitLines('Some\r\nLines\r\n').should.deep.equals(['Some', 'Lines']);
|
||||
expect(utils.splitLines('Some\r\nLines\r\n')).toEqual(['Some', 'Lines']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Expands tabs', () => {
|
||||
it('leaves non-tabs alone', () => {
|
||||
utils.expandTabs('This has no tabs at all').should.equals('This has no tabs at all');
|
||||
expect(utils.expandTabs('This has no tabs at all')).toEqual('This has no tabs at all');
|
||||
});
|
||||
it('at beginning of line', () => {
|
||||
utils.expandTabs('\tOne tab').should.equals(' One tab');
|
||||
utils.expandTabs('\t\tTwo tabs').should.equals(' Two tabs');
|
||||
expect(utils.expandTabs('\tOne tab')).toEqual(' One tab');
|
||||
expect(utils.expandTabs('\t\tTwo tabs')).toEqual(' Two tabs');
|
||||
});
|
||||
it('mid-line', () => {
|
||||
utils.expandTabs('0\t1234567A').should.equals('0 1234567A');
|
||||
utils.expandTabs('01\t234567A').should.equals('01 234567A');
|
||||
utils.expandTabs('012\t34567A').should.equals('012 34567A');
|
||||
utils.expandTabs('0123\t4567A').should.equals('0123 4567A');
|
||||
utils.expandTabs('01234\t567A').should.equals('01234 567A');
|
||||
utils.expandTabs('012345\t67A').should.equals('012345 67A');
|
||||
utils.expandTabs('0123456\t7A').should.equals('0123456 7A');
|
||||
utils.expandTabs('01234567\tA').should.equals('01234567 A');
|
||||
expect(utils.expandTabs('0\t1234567A')).toEqual('0 1234567A');
|
||||
expect(utils.expandTabs('01\t234567A')).toEqual('01 234567A');
|
||||
expect(utils.expandTabs('012\t34567A')).toEqual('012 34567A');
|
||||
expect(utils.expandTabs('0123\t4567A')).toEqual('0123 4567A');
|
||||
expect(utils.expandTabs('01234\t567A')).toEqual('01234 567A');
|
||||
expect(utils.expandTabs('012345\t67A')).toEqual('012345 67A');
|
||||
expect(utils.expandTabs('0123456\t7A')).toEqual('0123456 7A');
|
||||
expect(utils.expandTabs('01234567\tA')).toEqual('01234567 A');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Parses compiler output', () => {
|
||||
it('handles simple cases', () => {
|
||||
utils.parseOutput('Line one\nLine two', 'bob.cpp').should.deep.equals([{text: 'Line one'}, {text: 'Line two'}]);
|
||||
utils.parseOutput('Line one\nbob.cpp:1 Line two', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('Line one\nLine two', 'bob.cpp')).toEqual([{text: 'Line one'}, {text: 'Line two'}]);
|
||||
expect(utils.parseOutput('Line one\nbob.cpp:1 Line two', 'bob.cpp')).toEqual([
|
||||
{text: 'Line one'},
|
||||
{
|
||||
tag: {column: 0, line: 1, text: 'Line two', severity: 3, file: 'bob.cpp'},
|
||||
text: '<source>:1 Line two',
|
||||
},
|
||||
]);
|
||||
utils.parseOutput('Line one\nbob.cpp:1:5: Line two', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('Line one\nbob.cpp:1:5: Line two', 'bob.cpp')).toEqual([
|
||||
{text: 'Line one'},
|
||||
{
|
||||
tag: {column: 5, line: 1, text: 'Line two', severity: 3, file: 'bob.cpp'},
|
||||
@@ -101,7 +102,7 @@ describe('Parses compiler output', () => {
|
||||
]);
|
||||
});
|
||||
it('handles windows output', () => {
|
||||
utils.parseOutput('bob.cpp(1) Oh noes', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('bob.cpp(1) Oh noes', 'bob.cpp')).toEqual([
|
||||
{
|
||||
tag: {column: 0, line: 1, text: 'Oh noes', severity: 3, file: 'bob.cpp'},
|
||||
text: '<source>(1) Oh noes',
|
||||
@@ -109,7 +110,7 @@ describe('Parses compiler output', () => {
|
||||
]);
|
||||
});
|
||||
it('replaces all references to input source', () => {
|
||||
utils.parseOutput('bob.cpp:1 error in bob.cpp', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('bob.cpp:1 error in bob.cpp', 'bob.cpp')).toEqual([
|
||||
{
|
||||
tag: {column: 0, line: 1, text: 'error in <source>', severity: 3, file: 'bob.cpp'},
|
||||
text: '<source>:1 error in <source>',
|
||||
@@ -117,14 +118,14 @@ describe('Parses compiler output', () => {
|
||||
]);
|
||||
});
|
||||
it('treats warnings and notes as the correct severity', () => {
|
||||
utils.parseOutput('Line one\nbob.cpp:1:5: warning Line two', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('Line one\nbob.cpp:1:5: warning Line two', 'bob.cpp')).toEqual([
|
||||
{text: 'Line one'},
|
||||
{
|
||||
tag: {column: 5, line: 1, text: 'warning Line two', severity: 2, file: 'bob.cpp'},
|
||||
text: '<source>:1:5: warning Line two',
|
||||
},
|
||||
]);
|
||||
utils.parseOutput('Line one\nbob.cpp:1:5: note Line two', 'bob.cpp').should.deep.equals([
|
||||
expect(utils.parseOutput('Line one\nbob.cpp:1:5: note Line two', 'bob.cpp')).toEqual([
|
||||
{text: 'Line one'},
|
||||
{
|
||||
tag: {column: 5, line: 1, text: 'note Line two', severity: 1, file: 'bob.cpp'},
|
||||
@@ -133,24 +134,24 @@ describe('Parses compiler output', () => {
|
||||
]);
|
||||
});
|
||||
it('treats <stdin> as if it were the compiler source', () => {
|
||||
utils
|
||||
.parseOutput("<stdin>:120:25: error: variable or field 'transform_data' declared void", 'bob.cpp')
|
||||
.should.deep.equals([
|
||||
{
|
||||
tag: {
|
||||
column: 25,
|
||||
line: 120,
|
||||
text: "error: variable or field 'transform_data' declared void",
|
||||
severity: 3,
|
||||
file: 'bob.cpp',
|
||||
},
|
||||
text: "<source>:120:25: error: variable or field 'transform_data' declared void",
|
||||
expect(
|
||||
utils.parseOutput("<stdin>:120:25: error: variable or field 'transform_data' declared void", 'bob.cpp'),
|
||||
).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 25,
|
||||
line: 120,
|
||||
text: "error: variable or field 'transform_data' declared void",
|
||||
severity: 3,
|
||||
file: 'bob.cpp',
|
||||
},
|
||||
]);
|
||||
text: "<source>:120:25: error: variable or field 'transform_data' declared void",
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('parser error with full path', () => {
|
||||
utils.parseOutput("/app/example.cl:5:30: error: use of undeclared identifier 'ad'").should.deep.equals([
|
||||
expect(utils.parseOutput("/app/example.cl:5:30: error: use of undeclared identifier 'ad'")).toEqual([
|
||||
{
|
||||
tag: {
|
||||
file: 'example.cl',
|
||||
@@ -167,7 +168,7 @@ describe('Parses compiler output', () => {
|
||||
|
||||
describe('Pascal compiler output', () => {
|
||||
it('recognize fpc identifier not found error', () => {
|
||||
utils.parseOutput('output.pas(13,23) Error: Identifier not found "adsadasd"', 'output.pas').should.deep.equals([
|
||||
expect(utils.parseOutput('output.pas(13,23) Error: Identifier not found "adsadasd"', 'output.pas')).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 23,
|
||||
@@ -182,53 +183,51 @@ describe('Pascal compiler output', () => {
|
||||
});
|
||||
|
||||
it('recognize fpc exiting error', () => {
|
||||
utils
|
||||
.parseOutput('output.pas(17) Fatal: There were 1 errors compiling module, stopping', 'output.pas')
|
||||
.should.deep.equals([
|
||||
{
|
||||
tag: {
|
||||
column: 0,
|
||||
line: 17,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'output.pas',
|
||||
},
|
||||
text: '<source>(17) Fatal: There were 1 errors compiling module, stopping',
|
||||
expect(
|
||||
utils.parseOutput('output.pas(17) Fatal: There were 1 errors compiling module, stopping', 'output.pas'),
|
||||
).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 0,
|
||||
line: 17,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'output.pas',
|
||||
},
|
||||
]);
|
||||
text: '<source>(17) Fatal: There were 1 errors compiling module, stopping',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('removes the temp path', () => {
|
||||
utils
|
||||
.parseOutput(
|
||||
expect(
|
||||
utils.parseOutput(
|
||||
'Compiling /tmp/path/prog.dpr\noutput.pas(17) Fatal: There were 1 errors compiling module, stopping',
|
||||
'output.pas',
|
||||
'/tmp/path/',
|
||||
)
|
||||
.should.deep.equals([
|
||||
{
|
||||
text: 'Compiling prog.dpr',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
text: 'Compiling prog.dpr',
|
||||
},
|
||||
{
|
||||
tag: {
|
||||
column: 0,
|
||||
line: 17,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'output.pas',
|
||||
},
|
||||
{
|
||||
tag: {
|
||||
column: 0,
|
||||
line: 17,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'output.pas',
|
||||
},
|
||||
text: '<source>(17) Fatal: There were 1 errors compiling module, stopping',
|
||||
},
|
||||
]);
|
||||
text: '<source>(17) Fatal: There were 1 errors compiling module, stopping',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rust compiler output', () => {
|
||||
it('handles simple cases', () => {
|
||||
utils
|
||||
.parseRustOutput('Line one\nLine two', 'bob.rs')
|
||||
.should.deep.equals([{text: 'Line one'}, {text: 'Line two'}]);
|
||||
utils.parseRustOutput('Unrelated\nLine one\n --> bob.rs:1\nUnrelated', 'bob.rs').should.deep.equals([
|
||||
expect(utils.parseRustOutput('Line one\nLine two', 'bob.rs')).toEqual([{text: 'Line one'}, {text: 'Line two'}]);
|
||||
expect(utils.parseRustOutput('Unrelated\nLine one\n --> bob.rs:1\nUnrelated', 'bob.rs')).toEqual([
|
||||
{text: 'Unrelated'},
|
||||
{
|
||||
tag: {column: 0, line: 1, text: 'Line one', severity: 3},
|
||||
@@ -240,7 +239,7 @@ describe('Rust compiler output', () => {
|
||||
},
|
||||
{text: 'Unrelated'},
|
||||
]);
|
||||
utils.parseRustOutput('Line one\n --> bob.rs:1:5', 'bob.rs').should.deep.equals([
|
||||
expect(utils.parseRustOutput('Line one\n --> bob.rs:1:5', 'bob.rs')).toEqual([
|
||||
{
|
||||
tag: {column: 5, line: 1, text: 'Line one', severity: 3},
|
||||
text: 'Line one',
|
||||
@@ -253,7 +252,7 @@ describe('Rust compiler output', () => {
|
||||
});
|
||||
|
||||
it('replaces all references to input source', () => {
|
||||
utils.parseRustOutput('error: Error in bob.rs\n --> bob.rs:1', 'bob.rs').should.deep.equals([
|
||||
expect(utils.parseRustOutput('error: Error in bob.rs\n --> bob.rs:1', 'bob.rs')).toEqual([
|
||||
{
|
||||
tag: {column: 0, line: 1, text: 'error: Error in <source>', severity: 3},
|
||||
text: 'error: Error in <source>',
|
||||
@@ -266,7 +265,7 @@ describe('Rust compiler output', () => {
|
||||
});
|
||||
|
||||
it('treats <stdin> as if it were the compiler source', () => {
|
||||
utils.parseRustOutput('error: <stdin> is sad\n --> <stdin>:120:25', 'bob.rs').should.deep.equals([
|
||||
expect(utils.parseRustOutput('error: <stdin> is sad\n --> <stdin>:120:25', 'bob.rs')).toEqual([
|
||||
{
|
||||
tag: {column: 25, line: 120, text: 'error: <source> is sad', severity: 3},
|
||||
text: 'error: <source> is sad',
|
||||
@@ -281,95 +280,98 @@ describe('Rust compiler output', () => {
|
||||
|
||||
describe('Tool output', () => {
|
||||
it('removes the relative path', () => {
|
||||
utils
|
||||
.parseOutput('./example.cpp:1:1: Fatal: There were 1 errors compiling module, stopping', './example.cpp')
|
||||
.should.deep.equals([
|
||||
{
|
||||
tag: {
|
||||
column: 1,
|
||||
line: 1,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'example.cpp',
|
||||
},
|
||||
text: '<source>:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
expect(
|
||||
utils.parseOutput(
|
||||
'./example.cpp:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
'./example.cpp',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 1,
|
||||
line: 1,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'example.cpp',
|
||||
},
|
||||
]);
|
||||
text: '<source>:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('removes fortran relative path', () => {
|
||||
utils
|
||||
.parseOutput("./example.f90:5:22: error: No explicit type declared for 'y'", './example.f90')
|
||||
.should.deep.equals([
|
||||
{
|
||||
tag: {
|
||||
column: 22,
|
||||
line: 5,
|
||||
text: "error: No explicit type declared for 'y'",
|
||||
severity: 3,
|
||||
file: 'example.f90',
|
||||
},
|
||||
text: "<source>:5:22: error: No explicit type declared for 'y'",
|
||||
expect(
|
||||
utils.parseOutput("./example.f90:5:22: error: No explicit type declared for 'y'", './example.f90'),
|
||||
).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 22,
|
||||
line: 5,
|
||||
text: "error: No explicit type declared for 'y'",
|
||||
severity: 3,
|
||||
file: 'example.f90',
|
||||
},
|
||||
]);
|
||||
text: "<source>:5:22: error: No explicit type declared for 'y'",
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('removes the jailed path', () => {
|
||||
utils
|
||||
.parseOutput(
|
||||
expect(
|
||||
utils.parseOutput(
|
||||
'/home/ubuntu/example.cpp:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
'./example.cpp',
|
||||
)
|
||||
.should.deep.equals([
|
||||
{
|
||||
tag: {
|
||||
column: 1,
|
||||
line: 1,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'example.cpp',
|
||||
},
|
||||
text: '<source>:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
tag: {
|
||||
column: 1,
|
||||
line: 1,
|
||||
text: 'Fatal: There were 1 errors compiling module, stopping',
|
||||
severity: 3,
|
||||
file: 'example.cpp',
|
||||
},
|
||||
]);
|
||||
text: '<source>:1:1: Fatal: There were 1 errors compiling module, stopping',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pads right', () => {
|
||||
it('works', () => {
|
||||
utils.padRight('abcd', 8).should.equal('abcd ');
|
||||
utils.padRight('a', 8).should.equal('a ');
|
||||
utils.padRight('', 8).should.equal(' ');
|
||||
utils.padRight('abcd', 4).should.equal('abcd');
|
||||
utils.padRight('abcd', 2).should.equal('abcd');
|
||||
expect(utils.padRight('abcd', 8)).toEqual('abcd ');
|
||||
expect(utils.padRight('a', 8)).toEqual('a ');
|
||||
expect(utils.padRight('', 8)).toEqual(' ');
|
||||
expect(utils.padRight('abcd', 4)).toEqual('abcd');
|
||||
expect(utils.padRight('abcd', 2)).toEqual('abcd');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Trim right', () => {
|
||||
it('works', () => {
|
||||
utils.trimRight(' ').should.equal('');
|
||||
utils.trimRight('').should.equal('');
|
||||
utils.trimRight(' ab ').should.equal(' ab');
|
||||
utils.trimRight(' a b ').should.equal(' a b');
|
||||
utils.trimRight('a ').should.equal('a');
|
||||
expect(utils.trimRight(' ')).toEqual('');
|
||||
expect(utils.trimRight('')).toEqual('');
|
||||
expect(utils.trimRight(' ab ')).toEqual(' ab');
|
||||
expect(utils.trimRight(' a b ')).toEqual(' a b');
|
||||
expect(utils.trimRight('a ')).toEqual('a');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Anonymizes all kind of IPs', () => {
it('Ignores localhost', () => {
utils.anonymizeIp('localhost').should.equal('localhost');
utils.anonymizeIp('localhost:42').should.equal('localhost:42');
expect(utils.anonymizeIp('localhost')).toEqual('localhost');
expect(utils.anonymizeIp('localhost:42')).toEqual('localhost:42');
});
it('Removes last octet from IPv4 addresses', () => {
utils.anonymizeIp('127.0.0.0').should.equal('127.0.0.0');
utils.anonymizeIp('127.0.0.10').should.equal('127.0.0.0');
utils.anonymizeIp('127.0.0.255').should.equal('127.0.0.0');
expect(utils.anonymizeIp('127.0.0.0')).toEqual('127.0.0.0');
expect(utils.anonymizeIp('127.0.0.10')).toEqual('127.0.0.0');
expect(utils.anonymizeIp('127.0.0.255')).toEqual('127.0.0.0');
});
it('Removes last 3 hextets from IPv6 addresses', () => {
// Not necessarily valid addresses, we're interested in the format
utils.anonymizeIp('ffff:aaaa:dead:beef').should.equal('ffff:0:0:0');
utils.anonymizeIp('bad:c0de::').should.equal('bad:0:0:0');
utils.anonymizeIp(':1d7e::c0fe').should.equal(':0:0:0');
expect(utils.anonymizeIp('ffff:aaaa:dead:beef')).toEqual('ffff:0:0:0');
expect(utils.anonymizeIp('bad:c0de::')).toEqual('bad:0:0:0');
expect(utils.anonymizeIp(':1d7e::c0fe')).toEqual(':0:0:0');
});
});

@@ -381,7 +383,7 @@ describe('Logger functionality', () => {
infoStream.write('first\n');
infoStream.write('part');
infoStream.write('ial\n');
logs.should.deep.equal([
expect(logs).toEqual([
{
level: 'info',
msg: 'first',
@@ -397,7 +399,7 @@ describe('Logger functionality', () => {
const fakeLog = {log: (level: string, msg: string) => logs.push({level, msg})} as any as winston.Logger;
const infoStream = makeLogStream('warn', fakeLog);
infoStream.write('ooh\n');
logs.should.deep.equal([
expect(logs).toEqual([
{
level: 'warn',
msg: 'ooh',
@@ -409,25 +411,25 @@ describe('Logger functionality', () => {
describe('Hash interface', () => {
it('correctly hashes strings', () => {
const version = 'Compiler Explorer Tests Version 0';
utils
.getHash('cream cheese', version)
.should.equal('cfff2d1f7a213e314a67cce8399160ae884f794a3ee9d4a01cd37a8c22c67d94');
utils
.getHash('large eggs', version)
.should.equal('9144dec50b8df5bc5cc24ba008823cafd6616faf2f268af84daf49ac1d24feb0');
utils
.getHash('sugar', version)
.should.equal('afa3c89d0f6a61de6805314c9bd7c52d020425a3a3c7bbdfa7c0daec594e5ef1');
expect(utils.getHash('cream cheese', version)).toEqual(
'cfff2d1f7a213e314a67cce8399160ae884f794a3ee9d4a01cd37a8c22c67d94',
);
expect(utils.getHash('large eggs', version)).toEqual(
'9144dec50b8df5bc5cc24ba008823cafd6616faf2f268af84daf49ac1d24feb0',
);
expect(utils.getHash('sugar', version)).toEqual(
'afa3c89d0f6a61de6805314c9bd7c52d020425a3a3c7bbdfa7c0daec594e5ef1',
);
});
it('correctly hashes objects', () => {
utils
.getHash({
expect(
utils.getHash({
toppings: [
{name: 'raspberries', optional: false},
{name: 'ground cinnamon', optional: true},
],
})
.should.equal('e205d63abd5db363086621fdc62c4c23a51b733bac5855985a8b56642d570491');
}),
).toEqual('e205d63abd5db363086621fdc62c4c23a51b733bac5855985a8b56642d570491');
});
});

@@ -435,7 +437,7 @@ describe('GoldenLayout utils', () => {
it('finds every editor & compiler', async () => {
const state = await fs.readJson('test/example-states/default-state.json');
const contents = utils.glGetMainContents(state.content);
contents.should.deep.equal({
expect(contents).toEqual({
editors: [
{source: 'Editor 1', language: 'c++'},
{source: 'Editor 2', language: 'c++'},
@@ -455,60 +457,60 @@ describe('GoldenLayout utils', () => {

describe('squashes horizontal whitespace', () => {
it('handles empty input', () => {
utils.squashHorizontalWhitespace('').should.equals('');
utils.squashHorizontalWhitespace(' ').should.equals('');
utils.squashHorizontalWhitespace(' ').should.equals('');
expect(utils.squashHorizontalWhitespace('')).toEqual('');
expect(utils.squashHorizontalWhitespace(' ')).toEqual('');
expect(utils.squashHorizontalWhitespace(' ')).toEqual('');
});
it('handles leading spaces', () => {
utils.squashHorizontalWhitespace(' abc').should.equals(' abc');
utils.squashHorizontalWhitespace(' abc').should.equals(' abc');
utils.squashHorizontalWhitespace(' abc').should.equals(' abc');
expect(utils.squashHorizontalWhitespace(' abc')).toEqual(' abc');
expect(utils.squashHorizontalWhitespace(' abc')).toEqual(' abc');
expect(utils.squashHorizontalWhitespace(' abc')).toEqual(' abc');
});
it('handles interline spaces', () => {
utils.squashHorizontalWhitespace('abc abc').should.equals('abc abc');
utils.squashHorizontalWhitespace('abc abc').should.equals('abc abc');
utils.squashHorizontalWhitespace('abc abc').should.equals('abc abc');
expect(utils.squashHorizontalWhitespace('abc abc')).toEqual('abc abc');
expect(utils.squashHorizontalWhitespace('abc abc')).toEqual('abc abc');
expect(utils.squashHorizontalWhitespace('abc abc')).toEqual('abc abc');
});
it('handles leading and interline spaces', () => {
utils.squashHorizontalWhitespace(' abc abc').should.equals(' abc abc');
utils.squashHorizontalWhitespace(' abc abc').should.equals(' abc abc');
utils.squashHorizontalWhitespace(' abc abc').should.equals(' abc abc');
utils.squashHorizontalWhitespace(' abc abc').should.equals(' abc abc');
expect(utils.squashHorizontalWhitespace(' abc abc')).toEqual(' abc abc');
expect(utils.squashHorizontalWhitespace(' abc abc')).toEqual(' abc abc');
expect(utils.squashHorizontalWhitespace(' abc abc')).toEqual(' abc abc');
expect(utils.squashHorizontalWhitespace(' abc abc')).toEqual(' abc abc');
});
});

describe('replaces all substrings', () => {
it('works with no substitutions', () => {
const string = 'This is a line with no replacements';
utils.replaceAll(string, 'not present', "won't be substituted").should.equal(string);
expect(utils.replaceAll(string, 'not present', "won't be substituted")).toEqual(string);
});
it('handles odd cases', () => {
utils.replaceAll('', '', '').should.equal('');
utils.replaceAll('Hello', '', '').should.equal('Hello');
expect(utils.replaceAll('', '', '')).toEqual('');
expect(utils.replaceAll('Hello', '', '')).toEqual('Hello');
});
it('works with single replacement', () => {
utils
.replaceAll('This is a line with a mistook in it', 'mistook', 'mistake')
.should.equal('This is a line with a mistake in it');
utils
.replaceAll('This is a line with a mistook', 'mistook', 'mistake')
.should.equal('This is a line with a mistake');
utils.replaceAll('Mistooks were made', 'Mistooks', 'Mistakes').should.equal('Mistakes were made');
expect(utils.replaceAll('This is a line with a mistook in it', 'mistook', 'mistake')).toEqual(
'This is a line with a mistake in it',
);
expect(utils.replaceAll('This is a line with a mistook', 'mistook', 'mistake')).toEqual(
'This is a line with a mistake',
);
expect(utils.replaceAll('Mistooks were made', 'Mistooks', 'Mistakes')).toEqual('Mistakes were made');
});

it('works with multiple replacements', () => {
utils.replaceAll('A mistook is a mistook', 'mistook', 'mistake').should.equal('A mistake is a mistake');
utils.replaceAll('aaaaaaaaaaaaaaaaaaaaaaaaaaa', 'a', 'b').should.equal('bbbbbbbbbbbbbbbbbbbbbbbbbbb');
expect(utils.replaceAll('A mistook is a mistook', 'mistook', 'mistake')).toEqual('A mistake is a mistake');
expect(utils.replaceAll('aaaaaaaaaaaaaaaaaaaaaaaaaaa', 'a', 'b')).toEqual('bbbbbbbbbbbbbbbbbbbbbbbbbbb');
});

it('works with overlapping replacements', () => {
utils.replaceAll('aaaaaaaa', 'a', 'ba').should.equal('babababababababa');
expect(utils.replaceAll('aaaaaaaa', 'a', 'ba')).toEqual('babababababababa');
});
});

describe('encodes in our version of base32', () => {
function doTest(original, expected) {
utils.base32Encode(Buffer.from(original)).should.equal(expected);
expect(utils.base32Encode(Buffer.from(original))).toEqual(expected);
}

// Done by hand to check that they are valid
@@ -545,72 +547,76 @@ describe('encodes in our version of base32', () => {

describe('fileExists', () => {
it('Returns true for files that exists', async () => {
(await utils.fileExists(fileURLToPath(import.meta.url))).should.be.true;
await expect(utils.fileExists(fileURLToPath(import.meta.url))).resolves.toBe(true);
});
it("Returns false for files that don't exist", async () => {
(await utils.fileExists('./ABC-FileThatDoesNotExist.extension')).should.be.false;
await expect(utils.fileExists('./ABC-FileThatDoesNotExist.extension')).resolves.toBe(false);
});
it('Returns false for directories that exist', async () => {
(await utils.fileExists(path.resolve(path.dirname(fileURLToPath(import.meta.url))))).should.be.false;
await expect(utils.fileExists(path.resolve(path.dirname(fileURLToPath(import.meta.url))))).resolves.toBe(false);
});
});

describe('safe semver', () => {
it('should understand most kinds of semvers', () => {
utils.asSafeVer('0').should.equal('0.0.0');
utils.asSafeVer('1').should.equal('1.0.0');
expect(utils.asSafeVer('0')).toEqual('0.0.0');
expect(utils.asSafeVer('1')).toEqual('1.0.0');

utils.asSafeVer('1.0').should.equal('1.0.0');
utils.asSafeVer('1.1').should.equal('1.1.0');
expect(utils.asSafeVer('1.0')).toEqual('1.0.0');
expect(utils.asSafeVer('1.1')).toEqual('1.1.0');

utils.asSafeVer('1.1.0').should.equal('1.1.0');
utils.asSafeVer('1.1.1').should.equal('1.1.1');
expect(utils.asSafeVer('1.1.0')).toEqual('1.1.0');
expect(utils.asSafeVer('1.1.1')).toEqual('1.1.1');

utils.asSafeVer('trunk').should.equal(utils.magic_semver.trunk);
utils.asSafeVer('(trunk)').should.equal(utils.magic_semver.trunk);
utils.asSafeVer('(123.456.789 test)').should.equal(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('trunk')).toEqual(utils.magic_semver.trunk);
expect(utils.asSafeVer('(trunk)')).toEqual(utils.magic_semver.trunk);
expect(utils.asSafeVer('(123.456.789 test)')).toEqual(utils.magic_semver.non_trunk);

utils.asSafeVer('0..0').should.equal(utils.magic_semver.non_trunk);
utils.asSafeVer('0.0.').should.equal(utils.magic_semver.non_trunk);
utils.asSafeVer('0.').should.equal(utils.magic_semver.non_trunk);
utils.asSafeVer('.0.0').should.equal(utils.magic_semver.non_trunk);
utils.asSafeVer('.0..').should.equal(utils.magic_semver.non_trunk);
utils.asSafeVer('0..').should.equal(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('0..0')).toEqual(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('0.0.')).toEqual(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('0.')).toEqual(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('.0.0')).toEqual(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('.0..')).toEqual(utils.magic_semver.non_trunk);
expect(utils.asSafeVer('0..')).toEqual(utils.magic_semver.non_trunk);

utils.asSafeVer('123 TEXT').should.equal('123.0.0');
utils.asSafeVer('123.456 TEXT').should.equal('123.456.0');
utils.asSafeVer('123.456.789 TEXT').should.equal('123.456.789');
expect(utils.asSafeVer('123 TEXT')).toEqual('123.0.0');
expect(utils.asSafeVer('123.456 TEXT')).toEqual('123.456.0');
expect(utils.asSafeVer('123.456.789 TEXT')).toEqual('123.456.789');
});
});

describe('argument splitting', () => {
it('should handle normal things', () => {
utils
.splitArguments('-hello --world etc --std=c++20')
.should.deep.equal(['-hello', '--world', 'etc', '--std=c++20']);
expect(utils.splitArguments('-hello --world etc --std=c++20')).toEqual([
'-hello',
'--world',
'etc',
'--std=c++20',
]);
});

it('should handle hash chars', () => {
utils
.splitArguments('-Wno#warnings -Wno-#pragma-messages')
.should.deep.equal(['-Wno#warnings', '-Wno-#pragma-messages']);
expect(utils.splitArguments('-Wno#warnings -Wno-#pragma-messages')).toEqual([
'-Wno#warnings',
'-Wno-#pragma-messages',
]);
});

it('should handle doublequoted args', () => {
utils.splitArguments('--hello "-world etc"').should.deep.equal(['--hello', '-world etc']);
expect(utils.splitArguments('--hello "-world etc"')).toEqual(['--hello', '-world etc']);
});

it('should handle singlequoted args', () => {
utils.splitArguments("--hello '-world etc'").should.deep.equal(['--hello', '-world etc']);
expect(utils.splitArguments("--hello '-world etc'")).toEqual(['--hello', '-world etc']);
});

it('should handle cheekyness part 1', () => {
/* eslint-disable no-useless-escape */
utils.splitArguments('hello #veryfancy etc').should.deep.equal(['hello', '#veryfancy', 'etc']);
expect(utils.splitArguments('hello #veryfancy etc')).toEqual(['hello', '#veryfancy', 'etc']);
/* eslint-enable no-useless-escape */
});

it('should handle cheekyness part 2', () => {
utils.splitArguments('hello \\#veryfancy etc').should.deep.equal(['hello', '\\']);
expect(utils.splitArguments('hello \\#veryfancy etc')).toEqual(['hello', '\\']);
});
});

@@ -26,15 +26,16 @@ import os from 'os';
import path from 'path';
import {fileURLToPath} from 'url';

import chai from 'chai';
import fs from 'fs-extra';
import temp from 'temp';
import {expect} from 'vitest';

import {CompilationEnvironment} from '../lib/compilation-env.js';
import {CompilationQueue} from '../lib/compilation-queue.js';
import {CompilerProps, fakeProps} from '../lib/properties.js';
import {CompilerInfo} from '../types/compiler.interfaces.js';
import {ParseFiltersAndOutputOptions} from '../types/features/filters.interfaces.js';
import {Language} from '../types/languages.interfaces.js';

// TODO: Find proper type for options
export function makeCompilationEnvironment(options: Record<string, any>): CompilationEnvironment {
@@ -47,14 +48,16 @@ export function makeFakeCompilerInfo(props: Partial<CompilerInfo>): CompilerInfo
return props as CompilerInfo;
}

export function makeFakeLanguage(props: Partial<Language>): Language {
return props as Language;
}

export function makeFakeParseFiltersAndOutputOptions(
options: Partial<ParseFiltersAndOutputOptions>,
): ParseFiltersAndOutputOptions {
return options as ParseFiltersAndOutputOptions;
}

export const should = chai.should();

// This combines a should assert and a type guard
// Example:
//
@@ -66,7 +69,8 @@ export const should = chai.should();
// a = null;
// shouldExist(a); /* throws should.exist assertion
export function shouldExist<T>(value: T, message?: string): value is Exclude<T, null | undefined> {
should.exist(value, message);
// TODO: if the message is set we should have a proper message here; since the move to vitest we lost it.
expect(value).toEqual(expect.anything());
return true;
}

@@ -86,4 +90,4 @@ export function newTempDir() {
}

// eslint-disable-next-line -- do not rewrite exports
export {chai, path, fs};
export {path, fs};

@@ -24,6 +24,8 @@

import child_process from 'child_process';

import {beforeAll, describe, expect, it} from 'vitest';

import {WineVcCompiler} from '../lib/compilers/wine-vc.js';
import {WslVcCompiler} from '../lib/compilers/wsl-vc.js';
import {LanguageKey} from '../types/languages.interfaces.js';
@@ -47,18 +49,18 @@ const info = {
describe('Paths', () => {
let env;

before(() => {
beforeAll(() => {
env = makeCompilationEnvironment({languages});
});

it('Linux -> Wine path', () => {
const compiler = new WineVcCompiler(makeFakeCompilerInfo(info), env);
compiler.filename('/tmp/123456/output.s').should.equal('Z:/tmp/123456/output.s');
expect(compiler.filename('/tmp/123456/output.s')).toEqual('Z:/tmp/123456/output.s');
});

it('Linux -> Windows path', function () {
it('Linux -> Windows path', () => {
const compiler = new WslVcCompiler(makeFakeCompilerInfo(info), env);
compiler.filename('/mnt/c/tmp/123456/output.s', '/mnt/c/tmp').should.equal('c:/tmp/123456/output.s');
expect(compiler.filename('/mnt/c/tmp/123456/output.s', '/mnt/c/tmp')).toEqual('c:/tmp/123456/output.s');
});
});

@@ -89,7 +91,7 @@ if (process.platform === 'linux' && child_process.execSync('uname -a').toString(
describe('Wsl compiler', () => {
let compiler;

before(() => {
beforeAll(() => {
compiler = createCompiler(WslVcCompiler);
});

@@ -11,7 +11,6 @@
/* Code generation */
"outDir": "./out/dist",
"typeRoots": ["./node_modules/@types"],
"types": ["mocha", "chai"],
/* Other options */
"allowJs": true
}

11
vitest.config.ts
Normal file
@@ -0,0 +1,11 @@
// eslint-disable-next-line node/no-unpublished-import
import {defineConfig} from 'vitest/config';

// eslint-disable-next-line import/no-default-export
export default defineConfig({
test: {
include: ['test/**/*.ts'],
exclude: ['test/_*.ts', 'test/utils.ts'],
setupFiles: ['/test/_setup-fake-aws.ts', '/test/_setup-log.ts'],
},
});
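
A minimal sketch of a test written against this vitest setup (hypothetical example, not part of this commit; the `../lib/utils.js` import path is an assumption):

// Hypothetical post-migration test sketch, not part of this commit.
import {describe, expect, it} from 'vitest';

import * as utils from '../lib/utils.js';

describe('padRight', () => {
    it('pads to the requested width', () => {
        // vitest's expect(...).toEqual(...) replaces the old chai `.should.equal(...)` style.
        expect(utils.padRight('abcd', 8)).toEqual('abcd    ');
    });
});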