chore: tests for hoppscotch-cli (#2300)

Co-authored-by: Andrew Bastin <andrewbastin.k@gmail.com>
Authored by Deepanshu Dhruw on 2022-05-11 15:44:19 +05:30, committed by GitHub
parent d04520698d
commit 432337b801
30 changed files with 1919 additions and 155 deletions


@@ -0,0 +1,193 @@
/*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/tmp/jest_rs",
// Automatically clear mock calls, instances and results before every test
clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: true,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
// coverageDirectory: undefined,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// Indicates which provider should be used to instrument code for coverage
// coverageProvider: "babel",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {
// 'ts-jest': {
// useESM: true,
// },
// },
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
moduleFileExtensions: ["js", "ts", "json"],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// moduleNameMapper: {
// '^(\\.{1,2}/.*)\\.js$': '$1',
// },
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
preset: "ts-jest/presets/js-with-babel",
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state before every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state and implementation before every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
setupFilesAfterEnv: ["./jest.setup.ts"],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: "node",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
testMatch: [
// "**/__tests__/**/*.[jt]s?(x)",
"**/src/__tests__/**/*.*.ts",
],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
testPathIgnorePatterns: ["/node_modules/", "/dist/"],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
transform: {
"^.+\\.ts$": "ts-jest",
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
verbose: true,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};


@@ -0,0 +1 @@
import "@relmify/jest-fp-ts";


@@ -16,7 +16,8 @@
"debugger": "node debugger.js 9999",
"prepublish": "pnpm exec tsup",
"prettier-format": "prettier --config .prettierrc 'src/**/*.ts' --write",
"do-typecheck": "pnpm exec tsc --noEmit"
"do-typecheck": "pnpm exec tsc --noEmit",
"test": "pnpm run build && jest && rm -rf dist"
},
"keywords": [
"cli",
@@ -37,19 +38,26 @@
"devDependencies": {
"@hoppscotch/data": "workspace:^0.4.2",
"@hoppscotch/js-sandbox": "workspace:^2.0.0",
"@swc/core": "^1.2.163",
"@relmify/jest-fp-ts": "^2.0.2",
"@swc/core": "^1.2.181",
"@types/axios": "^0.14.0",
"@types/chalk": "^2.2.0",
"@types/commander": "^2.12.2",
"@types/jest": "^27.4.1",
"@types/lodash": "^4.14.181",
"@types/qs": "^6.9.7",
"axios": "^0.21.4",
"chalk": "^4.1.1",
"commander": "^8.0.0",
"esm": "^3.2.25",
"fp-ts": "^2.11.3",
"fp-ts": "^2.12.1",
"io-ts": "^2.2.16",
"jest": "^27.5.1",
"lodash": "^4.17.21",
"prettier": "^2.6.2",
"qs": "^6.10.3",
"tsup": "^5.12.4",
"typescript": "^4.6.3"
"ts-jest": "^27.1.4",
"tsup": "^5.12.7",
"typescript": "^4.6.4"
}
}


@@ -0,0 +1,64 @@
import { ExecException } from "child_process";
import { HoppErrorCode } from "../../types/errors";
import { execAsync, getErrorCode, getTestJsonFilePath } from "../utils";
describe("Test 'hopp test <file>' command:", () => {
test("No collection file path provided.", async () => {
const cmd = `node ./bin/hopp test`;
const { stdout } = await execAsync(cmd);
const out = getErrorCode(stdout);
expect(out).toBe<HoppErrorCode>("NO_FILE_PATH");
});
test("Collection file not found.", async () => {
const cmd = `node ./bin/hopp test notfound.json`;
const { stdout } = await execAsync(cmd);
const out = getErrorCode(stdout);
expect(out).toBe<HoppErrorCode>("FILE_NOT_FOUND");
});
test("Malformed collection file.", async () => {
const cmd = `node ./bin/hopp test ${getTestJsonFilePath(
"malformed-collection.json"
)}`;
const { stdout } = await execAsync(cmd);
const out = getErrorCode(stdout);
expect(out).toBe<HoppErrorCode>("MALFORMED_COLLECTION");
});
test("Invalid arguement.", async () => {
const cmd = `node ./bin/hopp invalid-arg`;
const { stdout } = await execAsync(cmd);
const out = getErrorCode(stdout);
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
});
test("Collection file not JSON type.", async () => {
const cmd = `node ./bin/hopp test ${getTestJsonFilePath("notjson.txt")}`;
const { stdout } = await execAsync(cmd);
const out = getErrorCode(stdout);
expect(out).toBe<HoppErrorCode>("FILE_NOT_JSON");
});
test("Some errors occured (exit code 1).", async () => {
const cmd = `node ./bin/hopp test ${getTestJsonFilePath("fails.json")}`;
const { error } = await execAsync(cmd);
expect(error).not.toBeNull();
expect(error).toMatchObject(<ExecException>{
code: 1,
});
});
test("No errors occured (exit code 0).", async () => {
const cmd = `node ./bin/hopp test ${getTestJsonFilePath("passes.json")}`;
const { error } = await execAsync(cmd);
expect(error).toBeNull();
});
});


@@ -0,0 +1,26 @@
import { HoppCLIError } from "../../../types/errors";
import { checkFilePath } from "../../../utils/checks";
describe("checkFilePath", () => {
test("File doesn't exists.", () => {
return expect(
checkFilePath("./src/samples/this-file-not-exists.json")()
).resolves.toSubsetEqualLeft(<HoppCLIError>{
code: "FILE_NOT_FOUND",
});
});
test("File not of JSON type.", () => {
return expect(
checkFilePath("./src/__tests__/samples/notjson.txt")()
).resolves.toSubsetEqualLeft(<HoppCLIError>{
code: "FILE_NOT_JSON",
});
});
test("Existing JSON file.", () => {
return expect(
checkFilePath("./src/__tests__/samples/passes.json")()
).resolves.toBeRight();
});
});


@@ -0,0 +1,19 @@
import { isHoppCLIError } from "../../../utils/checks";
describe("isHoppCLIError", () => {
test("NULL error value.", () => {
expect(isHoppCLIError(null)).toBeFalsy();
});
test("Non-existing code property.", () => {
expect(isHoppCLIError({ name: "name" })).toBeFalsy();
});
test("Invalid code value.", () => {
expect(isHoppCLIError({ code: 2 })).toBeFalsy();
});
test("Valid code value.", () => {
expect(isHoppCLIError({ code: "TEST_SCRIPT_ERROR" })).toBeTruthy();
});
});


@@ -0,0 +1,19 @@
import { isHoppErrnoException } from "../../../utils/checks";
describe("isHoppErrnoException", () => {
test("NULL exception value.", () => {
expect(isHoppErrnoException(null)).toBeFalsy();
});
test("Non-existing name property.", () => {
expect(isHoppErrnoException({ what: "what" })).toBeFalsy();
});
test("Invalid name value.", () => {
expect(isHoppErrnoException({ name: 3 })).toBeFalsy();
});
test("Valid name value.", () => {
expect(isHoppErrnoException({ name: "name" })).toBeTruthy();
});
});


@@ -0,0 +1,84 @@
import { isRESTCollection } from "../../../utils/checks";
describe("isRESTCollection", () => {
test("Undefined collection value.", () => {
expect(isRESTCollection(undefined)).toBeFalsy();
});
test("Invalid id value.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: 1,
})
).toBeFalsy();
});
test("Invalid requests value.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: "1",
requests: null,
})
).toBeFalsy();
});
test("Invalid folders value.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: "1",
requests: [],
folders: undefined,
})
).toBeFalsy();
});
test("Invalid RESTCollection(s) in folders.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: "1",
requests: [],
folders: [
{
v: 1,
name: "test1",
id: "2",
requests: undefined,
folders: [],
},
],
})
).toBeFalsy();
});
test("Invalid HoppRESTRequest(s) in requests.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: "1",
requests: [{}],
folders: [],
})
).toBeFalsy();
});
test("Valid RESTCollection.", () => {
expect(
isRESTCollection({
v: 1,
name: "test",
id: "1",
requests: [],
folders: [],
})
).toBeTruthy();
});
});


@@ -0,0 +1,116 @@
import { collectionsRunner } from "../../../utils/collections";
import { HoppRESTRequest } from "@hoppscotch/data";
import axios, { AxiosResponse } from "axios";
import "@relmify/jest-fp-ts";
jest.mock("axios");
const SAMPLE_HOPP_REQUEST = <HoppRESTRequest>{
v: "1",
name: "request",
method: "GET",
endpoint: "https://example.com",
params: [],
headers: [],
preRequestScript: "",
testScript: "",
auth: {
authActive: false,
authType: "none",
},
body: {
contentType: null,
body: null,
},
};
const SAMPLE_RESOLVED_RESPONSE = <AxiosResponse>{
data: { body: 1 },
status: 200,
statusText: "OK",
config: {
url: "https://example.com",
supported: true,
method: "GET",
},
headers: [],
};
describe("collectionsRunner", () => {
beforeEach(() => {
jest.clearAllMocks();
});
afterAll(() => {
jest.clearAllMocks();
});
test("Empty HoppCollection.", () => {
return expect(collectionsRunner([])()).resolves.toStrictEqual([]);
});
test("Empty requests and folders in collection.", () => {
return expect(
collectionsRunner([
{
v: 1,
name: "name",
folders: [],
requests: [],
},
])()
).resolves.toMatchObject([]);
});
test("Non-empty requests in collection.", () => {
(axios as unknown as jest.Mock).mockResolvedValue(SAMPLE_RESOLVED_RESPONSE);
return expect(
collectionsRunner([
{
v: 1,
name: "collection",
folders: [],
requests: [SAMPLE_HOPP_REQUEST],
},
])()
).resolves.toMatchObject([
{
path: "collection/request",
tests: [],
errors: [],
result: true,
},
]);
});
test("Non-empty folders in collection.", () => {
(axios as unknown as jest.Mock).mockResolvedValue(SAMPLE_RESOLVED_RESPONSE);
return expect(
collectionsRunner([
{
v: 1,
name: "collection",
folders: [
{
v: 1,
name: "folder",
folders: [],
requests: [SAMPLE_HOPP_REQUEST],
},
],
requests: [],
},
])()
).resolves.toMatchObject([
{
path: "collection/folder/request",
tests: [],
errors: [],
result: true,
},
]);
});
});


@@ -0,0 +1,35 @@
import { collectionsRunnerResult } from "../../../utils/collections";
const FALSE_RESULT_REPORT = {
path: "some_path",
tests: [],
errors: [],
result: false,
duration: { test: 1, request: 1, preRequest: 1 },
};
const TRUE_RESULT_REPORT = {
path: "some_path",
tests: [],
errors: [],
result: true,
duration: { test: 1, request: 1, preRequest: 1 },
};
describe("collectionsRunnerResult", () => {
test("Empty request-report.", () => {
expect(collectionsRunnerResult([])).toBeTruthy();
});
test("Atleast 1 false result in request-report.", () => {
expect(
collectionsRunnerResult([FALSE_RESULT_REPORT, TRUE_RESULT_REPORT])
).toBeFalsy();
});
test("All true result(s) in request-report.", () => {
expect(
collectionsRunnerResult([TRUE_RESULT_REPORT, TRUE_RESULT_REPORT])
).toBeTruthy();
});
});
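
These cases pin down the aggregate semantics: an empty report list and an all-true list count as a successful run, while a single false result fails it. A sketch of logic consistent with these expectations (the real collectionsRunnerResult in utils/collections is not part of this diff):

type RequestReportLike = { path: string; result: boolean };

// Every request report must be successful; an empty list is vacuously successful.
const collectionsRunnerResultSketch = (reports: RequestReportLike[]): boolean =>
  reports.every((report) => report.result);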


@@ -0,0 +1,24 @@
import { DEFAULT_DURATION_PRECISION } from "../../../utils/constants";
import { getDurationInSeconds } from "../../../utils/getters";
describe("getDurationInSeconds", () => {
const testDurations = [
{ end: [1, 111111111], precision: 1, expected: 1.1 },
{ end: [2, 333333333], precision: 2, expected: 2.33 },
{
end: [3, 555555555],
precision: DEFAULT_DURATION_PRECISION,
expected: 3.556,
},
{ end: [4, 777777777], precision: 4, expected: 4.7778 },
];
test.each(testDurations)(
"($end.0 s + $end.1 ns) rounded-off to $expected",
({ end, precision, expected }) => {
expect(getDurationInSeconds(end as [number, number], precision)).toBe(
expected
);
}
);
});
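
The expected values follow from treating end as a process.hrtime() tuple of [seconds, nanoseconds] and rounding the sum to the requested number of decimal places (the 3.556 case suggests DEFAULT_DURATION_PRECISION is 3). A sketch consistent with the table above; the actual getDurationInSeconds implementation is not shown in this diff:

const getDurationInSecondsSketch = (
  end: [number, number],
  precision: number
): number => {
  // seconds plus nanoseconds, rounded to `precision` decimal places
  const seconds = end[0] + end[1] / 1e9;
  return Number(seconds.toFixed(precision));
};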


@@ -0,0 +1,42 @@
import { Environment } from "@hoppscotch/data";
import { getEffectiveFinalMetaData } from "../../../utils/getters";
const DEFAULT_ENV = <Environment>{
name: "name",
variables: [{ key: "PARAM", value: "parsed_param" }],
};
describe("getEffectiveFinalMetaData", () => {
test("Empty list of meta-data.", () => {
expect(getEffectiveFinalMetaData([], DEFAULT_ENV)).toSubsetEqualRight([]);
});
test("Non-empty active list of meta-data with unavailable ENV.", () => {
expect(
getEffectiveFinalMetaData(
[{ active: true, key: "<<UNKNOWN_KEY>>", value: "<<UNKNOWN_VALUE>>" }],
DEFAULT_ENV
)
).toSubsetEqualRight([{ active: true, key: "", value: "" }]);
});
test("Inactive list of meta-data.", () => {
expect(
getEffectiveFinalMetaData(
[{ active: false, key: "KEY", value: "<<PARAM>>" }],
DEFAULT_ENV
)
).toSubsetEqualRight([]);
});
test("Active list of meta-data.", () => {
expect(
getEffectiveFinalMetaData(
[{ active: true, key: "PARAM", value: "<<PARAM>>" }],
DEFAULT_ENV
)
).toSubsetEqualRight([
{ active: true, key: "PARAM", value: "parsed_param" },
]);
});
});
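
These cases document the <<VAR>> templating behaviour: inactive entries are dropped, and placeholders in active entries resolve against the selected environment, with unknown variables falling back to an empty string. A simplified sketch of that resolution (the real getEffectiveFinalMetaData additionally wraps its result in an Either):

type MetaDataEntry = { active: boolean; key: string; value: string };

const resolveTemplates = (
  input: string,
  variables: { key: string; value: string }[]
): string =>
  input.replace(/<<([^>]+)>>/g, (_match: string, name: string) =>
    variables.find((v) => v.key === name)?.value ?? ""
  );

const resolveMetaDataSketch = (
  metaData: MetaDataEntry[],
  env: { variables: { key: string; value: string }[] }
): MetaDataEntry[] =>
  metaData
    .filter((entry) => entry.active)
    .map((entry) => ({
      active: entry.active,
      key: resolveTemplates(entry.key, env.variables),
      value: resolveTemplates(entry.value, env.variables),
    }));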


@@ -0,0 +1,36 @@
import { HoppCLIError } from "../../../types/errors";
import { parseCollectionData } from "../../../utils/mutators";
describe("parseCollectionData", () => {
test("Reading non-existing file.", () => {
return expect(
parseCollectionData("./src/__tests__/samples/notexist.txt")()
).resolves.toSubsetEqualLeft(<HoppCLIError>{
code: "UNKNOWN_ERROR",
});
});
test("Unparseable JSON contents.", () => {
return expect(
parseCollectionData("./src/__tests__/samples/malformed-collection.json")()
).resolves.toSubsetEqualLeft(<HoppCLIError>{
code: "MALFORMED_COLLECTION",
});
});
test("Invalid HoppCollection.", () => {
return expect(
parseCollectionData(
"./src/__tests__/samples/malformed-collection2.json"
)()
).resolves.toSubsetEqualLeft(<HoppCLIError>{
code: "MALFORMED_COLLECTION",
});
});
test("Valid HoppCollection.", () => {
return expect(
parseCollectionData("./src/__tests__/samples/passes.json")()
).resolves.toBeRight();
});
});


@@ -0,0 +1,148 @@
import { Environment, HoppRESTRequest } from "@hoppscotch/data";
import { EffectiveHoppRESTRequest } from "../../../interfaces/request";
import { HoppCLIError } from "../../../types/errors";
import { getEffectiveRESTRequest } from "../../../utils/pre-request";
const DEFAULT_ENV = <Environment>{
name: "name",
variables: [
{
key: "HEADER",
value: "parsed_header",
},
{ key: "PARAM", value: "parsed_param" },
{ key: "TOKEN", value: "parsed_token" },
{ key: "BODY_PROP", value: "parsed_body_prop" },
{ key: "ENDPOINT", value: "https://parsed-endpoint.com" },
],
};
const DEFAULT_REQUEST = <HoppRESTRequest>{
v: "1",
name: "name",
method: "GET",
endpoint: "https://example.com",
params: [],
headers: [],
preRequestScript: "",
testScript: "",
auth: {
authActive: false,
authType: "none",
},
body: {
contentType: null,
body: null,
},
};
describe("getEffectiveRESTRequest", () => {
let SAMPLE_REQUEST = Object.assign({}, DEFAULT_REQUEST);
beforeEach(() => {
SAMPLE_REQUEST = Object.assign({}, DEFAULT_REQUEST);
});
test("Endpoint, headers and params with unavailable ENV.", () => {
SAMPLE_REQUEST.headers = [
{
key: "HEADER",
value: "<<UNKNOWN>>",
active: true,
},
];
SAMPLE_REQUEST.params = [
{
key: "PARAM",
value: "<<UNKNOWN>>",
active: true,
},
];
SAMPLE_REQUEST.endpoint = "<<UNKNOWN>>";
expect(
getEffectiveRESTRequest(SAMPLE_REQUEST, DEFAULT_ENV)
).toSubsetEqualRight(<EffectiveHoppRESTRequest>{
effectiveFinalHeaders: [{ active: true, key: "HEADER", value: "" }],
effectiveFinalParams: [{ active: true, key: "PARAM", value: "" }],
effectiveFinalURL: "",
});
});
test("Auth with unavailable ENV.", () => {
SAMPLE_REQUEST.auth = {
authActive: true,
authType: "bearer",
token: "<<UNKNOWN>>",
};
expect(
getEffectiveRESTRequest(SAMPLE_REQUEST, DEFAULT_ENV)
).toSubsetEqualRight(<EffectiveHoppRESTRequest>{
effectiveFinalHeaders: [
{ active: true, key: "Authorization", value: "Bearer " },
],
});
});
test("Body with unavailable ENV.", () => {
SAMPLE_REQUEST.body = {
contentType: "text/plain",
body: "<<UNKNOWN>>",
};
expect(
getEffectiveRESTRequest(SAMPLE_REQUEST, DEFAULT_ENV)
).toSubsetEqualLeft(<HoppCLIError>{
code: "PARSING_ERROR",
});
});
test("Request meta-data with available ENVs.", () => {
SAMPLE_REQUEST.headers = [
{
key: "HEADER",
value: "<<HEADER>>",
active: true,
},
];
SAMPLE_REQUEST.params = [
{
key: "PARAM",
value: "<<PARAM>>",
active: true,
},
];
SAMPLE_REQUEST.endpoint = "<<ENDPOINT>>";
SAMPLE_REQUEST.auth = {
authActive: true,
authType: "bearer",
token: "<<TOKEN>>",
};
SAMPLE_REQUEST.body = {
contentType: "text/plain",
body: "<<BODY_PROP>>",
};
const vars = DEFAULT_ENV.variables;
expect(
getEffectiveRESTRequest(SAMPLE_REQUEST, DEFAULT_ENV)
).toSubsetEqualRight(<EffectiveHoppRESTRequest>{
effectiveFinalHeaders: [
{ active: true, key: "HEADER", value: vars[0].value },
{
active: true,
key: "Authorization",
value: `Bearer ${vars[2].value}`,
},
{ active: true, key: "content-type", value: "text/plain" },
],
effectiveFinalParams: [
{ active: true, key: "PARAM", value: vars[1].value },
],
effectiveFinalURL: vars[4].value,
effectiveFinalBody: vars[3].value,
});
});
});


@@ -0,0 +1,24 @@
import { PreRequestMetrics } from "../../../types/response";
import { getPreRequestMetrics } from "../../../utils/pre-request";
describe("getPreRequestMetrics", () => {
test("With empty errors.", () => {
expect(getPreRequestMetrics([], 1)).toMatchObject(<PreRequestMetrics>{
scripts: { failed: 0, passed: 1 },
});
});
test("With non-empty errors.", () => {
expect(
getPreRequestMetrics(
[
{ code: "REQUEST_ERROR", data: {} },
{ code: "PRE_REQUEST_SCRIPT_ERROR", data: {} },
],
1
)
).toMatchObject(<PreRequestMetrics>{
scripts: { failed: 1, passed: 0 },
});
});
});


@@ -0,0 +1,71 @@
import { HoppRESTRequest } from "@hoppscotch/data";
import { HoppEnvs } from "../../../types/request";
import * as E from "fp-ts/Either";
import { HoppCLIError } from "../../../types/errors";
import { EffectiveHoppRESTRequest } from "../../../interfaces/request";
import { preRequestScriptRunner } from "../../../utils/pre-request";
import "@relmify/jest-fp-ts";
const SAMPLE_ENVS: HoppEnvs = {
global: [],
selected: [],
};
const VALID_PRE_REQUEST_SCRIPT = `
pw.env.set("ENDPOINT","https://example.com");
`;
const INVALID_PRE_REQUEST_SCRIPT = "d";
const SAMPLE_REQUEST: HoppRESTRequest = {
v: "1",
name: "request",
method: "GET",
endpoint: "<<ENDPOINT>>",
params: [],
headers: [],
preRequestScript: "",
testScript: "",
auth: { authActive: false, authType: "none" },
body: {
contentType: null,
body: null,
},
};
describe("preRequestScriptRunner", () => {
let SUCCESS_PRE_REQUEST_RUNNER: E.Either<
HoppCLIError,
EffectiveHoppRESTRequest
>,
FAILURE_PRE_REQUEST_RUNNER: E.Either<
HoppCLIError,
EffectiveHoppRESTRequest
>;
beforeAll(async () => {
SAMPLE_REQUEST.preRequestScript = VALID_PRE_REQUEST_SCRIPT;
SUCCESS_PRE_REQUEST_RUNNER = await preRequestScriptRunner(
SAMPLE_REQUEST,
SAMPLE_ENVS
)();
SAMPLE_REQUEST.preRequestScript = INVALID_PRE_REQUEST_SCRIPT;
FAILURE_PRE_REQUEST_RUNNER = await preRequestScriptRunner(
SAMPLE_REQUEST,
SAMPLE_ENVS
)();
});
test("Parsing of request endpoint with set ENV.", () => {
expect(SUCCESS_PRE_REQUEST_RUNNER).toSubsetEqualRight(<
EffectiveHoppRESTRequest
>{
effectiveFinalURL: "https://example.com",
});
});
test("Failed execution due to unknown variable error.", () => {
expect(FAILURE_PRE_REQUEST_RUNNER).toSubsetEqualLeft(<HoppCLIError>{
code: "PRE_REQUEST_SCRIPT_ERROR",
});
});
});


@@ -0,0 +1,24 @@
import { RequestMetrics } from "../../../types/response";
import { getRequestMetrics } from "../../../utils/request";
describe("getRequestMetrics", () => {
test("With empty errors.", () => {
expect(getRequestMetrics([], 1)).toMatchObject(<RequestMetrics>{
requests: { failed: 0, passed: 1 },
});
});
test("With non-empty errors.", () => {
expect(
getRequestMetrics(
[
{ code: "REQUEST_ERROR", data: {} },
{ code: "PARSING_ERROR", data: {} },
],
1
)
).toMatchObject(<RequestMetrics>{
requests: { failed: 1, passed: 0 },
});
});
});


@@ -0,0 +1,104 @@
import { HoppRESTRequest } from "@hoppscotch/data";
import axios, { AxiosResponse } from "axios";
import { processRequest } from "../../../utils/request";
import { HoppEnvs } from "../../../types/request";
import "@relmify/jest-fp-ts";
jest.mock("axios");
const DEFAULT_REQUEST = <HoppRESTRequest>{
v: "1",
name: "name",
method: "POST",
endpoint: "https://example.com",
params: [],
headers: [],
preRequestScript: "",
testScript: "",
auth: {
authType: "none",
authActive: false,
},
body: {
contentType: null,
body: null,
},
};
const DEFAULT_RESPONSE = <AxiosResponse>{
data: {},
status: 200,
config: {
url: "https://example.com",
supported: true,
method: "POST",
},
statusText: "OK",
headers: [],
};
const DEFAULT_ENVS = <HoppEnvs>{
global: [],
selected: [],
};
describe("processRequest", () => {
let SAMPLE_REQUEST = Object.assign({}, DEFAULT_REQUEST);
beforeEach(() => {
jest.clearAllMocks();
});
afterEach(() => {
SAMPLE_REQUEST = Object.assign({}, DEFAULT_REQUEST);
});
test("With empty envs for 'true' result.", () => {
(axios as unknown as jest.Mock).mockResolvedValue(DEFAULT_RESPONSE);
return expect(
processRequest(SAMPLE_REQUEST, DEFAULT_ENVS, "fake/collection/path")()
).resolves.toMatchObject({
report: {
result: true,
},
});
});
test("With non-empty envs, pre-request-script and test-script.", () => {
SAMPLE_REQUEST.preRequestScript = `
pw.env.set("ENDPOINT", "https://example.com");
`;
SAMPLE_REQUEST.testScript = `
pw.test("check status.", () => {
pw.expect(pw.response.status).toBe(200);
});
`;
(axios as unknown as jest.Mock).mockResolvedValue(DEFAULT_RESPONSE);
return expect(
processRequest(SAMPLE_REQUEST, DEFAULT_ENVS, "fake/collection/path")()
).resolves.toMatchObject({
envs: {
selected: [{ key: "ENDPOINT", value: "https://example.com" }],
},
report: {
result: true,
},
});
});
test("With invalid-pre-request-script.", () => {
SAMPLE_REQUEST.preRequestScript = `invalid`;
(axios as unknown as jest.Mock).mockResolvedValue(DEFAULT_RESPONSE);
return expect(
processRequest(SAMPLE_REQUEST, DEFAULT_ENVS, "fake/request/path")()
).resolves.toMatchObject({
report: { result: false },
});
});
});


@@ -0,0 +1,111 @@
import axios, { AxiosError, AxiosResponse } from "axios";
import { RequestConfig } from "../../../interfaces/request";
import { requestRunner } from "../../../utils/request";
import { RequestRunnerResponse } from "../../../interfaces/response";
import "@relmify/jest-fp-ts";
jest.mock("axios");
describe("requestRunner", () => {
let SAMPLE_REQUEST_CONFIG: RequestConfig = {
url: "https://example.com",
supported: false,
method: "GET",
};
beforeEach(() => {
SAMPLE_REQUEST_CONFIG.supported = false;
SAMPLE_REQUEST_CONFIG.url = "https://example.com";
SAMPLE_REQUEST_CONFIG.method = "GET";
jest.clearAllMocks();
});
afterAll(() => {
jest.clearAllMocks();
});
it("Should handle axios-error with response info.", () => {
jest.spyOn(axios, "isAxiosError").mockReturnValue(true);
(axios as unknown as jest.Mock).mockRejectedValueOnce(<AxiosError>{
name: "name",
message: "message",
config: SAMPLE_REQUEST_CONFIG,
isAxiosError: true,
response: {
data: "data",
status: 404,
statusText: "NOT FOUND",
headers: [],
config: SAMPLE_REQUEST_CONFIG,
},
toJSON: () => Object({}),
});
return expect(
requestRunner(SAMPLE_REQUEST_CONFIG)()
).resolves.toSubsetEqualRight(<RequestRunnerResponse>{
body: "data",
status: 404,
});
});
it("Should handle axios-error for unsupported request.", () => {
jest.spyOn(axios, "isAxiosError").mockReturnValue(true);
(axios as unknown as jest.Mock).mockRejectedValueOnce(<AxiosError>{
name: "name",
message: "message",
config: SAMPLE_REQUEST_CONFIG,
isAxiosError: true,
toJSON: () => Object({}),
});
return expect(
requestRunner(SAMPLE_REQUEST_CONFIG)()
).resolves.toSubsetEqualRight(<RequestRunnerResponse>{
status: 501,
body: {},
});
});
it("Should handle axios-error with request info.", () => {
jest.spyOn(axios, "isAxiosError").mockReturnValue(true);
SAMPLE_REQUEST_CONFIG.supported = true;
(axios as unknown as jest.Mock).mockRejectedValueOnce(<AxiosError>{
name: "name",
message: "message",
config: SAMPLE_REQUEST_CONFIG,
isAxiosError: true,
request: {},
toJSON: () => Object({}),
});
return expect(requestRunner(SAMPLE_REQUEST_CONFIG)()).resolves.toBeLeft();
});
it("Should handle unknown error.", () => {
jest.spyOn(axios, "isAxiosError").mockReturnValue(false);
(axios as unknown as jest.Mock).mockRejectedValueOnce({});
return expect(requestRunner(SAMPLE_REQUEST_CONFIG)()).resolves.toBeLeft();
});
it("Should successfully execute.", () => {
SAMPLE_REQUEST_CONFIG.supported = true;
(axios as unknown as jest.Mock).mockResolvedValue(<AxiosResponse>{
data: "data",
status: 200,
config: SAMPLE_REQUEST_CONFIG,
statusText: "OK",
headers: [],
});
return expect(
requestRunner(SAMPLE_REQUEST_CONFIG)()
).resolves.toSubsetEqualRight(<RequestRunnerResponse>{
status: 200,
body: "data",
method: "GET",
});
});
});


@@ -0,0 +1,55 @@
import { TestMetrics } from "../../../types/response";
import { getTestMetrics } from "../../../utils/test";
describe("getTestMetrics", () => {
test("With empty test-reports and errors.", () => {
expect(getTestMetrics([], 1, [])).toMatchObject(<TestMetrics>{
tests: { passed: 0, failed: 0 },
testSuites: { failed: 0, passed: 0 },
duration: 1,
scripts: { failed: 0, passed: 1 },
});
});
test("With non-empty test-reports and no test-script-error.", () => {
expect(
getTestMetrics(
[
{
descriptor: "descriptor",
expectResults: [],
failed: 0,
passed: 2,
},
{
descriptor: "descriptor",
expectResults: [],
failed: 2,
passed: 1,
},
],
5,
[]
)
).toMatchObject(<TestMetrics>{
tests: { failed: 2, passed: 3 },
testSuites: { failed: 1, passed: 1 },
scripts: { failed: 0, passed: 1 },
duration: 5,
});
});
test("With empty test-reports and some test-script-error.", () => {
expect(
getTestMetrics([], 5, [
{ code: "TEST_SCRIPT_ERROR", data: {} },
{ code: "PRE_REQUEST_SCRIPT_ERROR", data: {} },
])
).toMatchObject(<TestMetrics>{
tests: { failed: 0, passed: 0 },
testSuites: { failed: 0, passed: 0 },
scripts: { failed: 1, passed: 0 },
duration: 5,
});
});
});


@@ -0,0 +1,63 @@
import { TestDescriptor } from "@hoppscotch/js-sandbox";
import { testDescriptorParser, getTestMetrics } from "../../../utils/test";
import { TestReport } from "../../../interfaces/response";
import { TestMetrics } from "../../../types/response";
import "@relmify/jest-fp-ts";
const SAMPLE_TEST_DESCRIPTOR: TestDescriptor = {
descriptor: "Status code is 200",
expectResults: [
{
status: "error",
message: "some_message",
},
],
children: [
{
descriptor: "Check JSON response property",
expectResults: [
{
status: "pass",
message: "some_message",
},
],
children: [],
},
{
descriptor: "Check header property",
expectResults: [
{
status: "fail",
message: "some_message",
},
],
children: [],
},
],
};
describe("testDescriptorParser", () => {
let TEST_REPORT: TestReport[];
beforeAll(async () => {
TEST_REPORT = await testDescriptorParser(SAMPLE_TEST_DESCRIPTOR)();
});
it("Should have 3 tests-report.", () => {
expect(TEST_REPORT).toEqual(expect.any(Array));
expect(TEST_REPORT.length).toStrictEqual(3);
});
it("Should have 1 passed, 2 failed test-cases; 1 passed, 2 failed test-suite.", () => {
expect(getTestMetrics(TEST_REPORT, 1, [])).toMatchObject(<TestMetrics>{
tests: {
failed: 2,
passed: 1,
},
testSuites: {
failed: 2,
passed: 1,
},
});
});
});


@@ -0,0 +1,73 @@
import { TestResponse } from "@hoppscotch/js-sandbox";
import * as E from "fp-ts/Either";
import { TestRunnerRes } from "../../../types/response";
import { HoppCLIError } from "../../../types/errors";
import { getTestMetrics, testRunner } from "../../../utils/test";
import { HoppEnvs } from "../../../types/request";
import "@relmify/jest-fp-ts";
const SAMPLE_ENVS: HoppEnvs = {
global: [],
selected: [
{
key: "DEVBLIN",
value: "set-by-devblin",
},
],
};
const SAMPLE_RESPONSE: TestResponse = {
status: 200,
headers: [],
body: {},
};
describe("testRunner", () => {
let SUCCESS_TEST_RUNNER_RES: E.Either<HoppCLIError, TestRunnerRes>,
FAILURE_TEST_RUNNER_RES: E.Either<HoppCLIError, TestRunnerRes>;
beforeAll(async () => {
SUCCESS_TEST_RUNNER_RES = await testRunner({
testScript: `
// Check status code is 200
pw.test("Status code is 200", ()=> {
pw.expect(pw.response.status).toBe(200);
});
// Check JSON response property
pw.test("Check JSON response property", ()=> {
pw.expect(pw.response.body).toBeType("string")
pw.expect(pw.response.body).toBe("body");
});
`,
envs: SAMPLE_ENVS,
response: SAMPLE_RESPONSE,
})();
FAILURE_TEST_RUNNER_RES = await testRunner({
testScript: "a",
envs: SAMPLE_ENVS,
response: SAMPLE_RESPONSE,
})();
});
it("Should have 2 failed, 1 passed test-cases; 1 failed, 1 passed test-suites.", () => {
expect(SUCCESS_TEST_RUNNER_RES).toBeRight();
if (E.isRight(SUCCESS_TEST_RUNNER_RES)) {
const { duration, testsReport } = SUCCESS_TEST_RUNNER_RES.right;
const { tests, testSuites } = getTestMetrics(testsReport, duration, []);
expect(tests.failed).toStrictEqual(2);
expect(tests.passed).toStrictEqual(1);
expect(testSuites.failed).toStrictEqual(1);
expect(testSuites.passed).toStrictEqual(1);
}
});
it("Should fail to execute with test-script-error.", () => {
expect(FAILURE_TEST_RUNNER_RES).toSubsetEqualLeft(<HoppCLIError>{
code: "TEST_SCRIPT_ERROR",
});
});
});


@@ -0,0 +1,51 @@
[
{
"v": 1,
"name": "tests",
"folders": [],
"requests": [
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE1>>",
"name": "",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE1\", \"devblin_local1\");",
"testScript": "// Check status code is 200\npwd.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"string\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE1>>\"\n}"
}
},
{
"v": "1",
"endpoint": "https://echo.hoppscotch.dio/<<HEADERS_TYPE2>>",
"name": "success",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.setd(\"HEADERS_TYPE2\", \"devblin_local2\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(300);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"object\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE2>>\"\n}"
}
}
]
}
]


@@ -0,0 +1,50 @@
[
{
"v": 1,
"folders": [],
"requests":
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE1>>",
"name": "fail",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE1\", \"devblin_local1\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"string\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE1>>\"\n}"
}
},
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE2>>",
"name": "success",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE2\", \"devblin_local2\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(300);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"object\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE2>>\"\n}"
}
}
]
}
]


@@ -0,0 +1,7 @@
[
{
"v": 1,
"name": "tests",
"folders": []
}
]


@@ -0,0 +1,29 @@
[
{
"v": 1,
"folders": [],
"requests":
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE1>>",
"name": "fail",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE1\", \"devblin_local1\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"string\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE1>>\"\n}"
}
}
]
}
]


@@ -0,0 +1,51 @@
[
{
"v": 1,
"name": "tests",
"folders": [],
"requests": [
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE1>>",
"name": "",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE1\", \"devblin_local1\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"object\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE1>>\"\n}"
}
},
{
"v": "1",
"endpoint": "https://echo.hoppscotch.io/<<HEADERS_TYPE2>>",
"name": "success",
"params": [],
"headers": [],
"method": "GET",
"auth": {
"authType": "none",
"authActive": true,
"addTo": "Headers",
"key": "",
"value": ""
},
"preRequestScript": "pw.env.set(\"HEADERS_TYPE2\", \"devblin_local2\");",
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\n// Check JSON response property\npw.test(\"Check JSON response property\", ()=> {\n pw.expect(pw.response.body.method).toBe(\"GET\");\n pw.expect(pw.response.body.headers).toBeType(\"object\");\n});",
"body": {
"contentType": "application/json",
"body": "{\n\"test\": \"<<HEADERS_TYPE2>>\"\n}"
}
}
]
}
]


@@ -0,0 +1,7 @@
import { ExecException } from "child_process";
export type ExecResponse = {
error: ExecException | null;
stdout: string;
stderr: string;
};


@@ -0,0 +1,26 @@
import { exec } from "child_process";
import { ExecResponse } from "./types";
// Promisified child_process.exec; always resolves, exposing error, stdout and stderr.
export const execAsync = (command: string): Promise<ExecResponse> =>
new Promise((resolve) =>
exec(command, (error, stdout, stderr) => resolve({ error, stdout, stderr }))
);
// Strips ANSI escape sequences (terminal colours, resets) from CLI output.
export const trimAnsi = (target: string) => {
const ansiRegex =
/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;
return target.replace(ansiRegex, "");
};
// Returns the error code, which the CLI prints as the first token of its output.
export const getErrorCode = (out: string) => {
const ansiTrimmedStr = trimAnsi(out);
return ansiTrimmedStr.split(" ")[0];
};
// Resolves a sample collection file under src/__tests__/samples.
export const getTestJsonFilePath = (file: string) => {
const filePath = `${process.cwd()}/src/__tests__/samples/${file}`;
return filePath;
};
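
As exercised by the e2e spec near the top of this diff, the CLI prints its error code as the first token of a coloured line; trimAnsi drops the escape sequences and getErrorCode returns that token. A small usage sketch ("\u001b[31m" and "\u001b[0m" are the ordinary red/reset colour codes; the file path is illustrative):

const colored = "\u001b[31mFILE_NOT_FOUND\u001b[0m ./missing.json";
trimAnsi(colored);     // "FILE_NOT_FOUND ./missing.json"
getErrorCode(colored); // "FILE_NOT_FOUND"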

pnpm-lock.yaml (generated, 503 changes): file diff suppressed because it is too large