feat: added execution duration and updated collection-metrics (#2257)

Deepanshu Dhruw
2022-04-18 22:43:43 +05:30
committed by GitHub
parent 62a5beb52f
commit 06937fe9e8
10 changed files with 400 additions and 74 deletions

View File

@@ -29,6 +29,7 @@ export interface RequestRunnerResponse extends TestResponse {
endpoint: string;
method: Method;
statusText: string;
duration: number;
}
/**
@@ -49,14 +50,14 @@ export interface TestScriptParams {
* @property {string} descriptor Test description.
* @property {ExpectResult[]} expectResults Expected results for each
* test-case.
- * @property {number} failing Total failing test-cases.
- * @property {number} passing Total passing test-cases;
+ * @property {number} failed Total failed test-cases.
+ * @property {number} passed Total passed test-cases.
*/
export interface TestReport {
descriptor: string;
expectResults: ExpectResult[];
-  failing: number;
-  passing: number;
+  failed: number;
+  passed: number;
}
/**

View File

@@ -28,4 +28,5 @@ export type RequestReport = {
tests: TestReport[];
errors: HoppCLIError[];
result: boolean;
duration: { test: number; request: number; preRequest: number };
};

View File

@@ -9,20 +9,74 @@ export type ExpectResult = {
message: string;
};
-export type TestMetrics = {
 /**
- * Total passed and failed test-cases.
+ * Stats describing the number of failed and passed test-cases/test-suites/
+ * test-scripts/pre-request-scripts/requests.
  */
- tests: { failing: number; passing: number };
+export type Stats = {
+  failed: number;
+  passed: number;
+};
+
+export type PreRequestMetrics = {
+  /**
+   * Pre-request-script(s) failed and passed stats.
+   */
+  scripts: Stats;
   /**
-   * Total test-blocks/test-suites passed & failed, calculated
-   * based on test-cases failed/passed with in each test-block.
+   * Time taken (in seconds) to execute pre-request-script(s).
    */
-  testSuites: { failing: number; passing: number };
+  duration: number;
 };
export type RequestMetrics = {
/**
* Request(s) failed and passed stats.
*/
requests: Stats;
/**
* Time taken (in seconds) to execute request(s).
*/
duration: number;
};
export type TestMetrics = {
/**
* Test-cases failed and passed stats.
*/
tests: Stats;
/**
* Test-block(s)/test-suite(s) failed and passed stats.
*/
testSuites: Stats;
/**
* Test script(s) execution failed and passed stats.
*/
scripts: Stats;
/**
* Time taken (in seconds) to execute test-script(s).
*/
duration: number;
};
export type TestRunnerRes = {
/**
* Updated envs after running test-script.
*/
envs: HoppEnvs;
/**
* Describes expected details for each test-suite.
*/
testsReport: TestReport[];
/**
* Time taken (in seconds) to execute the test-script.
*/
duration: number;
};
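
As a quick sketch (not part of the diff; values are hypothetical), the new types compose like this:

const testMetrics: TestMetrics = {
  tests: { failed: 1, passed: 5 },      // individual test-cases
  testSuites: { failed: 1, passed: 2 }, // a suite with >= 1 failed case counts as failed
  scripts: { failed: 0, passed: 2 },    // test-script executions
  duration: 0.012,                      // seconds
};

const requestMetrics: RequestMetrics = {
  requests: { failed: 0, passed: 2 },
  duration: 0.456,
};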

View File

@@ -3,16 +3,31 @@ import * as A from "fp-ts/Array";
import { pipe } from "fp-ts/function";
import { bold } from "chalk";
import { log } from "console";
import round from "lodash/round";
import { HoppCollection, HoppRESTRequest } from "@hoppscotch/data";
import { HoppEnvs, CollectionStack, RequestReport } from "../types/request";
-import { preProcessRequest, processRequest } from "./request";
+import {
+  getRequestMetrics,
+  preProcessRequest,
+  processRequest,
+} from "./request";
import { exceptionColors } from "./getters";
import { TestReport } from "../interfaces/response";
import {
printErrorsReport,
printFailedTestsReport,
printPreRequestMetrics,
printRequestsMetrics,
printTestsMetrics,
} from "./display";
import {
PreRequestMetrics,
RequestMetrics,
TestMetrics,
} from "../types/response";
import { getTestMetrics } from "./test";
import { DEFAULT_DURATION_PRECISION } from "./constants";
import { getPreRequestMetrics } from "./pre-request";
const { WARN, FAIL } = exceptionColors;
/**
@@ -82,21 +97,39 @@ const getCollectionStack = (
);
/**
- * Prints collection-runner-report using test-metrics data in table format.
+ * Prints collection-runner-report using test-metrics, request-metrics and
+ * pre-request-metrics data in pretty format.
 * @param requestsReport Provides data for each request-report, which includes
- * failed-tests-report, errors
+ * the path of each request within the collection JSON file, failed-tests-report,
+ * errors, and total execution duration for requests, pre-request-scripts and test-scripts.
 * @returns True, if the collection runner executed without any errors or failed test-cases.
 * False, if errors occurred or test-cases failed.
*/
export const collectionsRunnerResult = (
requestsReport: RequestReport[]
): boolean => {
const testsReport: TestReport[] = [];
const overallTestMetrics = <TestMetrics>{
tests: { failed: 0, passed: 0 },
testSuites: { failed: 0, passed: 0 },
duration: 0,
scripts: { failed: 0, passed: 0 },
};
const overallRequestMetrics = <RequestMetrics>{
requests: { failed: 0, passed: 0 },
duration: 0,
};
const overallPreRequestMetrics = <PreRequestMetrics>{
scripts: { failed: 0, passed: 0 },
duration: 0,
};
let finalResult = true;
// Printing requests-report details of failed-tests and errors
for (const requestReport of requestsReport) {
-    const { path, tests, errors, result } = requestReport;
+    const { path, tests, errors, result, duration } = requestReport;
const requestDuration = duration.request;
const testsDuration = duration.test;
const preRequestDuration = duration.preRequest;
finalResult = finalResult && result;
@@ -104,10 +137,56 @@ export const collectionsRunnerResult = (
printErrorsReport(path, errors);
testsReport.push.apply(testsReport, tests);
/**
* Extracting current request report's test-metrics and updating
* overall test-metrics.
*/
const testMetrics = getTestMetrics(tests, testsDuration, errors);
overallTestMetrics.duration += testMetrics.duration;
overallTestMetrics.testSuites.failed += testMetrics.testSuites.failed;
overallTestMetrics.testSuites.passed += testMetrics.testSuites.passed;
overallTestMetrics.tests.failed += testMetrics.tests.failed;
overallTestMetrics.tests.passed += testMetrics.tests.passed;
overallTestMetrics.scripts.failed += testMetrics.scripts.failed;
overallTestMetrics.scripts.passed += testMetrics.scripts.passed;
/**
* Extracting current request report's request-metrics and updating
* overall request-metrics.
*/
const requestMetrics = getRequestMetrics(errors, requestDuration);
overallRequestMetrics.duration += requestMetrics.duration;
overallRequestMetrics.requests.failed += requestMetrics.requests.failed;
overallRequestMetrics.requests.passed += requestMetrics.requests.passed;
/**
* Extracting current request report's pre-request-metrics and updating
* overall pre-request-metrics.
*/
const preRequestMetrics = getPreRequestMetrics(errors, preRequestDuration);
overallPreRequestMetrics.duration += preRequestMetrics.duration;
overallPreRequestMetrics.scripts.failed += preRequestMetrics.scripts.failed;
overallPreRequestMetrics.scripts.passed += preRequestMetrics.scripts.passed;
}
-  printTestsMetrics(testsReport);
+  const testMetricsDuration = overallTestMetrics.duration;
+  const requestMetricsDuration = overallRequestMetrics.duration;
+
+  // Rounding off overall test-metrics duration up to DEFAULT_DURATION_PRECISION.
+  overallTestMetrics.duration = round(
+    testMetricsDuration,
+    DEFAULT_DURATION_PRECISION
+  );
+
+  // Rounding off overall request-metrics duration up to DEFAULT_DURATION_PRECISION.
+  overallRequestMetrics.duration = round(
+    requestMetricsDuration,
+    DEFAULT_DURATION_PRECISION
+  );
+
+  printTestsMetrics(overallTestMetrics);
+  printRequestsMetrics(overallRequestMetrics);
+  printPreRequestMetrics(overallPreRequestMetrics);
return finalResult;
};
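
A minimal sketch of the accumulate-then-round pattern used above, with lodash's round (the sample durations are hypothetical):

import round from "lodash/round";

const DEFAULT_DURATION_PRECISION = 3;

// Per-request durations are summed raw, accumulating floating-point noise...
const durations = [0.1234567, 0.2345678, 0.0123456];
const total = durations.reduce((sum, d) => sum + d, 0); // 0.3703701

// ...and rounded once at the end, just before printing.
const overall = round(total, DEFAULT_DURATION_PRECISION); // => 0.37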

View File

@@ -5,3 +5,8 @@ export const responseErrors: ResponseErrorPair = {
408: "NETWORK TIMEOUT",
400: "BAD REQUEST",
} as const;
/**
 * Default decimal precision used to round off HRTime-based durations (in seconds).
*/
export const DEFAULT_DURATION_PRECISION: number = 3;

View File

@@ -4,49 +4,109 @@ import { handleError } from "../handlers/error";
import { RequestConfig } from "../interfaces/request";
import { RequestRunnerResponse, TestReport } from "../interfaces/response";
import { HoppCLIError } from "../types/errors";
-import { exceptionColors, getColorStatusCode } from "./getters";
 import {
-  getFailedExpectedResults,
-  getFailedTestsReport,
-  getTestMetrics,
-} from "./test";
-
-const { FAIL, SUCCESS, BG_INFO } = exceptionColors;
+  PreRequestMetrics,
+  RequestMetrics,
+  TestMetrics,
+} from "../types/response";
+import { exceptionColors, getColorStatusCode } from "./getters";
+import { getFailedExpectedResults, getFailedTestsReport } from "./test";
+
+const { FAIL, SUCCESS, BG_INFO, INFO_BRIGHT } = exceptionColors;
/**
* Prints total failed and passed stats of executed pre-request-scripts.
* @param preRequestMetrics Provides data for total failed and passed
* stats of all executed pre-request-scripts.
*/
export const printPreRequestMetrics = (
preRequestMetrics: PreRequestMetrics
) => {
const {
scripts: { failed, passed },
} = preRequestMetrics;
const failedPreRequestsOut = FAIL(`${failed} failed`);
const passedPreRequestsOut = SUCCESS(`${passed} passed`);
const preRequestsOut = `Pre-Request Scripts: ${failedPreRequestsOut} ${passedPreRequestsOut}\n`;
const message = `\n${preRequestsOut}`;
process.stdout.write(message);
};
/**
 * Prints total failed and passed stats and duration of executed requests.
* @param requestsMetrics Provides data for total duration and total failed and
* passed stats of all executed requests.
*/
export const printRequestsMetrics = (requestsMetrics: RequestMetrics) => {
const {
requests: { failed, passed },
duration,
} = requestsMetrics;
const failedRequestsOut = FAIL(`${failed} failed`);
const passedRequestsOut = SUCCESS(`${passed} passed`);
const requestsOut = `Requests: ${failedRequestsOut} ${passedRequestsOut}\n`;
const requestsDurationOut =
duration > 0 ? `Requests Duration: ${INFO_BRIGHT(`${duration} s`)}\n` : "";
const message = `\n${requestsOut}${requestsDurationOut}`;
process.stdout.write(message);
};
/**
 * Prints test-suites in a pretty way, describing each test-suite's failed/passed
- * status.
+ * status and the duration taken to execute the test-script.
 * @param testsReport Provides details of each test-suite with its tests-report.
+ * @param duration Time taken (in seconds) to execute the test-script.
 */
-export const printTestSuitesReport = (testsReport: TestReport[]) => {
+export const printTestSuitesReport = (
+  testsReport: TestReport[],
+  duration: number
+) => {
+  const durationMsg =
+    duration > 0 ? INFO_BRIGHT(`Ran tests in ${duration} s`) : "";
+
  group();
  for (const testReport of testsReport) {
-    const { failing, descriptor } = testReport;
+    const { failed, descriptor } = testReport;

-    if (failing > 0) {
+    if (failed > 0) {
log(`${FAIL("✖")} ${descriptor}`);
} else {
log(`${SUCCESS("✔")} ${descriptor}`);
}
}
log(durationMsg);
groupEnd();
};
/**
- * Prints total number of test-cases and test-suites passed/failed.
- * @param testsReport Provides testSuites and testCases metrics.
+ * Prints total failed and passed stats for test-suites, test-cases, test-scripts,
+ * and total duration of executed test-scripts.
+ * @param testsMetrics Provides testSuites, testCases metrics, test-script
+ * execution duration and test-script passed/failed stats.
 */
-export const printTestsMetrics = (testsReport: TestReport[]) => {
-  const { testSuites, tests } = getTestMetrics(testsReport);
+export const printTestsMetrics = (testsMetrics: TestMetrics) => {
+  const { testSuites, tests, duration, scripts } = testsMetrics;

-  const failedTestCasesOut = FAIL(`${tests.failing} failing`);
-  const passedTestCasesOut = SUCCESS(`${tests.passing} passing`);
+  const failedTestCasesOut = FAIL(`${tests.failed} failed`);
+  const passedTestCasesOut = SUCCESS(`${tests.passed} passed`);
   const testCasesOut = `Test Cases: ${failedTestCasesOut} ${passedTestCasesOut}\n`;

-  const failedTestSuitesOut = FAIL(`${testSuites.failing} failing`);
-  const passedTestSuitesOut = SUCCESS(`${testSuites.passing} passing`);
+  const failedTestSuitesOut = FAIL(`${testSuites.failed} failed`);
+  const passedTestSuitesOut = SUCCESS(`${testSuites.passed} passed`);
   const testSuitesOut = `Test Suites: ${failedTestSuitesOut} ${passedTestSuitesOut}\n`;

-  const message = `\n${testCasesOut}${testSuitesOut}`;
+  const failedTestScriptsOut = FAIL(`${scripts.failed} failed`);
+  const passedTestScriptsOut = SUCCESS(`${scripts.passed} passed`);
+  const testScriptsOut = `Test Scripts: ${failedTestScriptsOut} ${passedTestScriptsOut}\n`;
+
+  const testsDurationOut =
+    duration > 0 ? `Tests Duration: ${INFO_BRIGHT(`${duration} s`)}\n` : "";
+
+  const message = `\n${testCasesOut}${testSuitesOut}${testScriptsOut}${testsDurationOut}`;
process.stdout.write(message);
};
@@ -81,7 +141,7 @@ export const printFailedTestsReport = (
) => {
const failedTestsReport = getFailedTestsReport(testsReport);
-  // Only printing test-reports with failing test-cases.
+  // Only printing test-reports with failed test-cases.
if (failedTestsReport.length > 0) {
const FAILED_TESTS_PATH = FAIL(`\n${bold(path)} failed tests:`);
group(FAILED_TESTS_PATH);
@@ -110,7 +170,10 @@ export const printFailedTestsReport = (
* Provides methods for printing request-runner's state messages.
*/
export const printRequestRunner = {
-  // Request-runner starting message.
+  /**
+   * Request-runner starting message.
+   * @param requestConfig Provides request's method and url.
+   */
start: (requestConfig: RequestConfig) => {
const METHOD = BG_INFO(` ${requestConfig.method} `);
const ENDPOINT = requestConfig.url;
@@ -118,15 +181,21 @@ export const printRequestRunner = {
process.stdout.write(`${METHOD} ${ENDPOINT}`);
},
-  // Prints response's status, when request-runner executes successfully.
+  /**
+   * Prints response's status, when request-runner executes successfully.
+   * @param requestResponse Provides request's status and execution duration.
+   */
  success: (requestResponse: RequestRunnerResponse) => {
-    const { status, statusText } = requestResponse;
+    const { status, statusText, duration } = requestResponse;
     const statusMsg = getColorStatusCode(status, statusText);
+    const durationMsg = duration > 0 ? INFO_BRIGHT(`(${duration} s)`) : "";

-    process.stdout.write(` ${statusMsg}\n`);
+    process.stdout.write(` ${statusMsg} ${durationMsg}\n`);
  },
},
-  // Prints error message, when request-runner fails to execute.
+  /**
+   * Prints error message, when request-runner fails to execute.
+   */
fail: () => log(FAIL(" ERROR\n⚠ Error running request.")),
};
@@ -134,12 +203,27 @@ export const printRequestRunner = {
* Provides methods for printing test-runner's state messages.
*/
export const printTestRunner = {
/**
* Prints test-runner failed message.
*/
fail: () => log(FAIL("⚠ Error running test-script.")),
/**
* Prints test-runner success message including tests-report.
* @param testsReport List of expected result(s) and metrics for the executed
* test-script.
* @param duration Time taken to execute a test-script.
*/
success: (testsReport: TestReport[], duration: number) =>
printTestSuitesReport(testsReport, duration),
};
/**
* Provides methods for printing pre-request-runner's state messages.
*/
export const printPreRequestRunner = {
/**
* Prints pre-request-runner failed message.
*/
fail: () => log(FAIL("⚠ Error running pre-request-script.")),
};
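
Put together, the printers above produce a summary along these lines (hypothetical run; colors omitted):

 GET  https://example.com/api/users 200 OK (0.231 s)

Test Cases: 1 failed 5 passed
Test Suites: 1 failed 2 passed
Test Scripts: 0 failed 2 passed
Tests Duration: 0.012 s

Requests: 0 failed 2 passed
Requests Duration: 0.456 s

Pre-Request Scripts: 0 failed 2 passed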

View File

@@ -11,6 +11,8 @@ import * as E from "fp-ts/Either";
import * as S from "fp-ts/string";
import * as O from "fp-ts/Option";
import { error } from "../types/errors";
import round from "lodash/round";
import { DEFAULT_DURATION_PRECISION } from "./constants";
/**
* Generates template string (status + statusText) with specific color unicodes
@@ -106,8 +108,24 @@ export const exceptionColors = {
INFO: chalk.blue,
FAIL: chalk.red,
SUCCESS: chalk.green,
INFO_BRIGHT: chalk.blueBright,
BG_WARN: chalk.bgYellow,
BG_FAIL: chalk.bgRed,
BG_INFO: chalk.bgBlue,
BG_SUCCESS: chalk.bgGreen,
};
/**
 * Calculates the duration in seconds for a given end-HRTime of format
 * [seconds, nanoseconds], rounded off to the given decimal precision.
 * @param end End-HRTime of format [seconds, nanoseconds].
 * @param precision Decimal precision to round off the float duration value (DEFAULT = 3).
 * @returns Rounded duration in seconds for the given decimal precision.
*/
export const getDurationInSeconds = (
end: [number, number],
precision: number = DEFAULT_DURATION_PRECISION
) => {
const durationInSeconds = (end[0] * 1e9 + end[1]) / 1e9;
return round(durationInSeconds, precision);
};
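
For example, a sketch using Node's process.hrtime, as elsewhere in this diff (the timed work and result are hypothetical):

import { hrtime } from "process";

const start = hrtime();    // [seconds, nanoseconds] snapshot
// ... run a request ...
const end = hrtime(start); // elapsed [seconds, nanoseconds] relative to start

// (end[0] * 1e9 + end[1]) is the elapsed time in nanoseconds; dividing by
// 1e9 converts back to seconds before rounding to 3 decimal places.
const duration = getDurationInSeconds(end); // e.g. 0.137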

View File

@@ -22,6 +22,7 @@ import { isHoppCLIError } from "./checks";
import { tupleToRecord, arraySort, arrayFlatMap } from "./functions/array";
import { toFormData } from "./mutators";
import { getEffectiveFinalMetaData } from "./getters";
import { PreRequestMetrics } from "../types/response";
/**
* Runs pre-request-script runner over given request which extracts set ENVs and
@@ -266,3 +267,23 @@ function getFinalBodyFromRequest(
)
);
}
/**
 * Get the pre-request-metrics (stats + duration) object based on the existence of
 * the PRE_REQUEST_SCRIPT_ERROR code in the given hopp-error list.
 * @param errors List of errors to check for the PRE_REQUEST_SCRIPT_ERROR code.
 * @param duration Time taken (in seconds) to execute the pre-request-script.
 * @returns Object containing details of the pre-request-script's execution stats,
 * i.e., failed/passed data and duration.
*/
export const getPreRequestMetrics = (
errors: HoppCLIError[],
duration: number
): PreRequestMetrics =>
pipe(
errors,
A.some(({ code }) => code === "PRE_REQUEST_SCRIPT_ERROR"),
(hasPreReqErrors) =>
hasPreReqErrors ? { failed: 1, passed: 0 } : { failed: 0, passed: 1 },
(scripts) => <PreRequestMetrics>{ scripts, duration }
);
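
A usage sketch (the error object is hypothetical and cast for brevity, since only its code is inspected):

import { HoppCLIError } from "../types/errors";

const errors = [{ code: "PRE_REQUEST_SCRIPT_ERROR" }] as HoppCLIError[];

getPreRequestMetrics(errors, 0.021);
// => { scripts: { failed: 1, passed: 0 }, duration: 0.021 }

getPreRequestMetrics([], 0.021);
// => { scripts: { failed: 0, passed: 1 }, duration: 0.021 }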

View File

@@ -7,7 +7,7 @@ import * as E from "fp-ts/Either";
import * as TE from "fp-ts/TaskEither";
import { HoppRESTRequest } from "@hoppscotch/data";
import { responseErrors } from "./constants";
-import { getMetaDataPairs } from "./getters";
+import { getDurationInSeconds, getMetaDataPairs } from "./getters";
import { testRunner, getTestScriptParams, hasFailedTestCases } from "./test";
import { RequestConfig, EffectiveHoppRESTRequest } from "../interfaces/request";
import { RequestRunnerResponse } from "../interfaces/response";
@@ -17,9 +17,11 @@ import {
printPreRequestRunner,
printRequestRunner,
printTestRunner,
-  printTestSuitesReport,
} from "./display";
import { error, HoppCLIError } from "../types/errors";
import { hrtime } from "process";
import { RequestMetrics } from "../types/response";
import { pipe } from "fp-ts/function";
// !NOTE: The `config.supported` checks are temporary until OAuth2 and Multipart Forms are supported
@@ -83,6 +85,8 @@ export const requestRunner =
requestConfig: RequestConfig
): TE.TaskEither<HoppCLIError, RequestRunnerResponse> =>
async () => {
const start = hrtime();
try {
// NOTE: Temporary parsing check for request endpoint.
requestConfig.url = new URL(requestConfig.url ?? "").toString();
@@ -95,6 +99,7 @@ export const requestRunner =
endpoint: getRequest.endpoint(config.url),
method: getRequest.method(config.method),
body: baseResponse.data,
duration: 0,
};
// !NOTE: Temporary `config.supported` check
@@ -104,6 +109,10 @@ export const requestRunner =
runnerResponse.statusText = responseErrors[status];
}
const end = hrtime(start);
const duration = getDurationInSeconds(end);
runnerResponse.duration = duration;
return E.right(runnerResponse);
} catch (e) {
let status: number;
@@ -114,6 +123,7 @@ export const requestRunner =
statusText: responseErrors[400],
status: 400,
headers: [],
duration: 0,
};
if (axios.isAxiosError(e)) {
@@ -133,6 +143,10 @@ export const requestRunner =
return E.left(error({ code: "REQUEST_ERROR", data: E.toError(e) }));
}
const end = hrtime(start);
const duration = getDurationInSeconds(end);
runnerResponse.duration = duration;
return E.right(runnerResponse);
}
@@ -192,6 +206,7 @@ export const processRequest =
tests: [],
errors: [],
result: true,
duration: { test: 0, request: 0, preRequest: 0 },
};
// Initial value for effective-request with default values for properties.
@@ -229,6 +244,7 @@ export const processRequest =
status: 400,
statusText: "",
body: Object(null),
duration: 0,
};
// Executing request-runner.
const requestRunnerRes = await requestRunner(requestConfig)();
@@ -240,6 +256,7 @@ export const processRequest =
printRequestRunner.fail();
} else {
_requestRunnerRes = requestRunnerRes.right;
report.duration.request = _requestRunnerRes.duration;
printRequestRunner.success(_requestRunnerRes);
}
@@ -259,17 +276,19 @@ export const processRequest =
report.errors.push(testRunnerRes.left);
report.result = report.result && false;
} else {
-      const { envs, testsReport } = testRunnerRes.right;
+      const { envs, testsReport, duration } = testRunnerRes.right;
       const _hasFailedTestCases = hasFailedTestCases(testsReport);

-      // Updating report with current tests & result.
+      // Updating report with current tests, result and duration.
       report.tests = testsReport;
       report.result = report.result && _hasFailedTestCases;
+      report.duration.test = duration;

       // Updating resulting envs from test-runner.
       result.envs = envs;

-      printTestSuitesReport(testsReport);
+      // Printing tests-report, when test-runner executes successfully.
+      printTestRunner.success(testsReport, duration);
}
result.report = report;
@@ -319,3 +338,23 @@ export const preProcessRequest = (
}
return tempRequest;
};
/**
 * Get the request-metrics object (stats + duration) based on the existence of the
 * REQUEST_ERROR code in the hopp-errors list.
* @param errors List of errors to check for REQUEST_ERROR.
* @param duration Time taken (in seconds) to execute the request.
* @returns Object containing details of request's execution stats i.e., failed/passed
* data and duration.
*/
export const getRequestMetrics = (
errors: HoppCLIError[],
duration: number
): RequestMetrics =>
pipe(
errors,
A.some(({ code }) => code === "REQUEST_ERROR"),
(hasReqErrors) =>
hasReqErrors ? { failed: 1, passed: 0 } : { failed: 0, passed: 1 },
(requests) => <RequestMetrics>{ requests, duration }
);
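
As with the pre-request variant, the stats are derived purely from the error list; a processed request counts as failed only when a REQUEST_ERROR is present (hypothetical duration):

getRequestMetrics([], 0.231);
// => { requests: { failed: 0, passed: 1 }, duration: 0.231 }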

View File

@@ -1,5 +1,6 @@
import { HoppRESTRequest } from "@hoppscotch/data";
import { execTestScript, TestDescriptor } from "@hoppscotch/js-sandbox";
import { hrtime } from "process";
import { flow, pipe } from "fp-ts/function";
import * as RA from "fp-ts/ReadonlyArray";
import * as A from "fp-ts/Array";
@@ -13,12 +14,13 @@ import {
import { error, HoppCLIError } from "../types/errors";
import { HoppEnvs } from "../types/request";
import { ExpectResult, TestMetrics, TestRunnerRes } from "../types/response";
import { getDurationInSeconds } from "./getters";
/**
* Executes test script and runs testDescriptorParser to generate test-report using
* expected-results, test-status & test-descriptor.
* @param testScriptData Parameters related to test-script function.
- * @returns If executes successfully, we get TestRunnerRes(updated ENVs + test-reports).
+ * @returns If executed successfully, we get TestRunnerRes (updated ENVs, test-reports, duration).
* Else, HoppCLIError with appropriate code & data.
*/
export const testRunner = (
@@ -28,16 +30,22 @@ export const testRunner = (
/**
* Executing test-script.
*/
TE.Do,
TE.bind("start", () => TE.of(hrtime())),
TE.bind("test_response", () =>
pipe(
TE.of(testScriptData),
TE.chain(({ testScript, response, envs }) =>
execTestScript(testScript, envs, response)
)
)
),
/**
* Recursively parsing test-results using test-descriptor-parser
* to generate test-reports.
*/
-    TE.chainTaskK(({ envs, tests }) =>
+    TE.chainTaskK(({ test_response: { tests, envs }, start }) =>
pipe(
tests,
A.map(testDescriptorParser),
@@ -46,7 +54,12 @@ export const testRunner = (
flow(
RA.flatten,
RA.toArray,
-            (testsReport) => <TestRunnerRes>{ envs, testsReport }
+            (testsReport) =>
+              <TestRunnerRes>{
+                envs,
+                testsReport,
+                duration: pipe(start, hrtime, getDurationInSeconds),
+              }
)
)
)
@@ -58,7 +71,6 @@ export const testRunner = (
})
)
);
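
The TE.Do/TE.bind chain above threads the hrtime snapshot alongside the script's result; a stripped-down sketch of the same timing pattern (a generic helper, not the committed code):

import { hrtime } from "process";
import { pipe } from "fp-ts/function";
import * as TE from "fp-ts/TaskEither";

// Snapshot the clock, run the task, then read both values in a later step.
const timed = <E, A>(task: TE.TaskEither<E, A>) =>
  pipe(
    TE.Do,
    TE.bind("start", () => TE.of(hrtime())),
    TE.bind("result", () => task),
    TE.map(({ start, result }) => {
      const [s, ns] = hrtime(start);
      return { result, duration: s + ns / 1e9 }; // seconds
    })
  );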
/**
* Recursive function to parse test-descriptor from nested-children and
* generate tests-report.
@@ -77,19 +89,19 @@ export const testDescriptorParser = (
A.isNonEmpty(expectResults)
? pipe(
expectResults,
-        A.reduce({ failing: 0, passing: 0 }, (prev, { status }) =>
+        A.reduce({ failed: 0, passed: 0 }, (prev, { status }) =>
           /**
            * Incrementing number of passed test-cases if status is "pass",
            * else, incrementing number of failed test-cases.
            */
           status === "pass"
-            ? { failing: prev.failing, passing: prev.passing + 1 }
-            : { failing: prev.failing + 1, passing: prev.passing }
+            ? { failed: prev.failed, passed: prev.passed + 1 }
+            : { failed: prev.failed + 1, passed: prev.passed }
         ),
-        ({ failing, passing }) =>
+        ({ failed, passed }) =>
           <TestReport>{
-            failing,
-            passing,
+            failed,
+            passed,
descriptor,
expectResults,
},
@@ -141,36 +153,48 @@ export const getTestScriptParams = (
* Combines quantitative details (test-cases passed/failed) of each test-report
* to generate TestMetrics object with total test-cases & total test-suites.
* @param testsReport Contains details of each test-report (failed/passed test-cases).
* @param testDuration Time taken (in seconds) to execute the test-script.
* @param errors List of HoppCLIErrors to check for TEST_SCRIPT_ERROR code.
 * @returns Object containing totals of test-cases and test-suites passed/failed,
 * test-script stats and execution duration.
*/
-export const getTestMetrics = (testsReport: TestReport[]): TestMetrics =>
+export const getTestMetrics = (
+  testsReport: TestReport[],
+  testDuration: number,
+  errors: HoppCLIError[]
+): TestMetrics =>
   testsReport.reduce(
-    ({ testSuites, tests }, testReport) => ({
+    ({ testSuites, tests, duration, scripts }, testReport) => ({
       tests: {
-        failing: tests.failing + testReport.failing,
-        passing: tests.passing + testReport.passing,
+        failed: tests.failed + testReport.failed,
+        passed: tests.passed + testReport.passed,
       },
       testSuites: {
-        failing: testSuites.failing + (testReport.failing > 0 ? 1 : 0),
-        passing: testSuites.passing + (testReport.failing === 0 ? 1 : 0),
+        failed: testSuites.failed + (testReport.failed > 0 ? 1 : 0),
+        passed: testSuites.passed + (testReport.failed === 0 ? 1 : 0),
       },
+      scripts: scripts,
+      duration: duration,
     }),
     <TestMetrics>{
-      tests: { failing: 0, passing: 0 },
-      testSuites: { failing: 0, passing: 0 },
+      tests: { failed: 0, passed: 0 },
+      testSuites: { failed: 0, passed: 0 },
+      duration: testDuration,
+      scripts: errors.some(({ code }) => code === "TEST_SCRIPT_ERROR")
+        ? { failed: 1, passed: 0 }
+        : { failed: 0, passed: 1 },
}
);
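
For example (hypothetical reports; an empty error list, so the test-script counts as passed):

getTestMetrics(
  [
    { descriptor: "status is 200", expectResults: [], failed: 0, passed: 2 },
    { descriptor: "body is valid", expectResults: [], failed: 1, passed: 1 },
  ],
  0.012,
  []
);
// => {
//   tests: { failed: 1, passed: 3 },
//   testSuites: { failed: 1, passed: 1 },
//   scripts: { failed: 0, passed: 1 },
//   duration: 0.012,
// }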
/**
 * Filters tests-reports containing at least one failed test-case.
- * @param testsReport Provides "failing" test-cases data.
- * @returns Tests report with one or more test-cases failing.
+ * @param testsReport Provides "failed" test-cases data.
+ * @returns Tests-reports with one or more failed test-cases.
*/
export const getFailedTestsReport = (testsReport: TestReport[]) =>
pipe(
testsReport,
-    A.filter(({ failing }) => failing > 0)
+    A.filter(({ failed }) => failed > 0)
);
/**
@@ -186,12 +210,12 @@ export const getFailedExpectedResults = (expectResults: ExpectResult[]) =>
/**
 * Checks if any of the tests-reports have failed test-cases.
- * @param testsReport Provides "failing" test-cases data.
+ * @param testsReport Provides "failed" test-cases data.
 * @returns True, if all test-cases passed.
 * False, if one or more failed test-cases were found.
*/
export const hasFailedTestCases = (testsReport: TestReport[]) =>
pipe(
testsReport,
-    A.every(({ failing }) => failing === 0)
+    A.every(({ failed }) => failed === 0)
);