feat(cli): access team workspace collections and environments (#4095)
Co-authored-by: nivedin <nivedinp@gmail.com>
This commit is contained in:
@@ -1,193 +0,0 @@
|
||||
/*
|
||||
* For a detailed explanation regarding each configuration property, visit:
|
||||
* https://jestjs.io/docs/configuration
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
// All imported modules in your tests should be mocked automatically
|
||||
// automock: false,
|
||||
|
||||
// Stop running tests after `n` failures
|
||||
// bail: 0,
|
||||
|
||||
// The directory where Jest should store its cached dependency information
|
||||
// cacheDirectory: "/tmp/jest_rs",
|
||||
|
||||
// Automatically clear mock calls, instances and results before every test
|
||||
clearMocks: true,
|
||||
|
||||
// Indicates whether the coverage information should be collected while executing the test
|
||||
// collectCoverage: true,
|
||||
|
||||
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||
// collectCoverageFrom: undefined,
|
||||
|
||||
// The directory where Jest should output its coverage files
|
||||
// coverageDirectory: undefined,
|
||||
|
||||
// An array of regexp pattern strings used to skip coverage collection
|
||||
// coveragePathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// Indicates which provider should be used to instrument code for coverage
|
||||
// coverageProvider: "babel",
|
||||
|
||||
// A list of reporter names that Jest uses when writing coverage reports
|
||||
// coverageReporters: [
|
||||
// "json",
|
||||
// "text",
|
||||
// "lcov",
|
||||
// "clover"
|
||||
// ],
|
||||
|
||||
// An object that configures minimum threshold enforcement for coverage results
|
||||
// coverageThreshold: undefined,
|
||||
|
||||
// A path to a custom dependency extractor
|
||||
// dependencyExtractor: undefined,
|
||||
|
||||
// Make calling deprecated APIs throw helpful error messages
|
||||
// errorOnDeprecated: false,
|
||||
|
||||
// Force coverage collection from ignored files using an array of glob patterns
|
||||
// forceCoverageMatch: [],
|
||||
|
||||
// A path to a module which exports an async function that is triggered once before all test suites
|
||||
// globalSetup: undefined,
|
||||
|
||||
// A path to a module which exports an async function that is triggered once after all test suites
|
||||
// globalTeardown: undefined,
|
||||
|
||||
// A set of global variables that need to be available in all test environments
|
||||
// globals: {
|
||||
// 'ts-jest': {
|
||||
// useESM: true,
|
||||
// },
|
||||
// },
|
||||
|
||||
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||
// maxWorkers: "50%",
|
||||
|
||||
// An array of directory names to be searched recursively up from the requiring module's location
|
||||
// moduleDirectories: [
|
||||
// "node_modules"
|
||||
// ],
|
||||
|
||||
// An array of file extensions your modules use
|
||||
moduleFileExtensions: ["js", "ts", "json"],
|
||||
|
||||
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||
// moduleNameMapper: {
|
||||
// '^(\\.{1,2}/.*)\\.js$': '$1',
|
||||
// },
|
||||
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
// modulePathIgnorePatterns: [],
|
||||
|
||||
// Activates notifications for test results
|
||||
// notify: false,
|
||||
|
||||
// An enum that specifies notification mode. Requires { notify: true }
|
||||
// notifyMode: "failure-change",
|
||||
|
||||
// A preset that is used as a base for Jest's configuration
|
||||
preset: "ts-jest/presets/js-with-babel",
|
||||
|
||||
// Run tests from one or more projects
|
||||
// projects: undefined,
|
||||
|
||||
// Use this configuration option to add custom reporters to Jest
|
||||
// reporters: undefined,
|
||||
|
||||
// Automatically reset mock state before every test
|
||||
// resetMocks: false,
|
||||
|
||||
// Reset the module registry before running each individual test
|
||||
// resetModules: false,
|
||||
|
||||
// A path to a custom resolver
|
||||
// resolver: undefined,
|
||||
|
||||
// Automatically restore mock state and implementation before every test
|
||||
// restoreMocks: false,
|
||||
|
||||
// The root directory that Jest should scan for tests and modules within
|
||||
// rootDir: undefined,
|
||||
|
||||
// A list of paths to directories that Jest should use to search for files in
|
||||
// roots: [
|
||||
// "<rootDir>"
|
||||
// ],
|
||||
|
||||
// Allows you to use a custom runner instead of Jest's default test runner
|
||||
// runner: "jest-runner",
|
||||
|
||||
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||
// setupFiles: [],
|
||||
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||
setupFilesAfterEnv: ["./jest.setup.ts"],
|
||||
|
||||
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||
// slowTestThreshold: 5,
|
||||
|
||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||
// snapshotSerializers: [],
|
||||
|
||||
// The test environment that will be used for testing
|
||||
testEnvironment: "node",
|
||||
|
||||
// Options that will be passed to the testEnvironment
|
||||
// testEnvironmentOptions: {},
|
||||
|
||||
// Adds a location field to test results
|
||||
// testLocationInResults: false,
|
||||
|
||||
// The glob patterns Jest uses to detect test files
|
||||
testMatch: [
|
||||
// "**/__tests__/**/*.[jt]s?(x)",
|
||||
"**/src/__tests__/commands/**/*.*.ts",
|
||||
],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||
testPathIgnorePatterns: ["/node_modules/", "/dist/"],
|
||||
|
||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||
// testRegex: [],
|
||||
|
||||
// This option allows the use of a custom results processor
|
||||
// testResultsProcessor: undefined,
|
||||
|
||||
// This option allows use of a custom test runner
|
||||
// testRunner: "jest-circus/runner",
|
||||
|
||||
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
||||
// testURL: "http://localhost",
|
||||
|
||||
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
||||
// timers: "real",
|
||||
|
||||
// A map from regular expressions to paths to transformers
|
||||
transform: {
|
||||
"^.+\\.ts$": "ts-jest",
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||
// transformIgnorePatterns: [
|
||||
// "/node_modules/",
|
||||
// "\\.pnp\\.[^\\/]+$"
|
||||
// ],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||
// unmockedModulePathPatterns: undefined,
|
||||
|
||||
// Indicates whether each individual test should be reported during the run
|
||||
verbose: true,
|
||||
|
||||
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||
// watchPathIgnorePatterns: [],
|
||||
|
||||
// Whether to use watchman for file crawling
|
||||
// watchman: true,
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
import "@relmify/jest-fp-ts";
|
||||
@@ -20,7 +20,7 @@
|
||||
"debugger": "node debugger.js 9999",
|
||||
"prepublish": "pnpm exec tsup",
|
||||
"prettier-format": "prettier --config .prettierrc 'src/**/*.ts' --write",
|
||||
"test": "pnpm run build && jest && rm -rf dist",
|
||||
"test": "pnpm run build && vitest run && rm -rf dist",
|
||||
"do-typecheck": "pnpm exec tsc --noEmit",
|
||||
"do-test": "pnpm test"
|
||||
},
|
||||
@@ -55,15 +55,13 @@
|
||||
"@hoppscotch/js-sandbox": "workspace:^",
|
||||
"@relmify/jest-fp-ts": "2.1.1",
|
||||
"@swc/core": "1.4.2",
|
||||
"@types/jest": "29.5.12",
|
||||
"@types/lodash-es": "4.17.12",
|
||||
"@types/qs": "6.9.12",
|
||||
"fp-ts": "2.16.2",
|
||||
"jest": "29.7.0",
|
||||
"prettier": "3.2.5",
|
||||
"qs": "6.11.2",
|
||||
"ts-jest": "29.1.2",
|
||||
"tsup": "8.0.2",
|
||||
"typescript": "5.3.3"
|
||||
"typescript": "5.3.3",
|
||||
"vitest": "0.34.6"
|
||||
}
|
||||
}
|
||||
|
||||
15
packages/hoppscotch-cli/setupFiles.ts
Normal file
15
packages/hoppscotch-cli/setupFiles.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
// Vitest doesn't work without globals
|
||||
// Ref: https://github.com/relmify/jest-fp-ts/issues/11
|
||||
|
||||
import decodeMatchers from "@relmify/jest-fp-ts/dist/decodeMatchers";
|
||||
import eitherMatchers from "@relmify/jest-fp-ts/dist/eitherMatchers";
|
||||
import optionMatchers from "@relmify/jest-fp-ts/dist/optionMatchers";
|
||||
import theseMatchers from "@relmify/jest-fp-ts/dist/theseMatchers";
|
||||
import eitherOrTheseMatchers from "@relmify/jest-fp-ts/dist/eitherOrTheseMatchers";
|
||||
import { expect } from "vitest";
|
||||
|
||||
expect.extend(decodeMatchers.matchers);
|
||||
expect.extend(eitherMatchers.matchers);
|
||||
expect.extend(optionMatchers.matchers);
|
||||
expect.extend(theseMatchers.matchers);
|
||||
expect.extend(eitherOrTheseMatchers.matchers);
|
||||
@@ -1,345 +0,0 @@
|
||||
import { ExecException } from "child_process";
|
||||
|
||||
import { HoppErrorCode } from "../../types/errors";
|
||||
import { runCLI, getErrorCode, getTestJsonFilePath } from "../utils";
|
||||
|
||||
describe("Test `hopp test <file>` command:", () => {
|
||||
describe("Argument parsing", () => {
|
||||
test("Errors with the code `INVALID_ARGUMENT` for not supplying enough arguments", async () => {
|
||||
const args = "test";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` for an invalid command", async () => {
|
||||
const args = "invalid-arg";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Supplied collection export file validations", () => {
|
||||
test("Errors with the code `FILE_NOT_FOUND` if the supplied collection export file doesn't exist", async () => {
|
||||
const args = "test notfound.json";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("FILE_NOT_FOUND");
|
||||
});
|
||||
|
||||
test("Errors with the code UNKNOWN_ERROR if the supplied collection export file content isn't valid JSON", async () => {
|
||||
const args = `test ${getTestJsonFilePath("malformed-coll.json", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("UNKNOWN_ERROR");
|
||||
});
|
||||
|
||||
test("Errors with the code `MALFORMED_COLLECTION` if the supplied collection export file content is malformed", async () => {
|
||||
const args = `test ${getTestJsonFilePath("malformed-coll-2.json", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("MALFORMED_COLLECTION");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_FILE_TYPE` if the supplied collection export file doesn't end with the `.json` extension", async () => {
|
||||
const args = `test ${getTestJsonFilePath("notjson-coll.txt", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_FILE_TYPE");
|
||||
});
|
||||
|
||||
test("Fails if the collection file includes scripts with incorrect API usage and failed assertions", async () => {
|
||||
const args = `test ${getTestJsonFilePath("fails-coll.json", "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).not.toBeNull();
|
||||
expect(error).toMatchObject(<ExecException>{
|
||||
code: 1,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Versioned entities", () => {
|
||||
describe("Collections & Requests", () => {
|
||||
const testFixtures = [
|
||||
{ fileName: "coll-v1-req-v0.json", collVersion: 1, reqVersion: 0 },
|
||||
{ fileName: "coll-v1-req-v1.json", collVersion: 1, reqVersion: 1 },
|
||||
{ fileName: "coll-v2-req-v2.json", collVersion: 2, reqVersion: 2 },
|
||||
{ fileName: "coll-v2-req-v3.json", collVersion: 2, reqVersion: 3 },
|
||||
];
|
||||
|
||||
testFixtures.forEach(({ collVersion, fileName, reqVersion }) => {
|
||||
test(`Successfully processes a supplied collection export file where the collection is based on the "v${collVersion}" schema and the request following the "v${reqVersion}" schema`, async () => {
|
||||
const args = `test ${getTestJsonFilePath(fileName, "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Environments", () => {
|
||||
const testFixtures = [
|
||||
{ fileName: "env-v0.json", version: 0 },
|
||||
{ fileName: "env-v1.json", version: 1 },
|
||||
];
|
||||
|
||||
testFixtures.forEach(({ fileName, version }) => {
|
||||
test(`Successfully processes the supplied collection and environment export files where the environment is based on the "v${version}" schema`, async () => {
|
||||
const ENV_PATH = getTestJsonFilePath(fileName, "environment");
|
||||
const args = `test ${getTestJsonFilePath("sample-coll.json", "collection")} --env ${ENV_PATH}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully processes a supplied collection export file of the expected format", async () => {
|
||||
const args = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Successfully inherits headers and authorization set at the root collection", async () => {
|
||||
const args = `test ${getTestJsonFilePath(
|
||||
"collection-level-headers-auth-coll.json",
|
||||
"collection"
|
||||
)}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Persists environment variables set in the pre-request script for consumption in the test script", async () => {
|
||||
const args = `test ${getTestJsonFilePath(
|
||||
"pre-req-script-env-var-persistence-coll.json",
|
||||
"collection"
|
||||
)}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test `hopp test <file> --env <file>` command:", () => {
|
||||
describe("Supplied environment export file validations", () => {
|
||||
const VALID_TEST_ARGS = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` if no file is supplied", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_FILE_TYPE` if the supplied environment export file doesn't end with the `.json` extension", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env ${getTestJsonFilePath(
|
||||
"notjson-coll.txt",
|
||||
"collection"
|
||||
)}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_FILE_TYPE");
|
||||
});
|
||||
|
||||
test("Errors with the code `FILE_NOT_FOUND` if the supplied environment export file doesn't exist", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env notfound.json`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("FILE_NOT_FOUND");
|
||||
});
|
||||
|
||||
test("Errors with the code `MALFORMED_ENV_FILE` on supplying a malformed environment export file", async () => {
|
||||
const ENV_PATH = getTestJsonFilePath(
|
||||
"malformed-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `${VALID_TEST_ARGS} --env ${ENV_PATH}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("MALFORMED_ENV_FILE");
|
||||
});
|
||||
|
||||
test("Errors with the code `BULK_ENV_FILE` on supplying an environment export file based on the bulk environment export format", async () => {
|
||||
const ENV_PATH = getTestJsonFilePath("bulk-envs.json", "environment");
|
||||
const args = `${VALID_TEST_ARGS} --env ${ENV_PATH}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("BULK_ENV_FILE");
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully resolves values from the supplied environment export file", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"env-flag-tests-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENV_PATH = getTestJsonFilePath("env-flag-envs.json", "environment");
|
||||
const args = `test ${TESTS_PATH} --env ${ENV_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Successfully resolves environment variables referenced in the request body", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Works with shorth `-e` flag", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"env-flag-tests-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENV_PATH = getTestJsonFilePath("env-flag-envs.json", "environment");
|
||||
const args = `test ${TESTS_PATH} -e ${ENV_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
describe("Secret environment variables", () => {
|
||||
jest.setTimeout(100000);
|
||||
|
||||
// Reads secret environment values from system environment
|
||||
test("Successfully picks the values for secret environment variables from `process.env` and persists the variables set from the pre-request script", async () => {
|
||||
const env = {
|
||||
...process.env,
|
||||
secretBearerToken: "test-token",
|
||||
secretBasicAuthUsername: "test-user",
|
||||
secretBasicAuthPassword: "test-pass",
|
||||
secretQueryParamValue: "secret-query-param-value",
|
||||
secretBodyValue: "secret-body-value",
|
||||
secretHeaderValue: "secret-header-value",
|
||||
};
|
||||
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath("secret-envs.json", "environment");
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args, { env });
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
// Prefers values specified in the environment export file over values set in the system environment
|
||||
test("Successfully picks the values for secret environment variables set directly in the environment export file and persists the environment variables set from the pre-request script", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-supplied-values-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args);
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
// Values set from the scripting context takes the highest precedence
|
||||
test("Setting values for secret environment variables from the pre-request script overrides values set at the supplied environment export file", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-supplied-values-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args);
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Persists secret environment variable values set from the pre-request script for consumption in the request and post-request script context", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-scripting-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-scripting-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test `hopp test <file> --delay <delay_in_ms>` command:", () => {
|
||||
const VALID_TEST_ARGS = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` on not supplying a delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` on supplying an invalid delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay 'NaN'`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Successfully performs delayed request execution for a valid delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay 1`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Works with the short `-d` flag", async () => {
|
||||
const args = `${VALID_TEST_ARGS} -d 1`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
509
packages/hoppscotch-cli/src/__tests__/e2e/commands/test.spec.ts
Normal file
509
packages/hoppscotch-cli/src/__tests__/e2e/commands/test.spec.ts
Normal file
@@ -0,0 +1,509 @@
|
||||
import { ExecException } from "child_process";
|
||||
import { describe, expect, test } from "vitest";
|
||||
|
||||
import { HoppErrorCode } from "../../../types/errors";
|
||||
import { getErrorCode, getTestJsonFilePath, runCLI } from "../../utils";
|
||||
|
||||
describe("hopp test [options] <file_path_or_id>", () => {
|
||||
const VALID_TEST_ARGS = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
|
||||
|
||||
describe("Test `hopp test <file_path_or_id>` command:", () => {
|
||||
describe("Argument parsing", () => {
|
||||
test("Errors with the code `INVALID_ARGUMENT` for not supplying enough arguments", async () => {
|
||||
const args = "test";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` for an invalid command", async () => {
|
||||
const args = "invalid-arg";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Supplied collection export file validations", () => {
|
||||
test("Errors with the code `FILE_NOT_FOUND` if the supplied collection export file doesn't exist", async () => {
|
||||
const args = "test notfound.json";
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("FILE_NOT_FOUND");
|
||||
});
|
||||
|
||||
test("Errors with the code UNKNOWN_ERROR if the supplied collection export file content isn't valid JSON", async () => {
|
||||
const args = `test ${getTestJsonFilePath("malformed-coll.json", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("UNKNOWN_ERROR");
|
||||
});
|
||||
|
||||
test("Errors with the code `MALFORMED_COLLECTION` if the supplied collection export file content is malformed", async () => {
|
||||
const args = `test ${getTestJsonFilePath("malformed-coll-2.json", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("MALFORMED_COLLECTION");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_FILE_TYPE` if the supplied collection export file doesn't end with the `.json` extension", async () => {
|
||||
const args = `test ${getTestJsonFilePath("notjson-coll.txt", "collection")}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_FILE_TYPE");
|
||||
});
|
||||
|
||||
test("Fails if the collection file includes scripts with incorrect API usage and failed assertions", async () => {
|
||||
const args = `test ${getTestJsonFilePath("fails-coll.json", "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).not.toBeNull();
|
||||
expect(error).toMatchObject(<ExecException>{
|
||||
code: 1,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Versioned entities", () => {
|
||||
describe("Collections & Requests", () => {
|
||||
const testFixtures = [
|
||||
{ fileName: "coll-v1-req-v0.json", collVersion: 1, reqVersion: 0 },
|
||||
{ fileName: "coll-v1-req-v1.json", collVersion: 1, reqVersion: 1 },
|
||||
{ fileName: "coll-v2-req-v2.json", collVersion: 2, reqVersion: 2 },
|
||||
{ fileName: "coll-v2-req-v3.json", collVersion: 2, reqVersion: 3 },
|
||||
];
|
||||
|
||||
testFixtures.forEach(({ collVersion, fileName, reqVersion }) => {
|
||||
test(`Successfully processes a supplied collection export file where the collection is based on the "v${collVersion}" schema and the request following the "v${reqVersion}" schema`, async () => {
|
||||
const args = `test ${getTestJsonFilePath(fileName, "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Environments", () => {
|
||||
const testFixtures = [
|
||||
{ fileName: "env-v0.json", version: 0 },
|
||||
{ fileName: "env-v1.json", version: 1 },
|
||||
];
|
||||
|
||||
testFixtures.forEach(({ fileName, version }) => {
|
||||
test(`Successfully processes the supplied collection and environment export files where the environment is based on the "v${version}" schema`, async () => {
|
||||
const ENV_PATH = getTestJsonFilePath(fileName, "environment");
|
||||
const args = `test ${getTestJsonFilePath("sample-coll.json", "collection")} --env ${ENV_PATH}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully processes a supplied collection export file of the expected format", async () => {
|
||||
const args = `test ${getTestJsonFilePath("passes-coll.json", "collection")}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Successfully inherits/overrides authorization and headers specified at the root collection at deeply nested collections", async () => {
|
||||
const args = `test ${getTestJsonFilePath(
|
||||
"collection-level-auth-headers-coll.json",
|
||||
"collection"
|
||||
)}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test(
|
||||
"Successfully inherits/overrides authorization and headers at each level with multiple child collections",
|
||||
async () => {
|
||||
const args = `test ${getTestJsonFilePath(
|
||||
"multiple-child-collections-auth-headers-coll.json",
|
||||
"collection"
|
||||
)}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
|
||||
test("Persists environment variables set in the pre-request script for consumption in the test script", async () => {
|
||||
const args = `test ${getTestJsonFilePath(
|
||||
"pre-req-script-env-var-persistence-coll.json",
|
||||
"collection"
|
||||
)}`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test `hopp test <file_path_or_id> --env <file_path_or_id>` command:", () => {
|
||||
describe("Supplied environment export file validations", () => {
|
||||
describe("Argument parsing", () => {
|
||||
test("Errors with the code `INVALID_ARGUMENT` if no file is supplied", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_FILE_TYPE` if the supplied environment export file doesn't end with the `.json` extension", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env ${getTestJsonFilePath(
|
||||
"notjson-coll.txt",
|
||||
"collection"
|
||||
)}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_FILE_TYPE");
|
||||
});
|
||||
|
||||
test("Errors with the code `FILE_NOT_FOUND` if the supplied environment export file doesn't exist", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --env notfound.json`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("FILE_NOT_FOUND");
|
||||
});
|
||||
|
||||
test("Errors with the code `MALFORMED_ENV_FILE` on supplying a malformed environment export file", async () => {
|
||||
const ENV_PATH = getTestJsonFilePath(
|
||||
"malformed-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `${VALID_TEST_ARGS} --env ${ENV_PATH}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("MALFORMED_ENV_FILE");
|
||||
});
|
||||
|
||||
test("Errors with the code `BULK_ENV_FILE` on supplying an environment export file based on the bulk environment export format", async () => {
|
||||
const ENV_PATH = getTestJsonFilePath("bulk-envs.json", "environment");
|
||||
const args = `${VALID_TEST_ARGS} --env ${ENV_PATH}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("BULK_ENV_FILE");
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully resolves values from the supplied environment export file", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"env-flag-tests-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENV_PATH = getTestJsonFilePath("env-flag-envs.json", "environment");
|
||||
const args = `test ${TESTS_PATH} --env ${ENV_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Successfully resolves environment variables referenced in the request body", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Works with shorth `-e` flag", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"env-flag-tests-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENV_PATH = getTestJsonFilePath("env-flag-envs.json", "environment");
|
||||
const args = `test ${TESTS_PATH} -e ${ENV_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
describe(
|
||||
"Secret environment variables",
|
||||
() => {
|
||||
// Reads secret environment values from system environment
|
||||
test("Successfully picks the values for secret environment variables from `process.env` and persists the variables set from the pre-request script", async () => {
|
||||
const env = {
|
||||
...process.env,
|
||||
secretBearerToken: "test-token",
|
||||
secretBasicAuthUsername: "test-user",
|
||||
secretBasicAuthPassword: "test-pass",
|
||||
secretQueryParamValue: "secret-query-param-value",
|
||||
secretBodyValue: "secret-body-value",
|
||||
secretHeaderValue: "secret-header-value",
|
||||
};
|
||||
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args, { env });
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
// Prefers values specified in the environment export file over values set in the system environment
|
||||
test("Successfully picks the values for secret environment variables set directly in the environment export file and persists the environment variables set from the pre-request script", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-supplied-values-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args);
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
// Values set from the scripting context takes the highest precedence
|
||||
test("Setting values for secret environment variables from the pre-request script overrides values set at the supplied environment export file", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-supplied-values-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error, stdout } = await runCLI(args);
|
||||
|
||||
expect(stdout).toContain(
|
||||
"https://httpbin.org/basic-auth/*********/*********"
|
||||
);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Persists secret environment variable values set from the pre-request script for consumption in the request and post-request script context", async () => {
|
||||
const COLL_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-scripting-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENVS_PATH = getTestJsonFilePath(
|
||||
"secret-envs-persistence-scripting-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${COLL_PATH} --env ${ENVS_PATH}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
});
|
||||
|
||||
describe("Test `hopp test <file_path_or_id> --delay <delay_in_ms>` command:", () => {
|
||||
describe("Argument parsing", () => {
|
||||
test("Errors with the code `INVALID_ARGUMENT` on not supplying a delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` on supplying an invalid delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay 'NaN'`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully performs delayed request execution for a valid delay value", async () => {
|
||||
const args = `${VALID_TEST_ARGS} --delay 1`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Works with the short `-d` flag", async () => {
|
||||
const args = `${VALID_TEST_ARGS} -d 1`;
|
||||
const { error } = await runCLI(args);
|
||||
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// Future TODO: Enable once a proper e2e test environment is set up locally
|
||||
describe.skip("Test `hopp test <file_path_or_id> --env <file_path_or_id> --token <access_token> --server <server_url>` command:", () => {
|
||||
const {
|
||||
REQ_BODY_ENV_VARS_COLL_ID,
|
||||
COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID,
|
||||
REQ_BODY_ENV_VARS_ENVS_ID,
|
||||
PERSONAL_ACCESS_TOKEN,
|
||||
} = process.env;
|
||||
|
||||
if (
|
||||
!REQ_BODY_ENV_VARS_COLL_ID ||
|
||||
!COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID ||
|
||||
!REQ_BODY_ENV_VARS_ENVS_ID ||
|
||||
!PERSONAL_ACCESS_TOKEN
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const SERVER_URL = "https://stage-shc.hoppscotch.io/backend";
|
||||
|
||||
describe("Argument parsing", () => {
|
||||
test("Errors with the code `INVALID_ARGUMENT` on not supplying a value for the `--token` flag", async () => {
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --token`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ARGUMENT` on not supplying a value for the `--server` flag", async () => {
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --server`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ARGUMENT");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Workspace access validations", () => {
|
||||
const INVALID_COLLECTION_ID = "invalid-coll-id";
|
||||
const INVALID_ENVIRONMENT_ID = "invalid-env-id";
|
||||
const INVALID_ACCESS_TOKEN = "invalid-token";
|
||||
|
||||
test("Errors with the code `TOKEN_INVALID` if the supplied access token is invalid", async () => {
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --token ${INVALID_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("TOKEN_INVALID");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ID` if the supplied collection ID is invalid", async () => {
|
||||
const args = `test ${INVALID_COLLECTION_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ID");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_ID` if the supplied environment ID is invalid", async () => {
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${INVALID_ENVIRONMENT_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_ID");
|
||||
});
|
||||
|
||||
test("Errors with the code `INVALID_SERVER_URL` if not supplying a valid SH instance server URL", async () => {
|
||||
// FE URL of the staging SHC instance
|
||||
const INVALID_SERVER_URL = "https://stage-shc.hoppscotch.io";
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${INVALID_SERVER_URL}`;
|
||||
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("INVALID_SERVER_URL");
|
||||
});
|
||||
|
||||
test("Errors with the code `SERVER_CONNECTION_REFUSED` if supplying an SH instance server URL that doesn't follow URL semantics", async () => {
|
||||
const INVALID_URL = "invalid-url";
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${INVALID_URL}`;
|
||||
const { stderr } = await runCLI(args);
|
||||
|
||||
const out = getErrorCode(stderr);
|
||||
expect(out).toBe<HoppErrorCode>("SERVER_CONNECTION_REFUSED");
|
||||
});
|
||||
});
|
||||
|
||||
test("Successfully retrieves a collection with the ID", async () => {
|
||||
const args = `test ${COLLECTION_LEVEL_HEADERS_AUTH_COLL_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Successfully retrieves collections and environments from a workspace using their respective IDs", async () => {
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Supports specifying collection file path along with environment ID", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const args = `test ${TESTS_PATH} --env ${REQ_BODY_ENV_VARS_ENVS_ID} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Supports specifying environment file path along with collection ID", async () => {
|
||||
const ENV_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${REQ_BODY_ENV_VARS_COLL_ID} --env ${ENV_PATH} --token ${PERSONAL_ACCESS_TOKEN} --server ${SERVER_URL}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test("Supports specifying both collection and environment file paths", async () => {
|
||||
const TESTS_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-coll.json",
|
||||
"collection"
|
||||
);
|
||||
const ENV_PATH = getTestJsonFilePath(
|
||||
"req-body-env-vars-envs.json",
|
||||
"environment"
|
||||
);
|
||||
const args = `test ${TESTS_PATH} --env ${ENV_PATH} --token ${PERSONAL_ACCESS_TOKEN}`;
|
||||
|
||||
const { error } = await runCLI(args);
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,655 @@
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1f86hv000010f8szcfya0t",
|
||||
"name": "Multiple child collections with authorization & headers set at each level",
|
||||
"folders": [
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fjgah000110f8a5bs68gd",
|
||||
"name": "folder-1",
|
||||
"folders": [
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fjwmm000410f8l1gkkr1a",
|
||||
"name": "folder-11",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-11-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-1\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-11",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fjyxm000510f8pv90dt43",
|
||||
"name": "folder-12",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-12-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-12-request",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-12-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(undefined)\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-12-request\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-12-request\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-12",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-12",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fk1cv000610f88kc3aupy",
|
||||
"name": "folder-13",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"key": "api-key",
|
||||
"addTo": "HEADERS",
|
||||
"value": "api-key-value",
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true,
|
||||
"grantTypeInfo": {
|
||||
"token": "",
|
||||
"isPKCE": true,
|
||||
"clientID": "sfasfa",
|
||||
"password": "",
|
||||
"username": "",
|
||||
"grantType": "AUTHORIZATION_CODE",
|
||||
"authEndpoint": "asfafs",
|
||||
"clientSecret": "sfasfasf",
|
||||
"tokenEndpoint": "asfa",
|
||||
"codeVerifierMethod": "S256"
|
||||
}
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-13-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header-Request-Level",
|
||||
"value": "New custom header added at the folder-13-request level",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-13-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-13\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-13-request\")\n pw.expect(pw.response.body.headers[\"Custom-Header-Request-Level\"]).toBe(\"New custom header added at the folder-13-request level\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"token": "test-token",
|
||||
"authType": "bearer",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-13",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-13",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-1-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-1\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-1",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fjk9o000210f8j0573pls",
|
||||
"name": "folder-2",
|
||||
"folders": [
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fk516000710f87sfpw6bo",
|
||||
"name": "folder-21",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-21-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(undefined)\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-2\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-21",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fk72t000810f8gfwkpi5y",
|
||||
"name": "folder-22",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-22-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-22-request",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-22-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(undefined)\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-22-request\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-22-request\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-22",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-22",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fk95g000910f8bunhaoo8",
|
||||
"name": "folder-23",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-23-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header-Request-Level",
|
||||
"value": "New custom header added at the folder-23-request level",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-23-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-23\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-23-request\")\n pw.expect(pw.response.body.headers[\"Custom-Header-Request-Level\"]).toBe(\"New custom header added at the folder-23-request level\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"token": "test-token",
|
||||
"authType": "bearer",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-23",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-23",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-2-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-2-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(undefined)\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-2-request\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-2",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1fjmlq000310f86o4d3w2o",
|
||||
"name": "folder-3",
|
||||
"folders": [
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1iwq0p003e10f8u8zg0p85",
|
||||
"name": "folder-31",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-31-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-3\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-31",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1izut7003m10f894ip59zg",
|
||||
"name": "folder-32",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-32-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-32-request",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-32-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(undefined)\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-32-request\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-32-request\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "none",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-32",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-32",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"v": 2,
|
||||
"id": "clx1j2ka9003q10f8cdbzpgpg",
|
||||
"name": "folder-33",
|
||||
"folders": [],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-33-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header-Request-Level",
|
||||
"value": "New custom header added at the folder-33-request level",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Overriden at folder-33-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-33\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Overriden at folder-33-request\")\n pw.expect(pw.response.body.headers[\"Custom-Header-Request-Level\"]).toBe(\"New custom header added at the folder-33-request level\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"token": "test-token",
|
||||
"authType": "bearer",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-33",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-33",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "folder-3-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header-Request-Level",
|
||||
"value": "New custom header added at the folder-3-request level",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "key",
|
||||
"value": "Set at folder-3-request",
|
||||
"active": true
|
||||
}
|
||||
],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value overriden at folder-3\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n pw.expect(pw.response.body.headers[\"Key\"]).toBe(\"Set at folder-3-request\")\n pw.expect(pw.response.body.headers[\"Custom-Header-Request-Level\"]).toBe(\"New custom header added at the folder-3-request level\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"key": "testuser",
|
||||
"addTo": "HEADERS",
|
||||
"value": "testpass",
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value overriden at folder-3",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"requests": [
|
||||
{
|
||||
"v": "4",
|
||||
"auth": {
|
||||
"authType": "inherit",
|
||||
"authActive": true
|
||||
},
|
||||
"body": {
|
||||
"body": null,
|
||||
"contentType": null
|
||||
},
|
||||
"name": "root-collection-request",
|
||||
"method": "GET",
|
||||
"params": [],
|
||||
"headers": [],
|
||||
"endpoint": "https://httpbin.org/get",
|
||||
"testScript": "// Check status code is 200\npw.test(\"Status code is 200\", ()=> {\n pw.expect(pw.response.status).toBe(200);\n});\n\npw.test(\"Successfully inherits authorization/header set at the parent collection level\", () => {\n pw.expect(pw.response.body.headers[\"Authorization\"]).toBe(\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\")\n \n pw.expect(pw.response.body.headers[\"Custom-Header\"]).toBe(\"Custom header value set at the root collection\")\n pw.expect(pw.response.body.headers[\"Inherited-Header\"]).toBe(\"Inherited header at all levels\")\n})",
|
||||
"preRequestScript": "",
|
||||
"requestVariables": []
|
||||
}
|
||||
],
|
||||
"auth": {
|
||||
"authType": "basic",
|
||||
"password": "testpass",
|
||||
"username": "testuser",
|
||||
"authActive": true
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"key": "Custom-Header",
|
||||
"value": "Custom header value set at the root collection",
|
||||
"active": true
|
||||
},
|
||||
{
|
||||
"key": "Inherited-Header",
|
||||
"value": "Inherited header at all levels",
|
||||
"active": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,758 @@
|
||||
import {
|
||||
CollectionSchemaVersion,
|
||||
Environment,
|
||||
EnvironmentSchemaVersion,
|
||||
HoppCollection,
|
||||
} from "@hoppscotch/data";
|
||||
|
||||
import {
|
||||
WorkspaceCollection,
|
||||
WorkspaceEnvironment,
|
||||
} from "../../../utils/workspace-access";
|
||||
|
||||
export const WORKSPACE_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK: WorkspaceCollection[] =
|
||||
[
|
||||
{
|
||||
id: "clx1ldkzs005t10f8rp5u60q7",
|
||||
data: '{"auth":{"token":"BearerToken","authType":"bearer","authActive":true},"headers":[{"key":"X-Test-Header","value":"Set at root collection","active":true}]}',
|
||||
title: "CollectionA",
|
||||
parentID: null,
|
||||
folders: [
|
||||
{
|
||||
id: "clx1ldkzs005v10f86b9wx4yc",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[]}',
|
||||
title: "FolderA",
|
||||
parentID: "clx1ldkzs005t10f8rp5u60q7",
|
||||
folders: [
|
||||
{
|
||||
id: "clx1ldkzt005x10f8i0u5lzgj",
|
||||
data: '{"auth":{"key":"key","addTo":"HEADERS","value":"test-key","authType":"api-key","authActive":true},"headers":[{"key":"X-Test-Header","value":"Overriden at FolderB","active":true}]}',
|
||||
title: "FolderB",
|
||||
parentID: "clx1ldkzs005v10f86b9wx4yc",
|
||||
folders: [
|
||||
{
|
||||
id: "clx1ldkzu005z10f880zx17bg",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[]}',
|
||||
title: "FolderC",
|
||||
parentID: "clx1ldkzt005x10f8i0u5lzgj",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ldkzu006010f820vzy13v",
|
||||
collectionID: "clx1ldkzu005z10f880zx17bg",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "RequestD",
|
||||
request:
|
||||
'{"v":"3","auth":{"authType":"basic","password":"password","username":"username","authActive":true},"body":{"body":null,"contentType":null},"name":"RequestD","method":"GET","params":[],"headers":[{"key":"X-Test-Header","value":"Overriden at RequestD","active":true}],"endpoint":"https://echo.hoppscotch.io","testScript":"pw.test(\\"Overrides auth and headers set at the parent folder\\", ()=> {\\n pw.expect(pw.response.body.headers[\\"x-test-header\\"]).toBe(\\"Overriden at RequestD\\");\\n pw.expect(pw.response.body.headers[\\"authorization\\"]).toBe(\\"Basic dXNlcm5hbWU6cGFzc3dvcmQ=\\");\\n});","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ldkzt005y10f82dl8ni8d",
|
||||
collectionID: "clx1ldkzt005x10f8i0u5lzgj",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "RequestC",
|
||||
request:
|
||||
'{"v":"3","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"RequestC","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"pw.test(\\"Correctly inherits auth and headers from the parent folder\\", ()=> {\\n pw.expect(pw.response.body.headers[\\"x-test-header\\"]).toBe(\\"Overriden at FolderB\\");\\n pw.expect(pw.response.body.headers[\\"key\\"]).toBe(\\"test-key\\");\\n});","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ldkzs005w10f8pc2v2boh",
|
||||
collectionID: "clx1ldkzs005v10f86b9wx4yc",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "RequestB",
|
||||
request:
|
||||
'{"v":"3","id":"clpttpdq00003qp16kut6doqv","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"RequestB","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"pw.test(\\"Correctly inherits auth and headers from the parent folder\\", ()=> {\\n pw.expect(pw.response.body.headers[\\"x-test-header\\"]).toBe(\\"Set at root collection\\");\\n pw.expect(pw.response.body.headers[\\"authorization\\"]).toBe(\\"Bearer BearerToken\\");\\n});","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ldkzs005u10f82xd5ho3l",
|
||||
collectionID: "clx1ldkzs005t10f8rp5u60q7",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "RequestA",
|
||||
request:
|
||||
'{"v":"5","id":"clpttpdq00003qp16kut6doqv","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"RequestA","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"pw.test(\\"Correctly inherits auth and headers from the root collection\\", ()=> {\\n pw.expect(pw.response.body.headers[\\"x-test-header\\"]).toBe(\\"Set at root collection\\");\\n pw.expect(pw.response.body.headers[\\"authorization\\"]).toBe(\\"Bearer BearerToken\\");\\n});","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
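// Expected HoppCollection tree corresponding to the deeply nested workspace payload above (auth/headers parsed out of the stringified `data`/`request` fields).
|
||||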
export const TRANSFORMED_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK: HoppCollection[] =
|
||||
[
|
||||
{
|
||||
v: CollectionSchemaVersion,
|
||||
id: "clx1ldkzs005t10f8rp5u60q7",
|
||||
name: "CollectionA",
|
||||
folders: [
|
||||
{
|
||||
v: CollectionSchemaVersion,
|
||||
id: "clx1ldkzs005v10f86b9wx4yc",
|
||||
name: "FolderA",
|
||||
folders: [
|
||||
{
|
||||
v: CollectionSchemaVersion,
|
||||
id: "clx1ldkzt005x10f8i0u5lzgj",
|
||||
name: "FolderB",
|
||||
folders: [
|
||||
{
|
||||
v: CollectionSchemaVersion,
|
||||
id: "clx1ldkzu005z10f880zx17bg",
|
||||
name: "FolderC",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
v: "3",
|
||||
auth: {
|
||||
authType: "basic",
|
||||
password: "password",
|
||||
username: "username",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "RequestD",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [
|
||||
{
|
||||
key: "X-Test-Header",
|
||||
value: "Overriden at RequestD",
|
||||
active: true,
|
||||
},
|
||||
],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript:
|
||||
'pw.test("Overrides auth and headers set at the parent folder", ()=> {\n pw.expect(pw.response.body.headers["x-test-header"]).toBe("Overriden at RequestD");\n pw.expect(pw.response.body.headers["authorization"]).toBe("Basic dXNlcm5hbWU6cGFzc3dvcmQ=");\n});',
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
v: "3",
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "RequestC",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript:
|
||||
'pw.test("Correctly inherits auth and headers from the parent folder", ()=> {\n pw.expect(pw.response.body.headers["x-test-header"]).toBe("Overriden at FolderB");\n pw.expect(pw.response.body.headers["key"]).toBe("test-key");\n});',
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
key: "key",
|
||||
addTo: "HEADERS",
|
||||
value: "test-key",
|
||||
authType: "api-key",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [
|
||||
{
|
||||
key: "X-Test-Header",
|
||||
value: "Overriden at FolderB",
|
||||
active: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
v: "3",
|
||||
id: "clpttpdq00003qp16kut6doqv",
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "RequestB",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript:
|
||||
'pw.test("Correctly inherits auth and headers from the parent folder", ()=> {\n pw.expect(pw.response.body.headers["x-test-header"]).toBe("Set at root collection");\n pw.expect(pw.response.body.headers["authorization"]).toBe("Bearer BearerToken");\n});',
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
v: "5",
|
||||
id: "clpttpdq00003qp16kut6doqv",
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "RequestA",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript:
|
||||
'pw.test("Correctly inherits auth and headers from the root collection", ()=> {\n pw.expect(pw.response.body.headers["x-test-header"]).toBe("Set at root collection");\n pw.expect(pw.response.body.headers["authorization"]).toBe("Bearer BearerToken");\n});',
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
token: "BearerToken",
|
||||
authType: "bearer",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [
|
||||
{
|
||||
key: "X-Test-Header",
|
||||
value: "Set at root collection",
|
||||
active: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
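// Workspace payload where multiple sibling folders each set a different auth/header combination (inherit, none, bearer, basic) at every level.
|
||||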
export const WORKSPACE_MULTIPLE_CHILD_COLLECTIONS_WITH_AUTH_HEADERS_MOCK: WorkspaceCollection[] =
|
||||
[
|
||||
{
|
||||
id: "clx1f86hv000010f8szcfya0t",
|
||||
data: '{"auth":{"authType":"basic","password":"testpass","username":"testuser","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value set at the root collection","active":true},{"key":"Inherited-Header","value":"Inherited header at all levels","active":true}]}',
|
||||
title:
|
||||
"Multiple child collections with authorization & headers set at each level",
|
||||
parentID: null,
|
||||
folders: [
|
||||
{
|
||||
id: "clx1fjgah000110f8a5bs68gd",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-1","active":true}]}',
|
||||
title: "folder-1",
|
||||
parentID: "clx1f86hv000010f8szcfya0t",
|
||||
folders: [
|
||||
{
|
||||
id: "clx1fjwmm000410f8l1gkkr1a",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[{"key":"key","value":"Set at folder-11","active":true}]}',
|
||||
title: "folder-11",
|
||||
parentID: "clx1fjgah000110f8a5bs68gd",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1gjo1q000p10f8tc3x2u50",
|
||||
collectionID: "clx1fjwmm000410f8l1gkkr1a",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-11-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","password":"testpass","username":"testuser","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-11-request","method":"GET","params":[],"headers":[],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-1\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fjyxm000510f8pv90dt43",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-12","active":true},{"key":"key","value":"Set at folder-12","active":true}]}',
|
||||
title: "folder-12",
|
||||
parentID: "clx1fjgah000110f8a5bs68gd",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1glkt5000u10f88q51ioj8",
|
||||
collectionID: "clx1fjyxm000510f8pv90dt43",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-12-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"none","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-12-request","method":"GET","params":[],"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-12-request","active":true},{"key":"key","value":"Overriden at folder-12-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(undefined)\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-12-request\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-12-request\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fk1cv000610f88kc3aupy",
|
||||
data: '{"auth":{"token":"test-token","authType":"bearer","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-13","active":true},{"key":"key","value":"Set at folder-13","active":true}]}',
|
||||
title: "folder-13",
|
||||
parentID: "clx1fjgah000110f8a5bs68gd",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1grfir001510f8c4ttiazq",
|
||||
collectionID: "clx1fk1cv000610f88kc3aupy",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-13-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"key":"api-key","addTo":"HEADERS","value":"api-key-value","authType":"basic","password":"testpass","username":"testuser","authActive":true,"grantTypeInfo":{"token":"","isPKCE":true,"clientID":"sfasfa","password":"","username":"","grantType":"AUTHORIZATION_CODE","authEndpoint":"asfafs","clientSecret":"sfasfasf","tokenEndpoint":"asfa","codeVerifierMethod":"S256"}},"body":{"body":null,"contentType":null},"name":"folder-13-request","method":"GET","params":[],"headers":[{"key":"Custom-Header-Request-Level","value":"New custom header added at the folder-13-request level","active":true},{"key":"key","value":"Overriden at folder-13-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-13\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-13-request\\")\\n pw.expect(pw.response.body.headers[\\"Custom-Header-Request-Level\\"]).toBe(\\"New custom header added at the folder-13-request level\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1gebpx000k10f8andzw36z",
|
||||
collectionID: "clx1fjgah000110f8a5bs68gd",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-1-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-1-request","method":"GET","params":[],"headers":[],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-1\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fjk9o000210f8j0573pls",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-2","active":true}]}',
|
||||
title: "folder-2",
|
||||
parentID: "clx1f86hv000010f8szcfya0t",
|
||||
folders: [
|
||||
{
|
||||
id: "clx1fk516000710f87sfpw6bo",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[{"key":"key","value":"Set at folder-21","active":true}]}',
|
||||
title: "folder-21",
|
||||
parentID: "clx1fjk9o000210f8j0573pls",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1hfegy001j10f8ywbozysk",
|
||||
collectionID: "clx1fk516000710f87sfpw6bo",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-21-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-21-request","method":"GET","params":[],"headers":[],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(undefined)\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-2\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fk72t000810f8gfwkpi5y",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-22","active":true},{"key":"key","value":"Set at folder-22","active":true}]}',
|
||||
title: "folder-22",
|
||||
parentID: "clx1fjk9o000210f8j0573pls",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ibfre002k10f86brcb2aa",
|
||||
collectionID: "clx1fk72t000810f8gfwkpi5y",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-22-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"none","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-22-request","method":"GET","params":[],"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-22-request","active":true},{"key":"key","value":"Overriden at folder-22-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(undefined)\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-22-request\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-22-request\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fk95g000910f8bunhaoo8",
|
||||
data: '{"auth":{"token":"test-token","authType":"bearer","password":"testpass","username":"testuser","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-23","active":true},{"key":"key","value":"Set at folder-23","active":true}]}',
|
||||
title: "folder-23",
|
||||
parentID: "clx1fjk9o000210f8j0573pls",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1if4w6002n10f8xe4gnf0w",
|
||||
collectionID: "clx1fk95g000910f8bunhaoo8",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-23-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"basic","password":"testpass","username":"testuser","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-23-request","method":"GET","params":[],"headers":[{"key":"Custom-Header-Request-Level","value":"New custom header added at the folder-23-request level","active":true},{"key":"key","value":"Overriden at folder-23-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-23\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-23-request\\")\\n pw.expect(pw.response.body.headers[\\"Custom-Header-Request-Level\\"]).toBe(\\"New custom header added at the folder-23-request level\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1hbtdj001g10f8y71y869s",
|
||||
collectionID: "clx1fjk9o000210f8j0573pls",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-2-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"none","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-2-request","method":"GET","params":[],"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-2-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(undefined)\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-2-request\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1fjmlq000310f86o4d3w2o",
|
||||
data: '{"auth":{"key":"testuser","addTo":"HEADERS","value":"testpass","authType":"basic","password":"testpass","username":"testuser","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-3","active":true}]}',
|
||||
title: "folder-3",
|
||||
parentID: "clx1f86hv000010f8szcfya0t",
|
||||
folders: [
|
||||
{
|
||||
id: "clx1iwq0p003e10f8u8zg0p85",
|
||||
data: '{"auth":{"authType":"inherit","authActive":true},"headers":[{"key":"key","value":"Set at folder-31","active":true}]}',
|
||||
title: "folder-31",
|
||||
parentID: "clx1fjmlq000310f86o4d3w2o",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1ixdiv003f10f8j6ni375m",
|
||||
collectionID: "clx1iwq0p003e10f8u8zg0p85",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-31-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-31-request","method":"GET","params":[],"headers":[],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-3\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1izut7003m10f894ip59zg",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-32","active":true},{"key":"key","value":"Set at folder-32","active":true}]}',
|
||||
title: "folder-32",
|
||||
parentID: "clx1fjmlq000310f86o4d3w2o",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1j01dg003n10f8e34khl6v",
|
||||
collectionID: "clx1izut7003m10f894ip59zg",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-32-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"none","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-32-request","method":"GET","params":[],"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-32-request","active":true},{"key":"key","value":"Overriden at folder-32-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(undefined)\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-32-request\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-32-request\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1j2ka9003q10f8cdbzpgpg",
|
||||
data: '{"auth":{"token":"test-token","authType":"bearer","password":"testpass","username":"testuser","authActive":true},"headers":[{"key":"Custom-Header","value":"Custom header value overriden at folder-33","active":true},{"key":"key","value":"Set at folder-33","active":true}]}',
|
||||
title: "folder-33",
|
||||
parentID: "clx1fjmlq000310f86o4d3w2o",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1j361a003r10f8oly5m2n6",
|
||||
collectionID: "clx1j2ka9003q10f8cdbzpgpg",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-33-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"basic","password":"testpass","username":"testuser","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-33-request","method":"GET","params":[],"headers":[{"key":"Custom-Header-Request-Level","value":"New custom header added at the folder-33-request level","active":true},{"key":"key","value":"Overriden at folder-33-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-33\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Overriden at folder-33-request\\")\\n pw.expect(pw.response.body.headers[\\"Custom-Header-Request-Level\\"]).toBe(\\"New custom header added at the folder-33-request level\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1jk1nq004y10f8fhtxvs02",
|
||||
collectionID: "clx1fjmlq000310f86o4d3w2o",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "folder-3-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"basic","password":"testpass","username":"testuser","authActive":true},"body":{"body":null,"contentType":null},"name":"folder-3-request","method":"GET","params":[],"headers":[{"key":"Custom-Header-Request-Level","value":"New custom header added at the folder-3-request level","active":true},{"key":"key","value":"Set at folder-3-request","active":true}],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits/overrides authorization/header set at the parent collection level with new header addition\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value overriden at folder-3\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n pw.expect(pw.response.body.headers[\\"Key\\"]).toBe(\\"Set at folder-3-request\\")\\n pw.expect(pw.response.body.headers[\\"Custom-Header-Request-Level\\"]).toBe(\\"New custom header added at the folder-3-request level\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1g2pnv000b10f80f0oyp79",
|
||||
collectionID: "clx1f86hv000010f8szcfya0t",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "root-collection-request",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"root-collection-request","method":"GET","params":[],"headers":[],"endpoint":"https://httpbin.org/get","testScript":"// Check status code is 200\\npw.test(\\"Status code is 200\\", ()=> {\\n pw.expect(pw.response.status).toBe(200);\\n});\\n\\npw.test(\\"Successfully inherits authorization/header set at the parent collection level\\", () => {\\n pw.expect(pw.response.body.headers[\\"Authorization\\"]).toBe(\\"Basic dGVzdHVzZXI6dGVzdHBhc3M=\\")\\n \\n pw.expect(pw.response.body.headers[\\"Custom-Header\\"]).toBe(\\"Custom header value set at the root collection\\")\\n pw.expect(pw.response.body.headers[\\"Inherited-Header\\"]).toBe(\\"Inherited header at all levels\\")\\n})","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
// Collections with the `data` field set to `null` at certain levels

|
||||
export const WORKSPACE_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK: WorkspaceCollection[] =
|
||||
[
|
||||
{
|
||||
id: "clx1kxvao005m10f8luqivrf1",
|
||||
data: null,
|
||||
title: "Collection with no authorization/headers set",
|
||||
parentID: null,
|
||||
folders: [
|
||||
{
|
||||
id: "clx1kygjt005n10f8m1nkhjux",
|
||||
data: null,
|
||||
title: "folder-1",
|
||||
parentID: "clx1kxvao005m10f8luqivrf1",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1kz2gk005p10f8ll7ztbnj",
|
||||
collectionID: "clx1kygjt005n10f8m1nkhjux",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "req1",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"req1","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1kym98005o10f8qg17t9o2",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Set at folder-2","active":true}]}',
|
||||
title: "folder-2",
|
||||
parentID: "clx1kxvao005m10f8luqivrf1",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx1kz3m7005q10f8lw3v09l4",
|
||||
collectionID: "clx1kym98005o10f8qg17t9o2",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
title: "req2",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"req2","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "clx1l2bu6005r10f8daynohge",
|
||||
data: null,
|
||||
title: "folder-3",
|
||||
parentID: "clx1kxvao005m10f8luqivrf1",
|
||||
folders: [],
|
||||
requests: [],
|
||||
},
|
||||
{
|
||||
id: "clx1l2eaz005s10f8loetbbeb",
|
||||
data: '{"auth":{"authType":"none","authActive":true},"headers":[{"key":"Custom-Header","value":"Set at folder-4","active":true}]}',
|
||||
title: "folder-4",
|
||||
parentID: "clx1kxvao005m10f8luqivrf1",
|
||||
folders: [],
|
||||
requests: [],
|
||||
},
|
||||
],
|
||||
requests: [],
|
||||
},
|
||||
];
|
||||
|
||||
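// Expected transformation: levels with `data: null` fall back to `authType: "inherit"` and an empty `headers` array.
|
||||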
export const TRANSFORMED_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK: HoppCollection[] =
|
||||
[
|
||||
{
|
||||
v: 2,
|
||||
id: "clx1kxvao005m10f8luqivrf1",
|
||||
name: "Collection with no authorization/headers set",
|
||||
folders: [
|
||||
{
|
||||
v: 2,
|
||||
id: "clx1kygjt005n10f8m1nkhjux",
|
||||
name: "folder-1",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
v: "4",
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "req1",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript: "",
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [],
|
||||
},
|
||||
{
|
||||
v: 2,
|
||||
id: "clx1kym98005o10f8qg17t9o2",
|
||||
name: "folder-2",
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
v: "4",
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
body: null,
|
||||
contentType: null,
|
||||
},
|
||||
name: "req2",
|
||||
method: "GET",
|
||||
params: [],
|
||||
headers: [],
|
||||
endpoint: "https://echo.hoppscotch.io",
|
||||
testScript: "",
|
||||
preRequestScript: "",
|
||||
requestVariables: [],
|
||||
},
|
||||
],
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [
|
||||
{
|
||||
key: "Custom-Header",
|
||||
value: "Set at folder-2",
|
||||
active: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
v: 2,
|
||||
id: "clx1l2bu6005r10f8daynohge",
|
||||
name: "folder-3",
|
||||
folders: [],
|
||||
requests: [],
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [],
|
||||
},
|
||||
{
|
||||
v: 2,
|
||||
id: "clx1l2eaz005s10f8loetbbeb",
|
||||
name: "folder-4",
|
||||
folders: [],
|
||||
requests: [],
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [
|
||||
{
|
||||
key: "Custom-Header",
|
||||
value: "Set at folder-4",
|
||||
active: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
requests: [],
|
||||
auth: {
|
||||
authType: "inherit",
|
||||
authActive: true,
|
||||
},
|
||||
headers: [],
|
||||
},
|
||||
];
|
||||
|
||||
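// Environment variables include nested (`<<fullName>>`) and chained (`<<recursiveVarX>>` -> `<<recursiveVarY>>` -> `<<salutation>>`) references.
|
||||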
export const WORKSPACE_ENVIRONMENT_MOCK: WorkspaceEnvironment = {
|
||||
id: "clwudd68q00079rufju8uo3on",
|
||||
teamID: "clws3hg58000011o8h07glsb1",
|
||||
name: "Response body sample",
|
||||
variables: [
|
||||
{
|
||||
key: "firstName",
|
||||
value: "John",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "lastName",
|
||||
value: "Doe",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "id",
|
||||
value: "7",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "fullName",
|
||||
value: "<<firstName>> <<lastName>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "recursiveVarX",
|
||||
value: "<<recursiveVarY>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "recursiveVarY",
|
||||
value: "<<salutation>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "salutation",
|
||||
value: "Hello",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "greetText",
|
||||
value: "<<salutation>> <<fullName>>",
|
||||
secret: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
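// Expected result after transformation: the same variables with the environment schema version attached.
|
||||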
export const TRANSFORMED_ENVIRONMENT_MOCK: Environment = {
|
||||
v: EnvironmentSchemaVersion,
|
||||
id: "clwudd68q00079rufju8uo3on",
|
||||
name: "Response body sample",
|
||||
variables: [
|
||||
{
|
||||
key: "firstName",
|
||||
value: "John",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "lastName",
|
||||
value: "Doe",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "id",
|
||||
value: "7",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "fullName",
|
||||
value: "<<firstName>> <<lastName>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "recursiveVarX",
|
||||
value: "<<recursiveVarY>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "recursiveVarY",
|
||||
value: "<<salutation>>",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "salutation",
|
||||
value: "Hello",
|
||||
secret: false,
|
||||
},
|
||||
{
|
||||
key: "greetText",
|
||||
value: "<<salutation>> <<fullName>>",
|
||||
secret: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
389
packages/hoppscotch-cli/src/__tests__/unit/getters.spec.ts
Normal file
@@ -0,0 +1,389 @@
|
||||
import axios, { AxiosError, AxiosResponse } from "axios";
|
||||
import fs from "fs/promises";
|
||||
import { describe, expect, test, vi } from "vitest";
|
||||
|
||||
import {
|
||||
CollectionSchemaVersion,
|
||||
Environment,
|
||||
HoppCollection,
|
||||
getDefaultRESTRequest,
|
||||
} from "@hoppscotch/data";
|
||||
|
||||
import { DEFAULT_DURATION_PRECISION } from "../../utils/constants";
|
||||
import {
|
||||
getDurationInSeconds,
|
||||
getEffectiveFinalMetaData,
|
||||
getResourceContents,
|
||||
} from "../../utils/getters";
|
||||
import * as mutators from "../../utils/mutators";
|
||||
|
||||
import * as workspaceAccessHelpers from "../../utils/workspace-access";
|
||||
|
||||
describe("getters", () => {
|
||||
describe("getDurationInSeconds", () => {
|
||||
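// [seconds, nanoseconds] tuples rounded off to the given precision.
|
||||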
const testDurations = [
|
||||
{ end: [1, 111111111], precision: 1, expected: 1.1 },
|
||||
{ end: [2, 333333333], precision: 2, expected: 2.33 },
|
||||
{
|
||||
end: [3, 555555555],
|
||||
precision: DEFAULT_DURATION_PRECISION,
|
||||
expected: 3.556,
|
||||
},
|
||||
{ end: [4, 777777777], precision: 4, expected: 4.7778 },
|
||||
];
|
||||
|
||||
test.each(testDurations)(
|
||||
"($end.0 s + $end.1 ns) rounded-off to $expected",
|
||||
({ end, precision, expected }) => {
|
||||
expect(getDurationInSeconds(end as [number, number], precision)).toBe(
|
||||
expected
|
||||
);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe("getEffectiveFinalMetaData", () => {
|
||||
const DEFAULT_ENV = <Environment>{
|
||||
name: "name",
|
||||
variables: [{ key: "PARAM", value: "parsed_param" }],
|
||||
};
|
||||
|
||||
test("Empty list of meta-data", () => {
|
||||
expect(getEffectiveFinalMetaData([], DEFAULT_ENV)).toSubsetEqualRight([]);
|
||||
});
|
||||
|
||||
test("Non-empty active list of meta-data with unavailable ENV", () => {
|
||||
expect(
|
||||
getEffectiveFinalMetaData(
|
||||
[
|
||||
{
|
||||
active: true,
|
||||
key: "<<UNKNOWN_KEY>>",
|
||||
value: "<<UNKNOWN_VALUE>>",
|
||||
},
|
||||
],
|
||||
DEFAULT_ENV
|
||||
)
|
||||
).toSubsetEqualRight([{ active: true, key: "", value: "" }]);
|
||||
});
|
||||
|
||||
test("Inactive list of meta-data", () => {
|
||||
expect(
|
||||
getEffectiveFinalMetaData(
|
||||
[{ active: false, key: "KEY", value: "<<PARAM>>" }],
|
||||
DEFAULT_ENV
|
||||
)
|
||||
).toSubsetEqualRight([]);
|
||||
});
|
||||
|
||||
test("Active list of meta-data", () => {
|
||||
expect(
|
||||
getEffectiveFinalMetaData(
|
||||
[{ active: true, key: "PARAM", value: "<<PARAM>>" }],
|
||||
DEFAULT_ENV
|
||||
)
|
||||
).toSubsetEqualRight([
|
||||
{ active: true, key: "PARAM", value: "parsed_param" },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getResourceContents", () => {
|
||||
describe("Network call failure", () => {
|
||||
const args = {
|
||||
pathOrId: "test-collection-id-or-path",
|
||||
resourceType: "collection" as const,
|
||||
accessToken: "test-token",
|
||||
serverUrl: "test-url",
|
||||
};
|
||||
|
||||
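// Each case maps a mocked axios failure (error `code` or `response` payload) to the rejection expected from `getResourceContents`.
|
||||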
const cases = [
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `SERVER_CONNECTION_REFUSED` if the network call fails with the code `ECONNREFUSED`",
|
||||
args,
|
||||
axiosMock: {
|
||||
code: "ECONNREFUSED",
|
||||
},
|
||||
expected: {
|
||||
code: "SERVER_CONNECTION_REFUSED",
|
||||
data: args.serverUrl,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `INVALID_SERVER_URL` if the network call fails with the code `ERR_INVALID_URL`",
|
||||
args,
|
||||
axiosMock: {
|
||||
code: "ERR_INVALID_URL",
|
||||
},
|
||||
expected: {
|
||||
code: "INVALID_SERVER_URL",
|
||||
data: args.serverUrl,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `INVALID_SERVER_URL` if the network call fails with the code `ENOTFOUND`",
|
||||
args,
|
||||
axiosMock: {
|
||||
code: "ENOTFOUND",
|
||||
},
|
||||
expected: {
|
||||
code: "INVALID_SERVER_URL",
|
||||
data: args.serverUrl,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `INVALID_SERVER_URL` if the network call returns a response with a status code of `404`",
|
||||
args,
|
||||
axiosMock: {
|
||||
response: {
|
||||
status: 404,
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
code: "INVALID_SERVER_URL",
|
||||
data: args.serverUrl,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `TOKEN_EXPIRED` if the network call fails for the same reason",
|
||||
args,
|
||||
axiosMock: {
|
||||
response: {
|
||||
data: {
|
||||
reason: "TOKEN_EXPIRED",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
code: "TOKEN_EXPIRED",
|
||||
data: args.accessToken,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `TOKEN_INVALID` if the network call fails for the same reason",
|
||||
args,
|
||||
axiosMock: {
|
||||
response: {
|
||||
data: {
|
||||
reason: "TOKEN_INVALID",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
code: "TOKEN_INVALID",
|
||||
data: args.accessToken,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `INVALID_ID` if the network call fails for the same reason when the supplied collection ID or path is invalid",
|
||||
args,
|
||||
axiosMock: {
|
||||
response: {
|
||||
data: {
|
||||
reason: "INVALID_ID",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
code: "INVALID_ID",
|
||||
data: args.pathOrId,
|
||||
},
|
||||
},
|
||||
{
|
||||
description:
|
||||
"Promise rejects with the code `INVALID_ID` if the network call fails for the same reason when the supplied environment ID or path is invalid",
|
||||
args: {
|
||||
...args,
|
||||
pathOrId: "test-environment-id-or-path",
|
||||
resourceType: "environment" as const,
|
||||
},
|
||||
axiosMock: {
|
||||
response: {
|
||||
data: {
|
||||
reason: "INVALID_ID",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
code: "INVALID_ID",
|
||||
data: "test-environment-id-or-path",
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
test.each(cases)("$description", async ({ args, axiosMock, expected }) => {
|
||||
const { code, response } = axiosMock;
|
||||
const axiosErrMessage = code ?? response?.data?.reason;
|
||||
|
||||
vi.spyOn(axios, "get").mockImplementation(() =>
|
||||
Promise.reject(
|
||||
new AxiosError(
|
||||
axiosErrMessage,
|
||||
code,
|
||||
undefined,
|
||||
undefined,
|
||||
response as AxiosResponse
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
await expect(getResourceContents(args)).rejects.toEqual(expected);
|
||||
});
|
||||
|
||||
test("Promise rejects with the code `INVALID_SERVER_URL` if the network call succeeds and the received response content type is not `application/json`", () => {
|
||||
const expected = {
|
||||
code: "INVALID_SERVER_URL",
|
||||
data: args.serverUrl,
|
||||
};
|
||||
|
||||
vi.spyOn(axios, "get").mockImplementation(() =>
|
||||
Promise.resolve({
|
||||
data: "",
|
||||
headers: { "content-type": "text/html; charset=UTF-8" },
|
||||
})
|
||||
);
|
||||
|
||||
await expect(getResourceContents(args)).rejects.toEqual(expected);
|
||||
});
|
||||
|
||||
test("Promise rejects with the code `UNKNOWN_ERROR` while encountering an error that is not an instance of `AxiosError`", () => {
|
||||
const expected = {
|
||||
code: "UNKNOWN_ERROR",
|
||||
data: new TypeError("UNKNOWN_ERROR"),
|
||||
};
|
||||
|
||||
vi.spyOn(axios, "get").mockImplementation(() =>
|
||||
Promise.reject(new Error("UNKNOWN_ERROR"))
|
||||
);
|
||||
|
||||
await expect(getResourceContents(args)).rejects.toEqual(expected);
|
||||
});
|
||||
});
|
||||
|
||||
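// Resolution order: read from the local file system when the supplied path exists, otherwise fetch from the workspace access-tokens API.
|
||||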
describe("Success", () => {
|
||||
test("Proceeds with reading from the file system if the supplied file exists in the path", async () => {
|
||||
fs.access = vi.fn().mockResolvedValueOnce(undefined);
|
||||
|
||||
const sampleCollectionContents: HoppCollection = {
|
||||
v: CollectionSchemaVersion,
|
||||
id: "valid-collection-id",
|
||||
name: "valid-collection-title",
|
||||
folders: [],
|
||||
requests: [],
|
||||
headers: [],
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: false,
|
||||
},
|
||||
};
|
||||
|
||||
axios.get = vi.fn();
|
||||
|
||||
vi.spyOn(mutators, "readJsonFile").mockImplementation(() =>
|
||||
Promise.resolve(sampleCollectionContents)
|
||||
);
|
||||
|
||||
const pathOrId = "valid-collection-file-path";
|
||||
const resourceType = "collection";
|
||||
const accessToken = "valid-access-token";
|
||||
const serverUrl = "valid-url";
|
||||
|
||||
const contents = await getResourceContents({
|
||||
pathOrId,
|
||||
accessToken,
|
||||
serverUrl,
|
||||
resourceType,
|
||||
});
|
||||
|
||||
expect(fs.access).toHaveBeenCalledWith(pathOrId);
|
||||
expect(axios.get).not.toBeCalled();
|
||||
expect(mutators.readJsonFile).toHaveBeenCalledWith(pathOrId, true);
|
||||
|
||||
expect(contents).toEqual(sampleCollectionContents);
|
||||
});
|
||||
|
||||
test("Proceeds with the network call if a value for the access token is specified and the supplied path/id is not a valid file path", async () => {
|
||||
fs.access = vi.fn().mockRejectedValueOnce(undefined);
|
||||
|
||||
const sampleCollectionContents: HoppCollection = {
|
||||
v: CollectionSchemaVersion,
|
||||
name: "test-coll",
|
||||
folders: [],
|
||||
requests: [getDefaultRESTRequest()],
|
||||
headers: [],
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: false,
|
||||
},
|
||||
};
|
||||
|
||||
axios.get = vi.fn().mockImplementation(() =>
|
||||
Promise.resolve({
|
||||
data: {
|
||||
id: "clx06ik0o00028t6uwywwnxgg",
|
||||
data: null,
|
||||
title: "test-coll",
|
||||
parentID: null,
|
||||
folders: [],
|
||||
requests: [
|
||||
{
|
||||
id: "clx06imin00038t6uynt5vyk4",
|
||||
collectionID: "clx06ik0o00028t6uwywwnxgg",
|
||||
teamID: "clwt6r6j10031kc6pu0b08y6e",
|
||||
title: "req1",
|
||||
request:
|
||||
'{"v":"4","auth":{"authType":"inherit","authActive":true},"body":{"body":null,"contentType":null},"name":"req1","method":"GET","params":[],"headers":[],"endpoint":"https://echo.hoppscotch.io","testScript":"","preRequestScript":"","requestVariables":[]}',
|
||||
},
|
||||
],
|
||||
},
|
||||
headers: {
|
||||
"content-type": "application/json",
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
vi.spyOn(mutators, "readJsonFile").mockImplementation(() =>
|
||||
Promise.resolve(sampleCollectionContents)
|
||||
);
|
||||
|
||||
vi.spyOn(
|
||||
workspaceAccessHelpers,
|
||||
"transformWorkspaceCollections"
|
||||
).mockImplementation(() => [sampleCollectionContents]);
|
||||
|
||||
const pathOrId = "valid-collection-id";
|
||||
const resourceType = "collection";
|
||||
const accessToken = "valid-access-token";
|
||||
const serverUrl = "valid-url";
|
||||
|
||||
await getResourceContents({
|
||||
pathOrId,
|
||||
accessToken,
|
||||
serverUrl,
|
||||
resourceType,
|
||||
});
|
||||
|
||||
expect(fs.access).toHaveBeenCalledWith(pathOrId);
|
||||
expect(axios.get).toBeCalledWith(
|
||||
`${serverUrl}/v1/access-tokens/${resourceType}/${pathOrId}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
}
|
||||
);
|
||||
expect(
|
||||
workspaceAccessHelpers.transformWorkspaceCollections
|
||||
).toBeCalled();
|
||||
expect(mutators.readJsonFile).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,57 @@
import { describe, expect, test } from "vitest";

import {
  transformWorkspaceCollections,
  transformWorkspaceEnvironment,
} from "../../utils/workspace-access";
import {
  TRANSFORMED_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK,
  TRANSFORMED_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK,
  TRANSFORMED_ENVIRONMENT_MOCK,
  WORKSPACE_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK,
  WORKSPACE_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK,
  WORKSPACE_ENVIRONMENT_MOCK,
  WORKSPACE_MULTIPLE_CHILD_COLLECTIONS_WITH_AUTH_HEADERS_MOCK,
} from "./fixtures/workspace-access.mock";

import TRANSFORMED_MULTIPLE_CHILD_COLLECTIONS_WITH_AUTH_HEADERS_MOCK from "../e2e/fixtures/collections/multiple-child-collections-auth-headers-coll.json";

describe("workspace-access", () => {
  describe("transformWorkspaceCollection", () => {
    test("Successfully transforms collection data with deeply nested collections and authorization/headers set at each level to the `HoppCollection` format", () => {
      expect(
        transformWorkspaceCollections(
          WORKSPACE_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK
        )
      ).toEqual(TRANSFORMED_DEEPLY_NESTED_COLLECTIONS_WITH_AUTH_HEADERS_MOCK);
    });

    test("Successfully transforms collection data with multiple child collections and authorization/headers set at each level to the `HoppCollection` format", () => {
      expect(
        transformWorkspaceCollections(
          WORKSPACE_MULTIPLE_CHILD_COLLECTIONS_WITH_AUTH_HEADERS_MOCK
        )
      ).toEqual([
        TRANSFORMED_MULTIPLE_CHILD_COLLECTIONS_WITH_AUTH_HEADERS_MOCK,
      ]);
    });

    test("Adds the default value for `auth` & `header` fields while transforming collections without authorization/headers set at certain levels", () => {
      expect(
        transformWorkspaceCollections(
          WORKSPACE_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK
        )
      ).toEqual(
        TRANSFORMED_COLLECTIONS_WITHOUT_AUTH_HEADERS_AT_CERTAIN_LEVELS_MOCK
      );
    });
  });

  describe("transformWorkspaceEnvironment", () => {
    test("Successfully transforms environment data conforming to the format received from the network call to the `HoppEnvironment` format", () => {
      expect(transformWorkspaceEnvironment(WORKSPACE_ENVIRONMENT_MOCK)).toEqual(
        TRANSFORMED_ENVIRONMENT_MOCK
      );
    });
  });
});

@@ -37,7 +37,7 @@ export const getTestJsonFilePath = (

  const filePath = resolve(
    __dirname,
    `../../src/__tests__/samples/${kindDir}/${file}`
    `../../src/__tests__/e2e/fixtures/${kindDir}/${file}`
  );
  return filePath;
};

@@ -1,30 +1,34 @@
import { handleError } from "../handlers/error";
import { parseDelayOption } from "../options/test/delay";
import { parseEnvsData } from "../options/test/env";
import { TestCmdOptions, TestCmdEnvironmentOptions } from "../types/commands";
import { HoppEnvs } from "../types/request";
import { isHoppCLIError } from "../utils/checks";
import {
  collectionsRunner,
  collectionsRunnerExit,
  collectionsRunnerResult,
} from "../utils/collections";
import { handleError } from "../handlers/error";
import { parseCollectionData } from "../utils/mutators";
import { parseEnvsData } from "../options/test/env";
import { TestCmdOptions } from "../types/commands";
import { parseDelayOption } from "../options/test/delay";
import { HoppEnvs } from "../types/request";
import { isHoppCLIError } from "../utils/checks";

export const test = (path: string, options: TestCmdOptions) => async () => {
export const test = (pathOrId: string, options: TestCmdOptions) => async () => {
  try {
    const delay = options.delay ? parseDelayOption(options.delay) : 0
    const envs = options.env ? await parseEnvsData(options.env) : <HoppEnvs>{ global: [], selected: [] }
    const collections = await parseCollectionData(path)
    const delay = options.delay ? parseDelayOption(options.delay) : 0;

    const report = await collectionsRunner({collections, envs, delay})
    const hasSucceeded = collectionsRunnerResult(report)
    collectionsRunnerExit(hasSucceeded)
  } catch(e) {
    if(isHoppCLIError(e)) {
      handleError(e)
    const envs = options.env
      ? await parseEnvsData(options as TestCmdEnvironmentOptions)
      : <HoppEnvs>{ global: [], selected: [] };

    const collections = await parseCollectionData(pathOrId, options);

    const report = await collectionsRunner({ collections, envs, delay });
    const hasSucceeded = collectionsRunnerResult(report);

    collectionsRunnerExit(hasSucceeded);
  } catch (e) {
    if (isHoppCLIError(e)) {
      handleError(e);
      process.exit(1);
    }
    else throw e
    } else throw e;
  }
};

@@ -63,7 +63,7 @@ export const handleError = <T extends HoppErrorCode>(error: HoppError<T>) => {
      ERROR_MSG = `Unable to parse -\n${error.data}`;
      break;
    case "INVALID_FILE_TYPE":
      ERROR_MSG = `Please provide file of extension type: ${error.data}`;
      ERROR_MSG = `Please provide file of extension type .json: ${error.data}`;
      break;
    case "REQUEST_ERROR":
    case "TEST_SCRIPT_ERROR":
@@ -82,6 +82,21 @@ export const handleError = <T extends HoppErrorCode>(error: HoppError<T>) => {
    case "TESTS_FAILING":
      ERROR_MSG = error.data;
      break;
    case "TOKEN_EXPIRED":
      ERROR_MSG = `The specified access token is expired. Please provide a valid token: ${error.data}`;
      break;
    case "TOKEN_INVALID":
      ERROR_MSG = `The specified access token is invalid. Please provide a valid token: ${error.data}`;
      break;
    case "INVALID_ID":
      ERROR_MSG = `The specified collection/environment (ID or file path) is invalid or inaccessible. Please ensure the supplied ID or file path is correct: ${error.data}`;
      break;
    case "INVALID_SERVER_URL":
      ERROR_MSG = `Please provide a valid SH instance server URL: ${error.data}`;
      break;
    case "SERVER_CONNECTION_REFUSED":
      ERROR_MSG = `Unable to connect to the server. Please check your network connection or server instance URL and try again: ${error.data}`;
      break;
  }

  if (!S.isEmpty(ERROR_MSG)) {

@@ -49,14 +49,22 @@ program.exitOverride().configureOutput({
program
  .command("test")
  .argument(
    "<file_path>",
    "path to a hoppscotch collection.json file for CI testing"
    "<file_path_or_id>",
    "path to a hoppscotch collection.json file or collection ID from a workspace for CI testing"
  )
  .option(
    "-e, --env <file_path_or_id>",
    "path to an environment variables json file or environment ID from a workspace"
  )
  .option("-e, --env <file_path>", "path to an environment variables json file")
  .option(
    "-d, --delay <delay_in_ms>",
    "delay in milliseconds(ms) between consecutive requests within a collection"
  )
  .option(
    "--token <access_token>",
    "personal access token to access collections/environments from a workspace"
  )
  .option("--server <server_url>", "server URL for SH instance")
  .allowExcessArguments(false)
  .allowUnknownOption(false)
  .description("running hoppscotch collection.json file")
@@ -66,7 +74,7 @@ program
      "https://docs.hoppscotch.io/documentation/clients/cli#commands"
    )}`
  )
  .action(async (path, options) => await test(path, options)());
  .action(async (pathOrId, options) => await test(pathOrId, options)());
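
// Illustrative sketch (not part of this diff): with the flags registered above, Commander hands
// the "test" action an options object roughly like the hypothetical one below; the raw --delay
// value is still a string here and is parsed later by parseDelayOption.
//
//   const exampleOptions: TestCmdOptions = {
//     env: "env-file-or-workspace-environment-id", // -e, --env
//     delay: "100",                                 // -d, --delay
//     token: "<personal-access-token>",             // --token
//     server: "https://selfhosted.example.com",     // --server
//   };
//   await test("collection-file-or-workspace-collection-id", exampleOptions)();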

export const cli = async (args: string[]) => {
  try {

@@ -2,21 +2,34 @@ import { Environment } from "@hoppscotch/data";
import { entityReference } from "verzod";
import { z } from "zod";

import { TestCmdEnvironmentOptions } from "../../types/commands";
import { error } from "../../types/errors";
import {
  HoppEnvKeyPairObject,
  HoppEnvPair,
  HoppEnvs,
} from "../../types/request";
import { readJsonFile } from "../../utils/mutators";
import { getResourceContents } from "../../utils/getters";

/**
 * Parses env json file for given path and validates the parsed env json object
 * @param path Path of env.json file to be parsed
 * @returns For successful parsing we get HoppEnvs object
 * Parses environment data from a given path or ID and returns the data conforming to the latest version of the `Environment` schema.
 *
 * @param {TestCmdEnvironmentOptions} options Supplied values for CLI flags.
 * @param {string} options.env Path of the environment `.json` file to be parsed.
 * @param {string} [options.token] Personal access token to fetch workspace environments.
 * @param {string} [options.server] server URL for SH instance.
 * @returns {Promise<HoppEnvs>} A promise that resolves to the parsed environment object with global and selected environments.
 */
export async function parseEnvsData(path: string) {
  const contents = await readJsonFile(path);
export async function parseEnvsData(options: TestCmdEnvironmentOptions) {
  const { env: pathOrId, token: accessToken, server: serverUrl } = options;

  const contents = await getResourceContents({
    pathOrId,
    accessToken,
    serverUrl,
    resourceType: "environment",
  });

  const envPairs: Array<HoppEnvPair | Record<string, string>> = [];

  // The legacy key-value pair format that is still supported
@@ -33,7 +46,7 @@ export async function parseEnvsData(path: string) {
  // CLI doesnt support bulk environments export
  // Hence we check for this case and throw an error if it matches the format
  if (HoppBulkEnvExportObjectResult.success) {
    throw error({ code: "BULK_ENV_FILE", path, data: error });
    throw error({ code: "BULK_ENV_FILE", path: pathOrId, data: error });
  }

  // Checks if the environment file is of the correct format
@@ -42,7 +55,7 @@ export async function parseEnvsData(path: string) {
    !HoppEnvKeyPairResult.success &&
    HoppEnvExportObjectResult.type === "err"
  ) {
    throw error({ code: "MALFORMED_ENV_FILE", path, data: error });
    throw error({ code: "MALFORMED_ENV_FILE", path: pathOrId, data: error });
  }

  if (HoppEnvKeyPairResult.success) {
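
// Illustrative sketch (not part of this diff): how the refactored parseEnvsData is invoked by
// the test command. Values are hypothetical; when `token`/`server` are omitted the `env` value
// is treated as a local JSON file path by getResourceContents.
//
//   const envs = await parseEnvsData({
//     env: "workspace-environment-id-or-path",  // --env
//     token: "<personal-access-token>",         // optional, fetches from a workspace
//     server: "https://selfhosted.example.com", // optional SH instance URL
//   });
//   // `envs` resolves to a HoppEnvs object with `global` and `selected` entries.
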
@@ -1,6 +1,16 @@
export type TestCmdOptions = {
  env: string | undefined;
  delay: string | undefined;
  env?: string;
  delay?: string;
  token?: string;
  server?: string;
};

// Consumed in the collection `file_path_or_id` argument action handler
export type TestCmdCollectionOptions = Omit<TestCmdOptions, "env" | "delay">;

// Consumed in the `--env, -e` flag action handler
export type TestCmdEnvironmentOptions = Omit<TestCmdOptions, "env"> & {
  env: string;
};

export type HOPP_ENV_FILE_EXT = "json";
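
// Illustrative sketch (not part of this diff): what the derived option types above resolve to,
// using hypothetical values.
// TestCmdCollectionOptions  -> { token?: string; server?: string }
// TestCmdEnvironmentOptions -> { env: string; delay?: string; token?: string; server?: string }
const collectionOptionsExample: TestCmdCollectionOptions = {
  token: "<personal-access-token>",
  server: "https://selfhosted.example.com",
};

const environmentOptionsExample: TestCmdEnvironmentOptions = {
  env: "workspace-environment-id-or-path",
  token: "<personal-access-token>",
};
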
@@ -26,6 +26,11 @@ type HoppErrors = {
  MALFORMED_ENV_FILE: HoppErrorPath & HoppErrorData;
  BULK_ENV_FILE: HoppErrorPath & HoppErrorData;
  INVALID_FILE_TYPE: HoppErrorData;
  TOKEN_EXPIRED: HoppErrorData;
  TOKEN_INVALID: HoppErrorData;
  INVALID_ID: HoppErrorData;
  INVALID_SERVER_URL: HoppErrorData;
  SERVER_CONNECTION_REFUSED: HoppErrorData;
};

export type HoppErrorCode = keyof HoppErrors;
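
// Illustrative sketch (not part of this diff): how the new codes travel. getResourceContents
// throws them via the error() helper, and handleError (handlers/error.ts) maps each code to the
// message added in this commit. A hypothetical flow:
//
//   throw error({ code: "TOKEN_EXPIRED", data: "<personal-access-token>" });
//   // ...caught in the command handler:
//   if (isHoppCLIError(e)) {
//     handleError(e); // prints the TOKEN_EXPIRED message, then the caller exits with code 1
//   }
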
@@ -1,18 +1,36 @@
import {
  HoppRESTHeader,
  Environment,
  parseTemplateStringE,
  HoppCollection,
  HoppRESTHeader,
  HoppRESTParam,
  parseTemplateStringE,
} from "@hoppscotch/data";
import axios, { AxiosError } from "axios";
import chalk from "chalk";
import { pipe } from "fp-ts/function";
import * as A from "fp-ts/Array";
import * as E from "fp-ts/Either";
import * as S from "fp-ts/string";
import * as O from "fp-ts/Option";
import { error } from "../types/errors";
import { pipe } from "fp-ts/function";
import * as S from "fp-ts/string";
import fs from "fs/promises";
import { round } from "lodash-es";

import { error } from "../types/errors";
import { DEFAULT_DURATION_PRECISION } from "./constants";
import { readJsonFile } from "./mutators";
import {
  WorkspaceCollection,
  WorkspaceEnvironment,
  transformWorkspaceCollections,
  transformWorkspaceEnvironment,
} from "./workspace-access";

type GetResourceContentsParams = {
  pathOrId: string;
  accessToken?: string;
  serverUrl?: string;
  resourceType: "collection" | "environment";
};

/**
 * Generates template string (status + statusText) with specific color unicodes
@@ -134,3 +152,104 @@ export const roundDuration = (
  duration: number,
  precision: number = DEFAULT_DURATION_PRECISION
) => round(duration, precision);

/**
 * Retrieves the contents of a resource (collection or environment) from a local file (export) or a remote server (workspaces).
 *
 * @param {GetResourceContentsParams} params - The parameters for retrieving resource contents.
 * @param {string} params.pathOrId - The path to the local file or the ID for remote retrieval.
 * @param {string} [params.accessToken] - The access token for authorizing remote retrieval.
 * @param {string} [params.serverUrl] - The SH instance server URL for remote retrieval. Defaults to the cloud instance.
 * @param {"collection" | "environment"} params.resourceType - The type of the resource to retrieve.
 * @returns {Promise<unknown>} A promise that resolves to the contents of the resource.
 * @throws Will throw an error if the content type of the fetched resource is not `application/json`,
 * if there is an issue with the access token, if the server connection is refused,
 * or if the server URL is invalid.
 */
export const getResourceContents = async (
  params: GetResourceContentsParams
): Promise<unknown> => {
  const { pathOrId, accessToken, serverUrl, resourceType } = params;

  let contents: unknown | null = null;
  let fileExistsInPath = false;

  try {
    await fs.access(pathOrId);
    fileExistsInPath = true;
  } catch (e) {
    fileExistsInPath = false;
  }

  if (accessToken && !fileExistsInPath) {
    const resolvedServerUrl = serverUrl || "https://api.hoppscotch.io";

    try {
      const separator = resolvedServerUrl.endsWith("/") ? "" : "/";
      const resourcePath =
        resourceType === "collection" ? "collection" : "environment";

      const url = `${resolvedServerUrl}${separator}v1/access-tokens/${resourcePath}/${pathOrId}`;

      const { data, headers } = await axios.get(url, {
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      });

      if (!headers["content-type"].includes("application/json")) {
        throw new AxiosError("INVALID_CONTENT_TYPE");
      }

      contents =
        resourceType === "collection"
          ? transformWorkspaceCollections([data] as WorkspaceCollection[])[0]
          : transformWorkspaceEnvironment(data as WorkspaceEnvironment);
    } catch (err) {
      if (err instanceof AxiosError) {
        const axiosErr: AxiosError<{
          reason?: "TOKEN_EXPIRED" | "TOKEN_INVALID" | "INVALID_ID";
          message: string;
          statusCode: number;
        }> = err;

        const errReason = axiosErr.response?.data?.reason;

        if (errReason) {
          throw error({
            code: errReason,
            data: ["TOKEN_EXPIRED", "TOKEN_INVALID"].includes(errReason)
              ? accessToken
              : pathOrId,
          });
        }

        if (axiosErr.code === "ECONNREFUSED") {
          throw error({
            code: "SERVER_CONNECTION_REFUSED",
            data: resolvedServerUrl,
          });
        }

        if (
          axiosErr.message === "INVALID_CONTENT_TYPE" ||
          axiosErr.code === "ERR_INVALID_URL" ||
          axiosErr.code === "ENOTFOUND" ||
          axiosErr.code === "ERR_BAD_REQUEST" ||
          axiosErr.response?.status === 404
        ) {
          throw error({ code: "INVALID_SERVER_URL", data: resolvedServerUrl });
        }
      } else {
        throw error({ code: "UNKNOWN_ERROR", data: err });
      }
    }
  }

  // Fallback to reading from file if contents are not available
  if (contents === null) {
    contents = await readJsonFile(pathOrId, fileExistsInPath);
  }

  return contents;
};
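
// Illustrative sketch (not part of this diff): the two resolution paths getResourceContents
// takes. Paths, IDs, token and server URL below are hypothetical placeholders.
//
// 1. Local export: the path exists on disk, so the helper skips the network call and reads
//    the file via readJsonFile.
const collectionFromFile = await getResourceContents({
  pathOrId: "./hoppscotch-collection.json",
  resourceType: "collection",
});

// 2. Workspace fetch: no such file exists but an access token is supplied, so the helper
//    calls GET <serverUrl>/v1/access-tokens/collection/<id> and transforms the response.
const collectionFromWorkspace = await getResourceContents({
  pathOrId: "clx06ik0o00028t6uwywwnxgg",
  accessToken: "<personal-access-token>",
  serverUrl: "https://selfhosted.example.com", // falls back to https://api.hoppscotch.io when omitted
  resourceType: "collection",
});
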
@@ -1,11 +1,13 @@
import { HoppCollection, HoppRESTRequest } from "@hoppscotch/data";
import { Environment, HoppCollection, HoppRESTRequest } from "@hoppscotch/data";
import fs from "fs/promises";
import { entityReference } from "verzod";
import { z } from "zod";

import { TestCmdCollectionOptions } from "../types/commands";
import { error } from "../types/errors";
import { FormDataEntry } from "../types/request";
import { isHoppErrnoException } from "./checks";
import { getResourceContents } from "./getters";

const getValidRequests = (
  collections: HoppCollection[],
@@ -72,15 +74,26 @@ export const parseErrorMessage = (e: unknown) => {
  return msg.replace(/\n+$|\s{2,}/g, "").trim();
};

export async function readJsonFile(path: string): Promise<unknown> {
/**
 * Reads a JSON file from the specified path and returns the parsed content.
 *
 * @param {string} path - The path to the JSON file.
 * @param {boolean} fileExistsInPath - Indicates whether the file exists in the specified path.
 * @returns {Promise<unknown>} A Promise that resolves to the parsed JSON contents.
 * @throws {Error} If the file path does not end with `.json`.
 * @throws {Error} If the file does not exist in the specified path.
 * @throws {Error} If an unknown error occurs while reading or parsing the file.
 */
export async function readJsonFile(
  path: string,
  fileExistsInPath: boolean
): Promise<unknown> {
  if (!path.endsWith(".json")) {
    throw error({ code: "INVALID_FILE_TYPE", data: path });
  }

  try {
    await fs.access(path);
  } catch (e) {
    throw error({ code: "FILE_NOT_FOUND", path: path });
  if (!fileExistsInPath) {
    throw error({ code: "FILE_NOT_FOUND", path });
  }

  try {
@@ -91,15 +104,27 @@ export async function readJsonFile(path: string): Promise<unknown> {
}

/**
 * Parses collection json file for given path:context.path, and validates
 * the parsed collectiona array.
 * @param path Collection json file path.
 * @returns For successful parsing we get array of HoppCollection,
 * Parses collection data from a given path or ID and returns the data conforming to the latest version of the `HoppCollection` schema.
 *
 * @param pathOrId Collection JSON file path/ID from a workspace.
 * @param {TestCmdCollectionOptions} options Supplied values for CLI flags.
 * @param {string} [options.token] Personal access token to fetch workspace environments.
 * @param {string} [options.server] server URL for SH instance.
 * @returns {Promise<HoppCollection[]>} A promise that resolves to an array of HoppCollection objects.
 * @throws Throws an error if the collection data is malformed.
 */
export async function parseCollectionData(
  path: string
  pathOrId: string,
  options: TestCmdCollectionOptions
): Promise<HoppCollection[]> {
  let contents = await readJsonFile(path);
  const { token: accessToken, server: serverUrl } = options;

  const contents = await getResourceContents({
    pathOrId,
    accessToken,
    serverUrl,
    resourceType: "collection",
  });

  const maybeArrayOfCollections: unknown[] = Array.isArray(contents)
    ? contents
@@ -112,10 +137,10 @@ export async function parseCollectionData(
  if (!collectionSchemaParsedResult.success) {
    throw error({
      code: "MALFORMED_COLLECTION",
      path,
      path: pathOrId,
      data: "Please check the collection data.",
    });
  }

  return getValidRequests(collectionSchemaParsedResult.data, path);
  return getValidRequests(collectionSchemaParsedResult.data, pathOrId);
}
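
// Illustrative sketch (not part of this diff): the two call shapes the refactor enables.
// The path, ID, token and server URL are hypothetical placeholders.
const collectionsFromExport = await parseCollectionData("./collection.json", {});

const collectionsFromWorkspace = await parseCollectionData("workspace-collection-id", {
  token: "<personal-access-token>",
  server: "https://selfhosted.example.com",
});
// Both resolve to HoppCollection[] validated against the collection schema before the runner uses them.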

packages/hoppscotch-cli/src/utils/workspace-access.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import {
  CollectionSchemaVersion,
  Environment,
  EnvironmentSchemaVersion,
  HoppCollection,
  HoppRESTRequest,
} from "@hoppscotch/data";

import { HoppEnvPair } from "../types/request";

export interface WorkspaceEnvironment {
  id: string;
  teamID: string;
  name: string;
  variables: HoppEnvPair[];
}

export interface WorkspaceCollection {
  id: string;
  data: string | null;
  title: string;
  parentID: string | null;
  folders: WorkspaceCollection[];
  requests: WorkspaceRequest[];
}

interface WorkspaceRequest {
  id: string;
  collectionID: string;
  teamID: string;
  title: string;
  request: string;
}

/**
 * Transforms the incoming list of workspace requests by applying `JSON.parse` to the `request` field.
 *
 * @param {WorkspaceRequest[]} requests - An array of workspace request objects to be transformed.
 * @returns {HoppRESTRequest[]} The transformed array of requests conforming to the `HoppRESTRequest` type.
 */
const transformWorkspaceRequests = (
  requests: WorkspaceRequest[]
): HoppRESTRequest[] => requests.map(({ request }) => JSON.parse(request));

/**
 * Transforms workspace environment data to the `HoppEnvironment` format.
 *
 * @param {WorkspaceEnvironment} workspaceEnvironment - The workspace environment object to transform.
 * @returns {Environment} The transformed environment object conforming to the `Environment` type.
 */
export const transformWorkspaceEnvironment = (
  workspaceEnvironment: WorkspaceEnvironment
): Environment => {
  const { teamID, variables, ...rest } = workspaceEnvironment;

  // Add `secret` field if the data conforms to an older schema
  const transformedEnvVars = variables.map((variable) => {
    if (!("secret" in variable)) {
      return {
        ...(variable as HoppEnvPair),
        secret: false,
      } as HoppEnvPair;
    }

    return variable;
  });

  return {
    v: EnvironmentSchemaVersion,
    variables: transformedEnvVars,
    ...rest,
  };
};

/**
 * Transforms workspace collection data to the `HoppCollection` format.
 *
 * @param {WorkspaceCollection[]} collections - An array of workspace collection objects to be transformed.
 * @returns {HoppCollection[]} The transformed array of collections conforming to the `HoppCollection` type.
 */
export const transformWorkspaceCollections = (
  collections: WorkspaceCollection[]
): HoppCollection[] => {
  return collections.map((collection) => {
    const { id, title, data, requests, folders } = collection;

    const parsedData = data ? JSON.parse(data) : {};
    const { auth = { authType: "inherit", authActive: true }, headers = [] } =
      parsedData;

    return {
      v: CollectionSchemaVersion,
      id,
      name: title,
      folders: transformWorkspaceCollections(folders),
      requests: transformWorkspaceRequests(requests),
      auth,
      headers,
    };
  });
};
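
// Illustrative sketch (not part of this diff): a minimal workspace payload (field values are
// hypothetical; the request string is abbreviated) and what transformWorkspaceCollections
// turns it into.
const workspaceCollection: WorkspaceCollection = {
  id: "coll-id",
  data: null, // no collection-level auth/headers stored
  title: "My collection",
  parentID: null,
  folders: [],
  requests: [
    {
      id: "req-id",
      collectionID: "coll-id",
      teamID: "team-id",
      title: "req1",
      request: '{"v":"4","name":"req1","method":"GET","endpoint":"https://echo.hoppscotch.io"}',
    },
  ],
};

const [hoppCollection] = transformWorkspaceCollections([workspaceCollection]);
// hoppCollection.name        -> "My collection"
// hoppCollection.auth        -> { authType: "inherit", authActive: true } (default, since `data` is null)
// hoppCollection.headers     -> [] (default)
// hoppCollection.requests[0] -> the parsed `request` JSON string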

packages/hoppscotch-cli/vitest.config.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { defineConfig } from "vitest/config";

export default defineConfig({
  test: {
    environment: "node",
    setupFiles: ["./setupFiles.ts"],
    include: ["**/src/__tests__/**/**/*.{test,spec}.ts"],
    exclude: [
      "**/node_modules/**",
      "**/dist/**",
      "**/src/__tests__/functions/**/*.ts",
    ],
  },
});

pnpm-lock.yaml (generated; 27621 lines; diff suppressed because it is too large)