chore: split app to commons and web (squash commit)
packages/hoppscotch-common/src/helpers/GQLConnection.ts (new file, 240 lines)
@@ -0,0 +1,240 @@
import { BehaviorSubject } from "rxjs"
import {
  getIntrospectionQuery,
  buildClientSchema,
  GraphQLSchema,
  printSchema,
  GraphQLObjectType,
  GraphQLInputObjectType,
  GraphQLEnumType,
  GraphQLInterfaceType,
} from "graphql"
import { distinctUntilChanged, map } from "rxjs/operators"
import { GQLHeader, HoppGQLAuth } from "@hoppscotch/data"
import { sendNetworkRequest } from "./network"

const GQL_SCHEMA_POLL_INTERVAL = 7000

/**
 * GQLConnection deals with all the operations (like polling, schema extraction) that run
 * when a connection is made to a GraphQL server.
 */
export class GQLConnection {
  public isLoading$ = new BehaviorSubject<boolean>(false)
  public connected$ = new BehaviorSubject<boolean>(false)
  public schema$ = new BehaviorSubject<GraphQLSchema | null>(null)

  public schemaString$ = this.schema$.pipe(
    distinctUntilChanged(),
    map((schema) => {
      if (!schema) return null

      return printSchema(schema, {
        commentDescriptions: true,
      })
    })
  )

  public queryFields$ = this.schema$.pipe(
    distinctUntilChanged(),
    map((schema) => {
      if (!schema) return null

      const fields = schema.getQueryType()?.getFields()
      if (!fields) return null

      return Object.values(fields)
    })
  )

  public mutationFields$ = this.schema$.pipe(
    distinctUntilChanged(),
    map((schema) => {
      if (!schema) return null

      const fields = schema.getMutationType()?.getFields()
      if (!fields) return null

      return Object.values(fields)
    })
  )

  public subscriptionFields$ = this.schema$.pipe(
    distinctUntilChanged(),
    map((schema) => {
      if (!schema) return null

      const fields = schema.getSubscriptionType()?.getFields()
      if (!fields) return null

      return Object.values(fields)
    })
  )

  public graphqlTypes$ = this.schema$.pipe(
    distinctUntilChanged(),
    map((schema) => {
      if (!schema) return null

      const typeMap = schema.getTypeMap()

      const queryTypeName = schema.getQueryType()?.name ?? ""
      const mutationTypeName = schema.getMutationType()?.name ?? ""
      const subscriptionTypeName = schema.getSubscriptionType()?.name ?? ""

      return Object.values(typeMap).filter((type) => {
        return (
          !type.name.startsWith("__") &&
          ![queryTypeName, mutationTypeName, subscriptionTypeName].includes(
            type.name
          ) &&
          (type instanceof GraphQLObjectType ||
            type instanceof GraphQLInputObjectType ||
            type instanceof GraphQLEnumType ||
            type instanceof GraphQLInterfaceType)
        )
      })
    })
  )

  private timeoutSubscription: any

  public connect(url: string, headers: GQLHeader[]) {
    if (this.connected$.value) {
      throw new Error(
        "A connection is already running. Close it before starting another."
      )
    }

    // Polling
    this.connected$.next(true)

    const poll = async () => {
      await this.getSchema(url, headers)
      this.timeoutSubscription = setTimeout(() => {
        poll()
      }, GQL_SCHEMA_POLL_INTERVAL)
    }
    poll()
  }

  public disconnect() {
    if (!this.connected$.value) {
      throw new Error("No connections are running to be disconnected")
    }

    clearTimeout(this.timeoutSubscription)
    this.connected$.next(false)
  }

  public reset() {
    if (this.connected$.value) this.disconnect()

    this.isLoading$.next(false)
    this.connected$.next(false)
    this.schema$.next(null)
  }

  private async getSchema(url: string, headers: GQLHeader[]) {
    try {
      this.isLoading$.next(true)

      const introspectionQuery = JSON.stringify({
        query: getIntrospectionQuery(),
      })

      const finalHeaders: Record<string, string> = {}
      headers
        .filter((x) => x.active && x.key !== "")
        .forEach((x) => (finalHeaders[x.key] = x.value))

      const reqOptions = {
        method: "POST",
        url,
        headers: {
          ...finalHeaders,
          "content-type": "application/json",
        },
        data: introspectionQuery,
      }

      const data = await sendNetworkRequest(reqOptions)

      // HACK: Temporary fix for the trailing null character issue from the extension
      const response = new TextDecoder("utf-8")
        .decode(data.data)
        .replace(/\0+$/, "")

      const introspectResponse = JSON.parse(response)

      const schema = buildClientSchema(introspectResponse.data)

      this.schema$.next(schema)

      this.isLoading$.next(false)
    } catch (e: any) {
      console.error(e)
      this.disconnect()
    }
  }

  public async runQuery(
    url: string,
    headers: GQLHeader[],
    query: string,
    variables: string,
    auth: HoppGQLAuth
  ) {
    const finalHeaders: Record<string, string> = {}

    const parsedVariables = JSON.parse(variables || "{}")

    const params: Record<string, string> = {}

    if (auth.authActive) {
      if (auth.authType === "basic") {
        const username = auth.username
        const password = auth.password
        finalHeaders.Authorization = `Basic ${btoa(`${username}:${password}`)}`
      } else if (auth.authType === "bearer" || auth.authType === "oauth-2") {
        finalHeaders.Authorization = `Bearer ${auth.token}`
      } else if (auth.authType === "api-key") {
        const { key, value, addTo } = auth
        if (addTo === "Headers") {
          finalHeaders[key] = value
        } else if (addTo === "Query params") {
          params[key] = value
        }
      }
    }

    headers
      .filter((item) => item.active && item.key !== "")
      .forEach(({ key, value }) => (finalHeaders[key] = value))

    const reqOptions = {
      method: "POST",
      url,
      headers: {
        ...finalHeaders,
        "content-type": "application/json",
      },
      data: JSON.stringify({
        query,
        variables: parsedVariables,
      }),
      params: {
        ...params,
      },
    }

    const res = await sendNetworkRequest(reqOptions)

    // HACK: Temporary fix for the trailing null character issue from the extension
    const responseText = new TextDecoder("utf-8")
      .decode(res.data)
      .replace(/\0+$/, "")

    return responseText
  }
}
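Reviewer note: a minimal usage sketch of the class above (not part of this commit; the endpoint and header values are assumed examples). It shows the intended lifecycle: connect() starts introspection polling, schemaString$ emits the latest printed schema, and disconnect() stops the poll loop.

// Illustrative sketch only — endpoint, token, and header shape beyond key/value/active are assumptions
const conn = new GQLConnection()

conn.connect("https://example.com/graphql", [
  { key: "Authorization", value: "Bearer <token>", active: true },
])

const sub = conn.schemaString$.subscribe((sdl) => {
  if (sdl) console.log("schema refreshed,", sdl.length, "chars of SDL")
})

// Later: stop polling and clean up
sub.unsubscribe()
conn.disconnect()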
packages/hoppscotch-common/src/helpers/RESTExtURLParams.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
import { FormDataKeyValue, HoppRESTRequest } from "@hoppscotch/data"
import { isJSONContentType } from "./utils/contenttypes"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"

/**
 * Handles translations for all the hopp.io REST Shareable URL params
 */
export function translateExtURLParams(
  urlParams: Record<string, any>
): HoppRESTRequest {
  if (urlParams.v) return parseV1ExtURL(urlParams)
  else return parseV0ExtURL(urlParams)
}

function parseV0ExtURL(urlParams: Record<string, any>): HoppRESTRequest {
  const resolvedReq = getDefaultRESTRequest()

  if (urlParams.method && typeof urlParams.method === "string") {
    resolvedReq.method = urlParams.method
  }

  if (urlParams.url && typeof urlParams.url === "string") {
    if (urlParams.path && typeof urlParams.path === "string") {
      resolvedReq.endpoint = `${urlParams.url}/${urlParams.path}`
    } else {
      resolvedReq.endpoint = urlParams.url
    }
  }

  if (urlParams.headers && typeof urlParams.headers === "string") {
    resolvedReq.headers = JSON.parse(urlParams.headers)
  }

  if (urlParams.params && typeof urlParams.params === "string") {
    resolvedReq.params = JSON.parse(urlParams.params)
  }

  if (urlParams.httpUser && typeof urlParams.httpUser === "string") {
    resolvedReq.auth = {
      authType: "basic",
      authActive: true,
      username: urlParams.httpUser,
      password: urlParams.httpPassword ?? "",
    }
  }

  if (urlParams.bearerToken && typeof urlParams.bearerToken === "string") {
    resolvedReq.auth = {
      authType: "bearer",
      authActive: true,
      token: urlParams.bearerToken,
    }
  }

  if (urlParams.contentType) {
    if (urlParams.contentType === "multipart/form-data") {
      resolvedReq.body = {
        contentType: "multipart/form-data",
        body: JSON.parse(urlParams.bodyParams || "[]").map(
          (x: any) =>
            <FormDataKeyValue>{
              active: x.active,
              key: x.key,
              value: x.value,
              isFile: false,
            }
        ),
      }
    } else if (isJSONContentType(urlParams.contentType)) {
      if (urlParams.rawParams) {
        resolvedReq.body = {
          contentType: urlParams.contentType,
          body: urlParams.rawParams,
        }
      } else {
        resolvedReq.body = {
          contentType: urlParams.contentType,
          body: urlParams.bodyParams,
        }
      }
    } else {
      resolvedReq.body = {
        contentType: urlParams.contentType,
        body: urlParams.rawParams,
      }
    }
  }

  return resolvedReq
}

function parseV1ExtURL(urlParams: Record<string, any>): HoppRESTRequest {
  const resolvedReq = getDefaultRESTRequest()

  if (urlParams.headers && typeof urlParams.headers === "string") {
    resolvedReq.headers = JSON.parse(urlParams.headers)
  }

  if (urlParams.params && typeof urlParams.params === "string") {
    resolvedReq.params = JSON.parse(urlParams.params)
  }

  if (urlParams.method && typeof urlParams.method === "string") {
    resolvedReq.method = urlParams.method
  }

  if (urlParams.endpoint && typeof urlParams.endpoint === "string") {
    resolvedReq.endpoint = urlParams.endpoint
  }

  if (urlParams.body && typeof urlParams.body === "string") {
    resolvedReq.body = JSON.parse(urlParams.body)
  }

  return resolvedReq
}
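Reviewer note: a quick sketch of how the translator above behaves for a v1 shareable URL (not part of this commit; the endpoint and header payload are assumed examples, and the exact header object shape comes from whatever JSON the URL carries).

// Illustrative sketch only
const req = translateExtURLParams({
  v: "1",
  method: "POST",
  endpoint: "https://echo.hoppscotch.io", // assumed example endpoint
  headers: JSON.stringify([{ key: "x-demo", value: "1", active: true }]),
  params: JSON.stringify([]),
})
// req.method === "POST" and req.endpoint === "https://echo.hoppscotch.io";
// everything not present in the params keeps the defaults from getDefaultRESTRequest()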
packages/hoppscotch-common/src/helpers/RequestRunner.ts (new file, 236 lines)
@@ -0,0 +1,236 @@
import { Observable } from "rxjs"
import { filter } from "rxjs/operators"
import { chain, right, TaskEither } from "fp-ts/lib/TaskEither"
import { flow, pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import { Environment } from "@hoppscotch/data"
import {
  SandboxTestResult,
  runTestScript,
  TestDescriptor,
} from "@hoppscotch/js-sandbox"
import { isRight } from "fp-ts/Either"
import { cloneDeep } from "lodash-es"
import {
  getCombinedEnvVariables,
  getFinalEnvsFromPreRequest,
} from "./preRequest"
import { getEffectiveRESTRequest } from "./utils/EffectiveURL"
import { HoppRESTResponse } from "./types/HoppRESTResponse"
import { createRESTNetworkRequestStream } from "./network"
import { HoppTestData, HoppTestResult } from "./types/HoppTestResult"
import { isJSONContentType } from "./utils/contenttypes"
import { updateTeamEnvironment } from "./backend/mutations/TeamEnvironment"
import { getRESTRequest, setRESTTestResults } from "~/newstore/RESTSession"
import {
  environmentsStore,
  getCurrentEnvironment,
  getEnvironment,
  getGlobalVariables,
  setGlobalEnvVariables,
  updateEnvironment,
} from "~/newstore/environments"

const getTestableBody = (
  res: HoppRESTResponse & { type: "success" | "fail" }
) => {
  const contentTypeHeader = res.headers.find(
    (h) => h.key.toLowerCase() === "content-type"
  )

  const rawBody = new TextDecoder("utf-8")
    .decode(res.body)
    .replaceAll("\x00", "")

  const x = pipe(
    // This pipeline just decides whether JSON parses or not
    contentTypeHeader && isJSONContentType(contentTypeHeader.value)
      ? O.of(rawBody)
      : O.none,

    // Try parsing, if failed, go to the fail option
    O.chain((body) => O.tryCatch(() => JSON.parse(body))),

    // If JSON, return that (get), else return just the body string (else)
    O.getOrElse<any | string>(() => rawBody)
  )

  return x
}

const combineEnvVariables = (env: {
  global: Environment["variables"]
  selected: Environment["variables"]
}) => [...env.selected, ...env.global]

export const runRESTRequest$ = (): TaskEither<
  string | Error,
  Observable<HoppRESTResponse>
> =>
  pipe(
    getFinalEnvsFromPreRequest(
      getRESTRequest().preRequestScript,
      getCombinedEnvVariables()
    ),
    chain((envs) => {
      const effectiveRequest = getEffectiveRESTRequest(getRESTRequest(), {
        name: "Env",
        variables: combineEnvVariables(envs),
      })

      const stream = createRESTNetworkRequestStream(effectiveRequest)

      // Run Test Script when request ran successfully
      const subscription = stream
        .pipe(filter((res) => res.type === "success" || res.type === "fail"))
        .subscribe(async (res) => {
          if (res.type === "success" || res.type === "fail") {
            const runResult = await runTestScript(res.req.testScript, envs, {
              status: res.statusCode,
              body: getTestableBody(res),
              headers: res.headers,
            })()

            if (isRight(runResult)) {
              setRESTTestResults(translateToSandboxTestResults(runResult.right))

              setGlobalEnvVariables(runResult.right.envs.global)

              if (
                environmentsStore.value.selectedEnvironmentIndex.type ===
                "MY_ENV"
              ) {
                const env = getEnvironment({
                  type: "MY_ENV",
                  index: environmentsStore.value.selectedEnvironmentIndex.index,
                })
                updateEnvironment(
                  environmentsStore.value.selectedEnvironmentIndex.index,
                  {
                    name: env.name,
                    variables: runResult.right.envs.selected,
                  }
                )
              } else if (
                environmentsStore.value.selectedEnvironmentIndex.type ===
                "TEAM_ENV"
              ) {
                const env = getEnvironment({
                  type: "TEAM_ENV",
                })
                pipe(
                  updateTeamEnvironment(
                    JSON.stringify(runResult.right.envs.selected),
                    environmentsStore.value.selectedEnvironmentIndex.teamEnvID,
                    env.name
                  )
                )()
              }
            } else {
              setRESTTestResults({
                description: "",
                expectResults: [],
                tests: [],
                envDiff: {
                  global: {
                    additions: [],
                    deletions: [],
                    updations: [],
                  },
                  selected: {
                    additions: [],
                    deletions: [],
                    updations: [],
                  },
                },
                scriptError: true,
              })
            }

            subscription.unsubscribe()
          }
        })

      return right(stream)
    })
  )

const getAddedEnvVariables = (
  current: Environment["variables"],
  updated: Environment["variables"]
) => updated.filter((x) => current.findIndex((y) => y.key === x.key) === -1)

const getRemovedEnvVariables = (
  current: Environment["variables"],
  updated: Environment["variables"]
) => current.filter((x) => updated.findIndex((y) => y.key === x.key) === -1)

const getUpdatedEnvVariables = (
  current: Environment["variables"],
  updated: Environment["variables"]
) =>
  pipe(
    updated,
    A.filterMap(
      flow(
        O.of,
        O.bindTo("env"),
        O.bind("index", ({ env }) =>
          pipe(
            current.findIndex((x) => x.key === env.key),
            O.fromPredicate((x) => x !== -1)
          )
        ),
        O.chain(
          O.fromPredicate(
            ({ env, index }) => env.value !== current[index].value
          )
        ),
        O.map(({ env, index }) => ({
          ...env,
          previousValue: current[index].value,
        }))
      )
    )
  )

function translateToSandboxTestResults(
  testDesc: SandboxTestResult
): HoppTestResult {
  const translateChildTests = (child: TestDescriptor): HoppTestData => {
    return {
      description: child.descriptor,
      expectResults: child.expectResults,
      tests: child.children.map(translateChildTests),
    }
  }

  const globals = cloneDeep(getGlobalVariables())
  const env = getCurrentEnvironment()

  return {
    description: "",
    expectResults: testDesc.tests.expectResults,
    tests: testDesc.tests.children.map(translateChildTests),
    scriptError: false,
    envDiff: {
      global: {
        additions: getAddedEnvVariables(globals, testDesc.envs.global),
        deletions: getRemovedEnvVariables(globals, testDesc.envs.global),
        updations: getUpdatedEnvVariables(globals, testDesc.envs.global),
      },
      selected: {
        additions: getAddedEnvVariables(env.variables, testDesc.envs.selected),
        deletions: getRemovedEnvVariables(
          env.variables,
          testDesc.envs.selected
        ),
        updations: getUpdatedEnvVariables(
          env.variables,
          testDesc.envs.selected
        ),
      },
    },
  }
}
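Reviewer note: a minimal sketch of how runRESTRequest$ is consumed (not part of this commit). The function returns a TaskEither; awaiting the returned task yields an Either whose right side is the response stream and whose left side is the pre-request script failure.

// Illustrative sketch only
import * as E from "fp-ts/Either"

const run = async () => {
  const result = await runRESTRequest$()()
  if (E.isRight(result)) {
    result.right.subscribe((res) => {
      // res.type is one of the HoppRESTResponse states, e.g. "success" or "fail"
      console.log("response event:", res.type)
    })
  } else {
    console.error("pre-request script failed:", result.left)
  }
}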
@@ -0,0 +1,30 @@
import { getEditorLangForMimeType } from "../editorutils"

describe("getEditorLangForMimeType", () => {
  test("returns 'json' for valid JSON mimes", () => {
    expect(getEditorLangForMimeType("application/json")).toMatch("json")
    expect(getEditorLangForMimeType("application/hal+json")).toMatch("json")
    expect(getEditorLangForMimeType("application/vnd.api+json")).toMatch("json")
  })

  test("returns 'xml' for valid XML mimes", () => {
    expect(getEditorLangForMimeType("application/xml")).toMatch("xml")
  })

  test("returns 'html' for valid HTML mimes", () => {
    expect(getEditorLangForMimeType("text/html")).toMatch("html")
  })

  test("returns 'text/x-yaml' for plain text mime", () => {
    expect(getEditorLangForMimeType("text/plain")).toMatch("text/x-yaml")
  })

  test("returns 'text/x-yaml' for unimplemented mimes", () => {
    expect(getEditorLangForMimeType("image/gif")).toMatch("text/x-yaml")
  })

  test("returns 'text/x-yaml' for null/undefined mimes", () => {
    expect(getEditorLangForMimeType(null)).toMatch("text/x-yaml")
    expect(getEditorLangForMimeType(undefined)).toMatch("text/x-yaml")
  })
})
@@ -0,0 +1,34 @@
import jsonParse from "../jsonParse"

describe("jsonParse", () => {
  test("parses without errors for valid JSON", () => {
    const testJSON = JSON.stringify({
      name: "hoppscotch",
      url: "https://hoppscotch.io",
      awesome: true,
      when: 2019,
    })

    expect(() => jsonParse(testJSON)).not.toThrow()
  })

  test("throws error for invalid JSON", () => {
    const testJSON = '{ "name": hopp "url": true }'

    expect(() => jsonParse(testJSON)).toThrow()
  })

  test("thrown error has proper info fields", () => {
    expect.assertions(3)

    const testJSON = '{ "name": hopp "url": true }'

    try {
      jsonParse(testJSON)
    } catch (e) {
      expect(e).toHaveProperty("start")
      expect(e).toHaveProperty("end")
      expect(e).toHaveProperty("message")
    }
  })
})
@@ -0,0 +1,42 @@
import { getPlatformSpecialKey } from "../platformutils"

describe("getPlatformSpecialKey", () => {
  let platformGetter

  beforeEach(() => {
    platformGetter = jest.spyOn(navigator, "platform", "get")
  })

  test("returns '⌘' for Apple platforms", () => {
    platformGetter.mockReturnValue("Mac")
    expect(getPlatformSpecialKey()).toMatch("⌘")

    platformGetter.mockReturnValue("iPhone")
    expect(getPlatformSpecialKey()).toMatch("⌘")

    platformGetter.mockReturnValue("iPad")
    expect(getPlatformSpecialKey()).toMatch("⌘")

    platformGetter.mockReturnValue("iPod")
    expect(getPlatformSpecialKey()).toMatch("⌘")
  })

  test("returns 'Ctrl' for non-Apple platforms", () => {
    platformGetter.mockReturnValue("Android")
    expect(getPlatformSpecialKey()).toMatch("Ctrl")

    platformGetter.mockReturnValue("Windows")
    expect(getPlatformSpecialKey()).toMatch("Ctrl")

    platformGetter.mockReturnValue("Linux")
    expect(getPlatformSpecialKey()).toMatch("Ctrl")
  })

  test("returns 'Ctrl' for null/undefined platforms", () => {
    platformGetter.mockReturnValue(null)
    expect(getPlatformSpecialKey()).toMatch("Ctrl")

    platformGetter.mockReturnValue(undefined)
    expect(getPlatformSpecialKey()).toMatch("Ctrl")
  })
})
@@ -0,0 +1,313 @@
import runTestScriptWithVariables from "../postwomanTesting"

/**
 * @param {string} script
 * @param {number} index
 */
function getTestResult(script, index) {
  return runTestScriptWithVariables(script).testResults[index].result
}

/**
 * @param {string} script
 */
function getErrors(script) {
  return runTestScriptWithVariables(script).errors
}

describe("Error handling", () => {
  test("throws error at unknown test method", () => {
    const testScriptWithUnknownMethod = "pw.expect(1).toBeSomeUnknownMethod()"
    expect(() => {
      runTestScriptWithVariables(testScriptWithUnknownMethod)
    }).toThrow()
  })
  test("errors array is empty on a successful test", () => {
    expect(getErrors("pw.expect(1).toBe(1)")).toStrictEqual([])
  })
  test("throws error at a variable which is not declared", () => {
    expect(() => {
      runTestScriptWithVariables("someVariable")
    }).toThrow()
  })
})

describe("toBe", () => {
  test("test for numbers", () => {
    expect(getTestResult("pw.expect(1).toBe(2)", 0)).toEqual("FAIL")

    expect(getTestResult("pw.expect(1).toBe(1)", 0)).toEqual("PASS")
  })

  test("test for strings", () => {
    expect(getTestResult("pw.expect('hello').toBe('bonjour')", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('hi').toBe('hi')", 0)).toEqual("PASS")
  })

  test("test for negative assertion (.not.toBe)", () => {
    expect(getTestResult("pw.expect(1).not.toBe(1)", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect(1).not.toBe(2)", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect('world').not.toBe('planet')", 0)).toEqual(
      "PASS"
    )
    expect(getTestResult("pw.expect('world').not.toBe('world')", 0)).toEqual(
      "FAIL"
    )
  })
})

describe("toHaveProperty", () => {
  const dummyResponse = {
    id: 843,
    description: "random",
  }

  test("test for positive assertion (.toHaveProperty)", () => {
    expect(
      getTestResult(
        `pw.expect(${JSON.stringify(dummyResponse)}).toHaveProperty("id")`,
        0
      )
    ).toEqual("PASS")
    expect(
      getTestResult(`pw.expect(${dummyResponse.id}).toBe(843)`, 0)
    ).toEqual("PASS")
  })
  test("test for negative assertion (.not.toHaveProperty)", () => {
    expect(
      getTestResult(
        `pw.expect(${JSON.stringify(
          dummyResponse
        )}).not.toHaveProperty("type")`,
        0
      )
    ).toEqual("PASS")
    expect(
      getTestResult(
        `pw.expect(${JSON.stringify(dummyResponse)}).toHaveProperty("type")`,
        0
      )
    ).toEqual("FAIL")
  })
})

describe("toBeLevel2xx", () => {
  test("test for numbers", () => {
    expect(getTestResult("pw.expect(200).toBeLevel2xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect(200).not.toBeLevel2xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(300).toBeLevel2xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect(300).not.toBeLevel2xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("test for strings", () => {
    expect(getTestResult("pw.expect('200').toBeLevel2xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect('200').not.toBeLevel2xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('300').toBeLevel2xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect('300').not.toBeLevel2xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("failed to parse to integer", () => {
    expect(getTestResult("pw.expect(undefined).toBeLevel2xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(null).toBeLevel2xx()", 0)).toEqual("FAIL")
    expect(() => {
      runTestScriptWithVariables("pw.expect(Symbol('test')).toBeLevel2xx()")
    }).toThrow()
  })
})

describe("toBeLevel3xx()", () => {
  test("test for numbers", () => {
    expect(getTestResult("pw.expect(300).toBeLevel3xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect(300).not.toBeLevel3xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(400).toBeLevel3xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect(400).not.toBeLevel3xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("test for strings", () => {
    expect(getTestResult("pw.expect('300').toBeLevel3xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect('300').not.toBeLevel3xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('400').toBeLevel3xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect('400').not.toBeLevel3xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("failed to parse to integer", () => {
    expect(getTestResult("pw.expect(undefined).toBeLevel3xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(null).toBeLevel3xx()", 0)).toEqual("FAIL")
    expect(() => {
      runTestScriptWithVariables("pw.expect(Symbol('test')).toBeLevel3xx()")
    }).toThrow()
  })
})

describe("toBeLevel4xx()", () => {
  test("test for numbers", () => {
    expect(getTestResult("pw.expect(400).toBeLevel4xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect(400).not.toBeLevel4xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(500).toBeLevel4xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect(500).not.toBeLevel4xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("test for strings", () => {
    expect(getTestResult("pw.expect('400').toBeLevel4xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect('400').not.toBeLevel4xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('500').toBeLevel4xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect('500').not.toBeLevel4xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("failed to parse to integer", () => {
    expect(getTestResult("pw.expect(undefined).toBeLevel4xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(null).toBeLevel4xx()", 0)).toEqual("FAIL")
    expect(() => {
      runTestScriptWithVariables("pw.expect(Symbol('test')).toBeLevel4xx()")
    }).toThrow()
  })
})

describe("toBeLevel5xx()", () => {
  test("test for numbers", () => {
    expect(getTestResult("pw.expect(500).toBeLevel5xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect(500).not.toBeLevel5xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(200).toBeLevel5xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect(200).not.toBeLevel5xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("test for strings", () => {
    expect(getTestResult("pw.expect('500').toBeLevel5xx()", 0)).toEqual("PASS")
    expect(getTestResult("pw.expect('500').not.toBeLevel5xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('200').toBeLevel5xx()", 0)).toEqual("FAIL")
    expect(getTestResult("pw.expect('200').not.toBeLevel5xx()", 0)).toEqual(
      "PASS"
    )
  })
  test("failed to parse to integer", () => {
    expect(getTestResult("pw.expect(undefined).toBeLevel5xx()", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(null).toBeLevel5xx()", 0)).toEqual("FAIL")
    expect(() => {
      runTestScriptWithVariables("pw.expect(Symbol('test')).toBeLevel5xx()")
    }).toThrow()
  })
})

describe("toHaveLength()", () => {
  test("test for strings", () => {
    expect(getTestResult("pw.expect('word').toHaveLength(4)", 0)).toEqual(
      "PASS"
    )
    expect(getTestResult("pw.expect('word').toHaveLength(5)", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('word').not.toHaveLength(4)", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect('word').not.toHaveLength(5)", 0)).toEqual(
      "PASS"
    )
  })
  test("test for arrays", () => {
    const fruits =
      "['apples', 'bananas', 'oranges', 'grapes', 'strawberries', 'cherries']"
    expect(getTestResult(`pw.expect(${fruits}).toHaveLength(6)`, 0)).toEqual(
      "PASS"
    )
    expect(getTestResult(`pw.expect(${fruits}).toHaveLength(7)`, 0)).toEqual(
      "FAIL"
    )
    expect(
      getTestResult(`pw.expect(${fruits}).not.toHaveLength(6)`, 0)
    ).toEqual("FAIL")
    expect(
      getTestResult(`pw.expect(${fruits}).not.toHaveLength(7)`, 0)
    ).toEqual("PASS")
  })
})

describe("toBeType()", () => {
  test("test for positive assertion", () => {
    expect(getTestResult("pw.expect('random').toBeType('string')", 0)).toEqual(
      "PASS"
    )
    expect(getTestResult("pw.expect(true).toBeType('boolean')", 0)).toEqual(
      "PASS"
    )
    expect(getTestResult("pw.expect(5).toBeType('number')", 0)).toEqual("PASS")
    expect(
      getTestResult("pw.expect(new Date()).toBeType('object')", 0)
    ).toEqual("PASS")
    expect(
      getTestResult("pw.expect(undefined).toBeType('undefined')", 0)
    ).toEqual("PASS")
    expect(
      getTestResult("pw.expect(BigInt(123)).toBeType('bigint')", 0)
    ).toEqual("PASS")
    expect(
      getTestResult("pw.expect(Symbol('test')).toBeType('symbol')", 0)
    ).toEqual("PASS")
    expect(
      getTestResult("pw.expect(function() {}).toBeType('function')", 0)
    ).toEqual("PASS")
  })
  test("test for negative assertion", () => {
    expect(
      getTestResult("pw.expect('random').not.toBeType('string')", 0)
    ).toEqual("FAIL")
    expect(getTestResult("pw.expect(true).not.toBeType('boolean')", 0)).toEqual(
      "FAIL"
    )
    expect(getTestResult("pw.expect(5).not.toBeType('number')", 0)).toEqual(
      "FAIL"
    )
    expect(
      getTestResult("pw.expect(new Date()).not.toBeType('object')", 0)
    ).toEqual("FAIL")
    expect(
      getTestResult("pw.expect(undefined).not.toBeType('undefined')", 0)
    ).toEqual("FAIL")
    expect(
      getTestResult("pw.expect(BigInt(123)).not.toBeType('bigint')", 0)
    ).toEqual("FAIL")
    expect(
      getTestResult("pw.expect(Symbol('test')).not.toBeType('symbol')", 0)
    ).toEqual("FAIL")
    expect(
      getTestResult("pw.expect(function() {}).not.toBeType('function')", 0)
    ).toEqual("FAIL")
  })
  test("unexpected type", () => {
    expect(getTestResult("pw.expect('random').toBeType('unknown')", 0)).toEqual(
      "FAIL"
    )
  })
})
packages/hoppscotch-common/src/helpers/actions.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
/* An `action` is a unique verb that is associated with a certain thing that can be done on Hoppscotch.
 * For example, sending a request.
 */

import { onBeforeUnmount, onMounted } from "vue"
import { BehaviorSubject } from "rxjs"

export type HoppAction =
  | "request.send-cancel" // Send/Cancel a Hoppscotch Request
  | "request.reset" // Clear request data
  | "request.copy-link" // Copy Request Link
  | "request.save" // Save to Collections
  | "request.save-as" // Save As
  | "request.method.next" // Select Next Method
  | "request.method.prev" // Select Previous Method
  | "request.method.get" // Select GET Method
  | "request.method.head" // Select HEAD Method
  | "request.method.post" // Select POST Method
  | "request.method.put" // Select PUT Method
  | "request.method.delete" // Select DELETE Method
  | "flyouts.keybinds.toggle" // Shows the keybinds flyout
  | "modals.search.toggle" // Shows the search modal
  | "modals.support.toggle" // Shows the support modal
  | "modals.share.toggle" // Shows the share modal
  | "modals.my.environment.edit" // Edit current personal environment
  | "modals.team.environment.edit" // Edit current team environment
  | "navigation.jump.rest" // Jump to REST page
  | "navigation.jump.graphql" // Jump to GraphQL page
  | "navigation.jump.realtime" // Jump to realtime page
  | "navigation.jump.documentation" // Jump to documentation page
  | "navigation.jump.settings" // Jump to settings page
  | "navigation.jump.profile" // Jump to profile page
  | "settings.theme.system" // Use system theme
  | "settings.theme.light" // Use light theme
  | "settings.theme.dark" // Use dark theme
  | "settings.theme.black" // Use black theme
  | "response.preview.toggle" // Toggle response preview
  | "response.file.download" // Download response as file
  | "response.copy" // Copy response to clipboard

/**
 * Defines the arguments, if any, that a given action requires on invocation;
 * these are passed along to the action handlers.
 *
 * This type is supposed to be an object with the key being one of the actions mentioned above.
 * The value of the key can be anything.
 * If an action has no argument, you do not need to add it to this type.
 *
 * NOTE: We can't enforce type checks to make sure the key is a HoppAction; you
 * will know you got something wrong if there is a type error in this file.
 */
type HoppActionArgs = {
  "modals.my.environment.edit": {
    envName: string
    variableName: string
  }
  "modals.team.environment.edit": {
    envName: string
    variableName: string
  }
}

/**
 * HoppActions which require arguments for their invocation
 */
type HoppActionWithArgs = keyof HoppActionArgs

/**
 * HoppActions which do not require arguments for their invocation
 */
export type HoppActionWithNoArgs = Exclude<HoppAction, HoppActionWithArgs>

/**
 * Resolves the argument type for a given HoppAction
 */
type ArgOfHoppAction<A extends HoppAction> = A extends HoppActionWithArgs
  ? HoppActionArgs[A]
  : undefined

/**
 * Resolves the action function for a given HoppAction, used by action handler function defs
 */
type ActionFunc<A extends HoppAction> = A extends HoppActionWithArgs
  ? (arg: ArgOfHoppAction<A>) => void
  : () => void

type BoundActionList = {
  // eslint-disable-next-line no-unused-vars
  [A in HoppAction]?: Array<ActionFunc<A>>
}

const boundActions: BoundActionList = {}

export const activeActions$ = new BehaviorSubject<HoppAction[]>([])

export function bindAction<A extends HoppAction>(
  action: A,
  handler: ActionFunc<A>
) {
  if (boundActions[action]) {
    boundActions[action]?.push(handler)
  } else {
    // 'any' assertion because TypeScript doesn't seem to be able to figure out the links.
    boundActions[action] = [handler] as any
  }

  activeActions$.next(Object.keys(boundActions) as HoppAction[])
}

type InvokeActionFunc = {
  (action: HoppActionWithNoArgs, args?: undefined): void
  <A extends HoppActionWithArgs>(action: A, args: ArgOfHoppAction<A>): void
}

/**
 * Invokes an action, triggering action handlers if any are registered.
 * The second parameter is optional if your action requires no args.
 * @param action The action to fire
 * @param args The argument passed to the action handlers. Optional if the action requires no args.
 */
export const invokeAction: InvokeActionFunc = <A extends HoppAction>(
  action: A,
  args: ArgOfHoppAction<A>
) => {
  boundActions[action]?.forEach((handler) => handler(args!))
}

export function unbindAction<A extends HoppAction>(
  action: A,
  handler: ActionFunc<A>
) {
  // 'any' assertion because TypeScript doesn't seem to be able to figure out the links.
  boundActions[action] = boundActions[action]?.filter(
    (x) => x !== handler
  ) as any

  if (boundActions[action]?.length === 0) {
    delete boundActions[action]
  }

  activeActions$.next(Object.keys(boundActions) as HoppAction[])
}

export function defineActionHandler<A extends HoppAction>(
  action: A,
  handler: ActionFunc<A>
) {
  onMounted(() => {
    bindAction(action, handler)
  })

  onBeforeUnmount(() => {
    unbindAction(action, handler)
  })
}
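Reviewer note: a minimal sketch of the action API above as seen from a component (not part of this commit; the env/variable names are assumed example values).

// Illustrative sketch only — inside a component's setup(), the handler is bound
// on mount and unbound before unmount via defineActionHandler.
defineActionHandler("modals.my.environment.edit", ({ envName, variableName }) => {
  console.log(`open editor for ${variableName} in ${envName}`)
})

// Elsewhere, any code can fire the action; the argument shape comes from HoppActionArgs.
invokeAction("modals.my.environment.edit", {
  envName: "Production", // assumed example values
  variableName: "BASE_URL",
})

// Actions without arguments are invoked with just the name.
invokeAction("request.send-cancel")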
packages/hoppscotch-common/src/helpers/backend/GQLClient.ts (new file, 388 lines)
@@ -0,0 +1,388 @@
import { ref } from "vue"
import {
  createClient,
  TypedDocumentNode,
  dedupExchange,
  OperationContext,
  fetchExchange,
  makeOperation,
  createRequest,
  subscriptionExchange,
  errorExchange,
  CombinedError,
  Operation,
  OperationResult,
} from "@urql/core"
import { authExchange } from "@urql/exchange-auth"
import { devtoolsExchange } from "@urql/devtools"
import { SubscriptionClient } from "subscriptions-transport-ws"
import * as E from "fp-ts/Either"
import * as TE from "fp-ts/TaskEither"
import { pipe, constVoid, flow } from "fp-ts/function"
import { subscribe, pipe as wonkaPipe } from "wonka"
import { filter, map, Subject } from "rxjs"
import {
  authIdToken$,
  getAuthIDToken,
  probableUser$,
  waitProbableLoginToConfirm,
} from "~/helpers/fb/auth"

// TODO: Implement caching

const BACKEND_GQL_URL =
  import.meta.env.VITE_BACKEND_GQL_URL ?? "https://api.hoppscotch.io/graphql"
const BACKEND_WS_URL =
  import.meta.env.VITE_BACKEND_WS_URL ?? "wss://api.hoppscotch.io/graphql"

type GQLOpType = "query" | "mutation" | "subscription"

/**
 * A type that defines error events that are possible during backend operations on the GQLClient
 */
export type GQLClientErrorEvent =
  | { type: "SUBSCRIPTION_CONN_CALLBACK_ERR_REPORT"; errors: Error[] }
  | { type: "CLIENT_REPORTED_ERROR"; error: CombinedError; op: Operation }
  | {
      type: "GQL_CLIENT_REPORTED_ERROR"
      opType: GQLOpType
      opResult: OperationResult
    }

/**
 * A stream of the errors that occur during GQLClient operations.
 * Exposed to be subscribed to by systems like Sentry for error reporting
 */
export const gqlClientError$ = new Subject<GQLClientErrorEvent>()

const createSubscriptionClient = () => {
  return new SubscriptionClient(BACKEND_WS_URL, {
    reconnect: true,
    connectionParams: () => {
      return {
        authorization: `Bearer ${authIdToken$.value}`,
      }
    },
    connectionCallback(error) {
      if (error?.length > 0) {
        gqlClientError$.next({
          type: "SUBSCRIPTION_CONN_CALLBACK_ERR_REPORT",
          errors: error,
        })
      }
    },
  })
}

const createHoppClient = () => {
  const exchanges = [
    devtoolsExchange,
    dedupExchange,
    authExchange({
      addAuthToOperation({ authState, operation }) {
        if (!authState || !authState.authToken) {
          return operation
        }

        const fetchOptions =
          typeof operation.context.fetchOptions === "function"
            ? operation.context.fetchOptions()
            : operation.context.fetchOptions || {}

        return makeOperation(operation.kind, operation, {
          ...operation.context,
          fetchOptions: {
            ...fetchOptions,
            headers: {
              ...fetchOptions.headers,
              Authorization: `Bearer ${authState.authToken}`,
            },
          },
        })
      },
      willAuthError({ authState }) {
        return !authState || !authState.authToken
      },
      getAuth: async () => {
        if (!probableUser$.value) return { authToken: null }

        await waitProbableLoginToConfirm()

        return {
          authToken: getAuthIDToken(),
        }
      },
    }),
    fetchExchange,
    errorExchange({
      onError(error, op) {
        gqlClientError$.next({
          type: "CLIENT_REPORTED_ERROR",
          error,
          op,
        })
      },
    }),
  ]

  if (subscriptionClient) {
    exchanges.push(
      subscriptionExchange({
        forwardSubscription: (operation) => {
          return subscriptionClient!.request(operation)
        },
      })
    )
  }

  return createClient({
    url: BACKEND_GQL_URL,
    exchanges,
  })
}

let subscriptionClient: SubscriptionClient | null
export const client = ref(createHoppClient())

authIdToken$.subscribe((idToken) => {
  // triggering reconnect by closing the websocket client
  if (idToken && subscriptionClient) {
    subscriptionClient?.client?.close()
  }

  // creating new subscription
  if (idToken && !subscriptionClient) {
    subscriptionClient = createSubscriptionClient()
  }

  // closing existing subscription client.
  if (!idToken && subscriptionClient) {
    subscriptionClient.close()
    subscriptionClient = null
  }

  client.value = createHoppClient()
})

type RunQueryOptions<T = any, V = object> = {
  query: TypedDocumentNode<T, V>
  variables?: V
}

/**
 * A wrapper type for defining errors possible in a GQL operation
 */
export type GQLError<T extends string> =
  | {
      type: "network_error"
      error: Error
    }
  | {
      type: "gql_error"
      error: T
    }

export const runGQLQuery = <DocType, DocVarType, DocErrorType extends string>(
  args: RunQueryOptions<DocType, DocVarType>
): Promise<E.Either<GQLError<DocErrorType>, DocType>> => {
  const request = createRequest<DocType, DocVarType>(args.query, args.variables)
  const source = client.value.executeQuery(request, {
    requestPolicy: "network-only",
  })

  return new Promise((resolve) => {
    const sub = wonkaPipe(
      source,
      subscribe((res) => {
        if (sub) {
          sub.unsubscribe()
        }

        pipe(
          // The target
          res.data as DocType | undefined,
          // Define what happens if data does not exist (it is an error)
          E.fromNullable(
            pipe(
              // Take the network error value
              res.error?.networkError,
              // If it is null, set the left to the generic error name
              E.fromNullable(res.error?.message),
              E.match(
                // The left case (network error was null)
                (gqlErr) => {
                  if (res.error) {
                    gqlClientError$.next({
                      type: "GQL_CLIENT_REPORTED_ERROR",
                      opType: "query",
                      opResult: res,
                    })
                  }

                  return <GQLError<DocErrorType>>{
                    type: "gql_error",
                    error: parseGQLErrorString(gqlErr ?? "") as DocErrorType,
                  }
                },
                // The right case (it was a network error)
                (networkErr) =>
                  <GQLError<DocErrorType>>{
                    type: "network_error",
                    error: networkErr,
                  }
              )
            )
          ),
          resolve
        )
      })
    )
  })
}

// TODO: The subscription system seems to be firing multiple updates for certain subscriptions.
// Make sure to handle cases if the subscription fires with the same update multiple times
export const runGQLSubscription = <
  DocType,
  DocVarType,
  DocErrorType extends string
>(
  args: RunQueryOptions<DocType, DocVarType>
) => {
  const result$ = new Subject<E.Either<GQLError<DocErrorType>, DocType>>()

  const source = client.value.executeSubscription(
    createRequest(args.query, args.variables)
  )

  const sub = wonkaPipe(
    source,
    subscribe((res) => {
      result$.next(
        pipe(
          // The target
          res.data as DocType | undefined,
          // Define what happens if data does not exist (it is an error)
          E.fromNullable(
            pipe(
              // Take the network error value
              res.error?.networkError,
              // If it is null, set the left to the generic error name
              E.fromNullable(res.error?.message),
              E.match(
                // The left case (network error was null)
                (gqlErr) => {
                  if (res.error) {
                    gqlClientError$.next({
                      type: "GQL_CLIENT_REPORTED_ERROR",
                      opType: "subscription",
                      opResult: res,
                    })
                  }

                  return <GQLError<DocErrorType>>{
                    type: "gql_error",
                    error: parseGQLErrorString(gqlErr ?? "") as DocErrorType,
                  }
                },
                // The right case (it was a network error)
                (networkErr) =>
                  <GQLError<DocErrorType>>{
                    type: "network_error",
                    error: networkErr,
                  }
              )
            )
          )
        )
      )
    })
  )

  // Returns the stream and a subscription handle to unsub
  return [result$, sub] as const
}

/**
 * Same as `runGQLSubscription` but stops the subscription silently
 * if there is an authentication error because the user is logged out
 */
export const runAuthOnlyGQLSubscription = flow(
  runGQLSubscription,
  ([result$, sub]) => {
    const updatedResult$ = result$.pipe(
      map((res) => {
        if (
          E.isLeft(res) &&
          res.left.type === "gql_error" &&
          res.left.error === "auth/fail"
        ) {
          sub.unsubscribe()
          return null
        } else return res
      }),
      filter((res): res is Exclude<typeof res, null> => res !== null)
    )

    return [updatedResult$, sub] as const
  }
)

export const parseGQLErrorString = (s: string) =>
  s.startsWith("[GraphQL] ") ? s.split("[GraphQL] ")[1] : s

export const runMutation = <
  DocType,
  DocVariables extends object | undefined,
  DocErrors extends string
>(
  mutation: TypedDocumentNode<DocType, DocVariables>,
  variables?: DocVariables,
  additionalConfig?: Partial<OperationContext>
): TE.TaskEither<GQLError<DocErrors>, DocType> =>
  pipe(
    TE.tryCatch(
      () =>
        client.value
          .mutation(mutation, variables, {
            requestPolicy: "cache-and-network",
            ...additionalConfig,
          })
          .toPromise(),
      () => constVoid() as never // The mutation function can never fail, so this will never be called ;)
    ),
    TE.chainEitherK((result) =>
      pipe(
        result.data,
        E.fromNullable(
          // Result is null
          pipe(
            result.error?.networkError,
            E.fromNullable(result.error?.message),
            E.match(
              // The left case (network error was null)
              (gqlErr) => {
                if (result.error) {
                  gqlClientError$.next({
                    type: "GQL_CLIENT_REPORTED_ERROR",
                    opType: "mutation",
                    opResult: result,
                  })
                }

                return <GQLError<DocErrors>>{
                  type: "gql_error",
                  error: parseGQLErrorString(gqlErr ?? ""),
                }
              },
              // The right case (it was a network error)
              (networkErr) =>
                <GQLError<DocErrors>>{
                  type: "network_error",
                  error: networkErr,
                }
            )
          )
        )
      )
    )
  )
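Reviewer note: a minimal sketch of calling runGQLQuery (not part of this commit). Callers pass a TypedDocumentNode, for example one generated from the .graphql operation files added below; `GetMyTeamsDocument` is an assumed generated artifact name, not defined in this diff. `pipe` and `E` are the fp-ts imports already used in this file.

// Illustrative sketch only
const teams = await runGQLQuery({
  query: GetMyTeamsDocument, // assumed generated TypedDocumentNode
  variables: { cursor: undefined },
})

pipe(
  teams,
  E.match(
    (err) =>
      err.type === "network_error"
        ? console.error("network failure:", err.error)
        : console.error("backend error code:", err.error),
    (data) => console.log("fetched teams:", data)
  )
)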
@@ -0,0 +1,3 @@
export type UserQueryError = "user/not_found"

export type MyTeamsQueryError = "ea/not_invite_or_admin"
@@ -0,0 +1,12 @@
mutation AcceptTeamInvitation($inviteID: ID!) {
  acceptTeamInvitation(inviteID: $inviteID) {
    membershipID
    role
    user {
      uid
      displayName
      photoURL
      email
    }
  }
}
@@ -0,0 +1,11 @@
mutation CreateChildCollection(
  $childTitle: String!
  $collectionID: ID!
) {
  createChildCollection(
    childTitle: $childTitle
    collectionID: $collectionID
  ) {
    id
  }
}
@@ -0,0 +1,8 @@
mutation CreateDuplicateEnvironment($id: ID!) {
  createDuplicateEnvironment(id: $id) {
    id
    teamID
    name
    variables
  }
}
@@ -0,0 +1,5 @@
mutation CreateNewRootCollection($title: String!, $teamID: ID!) {
  createRootCollection(title: $title, teamID: $teamID) {
    id
  }
}
@@ -0,0 +1,12 @@
mutation CreateRequestInCollection($data: CreateTeamRequestInput!, $collectionID: ID!) {
  createRequestInCollection(data: $data, collectionID: $collectionID) {
    id
    collection {
      id
      team {
        id
        name
      }
    }
  }
}
@@ -0,0 +1,6 @@
mutation CreateShortcode($request: String!) {
  createShortcode(request: $request) {
    id
    request
  }
}
@@ -0,0 +1,20 @@
mutation CreateTeam($name: String!) {
  createTeam(name: $name) {
    id
    name
    members {
      membershipID
      role
      user {
        uid
        displayName
        email
        photoURL
      }
    }
    myRole
    ownersCount
    editorsCount
    viewersCount
  }
}
@@ -0,0 +1,7 @@
mutation CreateTeamEnvironment($variables: String!, $teamID: ID!, $name: String!) {
  createTeamEnvironment(variables: $variables, teamID: $teamID, name: $name) {
    variables
    name
    teamID
  }
}
@@ -0,0 +1,9 @@
mutation CreateTeamInvitation($inviteeEmail: String!, $inviteeRole: TeamMemberRole!, $teamID: ID!) {
  createTeamInvitation(inviteeRole: $inviteeRole, inviteeEmail: $inviteeEmail, teamID: $teamID) {
    id
    teamID
    creatorUid
    inviteeEmail
    inviteeRole
  }
}
@@ -0,0 +1,3 @@
mutation DeleteCollection($collectionID: ID!) {
  deleteCollection(collectionID: $collectionID)
}
@@ -0,0 +1,3 @@
mutation DeleteRequest($requestID: ID!) {
  deleteRequest(requestID: $requestID)
}
@@ -0,0 +1,3 @@
mutation DeleteShortcode($code: ID!) {
  revokeShortcode(code: $code)
}
@@ -0,0 +1,3 @@
mutation DeleteTeam($teamID: ID!) {
  deleteTeam(teamID: $teamID)
}
@@ -0,0 +1,3 @@
mutation DeleteTeamEnvironment($id: ID!) {
  deleteTeamEnvironment(id: $id)
}
@@ -0,0 +1,3 @@
mutation importFromJSON($jsonString: String!, $teamID: ID!) {
  importCollectionsFromJSON(jsonString: $jsonString, teamID: $teamID)
}
@@ -0,0 +1,3 @@
mutation LeaveTeam($teamID: ID!) {
  leaveTeam(teamID: $teamID)
}
@@ -0,0 +1,5 @@
mutation MoveRESTTeamRequest($requestID: ID!, $collectionID: ID!) {
  moveRequest(requestID: $requestID, destCollID: $collectionID) {
    id
  }
}
@@ -0,0 +1,3 @@
mutation RemoveTeamMember($userUid: ID!, $teamID: ID!) {
  removeTeamMember(userUid: $userUid, teamID: $teamID)
}
@@ -0,0 +1,5 @@
mutation RenameCollection($newTitle: String!, $collectionID: ID!) {
  renameCollection(newTitle: $newTitle, collectionID: $collectionID) {
    id
  }
}
@@ -0,0 +1,13 @@
mutation RenameTeam($newName: String!, $teamID: ID!) {
  renameTeam(newName: $newName, teamID: $teamID) {
    id
    name
    teamMembers {
      membershipID
      user {
        uid
      }
      role
    }
  }
}
@@ -0,0 +1,3 @@
mutation RevokeTeamInvitation($inviteID: ID!) {
  revokeTeamInvitation(inviteID: $inviteID)
}
@@ -0,0 +1,6 @@
mutation UpdateRequest($data: UpdateTeamRequestInput!, $requestID: ID!) {
  updateRequest(data: $data, requestID: $requestID) {
    id
    title
  }
}
@@ -0,0 +1,7 @@
mutation UpdateTeamEnvironment($variables: String!, $id: ID!, $name: String!) {
  updateTeamEnvironment(variables: $variables, id: $id, name: $name) {
    variables
    name
    id
  }
}
@@ -0,0 +1,14 @@
mutation UpdateTeamMemberRole(
  $newRole: TeamMemberRole!,
  $userUid: ID!,
  $teamID: ID!
) {
  updateTeamMemberRole(
    newRole: $newRole
    userUid: $userUid
    teamID: $teamID
  ) {
    membershipID
    role
  }
}
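Reviewer note: a minimal sketch of how these mutation documents are typically consumed together with the runMutation wrapper from GQLClient.ts (not part of this commit). `RenameTeamDocument` is an assumed generated TypedDocumentNode name, and the argument values are examples.

// Illustrative sketch only
const renameResult = await runMutation(RenameTeamDocument, {
  newName: "Platform Team", // assumed example values
  teamID: "team-id-123",
})()

if (E.isLeft(renameResult)) {
  console.error("rename failed:", renameResult.left)
}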
@@ -0,0 +1,3 @@
|
||||
query ExportAsJSON($teamID: ID!) {
|
||||
exportCollectionsToJSON(teamID: $teamID)
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
query GetCollectionChildren($collectionID: ID!, $cursor: String) {
|
||||
collection(collectionID: $collectionID) {
|
||||
children(cursor: $cursor) {
|
||||
id
|
||||
title
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
query GetCollectionChildrenIDs($collectionID: ID!, $cursor: String) {
|
||||
collection(collectionID: $collectionID) {
|
||||
children(cursor: $cursor) {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
query GetCollectionRequests($collectionID: ID!, $cursor: ID) {
|
||||
requestsInCollection(collectionID: $collectionID, cursor: $cursor) {
|
||||
id
|
||||
title
|
||||
request
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
query GetCollectionTitle($collectionID: ID!) {
|
||||
collection(collectionID: $collectionID) {
|
||||
title
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,16 @@
query GetInviteDetails($inviteID: ID!) {
  teamInvitation(inviteID: $inviteID) {
    id
    inviteeEmail
    inviteeRole
    team {
      id
      name
    }
    creator {
      uid
      displayName
      email
    }
  }
}
@@ -0,0 +1,7 @@
query GetUserShortcodes($cursor: ID) {
  myShortcodes(cursor: $cursor) {
    id
    request
    createdOn
  }
}
@@ -0,0 +1,18 @@
query GetMyTeams($cursor: ID) {
  myTeams(cursor: $cursor) {
    id
    name
    myRole
    ownersCount
    teamMembers {
      membershipID
      user {
        photoURL
        displayName
        email
        uid
      }
      role
    }
  }
}
@@ -0,0 +1,14 @@
query GetTeam($teamID: ID!) {
  team(teamID: $teamID) {
    id
    name
    teamMembers {
      membershipID
      user {
        uid
        email
      }
      role
    }
  }
}
@@ -0,0 +1,10 @@
query GetTeamEnvironments($teamID: ID!) {
  team(teamID: $teamID) {
    teamEnvironments {
      id
      name
      variables
      teamID
    }
  }
}
@@ -0,0 +1,12 @@
query GetTeamMembers($teamID: ID!, $cursor: ID) {
  team(teamID: $teamID) {
    members(cursor: $cursor) {
      membershipID
      user {
        uid
        email
      }
      role
    }
  }
}
@@ -0,0 +1,8 @@
query GetUserInfo {
  me {
    uid
    displayName
    email
    photoURL
  }
}
@@ -0,0 +1,7 @@
query Me {
  me {
    uid
    displayName
    photoURL
  }
}
@@ -0,0 +1,6 @@
query ResolveShortcode($code: ID!) {
  shortcode(code: $code) {
    id
    request
  }
}
@@ -0,0 +1,6 @@
query RootCollectionsOfTeam($teamID: ID!, $cursor: ID) {
  rootCollectionsOfTeam(teamID: $teamID, cursor: $cursor) {
    id
    title
  }
}
@@ -0,0 +1,10 @@
query GetPendingInvites($teamID: ID!) {
  team(teamID: $teamID) {
    id
    teamInvitations {
      inviteeRole
      inviteeEmail
      id
    }
  }
}
@@ -0,0 +1,7 @@
subscription ShortcodeCreated {
  myShortcodesCreated {
    id
    request
    createdOn
  }
}
@@ -0,0 +1,5 @@
subscription ShortcodeDeleted {
  myShortcodesRevoked {
    id
  }
}
@@ -0,0 +1,9 @@
subscription TeamCollectionAdded($teamID: ID!) {
  teamCollectionAdded(teamID: $teamID) {
    id
    title
    parent {
      id
    }
  }
}
@@ -0,0 +1,3 @@
subscription TeamCollectionRemoved($teamID: ID!) {
  teamCollectionRemoved(teamID: $teamID)
}
@@ -0,0 +1,9 @@
subscription TeamCollectionUpdated($teamID: ID!) {
  teamCollectionUpdated(teamID: $teamID) {
    id
    title
    parent {
      id
    }
  }
}
@@ -0,0 +1,8 @@
subscription TeamEnvironmentCreated($teamID: ID!) {
  teamEnvironmentCreated(teamID: $teamID) {
    id
    teamID
    name
    variables
  }
}
@@ -0,0 +1,5 @@
subscription TeamEnvironmentDeleted($teamID: ID!) {
  teamEnvironmentDeleted(teamID: $teamID) {
    id
  }
}
@@ -0,0 +1,8 @@
subscription TeamEnvironmentUpdated($teamID: ID!) {
  teamEnvironmentUpdated(teamID: $teamID) {
    id
    teamID
    name
    variables
  }
}
@@ -0,0 +1,5 @@
subscription TeamInvitationAdded($teamID: ID!) {
  teamInvitationAdded(teamID: $teamID) {
    id
  }
}
@@ -0,0 +1,3 @@
subscription TeamInvitationRemoved($teamID: ID!) {
  teamInvitationRemoved(teamID: $teamID)
}
@@ -0,0 +1,10 @@
subscription TeamMemberAdded($teamID: ID!) {
  teamMemberAdded(teamID: $teamID) {
    membershipID
    user {
      uid
      email
    }
    role
  }
}
@@ -0,0 +1,3 @@
subscription TeamMemberRemoved($teamID: ID!) {
  teamMemberRemoved(teamID: $teamID)
}
@@ -0,0 +1,10 @@
subscription TeamMemberUpdated($teamID: ID!) {
  teamMemberUpdated(teamID: $teamID) {
    membershipID
    user {
      uid
      email
    }
    role
  }
}
@@ -0,0 +1,8 @@
subscription TeamRequestAdded($teamID: ID!) {
  teamRequestAdded(teamID: $teamID) {
    id
    collectionID
    request
    title
  }
}
@@ -0,0 +1,3 @@
subscription TeamRequestDeleted($teamID: ID!) {
  teamRequestDeleted(teamID: $teamID)
}
@@ -0,0 +1,8 @@
subscription TeamRequestUpdated($teamID: ID!) {
  teamRequestUpdated(teamID: $teamID) {
    id
    collectionID
    request
    title
  }
}
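The operations above are compiled into typed documents (the <OperationName>Document values imported from "./graphql" in the backend helpers that follow). A minimal sketch of running one of them through the same runGQLQuery helper used below; the RootCollectionsOfTeamDocument name and the import paths are assumptions based on that naming convention, not confirmed exports:

import * as E from "fp-ts/Either"
import { runGQLQuery } from "./backend/GQLClient"
import { RootCollectionsOfTeamDocument } from "./backend/graphql"

// Fetch the first page of root collections for a team (illustrative only).
const listRootCollections = async (teamID: string) => {
  const result = await runGQLQuery({
    query: RootCollectionsOfTeamDocument,
    variables: { teamID, cursor: undefined },
  })

  // runGQLQuery resolves to an Either: left is a GQLError, right is the data.
  return E.isLeft(result) ? [] : result.right.rootCollectionsOfTeam
}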
127
packages/hoppscotch-common/src/helpers/backend/helpers.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import * as A from "fp-ts/Array"
|
||||
import * as E from "fp-ts/Either"
|
||||
import * as TE from "fp-ts/TaskEither"
|
||||
import { pipe, flow } from "fp-ts/function"
|
||||
import {
|
||||
HoppCollection,
|
||||
HoppRESTRequest,
|
||||
makeCollection,
|
||||
translateToNewRequest,
|
||||
} from "@hoppscotch/data"
|
||||
import { TeamCollection } from "../teams/TeamCollection"
|
||||
import { TeamRequest } from "../teams/TeamRequest"
|
||||
import { GQLError, runGQLQuery } from "./GQLClient"
|
||||
import {
|
||||
GetCollectionChildrenIDsDocument,
|
||||
GetCollectionRequestsDocument,
|
||||
GetCollectionTitleDocument,
|
||||
} from "./graphql"
|
||||
|
||||
export const BACKEND_PAGE_SIZE = 10
|
||||
|
||||
const getCollectionChildrenIDs = async (collID: string) => {
|
||||
const collsList: string[] = []
|
||||
|
||||
while (true) {
|
||||
const data = await runGQLQuery({
|
||||
query: GetCollectionChildrenIDsDocument,
|
||||
variables: {
|
||||
collectionID: collID,
|
||||
cursor:
|
||||
collsList.length > 0 ? collsList[collsList.length - 1] : undefined,
|
||||
},
|
||||
})
|
||||
|
||||
if (E.isLeft(data)) {
|
||||
return E.left(data.left)
|
||||
}
|
||||
|
||||
collsList.push(...data.right.collection!.children.map((x) => x.id))
|
||||
|
||||
if (data.right.collection!.children.length !== BACKEND_PAGE_SIZE) break
|
||||
}
|
||||
|
||||
return E.right(collsList)
|
||||
}
|
||||
|
||||
const getCollectionRequests = async (collID: string) => {
|
||||
const reqList: TeamRequest[] = []
|
||||
|
||||
while (true) {
|
||||
const data = await runGQLQuery({
|
||||
query: GetCollectionRequestsDocument,
|
||||
variables: {
|
||||
collectionID: collID,
|
||||
cursor: reqList.length > 0 ? reqList[reqList.length - 1].id : undefined,
|
||||
},
|
||||
})
|
||||
|
||||
if (E.isLeft(data)) {
|
||||
return E.left(data.left)
|
||||
}
|
||||
|
||||
reqList.push(
|
||||
...data.right.requestsInCollection.map(
|
||||
(x) =>
|
||||
<TeamRequest>{
|
||||
id: x.id,
|
||||
request: translateToNewRequest(JSON.parse(x.request)),
|
||||
collectionID: collID,
|
||||
title: x.title,
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
if (data.right.requestsInCollection.length !== BACKEND_PAGE_SIZE) break
|
||||
}
|
||||
|
||||
return E.right(reqList)
|
||||
}
|
||||
|
||||
export const getCompleteCollectionTree = (
|
||||
collID: string
|
||||
): TE.TaskEither<GQLError<string>, TeamCollection> =>
|
||||
pipe(
|
||||
TE.Do,
|
||||
|
||||
TE.bind("title", () =>
|
||||
pipe(
|
||||
() =>
|
||||
runGQLQuery({
|
||||
query: GetCollectionTitleDocument,
|
||||
variables: {
|
||||
collectionID: collID,
|
||||
},
|
||||
}),
|
||||
TE.map((x) => x.collection!.title)
|
||||
)
|
||||
),
|
||||
TE.bind("children", () =>
|
||||
pipe(
|
||||
// TaskEither -> () => Promise<Either>
|
||||
() => getCollectionChildrenIDs(collID),
|
||||
TE.chain(flow(A.map(getCompleteCollectionTree), TE.sequenceArray))
|
||||
)
|
||||
),
|
||||
|
||||
TE.bind("requests", () => () => getCollectionRequests(collID)),
|
||||
|
||||
TE.map(
|
||||
({ title, children, requests }) =>
|
||||
<TeamCollection>{
|
||||
id: collID,
|
||||
children,
|
||||
requests,
|
||||
title,
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
export const teamCollToHoppRESTColl = (
|
||||
coll: TeamCollection
|
||||
): HoppCollection<HoppRESTRequest> =>
|
||||
makeCollection({
|
||||
name: coll.title,
|
||||
folders: coll.children?.map(teamCollToHoppRESTColl) ?? [],
|
||||
requests: coll.requests?.map((x) => x.request) ?? [],
|
||||
})
|
||||
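A short usage sketch of the two exports above, composing the collection-tree fetch with the REST-collection conversion; the composition itself is illustrative, but both functions are defined in this file:

import { pipe } from "fp-ts/function"
import * as TE from "fp-ts/TaskEither"

// Build a HoppCollection for the REST UI from a team collection ID.
const fetchCollAsHoppColl = (collID: string) =>
  pipe(
    getCompleteCollectionTree(collID),   // TaskEither<GQLError<string>, TeamCollection>
    TE.map(teamCollToHoppRESTColl)       // TeamCollection -> HoppCollection<HoppRESTRequest>
  )

// Run it: const either = await fetchCollAsHoppColl("some-collection-id")()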
@@ -0,0 +1,29 @@
|
||||
import { HoppRESTRequest } from "@hoppscotch/data"
|
||||
import { runMutation } from "../GQLClient"
|
||||
import {
|
||||
CreateShortcodeDocument,
|
||||
CreateShortcodeMutation,
|
||||
CreateShortcodeMutationVariables,
|
||||
DeleteShortcodeDocument,
|
||||
DeleteShortcodeMutation,
|
||||
DeleteShortcodeMutationVariables,
|
||||
} from "../graphql"
|
||||
|
||||
type DeleteShortcodeErrors = "shortcode/not_found"
|
||||
|
||||
export const createShortcode = (request: HoppRESTRequest) =>
|
||||
runMutation<CreateShortcodeMutation, CreateShortcodeMutationVariables, "">(
|
||||
CreateShortcodeDocument,
|
||||
{
|
||||
request: JSON.stringify(request),
|
||||
}
|
||||
)
|
||||
|
||||
export const deleteShortcode = (code: string) =>
|
||||
runMutation<
|
||||
DeleteShortcodeMutation,
|
||||
DeleteShortcodeMutationVariables,
|
||||
DeleteShortcodeErrors
|
||||
>(DeleteShortcodeDocument, {
|
||||
code,
|
||||
})
|
||||
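A hedged sketch of calling the shortcode mutations above; runMutation returns a TaskEither, and the shape of the createShortcode result field is assumed from the generated types rather than shown here:

import * as E from "fp-ts/Either"
import { HoppRESTRequest } from "@hoppscotch/data"

// Create a shortcode for a request; resolve to the mutation payload or null.
const shareRequest = async (request: HoppRESTRequest) => {
  const result = await createShortcode(request)()
  return E.isLeft(result) ? null : result.right.createShortcode
}

// Deleting works the same way: await deleteShortcode("abc123")()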
132
packages/hoppscotch-common/src/helpers/backend/mutations/Team.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { pipe } from "fp-ts/function"
|
||||
import * as TE from "fp-ts/TaskEither"
|
||||
import { runMutation } from "../GQLClient"
|
||||
import { TeamName } from "../types/TeamName"
|
||||
import {
|
||||
CreateTeamDocument,
|
||||
CreateTeamMutation,
|
||||
CreateTeamMutationVariables,
|
||||
DeleteTeamDocument,
|
||||
DeleteTeamMutation,
|
||||
DeleteTeamMutationVariables,
|
||||
LeaveTeamDocument,
|
||||
LeaveTeamMutation,
|
||||
LeaveTeamMutationVariables,
|
||||
RemoveTeamMemberDocument,
|
||||
RemoveTeamMemberMutation,
|
||||
RemoveTeamMemberMutationVariables,
|
||||
RenameTeamDocument,
|
||||
RenameTeamMutation,
|
||||
RenameTeamMutationVariables,
|
||||
TeamMemberRole,
|
||||
UpdateTeamMemberRoleDocument,
|
||||
UpdateTeamMemberRoleMutation,
|
||||
UpdateTeamMemberRoleMutationVariables,
|
||||
} from "../graphql"
|
||||
|
||||
type DeleteTeamErrors =
|
||||
| "team/not_required_role"
|
||||
| "team/invalid_id"
|
||||
| "team/member_not_found"
|
||||
| "ea/not_invite_or_admin"
|
||||
|
||||
type LeaveTeamErrors =
|
||||
| "team/invalid_id"
|
||||
| "team/member_not_found"
|
||||
| "ea/not_invite_or_admin"
|
||||
|
||||
type CreateTeamErrors = "team/name_invalid" | "ea/not_invite_or_admin"
|
||||
|
||||
type RenameTeamErrors =
|
||||
| "ea/not_invite_or_admin"
|
||||
| "team/invalid_id"
|
||||
| "team/not_required_role"
|
||||
|
||||
type UpdateTeamMemberRoleErrors =
|
||||
| "ea/not_invite_or_admin"
|
||||
| "team/invalid_id"
|
||||
| "team/not_required_role"
|
||||
|
||||
type RemoveTeamMemberErrors =
|
||||
| "ea/not_invite_or_admin"
|
||||
| "team/invalid_id"
|
||||
| "team/not_required_role"
|
||||
|
||||
export const createTeam = (name: TeamName) =>
|
||||
pipe(
|
||||
runMutation<
|
||||
CreateTeamMutation,
|
||||
CreateTeamMutationVariables,
|
||||
CreateTeamErrors
|
||||
>(CreateTeamDocument, {
|
||||
name,
|
||||
}),
|
||||
TE.map(({ createTeam }) => createTeam)
|
||||
)
|
||||
|
||||
export const deleteTeam = (teamID: string) =>
|
||||
runMutation<
|
||||
DeleteTeamMutation,
|
||||
DeleteTeamMutationVariables,
|
||||
DeleteTeamErrors
|
||||
>(
|
||||
DeleteTeamDocument,
|
||||
{
|
||||
teamID,
|
||||
},
|
||||
{
|
||||
additionalTypenames: ["Team"],
|
||||
}
|
||||
)
|
||||
|
||||
export const leaveTeam = (teamID: string) =>
|
||||
runMutation<LeaveTeamMutation, LeaveTeamMutationVariables, LeaveTeamErrors>(
|
||||
LeaveTeamDocument,
|
||||
{
|
||||
teamID,
|
||||
},
|
||||
{
|
||||
additionalTypenames: ["Team"],
|
||||
}
|
||||
)
|
||||
|
||||
export const renameTeam = (teamID: string, newName: TeamName) =>
|
||||
pipe(
|
||||
runMutation<
|
||||
RenameTeamMutation,
|
||||
RenameTeamMutationVariables,
|
||||
RenameTeamErrors
|
||||
>(RenameTeamDocument, {
|
||||
newName,
|
||||
teamID,
|
||||
}),
|
||||
TE.map(({ renameTeam }) => renameTeam)
|
||||
)
|
||||
|
||||
export const updateTeamMemberRole = (
|
||||
userUid: string,
|
||||
teamID: string,
|
||||
newRole: TeamMemberRole
|
||||
) =>
|
||||
pipe(
|
||||
runMutation<
|
||||
UpdateTeamMemberRoleMutation,
|
||||
UpdateTeamMemberRoleMutationVariables,
|
||||
UpdateTeamMemberRoleErrors
|
||||
>(UpdateTeamMemberRoleDocument, {
|
||||
newRole,
|
||||
userUid,
|
||||
teamID,
|
||||
}),
|
||||
TE.map(({ updateTeamMemberRole }) => updateTeamMemberRole)
|
||||
)
|
||||
|
||||
export const removeTeamMember = (userUid: string, teamID: string) =>
|
||||
runMutation<
|
||||
RemoveTeamMemberMutation,
|
||||
RemoveTeamMemberMutationVariables,
|
||||
RemoveTeamMemberErrors
|
||||
>(RemoveTeamMemberDocument, {
|
||||
userUid,
|
||||
teamID,
|
||||
})
|
||||
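A sketch of wiring createTeam to a caller, assuming the caller already holds a validated TeamName (see the TeamName codec further down); the Team fields read in the success branch are assumed from the GetMyTeams selection set:

import { pipe } from "fp-ts/function"
import * as TE from "fp-ts/TaskEither"

// Create a team and fold the TaskEither into a status message (sketch only).
const onCreateTeam = (name: TeamName) =>
  pipe(
    createTeam(name),
    TE.match(
      () => "Could not create team",
      (team) => `Created team ${team.name}`
    )
  )

// const message = await onCreateTeam(validatedName)()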
@@ -0,0 +1,69 @@
|
||||
import { runMutation } from "../GQLClient"
|
||||
import {
|
||||
CreateDuplicateEnvironmentDocument,
|
||||
CreateDuplicateEnvironmentMutation,
|
||||
CreateDuplicateEnvironmentMutationVariables,
|
||||
CreateTeamEnvironmentDocument,
|
||||
CreateTeamEnvironmentMutation,
|
||||
CreateTeamEnvironmentMutationVariables,
|
||||
DeleteTeamEnvironmentDocument,
|
||||
DeleteTeamEnvironmentMutation,
|
||||
DeleteTeamEnvironmentMutationVariables,
|
||||
UpdateTeamEnvironmentDocument,
|
||||
UpdateTeamEnvironmentMutation,
|
||||
UpdateTeamEnvironmentMutationVariables,
|
||||
} from "../graphql"
|
||||
|
||||
type DeleteTeamEnvironmentError = "team_environment/not_found"
|
||||
|
||||
type UpdateTeamEnvironmentError = "team_environment/not_found"
|
||||
|
||||
type DuplicateTeamEnvironmentError = "team_environment/not_found"
|
||||
|
||||
export const createTeamEnvironment = (
|
||||
variables: string,
|
||||
teamID: string,
|
||||
name: string
|
||||
) =>
|
||||
runMutation<
|
||||
CreateTeamEnvironmentMutation,
|
||||
CreateTeamEnvironmentMutationVariables,
|
||||
""
|
||||
>(CreateTeamEnvironmentDocument, {
|
||||
variables,
|
||||
teamID,
|
||||
name,
|
||||
})
|
||||
|
||||
export const deleteTeamEnvironment = (id: string) =>
|
||||
runMutation<
|
||||
DeleteTeamEnvironmentMutation,
|
||||
DeleteTeamEnvironmentMutationVariables,
|
||||
DeleteTeamEnvironmentError
|
||||
>(DeleteTeamEnvironmentDocument, {
|
||||
id,
|
||||
})
|
||||
|
||||
export const updateTeamEnvironment = (
|
||||
variables: string,
|
||||
id: string,
|
||||
name: string
|
||||
) =>
|
||||
runMutation<
|
||||
UpdateTeamEnvironmentMutation,
|
||||
UpdateTeamEnvironmentMutationVariables,
|
||||
UpdateTeamEnvironmentError
|
||||
>(UpdateTeamEnvironmentDocument, {
|
||||
variables,
|
||||
id,
|
||||
name,
|
||||
})
|
||||
|
||||
export const createDuplicateEnvironment = (id: string) =>
|
||||
runMutation<
|
||||
CreateDuplicateEnvironmentMutation,
|
||||
CreateDuplicateEnvironmentMutationVariables,
|
||||
DuplicateTeamEnvironmentError
|
||||
>(CreateDuplicateEnvironmentDocument, {
|
||||
id,
|
||||
})
|
||||
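A small sketch of calling updateTeamEnvironment; the API only receives a string for variables, so the JSON shape serialized here is an assumption about how the store encodes environment variables, not something this file defines:

// Persist edited variables for an existing team environment (illustrative).
const saveEnvironment = (envID: string, envName: string) =>
  updateTeamEnvironment(
    JSON.stringify([{ key: "baseUrl", value: "https://echo.hoppscotch.io" }]), // shape assumed
    envID,
    envName
  )

// Run with: const either = await saveEnvironment("env-id", "Staging")()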
@@ -0,0 +1,68 @@
|
||||
import { pipe } from "fp-ts/function"
|
||||
import * as TE from "fp-ts/TaskEither"
|
||||
import { runMutation } from "../GQLClient"
|
||||
import {
|
||||
AcceptTeamInvitationDocument,
|
||||
AcceptTeamInvitationMutation,
|
||||
AcceptTeamInvitationMutationVariables,
|
||||
CreateTeamInvitationDocument,
|
||||
CreateTeamInvitationMutation,
|
||||
CreateTeamInvitationMutationVariables,
|
||||
RevokeTeamInvitationDocument,
|
||||
RevokeTeamInvitationMutation,
|
||||
RevokeTeamInvitationMutationVariables,
|
||||
TeamMemberRole,
|
||||
} from "../graphql"
|
||||
import { Email } from "../types/Email"
|
||||
|
||||
export type CreateTeamInvitationErrors =
|
||||
| "invalid/email"
|
||||
| "team/invalid_id"
|
||||
| "team/member_not_found"
|
||||
| "team_invite/already_member"
|
||||
| "team_invite/member_has_invite"
|
||||
|
||||
type RevokeTeamInvitationErrors =
|
||||
| "team/not_required_role"
|
||||
| "team_invite/no_invite_found"
|
||||
|
||||
type AcceptTeamInvitationErrors =
|
||||
| "team_invite/no_invite_found"
|
||||
| "team_invite/already_member"
|
||||
| "team_invite/email_do_not_match"
|
||||
|
||||
export const createTeamInvitation = (
|
||||
inviteeEmail: Email,
|
||||
inviteeRole: TeamMemberRole,
|
||||
teamID: string
|
||||
) =>
|
||||
pipe(
|
||||
runMutation<
|
||||
CreateTeamInvitationMutation,
|
||||
CreateTeamInvitationMutationVariables,
|
||||
CreateTeamInvitationErrors
|
||||
>(CreateTeamInvitationDocument, {
|
||||
inviteeEmail,
|
||||
inviteeRole,
|
||||
teamID,
|
||||
}),
|
||||
TE.map((x) => x.createTeamInvitation)
|
||||
)
|
||||
|
||||
export const revokeTeamInvitation = (inviteID: string) =>
|
||||
runMutation<
|
||||
RevokeTeamInvitationMutation,
|
||||
RevokeTeamInvitationMutationVariables,
|
||||
RevokeTeamInvitationErrors
|
||||
>(RevokeTeamInvitationDocument, {
|
||||
inviteID,
|
||||
})
|
||||
|
||||
export const acceptTeamInvitation = (inviteID: string) =>
|
||||
runMutation<
|
||||
AcceptTeamInvitationMutation,
|
||||
AcceptTeamInvitationMutationVariables,
|
||||
AcceptTeamInvitationErrors
|
||||
>(AcceptTeamInvitationDocument, {
|
||||
inviteID,
|
||||
})
|
||||
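A sketch that ties createTeamInvitation to the Email codec defined later in this commit; the composition is illustrative, assumes it sits next to the mutations above, and uses E.foldW so the invalid branch can reuse the "invalid/email" error string from the union:

import { pipe } from "fp-ts/function"
import * as E from "fp-ts/Either"
import * as TE from "fp-ts/TaskEither"
import { EmailCodec } from "../types/Email"

// Validate the raw address first, then fire the invitation mutation.
const inviteByEmail = (raw: string, role: TeamMemberRole, teamID: string) =>
  pipe(
    EmailCodec.decode(raw),                      // Either<t.Errors, Email>
    E.foldW(
      () => TE.left("invalid/email" as const),   // reject without hitting the API
      (email) => createTeamInvitation(email, role, teamID)
    )
  )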
@@ -0,0 +1,20 @@
|
||||
import { runMutation } from "../GQLClient"
|
||||
import {
|
||||
MoveRestTeamRequestDocument,
|
||||
MoveRestTeamRequestMutation,
|
||||
MoveRestTeamRequestMutationVariables,
|
||||
} from "../graphql"
|
||||
|
||||
type MoveRestTeamRequestErrors =
|
||||
| "team_req/not_found"
|
||||
| "team_req/invalid_target_id"
|
||||
|
||||
export const moveRESTTeamRequest = (requestID: string, collectionID: string) =>
|
||||
runMutation<
|
||||
MoveRestTeamRequestMutation,
|
||||
MoveRestTeamRequestMutationVariables,
|
||||
MoveRestTeamRequestErrors
|
||||
>(MoveRestTeamRequestDocument, {
|
||||
requestID,
|
||||
collectionID,
|
||||
})
|
||||
@@ -0,0 +1,16 @@
|
||||
import * as t from "io-ts"
|
||||
|
||||
const emailRegex =
|
||||
/^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
|
||||
|
||||
interface EmailBrand {
|
||||
readonly Email: unique symbol
|
||||
}
|
||||
|
||||
export const EmailCodec = t.brand(
|
||||
t.string,
|
||||
(x): x is t.Branded<string, EmailBrand> => emailRegex.test(x),
|
||||
"Email"
|
||||
)
|
||||
|
||||
export type Email = t.TypeOf<typeof EmailCodec>
|
||||
@@ -0,0 +1,13 @@
|
||||
import * as t from "io-ts"
|
||||
|
||||
interface TeamNameBrand {
|
||||
readonly TeamName: unique symbol
|
||||
}
|
||||
|
||||
export const TeamNameCodec = t.brand(
|
||||
t.string,
|
||||
(x): x is t.Branded<string, TeamNameBrand> => x.trim().length >= 6,
|
||||
"TeamName"
|
||||
)
|
||||
|
||||
export type TeamName = t.TypeOf<typeof TeamNameCodec>
|
||||
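A quick sketch of how the two branded codecs above (Email and TeamName) are used at input boundaries: decode the raw string, and only the branded value can reach helpers like createTeam or createTeamInvitation:

import * as E from "fp-ts/Either"

const teamName = TeamNameCodec.decode("My New Team")     // right when trimmed length >= 6
const email = EmailCodec.decode("someone@example.com")   // right when the regex matches

if (E.isRight(teamName) && E.isRight(email)) {
  // teamName.right is a TeamName and email.right is an Email; plain strings
  // that were never decoded cannot be passed where these brands are required.
}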
@@ -0,0 +1,851 @@
|
||||
// @ts-check
|
||||
// ^^^ Enables Type Checking by the TypeScript compiler
|
||||
|
||||
import { makeRESTRequest, rawKeyValueEntriesToString } from "@hoppscotch/data"
|
||||
import { parseCurlToHoppRESTReq } from ".."
|
||||
|
||||
const samples = [
|
||||
{
|
||||
command: `
|
||||
curl --request GET \
|
||||
--url https://echo.hoppscotch.io/ \
|
||||
--header 'content-type: application/x-www-form-urlencoded' \
|
||||
--data a=b \
|
||||
--data c=d
|
||||
`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: rawKeyValueEntriesToString([
|
||||
{
|
||||
active: true,
|
||||
key: "a",
|
||||
value: "b",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "c",
|
||||
value: "d",
|
||||
},
|
||||
]),
|
||||
},
|
||||
headers: [],
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `
|
||||
curl 'http://avs:def@127.0.0.1:8000/api/admin/crm/brand/4'
|
||||
-X PUT
|
||||
-H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0'
|
||||
-H 'Accept: application/json, text/plain, */*'
|
||||
-H 'Accept-Language: en'
|
||||
--compressed
|
||||
-H 'Content-Type: application/hal+json;charset=utf-8'
|
||||
-H 'Origin: http://localhost:3012'
|
||||
-H 'Connection: keep-alive'
|
||||
-H 'Referer: http://localhost:3012/crm/company/4'
|
||||
--data-raw '{"id":4,"crm_company_id":4,"industry_primary_id":2,"industry_head_id":2,"industry_body_id":2,"code":"01","barcode":"222010101","summary":"Healt-Seasoning-Basic-Hori-Kello","name":"Kellolaa","sub_code":"01","sub_name":"Hori","created_at":"2020-06-08 08:50:02","updated_at":"2020-06-08 08:50:02","company":4,"primary":{"id":2,"code":"2","name":"Healt","created_at":"2020-05-19 07:05:02","updated_at":"2020-05-19 07:09:28"},"head":{"id":2,"code":"2","name":"Seasoning","created_at":"2020-04-14 19:34:33","updated_at":"2020-04-14 19:34:33"},"body":{"id":2,"code":"2","name":"Basic","created_at":"2020-04-14 19:33:54","updated_at":"2020-04-14 19:33:54"},"contacts":[]}'
|
||||
`,
|
||||
response: makeRESTRequest({
|
||||
method: "PUT",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://127.0.0.1:8000/api/admin/crm/brand/4",
|
||||
auth: {
|
||||
authType: "basic",
|
||||
authActive: true,
|
||||
username: "avs",
|
||||
password: "def",
|
||||
},
|
||||
body: {
|
||||
contentType: "application/hal+json",
|
||||
body: `{
|
||||
"id": 4,
|
||||
"crm_company_id": 4,
|
||||
"industry_primary_id": 2,
|
||||
"industry_head_id": 2,
|
||||
"industry_body_id": 2,
|
||||
"code": "01",
|
||||
"barcode": "222010101",
|
||||
"summary": "Healt-Seasoning-Basic-Hori-Kello",
|
||||
"name": "Kellolaa",
|
||||
"sub_code": "01",
|
||||
"sub_name": "Hori",
|
||||
"created_at": "2020-06-08 08:50:02",
|
||||
"updated_at": "2020-06-08 08:50:02",
|
||||
"company": 4,
|
||||
"primary": {
|
||||
"id": 2,
|
||||
"code": "2",
|
||||
"name": "Healt",
|
||||
"created_at": "2020-05-19 07:05:02",
|
||||
"updated_at": "2020-05-19 07:09:28"
|
||||
},
|
||||
"head": {
|
||||
"id": 2,
|
||||
"code": "2",
|
||||
"name": "Seasoning",
|
||||
"created_at": "2020-04-14 19:34:33",
|
||||
"updated_at": "2020-04-14 19:34:33"
|
||||
},
|
||||
"body": {
|
||||
"id": 2,
|
||||
"code": "2",
|
||||
"name": "Basic",
|
||||
"created_at": "2020-04-14 19:33:54",
|
||||
"updated_at": "2020-04-14 19:33:54"
|
||||
},
|
||||
"contacts": []
|
||||
}`,
|
||||
},
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "User-Agent",
|
||||
value:
|
||||
"Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "Accept",
|
||||
value: "application/json, text/plain, */*",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "Accept-Language",
|
||||
value: "en",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "Origin",
|
||||
value: "http://localhost:3012",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "Connection",
|
||||
value: "keep-alive",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "Referer",
|
||||
value: "http://localhost:3012/crm/company/4",
|
||||
},
|
||||
],
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl google.com`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://google.com/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
headers: [],
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl -X POST -d '{"foo":"bar"}' http://localhost:1111/hello/world/?bar=baz&buzz`,
|
||||
response: makeRESTRequest({
|
||||
method: "POST",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost:1111/hello/world/?buzz",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: "application/json",
|
||||
body: `{\n "foo": "bar"\n}`,
|
||||
},
|
||||
headers: [],
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "bar",
|
||||
value: "baz",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl --get -d "tool=curl" -d "age=old" https://example.com`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://example.com/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
headers: [],
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "tool",
|
||||
value: "curl",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "age",
|
||||
value: "old",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl -F hello=hello2 -F hello3=@hello4.txt bing.com`,
|
||||
response: makeRESTRequest({
|
||||
method: "POST",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://bing.com/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: "multipart/form-data",
|
||||
body: [
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "hello",
|
||||
value: "hello2",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "hello3",
|
||||
value: "",
|
||||
},
|
||||
],
|
||||
},
|
||||
headers: [],
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command:
|
||||
"curl -X GET localhost -H 'Accept: application/json' --user root:toor",
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost/",
|
||||
auth: {
|
||||
authType: "basic",
|
||||
authActive: true,
|
||||
username: "root",
|
||||
password: "toor",
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "Accept",
|
||||
value: "application/json",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command:
|
||||
"curl -X GET localhost --header 'Authorization: Basic dXNlcjpwYXNz'",
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost/",
|
||||
auth: {
|
||||
authType: "basic",
|
||||
authActive: true,
|
||||
username: "user",
|
||||
password: "pass",
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command:
|
||||
"curl -X GET localhost:9900 --header 'Authorization: Basic 77898dXNlcjpwYXNz'",
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost:9900/",
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command:
|
||||
"curl -X GET localhost --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'",
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost/",
|
||||
auth: {
|
||||
authType: "bearer",
|
||||
authActive: true,
|
||||
token:
|
||||
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c",
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl --get -I -d "tool=curl" -d "platform=hoppscotch" -d"io" https://hoppscotch.io`,
|
||||
response: makeRESTRequest({
|
||||
method: "HEAD",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://hoppscotch.io/?io",
|
||||
auth: {
|
||||
authActive: true,
|
||||
authType: "none",
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "tool",
|
||||
value: "curl",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "platform",
|
||||
value: "hoppscotch",
|
||||
},
|
||||
],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl 'https://someshadywebsite.com/questionable/path/?and=params&so&stay=tuned&' \
|
||||
-H 'user-agent: Mozilla/5.0' \
|
||||
-H 'accept: text/html' \
|
||||
-H $'cookie: cookie-cookie' \
|
||||
--data $'------WebKitFormBoundaryj3oufpIISPa2DP7c\\r\\nContent-Disposition: form-data; name="EmailAddress"\\r\\n\\r\\ntest@test.com\\r\\n------WebKitFormBoundaryj3oufpIISPa2DP7c\\r\\nContent-Disposition: form-data; name="Entity"\\r\\n\\r\\n1\\r\\n------WebKitFormBoundaryj3oufpIISPa2DP7c--\\r\\n'`,
|
||||
response: makeRESTRequest({
|
||||
method: "POST",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://someshadywebsite.com/questionable/path/?so",
|
||||
auth: {
|
||||
authActive: true,
|
||||
authType: "none",
|
||||
},
|
||||
body: {
|
||||
contentType: "multipart/form-data",
|
||||
body: [
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "EmailAddress",
|
||||
value: "test@test.com",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "Entity",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
},
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "and",
|
||||
value: "params",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "stay",
|
||||
value: "tuned",
|
||||
},
|
||||
],
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "user-agent",
|
||||
value: "Mozilla/5.0",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "accept",
|
||||
value: "text/html",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "cookie",
|
||||
value: "cookie-cookie",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command:
|
||||
"curl localhost -H 'content-type: multipart/form-data; boundary=------------------------d74496d66958873e' --data '-----------------------------d74496d66958873e\\r\\nContent-Disposition: form-data; name=\"file\"; filename=\"test.txt\"\\r\\nContent-Type: text/plain\\r\\n\\r\\nHello World\\r\\n\\r\\n-----------------------------d74496d66958873e--\\r\\n'",
|
||||
response: makeRESTRequest({
|
||||
method: "POST",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://localhost/",
|
||||
auth: {
|
||||
authActive: true,
|
||||
authType: "none",
|
||||
},
|
||||
body: {
|
||||
contentType: "multipart/form-data",
|
||||
body: [
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "file",
|
||||
value: "",
|
||||
},
|
||||
],
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl 'https://hoppscotch.io/' \
|
||||
-H 'authority: hoppscotch.io' \
|
||||
-H 'sec-ch-ua: " Not A;Brand";v="99", "Chromium";v="98", "Google Chrome";v="98"' \
|
||||
-H 'accept: */*' \
|
||||
-H 'user-agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36' \
|
||||
-H 'sec-ch-ua-platform: "Windows"' \
|
||||
-H 'accept-language: en-US,en;q=0.9,ml;q=0.8' \
|
||||
--compressed`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://hoppscotch.io/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "authority",
|
||||
value: "hoppscotch.io",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "sec-ch-ua",
|
||||
value:
|
||||
'" Not A;Brand";v="99", "Chromium";v="98", "Google Chrome";v="98"',
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "accept",
|
||||
value: "*/*",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "user-agent",
|
||||
value:
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "sec-ch-ua-platform",
|
||||
value: '"Windows"',
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "accept-language",
|
||||
value: "en-US,en;q=0.9,ml;q=0.8",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl --request GET \
|
||||
--url 'https://echo.hoppscotch.io/?hello=there' \
|
||||
--header 'content-type: application/x-www-form-urlencoded' \
|
||||
--header 'something: other-thing' \
|
||||
--data a=b \
|
||||
--data c=d`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
auth: { authType: "none", authActive: true },
|
||||
body: {
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: rawKeyValueEntriesToString([
|
||||
{
|
||||
key: "a",
|
||||
value: "b",
|
||||
active: true,
|
||||
},
|
||||
{
|
||||
key: "c",
|
||||
value: "d",
|
||||
active: true,
|
||||
},
|
||||
]),
|
||||
},
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "hello",
|
||||
value: "there",
|
||||
},
|
||||
],
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "something",
|
||||
value: "other-thing",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl --request POST \
|
||||
--url 'https://echo.hoppscotch.io/?hello=there' \
|
||||
--header 'content-type: multipart/form-data' \
|
||||
--header 'something: other-thing' \
|
||||
--form a=b \
|
||||
--form c=d`,
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
method: "POST",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "something",
|
||||
value: "other-thing",
|
||||
},
|
||||
],
|
||||
body: {
|
||||
contentType: "multipart/form-data",
|
||||
body: [
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "a",
|
||||
value: "b",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "c",
|
||||
value: "d",
|
||||
},
|
||||
],
|
||||
},
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "hello",
|
||||
value: "there",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: "curl 'muxueqz.top/skybook.html'",
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://muxueqz.top/skybook.html",
|
||||
method: "GET",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [],
|
||||
body: { contentType: null, body: null },
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: "curl -F abcd=efghi",
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
method: "POST",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [],
|
||||
body: {
|
||||
contentType: "multipart/form-data",
|
||||
body: [
|
||||
{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key: "abcd",
|
||||
value: "efghi",
|
||||
},
|
||||
],
|
||||
},
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: "curl 127.0.0.1 -X custommethod",
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "http://127.0.0.1/",
|
||||
method: "CUSTOMMETHOD",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [],
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: "curl echo.hoppscotch.io -A pinephone",
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
method: "GET",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [
|
||||
{
|
||||
active: true,
|
||||
key: "User-Agent",
|
||||
value: "pinephone",
|
||||
},
|
||||
],
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: "curl echo.hoppscotch.io -G",
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://echo.hoppscotch.io/",
|
||||
method: "GET",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [],
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl --get -I -d "tool=hopp" https://example.org`,
|
||||
response: makeRESTRequest({
|
||||
name: "Untitled request",
|
||||
endpoint: "https://example.org/",
|
||||
method: "HEAD",
|
||||
auth: { authType: "none", authActive: true },
|
||||
headers: [],
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [
|
||||
{
|
||||
active: true,
|
||||
key: "tool",
|
||||
value: "hopp",
|
||||
},
|
||||
],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl google.com -u userx`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://google.com/",
|
||||
auth: {
|
||||
authType: "basic",
|
||||
authActive: true,
|
||||
username: "userx",
|
||||
password: "",
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl google.com -H "Authorization"`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://google.com/",
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl \`
|
||||
google.com -H "content-type: application/json"`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://google.com/",
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl 192.168.0.24:8080/ping`,
|
||||
response: makeRESTRequest({
|
||||
method: "GET",
|
||||
name: "Untitled request",
|
||||
endpoint: "http://192.168.0.24:8080/ping",
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
contentType: null,
|
||||
body: null,
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
{
|
||||
command: `curl https://example.com -d "alpha=beta&request_id=4"`,
|
||||
response: makeRESTRequest({
|
||||
method: "POST",
|
||||
name: "Untitled request",
|
||||
endpoint: "https://example.com/",
|
||||
auth: {
|
||||
authType: "none",
|
||||
authActive: true,
|
||||
},
|
||||
body: {
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: rawKeyValueEntriesToString([
|
||||
{
|
||||
active: true,
|
||||
key: "alpha",
|
||||
value: "beta",
|
||||
},
|
||||
{
|
||||
active: true,
|
||||
key: "request_id",
|
||||
value: "4",
|
||||
},
|
||||
]),
|
||||
},
|
||||
params: [],
|
||||
headers: [],
|
||||
preRequestScript: "",
|
||||
testScript: "",
|
||||
}),
|
||||
},
|
||||
]
|
||||
|
||||
describe("Parse curl command to Hopp REST Request", () => {
|
||||
for (const [i, { command, response }] of samples.entries()) {
|
||||
test(`for sample #${i + 1}:\n\n${command}`, () => {
|
||||
expect(parseCurlToHoppRESTReq(command)).toEqual(response)
|
||||
})
|
||||
}
|
||||
})
|
||||
@@ -0,0 +1,158 @@
|
||||
import { detectContentType } from "../sub_helpers/contentParser"
|
||||
|
||||
describe("detect content type", () => {
|
||||
test("should return null for blank input", () => {
|
||||
expect(detectContentType("")).toBe(null)
|
||||
})
|
||||
|
||||
describe("application/json", () => {
|
||||
test('should return text/plain for "{"', () => {
|
||||
expect(detectContentType("{")).toBe("text/plain")
|
||||
})
|
||||
|
||||
test('should return application/json for "{}"', () => {
|
||||
expect(detectContentType("{}")).toBe("application/json")
|
||||
})
|
||||
|
||||
test("should return application/json for valid json data", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
{
|
||||
"body": "some text",
|
||||
"name": "interesting name",
|
||||
"code": [1, 5, 6, 2]
|
||||
}
|
||||
`)
|
||||
).toBe("application/json")
|
||||
})
|
||||
})
|
||||
|
||||
describe("application/xml", () => {
|
||||
test("should return text/html for XML data without XML declaration", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
<book category="cooking">
|
||||
<title lang="en">Everyday Italian</title>
|
||||
<author>Giada De Laurentiis</author>
|
||||
<year>2005</year>
|
||||
<price>30.00</price>
|
||||
</book>
|
||||
`)
|
||||
).toBe("text/html")
|
||||
})
|
||||
|
||||
test("should return application/xml for valid XML data", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<book category="cooking">
|
||||
<title lang="en">Everyday Italian</title>
|
||||
<author>Giada De Laurentiis</author>
|
||||
<year>2005</year>
|
||||
<price>30.00</price>
|
||||
</book>
|
||||
`)
|
||||
).toBe("text/html")
|
||||
})
|
||||
|
||||
test("should return text/html for invalid XML data", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
<book category="cooking">
|
||||
<title lang="en">Everyday Italian
|
||||
<abcd>Giada De Laurentiis</abcd>
|
||||
<year>2005</year>
|
||||
<price>30.00</price>
|
||||
`)
|
||||
).toBe("text/html")
|
||||
})
|
||||
})
|
||||
|
||||
describe("text/html", () => {
|
||||
test("should return text/html for valid HTML data", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Page Title</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>This is a Heading</h1>
|
||||
<p>This is a paragraph.</p>
|
||||
</body>
|
||||
</html>
|
||||
`)
|
||||
).toBe("text/html")
|
||||
})
|
||||
|
||||
test("should return text/html for invalid HTML data", () => {
|
||||
expect(
|
||||
detectContentType(`
|
||||
<head>
|
||||
<title>Page Title</title>
|
||||
<body>
|
||||
<h1>This is a Heading</h1>
|
||||
</body>
|
||||
</html>
|
||||
`)
|
||||
).toBe("text/html")
|
||||
})
|
||||
|
||||
test("should return text/html for unmatched tag", () => {
|
||||
expect(detectContentType("</html>")).toBe("text/html")
|
||||
})
|
||||
|
||||
test("should return text/plain for no valid tags in input", () => {
|
||||
expect(detectContentType("</html")).toBe("text/plain")
|
||||
})
|
||||
})
|
||||
|
||||
describe("application/x-www-form-urlencoded", () => {
|
||||
test("should return application/x-www-form-urlencoded for valid data", () => {
|
||||
expect(detectContentType("hello=world&hopp=scotch")).toBe(
|
||||
"application/x-www-form-urlencoded"
|
||||
)
|
||||
})
|
||||
|
||||
test("should return application/x-www-form-urlencoded for empty pair", () => {
|
||||
expect(detectContentType("hello=world&hopp=scotch&")).toBe(
|
||||
"application/x-www-form-urlencoded"
|
||||
)
|
||||
})
|
||||
|
||||
test("should return application/x-www-form-urlencoded for dangling param", () => {
|
||||
expect(detectContentType("hello=world&hoppscotch")).toBe(
|
||||
"application/x-www-form-urlencoded"
|
||||
)
|
||||
})
|
||||
|
||||
test('should return text/plain for "="', () => {
|
||||
expect(detectContentType("=")).toBe("text/plain")
|
||||
})
|
||||
|
||||
test("should return application/x-www-form-urlencoded for no value field", () => {
|
||||
expect(detectContentType("hello=")).toBe(
|
||||
"application/x-www-form-urlencoded"
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("multipart/form-data", () => {
|
||||
test("should return multipart/form-data for valid data", () => {
|
||||
expect(
|
||||
detectContentType(
|
||||
`------WebKitFormBoundaryj3oufpIISPa2DP7c\\r\\nContent-Disposition: form-data; name="EmailAddress"\\r\\n\\r\\ntest@test.com\\r\\n------WebKitFormBoundaryj3oufpIISPa2DP7c\\r\\nContent-Disposition: form-data; name="Entity"\\r\\n\\r\\n1\\r\\n------WebKitFormBoundaryj3oufpIISPa2DP7c--\\r\\n`
|
||||
)
|
||||
).toBe("multipart/form-data")
|
||||
})
|
||||
|
||||
test("should return application/x-www-form-urlencoded for data with only one boundary", () => {
|
||||
expect(
|
||||
detectContentType(
|
||||
`\\r\\nContent-Disposition: form-data; name="EmailAddress"\\r\\n\\r\\ntest@test.com\\r\\n\\r\\nContent-Disposition: form-data; name="Entity"\\r\\n\\r\\n1\\r\\n------WebKitFormBoundaryj3oufpIISPa2DP7c--\\r\\n`
|
||||
)
|
||||
).toBe("application/x-www-form-urlencoded")
|
||||
})
|
||||
})
|
||||
})
|
||||
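For quick reference, the detection behaviour pinned down by the spec above, written as direct calls (values taken from those tests):

import { detectContentType } from "../sub_helpers/contentParser"

detectContentType("")                         // null
detectContentType("{}")                       // "application/json"
detectContentType("hello=world&hopp=scotch")  // "application/x-www-form-urlencoded"
detectContentType("</html>")                  // "text/html"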
185
packages/hoppscotch-common/src/helpers/curl/curlparser.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
/**
 * The direct import from yargs-parser pulls in fs, a built-in Node module,
 * so the /browser entry point is imported as a workaround for now; it has no
 * type info on DefinitelyTyped.
 * TODO: remove/update this comment before merging the vue3 port.
 */
import parser from "yargs-parser/browser"
|
||||
import * as O from "fp-ts/Option"
|
||||
import * as A from "fp-ts/Array"
|
||||
import { pipe, flow } from "fp-ts/function"
|
||||
import {
|
||||
FormDataKeyValue,
|
||||
HoppRESTReqBody,
|
||||
makeRESTRequest,
|
||||
} from "@hoppscotch/data"
|
||||
import { getAuthObject } from "./sub_helpers/auth"
|
||||
import { getHeaders, recordToHoppHeaders } from "./sub_helpers/headers"
|
||||
// import { getCookies } from "./sub_helpers/cookies"
|
||||
import { getQueries } from "./sub_helpers/queries"
|
||||
import { getMethod } from "./sub_helpers/method"
|
||||
import { concatParams, getURLObject } from "./sub_helpers/url"
|
||||
import { preProcessCurlCommand } from "./sub_helpers/preproc"
|
||||
import { getBody, getFArgumentMultipartData } from "./sub_helpers/body"
|
||||
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
|
||||
import {
|
||||
objHasProperty,
|
||||
objHasArrayProperty,
|
||||
} from "~/helpers/functional/object"
|
||||
|
||||
const defaultRESTReq = getDefaultRESTRequest()
|
||||
|
||||
export const parseCurlCommand = (curlCommand: string) => {
|
||||
// const isDataBinary = curlCommand.includes(" --data-binary")
|
||||
// const compressed = !!parsedArguments.compressed
|
||||
|
||||
curlCommand = preProcessCurlCommand(curlCommand)
|
||||
const parsedArguments = parser(curlCommand)
|
||||
|
||||
const headerObject = getHeaders(parsedArguments)
|
||||
const { headers } = headerObject
|
||||
let { rawContentType } = headerObject
|
||||
const hoppHeaders = pipe(
|
||||
headers,
|
||||
O.fromPredicate(() => Object.keys(headers).length > 0),
|
||||
O.map(recordToHoppHeaders),
|
||||
O.getOrElse(() => defaultRESTReq.headers)
|
||||
)
|
||||
|
||||
const method = getMethod(parsedArguments)
|
||||
// const cookies = getCookies(parsedArguments)
|
||||
const urlObject = getURLObject(parsedArguments)
|
||||
const auth = getAuthObject(parsedArguments, headers, urlObject)
|
||||
|
||||
let rawData: string | string[] = pipe(
|
||||
parsedArguments,
|
||||
O.fromPredicate(objHasArrayProperty("d", "string")),
|
||||
O.map((args) => args.d),
|
||||
O.altW(() =>
|
||||
pipe(
|
||||
parsedArguments,
|
||||
O.fromPredicate(objHasProperty("d", "string")),
|
||||
O.map((args) => args.d)
|
||||
)
|
||||
),
|
||||
O.getOrElseW(() => "")
|
||||
)
|
||||
|
||||
let body: HoppRESTReqBody["body"] = ""
|
||||
let contentType: HoppRESTReqBody["contentType"] =
|
||||
defaultRESTReq.body.contentType
|
||||
let hasBodyBeenParsed = false
|
||||
|
||||
let { queries, danglingParams } = getQueries(
|
||||
Array.from(urlObject.searchParams.entries())
|
||||
)
|
||||
|
||||
const stringToPair = flow(
|
||||
decodeURIComponent,
|
||||
(pair) => <[string, string]>pair.split("=", 2)
|
||||
)
|
||||
const pairs = pipe(
|
||||
rawData,
|
||||
O.fromPredicate(Array.isArray),
|
||||
O.map(A.map(stringToPair)),
|
||||
O.alt(() =>
|
||||
pipe(
|
||||
rawData,
|
||||
O.fromPredicate((s) => s.length > 0),
|
||||
O.map(() => [stringToPair(rawData as string)])
|
||||
)
|
||||
),
|
||||
O.getOrElseW(() => undefined)
|
||||
)
|
||||
|
||||
if (objHasProperty("G", "boolean")(parsedArguments) && !!pairs) {
|
||||
const newQueries = getQueries(pairs)
|
||||
queries = [...queries, ...newQueries.queries]
|
||||
danglingParams = [...danglingParams, ...newQueries.danglingParams]
|
||||
hasBodyBeenParsed = true
|
||||
} else if (
|
||||
rawContentType.includes("application/x-www-form-urlencoded") &&
|
||||
!!pairs &&
|
||||
Array.isArray(rawData)
|
||||
) {
|
||||
body = pairs.map((p) => p.join(": ")).join("\n") || null
|
||||
contentType = "application/x-www-form-urlencoded"
|
||||
hasBodyBeenParsed = true
|
||||
}
|
||||
|
||||
const urlString = concatParams(urlObject, danglingParams)
|
||||
|
||||
let multipartUploads: Record<string, string> = pipe(
|
||||
O.of(parsedArguments),
|
||||
O.chain(getFArgumentMultipartData),
|
||||
O.match(
|
||||
() => ({}),
|
||||
(args) => {
|
||||
hasBodyBeenParsed = true
|
||||
rawContentType = "multipart/form-data"
|
||||
return args
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
if (!hasBodyBeenParsed) {
|
||||
if (typeof rawData !== "string") {
|
||||
rawData = rawData.join("")
|
||||
}
|
||||
const bodyObject = getBody(rawData, rawContentType, contentType)
|
||||
|
||||
if (O.isSome(bodyObject)) {
|
||||
const bodyObjectValue = bodyObject.value
|
||||
|
||||
if (bodyObjectValue.type === "FORMDATA") {
|
||||
multipartUploads = bodyObjectValue.body
|
||||
} else {
|
||||
body = bodyObjectValue.body.body
|
||||
contentType = bodyObjectValue.body
|
||||
.contentType as HoppRESTReqBody["contentType"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const finalBody: HoppRESTReqBody = pipe(
|
||||
body,
|
||||
O.fromNullable,
|
||||
O.filter((b) => b.length > 0),
|
||||
O.map((b) => <HoppRESTReqBody>{ body: b, contentType }),
|
||||
O.alt(() =>
|
||||
pipe(
|
||||
multipartUploads,
|
||||
O.of,
|
||||
O.map((m) => Object.entries(m)),
|
||||
O.filter((m) => m.length > 0),
|
||||
O.map(
|
||||
flow(
|
||||
A.map(
|
||||
([key, value]) =>
|
||||
<FormDataKeyValue>{
|
||||
active: true,
|
||||
isFile: false,
|
||||
key,
|
||||
value,
|
||||
}
|
||||
),
|
||||
(b) =>
|
||||
<HoppRESTReqBody>{ body: b, contentType: "multipart/form-data" }
|
||||
)
|
||||
)
|
||||
)
|
||||
),
|
||||
O.getOrElse(() => defaultRESTReq.body)
|
||||
)
|
||||
|
||||
return makeRESTRequest({
|
||||
name: defaultRESTReq.name,
|
||||
endpoint: urlString,
|
||||
method: (method || defaultRESTReq.method).toUpperCase(),
|
||||
params: queries ?? defaultRESTReq.params,
|
||||
headers: hoppHeaders,
|
||||
preRequestScript: defaultRESTReq.preRequestScript,
|
||||
testScript: defaultRESTReq.testScript,
|
||||
auth,
|
||||
body: finalBody,
|
||||
})
|
||||
}
|
||||
5
packages/hoppscotch-common/src/helpers/curl/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { flow } from "fp-ts/function"
|
||||
import { cloneDeep } from "lodash-es"
|
||||
import { parseCurlCommand } from "./curlparser"
|
||||
|
||||
export const parseCurlToHoppRESTReq = flow(parseCurlCommand, cloneDeep)
|
||||
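A minimal usage sketch of the exported helper; the import path is assumed from the package layout, and the expected field values mirror the spec file earlier in this commit:

import { parseCurlToHoppRESTReq } from "~/helpers/curl"

// Convert a pasted curl command into a HoppRESTRequest (deep-cloned copy).
const req = parseCurlToHoppRESTReq(
  "curl --request GET --url 'https://echo.hoppscotch.io/?hello=there'"
)

req.method    // "GET"
req.endpoint  // "https://echo.hoppscotch.io/"
req.params    // [{ active: true, key: "hello", value: "there" }]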
116
packages/hoppscotch-common/src/helpers/curl/sub_helpers/auth.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { HoppRESTAuth } from "@hoppscotch/data"
|
||||
import parser from "yargs-parser"
|
||||
import * as O from "fp-ts/Option"
|
||||
import * as S from "fp-ts/string"
|
||||
import { pipe } from "fp-ts/function"
|
||||
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
|
||||
import { objHasProperty } from "~/helpers/functional/object"
|
||||
|
||||
const defaultRESTReq = getDefaultRESTRequest()
|
||||
|
||||
const getAuthFromAuthHeader = (headers: Record<string, string>) =>
|
||||
pipe(
|
||||
headers.Authorization,
|
||||
O.fromNullable,
|
||||
O.map((a) => a.split(" ")),
|
||||
O.filter((a) => a.length > 1),
|
||||
O.chain((kv) =>
|
||||
O.fromNullable(
|
||||
(() => {
|
||||
switch (kv[0].toLowerCase()) {
|
||||
case "bearer":
|
||||
return <HoppRESTAuth>{
|
||||
authActive: true,
|
||||
authType: "bearer",
|
||||
token: kv[1],
|
||||
}
|
||||
case "basic": {
|
||||
const [username, password] = pipe(
|
||||
O.tryCatch(() => atob(kv[1])),
|
||||
O.map(S.split(":")),
|
||||
// can have a username with no password
|
||||
O.filter((arr) => arr.length > 0),
|
||||
O.map(
|
||||
([username, password]) =>
|
||||
<[string, string]>[username, password]
|
||||
),
|
||||
O.getOrElse(() => ["", ""])
|
||||
)
|
||||
|
||||
if (!username) return undefined
|
||||
|
||||
return <HoppRESTAuth>{
|
||||
authActive: true,
|
||||
authType: "basic",
|
||||
username,
|
||||
password: password ?? "",
|
||||
}
|
||||
}
|
||||
default:
|
||||
return undefined
|
||||
}
|
||||
})()
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
const getAuthFromParsedArgs = (parsedArguments: parser.Arguments) =>
|
||||
pipe(
|
||||
parsedArguments,
|
||||
O.fromPredicate(objHasProperty("u", "string")),
|
||||
O.chain((args) =>
|
||||
pipe(
|
||||
args.u,
|
||||
S.split(":"),
|
||||
// can have a username with no password
|
||||
O.fromPredicate((arr) => arr.length > 0 && arr[0].length > 0),
|
||||
O.map(
|
||||
([username, password]) => <[string, string]>[username, password ?? ""]
|
||||
)
|
||||
)
|
||||
),
|
||||
O.map(
|
||||
([username, password]) =>
|
||||
<HoppRESTAuth>{
|
||||
authActive: true,
|
||||
authType: "basic",
|
||||
username,
|
||||
password,
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
const getAuthFromURLObject = (urlObject: URL) =>
|
||||
pipe(
|
||||
urlObject,
|
||||
(url) => [url.username, url.password ?? ""],
|
||||
// can have a username with no password
|
||||
O.fromPredicate(([username]) => !!username && username.length > 0),
|
||||
O.map(
|
||||
([username, password]) =>
|
||||
<HoppRESTAuth>{
|
||||
authActive: true,
|
||||
authType: "basic",
|
||||
username,
|
||||
password,
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
/**
|
||||
* Preference order:
|
||||
* - Auth headers
|
||||
* - --user or -u argument
|
||||
* - Creds provided along with URL
|
||||
*/
|
||||
export const getAuthObject = (
|
||||
parsedArguments: parser.Arguments,
|
||||
headers: Record<string, string>,
|
||||
urlObject: URL
|
||||
): HoppRESTAuth =>
|
||||
pipe(
|
||||
getAuthFromAuthHeader(headers),
|
||||
O.alt(() => getAuthFromParsedArgs(parsedArguments)),
|
||||
O.alt(() => getAuthFromURLObject(urlObject)),
|
||||
O.getOrElse(() => defaultRESTReq.auth)
|
||||
)
|
||||
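A short sketch of the precedence documented above, with illustrative inputs: the Authorization header wins over the -u argument, which in turn wins over credentials embedded in the URL (the Basic value decodes to user:pass, as in the curl spec samples):

const auth = getAuthObject(
  { _: [], u: "cliUser:cliPass" },           // parsed -u / --user argument
  { Authorization: "Basic dXNlcjpwYXNz" },   // header auth, highest priority
  new URL("http://urlUser:urlPass@localhost/")
)
// -> { authType: "basic", authActive: true, username: "user", password: "pass" }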
169
packages/hoppscotch-common/src/helpers/curl/sub_helpers/body.ts
Normal file
@@ -0,0 +1,169 @@
import parser from "yargs-parser"
import { pipe, flow } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import * as RNEA from "fp-ts/ReadonlyNonEmptyArray"
import * as S from "fp-ts/string"
import {
  HoppRESTReqBody,
  HoppRESTReqBodyFormData,
  ValidContentTypes,
  knownContentTypes,
} from "@hoppscotch/data"
import { detectContentType, parseBody } from "./contentParser"
import { tupleToRecord } from "~/helpers/functional/record"
import {
  objHasProperty,
  objHasArrayProperty,
} from "~/helpers/functional/object"

type BodyReturnType =
  | { type: "FORMDATA"; body: Record<string, string> }
  | {
      type: "NON_FORMDATA"
      body: Exclude<HoppRESTReqBody, HoppRESTReqBodyFormData>
    }

/** Parses body based on the content type
 * @param rData Raw data
 * @param cType Sanitized content type
 * @returns Option of parsed body of type string | Record<string, string>
 */
const getBodyFromContentType =
  (rData: string, cType: HoppRESTReqBody["contentType"]) => (rct: string) =>
    pipe(
      cType,
      O.fromPredicate((ctype) => ctype === "multipart/form-data"),
      O.chain(() =>
        pipe(
          // pass rawContentType for ascertaining the boundary
          parseBody(rData, cType, rct),
          O.filter((parsedBody) => typeof parsedBody !== "string")
        )
      ),
      O.alt(() =>
        pipe(
          parseBody(rData, cType),
          O.filter(
            (parsedBody) =>
              typeof parsedBody === "string" && parsedBody.length > 0
          )
        )
      )
    )

const getContentTypeFromRawContentType = (rawContentType: string) =>
  pipe(
    rawContentType,
    O.fromPredicate((rct) => rct.length > 0),
    // get everything before the semicolon
    O.map(flow(S.toLowerCase, S.split(";"), RNEA.head)),
    // if rawContentType is valid, cast it to the contentType type
    O.filter((ct) => Object.keys(knownContentTypes).includes(ct)),
    O.map((ct) => ct as HoppRESTReqBody["contentType"])
  )

const getContentTypeFromRawData = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate((rd) => rd.length > 0),
    O.map(detectContentType)
  )

export const getBody = (
  rawData: string,
  rawContentType: string,
  contentType: HoppRESTReqBody["contentType"]
): O.Option<BodyReturnType> => {
  return pipe(
    O.Do,

    O.bind("cType", () =>
      pipe(
        // get provided content-type
        contentType,
        O.fromNullable,
        // or figure it out
        O.alt(() => getContentTypeFromRawContentType(rawContentType)),
        O.alt(() => getContentTypeFromRawData(rawData))
      )
    ),

    O.bind("rData", () =>
      pipe(
        rawData,
        O.fromPredicate(() => rawData.length > 0)
      )
    ),

    O.bind("ctBody", ({ cType, rData }) =>
      pipe(rawContentType, getBodyFromContentType(rData, cType))
    ),

    O.map(({ cType, ctBody }) =>
      typeof ctBody === "string"
        ? {
            type: "NON_FORMDATA",
            body: {
              body: ctBody,
              contentType: cType as Exclude<
                ValidContentTypes,
                "multipart/form-data"
              >,
            },
          }
        : { type: "FORMDATA", body: ctBody }
    )
  )
}

/**
 * Parses and structures multipart/form-data from the -F argument of a curl command
 * @param parsedArguments Parsed Arguments object
 * @returns Option of Record<string, string> type containing key-value pairs of multipart/form-data
 */
export function getFArgumentMultipartData(
  parsedArguments: parser.Arguments
): O.Option<Record<string, string>> {
  // --form or -F multipart data

  return pipe(
    parsedArguments,
    // make it an array if not already
    O.fromPredicate(objHasProperty("F", "string")),
    O.map((args) => [args.F]),
    O.alt(() =>
      pipe(
        parsedArguments,
        O.fromPredicate(objHasArrayProperty("F", "string")),
        O.map((args) => args.F)
      )
    ),
    O.chain(
      flow(
        A.map(S.split("=")),
        // can only have a key and no value
        O.fromPredicate((fArgs) => fArgs.length > 0),
        O.map(
          flow(
            A.map(([k, v]) =>
              pipe(
                parsedArguments,
                // form-string option allows for "@" and "<" prefixes
                // without them being considered as files
                O.fromPredicate(objHasProperty("form-string", "boolean")),
                O.match(
                  // leave the value field empty for files
                  () => [k, v[0] === "@" || v[0] === "<" ? "" : v],
                  () => [k, v]
                )
              )
            ),
            A.map(([k, v]) => [k, v] as [string, string]),
            tupleToRecord
          )
        )
      )
    )
  )
}
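// --- Illustrative usage sketch (not part of the commit) ---------------------
// A quick look at the discriminated union getBody returns, assuming this
// module is importable as "./body". A raw JSON payload with no explicit
// content type should come back as a NON_FORMDATA body tagged application/json,
// with the body prettified by the content parser.
import * as O from "fp-ts/Option"
import { getBody } from "./body"

const result = getBody('{"name": "hoppscotch"}', "", null)
// Expected shape: O.some({
//   type: "NON_FORMDATA",
//   body: { body: '{\n  "name": "hoppscotch"\n}', contentType: "application/json" },
// })
console.log(O.isSome(result))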
@@ -0,0 +1,302 @@
import { HoppRESTReqBody } from "@hoppscotch/data"
import * as O from "fp-ts/Option"
import * as RA from "fp-ts/ReadonlyArray"
import * as S from "fp-ts/string"
import { pipe, flow } from "fp-ts/function"
import { tupleToRecord } from "~/helpers/functional/record"
import { safeParseJSON } from "~/helpers/functional/json"
import { optionChoose } from "~/helpers/functional/option"

const isJSON = flow(safeParseJSON, O.isSome)

const isXML = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate(() => /<\/?[a-zA-Z][\s\S]*>/i.test(rawData)),
    O.chain(prettifyXml),
    O.isSome
  )

const isHTML = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate(() => /<\/?[a-zA-Z][\s\S]*>/i.test(rawData)),
    O.isSome
  )

const isFormData = (rawData: string) =>
  pipe(
    rawData.match(/^-{2,}[A-Za-z0-9]+\\r\\n/),
    O.fromNullable,
    O.filter((boundaryMatch) => boundaryMatch.length > 0),
    O.isSome
  )

const isXWWWFormUrlEncoded = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate((rd) => /([^&=]+)=([^&=]*)/.test(rd)),
    O.isSome
  )

/**
 * Detects the content type of the input string
 * @param rawData String for which content type is to be detected
 * @returns Content type of the data
 */
export const detectContentType = (
  rawData: string
): HoppRESTReqBody["contentType"] =>
  pipe(
    rawData,
    optionChoose([
      [(rd) => !rd, null],
      [isJSON, "application/json" as const],
      [isFormData, "multipart/form-data" as const],
      [isXML, "application/xml" as const],
      [isHTML, "text/html" as const],
      [isXWWWFormUrlEncoded, "application/x-www-form-urlencoded" as const],
    ]),
    O.getOrElseW(() => "text/plain" as const)
  )

const multipartFunctions = {
  getBoundary(rawData: string, rawContentType: string | undefined) {
    return pipe(
      rawContentType,
      O.fromNullable,
      O.filter((rct) => rct.length > 0),
      O.match(
        () => this.getBoundaryFromRawData(rawData),
        (rct) => this.getBoundaryFromRawContentType(rawData, rct)
      )
    )
  },

  getBoundaryFromRawData(rawData: string) {
    return pipe(
      rawData.match(/(-{2,}[A-Za-z0-9]+)\\r\\n/g),
      O.fromNullable,
      O.filter((boundaryMatch) => boundaryMatch.length > 0),
      O.map((matches) => matches[0].slice(0, -4))
    )
  },

  getBoundaryFromRawContentType(rawData: string, rawContentType: string) {
    return pipe(
      rawContentType.match(/boundary=(.+)/),
      O.fromNullable,
      O.filter((boundaryContentMatch) => boundaryContentMatch.length > 1),
      O.filter((matches) =>
        rawData.replaceAll("\\r\\n", "").endsWith("--" + matches[1] + "--")
      ),
      O.map((matches) => "--" + matches[1])
    )
  },

  splitUsingBoundaryAndNewLines(rawData: string, boundary: string) {
    return pipe(
      rawData,
      S.split(RegExp(`${boundary}-*`)),
      RA.filter((p) => p !== "" && p.includes("name")),
      RA.map((p) =>
        pipe(
          p.replaceAll(/\\r\\n+/g, "\\r\\n"),
          S.split("\\r\\n"),
          RA.filter((q) => q !== "")
        )
      )
    )
  },

  getNameValuePair(pair: readonly string[]) {
    return pipe(
      pair,
      O.fromPredicate((p) => p.length > 1),
      O.chain((pair) => O.fromNullable(pair[0].match(/ name="(\w+)"/))),
      O.filter((nameMatch) => nameMatch.length > 0),
      O.chain((nameMatch) =>
        pipe(
          nameMatch[0],
          S.replace(/"/g, ""),
          S.split("="),
          O.fromPredicate((q) => q.length === 2),
          O.map(
            (nameArr) =>
              [nameArr[1], pair[0].includes("filename") ? "" : pair[1]] as [
                string,
                string
              ]
          )
        )
      )
    )
  },
}

const getFormDataBody = (rawData: string, rawContentType: string | undefined) =>
  pipe(
    multipartFunctions.getBoundary(rawData, rawContentType),
    O.map((boundary) =>
      pipe(
        multipartFunctions.splitUsingBoundaryAndNewLines(rawData, boundary),
        RA.filterMap((p) => multipartFunctions.getNameValuePair(p)),
        RA.toArray
      )
    ),

    O.filter((arr) => arr.length > 0),
    O.map(tupleToRecord)
  )

const getHTMLBody = flow(formatHTML, O.of)

const getXMLBody = (rawData: string) =>
  pipe(
    rawData,
    prettifyXml,
    O.alt(() => O.some(rawData))
  )

const getFormattedJSON = flow(
  safeParseJSON,
  O.map((parsedJSON) => JSON.stringify(parsedJSON, null, 2)),
  O.getOrElse(() => "{ }")
)

const getXWWWFormUrlEncodedBody = flow(
  decodeURIComponent,
  (decoded) => decoded.match(/(([^&=]+)=?([^&=]*))/g),
  O.fromNullable,
  O.map((pairs) => pairs.map((p) => p.replace("=", ": ")).join("\n"))
)

/**
 * Parses provided string according to the content type
 * @param rawData Data to be parsed
 * @param contentType Content type of the data
 * @param rawContentType Optional parameter required for multipart/form-data
 * @returns Option of parsed body as string or Record object for multipart/form-data
 */
export function parseBody(
  rawData: string,
  contentType: HoppRESTReqBody["contentType"],
  rawContentType?: string
): O.Option<string | Record<string, string>> {
  switch (contentType) {
    case "application/hal+json":
    case "application/ld+json":
    case "application/vnd.api+json":
    case "application/json":
      return O.some(getFormattedJSON(rawData))

    case "application/x-www-form-urlencoded":
      return getXWWWFormUrlEncodedBody(rawData)

    case "multipart/form-data":
      return getFormDataBody(rawData, rawContentType)

    case "text/html":
      return getHTMLBody(rawData)

    case "application/xml":
      return getXMLBody(rawData)

    case "text/plain":
    default:
      return O.some(rawData)
  }
}

/**
 * Formatter Functions
 */

/**
 * Prettifies XML string
 * @param sourceXml The string to format
 * @returns Indented XML string (uses spaces)
 */
function prettifyXml(sourceXml: string) {
  return pipe(
    O.tryCatch(() => {
      const xmlDoc = new DOMParser().parseFromString(
        sourceXml,
        "application/xml"
      )

      if (xmlDoc.querySelector("parsererror")) {
        throw new Error("Unstructured Body")
      }

      const xsltDoc = new DOMParser().parseFromString(
        [
          // describes how we want to modify the XML - indent everything
          '<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform">',
          ' <xsl:strip-space elements="*"/>',
          ' <xsl:template match="para[content-style][not(text())]">', // change to just text() to strip space in text nodes
          ' <xsl:value-of select="normalize-space(.)"/>',
          " </xsl:template>",
          ' <xsl:template match="node()|@*">',
          ' <xsl:copy><xsl:apply-templates select="node()|@*"/></xsl:copy>',
          " </xsl:template>",
          ' <xsl:output indent="yes"/>',
          "</xsl:stylesheet>",
        ].join("\n"),
        "application/xml"
      )

      const xsltProcessor = new XSLTProcessor()
      xsltProcessor.importStylesheet(xsltDoc)
      const resultDoc = xsltProcessor.transformToDocument(xmlDoc)
      const resultXml = new XMLSerializer().serializeToString(resultDoc)

      return resultXml
    })
  )
}

/**
 * Prettifies HTML string
 * @param htmlString The string to format
 * @returns Indented HTML string (uses spaces)
 */
function formatHTML(htmlString: string) {
  const tab = " "
  let result = ""
  let indent = ""

  const emptyTags = [
    "area",
    "base",
    "br",
    "col",
    "embed",
    "hr",
    "img",
    "input",
    "link",
    "meta",
    "param",
    "source",
    "track",
    "wbr",
  ]

  const spl = htmlString.split(/>\s*</)
  spl.forEach((element) => {
    if (element.match(/^\/\w/)) {
      indent = indent.substring(tab.length)
    }

    result += indent + "<" + element + ">\n"

    if (
      element.match(/^<?\w[^>]*[^/]$/) &&
      !emptyTags.includes(element.match(/^([a-z]*)/i)?.at(1) || "")
    ) {
      indent += tab
    }
  })

  return result.substring(1, result.length - 2)
}
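// --- Illustrative usage sketch (not part of the commit) ---------------------
// detectContentType walks the predicate list in order, assuming this module is
// importable as "./contentParser" (XML/HTML detection additionally relies on
// the browser DOM the app runs in).
import { detectContentType } from "./contentParser"

console.log(detectContentType('{"a": 1}')) // "application/json"
console.log(detectContentType("a=1&b=2")) // "application/x-www-form-urlencoded"
console.log(detectContentType("plain words")) // "text/plain"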
@@ -0,0 +1,27 @@
import parser from "yargs-parser"
import * as cookie from "cookie"
import * as O from "fp-ts/Option"
import * as S from "fp-ts/string"
import { pipe, flow } from "fp-ts/function"
import { objHasProperty } from "~/helpers/functional/object"

export function getCookies(parsedArguments: parser.Arguments) {
  return pipe(
    parsedArguments,
    O.fromPredicate(objHasProperty("cookie", "string")),

    O.map((args) => args.cookie),

    O.alt(() =>
      pipe(
        parsedArguments,
        O.fromPredicate(objHasProperty("b", "string")),
        O.map((args) => args.b)
      )
    ),

    O.map(flow(S.replace(/^cookie: /i, ""), cookie.parse)),

    O.getOrElse(() => ({}))
  )
}
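// --- Illustrative usage sketch (not part of the commit) ---------------------
// Either --cookie or -b is accepted; an optional "Cookie: " prefix is stripped
// before handing off to cookie.parse. Assumes "./cookies" as the import path.
import parser from "yargs-parser"
import { getCookies } from "./cookies"

const parsed = parser(["-b", "SID=abc123; theme=dark", "https://example.com"])
console.log(getCookies(parsed)) // { SID: "abc123", theme: "dark" }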
@@ -0,0 +1,76 @@
import parser from "yargs-parser"
import { pipe, flow } from "fp-ts/function"
import { HoppRESTHeader } from "@hoppscotch/data"
import * as A from "fp-ts/Array"
import * as S from "fp-ts/string"
import * as O from "fp-ts/Option"
import { tupleToRecord } from "~/helpers/functional/record"
import {
  objHasProperty,
  objHasArrayProperty,
} from "~/helpers/functional/object"

const getHeaderPair = flow(
  S.split(": "),
  // must have a key and a value
  O.fromPredicate((arr) => arr.length === 2),
  O.map(([k, v]) => [k.trim(), v?.trim() ?? ""] as [string, string])
)

export function getHeaders(parsedArguments: parser.Arguments) {
  let headers: Record<string, string> = {}

  headers = pipe(
    parsedArguments,
    // make it an array if not already
    O.fromPredicate(objHasProperty("H", "string")),
    O.map((args) => [args.H]),
    O.alt(() =>
      pipe(
        parsedArguments,
        O.fromPredicate(objHasArrayProperty("H", "string")),
        O.map((args) => args.H)
      )
    ),
    O.map(
      flow(
        A.map(getHeaderPair),
        A.filterMap((a) => a),
        tupleToRecord
      )
    ),
    O.getOrElseW(() => ({}))
  )

  if (
    objHasProperty("A", "string")(parsedArguments) ||
    objHasProperty("user-agent", "string")(parsedArguments)
  )
    headers["User-Agent"] = parsedArguments.A ?? parsedArguments["user-agent"]

  const rawContentType =
    headers["Content-Type"] ?? headers["content-type"] ?? ""

  return {
    headers,
    rawContentType,
  }
}

export const recordToHoppHeaders = (
  headers: Record<string, string>
): HoppRESTHeader[] =>
  pipe(
    Object.keys(headers),
    A.map((key) => ({
      key,
      value: headers[key],
      active: true,
    })),
    A.filter(
      (header) =>
        header.key !== "Authorization" &&
        header.key !== "content-type" &&
        header.key !== "Content-Type"
    )
  )
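// --- Illustrative usage sketch (not part of the commit) ---------------------
// Repeated -H flags arrive from yargs-parser as an array, a single one as a
// string, which is why getHeaders normalises both shapes. Assumes "./headers"
// as the import path.
import parser from "yargs-parser"
import { getHeaders, recordToHoppHeaders } from "./headers"

const parsed = parser([
  "-H", "Content-Type: application/json",
  "-H", "X-Debug: 1",
])
const { headers, rawContentType } = getHeaders(parsed)
// headers => { "Content-Type": "application/json", "X-Debug": "1" }
// rawContentType => "application/json"
// recordToHoppHeaders drops Authorization/Content-Type, leaving X-Debug only
console.log(recordToHoppHeaders(headers))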
@@ -0,0 +1,68 @@
import parser from "yargs-parser"
import { pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as R from "fp-ts/Refinement"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
import {
  objHasProperty,
  objHasArrayProperty,
} from "~/helpers/functional/object"

const defaultRESTReq = getDefaultRESTRequest()

const getMethodFromXArg = (parsedArguments: parser.Arguments) =>
  pipe(
    parsedArguments,
    O.fromPredicate(objHasProperty("X", "string")),
    O.map((args) => args.X.trim()),
    O.chain((xarg) =>
      pipe(
        O.fromNullable(
          xarg.match(/GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE/i)
        ),
        O.alt(() => O.fromNullable(xarg.match(/[a-zA-Z]+/)))
      )
    ),
    O.map((method) => method[0])
  )

const getMethodByDeduction = (parsedArguments: parser.Arguments) => {
  if (
    pipe(
      objHasProperty("T", "string"),
      R.or(objHasProperty("upload-file", "string"))
    )(parsedArguments)
  )
    return O.some("put")
  else if (
    pipe(
      objHasProperty("I", "boolean"),
      R.or(objHasProperty("head", "boolean"))
    )(parsedArguments)
  )
    return O.some("head")
  else if (objHasProperty("G", "boolean")(parsedArguments)) return O.some("get")
  else if (
    pipe(
      objHasProperty("d", "string"),
      R.or(objHasArrayProperty("d", "string")),
      R.or(objHasProperty("F", "string")),
      R.or(objHasArrayProperty("F", "string"))
    )(parsedArguments)
  )
    return O.some("POST")
  else return O.none
}

/**
 * Get method type from X argument in curl string or
 * find it out through other arguments
 * @param parsedArguments Parsed Arguments object
 * @returns Method string
 */
export const getMethod = (parsedArguments: parser.Arguments): string =>
  pipe(
    getMethodFromXArg(parsedArguments),
    O.alt(() => getMethodByDeduction(parsedArguments)),
    O.getOrElse(() => defaultRESTReq.method)
  )
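// --- Illustrative usage sketch (not part of the commit) ---------------------
// With no explicit -X, the method is deduced from other flags: -d/-F imply
// POST, -I/--head imply HEAD, -T implies PUT. Assumes "./method" as the path.
import parser from "yargs-parser"
import { getMethod } from "./method"

console.log(getMethod(parser(["-X", "PATCH"]))) // "PATCH"
console.log(getMethod(parser(["-d", "a=1"]))) // "POST" (deduced from -d)
console.log(getMethod(parser(["-I"]))) // "head" (deduced from -I)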
@@ -0,0 +1,70 @@
import { pipe, flow } from "fp-ts/function"
import * as S from "fp-ts/string"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"

const replaceables: { [key: string]: string } = {
  "--request": "-X",
  "--header": "-H",
  "--url": "",
  "--form": "-F",
  "--data-raw": "--data",
  "--data": "-d",
  "--data-ascii": "-d",
  "--data-binary": "-d",
  "--user": "-u",
  "--get": "-G",
}

const paperCuts = flow(
  // remove '\' and newlines
  S.replace(/ ?\\ ?$/gm, " "),
  S.replace(/\n/g, " "),
  // remove all $ symbols from start of argument values
  S.replace(/\$'/g, "'"),
  S.replace(/\$"/g, '"'),
  S.trim
)

// replace a long option like --zargs with its short form -Z
const replaceLongOptions = (curlCmd: string) =>
  pipe(Object.keys(replaceables), A.reduce(curlCmd, replaceFunction))

const replaceFunction = (curlCmd: string, r: string) =>
  pipe(
    curlCmd,
    O.fromPredicate(
      () => r.includes("data") || r.includes("form") || r.includes("header")
    ),
    O.map(S.replace(RegExp(`[ \t]${r}(["' ])`, "g"), ` ${replaceables[r]}$1`)),
    O.alt(() =>
      pipe(
        curlCmd,
        S.replace(RegExp(`[ \t]${r}(["' ])`), ` ${replaceables[r]}$1`),
        O.of
      )
    ),
    O.getOrElse(() => "")
  )

// yargs parses -XPOST as separate arguments. just prescreen for it.
const prescreenXArgs = flow(
  S.replace(
    / -X(GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE)/,
    " -X $1"
  ),
  S.trim
)

/**
 * Sanitizes and makes curl string processable
 * @param curlCommand Raw curl command string
 * @returns Processed curl command string
 */
export const preProcessCurlCommand = (curlCommand: string) =>
  pipe(
    curlCommand,
    O.fromPredicate((curlCmd) => curlCmd.length > 0),
    O.map(flow(paperCuts, replaceLongOptions, prescreenXArgs)),
    O.getOrElse(() => "")
  )
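// --- Illustrative usage sketch (not part of the commit) ---------------------
// Long options are folded into their short forms and line continuations are
// collapsed, so downstream parsing only has to deal with one shape.
// Assumes "./preproc" as the import path.
import { preProcessCurlCommand } from "./preproc"

const cmd = `curl --request POST \\
  --header 'Content-Type: application/json' \\
  --data '{"a":1}' https://example.com`
console.log(preProcessCurlCommand(cmd))
// => curl -X POST -H 'Content-Type: application/json' -d '{"a":1}' https://example.com
//    (modulo extra whitespace where the continuations were)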
@@ -0,0 +1,43 @@
import { pipe, flow } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import * as Sep from "fp-ts/Separated"
import { HoppRESTParam } from "@hoppscotch/data"

const isDangling = ([, value]: [string, string]) => !value

/**
 * Converts queries to HoppRESTParam format and separates dangling ones
 * @param params Array of key value pairs of queries
 * @returns Object containing separated queries and dangling queries
 */
export function getQueries(params: Array<[string, string]>): {
  queries: Array<HoppRESTParam>
  danglingParams: Array<string>
} {
  return pipe(
    params,
    O.of,
    O.map(
      flow(
        A.partition(isDangling),
        Sep.bimap(
          A.map(([key, value]) => ({
            key,
            value,
            active: true,
          })),
          A.map(([key]) => key)
        ),
        (sep) => ({
          queries: sep.left,
          danglingParams: sep.right,
        })
      )
    ),
    O.getOrElseW(() => ({
      queries: [],
      danglingParams: [],
    }))
  )
}
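// --- Illustrative usage sketch (not part of the commit) ---------------------
// Pairs with empty values are treated as "dangling" and returned separately so
// concatParams (in url.ts) can re-append them verbatim. Assumes "./queries".
import { getQueries } from "./queries"

console.log(
  getQueries([
    ["page", "2"],
    ["verbose", ""],
  ])
)
// => { queries: [{ key: "page", value: "2", active: true }], danglingParams: ["verbose"] }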
112
packages/hoppscotch-common/src/helpers/curl/sub_helpers/url.ts
Normal file
@@ -0,0 +1,112 @@
import parser from "yargs-parser"
import { pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
import { stringArrayJoin } from "~/helpers/functional/array"

const defaultRESTReq = getDefaultRESTRequest()

const getProtocolFromURL = (url: string) =>
  pipe(
    // get the base URL
    /^([^\s:@]+:[^\s:@]+@)?([^:/\s]+)([:]*)/.exec(url),
    O.fromNullable,
    O.filter((burl) => burl.length > 1),
    O.map((burl) => burl[2]),
    // set protocol to http for local URLs
    O.map((burl) =>
      burl === "localhost" ||
      burl === "2130706433" ||
      /127(\.0){0,2}\.1/.test(burl) ||
      /0177(\.0){0,2}\.1/.test(burl) ||
      /0x7f(\.0){0,2}\.1/.test(burl) ||
      /192\.168(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){2}/.test(burl) ||
      /10(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}/.test(burl)
        ? "http://" + url
        : "https://" + url
    )
  )

/**
 * Checks if the URL is valid using the URL constructor
 * @param urlString URL string (with protocol)
 * @returns boolean whether the URL is valid using the inbuilt URL class
 */
const isURLValid = (urlString: string) =>
  pipe(
    O.tryCatch(() => new URL(urlString)),
    O.isSome
  )

/**
 * Checks and returns URL object for the valid URL
 * @param urlText Raw URL string provided by argument parser
 * @returns Option of URL object
 */
const parseURL = (urlText: string | number) =>
  pipe(
    urlText,
    O.fromNullable,
    // preprocess url string
    O.map((u) => u.toString().replaceAll(/[^a-zA-Z0-9_\-./?&=:@%+#,;\s]/g, "")),
    O.filter((u) => u.length > 0),
    O.chain((u) =>
      pipe(
        u,
        // check if protocol is available
        O.fromPredicate(
          (url: string) => /^[^:\s]+(?=:\/\/)/.exec(url) !== null
        ),
        O.alt(() => getProtocolFromURL(u))
      )
    ),
    O.filter(isURLValid),
    O.map((u) => new URL(u))
  )

/**
 * Processes URL string and returns the URL object
 * @param parsedArguments Parsed Arguments object
 * @returns URL object
 */
export function getURLObject(parsedArguments: parser.Arguments) {
  return pipe(
    // contains raw url strings
    parsedArguments._.slice(1),
    A.findFirstMap(parseURL),
    // no url found
    O.getOrElse(() => new URL(defaultRESTReq.endpoint))
  )
}

/**
 * Joins dangling params to origin
 * @param urlObject URL object containing origin and pathname
 * @param danglingParams Keys of params with empty values
 * @returns origin string concatenated with dangling params
 */
export function concatParams(urlObject: URL, danglingParams: string[]) {
  return pipe(
    O.Do,

    O.bind("originString", () =>
      pipe(
        urlObject.origin,
        O.fromPredicate((h) => h !== "")
      )
    ),

    O.map(({ originString }) =>
      pipe(
        danglingParams,
        O.fromPredicate((dp) => dp.length > 0),
        O.map(stringArrayJoin("&")),
        O.map((h) => originString + (urlObject.pathname || "") + "?" + h),
        O.getOrElse(() => originString + (urlObject.pathname || ""))
      )
    ),

    O.getOrElse(() => defaultRESTReq.endpoint)
  )
}
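// --- Illustrative usage sketch (not part of the commit) ---------------------
// parseURL falls back to guessing a protocol: local-looking hosts get http://,
// everything else https://. Assumes "./url" as the import path.
import parser from "yargs-parser"
import { getURLObject, concatParams } from "./url"

const urlObject = getURLObject(parser(["curl", "localhost:3000/api"]))
console.log(urlObject.href) // "http://localhost:3000/api"
console.log(concatParams(urlObject, ["verbose", "debug"]))
// => "http://localhost:3000/api?verbose&debug"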
4
packages/hoppscotch-common/src/helpers/dev.ts
Normal file
@@ -0,0 +1,4 @@
/**
 * A constant specifying whether the app is running in the development server
 */
export const APP_IS_IN_DEV_MODE = import.meta.env.DEV
@@ -0,0 +1,27 @@
import { Ref } from "vue"
import { GraphQLSchema } from "graphql"
import { getAutocompleteSuggestions } from "graphql-language-service-interface"
import { Completer, CompleterResult, CompletionEntry } from "."

const completer: (schemaRef: Ref<GraphQLSchema | null>) => Completer =
  (schemaRef: Ref<GraphQLSchema | null>) => (text, completePos) => {
    if (!schemaRef.value) return Promise.resolve(null)

    const completions = getAutocompleteSuggestions(schemaRef.value, text, {
      line: completePos.line,
      character: completePos.ch,
    } as any)

    return Promise.resolve(<CompleterResult>{
      completions: completions.map(
        (x, i) =>
          <CompletionEntry>{
            text: x.label!,
            meta: x.detail!,
            score: completions.length - i,
          }
      ),
    })
  }

export default completer
@@ -0,0 +1,23 @@
export type CompletionEntry = {
  text: string
  meta: string
  score: number
}

export type CompleterResult = {
  /**
   * List of completions to display
   */
  completions: CompletionEntry[]
}

export type Completer = (
  /**
   * The contents of the editor
   */
  text: string,
  /**
   * Position where the completer is fired
   */
  completePos: { line: number; ch: number }
) => Promise<CompleterResult | null>
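// --- Illustrative sketch (not part of the commit) ---------------------------
// The Completer contract is just "text plus cursor position in, scored entries
// out"; a hypothetical static completer is enough to satisfy it.
import { Completer, CompletionEntry } from "."

const staticCompleter: Completer = (_text, _completePos) =>
  Promise.resolve({
    completions: ["query", "mutation", "subscription"].map(
      (word, i): CompletionEntry => ({
        text: word,
        meta: "keyword",
        score: 3 - i, // higher score sorts earlier
      })
    ),
  })

export default staticCompleter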
@@ -0,0 +1,24 @@
import { Completer, CompletionEntry } from "."
import { getPreRequestScriptCompletions } from "~/helpers/tern"

const completer: Completer = async (text, completePos) => {
  const results = await getPreRequestScriptCompletions(
    text,
    completePos.line,
    completePos.ch
  )

  const completions = results.completions.map((completion: any, i: number) => {
    return <CompletionEntry>{
      text: completion.name,
      meta: completion.isKeyword ? "keyword" : completion.type,
      score: results.completions.length - i,
    }
  })

  return {
    completions,
  }
}

export default completer
@@ -0,0 +1,24 @@
import { Completer, CompletionEntry } from "."
import { getTestScriptCompletions } from "~/helpers/tern"

export const completer: Completer = async (text, completePos) => {
  const results = await getTestScriptCompletions(
    text,
    completePos.line,
    completePos.ch
  )

  const completions = results.completions.map((completion: any, i: number) => {
    return <CompletionEntry>{
      text: completion.name,
      meta: completion.isKeyword ? "keyword" : completion.type,
      score: results.completions.length - i,
    }
  })

  return {
    completions,
  }
}

export default completer
@@ -0,0 +1,226 @@
import { watch, Ref } from "vue"
import { Compartment } from "@codemirror/state"
import {
  Decoration,
  EditorView,
  MatchDecorator,
  ViewPlugin,
  hoverTooltip,
} from "@codemirror/view"
import * as E from "fp-ts/Either"
import { parseTemplateStringE } from "@hoppscotch/data"
import { StreamSubscriberFunc } from "@composables/stream"
import {
  AggregateEnvironment,
  aggregateEnvs$,
  getAggregateEnvs,
  getSelectedEnvironmentType,
} from "~/newstore/environments"
import { invokeAction } from "~/helpers/actions"

const HOPP_ENVIRONMENT_REGEX = /(<<[a-zA-Z0-9-_]+>>)/g

const HOPP_ENV_HIGHLIGHT =
  "cursor-help transition rounded px-1 focus:outline-none mx-0.5 env-highlight"
const HOPP_ENV_HIGHLIGHT_FOUND =
  "bg-accentDark text-accentContrast hover:bg-accent"
const HOPP_ENV_HIGHLIGHT_NOT_FOUND =
  "bg-red-500 text-accentContrast hover:bg-red-600"

const cursorTooltipField = (aggregateEnvs: AggregateEnvironment[]) =>
  hoverTooltip(
    (view, pos, side) => {
      const { from, to, text } = view.state.doc.lineAt(pos)

      // TODO: When Codemirror 6 allows this to work (not make the
      // popups appear half of the time) use this implementation
      // const wordSelection = view.state.wordAt(pos)
      // if (!wordSelection) return null
      // const word = view.state.doc.sliceString(
      //   wordSelection.from - 2,
      //   wordSelection.to + 2
      // )
      // if (!HOPP_ENVIRONMENT_REGEX.test(word)) return null

      // Tracking the start and the end of the words
      let start = pos
      let end = pos

      while (start > from && /[a-zA-Z0-9-_]+/.test(text[start - from - 1]))
        start--
      while (end < to && /[a-zA-Z0-9-_]+/.test(text[end - from])) end++

      if (
        (start === pos && side < 0) ||
        (end === pos && side > 0) ||
        !HOPP_ENVIRONMENT_REGEX.test(
          text.slice(start - from - 2, end - from + 2)
        )
      )
        return null

      const parsedEnvKey = text.slice(start - from, end - from)

      const tooltipEnv = aggregateEnvs.find((env) => env.key === parsedEnvKey)

      const envName = tooltipEnv?.sourceEnv ?? "Choose an Environment"

      const envValue = tooltipEnv?.value ?? "Not found"

      const result = parseTemplateStringE(envValue, aggregateEnvs)

      const finalEnv = E.isLeft(result) ? "error" : result.right

      const selectedEnvType = getSelectedEnvironmentType()

      const envTypeIcon = `<i class="inline-flex -my-1 -mx-0.5 opacity-65 items-center text-base material-icons border-secondary">${
        selectedEnvType === "TEAM_ENV" ? "people" : "person"
      }</i>`

      const appendEditAction = (tooltip: HTMLElement) => {
        const editIcon = document.createElement("span")
        editIcon.className =
          "ml-2 cursor-pointer env-icon text-accent hover:text-accentDark"
        editIcon.addEventListener("click", () => {
          const isPersonalEnv =
            envName === "Global" || selectedEnvType !== "TEAM_ENV"
          const action = isPersonalEnv ? "my" : "team"
          invokeAction(`modals.${action}.environment.edit`, {
            envName,
            variableName: parsedEnvKey,
          })
        })
        editIcon.innerHTML = `<i class="inline-flex items-center px-1 -mx-1 -my-1 text-base material-icons border-secondary">drive_file_rename_outline</i>`
        tooltip.appendChild(editIcon)
      }

      return {
        pos: start,
        end: to,
        above: true,
        arrow: true,
        create() {
          const dom = document.createElement("span")
          const tooltipContainer = document.createElement("span")
          const kbd = document.createElement("kbd")
          const icon = document.createElement("span")
          icon.innerHTML = envTypeIcon
          icon.className = "mr-2 env-icon"
          kbd.textContent = finalEnv
          tooltipContainer.appendChild(icon)
          tooltipContainer.appendChild(document.createTextNode(`${envName} `))
          tooltipContainer.appendChild(kbd)
          if (tooltipEnv) appendEditAction(tooltipContainer)
          tooltipContainer.className = "tippy-content"
          dom.className = "tippy-box"
          dom.dataset.theme = "tooltip"
          dom.appendChild(tooltipContainer)
          return { dom }
        },
      }
    },
    // HACK: This is a hack to fix the hover tooltip not coming up half of the time
    // https://github.com/codemirror/tooltip/blob/765c463fc1d5afcc3ec93cee47d72606bed27e1d/src/tooltip.ts#L622
    // Still doesn't fix the not showing up some of the time issue, but this is at least more consistent
    { hoverTime: 1 } as any
  )

function checkEnv(env: string, aggregateEnvs: AggregateEnvironment[]) {
  const className = aggregateEnvs.find(
    (k: { key: string }) => k.key === env.slice(2, -2)
  )
    ? HOPP_ENV_HIGHLIGHT_FOUND
    : HOPP_ENV_HIGHLIGHT_NOT_FOUND

  return Decoration.mark({
    class: `${HOPP_ENV_HIGHLIGHT} ${className}`,
  })
}

const getMatchDecorator = (aggregateEnvs: AggregateEnvironment[]) =>
  new MatchDecorator({
    regexp: HOPP_ENVIRONMENT_REGEX,
    decoration: (m) => checkEnv(m[0], aggregateEnvs),
  })

export const environmentHighlightStyle = (
  aggregateEnvs: AggregateEnvironment[]
) => {
  const decorator = getMatchDecorator(aggregateEnvs)

  return ViewPlugin.define(
    (view) => ({
      decorations: decorator.createDeco(view),
      update(u) {
        this.decorations = decorator.updateDeco(u, this.decorations)
      },
    }),
    {
      decorations: (v) => v.decorations,
    }
  )
}

export class HoppEnvironmentPlugin {
  private compartment = new Compartment()

  private envs: AggregateEnvironment[] = []

  constructor(
    subscribeToStream: StreamSubscriberFunc,
    private editorView: Ref<EditorView | undefined>
  ) {
    this.envs = getAggregateEnvs()

    subscribeToStream(aggregateEnvs$, (envs) => {
      this.envs = envs

      this.editorView.value?.dispatch({
        effects: this.compartment.reconfigure([
          cursorTooltipField(this.envs),
          environmentHighlightStyle(this.envs),
        ]),
      })
    })
  }

  get extension() {
    return this.compartment.of([
      cursorTooltipField(this.envs),
      environmentHighlightStyle(this.envs),
    ])
  }
}

export class HoppReactiveEnvPlugin {
  private compartment = new Compartment()

  private envs: AggregateEnvironment[] = []

  constructor(
    envsRef: Ref<AggregateEnvironment[]>,
    private editorView: Ref<EditorView | undefined>
  ) {
    watch(
      envsRef,
      (envs) => {
        this.envs = envs

        this.editorView.value?.dispatch({
          effects: this.compartment.reconfigure([
            cursorTooltipField(this.envs),
            environmentHighlightStyle(this.envs),
          ]),
        })
      },
      { immediate: true }
    )
  }

  get extension() {
    return this.compartment.of([
      cursorTooltipField(this.envs),
      environmentHighlightStyle(this.envs),
    ])
  }
}
@@ -0,0 +1,58 @@
import { Ref } from "vue"
import {
  GraphQLError,
  GraphQLSchema,
  parse as gqlParse,
  validate as gqlValidate,
} from "graphql"
import { LinterDefinition, LinterResult } from "./linter"

/**
 * Creates a Linter function that can lint a GQL query against a given
 * schema
 */
export const createGQLQueryLinter: (
  schema: Ref<GraphQLSchema | null>
) => LinterDefinition = (schema: Ref<GraphQLSchema | null>) => (text) => {
  if (text === "") return Promise.resolve([])
  if (!schema.value) return Promise.resolve([])

  try {
    const doc = gqlParse(text)

    const results = gqlValidate(schema.value, doc).map(
      ({ locations, message }) =>
        <LinterResult>{
          from: {
            line: locations![0].line,
            ch: locations![0].column - 1,
          },
          to: {
            line: locations![0].line,
            ch: locations![0].column - 1,
          },
          message,
          severity: "error",
        }
    )

    return Promise.resolve(results)
  } catch (e) {
    const err = e as GraphQLError

    return Promise.resolve([
      <LinterResult>{
        from: {
          line: err.locations![0].line,
          ch: err.locations![0].column - 1,
        },
        to: {
          line: err.locations![0].line,
          ch: err.locations![0].column,
        },
        message: err.message,
        severity: "error",
      },
    ])
  }
}
@@ -0,0 +1,21 @@
import { convertIndexToLineCh } from "../utils"
import { LinterDefinition, LinterResult } from "./linter"
import jsonParse from "~/helpers/jsonParse"

const linter: LinterDefinition = (text) => {
  try {
    jsonParse(text)
    return Promise.resolve([])
  } catch (e: any) {
    return Promise.resolve([
      <LinterResult>{
        from: convertIndexToLineCh(text, e.start),
        to: convertIndexToLineCh(text, e.end),
        message: e.message,
        severity: "error",
      },
    ])
  }
}

export default linter
@@ -0,0 +1,7 @@
export type LinterResult = {
  message: string
  severity: "warning" | "error"
  from: { line: number; ch: number }
  to: { line: number; ch: number }
}
export type LinterDefinition = (text: string) => Promise<LinterResult[]>
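// --- Illustrative sketch (not part of the commit) ---------------------------
// A LinterDefinition just maps editor text to a list of positioned results; a
// hypothetical linter that flags tab characters fits the contract.
import { LinterDefinition, LinterResult } from "./linter"

const noTabsLinter: LinterDefinition = (text) => {
  const results: LinterResult[] = []
  text.split("\n").forEach((line, lineNo) => {
    const ch = line.indexOf("\t")
    if (ch !== -1)
      results.push({
        message: "Tab character found; use spaces instead",
        severity: "warning",
        from: { line: lineNo, ch },
        to: { line: lineNo, ch: ch + 1 },
      })
  })
  return Promise.resolve(results)
}

export default noTabsLinter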
@@ -0,0 +1,75 @@
import * as esprima from "esprima"
import { LinterDefinition, LinterResult } from "./linter"
import { performPreRequestLinting } from "~/helpers/tern"

const linter: LinterDefinition = async (text) => {
  let results: LinterResult[] = []

  // Semantic linting
  const semanticLints = await performPreRequestLinting(text)

  results = results.concat(
    semanticLints.map((lint: any) => ({
      from: {
        ch: lint.from.ch + 1,
        line: lint.from.line + 1,
      },
      to: {
        ch: lint.from.ch + 1,
        line: lint.to.line + 1,
      },
      severity: "error",
      message: `[semantic] ${lint.message}`,
    }))
  )

  // Syntax linting
  try {
    const res: any = esprima.parseScript(text, { tolerant: true })
    if (res.errors && res.errors.length > 0) {
      results = results.concat(
        res.errors.map((err: any) => {
          const fromPos: { line: number; ch: number } = {
            line: err.lineNumber,
            ch: err.column,
          }

          const toPos: { line: number; ch: number } = {
            line: err.lineNumber,
            ch: err.column,
          }

          return <LinterResult>{
            from: fromPos,
            to: toPos,
            message: `[syntax] ${err.description}`,
            severity: "error",
          }
        })
      )
    }
  } catch (e: any) {
    const fromPos: { line: number; ch: number } = {
      line: e.lineNumber,
      ch: e.column,
    }

    const toPos: { line: number; ch: number } = {
      line: e.lineNumber,
      ch: e.column,
    }

    results = results.concat([
      <LinterResult>{
        from: fromPos,
        to: toPos,
        message: `[syntax] ${e.description}`,
        severity: "error",
      },
    ])
  }

  return results
}

export default linter
@@ -0,0 +1,24 @@
import * as E from "fp-ts/Either"
import { strictParseRawKeyValueEntriesE } from "@hoppscotch/data"
import { convertIndexToLineCh } from "../utils"
import { LinterDefinition, LinterResult } from "./linter"

const linter: LinterDefinition = (text) => {
  const result = strictParseRawKeyValueEntriesE(text)
  if (E.isLeft(result)) {
    const pos = convertIndexToLineCh(text, result.left.pos)

    return Promise.resolve([
      <LinterResult>{
        from: pos,
        to: pos,
        message: result.left.message,
        severity: "error",
      },
    ])
  } else {
    return Promise.resolve([])
  }
}

export default linter
@@ -0,0 +1,75 @@
import * as esprima from "esprima"
import { LinterDefinition, LinterResult } from "./linter"
import { performTestLinting } from "~/helpers/tern"

const linter: LinterDefinition = async (text) => {
  let results: LinterResult[] = []

  // Semantic linting
  const semanticLints = await performTestLinting(text)

  results = results.concat(
    semanticLints.map((lint: any) => ({
      from: {
        ch: lint.from.ch + 1,
        line: lint.from.line + 1,
      },
      to: {
        ch: lint.from.ch + 1,
        line: lint.to.line + 1,
      },
      severity: "error",
      message: `[semantic] ${lint.message}`,
    }))
  )

  // Syntax linting
  try {
    const res: any = esprima.parseScript(text, { tolerant: true })
    if (res.errors && res.errors.length > 0) {
      results = results.concat(
        res.errors.map((err: any) => {
          const fromPos: { line: number; ch: number } = {
            line: err.lineNumber,
            ch: err.column,
          }

          const toPos: { line: number; ch: number } = {
            line: err.lineNumber,
            ch: err.column,
          }

          return <LinterResult>{
            from: fromPos,
            to: toPos,
            message: `[syntax] ${err.description}`,
            severity: "error",
          }
        })
      )
    }
  } catch (e: any) {
    const fromPos: { line: number; ch: number } = {
      line: e.lineNumber,
      ch: e.column,
    }

    const toPos: { line: number; ch: number } = {
      line: e.lineNumber,
      ch: e.column,
    }

    results = results.concat([
      <LinterResult>{
        from: fromPos,
        to: toPos,
        message: `[syntax] ${e.description}`,
        severity: "error",
      },
    ])
  }

  return results
}

export default linter
@@ -0,0 +1,413 @@
|
||||
import {
|
||||
EditorView,
|
||||
keymap,
|
||||
highlightSpecialChars,
|
||||
highlightActiveLine,
|
||||
drawSelection,
|
||||
dropCursor,
|
||||
lineNumbers,
|
||||
highlightActiveLineGutter,
|
||||
rectangularSelection,
|
||||
crosshairCursor,
|
||||
} from "@codemirror/view"
|
||||
import {
|
||||
HighlightStyle,
|
||||
defaultHighlightStyle,
|
||||
foldKeymap,
|
||||
foldGutter,
|
||||
indentOnInput,
|
||||
bracketMatching,
|
||||
syntaxHighlighting,
|
||||
} from "@codemirror/language"
|
||||
import { tags as t } from "@lezer/highlight"
|
||||
import { Extension, EditorState } from "@codemirror/state"
|
||||
import { history, historyKeymap, defaultKeymap } from "@codemirror/commands"
|
||||
import {
|
||||
closeBrackets,
|
||||
closeBracketsKeymap,
|
||||
autocompletion,
|
||||
completionKeymap,
|
||||
} from "@codemirror/autocomplete"
|
||||
import {
|
||||
searchKeymap,
|
||||
highlightSelectionMatches,
|
||||
search,
|
||||
} from "@codemirror/search"
|
||||
import { lintKeymap } from "@codemirror/lint"
|
||||
|
||||
export const baseTheme = EditorView.theme({
|
||||
"&": {
|
||||
fontSize: "var(--font-size-body)",
|
||||
height: "100%",
|
||||
width: "100%",
|
||||
flex: "1",
|
||||
},
|
||||
".cm-content": {
|
||||
caretColor: "var(--secondary-dark-color)",
|
||||
fontFamily: "var(--font-mono)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
".cm-cursor": {
|
||||
borderColor: "var(--secondary-color)",
|
||||
},
|
||||
".cm-widgetBuffer": {
|
||||
position: "absolute",
|
||||
},
|
||||
".cm-selectionBackground": {
|
||||
backgroundColor: "var(--accent-dark-color)",
|
||||
color: "var(--accent-contrast-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-panels": {
|
||||
backgroundColor: "var(--primary-light-color)",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-panels.cm-panels-top": {
|
||||
borderBottom: "1px solid var(--divider-light-color)",
|
||||
},
|
||||
".cm-panels.cm-panels-bottom": {
|
||||
borderTop: "1px solid var(--divider-light-color)",
|
||||
},
|
||||
".cm-search": {
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
flexWrap: "nowrap",
|
||||
flexShrink: 0,
|
||||
overflow: "auto",
|
||||
},
|
||||
".cm-search label": {
|
||||
display: "inline-flex",
|
||||
alignItems: "center",
|
||||
},
|
||||
".cm-textfield": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
borderColor: "var(--divider-light-color)",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-button": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
backgroundImage: "none",
|
||||
border: "none",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-completionLabel": {
|
||||
color: "var(--secondary-color)",
|
||||
},
|
||||
".cm-tooltip": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-light-color)",
|
||||
border: "none",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-tooltip-arrow": {
|
||||
color: "var(--tooltip-color)",
|
||||
},
|
||||
".cm-tooltip-arrow:after": {
|
||||
borderTopColor: "inherit !important",
|
||||
},
|
||||
".cm-tooltip-arrow:before": {
|
||||
borderTopColor: "inherit !important",
|
||||
},
|
||||
".cm-tooltip.cm-tooltip-autocomplete > ul": {
|
||||
fontFamily: "var(--font-mono)",
|
||||
},
|
||||
".cm-tooltip-autocomplete ul li[aria-selected]": {
|
||||
backgroundColor: "var(--accent-dark-color)",
|
||||
color: "var(--accent-contrast-color)",
|
||||
},
|
||||
".cm-tooltip-autocomplete ul li[aria-selected] .cm-completionLabel": {
|
||||
color: "var(--accent-contrast-color)",
|
||||
},
|
||||
".cm-activeLine": { backgroundColor: "transparent" },
|
||||
".cm-searchMatch": {
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
backgroundColor: "var(--divider-dark-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-selectionMatch": {
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
backgroundColor: "var(--divider-light-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-matchingBracket, .cm-nonmatchingBracket": {
|
||||
backgroundColor: "var(--divider-color)",
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-gutters": {
|
||||
fontFamily: "var(--font-mono)",
|
||||
backgroundColor: "var(--primary-color)",
|
||||
borderColor: "var(--divider-light-color)",
|
||||
},
|
||||
".cm-lineNumbers": {
|
||||
minWidth: "3em",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-foldGutter": {
|
||||
minWidth: "2em",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-foldGutter .cm-gutterElement": {
|
||||
textAlign: "center",
|
||||
},
|
||||
".cm-line": {
|
||||
paddingLeft: "0.5em",
|
||||
paddingRight: "0.5em",
|
||||
},
|
||||
".cm-activeLineGutter": {
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
".cm-scroller::-webkit-scrollbar": {
|
||||
display: "none",
|
||||
},
|
||||
".cm-foldPlaceholder": {
|
||||
backgroundColor: "var(--divider-light-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
borderColor: "var(--divider-dark-color)",
|
||||
},
|
||||
})
|
||||
|
||||
export const inputTheme = EditorView.theme({
|
||||
"&": {
|
||||
fontSize: "var(--font-size-body)",
|
||||
height: "100%",
|
||||
width: "100%",
|
||||
flex: "1",
|
||||
},
|
||||
".cm-content": {
|
||||
caretColor: "var(--secondary-dark-color)",
|
||||
fontFamily: "var(--font-sans)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
".cm-cursor": {
|
||||
borderColor: "var(--secondary-color)",
|
||||
},
|
||||
".cm-widgetBuffer": {
|
||||
position: "absolute",
|
||||
},
|
||||
".cm-selectionBackground": {
|
||||
backgroundColor: "var(--accent-dark-color)",
|
||||
color: "var(--accent-contrast-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-panels": {
|
||||
backgroundColor: "var(--primary-light-color)",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-panels.cm-panels-top": {
|
||||
borderBottom: "1px solid var(--divider-light-color)",
|
||||
},
|
||||
".cm-panels.cm-panels-bottom": {
|
||||
borderTop: "1px solid var(--divider-light-color)",
|
||||
},
|
||||
".cm-search": {
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
flexWrap: "nowrap",
|
||||
flexShrink: 0,
|
||||
overflow: "auto",
|
||||
},
|
||||
".cm-search label": {
|
||||
display: "inline-flex",
|
||||
alignItems: "center",
|
||||
},
|
||||
".cm-textfield": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
borderColor: "var(--divider-light-color)",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-button": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
backgroundImage: "none",
|
||||
border: "none",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-completionLabel": {
|
||||
color: "var(--secondary-color)",
|
||||
},
|
||||
".cm-tooltip": {
|
||||
backgroundColor: "var(--primary-dark-color)",
|
||||
color: "var(--secondary-light-color)",
|
||||
border: "none",
|
||||
borderRadius: "4px",
|
||||
},
|
||||
".cm-tooltip-arrow": {
|
||||
color: "var(--tooltip-color)",
|
||||
},
|
||||
".cm-tooltip-arrow:after": {
|
||||
borderTopColor: "currentColor !important",
|
||||
},
|
||||
".cm-tooltip-arrow:before": {
|
||||
borderTopColor: "currentColor !important",
|
||||
},
|
||||
".cm-tooltip.cm-tooltip-autocomplete > ul": {
|
||||
fontFamily: "var(--font-mono)",
|
||||
},
|
||||
".cm-tooltip-autocomplete ul li[aria-selected]": {
|
||||
backgroundColor: "var(--accent-dark-color)",
|
||||
color: "var(--accent-contrast-color)",
|
||||
},
|
||||
".cm-tooltip-autocomplete ul li[aria-selected] .cm-completionLabel": {
|
||||
color: "var(--accent-contrast-color)",
|
||||
},
|
||||
".cm-activeLine": { backgroundColor: "transparent" },
|
||||
".cm-searchMatch": {
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
backgroundColor: "var(--divider-dark-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-selectionMatch": {
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
backgroundColor: "var(--divider-light-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-matchingBracket, .cm-nonmatchingBracket": {
|
||||
backgroundColor: "var(--divider-color)",
|
||||
outline: "1px solid var(--accent-dark-color)",
|
||||
borderRadius: "2px",
|
||||
},
|
||||
".cm-gutters": {
|
||||
fontFamily: "var(--font-mono)",
|
||||
backgroundColor: "var(--primary-color)",
|
||||
borderColor: "var(--divider-light-color)",
|
||||
},
|
||||
".cm-lineNumbers": {
|
||||
minWidth: "3em",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-foldGutter": {
|
||||
minWidth: "2em",
|
||||
color: "var(--secondary-light-color)",
|
||||
},
|
||||
".cm-foldGutter .cm-gutterElement": {
|
||||
textAlign: "center",
|
||||
},
|
||||
".cm-line": {
|
||||
paddingLeft: "1rem",
|
||||
paddingRight: "1rem",
|
||||
paddingTop: "0.2rem",
|
||||
paddingBottom: "0.2rem",
|
||||
},
|
||||
".cm-activeLineGutter": {
|
||||
backgroundColor: "transparent",
|
||||
},
|
||||
".cm-scroller::-webkit-scrollbar": {
|
||||
display: "none",
|
||||
},
|
||||
".cm-foldPlaceholder": {
|
||||
backgroundColor: "var(--divider-light-color)",
|
||||
color: "var(--secondary-dark-color)",
|
||||
borderColor: "var(--divider-dark-color)",
|
||||
},
|
||||
})
|
||||
|
||||
const editorTypeColor = "var(--editor-type-color)"
|
||||
const editorNameColor = "var(--editor-name-color)"
|
||||
const editorOperatorColor = "var(--editor-operator-color)"
|
||||
const editorInvalidColor = "var(--editor-invalid-color)"
|
||||
const editorSeparatorColor = "var(--editor-separator-color)"
|
||||
const editorMetaColor = "var(--editor-meta-color)"
|
||||
const editorVariableColor = "var(--editor-variable-color)"
|
||||
const editorLinkColor = "var(--editor-link-color)"
|
||||
const editorProcessColor = "var(--editor-process-color)"
|
||||
const editorConstantColor = "var(--editor-constant-color)"
|
||||
const editorKeywordColor = "var(--editor-keyword-color)"
|
||||
|
||||
export const baseHighlightStyle = HighlightStyle.define([
|
||||
{ tag: t.keyword, color: editorKeywordColor },
|
||||
{
|
||||
tag: [t.name, t.deleted, t.character, t.propertyName, t.macroName],
|
||||
color: editorNameColor,
|
||||
},
|
||||
{
|
||||
tag: [t.function(t.variableName), t.labelName],
|
||||
color: editorVariableColor,
|
||||
},
|
||||
{
|
||||
tag: [t.color, t.constant(t.name), t.standard(t.name)],
|
||||
color: editorConstantColor,
|
||||
},
|
||||
{ tag: [t.definition(t.name), t.separator], color: editorSeparatorColor },
|
||||
{
|
||||
tag: [
|
||||
t.typeName,
|
||||
t.className,
|
||||
t.number,
|
||||
t.changed,
|
||||
t.annotation,
|
||||
t.modifier,
|
||||
t.self,
|
||||
t.namespace,
|
||||
],
|
||||
color: editorTypeColor,
|
||||
},
|
||||
{
|
||||
tag: [
|
||||
t.operator,
|
||||
t.operatorKeyword,
|
||||
t.url,
|
||||
t.escape,
|
||||
t.regexp,
|
||||
t.link,
|
||||
t.special(t.string),
|
||||
],
|
||||
color: editorOperatorColor,
|
||||
},
|
||||
{ tag: [t.meta, t.comment], color: editorMetaColor },
|
||||
{ tag: t.strong, fontWeight: "bold" },
|
||||
{ tag: t.emphasis, fontStyle: "italic" },
|
||||
{ tag: t.strikethrough, textDecoration: "line-through" },
|
||||
{ tag: t.link, color: editorLinkColor, textDecoration: "underline" },
|
||||
{ tag: t.heading, fontWeight: "bold", color: editorNameColor },
|
||||
{
|
||||
tag: [t.atom, t.bool, t.special(t.variableName)],
|
||||
color: editorConstantColor,
|
||||
},
|
||||
{
|
||||
tag: [t.processingInstruction, t.string, t.inserted],
|
||||
color: editorProcessColor,
|
||||
},
|
||||
{ tag: t.invalid, color: editorInvalidColor },
|
||||
])
|
||||
|
||||
export const basicSetup: Extension = [
|
||||
lineNumbers(),
|
||||
highlightActiveLineGutter(),
|
||||
highlightSpecialChars(),
|
||||
history(),
|
||||
foldGutter({
|
||||
openText: "▾",
|
||||
closedText: "▸",
|
||||
}),
|
||||
drawSelection(),
|
||||
dropCursor(),
|
||||
EditorState.allowMultipleSelections.of(true),
|
||||
indentOnInput(),
|
||||
syntaxHighlighting(baseHighlightStyle),
|
||||
syntaxHighlighting(defaultHighlightStyle, { fallback: true }),
|
||||
bracketMatching(),
|
||||
closeBrackets(),
|
||||
autocompletion(),
|
||||
rectangularSelection(),
|
||||
crosshairCursor(),
|
||||
highlightActiveLine(),
|
||||
highlightSelectionMatches(),
|
||||
keymap.of([
|
||||
...closeBracketsKeymap,
|
||||
...defaultKeymap,
|
||||
...searchKeymap,
|
||||
...historyKeymap,
|
||||
...foldKeymap,
|
||||
...completionKeymap,
|
||||
...lintKeymap,
|
||||
]),
|
||||
search({
|
||||
top: true,
|
||||
}),
|
||||
]
|
||||
Some files were not shown because too many files have changed in this diff