feat(common): support simultaneous imports of collections and environment files (#3719)

James George
2024-03-05 04:19:01 -08:00
committed by GitHub
parent 55a94bdccc
commit de8929ab18
16 changed files with 257 additions and 131 deletions


@@ -2,6 +2,7 @@ import { pipe, flow } from "fp-ts/function"
 import * as TE from "fp-ts/TaskEither"
 import * as O from "fp-ts/Option"
 import * as RA from "fp-ts/ReadonlyArray"
+import * as A from "fp-ts/Array"
 import { translateToNewRESTCollection, HoppCollection } from "@hoppscotch/data"
 import { isPlainObject as _isPlainObject } from "lodash-es"
@@ -9,11 +10,13 @@ import { IMPORTER_INVALID_FILE_FORMAT } from "."
 import { safeParseJSON } from "~/helpers/functional/json"
 import { translateToNewGQLCollection } from "@hoppscotch/data"
-export const hoppRESTImporter = (content: string) =>
+export const hoppRESTImporter = (content: string[]) =>
   pipe(
-    safeParseJSON(content),
+    content,
+    A.traverse(O.Applicative)((str) => safeParseJSON(str, true)),
     O.chain(
       flow(
+        A.flatten,
         makeCollectionsArray,
         RA.map(validateCollection),
         O.sequenceArray,
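The load-bearing change here is `A.traverse(O.Applicative)`: the array of file contents becomes a single `Option` of parsed values, and one unparseable file turns the whole result into `None`. A minimal sketch of that behaviour (`parseJSON` and `parseAll` are illustrative stand-ins, not code from this commit):

import * as A from "fp-ts/Array"
import * as O from "fp-ts/Option"
import { pipe } from "fp-ts/function"

// Illustrative stand-in for safeParseJSON: string -> Option<unknown>
const parseJSON = (str: string): O.Option<unknown> =>
  O.tryCatch(() => JSON.parse(str))

// Traversing with the Option applicative turns string[] into Option<unknown[]>,
// failing as a whole if any single file fails to parse.
const parseAll = (contents: string[]): O.Option<unknown[]> =>
  pipe(contents, A.traverse(O.Applicative)(parseJSON))

parseAll(['{"a":1}', '{"b":2}']) // some([{ a: 1 }, { b: 2 }])
parseAll(['{"a":1}', "not json"]) // none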


@@ -8,17 +8,35 @@ import { IMPORTER_INVALID_FILE_FORMAT } from "."
 import { Environment } from "@hoppscotch/data"
 import { z } from "zod"
-export const hoppEnvImporter = (content: string) => {
-  const parsedContent = safeParseJSON(content, true)
+export const hoppEnvImporter = (contents: string[]) => {
+  const parsedContents = contents.map((str) => safeParseJSON(str, true))
   // parse json from the environments string
-  if (O.isNone(parsedContent)) {
+  if (parsedContents.some((parsed) => O.isNone(parsed))) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
   }
+  const parsedValues = parsedContents.flatMap((content) => {
+    const unwrappedContent = O.toNullable(content) as Environment[] | null
+    if (unwrappedContent) {
+      return unwrappedContent.map((contentEntry) => {
+        return {
+          ...contentEntry,
+          variables: contentEntry.variables?.map((valueEntry) => ({
+            ...valueEntry,
+            ...("value" in valueEntry
+              ? { value: String(valueEntry.value) }
+              : {}),
+          })),
+        }
+      })
+    }
+    return null
+  })
   const validationResult = z
     .array(entityReference(Environment))
-    .safeParse(parsedContent.value)
+    .safeParse(parsedValues)
   if (!validationResult.success) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
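The new `parsedValues` step flattens the environments from every file into one array and coerces each variable's `value` to a string before handing the result to `z.array(...)`. A simplified sketch of that normalisation, using a stand-in schema rather than the real `entityReference(Environment)`:

import { z } from "zod"

// Simplified stand-in for the Environment schema validated in this file.
const envSchema = z.object({
  name: z.string(),
  variables: z.array(z.object({ key: z.string(), value: z.string() })),
})

type RawEnv = { name: string; variables: { key: string; value: unknown }[] }

// One parsed array per imported file; values may be numbers or booleans.
const parsedFiles: RawEnv[][] = [
  [{ name: "staging", variables: [{ key: "PORT", value: 3000 }] }],
  [{ name: "prod", variables: [{ key: "DEBUG", value: false }] }],
]

// Flatten the per-file arrays and coerce every value to a string,
// mirroring what the new parsedValues step does before validation.
const parsedValues = parsedFiles.flat().map((env) => ({
  ...env,
  variables: env.variables.map((v) => ({ ...v, value: String(v.value) })),
}))

const result = z.array(envSchema).safeParse(parsedValues)
// result.success === true; "3000" and "false" are now strings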


@@ -3,10 +3,10 @@ import * as E from "fp-ts/Either"
 // TODO: add zod validation
 export const hoppGqlCollectionsImporter = (
-  content: string
+  contents: string[]
 ): E.Either<"INVALID_JSON", HoppCollection[]> => {
   return E.tryCatch(
-    () => JSON.parse(content) as HoppCollection[],
+    () => contents.flatMap((content) => JSON.parse(content)),
     () => "INVALID_JSON"
   )
 }
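Because every file is parsed inside one `E.tryCatch`, a single malformed file throws and the whole batch collapses to `Left("INVALID_JSON")`, while `flatMap` folds files that contain either one collection or an array of collections into a flat list. A small sketch of that behaviour (`parseAll` is an illustrative name):

import * as E from "fp-ts/Either"

// Each file may contain one collection or an array of collections;
// flatMap folds both shapes into a single flat array.
const parseAll = (contents: string[]): E.Either<"INVALID_JSON", unknown[]> =>
  E.tryCatch(
    () => contents.flatMap((content) => JSON.parse(content)),
    () => "INVALID_JSON" as const
  )

parseAll(['[{"name":"c1"}]', '{"name":"c2"}']) // right([{ name: "c1" }, { name: "c2" }])
parseAll(["{oops"]) // left("INVALID_JSON")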


@@ -6,7 +6,7 @@ import { v4 as uuidv4 } from "uuid"
 export function FileSource(metadata: {
   acceptedFileTypes: string
   caption: string
-  onImportFromFile: (content: string) => any | Promise<any>
+  onImportFromFile: (content: string[]) => any | Promise<any>
 }) {
   const stepID = uuidv4()
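The file-source step now hands the importer one string per selected file. Roughly how a multi-select input could be turned into that `string[]` (a sketch only, not the component's actual implementation):

// A rough sketch of producing string[] from a multi-select file input
// before invoking onImportFromFile.
async function readSelectedFiles(input: HTMLInputElement): Promise<string[]> {
  const files = Array.from(input.files ?? [])
  // File.text() resolves to the file contents as a string.
  return Promise.all(files.map((file) => file.text()))
}

// Usage sketch:
// const contents = await readSelectedFiles(fileInput)
// metadata.onImportFromFile(contents)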


@@ -10,14 +10,14 @@ import { v4 as uuidv4 } from "uuid"
 export function GistSource(metadata: {
   caption: string
   onImportFromGist: (
-    importResult: E.Either<string, string>
+    importResult: E.Either<string, string[]>
   ) => any | Promise<any>
 }) {
   const stepID = uuidv4()
   return defineStep(stepID, UrlImport, () => ({
     caption: metadata.caption,
-    onImportFromURL: (gistResponse) => {
+    onImportFromURL: (gistResponse: Record<string, unknown>) => {
       const fileSchema = z.object({
         files: z.record(z.object({ content: z.string() })),
       })
@@ -29,9 +29,11 @@ export function GistSource(metadata: {
         return
       }
-      const content = Object.values(parseResult.data.files)[0].content
+      const contents = Object.values(parseResult.data.files).map(
+        ({ content }) => content
+      )
-      metadata.onImportFromGist(E.right(content))
+      metadata.onImportFromGist(E.right(contents))
     },
     fetchLogic: fetchGistFromUrl,
   }))
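Rather than taking only the first file of the gist, every file's `content` is collected. A sketch of the shape involved, with a made-up two-file gist payload:

import { z } from "zod"

const fileSchema = z.object({
  files: z.record(z.object({ content: z.string() })),
})

// A gist response carries one entry per file; collecting every `content`
// yields the string[] the downstream importers now expect.
const gistResponse = {
  files: {
    "collections.json": { content: '[{"name":"c1"}]' },
    "environments.json": { content: '[{"name":"env1","variables":[]}]' },
  },
}

const parsed = fileSchema.safeParse(gistResponse)
const contents = parsed.success
  ? Object.values(parsed.data.files).map(({ content }) => content)
  : []
// contents.length === 2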


@@ -1,19 +1,21 @@
-import { convert, ImportRequest } from "insomnia-importers"
-import { pipe } from "fp-ts/function"
 import {
+  HoppCollection,
   HoppRESTAuth,
   HoppRESTHeader,
   HoppRESTParam,
   HoppRESTReqBody,
   HoppRESTRequest,
   knownContentTypes,
-  makeRESTRequest,
-  HoppCollection,
   makeCollection,
+  makeRESTRequest,
 } from "@hoppscotch/data"
+import * as A from "fp-ts/Array"
-import * as TO from "fp-ts/TaskOption"
 import * as TE from "fp-ts/TaskEither"
+import * as TO from "fp-ts/TaskOption"
+import { pipe } from "fp-ts/function"
+import { ImportRequest, convert } from "insomnia-importers"
 import { IMPORTER_INVALID_FILE_FORMAT } from "."
 import { replaceInsomniaTemplating } from "./insomniaEnv"
@@ -203,15 +205,18 @@ const getHoppFolder = (
     headers: [],
   })
-const getHoppCollections = (doc: InsomniaDoc) =>
-  getFoldersIn(null, doc.data.resources).map((f) =>
-    getHoppFolder(f, doc.data.resources)
-  )
+const getHoppCollections = (docs: InsomniaDoc[]) => {
+  return docs.flatMap((doc) => {
+    return getFoldersIn(null, doc.data.resources).map((f) =>
+      getHoppFolder(f, doc.data.resources)
+    )
+  })
+}
-export const hoppInsomniaImporter = (fileContent: string) =>
+export const hoppInsomniaImporter = (fileContents: string[]) =>
   pipe(
-    fileContent,
-    parseInsomniaDoc,
+    fileContents,
+    A.traverse(TO.ApplicativeSeq)(parseInsomniaDoc),
     TO.map(getHoppCollections),
     TE.fromTaskOption(() => IMPORTER_INVALID_FILE_FORMAT)
   )
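`parseInsomniaDoc` returns a `TaskOption`, so the contents are traversed with the sequential applicative: documents are parsed one after another and the whole import is `None` if any of them fails. A standalone sketch with a stand-in parser (`parseDoc` is illustrative):

import * as A from "fp-ts/Array"
import * as TO from "fp-ts/TaskOption"
import { pipe } from "fp-ts/function"

// Stand-in for parseInsomniaDoc: an async parse that may fail.
const parseDoc = (raw: string): TO.TaskOption<{ raw: string }> =>
  TO.tryCatch(async () => {
    if (raw.length === 0) throw new Error("empty document")
    return { raw }
  })

// ApplicativeSeq runs the tasks one after another; the result is
// TaskOption<{ raw: string }[]>, None if any single parse failed.
const parseAll = (contents: string[]) =>
  pipe(contents, A.traverse(TO.ApplicativeSeq)(parseDoc))

// parseAll(["a", "b"])() resolves to some([{ raw: "a" }, { raw: "b" }])
// parseAll(["a", ""])() resolves to none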


@@ -29,33 +29,36 @@ export const replaceInsomniaTemplating = (expression: string) => {
   return expression.replaceAll(regex, "<<$1>>")
 }
-export const insomniaEnvImporter = (content: string) => {
-  const parsedContent = safeParseJSONOrYAML(content)
-  if (O.isNone(parsedContent)) {
+export const insomniaEnvImporter = (contents: string[]) => {
+  const parsedContents = contents.map((str) => safeParseJSONOrYAML(str))
+  if (parsedContents.some((parsed) => O.isNone(parsed))) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
   }
-  const validationResult = insomniaResourcesSchema.safeParse(
-    parsedContent.value
-  )
+  const parsedValues = parsedContents.map((parsed) => O.toNullable(parsed))
+  const validationResult = z
+    .array(insomniaResourcesSchema)
+    .safeParse(parsedValues)
   if (!validationResult.success) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
   }
-  const insomniaEnvs = validationResult.data.resources
-    .filter((resource) => resource._type === "environment")
-    .map((envResource) => {
-      const envResourceData = envResource.data as Record<string, unknown>
-      const stringifiedData: Record<string, string> = {}
+  const insomniaEnvs = validationResult.data.flatMap(({ resources }) => {
+    return resources
+      .filter((resource) => resource._type === "environment")
+      .map((envResource) => {
+        const envResourceData = envResource.data as Record<string, unknown>
+        const stringifiedData: Record<string, string> = {}
-      Object.keys(envResourceData).forEach((key) => {
-        stringifiedData[key] = String(envResourceData[key])
-      })
+        Object.keys(envResourceData).forEach((key) => {
+          stringifiedData[key] = String(envResourceData[key])
+        })
+        return { ...envResource, data: stringifiedData }
+      })
-      return { ...envResource, data: stringifiedData }
-    })
+  })
   const environments: NonSecretEnvironment[] = []
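Each file is parsed as JSON or YAML, the results are validated as an array of resource documents, and environments from all documents are flattened into one list with their values stringified. A simplified sketch using a stand-in for `insomniaResourcesSchema`:

import { z } from "zod"

// Simplified stand-in for insomniaResourcesSchema.
const resourcesSchema = z.object({
  resources: z.array(
    z.object({ _type: z.string(), data: z.record(z.unknown()).optional() })
  ),
})

const docs = [
  { resources: [{ _type: "environment", data: { base_url: "https://example.com", retries: 3 } }] },
  { resources: [{ _type: "request" }] },
]

const validated = z.array(resourcesSchema).safeParse(docs)

// Collect environments across every document and stringify their values.
const envs = validated.success
  ? validated.data.flatMap(({ resources }) =>
      resources
        .filter((r) => r._type === "environment")
        .map((r) => {
          const data: Record<string, string> = {}
          Object.entries(r.data ?? {}).forEach(([k, v]) => {
            data[k] = String(v)
          })
          return { ...r, data }
        })
    )
  : []
// envs[0].data.retries === "3"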


@@ -584,24 +584,28 @@ const convertPathToHoppReqs = (
     RA.toArray
   )
-const convertOpenApiDocToHopp = (
-  doc: OpenAPI.Document
+const convertOpenApiDocsToHopp = (
+  docs: OpenAPI.Document[]
 ): TE.TaskEither<never, HoppCollection[]> => {
-  const name = doc.info.title
+  const collections = docs.map((doc) => {
+    const name = doc.info.title
-  const paths = Object.entries(doc.paths ?? {})
-    .map(([pathName, pathObj]) => convertPathToHoppReqs(doc, pathName, pathObj))
-    .flat()
+    const paths = Object.entries(doc.paths ?? {})
+      .map(([pathName, pathObj]) =>
+        convertPathToHoppReqs(doc, pathName, pathObj)
+      )
+      .flat()
-  return TE.of([
-    makeCollection({
+    return makeCollection({
       name,
       folders: [],
       requests: paths,
       auth: { authType: "inherit", authActive: true },
       headers: [],
-    }),
-  ])
+    })
+  })
+  return TE.of(collections)
 }
const parseOpenAPIDocContent = (str: string) =>
@@ -614,29 +618,49 @@ const parseOpenAPIDocContent = (str: string) =>
     )
   )
-export const hoppOpenAPIImporter = (fileContent: string) =>
+export const hoppOpenAPIImporter = (fileContents: string[]) =>
   pipe(
     // See if we can parse JSON properly
-    fileContent,
-    parseOpenAPIDocContent,
-    TE.fromOption(() => IMPORTER_INVALID_FILE_FORMAT),
+    fileContents,
+    A.traverse(O.Applicative)(parseOpenAPIDocContent),
+    TE.fromOption(() => {
+      return IMPORTER_INVALID_FILE_FORMAT
+    }),
     // Try validating, else the importer is invalid file format
-    TE.chainW((obj) =>
-      pipe(
+    TE.chainW((docArr) => {
+      return pipe(
         TE.tryCatch(
-          () => SwaggerParser.validate(obj),
+          async () => {
+            const resultDoc = []
+            for (const docObj of docArr) {
+              const validatedDoc = await SwaggerParser.validate(docObj)
+              resultDoc.push(validatedDoc)
+            }
+            return resultDoc
+          },
           () => IMPORTER_INVALID_FILE_FORMAT
         )
       )
-    ),
+    }),
     // Deference the references
-    TE.chainW((obj) =>
+    TE.chainW((docArr) =>
       pipe(
         TE.tryCatch(
-          () => SwaggerParser.dereference(obj),
+          async () => {
+            const resultDoc = []
+            for (const docObj of docArr) {
+              const validatedDoc = await SwaggerParser.dereference(docObj)
+              resultDoc.push(validatedDoc)
+            }
+            return resultDoc
+          },
          () => OPENAPI_DEREF_ERROR
        )
      )
    ),
-    TE.chainW(convertOpenApiDocToHopp)
+    TE.chainW(convertOpenApiDocsToHopp)
   )
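The single `SwaggerParser.validate`/`dereference` call becomes a `for..of` loop over the traversed documents inside one `TE.tryCatch`, so a failure on any document rejects the whole batch with the same error tag. The general shape, sketched generically (`validateAll` is an illustrative helper, not part of the commit):

import * as TE from "fp-ts/TaskEither"

// Run an async check over every parsed document inside a single tryCatch,
// so one failure rejects the whole batch.
const validateAll = <T>(
  docs: T[],
  validate: (doc: T) => Promise<T>
): TE.TaskEither<"IMPORTER_INVALID_FILE_FORMAT", T[]> =>
  TE.tryCatch(
    async () => {
      const result: T[] = []
      for (const doc of docs) {
        // Sequential on purpose: mirrors the for..of/await loop in the diff.
        result.push(await validate(doc))
      }
      return result
    },
    () => "IMPORTER_INVALID_FILE_FORMAT" as const
  )

// Usage sketch: validateAll(docArr, (d) => SwaggerParser.validate(d))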


@@ -55,7 +55,11 @@ const readPMCollection = (def: string) =>
   pipe(
     def,
     safeParseJSON,
-    O.chain((data) => O.tryCatch(() => new PMCollection(data)))
+    O.chain((data) =>
+      O.tryCatch(() => {
+        return new PMCollection(data)
+      })
+    )
   )
 const getHoppReqHeaders = (item: Item): HoppRESTHeader[] =>
@@ -296,15 +300,17 @@ const getHoppFolder = (ig: ItemGroup<Item>): HoppCollection =>
     headers: [],
   })
-export const getHoppCollection = (coll: PMCollection) => getHoppFolder(coll)
+export const getHoppCollections = (collections: PMCollection[]) => {
+  return collections.map(getHoppFolder)
+}
-export const hoppPostmanImporter = (fileContent: string) =>
+export const hoppPostmanImporter = (fileContents: string[]) =>
   pipe(
     // Try reading
-    fileContent,
-    readPMCollection,
+    fileContents,
+    A.traverse(O.Applicative)(readPMCollection),
-    O.map(flow(getHoppCollection, A.of)),
+    O.map(flow(getHoppCollections)),
     TE.fromOption(() => IMPORTER_INVALID_FILE_FORMAT)
   )
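With the traversal producing an `Option` of all parsed collections, the old `getHoppCollection` + `A.of` wrapping is replaced by mapping the converter over the whole array. A tiny sketch with stand-in types:

import * as O from "fp-ts/Option"
import { flow, pipe } from "fp-ts/function"

// Stand-ins for the real types/converters in this file.
type PMColl = { name: string }
type HoppColl = { name: string; folders: unknown[] }
const toHoppFolder = (c: PMColl): HoppColl => ({ name: c.name, folders: [] })

// Before: one collection was converted and wrapped with A.of.
// After: the whole array is converted in one map, so no wrapping is needed.
const toHoppCollections = (cs: PMColl[]): HoppColl[] => cs.map(toHoppFolder)

const result = pipe(
  O.some<PMColl[]>([{ name: "API v1" }, { name: "API v2" }]),
  O.map(flow(toHoppCollections))
)
// some([{ name: "API v1", folders: [] }, { name: "API v2", folders: [] }])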


@@ -1,12 +1,11 @@
-import * as TE from "fp-ts/TaskEither"
-import * as O from "fp-ts/Option"
-import { IMPORTER_INVALID_FILE_FORMAT } from "."
-import { safeParseJSON } from "~/helpers/functional/json"
-import { z } from "zod"
 import { Environment } from "@hoppscotch/data"
+import * as O from "fp-ts/Option"
+import * as TE from "fp-ts/TaskEither"
 import { uniqueId } from "lodash-es"
+import { z } from "zod"
+import { safeParseJSON } from "~/helpers/functional/json"
+import { IMPORTER_INVALID_FILE_FORMAT } from "."
 const postmanEnvSchema = z.object({
   name: z.string(),
@@ -18,32 +17,44 @@ const postmanEnvSchema = z.object({
   ),
 })
-export const postmanEnvImporter = (content: string) => {
-  const parsedContent = safeParseJSON(content)
+type PostmanEnv = z.infer<typeof postmanEnvSchema>
+export const postmanEnvImporter = (contents: string[]) => {
+  const parsedContents = contents.map((str) => safeParseJSON(str, true))
   // parse json from the environments string
-  if (O.isNone(parsedContent)) {
+  if (parsedContents.some((parsed) => O.isNone(parsed))) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
   }
-  const validationResult = postmanEnvSchema.safeParse(parsedContent.value)
+  const parsedValues = parsedContents.flatMap((parsed) => {
+    const unwrappedEntry = O.toNullable(parsed) as PostmanEnv[] | null
+    if (unwrappedEntry) {
+      return unwrappedEntry.map((entry) => ({
+        ...entry,
+        values: entry.values?.map((valueEntry) => ({
+          ...valueEntry,
+          value: String(valueEntry.value),
+        })),
+      }))
+    }
+    return null
+  })
+  const validationResult = z.array(postmanEnvSchema).safeParse(parsedValues)
   if (!validationResult.success) {
     return TE.left(IMPORTER_INVALID_FILE_FORMAT)
   }
-  const postmanEnv = validationResult.data
-  const environment: Environment = {
-    id: uniqueId(),
-    v: 1,
-    name: postmanEnv.name,
-    variables: [],
-  }
-  postmanEnv.values.forEach(({ key, value }) =>
-    environment.variables.push({ key, value, secret: false })
+  // Convert `values` to `variables` to match the format expected by the system
+  const environments: Environment[] = validationResult.data.map(
+    ({ name, values }) => ({
+      id: uniqueId(),
+      v: 1,
+      name,
+      variables: values.map((entires) => ({ ...entires, secret: false })),
+    })
   )
-  return TE.right(environment)
+  return TE.right(environments)
 }
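Each imported file may hold several Postman environments, and every one is now mapped to a Hopp `Environment`, renaming `values` to `variables`, stringifying the values, and marking them non-secret. A simplified sketch with stand-in types (`nextId` fills in for lodash's `uniqueId`):

// Simplified sketch of the values -> variables conversion; the real
// Environment type and uniqueId come from @hoppscotch/data and lodash-es.
type PostmanEnvFile = { name: string; values: { key: string; value: unknown }[] }
type HoppEnvironment = {
  id: string
  v: 1
  name: string
  variables: { key: string; value: string; secret: false }[]
}

let counter = 0
const nextId = () => `env_${++counter}` // stand-in for lodash's uniqueId()

const toHoppEnvironments = (envs: PostmanEnvFile[]): HoppEnvironment[] =>
  envs.map(({ name, values }) => ({
    id: nextId(),
    v: 1,
    name,
    variables: values.map((v) => ({ key: v.key, value: String(v.value), secret: false })),
  }))

toHoppEnvironments([{ name: "prod", values: [{ key: "HOST", value: 443 }] }])
// [{ id: "env_1", v: 1, name: "prod", variables: [{ key: "HOST", value: "443", secret: false }] }]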