feat: init codegen state
@@ -1,3 +1,4 @@
import { HoppRESTHeader, HoppRESTParam } from "../types/HoppRESTRequest"
import { CLibcurlCodegen } from "./generators/c-libcurl"
import { CsRestsharpCodegen } from "./generators/cs-restsharp"
import { CurlCodegen } from "./generators/curl"
@@ -54,7 +55,30 @@ export const codegens = [
ShellWgetCodegen,
]

export function generateCodeWithGenerator(codegenID, context) {
export type HoppCodegenContext = {
name: string
method: string
uri: string
url: string
pathName: string
auth: any // TODO: Change this
httpUser: string | null
httpPassword: string | null
bearerToken: string | null
headers: HoppRESTHeader[]
params: HoppRESTParam[]
bodyParams: any // TODO: Change this
rawParams: string | null
rawInput: boolean
rawRequestBody: any
contentType: string
queryString: string
}

export function generateCodeWithGenerator(
codegenID: string,
context: HoppCodegenContext
) {
if (codegenID) {
const gen = codegens.find(({ id }) => id === codegenID)
return gen ? gen.generator(context) : ""
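A minimal usage sketch of the new typed entry point (not part of the commit; the field values and the "curl" generator id are illustrative assumptions):

// Hypothetical caller-side usage of generateCodeWithGenerator.
const context: HoppCodegenContext = {
  name: "Example request",
  method: "GET",
  uri: "https://example.com/items",
  url: "https://example.com",
  pathName: "/items",
  auth: null,
  httpUser: null,
  httpPassword: null,
  bearerToken: null,
  headers: [],
  params: [],
  bodyParams: null,
  rawParams: null,
  rawInput: false,
  rawRequestBody: null,
  contentType: "application/json",
  queryString: "",
}

// Returns the generated snippet, or "" when no generator matches the id.
const snippet = generateCodeWithGenerator("curl", context)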
@@ -8,7 +8,7 @@ import parser from "yargs-parser"
* output this: 'msg1=value1&msg2=value2'
* @param dataArguments
*/
const joinDataArguments = (dataArguments) => {
const joinDataArguments = (dataArguments: string[]) => {
let data = ""
dataArguments.forEach((argument, i) => {
if (i === 0) {
@@ -20,7 +20,49 @@ const joinDataArguments = (dataArguments) => {
return data
}

const parseCurlCommand = (curlCommand) => {
const parseDataFromArguments = (parsedArguments: any) => {
if (parsedArguments.data) {
return {
data: Array.isArray(parsedArguments.data)
? joinDataArguments(parsedArguments.data)
: parsedArguments.data,
dataArray: Array.isArray(parsedArguments.data)
? parsedArguments.data
: null,
isDataBinary: false,
}
} else if (parsedArguments["data-binary"]) {
return {
data: Array.isArray(parsedArguments["data-binary"])
? joinDataArguments(parsedArguments["data-binary"])
: parsedArguments["data-binary"],
dataArray: Array.isArray(parsedArguments["data-binary"])
? parsedArguments["data-binary"]
: null,
isDataBinary: true,
}
} else if (parsedArguments.d) {
return {
data: Array.isArray(parsedArguments.d)
? joinDataArguments(parsedArguments.d)
: parsedArguments.d,
dataArray: Array.isArray(parsedArguments.d) ? parsedArguments.d : null,
isDataBinary: false,
}
} else if (parsedArguments["data-ascii"]) {
return {
data: Array.isArray(parsedArguments["data-ascii"])
? joinDataArguments(parsedArguments["data-ascii"])
: parsedArguments["data-ascii"],
dataArray: Array.isArray(parsedArguments["data-ascii"])
? parsedArguments["data-ascii"]
: null,
isDataBinary: false,
}
}
}
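For reference, a hedged sketch of what the new helper returns for repeated --data flags, assuming the "&" join described in joinDataArguments' doc comment (the input object is an illustrative stand-in for yargs-parser output, not taken from the commit):

// Illustrative only: array values are joined, single values pass through unchanged.
const parsed = { data: ["msg1=value1", "msg2=value2"] }
parseDataFromArguments(parsed)
// => { data: "msg1=value1&msg2=value2", dataArray: ["msg1=value1", "msg2=value2"], isDataBinary: false }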
const parseCurlCommand = (curlCommand: string) => {
const newlineFound = /\\/gi.test(curlCommand)
if (newlineFound) {
// remove '\' and newlines
@@ -47,9 +89,9 @@ const parseCurlCommand = (curlCommand) => {
}
}
}
let headers
let headers: any

const parseHeaders = (headerFieldName) => {
const parseHeaders = (headerFieldName: string) => {
if (parsedArguments[headerFieldName]) {
if (!headers) {
headers = {}
@@ -57,7 +99,7 @@ const parseCurlCommand = (curlCommand) => {
if (!Array.isArray(parsedArguments[headerFieldName])) {
parsedArguments[headerFieldName] = [parsedArguments[headerFieldName]]
}
parsedArguments[headerFieldName].forEach((header) => {
parsedArguments[headerFieldName].forEach((header: string) => {
if (header.includes("Cookie")) {
// stupid javascript tricks: closure
cookieString = header
@@ -91,13 +133,12 @@ const parseCurlCommand = (curlCommand) => {
if (parsedArguments.cookie) {
cookieString = parsedArguments.cookie
}
let multipartUploads
const multipartUploads: Record<string, string> = {}
if (parsedArguments.F) {
multipartUploads = {}
if (!Array.isArray(parsedArguments.F)) {
parsedArguments.F = [parsedArguments.F]
}
parsedArguments.F.forEach((multipartArgument) => {
parsedArguments.F.forEach((multipartArgument: string) => {
// input looks like key=value. value could be json or a file path prepended with an @
const [key, value] = multipartArgument.split("=", 2)
multipartUploads[key] = value
@@ -105,7 +146,7 @@ const parseCurlCommand = (curlCommand) => {
}
if (cookieString) {
const cookieParseOptions = {
decode: (s) => s,
decode: (s: any) => s,
}
// separate out cookie headers into separate data structure
// note: cookie is case insensitive
@@ -169,7 +210,7 @@ const parseCurlCommand = (curlCommand) => {
delete parsedArguments[option]
}
}
const query = querystring.parse(urlObject.query, null, null, {
const query = querystring.parse(urlObject.query!, null as any, null as any, {
maxKeys: 10000,
})
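A small sketch of the -F handling above, with illustrative inputs that are not part of the commit:

// For `curl -F "name=hopp" -F "file=@photo.png" ...`, each -F argument is split into key and value.
const multipartUploads: Record<string, string> = {}
for (const multipartArgument of ["name=hopp", "file=@photo.png"]) {
  const [key, value] = multipartArgument.split("=", 2)
  multipartUploads[key] = value
}
// multipartUploads => { name: "hopp", file: "@photo.png" }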
@@ -177,51 +218,18 @@ const parseCurlCommand = (curlCommand) => {
const request = {
url,
urlWithoutQuery: URL.format(urlObject),
}
if (compressed) {
request.compressed = true
compressed,
query,
headers,
method,
cookies,
cookieString: cookieString?.replace("Cookie: ", ""),
multipartUploads,
...parseDataFromArguments(parsedArguments),
auth: parsedArguments.u,
user: parsedArguments.user,
}

if (Object.keys(query).length > 0) {
request.query = query
}
if (headers) {
request.headers = headers
}
request.method = method

if (cookies) {
request.cookies = cookies
request.cookieString = cookieString.replace("Cookie: ", "")
}
if (multipartUploads) {
request.multipartUploads = multipartUploads
}
if (parsedArguments.data) {
request.data = parsedArguments.data
} else if (parsedArguments["data-binary"]) {
request.data = parsedArguments["data-binary"]
request.isDataBinary = true
} else if (parsedArguments.d) {
request.data = parsedArguments.d
} else if (parsedArguments["data-ascii"]) {
request.data = parsedArguments["data-ascii"]
}

if (parsedArguments.u) {
request.auth = parsedArguments.u
}
if (parsedArguments.user) {
request.auth = parsedArguments.user
}
if (Array.isArray(request.data)) {
request.dataArray = request.data
request.data = joinDataArguments(request.data)
}

if (parsedArguments.k || parsedArguments.insecure) {
request.insecure = true
}
return request
}
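A hedged usage sketch of the parser after this change; the curl command is illustrative, and only field names visible in this diff are referenced:

// Hypothetical call; repeated -d values surface through parseDataFromArguments as data/dataArray.
const req = parseCurlCommand(
  'curl -X POST "https://example.com/items?limit=10" -H "Content-Type: application/json" -d "a=1" -d "b=2"'
)
// Fields assembled in the new object literal above: req.url, req.urlWithoutQuery, req.compressed,
// req.query, req.headers, req.method, req.cookies, req.cookieString, req.multipartUploads,
// req.data, req.dataArray, req.isDataBinary, req.auth, req.user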
@@ -21,6 +21,15 @@ export interface HoppRESTRequest {
headers: HoppRESTHeader[]
}

export function makeRESTRequest(
x: Omit<HoppRESTRequest, "v">
): HoppRESTRequest {
return {
...x,
v: RESTReqSchemaVersion,
}
}

export function isHoppRESTRequest(x: any): x is HoppRESTRequest {
return x && typeof x === "object" && "v" in x
}
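A minimal sketch of how these helpers compose; the loadRequest wrapper is hypothetical, not part of the commit:

// makeRESTRequest stamps the current schema version; isHoppRESTRequest narrows unknown data.
function loadRequest(raw: unknown): HoppRESTRequest | null {
  return isHoppRESTRequest(raw) ? raw : null
}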
@@ -13,6 +13,30 @@ export interface EffectiveHoppRESTRequest extends HoppRESTRequest {
effectiveFinalHeaders: { key: string; value: string }[]
}

/**
* Outputs an executable request format with environment variables applied
*
* @param request The request to source from
* @param environment The environment to apply
*
* @returns An object with extra fields defining a complete request
*/
export function getEffectiveRESTRequest(
request: HoppRESTRequest,
_environment: Environment
) {
// TODO: Change this
return {
...request,
effectiveFinalURL: request.endpoint,
effectiveFinalHeaders: request.headers.filter(
(x) =>
x.key !== "" && // Remove empty keys
x.active // Only active
),
}
}

/**
* Creates an Observable Stream that emits HoppRESTRequests whenever
* the input streams emit a value
@@ -27,17 +51,8 @@ export function getEffectiveRESTRequestStream(
environment$: Observable<Environment>
): Observable<EffectiveHoppRESTRequest> {
return combineLatest([request$, environment$]).pipe(
map(([request, _env]) => {
// TODO: Change this
return {
...request,
effectiveFinalURL: request.endpoint,
effectiveFinalHeaders: request.headers.filter(
(x) =>
x.key !== "" && // Remove empty keys
x.active // Only active
),
}
map(([request, env]) => {
return getEffectiveRESTRequest(request, env)
})
)
}
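A hedged wiring sketch for the refactored stream, which now delegates to getEffectiveRESTRequest; the BehaviorSubjects and initial values are illustrative, not part of the commit:

import { BehaviorSubject } from "rxjs"

declare const initialRequest: HoppRESTRequest
declare const initialEnvironment: Environment

const request$ = new BehaviorSubject<HoppRESTRequest>(initialRequest)
const environment$ = new BehaviorSubject<Environment>(initialEnvironment)

// Re-emits an EffectiveHoppRESTRequest whenever either input stream emits.
getEffectiveRESTRequestStream(request$, environment$).subscribe((effective) => {
  console.log(effective.effectiveFinalURL, effective.effectiveFinalHeaders)
})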