refactor: inherit default curl parser values (#2169)
@@ -17,7 +17,7 @@ const samples = [
       method: "GET",
       name: "Untitled request",
       endpoint: "https://echo.hoppscotch.io/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: "application/x-www-form-urlencoded",
         body: rawKeyValueEntriesToString([
@@ -148,7 +148,7 @@ const samples = [
       method: "GET",
       name: "Untitled request",
       endpoint: "https://google.com/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: null,
         body: null,
@@ -165,7 +165,7 @@ const samples = [
       method: "POST",
       name: "Untitled request",
       endpoint: "http://localhost:1111/hello/world/?buzz",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: "application/json",
         body: `{\n "foo": "bar"\n}`,
@@ -188,7 +188,7 @@ const samples = [
       method: "GET",
       name: "Untitled request",
       endpoint: "https://example.com/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: null,
         body: null,
@@ -216,7 +216,7 @@ const samples = [
       method: "POST",
       name: "Untitled request",
       endpoint: "https://bing.com/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: "multipart/form-data",
         body: [
@@ -292,6 +292,27 @@ const samples = [
       testScript: "",
     }),
   },
+  {
+    command:
+      "curl -X GET localhost:9900 --header 'Authorization: Basic 77898dXNlcjpwYXNz'",
+    response: makeRESTRequest({
+      method: "GET",
+      name: "Untitled request",
+      endpoint: "http://localhost:9900/",
+      auth: {
+        authType: "none",
+        authActive: true,
+      },
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      headers: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
   {
     command:
       "curl -X GET localhost --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'",
@@ -315,30 +336,6 @@ const samples = [
       testScript: "",
     }),
   },
-  {
-    command:
-      "curl -X GET localhost --header 'Authorization: Apikey dXNlcjpwYXNz'",
-    response: makeRESTRequest({
-      method: "GET",
-      name: "Untitled request",
-      endpoint: "http://localhost/",
-      auth: {
-        authActive: true,
-        authType: "api-key",
-        key: "apikey",
-        value: "dXNlcjpwYXNz",
-        addTo: "headers",
-      },
-      body: {
-        contentType: null,
-        body: null,
-      },
-      params: [],
-      headers: [],
-      preRequestScript: "",
-      testScript: "",
-    }),
-  },
   {
     command: `curl --get -I -d "tool=curl" -d "platform=hoppscotch" -d"io" https://hoppscotch.io`,
     response: makeRESTRequest({
@@ -346,7 +343,7 @@ const samples = [
       name: "Untitled request",
       endpoint: "https://hoppscotch.io/?io",
       auth: {
-        authActive: false,
+        authActive: true,
         authType: "none",
       },
       body: {
@@ -381,7 +378,7 @@ const samples = [
       name: "Untitled request",
       endpoint: "https://someshadywebsite.com/questionable/path/?so",
       auth: {
-        authActive: false,
+        authActive: true,
         authType: "none",
       },
       body: {
@@ -442,7 +439,7 @@ const samples = [
       name: "Untitled request",
       endpoint: "http://localhost/",
       auth: {
-        authActive: false,
+        authActive: true,
         authType: "none",
       },
       body: {
@@ -475,7 +472,7 @@ const samples = [
       method: "GET",
       name: "Untitled request",
       endpoint: "https://hoppscotch.io/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: null,
         body: null,
@@ -530,7 +527,7 @@ const samples = [
       method: "GET",
       name: "Untitled request",
       endpoint: "https://echo.hoppscotch.io/",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       body: {
         contentType: "application/x-www-form-urlencoded",
         body: rawKeyValueEntriesToString([
@@ -575,7 +572,7 @@ const samples = [
       name: "Untitled request",
       endpoint: "https://echo.hoppscotch.io/",
       method: "POST",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       headers: [
         {
           active: true,
@@ -617,7 +614,7 @@ const samples = [
       name: "Untitled request",
       endpoint: "https://muxueqz.top/skybook.html",
       method: "GET",
-      auth: { authType: "none", authActive: false },
+      auth: { authType: "none", authActive: true },
       headers: [],
       body: { contentType: null, body: null },
       params: [],
@@ -625,6 +622,152 @@ const samples = [
       testScript: "",
     }),
   },
+  {
+    command: "curl -F abcd=efghi",
+    response: makeRESTRequest({
+      name: "Untitled request",
+      endpoint: "https://echo.hoppscotch.io/",
+      method: "POST",
+      auth: { authType: "none", authActive: true },
+      headers: [],
+      body: {
+        contentType: "multipart/form-data",
+        body: [
+          {
+            active: true,
+            isFile: false,
+            key: "abcd",
+            value: "efghi",
+          },
+        ],
+      },
+      params: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: "curl 127.0.0.1 -X custommethod",
+    response: makeRESTRequest({
+      name: "Untitled request",
+      endpoint: "http://127.0.0.1/",
+      method: "CUSTOMMETHOD",
+      auth: { authType: "none", authActive: true },
+      headers: [],
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: "curl echo.hoppscotch.io -A pinephone",
+    response: makeRESTRequest({
+      name: "Untitled request",
+      endpoint: "https://echo.hoppscotch.io/",
+      method: "GET",
+      auth: { authType: "none", authActive: true },
+      headers: [
+        {
+          active: true,
+          key: "User-Agent",
+          value: "pinephone",
+        },
+      ],
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: "curl echo.hoppscotch.io -G",
+    response: makeRESTRequest({
+      name: "Untitled request",
+      endpoint: "https://echo.hoppscotch.io/",
+      method: "GET",
+      auth: { authType: "none", authActive: true },
+      headers: [],
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: `curl --get -I -d "tool=hopp" https://example.org`,
+    response: makeRESTRequest({
+      name: "Untitled request",
+      endpoint: "https://example.org/",
+      method: "HEAD",
+      auth: { authType: "none", authActive: true },
+      headers: [],
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [
+        {
+          active: true,
+          key: "tool",
+          value: "hopp",
+        },
+      ],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: `curl google.com -u userx`,
+    response: makeRESTRequest({
+      method: "GET",
+      name: "Untitled request",
+      endpoint: "https://google.com/",
+      auth: {
+        authType: "basic",
+        authActive: true,
+        username: "userx",
+        password: "",
+      },
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      headers: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
+  {
+    command: `curl google.com -H "Authorization"`,
+    response: makeRESTRequest({
+      method: "GET",
+      name: "Untitled request",
+      endpoint: "https://google.com/",
+      auth: {
+        authType: "none",
+        authActive: true,
+      },
+      body: {
+        contentType: null,
+        body: null,
+      },
+      params: [],
+      headers: [],
+      preRequestScript: "",
+      testScript: "",
+    }),
+  },
 ]
 
 describe("parseCurlToHoppRESTReq", () => {
@@ -1,10 +1,8 @@
-import { detectContentType } from "../contentParser"
+import { detectContentType } from "../sub_helpers/contentParser"
 
 describe("detect content type", () => {
-  test("should return text/plain for blank/null/undefined input", () => {
-    expect(detectContentType("")).toBe("text/plain")
-    expect(detectContentType(null)).toBe("text/plain")
-    expect(detectContentType(undefined)).toBe("text/plain")
+  test("should return null for blank input", () => {
+    expect(detectContentType("")).toBe(null)
   })
 
   describe("application/json", () => {
|
|||||||
@@ -1,282 +0,0 @@
|
|||||||
import { HoppRESTReqBody } from "@hoppscotch/data"
|
|
||||||
import * as S from "fp-ts/string"
|
|
||||||
import * as RA from "fp-ts/ReadonlyArray"
|
|
||||||
import * as O from "fp-ts/Option"
|
|
||||||
import { pipe } from "fp-ts/function"
|
|
||||||
import { tupleToRecord } from "~/helpers/functional/record"
|
|
||||||
import { safeParseJSON } from "~/helpers/functional/json"
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Detects the content type of the input string
|
|
||||||
* @param rawData String for which content type is to be detected
|
|
||||||
* @returns Content type of the data
|
|
||||||
*/
|
|
||||||
export function detectContentType(
|
|
||||||
rawData: string
|
|
||||||
): HoppRESTReqBody["contentType"] {
|
|
||||||
if (!rawData) return "text/plain"
|
|
||||||
|
|
||||||
let contentType: HoppRESTReqBody["contentType"]
|
|
||||||
|
|
||||||
if (O.isSome(safeParseJSON(rawData))) {
|
|
||||||
contentType = "application/json"
|
|
||||||
} else if (/<\/?[a-zA-Z][\s\S]*>/i.test(rawData)) {
|
|
||||||
if (O.isSome(prettifyXml(rawData))) {
|
|
||||||
contentType = "application/xml"
|
|
||||||
} else {
|
|
||||||
// everything is HTML
|
|
||||||
contentType = "text/html"
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
contentType = pipe(
|
|
||||||
rawData.match(/^-{2,}[A-Za-z0-9]+\\r\\n/),
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((boundaryMatch) => boundaryMatch.length > 0),
|
|
||||||
O.match(
|
|
||||||
() =>
|
|
||||||
pipe(
|
|
||||||
rawData,
|
|
||||||
O.fromPredicate((rd) => /([^&=]+)=([^&=]*)/.test(rd)),
|
|
||||||
O.match(
|
|
||||||
() => "text/plain",
|
|
||||||
() => "application/x-www-form-urlencoded"
|
|
||||||
)
|
|
||||||
),
|
|
||||||
() => "multipart/form-data"
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return contentType
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Prettifies XML string
|
|
||||||
* @param sourceXml The string to format
|
|
||||||
* @returns Indented XML string (uses spaces)
|
|
||||||
*/
|
|
||||||
const prettifyXml = (sourceXml: string) =>
|
|
||||||
pipe(
|
|
||||||
O.tryCatch(() => {
|
|
||||||
const xmlDoc = new DOMParser().parseFromString(
|
|
||||||
sourceXml,
|
|
||||||
"application/xml"
|
|
||||||
)
|
|
||||||
|
|
||||||
if (xmlDoc.querySelector("parsererror")) {
|
|
||||||
throw new Error("Unstructured Body")
|
|
||||||
}
|
|
||||||
|
|
||||||
const xsltDoc = new DOMParser().parseFromString(
|
|
||||||
[
|
|
||||||
// describes how we want to modify the XML - indent everything
|
|
||||||
'<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform">',
|
|
||||||
' <xsl:strip-space elements="*"/>',
|
|
||||||
' <xsl:template match="para[content-style][not(text())]">', // change to just text() to strip space in text nodes
|
|
||||||
' <xsl:value-of select="normalize-space(.)"/>',
|
|
||||||
" </xsl:template>",
|
|
||||||
' <xsl:template match="node()|@*">',
|
|
||||||
' <xsl:copy><xsl:apply-templates select="node()|@*"/></xsl:copy>',
|
|
||||||
" </xsl:template>",
|
|
||||||
' <xsl:output indent="yes"/>',
|
|
||||||
"</xsl:stylesheet>",
|
|
||||||
].join("\n"),
|
|
||||||
"application/xml"
|
|
||||||
)
|
|
||||||
|
|
||||||
const xsltProcessor = new XSLTProcessor()
|
|
||||||
xsltProcessor.importStylesheet(xsltDoc)
|
|
||||||
const resultDoc = xsltProcessor.transformToDocument(xmlDoc)
|
|
||||||
const resultXml = new XMLSerializer().serializeToString(resultDoc)
|
|
||||||
|
|
||||||
return resultXml
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Prettifies HTML string
|
|
||||||
* @param htmlString The string to format
|
|
||||||
* @returns Indented HTML string (uses spaces)
|
|
||||||
*/
|
|
||||||
const formatHTML = (htmlString: string) => {
|
|
||||||
const tab = " "
|
|
||||||
let result = ""
|
|
||||||
let indent = ""
|
|
||||||
const emptyTags = [
|
|
||||||
"area",
|
|
||||||
"base",
|
|
||||||
"br",
|
|
||||||
"col",
|
|
||||||
"embed",
|
|
||||||
"hr",
|
|
||||||
"img",
|
|
||||||
"input",
|
|
||||||
"link",
|
|
||||||
"meta",
|
|
||||||
"param",
|
|
||||||
"source",
|
|
||||||
"track",
|
|
||||||
"wbr",
|
|
||||||
]
|
|
||||||
|
|
||||||
const spl = htmlString.split(/>\s*</)
|
|
||||||
spl.forEach((element) => {
|
|
||||||
if (element.match(/^\/\w/)) {
|
|
||||||
indent = indent.substring(tab.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
result += indent + "<" + element + ">\n"
|
|
||||||
|
|
||||||
if (
|
|
||||||
element.match(/^<?\w[^>]*[^/]$/) &&
|
|
||||||
!emptyTags.includes(element.match(/^([a-z]*)/i)?.at(1) || "")
|
|
||||||
) {
|
|
||||||
indent += tab
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return result.substring(1, result.length - 2)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parses provided string according to the content type
|
|
||||||
* @param rawData Data to be parsed
|
|
||||||
* @param contentType Content type of the data
|
|
||||||
* @param boundary Optional parameter required for multipart/form-data content type
|
|
||||||
* @returns Option of parsed body as string or Record object for multipart/form-data
|
|
||||||
*/
|
|
||||||
export function parseBody(
|
|
||||||
rawData: string,
|
|
||||||
contentType: HoppRESTReqBody["contentType"],
|
|
||||||
rawContentType?: string
|
|
||||||
): O.Option<string | Record<string, string>> {
|
|
||||||
switch (contentType) {
|
|
||||||
case "application/hal+json":
|
|
||||||
case "application/ld+json":
|
|
||||||
case "application/vnd.api+json":
|
|
||||||
case "application/json": {
|
|
||||||
return pipe(
|
|
||||||
rawData,
|
|
||||||
safeParseJSON,
|
|
||||||
O.map((parsedJSON) => JSON.stringify(parsedJSON, null, 2)),
|
|
||||||
O.getOrElse(() => "{}"),
|
|
||||||
O.fromNullable
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
case "application/x-www-form-urlencoded": {
|
|
||||||
return pipe(
|
|
||||||
rawData,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map(decodeURIComponent),
|
|
||||||
O.chain((rd) =>
|
|
||||||
pipe(rd.match(/(([^&=]+)=?([^&=]*))/g), O.fromNullable)
|
|
||||||
),
|
|
||||||
O.map((pairs) => pairs.map((p) => p.replace("=", ": ")).join("\n"))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
case "multipart/form-data": {
|
|
||||||
/**
|
|
||||||
* O.bind binds "boundary"
|
|
||||||
* If rawContentType is present, try to extract boundary from it
|
|
||||||
* If rawContentTpe is not present, try to regex match the boundary from rawData
|
|
||||||
* In case both the above attempts fail, O.map is not executed and the pipe is
|
|
||||||
* short-circuited. O.none is returned.
|
|
||||||
*
|
|
||||||
* In the event the boundary is ascertained, process rawData to get key-value
|
|
||||||
* pairs and convert them to a tuple array. If the array is not empty,
|
|
||||||
* convert it to Record<string, string> type and return O.some of it.
|
|
||||||
*/
|
|
||||||
return pipe(
|
|
||||||
O.Do,
|
|
||||||
|
|
||||||
O.bind("boundary", () =>
|
|
||||||
pipe(
|
|
||||||
rawContentType,
|
|
||||||
O.fromNullable,
|
|
||||||
O.match(
|
|
||||||
() =>
|
|
||||||
pipe(
|
|
||||||
rawData.match(/-{2,}[A-Za-z0-9]+\\r\\n/g),
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((boundaryMatch) => boundaryMatch.length > 1),
|
|
||||||
O.map((matches) => matches[0])
|
|
||||||
),
|
|
||||||
(rct) =>
|
|
||||||
pipe(
|
|
||||||
rct.match(/boundary=(.+)/),
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter(
|
|
||||||
(boundaryContentMatch) => boundaryContentMatch.length > 1
|
|
||||||
),
|
|
||||||
O.filter((matches) =>
|
|
||||||
rawData
|
|
||||||
.replaceAll("\\r\\n", "")
|
|
||||||
.endsWith("--" + matches[1] + "--")
|
|
||||||
),
|
|
||||||
O.map((matches) => "--" + matches[1])
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.map(({ boundary }) =>
|
|
||||||
pipe(
|
|
||||||
rawData,
|
|
||||||
S.split(boundary),
|
|
||||||
RA.filter((p) => p !== "" && p.includes("name")),
|
|
||||||
RA.map((p) =>
|
|
||||||
pipe(
|
|
||||||
p.replaceAll(/\\r\\n+/g, "\\r\\n"),
|
|
||||||
S.split("\\r\\n"),
|
|
||||||
RA.filter((q) => q !== "")
|
|
||||||
)
|
|
||||||
),
|
|
||||||
RA.filterMap((p) =>
|
|
||||||
pipe(
|
|
||||||
p[0].match(/ name="(\w+)"/),
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((nameMatch) => nameMatch.length > 0),
|
|
||||||
O.map((nameMatch) => {
|
|
||||||
const name = nameMatch[0]
|
|
||||||
.replaceAll(/"/g, "")
|
|
||||||
.split("=", 2)[1]
|
|
||||||
return [name, p[0].includes("filename") ? "" : p[1]] as [
|
|
||||||
string,
|
|
||||||
string
|
|
||||||
]
|
|
||||||
})
|
|
||||||
)
|
|
||||||
),
|
|
||||||
RA.toArray
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.filter((arr) => arr.length > 0),
|
|
||||||
O.map(tupleToRecord)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
case "text/html": {
|
|
||||||
return pipe(rawData, O.fromNullable, O.map(formatHTML))
|
|
||||||
}
|
|
||||||
|
|
||||||
case "application/xml": {
|
|
||||||
return pipe(
|
|
||||||
rawData,
|
|
||||||
O.fromNullable,
|
|
||||||
O.chain(prettifyXml),
|
|
||||||
O.match(
|
|
||||||
() => rawData,
|
|
||||||
(res) => res
|
|
||||||
),
|
|
||||||
O.fromNullable
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
case "text/plain":
|
|
||||||
default:
|
|
||||||
return O.some(rawData)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,675 +1,178 @@
|
|||||||
import * as cookie from "cookie"
|
|
||||||
import parser from "yargs-parser"
|
import parser from "yargs-parser"
|
||||||
import * as RA from "fp-ts/ReadonlyArray"
|
|
||||||
import * as O from "fp-ts/Option"
|
import * as O from "fp-ts/Option"
|
||||||
import { pipe } from "fp-ts/function"
|
import * as A from "fp-ts/Array"
|
||||||
|
import { pipe, flow } from "fp-ts/function"
|
||||||
import {
|
import {
|
||||||
HoppRESTAuth,
|
|
||||||
FormDataKeyValue,
|
FormDataKeyValue,
|
||||||
HoppRESTReqBody,
|
HoppRESTReqBody,
|
||||||
makeRESTRequest,
|
makeRESTRequest,
|
||||||
} from "@hoppscotch/data"
|
} from "@hoppscotch/data"
|
||||||
import { detectContentType, parseBody } from "./contentParser"
|
import { getAuthObject } from "./sub_helpers/auth"
|
||||||
import { CurlParserRequest } from "."
|
import { getHeaders, recordToHoppHeaders } from "./sub_helpers/headers"
|
||||||
import { tupleToRecord } from "~/helpers/functional/record"
|
// import { getCookies } from "./sub_helpers/cookies"
|
||||||
import { stringArrayJoin } from "~/helpers/functional/array"
|
import { getQueries } from "./sub_helpers/queries"
|
||||||
|
import { getMethod } from "./sub_helpers/method"
|
||||||
|
import { concatParams, parseURL } from "./sub_helpers/url"
|
||||||
|
import { preProcessCurlCommand } from "./sub_helpers/preproc"
|
||||||
|
import { getBody, getFArgumentMultipartData } from "./sub_helpers/body"
|
||||||
|
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
|
||||||
|
import {
|
||||||
|
objHasProperty,
|
||||||
|
objHasArrayProperty,
|
||||||
|
} from "~/helpers/functional/object"
|
||||||
|
|
||||||
|
const defaultRESTReq = getDefaultRESTRequest()
|
||||||
|
|
||||||
export const parseCurlCommand = (curlCommand: string) => {
|
export const parseCurlCommand = (curlCommand: string) => {
|
||||||
const isDataBinary = curlCommand.includes(" --data-binary")
|
// const isDataBinary = curlCommand.includes(" --data-binary")
|
||||||
|
// const compressed = !!parsedArguments.compressed
|
||||||
|
|
||||||
curlCommand = preProcessCurlCommand(curlCommand)
|
curlCommand = preProcessCurlCommand(curlCommand)
|
||||||
const parsedArguments = parser(curlCommand)
|
const parsedArguments = parser(curlCommand)
|
||||||
|
|
||||||
const headers = getHeaders(parsedArguments)
|
const headerObject = getHeaders(parsedArguments)
|
||||||
const method = getMethod(parsedArguments)
|
const { headers } = headerObject
|
||||||
const urlObject = parseURL(parsedArguments)
|
let { rawContentType } = headerObject
|
||||||
|
const hoppHeaders = pipe(
|
||||||
let rawContentType: string = ""
|
headers,
|
||||||
let rawData: string | string[] = parsedArguments?.d || ""
|
O.fromPredicate(() => Object.keys(headers).length > 0),
|
||||||
let body: string | null = ""
|
O.map(recordToHoppHeaders),
|
||||||
let contentType: HoppRESTReqBody["contentType"] = null
|
O.getOrElse(() => defaultRESTReq.headers)
|
||||||
let hasBodyBeenParsed = false
|
|
||||||
|
|
||||||
if (headers && rawContentType === "")
|
|
||||||
rawContentType = headers["Content-Type"] || headers["content-type"] || ""
|
|
||||||
|
|
||||||
let { queries, danglingParams } = getQueries(
|
|
||||||
urlObject?.searchParams.entries()
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if (Array.isArray(rawData)) {
|
const method = getMethod(parsedArguments)
|
||||||
const pairs = getParamPairs(rawData)
|
// const cookies = getCookies(parsedArguments)
|
||||||
|
const urlObject = parseURL(parsedArguments)
|
||||||
|
const auth = getAuthObject(parsedArguments, headers, urlObject)
|
||||||
|
|
||||||
if (parsedArguments.G) {
|
let rawData: string | string[] = pipe(
|
||||||
const newQueries = getQueries(pairs as [string, string][])
|
parsedArguments,
|
||||||
queries = [...queries, ...newQueries.queries]
|
O.fromPredicate(objHasArrayProperty("d", "string")),
|
||||||
danglingParams = [...danglingParams, ...newQueries.danglingParams]
|
O.map((args) => args.d),
|
||||||
hasBodyBeenParsed = true
|
O.altW(() =>
|
||||||
} else if (rawContentType.includes("application/x-www-form-urlencoded")) {
|
pipe(
|
||||||
body = pairs?.map((p) => p.join(": ")).join("\n") || null
|
parsedArguments,
|
||||||
contentType = "application/x-www-form-urlencoded"
|
O.fromPredicate(objHasProperty("d", "string")),
|
||||||
hasBodyBeenParsed = true
|
O.map((args) => args.d)
|
||||||
} else {
|
)
|
||||||
rawData = rawData.join("")
|
),
|
||||||
}
|
O.getOrElseW(() => "")
|
||||||
|
)
|
||||||
|
|
||||||
|
let body: HoppRESTReqBody["body"] = ""
|
||||||
|
let contentType: HoppRESTReqBody["contentType"] =
|
||||||
|
defaultRESTReq.body.contentType
|
||||||
|
let hasBodyBeenParsed = false
|
||||||
|
|
||||||
|
let { queries, danglingParams } = getQueries(
|
||||||
|
Array.from(urlObject.searchParams.entries())
|
||||||
|
)
|
||||||
|
|
||||||
|
const stringToPair = flow(
|
||||||
|
decodeURIComponent,
|
||||||
|
(pair) => <[string, string]>pair.split("=", 2)
|
||||||
|
)
|
||||||
|
const pairs = pipe(
|
||||||
|
rawData,
|
||||||
|
O.fromPredicate(Array.isArray),
|
||||||
|
O.map(A.map(stringToPair)),
|
||||||
|
O.alt(() =>
|
||||||
|
pipe(
|
||||||
|
rawData,
|
||||||
|
O.fromPredicate((s) => s.length > 0),
|
||||||
|
O.map(() => [stringToPair(rawData as string)])
|
||||||
|
)
|
||||||
|
),
|
||||||
|
O.getOrElseW(() => undefined)
|
||||||
|
)
|
||||||
|
|
||||||
|
if (objHasProperty("G", "boolean")(parsedArguments) && !!pairs) {
|
||||||
|
const newQueries = getQueries(pairs)
|
||||||
|
queries = [...queries, ...newQueries.queries]
|
||||||
|
danglingParams = [...danglingParams, ...newQueries.danglingParams]
|
||||||
|
hasBodyBeenParsed = true
|
||||||
|
} else if (
|
||||||
|
rawContentType.includes("application/x-www-form-urlencoded") &&
|
||||||
|
!!pairs
|
||||||
|
) {
|
||||||
|
body = pairs.map((p) => p.join(": ")).join("\n") || null
|
||||||
|
contentType = "application/x-www-form-urlencoded"
|
||||||
|
hasBodyBeenParsed = true
|
||||||
}
|
}
|
||||||
|
|
||||||
const urlString = concatParams(urlObject, danglingParams) || ""
|
const urlString = concatParams(urlObject, danglingParams)
|
||||||
|
|
||||||
let multipartUploads: Record<string, string> = pipe(
|
let multipartUploads: Record<string, string> = pipe(
|
||||||
parsedArguments,
|
O.of(parsedArguments),
|
||||||
O.fromNullable,
|
|
||||||
O.chain(getFArgumentMultipartData),
|
O.chain(getFArgumentMultipartData),
|
||||||
O.match(
|
O.match(
|
||||||
() => ({}),
|
() => ({}),
|
||||||
(args) => {
|
(args) => {
|
||||||
|
hasBodyBeenParsed = true
|
||||||
rawContentType = "multipart/form-data"
|
rawContentType = "multipart/form-data"
|
||||||
return args
|
return args
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
const auth = getAuthObject(parsedArguments, headers, urlObject)
|
if (!hasBodyBeenParsed) {
|
||||||
|
if (typeof rawData !== "string") {
|
||||||
let cookies: Record<string, string> | undefined
|
rawData = rawData.join("")
|
||||||
|
|
||||||
const cookieString = parsedArguments.b || parsedArguments.cookie || ""
|
|
||||||
if (cookieString) {
|
|
||||||
const cookieParseOptions = {
|
|
||||||
decode: (s: any) => s,
|
|
||||||
}
|
}
|
||||||
// separate out cookie headers into separate data structure
|
const bodyObject = getBody(rawData, rawContentType, contentType)
|
||||||
// note: cookie is case insensitive
|
|
||||||
cookies = cookie.parse(
|
|
||||||
cookieString.replace(/^Cookie: /gi, ""),
|
|
||||||
cookieParseOptions
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!hasBodyBeenParsed && typeof rawData === "string") {
|
if (O.isSome(bodyObject)) {
|
||||||
const tempBody = pipe(
|
const bodyObjectValue = bodyObject.value
|
||||||
O.Do,
|
|
||||||
|
|
||||||
O.bind("rct", () =>
|
if (bodyObjectValue.type === "FORMDATA") {
|
||||||
pipe(
|
multipartUploads = bodyObjectValue.body
|
||||||
rawContentType,
|
} else {
|
||||||
O.fromNullable,
|
body = bodyObjectValue.body.body
|
||||||
O.filter(() => !!headers && rawContentType !== "")
|
contentType = bodyObjectValue.body
|
||||||
)
|
.contentType as HoppRESTReqBody["contentType"]
|
||||||
),
|
|
||||||
|
|
||||||
O.bind("cType", ({ rct }) =>
|
|
||||||
pipe(
|
|
||||||
rct,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((RCT) => RCT.toLowerCase()),
|
|
||||||
O.map((RCT) => RCT.split(";")[0]),
|
|
||||||
O.map((RCT) => RCT as HoppRESTReqBody["contentType"])
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.bind("rData", () =>
|
|
||||||
pipe(
|
|
||||||
rawData as string,
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter(() => !!rawData && rawData.length > 0)
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.bind("ctBody", ({ rct, cType, rData }) =>
|
|
||||||
pipe(rData, getBodyFromContentType(rct, cType))
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if (O.isSome(tempBody)) {
|
|
||||||
const { cType, ctBody } = tempBody.value
|
|
||||||
contentType = cType
|
|
||||||
if (typeof ctBody === "string") body = ctBody
|
|
||||||
else multipartUploads = ctBody
|
|
||||||
} else if (
|
|
||||||
!(
|
|
||||||
rawContentType &&
|
|
||||||
rawContentType.startsWith("multipart/form-data") &&
|
|
||||||
rawContentType.includes("boundary")
|
|
||||||
)
|
|
||||||
) {
|
|
||||||
const newTempBody = pipe(
|
|
||||||
rawData,
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter(() => !!rawData && rawData.length > 0),
|
|
||||||
O.chain(getBodyWithoutContentType)
|
|
||||||
)
|
|
||||||
|
|
||||||
if (O.isSome(newTempBody)) {
|
|
||||||
const { cType, proData } = newTempBody.value
|
|
||||||
contentType = cType
|
|
||||||
if (typeof proData === "string") body = proData
|
|
||||||
else multipartUploads = proData
|
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
body = null
|
|
||||||
contentType = null
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const compressed = !!parsedArguments.compressed
|
const finalBody: HoppRESTReqBody = pipe(
|
||||||
const hoppHeaders = recordToHoppHeaders(headers)
|
|
||||||
|
|
||||||
const request: CurlParserRequest = {
|
|
||||||
urlString,
|
|
||||||
urlObject,
|
|
||||||
compressed,
|
|
||||||
queries,
|
|
||||||
hoppHeaders,
|
|
||||||
method,
|
|
||||||
contentType,
|
|
||||||
body,
|
body,
|
||||||
cookies,
|
|
||||||
cookieString: cookieString?.replace(/Cookie: /i, ""),
|
|
||||||
multipartUploads,
|
|
||||||
isDataBinary,
|
|
||||||
auth,
|
|
||||||
}
|
|
||||||
|
|
||||||
return request
|
|
||||||
}
|
|
||||||
|
|
||||||
// ############################################ //
|
|
||||||
// ## HELPER FUNCTIONS ## //
|
|
||||||
// ############################################ //
|
|
||||||
|
|
||||||
const replaceables: { [key: string]: string } = {
|
|
||||||
"--request": "-X",
|
|
||||||
"--header": "-H",
|
|
||||||
"--url": "",
|
|
||||||
"--form": "-F",
|
|
||||||
"--data-raw": "--data",
|
|
||||||
"--data": "-d",
|
|
||||||
"--data-ascii": "-d",
|
|
||||||
"--data-binary": "-d",
|
|
||||||
"--user": "-u",
|
|
||||||
"--get": "-G",
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sanitizes curl string
|
|
||||||
* @param curlCommand Raw curl command string
|
|
||||||
* @returns Processed curl command string
|
|
||||||
*/
|
|
||||||
function preProcessCurlCommand(curlCommand: string) {
|
|
||||||
// remove '\' and newlines
|
|
||||||
curlCommand = curlCommand.replace(/ ?\\ ?$/gm, " ")
|
|
||||||
curlCommand = curlCommand.replace(/\n/g, "")
|
|
||||||
|
|
||||||
// remove all $ symbols from start of argument values
|
|
||||||
curlCommand = curlCommand.replaceAll("$'", "'")
|
|
||||||
curlCommand = curlCommand.replaceAll('$"', '"')
|
|
||||||
|
|
||||||
// replace string for insomnia
|
|
||||||
for (const r in replaceables) {
|
|
||||||
if (r.includes("data") || r.includes("form") || r.includes("header")) {
|
|
||||||
curlCommand = curlCommand.replaceAll(
|
|
||||||
RegExp(`[ \t]${r}(["' ])`, "g"),
|
|
||||||
` ${replaceables[r]}$1`
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
curlCommand = curlCommand.replace(
|
|
||||||
RegExp(`[ \t]${r}(["' ])`),
|
|
||||||
` ${replaceables[r]}$1`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// yargs parses -XPOST as separate arguments. just prescreen for it.
|
|
||||||
curlCommand = curlCommand.replace(
|
|
||||||
/ -X(GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE|CUSTOM)/,
|
|
||||||
" -X $1"
|
|
||||||
)
|
|
||||||
curlCommand = curlCommand.trim()
|
|
||||||
|
|
||||||
return curlCommand
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Parses body based on the content type
|
|
||||||
* @param rct Raw content type
|
|
||||||
* @param cType Sanitized content type
|
|
||||||
* @returns Option of parsed body
|
|
||||||
*/
|
|
||||||
function getBodyFromContentType(
|
|
||||||
rct: string,
|
|
||||||
cType: HoppRESTReqBody["contentType"]
|
|
||||||
) {
|
|
||||||
return (rData: string) => {
|
|
||||||
if (cType === "multipart/form-data")
|
|
||||||
// put body to multipartUploads in post processing
|
|
||||||
return pipe(
|
|
||||||
parseBody(rData, cType, rct),
|
|
||||||
O.filter((parsedBody) => typeof parsedBody !== "string")
|
|
||||||
)
|
|
||||||
else
|
|
||||||
return pipe(
|
|
||||||
parseBody(rData, cType),
|
|
||||||
O.filter(
|
|
||||||
(parsedBody) =>
|
|
||||||
typeof parsedBody === "string" && parsedBody.length > 0
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Detects and parses body without the help of content type
|
|
||||||
* @param rawData Raw body string
|
|
||||||
* @returns Option of raw data, detected content type and parsed data
|
|
||||||
*/
|
|
||||||
function getBodyWithoutContentType(rawData: string) {
|
|
||||||
return pipe(
|
|
||||||
O.Do,
|
|
||||||
|
|
||||||
O.bind("rData", () =>
|
|
||||||
pipe(
|
|
||||||
rawData,
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((rd) => rd.length > 0)
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.bind("cType", ({ rData }) =>
|
|
||||||
pipe(rData, detectContentType, O.fromNullable)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.bind("proData", ({ cType, rData }) => parseBody(rData, cType))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Processes URL string and returns the URL object
|
|
||||||
* @param parsedArguments Parsed Arguments object
|
|
||||||
* @returns URL object
|
|
||||||
*/
|
|
||||||
function parseURL(parsedArguments: parser.Arguments) {
|
|
||||||
return pipe(
|
|
||||||
parsedArguments?._[1],
|
|
||||||
O.fromNullable,
|
O.fromNullable,
|
||||||
O.map((u) => u.toString().replace(/["']/g, "")),
|
O.filter((b) => b.length > 0),
|
||||||
O.map((u) => u.trim()),
|
O.map((b) => <HoppRESTReqBody>{ body: b, contentType }),
|
||||||
O.chain((u) =>
|
O.alt(() =>
|
||||||
pipe(
|
pipe(
|
||||||
/^[^:\s]+(?=:\/\/)/.exec(u),
|
multipartUploads,
|
||||||
O.fromNullable,
|
O.of,
|
||||||
O.map((p) => p[2]),
|
O.map((m) => Object.entries(m)),
|
||||||
O.match(
|
O.filter((m) => m.length > 0),
|
||||||
// if protocol is not found
|
O.map(
|
||||||
() =>
|
flow(
|
||||||
pipe(
|
A.map(
|
||||||
// get the base URL
|
([key, value]) =>
|
||||||
/^([^\s:@]+:[^\s:@]+@)?([^:/\s]+)([:]*)/.exec(u),
|
<FormDataKeyValue>{
|
||||||
O.fromNullable,
|
active: true,
|
||||||
O.map((burl) => burl[2]),
|
isFile: false,
|
||||||
O.map((burl) =>
|
key,
|
||||||
burl === "localhost" || burl === "127.0.0.1"
|
value,
|
||||||
? "http://" + u
|
}
|
||||||
: "https://" + u
|
|
||||||
)
|
|
||||||
),
|
),
|
||||||
(_) => O.some(u)
|
(b) =>
|
||||||
)
|
<HoppRESTReqBody>{ body: b, contentType: "multipart/form-data" }
|
||||||
)
|
|
||||||
),
|
|
||||||
O.map((u) => new URL(u)),
|
|
||||||
O.getOrElse(() => {
|
|
||||||
// no url found
|
|
||||||
for (const argName in parsedArguments) {
|
|
||||||
if (
|
|
||||||
typeof parsedArguments[argName] === "string" &&
|
|
||||||
["http", "www."].includes(parsedArguments[argName])
|
|
||||||
)
|
|
||||||
return pipe(
|
|
||||||
parsedArguments[argName],
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((u) => new URL(u)),
|
|
||||||
O.match(
|
|
||||||
() => undefined,
|
|
||||||
(u) => u
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts queries to HoppRESTParam format and separates dangling ones
|
|
||||||
* @param queries Array or IterableIterator of key value pairs of queries
|
|
||||||
* @returns Queries formatted compatible to HoppRESTParam and list of dangling params
|
|
||||||
*/
|
|
||||||
function getQueries(
|
|
||||||
searchParams:
|
|
||||||
| [string, string][]
|
|
||||||
| IterableIterator<[string, string]>
|
|
||||||
| undefined
|
|
||||||
) {
|
|
||||||
const danglingParams: string[] = []
|
|
||||||
const queries = pipe(
|
|
||||||
searchParams,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((iter) => {
|
|
||||||
const params = []
|
|
||||||
|
|
||||||
for (const q of iter) {
|
|
||||||
if (!q[1]) {
|
|
||||||
danglingParams.push(q[0])
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
params.push({
|
|
||||||
key: q[0],
|
|
||||||
value: q[1],
|
|
||||||
active: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return params
|
|
||||||
}),
|
|
||||||
|
|
||||||
O.getOrElseW(() => [])
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
queries,
|
|
||||||
danglingParams,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Joins dangling params to origin
|
|
||||||
* @param origin origin value from the URL Object
|
|
||||||
* @param params params without values
|
|
||||||
* @returns origin string concatenated with dangling paramas
|
|
||||||
*/
|
|
||||||
function concatParams(urlObject: URL | undefined, params: string[]) {
|
|
||||||
return pipe(
|
|
||||||
O.Do,
|
|
||||||
|
|
||||||
O.bind("originString", () =>
|
|
||||||
pipe(
|
|
||||||
urlObject?.origin,
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((h) => h !== "")
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.map(({ originString }) =>
|
|
||||||
pipe(
|
|
||||||
params,
|
|
||||||
O.fromNullable,
|
|
||||||
O.filter((dp) => dp.length > 0),
|
|
||||||
O.map(stringArrayJoin("&")),
|
|
||||||
O.map((h) => originString + (urlObject?.pathname || "") + "?" + h),
|
|
||||||
O.getOrElse(() => originString + (urlObject?.pathname || ""))
|
|
||||||
)
|
|
||||||
),
|
|
||||||
|
|
||||||
O.getOrElse(() => "")
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parses and structures multipart/form-data from -F argument of curl command
|
|
||||||
* @param parsedArguments Parsed Arguments object
|
|
||||||
* @returns Option of Record<string, string> type containing key-value pairs of multipart/form-data
|
|
||||||
*/
|
|
||||||
function getFArgumentMultipartData(
|
|
||||||
parsedArguments: parser.Arguments
|
|
||||||
): O.Option<Record<string, string>> {
|
|
||||||
// -F multipart data
|
|
||||||
|
|
||||||
return pipe(
|
|
||||||
parsedArguments.F as Array<string> | string | undefined,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((fArgs) => (Array.isArray(fArgs) ? fArgs : [fArgs])),
|
|
||||||
O.map((fArgs: string[]) =>
|
|
||||||
pipe(
|
|
||||||
fArgs.map((multipartArgument: string) => {
|
|
||||||
const [key, value] = multipartArgument.split("=", 2)
|
|
||||||
|
|
||||||
if (parsedArguments["form-string"])
|
|
||||||
return [key, value] as [string, string]
|
|
||||||
return [key, value[0] === "@" || value[0] === "<" ? "" : value] as [
|
|
||||||
string,
|
|
||||||
string
|
|
||||||
]
|
|
||||||
}),
|
|
||||||
RA.toArray,
|
|
||||||
tupleToRecord
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get method type from X argument in curl string or
|
|
||||||
* find it out through presence of other arguments
|
|
||||||
* @param parsedArguments Parsed Arguments object
|
|
||||||
* @returns Method type
|
|
||||||
*/
|
|
||||||
function getMethod(parsedArguments: parser.Arguments): string {
|
|
||||||
const Xarg: string = parsedArguments.X
|
|
||||||
return pipe(
|
|
||||||
Xarg?.match(/GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE|CUSTOM/i),
|
|
||||||
O.fromNullable,
|
|
||||||
O.match(
|
|
||||||
() => {
|
|
||||||
if (parsedArguments.T) return "put"
|
|
||||||
else if (parsedArguments.I || parsedArguments.head) return "head"
|
|
||||||
else if (parsedArguments.G) return "get"
|
|
||||||
else if (parsedArguments.d || parsedArguments.F) return "post"
|
|
||||||
else return "get"
|
|
||||||
},
|
|
||||||
(method) => method[0]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
function getHeaders(parsedArguments: parser.Arguments) {
|
|
||||||
let headers: Record<string, string> = {}
|
|
||||||
|
|
||||||
headers = pipe(
|
|
||||||
parsedArguments.H,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((h) => (Array.isArray(h) ? h : [h])),
|
|
||||||
O.map((h: string[]) =>
|
|
||||||
pipe(
|
|
||||||
h.map((header: string) => {
|
|
||||||
const [key, value] = header.split(": ")
|
|
||||||
return [key.trim(), value.trim()] as [string, string]
|
|
||||||
}),
|
|
||||||
RA.toArray,
|
|
||||||
tupleToRecord
|
|
||||||
)
|
|
||||||
),
|
|
||||||
O.match(
|
|
||||||
() => ({}),
|
|
||||||
(h) => h
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
const userAgent = parsedArguments.A || parsedArguments["user-agent"]
|
|
||||||
if (userAgent) headers["User-Agent"] = userAgent
|
|
||||||
|
|
||||||
return headers
|
|
||||||
}
|
|
||||||
|
|
||||||
function recordToHoppHeaders(headers: Record<string, string>) {
|
|
||||||
const hoppHeaders = []
|
|
||||||
for (const key of Object.keys(headers)) {
|
|
||||||
hoppHeaders.push({
|
|
||||||
key,
|
|
||||||
value: headers[key],
|
|
||||||
active: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return hoppHeaders
|
|
||||||
}
|
|
||||||
|
|
||||||
function getParamPairs(rawdata: string[]) {
|
|
||||||
return pipe(
|
|
||||||
rawdata,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((p) => p.map(decodeURIComponent)),
|
|
||||||
O.map((pairs) => pairs.map((pair) => pair.split("="))),
|
|
||||||
O.getOrElseW(() => undefined)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
function getAuthObject(
|
|
||||||
parsedArguments: parser.Arguments,
|
|
||||||
headers: Record<string, string>,
|
|
||||||
urlObject: URL | undefined
|
|
||||||
): HoppRESTAuth {
|
|
||||||
// >> preference order:
|
|
||||||
// - Auth headers
|
|
||||||
// - apikey headers
|
|
||||||
// - --user arg
|
|
||||||
// - Creds provided along with URL
|
|
||||||
|
|
||||||
let auth: HoppRESTAuth = {
|
|
||||||
authActive: false,
|
|
||||||
authType: "none",
|
|
||||||
}
|
|
||||||
let username: string = ""
|
|
||||||
let password: string = ""
|
|
||||||
|
|
||||||
if (headers?.Authorization) {
|
|
||||||
auth = pipe(
|
|
||||||
headers?.Authorization,
|
|
||||||
O.fromNullable,
|
|
||||||
O.map((a) => a.split(" ")),
|
|
||||||
O.filter((a) => a.length > 0),
|
|
||||||
O.chain((kv) =>
|
|
||||||
pipe(
|
|
||||||
(() => {
|
|
||||||
switch (kv[0].toLowerCase()) {
|
|
||||||
case "bearer":
|
|
||||||
return {
|
|
||||||
authActive: true,
|
|
||||||
authType: "bearer",
|
|
||||||
token: kv[1],
|
|
||||||
}
|
|
||||||
case "apikey":
|
|
||||||
return {
|
|
||||||
authActive: true,
|
|
||||||
authType: "api-key",
|
|
||||||
key: "apikey",
|
|
||||||
value: kv[1],
|
|
||||||
addTo: "headers",
|
|
||||||
}
|
|
||||||
case "basic": {
|
|
||||||
const buffer = Buffer.from(kv[1], "base64")
|
|
||||||
const [username, password] = buffer.toString().split(":")
|
|
||||||
return {
|
|
||||||
authActive: true,
|
|
||||||
authType: "basic",
|
|
||||||
username,
|
|
||||||
password,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
})(),
|
|
||||||
O.fromNullable
|
|
||||||
)
|
)
|
||||||
),
|
)
|
||||||
O.getOrElseW(() => ({ authActive: false, authType: "none" }))
|
),
|
||||||
) as HoppRESTAuth
|
O.getOrElse(() => defaultRESTReq.body)
|
||||||
} else if (headers?.apikey || headers["api-key"]) {
|
|
||||||
const apikey = headers?.apikey || headers["api-key"]
|
|
||||||
if (apikey)
|
|
||||||
auth = {
|
|
||||||
authActive: true,
|
|
||||||
authType: "api-key",
|
|
||||||
key: headers?.apikey ? "apikey" : "api-key",
|
|
||||||
value: apikey,
|
|
||||||
addTo: "headers",
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (parsedArguments.u) {
|
|
||||||
const user: string = parsedArguments.u ?? ""
|
|
||||||
;[username, password] = user.split(":")
|
|
||||||
} else if (urlObject) {
|
|
||||||
username = urlObject.username
|
|
||||||
password = urlObject.password
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!!username && !!password)
|
|
||||||
auth = {
|
|
||||||
authType: "basic",
|
|
||||||
authActive: true,
|
|
||||||
username,
|
|
||||||
password,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return auth
|
|
||||||
}
|
|
||||||
|
|
||||||
export function requestToHoppRequest(parsedCurl: CurlParserRequest) {
|
|
||||||
const endpoint = parsedCurl.urlString
|
|
||||||
const params = parsedCurl.queries || []
|
|
||||||
const body = parsedCurl.body
|
|
||||||
|
|
||||||
const method = parsedCurl.method?.toUpperCase() || "GET"
|
|
||||||
const contentType = parsedCurl.contentType
|
|
||||||
const auth = parsedCurl.auth
|
|
||||||
const headers =
|
|
||||||
parsedCurl.hoppHeaders.filter(
|
|
||||||
(header) =>
|
|
||||||
header.key !== "Authorization" &&
|
|
||||||
header.key !== "content-type" &&
|
|
||||||
header.key !== "Content-Type" &&
|
|
||||||
header.key !== "apikey" &&
|
|
||||||
header.key !== "api-key"
|
|
||||||
) || []
|
|
||||||
|
|
||||||
let finalBody: HoppRESTReqBody = {
|
|
||||||
contentType: null,
|
|
||||||
body: null,
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
contentType &&
|
|
||||||
contentType !== "multipart/form-data" &&
|
|
||||||
typeof body === "string"
|
|
||||||
)
|
)
|
||||||
// final body if multipart data is not present
|
|
||||||
finalBody = {
|
|
||||||
contentType,
|
|
||||||
body,
|
|
||||||
}
|
|
||||||
else if (Object.keys(parsedCurl.multipartUploads).length > 0) {
|
|
||||||
// if multipart data is present
|
|
||||||
const ydob: FormDataKeyValue[] = []
|
|
||||||
for (const key in parsedCurl.multipartUploads) {
|
|
||||||
ydob.push({
|
|
||||||
active: true,
|
|
||||||
isFile: false,
|
|
||||||
key,
|
|
||||||
value: parsedCurl.multipartUploads[key],
|
|
||||||
})
|
|
||||||
}
|
|
||||||
finalBody = {
|
|
||||||
contentType: "multipart/form-data",
|
|
||||||
body: ydob,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return makeRESTRequest({
|
return makeRESTRequest({
|
||||||
name: "Untitled request",
|
name: defaultRESTReq.name,
|
||||||
endpoint,
|
endpoint: urlString,
|
||||||
method,
|
method: (method || defaultRESTReq.method).toUpperCase(),
|
||||||
params,
|
params: queries ?? defaultRESTReq.params,
|
||||||
headers,
|
headers: hoppHeaders,
|
||||||
preRequestScript: "",
|
preRequestScript: defaultRESTReq.preRequestScript,
|
||||||
testScript: "",
|
testScript: defaultRESTReq.testScript,
|
||||||
auth,
|
auth,
|
||||||
body: finalBody,
|
body: finalBody,
|
||||||
})
|
})
|
||||||
@@ -1,31 +1,5 @@
-import {
-  HoppRESTReqBody,
-  HoppRESTHeader,
-  HoppRESTParam,
-  HoppRESTAuth,
-} from "@hoppscotch/data"
 import { flow } from "fp-ts/function"
 import cloneDeep from "lodash/cloneDeep"
-import { parseCurlCommand, requestToHoppRequest } from "./curlparser"
+import { parseCurlCommand } from "./curlparser"
 
-export type CurlParserRequest = {
-  urlString: string
-  urlObject: URL | undefined
-  compressed: boolean
-  queries: HoppRESTParam[]
-  hoppHeaders: HoppRESTHeader[]
-  method: string
-  contentType: HoppRESTReqBody["contentType"]
-  body: HoppRESTReqBody["body"]
-  cookies: Record<string, string> | undefined
-  cookieString: string
-  multipartUploads: Record<string, string>
-  isDataBinary: boolean
-  auth: HoppRESTAuth
-}
-
-export const parseCurlToHoppRESTReq = flow(
-  parseCurlCommand,
-  requestToHoppRequest,
-  cloneDeep
-)
+export const parseCurlToHoppRESTReq = flow(parseCurlCommand, cloneDeep)
packages/hoppscotch-app/helpers/curl/sub_helpers/auth.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
+import { HoppRESTAuth } from "@hoppscotch/data"
+import parser from "yargs-parser"
+import * as O from "fp-ts/Option"
+import * as S from "fp-ts/string"
+import { pipe } from "fp-ts/function"
+import { getDefaultRESTRequest } from "~/newstore/RESTSession"
+import { objHasProperty } from "~/helpers/functional/object"
+
+const defaultRESTReq = getDefaultRESTRequest()
+
+const getAuthFromAuthHeader = (headers: Record<string, string>) =>
+  pipe(
+    headers.Authorization,
+    O.fromNullable,
+    O.map((a) => a.split(" ")),
+    O.filter((a) => a.length > 1),
+    O.chain((kv) =>
+      O.fromNullable(
+        (() => {
+          switch (kv[0].toLowerCase()) {
+            case "bearer":
+              return <HoppRESTAuth>{
+                authActive: true,
+                authType: "bearer",
+                token: kv[1],
+              }
+            case "basic": {
+              const [username, password] = pipe(
+                O.tryCatch(() => atob(kv[1])),
+                O.map(S.split(":")),
+                // can have a username with no password
+                O.filter((arr) => arr.length > 0),
+                O.map(
+                  ([username, password]) =>
+                    <[string, string]>[username, password]
+                ),
+                O.getOrElse(() => ["", ""])
+              )
+
+              if (!username) return undefined
+
+              return <HoppRESTAuth>{
+                authActive: true,
+                authType: "basic",
+                username,
+                password: password ?? "",
+              }
+            }
+            default:
+              return undefined
+          }
+        })()
+      )
+    )
+  )
+
+const getAuthFromParsedArgs = (parsedArguments: parser.Arguments) =>
+  pipe(
+    parsedArguments,
+    O.fromPredicate(objHasProperty("u", "string")),
+    O.chain((args) =>
+      pipe(
+        args.u,
+        S.split(":"),
+        // can have a username with no password
+        O.fromPredicate((arr) => arr.length > 0 && arr[0].length > 0),
+        O.map(
+          ([username, password]) => <[string, string]>[username, password ?? ""]
+        )
+      )
+    ),
+    O.map(
+      ([username, password]) =>
+        <HoppRESTAuth>{
+          authActive: true,
+          authType: "basic",
+          username,
+          password,
+        }
+    )
+  )
+
+const getAuthFromURLObject = (urlObject: URL) =>
+  pipe(
+    urlObject,
+    (url) => [url.username, url.password ?? ""],
+    // can have a username with no password
+    O.fromPredicate(([username, _]) => !!username && username.length > 0),
+    O.map(
+      ([username, password]) =>
+        <HoppRESTAuth>{
+          authActive: true,
+          authType: "basic",
+          username,
+          password,
+        }
+    )
+  )
+
+/**
+ * Preference order:
+ * - Auth headers
+ * - --user or -u argument
+ * - Creds provided along with URL
+ */
+export const getAuthObject = (
+  parsedArguments: parser.Arguments,
+  headers: Record<string, string>,
+  urlObject: URL
+): HoppRESTAuth =>
+  pipe(
+    getAuthFromAuthHeader(headers),
+    O.alt(() => getAuthFromParsedArgs(parsedArguments)),
+    O.alt(() => getAuthFromURLObject(urlObject)),
+    O.getOrElse(() => defaultRESTReq.auth)
+  )
packages/hoppscotch-app/helpers/curl/sub_helpers/body.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
|
import parser from "yargs-parser"
|
||||||
|
import { pipe, flow } from "fp-ts/function"
|
||||||
|
import * as O from "fp-ts/Option"
|
||||||
|
import * as A from "fp-ts/Array"
|
||||||
|
import * as RNEA from "fp-ts/ReadonlyNonEmptyArray"
|
||||||
|
import * as S from "fp-ts/string"
|
||||||
|
import {
|
||||||
|
HoppRESTReqBody,
|
||||||
|
HoppRESTReqBodyFormData,
|
||||||
|
ValidContentTypes,
|
||||||
|
knownContentTypes,
|
||||||
|
} from "@hoppscotch/data"
|
||||||
|
import { detectContentType, parseBody } from "./contentParser"
|
||||||
|
import { tupleToRecord } from "~/helpers/functional/record"
|
||||||
|
import {
|
||||||
|
objHasProperty,
|
||||||
|
objHasArrayProperty,
|
||||||
|
} from "~/helpers/functional/object"
|
||||||
|
|
||||||
|
type BodyReturnType =
|
||||||
|
| { type: "FORMDATA"; body: Record<string, string> }
|
||||||
|
| {
|
||||||
|
type: "NON_FORMDATA"
|
||||||
|
body: Exclude<HoppRESTReqBody, HoppRESTReqBodyFormData>
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Parses body based on the content type
|
||||||
|
* @param rData Raw data
|
||||||
|
* @param cType Sanitized content type
|
||||||
|
* @returns Option of parsed body of type string | Record<string, string>
|
||||||
|
*/
|
||||||
|
const getBodyFromContentType =
|
||||||
|
(rData: string, cType: HoppRESTReqBody["contentType"]) => (rct: string) =>
|
||||||
|
pipe(
|
||||||
|
cType,
|
||||||
|
O.fromPredicate((ctype) => ctype === "multipart/form-data"),
|
||||||
|
O.chain(() =>
|
||||||
|
pipe(
|
||||||
|
// pass rawContentType for boundary ascertion
|
||||||
|
parseBody(rData, cType, rct),
|
||||||
|
O.filter((parsedBody) => typeof parsedBody !== "string")
|
||||||
|
)
|
||||||
|
),
|
||||||
|
O.alt(() =>
|
||||||
|
pipe(
|
||||||
|
parseBody(rData, cType),
|
||||||
|
O.filter(
|
||||||
|
(parsedBody) =>
|
||||||
|
typeof parsedBody === "string" && parsedBody.length > 0
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
const getContentTypeFromRawContentType = (rawContentType: string) =>
|
||||||
|
pipe(
|
||||||
|
rawContentType,
|
||||||
|
O.fromPredicate((rct) => rct.length > 0),
|
||||||
|
// get everything before semi-colon
|
||||||
|
O.map(flow(S.toLowerCase, S.split(";"), RNEA.head)),
|
||||||
|
// if rawContentType is valid, cast it to contentType type
|
||||||
|
O.filter((ct) => Object.keys(knownContentTypes).includes(ct)),
|
||||||
|
O.map((ct) => ct as HoppRESTReqBody["contentType"])
|
||||||
|
)
|
||||||
|
|
||||||
|
const getContentTypeFromRawData = (rawData: string) =>
|
||||||
|
pipe(
|
||||||
|
rawData,
|
||||||
|
O.fromPredicate((rd) => rd.length > 0),
|
||||||
|
O.map(detectContentType)
|
||||||
|
)
|
||||||
|
|
||||||
|
export const getBody = (
|
||||||
|
rawData: string,
|
||||||
|
rawContentType: string,
|
||||||
|
contentType: HoppRESTReqBody["contentType"]
|
||||||
|
): O.Option<BodyReturnType> => {
|
||||||
|
return pipe(
|
||||||
|
O.Do,
|
||||||
|
|
||||||
|
O.bind("cType", () =>
|
||||||
|
pipe(
|
||||||
|
// get provided content-type
|
||||||
|
contentType,
|
||||||
|
O.fromNullable,
|
||||||
|
// or figure it out
|
||||||
|
O.alt(() => getContentTypeFromRawContentType(rawContentType)),
|
||||||
|
O.alt(() => getContentTypeFromRawData(rawData))
|
||||||
|
)
|
||||||
|
),
|
||||||
|
|
||||||
|
O.bind("rData", () =>
|
||||||
|
pipe(
|
||||||
|
rawData,
|
||||||
|
O.fromPredicate(() => rawData.length > 0)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
|
||||||
|
O.bind("ctBody", ({ cType, rData }) =>
|
||||||
|
pipe(rawContentType, getBodyFromContentType(rData, cType))
|
||||||
|
),
|
||||||
|
|
||||||
|
O.map(({ cType, ctBody }) =>
|
||||||
|
typeof ctBody === "string"
|
||||||
|
? {
|
||||||
|
type: "NON_FORMDATA",
|
||||||
|
body: {
|
||||||
|
body: ctBody,
|
||||||
|
contentType: cType as Exclude<
|
||||||
|
ValidContentTypes,
|
||||||
|
"multipart/form-data"
|
||||||
|
>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: { type: "FORMDATA", body: ctBody }
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses and structures multipart/form-data from -F argument of curl command
|
||||||
|
* @param parsedArguments Parsed Arguments object
|
||||||
|
* @returns Option of Record<string, string> type containing key-value pairs of multipart/form-data
|
||||||
|
*/
|
||||||
|
export function getFArgumentMultipartData(
|
||||||
|
parsedArguments: parser.Arguments
|
||||||
|
): O.Option<Record<string, string>> {
|
||||||
|
// --form or -F multipart data
|
||||||
|
|
||||||
|
return pipe(
|
||||||
|
parsedArguments,
|
||||||
|
// make it an array if not already
|
||||||
|
O.fromPredicate(objHasProperty("F", "string")),
|
||||||
|
O.map((args) => [args.F]),
|
||||||
|
O.alt(() =>
|
||||||
|
pipe(
|
||||||
|
parsedArguments,
|
||||||
|
O.fromPredicate(objHasArrayProperty("F", "string")),
|
||||||
|
O.map((args) => args.F)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
O.chain(
|
||||||
|
flow(
|
||||||
|
A.map(S.split("=")),
|
||||||
|
// can only have a key and no value
|
||||||
|
O.fromPredicate((fArgs) => fArgs.length > 0),
|
||||||
|
O.map(
|
||||||
|
flow(
|
||||||
|
A.map(([k, v]) =>
|
||||||
|
pipe(
|
||||||
|
parsedArguments,
|
||||||
|
// form-string option allows for "@" and "<" prefixes
|
||||||
|
// without them being considered as files
|
||||||
|
O.fromPredicate(objHasProperty("form-string", "boolean")),
|
||||||
|
O.match(
|
||||||
|
// leave the value field empty for files
|
||||||
|
() => [k, v[0] === "@" || v[0] === "<" ? "" : v],
|
||||||
|
(_) => [k, v]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
A.map(([k, v]) => [k, v] as [string, string]),
|
||||||
|
tupleToRecord
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
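A small sketch (not part of the diff) of how getBody resolves the content type and parses a raw --data payload. The import path is assumed from the file path above:

// Hypothetical usage sketch.
import * as O from "fp-ts/Option"
import { getBody } from "~/helpers/curl/sub_helpers/body"

// Explicit content type: detection is skipped and the JSON body is pretty-printed
const body = getBody('{"foo":"bar"}', "", "application/json")
console.log(O.toNullable(body))
// → { type: "NON_FORMDATA",
//     body: { body: '{\n  "foo": "bar"\n}', contentType: "application/json" } }

// Empty raw data short-circuits to O.none
console.log(O.isNone(getBody("", "", null))) // true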
@@ -0,0 +1,303 @@
import { HoppRESTReqBody } from "@hoppscotch/data"
import * as O from "fp-ts/Option"
import * as RA from "fp-ts/ReadonlyArray"
import * as S from "fp-ts/string"
import { pipe, flow } from "fp-ts/function"
import { tupleToRecord } from "~/helpers/functional/record"
import { safeParseJSON } from "~/helpers/functional/json"
import { optionChoose } from "~/helpers/functional/option"

const isJSON = flow(safeParseJSON, O.isSome)

const isXML = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate(() => /<\/?[a-zA-Z][\s\S]*>/i.test(rawData)),
    O.chain(prettifyXml),
    O.isSome
  )

const isHTML = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate(() => /<\/?[a-zA-Z][\s\S]*>/i.test(rawData)),
    O.isSome
  )

const isFormData = (rawData: string) =>
  pipe(
    rawData.match(/^-{2,}[A-Za-z0-9]+\\r\\n/),
    O.fromNullable,
    O.filter((boundaryMatch) => boundaryMatch.length > 0),
    O.isSome
  )

const isXWWWFormUrlEncoded = (rawData: string) =>
  pipe(
    rawData,
    O.fromPredicate((rd) => /([^&=]+)=([^&=]*)/.test(rd)),
    O.isSome
  )

/**
 * Detects the content type of the input string
 * @param rawData String for which content type is to be detected
 * @returns Content type of the data
 */
export const detectContentType = (
  rawData: string
): HoppRESTReqBody["contentType"] =>
  pipe(
    rawData,
    optionChoose([
      [(rd) => !rd, null],
      [isJSON, "application/json" as const],
      [isFormData, "multipart/form-data" as const],
      [isXML, "application/xml" as const],
      [isHTML, "text/html" as const],
      [isXWWWFormUrlEncoded, "application/x-www-form-urlencoded" as const],
    ]),
    O.getOrElseW(() => "text/plain" as const)
  )

const multipartFunctions = {
  getBoundary(rawData: string, rawContentType: string | undefined) {
    return pipe(
      rawContentType,
      O.fromNullable,
      O.filter((rct) => rct.length > 0),
      O.match(
        () => this.getBoundaryFromRawData(rawData),
        (rct) => this.getBoundaryFromRawContentType(rawData, rct)
      )
    )
  },

  getBoundaryFromRawData(rawData: string) {
    return pipe(
      rawData.match(/(-{2,}[A-Za-z0-9]+)\\r\\n/g),
      O.fromNullable,
      O.filter((boundaryMatch) => boundaryMatch.length > 0),
      O.map((matches) => matches[0].slice(0, -4))
    )
  },

  getBoundaryFromRawContentType(rawData: string, rawContentType: string) {
    return pipe(
      rawContentType.match(/boundary=(.+)/),
      O.fromNullable,
      O.filter((boundaryContentMatch) => boundaryContentMatch.length > 1),
      O.filter((matches) =>
        rawData.replaceAll("\\r\\n", "").endsWith("--" + matches[1] + "--")
      ),
      O.map((matches) => "--" + matches[1])
    )
  },

  splitUsingBoundaryAndNewLines(rawData: string, boundary: string) {
    return pipe(
      rawData,
      S.split(RegExp(`${boundary}-*`)),
      RA.filter((p) => p !== "" && p.includes("name")),
      RA.map((p) =>
        pipe(
          p.replaceAll(/\\r\\n+/g, "\\r\\n"),
          S.split("\\r\\n"),
          RA.filter((q) => q !== "")
        )
      )
    )
  },

  getNameValuePair(pair: readonly string[]) {
    return pipe(
      pair,
      O.fromPredicate((p) => p.length > 1),
      O.chain((pair) => O.fromNullable(pair[0].match(/ name="(\w+)"/))),
      O.filter((nameMatch) => nameMatch.length > 0),
      O.chain((nameMatch) =>
        pipe(
          nameMatch[0],
          S.replace(/"/g, ""),
          S.split("="),
          O.fromPredicate((q) => q.length === 2),
          O.map(
            (nameArr) =>
              [nameArr[1], pair[0].includes("filename") ? "" : pair[1]] as [
                string,
                string
              ]
          )
        )
      )
    )
  },
}

const getFormDataBody = (rawData: string, rawContentType: string | undefined) =>
  pipe(
    multipartFunctions.getBoundary(rawData, rawContentType),
    O.map((boundary) =>
      pipe(
        multipartFunctions.splitUsingBoundaryAndNewLines(rawData, boundary),
        RA.filterMap((p) => multipartFunctions.getNameValuePair(p)),
        RA.toArray
      )
    ),

    O.filter((arr) => arr.length > 0),
    O.map(tupleToRecord)
  )

const getHTMLBody = flow(formatHTML, O.of)

const getXMLBody = (rawData: string) =>
  pipe(
    rawData,
    prettifyXml,
    O.alt(() => O.some(rawData))
  )

const getFormattedJSON = flow(
  safeParseJSON,
  O.map((parsedJSON) => JSON.stringify(parsedJSON, null, 2)),
  O.getOrElse(() => "{}"),
  O.of
)

const getXWWWFormUrlEncodedBody = flow(
  decodeURIComponent,
  (decoded) => decoded.match(/(([^&=]+)=?([^&=]*))/g),
  O.fromNullable,
  O.map((pairs) => pairs.map((p) => p.replace("=", ": ")).join("\n"))
)

/**
 * Parses provided string according to the content type
 * @param rawData Data to be parsed
 * @param contentType Content type of the data
 * @param rawContentType Optional parameter required for multipart/form-data
 * @returns Option of parsed body as string or Record object for multipart/form-data
 */
export function parseBody(
  rawData: string,
  contentType: HoppRESTReqBody["contentType"],
  rawContentType?: string
): O.Option<string | Record<string, string>> {
  switch (contentType) {
    case "application/hal+json":
    case "application/ld+json":
    case "application/vnd.api+json":
    case "application/json":
      return getFormattedJSON(rawData)

    case "application/x-www-form-urlencoded":
      return getXWWWFormUrlEncodedBody(rawData)

    case "multipart/form-data":
      return getFormDataBody(rawData, rawContentType)

    case "text/html":
      return getHTMLBody(rawData)

    case "application/xml":
      return getXMLBody(rawData)

    case "text/plain":
    default:
      return O.some(rawData)
  }
}

/**
 * Formatter Functions
 */

/**
 * Prettifies XML string
 * @param sourceXml The string to format
 * @returns Indented XML string (uses spaces)
 */
function prettifyXml(sourceXml: string) {
  return pipe(
    O.tryCatch(() => {
      const xmlDoc = new DOMParser().parseFromString(
        sourceXml,
        "application/xml"
      )

      if (xmlDoc.querySelector("parsererror")) {
        throw new Error("Unstructured Body")
      }

      const xsltDoc = new DOMParser().parseFromString(
        [
          // describes how we want to modify the XML - indent everything
          '<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform">',
          ' <xsl:strip-space elements="*"/>',
          ' <xsl:template match="para[content-style][not(text())]">', // change to just text() to strip space in text nodes
          ' <xsl:value-of select="normalize-space(.)"/>',
          " </xsl:template>",
          ' <xsl:template match="node()|@*">',
          ' <xsl:copy><xsl:apply-templates select="node()|@*"/></xsl:copy>',
          " </xsl:template>",
          ' <xsl:output indent="yes"/>',
          "</xsl:stylesheet>",
        ].join("\n"),
        "application/xml"
      )

      const xsltProcessor = new XSLTProcessor()
      xsltProcessor.importStylesheet(xsltDoc)
      const resultDoc = xsltProcessor.transformToDocument(xmlDoc)
      const resultXml = new XMLSerializer().serializeToString(resultDoc)

      return resultXml
    })
  )
}

/**
 * Prettifies HTML string
 * @param htmlString The string to format
 * @returns Indented HTML string (uses spaces)
 */
function formatHTML(htmlString: string) {
  const tab = "  "
  let result = ""
  let indent = ""
  const emptyTags = [
    "area",
    "base",
    "br",
    "col",
    "embed",
    "hr",
    "img",
    "input",
    "link",
    "meta",
    "param",
    "source",
    "track",
    "wbr",
  ]

  const spl = htmlString.split(/>\s*</)
  spl.forEach((element) => {
    if (element.match(/^\/\w/)) {
      indent = indent.substring(tab.length)
    }

    result += indent + "<" + element + ">\n"

    if (
      element.match(/^<?\w[^>]*[^/]$/) &&
      !emptyTags.includes(element.match(/^([a-z]*)/i)?.at(1) || "")
    ) {
      indent += tab
    }
  })

  return result.substring(1, result.length - 2)
}
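A sketch (not part of the diff) of the content parser in isolation. The import path is an assumption (body.ts above imports this module as "./contentParser"); note that the XML/HTML branches rely on browser APIs (DOMParser, XSLTProcessor), so the JSON and urlencoded cases are the ones that run outside a browser:

// Hypothetical usage sketch — import path assumed.
import * as O from "fp-ts/Option"
import { detectContentType, parseBody } from "~/helpers/curl/sub_helpers/contentParser"

detectContentType('{"a":1}') // "application/json"
detectContentType("a=1&b=2") // "application/x-www-form-urlencoded"
detectContentType("just some text") // "text/plain" (fallback)

// JSON bodies come back pretty-printed
console.log(O.toNullable(parseBody('{"a":1}', "application/json")))
// → '{\n  "a": 1\n}'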
27 packages/hoppscotch-app/helpers/curl/sub_helpers/cookies.ts Normal file
@@ -0,0 +1,27 @@
import parser from "yargs-parser"
import * as cookie from "cookie"
import * as O from "fp-ts/Option"
import * as S from "fp-ts/string"
import { pipe, flow } from "fp-ts/function"
import { objHasProperty } from "~/helpers/functional/object"

export function getCookies(parsedArguments: parser.Arguments) {
  return pipe(
    parsedArguments,
    O.fromPredicate(objHasProperty("cookie", "string")),

    O.map((args) => args.cookie),

    O.alt(() =>
      pipe(
        parsedArguments,
        O.fromPredicate(objHasProperty("b", "string")),
        O.map((args) => args.b)
      )
    ),

    O.map(flow(S.replace(/^cookie: /i, ""), cookie.parse)),

    O.getOrElse(() => ({}))
  )
}
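A usage sketch (not part of the diff), with the import path assumed from the file header above. Both --cookie and -b are honoured, and the value is handed to the cookie package:

// Hypothetical usage sketch.
import parser from "yargs-parser"
import { getCookies } from "~/helpers/curl/sub_helpers/cookies"

const args = parser("curl https://example.com -b 'SID=abc123; theme=dark'")
console.log(getCookies(args))
// → { SID: "abc123", theme: "dark" }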
76 packages/hoppscotch-app/helpers/curl/sub_helpers/headers.ts Normal file
@@ -0,0 +1,76 @@
import parser from "yargs-parser"
import { pipe, flow } from "fp-ts/function"
import { HoppRESTHeader } from "@hoppscotch/data"
import * as A from "fp-ts/Array"
import * as S from "fp-ts/string"
import * as O from "fp-ts/Option"
import { tupleToRecord } from "~/helpers/functional/record"
import {
  objHasProperty,
  objHasArrayProperty,
} from "~/helpers/functional/object"

const getHeaderPair = flow(
  S.split(": "),
  // must have a key and a value
  O.fromPredicate((arr) => arr.length === 2),
  O.map(([k, v]) => [k.trim(), v?.trim() ?? ""] as [string, string])
)

export function getHeaders(parsedArguments: parser.Arguments) {
  let headers: Record<string, string> = {}

  headers = pipe(
    parsedArguments,
    // make it an array if not already
    O.fromPredicate(objHasProperty("H", "string")),
    O.map((args) => [args.H]),
    O.alt(() =>
      pipe(
        parsedArguments,
        O.fromPredicate(objHasArrayProperty("H", "string")),
        O.map((args) => args.H)
      )
    ),
    O.map(
      flow(
        A.map(getHeaderPair),
        A.filterMap((a) => a),
        tupleToRecord
      )
    ),
    O.getOrElseW(() => ({}))
  )

  if (
    objHasProperty("A", "string")(parsedArguments) ||
    objHasProperty("user-agent", "string")(parsedArguments)
  )
    headers["User-Agent"] = parsedArguments.A ?? parsedArguments["user-agent"]

  const rawContentType =
    headers["Content-Type"] ?? headers["content-type"] ?? ""

  return {
    headers,
    rawContentType,
  }
}

export const recordToHoppHeaders = (
  headers: Record<string, string>
): HoppRESTHeader[] =>
  pipe(
    Object.keys(headers),
    A.map((key) => ({
      key,
      value: headers[key],
      active: true,
    })),
    A.filter(
      (header) =>
        header.key !== "Authorization" &&
        header.key !== "content-type" &&
        header.key !== "Content-Type"
    )
  )
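A sketch (not part of the diff) showing how repeated -H flags are collected and how Content-Type is surfaced separately; the import path is assumed from the file header above:

// Hypothetical usage sketch.
import parser from "yargs-parser"
import { getHeaders, recordToHoppHeaders } from "~/helpers/curl/sub_helpers/headers"

const args = parser(
  "curl https://example.com -H 'Accept: application/json' -H 'Content-Type: application/json'"
)
const { headers, rawContentType } = getHeaders(args)
// headers        → { Accept: "application/json", "Content-Type": "application/json" }
// rawContentType → "application/json"

// Authorization and Content-Type are handled by dedicated helpers, so they are filtered out here
console.log(recordToHoppHeaders(headers))
// → [{ key: "Accept", value: "application/json", active: true }]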
68 packages/hoppscotch-app/helpers/curl/sub_helpers/method.ts Normal file
@@ -0,0 +1,68 @@
import parser from "yargs-parser"
import { pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as R from "fp-ts/Refinement"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
import {
  objHasProperty,
  objHasArrayProperty,
} from "~/helpers/functional/object"

const defaultRESTReq = getDefaultRESTRequest()

const getMethodFromXArg = (parsedArguments: parser.Arguments) =>
  pipe(
    parsedArguments,
    O.fromPredicate(objHasProperty("X", "string")),
    O.map((args) => args.X.trim()),
    O.chain((xarg) =>
      pipe(
        O.fromNullable(
          xarg.match(/GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE/i)
        ),
        O.alt(() => O.fromNullable(xarg.match(/[a-zA-Z]+/)))
      )
    ),
    O.map((method) => method[0])
  )

const getMethodByDeduction = (parsedArguments: parser.Arguments) => {
  if (
    pipe(
      objHasProperty("T", "string"),
      R.or(objHasProperty("upload-file", "string"))
    )(parsedArguments)
  )
    return O.some("put")
  else if (
    pipe(
      objHasProperty("I", "boolean"),
      R.or(objHasProperty("head", "boolean"))
    )(parsedArguments)
  )
    return O.some("head")
  else if (objHasProperty("G", "boolean")(parsedArguments)) return O.some("get")
  else if (
    pipe(
      objHasProperty("d", "string"),
      R.or(objHasArrayProperty("d", "string")),
      R.or(objHasProperty("F", "string")),
      R.or(objHasArrayProperty("F", "string"))
    )(parsedArguments)
  )
    return O.some("post")
  else return O.none
}

/**
 * Get method type from X argument in curl string or
 * find it out through other arguments
 * @param parsedArguments Parsed Arguments object
 * @returns Method string
 */
export const getMethod = (parsedArguments: parser.Arguments): string =>
  pipe(
    getMethodFromXArg(parsedArguments),
    O.alt(() => getMethodByDeduction(parsedArguments)),
    O.getOrElse(() => defaultRESTReq.method)
  )
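A sketch (not part of the diff): the explicit -X value wins, otherwise the method is deduced from flags such as -d, -F, -I or -T. Import path assumed from the file header above:

// Hypothetical usage sketch.
import parser from "yargs-parser"
import { getMethod } from "~/helpers/curl/sub_helpers/method"

console.log(getMethod(parser("curl -X PUT https://example.com"))) // "PUT"

// No -X, but a -d body implies POST via getMethodByDeduction
console.log(getMethod(parser("curl -d 'a=b' https://example.com"))) // "post"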
69 packages/hoppscotch-app/helpers/curl/sub_helpers/preproc.ts Normal file
@@ -0,0 +1,69 @@
import { pipe, flow } from "fp-ts/function"
import * as S from "fp-ts/string"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"

const replaceables: { [key: string]: string } = {
  "--request": "-X",
  "--header": "-H",
  "--url": "",
  "--form": "-F",
  "--data-raw": "--data",
  "--data": "-d",
  "--data-ascii": "-d",
  "--data-binary": "-d",
  "--user": "-u",
  "--get": "-G",
}

const paperCuts = flow(
  // remove '\' and newlines
  S.replace(/ ?\\ ?$/gm, " "),
  S.replace(/\n/g, ""),
  // remove all $ symbols from start of argument values
  S.replace(/\$'/g, "'"),
  S.replace(/\$"/g, '"')
)

// replace --zargs option with -Z
const replaceLongOptions = (curlCmd: string) =>
  pipe(Object.keys(replaceables), A.reduce(curlCmd, replaceFunction))

const replaceFunction = (curlCmd: string, r: string) =>
  pipe(
    curlCmd,
    O.fromPredicate(
      () => r.includes("data") || r.includes("form") || r.includes("header")
    ),
    O.map(S.replace(RegExp(`[ \t]${r}(["' ])`, "g"), ` ${replaceables[r]}$1`)),
    O.alt(() =>
      pipe(
        curlCmd,
        S.replace(RegExp(`[ \t]${r}(["' ])`), ` ${replaceables[r]}$1`),
        O.of
      )
    ),
    O.getOrElse(() => "")
  )

// yargs parses -XPOST as separate arguments. just prescreen for it.
const prescreenXArgs = flow(
  S.replace(
    / -X(GET|POST|PUT|PATCH|DELETE|HEAD|CONNECT|OPTIONS|TRACE)/,
    " -X $1"
  ),
  S.trim
)

/**
 * Sanitizes and makes curl string processable
 * @param curlCommand Raw curl command string
 * @returns Processed curl command string
 */
export const preProcessCurlCommand = (curlCommand: string) =>
  pipe(
    curlCommand,
    O.fromPredicate((curlCmd) => curlCmd.length > 0),
    O.map(flow(paperCuts, replaceLongOptions, prescreenXArgs)),
    O.getOrElse(() => "")
  )
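A sketch (not part of the diff) of the pre-processing pass; the exact whitespace in the output may differ slightly from the comment below:

// Hypothetical usage sketch.
import { preProcessCurlCommand } from "~/helpers/curl/sub_helpers/preproc"

// Long options become short ones, backslash-newlines are folded,
// and a glued "-XPOST" is split so yargs-parser sees "-X POST"
console.log(
  preProcessCurlCommand("curl -XPOST --header 'Accept: text/html' \\\n --url https://example.com")
)
// → roughly: curl -X POST -H 'Accept: text/html'   https://example.com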
43 packages/hoppscotch-app/helpers/curl/sub_helpers/queries.ts Normal file
@@ -0,0 +1,43 @@
import { pipe, flow } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import * as Sep from "fp-ts/Separated"
import { HoppRESTParam } from "@hoppscotch/data"

const isDangling = ([_, value]: [string, string]) => !value

/**
 * Converts queries to HoppRESTParam format and separates dangling ones
 * @param params Array of key value pairs of queries
 * @returns Object containing separated queries and dangling queries
 */
export function getQueries(params: Array<[string, string]>): {
  queries: Array<HoppRESTParam>
  danglingParams: Array<string>
} {
  return pipe(
    params,
    O.of,
    O.map(
      flow(
        A.partition(isDangling),
        Sep.bimap(
          A.map(([key, value]) => ({
            key,
            value,
            active: true,
          })),
          A.map(([key]) => key)
        ),
        (sep) => ({
          queries: sep.left,
          danglingParams: sep.right,
        })
      )
    ),
    O.getOrElseW(() => ({
      queries: [],
      danglingParams: [],
    }))
  )
}
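A sketch (not part of the diff): keys without a value are split off as danglingParams so they can be re-attached to the URL later (see concatParams in url.ts below). The import path is assumed from the file header above:

// Hypothetical usage sketch.
import { getQueries } from "~/helpers/curl/sub_helpers/queries"

const { queries, danglingParams } = getQueries([
  ["q", "hoppscotch"],
  ["page", "2"],
  ["buzz", ""], // empty value → dangling
])
// queries        → [{ key: "q", value: "hoppscotch", active: true },
//                   { key: "page", value: "2", active: true }]
// danglingParams → ["buzz"]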
80 packages/hoppscotch-app/helpers/curl/sub_helpers/url.ts Normal file
@@ -0,0 +1,80 @@
import parser from "yargs-parser"
import { pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
import { stringArrayJoin } from "~/helpers/functional/array"

const defaultRESTReq = getDefaultRESTRequest()

const getProtocolForBaseURL = (baseURL: string) =>
  pipe(
    // get the base URL
    /^([^\s:@]+:[^\s:@]+@)?([^:/\s]+)([:]*)/.exec(baseURL),
    O.fromNullable,
    O.filter((burl) => burl.length > 1),
    O.map((burl) => burl[2]),
    // set protocol to http for local URLs
    O.map((burl) =>
      burl === "localhost" || burl === "127.0.0.1"
        ? "http://" + baseURL
        : "https://" + baseURL
    )
  )

/**
 * Processes URL string and returns the URL object
 * @param parsedArguments Parsed Arguments object
 * @returns URL object
 */
export function parseURL(parsedArguments: parser.Arguments) {
  return pipe(
    // contains raw url string
    parsedArguments._[1],
    O.fromNullable,
    // preprocess url string
    O.map((u) => u.toString().replace(/["']/g, "").trim()),
    O.chain((u) =>
      pipe(
        // check if protocol is available
        /^[^:\s]+(?=:\/\/)/.exec(u),
        O.fromNullable,
        O.map((_) => u),
        O.alt(() => getProtocolForBaseURL(u))
      )
    ),
    O.map((u) => new URL(u)),
    // no url found
    O.getOrElse(() => new URL(defaultRESTReq.endpoint))
  )
}

/**
 * Joins dangling params to origin
 * @param urlObject URL object containing origin and pathname
 * @param danglingParams Keys of params with empty values
 * @returns origin string concatenated with dangling params
 */
export function concatParams(urlObject: URL, danglingParams: string[]) {
  return pipe(
    O.Do,

    O.bind("originString", () =>
      pipe(
        urlObject.origin,
        O.fromPredicate((h) => h !== "")
      )
    ),

    O.map(({ originString }) =>
      pipe(
        danglingParams,
        O.fromPredicate((dp) => dp.length > 0),
        O.map(stringArrayJoin("&")),
        O.map((h) => originString + (urlObject.pathname || "") + "?" + h),
        O.getOrElse(() => originString + (urlObject.pathname || ""))
      )
    ),

    O.getOrElse(() => defaultRESTReq.endpoint)
  )
}
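A sketch (not part of the diff) of URL handling: the scheme defaults to http:// for localhost/127.0.0.1 and https:// otherwise, and dangling query keys are appended back onto origin + path. Import path assumed from the file header above:

// Hypothetical usage sketch.
import parser from "yargs-parser"
import { parseURL, concatParams } from "~/helpers/curl/sub_helpers/url"

const url = parseURL(parser("curl localhost:9900/hello"))
console.log(url.href) // "http://localhost:9900/hello"

console.log(concatParams(url, ["buzz"]))
// → "http://localhost:9900/hello?buzz"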
@@ -18,3 +18,74 @@ export const objFieldMatches =
  // eslint-disable-next-line no-unused-vars
  (obj: T): obj is T & { [_ in K]: V } =>
    matches.findIndex((x) => isEqual(obj[fieldName], x)) !== -1
type JSPrimitive =
  | "undefined"
  | "object"
  | "boolean"
  | "number"
  | "bigint"
  | "string"
  | "symbol"
  | "function"

type TypeFromPrimitive<P extends JSPrimitive | undefined> =
  P extends "undefined"
    ? undefined
    : P extends "object"
    ? object | null // typeof null === "object"
    : P extends "boolean"
    ? boolean
    : P extends "number"
    ? number
    : P extends "bigint"
    ? BigInt
    : P extends "string"
    ? string
    : P extends "symbol"
    ? Symbol
    : P extends "function"
    ? Function
    : unknown

type TypeFromPrimitiveArray<P extends JSPrimitive | undefined> =
  P extends "undefined"
    ? undefined
    : P extends "object"
    ? object[] | null
    : P extends "boolean"
    ? boolean[]
    : P extends "number"
    ? number[]
    : P extends "bigint"
    ? BigInt[]
    : P extends "string"
    ? string[]
    : P extends "symbol"
    ? Symbol[]
    : P extends "function"
    ? Function[]
    : unknown[]

export const objHasProperty =
  <O extends object, K extends string, P extends JSPrimitive>(
    prop: K,
    type: P
  ) =>
  // eslint-disable-next-line
  (obj: O): obj is O & { [_ in K]: TypeFromPrimitive<P> } =>
    // eslint-disable-next-line
    prop in obj && typeof (obj as any)[prop] === type

export const objHasArrayProperty =
  <O extends object, K extends string, P extends JSPrimitive>(
    prop: K,
    type: P
  ) =>
  // eslint-disable-next-line
  (obj: O): obj is O & { [_ in K]: TypeFromPrimitiveArray<P> } =>
    prop in obj &&
    Array.isArray((obj as any)[prop]) &&
    (obj as any)[prop].every(
      (val: unknown) => typeof val === type // eslint-disable-line
    )
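A sketch (not part of the diff) of these type guards in action on yargs-parser output; a single -H gives a string, repeated -H gives a string array:

// Hypothetical usage sketch.
import parser from "yargs-parser"
import { objHasProperty, objHasArrayProperty } from "~/helpers/functional/object"

const single = parser("curl -H 'Accept: text/html' https://example.com")
const multi = parser("curl -H 'A: 1' -H 'B: 2' https://example.com")

if (objHasProperty("H", "string")(single)) {
  // single.H is narrowed to string here
  console.log(single.H.toUpperCase())
}
if (objHasArrayProperty("H", "string")(multi)) {
  // multi.H is narrowed to string[] here
  console.log(multi.H.length) // 2
}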
19 packages/hoppscotch-app/helpers/functional/option.ts Normal file
@@ -0,0 +1,19 @@
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import { pipe } from "fp-ts/function"

/**
 * Tries to match one of the given predicates.
 * If a predicate is matched, the associated value is returned in a Some.
 * Else if none of the predicates is matched, None is returned.
 * @param choice An array of tuples having a predicate function and the selected value
 * @returns A function which takes the input and returns an Option
 */
export const optionChoose =
  <T, V>(choice: Array<[(x: T) => boolean, V]>) =>
  (input: T): O.Option<V> =>
    pipe(
      choice,
      A.findFirst(([pred]) => pred(input)),
      O.map(([_, value]) => value)
    )
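A sketch (not part of the diff) of optionChoose outside the curl parser: the first matching predicate selects the value, otherwise None is returned.

// Hypothetical usage sketch.
import * as O from "fp-ts/Option"
import { optionChoose } from "~/helpers/functional/option"

const classify = optionChoose<number, string>([
  [(n) => n < 0, "negative"],
  [(n) => n === 0, "zero"],
  [(n) => n > 0, "positive"],
])

classify(5) // O.some("positive")
classify(NaN) // O.none — no predicate matches
console.log(O.getOrElse(() => "unknown")(classify(NaN))) // "unknown"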