fix: curl parser url sanitisation (#2366)

kyteinsky
2022-05-27 14:48:33 +05:30
committed by GitHub
parent b1a2c9e9d5
commit 83bdd03f43
6 changed files with 110 additions and 34 deletions


@@ -768,11 +768,52 @@ const samples = [
testScript: "",
}),
},
+{
+command: `curl \`
+google.com -H "content-type: application/json"`,
+response: makeRESTRequest({
+method: "GET",
+name: "Untitled request",
+endpoint: "https://google.com/",
+auth: {
+authType: "none",
+authActive: true,
+},
+body: {
+contentType: null,
+body: null,
+},
+params: [],
+headers: [],
+preRequestScript: "",
+testScript: "",
+}),
+},
+{
+command: `curl 192.168.0.24:8080/ping`,
+response: makeRESTRequest({
+method: "GET",
+name: "Untitled request",
+endpoint: "http://192.168.0.24:8080/ping",
+auth: {
+authType: "none",
+authActive: true,
+},
+body: {
+contentType: null,
+body: null,
+},
+params: [],
+headers: [],
+preRequestScript: "",
+testScript: "",
+}),
+},
]
describe("parseCurlToHoppRESTReq", () => {
describe("Parse curl command to Hopp REST Request", () => {
for (const [i, { command, response }] of samples.entries()) {
-test(`matches expectation for sample #${i + 1}`, () => {
+test(`for sample #${i + 1}:\n\n${command}`, () => {
expect(parseCurlToHoppRESTReq(command)).toEqual(response)
})
}
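Side note on the expected endpoint values above: the url sub-helper further down in this commit returns a WHATWG URL object, and (assuming the endpoint comes from that object's serialisation) a bare host gains a trailing slash, which is why the first sample expects "https://google.com/". A quick self-contained check using only the built-in URL class:

// Built-in URL normalisation: a bare host is serialised with a trailing "/",
// while an explicit path is kept as-is.
console.log(new URL("https://google.com").href) // "https://google.com/"
console.log(new URL("http://192.168.0.24:8080/ping").href) // "http://192.168.0.24:8080/ping"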


@@ -12,7 +12,7 @@ import { getHeaders, recordToHoppHeaders } from "./sub_helpers/headers"
// import { getCookies } from "./sub_helpers/cookies"
import { getQueries } from "./sub_helpers/queries"
import { getMethod } from "./sub_helpers/method"
-import { concatParams, parseURL } from "./sub_helpers/url"
+import { concatParams, getURLObject } from "./sub_helpers/url"
import { preProcessCurlCommand } from "./sub_helpers/preproc"
import { getBody, getFArgumentMultipartData } from "./sub_helpers/body"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
@@ -42,7 +42,7 @@ export const parseCurlCommand = (curlCommand: string) => {
const method = getMethod(parsedArguments)
// const cookies = getCookies(parsedArguments)
-const urlObject = parseURL(parsedArguments)
+const urlObject = getURLObject(parsedArguments)
const auth = getAuthObject(parsedArguments, headers, urlObject)
let rawData: string | string[] = pipe(
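For orientation, a rough usage sketch of the public entry point the spec file above exercises; the import path is a guess, and only fields asserted by the tests are shown.

// Hypothetical import path; parseCurlToHoppRESTReq is what the spec imports.
import { parseCurlToHoppRESTReq } from "~/helpers/curl"

const req = parseCurlToHoppRESTReq("curl 192.168.0.24:8080/ping")
console.log(req.endpoint) // "http://192.168.0.24:8080/ping" per the new sample
console.log(req.method) // "GET"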


@@ -158,12 +158,14 @@ const getXMLBody = (rawData: string) =>
O.alt(() => O.some(rawData))
)
-const getFormattedJSON = flow(
-safeParseJSON,
-O.map((parsedJSON) => JSON.stringify(parsedJSON, null, 2)),
-O.getOrElse(() => "{}"),
-O.of
-)
+const getFormattedJSON = (jsonString: string) =>
+pipe(
+jsonString.replaceAll('\\"', '"'),
+safeParseJSON,
+O.map((parsedJSON) => JSON.stringify(parsedJSON, null, 2)),
+O.getOrElse(() => "{ }"),
+O.of
+)
const getXWWWFormUrlEncodedBody = flow(
decodeURIComponent,
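A minimal plain-TypeScript restatement of what the reworked getFormattedJSON does (the real helper is the fp-ts pipeline above, and safeParseJSON is assumed to be a try/catch JSON.parse wrapper): restore \" sequences left over from shell quoting, pretty-print valid JSON, and fall back to "{ }" otherwise.

const formatRequestJSON = (jsonString: string): string => {
  try {
    // `\"` sequences can survive the curl argument parsing; restore plain quotes first
    const parsed = JSON.parse(jsonString.replaceAll('\\"', '"'))
    return JSON.stringify(parsed, null, 2)
  } catch {
    return "{ }" // unparsable body -> empty JSON placeholder
  }
}

formatRequestJSON('{\\"name\\":\\"hopp\\"}') // '{\n  "name": "hopp"\n}'
formatRequestJSON("not json") // "{ }"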


@@ -19,10 +19,11 @@ const replaceables: { [key: string]: string } = {
const paperCuts = flow(
// remove '\' and newlines
S.replace(/ ?\\ ?$/gm, " "),
S.replace(/\n/g, ""),
S.replace(/\n/g, " "),
// remove all $ symbols from start of argument values
S.replace(/\$'/g, "'"),
S.replace(/\$"/g, '"')
S.replace(/\$"/g, '"'),
S.trim
)
// replace --zargs option with -Z
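A standalone sketch of the adjusted clean-up steps, using equivalent plain string methods rather than the exported S.* pipeline: line-continuation backslashes are dropped, newlines now become spaces instead of being deleted (so a token on the next line is not glued to the previous one), leading $ before quoted values is stripped, and the result is trimmed.

const flattenCurlCommand = (cmd: string): string =>
  cmd
    .replace(/ ?\\ ?$/gm, " ") // drop '\' line continuations
    .replace(/\n/g, " ") // newline -> space (previously removed entirely)
    .replace(/\$'/g, "'")
    .replace(/\$"/g, '"')
    .trim()

flattenCurlCommand('curl \\\ngoogle.com -H "content-type: application/json"')
// -> 'curl  google.com -H "content-type: application/json"'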


@@ -1,48 +1,80 @@
import parser from "yargs-parser"
import { pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import { getDefaultRESTRequest } from "~/newstore/RESTSession"
import { stringArrayJoin } from "~/helpers/functional/array"
const defaultRESTReq = getDefaultRESTRequest()
-const getProtocolForBaseURL = (baseURL: string) =>
+const getProtocolFromURL = (url: string) =>
pipe(
// get the base URL
-/^([^\s:@]+:[^\s:@]+@)?([^:/\s]+)([:]*)/.exec(baseURL),
+/^([^\s:@]+:[^\s:@]+@)?([^:/\s]+)([:]*)/.exec(url),
O.fromNullable,
O.filter((burl) => burl.length > 1),
O.map((burl) => burl[2]),
// set protocol to http for local URLs
O.map((burl) =>
burl === "localhost" || burl === "127.0.0.1"
? "http://" + baseURL
: "https://" + baseURL
burl === "localhost" ||
burl === "2130706433" ||
/127(\.0){0,2}\.1/.test(burl) ||
/0177(\.0){0,2}\.1/.test(burl) ||
/0x7f(\.0){0,2}\.1/.test(burl) ||
/192\.168(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){2}/.test(burl) ||
/10(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}/.test(burl)
? "http://" + url
: "https://" + url
)
)
+/**
+ * Checks if the URL is valid using the URL constructor
+ * @param urlString URL string (with protocol)
+ * @returns boolean whether the URL is valid using the inbuilt URL class
+ */
+const isURLValid = (urlString: string) =>
+pipe(
+O.tryCatch(() => new URL(urlString)),
+O.isSome
+)
+/**
+ * Checks and returns URL object for the valid URL
+ * @param urlText Raw URL string provided by argument parser
+ * @returns Option of URL object
+ */
+const parseURL = (urlText: string | number) =>
+pipe(
+urlText,
+O.fromNullable,
+// preprocess url string
+O.map((u) => u.toString().replaceAll(/[^a-zA-Z0-9_\-./?&=:@%+#,;\s]/g, "")),
+O.filter((u) => u.length > 0),
+O.chain((u) =>
+pipe(
+u,
+// check if protocol is available
+O.fromPredicate(
+(url: string) => /^[^:\s]+(?=:\/\/)/.exec(url) !== null
+),
+O.alt(() => getProtocolFromURL(u))
+)
+),
+O.filter(isURLValid),
+O.map((u) => new URL(u))
+)
/**
* Processes URL string and returns the URL object
* @param parsedArguments Parsed Arguments object
* @returns URL object
*/
-export function parseURL(parsedArguments: parser.Arguments) {
+export function getURLObject(parsedArguments: parser.Arguments) {
return pipe(
-// contains raw url string
-parsedArguments._[1],
-O.fromNullable,
-// preprocess url string
-O.map((u) => u.toString().replace(/["']/g, "").trim()),
-O.chain((u) =>
-pipe(
-// check if protocol is available
-/^[^:\s]+(?=:\/\/)/.exec(u),
-O.fromNullable,
-O.map((_) => u),
-O.alt(() => getProtocolForBaseURL(u))
-)
-),
-O.map((u) => new URL(u)),
+// contains raw url strings
+parsedArguments._.slice(1),
+A.findFirstMap(parseURL),
// no url found
O.getOrElse(() => new URL(defaultRESTReq.endpoint))
)
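For readers skimming the fp-ts pipeline, a rough plain-TypeScript restatement of the decision getProtocolFromURL now makes; the host extraction below is a simplification of the regex above, and the names looksLocal and withGuessedProtocol are made up for this sketch. Loopback and private-range hosts get "http://", everything else defaults to "https://".

const looksLocal = (host: string): boolean =>
  host === "localhost" ||
  host === "2130706433" || // 127.0.0.1 written as a single decimal integer
  /127(\.0){0,2}\.1/.test(host) || // 127.1 / 127.0.1 / 127.0.0.1
  /0177(\.0){0,2}\.1/.test(host) || // octal loopback spellings
  /0x7f(\.0){0,2}\.1/.test(host) || // hex loopback spellings
  /192\.168(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){2}/.test(host) ||
  /10(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}/.test(host)

const withGuessedProtocol = (url: string): string => {
  // crude host extraction: drop an optional user:pass@ prefix, cut at ':' or '/'
  const host = url.replace(/^[^\s:@]+:[^\s:@]+@/, "").split(/[:/]/)[0]
  return (looksLocal(host) ? "http://" : "https://") + url
}

withGuessedProtocol("192.168.0.24:8080/ping") // "http://192.168.0.24:8080/ping"
withGuessedProtocol("google.com") // "https://google.com"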


@@ -17,6 +17,6 @@ export const trace = <T>(x: T) => {
export const namedTrace =
(name: string) =>
<T>(x: T) => {
-console.log(`${name}: `, x)
+console.log(`${name}:`, x)
return x
}