From 819b111383b85592aa2cf3dc83f3c41ad851f8eb Mon Sep 17 00:00:00 2001
From: Andrew Bastin
Date: Thu, 17 Feb 2022 19:13:28 +0530
Subject: [PATCH] fix: params, headers, url encoded syncing issues

---
 .../components/graphql/RequestOptions.vue   |  93 ++++++----
 .../components/http/Headers.vue             | 135 ++++++++++-------
 .../components/http/Parameters.vue          | 126 +++++++++-------
 .../components/http/URLEncodedParams.vue    | 140 ++++++++++--------
 .../helpers/functional/error.ts             |   3 +
 .../helpers/functional/object.ts            |  10 ++
 packages/hoppscotch-data/src/rawKeyValue.ts |  11 +-
 7 files changed, 329 insertions(+), 189 deletions(-)
 create mode 100644 packages/hoppscotch-app/helpers/functional/error.ts
 create mode 100644 packages/hoppscotch-app/helpers/functional/object.ts

diff --git a/packages/hoppscotch-app/components/graphql/RequestOptions.vue b/packages/hoppscotch-app/components/graphql/RequestOptions.vue
index 69b7f2385..bc29d315f 100644
--- a/packages/hoppscotch-app/components/graphql/RequestOptions.vue
+++ b/packages/hoppscotch-app/components/graphql/RequestOptions.vue
@@ -157,7 +157,7 @@
(null) @@ -345,8 +354,9 @@ useCodemirror(bulkEditor, bulkHeaders, { const headers = useStream(gqlHeaders$, [], setGQLHeaders) as Ref // The UI representation of the headers list (has the empty end header) -const workingHeaders = ref([ +const workingHeaders = ref>([ { + id: idTicker.value++, key: "", value: "", active: true, @@ -360,6 +370,7 @@ watch(workingHeaders, (headersList) => { headersList[headersList.length - 1].key !== "" ) { workingHeaders.value.push({ + id: idTicker.value++, key: "", value: "", active: true, @@ -372,42 +383,72 @@ watch( headers, (newHeadersList) => { // Sync should overwrite working headers - const filteredWorkingHeaders = workingHeaders.value.filter( - (e) => e.key !== "" + const filteredWorkingHeaders = pipe( + workingHeaders.value, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) + ) + ) + + const filteredBulkHeaders = pipe( + parseRawKeyValueEntriesE(bulkHeaders.value), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) ) if (!isEqual(newHeadersList, filteredWorkingHeaders)) { - workingHeaders.value = newHeadersList + workingHeaders.value = pipe( + newHeadersList, + A.map((x) => ({ id: idTicker.value++, ...x })) + ) + } + + if (!isEqual(newHeadersList, filteredBulkHeaders)) { + bulkHeaders.value = rawKeyValueEntriesToString(newHeadersList) } }, { immediate: true } ) watch(workingHeaders, (newWorkingHeaders) => { - const fixedHeaders = newWorkingHeaders.filter((e) => e.key !== "") + const fixedHeaders = pipe( + newWorkingHeaders, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) + ) + ) + if (!isEqual(headers.value, fixedHeaders)) { - headers.value = fixedHeaders + headers.value = cloneDeep(fixedHeaders) } }) // Bulk Editor Syncing with Working Headers -watch(bulkHeaders, () => { - try { - const transformation = pipe( - bulkHeaders.value, - parseRawKeyValueEntriesE, - E.map(RA.toArray), - E.getOrElse(() => [] as RawKeyValueEntry[]) - ) +watch(bulkHeaders, (newBulkHeaders) => { + const filteredBulkHeaders = pipe( + parseRawKeyValueEntriesE(newBulkHeaders), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) + ) - const filteredHeaders = workingHeaders.value.filter((x) => x.key !== "") - - if (!isEqual(filteredHeaders, transformation)) { - workingHeaders.value = transformation - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + if (!isEqual(headers.value, filteredBulkHeaders)) { + headers.value = filteredBulkHeaders } }) @@ -435,13 +476,14 @@ watch(workingHeaders, (newHeadersList) => { const addHeader = () => { workingHeaders.value.push({ + id: idTicker.value++, key: "", value: "", active: true, }) } -const updateHeader = (index: number, header: GQLHeader) => { +const updateHeader = (index: number, header: GQLHeader & { id: number }) => { workingHeaders.value = workingHeaders.value.map((h, i) => i === index ? header : h ) @@ -486,6 +528,7 @@ const clearContent = () => { // set headers list to the initial state workingHeaders.value = [ { + id: idTicker.value++, key: "", value: "", active: true, diff --git a/packages/hoppscotch-app/components/http/Headers.vue b/packages/hoppscotch-app/components/http/Headers.vue index c06505fd6..ad975f43a 100644 --- a/packages/hoppscotch-app/components/http/Headers.vue +++ b/packages/hoppscotch-app/components/http/Headers.vue @@ -40,7 +40,7 @@
import { Ref, ref, watch } from "@nuxtjs/composition-api" import isEqual from "lodash/isEqual" -import clone from "lodash/clone" import { HoppRESTHeader, parseRawKeyValueEntriesE, rawKeyValueEntriesToString, RawKeyValueEntry, } from "@hoppscotch/data" -import { pipe } from "fp-ts/function" +import { flow, pipe } from "fp-ts/function" import * as RA from "fp-ts/ReadonlyArray" import * as E from "fp-ts/Either" +import * as O from "fp-ts/Option" +import * as A from "fp-ts/Array" +import cloneDeep from "lodash/cloneDeep" import { useCodemirror } from "~/helpers/editor/codemirror" import { restHeaders$, setRESTHeaders } from "~/newstore/RESTSession" import { commonHeaders } from "~/helpers/headers" import { useI18n, useStream, useToast } from "~/helpers/utils/composables" import linter from "~/helpers/editor/linting/rawKeyValue" +import { throwError } from "~/helpers/functional/error" +import { objRemoveKey } from "~/helpers/functional/object" const t = useI18n() const toast = useToast() +const idTicker = ref(0) + const bulkMode = ref(false) const bulkHeaders = ref("") const bulkEditor = ref(null) @@ -182,22 +191,24 @@ const headers = useStream(restHeaders$, [], setRESTHeaders) as Ref< HoppRESTHeader[] > -// The UI representation of the headers list (has the empty end header) -const workingHeaders = ref([ +// The UI representation of the headers list (has the empty end headers) +const workingHeaders = ref>([ { + id: idTicker.value++, key: "", value: "", active: true, }, ]) -// Rule: Working Headers always have one empty header or the last element is always an empty header +// Rule: Working Headers always have last element is always an empty header watch(workingHeaders, (headersList) => { if ( headersList.length > 0 && headersList[headersList.length - 1].key !== "" ) { workingHeaders.value.push({ + id: idTicker.value++, key: "", value: "", active: true, @@ -205,88 +216,99 @@ watch(workingHeaders, (headersList) => { } }) -// Sync logic between headers and working headers +// Sync logic between headers and working/bulk headers watch( headers, (newHeadersList) => { // Sync should overwrite working headers - const filteredWorkingHeaders = workingHeaders.value.filter( - (e) => e.key !== "" + const filteredWorkingHeaders = pipe( + workingHeaders.value, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) + ) + ) + + const filteredBulkHeaders = pipe( + parseRawKeyValueEntriesE(bulkHeaders.value), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) ) if (!isEqual(newHeadersList, filteredWorkingHeaders)) { - workingHeaders.value = newHeadersList + workingHeaders.value = pipe( + newHeadersList, + A.map((x) => ({ id: idTicker.value++, ...x })) + ) + } + + if (!isEqual(newHeadersList, filteredBulkHeaders)) { + bulkHeaders.value = rawKeyValueEntriesToString(newHeadersList) } }, { immediate: true } ) watch(workingHeaders, (newWorkingHeaders) => { - const fixedHeaders = newWorkingHeaders.filter((e) => e.key !== "") - if (!isEqual(headers.value, fixedHeaders)) { - headers.value = fixedHeaders - } -}) - -// Bulk Editor Syncing with Working Headers -watch(bulkHeaders, () => { - try { - const transformation = pipe( - bulkHeaders.value, - parseRawKeyValueEntriesE, - E.map(RA.toArray), - E.getOrElse(() => [] as RawKeyValueEntry[]) + const fixedHeaders = pipe( + newWorkingHeaders, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) ) + ) - const filteredHeaders = 
workingHeaders.value.filter((x) => x.key !== "") - - if (!isEqual(filteredHeaders, transformation)) { - workingHeaders.value = transformation - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + if (!isEqual(headers.value, fixedHeaders)) { + headers.value = cloneDeep(fixedHeaders) } }) -watch(workingHeaders, (newHeadersList) => { - // If we are in bulk mode, don't apply direct changes - if (bulkMode.value) return +watch(bulkHeaders, (newBulkHeaders) => { + const filteredBulkHeaders = pipe( + parseRawKeyValueEntriesE(newBulkHeaders), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) + ) - try { - const currentBulkHeaders = bulkHeaders.value.split("\n").map((item) => ({ - key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), - value: item.substring(item.indexOf(":") + 1).trimLeft(), - active: !item.trim().startsWith("#"), - })) - - const filteredHeaders = newHeadersList.filter((x) => x.key !== "") - - if (!isEqual(currentBulkHeaders, filteredHeaders)) { - bulkHeaders.value = rawKeyValueEntriesToString(filteredHeaders) - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + if (!isEqual(headers.value, filteredBulkHeaders)) { + headers.value = filteredBulkHeaders } }) const addHeader = () => { workingHeaders.value.push({ + id: idTicker.value++, key: "", value: "", active: true, }) } -const updateHeader = (index: number, header: HoppRESTHeader) => { +const updateHeader = ( + index: number, + header: HoppRESTHeader & { id: number } +) => { workingHeaders.value = workingHeaders.value.map((h, i) => i === index ? header : h ) } const deleteHeader = (index: number) => { - const headersBeforeDeletion = clone(workingHeaders.value) + const headersBeforeDeletion = cloneDeep(workingHeaders.value) if ( !( @@ -317,13 +339,18 @@ const deleteHeader = (index: number) => { }) } - workingHeaders.value.splice(index, 1) + workingHeaders.value = pipe( + workingHeaders.value, + A.deleteAt(index), + O.getOrElseW(() => throwError("Working Headers Deletion Out of Bounds")) + ) } const clearContent = () => { - // set headers list to the initial state + // set params list to the initial state workingHeaders.value = [ { + id: idTicker.value++, key: "", value: "", active: true, diff --git a/packages/hoppscotch-app/components/http/Parameters.vue b/packages/hoppscotch-app/components/http/Parameters.vue index 447e4d930..bcadffaf0 100644 --- a/packages/hoppscotch-app/components/http/Parameters.vue +++ b/packages/hoppscotch-app/components/http/Parameters.vue @@ -40,7 +40,7 @@
import { ref, watch } from "@nuxtjs/composition-api" -import { pipe } from "fp-ts/function" +import { flow, pipe } from "fp-ts/function" +import * as O from "fp-ts/Option" +import * as A from "fp-ts/Array" import * as RA from "fp-ts/ReadonlyArray" import * as E from "fp-ts/Either" import { @@ -139,16 +144,20 @@ import { RawKeyValueEntry, } from "@hoppscotch/data" import isEqual from "lodash/isEqual" -import clone from "lodash/clone" +import cloneDeep from "lodash/cloneDeep" import linter from "~/helpers/editor/linting/rawKeyValue" import { useCodemirror } from "~/helpers/editor/codemirror" import { useI18n, useToast, useStream } from "~/helpers/utils/composables" import { restParams$, setRESTParams } from "~/newstore/RESTSession" +import { throwError } from "~/helpers/functional/error" +import { objRemoveKey } from "~/helpers/functional/object" const t = useI18n() const toast = useToast() +const idTicker = ref(0) + const bulkMode = ref(false) const bulkParams = ref("") @@ -170,8 +179,9 @@ useCodemirror(bulkEditor, bulkParams, { const params = useStream(restParams$, [], setRESTParams) // The UI representation of the parameters list (has the empty end param) -const workingParams = ref([ +const workingParams = ref>([ { + id: idTicker.value++, key: "", value: "", active: true, @@ -182,6 +192,7 @@ const workingParams = ref([ watch(workingParams, (paramsList) => { if (paramsList.length > 0 && paramsList[paramsList.length - 1].key !== "") { workingParams.value.push({ + id: idTicker.value++, key: "", value: "", active: true, @@ -189,88 +200,96 @@ watch(workingParams, (paramsList) => { } }) -// Sync logic between params and working params +// Sync logic between params and working/bulk params watch( params, (newParamsList) => { // Sync should overwrite working params - const filteredWorkingParams = workingParams.value.filter( - (e) => e.key !== "" + const filteredWorkingParams: HoppRESTParam[] = pipe( + workingParams.value, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) + ) + ) + + const filteredBulkParams = pipe( + parseRawKeyValueEntriesE(bulkParams.value), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) ) if (!isEqual(newParamsList, filteredWorkingParams)) { - workingParams.value = newParamsList + workingParams.value = pipe( + newParamsList, + A.map((x) => ({ id: idTicker.value++, ...x })) + ) + } + + if (!isEqual(newParamsList, filteredBulkParams)) { + bulkParams.value = rawKeyValueEntriesToString(newParamsList) } }, { immediate: true } ) watch(workingParams, (newWorkingParams) => { - const fixedParams = newWorkingParams.filter((e) => e.key !== "") - if (!isEqual(params.value, fixedParams)) { - params.value = fixedParams - } -}) - -// Bulk Editor Syncing with Working Params -watch(bulkParams, () => { - try { - const transformation = pipe( - bulkParams.value, - parseRawKeyValueEntriesE, - E.map(RA.toArray), - E.getOrElse(() => [] as RawKeyValueEntry[]) + const fixedParams = pipe( + newWorkingParams, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) ) + ) - const filteredParams = workingParams.value.filter((x) => x.key !== "") - - if (!isEqual(filteredParams, transformation)) { - workingParams.value = transformation - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + if (!isEqual(params.value, fixedParams)) { + params.value = cloneDeep(fixedParams) } }) -watch(workingParams, (newParamsList) => { 
- // If we are in bulk mode, don't apply direct changes - if (bulkMode.value) return +watch(bulkParams, (newBulkParams) => { + const filteredBulkParams = pipe( + parseRawKeyValueEntriesE(newBulkParams), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) + ) - try { - const currentBulkParams = bulkParams.value.split("\n").map((item) => ({ - key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), - value: item.substring(item.indexOf(":") + 1).trimLeft(), - active: !item.trim().startsWith("#"), - })) - - const filteredParams = newParamsList.filter((x) => x.key !== "") - - if (!isEqual(currentBulkParams, filteredParams)) { - bulkParams.value = rawKeyValueEntriesToString(filteredParams) - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + if (!isEqual(params.value, filteredBulkParams)) { + params.value = filteredBulkParams } }) const addParam = () => { workingParams.value.push({ + id: idTicker.value++, key: "", value: "", active: true, }) } -const updateParam = (index: number, param: HoppRESTParam) => { +const updateParam = (index: number, param: HoppRESTParam & { id: number }) => { workingParams.value = workingParams.value.map((h, i) => i === index ? param : h ) } const deleteParam = (index: number) => { - const paramsBeforeDeletion = clone(workingParams.value) + const paramsBeforeDeletion = cloneDeep(workingParams.value) if ( !( @@ -301,13 +320,18 @@ const deleteParam = (index: number) => { }) } - workingParams.value.splice(index, 1) + workingParams.value = pipe( + workingParams.value, + A.deleteAt(index), + O.getOrElseW(() => throwError("Working Params Deletion Out of Bounds")) + ) } const clearContent = () => { // set params list to the initial state workingParams.value = [ { + id: idTicker.value++, key: "", value: "", active: true, diff --git a/packages/hoppscotch-app/components/http/URLEncodedParams.vue b/packages/hoppscotch-app/components/http/URLEncodedParams.vue index 251c104e4..137983162 100644 --- a/packages/hoppscotch-app/components/http/URLEncodedParams.vue +++ b/packages/hoppscotch-app/components/http/URLEncodedParams.vue @@ -40,7 +40,7 @@
import { computed, Ref, ref, watch } from "@nuxtjs/composition-api" import isEqual from "lodash/isEqual" -import clone from "lodash/clone" import { HoppRESTReqBody, parseRawKeyValueEntries, @@ -138,17 +140,24 @@ import { rawKeyValueEntriesToString, RawKeyValueEntry, } from "@hoppscotch/data" -import { pipe } from "fp-ts/function" +import { flow, pipe } from "fp-ts/function" +import * as A from "fp-ts/Array" +import * as O from "fp-ts/Option" import * as RA from "fp-ts/ReadonlyArray" import * as E from "fp-ts/Either" +import { cloneDeep } from "lodash" import { useCodemirror } from "~/helpers/editor/codemirror" import linter from "~/helpers/editor/linting/rawKeyValue" import { useRESTRequestBody } from "~/newstore/RESTSession" import { pluckRef, useI18n, useToast } from "~/helpers/utils/composables" +import { objRemoveKey } from "~/helpers/functional/object" +import { throwError } from "~/helpers/functional/error" const t = useI18n() const toast = useToast() +const idTicker = ref(0) + const bulkMode = ref(false) const bulkUrlEncodedParams = ref("") const bulkEditor = ref(null) @@ -183,8 +192,9 @@ const urlEncodedParams = computed({ }) // The UI representation of the urlEncodedParams list (has the empty end urlEncodedParam) -const workingUrlEncodedParams = ref([ +const workingUrlEncodedParams = ref>([ { + id: idTicker.value++, key: "", value: "", active: true, @@ -198,6 +208,7 @@ watch(workingUrlEncodedParams, (urlEncodedParamList) => { urlEncodedParamList[urlEncodedParamList.length - 1].key !== "" ) { workingUrlEncodedParams.value.push({ + id: idTicker.value++, key: "", value: "", active: true, @@ -209,91 +220,97 @@ watch(workingUrlEncodedParams, (urlEncodedParamList) => { watch( urlEncodedParams, (newurlEncodedParamList) => { - const filteredWorkingUrlEncodedParams = - workingUrlEncodedParams.value.filter((e) => e.key !== "") + const filteredWorkingUrlEncodedParams = pipe( + workingUrlEncodedParams.value, + A.filterMap( + flow( + O.fromPredicate((x) => x.key !== ""), + O.map(objRemoveKey("id")) + ) + ) + ) + + const filteredBulkUrlEncodedParams = pipe( + parseRawKeyValueEntriesE(bulkUrlEncodedParams.value), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray + ) + ) + ) if (!isEqual(newurlEncodedParamList, filteredWorkingUrlEncodedParams)) { - workingUrlEncodedParams.value = newurlEncodedParamList + workingUrlEncodedParams.value = pipe( + newurlEncodedParamList, + A.map((x) => ({ id: idTicker.value++, ...x })) + ) + } + + if (!isEqual(newurlEncodedParamList, filteredBulkUrlEncodedParams)) { + bulkUrlEncodedParams.value = rawKeyValueEntriesToString( + newurlEncodedParamList + ) } }, { immediate: true } ) watch(workingUrlEncodedParams, (newWorkingUrlEncodedParams) => { - const fixedUrlEncodedParams = newWorkingUrlEncodedParams.filter( - (e) => e.key !== "" + const fixedUrlEncodedParams = pipe( + newWorkingUrlEncodedParams, + A.filterMap( + flow( + O.fromPredicate((e) => e.key !== ""), + O.map(objRemoveKey("id")) + ) + ) ) + if (!isEqual(urlEncodedParams.value, fixedUrlEncodedParams)) { urlEncodedParams.value = fixedUrlEncodedParams } }) -// Bulk Editor Syncing with Working urlEncodedParams -watch(bulkUrlEncodedParams, () => { - try { - const transformation = pipe( - bulkUrlEncodedParams.value, - parseRawKeyValueEntriesE, - E.map(RA.toArray), - E.getOrElse(() => [] as RawKeyValueEntry[]) - ) - - const filteredUrlEncodedParams = workingUrlEncodedParams.value.filter( - (x) => x.key !== "" - ) - - if (!isEqual(filteredUrlEncodedParams, transformation)) { - 
workingUrlEncodedParams.value = transformation - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) - } -}) - -watch(workingUrlEncodedParams, (newurlEncodedParamList) => { - if (bulkMode.value) return - - try { - const currentBulkUrlEncodedParams = bulkUrlEncodedParams.value - .split("\n") - .map((item) => ({ - key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), - value: item.substring(item.indexOf(":") + 1).trimLeft(), - active: !item.trim().startsWith("#"), - })) - - const filteredUrlEncodedParams = newurlEncodedParamList.filter( - (x) => x.key !== "" - ) - - if (!isEqual(currentBulkUrlEncodedParams, filteredUrlEncodedParams)) { - bulkUrlEncodedParams.value = rawKeyValueEntriesToString( - filteredUrlEncodedParams +watch(bulkUrlEncodedParams, (newBulkUrlEncodedParams) => { + const filteredBulkParams = pipe( + parseRawKeyValueEntriesE(newBulkUrlEncodedParams), + E.map( + flow( + RA.filter((e) => e.key !== ""), + RA.toArray ) - } - } catch (e) { - toast.error(`${t("error.something_went_wrong")}`) - console.error(e) + ), + E.getOrElse(() => [] as RawKeyValueEntry[]) + ) + + if (!isEqual(urlEncodedParams.value, filteredBulkParams)) { + urlEncodedParams.value = filteredBulkParams } }) const addUrlEncodedParam = () => { workingUrlEncodedParams.value.push({ + id: idTicker.value++, key: "", value: "", active: true, }) } -const updateUrlEncodedParam = (index: number, param: RawKeyValueEntry) => { +const updateUrlEncodedParam = ( + index: number, + param: RawKeyValueEntry & { id: number } +) => { workingUrlEncodedParams.value = workingUrlEncodedParams.value.map((p, i) => i === index ? param : p ) } const deleteUrlEncodedParam = (index: number) => { - const urlEncodedParamsBeforeDeletion = clone(workingUrlEncodedParams.value) + const urlEncodedParamsBeforeDeletion = cloneDeep( + workingUrlEncodedParams.value + ) if ( !( @@ -324,13 +341,20 @@ const deleteUrlEncodedParam = (index: number) => { }) } - workingUrlEncodedParams.value.splice(index, 1) + workingUrlEncodedParams.value = pipe( + workingUrlEncodedParams.value, + A.deleteAt(index), + O.getOrElseW(() => + throwError("Working URL Encoded Params Deletion Out of Bounds") + ) + ) } const clearContent = () => { // set urlEncodedParams list to the initial state workingUrlEncodedParams.value = [ { + id: idTicker.value++, key: "", value: "", active: true, diff --git a/packages/hoppscotch-app/helpers/functional/error.ts b/packages/hoppscotch-app/helpers/functional/error.ts new file mode 100644 index 000000000..f91956a6a --- /dev/null +++ b/packages/hoppscotch-app/helpers/functional/error.ts @@ -0,0 +1,3 @@ +export const throwError = (message: string): never => { + throw new Error(message) +} diff --git a/packages/hoppscotch-app/helpers/functional/object.ts b/packages/hoppscotch-app/helpers/functional/object.ts new file mode 100644 index 000000000..11697c924 --- /dev/null +++ b/packages/hoppscotch-app/helpers/functional/object.ts @@ -0,0 +1,10 @@ +import { pipe } from "fp-ts/function" +import cloneDeep from "lodash/cloneDeep" + +export const objRemoveKey = + (key: K) => + (obj: T): Omit => + pipe(cloneDeep(obj), (e) => { + delete e[key] + return e + }) diff --git a/packages/hoppscotch-data/src/rawKeyValue.ts b/packages/hoppscotch-data/src/rawKeyValue.ts index 1d662e046..1e17faf0b 100644 --- a/packages/hoppscotch-data/src/rawKeyValue.ts +++ b/packages/hoppscotch-data/src/rawKeyValue.ts @@ -60,7 +60,16 @@ const line = pipe( const lineWithNoColon = pipe( wsSurround(commented), P.bindTo("commented"), - 
P.bind("key", () => stringTakeUntilCharsInclusive(["\n"])), + P.bind("key", () => P.either( + stringTakeUntilCharsInclusive(["\n"]), + () => pipe( + P.manyTill(P.sat((_: string) => true), P.eof()), + P.map(flow( + RA.toArray, + stringArrayJoin("") + )) + ) + )), P.map(flow( O.fromPredicate(({ key }) => !Str.isEmpty(key)) ))