Merge branch 'refactor/rawkeyvalue'

This commit is contained in:
liyasthomas
2022-02-17 19:55:52 +05:30
24 changed files with 911 additions and 655 deletions

View File

@@ -17,8 +17,8 @@
"types": "dist/index.d.ts", "types": "dist/index.d.ts",
"sideEffects": false, "sideEffects": false,
"dependencies": { "dependencies": {
"@codemirror/highlight": "^0.19.0", "@codemirror/highlight": "^0.19.7",
"@codemirror/language": "^0.19.0", "@codemirror/language": "^0.19.7",
"@lezer/lr": "^0.15.8" "@lezer/lr": "^0.15.8"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -118,15 +118,14 @@
import clone from "lodash/clone" import clone from "lodash/clone"
import { computed, defineComponent, PropType } from "@nuxtjs/composition-api" import { computed, defineComponent, PropType } from "@nuxtjs/composition-api"
import * as E from "fp-ts/Either" import * as E from "fp-ts/Either"
import { Environment, parseTemplateStringE } from "@hoppscotch/data"
import { import {
Environment,
getEnviroment, getEnviroment,
getGlobalVariables, getGlobalVariables,
globalEnv$, globalEnv$,
setGlobalEnvVariables, setGlobalEnvVariables,
updateEnvironment, updateEnvironment,
} from "~/newstore/environments" } from "~/newstore/environments"
import { parseTemplateStringE } from "~/helpers/templating"
import { useReadonlyStream } from "~/helpers/utils/composables" import { useReadonlyStream } from "~/helpers/utils/composables"
export default defineComponent({ export default defineComponent({

View File

@@ -86,6 +86,7 @@
<script setup lang="ts"> <script setup lang="ts">
import { computed, ref } from "@nuxtjs/composition-api" import { computed, ref } from "@nuxtjs/composition-api"
import { Environment } from "@hoppscotch/data"
import { currentUser$ } from "~/helpers/fb/auth" import { currentUser$ } from "~/helpers/fb/auth"
import { import {
useAxios, useAxios,
@@ -97,7 +98,6 @@ import {
environments$, environments$,
replaceEnvironments, replaceEnvironments,
appendEnvironments, appendEnvironments,
Environment,
} from "~/newstore/environments" } from "~/newstore/environments"
defineProps<{ defineProps<{

View File

@@ -157,7 +157,7 @@
<div v-else> <div v-else>
<div <div
v-for="(header, index) in workingHeaders" v-for="(header, index) in workingHeaders"
:key="`header-${String(index)}`" :key="`header-${header.id}`"
class="flex border-b divide-x divide-dividerLight border-dividerLight" class="flex border-b divide-x divide-dividerLight border-dividerLight"
> >
<SmartAutoComplete <SmartAutoComplete
@@ -177,6 +177,7 @@
class="flex-1 !flex" class="flex-1 !flex"
@input=" @input="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: $event, key: $event,
value: header.value, value: header.value,
active: header.active, active: header.active,
@@ -191,6 +192,7 @@
autofocus autofocus
@change=" @change="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: header.key, key: header.key,
value: $event.target.value, value: $event.target.value,
active: header.active, active: header.active,
@@ -217,6 +219,7 @@
color="green" color="green"
@click.native=" @click.native="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: header.key, key: header.key,
value: header.value, value: header.value,
active: !header.active, active: !header.active,
@@ -270,8 +273,20 @@
import { Ref, computed, reactive, ref, watch } from "@nuxtjs/composition-api" import { Ref, computed, reactive, ref, watch } from "@nuxtjs/composition-api"
import clone from "lodash/clone" import clone from "lodash/clone"
import * as gql from "graphql" import * as gql from "graphql"
import { GQLHeader, makeGQLRequest } from "@hoppscotch/data" import * as E from "fp-ts/Either"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import * as RA from "fp-ts/ReadonlyArray"
import { pipe, flow } from "fp-ts/function"
import {
GQLHeader,
makeGQLRequest,
rawKeyValueEntriesToString,
parseRawKeyValueEntriesE,
RawKeyValueEntry,
} from "@hoppscotch/data"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import cloneDeep from "lodash/cloneDeep"
import { copyToClipboard } from "~/helpers/utils/clipboard" import { copyToClipboard } from "~/helpers/utils/clipboard"
import { import {
useNuxt, useNuxt,
@@ -302,6 +317,7 @@ import { createGQLQueryLinter } from "~/helpers/editor/linting/gqlQuery"
import queryCompleter from "~/helpers/editor/completion/gqlQuery" import queryCompleter from "~/helpers/editor/completion/gqlQuery"
import { defineActionHandler } from "~/helpers/actions" import { defineActionHandler } from "~/helpers/actions"
import { getPlatformSpecialKey as getSpecialKey } from "~/helpers/platformutils" import { getPlatformSpecialKey as getSpecialKey } from "~/helpers/platformutils"
import { objRemoveKey } from "~/helpers/functional/object"
const t = useI18n() const t = useI18n()
@@ -316,6 +332,8 @@ const url = useReadonlyStream(gqlURL$, "")
const gqlQueryString = useStream(gqlQuery$, "", setGQLQuery) const gqlQueryString = useStream(gqlQuery$, "", setGQLQuery)
const variableString = useStream(gqlVariables$, "", setGQLVariables) const variableString = useStream(gqlVariables$, "", setGQLVariables)
const idTicker = ref(0)
const bulkMode = ref(false) const bulkMode = ref(false)
const bulkHeaders = ref("") const bulkHeaders = ref("")
const bulkEditor = ref<any | null>(null) const bulkEditor = ref<any | null>(null)
@@ -336,8 +354,9 @@ useCodemirror(bulkEditor, bulkHeaders, {
const headers = useStream(gqlHeaders$, [], setGQLHeaders) as Ref<GQLHeader[]> const headers = useStream(gqlHeaders$, [], setGQLHeaders) as Ref<GQLHeader[]>
// The UI representation of the headers list (has the empty end header) // The UI representation of the headers list (has the empty end header)
const workingHeaders = ref<GQLHeader[]>([ const workingHeaders = ref<Array<GQLHeader & { id: number }>>([
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -351,6 +370,7 @@ watch(workingHeaders, (headersList) => {
headersList[headersList.length - 1].key !== "" headersList[headersList.length - 1].key !== ""
) { ) {
workingHeaders.value.push({ workingHeaders.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -363,44 +383,72 @@ watch(
headers, headers,
(newHeadersList) => { (newHeadersList) => {
// Sync should overwrite working headers // Sync should overwrite working headers
const filteredWorkingHeaders = workingHeaders.value.filter( const filteredWorkingHeaders = pipe(
(e) => e.key !== "" workingHeaders.value,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
const filteredBulkHeaders = pipe(
parseRawKeyValueEntriesE(bulkHeaders.value),
E.map(
flow(
RA.filter((e) => e.key !== ""),
RA.toArray
)
),
E.getOrElse(() => [] as RawKeyValueEntry[])
) )
if (!isEqual(newHeadersList, filteredWorkingHeaders)) { if (!isEqual(newHeadersList, filteredWorkingHeaders)) {
workingHeaders.value = newHeadersList workingHeaders.value = pipe(
newHeadersList,
A.map((x) => ({ id: idTicker.value++, ...x }))
)
}
if (!isEqual(newHeadersList, filteredBulkHeaders)) {
bulkHeaders.value = rawKeyValueEntriesToString(newHeadersList)
} }
}, },
{ immediate: true } { immediate: true }
) )
watch(workingHeaders, (newWorkingHeaders) => { watch(workingHeaders, (newWorkingHeaders) => {
const fixedHeaders = newWorkingHeaders.filter((e) => e.key !== "") const fixedHeaders = pipe(
newWorkingHeaders,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
if (!isEqual(headers.value, fixedHeaders)) { if (!isEqual(headers.value, fixedHeaders)) {
headers.value = fixedHeaders headers.value = cloneDeep(fixedHeaders)
} }
}) })
// Bulk Editor Syncing with Working Headers // Bulk Editor Syncing with Working Headers
watch(bulkHeaders, () => { watch(bulkHeaders, (newBulkHeaders) => {
try { const filteredBulkHeaders = pipe(
const transformation = bulkHeaders.value parseRawKeyValueEntriesE(newBulkHeaders),
.split("\n") E.map(
.filter((x) => x.trim().length > 0 && x.includes(":")) flow(
.map((item) => ({ RA.filter((e) => e.key !== ""),
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), RA.toArray
value: item.substring(item.indexOf(":") + 1).trimLeft(), )
active: !item.trim().startsWith("#"), ),
})) E.getOrElse(() => [] as RawKeyValueEntry[])
)
const filteredHeaders = workingHeaders.value.filter((x) => x.key !== "") if (!isEqual(headers.value, filteredBulkHeaders)) {
headers.value = filteredBulkHeaders
if (!isEqual(filteredHeaders, transformation)) {
workingHeaders.value = transformation
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
} }
}) })
@@ -418,11 +466,7 @@ watch(workingHeaders, (newHeadersList) => {
const filteredHeaders = newHeadersList.filter((x) => x.key !== "") const filteredHeaders = newHeadersList.filter((x) => x.key !== "")
if (!isEqual(currentBulkHeaders, filteredHeaders)) { if (!isEqual(currentBulkHeaders, filteredHeaders)) {
bulkHeaders.value = filteredHeaders bulkHeaders.value = rawKeyValueEntriesToString(filteredHeaders)
.map((header) => {
return `${header.active ? "" : "#"}${header.key}: ${header.value}`
})
.join("\n")
} }
} catch (e) { } catch (e) {
toast.error(`${t("error.something_went_wrong")}`) toast.error(`${t("error.something_went_wrong")}`)
@@ -432,13 +476,14 @@ watch(workingHeaders, (newHeadersList) => {
const addHeader = () => { const addHeader = () => {
workingHeaders.value.push({ workingHeaders.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
}) })
} }
const updateHeader = (index: number, header: GQLHeader) => { const updateHeader = (index: number, header: GQLHeader & { id: number }) => {
workingHeaders.value = workingHeaders.value.map((h, i) => workingHeaders.value = workingHeaders.value.map((h, i) =>
i === index ? header : h i === index ? header : h
) )
@@ -483,6 +528,7 @@ const clearContent = () => {
// set headers list to the initial state // set headers list to the initial state
workingHeaders.value = [ workingHeaders.value = [
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,

View File

@@ -85,14 +85,14 @@
<script setup lang="ts"> <script setup lang="ts">
import { computed, ref, watch } from "@nuxtjs/composition-api" import { computed, ref, watch } from "@nuxtjs/composition-api"
import * as O from "fp-ts/Option" import * as O from "fp-ts/Option"
import { makeRESTRequest } from "@hoppscotch/data" import { Environment, makeRESTRequest } from "@hoppscotch/data"
import { useCodemirror } from "~/helpers/editor/codemirror" import { useCodemirror } from "~/helpers/editor/codemirror"
import { copyToClipboard } from "~/helpers/utils/clipboard" import { copyToClipboard } from "~/helpers/utils/clipboard"
import { import {
getEffectiveRESTRequest, getEffectiveRESTRequest,
resolvesEnvsInBody, resolvesEnvsInBody,
} from "~/helpers/utils/EffectiveURL" } from "~/helpers/utils/EffectiveURL"
import { Environment, getAggregateEnvs } from "~/newstore/environments" import { getAggregateEnvs } from "~/newstore/environments"
import { getRESTRequest } from "~/newstore/RESTSession" import { getRESTRequest } from "~/newstore/RESTSession"
import { useI18n, useToast } from "~/helpers/utils/composables" import { useI18n, useToast } from "~/helpers/utils/composables"
import { import {

View File

@@ -40,7 +40,7 @@
<div v-else> <div v-else>
<div <div
v-for="(header, index) in workingHeaders" v-for="(header, index) in workingHeaders"
:key="`header-${index}`" :key="`header-${header.id}`"
class="flex border-b divide-x divide-dividerLight border-dividerLight" class="flex border-b divide-x divide-dividerLight border-dividerLight"
> >
<SmartAutoComplete <SmartAutoComplete
@@ -60,6 +60,7 @@
class="flex-1 !flex" class="flex-1 !flex"
@input=" @input="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: $event, key: $event,
value: header.value, value: header.value,
active: header.active, active: header.active,
@@ -71,6 +72,7 @@
:placeholder="`${t('count.value', { count: index + 1 })}`" :placeholder="`${t('count.value', { count: index + 1 })}`"
@change=" @change="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: header.key, key: header.key,
value: $event, value: $event,
active: header.active, active: header.active,
@@ -97,6 +99,7 @@
color="green" color="green"
@click.native=" @click.native="
updateHeader(index, { updateHeader(index, {
id: header.id,
key: header.key, key: header.key,
value: header.value, value: header.value,
active: !header.active, active: !header.active,
@@ -142,16 +145,31 @@
<script setup lang="ts"> <script setup lang="ts">
import { Ref, ref, watch } from "@nuxtjs/composition-api" import { Ref, ref, watch } from "@nuxtjs/composition-api"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import clone from "lodash/clone" import {
import { HoppRESTHeader } from "@hoppscotch/data" HoppRESTHeader,
parseRawKeyValueEntriesE,
rawKeyValueEntriesToString,
RawKeyValueEntry,
} from "@hoppscotch/data"
import { flow, pipe } from "fp-ts/function"
import * as RA from "fp-ts/ReadonlyArray"
import * as E from "fp-ts/Either"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import cloneDeep from "lodash/cloneDeep"
import { useCodemirror } from "~/helpers/editor/codemirror" import { useCodemirror } from "~/helpers/editor/codemirror"
import { restHeaders$, setRESTHeaders } from "~/newstore/RESTSession" import { restHeaders$, setRESTHeaders } from "~/newstore/RESTSession"
import { commonHeaders } from "~/helpers/headers" import { commonHeaders } from "~/helpers/headers"
import { useI18n, useStream, useToast } from "~/helpers/utils/composables" import { useI18n, useStream, useToast } from "~/helpers/utils/composables"
import linter from "~/helpers/editor/linting/rawKeyValue"
import { throwError } from "~/helpers/functional/error"
import { objRemoveKey } from "~/helpers/functional/object"
const t = useI18n() const t = useI18n()
const toast = useToast() const toast = useToast()
const idTicker = ref(0)
const bulkMode = ref(false) const bulkMode = ref(false)
const bulkHeaders = ref("") const bulkHeaders = ref("")
const bulkEditor = ref<any | null>(null) const bulkEditor = ref<any | null>(null)
@@ -163,7 +181,7 @@ useCodemirror(bulkEditor, bulkHeaders, {
mode: "text/x-yaml", mode: "text/x-yaml",
placeholder: `${t("state.bulk_mode_placeholder")}`, placeholder: `${t("state.bulk_mode_placeholder")}`,
}, },
linter: null, linter,
completer: null, completer: null,
environmentHighlights: true, environmentHighlights: true,
}) })
@@ -173,22 +191,24 @@ const headers = useStream(restHeaders$, [], setRESTHeaders) as Ref<
HoppRESTHeader[] HoppRESTHeader[]
> >
// The UI representation of the headers list (has the empty end header) // The UI representation of the headers list (has the empty end headers)
const workingHeaders = ref<HoppRESTHeader[]>([ const workingHeaders = ref<Array<HoppRESTHeader & { id: number }>>([
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
}, },
]) ])
// Rule: Working Headers always have one empty header or the last element is always an empty header // Rule: Working Headers always have last element is always an empty header
watch(workingHeaders, (headersList) => { watch(workingHeaders, (headersList) => {
if ( if (
headersList.length > 0 && headersList.length > 0 &&
headersList[headersList.length - 1].key !== "" headersList[headersList.length - 1].key !== ""
) { ) {
workingHeaders.value.push({ workingHeaders.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -196,94 +216,99 @@ watch(workingHeaders, (headersList) => {
} }
}) })
// Sync logic between headers and working headers // Sync logic between headers and working/bulk headers
watch( watch(
headers, headers,
(newHeadersList) => { (newHeadersList) => {
// Sync should overwrite working headers // Sync should overwrite working headers
const filteredWorkingHeaders = workingHeaders.value.filter( const filteredWorkingHeaders = pipe(
(e) => e.key !== "" workingHeaders.value,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
const filteredBulkHeaders = pipe(
parseRawKeyValueEntriesE(bulkHeaders.value),
E.map(
flow(
RA.filter((e) => e.key !== ""),
RA.toArray
)
),
E.getOrElse(() => [] as RawKeyValueEntry[])
) )
if (!isEqual(newHeadersList, filteredWorkingHeaders)) { if (!isEqual(newHeadersList, filteredWorkingHeaders)) {
workingHeaders.value = newHeadersList workingHeaders.value = pipe(
newHeadersList,
A.map((x) => ({ id: idTicker.value++, ...x }))
)
}
if (!isEqual(newHeadersList, filteredBulkHeaders)) {
bulkHeaders.value = rawKeyValueEntriesToString(newHeadersList)
} }
}, },
{ immediate: true } { immediate: true }
) )
watch(workingHeaders, (newWorkingHeaders) => { watch(workingHeaders, (newWorkingHeaders) => {
const fixedHeaders = newWorkingHeaders.filter((e) => e.key !== "") const fixedHeaders = pipe(
newWorkingHeaders,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
if (!isEqual(headers.value, fixedHeaders)) { if (!isEqual(headers.value, fixedHeaders)) {
headers.value = fixedHeaders headers.value = cloneDeep(fixedHeaders)
} }
}) })
// Bulk Editor Syncing with Working Headers watch(bulkHeaders, (newBulkHeaders) => {
watch(bulkHeaders, () => { const filteredBulkHeaders = pipe(
try { parseRawKeyValueEntriesE(newBulkHeaders),
const transformation = bulkHeaders.value E.map(
.split("\n") flow(
.filter((x) => x.trim().length > 0 && x.includes(":")) RA.filter((e) => e.key !== ""),
.map((item) => ({ RA.toArray
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), )
value: item.substring(item.indexOf(":") + 1).trimLeft(), ),
active: !item.trim().startsWith("#"), E.getOrElse(() => [] as RawKeyValueEntry[])
})) )
const filteredHeaders = workingHeaders.value.filter((x) => x.key !== "") if (!isEqual(headers.value, filteredBulkHeaders)) {
headers.value = filteredBulkHeaders
if (!isEqual(filteredHeaders, transformation)) {
workingHeaders.value = transformation
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
}
})
watch(workingHeaders, (newHeadersList) => {
// If we are in bulk mode, don't apply direct changes
if (bulkMode.value) return
try {
const currentBulkHeaders = bulkHeaders.value.split("\n").map((item) => ({
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
value: item.substring(item.indexOf(":") + 1).trimLeft(),
active: !item.trim().startsWith("#"),
}))
const filteredHeaders = newHeadersList.filter((x) => x.key !== "")
if (!isEqual(currentBulkHeaders, filteredHeaders)) {
bulkHeaders.value = filteredHeaders
.map((header) => {
return `${header.active ? "" : "#"}${header.key}: ${header.value}`
})
.join("\n")
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
} }
}) })
const addHeader = () => { const addHeader = () => {
workingHeaders.value.push({ workingHeaders.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
}) })
} }
const updateHeader = (index: number, header: HoppRESTHeader) => { const updateHeader = (
index: number,
header: HoppRESTHeader & { id: number }
) => {
workingHeaders.value = workingHeaders.value.map((h, i) => workingHeaders.value = workingHeaders.value.map((h, i) =>
i === index ? header : h i === index ? header : h
) )
} }
const deleteHeader = (index: number) => { const deleteHeader = (index: number) => {
const headersBeforeDeletion = clone(workingHeaders.value) const headersBeforeDeletion = cloneDeep(workingHeaders.value)
if ( if (
!( !(
@@ -314,13 +339,18 @@ const deleteHeader = (index: number) => {
}) })
} }
workingHeaders.value.splice(index, 1) workingHeaders.value = pipe(
workingHeaders.value,
A.deleteAt(index),
O.getOrElseW(() => throwError("Working Headers Deletion Out of Bounds"))
)
} }
const clearContent = () => { const clearContent = () => {
// set headers list to the initial state // set params list to the initial state
workingHeaders.value = [ workingHeaders.value = [
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,

View File

@@ -40,7 +40,7 @@
<div v-else> <div v-else>
<div <div
v-for="(param, index) in workingParams" v-for="(param, index) in workingParams"
:key="`param-${index}`" :key="`param-${param.id}`"
class="flex border-b divide-x divide-dividerLight border-dividerLight" class="flex border-b divide-x divide-dividerLight border-dividerLight"
> >
<SmartEnvInput <SmartEnvInput
@@ -48,6 +48,7 @@
:placeholder="`${t('count.parameter', { count: index + 1 })}`" :placeholder="`${t('count.parameter', { count: index + 1 })}`"
@change=" @change="
updateParam(index, { updateParam(index, {
id: param.id,
key: $event, key: $event,
value: param.value, value: param.value,
active: param.active, active: param.active,
@@ -59,6 +60,7 @@
:placeholder="`${t('count.value', { count: index + 1 })}`" :placeholder="`${t('count.value', { count: index + 1 })}`"
@change=" @change="
updateParam(index, { updateParam(index, {
id: param.id,
key: param.key, key: param.key,
value: $event, value: $event,
active: param.active, active: param.active,
@@ -85,6 +87,7 @@
color="green" color="green"
@click.native=" @click.native="
updateParam(index, { updateParam(index, {
id: param.id,
key: param.key, key: param.key,
value: param.value, value: param.value,
active: param.hasOwnProperty('active') ? !param.active : false, active: param.hasOwnProperty('active') ? !param.active : false,
@@ -128,41 +131,55 @@
</template> </template>
<script setup lang="ts"> <script setup lang="ts">
import { ref, watch } from "@nuxtjs/composition-api" import { Ref, ref, watch } from "@nuxtjs/composition-api"
import { HoppRESTParam } from "@hoppscotch/data" import { flow, pipe } from "fp-ts/function"
import * as O from "fp-ts/Option"
import * as A from "fp-ts/Array"
import * as RA from "fp-ts/ReadonlyArray"
import * as E from "fp-ts/Either"
import {
HoppRESTParam,
parseRawKeyValueEntriesE,
rawKeyValueEntriesToString,
RawKeyValueEntry,
} from "@hoppscotch/data"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import clone from "lodash/clone" import cloneDeep from "lodash/cloneDeep"
import linter from "~/helpers/editor/linting/rawKeyValue"
import { useCodemirror } from "~/helpers/editor/codemirror" import { useCodemirror } from "~/helpers/editor/codemirror"
import { useI18n, useToast, useStream } from "~/helpers/utils/composables" import { useI18n, useToast, useStream } from "~/helpers/utils/composables"
import { restParams$, setRESTParams } from "~/newstore/RESTSession" import { restParams$, setRESTParams } from "~/newstore/RESTSession"
import { throwError } from "~/helpers/functional/error"
import { objRemoveKey } from "~/helpers/functional/object"
const t = useI18n() const t = useI18n()
const toast = useToast() const toast = useToast()
const idTicker = ref(0)
const bulkMode = ref(false) const bulkMode = ref(false)
const bulkParams = ref("") const bulkParams = ref("")
const bulkEditor = ref<any | null>(null)
const deletionToast = ref<{ goAway: (delay: number) => void } | null>(null) const deletionToast = ref<{ goAway: (delay: number) => void } | null>(null)
const bulkEditor = ref<any | null>(null)
useCodemirror(bulkEditor, bulkParams, { useCodemirror(bulkEditor, bulkParams, {
extendedEditorConfig: { extendedEditorConfig: {
mode: "text/x-yaml", mode: "text/x-yaml",
placeholder: `${t("state.bulk_mode_placeholder")}`, placeholder: `${t("state.bulk_mode_placeholder")}`,
}, },
linter: null, linter,
completer: null, completer: null,
environmentHighlights: true, environmentHighlights: true,
}) })
// The functional parameters list (the parameters actually applied to the session) // The functional parameters list (the parameters actually applied to the session)
const params = useStream(restParams$, [], setRESTParams) const params = useStream(restParams$, [], setRESTParams) as Ref<HoppRESTParam[]>
// The UI representation of the parameters list (has the empty end param) // The UI representation of the parameters list (has the empty end param)
const workingParams = ref<HoppRESTParam[]>([ const workingParams = ref<Array<HoppRESTParam & { id: number }>>([
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -173,6 +190,7 @@ const workingParams = ref<HoppRESTParam[]>([
watch(workingParams, (paramsList) => { watch(workingParams, (paramsList) => {
if (paramsList.length > 0 && paramsList[paramsList.length - 1].key !== "") { if (paramsList.length > 0 && paramsList[paramsList.length - 1].key !== "") {
workingParams.value.push({ workingParams.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -180,94 +198,96 @@ watch(workingParams, (paramsList) => {
} }
}) })
// Sync logic between params and working params // Sync logic between params and working/bulk params
watch( watch(
params, params,
(newParamsList) => { (newParamsList) => {
// Sync should overwrite working params // Sync should overwrite working params
const filteredWorkingParams = workingParams.value.filter( const filteredWorkingParams: HoppRESTParam[] = pipe(
(e) => e.key !== "" workingParams.value,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
const filteredBulkParams = pipe(
parseRawKeyValueEntriesE(bulkParams.value),
E.map(
flow(
RA.filter((e) => e.key !== ""),
RA.toArray
)
),
E.getOrElse(() => [] as RawKeyValueEntry[])
) )
if (!isEqual(newParamsList, filteredWorkingParams)) { if (!isEqual(newParamsList, filteredWorkingParams)) {
workingParams.value = newParamsList workingParams.value = pipe(
newParamsList,
A.map((x) => ({ id: idTicker.value++, ...x }))
)
}
if (!isEqual(newParamsList, filteredBulkParams)) {
bulkParams.value = rawKeyValueEntriesToString(newParamsList)
} }
}, },
{ immediate: true } { immediate: true }
) )
watch(workingParams, (newWorkingParams) => { watch(workingParams, (newWorkingParams) => {
const fixedParams = newWorkingParams.filter((e) => e.key !== "") const fixedParams = pipe(
newWorkingParams,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
if (!isEqual(params.value, fixedParams)) { if (!isEqual(params.value, fixedParams)) {
params.value = fixedParams params.value = cloneDeep(fixedParams)
} }
}) })
// Bulk Editor Syncing with Working Params watch(bulkParams, (newBulkParams) => {
watch(bulkParams, () => { const filteredBulkParams = pipe(
try { parseRawKeyValueEntriesE(newBulkParams),
const transformation = bulkParams.value E.map(
.split("\n") flow(
.filter((x) => x.trim().length > 0 && x.includes(":")) RA.filter((e) => e.key !== ""),
.map((item) => ({ RA.toArray
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), )
value: item.substring(item.indexOf(":") + 1).trimLeft(), ),
active: !item.trim().startsWith("#"), E.getOrElse(() => [] as RawKeyValueEntry[])
})) )
const filteredParams = workingParams.value.filter((x) => x.key !== "") if (!isEqual(params.value, filteredBulkParams)) {
params.value = filteredBulkParams
if (!isEqual(filteredParams, transformation)) {
workingParams.value = transformation
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
}
})
watch(workingParams, (newParamsList) => {
// If we are in bulk mode, don't apply direct changes
if (bulkMode.value) return
try {
const currentBulkParams = bulkParams.value.split("\n").map((item) => ({
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
value: item.substring(item.indexOf(":") + 1).trimLeft(),
active: !item.trim().startsWith("#"),
}))
const filteredParams = newParamsList.filter((x) => x.key !== "")
if (!isEqual(currentBulkParams, filteredParams)) {
bulkParams.value = filteredParams
.map((param) => {
return `${param.active ? "" : "#"}${param.key}: ${param.value}`
})
.join("\n")
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
} }
}) })
const addParam = () => { const addParam = () => {
workingParams.value.push({ workingParams.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
}) })
} }
const updateParam = (index: number, param: HoppRESTParam) => { const updateParam = (index: number, param: HoppRESTParam & { id: number }) => {
workingParams.value = workingParams.value.map((h, i) => workingParams.value = workingParams.value.map((h, i) =>
i === index ? param : h i === index ? param : h
) )
} }
const deleteParam = (index: number) => { const deleteParam = (index: number) => {
const paramsBeforeDeletion = clone(workingParams.value) const paramsBeforeDeletion = cloneDeep(workingParams.value)
if ( if (
!( !(
@@ -298,13 +318,18 @@ const deleteParam = (index: number) => {
}) })
} }
workingParams.value.splice(index, 1) workingParams.value = pipe(
workingParams.value,
A.deleteAt(index),
O.getOrElseW(() => throwError("Working Params Deletion Out of Bounds"))
)
} }
const clearContent = () => { const clearContent = () => {
// set params list to the initial state // set params list to the initial state
workingParams.value = [ workingParams.value = [
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,

View File

@@ -40,7 +40,7 @@
<div v-else> <div v-else>
<div <div
v-for="(param, index) in workingUrlEncodedParams" v-for="(param, index) in workingUrlEncodedParams"
:key="`param-${index}`" :key="`param-${param.id}`"
class="flex border-b divide-x divide-dividerLight border-dividerLight" class="flex border-b divide-x divide-dividerLight border-dividerLight"
> >
<SmartEnvInput <SmartEnvInput
@@ -48,6 +48,7 @@
:placeholder="`${t('count.parameter', { count: index + 1 })}`" :placeholder="`${t('count.parameter', { count: index + 1 })}`"
@change=" @change="
updateUrlEncodedParam(index, { updateUrlEncodedParam(index, {
id: param.id,
key: $event, key: $event,
value: param.value, value: param.value,
active: param.active, active: param.active,
@@ -59,6 +60,7 @@
:placeholder="`${t('count.value', { count: index + 1 })}`" :placeholder="`${t('count.value', { count: index + 1 })}`"
@change=" @change="
updateUrlEncodedParam(index, { updateUrlEncodedParam(index, {
id: param.id,
key: param.key, key: param.key,
value: $event, value: $event,
active: param.active, active: param.active,
@@ -85,6 +87,7 @@
color="green" color="green"
@click.native=" @click.native="
updateUrlEncodedParam(index, { updateUrlEncodedParam(index, {
id: param.id,
key: param.key, key: param.key,
value: param.value, value: param.value,
active: !param.active, active: !param.active,
@@ -130,20 +133,31 @@
<script setup lang="ts"> <script setup lang="ts">
import { computed, Ref, ref, watch } from "@nuxtjs/composition-api" import { computed, Ref, ref, watch } from "@nuxtjs/composition-api"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import clone from "lodash/clone"
import { HoppRESTReqBody } from "@hoppscotch/data"
import { useCodemirror } from "~/helpers/editor/codemirror"
import { useRESTRequestBody } from "~/newstore/RESTSession"
import { pluckRef, useI18n, useToast } from "~/helpers/utils/composables"
import { import {
HoppRESTReqBody,
parseRawKeyValueEntries, parseRawKeyValueEntries,
parseRawKeyValueEntriesE,
rawKeyValueEntriesToString, rawKeyValueEntriesToString,
RawKeyValueEntry, RawKeyValueEntry,
} from "~/helpers/rawKeyValue" } from "@hoppscotch/data"
import { flow, pipe } from "fp-ts/function"
import * as A from "fp-ts/Array"
import * as O from "fp-ts/Option"
import * as RA from "fp-ts/ReadonlyArray"
import * as E from "fp-ts/Either"
import { cloneDeep } from "lodash"
import { useCodemirror } from "~/helpers/editor/codemirror"
import linter from "~/helpers/editor/linting/rawKeyValue"
import { useRESTRequestBody } from "~/newstore/RESTSession"
import { pluckRef, useI18n, useToast } from "~/helpers/utils/composables"
import { objRemoveKey } from "~/helpers/functional/object"
import { throwError } from "~/helpers/functional/error"
const t = useI18n() const t = useI18n()
const toast = useToast() const toast = useToast()
const idTicker = ref(0)
const bulkMode = ref(false) const bulkMode = ref(false)
const bulkUrlEncodedParams = ref("") const bulkUrlEncodedParams = ref("")
const bulkEditor = ref<any | null>(null) const bulkEditor = ref<any | null>(null)
@@ -155,7 +169,7 @@ useCodemirror(bulkEditor, bulkUrlEncodedParams, {
mode: "text/x-yaml", mode: "text/x-yaml",
placeholder: `${t("state.bulk_mode_placeholder")}`, placeholder: `${t("state.bulk_mode_placeholder")}`,
}, },
linter: null, linter,
completer: null, completer: null,
environmentHighlights: true, environmentHighlights: true,
}) })
@@ -178,8 +192,9 @@ const urlEncodedParams = computed<RawKeyValueEntry[]>({
}) })
// The UI representation of the urlEncodedParams list (has the empty end urlEncodedParam) // The UI representation of the urlEncodedParams list (has the empty end urlEncodedParam)
const workingUrlEncodedParams = ref<RawKeyValueEntry[]>([ const workingUrlEncodedParams = ref<Array<RawKeyValueEntry & { id: number }>>([
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -193,6 +208,7 @@ watch(workingUrlEncodedParams, (urlEncodedParamList) => {
urlEncodedParamList[urlEncodedParamList.length - 1].key !== "" urlEncodedParamList[urlEncodedParamList.length - 1].key !== ""
) { ) {
workingUrlEncodedParams.value.push({ workingUrlEncodedParams.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
@@ -204,95 +220,97 @@ watch(workingUrlEncodedParams, (urlEncodedParamList) => {
watch( watch(
urlEncodedParams, urlEncodedParams,
(newurlEncodedParamList) => { (newurlEncodedParamList) => {
const filteredWorkingUrlEncodedParams = const filteredWorkingUrlEncodedParams = pipe(
workingUrlEncodedParams.value.filter((e) => e.key !== "") workingUrlEncodedParams.value,
A.filterMap(
flow(
O.fromPredicate((x) => x.key !== ""),
O.map(objRemoveKey("id"))
)
)
)
const filteredBulkUrlEncodedParams = pipe(
parseRawKeyValueEntriesE(bulkUrlEncodedParams.value),
E.map(
flow(
RA.filter((e) => e.key !== ""),
RA.toArray
)
)
)
if (!isEqual(newurlEncodedParamList, filteredWorkingUrlEncodedParams)) { if (!isEqual(newurlEncodedParamList, filteredWorkingUrlEncodedParams)) {
workingUrlEncodedParams.value = newurlEncodedParamList workingUrlEncodedParams.value = pipe(
newurlEncodedParamList,
A.map((x) => ({ id: idTicker.value++, ...x }))
)
}
if (!isEqual(newurlEncodedParamList, filteredBulkUrlEncodedParams)) {
bulkUrlEncodedParams.value = rawKeyValueEntriesToString(
newurlEncodedParamList
)
} }
}, },
{ immediate: true } { immediate: true }
) )
watch(workingUrlEncodedParams, (newWorkingUrlEncodedParams) => { watch(workingUrlEncodedParams, (newWorkingUrlEncodedParams) => {
const fixedUrlEncodedParams = newWorkingUrlEncodedParams.filter( const fixedUrlEncodedParams = pipe(
(e) => e.key !== "" newWorkingUrlEncodedParams,
A.filterMap(
flow(
O.fromPredicate((e) => e.key !== ""),
O.map(objRemoveKey("id"))
)
)
) )
if (!isEqual(urlEncodedParams.value, fixedUrlEncodedParams)) { if (!isEqual(urlEncodedParams.value, fixedUrlEncodedParams)) {
urlEncodedParams.value = fixedUrlEncodedParams urlEncodedParams.value = fixedUrlEncodedParams
} }
}) })
// Bulk Editor Syncing with Working urlEncodedParams watch(bulkUrlEncodedParams, (newBulkUrlEncodedParams) => {
watch(bulkUrlEncodedParams, () => { const filteredBulkParams = pipe(
try { parseRawKeyValueEntriesE(newBulkUrlEncodedParams),
const transformation = bulkUrlEncodedParams.value E.map(
.split("\n") flow(
.filter((x) => x.trim().length > 0 && x.includes(":")) RA.filter((e) => e.key !== ""),
.map((item) => ({ RA.toArray
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""), )
value: item.substring(item.indexOf(":") + 1).trimLeft(), ),
active: !item.trim().startsWith("#"), E.getOrElse(() => [] as RawKeyValueEntry[])
})) )
const filteredUrlEncodedParams = workingUrlEncodedParams.value.filter( if (!isEqual(urlEncodedParams.value, filteredBulkParams)) {
(x) => x.key !== "" urlEncodedParams.value = filteredBulkParams
)
if (!isEqual(filteredUrlEncodedParams, transformation)) {
workingUrlEncodedParams.value = transformation
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
}
})
watch(workingUrlEncodedParams, (newurlEncodedParamList) => {
if (bulkMode.value) return
try {
const currentBulkUrlEncodedParams = bulkUrlEncodedParams.value
.split("\n")
.map((item) => ({
key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
value: item.substring(item.indexOf(":") + 1).trimLeft(),
active: !item.trim().startsWith("#"),
}))
const filteredUrlEncodedParams = newurlEncodedParamList.filter(
(x) => x.key !== ""
)
if (!isEqual(currentBulkUrlEncodedParams, filteredUrlEncodedParams)) {
bulkUrlEncodedParams.value = filteredUrlEncodedParams
.map((param) => {
return `${param.active ? "" : "#"}${param.key}: ${param.value}`
})
.join("\n")
}
} catch (e) {
toast.error(`${t("error.something_went_wrong")}`)
console.error(e)
} }
}) })
const addUrlEncodedParam = () => { const addUrlEncodedParam = () => {
workingUrlEncodedParams.value.push({ workingUrlEncodedParams.value.push({
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,
}) })
} }
const updateUrlEncodedParam = (index: number, param: RawKeyValueEntry) => { const updateUrlEncodedParam = (
index: number,
param: RawKeyValueEntry & { id: number }
) => {
workingUrlEncodedParams.value = workingUrlEncodedParams.value.map((p, i) => workingUrlEncodedParams.value = workingUrlEncodedParams.value.map((p, i) =>
i === index ? param : p i === index ? param : p
) )
} }
const deleteUrlEncodedParam = (index: number) => { const deleteUrlEncodedParam = (index: number) => {
const urlEncodedParamsBeforeDeletion = clone(workingUrlEncodedParams.value) const urlEncodedParamsBeforeDeletion = cloneDeep(
workingUrlEncodedParams.value
)
if ( if (
!( !(
@@ -323,13 +341,20 @@ const deleteUrlEncodedParam = (index: number) => {
}) })
} }
workingUrlEncodedParams.value.splice(index, 1) workingUrlEncodedParams.value = pipe(
workingUrlEncodedParams.value,
A.deleteAt(index),
O.getOrElseW(() =>
throwError("Working URL Encoded Params Deletion Out of Bounds")
)
)
} }
const clearContent = () => { const clearContent = () => {
// set urlEncodedParams list to the initial state // set urlEncodedParams list to the initial state
workingUrlEncodedParams.value = [ workingUrlEncodedParams.value = [
{ {
id: idTicker.value++,
key: "", key: "",
value: "", value: "",
active: true, active: true,

View File

@@ -8,13 +8,13 @@ import {
ViewPlugin, ViewPlugin,
} from "@codemirror/view" } from "@codemirror/view"
import * as E from "fp-ts/Either" import * as E from "fp-ts/Either"
import { parseTemplateStringE } from "@hoppscotch/data"
import { StreamSubscriberFunc } from "~/helpers/utils/composables" import { StreamSubscriberFunc } from "~/helpers/utils/composables"
import { import {
AggregateEnvironment, AggregateEnvironment,
aggregateEnvs$, aggregateEnvs$,
getAggregateEnvs, getAggregateEnvs,
} from "~/newstore/environments" } from "~/newstore/environments"
import { parseTemplateStringE } from "~/helpers/templating"
const HOPP_ENVIRONMENT_REGEX = /(<<\w+>>)/g const HOPP_ENVIRONMENT_REGEX = /(<<\w+>>)/g

View File

@@ -0,0 +1,24 @@
import * as E from "fp-ts/Either"
import { strictParseRawKeyValueEntriesE } from "@hoppscotch/data"
import { convertIndexToLineCh } from "../utils"
import { LinterDefinition, LinterResult } from "./linter"
/**
 * Codemirror linter for the raw key-value text format.
 * Runs the strict parser over the editor contents and, on failure,
 * reports a single error marker at the position the parser stopped at.
 */
const linter: LinterDefinition = (text) => {
  const parseResult = strictParseRawKeyValueEntriesE(text)

  // Successful parse -> nothing to report
  if (E.isRight(parseResult)) return Promise.resolve([])

  // Parser reports a flat character index; the editor wants line/ch
  const errorPos = convertIndexToLineCh(text, parseResult.left.pos)

  return Promise.resolve([
    <LinterResult>{
      from: errorPos,
      to: errorPos,
      message: parseResult.left.message,
      severity: "error",
    },
  ])
}

export default linter

View File

@@ -1,3 +1,4 @@
import { Environment } from "@hoppscotch/data"
import { import {
collection, collection,
doc, doc,
@@ -7,7 +8,6 @@ import {
} from "firebase/firestore" } from "firebase/firestore"
import { currentUser$ } from "./auth" import { currentUser$ } from "./auth"
import { import {
Environment,
environments$, environments$,
globalEnv$, globalEnv$,
replaceEnvironments, replaceEnvironments,

View File

@@ -0,0 +1,3 @@
/**
 * Raises an `Error` carrying the given message.
 * Typed as `never` so it can be used in expression position
 * (e.g. as an `O.getOrElseW` fallback) where a value is expected.
 * @param message Text for the thrown `Error`
 * @throws Error always
 */
export function throwError(message: string): never {
  throw new Error(message)
}

View File

@@ -0,0 +1,10 @@
import { pipe } from "fp-ts/function"
import cloneDeep from "lodash/cloneDeep"
/**
 * Curried helper that returns a copy of an object with one key removed.
 * The input is deep-cloned first, so the original object is never mutated.
 * @param key The property to drop from the object
 */
export const objRemoveKey =
  <T, K extends keyof T>(key: K) =>
  (obj: T): Omit<T, K> => {
    const copy = cloneDeep(obj)
    delete copy[key]
    return copy
  }

View File

@@ -1,40 +0,0 @@
import * as A from "fp-ts/Array"
import * as RA from "fp-ts/ReadonlyArray"
import * as S from "fp-ts/string"
import { pipe, flow } from "fp-ts/function"
import { stringArrayJoin } from "./functional/array"
export type RawKeyValueEntry = {
key: string
value: string
active: boolean
}
const parseRawKeyValueEntry = (str: string): RawKeyValueEntry => {
const trimmed = str.trim()
const inactive = trimmed.startsWith("#")
const [key, value] = trimmed.split(":").map(S.trim)
return {
key: inactive ? key.replaceAll(/^#+\s*/g, "") : key, // Remove comment hash and early space
value,
active: !inactive,
}
}
export const parseRawKeyValueEntries = flow(
S.split("\n"),
RA.filter((x) => x.trim().length > 0), // Remove lines which are empty
RA.map(parseRawKeyValueEntry),
RA.toArray
)
export const rawKeyValueEntriesToString = (entries: RawKeyValueEntry[]) =>
pipe(
entries,
A.map(({ key, value, active }) =>
active ? `${key}: ${value}` : `# ${key}: ${value}`
),
stringArrayJoin("\n")
)

View File

@@ -9,12 +9,10 @@ import {
FormDataKeyValue, FormDataKeyValue,
HoppRESTReqBody, HoppRESTReqBody,
ValidContentTypes, ValidContentTypes,
} from "@hoppscotch/data"
import {
parseRawKeyValueEntries, parseRawKeyValueEntries,
rawKeyValueEntriesToString, rawKeyValueEntriesToString,
RawKeyValueEntry, RawKeyValueEntry,
} from "../rawKeyValue" } from "@hoppscotch/data"
const ANY_TYPE = Symbol("TRANSITION_RULESET_IGNORE_TYPE") const ANY_TYPE = Symbol("TRANSITION_RULESET_IGNORE_TYPE")
// eslint-disable-next-line no-redeclare // eslint-disable-next-line no-redeclare

View File

@@ -7,13 +7,15 @@ import {
FormDataKeyValue, FormDataKeyValue,
HoppRESTReqBody, HoppRESTReqBody,
HoppRESTRequest, HoppRESTRequest,
parseTemplateString,
parseBodyEnvVariables,
parseRawKeyValueEntries,
Environment,
} from "@hoppscotch/data" } from "@hoppscotch/data"
import { parseTemplateString, parseBodyEnvVariables } from "../templating"
import { arrayFlatMap, arraySort } from "../functional/array" import { arrayFlatMap, arraySort } from "../functional/array"
import { toFormData } from "../functional/formData" import { toFormData } from "../functional/formData"
import { tupleToRecord } from "../functional/record" import { tupleToRecord } from "../functional/record"
import { parseRawKeyValueEntries } from "../rawKeyValue" import { getGlobalVariables } from "~/newstore/environments"
import { Environment, getGlobalVariables } from "~/newstore/environments"
export interface EffectiveHoppRESTRequest extends HoppRESTRequest { export interface EffectiveHoppRESTRequest extends HoppRESTRequest {
/** /**

View File

@@ -1,3 +1,4 @@
import { Environment } from "@hoppscotch/data"
import { cloneDeep } from "lodash" import { cloneDeep } from "lodash"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { combineLatest, Observable } from "rxjs" import { combineLatest, Observable } from "rxjs"
@@ -6,14 +7,6 @@ import DispatchingStore, {
defineDispatchers, defineDispatchers,
} from "~/newstore/DispatchingStore" } from "~/newstore/DispatchingStore"
export type Environment = {
name: string
variables: {
key: string
value: string
}[]
}
const defaultEnvironmentsState = { const defaultEnvironmentsState = {
environments: [ environments: [
{ {

View File

@@ -10,6 +10,7 @@ import {
translateToNewRequest, translateToNewRequest,
translateToNewRESTCollection, translateToNewRESTCollection,
translateToNewGQLCollection, translateToNewGQLCollection,
Environment,
} from "@hoppscotch/data" } from "@hoppscotch/data"
import { cloneDeep } from "lodash" import { cloneDeep } from "lodash"
import { import {
@@ -37,7 +38,6 @@ import {
import { import {
replaceEnvironments, replaceEnvironments,
environments$, environments$,
Environment,
addGlobalEnvVariable, addGlobalEnvVariable,
setGlobalEnvVariables, setGlobalEnvVariables,
globalEnv$, globalEnv$,

View File

@@ -36,27 +36,27 @@
"@apidevtools/swagger-parser": "^10.0.3", "@apidevtools/swagger-parser": "^10.0.3",
"@codemirror/autocomplete": "^0.19.0", "@codemirror/autocomplete": "^0.19.0",
"@codemirror/closebrackets": "^0.19.0", "@codemirror/closebrackets": "^0.19.0",
"@codemirror/commands": "^0.19.0", "@codemirror/commands": "^0.19.8",
"@codemirror/comment": "^0.19.0", "@codemirror/comment": "^0.19.0",
"@codemirror/fold": "^0.19.0", "@codemirror/fold": "^0.19.3",
"@codemirror/gutter": "^0.19.0", "@codemirror/gutter": "^0.19.0",
"@codemirror/highlight": "^0.19.0", "@codemirror/highlight": "^0.19.0",
"@codemirror/history": "^0.19.0", "@codemirror/history": "^0.19.0",
"@codemirror/lang-javascript": "^0.19.0", "@codemirror/lang-javascript": "^0.19.7",
"@codemirror/lang-json": "^0.19.0", "@codemirror/lang-json": "^0.19.0",
"@codemirror/language": "^0.19.0", "@codemirror/language": "^0.19.0",
"@codemirror/legacy-modes": "^0.19.0", "@codemirror/legacy-modes": "^0.19.0",
"@codemirror/lint": "^0.19.0", "@codemirror/lint": "^0.19.0",
"@codemirror/matchbrackets": "^0.19.0", "@codemirror/matchbrackets": "^0.19.4",
"@codemirror/rectangular-selection": "^0.19.0", "@codemirror/rectangular-selection": "^0.19.0",
"@codemirror/search": "^0.19.0", "@codemirror/search": "^0.19.6",
"@codemirror/state": "^0.19.0", "@codemirror/state": "^0.19.7",
"@codemirror/stream-parser": "^0.19.0", "@codemirror/stream-parser": "^0.19.5",
"@codemirror/text": "^0.19.0", "@codemirror/text": "^0.19.0",
"@codemirror/tooltip": "^0.19.0", "@codemirror/tooltip": "^0.19.0",
"@codemirror/view": "^0.19.0", "@codemirror/view": "^0.19.42",
"@hoppscotch/codemirror-lang-graphql": "workspace:^0.1.0", "@hoppscotch/codemirror-lang-graphql": "workspace:^0.1.0",
"@hoppscotch/data": "workspace:^0.3.0", "@hoppscotch/data": "workspace:^0.4.0",
"@hoppscotch/js-sandbox": "workspace:^1.0.0", "@hoppscotch/js-sandbox": "workspace:^1.0.0",
"@nuxtjs/axios": "^5.13.6", "@nuxtjs/axios": "^5.13.6",
"@nuxtjs/composition-api": "^0.31.0", "@nuxtjs/composition-api": "^0.31.0",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@hoppscotch/data", "name": "@hoppscotch/data",
"version": "0.3.0", "version": "0.4.0",
"description": "Data Types, Validations and Migrations for Hoppscotch Public Data Structures", "description": "Data Types, Validations and Migrations for Hoppscotch Public Data Structures",
"main": "dist/index.js", "main": "dist/index.js",
"module": "true", "module": "true",
@@ -14,7 +14,10 @@
"exports": { "exports": {
".": "./dist/index.js", ".": "./dist/index.js",
"./graphql": "./dist/graphql/index.js", "./graphql": "./dist/graphql/index.js",
"./rest": "./dist/rest/index.js" "./rest": "./dist/rest/index.js",
"./rawKeyValue": "./dist/rawKeyValue.js",
"./collection": "./dist/index.js",
"./environment": "./dist/environment.js"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@@ -31,6 +34,9 @@
"tsup": "^5.11.13" "tsup": "^5.11.13"
}, },
"dependencies": { "dependencies": {
"lodash": "^4.17.21" "fp-ts": "^2.11.8",
"io-ts": "^2.2.16",
"lodash": "^4.17.21",
"parser-ts": "^0.6.16"
} }
} }

View File

@@ -1,6 +1,13 @@
import * as E from "fp-ts/Either"
import { pipe } from "fp-ts/function" import { pipe } from "fp-ts/function"
import { Environment } from "~/newstore/environments" import * as E from "fp-ts/Either"
export type Environment = {
name: string
variables: {
key: string
value: string
}[]
}
const REGEX_ENV_VAR = /<<([^>]*)>>/g // "<<myVariable>>" const REGEX_ENV_VAR = /<<([^>]*)>>/g // "<<myVariable>>"

View File

@@ -1,3 +1,5 @@
export * from "./rest" export * from "./rest"
export * from "./graphql" export * from "./graphql"
export * from "./collection" export * from "./collection"
export * from "./rawKeyValue"
export * from "./environment"

View File

@@ -0,0 +1,187 @@
import { not } from "fp-ts/Predicate"
import { pipe, flow } from "fp-ts/function"
import * as Str from "fp-ts/string"
import * as RA from "fp-ts/ReadonlyArray"
import * as A from "fp-ts/Array"
import * as O from "fp-ts/Option"
import * as E from "fp-ts/Either"
import * as P from "parser-ts/Parser"
import * as S from "parser-ts/string"
import * as C from "parser-ts/char"
/**
 * A single parsed entry from a raw key-value text block.
 * `active` is false when the source line was commented out with `#`.
 */
export type RawKeyValueEntry = {
  key: string
  value: string
  active: boolean
}
/* Beginning of Parser Definitions */
const wsSurround = P.surroundedBy(S.spaces)
const stringArrayJoin = (sep: string) => (input: string[]) => input.join(sep)
// Parser that consumes characters up to (but not including) the first
// occurrence of any of the given chars, yielding them as one joined string.
const stringTakeUntilChars = (chars: C.Char[]) => pipe(
  P.takeUntil((c: C.Char) => chars.includes(c)),
  P.map(stringArrayJoin("")),
)

// Same as `stringTakeUntilChars`, but also consumes (and discards) the
// terminating character itself via the unconditional `P.sat`.
const stringTakeUntilCharsInclusive = flow(
  stringTakeUntilChars,
  P.chainFirst(() => P.sat(() => true)),
)
// Key: everything up to a ':' or end of line, trimmed.
const key = pipe(
  stringTakeUntilChars([":", "\n"]),
  P.map(Str.trim)
)

// Value: the rest of the line, trimmed.
const value = pipe(
  stringTakeUntilChars(["\n"]),
  P.map(Str.trim)
)

// Optional leading '#'; yields true when the '#' was present
// (`S.maybe` yields "" when absent, so non-empty means commented).
const commented = pipe(
  S.maybe(S.string("#")),
  P.map(not(Str.isEmpty))
)
// A full `key: value` line, with optional leading '#' (commented) marker.
// Produces { commented, key, value }.
const line = pipe(
  wsSurround(commented),
  P.bindTo("commented"),
  P.bind("key", () => wsSurround(key)),
  // the separating colon is required but its result is discarded
  P.chainFirst(() => C.char(":")),
  P.bind("value", () => value),
)
// A line that has no ':' separator: the whole line becomes the key
// (or, via the `P.either` fallback, the remaining input when the last
// line has no trailing newline — consumed up to eof).
// Yields `None` when the resulting key is empty, so blank lines can be
// filtered out by the caller.
const lineWithNoColon = pipe(
  wsSurround(commented),
  P.bindTo("commented"),
  P.bind("key", () => P.either(
    stringTakeUntilCharsInclusive(["\n"]),
    () => pipe(
      P.manyTill(P.sat((_: string) => true), P.eof()),
      P.map(flow(
        RA.toArray,
        stringArrayJoin("")
      ))
    )
  )),
  P.map(flow(
    O.fromPredicate(({ key }) => !Str.isEmpty(key))
  ))
)
// Strict whole-file parser: every line must be a valid `key: value`
// entry (see `line`) and the entire input must be consumed.
// (The original wrapped this in a single-argument `pipe(...)`, which is
// a no-op in fp-ts — removed.)
const file = P.manyTill(wsSurround(line), P.eof())
/**
 * Lenient whole-file parser: lines without a `key: value` colon separator
 * are tolerated and parsed as a key with an empty value instead of failing
 * the whole parse.
 */
const tolerantFile = pipe(
  P.manyTill(
    P.either(
      pipe(line, P.map(O.some)),
      () =>
        pipe(
          lineWithNoColon,
          // A colon-less line parses to just a key; give it an empty value
          P.map(O.map((a) => ({ ...a, value: "" })))
        )
    ),
    P.eof()
  ),
  // Drop the `None` entries (blank lines) and unwrap the rest.
  // `RA.compact` replaces the original hand-rolled
  // `filterMap(fromPredicate(isSome) -> map(.value))` equivalent.
  P.map(RA.compact)
)
/* End of Parser Definitions */
/**
 * Converts Raw Key Value Entries to the file string format,
 * one `key: value` line per entry; inactive entries are prefixed with `# `.
 * @param entries The entries array
 * @returns The entries in string format
 */
export const rawKeyValueEntriesToString = (entries: RawKeyValueEntry[]) =>
  entries
    .map((entry) =>
      entry.active
        ? `${entry.key}: ${entry.value}`
        : `# ${entry.key}: ${entry.value}`
    )
    .join("\n")
/**
 * Parses raw key value entries string to array
 * (uses the tolerant parser: colon-less lines become empty-value entries)
 * @param s The file string to parse from
 * @returns Either the parser fail result or the raw key value entries
 */
export const parseRawKeyValueEntriesE = (s: string) =>
  pipe(
    tolerantFile,
    S.run(s),
    E.bimap(
      // Left: normalize the parser-ts error into { message, expected, pos }
      (err) => ({
        message: `Expected ${err.expected.map((x) => `'${x}'`).join(", ")}`,
        expected: err.expected,
        pos: err.input.cursor,
      }),
      // Right: convert each parsed line into a RawKeyValueEntry
      (result) =>
        pipe(
          result.value,
          RA.map(
            (entry): RawKeyValueEntry => ({
              active: !entry.commented,
              key: entry.key,
              value: entry.value,
            })
          )
        )
    )
  )
/**
 * Less error tolerating version of `parseRawKeyValueEntriesE`:
 * uses the strict `file` parser, so a line without a colon fails the
 * whole parse instead of being coerced into an empty-value entry.
 * @param s The file string to parse from
 * @returns Either the parser fail result or the raw key value entries
 */
export const strictParseRawKeyValueEntriesE = (s: string) =>
  pipe(
    file,
    S.run(s),
    // Normalize the parser-ts failure into { message, expected, pos }
    E.mapLeft((err) => ({
      message: `Expected ${err.expected.map((x) => `'${x}'`).join(", ")}`,
      expected: err.expected,
      pos: err.input.cursor,
    })),
    E.map(
      ({ value }) => pipe(
        value,
        // `commented` lines are kept but flagged inactive
        RA.map(({ key, value, commented }) =>
          <RawKeyValueEntry>{
            active: !commented,
            key,
            value
          }
        )
      )
    )
  )
/**
* Kept for legacy code compatibility, parses raw key value entries.
* If failed, it returns an empty array
* @deprecated Use `parseRawKeyValueEntriesE` instead
*/
export const parseRawKeyValueEntries = flow(
parseRawKeyValueEntriesE,
E.map(RA.toArray),
E.getOrElse(() => [] as RawKeyValueEntry[])
)

643
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff