fix: spacing and corrections for commented lines in raw key value
@@ -270,7 +270,16 @@
 import { Ref, computed, reactive, ref, watch } from "@nuxtjs/composition-api"
 import clone from "lodash/clone"
 import * as gql from "graphql"
-import { GQLHeader, makeGQLRequest } from "@hoppscotch/data"
+import * as E from "fp-ts/Either"
+import * as RA from "fp-ts/ReadonlyArray"
+import { pipe } from "fp-ts/function"
+import {
+  GQLHeader,
+  makeGQLRequest,
+  rawKeyValueEntriesToString,
+  parseRawKeyValueEntriesE,
+  RawKeyValueEntry,
+} from "@hoppscotch/data"
 import isEqual from "lodash/isEqual"
 import { copyToClipboard } from "~/helpers/utils/clipboard"
 import {
@@ -384,14 +393,12 @@ watch(workingHeaders, (newWorkingHeaders) => {
 // Bulk Editor Syncing with Working Headers
 watch(bulkHeaders, () => {
   try {
-    const transformation = bulkHeaders.value
-      .split("\n")
-      .filter((x) => x.trim().length > 0 && x.includes(":"))
-      .map((item) => ({
-        key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
-        value: item.substring(item.indexOf(":") + 1).trimLeft(),
-        active: !item.trim().startsWith("#"),
-      }))
+    const transformation = pipe(
+      bulkHeaders.value,
+      parseRawKeyValueEntriesE,
+      E.map(RA.toArray),
+      E.getOrElse(() => [] as RawKeyValueEntry[])
+    )

     const filteredHeaders = workingHeaders.value.filter((x) => x.key !== "")

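
Note: the hand-rolled split/filter/map above had to strip the "#" marker and trim whitespace itself; the replacement funnels the bulk editor text through the shared parseRawKeyValueEntriesE parser so commented (inactive) lines and spacing are handled in one place. A minimal standalone sketch of the new pipeline, assuming RawKeyValueEntry has the { key, value, active } shape implied by the removed mapper (the sample values are illustrative only):

import * as E from "fp-ts/Either"
import * as RA from "fp-ts/ReadonlyArray"
import { pipe } from "fp-ts/function"
import { parseRawKeyValueEntriesE, RawKeyValueEntry } from "@hoppscotch/data"

// Bulk editor text: one "key: value" pair per line, "#" marks a disabled entry.
const bulkText = ["Authorization: Bearer abc123", "# X-Debug: true"].join("\n")

const entries: RawKeyValueEntry[] = pipe(
  bulkText,
  parseRawKeyValueEntriesE,                   // Either of a parse error or the entries
  E.map(RA.toArray),                          // readonly array -> mutable array
  E.getOrElse(() => [] as RawKeyValueEntry[]) // fall back to no entries on failure
)

// Expected result, mirroring what the removed mapper produced:
// [
//   { key: "Authorization", value: "Bearer abc123", active: true },
//   { key: "X-Debug", value: "true", active: false },
// ]
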
@@ -418,11 +425,7 @@ watch(workingHeaders, (newHeadersList) => {
     const filteredHeaders = newHeadersList.filter((x) => x.key !== "")

     if (!isEqual(currentBulkHeaders, filteredHeaders)) {
-      bulkHeaders.value = filteredHeaders
-        .map((header) => {
-          return `${header.active ? "" : "#"}${header.key}: ${header.value}`
-        })
-        .join("\n")
+      bulkHeaders.value = rawKeyValueEntriesToString(filteredHeaders)
     }
   } catch (e) {
     toast.error(`${t("error.something_went_wrong")}`)
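
Note: the reverse direction also moves to the shared rawKeyValueEntriesToString helper, so the "#key: value" format for disabled entries lives in one place instead of being re-derived per component. The removed template-string join documents that format; an illustrative stand-in (not the library's actual implementation) would be:

import { RawKeyValueEntry } from "@hoppscotch/data"

// Mirror of the removed .map(...).join("\n") logic: inactive entries are
// written back with a leading "#" so they round-trip as commented lines.
const entriesToBulkText = (entries: RawKeyValueEntry[]): string =>
  entries
    .map((entry) => `${entry.active ? "" : "#"}${entry.key}: ${entry.value}`)
    .join("\n")

// entriesToBulkText([{ key: "X-Debug", value: "true", active: false }])
// => "#X-Debug: true", which parseRawKeyValueEntriesE reads back as an inactive entry.
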
@@ -143,7 +143,15 @@
 import { Ref, ref, watch } from "@nuxtjs/composition-api"
 import isEqual from "lodash/isEqual"
 import clone from "lodash/clone"
-import { HoppRESTHeader } from "@hoppscotch/data"
+import {
+  HoppRESTHeader,
+  parseRawKeyValueEntriesE,
+  rawKeyValueEntriesToString,
+  RawKeyValueEntry,
+} from "@hoppscotch/data"
+import { pipe } from "fp-ts/function"
+import * as RA from "fp-ts/ReadonlyArray"
+import * as E from "fp-ts/Either"
 import { useCodemirror } from "~/helpers/editor/codemirror"
 import { restHeaders$, setRESTHeaders } from "~/newstore/RESTSession"
 import { commonHeaders } from "~/helpers/headers"
@@ -223,14 +231,12 @@ watch(workingHeaders, (newWorkingHeaders) => {
 // Bulk Editor Syncing with Working Headers
 watch(bulkHeaders, () => {
   try {
-    const transformation = bulkHeaders.value
-      .split("\n")
-      .filter((x) => x.trim().length > 0 && x.includes(":"))
-      .map((item) => ({
-        key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
-        value: item.substring(item.indexOf(":") + 1).trimLeft(),
-        active: !item.trim().startsWith("#"),
-      }))
+    const transformation = pipe(
+      bulkHeaders.value,
+      parseRawKeyValueEntriesE,
+      E.map(RA.toArray),
+      E.getOrElse(() => [] as RawKeyValueEntry[])
+    )

     const filteredHeaders = workingHeaders.value.filter((x) => x.key !== "")

@@ -257,11 +263,7 @@ watch(workingHeaders, (newHeadersList) => {
     const filteredHeaders = newHeadersList.filter((x) => x.key !== "")

     if (!isEqual(currentBulkHeaders, filteredHeaders)) {
-      bulkHeaders.value = filteredHeaders
-        .map((header) => {
-          return `${header.active ? "" : "#"}${header.key}: ${header.value}`
-        })
-        .join("\n")
+      bulkHeaders.value = rawKeyValueEntriesToString(filteredHeaders)
     }
   } catch (e) {
     toast.error(`${t("error.something_went_wrong")}`)
@@ -129,7 +129,15 @@

 <script setup lang="ts">
 import { ref, watch } from "@nuxtjs/composition-api"
-import { HoppRESTParam } from "@hoppscotch/data"
+import { pipe } from "fp-ts/function"
+import * as RA from "fp-ts/ReadonlyArray"
+import * as E from "fp-ts/Either"
+import {
+  HoppRESTParam,
+  parseRawKeyValueEntriesE,
+  rawKeyValueEntriesToString,
+  RawKeyValueEntry,
+} from "@hoppscotch/data"
 import isEqual from "lodash/isEqual"
 import clone from "lodash/clone"
 import linter from "~/helpers/editor/linting/rawKeyValue"
@@ -207,14 +215,12 @@ watch(workingParams, (newWorkingParams) => {
 // Bulk Editor Syncing with Working Params
 watch(bulkParams, () => {
   try {
-    const transformation = bulkParams.value
-      .split("\n")
-      .filter((x) => x.trim().length > 0 && x.includes(":"))
-      .map((item) => ({
-        key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
-        value: item.substring(item.indexOf(":") + 1).trimLeft(),
-        active: !item.trim().startsWith("#"),
-      }))
+    const transformation = pipe(
+      bulkParams.value,
+      parseRawKeyValueEntriesE,
+      E.map(RA.toArray),
+      E.getOrElse(() => [] as RawKeyValueEntry[])
+    )

     const filteredParams = workingParams.value.filter((x) => x.key !== "")

@@ -241,11 +247,7 @@ watch(workingParams, (newParamsList) => {
     const filteredParams = newParamsList.filter((x) => x.key !== "")

     if (!isEqual(currentBulkParams, filteredParams)) {
-      bulkParams.value = filteredParams
-        .map((param) => {
-          return `${param.active ? "" : "#"}${param.key}: ${param.value}`
-        })
-        .join("\n")
+      bulkParams.value = rawKeyValueEntriesToString(filteredParams)
     }
   } catch (e) {
     toast.error(`${t("error.something_went_wrong")}`)
@@ -134,9 +134,13 @@ import clone from "lodash/clone"
 import {
   HoppRESTReqBody,
   parseRawKeyValueEntries,
+  parseRawKeyValueEntriesE,
   rawKeyValueEntriesToString,
   RawKeyValueEntry,
 } from "@hoppscotch/data"
+import { pipe } from "fp-ts/function"
+import * as RA from "fp-ts/ReadonlyArray"
+import * as E from "fp-ts/Either"
 import { useCodemirror } from "~/helpers/editor/codemirror"
 import linter from "~/helpers/editor/linting/rawKeyValue"
 import { useRESTRequestBody } from "~/newstore/RESTSession"
@@ -227,14 +231,12 @@ watch(workingUrlEncodedParams, (newWorkingUrlEncodedParams) => {
 // Bulk Editor Syncing with Working urlEncodedParams
 watch(bulkUrlEncodedParams, () => {
   try {
-    const transformation = bulkUrlEncodedParams.value
-      .split("\n")
-      .filter((x) => x.trim().length > 0 && x.includes(":"))
-      .map((item) => ({
-        key: item.substring(0, item.indexOf(":")).trimLeft().replace(/^#/, ""),
-        value: item.substring(item.indexOf(":") + 1).trimLeft(),
-        active: !item.trim().startsWith("#"),
-      }))
+    const transformation = pipe(
+      bulkUrlEncodedParams.value,
+      parseRawKeyValueEntriesE,
+      E.map(RA.toArray),
+      E.getOrElse(() => [] as RawKeyValueEntry[])
+    )

     const filteredUrlEncodedParams = workingUrlEncodedParams.value.filter(
       (x) => x.key !== ""
@@ -266,11 +268,9 @@ watch(workingUrlEncodedParams, (newurlEncodedParamList) => {
     )

     if (!isEqual(currentBulkUrlEncodedParams, filteredUrlEncodedParams)) {
-      bulkUrlEncodedParams.value = filteredUrlEncodedParams
-        .map((param) => {
-          return `${param.active ? "" : "#"}${param.key}: ${param.value}`
-        })
-        .join("\n")
+      bulkUrlEncodedParams.value = rawKeyValueEntriesToString(
+        filteredUrlEncodedParams
+      )
     }
   } catch (e) {
     toast.error(`${t("error.something_went_wrong")}`)
@@ -6,7 +6,7 @@ import { LinterDefinition, LinterResult } from "./linter"
 const linter: LinterDefinition = (text) => {
   const result = strictParseRawKeyValueEntriesE(text)
   if (E.isLeft(result)) {
-    const pos = convertIndexToLineCh(text, result.left.pos + 1)
+    const pos = convertIndexToLineCh(text, result.left.pos)

     return Promise.resolve([
       <LinterResult>{
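
Note: the only change in the linter is dropping the "+ 1" when converting the parser's failure index into an editor position, presumably because result.left.pos already indexes the offending character and the extra offset shifted the lint marker one character too far. A rough sketch of the kind of index-to-{ line, ch } conversion convertIndexToLineCh performs (the real helper lives elsewhere in the codebase and may differ in detail; this version is an assumption for illustration):

const indexToLineCh = (text: string, index: number) => {
  const before = text.slice(0, index)
  const line = (before.match(/\n/g) ?? []).length   // zero-based line number
  const ch = index - (before.lastIndexOf("\n") + 1)  // column within that line
  return { line, ch }
}

// For "a: 1\n#bad" with a failure at index 5 (the "#"), this yields { line: 1, ch: 0 };
// passing index + 1 instead reports { line: 1, ch: 1 }, one character off.
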
@@ -67,7 +67,7 @@ const lineWithNoColon = pipe(
 )

 const file = pipe(
-  P.manyTill(line, P.eof()),
+  P.manyTill(wsSurround(line), P.eof()),
 )

 /**
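
Note: wrapping each line parser in wsSurround is what makes the raw key-value parser tolerant of the extra spacing mentioned in the commit message; the combinator itself is defined outside this hunk. A plausible parser-ts sketch of what such a helper could look like (an assumption about its shape, not the actual definition):

import * as P from "parser-ts/Parser"
import * as S from "parser-ts/string"
import { pipe } from "fp-ts/function"

// Run a parser and discard any whitespace (spaces, tabs, newlines) around the match.
const wsSurround = <A>(parser: P.Parser<string, A>): P.Parser<string, A> =>
  pipe(
    S.spaces,            // consume leading whitespace
    P.apSecond(parser),  // keep only the wrapped parser's result
    P.apFirst(S.spaces)  // consume trailing whitespace
  )
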