Merge branch 'main' of github.com:hoppscotch/hoppscotch
packages/hoppscotch-selfhost-desktop/.envrc (new file, 3 lines)
@@ -0,0 +1,3 @@
source_url "https://raw.githubusercontent.com/cachix/devenv/95f329d49a8a5289d31e0982652f7058a189bfca/direnvrc" "sha256-d+8cBpDfDBj41inrADaJt+bDWhOktwslgoP5YiGJ1v0="

use devenv
@@ -28,3 +28,12 @@ dist-ssr

 # Backend Code generation
 src/api/generated
+
+# Devenv
+.devenv*
+devenv.local.nix
+
+# direnv
+.direnv
+
+# pre-commit
+.pre-commit-config.yaml
packages/hoppscotch-selfhost-desktop/devenv.lock (new file, 153 lines)
@@ -0,0 +1,153 @@
{
  "nodes": {
    "devenv": {
      "locked": {
        "dir": "src/modules",
        "lastModified": 1729681848,
        "owner": "cachix",
        "repo": "devenv",
        "rev": "2634c4c9e9226a3fb54550ad4115df1992d502c5",
        "type": "github"
      },
      "original": {
        "dir": "src/modules",
        "owner": "cachix",
        "repo": "devenv",
        "type": "github"
      }
    },
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1729751566,
        "owner": "nix-community",
        "repo": "fenix",
        "rev": "f32a2d484091a6dc98220b1f4a2c2d60b7c97c64",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "fenix",
        "type": "github"
      }
    },
    "flake-compat": {
      "flake": false,
      "locked": {
        "lastModified": 1696426674,
        "owner": "edolstra",
        "repo": "flake-compat",
        "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
        "type": "github"
      },
      "original": {
        "owner": "edolstra",
        "repo": "flake-compat",
        "type": "github"
      }
    },
    "gitignore": {
      "inputs": {
        "nixpkgs": [
          "pre-commit-hooks",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1709087332,
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
        "type": "github"
      },
      "original": {
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1729690727,
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "be79af5ec63facf6c7709094db72b253c34e1ac2",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixpkgs-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs-stable": {
      "locked": {
        "lastModified": 1729449015,
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "89172919243df199fe237ba0f776c3e3e3d72367",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-24.05",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "pre-commit-hooks": {
      "inputs": {
        "flake-compat": "flake-compat",
        "gitignore": "gitignore",
        "nixpkgs": [
          "nixpkgs"
        ],
        "nixpkgs-stable": "nixpkgs-stable"
      },
      "locked": {
        "lastModified": 1729104314,
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "rev": "3c3e88f0f544d6bb54329832616af7eb971b6be6",
        "type": "github"
      },
      "original": {
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "devenv": "devenv",
        "fenix": "fenix",
        "nixpkgs": "nixpkgs",
        "pre-commit-hooks": "pre-commit-hooks"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1729715509,
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "40492e15d49b89cf409e2c5536444131fac49429",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
packages/hoppscotch-selfhost-desktop/devenv.nix (new file, 92 lines)
@@ -0,0 +1,92 @@
{ pkgs, lib, config, inputs, ... }:

{
  # https://devenv.sh/packages/
  packages = with pkgs; [
    git
    postgresql_16
    # BE and Tauri stuff
    libsoup
    webkitgtk_4_0
    # FE and Node stuff
    nodejs_22
    nodePackages_latest.typescript-language-server
    nodePackages_latest.vls
    nodePackages_latest.prisma
    prisma-engines
    # CI
    act
    # Cargo
    cargo-edit
  ];

  # https://devenv.sh/basics/
  env = {
    APP_GREET = "Hoppscotch";
    # NOTE: Setting these `PRISMA_*` environment variables fixes
    # Error: Failed to fetch sha256 checksum at https://binaries.prisma.sh/all_commits/<hash>/linux-nixos/libquery_engine.so.node.gz.sha256 - 404 Not Found
    # See: https://github.com/prisma/prisma/discussions/3120
    PRISMA_QUERY_ENGINE_LIBRARY = "${pkgs.prisma-engines}/lib/libquery_engine.node";
    PRISMA_QUERY_ENGINE_BINARY = "${pkgs.prisma-engines}/bin/query-engine";
    PRISMA_SCHEMA_ENGINE_BINARY = "${pkgs.prisma-engines}/bin/schema-engine";
  };

  # https://devenv.sh/scripts/
  scripts = {
    hello.exec = "echo hello from $APP_GREET";
    e.exec = "emacs";
  };

  enterShell = ''
    git --version
  '';

  # https://devenv.sh/tests/
  enterTest = ''
    echo "Running tests"
  '';

  # https://devenv.sh/integrations/dotenv/
  dotenv.enable = true;

  # https://devenv.sh/languages/
  languages = {
    typescript.enable = true;

    javascript = {
      enable = true;
      pnpm = {
        enable = true;
      };
      npm = {
        enable = true;
      };
    };

    rust = {
      enable = true;
      channel = "nightly";
      components = [
        "rustc"
        "cargo"
        "clippy"
        "rustfmt"
        "rust-analyzer"
        "llvm-tools-preview"
        "rust-src"
        "rustc-codegen-cranelift-preview"
      ];
    };
  };

  # https://devenv.sh/pre-commit-hooks/
  # pre-commit.hooks.shellcheck.enable = true;

  # https://devenv.sh/processes/
  # processes.ping.exec = "ping example.com";

  # See full reference at https://devenv.sh/reference/options/
}
packages/hoppscotch-selfhost-desktop/devenv.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
inputs:
  # For NodeJS-22 and above
  nixpkgs:
    url: github:NixOS/nixpkgs/nixpkgs-unstable
  # nixpkgs:
  #   url: github:cachix/devenv-nixpkgs/rolling
  fenix:
    url: github:nix-community/fenix
    inputs:
      nixpkgs:
        follows: nixpkgs

# If you're using non-OSS software, you can set allowUnfree to true.
allowUnfree: true

# If you're willing to use a package that's vulnerable
# permittedInsecurePackages:
#  - "openssl-1.1.1w"

# If you have more than one devenv you can merge them
#imports:
#  - ./backend
@@ -1,7 +1,7 @@
 {
   "name": "@hoppscotch/selfhost-desktop",
   "private": true,
-  "version": "2024.7.0",
+  "version": "2024.11.0",
   "type": "module",
   "scripts": {
     "dev:vite": "vite",
@@ -22,7 +22,7 @@
     "@tauri-apps/api": "1.5.1",
     "@tauri-apps/cli": "1.5.6",
     "@vueuse/core": "10.5.0",
-    "axios": "0.21.4",
+    "axios": "1.7.5",
     "buffer": "6.0.3",
     "dioc": "3.0.2",
     "environments.api": "link:@platform/environments/environments.api",
@@ -36,7 +36,7 @@
     "tauri": "link:@tauri-apps/api/tauri",
     "tauri-plugin-store-api": "0.0.0",
     "util": "0.12.5",
-    "verzod": "0.2.2",
+    "verzod": "0.2.3",
     "vue": "3.3.9",
     "workbox-window": "6.6.0",
     "@urql/core": "^4.1.1",
packages/hoppscotch-selfhost-desktop/src-tauri/Cargo.lock (generated, 1001 lines changed): file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 [package]
 name = "hoppscotch-desktop"
-version = "24.7.0"
+version = "24.11.0"
 description = "A Tauri App"
 authors = ["you"]
 license = ""
@@ -10,10 +10,10 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [build-dependencies]
-tauri-build = { version = "1.5.0", features = [] }
+tauri-build = { version = "1.5.5", features = [] }

 [dependencies]
-tauri = { version = "1.5.3", features = [
+tauri = { version = "1.8.1", features = [
@@ -21,21 +21,27 @@ tauri = { version = "1.5.3", features = [
   "shell-open",
   "window-start-dragging",
   "http-multipart",
   "devtools"
 ] }
 tauri-plugin-store = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
 tauri-plugin-deep-link = { git = "https://github.com/FabianLars/tauri-plugin-deep-link", branch = "main" }
 tauri-plugin-websocket = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
-tauri-plugin-window-state = "0.1.0"
-reqwest = { version = "0.11.22", features = ["native-tls"] }
-serde_json = "1.0.108"
-url = "2.5.0"
-hex_color = "3.0.0"
-time = "0.3.36"
-serde = { version = "1.0.203", features = ["derive"] }
+tauri-plugin-window-state = "0.1.1"
+hoppscotch-relay = { path = "../../hoppscotch-relay" }
+serde_json = "1.0.128"
+url = "2.5.2"
+hex_color = "3.0.0"
+serde = { version = "1.0.210", features = ["derive"] }
 dashmap = "5.5.3"
-tokio = { version = "1.38.0", features = ["macros"] }
-tokio-util = "0.7.11"
+tokio = { version = "1.40.0", features = ["macros"] }
+tokio-util = "0.7.12"
+log = "0.4.22"
+thiserror = "1.0.64"
+
+[dev-dependencies]
+tauri = { version = "1.8.1", features = ["devtools", "test"] }
+env_logger = "0.11.5"

 [target.'cfg(target_os = "macos")'.dependencies]
 cocoa = "0.25.0"
@@ -1,320 +1,90 @@
 use dashmap::DashMap;
-use reqwest::{header::{HeaderMap, HeaderName, HeaderValue}, Certificate, ClientBuilder, Identity};
-use serde::{Deserialize, Serialize};
-use tauri::{plugin::{Builder, TauriPlugin}, Manager, Runtime, State};
+use hoppscotch_relay::{RequestWithMetadata, ResponseWithMetadata};
+use serde::Serialize;
+use tauri::{
+    plugin::{Builder, TauriPlugin},
+    Manager, Runtime, State,
+};
+use thiserror::Error;
 use tokio_util::sync::CancellationToken;

 #[derive(Default)]
 struct InterceptorState {
-    cancellation_tokens: DashMap<usize, CancellationToken>
+    cancellation_tokens: DashMap<usize, CancellationToken>,
 }

-#[derive(Debug, Serialize, Deserialize)]
-struct KeyValuePair {
-    key: String,
-    value: String
-}
-
-#[derive(Debug, Deserialize)]
-enum FormDataValue {
-    Text(String),
-    File {
-        filename: String,
-        data: Vec<u8>,
-        mime: String
-    }
-}
-
-#[derive(Debug, Deserialize)]
-struct FormDataEntry {
-    key: String,
-    value: FormDataValue
-}
-
-#[derive(Debug, Deserialize)]
-enum BodyDef {
-    Text(String),
-    URLEncoded(Vec<KeyValuePair>),
-    FormData(Vec<FormDataEntry>)
-}
-
-#[derive(Debug, Deserialize)]
-enum ClientCertDef {
-    PEMCert {
-        certificate_pem: Vec<u8>,
-        key_pem: Vec<u8>
-    },
-
-    PFXCert {
-        certificate_pfx: Vec<u8>,
-        password: String
-    }
-}
-
-#[derive(Debug, Deserialize)]
-struct RequestDef {
-    req_id: usize,
-
-    method: String,
-    endpoint: String,
-
-    parameters: Vec<KeyValuePair>,
-    headers: Vec<KeyValuePair>,
-
-    body: Option<BodyDef>,
-
-    validate_certs: bool,
-    root_cert_bundle_files: Vec<Vec<u8>>,
-    client_cert: Option<ClientCertDef>
-}
-
-fn get_identity_from_req(req: &RequestDef) -> Result<Option<Identity>, reqwest::Error> {
-    let result = match &req.client_cert {
-        None => return Ok(None),
-        Some(ClientCertDef::PEMCert { certificate_pem, key_pem }) => Identity::from_pkcs8_pem(&certificate_pem, &key_pem),
-        Some(ClientCertDef::PFXCert { certificate_pfx, password }) => Identity::from_pkcs12_der(&certificate_pfx, &password)
-    };
-
-    Ok(Some(result?))
-}
-
-fn parse_root_certs(req: &RequestDef) -> Result<Vec<Certificate>, reqwest::Error> {
-    let mut result = vec![];
-
-    for cert_bundle_file in &req.root_cert_bundle_files {
-        let mut certs = Certificate::from_pem_bundle(&cert_bundle_file)?;
-        result.append(&mut certs);
-    }
-
-    Ok(result)
-}
-
-enum ReqBodyAction {
-    Body(reqwest::Body),
-    UrlEncodedForm(Vec<(String, String)>),
-    MultipartForm(reqwest::multipart::Form)
-}
-
-fn convert_bodydef_to_req_action(req: &RequestDef) -> Option<ReqBodyAction> {
-    match &req.body {
-        None => None,
-        Some(BodyDef::Text(text)) => Some(ReqBodyAction::Body(text.clone().into())),
-        Some(BodyDef::URLEncoded(entries)) =>
-            Some(
-                ReqBodyAction::UrlEncodedForm(
-                    entries.iter()
-                        .map(|KeyValuePair { key, value }| (key.clone(), value.clone()))
-                        .collect()
-                )
-            ),
-        Some(BodyDef::FormData(entries)) => {
-            let mut form = reqwest::multipart::Form::new();
-
-            for entry in entries {
-                form = match &entry.value {
-                    FormDataValue::Text(value) => form.text(entry.key.clone(), value.clone()),
-                    FormDataValue::File { filename, data, mime } =>
-                        form.part(
-                            entry.key.clone(),
-                            reqwest::multipart::Part::bytes(data.clone())
-                                .file_name(filename.clone())
-                                .mime_str(mime.as_str()).expect("Error while setting File enum")
-                        ),
-                }
-            }
-
-            Some(ReqBodyAction::MultipartForm(form))
-        }
-    }
-}
-
-#[derive(Serialize)]
-struct RunRequestResponse {
-    status: u16,
-    status_text: String,
-    headers: Vec<KeyValuePair>,
-    data: Vec<u8>,
-
-    time_start_ms: u128,
-    time_end_ms: u128
-}
-
-#[derive(Serialize)]
-enum RunRequestError {
-    RequestCancelled,
-    ClientCertError,
-    RootCertError,
-    InvalidMethod,
-    InvalidUrl,
-    InvalidHeaders,
-    RequestRunError(String)
-}
-
-async fn execute_request(req_builder: reqwest::RequestBuilder) -> Result<RunRequestResponse, RunRequestError> {
-    let start_time_ms = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH)
-        .unwrap()
-        .as_millis();
-
-    let response = req_builder.send()
-        .await
-        .map_err(|err| RunRequestError::RequestRunError(err.to_string()))?;
-
-    // We hold on to these values because we lose ownership of response
-    // when we read the body
-    let res_status = response.status();
-    let res_headers = response.headers().clone();
-
-    let res_body_bytes = response.bytes()
-        .await
-        .map_err(|err| RunRequestError::RequestRunError(err.to_string()))?;
-
-    // Reqwest resolves the send before the whole response is loaded; to keep
-    // the timing correct, we wait for the body as well.
-    let end_time_ms = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH)
-        .unwrap()
-        .as_millis();
-
-    let response_status = res_status.as_u16();
-    let response_status_text = res_status
-        .canonical_reason()
-        .unwrap_or("Unknown Status")
-        .to_owned();
-
-    let response_headers = res_headers
-        .iter()
-        .map(|(key, value)|
-            KeyValuePair {
-                key: key.as_str().to_owned(),
-                value: value.to_str().unwrap_or("").to_owned()
-            }
-        )
-        .collect();
-
-    Ok(
-        RunRequestResponse {
-            status: response_status,
-            status_text: response_status_text,
-            headers: response_headers,
-            data: res_body_bytes.into(),
-            time_start_ms: start_time_ms,
-            time_end_ms: end_time_ms
-        }
-    )
-}
+#[derive(Debug, Serialize, Error)]
+pub enum RunRequestError {
+    #[error("Request cancelled")]
+    RequestCancelled,
+    #[error("Internal server error")]
+    InternalServerError,
+    #[error("Relay error: {0}")]
+    Relay(#[from] hoppscotch_relay::RelayError),
+}

 #[tauri::command]
-async fn run_request(req: RequestDef, state: State<'_, InterceptorState>) -> Result<RunRequestResponse, RunRequestError> {
-    let method = reqwest::Method::from_bytes(req.method.as_bytes())
-        .map_err(|_| RunRequestError::InvalidMethod)?;
-
-    let endpoint_url = reqwest::Url::parse(&req.endpoint)
-        .map_err(|_| RunRequestError::InvalidUrl)?;
-
-    let headers = req.headers
-        .iter()
-        .map(|KeyValuePair { key, value }|
-            Ok(
-                (
-                    key.parse::<HeaderName>().map_err(|_| ())?,
-                    value.parse::<HeaderValue>().map_err(|_| ())?
-                )
-            )
-        )
-        .collect::<Result<HeaderMap, ()>>()
-        .map_err(|_| RunRequestError::InvalidHeaders)?;
-
-    let body_action = convert_bodydef_to_req_action(&req);
-
-    let client_identity = get_identity_from_req(&req)
-        .map_err(|_| RunRequestError::ClientCertError)?;
-
-    let root_certs = parse_root_certs(&req)
-        .map_err(|_| RunRequestError::RootCertError)?;
-
-    let mut client_builder = ClientBuilder::new()
-        .danger_accept_invalid_certs(!req.validate_certs);
-
-    // NOTE: Root Certificates are not currently implemented in the Hoppscotch UI.
-    // This is because the current mechanism doesn't allow v1 X.509 certificates
-    // to be accepted. Reqwest supports `native-tls` and `rustls`.
-    // `native-tls` should support v1 X.509 on Linux [OpenSSL] (and hopefully on Win [SChannel]), but on
-    // macOS its Security Framework blocks certificates pretty harshly and rejects v1.
-    // `rustls` doesn't allow v1 X.509 either, as documented here: https://github.com/rustls/webpki/issues/29
-    // We will fully introduce the feature when the dilemma is solved (or demand is voiced); until
-    // then, disabling SSL verification should yield the same results.
-    for root_cert in root_certs {
-        client_builder = client_builder.add_root_certificate(root_cert);
-    }
-
-    if let Some(identity) = client_identity {
-        client_builder = client_builder.identity(identity);
-    }
-
-    let client = client_builder.build()
-        .expect("TLS Backend couldn't be initialized");
-
-    let mut req_builder = client.request(method, endpoint_url)
-        .query(
-            &req.parameters
-                .iter()
-                .map(|KeyValuePair { key, value }| (key, value))
-                .collect::<Vec<_>>()
-        )
-        .headers(headers);
-
-    req_builder = match body_action {
-        None => req_builder,
-        Some(ReqBodyAction::Body(body)) => req_builder.body(body),
-        Some(ReqBodyAction::UrlEncodedForm(entries)) => req_builder.form(&entries),
-        Some(ReqBodyAction::MultipartForm(form)) => req_builder.multipart(form)
-    };
-
-    let cancel_token = CancellationToken::new();
-
-    // NOTE: This will drop the reference to an existing cancellation token
-    // if you send a request with the same request id as an existing one,
-    // thereby dropping any means to cancel a running operation with the old token.
-    // This is done because, on the FE side, we may lose cancel token info upon reloads,
-    // and this allows us to work around that.
-    state.cancellation_tokens.insert(req.req_id, cancel_token.clone());
-
-    // Races between cancellation and request execution
-    let result = tokio::select! {
-        _ = cancel_token.cancelled() => { None },
-        result = execute_request(req_builder) => {
-            // Remove cancellation token since the request has now completed
-            state.cancellation_tokens.remove(&req.req_id);
-
-            Some(result)
-        }
-    };
-
-    result
-        .unwrap_or(Err(RunRequestError::RequestCancelled))
-}
+async fn run_request(
+    req: RequestWithMetadata,
+    state: State<'_, InterceptorState>,
+) -> Result<ResponseWithMetadata, RunRequestError> {
+    let req_id = req.req_id;
+    let cancel_token = CancellationToken::new();
+    // NOTE: This will drop the reference to an existing cancellation token
+    // if you send a request with the same request id as an existing one,
+    // thereby dropping any means to cancel a running operation with the old token.
+    // This is done because, on the FE side, we may lose cancel token info upon reloads,
+    // and this allows us to work around that.
+    state
+        .cancellation_tokens
+        .insert(req_id, cancel_token.clone());
+
+    let cancel_token_clone = cancel_token.clone();
+    // Executes the HTTP request in a blocking thread pool and handles cancellation.
+    //
+    // It:
+    // 1. Uses `spawn_blocking` to run the sync `run_request_task`
+    //    without blocking the main Tokio runtime.
+    // 2. Uses `select!` to concurrently wait for either
+    //    a. the task to complete,
+    //    b. or a cancellation signal.
+    //
+    // Why spawn_blocking?
+    // - `run_request_task` uses synchronous curl operations which would block
+    //   the async runtime if not run in a separate thread.
+    // - `spawn_blocking` moves this operation to a thread pool designed for
+    //   blocking tasks, so other async operations can continue unblocked.
+    let result = tokio::select! {
+        res = tokio::task::spawn_blocking(move || hoppscotch_relay::run_request_task(&req, cancel_token_clone)) => {
+            match res {
+                Ok(task_result) => Ok(task_result?),
+                Err(_) => Err(RunRequestError::InternalServerError),
+            }
+        },
+        _ = cancel_token.cancelled() => {
+            Err(RunRequestError::RequestCancelled)
+        }
+    };
+
+    state.cancellation_tokens.remove(&req_id);
+
+    result
+}

 #[tauri::command]
 fn cancel_request(req_id: usize, state: State<'_, InterceptorState>) {
     if let Some((_, cancel_token)) = state.cancellation_tokens.remove(&req_id) {
         cancel_token.cancel();
     }
 }

 pub fn init<R: Runtime>() -> TauriPlugin<R> {
     Builder::new("hopp_native_interceptor")
-        .invoke_handler(
-            tauri::generate_handler![
-                run_request,
-                cancel_request
-            ]
-        )
+        .invoke_handler(tauri::generate_handler![run_request, cancel_request])
         .setup(|app_handle| {
             app_handle.manage(InterceptorState::default());

             Ok(())
         })
         .build()
 }
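To make the new control flow easier to follow, here is a minimal, self-contained sketch of the cancellation pattern that `run_request` and `cancel_request` use. The relay call is replaced by a placeholder blocking task, and the names (`State`, `run`, `cancel`) are illustrative, not the real API; it assumes `tokio` (with the "full" feature set), `dashmap`, and `tokio-util` as dependencies:

```rust
use std::sync::Arc;
use std::time::Duration;

use dashmap::DashMap;
use tokio_util::sync::CancellationToken;

#[derive(Default)]
struct State {
    // Same shape as InterceptorState above: request id -> cancellation token.
    tokens: DashMap<usize, CancellationToken>,
}

async fn run(state: Arc<State>, req_id: usize) -> Result<String, &'static str> {
    let token = CancellationToken::new();
    // Inserting under an existing id drops the old token, which is exactly
    // the reload-recovery behavior the NOTE in run_request describes.
    state.tokens.insert(req_id, token.clone());

    let result = tokio::select! {
        // Blocking work runs on tokio's blocking thread pool, so the async
        // runtime stays free. In the real code this is
        // hoppscotch_relay::run_request_task, which also receives the token
        // so the task itself can stop early; here cancellation merely
        // abandons the result while the placeholder keeps sleeping.
        res = tokio::task::spawn_blocking(|| {
            std::thread::sleep(Duration::from_millis(50));
            "response".to_string()
        }) => res.map_err(|_| "worker panicked"),
        _ = token.cancelled() => Err("request cancelled"),
    };

    state.tokens.remove(&req_id);
    result
}

fn cancel(state: &State, req_id: usize) {
    if let Some((_, token)) = state.tokens.remove(&req_id) {
        token.cancel();
    }
}

#[tokio::main]
async fn main() {
    let state = Arc::new(State::default());

    // Cancel from a second task shortly after the request starts.
    let canceller = {
        let state = state.clone();
        tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(10)).await;
            cancel(&state, 1);
        })
    };

    println!("{:?}", run(state.clone(), 1).await); // likely Err("request cancelled")
    canceller.await.unwrap();
}
```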
@@ -0,0 +1 @@
pub(crate) mod startup;
@@ -0,0 +1,53 @@
/// Error handling module for startup-related operations.
///
/// This module defines custom error types and a result type used for the startup process of the app.
/// It essentially provides a way to handle and communicate errors
/// that may occur during the initialization and window management phases.
use serde::Serialize;
use thiserror::Error;

/// Represents errors related to window lookup failures.
///
/// Provides more specific information about which window could not be found.
///
/// Derives `Serialize` mainly for sending it over to the frontend for info/logging purposes.
#[derive(Debug, Error, Serialize)]
pub(crate) enum WindowNotFoundError {
    /// Indicates that the `main` window of the app could not be found.
    ///
    /// This typically occurs if there's a mismatch between the expected
    /// window labels and the actual windows created by the application.
    #[error("No window labeled 'main' found")]
    Main,
}

/// Represents errors that can occur during the startup process.
///
/// Derives `Serialize` mainly for sending it over to the frontend for info/logging purposes.
#[derive(Debug, Error, Serialize)]
pub(crate) enum StartupError {
    /// Represents errors related to window lookup failures.
    #[error("Window not found: {0}")]
    WindowNotFound(WindowNotFoundError),

    /// Represents a general error from the Tauri runtime.
    ///
    /// This variant is used for any errors originating from Tauri that don't
    /// fit into more specific categories.
    #[error("Tauri error: {0}")]
    Tauri(String),
}

/// Functions that are part of the startup process should return this result type.
/// This allows for consistent error handling and makes it clear that the function
/// is part of the startup flow.
///
/// ```
/// use your_crate::error::{StartupResult, StartupError};
///
/// fn some_startup_function() -> StartupResult<()> {
///     // Function implementation
///     Ok(())
/// }
/// ```
pub(crate) type StartupResult<T> = std::result::Result<T, StartupError>;
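Because both enums derive `Serialize` alongside thiserror's `Error`, the same value has two faces: a `Display` string for logs and a JSON shape for the frontend. A small sketch of what each looks like, using simplified stand-in variants and plain `serde_json`:

```rust
use serde::Serialize;
use thiserror::Error;

// Simplified stand-ins for the enums defined above.
#[derive(Debug, Error, Serialize)]
enum StartupError {
    #[error("No window labeled 'main' found")]
    WindowNotFound,
    #[error("Tauri error: {0}")]
    Tauri(String),
}

fn main() {
    let err = StartupError::Tauri("window handle lost".into());

    // The Display impl (from `#[error(...)]`) is what `error!` logs:
    println!("{}", err); // Tauri error: window handle lost

    // Serialize is what a #[tauri::command] returning Err(err) hands to
    // the frontend promise rejection (serde's externally tagged form):
    println!("{}", serde_json::to_string(&err).unwrap()); // {"Tauri":"window handle lost"}
}
```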
@@ -0,0 +1,186 @@
use log::{error, info};
use tauri::{Manager, Runtime, Window};

use super::error::{StartupError, StartupResult, WindowNotFoundError};

/// Shows the `main` labeled application window.
///
/// This function is designed to be called as a Tauri command.
///
/// # Arguments
///
/// * `window` - A `Window` instance representing the current window. This is automatically
///   provided by Tauri when the command is invoked.
///
/// # Returns
///
/// Returns a `StartupResult<()>`:
/// - `Ok(())` if the main window is shown successfully.
/// - `Err(StartupError)` containing an error message if any operation fails.
///
/// # Errors
///
/// This function will return an error if:
/// - The "main" window is not found.
/// - Showing the main window fails.
///
/// # Example
///
/// ```rust,no_run
/// #[tauri::command]
/// async fn invoke_interop_startup_init(window: tauri::Window) {
///     match interop_startup_init(window).await {
///         Ok(_) => println!("`main` window shown successfully"),
///         Err(e) => eprintln!("Error: {}", e),
///     }
/// }
/// ```
#[tauri::command]
pub async fn interop_startup_init<R: Runtime>(window: Window<R>) -> StartupResult<()> {
    let main_window = window.get_window("main").ok_or_else(|| {
        error!("No window labeled 'main' found");
        StartupError::WindowNotFound(WindowNotFoundError::Main)
    })?;

    main_window.show().map_err(|e| {
        error!("Failed to show `main` window: {}", e);
        StartupError::Tauri(format!("Failed to show `main` window: {}", e))
    })?;

    info!("`main` window shown successfully");

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use tauri::test::{assert_ipc_response, mock_builder, mock_context, noop_assets};
    use tauri::{InvokePayload, WindowBuilder, WindowUrl};

    fn create_app<R: tauri::Runtime>(builder: tauri::Builder<R>) -> tauri::App<R> {
        builder
            .invoke_handler(tauri::generate_handler![interop_startup_init])
            .build(mock_context(noop_assets()))
            .expect("failed to build mock app")
    }

    /// Test: Main window shown successfully in isolation
    ///
    /// Rationale:
    /// This test verifies the core functionality of `interop_startup_init`.
    /// A failure indicates a fundamental issue with the app's initialization process.
    ///
    /// Context:
    /// The "main" window is typically the primary interface,
    /// so ensuring it shows correctly is important.
    ///
    /// Key Points:
    /// - We use a mock Tauri application to isolate the window showing behavior.
    /// - The test focuses solely on the "main" window to verify the basic case works correctly.
    ///
    /// Assumptions:
    /// - The Tauri runtime is functioning correctly.
    /// - A window labeled "main" exists in the application.
    ///   For this see `tauri.conf.json`:
    ///   ```json
    ///   {
    ///     ...
    ///     "label": "main",
    ///     "title": "Hoppscotch",
    ///     ...
    ///   }
    ///   ```
    ///
    /// Implications of Failure:
    /// 1. The window labeling system is broken.
    /// 2. There's an issue with Tauri's window management.
    /// 3. The `interop_startup_init` function is not correctly implemented.
    #[tokio::test]
    async fn test_interop_startup_init_main_window_shown_successfully() {
        let app = create_app(mock_builder());

        let window = app.get_window("main").expect("`main` window not found");

        let result = interop_startup_init(window).await;

        assert!(result.is_ok(), "Expected Ok, but got {:?}", result);
    }

    /// Test: Main window found and shown amongst other windows
    ///
    /// Rationale:
    /// This test ensures `interop_startup_init` can correctly identify and show the main window
    /// in a more complex scenario with multiple windows.
    ///
    /// Context:
    /// As applications grow, they may introduce additional windows for various purposes. The ability
    /// to consistently identify and manipulate the main window is important for maintaining
    /// expected behavior.
    ///
    /// Key Points:
    /// - We create an additional "other" window to simulate another window.
    /// - The test verifies that the presence of other windows doesn't interfere with main window operations.
    ///
    /// Assumptions:
    /// - The window labeling system consistently identifies the "main" window regardless of other windows.
    /// - The order of window creation doesn't affect the ability to find the main window.
    ///
    /// Implications of Failure:
    /// 1. The window identification logic breaks with multiple windows.
    #[tokio::test]
    async fn test_interop_startup_init_main_window_found_amongst_others() {
        let app = create_app(mock_builder());

        let _ = WindowBuilder::new(&app, "other", WindowUrl::default())
            .build()
            .expect("Failed to create other window");

        let window = app.get_window("other").expect("`other` window not found");

        let result = interop_startup_init(window).await;

        assert!(result.is_ok(), "Expected `Ok(())`, but got {:?}", result);
    }

    /// Test: IPC invocation of interop startup init
    ///
    /// Rationale:
    /// This test makes sure that `interop_startup_init` can be correctly invoked through Tauri's IPC mechanism.
    /// It's important because it verifies the integration between the Rust backend and the frontend
    /// that would typically call this function.
    ///
    /// Context:
    /// This test simulates scenarios where operations are initiated from the frontend via IPC calls.
    ///
    /// Key Points:
    /// - We're testing the IPC invocation, not just the direct function call.
    /// - This verifies both the function's behavior and its correct registration with Tauri's IPC system.
    ///
    /// Assumptions:
    /// - The Tauri IPC system is functioning correctly.
    /// - The `interop_startup_init` function is properly registered as a Tauri command.
    ///
    /// Implications of Failure:
    /// 1. There's a mismatch between how the frontend tries to call the function and how it's implemented.
    /// 2. The Tauri command registration is incorrect.
    /// 3. The function isn't properly handling the IPC context.
    #[tokio::test]
    async fn test_ipc_interop_startup_init() {
        let app = create_app(mock_builder());

        let window = app.get_window("main").expect("main window not found");

        let payload = InvokePayload {
            cmd: "interop_startup_init".into(),
            tauri_module: None,
            callback: tauri::api::ipc::CallbackFn(0),
            error: tauri::api::ipc::CallbackFn(1),
            inner: json!(null),
            invoke_key: Some("__invoke-key__".to_string()),
        };

        assert_ipc_response(&window, payload, Ok(()));
    }
}
@@ -0,0 +1,7 @@
//! Startup management module.
//!
//! This module contains functionality related to managing the application's startup,
//! like controlling the visibility and lifecycle of the main application windows.

pub(crate) mod init;
pub(crate) mod error;
@@ -16,6 +16,7 @@ mod mac;
 mod win;

 mod interceptor;
+mod interop;

 use tauri::Manager;

@@ -24,7 +25,31 @@ fn main() {

     tauri::Builder::default()
         .plugin(tauri_plugin_websocket::init())
-        .plugin(tauri_plugin_window_state::Builder::default().build())
+        .invoke_handler(tauri::generate_handler![
+            interop::startup::init::interop_startup_init
+        ])
+        .plugin(
+            tauri_plugin_window_state::Builder::default()
+                .with_state_flags(
+                    // NOTE:
+                    // The app (window labeled "main") manages its visible state via `interop_startup_init`.
+                    // See `tauri.conf.json`:
+                    // ```json
+                    // {
+                    //   "label": "main",
+                    //   "title": "Hoppscotch",
+                    //   ...
+                    //   "visible": false, // This is the important part.
+                    //   ...
+                    // }
+                    // ```
+                    tauri_plugin_window_state::StateFlags::all()
+                        & !tauri_plugin_window_state::StateFlags::VISIBLE,
+                )
+                .build(),
+        )
         .plugin(tauri_plugin_store::Builder::default().build())
         .plugin(interceptor::init())
         .setup(|app| {
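The flag masking in that NOTE is plain bitflags arithmetic: start from `all()` and clear the one bit the app wants to own itself. A standalone sketch with a simplified stand-in for `StateFlags` (the real type lives in `tauri-plugin-window-state`; this assumes the `bitflags` 2.x crate):

```rust
use bitflags::bitflags;

bitflags! {
    // Simplified stand-in for tauri_plugin_window_state::StateFlags.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct StateFlags: u32 {
        const SIZE     = 1 << 0;
        const POSITION = 1 << 1;
        const VISIBLE  = 1 << 2;
    }
}

fn main() {
    // Restore everything except visibility, which `interop_startup_init`
    // manages after startup.
    let flags = StateFlags::all() & !StateFlags::VISIBLE;

    assert!(flags.contains(StateFlags::SIZE | StateFlags::POSITION));
    assert!(!flags.contains(StateFlags::VISIBLE));
    println!("{:?}", flags); // StateFlags(SIZE | POSITION)
}
```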
@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "Hoppscotch",
-    "version": "24.7.0"
+    "version": "24.11.0"
   },
   "tauri": {
     "allowlist": {
@@ -56,9 +56,11 @@
     },
     "windows": [
       {
+        "label": "main",
+        "title": "Hoppscotch",
+        "visible": false,
         "fullscreen": false,
         "resizable": true,
-        "title": "Hoppscotch",
         "width": 800,
         "height": 600,
         "fileDropEnabled": false
@@ -6,5 +6,6 @@ subscription UserCollectionCreated {
     id
     title
     type
+    data
   }
 }

@@ -0,0 +1,15 @@
subscription UserCollectionDuplicated {
  userCollectionDuplicated {
    id
    parentID
    title
    type
    data
    childCollections
    requests {
      id
      request
      collectionID
    }
  }
}
@@ -9,14 +9,12 @@
     </div>

     <div class="flex space-x-4">
+      <!--
       <HoppButtonSecondary
         :icon="IconLucideFileBadge"
         :label="'CA Certificates'"
         outline
         @click="showCACertificatesModal = true"
       />
+      -->
       <HoppButtonSecondary
         :icon="IconLucideFileKey"
         :label="'Client Certificates'"
@@ -25,31 +23,78 @@
       />
     </div>

+    <!--
     <ModalsNativeCACertificates
       :show="showCACertificatesModal"
       @hide-modal="showCACertificatesModal = false"
     />
+    -->
     <ModalsNativeClientCertificates
       :show="showClientCertificatesModal"
       @hide-modal="showClientCertificatesModal = false"
     />

+    <div class="pt-4 space-y-4">
+      <div class="flex items-center">
+        <HoppSmartToggle :on="allowProxy" @change="allowProxy = !allowProxy" />
+        Use HTTP Proxy
+      </div>
+
+      <HoppSmartInput
+        v-if="allowProxy"
+        v-model="proxyURL"
+        :autofocus="false"
+        styles="flex-1"
+        placeholder=" "
+        :label="'Proxy URL'"
+        input-styles="input floating-input"
+      />
+
+      <p class="my-1 text-secondaryLight">
+        Hoppscotch native interceptor supports HTTP/HTTPS/SOCKS proxies along with NTLM and Basic Auth in those proxies. Include the username and password for the proxy authentication in the URL itself.
+      </p>
+    </div>
   </div>
 </template>

+<!-- TODO: i18n -->
 <script setup lang="ts">
-import { ref } from "vue"
+import { computed, ref } from "vue"
 import IconLucideFileBadge from "~icons/lucide/file-badge"
 import IconLucideFileKey from "~icons/lucide/file-key"
 import { useService } from "dioc/vue"
-import { NativeInterceptorService } from "@platform/interceptors/native"
+import { RequestDef, NativeInterceptorService } from "@platform/interceptors/native"
+import { syncRef } from "@vueuse/core"
+
+type RequestProxyInfo = RequestDef["proxy"]

 const nativeInterceptorService = useService(NativeInterceptorService)

 const allowSSLVerification = nativeInterceptorService.validateCerts

-// const showCACertificatesModal = ref(false)
+const showCACertificatesModal = ref(false)
 const showClientCertificatesModal = ref(false)

+const allowProxy = ref(false)
+const proxyURL = ref("")
+
+const proxyInfo = computed<RequestProxyInfo>({
+  get() {
+    if (allowProxy.value) {
+      return {
+        url: proxyURL.value,
+      }
+    }
+
+    return undefined
+  },
+  set(newData) {
+    if (newData) {
+      allowProxy.value = true
+      proxyURL.value = newData.url
+    } else {
+      allowProxy.value = false
+    }
+  },
+})
+
+syncRef(nativeInterceptorService.proxyInfo, proxyInfo, { direction: "both" })
 </script>
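As the settings copy above says, proxy credentials ride in the URL itself. On the Rust side that is straightforward to pull apart with the `url` crate (already a dependency in the Cargo.toml above); the host and credentials here are made-up examples:

```rust
use url::Url;

fn main() {
    // e.g. what a user might enter in the "Proxy URL" field.
    let proxy = Url::parse("http://alice:s3cret@proxy.example.com:8080").unwrap();

    assert_eq!(proxy.scheme(), "http");
    assert_eq!(proxy.username(), "alice");        // Basic/NTLM username
    assert_eq!(proxy.password(), Some("s3cret")); // and password
    assert_eq!(proxy.host_str(), Some("proxy.example.com"));
    assert_eq!(proxy.port(), Some(8080));
}
```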
packages/hoppscotch-selfhost-desktop/src/interop.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { invoke } from "@tauri-apps/api/tauri";
import { onMounted } from "vue";
import { HoppModule } from "@hoppscotch/common/modules"

/**
 * Initializes the interop startup process.
 *
 * This function invokes a Tauri command to perform necessary startup operations.
 * It's a bridge that calls the `interop_startup_init` function defined in
 * `src-tauri/src/interop/startup/init.rs`.
 *
 * @returns A promise that resolves when the startup initialization is complete.
 * @throws Will throw an error if the Tauri command fails for any reason.
 */
async function interopStartupInit(): Promise<void> {
  return invoke<void>("interop_startup_init");
}

/**
 * HoppModule for handling interop operations; it is responsible for
 * initializing interop-startup related functionality.
 */
export const interopModule: HoppModule = {
  /**
   * Executes when the root component is set up.
   *
   * This function is called during the application's initialization process,
   * and it also uses Vue's `onMounted` hook so the interop startup occurs
   * **after** the component has been mounted in the DOM.
   */
  onRootSetup: () => {
    onMounted(async () => {
      try {
        await interopStartupInit();
        console.log("Interop startup initialization completed successfully");
      } catch (error) {
        console.error("Failed to initialize interop startup:", error);
      }
    });
  },
};
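For orientation, the Rust half that this `invoke` call lands on boils down to a command registered with `generate_handler!`. A condensed sketch of the pairing, using plain `String` errors instead of the `StartupError` type above (note `generate_context!` only compiles inside a Tauri v1 project):

```rust
use tauri::{Manager, Runtime, Window};

// `invoke("interop_startup_init")` on the frontend resolves when this
// returns Ok(()) and rejects with the serialized error otherwise.
#[tauri::command]
async fn interop_startup_init<R: Runtime>(window: Window<R>) -> Result<(), String> {
    window
        .get_window("main")
        .ok_or_else(|| "No window labeled 'main' found".to_string())?
        .show()
        .map_err(|e| e.to_string())
}

fn main() {
    tauri::Builder::default()
        .invoke_handler(tauri::generate_handler![interop_startup_init])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
```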
@@ -80,7 +80,7 @@ export const getSyncInitFunction = <T extends DispatchingStore<any, any>>(
     )
   }

-  stopSubscriptions = startSubscriptions()
+  stopSubscriptions = startSubscriptions?.()
 }

 function stopListeningToSubscriptions() {
@@ -90,7 +90,7 @@ export const getSyncInitFunction = <T extends DispatchingStore<any, any>>(
     )
   }

-  stopSubscriptions()
+  stopSubscriptions?.()
 }

 return {
@@ -14,6 +14,7 @@ import { appWindow } from "@tauri-apps/api/window"
|
||||
import { stdFooterItems } from "@hoppscotch/common/platform/std/ui/footerItem"
|
||||
import { stdSupportOptionItems } from "@hoppscotch/common/platform/std/ui/supportOptionsItem"
|
||||
import { ioDef } from "./platform/io"
|
||||
import { interopModule } from "./interop"
|
||||
|
||||
const headerPaddingLeft = ref("0px")
|
||||
const headerPaddingTop = ref("0px")
|
||||
@@ -45,11 +46,15 @@ const headerPaddingTop = ref("0px")
|
||||
settings: settingsDef,
|
||||
history: historyDef,
|
||||
},
|
||||
addedHoppModules: [interopModule],
|
||||
interceptors: {
|
||||
default: "native",
|
||||
interceptors: [
|
||||
{ type: "service", service: NativeInterceptorService },
|
||||
{ type: "standalone", interceptor: proxyInterceptor },
|
||||
{
|
||||
type: "standalone",
|
||||
interceptor: { ...proxyInterceptor, supportsDigestAuth: true },
|
||||
},
|
||||
],
|
||||
},
|
||||
platformFeatureFlags: {
|
||||
|
||||
@@ -4,67 +4,68 @@ import {
   runMutation,
 } from "@hoppscotch/common/helpers/backend/GQLClient"
 import {
   CreateGqlChildUserCollectionDocument,
   CreateGqlChildUserCollectionMutation,
   CreateGqlChildUserCollectionMutationVariables,
   CreateGqlRootUserCollectionDocument,
   CreateGqlRootUserCollectionMutation,
   CreateGqlRootUserCollectionMutationVariables,
   CreateGqlUserRequestDocument,
   CreateGqlUserRequestMutation,
   CreateGqlUserRequestMutationVariables,
   CreateRestChildUserCollectionDocument,
   CreateRestChildUserCollectionMutation,
   CreateRestChildUserCollectionMutationVariables,
   CreateRestRootUserCollectionDocument,
   CreateRestRootUserCollectionMutation,
   CreateRestRootUserCollectionMutationVariables,
   CreateRestUserRequestDocument,
   CreateRestUserRequestMutation,
   CreateRestUserRequestMutationVariables,
   DeleteUserCollectionDocument,
   DeleteUserCollectionMutation,
   DeleteUserCollectionMutationVariables,
   DeleteUserRequestDocument,
   DeleteUserRequestMutation,
   DeleteUserRequestMutationVariables,
   ExportUserCollectionsToJsonDocument,
   ExportUserCollectionsToJsonQuery,
   ExportUserCollectionsToJsonQueryVariables,
   GetGqlRootUserCollectionsDocument,
   GetGqlRootUserCollectionsQuery,
   GetGqlRootUserCollectionsQueryVariables,
   GetUserRootCollectionsDocument,
   GetUserRootCollectionsQuery,
   GetUserRootCollectionsQueryVariables,
   MoveUserCollectionDocument,
   MoveUserCollectionMutation,
   MoveUserCollectionMutationVariables,
   MoveUserRequestDocument,
   MoveUserRequestMutation,
   MoveUserRequestMutationVariables,
   RenameUserCollectionDocument,
   RenameUserCollectionMutation,
   RenameUserCollectionMutationVariables,
   ReqType,
   UpdateGqlUserRequestDocument,
   UpdateGqlUserRequestMutation,
   UpdateGqlUserRequestMutationVariables,
   UpdateRestUserRequestDocument,
   UpdateRestUserRequestMutation,
   UpdateRestUserRequestMutationVariables,
   UpdateUserCollectionOrderDocument,
   UpdateUserCollectionOrderMutation,
   UpdateUserCollectionOrderMutationVariables,
   UserCollectionCreatedDocument,
+  UserCollectionDuplicatedDocument,
   UserCollectionMovedDocument,
   UserCollectionOrderUpdatedDocument,
   UserCollectionRemovedDocument,
   UserCollectionUpdatedDocument,
   UserRequestCreatedDocument,
   UserRequestDeletedDocument,
   UserRequestMovedDocument,
   UserRequestUpdatedDocument,
 } from "../../api/generated/graphql"

 export const createRESTRootUserCollection = (title: string) =>
@@ -292,6 +293,12 @@ export const runUserCollectionOrderUpdatedSubscription = () =>
     variables: {},
   })

+export const runUserCollectionDuplicatedSubscription = () =>
+  runGQLSubscription({
+    query: UserCollectionDuplicatedDocument,
+    variables: {},
+  })
+
 export const runUserRequestCreatedSubscription = () =>
   runGQLSubscription({ query: UserRequestCreatedDocument, variables: {} })
@@ -1,10 +1,11 @@
|
||||
import { authEvents$, def as platformAuth } from "@platform/auth"
|
||||
import { CollectionsPlatformDef } from "@hoppscotch/common/platform/collections"
|
||||
import { authEvents$, def as platformAuth } from "@platform/auth"
|
||||
import { runDispatchWithOutSyncing } from "../../lib/sync"
|
||||
|
||||
import {
|
||||
exportUserCollectionsToJSON,
|
||||
runUserCollectionCreatedSubscription,
|
||||
runUserCollectionDuplicatedSubscription,
|
||||
runUserCollectionMovedSubscription,
|
||||
runUserCollectionOrderUpdatedSubscription,
|
||||
runUserCollectionRemovedSubscription,
|
||||
@@ -16,44 +17,51 @@ import {
|
||||
} from "./collections.api"
|
||||
import { collectionsSyncer, getStoreByCollectionType } from "./collections.sync"
|
||||
|
||||
import * as E from "fp-ts/Either"
|
||||
import {
|
||||
addRESTCollection,
|
||||
setRESTCollections,
|
||||
editRESTCollection,
|
||||
removeRESTCollection,
|
||||
moveRESTFolder,
|
||||
updateRESTCollectionOrder,
|
||||
saveRESTRequestAs,
|
||||
navigateToFolderWithIndexPath,
|
||||
editRESTRequest,
|
||||
removeRESTRequest,
|
||||
moveRESTRequest,
|
||||
updateRESTRequestOrder,
|
||||
addRESTFolder,
|
||||
editRESTFolder,
|
||||
removeRESTFolder,
|
||||
addGraphqlFolder,
|
||||
addGraphqlCollection,
|
||||
editGraphqlFolder,
|
||||
editGraphqlCollection,
|
||||
removeGraphqlFolder,
|
||||
removeGraphqlCollection,
|
||||
saveGraphqlRequestAs,
|
||||
editGraphqlRequest,
|
||||
moveGraphqlRequest,
|
||||
removeGraphqlRequest,
|
||||
setGraphqlCollections,
|
||||
restCollectionStore,
|
||||
} from "@hoppscotch/common/newstore/collections"
|
||||
import { runGQLSubscription } from "@hoppscotch/common/helpers/backend/GQLClient"
|
||||
import {
|
||||
addGraphqlCollection,
|
||||
addGraphqlFolder,
|
||||
addRESTCollection,
|
||||
addRESTFolder,
|
||||
editGraphqlCollection,
|
||||
editGraphqlFolder,
|
||||
editGraphqlRequest,
|
||||
editRESTCollection,
|
||||
editRESTFolder,
|
||||
editRESTRequest,
|
||||
moveGraphqlRequest,
|
||||
moveRESTFolder,
|
||||
moveRESTRequest,
|
||||
navigateToFolderWithIndexPath,
|
||||
removeGraphqlCollection,
|
||||
removeGraphqlFolder,
|
||||
removeGraphqlRequest,
|
||||
removeRESTCollection,
|
||||
removeRESTFolder,
|
||||
removeRESTRequest,
|
||||
restCollectionStore,
|
||||
saveGraphqlRequestAs,
|
||||
saveRESTRequestAs,
|
||||
setGraphqlCollections,
|
||||
setRESTCollections,
|
||||
updateRESTCollectionOrder,
|
||||
updateRESTRequestOrder,
|
||||
} from "@hoppscotch/common/newstore/collections"
|
||||
import {
|
||||
GQLHeader,
|
||||
HoppCollection,
|
||||
HoppGQLRequest,
|
||||
HoppRESTHeaders,
|
||||
HoppRESTParam,
|
||||
HoppRESTRequest,
|
||||
} from "@hoppscotch/data"
|
||||
import * as E from "fp-ts/Either"
|
||||
import {
|
||||
ReqType,
|
||||
UserCollectionDuplicatedData,
|
||||
UserRequest,
|
||||
} from "../../api/generated/graphql"
|
||||
import { gqlCollectionsSyncer } from "./gqlCollections.sync"
|
||||
import { ReqType } from "../../api/generated/graphql"
|
||||
|
||||
function initCollectionsSync() {
|
||||
const currentUser$ = platformAuth.getCurrentUserStream()
|
||||
@@ -89,6 +97,7 @@ type ExportedUserCollectionREST = {
|
||||
folders: ExportedUserCollectionREST[]
|
||||
requests: Array<HoppRESTRequest & { id: string }>
|
||||
name: string
|
||||
data: string
|
||||
}
|
||||
|
||||
type ExportedUserCollectionGQL = {
|
||||
@@ -96,6 +105,16 @@ type ExportedUserCollectionGQL = {
|
||||
folders: ExportedUserCollectionGQL[]
|
||||
requests: Array<HoppGQLRequest & { id: string }>
|
||||
name: string
|
||||
data: string
|
||||
}
|
||||
|
||||
function addDescriptionField(
|
||||
candidate: HoppRESTHeaders | GQLHeader[] | HoppRESTParam[]
|
||||
) {
|
||||
return candidate.map((item) => ({
|
||||
...item,
|
||||
description: "description" in item ? item.description : "",
|
||||
}))
|
||||
}
|
||||
|
||||
function exportedCollectionToHoppCollection(
|
||||
@@ -105,9 +124,17 @@ function exportedCollectionToHoppCollection(
|
||||
if (collectionType == "REST") {
|
||||
const restCollection = collection as ExportedUserCollectionREST
|
||||
|
||||
const data =
|
||||
restCollection.data && restCollection.data !== "null"
|
||||
? JSON.parse(restCollection.data)
|
||||
: {
|
||||
auth: { authType: "inherit", authActive: false },
|
||||
headers: [],
|
||||
}
|
||||
|
||||
return {
|
||||
id: restCollection.id,
|
||||
v: 1,
|
||||
v: 4,
|
||||
name: restCollection.name,
|
||||
folders: restCollection.folders.map((folder) =>
|
||||
exportedCollectionToHoppCollection(folder, collectionType)
|
||||
@@ -131,42 +158,72 @@ function exportedCollectionToHoppCollection(
|
||||
preRequestScript,
|
||||
testScript,
|
||||
requestVariables,
|
||||
responses,
|
||||
} = request
|
||||
|
||||
const resolvedParams = addDescriptionField(params)
|
||||
const resolvedHeaders = addDescriptionField(headers)
|
||||
|
||||
return {
|
||||
v,
|
||||
id,
|
||||
name,
|
||||
endpoint,
|
||||
method,
|
||||
params,
|
||||
requestVariables: requestVariables,
|
||||
params: resolvedParams,
|
||||
requestVariables,
|
||||
auth,
|
||||
headers,
|
||||
headers: resolvedHeaders,
|
||||
body,
|
||||
preRequestScript,
|
||||
testScript,
|
||||
responses,
|
||||
}
|
||||
}),
|
||||
auth: data.auth,
|
||||
headers: addDescriptionField(data.headers),
|
||||
}
|
||||
} else {
|
||||
const gqlCollection = collection as ExportedUserCollectionGQL
|
||||
|
||||
const data =
|
||||
gqlCollection.data && gqlCollection.data !== "null"
|
||||
? JSON.parse(gqlCollection.data)
|
||||
: {
|
||||
auth: { authType: "inherit", authActive: false },
|
||||
headers: [],
|
||||
}
|
||||
|
||||
return {
|
||||
id: gqlCollection.id,
|
||||
v: 1,
|
||||
v: 4,
|
||||
name: gqlCollection.name,
|
||||
folders: gqlCollection.folders.map((folder) =>
|
||||
exportedCollectionToHoppCollection(folder, collectionType)
|
||||
),
|
||||
requests: gqlCollection.requests.map(
|
||||
({ v, auth, headers, name, id }) => ({
|
||||
requests: gqlCollection.requests.map((request) => {
|
||||
const requestParsedResult = HoppGQLRequest.safeParse(request)
|
||||
if (requestParsedResult.type === "ok") {
|
||||
return requestParsedResult.value
|
||||
}
|
||||
|
||||
const { v, auth, headers, name, id, query, url, variables } = request
|
||||
|
||||
const resolvedHeaders = addDescriptionField(headers)
|
||||
|
||||
return {
|
||||
id,
|
||||
v,
|
||||
auth,
|
||||
headers,
|
||||
headers: resolvedHeaders,
|
||||
name,
|
||||
})
|
||||
) as HoppGQLRequest[],
|
||||
query,
|
||||
url,
|
||||
variables,
|
||||
}
|
||||
}),
|
||||
auth: data.auth,
|
||||
headers: addDescriptionField(data.headers),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -176,7 +233,6 @@ async function loadUserCollections(collectionType: "REST" | "GQL") {
|
||||
undefined,
|
||||
collectionType == "REST" ? ReqType.Rest : ReqType.Gql
|
||||
)
|
||||
|
||||
if (E.isRight(res)) {
|
||||
const collectionsJSONString =
|
||||
res.right.exportUserCollectionsToJSON.exportedCollection
|
||||
@@ -185,7 +241,6 @@ async function loadUserCollections(collectionType: "REST" | "GQL") {
|
||||
ExportedUserCollectionGQL | ExportedUserCollectionREST
|
||||
>
|
||||
).map((collection) => ({ v: 1, ...collection }))
|
||||
|
||||
runDispatchWithOutSyncing(() => {
|
||||
collectionType == "REST"
|
||||
? setRESTCollections(
|
||||
@@ -219,6 +274,9 @@ function setupSubscriptions() {
|
||||
const userCollectionMovedSub = setupUserCollectionMovedSubscription()
|
||||
const userCollectionOrderUpdatedSub =
|
||||
setupUserCollectionOrderUpdatedSubscription()
|
||||
const userCollectionDuplicatedSub =
|
||||
setupUserCollectionDuplicatedSubscription()
|
||||
|
||||
const userRequestCreatedSub = setupUserRequestCreatedSubscription()
|
||||
const userRequestUpdatedSub = setupUserRequestUpdatedSubscription()
|
||||
const userRequestDeletedSub = setupUserRequestDeletedSubscription()
|
||||
@@ -230,6 +288,7 @@ function setupSubscriptions() {
|
||||
userCollectionRemovedSub,
|
||||
userCollectionMovedSub,
|
||||
userCollectionOrderUpdatedSub,
|
||||
userCollectionDuplicatedSub,
|
||||
userRequestCreatedSub,
|
||||
userRequestUpdatedSub,
|
||||
userRequestDeletedSub,
@@ -300,19 +359,32 @@ function setupUserCollectionCreatedSubscription() {
        })
      } else {
        // root collections won't have parentCollectionID
        const data =
          res.right.userCollectionCreated.data &&
          res.right.userCollectionCreated.data != "null"
            ? JSON.parse(res.right.userCollectionCreated.data)
            : {
                auth: { authType: "inherit", authActive: false },
                headers: [],
              }

        runDispatchWithOutSyncing(() => {
          collectionType == "GQL"
            ? addGraphqlCollection({
                name: res.right.userCollectionCreated.title,
                folders: [],
                requests: [],
                v: 1,
                v: 4,
                auth: data.auth,
                headers: addDescriptionField(data.headers),
              })
            : addRESTCollection({
                name: res.right.userCollectionCreated.title,
                folders: [],
                requests: [],
                v: 1,
                v: 4,
                auth: data.auth,
                headers: addDescriptionField(data.headers),
              })

        const localIndex = collectionStore.value.state.length - 1
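The `data && data != "null" ? JSON.parse(data) : { ... }` guard above recurs across these subscription handlers because the backend may send the collection data as null or as the literal string "null". A small helper of the following shape (an illustration, not code from this commit) captures the pattern:

// Sketch: normalize persisted collection `data` into auth/headers,
// falling back to the same defaults the handlers above use.
type CollectionData = {
  auth: { authType: string; authActive: boolean }
  headers: unknown[]
}

function parseCollectionData(data: string | null | undefined): CollectionData {
  if (data && data !== "null") {
    return JSON.parse(data)
  }
  return { auth: { authType: "inherit", authActive: false }, headers: [] }
}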
@@ -489,6 +561,147 @@ function setupUserCollectionOrderUpdatedSubscription() {
  return userCollectionOrderUpdatedSub
}

function setupUserCollectionDuplicatedSubscription() {
  const [userCollectionDuplicated$, userCollectionDuplicatedSub] =
    runUserCollectionDuplicatedSubscription()

  userCollectionDuplicated$.subscribe((res) => {
    if (E.isRight(res)) {
      const {
        childCollections: childCollectionsJSONStr,
        data,
        id,
        parentID: parentCollectionID,
        requests: userRequests,
        title: name,
        type: collectionType,
      } = res.right.userCollectionDuplicated

      const { collectionStore } = getStoreByCollectionType(collectionType)

      const parentCollectionPath =
        parentCollectionID &&
        getCollectionPathFromCollectionID(
          parentCollectionID,
          collectionStore.value.state
        )

      // Incoming data transformed to the respective internal representations
      const { auth, headers } =
        data && data != "null"
          ? JSON.parse(data)
          : {
              auth: { authType: "inherit", authActive: false },
              headers: [],
            }

      const folders = transformDuplicatedCollections(childCollectionsJSONStr)

      const requests = transformDuplicatedCollectionRequests(
        userRequests as UserRequest[]
      )

      // New collection to be added to store with the transformed data
      const effectiveDuplicatedCollection: HoppCollection = {
        id,
        name,
        folders,
        requests,
        v: 4,
        auth,
        headers: addDescriptionField(headers),
      }

      // only folders will have parent collection id
      if (parentCollectionID && parentCollectionPath) {
        const collectionCreatedFromStoreIDSuffix = "-duplicate-collection"

        const parentCollection = navigateToFolderWithIndexPath(
          collectionStore.value.state,
          parentCollectionPath
            .split("/")
            .map((pathIndex) => parseInt(pathIndex))
        )

        if (!parentCollection) {
          return
        }

        // Grab the child collection inserted via store update with the ID suffix
        const collectionInsertedViaStoreUpdateIdx =
          parentCollection.folders.findIndex(({ id }) =>
            id?.endsWith(collectionCreatedFromStoreIDSuffix)
          )

        if (collectionInsertedViaStoreUpdateIdx === -1) {
          return
        }

        const collectionInsertedViaStoreUpdateIndexPath = `${parentCollectionPath}/${collectionInsertedViaStoreUpdateIdx}`

        runDispatchWithOutSyncing(() => {
          /**
           * Step 1. Remove the collection inserted via store update with the ID suffix
           * Step 2. Add the duplicated collection received from the GQL subscription
           * Step 3. Update the duplicated collection with the relevant data
           */

          if (collectionType === "GQL") {
            removeGraphqlFolder(collectionInsertedViaStoreUpdateIndexPath)

            addGraphqlFolder(name, parentCollectionPath)

            editGraphqlFolder(
              collectionInsertedViaStoreUpdateIndexPath,
              effectiveDuplicatedCollection
            )
          } else {
            removeRESTFolder(collectionInsertedViaStoreUpdateIndexPath)

            addRESTFolder(name, parentCollectionPath)

            editRESTFolder(
              collectionInsertedViaStoreUpdateIndexPath,
              effectiveDuplicatedCollection
            )
          }
        })
      } else {
        // root collections won't have `parentCollectionID`
        const collectionCreatedFromStoreIDSuffix = "-duplicate-collection"

        // Grab the child collection inserted via store update with the ID suffix
        const collectionInsertedViaStoreUpdateIdx =
          collectionStore.value.state.findIndex(({ id }) =>
            id?.endsWith(collectionCreatedFromStoreIDSuffix)
          )

        if (collectionInsertedViaStoreUpdateIdx === -1) {
          return
        }

        runDispatchWithOutSyncing(() => {
          /**
           * Step 1. Remove the collection inserted via store update with the ID suffix
           * Step 2. Add the duplicated collection received from the GQL subscription
           */
          if (collectionType === "GQL") {
            removeGraphqlCollection(collectionInsertedViaStoreUpdateIdx)

            addGraphqlCollection(effectiveDuplicatedCollection)
          } else {
            removeRESTCollection(collectionInsertedViaStoreUpdateIdx)

            addRESTCollection(effectiveDuplicatedCollection)
          }
        })
      }
    }
  })

  return userCollectionDuplicatedSub
}

function setupUserRequestCreatedSubscription() {
  const [userRequestCreated$, userRequestCreatedSub] =
    runUserRequestCreatedSubscription()
@@ -795,3 +1008,52 @@ function getRequestIndex(

  return requestIndex
}

function transformDuplicatedCollections(
  collectionsJSONStr: string
): HoppCollection[] {
  const parsedCollections: UserCollectionDuplicatedData[] =
    JSON.parse(collectionsJSONStr)

  return parsedCollections.map(
    ({
      childCollections: childCollectionsJSONStr,
      data,
      id,
      requests: userRequests,
      title: name,
    }) => {
      const { auth, headers } =
        data && data !== "null"
          ? JSON.parse(data)
          : { auth: { authType: "inherit", authActive: false }, headers: [] }

      const folders = transformDuplicatedCollections(childCollectionsJSONStr)

      const requests = transformDuplicatedCollectionRequests(userRequests)

      return {
        id,
        name,
        folders,
        requests,
        v: 4,
        auth,
        headers: addDescriptionField(headers),
      }
    }
  )
}

function transformDuplicatedCollectionRequests(
  requests: UserRequest[]
): HoppRESTRequest[] | HoppGQLRequest[] {
  return requests.map(({ id, request }) => {
    const parsedRequest = JSON.parse(request)

    return {
      ...parsedRequest,
      id,
    }
  })
}
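Note that childCollections arrives as a JSON string at every nesting level, which is why transformDuplicatedCollections re-parses and recurses per level. A usage sketch with a hand-rolled payload (all field values illustrative only):

// Each level encodes its children as a JSON string, so the transform
// recurses through string-encoded trees level by level.
const innerLevel = JSON.stringify([
  {
    id: "c2",
    title: "Inner",
    data: "null",
    childCollections: JSON.stringify([]),
    requests: [],
  },
])

const duplicated = transformDuplicatedCollections(
  JSON.stringify([
    {
      id: "c1",
      title: "Outer",
      data: "null",
      childCollections: innerLevel,
      requests: [],
    },
  ])
)
// duplicated[0].folders[0].name === "Inner"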
@@ -51,7 +51,7 @@ type ClientCertDef =
  }

// TODO: Figure out a way to autogen this from the interceptor definition on the Rust side
type RequestDef = {
export type RequestDef = {
  req_id: number

  method: string
@@ -65,6 +65,10 @@ type RequestDef = {
  validate_certs: boolean,
  root_cert_bundle_files: number[],
  client_cert: ClientCertDef | null

  proxy?: {
    url: string
  }
}
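Since `proxy` is optional on RequestDef, request definitions built without proxy settings serialize exactly as before; only `url` is carried when a proxy is configured. A payload sketch with placeholder values:

// Placeholder values illustrating the additive `proxy` field on the
// request definition handed to the Rust side.
const defWithProxy: Partial<RequestDef> = {
  req_id: 1,
  method: "GET",
  endpoint: "https://example.com/api",
  validate_certs: true,
  root_cert_bundle_files: [],
  client_cert: null,
  proxy: { url: "http://localhost:8080" },
}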
type RunRequestResponse = {
@@ -177,7 +181,8 @@ async function convertToRequestDef(
  reqID: number,
  caCertificates: CACertificateEntry[],
  clientCertificates: Map<string, ClientCertificateEntry>,
  validateCerts: boolean
  validateCerts: boolean,
  proxyInfo: RequestDef["proxy"]
): Promise<RequestDef> {
  const clientCertDomain = getURLDomain(axiosReq.url!)

@@ -188,14 +193,21 @@ async function convertToRequestDef(
    method: axiosReq.method ?? "GET",
    endpoint: axiosReq.url ?? "",
    headers: Object.entries(axiosReq.headers ?? {})
      .filter(([key, value]) => !(key.toLowerCase() === "content-type" && value.toLowerCase() === "multipart/form-data")) // Removing header, because this header will be set by reqwest
      .filter(
        ([key, value]) =>
          !(
            key.toLowerCase() === "content-type" &&
            value.toLowerCase() === "multipart/form-data"
          )
      ) // Removing header, because this header will be set by relay.
      .map(([key, value]): KeyValuePair => ({ key, value })),
    parameters: Object.entries(axiosReq.params as Record<string, string> ?? {})
      .map(([key, value]): KeyValuePair => ({ key, value })),
    body: await processBody(axiosReq),
    root_cert_bundle_files: caCertificates.map((cert) => Array.from(cert.certificate)),
    validate_certs: validateCerts,
    client_cert: clientCert ? convertClientCertToDefCert(clientCert) : null
    client_cert: clientCert ? convertClientCertToDefCert(clientCert) : null,
    proxy: proxyInfo
  }
}
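Dropping a manually set `Content-Type: multipart/form-data` header matters because the native HTTP layer must generate the multipart boundary itself when it serializes the form body. A standalone rendering of that filter (function name assumed for illustration):

// Strip a manually set multipart Content-Type so the native client can
// append its own boundary parameter, then convert to key/value pairs.
function stripManualMultipartHeader(
  headers: Record<string, string>
): Array<{ key: string; value: string }> {
  return Object.entries(headers)
    .filter(
      ([key, value]) =>
        !(
          key.toLowerCase() === "content-type" &&
          value.toLowerCase() === "multipart/form-data"
        )
    )
    .map(([key, value]) => ({ key, value }))
}

// stripManualMultipartHeader({ "Content-Type": "multipart/form-data" }) // => []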
@@ -236,6 +248,7 @@ export type ClientCertificateEntry = z.infer<typeof ClientCertificateEntry>
const CA_STORE_PERSIST_KEY = "native_interceptor_ca_store"
const CLIENT_CERTS_PERSIST_KEY = "native_interceptor_client_certs_store"
const VALIDATE_SSL_KEY = "native_interceptor_validate_ssl"
const PROXY_INFO_PERSIST_KEY = "native_interceptor_proxy_info"

export class NativeInterceptorService extends Service implements Interceptor {
  public static readonly ID = "NATIVE_INTERCEPTOR_SERVICE"
@@ -247,6 +260,8 @@ export class NativeInterceptorService extends Service implements Interceptor {
  public selectable = { type: "selectable" as const }

  public supportsCookies = true
  public supportsDigestAuth = true
  public supportsBinaryContentType = false

  private cookieJarService = this.bind(CookieJarService)
  private persistenceService: PersistenceService = this.bind(PersistenceService)
@@ -262,6 +277,7 @@ export class NativeInterceptorService extends Service implements Interceptor {

  public clientCertificates = ref<Map<string, ClientCertificateEntry>>(new Map())
  public validateCerts = ref(true)
  public proxyInfo = ref<RequestDef["proxy"]>(undefined)

  override onServiceInit() {
    // Load SSL Validation
@@ -273,6 +289,17 @@ export class NativeInterceptorService extends Service implements Interceptor {
      this.validateCerts.value = persistedValidateSSL
    }

    const persistedProxyInfo = this.persistenceService.getLocalConfig(
      PROXY_INFO_PERSIST_KEY
    )

    if (persistedProxyInfo && persistedProxyInfo !== "null") {
      try {
        const proxyInfo = JSON.parse(persistedProxyInfo)
        this.proxyInfo.value = proxyInfo
      } catch (e) {}
    }

    watch(this.validateCerts, () => {
      this.persistenceService.setLocalConfig(VALIDATE_SSL_KEY, JSON.stringify(this.validateCerts.value))
    })
@@ -390,6 +417,13 @@ export class NativeInterceptorService extends Service implements Interceptor {

      this.persistenceService.setLocalConfig(CLIENT_CERTS_PERSIST_KEY, JSON.stringify(storableValue))
    })

    watch(this.proxyInfo, (newProxyInfo) => {
      this.persistenceService.setLocalConfig(
        PROXY_INFO_PERSIST_KEY,
        JSON.stringify(newProxyInfo) ?? "null"
      )
    })
  }

  public runRequest(req: AxiosRequestConfig): RequestRunResult<InterceptorError> {
@@ -417,7 +451,8 @@ export class NativeInterceptorService extends Service implements Interceptor {
        reqID,
        this.caCertificates.value,
        this.clientCertificates.value,
        this.validateCerts.value
        this.validateCerts.value,
        this.proxyInfo.value
      )

      try {
@@ -9,9 +9,18 @@ import { updateUserSettings } from "./settings.api"
export const settingsSyncDefinition: StoreSyncDefinitionOf<
  typeof settingsStore
> = {
  toggleSetting() {
    updateUserSettings(JSON.stringify(settingsStore.value))
  },
  toggleNestedSetting() {
    updateUserSettings(JSON.stringify(settingsStore.value))
  },
  applySetting() {
    updateUserSettings(JSON.stringify(settingsStore.value))
  },
  applyNestedSetting() {
    updateUserSettings(JSON.stringify(settingsStore.value))
  },
}

export const settingsSyncer = getSyncInitFunction(