feat: hoppscotch-relay for request forwarding (#4471)

commit 8dc471f33f
parent 58407ae9dd

packages/hoppscotch-selfhost-desktop/.envrc (new file, 3 lines)
@@ -0,0 +1,3 @@
source_url "https://raw.githubusercontent.com/cachix/devenv/95f329d49a8a5289d31e0982652f7058a189bfca/direnvrc" "sha256-d+8cBpDfDBj41inrADaJt+bDWhOktwslgoP5YiGJ1v0="

use devenv

@@ -28,3 +28,12 @@ dist-ssr

 # Backend Code generation
 src/api/generated
+# Devenv
+.devenv*
+devenv.local.nix
+
+# direnv
+.direnv
+
+# pre-commit
+.pre-commit-config.yaml

packages/hoppscotch-selfhost-desktop/devenv.lock (new file, 153 lines)

@@ -0,0 +1,153 @@
{
  "nodes": {
    "devenv": {
      "locked": {
        "dir": "src/modules",
        "lastModified": 1729681848,
        "owner": "cachix",
        "repo": "devenv",
        "rev": "2634c4c9e9226a3fb54550ad4115df1992d502c5",
        "type": "github"
      },
      "original": {
        "dir": "src/modules",
        "owner": "cachix",
        "repo": "devenv",
        "type": "github"
      }
    },
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1729751566,
        "owner": "nix-community",
        "repo": "fenix",
        "rev": "f32a2d484091a6dc98220b1f4a2c2d60b7c97c64",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "fenix",
        "type": "github"
      }
    },
    "flake-compat": {
      "flake": false,
      "locked": {
        "lastModified": 1696426674,
        "owner": "edolstra",
        "repo": "flake-compat",
        "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
        "type": "github"
      },
      "original": {
        "owner": "edolstra",
        "repo": "flake-compat",
        "type": "github"
      }
    },
    "gitignore": {
      "inputs": {
        "nixpkgs": [
          "pre-commit-hooks",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1709087332,
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
        "type": "github"
      },
      "original": {
        "owner": "hercules-ci",
        "repo": "gitignore.nix",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1729690727,
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "be79af5ec63facf6c7709094db72b253c34e1ac2",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixpkgs-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "nixpkgs-stable": {
      "locked": {
        "lastModified": 1729449015,
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "89172919243df199fe237ba0f776c3e3e3d72367",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-24.05",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "pre-commit-hooks": {
      "inputs": {
        "flake-compat": "flake-compat",
        "gitignore": "gitignore",
        "nixpkgs": [
          "nixpkgs"
        ],
        "nixpkgs-stable": "nixpkgs-stable"
      },
      "locked": {
        "lastModified": 1729104314,
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "rev": "3c3e88f0f544d6bb54329832616af7eb971b6be6",
        "type": "github"
      },
      "original": {
        "owner": "cachix",
        "repo": "pre-commit-hooks.nix",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "devenv": "devenv",
        "fenix": "fenix",
        "nixpkgs": "nixpkgs",
        "pre-commit-hooks": "pre-commit-hooks"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1729715509,
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "40492e15d49b89cf409e2c5536444131fac49429",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

packages/hoppscotch-selfhost-desktop/devenv.nix (new file, 92 lines)

@@ -0,0 +1,92 @@
{ pkgs, lib, config, inputs, ... }:

{
  # https://devenv.sh/packages/
  packages = with pkgs; [
    git
    postgresql_16
    # BE and Tauri stuff
    libsoup
    webkitgtk_4_0
    # FE and Node stuff
    nodejs_22
    nodePackages_latest.typescript-language-server
    nodePackages_latest.vls
    nodePackages_latest.prisma
    prisma-engines
    # CI
    act
    # Cargo
    cargo-edit
  ];

  # https://devenv.sh/basics/
  env = {
    APP_GREET = "Hoppscotch";
    # NOTE: Setting these `PRISMA_*` environment variables fixes
    # Error: Failed to fetch sha256 checksum at https://binaries.prisma.sh/all_commits/<hash>/linux-nixos/libquery_engine.so.node.gz.sha256 - 404 Not Found
    # See: https://github.com/prisma/prisma/discussions/3120
    PRISMA_QUERY_ENGINE_LIBRARY = "${pkgs.prisma-engines}/lib/libquery_engine.node";
    PRISMA_QUERY_ENGINE_BINARY = "${pkgs.prisma-engines}/bin/query-engine";
    PRISMA_SCHEMA_ENGINE_BINARY = "${pkgs.prisma-engines}/bin/schema-engine";
  };


  # https://devenv.sh/scripts/
  scripts = {
    hello.exec = "echo hello from $APP_GREET";
    e.exec = "emacs";
  };

  enterShell = ''
    git --version
  '';

  # https://devenv.sh/tests/
  enterTest = ''
    echo "Running tests"
  '';

  # https://devenv.sh/integrations/dotenv/
  dotenv.enable = true;

  # https://devenv.sh/languages/

  # https://devenv.sh/languages/
  languages = {
    typescript.enable = true;

    javascript = {
      enable = true;
      pnpm = {
        enable = true;
      };
      npm = {
        enable = true;
      };
    };

    rust = {
      enable = true;
      channel = "nightly";
      components = [
        "rustc"
        "cargo"
        "clippy"
        "rustfmt"
        "rust-analyzer"
        "llvm-tools-preview"
        "rust-src"
        "rustc-codegen-cranelift-preview"
      ];
    };
  };

  # https://devenv.sh/pre-commit-hooks/
  # pre-commit.hooks.shellcheck.enable = true;

  # https://devenv.sh/processes/
  # processes.ping.exec = "ping example.com";

  # See full reference at https://devenv.sh/reference/options/
}

packages/hoppscotch-selfhost-desktop/devenv.yaml (new file, 23 lines)

@@ -0,0 +1,23 @@
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
inputs:
  # For NodeJS-22 and above
  nixpkgs:
    url: github:NixOS/nixpkgs/nixpkgs-unstable
  # nixpkgs:
  #   url: github:cachix/devenv-nixpkgs/rolling
  fenix:
    url: github:nix-community/fenix
    inputs:
      nixpkgs:
        follows: nixpkgs

# If you're using non-OSS software, you can set allowUnfree to true.
allowUnfree: true

# If you're willing to use a package that's vulnerable
# permittedInsecurePackages:
#   - "openssl-1.1.1w"

# If you have more than one devenv you can merge them
#imports:
#  - ./backend

File diff suppressed because it is too large
@@ -25,7 +25,7 @@ tauri = { version = "1.8.1", features = [
 tauri-plugin-store = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
 tauri-plugin-deep-link = { git = "https://github.com/FabianLars/tauri-plugin-deep-link", branch = "main" }
 tauri-plugin-window-state = "0.1.1"
-reqwest = { version = "0.11.27", features = ["native-tls"] }
+hoppscotch-relay = { path = "../../hoppscotch-relay" }
 serde_json = "1.0.128"
 url = "2.5.2"
 hex_color = "3.0.0"
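Note on the new dependency: `hoppscotch-relay` is pulled in as a path crate and takes over the transfer work that the removed `reqwest` dependency used to do. A minimal sketch of how it is driven (the `run_request_task` signature and its `Result<ResponseWithMetadata, RelayError>` return type are inferred from the call sites in the plugin diff below, not from crate documentation):

// Sketch only: signatures assumed from the plugin code in this commit.
use hoppscotch_relay::{RelayError, RequestWithMetadata, ResponseWithMetadata};
use tokio_util::sync::CancellationToken;

fn forward(req: &RequestWithMetadata) -> Result<ResponseWithMetadata, RelayError> {
    // run_request_task is synchronous; the token lets another task cancel it mid-transfer.
    let cancel_token = CancellationToken::new();
    hoppscotch_relay::run_request_task(req, cancel_token)
}
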
@@ -1,320 +1,90 @@
 use dashmap::DashMap;
-use reqwest::{header::{HeaderMap, HeaderName, HeaderValue}, Certificate, ClientBuilder, Identity};
-use serde::{Deserialize, Serialize};
-use tauri::{plugin::{Builder, TauriPlugin}, Manager, Runtime, State};
+use hoppscotch_relay::{RequestWithMetadata, ResponseWithMetadata};
+use serde::Serialize;
+use tauri::{
+    plugin::{Builder, TauriPlugin},
+    Manager, Runtime, State,
+};
+use thiserror::Error;
 use tokio_util::sync::CancellationToken;

 #[derive(Default)]
 struct InterceptorState {
-    cancellation_tokens: DashMap<usize, CancellationToken>
+    cancellation_tokens: DashMap<usize, CancellationToken>,
 }

-#[derive(Debug, Serialize, Deserialize)]
-struct KeyValuePair {
-    key: String,
-    value: String
-}

-#[derive(Debug, Deserialize)]
-enum FormDataValue {
-    Text(String),
-    File {
-        filename: String,
-        data: Vec<u8>,
-        mime: String
-    }
-}

-#[derive(Debug, Deserialize)]
-struct FormDataEntry {
-    key: String,
-    value: FormDataValue
-}

-#[derive(Debug, Deserialize)]
-enum BodyDef {
-    Text(String),
-    URLEncoded(Vec<KeyValuePair>),
-    FormData(Vec<FormDataEntry>)
-}

-#[derive(Debug, Deserialize)]
-enum ClientCertDef {
-    PEMCert {
-        certificate_pem: Vec<u8>,
-        key_pem: Vec<u8>
-    },

-    PFXCert {
-        certificate_pfx: Vec<u8>,
-        password: String
-    }
-}

-#[derive(Debug, Deserialize)]
-struct RequestDef {
-    req_id: usize,

-    method: String,
-    endpoint: String,

-    parameters: Vec<KeyValuePair>,
-    headers: Vec<KeyValuePair>,

-    body: Option<BodyDef>,

-    validate_certs: bool,
-    root_cert_bundle_files: Vec<Vec<u8>>,
-    client_cert: Option<ClientCertDef>
-}

-fn get_identity_from_req(req: &RequestDef) -> Result<Option<Identity>, reqwest::Error> {
-    let result = match &req.client_cert {
-        None => return Ok(None),
-        Some(ClientCertDef::PEMCert { certificate_pem, key_pem }) => Identity::from_pkcs8_pem(&certificate_pem, &key_pem),
-        Some(ClientCertDef::PFXCert { certificate_pfx, password }) => Identity::from_pkcs12_der(&certificate_pfx, &password)
-    };

-    Ok(Some(result?))
-}

-fn parse_root_certs(req: &RequestDef) -> Result<Vec<Certificate>, reqwest::Error> {
-    let mut result = vec![];

-    for cert_bundle_file in &req.root_cert_bundle_files {
-        let mut certs = Certificate::from_pem_bundle(&cert_bundle_file)?;
-        result.append(&mut certs);
-    }

-    Ok(result)
-}

-enum ReqBodyAction {
-    Body(reqwest::Body),
-    UrlEncodedForm(Vec<(String, String)>),
-    MultipartForm(reqwest::multipart::Form)
-}

-fn convert_bodydef_to_req_action(req: &RequestDef) -> Option<ReqBodyAction> {
-    match &req.body {
-        None => None,
-        Some(BodyDef::Text(text)) => Some(ReqBodyAction::Body(text.clone().into())),
-        Some(BodyDef::URLEncoded(entries)) =>
-            Some(
-                ReqBodyAction::UrlEncodedForm(
-                    entries.iter()
-                        .map(|KeyValuePair { key, value }| (key.clone(), value.clone()))
-                        .collect()
-                )
-            ),
-        Some(BodyDef::FormData(entries)) => {
-            let mut form = reqwest::multipart::Form::new();

-            for entry in entries {
-                form = match &entry.value {
-                    FormDataValue::Text(value) => form.text(entry.key.clone(), value.clone()),
-                    FormDataValue::File { filename, data, mime } =>
-                        form.part(
-                            entry.key.clone(),
-                            reqwest::multipart::Part::bytes(data.clone())
-                                .file_name(filename.clone())
-                                .mime_str(mime.as_str()).expect("Error while setting File enum")
-                        ),
-                }
-            }

-            Some(ReqBodyAction::MultipartForm(form))
-        }
-    }
-}

-#[derive(Serialize)]
-struct RunRequestResponse {
-    status: u16,
-    status_text: String,
-    headers: Vec<KeyValuePair>,
-    data: Vec<u8>,

-    time_start_ms: u128,
-    time_end_ms: u128
-}

-#[derive(Serialize)]
-enum RunRequestError {
-    RequestCancelled,
-    ClientCertError,
-    RootCertError,
-    InvalidMethod,
-    InvalidUrl,
-    InvalidHeaders,
-    RequestRunError(String)
-}

-async fn execute_request(req_builder: reqwest::RequestBuilder) -> Result<RunRequestResponse, RunRequestError> {
-    let start_time_ms = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH)
-        .unwrap()
-        .as_millis();

-    let response = req_builder.send()
-        .await
-        .map_err(|err| RunRequestError::RequestRunError(err.to_string()))?;

-    // We hold on to these values becase we lose ownership of response
-    // when we read the body
-    let res_status = response.status();
-    let res_headers = response.headers().clone();


-    let res_body_bytes = response.bytes()
-        .await
-        .map_err(|err| RunRequestError::RequestRunError(err.to_string()))?;

-    // Reqwest resolves the send before all the response is loaded, to keep the timing
-    // correctly, we load the response as well.
-    let end_time_ms = std::time::SystemTime::now()
-        .duration_since(std::time::UNIX_EPOCH)
-        .unwrap()
-        .as_millis();

-    let response_status = res_status.as_u16();
-    let response_status_text = res_status
-        .canonical_reason()
-        .unwrap_or("Unknown Status")
-        .to_owned();

-    let response_headers = res_headers
-        .iter()
-        .map(|(key, value)|
-            KeyValuePair {
-                key: key.as_str().to_owned(),
-                value: value.to_str().unwrap_or("").to_owned()
-            }
-        )
-        .collect();

-    Ok(
-        RunRequestResponse {
-            status: response_status,
-            status_text: response_status_text,
-            headers: response_headers,
-            data: res_body_bytes.into(),
-            time_start_ms: start_time_ms,
-            time_end_ms: end_time_ms
-        }
-    )
+#[derive(Debug, Serialize, Error)]
+pub enum RunRequestError {
+    #[error("Request cancelled")]
+    RequestCancelled,
+    #[error("Internal server error")]
+    InternalServerError,
+    #[error("Relay error: {0}")]
+    Relay(#[from] hoppscotch_relay::RelayError),
+}

 #[tauri::command]
-async fn run_request(req: RequestDef, state: State<'_, InterceptorState>) -> Result<RunRequestResponse, RunRequestError> {
-    let method = reqwest::Method::from_bytes(req.method.as_bytes())
-        .map_err(|_| RunRequestError::InvalidMethod)?;
+async fn run_request(
+    req: RequestWithMetadata,
+    state: State<'_, InterceptorState>,
+) -> Result<ResponseWithMetadata, RunRequestError> {
+    let req_id = req.req_id;
+    let cancel_token = CancellationToken::new();
+    // NOTE: This will drop the reference to an existing cancellation token
+    // if you send a request with the same request id as an existing one,
+    // thereby dropping any means to cancel a running operation with the old token.
+    // This is done because, on the FE side, we may lose cancel token info upon reloads,
+    // and this allows us to work around that.
+    state
+        .cancellation_tokens
+        .insert(req_id, cancel_token.clone());

-    let endpoint_url = reqwest::Url::parse(&req.endpoint)
-        .map_err(|_| RunRequestError::InvalidUrl)?;
+    let cancel_token_clone = cancel_token.clone();
+    // Executes the HTTP request in a blocking thread pool and handles cancellation.
+    //
+    // It:
+    // 1. Uses `spawn_blocking` to run the sync `run_request_task`
+    //    without blocking the main Tokio runtime.
+    // 2. Uses `select!` to concurrently wait for either
+    //    a. the task to complete,
+    //    b. or a cancellation signal.
+    //
+    // Why spawn_blocking?
+    // - `run_request_task` uses synchronous curl operations which would block
+    //   the async runtime if not run in a separate thread.
+    // - `spawn_blocking` moves this operation to a thread pool designed for
+    //   blocking tasks, so other async operations can continue unblocked.
+    let result = tokio::select! {
+        res = tokio::task::spawn_blocking(move || hoppscotch_relay::run_request_task(&req, cancel_token_clone)) => {
+            match res {
+                Ok(task_result) => Ok(task_result?),
+                Err(_) => Err(RunRequestError::InternalServerError),
+            }
+        },
+        _ = cancel_token.cancelled() => {
+            Err(RunRequestError::RequestCancelled)
+        }
+    };

-    let headers = req.headers
-        .iter()
-        .map(|KeyValuePair { key, value }|
-            Ok(
-                (
-                    key.parse::<HeaderName>().map_err(|_| ())?,
-                    value.parse::<HeaderValue>().map_err(|_| ())?
-                )
-            )
-        )
-        .collect::<Result<HeaderMap, ()>>()
-        .map_err(|_| RunRequestError::InvalidHeaders)?;
+    state.cancellation_tokens.remove(&req_id);

-    let body_action = convert_bodydef_to_req_action(&req);

-    let client_identity = get_identity_from_req(&req)
-        .map_err(|_| RunRequestError::ClientCertError)?;

-    let root_certs = parse_root_certs(&req)
-        .map_err(|_| RunRequestError::RootCertError)?;

-    let mut client_builder = ClientBuilder::new()
-        .danger_accept_invalid_certs(!req.validate_certs);

-    // NOTE: Root Certificates are not currently implemented into the Hoppscotch UI
-    // This is done so as the current mechanism doesn't allow for v1 X.509 certificates
-    // to be accepted. Reqwest supports `native-tls` and `rustls`.
-    // `native-tls` should support v1 X.509 in Linux [OpenSSL] (and hopefully on Win [SChannel]), but on
-    // macOS the Security Framework system in it blocks certiticates pretty harshly and blocks v1.
-    // `rustls` doesn't allow v1 x.509 as well as documented here: https://github.com/rustls/webpki/issues/29
-    // We will fully introduce the feature when the dilemma is solved (or demand is voiced), until
-    // then, disabling SSL verification should yield same results
-    for root_cert in root_certs {
-        client_builder = client_builder.add_root_certificate(root_cert);
-    }

-    if let Some(identity) = client_identity {
-        client_builder = client_builder.identity(identity);
-    }

-    let client = client_builder.build()
-        .expect("TLS Backend couldn't be initialized");

-    let mut req_builder = client.request(method, endpoint_url)
-        .query(
-            &req.parameters
-                .iter()
-                .map(|KeyValuePair { key, value }| (key, value))
-                .collect::<Vec<_>>()
-        )
-        .headers(headers);

-    req_builder = match body_action {
-        None => req_builder,
-        Some(ReqBodyAction::Body(body)) => req_builder.body(body),
-        Some(ReqBodyAction::UrlEncodedForm(entries)) => req_builder.form(&entries),
-        Some(ReqBodyAction::MultipartForm(form)) => req_builder.multipart(form)
-    };

-    let cancel_token = CancellationToken::new();

-    // NOTE: This will drop reference to an existing cancellation token
-    // if you send a request with the same request id as an existing one,
-    // thereby, dropping any means to cancel a running operation with the old token.
-    // This is done so because, on FE side, we may lose cancel token info upon reloads
-    // and this allows us to work around that.
-    state.cancellation_tokens.insert(req.req_id, cancel_token.clone());

-    // Races between whether cancellation happened or requext execution happened
-    let result = tokio::select! {
-        _ = cancel_token.cancelled() => { None },
-        result = execute_request(req_builder) => {
-            // Remove cancellation token since the request has now completed
-            state.cancellation_tokens.remove(&req.req_id);

-            Some(result)
-        }
-    };

-    result
-        .unwrap_or(Err(RunRequestError::RequestCancelled))
+    result
 }

 #[tauri::command]
 fn cancel_request(req_id: usize, state: State<'_, InterceptorState>) {
-    if let Some((_, cancel_token)) = state.cancellation_tokens.remove(&req_id) {
-        cancel_token.cancel();
-    }
+    if let Some((_, cancel_token)) = state.cancellation_tokens.remove(&req_id) {
+        cancel_token.cancel();
+    }
 }

 pub fn init<R: Runtime>() -> TauriPlugin<R> {
-    Builder::new("hopp_native_interceptor")
-        .invoke_handler(
-            tauri::generate_handler![
-                run_request,
-                cancel_request
-            ]
-        )
-        .setup(|app_handle| {
-            app_handle.manage(InterceptorState::default());
+    Builder::new("hopp_native_interceptor")
+        .invoke_handler(tauri::generate_handler![run_request, cancel_request])
+        .setup(|app_handle| {
+            app_handle.manage(InterceptorState::default());

-            Ok(())
-        })
-        .build()
+            Ok(())
+        })
+        .build()
 }
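The heart of the rewritten `run_request` is the race between the blocking relay call (moved onto Tokio's blocking thread pool) and the per-request `CancellationToken` stored in `InterceptorState`. Below is a self-contained sketch of that pattern with a stand-in sleep instead of `hoppscotch_relay::run_request_task` (assumes the `tokio`, `tokio-util`, and `dashmap` crates as dependencies):

use dashmap::DashMap;
use std::time::Duration;
use tokio_util::sync::CancellationToken;

#[tokio::main]
async fn main() {
    // Tokens keyed by request id, as in InterceptorState.
    let tokens: DashMap<usize, CancellationToken> = DashMap::new();

    let req_id = 1;
    let cancel_token = CancellationToken::new();
    // Inserting under an existing id replaces (and orphans) the older token,
    // mirroring the NOTE above about surviving frontend reloads.
    tokens.insert(req_id, cancel_token.clone());

    // Cancel from elsewhere after 100 ms, as the cancel_request command would.
    let canceller = cancel_token.clone();
    tokio::spawn(async move {
        tokio::time::sleep(Duration::from_millis(100)).await;
        canceller.cancel();
    });

    // Race the blocking work against cancellation, as run_request does.
    // (In the real plugin the blocking task also receives the token so the
    // transfer itself can stop early; this stand-in just sleeps.)
    let result: Result<&str, &str> = tokio::select! {
        res = tokio::task::spawn_blocking(|| {
            std::thread::sleep(Duration::from_secs(5)); // stand-in for the relay call
            "response"
        }) => res.map_err(|_| "internal error"),
        _ = cancel_token.cancelled() => Err("request cancelled"),
    };

    tokens.remove(&req_id);
    println!("{:?}", result); // prints Err("request cancelled")
}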