Upstream-driven getProxyConfig and getProxyConfigV6

Co-Authored-By: brekotis <93345790+brekotis@users.noreply.github.com>
This commit is contained in:
Alexey 2026-03-23 22:41:17 +03:00
parent 8db566dbe9
commit a40d6929e5
No known key found for this signature in database
8 changed files with 266 additions and 41 deletions

View File

@ -1267,10 +1267,7 @@ mod tests {
cfg.server.proxy_protocol_trusted_cidrs, cfg.server.proxy_protocol_trusted_cidrs,
default_proxy_protocol_trusted_cidrs() default_proxy_protocol_trusted_cidrs()
); );
assert_eq!( assert_eq!(cfg.censorship.unknown_sni_action, UnknownSniAction::Drop);
cfg.censorship.unknown_sni_action,
UnknownSniAction::Drop
);
assert_eq!(cfg.server.api.listen, default_api_listen()); assert_eq!(cfg.server.api.listen, default_api_listen());
assert_eq!(cfg.server.api.whitelist, default_api_whitelist()); assert_eq!(cfg.server.api.whitelist, default_api_whitelist());
assert_eq!( assert_eq!(
@ -1493,7 +1490,10 @@ mod tests {
"#, "#,
) )
.unwrap(); .unwrap();
assert_eq!(cfg_default.censorship.unknown_sni_action, UnknownSniAction::Drop); assert_eq!(
cfg_default.censorship.unknown_sni_action,
UnknownSniAction::Drop
);
let cfg_mask: ProxyConfig = toml::from_str( let cfg_mask: ProxyConfig = toml::from_str(
r#" r#"
@ -1506,7 +1506,10 @@ mod tests {
"#, "#,
) )
.unwrap(); .unwrap();
assert_eq!(cfg_mask.censorship.unknown_sni_action, UnknownSniAction::Mask); assert_eq!(
cfg_mask.censorship.unknown_sni_action,
UnknownSniAction::Mask
);
} }
#[test] #[test]

View File

@ -8,8 +8,10 @@ use tracing::{debug, error, info, warn};
use crate::cli; use crate::cli;
use crate::config::ProxyConfig; use crate::config::ProxyConfig;
use crate::transport::UpstreamManager;
use crate::transport::middle_proxy::{ use crate::transport::middle_proxy::{
ProxyConfigData, fetch_proxy_config_with_raw, load_proxy_config_cache, save_proxy_config_cache, ProxyConfigData, fetch_proxy_config_with_raw_via_upstream, load_proxy_config_cache,
save_proxy_config_cache,
}; };
pub(crate) fn resolve_runtime_config_path( pub(crate) fn resolve_runtime_config_path(
@ -288,9 +290,10 @@ pub(crate) async fn load_startup_proxy_config_snapshot(
cache_path: Option<&str>, cache_path: Option<&str>,
me2dc_fallback: bool, me2dc_fallback: bool,
label: &'static str, label: &'static str,
upstream: Option<std::sync::Arc<UpstreamManager>>,
) -> Option<ProxyConfigData> { ) -> Option<ProxyConfigData> {
loop { loop {
match fetch_proxy_config_with_raw(url).await { match fetch_proxy_config_with_raw_via_upstream(url, upstream.clone()).await {
Ok((cfg, raw)) => { Ok((cfg, raw)) => {
if !cfg.map.is_empty() { if !cfg.map.is_empty() {
if let Some(path) = cache_path if let Some(path) = cache_path

View File

@ -63,9 +63,10 @@ pub(crate) async fn initialize_me_pool(
let proxy_secret_path = config.general.proxy_secret_path.as_deref(); let proxy_secret_path = config.general.proxy_secret_path.as_deref();
let pool_size = config.general.middle_proxy_pool_size.max(1); let pool_size = config.general.middle_proxy_pool_size.max(1);
let proxy_secret = loop { let proxy_secret = loop {
match crate::transport::middle_proxy::fetch_proxy_secret( match crate::transport::middle_proxy::fetch_proxy_secret_with_upstream(
proxy_secret_path, proxy_secret_path,
config.general.proxy_secret_len_max, config.general.proxy_secret_len_max,
Some(upstream_manager.clone()),
) )
.await .await
{ {
@ -129,6 +130,7 @@ pub(crate) async fn initialize_me_pool(
config.general.proxy_config_v4_cache_path.as_deref(), config.general.proxy_config_v4_cache_path.as_deref(),
me2dc_fallback, me2dc_fallback,
"getProxyConfig", "getProxyConfig",
Some(upstream_manager.clone()),
) )
.await; .await;
if cfg_v4.is_some() { if cfg_v4.is_some() {
@ -160,6 +162,7 @@ pub(crate) async fn initialize_me_pool(
config.general.proxy_config_v6_cache_path.as_deref(), config.general.proxy_config_v6_cache_path.as_deref(),
me2dc_fallback, me2dc_fallback,
"getProxyConfigV6", "getProxyConfigV6",
Some(upstream_manager.clone()),
) )
.await; .await;
if cfg_v6.is_some() { if cfg_v6.is_some() {

View File

@ -667,7 +667,8 @@ where
let cached = if config.censorship.tls_emulation { let cached = if config.censorship.tls_emulation {
if let Some(cache) = tls_cache.as_ref() { if let Some(cache) = tls_cache.as_ref() {
let selected_domain = matched_tls_domain.unwrap_or(config.censorship.tls_domain.as_str()); let selected_domain =
matched_tls_domain.unwrap_or(config.censorship.tls_domain.as_str());
let cached_entry = cache.get(selected_domain).await; let cached_entry = cache.get(selected_domain).await;
let use_full_cert_payload = cache let use_full_cert_payload = cache
.take_full_cert_budget_for_ip( .take_full_cert_budget_for_ip(

View File

@ -11,17 +11,19 @@ use tracing::{debug, info, warn};
use crate::config::ProxyConfig; use crate::config::ProxyConfig;
use crate::error::Result; use crate::error::Result;
use crate::transport::UpstreamManager;
use super::http_fetch::https_get;
use super::MePool; use super::MePool;
use super::rotation::{MeReinitTrigger, enqueue_reinit_trigger}; use super::rotation::{MeReinitTrigger, enqueue_reinit_trigger};
use super::secret::download_proxy_secret_with_max_len; use super::secret::download_proxy_secret_with_max_len_via_upstream;
use super::selftest::record_timeskew_sample; use super::selftest::record_timeskew_sample;
use std::time::SystemTime; use std::time::SystemTime;
async fn retry_fetch(url: &str) -> Option<ProxyConfigData> { async fn retry_fetch(url: &str, upstream: Option<Arc<UpstreamManager>>) -> Option<ProxyConfigData> {
let delays = [1u64, 5, 15]; let delays = [1u64, 5, 15];
for (i, d) in delays.iter().enumerate() { for (i, d) in delays.iter().enumerate() {
match fetch_proxy_config(url).await { match fetch_proxy_config_via_upstream(url, upstream.clone()).await {
Ok(cfg) => return Some(cfg), Ok(cfg) => return Some(cfg),
Err(e) => { Err(e) => {
if i == delays.len() - 1 { if i == delays.len() - 1 {
@ -96,13 +98,17 @@ pub async fn save_proxy_config_cache(path: &str, raw_text: &str) -> Result<()> {
} }
pub async fn fetch_proxy_config_with_raw(url: &str) -> Result<(ProxyConfigData, String)> { pub async fn fetch_proxy_config_with_raw(url: &str) -> Result<(ProxyConfigData, String)> {
let resp = reqwest::get(url).await.map_err(|e| { fetch_proxy_config_with_raw_via_upstream(url, None).await
crate::error::ProxyError::Proxy(format!("fetch_proxy_config GET failed: {e}")) }
})?;
let http_status = resp.status().as_u16();
if let Some(date) = resp.headers().get(reqwest::header::DATE) pub async fn fetch_proxy_config_with_raw_via_upstream(
&& let Ok(date_str) = date.to_str() url: &str,
upstream: Option<Arc<UpstreamManager>>,
) -> Result<(ProxyConfigData, String)> {
let resp = https_get(url, upstream).await?;
let http_status = resp.status;
if let Some(date_str) = resp.date_header.as_deref()
&& let Ok(server_time) = httpdate::parse_http_date(date_str) && let Ok(server_time) = httpdate::parse_http_date(date_str)
&& let Ok(skew) = SystemTime::now() && let Ok(skew) = SystemTime::now()
.duration_since(server_time) .duration_since(server_time)
@ -123,9 +129,7 @@ pub async fn fetch_proxy_config_with_raw(url: &str) -> Result<(ProxyConfigData,
} }
} }
let text = resp.text().await.map_err(|e| { let text = String::from_utf8_lossy(&resp.body).into_owned();
crate::error::ProxyError::Proxy(format!("fetch_proxy_config read failed: {e}"))
})?;
let parsed = parse_proxy_config_text(&text, http_status); let parsed = parse_proxy_config_text(&text, http_status);
Ok((parsed, text)) Ok((parsed, text))
} }
@ -261,7 +265,14 @@ fn parse_proxy_line(line: &str) -> Option<(i32, IpAddr, u16)> {
} }
pub async fn fetch_proxy_config(url: &str) -> Result<ProxyConfigData> { pub async fn fetch_proxy_config(url: &str) -> Result<ProxyConfigData> {
fetch_proxy_config_with_raw(url) fetch_proxy_config_via_upstream(url, None).await
}
pub async fn fetch_proxy_config_via_upstream(
url: &str,
upstream: Option<Arc<UpstreamManager>>,
) -> Result<ProxyConfigData> {
fetch_proxy_config_with_raw_via_upstream(url, upstream)
.await .await
.map(|(parsed, _raw)| parsed) .map(|(parsed, _raw)| parsed)
} }
@ -300,6 +311,7 @@ async fn run_update_cycle(
state: &mut UpdaterState, state: &mut UpdaterState,
reinit_tx: &mpsc::Sender<MeReinitTrigger>, reinit_tx: &mpsc::Sender<MeReinitTrigger>,
) { ) {
let upstream = pool.upstream.clone();
pool.update_runtime_reinit_policy( pool.update_runtime_reinit_policy(
cfg.general.hardswap, cfg.general.hardswap,
cfg.general.me_pool_drain_ttl_secs, cfg.general.me_pool_drain_ttl_secs,
@ -354,7 +366,7 @@ async fn run_update_cycle(
let mut maps_changed = false; let mut maps_changed = false;
let mut ready_v4: Option<(ProxyConfigData, u64)> = None; let mut ready_v4: Option<(ProxyConfigData, u64)> = None;
let cfg_v4 = retry_fetch("https://core.telegram.org/getProxyConfig").await; let cfg_v4 = retry_fetch("https://core.telegram.org/getProxyConfig", upstream.clone()).await;
if let Some(cfg_v4) = cfg_v4 if let Some(cfg_v4) = cfg_v4
&& snapshot_passes_guards(cfg, &cfg_v4, "getProxyConfig") && snapshot_passes_guards(cfg, &cfg_v4, "getProxyConfig")
{ {
@ -378,7 +390,7 @@ async fn run_update_cycle(
} }
let mut ready_v6: Option<(ProxyConfigData, u64)> = None; let mut ready_v6: Option<(ProxyConfigData, u64)> = None;
let cfg_v6 = retry_fetch("https://core.telegram.org/getProxyConfigV6").await; let cfg_v6 = retry_fetch("https://core.telegram.org/getProxyConfigV6", upstream.clone()).await;
if let Some(cfg_v6) = cfg_v6 if let Some(cfg_v6) = cfg_v6
&& snapshot_passes_guards(cfg, &cfg_v6, "getProxyConfigV6") && snapshot_passes_guards(cfg, &cfg_v6, "getProxyConfigV6")
{ {
@ -456,7 +468,12 @@ async fn run_update_cycle(
pool.reset_stun_state(); pool.reset_stun_state();
if cfg.general.proxy_secret_rotate_runtime { if cfg.general.proxy_secret_rotate_runtime {
match download_proxy_secret_with_max_len(cfg.general.proxy_secret_len_max).await { match download_proxy_secret_with_max_len_via_upstream(
cfg.general.proxy_secret_len_max,
upstream,
)
.await
{
Ok(secret) => { Ok(secret) => {
let secret_hash = hash_secret(&secret); let secret_hash = hash_secret(&secret);
let stable_hits = state.secret.observe(secret_hash); let stable_hits = state.secret.observe(secret_hash);

View File

@ -0,0 +1,184 @@
use std::sync::Arc;
use std::time::Duration;
use http_body_util::{BodyExt, Empty};
use hyper::header::{CONNECTION, DATE, HOST, USER_AGENT};
use hyper::{Method, Request};
use hyper_util::rt::TokioIo;
use rustls::pki_types::ServerName;
use tokio::net::TcpStream;
use tokio::time::timeout;
use tokio_rustls::TlsConnector;
use tracing::debug;
use crate::error::{ProxyError, Result};
use crate::network::dns_overrides::resolve_socket_addr;
use crate::transport::{UpstreamManager, UpstreamStream};
// Time budget for establishing the TCP (or upstream-tunneled) connection.
const HTTP_CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
// Time budget applied separately to each later phase: TLS handshake,
// sending the request, and reading the response body.
const HTTP_REQUEST_TIMEOUT: Duration = Duration::from_secs(15);
/// Minimal result of an HTTPS GET: status code, raw `Date` header, and body bytes.
pub(crate) struct HttpsGetResponse {
    // HTTP status code (e.g. 200).
    pub(crate) status: u16,
    // Raw `Date` response-header value, if the server sent one; callers parse it
    // with `httpdate` for time-skew detection.
    pub(crate) date_header: Option<String>,
    // Entire response body, collected into memory.
    pub(crate) body: Vec<u8>,
}
/// Build the rustls client config backed by the webpki root-certificate store.
///
/// The config is identical on every call, so it is constructed once and cached
/// in a `OnceLock`; each subsequent call is just a cheap `Arc` clone (refcount
/// bump) instead of re-parsing the entire root store.
fn build_tls_client_config() -> Arc<rustls::ClientConfig> {
    static CONFIG: std::sync::OnceLock<Arc<rustls::ClientConfig>> = std::sync::OnceLock::new();
    CONFIG
        .get_or_init(|| {
            let mut root_store = rustls::RootCertStore::empty();
            root_store.extend(webpki_roots::TLS_SERVER_ROOTS.iter().cloned());
            Arc::new(
                rustls::ClientConfig::builder()
                    .with_root_certificates(root_store)
                    .with_no_client_auth(),
            )
        })
        .clone()
}
/// Split an `https://` URL into `(host, port, path-with-optional-query)`.
///
/// Only the `https` scheme is accepted. The port falls back to the scheme
/// default (443) when the URL does not specify one; an empty path becomes `/`.
fn extract_host_port_path(url: &str) -> Result<(String, u16, String)> {
    let parsed =
        url::Url::parse(url).map_err(|e| ProxyError::Proxy(format!("invalid URL '{url}': {e}")))?;
    if parsed.scheme() != "https" {
        return Err(ProxyError::Proxy(format!(
            "unsupported URL scheme '{}': only https is supported",
            parsed.scheme()
        )));
    }
    let host = match parsed.host_str() {
        Some(h) => h.to_string(),
        None => return Err(ProxyError::Proxy(format!("URL has no host: {url}"))),
    };
    let Some(port) = parsed.port_or_known_default() else {
        return Err(ProxyError::Proxy(format!("URL has no known port: {url}")));
    };
    // Normalize the request target: guarantee a leading path and re-attach any query.
    let raw_path = parsed.path();
    let path = match (raw_path.is_empty(), parsed.query()) {
        (true, None) => "/".to_string(),
        (true, Some(q)) => format!("/?{q}"),
        (false, None) => raw_path.to_string(),
        (false, Some(q)) => format!("{raw_path}?{q}"),
    };
    Ok((host, port, path))
}
/// Resolve `host:port` to a single socket address.
///
/// A static DNS override (if configured) takes precedence. Otherwise the
/// system resolver is consulted, preferring the first IPv4 result and falling
/// back to the first address of any family.
async fn resolve_target_addr(host: &str, port: u16) -> Result<std::net::SocketAddr> {
    if let Some(overridden) = resolve_socket_addr(host, port) {
        return Ok(overridden);
    }
    let resolved = tokio::net::lookup_host((host, port))
        .await
        .map_err(|e| ProxyError::Proxy(format!("DNS resolve failed for {host}:{port}: {e}")))?;
    // Scan lazily: return the first IPv4 hit, remembering the first address of
    // any family as a fallback.
    let mut fallback: Option<std::net::SocketAddr> = None;
    for candidate in resolved {
        if candidate.is_ipv4() {
            return Ok(candidate);
        }
        fallback.get_or_insert(candidate);
    }
    fallback
        .ok_or_else(|| ProxyError::Proxy(format!("DNS returned no addresses for {host}:{port}")))
}
/// Open a transport to `host:port`, either through the upstream manager (when
/// one is supplied) or via a direct TCP connection.
///
/// Direct connections honor a static DNS override when configured and
/// otherwise let the OS resolver pick the address. All paths are bounded by
/// `HTTP_CONNECT_TIMEOUT`.
async fn connect_https_transport(
    host: &str,
    port: u16,
    upstream: Option<Arc<UpstreamManager>>,
) -> Result<UpstreamStream> {
    if let Some(manager) = upstream {
        // Route through the configured upstream instead of dialing directly.
        let target = resolve_target_addr(host, port).await?;
        return timeout(HTTP_CONNECT_TIMEOUT, manager.connect(target, None, None))
            .await
            .map_err(|_| {
                ProxyError::Proxy(format!("upstream connect timeout for {host}:{port}"))
            })?
            .map_err(|e| {
                ProxyError::Proxy(format!(
                    "upstream connect failed for {host}:{port}: {e}"
                ))
            });
    }
    // Direct path. Awaiting inside each match arm unifies both branches to the
    // same result type, so the timeout/error mapping is written only once
    // (the original duplicated the whole chain per branch).
    let connect_result = match resolve_socket_addr(host, port) {
        Some(addr) => timeout(HTTP_CONNECT_TIMEOUT, TcpStream::connect(addr)).await,
        None => timeout(HTTP_CONNECT_TIMEOUT, TcpStream::connect((host, port))).await,
    };
    let stream = connect_result
        .map_err(|_| ProxyError::Proxy(format!("connect timeout for {host}:{port}")))?
        .map_err(|e| ProxyError::Proxy(format!("connect failed for {host}:{port}: {e}")))?;
    Ok(UpstreamStream::Tcp(stream))
}
/// Perform a single HTTPS GET over a fresh connection and collect the whole body.
///
/// When `upstream` is provided the TCP leg is tunneled through the upstream
/// manager; rustls (webpki roots) and HTTP/1.1 run on top either way. Returns
/// the status code, the raw `Date` header (if any), and the body bytes.
pub(crate) async fn https_get(
    url: &str,
    upstream: Option<Arc<UpstreamManager>>,
) -> Result<HttpsGetResponse> {
    let (host, port, path_and_query) = extract_host_port_path(url)?;
    let transport = connect_https_transport(&host, port, upstream).await?;

    // TLS handshake over whatever transport we obtained (direct or tunneled).
    let sni = ServerName::try_from(host.clone())
        .map_err(|_| ProxyError::Proxy(format!("invalid TLS server name: {host}")))?;
    let tls = TlsConnector::from(build_tls_client_config());
    let encrypted = timeout(HTTP_REQUEST_TIMEOUT, tls.connect(sni, transport))
        .await
        .map_err(|_| ProxyError::Proxy(format!("TLS handshake timeout for {host}:{port}")))?
        .map_err(|e| ProxyError::Proxy(format!("TLS handshake failed for {host}:{port}: {e}")))?;

    // HTTP/1.1 handshake; hyper requires the connection future to be polled
    // concurrently with the request, hence the detached task.
    let (mut request_sender, conn) = hyper::client::conn::http1::handshake(TokioIo::new(encrypted))
        .await
        .map_err(|e| ProxyError::Proxy(format!("HTTP handshake failed for {host}:{port}: {e}")))?;
    tokio::spawn(async move {
        if let Err(e) = conn.await {
            debug!(error = %e, "HTTPS fetch connection task failed");
        }
    });

    // Omit the port from the Host header only for the default HTTPS port.
    let host_header = match port {
        443 => host.clone(),
        other => format!("{host}:{other}"),
    };
    let request = Request::builder()
        .method(Method::GET)
        .uri(path_and_query)
        .header(HOST, host_header)
        .header(USER_AGENT, "telemt-middle-proxy/1")
        .header(CONNECTION, "close")
        .body(Empty::<bytes::Bytes>::new())
        .map_err(|e| ProxyError::Proxy(format!("build HTTP request failed for {url}: {e}")))?;

    let response = timeout(HTTP_REQUEST_TIMEOUT, request_sender.send_request(request))
        .await
        .map_err(|_| ProxyError::Proxy(format!("HTTP request timeout for {url}")))?
        .map_err(|e| ProxyError::Proxy(format!("HTTP request failed for {url}: {e}")))?;

    let status = response.status().as_u16();
    let date_header = response
        .headers()
        .get(DATE)
        .and_then(|v| v.to_str().ok())
        .map(str::to_string);
    let collected = timeout(HTTP_REQUEST_TIMEOUT, response.into_body().collect())
        .await
        .map_err(|_| ProxyError::Proxy(format!("HTTP body read timeout for {url}")))?
        .map_err(|e| ProxyError::Proxy(format!("HTTP body read failed for {url}: {e}")))?;

    Ok(HttpsGetResponse {
        status,
        date_header,
        body: collected.to_bytes().to_vec(),
    })
}

View File

@ -4,6 +4,7 @@ mod codec;
mod config_updater; mod config_updater;
mod handshake; mod handshake;
mod health; mod health;
mod http_fetch;
#[cfg(test)] #[cfg(test)]
#[path = "tests/health_adversarial_tests.rs"] #[path = "tests/health_adversarial_tests.rs"]
mod health_adversarial_tests; mod health_adversarial_tests;
@ -44,7 +45,8 @@ use bytes::Bytes;
#[allow(unused_imports)] #[allow(unused_imports)]
pub use config_updater::{ pub use config_updater::{
ProxyConfigData, fetch_proxy_config, fetch_proxy_config_with_raw, load_proxy_config_cache, ProxyConfigData, fetch_proxy_config, fetch_proxy_config_via_upstream,
fetch_proxy_config_with_raw, fetch_proxy_config_with_raw_via_upstream, load_proxy_config_cache,
me_config_updater, save_proxy_config_cache, me_config_updater, save_proxy_config_cache,
}; };
pub use health::{me_drain_timeout_enforcer, me_health_monitor, me_zombie_writer_watchdog}; pub use health::{me_drain_timeout_enforcer, me_health_monitor, me_zombie_writer_watchdog};
@ -57,7 +59,7 @@ pub use pool::MePool;
pub use pool_nat::{detect_public_ip, stun_probe}; pub use pool_nat::{detect_public_ip, stun_probe};
pub use registry::ConnRegistry; pub use registry::ConnRegistry;
pub use rotation::{MeReinitTrigger, me_reinit_scheduler, me_rotation_task}; pub use rotation::{MeReinitTrigger, me_reinit_scheduler, me_rotation_task};
pub use secret::fetch_proxy_secret; pub use secret::{fetch_proxy_secret, fetch_proxy_secret_with_upstream};
pub(crate) use selftest::{bnd_snapshot, timeskew_snapshot, upstream_bnd_snapshots}; pub(crate) use selftest::{bnd_snapshot, timeskew_snapshot, upstream_bnd_snapshots};
pub use wire::proto_flags_for_tag; pub use wire::proto_flags_for_tag;

View File

@ -1,8 +1,11 @@
use httpdate; use httpdate;
use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
use tracing::{debug, info, warn}; use tracing::{debug, info, warn};
use super::http_fetch::https_get;
use super::selftest::record_timeskew_sample; use super::selftest::record_timeskew_sample;
use crate::transport::UpstreamManager;
use crate::error::{ProxyError, Result}; use crate::error::{ProxyError, Result};
pub const PROXY_SECRET_MIN_LEN: usize = 32; pub const PROXY_SECRET_MIN_LEN: usize = 32;
@ -34,10 +37,19 @@ pub(super) fn validate_proxy_secret_len(data_len: usize, max_len: usize) -> Resu
/// Fetch Telegram proxy-secret binary. /// Fetch Telegram proxy-secret binary.
pub async fn fetch_proxy_secret(cache_path: Option<&str>, max_len: usize) -> Result<Vec<u8>> { pub async fn fetch_proxy_secret(cache_path: Option<&str>, max_len: usize) -> Result<Vec<u8>> {
fetch_proxy_secret_with_upstream(cache_path, max_len, None).await
}
/// Fetch Telegram proxy-secret binary, optionally through upstream routing.
pub async fn fetch_proxy_secret_with_upstream(
cache_path: Option<&str>,
max_len: usize,
upstream: Option<Arc<UpstreamManager>>,
) -> Result<Vec<u8>> {
let cache = cache_path.unwrap_or("proxy-secret"); let cache = cache_path.unwrap_or("proxy-secret");
// 1) Try fresh download first. // 1) Try fresh download first.
match download_proxy_secret_with_max_len(max_len).await { match download_proxy_secret_with_max_len_via_upstream(max_len, upstream).await {
Ok(data) => { Ok(data) => {
if let Err(e) = tokio::fs::write(cache, &data).await { if let Err(e) = tokio::fs::write(cache, &data).await {
warn!(error = %e, "Failed to cache proxy-secret (non-fatal)"); warn!(error = %e, "Failed to cache proxy-secret (non-fatal)");
@ -77,19 +89,23 @@ pub async fn fetch_proxy_secret(cache_path: Option<&str>, max_len: usize) -> Res
} }
pub async fn download_proxy_secret_with_max_len(max_len: usize) -> Result<Vec<u8>> { pub async fn download_proxy_secret_with_max_len(max_len: usize) -> Result<Vec<u8>> {
let resp = reqwest::get("https://core.telegram.org/getProxySecret") download_proxy_secret_with_max_len_via_upstream(max_len, None).await
.await }
.map_err(|e| ProxyError::Proxy(format!("Failed to download proxy-secret: {e}")))?;
if !resp.status().is_success() { pub async fn download_proxy_secret_with_max_len_via_upstream(
max_len: usize,
upstream: Option<Arc<UpstreamManager>>,
) -> Result<Vec<u8>> {
let resp = https_get("https://core.telegram.org/getProxySecret", upstream).await?;
if !(200..=299).contains(&resp.status) {
return Err(ProxyError::Proxy(format!( return Err(ProxyError::Proxy(format!(
"proxy-secret download HTTP {}", "proxy-secret download HTTP {}",
resp.status() resp.status
))); )));
} }
if let Some(date) = resp.headers().get(reqwest::header::DATE) if let Some(date_str) = resp.date_header.as_deref()
&& let Ok(date_str) = date.to_str()
&& let Ok(server_time) = httpdate::parse_http_date(date_str) && let Ok(server_time) = httpdate::parse_http_date(date_str)
&& let Ok(skew) = SystemTime::now() && let Ok(skew) = SystemTime::now()
.duration_since(server_time) .duration_since(server_time)
@ -110,11 +126,7 @@ pub async fn download_proxy_secret_with_max_len(max_len: usize) -> Result<Vec<u8
} }
} }
let data = resp let data = resp.body;
.bytes()
.await
.map_err(|e| ProxyError::Proxy(format!("Read proxy-secret body: {e}")))?
.to_vec();
validate_proxy_secret_len(data.len(), max_len)?; validate_proxy_secret_len(data.len(), max_len)?;