Compare commits


11 commits

SHA1 Message Date
d7dff98c8a feat: add session and local id vars 2024-10-30 01:40:36 +01:00
ca228593e5 chore: bump version 2024-10-25 23:03:43 +02:00
5a8937ed42 feat: don't show errors on filtered out collections 2024-10-25 21:34:54 +02:00
447b073110 fix: apply timeout to everything 2024-10-25 21:10:49 +02:00
9c26b68806 fix: fill() with owned values again 2024-10-25 17:32:13 +02:00
94fcb8efdc fix: fallback to plaintext if json parse fails 2024-10-25 17:31:46 +02:00
c29c91a7b9 fix: smart var replace in client too 2024-10-25 17:31:01 +02:00
5670b8b67a fix: don't quit at first fill() failure 2024-10-25 17:30:30 +02:00
b3fe8faaba fix: trait replace vars uses pre-parsed map 2024-10-25 17:30:02 +02:00
492226f56a fix: dont break when multiple vars 2024-10-24 01:19:30 +02:00
40f2c0a6d4 feat: initial work on detecting vars
i want to throw errors when vars are missing, so that its easier to find
and fill them when running pw
2024-10-22 21:22:25 +02:00
7 changed files with 166 additions and 96 deletions

Cargo.lock (generated)

@@ -815,7 +815,7 @@ checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
 [[package]]
 name = "postwoman"
-version = "0.4.1"
+version = "0.4.2"
 dependencies = [
  "base64",
  "chrono",
@@ -832,6 +832,7 @@ dependencies = [
  "tokio",
  "toml",
  "toml_edit",
+ "uuid",
 ]

 [[package]]
@@ -1385,6 +1386,15 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

+[[package]]
+name = "uuid"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a"
+dependencies = [
+ "getrandom",
+]
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"


@@ -1,7 +1,7 @@
 [package]
 name = "postwoman"
 description = "API tester and debugger for your CLI "
-version = "0.4.1"
+version = "0.4.2"
 repository = "https://moonlit.technology/alemi/postwoman"
 authors = [ "alemi <me@alemi.dev>" ]
 license = "GPL-3.0-only"
@@ -25,6 +25,7 @@ thiserror = "1.0.64"
 tokio = { version = "1.40", features = ["rt-multi-thread"] }
 toml = { version = "0.8", features = ["preserve_order"] }
 toml_edit = { version = "0.22", features = ["serde"] } # only to pretty print tables ...
+uuid = { version = "1.11", features = ["v4"] }

 [profile.release]
 opt-level = "z"
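
The new `uuid` dependency (with the `v4` feature) is what backs the session/local id variables introduced in this range. A minimal standalone sketch, not taken from the diff, of the distinction between a per-process session id and a per-use local id:

```rust
use std::sync::OnceLock;

// Cached once per process: every endpoint in a run sees the same value.
fn session_id() -> &'static str {
    static SID: OnceLock<String> = OnceLock::new();
    SID.get_or_init(|| uuid::Uuid::new_v4().to_string())
}

fn main() {
    // stable across calls within one run
    println!("session: {} / {}", session_id(), session_id());
    // fresh on every use
    println!("local:   {} / {}", uuid::Uuid::new_v4(), uuid::Uuid::new_v4());
}
```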


@@ -42,6 +42,9 @@ pub enum PostWomanError {
 	#[error("regex failed matching in content: {0}")]
 	NoMatch(String),
+
+	#[error("missing environment variable: {0}")]
+	MissingVar(#[from] crate::ext::FillError),
 }

 #[derive(Debug, thiserror::Error)]
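
The `#[from]` attribute on the new variant is what lets the `?` operator turn a `crate::ext::FillError` into `PostWomanError::MissingVar` automatically. A self-contained sketch of that thiserror pattern, with `InnerError`/`OuterError` as hypothetical stand-ins:

```rust
#[derive(Debug, thiserror::Error)]
#[error("could not fill {0}")]
struct InnerError(String);

#[derive(Debug, thiserror::Error)]
enum OuterError {
    // #[from] generates an `impl From<InnerError> for OuterError`
    #[error("missing environment variable: {0}")]
    MissingVar(#[from] InnerError),
}

fn fill() -> Result<String, InnerError> {
    Err(InnerError("API_KEY".into()))
}

fn run() -> Result<(), OuterError> {
    let _value = fill()?; // converts via the generated From impl
    Ok(())
}
```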


@@ -1,3 +1,10 @@
+use std::{collections::HashMap, sync::OnceLock};
+
+pub fn sid() -> &'static str {
+	static SID: std::sync::OnceLock<String> = std::sync::OnceLock::new();
+	SID.get_or_init(|| uuid::Uuid::new_v4().to_string())
+}
+
 #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 #[serde(untagged)]
 pub enum StringOr<T> {
@@ -34,18 +41,47 @@ pub fn stringify_json(v: &serde_json::Value) -> String {
 	}
 }

+pub fn var_matcher() -> &'static regex::Regex {
+	static MATCHER : OnceLock<regex::Regex> = OnceLock::new();
+	MATCHER.get_or_init(|| regex::Regex::new(r"\$\{(\w+)\}").expect("wrong matcher regex"))
+}
+
 // keep it as separate fn so we can change it everywhere easily
 pub fn full_name(namespace: &str, name: &str) -> String {
 	format!("{namespace}:{name}")
 }

-pub trait FillableFromEnvironment {
-	fn fill(self, env: &toml::Table) -> Self;
+#[derive(Debug, thiserror::Error)]
+#[error("could not fill {0}")]
+pub struct FillError(pub String);
+
+pub trait FillableFromEnvironment: Sized {
+	fn fill(self, env: &toml::Table) -> Result<Self, FillError>;
+
+	fn replace(mut from: String, env: &HashMap<String, String>) -> Result<String, FillError> {
+		let placeholders: Vec<(String, String)> = var_matcher()
+			.captures_iter(&from)
+			.map(|m| m.extract())
+			.map(|(txt, [var])| (txt.to_string(), var.to_string()))
+			.collect();
+
+		// TODO can we avoid cloning all matches??? can't mutate `from` as captures_iter holds an
+		//      immutable reference to original string
+
+		for (txt, var) in placeholders {
+			let value = env.get(&var).ok_or(FillError(var.to_string()))?;
+			from = from.replace(&txt, value);
+		}
+
+		Ok(from)
+	}

 	fn default_vars(env: &toml::Table) -> std::collections::HashMap<String, String> {
 		let mut vars: std::collections::HashMap<String, String> = std::collections::HashMap::default();
 		vars.insert("POSTWOMAN_TIMESTAMP".to_string(), chrono::Local::now().timestamp().to_string());
+		vars.insert("POSTWOMAN_LOCAL_ID".to_string(), uuid::Uuid::new_v4().to_string());
+		vars.insert("POSTWOMAN_SESSION_ID".to_string(), sid().to_string());
 		for (k, v) in env {
 			vars.insert(k.to_string(), stringify_toml(v));
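
The `replace` helper above collects all `${VAR}` captures before mutating the string (because `captures_iter` borrows it) and fails on the first name that has no value. A standalone sketch of the same idea, assuming only the `regex` crate; the URL in the final comment is a made-up example:

```rust
use std::collections::HashMap;

fn fill_placeholders(mut text: String, vars: &HashMap<String, String>) -> Result<String, String> {
    let matcher = regex::Regex::new(r"\$\{(\w+)\}").expect("static regex is valid");
    // collect owned (placeholder, name) pairs first: captures_iter borrows `text`
    let found: Vec<(String, String)> = matcher
        .captures_iter(&text)
        .map(|c| (c[0].to_string(), c[1].to_string()))
        .collect();
    for (placeholder, name) in found {
        let value = vars.get(&name).ok_or(name)?; // missing variable -> error carrying its name
        text = text.replace(&placeholder, value);
    }
    Ok(text)
}

// e.g. fill_placeholders("https://${HOST}/v1/${POSTWOMAN_SESSION_ID}".into(), &vars)
```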


@@ -70,14 +70,6 @@ fn main() {
 	let args = PostWomanArgs::parse();
 	let multi_thread = args.multi_thread;

-	// if we got a regex, test it early to avoid wasting work when invalid
-	if let Some(PostWomanActions::Run { ref query, .. }) = args.action {
-		// note that if you remove this test, there's another .expect() below you need to manage too!
-		if let Err(e) = regex::Regex::new(query) {
-			return eprintln!("! invalid regex filter: {e}");
-		}
-	}
-
 	let mut collections = IndexMap::new();
 	if !load_collections(&mut collections, args.collection.clone(), &toml::Table::default()) {
@@ -94,6 +86,11 @@ fn main() {
 		},
 		PostWomanActions::Run { query, parallel, debug, dry_run } => {
+			// note that if you remove this test, there's another .expect() below you need to manage too!
+			let filter = match regex::Regex::new(query) {
+				Ok(regex) => regex,
+				Err(e) => return eprintln!("! invalid regex filter: {e}"),
+			};

 			let task = async move {
 				let mut pool = tokio::task::JoinSet::new();
@@ -101,7 +98,7 @@ fn main() {
 					run_collection_endpoints(
 						collection_name,
 						collection,
-						query.clone(),
+						filter.clone(),
 						*parallel,
 						*debug,
 						*dry_run,
@@ -138,22 +135,39 @@ fn main() {
 // TODO too many arguments
 async fn run_collection_endpoints(
 	namespace: String,
-	collection: PostWomanCollection,
-	query: String,
+	mut collection: PostWomanCollection,
+	filter: regex::Regex,
 	parallel: bool,
 	debug: bool,
 	dry_run: bool,
 	report: bool,
 	pool: &mut tokio::task::JoinSet<()>
 ) {
-	// this is always safe to compile because we tested it beforehand
-	let pattern = regex::Regex::new(&query).expect("tested it before and still failed here???");
-	let env = std::sync::Arc::new(collection.env);
-	let client = std::sync::Arc::new(collection.client.fill(&env));
-
-	for (name, mut endpoint) in collection.route {
+	let mut matched_endpoints = Vec::new();
+	for name in collection.route.keys() {
+		let full_name = ext::full_name(&namespace, name);
+		if filter.find(&full_name).is_some() {
+			matched_endpoints.push(name.clone());
+		};
+	}
+
+	if matched_endpoints.is_empty() { return } // nothing to do for this collection
+
+	let env = std::sync::Arc::new(collection.env);
+	let client = match collection.client.fill(&env) {
+		Ok(c) => std::sync::Arc::new(c),
+		Err(e) => return eprintln!(
+			"<!>[{}] {namespace}:* \terror constructing client\n ! missing environment variable: {e}",
+			chrono::Local::now().format(fmt::TIMESTAMP_FMT)
+		),
+	};
+
+	for name in matched_endpoints {
+		let mut endpoint = collection.route
+			.swap_remove(&name)
+			.expect("endpoint removed while running collection?");
 		let full_name = ext::full_name(&namespace, &name);
-		if pattern.find(&full_name).is_none() { continue };
+		if filter.find(&full_name).is_none() { continue };
 		if debug { endpoint.extract = Some(ext::StringOr::T(model::ExtractorConfig::Debug)) };

 		let _client = client.clone();
@@ -162,15 +176,17 @@ async fn run_collection_endpoints(
 		let task = async move {
 			let before = chrono::Local::now();
-			eprintln!(" : [{}] {full_name} \tsending request...", before.format(fmt::TIMESTAMP_FMT));

-			let res = if dry_run {
-				Ok("".to_string())
-			} else {
-				endpoint
-					.fill(&_env)
-					.execute(&_client)
-					.await
-			};
+			let res = match endpoint.fill(&_env) {
+				Err(e) => Err(e.into()),
+				Ok(e) => {
+					eprintln!(" : [{}] {full_name} \tsending request...", before.format(fmt::TIMESTAMP_FMT));
+					if dry_run {
+						Ok("".to_string())
+					} else {
+						e.execute(&_client).await
+					}
+				},
+			};

 			let after = chrono::Local::now();
@@ -179,7 +195,7 @@ async fn run_collection_endpoints(
 			let timestamp = after.format(fmt::TIMESTAMP_FMT);
 			let symbol = if res.is_ok() { " + " } else { "<!>" };
 			let verb = if res.is_ok() { "done in" } else { "failed after" };
-			eprintln!("{symbol}[{timestamp}] {_namespace}::{name} \t{verb} {elapsed}ms", );
+			eprintln!("{symbol}[{timestamp}] {_namespace}:{name} \t{verb} {elapsed}ms", );
 			if report {
 				(res, _namespace, name, elapsed).report();
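
The run path now matches the pre-compiled regex against the `namespace:name` form and then takes ownership of each matched endpoint with `IndexMap::swap_remove`. A simplified sketch of just that filtering step, with endpoint configs reduced to plain strings for illustration:

```rust
use indexmap::IndexMap;

fn matched_routes(
    namespace: &str,
    routes: &mut IndexMap<String, String>, // real code stores endpoint configs here
    filter: &regex::Regex,
) -> Vec<(String, String)> {
    // first pass: collect matching names without holding a borrow on the map
    let names: Vec<String> = routes
        .keys()
        .filter(|name| filter.find(&format!("{namespace}:{name}")).is_some())
        .cloned()
        .collect();
    // second pass: pull the matched endpoints out, taking ownership
    names
        .into_iter()
        .filter_map(|name| routes.swap_remove(&name).map(|endpoint| (name, endpoint)))
        .collect()
}
```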


@@ -1,4 +1,4 @@
-use crate::ext::FillableFromEnvironment;
+use crate::ext::{FillError, FillableFromEnvironment};

 #[derive(Debug, Default, Clone, serde::Serialize, serde::Deserialize)]
@@ -16,21 +16,17 @@ pub struct ClientConfig {
 }

 impl FillableFromEnvironment for ClientConfig {
-	fn fill(mut self, env: &toml::Table) -> Self {
+	fn fill(mut self, env: &toml::Table) -> Result<Self, FillError> {
 		let vars = Self::default_vars(env);
-		for (k, v) in vars {
-			let k_var = format!("${{{k}}}");
-			if let Some(base) = self.base {
-				self.base = Some(base.replace(&k_var, &v));
-			}
-			if let Some(user_agent) = self.user_agent {
-				self.user_agent = Some(user_agent.replace(&k_var, &v));
-			}
+		if let Some(ref base) = self.base {
+			self.base = Some(Self::replace(base.clone(), &vars)?);
 		}
-		self
+		if let Some(ref user_agent) = self.user_agent {
+			self.user_agent = Some(Self::replace(user_agent.clone(), &vars)?);
+		}
+		Ok(self)
 	}
 }
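
With `fill` now returning a `Result`, a caller has to handle a missing `${VAR}` up front instead of sending requests with the raw placeholder left in. A hypothetical caller-side sketch (module paths assumed, not taken from the diff):

```rust
use crate::ext::{FillError, FillableFromEnvironment};
use crate::model::ClientConfig;

// Hypothetical helper: fails fast if e.g. ${API_TOKEN} has no value in `env`.
fn build_client(config: ClientConfig, env: &toml::Table) -> Result<ClientConfig, FillError> {
    config.fill(env)
}
```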


@@ -1,3 +1,4 @@
+use std::collections::HashMap;
 use std::str::FromStr;

 use base64::{prelude::BASE64_STANDARD, Engine};
@@ -8,7 +9,7 @@ use jaq_interpret::FilterT;

 use crate::errors::InvalidHeaderError;
 use crate::{PostWomanError, APP_USER_AGENT};
-use crate::ext::{stringify_json, FillableFromEnvironment, StringOr};
+use crate::ext::{stringify_json, FillError, FillableFromEnvironment, StringOr};

 use super::{ExtractorConfig, ClientConfig};
@@ -84,6 +85,8 @@ impl EndpointConfig {
 		let client = reqwest::Client::builder()
 			.user_agent(opts.user_agent.as_deref().unwrap_or(APP_USER_AGENT))
 			.timeout(std::time::Duration::from_secs(opts.timeout.unwrap_or(30)))
+			.read_timeout(std::time::Duration::from_secs(opts.timeout.unwrap_or(30)))
+			.connect_timeout(std::time::Duration::from_secs(opts.timeout.unwrap_or(30)))
 			.redirect(opts.redirects.map(reqwest::redirect::Policy::limited).unwrap_or(reqwest::redirect::Policy::none()))
 			.danger_accept_invalid_certs(opts.accept_invalid_certs.unwrap_or(false))
 			.build()?;
@@ -146,84 +149,89 @@
 }

 impl FillableFromEnvironment for EndpointConfig {
-	fn fill(mut self, env: &toml::Table) -> Self {
+	fn fill(mut self, env: &toml::Table) -> Result<Self, FillError> {
 		let vars = Self::default_vars(env);
-		for (k, v) in vars {
-			let k_var = format!("${{{k}}}");
-			self.path = self.path.replace(&k_var, &v);
+		self.path = Self::replace(self.path, &vars)?;
 		if let Some(method) = self.method {
-			self.method = Some(method.replace(&k_var, &v));
+			self.method = Some(Self::replace(method, &vars)?);
 		}
 		if let Some(b) = self.body {
 			match b {
 				StringOr::Str(body) => {
-					self.body = Some(StringOr::Str(body.replace(&k_var, &v)));
+					self.body = Some(StringOr::Str(Self::replace(body, &vars)?));
 				},
 				StringOr::T(json) => {
-					let wrap = toml::Value::Table(json.clone());
-					let toml::Value::Table(out) = replace_recursive(wrap, &k_var, &v)
+					let wrap = toml::Value::Table(json);
+					let toml::Value::Table(out) = replace_recursive(wrap, &vars)?
 					else { unreachable!("we put in a table, we get out a table") };
 					self.body = Some(StringOr::T(out));
 				},
 			}
 		}
 		if let Some(query) = self.query {
-			self.query = Some(
-				query.into_iter()
-					.map(|x| x.replace(&k_var, &v))
-					.collect()
-			);
+			let mut out = Vec::new();
+			for q in query {
+				out.push(Self::replace(q, &vars)?);
+			}
+			self.query = Some(out);
 		}
 		if let Some(headers) = self.headers {
-			self.headers = Some(
-				headers.into_iter()
-					.map(|x| x.replace(&k_var, &v))
-					.collect()
-			);
+			let mut out = Vec::new();
+			for h in headers {
+				out.push(Self::replace(h.clone(), &vars)?);
+			}
+			self.headers = Some(out);
 		}
-		}
-		self
+		Ok(self)
 	}
 }

-fn replace_recursive(element: toml::Value, from: &str, to: &str) -> toml::Value {
-	match element {
+fn replace_recursive(element: toml::Value, env: &HashMap<String, String>) -> Result<toml::Value, FillError> {
+	Ok(match element {
 		toml::Value::Float(x) => toml::Value::Float(x),
 		toml::Value::Integer(x) => toml::Value::Integer(x),
 		toml::Value::Boolean(x) => toml::Value::Boolean(x),
 		toml::Value::Datetime(x) => toml::Value::Datetime(x),
-		toml::Value::String(x) => toml::Value::String(x.replace(from, to)),
-		toml::Value::Array(x) => toml::Value::Array(
-			x.into_iter().map(|x| replace_recursive(x, from, to)).collect()
-		),
+		toml::Value::String(x) => toml::Value::String(EndpointConfig::replace(x, env)?),
+		toml::Value::Array(mut arr) => {
+			for v in arr.iter_mut() {
+				*v = replace_recursive(v.clone(), env)?;
+			}
+			toml::Value::Array(arr)
+		},
 		toml::Value::Table(map) => {
 			let mut out = toml::map::Map::new();
 			for (k, v) in map {
-				let new_v = replace_recursive(v.clone(), from, to);
-				if k.contains(from) {
-					out.insert(k.replace(from, to), new_v);
-				} else {
-					out.insert(k.to_string(), new_v);
-				}
+				let new_v = replace_recursive(v.clone(), env)?;
+				let new_k = EndpointConfig::replace(k, env)?;
+				out.insert(new_k, new_v);
 			}
 			toml::Value::Table(out)
 		},
-	}
+	})
 }

 async fn format_body(res: reqwest::Response) -> Result<String, PostWomanError> {
-	match res.headers().get("Content-Type") {
-		None => Ok(res.text().await?),
+	let content_type = res.headers().get("Content-Type").cloned();
+	let raw = res.bytes().await?;
+	match content_type {
+		None => Ok(String::from_utf8_lossy(&raw).to_string()),
 		Some(v) => {
 			let content_type = v.to_str()?;
 			if content_type.starts_with("application/json") {
-				Ok(serde_json::to_string_pretty(&res.json::<serde_json::Value>().await?)?)
+				match serde_json::from_slice::<serde_json::Value>(&raw) {
+					Ok(x) => Ok(serde_json::to_string_pretty(&x)?),
+					Err(e) => {
+						eprintln!(" ? content-type is 'json' but content is not valid json: {e}");
+						Ok(String::from_utf8_lossy(&raw).to_string())
+					},
+				}
 			} else if content_type.starts_with("text/plain") || content_type.starts_with("text/html") {
-				Ok(res.text().await?)
+				Ok(String::from_utf8_lossy(&raw).to_string())
 			} else {
-				Ok(format!("base64({})\n", BASE64_STANDARD.encode(res.bytes().await?)))
+				Ok(format!("base64({})\n", BASE64_STANDARD.encode(raw)))
 			}
 		},
 	}
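
The reworked `format_body` reads the response bytes once and only pretty-prints them as JSON if they actually parse, falling back to lossy text otherwise. A standalone sketch of that fallback rule on a plain byte slice, with no reqwest involved:

```rust
fn render(content_type: Option<&str>, raw: &[u8]) -> String {
    match content_type {
        Some(ct) if ct.starts_with("application/json") => {
            match serde_json::from_slice::<serde_json::Value>(raw) {
                Ok(v) => serde_json::to_string_pretty(&v).unwrap_or_default(),
                // declared as json, but the body is not valid json: keep the raw text
                Err(_) => String::from_utf8_lossy(raw).to_string(),
            }
        }
        _ => String::from_utf8_lossy(raw).to_string(),
    }
}

// render(Some("application/json"), b"not json") == "not json"
```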