mirror of https://github.com/zino-rs/zino
Add HTTP client
This commit is contained in:
parent b844f35d30
commit 920fb46a65
@@ -0,0 +1,3 @@
[build]
rustflags = ["--cfg", "uuid_unstable"]
rustdocflags = ["--cfg", "uuid_unstable"]
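For context: in uuid 1.2.x, UUID version 7 support is still gated behind the crate's unstable API, so the `--cfg uuid_unstable` flag above is what lets the `Uuid::now_v7()` calls later in this commit compile. A minimal sketch of what the flag unlocks (assuming the `v7` feature is enabled in Cargo.toml, as it is for zino-core below):

// Sketch only: needs `uuid = { version = "1.2", features = ["v7"] }`
// plus `--cfg uuid_unstable` in RUSTFLAGS/RUSTDOCFLAGS, as configured above.
use uuid::Uuid;

fn main() {
    // UUIDv7 embeds a millisecond timestamp, so ids sort roughly by creation
    // time, which is why the models below switch from `new_v4` to `now_v7`.
    let id = Uuid::now_v7();
    println!("{id}");
}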
@@ -6,9 +6,9 @@ edition = "2021"
publish = false

[dependencies]
axum = { version = "0.6.1" }
serde_json = { version = "1.0.91" }
tracing = { version = "0.1.37" }
axum = "0.6.2"
serde_json = "1.0.91"
tracing = "0.1.37"

[dependencies.zino]
path = "../../zino"
@@ -26,7 +26,7 @@ username = "postgres"
password = "QAx01wnh1i5ER713zfHmZi6dIUYn/Iq9ag+iUGtvKzEFJFYW"

[tracing]
filter = "info,sqlx=trace,tower_http=trace,zino=trace,zino_core=trace"
filter = "debug,sqlx=trace,tower_http=trace,zino=trace,zino_core=trace"
display-filename = true
display-line-number = true
@@ -12,33 +12,58 @@ repository = "https://github.com/photino/zino"
documentation = "https://docs.rs/zino-core"
readme = "README.md"

[package.metadata.docs.rs]
features = ["axum"]

[dependencies]
aes-gcm-siv = { version = "0.11.1" }
async-trait = { version = "0.1.60" }
base64 = { version = "0.21.0" }
bytes = { version = "1.3.0" }
chrono = { version = "0.4.23", features = ["serde"] }
cron = { version = "0.12.0" }
futures = { version = "0.3.25" }
hmac = { version = "0.12.1" }
http = { version = "0.2.8" }
http-body = { version = "0.4.5" }
http-types = { version = "2.12.0" }
hyper = { version = "0.14.23" }
lru = { version = "0.9.0" }
metrics = { version = "0.20.1" }
metrics-exporter-prometheus = { version = "0.11.0" }
metrics-exporter-tcp = { version = "0.7.0" }
parking_lot = { version = "0.12.1" }
rand = { version = "0.8.5" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.91" }
serde_qs = { version = "0.10.1" }
serde_urlencoded = { version = "0.7.1" }
sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"] }
toml = { version = "0.5.10" }
tracing = { version = "0.1.37" }
tracing-appender = { version = "0.2.2" }
tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json", "local-time"] }
url = { version = "2.3.1" }
uuid = { version = "1.2.2", features = ["fast-rng", "serde", "v4", "v7"] }
aes-gcm-siv = "0.11.1"
async-trait = "0.1.60"
base64 = "0.21.0"
bytes = "1.3.0"
cron = "0.12.0"
futures = "0.3.25"
hmac = "0.12.1"
http = "0.2.8"
http-body = "0.4.5"
http-types = "2.12.0"
hyper = "0.14.23"
lru = "0.9.0"
metrics = "0.20.1"
metrics-exporter-prometheus = "0.11.0"
metrics-exporter-tcp = "0.7.0"
parking_lot = "0.12.1"
rand = "0.8.5"
reqwest-middleware = "0.2.0"
reqwest-tracing = "0.4.0"
serde_json = "1.0.91"
serde_qs = "0.11.0"
serde_urlencoded = "0.7.1"
task-local-extensions = "0.1.3"
toml = "0.5.10"
tracing = "0.1.37"
tracing-appender = "0.2.2"
url = "2.3.1"

[dependencies.chrono]
version = "0.4.23"
features = ["serde"]

[dependencies.serde]
version = "1.0.152"
features = ["derive"]

[dependencies.reqwest]
version = "0.11.13"
features = ["cookies", "gzip", "brotli", "deflate", "json", "multipart"]

[dependencies.sqlx]
version = "0.6.2"
features = ["runtime-tokio-native-tls", "postgres", "uuid", "chrono", "json"]

[dependencies.tracing-subscriber]
version = "0.3.16"
features = ["env-filter", "json", "local-time"]

[dependencies.uuid]
version = "1.2.2"
features = ["fast-rng", "serde", "v4", "v7"]
@@ -0,0 +1,55 @@
use crate::application::Application;
use reqwest::{Client, Request, Response};
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware, Extension, Result};
use reqwest_tracing::{
    default_on_request_end, reqwest_otel_span, OtelName, ReqwestOtelSpanBackend, TracingMiddleware,
};
use std::{sync::OnceLock, time::Instant};
use task_local_extensions::Extensions;
use tracing::Span;

pub(super) fn init<APP: Application + ?Sized>() {
    let name = APP::name();
    let version = APP::version();
    let reqwest_client = Client::builder()
        .user_agent(format!("ZinoBot/1.0 {name}/{version}"))
        .cookie_store(true)
        .gzip(true)
        .brotli(true)
        .deflate(true)
        .build()
        .unwrap_or_else(|err| panic!("failed to create an HTTP client: {err}"));
    let client = ClientBuilder::new(reqwest_client)
        .with_init(Extension(OtelName("zino-bot".into())))
        .with(TracingMiddleware::default())
        .with(TracingMiddleware::<RequestTiming>::new())
        .build();
    SHARED_HTTP_CLIENT
        .set(client)
        .expect("failed to set an HTTP client for the application");
}

/// Request timing.
struct RequestTiming;

impl ReqwestOtelSpanBackend for RequestTiming {
    fn on_request_start(req: &Request, extension: &mut Extensions) -> Span {
        extension.insert(Instant::now());
        // Declare `latency_micros` as an empty field so that it can be recorded
        // in `on_request_end` below.
        reqwest_otel_span!(
            name = "example-request",
            req,
            latency_micros = tracing::field::Empty
        )
    }

    fn on_request_end(span: &Span, outcome: &Result<Response>, extensions: &mut Extensions) {
        let latency_micros = extensions
            .get::<Instant>()
            .and_then(|t| u64::try_from(t.elapsed().as_micros()).ok());
        default_on_request_end(span, outcome);
        span.record("latency_micros", latency_micros);
    }
}

/// Shared HTTP client.
pub(crate) static SHARED_HTTP_CLIENT: OnceLock<ClientWithMiddleware> = OnceLock::new();
@@ -0,0 +1,108 @@
use crate::application::Application;
use metrics_exporter_prometheus::{Matcher, PrometheusBuilder};
use metrics_exporter_tcp::TcpBuilder;
use std::{net::IpAddr, time::Duration};

pub(super) fn init<APP: Application + ?Sized>() {
    if let Some(metrics) = APP::config().get("metrics").and_then(|t| t.as_table()) {
        let exporter = metrics
            .get("exporter")
            .and_then(|t| t.as_str())
            .unwrap_or_default();
        if exporter == "prometheus" {
            let mut builder = match metrics.get("push-gateway").and_then(|t| t.as_str()) {
                Some(endpoint) => {
                    let interval = metrics
                        .get("interval")
                        .and_then(|t| t.as_integer().and_then(|i| i.try_into().ok()))
                        .unwrap_or(60);
                    PrometheusBuilder::new()
                        .with_push_gateway(endpoint, Duration::from_secs(interval))
                        .expect("failed to configure the exporter to run in push gateway mode")
                }
                None => {
                    let host = metrics
                        .get("host")
                        .and_then(|t| t.as_str())
                        .unwrap_or("127.0.0.1");
                    let port = metrics
                        .get("port")
                        .and_then(|t| t.as_integer())
                        .and_then(|t| u16::try_from(t).ok())
                        .unwrap_or(9000);
                    let host_addr = host
                        .parse::<IpAddr>()
                        .unwrap_or_else(|err| panic!("invalid host address `{host}`: {err}"));
                    PrometheusBuilder::new().with_http_listener((host_addr, port))
                }
            };
            if let Some(quantiles) = metrics.get("quantiles").and_then(|t| t.as_array()) {
                let quantiles = quantiles
                    .iter()
                    .filter_map(|q| q.as_float())
                    .collect::<Vec<_>>();
                builder = builder
                    .set_quantiles(&quantiles)
                    .expect("invalid quantiles to render histograms");
            }
            if let Some(buckets) = metrics.get("buckets").and_then(|t| t.as_table()) {
                for (key, value) in buckets {
                    let matcher = if key.starts_with('^') {
                        Matcher::Prefix(key.to_string())
                    } else if key.ends_with('$') {
                        Matcher::Suffix(key.to_string())
                    } else {
                        Matcher::Full(key.to_string())
                    };
                    let values = value
                        .as_array()
                        .expect("buckets should be an array of floats")
                        .iter()
                        .filter_map(|v| v.as_float())
                        .collect::<Vec<_>>();
                    builder = builder
                        .set_buckets_for_metric(matcher, &values)
                        .expect("invalid buckets to render histograms");
                }
            }
            if let Some(labels) = metrics.get("global-labels").and_then(|t| t.as_table()) {
                for (key, value) in labels {
                    builder = builder.add_global_label(key, value.to_string());
                }
            }
            if let Some(addresses) = metrics.get("allowed-addresses").and_then(|t| t.as_array()) {
                for addr in addresses {
                    builder = builder
                        .add_allowed_address(addr.as_str().unwrap_or_default())
                        .unwrap_or_else(|err| panic!("invalid IP address `{addr}`: {err}"));
                }
            }
            builder
                .install()
                .expect("failed to install Prometheus exporter");
        } else if exporter == "tcp" {
            let host = metrics
                .get("host")
                .and_then(|t| t.as_str())
                .unwrap_or("127.0.0.1");
            let port = metrics
                .get("port")
                .and_then(|t| t.as_integer())
                .and_then(|t| u16::try_from(t).ok())
                .unwrap_or(9000);
            let buffer_size = metrics
                .get("buffer_size")
                .and_then(|t| t.as_integer())
                .and_then(|t| usize::try_from(t).ok())
                .unwrap_or(1024);
            let host_addr = host
                .parse::<IpAddr>()
                .unwrap_or_else(|err| panic!("invalid host address `{host}`: {err}"));
            TcpBuilder::new()
                .listen_address((host_addr, port))
                .buffer_size(Some(buffer_size))
                .install()
                .expect("failed to install TCP exporter");
        }
    }
}
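A sketch of the `[metrics]` table this new module understands, assembled from the keys it reads above; the values are illustrative, not defaults shipped by the project, and the table would live in the application's TOML config:

[metrics]
exporter = "prometheus"              # or "tcp"
host = "127.0.0.1"                   # HTTP listener; ignored when push-gateway is set
port = 9000
# push-gateway = "http://127.0.0.1:9091"
# interval = 60                      # push interval in seconds
quantiles = [0.5, 0.9, 0.99]
global-labels = { env = "dev" }
allowed-addresses = ["127.0.0.1"]

[metrics.buckets]
"http_request_duration" = [0.005, 0.05, 0.5, 5.0]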
@@ -1,25 +1,17 @@
//! Application trait.
//! Application utilities.

use crate::{
    schedule::{AsyncCronJob, CronJob, Job, JobScheduler},
    state::State,
    Map,
};
use metrics_exporter_prometheus::{Matcher, PrometheusBuilder};
use metrics_exporter_tcp::TcpBuilder;
use std::{
    collections::HashMap,
    env, fs, io,
    net::IpAddr,
    path::{Path, PathBuf},
    sync::{LazyLock, OnceLock},
    thread,
    time::Duration,
    BoxError, Map,
};
use reqwest::{Method, Response, Url};
use std::{collections::HashMap, env, path::PathBuf, sync::LazyLock, thread};
use toml::value::Table;
use tracing::Level;
use tracing_appender::{non_blocking::WorkerGuard, rolling};
use tracing_subscriber::fmt::{time, writer::MakeWriterExt};

mod http_client;
mod metrics_exporter;
mod tracing_subscriber;

/// Application.
pub trait Application {
@@ -54,6 +46,23 @@ pub trait Application {
        self
    }

    /// Makes an HTTP request to the provided resource
    /// using [`reqwest`](https://crates.io/crates/reqwest).
    async fn fetch(resource: Url, options: Map) -> Result<Response, BoxError> {
        let method = options
            .get("method")
            .and_then(|s| s.as_str())
            .and_then(|s| s.parse().ok())
            .unwrap_or(Method::GET);
        http_client::SHARED_HTTP_CLIENT
            .get()
            .ok_or("failed to get the global http client")?
            .request(method, resource)
            .send()
            .await
            .map_err(BoxError::from)
    }

    /// Returns the application env.
    #[inline]
    fn env() -> &'static str {
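A hedged sketch of how an application might call the new `fetch` method. The import paths (`zino_core::Map`, `zino_core::BoxError`, `zino_core::application::Application`) and the helper itself are assumptions based on this diff, not a documented API; only the "method" option key is read by `fetch` in this hunk.

use serde_json::json;
use zino_core::{application::Application, BoxError, Map};

// Hypothetical helper, generic over any `Application` implementor.
async fn ping<A: Application>() -> Result<(), BoxError> {
    let url = reqwest::Url::parse("https://example.com/healthz")?;
    let mut options = Map::new();
    // `fetch` defaults to GET when "method" is absent or unparsable.
    options.insert("method".to_string(), json!("HEAD"));
    let response = A::fetch(url, options).await?;
    tracing::info!(status = response.status().as_u16(), "fetched");
    Ok(())
}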
@@ -96,195 +105,12 @@ pub trait Application {
        LazyLock::force(&PROJECT_DIR)
    }

    /// Initializes the application. It sets up the tracing subscriber and the metrics exporter.
    /// Initializes the application. It sets up the tracing subscriber, the metrics exporter
    /// and a global HTTP client.
    fn init() {
        if TRACING_APPENDER_GUARD.get().is_some() {
            tracing::warn!("the tracing subscriber has already been initialized");
            return;
        }

        let app_env = Self::env();
        let mut log_dir = "./log";
        let mut env_filter = if app_env == "dev" {
            "info,sqlx=trace,zino=trace,zino_core=trace"
        } else {
            "info,sqlx=warn"
        };
        let mut display_target = true;
        let mut display_filename = false;
        let mut display_line_number = false;
        let mut display_thread_names = false;
        let mut display_span_list = false;
        let display_current_span = true;

        let config = Self::config();
        if let Some(tracing) = config.get("tracing").and_then(|t| t.as_table()) {
            if let Some(dir) = tracing.get("log-dir").and_then(|t| t.as_str()) {
                log_dir = dir;
            }
            if let Some(filter) = tracing.get("filter").and_then(|t| t.as_str()) {
                env_filter = filter;
            }
            display_target = tracing
                .get("display-target")
                .and_then(|t| t.as_bool())
                .unwrap_or(true);
            display_filename = tracing
                .get("display-filename")
                .and_then(|t| t.as_bool())
                .unwrap_or(false);
            display_line_number = tracing
                .get("display-line-number")
                .and_then(|t| t.as_bool())
                .unwrap_or(false);
            display_thread_names = tracing
                .get("display-thread-names")
                .and_then(|t| t.as_bool())
                .unwrap_or(false);
            display_span_list = tracing
                .get("display-span-list")
                .and_then(|t| t.as_bool())
                .unwrap_or(false);
        }

        let app_name = Self::name();
        let log_dir = Path::new(log_dir);
        let rolling_file_dir = if log_dir.exists() {
            log_dir.to_path_buf()
        } else {
            let project_dir = Self::project_dir();
            let log_dir = project_dir.join("./log");
            if !log_dir.exists() {
                fs::create_dir(log_dir.as_path()).unwrap_or_else(|err| {
                    let log_dir = log_dir.to_string_lossy();
                    panic!("failed to create the log directory `{log_dir}`: {err}");
                });
            }
            log_dir
        };
        let file_appender = rolling::hourly(rolling_file_dir, format!("{app_name}.{app_env}"));
        let (non_blocking_appender, worker_guard) = tracing_appender::non_blocking(file_appender);
        let stderr = io::stderr.with_max_level(Level::WARN);
        let subscriber = tracing_subscriber::fmt()
            .json()
            .with_env_filter(env_filter)
            .with_target(display_target)
            .with_file(display_filename)
            .with_line_number(display_line_number)
            .with_thread_names(display_thread_names)
            .with_span_list(display_span_list)
            .with_current_span(display_current_span)
            .with_timer(time::LocalTime::rfc_3339())
            .with_writer(stderr.and(non_blocking_appender))
            .finish();
        tracing::subscriber::set_global_default(subscriber)
            .expect("setting default subscriber failed");
        TRACING_APPENDER_GUARD
            .set(worker_guard)
            .expect("failed to set the worker guard for the tracing appender");

        if let Some(metrics) = config.get("metrics").and_then(|t| t.as_table()) {
            let exporter = metrics
                .get("exporter")
                .and_then(|t| t.as_str())
                .unwrap_or_default();
            if exporter == "prometheus" {
                let mut builder = match metrics.get("push-gateway").and_then(|t| t.as_str()) {
                    Some(endpoint) => {
                        let interval = metrics
                            .get("interval")
                            .and_then(|t| t.as_integer().and_then(|i| i.try_into().ok()))
                            .unwrap_or(60);
                        PrometheusBuilder::new()
                            .with_push_gateway(endpoint, Duration::from_secs(interval))
                            .expect("failed to configure the exporter to run in push gateway mode")
                    }
                    None => {
                        let host = config
                            .get("host")
                            .and_then(|t| t.as_str())
                            .unwrap_or("127.0.0.1");
                        let port = config
                            .get("port")
                            .and_then(|t| t.as_integer())
                            .and_then(|t| u16::try_from(t).ok())
                            .unwrap_or(9000);
                        let host_addr = host
                            .parse::<IpAddr>()
                            .unwrap_or_else(|err| panic!("invalid host address `{host}`: {err}"));
                        PrometheusBuilder::new().with_http_listener((host_addr, port))
                    }
                };
                if let Some(quantiles) = config.get("quantiles").and_then(|t| t.as_array()) {
                    let quantiles = quantiles
                        .iter()
                        .filter_map(|q| q.as_float())
                        .collect::<Vec<_>>();
                    builder = builder
                        .set_quantiles(&quantiles)
                        .expect("invalid quantiles to render histograms");
                }
                if let Some(buckets) = config.get("buckets").and_then(|t| t.as_table()) {
                    for (key, value) in buckets {
                        let matcher = if key.starts_with('^') {
                            Matcher::Prefix(key.to_string())
                        } else if key.ends_with('$') {
                            Matcher::Suffix(key.to_string())
                        } else {
                            Matcher::Full(key.to_string())
                        };
                        let values = value
                            .as_array()
                            .expect("buckets should be an array of floats")
                            .iter()
                            .filter_map(|v| v.as_float())
                            .collect::<Vec<_>>();
                        builder = builder
                            .set_buckets_for_metric(matcher, &values)
                            .expect("invalid buckets to render histograms");
                    }
                }
                if let Some(labels) = config.get("global-labels").and_then(|t| t.as_table()) {
                    for (key, value) in labels {
                        builder = builder.add_global_label(key, value.to_string());
                    }
                }
                if let Some(addresses) = config.get("allowed-addresses").and_then(|t| t.as_array())
                {
                    for addr in addresses {
                        builder = builder
                            .add_allowed_address(addr.as_str().unwrap_or_default())
                            .unwrap_or_else(|err| panic!("invalid IP address `{addr}`: {err}"));
                    }
                }
                builder
                    .install()
                    .expect("failed to install Prometheus exporter");
            } else if exporter == "tcp" {
                let host = config
                    .get("host")
                    .and_then(|t| t.as_str())
                    .unwrap_or("127.0.0.1");
                let port = config
                    .get("port")
                    .and_then(|t| t.as_integer())
                    .and_then(|t| u16::try_from(t).ok())
                    .unwrap_or(9000);
                let buffer_size = config
                    .get("buffer_size")
                    .and_then(|t| t.as_integer())
                    .and_then(|t| usize::try_from(t).ok())
                    .unwrap_or(1024);
                let host_addr = host
                    .parse::<IpAddr>()
                    .unwrap_or_else(|err| panic!("invalid host address `{host}`: {err}"));
                TcpBuilder::new()
                    .listen_address((host_addr, port))
                    .buffer_size(Some(buffer_size))
                    .install()
                    .expect("failed to install TCP exporter");
            }
        }
        tracing_subscriber::init::<Self>();
        metrics_exporter::init::<Self>();
        http_client::init::<Self>();
    }
}
@@ -293,6 +119,3 @@ static PROJECT_DIR: LazyLock<PathBuf> = LazyLock::new(|| {
    env::current_dir()
        .expect("the project directory does not exist or permissions are insufficient")
});

/// Tracing appender guard.
static TRACING_APPENDER_GUARD: OnceLock<WorkerGuard> = OnceLock::new();
@@ -0,0 +1,93 @@
use crate::application::Application;
use std::{fs, io, path::Path, sync::OnceLock};
use tracing::Level;
use tracing_appender::{non_blocking::WorkerGuard, rolling};
use tracing_subscriber::fmt::{time, writer::MakeWriterExt};

pub(super) fn init<APP: Application + ?Sized>() {
    if TRACING_APPENDER_GUARD.get().is_some() {
        tracing::warn!("the tracing subscriber has already been initialized");
        return;
    }

    let app_env = APP::env();
    let mut log_dir = "./log";
    let mut env_filter = if app_env == "dev" {
        "info,sqlx=trace,zino=trace,zino_core=trace"
    } else {
        "info,sqlx=warn"
    };
    let mut display_target = true;
    let mut display_filename = false;
    let mut display_line_number = false;
    let mut display_thread_names = false;
    let mut display_span_list = false;
    let display_current_span = true;

    if let Some(tracing) = APP::config().get("tracing").and_then(|t| t.as_table()) {
        if let Some(dir) = tracing.get("log-dir").and_then(|t| t.as_str()) {
            log_dir = dir;
        }
        if let Some(filter) = tracing.get("filter").and_then(|t| t.as_str()) {
            env_filter = filter;
        }
        display_target = tracing
            .get("display-target")
            .and_then(|t| t.as_bool())
            .unwrap_or(true);
        display_filename = tracing
            .get("display-filename")
            .and_then(|t| t.as_bool())
            .unwrap_or(false);
        display_line_number = tracing
            .get("display-line-number")
            .and_then(|t| t.as_bool())
            .unwrap_or(false);
        display_thread_names = tracing
            .get("display-thread-names")
            .and_then(|t| t.as_bool())
            .unwrap_or(false);
        display_span_list = tracing
            .get("display-span-list")
            .and_then(|t| t.as_bool())
            .unwrap_or(false);
    }

    let app_name = APP::name();
    let log_dir = Path::new(log_dir);
    let rolling_file_dir = if log_dir.exists() {
        log_dir.to_path_buf()
    } else {
        let project_dir = APP::project_dir();
        let log_dir = project_dir.join("./log");
        if !log_dir.exists() {
            fs::create_dir(log_dir.as_path()).unwrap_or_else(|err| {
                let log_dir = log_dir.to_string_lossy();
                panic!("failed to create the log directory `{log_dir}`: {err}");
            });
        }
        log_dir
    };
    let file_appender = rolling::hourly(rolling_file_dir, format!("{app_name}.{app_env}"));
    let (non_blocking_appender, worker_guard) = tracing_appender::non_blocking(file_appender);
    let stderr = io::stderr.with_max_level(Level::WARN);
    let subscriber = tracing_subscriber::fmt()
        .json()
        .with_env_filter(env_filter)
        .with_target(display_target)
        .with_file(display_filename)
        .with_line_number(display_line_number)
        .with_thread_names(display_thread_names)
        .with_span_list(display_span_list)
        .with_current_span(display_current_span)
        .with_timer(time::LocalTime::rfc_3339())
        .with_writer(stderr.and(non_blocking_appender))
        .finish();
    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
    TRACING_APPENDER_GUARD
        .set(worker_guard)
        .expect("failed to set the worker guard for the tracing appender");
}

/// Tracing appender guard.
static TRACING_APPENDER_GUARD: OnceLock<WorkerGuard> = OnceLock::new();
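For reference, a `[tracing]` table covering every key the module above reads; values are illustrative, all keys are optional, and omitted keys fall back to the defaults in the code:

[tracing]
log-dir = "./log"
filter = "info,sqlx=trace,tower_http=trace,zino=trace,zino_core=trace"
display-target = true
display-filename = false
display-line-number = false
display-thread-names = false
display-span-list = false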
@@ -171,7 +171,7 @@ impl ConnectionPool {

/// A list of database connection pools.
#[derive(Debug, Clone)]
pub(crate) struct ConnectionPools(Vec<ConnectionPool>);
struct ConnectionPools(Vec<ConnectionPool>);

impl ConnectionPools {
    /// Returns a connection pool with the specific name.

@@ -182,7 +182,7 @@ impl ConnectionPools {
}

/// Shared connection pools.
pub(super) static SHARED_CONNECTION_POOLS: LazyLock<ConnectionPools> = LazyLock::new(|| {
static SHARED_CONNECTION_POOLS: LazyLock<ConnectionPools> = LazyLock::new(|| {
    let config = SHARED_STATE.config();

    // Application name.

@@ -211,7 +211,7 @@ pub(super) static SHARED_CONNECTION_POOLS: LazyLock<ConnectionPools> = LazyLock:
});

/// Database namespace prefix.
pub(super) static NAMESPACE_PREFIX: LazyLock<&'static str> = LazyLock::new(|| {
static NAMESPACE_PREFIX: LazyLock<&'static str> = LazyLock::new(|| {
    SHARED_STATE
        .config()
        .get("database")
@@ -3,6 +3,7 @@
//! [`zino`]: https://github.com/photino/zino

#![feature(async_fn_in_trait)]
#![feature(io_error_other)]
#![feature(iter_intersperse)]
#![feature(let_chains)]
#![feature(nonzero_min_max)]
@@ -33,7 +33,7 @@ impl Job {
        let schedule = Schedule::from_str(cron_expr)
            .unwrap_or_else(|err| panic!("invalid cron expression `{cron_expr}`: {err}"));
        Job {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            data: Map::new(),
            schedule,
            run: ExecutableJob::Fn(exec),

@@ -47,7 +47,7 @@ impl Job {
        let schedule = Schedule::from_str(cron_expr)
            .unwrap_or_else(|err| panic!("invalid cron expression `{cron_expr}`: {err}"));
        Job {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            data: Map::new(),
            schedule,
            run: ExecutableJob::AsyncFn(exec),
@@ -8,7 +8,7 @@ const FLAG_SAMPLED: u8 = 1;
const FLAG_RANDOM_TRACE_ID: u8 = 2;

/// HTTP headers for distributed tracing.
/// See the [spec](https://w3c.github.io/trace-context).
/// See [the spec](https://w3c.github.io/trace-context).
#[derive(Debug)]
pub struct TraceContext {
    /// Span identifier.
@@ -14,9 +14,12 @@ readme = "README.md"
proc-macro = true

[dependencies]
proc-macro2 = { version = "1.0.49" }
quote = { version = "1.0.23" }
syn = { version = "1.0.107", features = ["full", "extra-traits"] }
proc-macro2 = "1.0.50"
quote = "1.0.23"

[dependencies.syn]
version = "1.0.107"
features = ["full", "extra-traits"]

[dependencies.zino-core]
path = "../zino-core"
@@ -10,8 +10,9 @@ repository = "https://github.com/photino/zino"
documentation = "https://docs.rs/zino-model"
readme = "README.md"

[dependencies]
serde = { version = "1.0.152", features = ["derive"] }
[dependencies.serde]
version = "1.0.152"
features = ["derive"]

[dependencies.zino-core]
path = "../zino-core"
@@ -45,7 +45,7 @@ pub struct Collection {
impl Model for Collection {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -47,7 +47,7 @@ pub struct Dataset {
impl Model for Dataset {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -48,7 +48,7 @@ pub struct Group {
impl Model for Group {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -42,7 +42,7 @@ pub struct Log {
impl Model for Log {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -48,7 +48,7 @@ pub struct Message {
impl Model for Message {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -49,7 +49,7 @@ pub struct Order {
impl Model for Order {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -50,7 +50,7 @@ pub struct Policy {
impl Model for Policy {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -35,7 +35,7 @@ pub struct Record {
impl Model for Record {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -49,7 +49,7 @@ pub struct Resource {
impl Model for Resource {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -43,7 +43,7 @@ pub struct Source {
impl Model for Source {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -44,7 +44,7 @@ pub struct Tag {
impl Model for Tag {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -54,7 +54,7 @@ pub struct Task {
impl Model for Task {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            ..Self::default()
        }
    }

@@ -56,7 +56,7 @@ pub struct User {
impl Model for User {
    fn new() -> Self {
        Self {
            id: Uuid::new_v4(),
            id: Uuid::now_v7(),
            access_key_id: AccessKeyId::new().to_string(),
            ..Self::default()
        }
@@ -12,30 +12,48 @@ repository = "https://github.com/photino/zino"
documentation = "https://docs.rs/zino"
readme = "README.md"

[package.metadata.docs.rs]
features = ["axum"]

[features]
axum = ["dep:axum", "dep:tokio", "dep:tokio-stream", "dep:tower", "dep:tower-http"]

[dependencies]
async-trait = { version = "0.1.60" }
futures = { version = "0.3.25" }
hyper = { version = "0.14.23" }
parking_lot = { version = "0.12.1" }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = { version = "1.0.91" }
serde_qs = { version = "0.10.1" }
serde_urlencoded = { version = "0.7.1" }
toml = { version = "0.5.10" }
tracing = { version = "0.1.37" }
async-trait = "0.1.60"
futures = "0.3.25"
hyper = "0.14.23"
parking_lot = "0.12.1"
serde_json = "1.0.91"
serde_qs = "0.11.0"
serde_urlencoded = "0.7.1"
toml = "0.5.10"
tracing = "0.1.37"

# optional dependencies
axum = { version = "0.6.1", features = ["ws"], optional = true }
tokio = { version = "1.24.1", features = ["rt-multi-thread", "sync", "parking_lot"], optional = true }
tokio-stream = { version = "0.1.11", features = ["sync"], optional = true }
tower = { version = "0.4.13", features = ["timeout"], optional = true }
tower-http = { version = "0.3.5", features = ["full"], optional = true }
[dependencies.axum]
version = "0.6.2"
features = ["ws"]
optional = true

[dependencies.serde]
version = "1.0.152"
features = ["derive"]

[dependencies.tokio]
version = "1.24.1"
features = ["rt-multi-thread", "sync", "parking_lot"]
optional = true

[dependencies.tokio-stream]
version = "0.1.11"
features = ["sync"]
optional = true

[dependencies.tower]
version = "0.4.13"
features = ["timeout"]
optional = true

[dependencies.tower-http]
version = "0.3.5"
features = ["full"]
optional = true

[dependencies.zino-core]
path = "../zino-core"
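The `[features]` table above makes the axum integration opt-in, so a downstream application would enable it roughly like this; the path matches the example app earlier in this commit, but treat the snippet as a sketch rather than the project's documented setup:

[dependencies.zino]
path = "../../zino"
features = ["axum"]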
@@ -57,7 +57,7 @@ impl MessageChannel {
    /// Creates a new `MessageChannel`.
    pub(crate) fn new() -> Self {
        let (sender, receiver) = mpsc::channel(*CHANNEL_CAPACITY);
        let sender_id = Uuid::new_v4();
        let sender_id = Uuid::now_v7();
        let subscriber = Subscriber::new(sender, None);
        let mut senders = CHANNEL_SUBSCRIBERS.write();
        senders.retain(|_, subscriber| !subscriber.emitter().is_closed());