Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 31 additions & 0 deletions pgdog-config/src/general.rs
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,19 @@ pub struct General {
#[serde(default)]
pub query_log: Option<PathBuf>,

/// Minimum parse duration in milliseconds that triggers a warning log with the query text.
/// Queries whose parsing takes longer than this value are logged at WARN level.
/// Set to `0` or omit to disable.
///
/// _Default:_ `None` (disabled)
pub log_min_duration_parse: Option<u64>,

/// Maximum number of characters of the query text included in log messages.
///
/// _Default:_ `100`
#[serde(default = "General::log_query_sample_length")]
pub log_query_sample_length: usize,

/// The port used for the OpenMetrics HTTP endpoint.
///
/// https://docs.pgdog.dev/configuration/pgdog.toml/general/#openmetrics_port
Expand Down Expand Up @@ -764,6 +777,8 @@ impl Default for General {
broadcast_address: Self::broadcast_address(),
broadcast_port: Self::broadcast_port(),
query_log: Self::query_log(),
log_min_duration_parse: Self::log_min_duration_parse(),
log_query_sample_length: Self::log_query_sample_length(),
openmetrics_port: Self::openmetrics_port(),
openmetrics_namespace: Self::openmetrics_namespace(),
prepared_statements: Self::prepared_statements(),
Expand Down Expand Up @@ -1139,6 +1154,14 @@ impl General {
Self::env_option_string("PGDOG_QUERY_LOG").map(PathBuf::from)
}

/// Minimum parse duration (in milliseconds) that triggers a slow-parse
/// warning, read from the environment; `None` when the variable is unset.
fn log_min_duration_parse() -> Option<u64> {
    const ENV_KEY: &str = "PGDOG_LOG_MIN_DURATION_PARSE";
    Self::env_option(ENV_KEY)
}

/// Maximum number of characters of query text included in log messages.
/// Reads `PGDOG_LOG_QUERY_SAMPLE_LENGTH`, falling back to the default of 100.
pub fn log_query_sample_length() -> usize {
    const DEFAULT_SAMPLE_LENGTH: usize = 100;
    Self::env_or_default("PGDOG_LOG_QUERY_SAMPLE_LENGTH", DEFAULT_SAMPLE_LENGTH)
}

/// Port for the OpenMetrics HTTP endpoint, taken from the environment
/// when present; `None` disables the endpoint.
pub fn openmetrics_port() -> Option<u16> {
    const ENV_KEY: &str = "PGDOG_OPENMETRICS_PORT";
    Self::env_option(ENV_KEY)
}
Expand Down Expand Up @@ -1537,6 +1560,8 @@ mod tests {
env::set_var("PGDOG_MIRROR_EXPOSURE", "0.5");
env::set_var("PGDOG_DNS_TTL", "60000");
env::set_var("PGDOG_PUB_SUB_CHANNEL_SIZE", "100");
env::set_var("PGDOG_LOG_MIN_DURATION_PARSE", "5");
env::set_var("PGDOG_LOG_QUERY_SAMPLE_LENGTH", "200");

assert_eq!(General::broadcast_port(), 7432);
assert_eq!(General::openmetrics_port(), Some(9090));
Expand All @@ -1547,6 +1572,8 @@ mod tests {
assert_eq!(General::mirror_exposure(), 0.5);
assert_eq!(General::default_dns_ttl(), Some(60000));
assert_eq!(General::pub_sub_channel_size(), 100);
assert_eq!(General::log_min_duration_parse(), Some(5));
assert_eq!(General::log_query_sample_length(), 200);

env::remove_var("PGDOG_BROADCAST_PORT");
env::remove_var("PGDOG_OPENMETRICS_PORT");
Expand All @@ -1557,6 +1584,8 @@ mod tests {
env::remove_var("PGDOG_MIRROR_EXPOSURE");
env::remove_var("PGDOG_DNS_TTL");
env::remove_var("PGDOG_PUB_SUB_CHANNEL_SIZE");
env::remove_var("PGDOG_LOG_MIN_DURATION_PARSE");
env::remove_var("PGDOG_LOG_QUERY_SAMPLE_LENGTH");

assert_eq!(General::broadcast_port(), General::port() + 1);
assert_eq!(General::openmetrics_port(), None);
Expand All @@ -1567,6 +1596,8 @@ mod tests {
assert_eq!(General::mirror_exposure(), 1.0);
assert_eq!(General::default_dns_ttl(), None);
assert_eq!(General::pub_sub_channel_size(), 0);
assert_eq!(General::log_min_duration_parse(), None);
assert_eq!(General::log_query_sample_length(), 100);
}

#[test]
Expand Down
14 changes: 14 additions & 0 deletions pgdog/src/frontend/router/parser/cache/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@ use std::{collections::HashSet, ops::Deref};

use parking_lot::Mutex;
use std::sync::Arc;
use tracing::warn;

use super::super::{Error, Route, Shard, StatementRewrite, StatementRewriteContext, Table};
use super::{Cache, Fingerprint, Stats};
use crate::backend::schema::Schema;
use crate::config::config;
use crate::frontend::router::parser::cache::AstQuery;
use crate::frontend::router::parser::rewrite::statement::RewritePlan;
use crate::frontend::PreparedStatements;
Expand Down Expand Up @@ -105,6 +107,18 @@ impl Ast {
let mut stats = Stats::new();
stats.parse_time += elapsed;

let cfg = config();
if let Some(threshold_ms) = cfg.config.general.log_min_duration_parse {
if threshold_ms > 0 && elapsed.as_millis() as u64 >= threshold_ms {
let sample_len = cfg.config.general.log_query_sample_length;
warn!(
"[slow_query_parse] parse_time_in_ms={}ms truncated_query=\"{}\"",
elapsed.as_millis(),
query.truncated_query(sample_len),
);
}
}

Ok(Self {
cached: true,
comment_shard: None,
Expand Down
5 changes: 5 additions & 0 deletions pgdog/src/frontend/router/parser/cache/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,9 @@ impl<'a> AstQuery<'a> {
original_query: query,
}
}

/// Return the first `sample_len` characters of the original query, including any comment.
///
/// Counts Unicode scalar values (`char`s), not bytes, so multi-byte
/// characters are never split mid-sequence.
pub fn truncated_query(&self, sample_len: usize) -> String {
    let text = self.original_query.query();
    match text.char_indices().nth(sample_len) {
        // Query exceeds the limit: cut at the byte offset of the first
        // character past it (guaranteed to be a char boundary).
        Some((cut, _)) => text[..cut].to_string(),
        // Query fits entirely within the limit: return it whole.
        None => text.to_string(),
    }
}
}
21 changes: 21 additions & 0 deletions pgdog/src/frontend/router/parser/cache/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -332,6 +332,27 @@ fn test_cache_key_shared_across_different_comments() {
assert_eq!(matching[0].as_str(), "SELECT 1 FROM cache_key_shared");
}

#[test]
fn test_truncated_query_shorter_than_limit() {
    // A query shorter than the limit must come back unchanged.
    let query = BufferedQuery::Query(Query::new("SELECT 1"));
    let sample = AstQuery::from_query(&query).truncated_query(100);
    assert_eq!(sample, "SELECT 1");
}

#[test]
fn test_truncated_query_longer_than_limit() {
    // Only the first `sample_len` characters survive truncation.
    let query = BufferedQuery::Query(Query::new("SELECT * FROM users WHERE id = 1"));
    let sample = AstQuery::from_query(&query).truncated_query(6);
    assert_eq!(sample, "SELECT");
}

#[test]
fn test_truncated_query_includes_leading_comment() {
    // Leading comments count toward the sample and must be preserved.
    let query = BufferedQuery::Query(Query::new("/* shard=0 */ SELECT 1"));
    let sample = AstQuery::from_query(&query).truncated_query(100);
    assert!(sample.starts_with("/* shard=0 */"));
}

#[test]
fn test_normalize() {
let q = "SELECT * FROM users WHERE id = 1";
Expand Down
Loading