Skip to content

Commit

Permalink
Support PG 17
Browse files Browse the repository at this point in the history
  • Loading branch information
pawurb committed Oct 8, 2024
1 parent 7e2041b commit 1c9c3a3
Show file tree
Hide file tree
Showing 41 changed files with 317 additions and 169 deletions.
78 changes: 73 additions & 5 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,52 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Run PostgreSQL 12
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5432:5432 postgres:12.20-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
- name: Run PostgreSQL 13
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5433:5432 postgres:13.16-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
- name: Run PostgreSQL 14
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust_pg_extras \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5434:5432 postgres:14.13-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
- name: Run PostgreSQL 15
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5432:5432 postgres:14.6-alpine \
-d -p 5435:5432 postgres:15.8-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
sleep 15
- name: Run PostgreSQL 16
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5436:5432 postgres:16.4-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
sleep 15
- name: Run PostgreSQL 17
run: |
docker run --env POSTGRES_USER=postgres \
--env POSTGRES_DB=rust-pg-extras-test \
--env POSTGRES_PASSWORD=secret \
-d -p 5437:5432 postgres:17.0-alpine \
postgres -c shared_preload_libraries=pg_stat_statements
sleep 15
- name: Setup Rust
uses: actions-rs/toolchain@v1
with:
Expand All @@ -29,9 +67,39 @@ jobs:
command: check
- name: Lint
run: cargo clippy --all --all-features -- -D warnings
- name: Test
- name: Run tests for PG 12
env:
PG_VERSION: 12
uses: actions-rs/cargo@v1
with:
command: test
- name: Run tests for PG 13
env:
PG_VERSION: 13
uses: actions-rs/cargo@v1
with:
command: test
- name: Run tests for PG 14
env:
PG_VERSION: 14
uses: actions-rs/cargo@v1
with:
command: test
- name: Run tests for PG 15
env:
PG_VERSION: 15
uses: actions-rs/cargo@v1
with:
command: test
- name: Run tests for PG 16
env:
PG_VERSION: 16
uses: actions-rs/cargo@v1
with:
command: test
- name: Run tests for PG 17
env:
DATABASE_URL: postgresql://postgres:secret@localhost:5432/rust_pg_extras
PG_VERSION: 17
uses: actions-rs/cargo@v1
with:
command: test
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,9 @@ version = "0.4.0"
exclude = ["docker-compose.yml.sample", "live_tests.sh"]

[dependencies]
lazy_static = "1.5.0"
prettytable-rs = "0.10.0"
semver = "1.0.23"
sqlx = { version = "0.8", features = [
"runtime-tokio-rustls",
"postgres",
Expand Down
156 changes: 108 additions & 48 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,46 +1,51 @@
use std::collections::HashMap;
use std::time::Duration;
use std::{env, fmt};
use std::{
collections::HashMap,
time::Duration,
{env, fmt},
};
pub mod queries;
pub use queries::all_locks::AllLocks;
pub use queries::bloat::Bloat;
pub use queries::blocking::Blocking;
pub use queries::buffercache_stats::BuffercacheStats;
pub use queries::buffercache_usage::BuffercacheUsage;
pub use queries::cache_hit::CacheHit;
pub use queries::calls::Calls;
pub use queries::connections::Connections;
pub use queries::db_settings::DbSettings;
pub use queries::duplicate_indexes::DuplicateIndexes;
pub use queries::extensions::Extensions;
pub use queries::index_cache_hit::IndexCacheHit;
pub use queries::index_scans::IndexScans;
pub use queries::index_size::IndexSize;
pub use queries::index_usage::IndexUsage;
pub use queries::indexes::Indexes;
pub use queries::locks::Locks;
pub use queries::long_running_queries::LongRunningQueries;
pub use queries::mandelbrot::Mandelbrot;
pub use queries::null_indexes::NullIndexes;
pub use queries::outliers::Outliers;
pub use queries::records_rank::RecordsRank;
pub use queries::seq_scans::SeqScans;
pub use queries::shared::{get_default_schema, Query};
pub use queries::ssl_used::SslUsed;
pub use queries::table_cache_hit::TableCacheHit;
pub use queries::table_index_scans::TableIndexScans;
pub use queries::table_indexes_size::TableIndexesSize;
pub use queries::table_size::TableSize;
pub use queries::tables::Tables;
pub use queries::total_index_size::TotalIndexSize;
pub use queries::total_table_size::TotalTableSize;
pub use queries::unused_indexes::UnusedIndexes;
pub use queries::vacuum_stats::VacuumStats;
use sqlx::postgres::PgPoolOptions;
pub use queries::{
all_locks::AllLocks,
bloat::Bloat,
blocking::Blocking,
buffercache_stats::BuffercacheStats,
buffercache_usage::BuffercacheUsage,
cache_hit::CacheHit,
calls::Calls,
connections::Connections,
db_settings::DbSettings,
duplicate_indexes::DuplicateIndexes,
extensions::Extensions,
index_cache_hit::IndexCacheHit,
index_scans::IndexScans,
index_size::IndexSize,
index_usage::IndexUsage,
indexes::Indexes,
locks::Locks,
long_running_queries::LongRunningQueries,
mandelbrot::Mandelbrot,
null_indexes::NullIndexes,
outliers::Outliers,
records_rank::RecordsRank,
seq_scans::SeqScans,
shared::{get_default_schema, Query},
ssl_used::SslUsed,
table_cache_hit::TableCacheHit,
table_index_scans::TableIndexScans,
table_indexes_size::TableIndexesSize,
table_size::TableSize,
tables::Tables,
total_index_size::TotalIndexSize,
total_table_size::TotalTableSize,
unused_indexes::UnusedIndexes,
vacuum_stats::VacuumStats,
};
use semver::Version;
use sqlx::{postgres::PgPoolOptions, Row};

#[macro_use]
extern crate prettytable;
use prettytable::{Cell, Row, Table};
use prettytable::{Cell, Row as TableRow, Table};

pub fn render_table<T: Query>(items: Vec<T>) {
let mut table = Table::new();
Expand All @@ -51,7 +56,7 @@ pub fn render_table<T: Query>(items: Vec<T>) {
for item in items {
table.add_row(item.to_row());
}
table.set_titles(Row::new(vec![
table.set_titles(TableRow::new(vec![
Cell::new(T::description().as_str()).style_spec(format!("H{}", columns_count).as_str())
]));
table.printstd();
Expand Down Expand Up @@ -228,17 +233,23 @@ impl fmt::Display for PgExtrasError {

impl std::error::Error for PgExtrasError {}

use lazy_static::lazy_static;

lazy_static! {
    // Version thresholds used to select which pg_stat_statements SQL variant
    // to load (see `PgStatsVersion`). Extension versions report two components
    // (e.g. "1.8"), so callers append ".0" before parsing as semver.
    //
    // First extension version with the modern column layout.
    pub static ref NEW_PG_STAT_STATEMENTS: Version = Version::parse("1.8.0").unwrap();
    // Extension version shipped with PostgreSQL 17.
    // Fix: was `semver::Version::parse` — inconsistent with the line above;
    // `Version` is already imported, so use it uniformly.
    pub static ref PG_STAT_STATEMENTS_17: Version = Version::parse("1.11.0").unwrap();
}

/// Which family of pg_stat_statements columns the connected server exposes.
/// Selected by comparing the installed extension version against
/// `NEW_PG_STAT_STATEMENTS` (1.8.0) and `PG_STAT_STATEMENTS_17` (1.11.0),
/// then passed to `Query::read_file` to pick the matching SQL file.
///
/// Derives `Clone, Copy, PartialEq, Eq` in addition to `Debug`: the enum is
/// a plain three-variant tag moved by value into `read_file`, so `Copy`
/// costs nothing and lets callers reuse the value; equality makes it
/// testable and matchable without `matches!` workarounds.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PgStatsVersion {
    /// Extension older than 1.8.0.
    Legacy,
    /// Extension >= 1.8.0 and < 1.11.0.
    Standard,
    /// Extension >= 1.11.0 (PostgreSQL 17).
    Pg17,
}

async fn get_rows<T: Query>(
params: Option<HashMap<String, String>>,
) -> Result<Vec<T>, PgExtrasError> {
let mut query = T::read_file();

if let Some(params) = params {
for (key, value) in &params {
query = query.replace(&format!("%{{{}}}", key), value.as_str());
}
}

let pool = match PgPoolOptions::new()
.max_connections(5)
.acquire_timeout(Duration::from_secs(10))
Expand All @@ -249,6 +260,37 @@ async fn get_rows<T: Query>(
Err(e) => return Err(PgExtrasError::DbConnectionError(format!("{}", e))),
};

let pg_statements_query =
"select installed_version from pg_available_extensions where name='pg_stat_statements'";

let pg_statements_version = match sqlx::query(pg_statements_query).fetch_one(&pool).await {
Ok(row) => row
.try_get::<String, _>("installed_version")
.unwrap_or_default(),
Err(_) => "".to_string(),
};

let default_version = NEW_PG_STAT_STATEMENTS.clone();
let pg_statements_version = format!("{}.0", pg_statements_version);
let pg_statements_version =
Version::parse(&pg_statements_version).unwrap_or(default_version.clone());

let pg_statements_version = if pg_statements_version < default_version {
PgStatsVersion::Legacy
} else if pg_statements_version >= *PG_STAT_STATEMENTS_17 {
PgStatsVersion::Pg17
} else {
PgStatsVersion::Standard
};

let mut query = T::read_file(Some(pg_statements_version));

if let Some(params) = params {
for (key, value) in &params {
query = query.replace(&format!("%{{{}}}", key), value.as_str());
}
}

Ok(match sqlx::query(&query).fetch_all(&pool).await {
Ok(rows) => rows.iter().map(T::new).collect(),
Err(e) => return Err(PgExtrasError::Unknown(format!("{}", e))),
Expand Down Expand Up @@ -282,6 +324,24 @@ mod tests {
use super::*;

async fn setup() -> Result<(), Box<dyn std::error::Error>> {
let port = match env::var("PG_VERSION").expect("PG_VERSION not set").as_str() {
"12" => "5432",
"13" => "5433",
"14" => "5434",
"15" => "5435",
"16" => "5436",
"17" => "5437",
_ => "5432",
};

env::set_var(
"PG_EXTRAS_DATABASE_URL",
format!(
"postgres://postgres:secret@localhost:{}/rust-pg-extras-test",
port
),
);

let pool = PgPoolOptions::new()
.max_connections(5)
.connect(db_url()?.as_str())
Expand Down
2 changes: 1 addition & 1 deletion src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ async fn main() {
match execute(command).await {
Ok(_) => {}
Err(error) => {
println!("{}", error);
eprintln!("{}", error);
}
}
}
Expand Down
3 changes: 2 additions & 1 deletion src/queries/all_locks.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use crate::queries::shared::{get_default_interval, Query};
use crate::PgStatsVersion;
use sqlx::postgres::{types::PgInterval, PgRow};
use sqlx::Row;

Expand Down Expand Up @@ -54,7 +55,7 @@ impl Query for AllLocks {
]
}

fn read_file() -> String {
/// Returns the SQL text for the all-locks query, embedded at compile time.
/// A single SQL file serves every server version here, so the
/// pg_stat_statements version hint is deliberately unused.
fn read_file(_pg_statement_version: Option<PgStatsVersion>) -> String {
    String::from(include_str!("../sql/all_locks.sql"))
}
}
4 changes: 2 additions & 2 deletions src/queries/bloat.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::queries::shared::Query;
use crate::{queries::shared::Query, PgStatsVersion};
use sqlx::postgres::PgRow;
use sqlx::types::BigDecimal;
use sqlx::Row;
Expand Down Expand Up @@ -37,7 +37,7 @@ impl Query for Bloat {
row!["type", "schemaname", "object_name", "bloat", "waste"]
}

fn read_file() -> String {
/// Returns the SQL text for the bloat query, embedded at compile time.
/// One SQL file covers all server versions, so the pg_stat_statements
/// version hint is deliberately unused.
fn read_file(_pg_statement_version: Option<PgStatsVersion>) -> String {
    String::from(include_str!("../sql/bloat.sql"))
}
}
3 changes: 2 additions & 1 deletion src/queries/blocking.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use crate::queries::shared::{get_default_interval, Query};
use crate::PgStatsVersion;
use sqlx::postgres::{types::PgInterval, PgRow};
use sqlx::Row;

Expand Down Expand Up @@ -58,7 +59,7 @@ impl Query for Blocking {
]
}

fn read_file() -> String {
/// Returns the SQL text for the blocking-queries report, embedded at
/// compile time. One SQL file covers all server versions, so the
/// pg_stat_statements version hint is deliberately unused.
fn read_file(_pg_statement_version: Option<PgStatsVersion>) -> String {
    String::from(include_str!("../sql/blocking.sql"))
}
}
7 changes: 3 additions & 4 deletions src/queries/buffercache_stats.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use crate::queries::shared::Query;
use sqlx::postgres::PgRow;
use sqlx::Row;
use crate::{queries::shared::Query, PgStatsVersion};
use sqlx::{postgres::PgRow, Row};

#[derive(Debug, Clone)]
pub struct BuffercacheStats {
Expand Down Expand Up @@ -38,7 +37,7 @@ impl Query for BuffercacheStats {
]
}

fn read_file() -> String {
/// Returns the SQL text for the buffercache-stats query, embedded at
/// compile time. The pg_stat_statements version hint is unused here.
fn read_file(_pg_statement_version: Option<PgStatsVersion>) -> String {
    // Fix: the original chained `.to_string().to_string()` — the second
    // call clones the freshly built String for no effect. One conversion
    // from the embedded &str is all that is needed.
    include_str!("../sql/buffercache_stats.sql").to_string()
Expand Down
7 changes: 3 additions & 4 deletions src/queries/buffercache_usage.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use crate::queries::shared::Query;
use sqlx::postgres::PgRow;
use sqlx::Row;
use crate::{queries::shared::Query, PgStatsVersion};
use sqlx::{postgres::PgRow, Row};

#[derive(Debug, Clone)]
pub struct BuffercacheUsage {
Expand All @@ -24,7 +23,7 @@ impl Query for BuffercacheUsage {
row!["relname", "buffers"]
}

fn read_file() -> String {
fn read_file(_pg_statement_version: Option<PgStatsVersion>) -> String {
include_str!("../sql/buffercache_usage.sql").to_string()
}
}
Loading

0 comments on commit 1c9c3a3

Please sign in to comment.