Commit

chore: rename database.rs into table.rs (#7676)
Co-authored-by: Henry Fontanier <henry@dust.tt>
fontanierh and Henry Fontanier authored Sep 25, 2024
1 parent 97101cd commit ec827b2
Showing 16 changed files with 33 additions and 34 deletions.
3 changes: 2 additions & 1 deletion core/bin/dust_api.rs
@@ -37,7 +37,8 @@ use dust::{
         qdrant::QdrantClients,
     },
     databases::{
-        database::{LocalTable, QueryDatabaseError, Row, Table},
+        error::QueryDatabaseError,
+        table::{LocalTable, Row, Table},
         transient_database::execute_query_on_transient_database,
     },
     databases_store::store::{self as databases_store, DatabasesStore},
2 changes: 1 addition & 1 deletion core/bin/sqlite_worker.rs
@@ -6,7 +6,7 @@ use axum::{
     Router,
 };
 use dust::{
-    databases::database::Table,
+    databases::table::Table,
     databases_store::{self, store::DatabasesStore},
     sqlite_workers::sqlite_database::{SqliteDatabase, SqliteDatabaseError},
     utils::{error_response, APIResponse, CoreRequestMakeSpan},
2 changes: 1 addition & 1 deletion core/src/blocks/database.rs
@@ -10,7 +10,7 @@ use crate::{
         database_schema::load_tables_from_identifiers,
     },
     databases::{
-        database::QueryDatabaseError, transient_database::execute_query_on_transient_database,
+        error::QueryDatabaseError, transient_database::execute_query_on_transient_database,
     },
     Rule,
 };
2 changes: 1 addition & 1 deletion core/src/blocks/database_schema.rs
@@ -12,7 +12,7 @@ use crate::{
         helpers::get_data_source_project_and_view_filter,
     },
     databases::{
-        database::{LocalTable, Table},
+        table::{LocalTable, Table},
         transient_database::get_unique_table_names_for_transient_database,
     },
     Rule,
13 changes: 13 additions & 0 deletions core/src/databases/error.rs
@@ -0,0 +1,13 @@
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum QueryDatabaseError {
+    #[error("{0}")]
+    GenericError(#[from] anyhow::Error),
+    #[error("Too many result rows")]
+    TooManyResultRows,
+    #[error("Result is too large: {0}")]
+    ResultTooLarge(String),
+    #[error("Query execution error: {0}")]
+    ExecutionError(String),
+}
5 changes: 1 addition & 4 deletions core/src/databases/remote_databases/remote_database.rs
@@ -2,10 +2,7 @@ use anyhow::Result;
 use async_trait::async_trait;
 use std::collections::HashMap;
 
-use crate::databases::{
-    database::{QueryDatabaseError, QueryResult},
-    table_schema::TableSchema,
-};
+use crate::databases::{error::QueryDatabaseError, table::QueryResult, table_schema::TableSchema};
 
 #[async_trait]
 pub trait RemoteDatabase {
3 changes: 2 additions & 1 deletion core/src/databases/remote_databases/snowflake.rs
@@ -9,8 +9,9 @@ use snowflake_connector_rs::{
 };
 
 use crate::databases::{
-    database::{QueryDatabaseError, QueryResult},
+    error::QueryDatabaseError,
     remote_databases::remote_database::RemoteDatabase,
+    table::QueryResult,
     table_schema::{TableSchema, TableSchemaColumn, TableSchemaFieldType},
 };
 
17 changes: 2 additions & 15 deletions core/src/databases/database.rs → core/src/databases/table.rs
@@ -10,7 +10,6 @@ use anyhow::{anyhow, Result};
 use futures::future::try_join_all;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use thiserror::Error;
 
 #[derive(Debug, Clone, Copy, Serialize, PartialEq, Deserialize)]
 #[serde(rename_all = "lowercase")]
@@ -28,16 +27,8 @@ impl ToString for TableType {
     }
 }
 
-#[derive(Debug, Error)]
-pub enum QueryDatabaseError {
-    #[error("{0}")]
-    GenericError(#[from] anyhow::Error),
-    #[error("Too many result rows")]
-    TooManyResultRows,
-    #[error("Result is too large: {0}")]
-    ResultTooLarge(String),
-    #[error("Query execution error: {0}")]
-    ExecutionError(String),
+pub fn get_table_unique_id(project: &Project, data_source_id: &str, table_id: &str) -> String {
+    format!("{}__{}__{}", project.project_id(), data_source_id, table_id)
 }
 
 #[derive(Debug, Serialize, Clone, Deserialize)]
@@ -60,10 +51,6 @@ pub struct Table {
     remote_database_secret_id: Option<String>,
 }
 
-pub fn get_table_unique_id(project: &Project, data_source_id: &str, table_id: &str) -> String {
-    format!("{}__{}__{}", project.project_id(), data_source_id, table_id)
-}
-
 impl Table {
     pub fn new(
         project: &Project,
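
As a reference point for the relocated helper, the unique id is just the three components joined by double underscores. A standalone sketch that mirrors that format string (the project id value and table names are made up, and no real `Project` is constructed):

```rust
// Mirrors the format string in core/src/databases/table.rs:
//     format!("{}__{}__{}", project.project_id(), data_source_id, table_id)
// The i64 project id stands in for whatever `Project::project_id()` returns.
fn example_table_unique_id(project_id: i64, data_source_id: &str, table_id: &str) -> String {
    format!("{}__{}__{}", project_id, data_source_id, table_id)
}

fn main() {
    // Hypothetical identifiers, for illustration only.
    let unique_id = example_table_unique_id(42, "managed-notion", "tasks");
    assert_eq!(unique_id, "42__managed-notion__tasks");
    println!("{}", unique_id);
}
```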
2 changes: 1 addition & 1 deletion core/src/databases/table_schema.rs
@@ -1,6 +1,6 @@
 use std::collections::{HashMap, HashSet};
 
-use super::database::{HasValue, Row};
+use super::table::{HasValue, Row};
 use anyhow::{anyhow, Result};
 use chrono::prelude::DateTime;
 use itertools::Itertools;
3 changes: 2 additions & 1 deletion core/src/databases/transient_database.rs
@@ -7,7 +7,8 @@ use tracing::info;
 
 use crate::{
     databases::{
-        database::{QueryDatabaseError, QueryResult, Table},
+        error::QueryDatabaseError,
+        table::{QueryResult, Table},
         table_schema::TableSchema,
     },
     sqlite_workers::client::{SqliteWorker, SqliteWorkerError, HEARTBEAT_INTERVAL_MS},
2 changes: 1 addition & 1 deletion core/src/databases_store/store.rs
@@ -5,7 +5,7 @@ use bb8_postgres::PostgresConnectionManager;
 use serde_json::Value;
 use tokio_postgres::{types::ToSql, NoTls};
 
-use crate::{databases::database::Row, utils};
+use crate::{databases::table::Row, utils};
 
 #[async_trait]
 pub trait DatabasesStore {
3 changes: 2 additions & 1 deletion core/src/lib.rs
@@ -18,7 +18,8 @@ pub mod data_sources {
     pub mod splitter;
 }
 pub mod databases {
-    pub mod database;
+    pub mod error;
+    pub mod table;
     pub mod table_schema;
     pub mod remote_databases {
         pub mod remote_database;
2 changes: 1 addition & 1 deletion core/src/sqlite_workers/client.rs
@@ -7,7 +7,7 @@ use thiserror::Error;
 use urlencoding::encode;
 
 use crate::{
-    databases::database::{QueryResult, Table},
+    databases::table::{QueryResult, Table},
     utils,
 };
 
2 changes: 1 addition & 1 deletion core/src/sqlite_workers/sqlite_database.rs
@@ -10,7 +10,7 @@ use tracing::info;
 
 use crate::{
     databases::{
-        database::{QueryResult, Row, Table},
+        table::{QueryResult, Row, Table},
         transient_database::get_unique_table_names_for_transient_database,
     },
     databases_store::store::DatabasesStore,
2 changes: 1 addition & 1 deletion core/src/stores/postgres.rs
@@ -18,7 +18,7 @@ use crate::{
     consts::DATA_SOURCE_DOCUMENT_SYSTEM_TAG_PREFIX,
     data_sources::data_source::{DataSource, DataSourceConfig, Document, DocumentVersion},
     databases::{
-        database::{get_table_unique_id, Table},
+        table::{get_table_unique_id, Table},
         table_schema::TableSchema,
         transient_database::TransientDatabase,
     },
4 changes: 1 addition & 3 deletions core/src/stores/store.rs
@@ -6,9 +6,7 @@ use crate::{
     blocks::block::BlockType,
     cached_request::CachedRequest,
     data_sources::data_source::{DataSource, DataSourceConfig, Document, DocumentVersion},
-    databases::{
-        database::Table, table_schema::TableSchema, transient_database::TransientDatabase,
-    },
+    databases::{table::Table, table_schema::TableSchema, transient_database::TransientDatabase},
     dataset::Dataset,
     http::request::{HttpRequest, HttpResponse},
     project::Project,
