Change dataset env var config (#764)
rmn-boiko authored Aug 13, 2024
1 parent 36aba5c commit ebb32da
Showing 9 changed files with 42 additions and 44 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## Unreleased
+### Changed
+- Change `mode` value for `DatasetEnvVarsConfig` to `enabled: Option<bool>`
 ### Added
 - New `Reset` flow in GQL Api which can be triggered manually for `Root` and `Derivative` datasets
 - Private Datasets, preparation work:
2 changes: 1 addition & 1 deletion images/kamu-base-with-env-var-storage/extra/.kamuconfig
@@ -2,5 +2,5 @@ kind: CLIConfig
 version: 1
 content:
   datasetEnvVars:
-    mode: storage
+    enabled: true
     encryptionKey: "QfnEDcnUtGSW2pwVXaFPvZOwxyFm2BOC"
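
For context (not part of the commit): the `datasetEnvVars` block above maps onto the `DatasetEnvVarsConfig` struct changed later in this diff. A minimal round-trip sketch, assuming `kamu_datasets` and `serde_yaml` are available as dependencies:

```rust
use kamu_datasets::DatasetEnvVarsConfig;

fn main() {
    // Field names are camelCase on the wire per the struct's serde attribute.
    let yaml = r#"
enabled: true
encryptionKey: "QfnEDcnUtGSW2pwVXaFPvZOwxyFm2BOC"
"#;
    let config: DatasetEnvVarsConfig = serde_yaml::from_str(yaml).unwrap();
    assert_eq!(config.enabled, Some(true));
}
```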
4 changes: 2 additions & 2 deletions src/adapter/graphql/src/mutations/dataset_mut.rs
@@ -14,7 +14,7 @@ use opendatafabric as odf;
 
 use super::{DatasetEnvVarsMut, DatasetFlowsMut, DatasetMetadataMut};
 use crate::prelude::*;
-use crate::utils::ensure_dataset_env_vars_mode;
+use crate::utils::ensure_dataset_env_vars_enabled;
 use crate::LoggedInGuard;
 
 #[derive(Debug, Clone)]
@@ -42,7 +42,7 @@ impl DatasetMut {
     /// Access to the mutable flow configurations of this dataset
     #[allow(clippy::unused_async)]
     async fn env_vars(&self, ctx: &Context<'_>) -> Result<DatasetEnvVarsMut> {
-        ensure_dataset_env_vars_mode(ctx)?;
+        ensure_dataset_env_vars_enabled(ctx)?;
 
         Ok(DatasetEnvVarsMut::new(self.dataset_handle.clone()))
     }
4 changes: 2 additions & 2 deletions src/adapter/graphql/src/queries/datasets/dataset.rs
@@ -13,7 +13,7 @@ use opendatafabric as odf;
 
 use crate::prelude::*;
 use crate::queries::*;
-use crate::utils::ensure_dataset_env_vars_mode;
+use crate::utils::ensure_dataset_env_vars_enabled;
 
 #[derive(Debug, Clone)]
 pub struct Dataset {
@@ -102,7 +102,7 @@ impl Dataset {
     /// Access to the environment variable of this dataset
     #[allow(clippy::unused_async)]
     async fn env_vars(&self, ctx: &Context<'_>) -> Result<DatasetEnvVars> {
-        ensure_dataset_env_vars_mode(ctx)?;
+        ensure_dataset_env_vars_enabled(ctx)?;
 
         Ok(DatasetEnvVars::new(self.dataset_handle.clone()))
     }
10 changes: 5 additions & 5 deletions src/adapter/graphql/src/utils.rs
@@ -14,7 +14,7 @@ use internal_error::*;
 use kamu_accounts::{CurrentAccountSubject, GetAccessTokenError, LoggedAccount};
 use kamu_core::auth::DatasetActionUnauthorizedError;
 use kamu_core::{Dataset, DatasetRepository};
-use kamu_datasets::{DatasetEnvVarsConfig, DatasetEnvVarsType};
+use kamu_datasets::DatasetEnvVarsConfig;
 use kamu_task_system as ts;
 use opendatafabric::{AccountName as OdfAccountName, DatasetHandle};
 
@@ -113,11 +113,11 @@ pub(crate) async fn get_task(
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
-pub(crate) fn ensure_dataset_env_vars_mode(ctx: &Context<'_>) -> Result<(), GqlError> {
-    let dataset_env_vars_type = from_catalog::<DatasetEnvVarsConfig>(ctx).unwrap();
-    if dataset_env_vars_type.as_ref().mode.as_ref().unwrap() != &DatasetEnvVarsType::Storage {
+pub(crate) fn ensure_dataset_env_vars_enabled(ctx: &Context<'_>) -> Result<(), GqlError> {
+    let dataset_env_vars_config = from_catalog::<DatasetEnvVarsConfig>(ctx).unwrap();
+    if !dataset_env_vars_config.as_ref().is_enabled() {
         return Err(GqlError::Gql(async_graphql::Error::new(
-            "API unavailable for static mode",
+            "API is unavailable",
         )));
     }
     Ok(())
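
For context (not part of the commit): the guard now reduces to a single `is_enabled()` check. A simplified stand-in, assuming `kamu_datasets` as a dependency; the real function reads the config from the async-graphql context and returns `GqlError`, replaced here by a plain `String` for illustration:

```rust
use kamu_datasets::DatasetEnvVarsConfig;

// Simplified stand-in for ensure_dataset_env_vars_enabled.
fn ensure_enabled(config: &DatasetEnvVarsConfig) -> Result<(), String> {
    if !config.is_enabled() {
        return Err("API is unavailable".to_string());
    }
    Ok(())
}

fn main() {
    // The derived Default leaves both fields unset, so the feature is off.
    assert!(ensure_enabled(&DatasetEnvVarsConfig::default()).is_err());
}
```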
29 changes: 15 additions & 14 deletions src/app/cli/src/app.rs
@@ -20,7 +20,8 @@ use kamu_accounts::*;
 use kamu_accounts_services::PredefinedAccountsRegistrator;
 use kamu_adapter_http::{FileUploadLimitConfig, UploadServiceLocal};
 use kamu_adapter_oauth::GithubAuthenticationConfig;
-use kamu_datasets::{DatasetEnvVar, DatasetEnvVarsType};
+use kamu_datasets::DatasetEnvVar;
+use tracing::warn;
 
 use crate::accounts::AccountService;
 use crate::error::*;
@@ -578,27 +579,27 @@ pub fn register_config_in_catalog(
         catalog_builder.add_value(config.dataset_env_vars.clone().unwrap());
 
         let dataset_env_vars_config = config.dataset_env_vars.as_ref().unwrap();
-        match dataset_env_vars_config.mode.as_ref().unwrap() {
-            DatasetEnvVarsType::Static => {
+        match dataset_env_vars_config.encryption_key.as_ref() {
+            None => {
+                match dataset_env_vars_config.enabled.as_ref() {
+                    None => {
+                        warn!("Dataset env vars configuration is missing. Feature will be disabled");
+                    }
+                    Some(true) => panic!("Dataset env vars encryption key is required"),
+                    _ => {}
+                }
                 catalog_builder.add::<kamu_datasets_services::DatasetKeyValueServiceSysEnv>();
                 catalog_builder.add::<kamu_datasets_services::DatasetEnvVarServiceNull>();
             }
-            DatasetEnvVarsType::Storage => {
+            Some(encryption_key) => {
                 assert!(
-                    dataset_env_vars_config.encryption_key.is_some(),
-                    "Dataset env var encryption key is required"
+                    DatasetEnvVar::try_asm_256_gcm_from_str(encryption_key).is_ok(),
+                    "Invalid dataset env var encryption key",
                 );
-                if DatasetEnvVar::try_asm_256_gcm_from_str(
-                    dataset_env_vars_config.encryption_key.as_ref().unwrap(),
-                )
-                .is_err()
-                {
-                    panic!("Invalid dataset env var encryption key");
-                }
                 catalog_builder.add::<kamu_datasets_services::DatasetKeyValueServiceImpl>();
                 catalog_builder.add::<kamu_datasets_services::DatasetEnvVarServiceImpl>();
             }
         };
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
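
For context (not part of the commit): the registration branch above can be read as a small decision table. A sketch with the decision reduced to a pure function; `ServiceSet` is a hypothetical stand-in for the concrete dill registrations shown in the diff, and key validation is elided:

```rust
// Hypothetical illustration of register_config_in_catalog's decision logic.
#[derive(Debug, PartialEq)]
enum ServiceSet {
    SysEnvAndNull, // DatasetKeyValueServiceSysEnv + DatasetEnvVarServiceNull
    StorageImpls,  // DatasetKeyValueServiceImpl + DatasetEnvVarServiceImpl
}

fn pick_services(enabled: Option<bool>, encryption_key: Option<&str>) -> ServiceSet {
    match encryption_key {
        None => {
            match enabled {
                None => eprintln!("Dataset env vars configuration is missing. Feature will be disabled"),
                Some(true) => panic!("Dataset env vars encryption key is required"),
                _ => {}
            }
            ServiceSet::SysEnvAndNull
        }
        // In the real code the key is validated with
        // DatasetEnvVar::try_asm_256_gcm_from_str before the services are registered.
        Some(_key) => ServiceSet::StorageImpls,
    }
}

fn main() {
    assert_eq!(pick_services(None, None), ServiceSet::SysEnvAndNull);
    assert_eq!(pick_services(Some(false), None), ServiceSet::SysEnvAndNull);
    assert_eq!(
        pick_services(Some(true), Some("QfnEDcnUtGSW2pwVXaFPvZOwxyFm2BOC")),
        ServiceSet::StorageImpls
    );
}
```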
4 changes: 2 additions & 2 deletions src/app/cli/src/commands/ui_command.rs
@@ -19,7 +19,7 @@ use dill::Catalog;
 use internal_error::ResultIntoInternal;
 use kamu_accounts::PredefinedAccountsConfig;
 use kamu_adapter_http::FileUploadLimitConfig;
-use kamu_datasets::{DatasetEnvVarsConfig, DatasetEnvVarsType};
+use kamu_datasets::DatasetEnvVarsConfig;
 use opendatafabric::AccountName;
 
 use super::{CLIError, Command};
@@ -78,7 +78,7 @@ impl Command for UICommand {
             self.current_account_name.clone(),
             self.predefined_accounts_config.clone(),
             self.file_upload_limit_config.clone(),
-            self.dataset_env_vars_config.mode == Some(DatasetEnvVarsType::Storage),
+            self.dataset_env_vars_config.is_enabled(),
             self.address,
             self.port,
         )
30 changes: 12 additions & 18 deletions src/domain/datasets/domain/src/entities/dataset_env_var.rs
@@ -227,48 +227,42 @@ impl std::error::Error for AesGcmError {}
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
 #[skip_serializing_none]
-#[derive(Debug, Clone, Merge, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, Merge, Serialize, Deserialize)]
 #[serde(deny_unknown_fields, rename_all = "camelCase")]
 pub struct DatasetEnvVarsConfig {
-    pub mode: Option<DatasetEnvVarsType>,
+    pub enabled: Option<bool>,
     /// Represents the encryption key for the dataset env vars. This field is
-    /// required if `DatasetEnvVarsType` is `Storage`.
+    /// required if `enabled` is `true` or `None`.
     ///
     /// The encryption key must be a 32-character alphanumeric string, which
     /// includes both uppercase and lowercase Latin letters (A-Z, a-z) and
     /// digits (0-9).
     ///
     /// # Example
     /// let config = DatasetEnvVarsConfig {
-    ///     mode: Some(DatasetEnvVarsType::Storage),
+    ///     enabled: Some(true),
     ///     encryption_key:
     ///     Some(String::from("aBcDeFgHiJkLmNoPqRsTuVwXyZ012345")) };
     /// ```
     pub encryption_key: Option<String>,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum DatasetEnvVarsType {
-    Static,
-    Storage,
-}
-
 impl DatasetEnvVarsConfig {
     pub fn sample() -> Self {
         Self {
-            mode: Some(DatasetEnvVarsType::Storage),
+            enabled: Some(true),
             encryption_key: Some(SAMPLE_DATASET_ENV_VAR_ENCRYPTION_KEY.to_string()),
         }
     }
-}
 
-impl Default for DatasetEnvVarsConfig {
-    fn default() -> Self {
-        Self {
-            mode: Some(DatasetEnvVarsType::Static),
-            encryption_key: None,
+    pub fn is_enabled(&self) -> bool {
+        if let Some(enabled) = self.enabled
+            && enabled
+            && self.encryption_key.is_some()
+        {
+            return true;
         }
+        false
     }
 }
 
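
For context (not part of the commit): a minimal usage sketch of the new `is_enabled()` semantics, assuming `kamu_datasets` as a dependency. The feature counts as enabled only when `enabled` is `Some(true)` and an encryption key is present:

```rust
use kamu_datasets::DatasetEnvVarsConfig;

fn main() {
    // Derived Default: both fields are None, so the feature is disabled.
    assert!(!DatasetEnvVarsConfig::default().is_enabled());

    // Enabled without a key is still treated as disabled.
    let no_key = DatasetEnvVarsConfig {
        enabled: Some(true),
        encryption_key: None,
    };
    assert!(!no_key.is_enabled());

    // Enabled with a 32-character key turns the feature on.
    let on = DatasetEnvVarsConfig {
        enabled: Some(true),
        encryption_key: Some("aBcDeFgHiJkLmNoPqRsTuVwXyZ012345".to_string()),
    };
    assert!(on.is_enabled());
}
```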
1 change: 1 addition & 0 deletions src/domain/datasets/domain/src/lib.rs
@@ -9,6 +9,7 @@
 
 #![feature(error_generic_member_access)]
 #![feature(assert_matches)]
+#![feature(let_chains)]
 
 mod entities;
 mod repos;
