feat: Check essential permissions when creating table with external location (#16315)

* feat: check essential permissions for external location

* check permission to delete

* tweak test scripts

* tweak test comment

* Refactor: Check status with specific verification key

Check status with a specific verification key instead of relying on '/', which receives "special treatment" during status checks.
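For context, a minimal sketch (not part of this commit) of the behaviour the refactor works around, using opendal's Operator::stat; the probe function and its name are illustrative:

use opendal::Operator;

// Illustrative probe, not taken from the Databend codebase.
async fn probe(op: Operator) -> opendal::Result<()> {
    // Stat on "/" gets special treatment: the root is typically reported as an
    // existing directory without a real request to the backend, so it can
    // "succeed" even when the credentials grant no access at all.
    let _root = op.stat("/").await?;

    // Stat on a concrete key forces a real round trip, so a missing permission
    // surfaces as an error here instead of being masked.
    let _meta = op.stat("_v_d77aa11285c22e0e1d4593a035c98c0d").await?;
    Ok(())
}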
dantengsky committed Aug 24, 2024
1 parent 3fd674a commit 380b626
Showing 3 changed files with 73 additions and 7 deletions.
60 changes: 59 additions & 1 deletion src/query/sql/src/planner/binder/ddl/table.rs
@@ -55,6 +55,8 @@ use databend_common_ast::ast::VacuumTemporaryFiles;
use databend_common_ast::parser::parse_sql;
use databend_common_ast::parser::tokenize_sql;
use databend_common_base::base::uuid::Uuid;
use databend_common_base::runtime::GlobalIORuntime;
use databend_common_base::runtime::TrySpawn;
use databend_common_catalog::lock::LockTableOption;
use databend_common_catalog::plan::Filters;
use databend_common_catalog::table::CompactionLimits;
@@ -90,6 +92,7 @@ use databend_storages_common_table_meta::table::OPT_KEY_TABLE_COMPRESSION;
use databend_storages_common_table_meta::table::OPT_KEY_TEMP_PREFIX;
use derive_visitor::DriveMut;
use log::debug;
use opendal::Operator;

use crate::binder::get_storage_params_from_options;
use crate::binder::parse_storage_params_from_uri;
@@ -459,7 +462,7 @@ impl Binder {
.await?;

// create a temporary op to check if the params are correct
DataOperator::try_create(&sp).await?;
let data_operator = DataOperator::try_create(&sp).await?;

// Path ends with "/" means it's a directory.
let fp = if uri.path.ends_with('/') {
@@ -468,6 +471,10 @@
"".to_string()
};

// Verify essential privileges.
// The permission check might fail for reasons other than the permissions themselves,
// such as network communication issues.
verify_external_location_privileges(data_operator.operator()).await?;
(Some(sp), fp)
}
(Some(uri), _) => Err(ErrorCode::BadArguments(format!(
@@ -1689,3 +1696,54 @@ impl Binder {
.unwrap_or(true)
}
}

const VERIFICATION_KEY: &str = "_v_d77aa11285c22e0e1d4593a035c98c0d";
const VERIFICATION_KEY_DEL: &str = "_v_d77aa11285c22e0e1d4593a035c98c0d_del";

// verify that the essential privileges have been granted for accessing the external location
//
// The permission check might fail for reasons other than the permissions themselves,
// such as network communication issues.
async fn verify_external_location_privileges(dal: Operator) -> Result<()> {
let verification_task = async move {
// verify privilege to put
let mut errors = Vec::new();
if let Err(e) = dal.write(VERIFICATION_KEY, "V").await {
errors.push(format!("Permission check for [Write] failed: {}", e));
}

// verify privilege to get
if let Err(e) = dal.read_with(VERIFICATION_KEY).range(0..1).await {
errors.push(format!("Permission check for [Read] failed: {}", e));
}

// verify privilege to stat
if let Err(e) = dal.stat(VERIFICATION_KEY).await {
errors.push(format!("Permission check for [Stat] failed: {}", e));
}

// verify privilege to list
if let Err(e) = dal.list(VERIFICATION_KEY).await {
errors.push(format!("Permission check for [List] failed: {}", e));
}

// verify privilege to delete (delete an object that does not exist)
if let Err(e) = dal.delete(VERIFICATION_KEY_DEL).await {
errors.push(format!("Permission check for [Delete] failed: {}", e));
}

if errors.is_empty() {
Ok(())
} else {
Err(ErrorCode::StorageOther(
"Checking essential permissions for the external location failed.",
)
.add_message(errors.join("\n")))
}
};

GlobalIORuntime::instance()
.spawn(verification_task)
.await
.expect("join must succeed")
}
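For illustration only (not part of this commit): a rough sketch of exercising the helper against opendal's in-memory backend, assuming the services-memory feature is enabled and that Databend's global IO runtime has already been initialized, as it is inside a running query process:

async fn smoke_check_privileges() -> Result<()> {
    // The in-memory backend grants every operation, so all five probes
    // (write, read, stat, list, delete) are expected to succeed here.
    let op = Operator::new(opendal::services::Memory::default())?.finish();
    verify_external_location_privileges(op).await
}

Note that on success the object written under VERIFICATION_KEY is left behind; the delete probe deliberately targets the separate, non-existent VERIFICATION_KEY_DEL. That is why the stage-listing test further down now expects a single 1-byte entry named after the verification key.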
(second changed file: SQL logic test, path not shown)
@@ -44,13 +44,16 @@ SHOW CREATE TABLE `test`.`c`
----
c CREATE TABLE c ( a INT NOT NULL ) ENGINE=FUSE CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet'

statement error 4000
CREATE TABLE test.should_fail (a int not null) ENGINE=FUSE 's3://load/files/' CONNECTION=(access_key_id='1a2b3c' aws_secret_key='4x5y6z') CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet'

statement ok
CREATE TABLE test.d (a int not null) ENGINE=FUSE 's3://load/files/' CONNECTION=(access_key_id='1a2b3c' aws_secret_key='4x5y6z') CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet'
CREATE TABLE test.d (a int not null) ENGINE=FUSE 'fs:///tmp/load/files/' CONNECTION=(access_key_id='1a2b3c' aws_secret_key='4x5y6z') CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet'

query TT
SHOW CREATE TABLE `test`.`d`
----
d CREATE TABLE d ( a INT NOT NULL ) ENGINE=FUSE CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet' LOCATION = 's3 | bucket=load,root=/files/,endpoint=https://s3.amazonaws.com'
d CREATE TABLE d ( a INT NOT NULL ) ENGINE=FUSE CLUSTER BY (a, a % 3) COMPRESSION='lz4' STORAGE_FORMAT='parquet' LOCATION = 'fs | root=/tmp/load/files/'

query TT
SHOW CREATE TABLE `test`.`c`
(third changed file: SQL logic test, path not shown)
@@ -55,10 +55,15 @@ statement ok
create stage ctas_stage url='fs:///tmp/ctas/';


# the data of dropped table should be purged:
# list the stage, an empty result-set should be return
query T
list @ctas_stage;
# The data of the dropped table should be purged:
# Listing the stage should return an empty result set,
# except for the verification key '_v_d77aa11285c22e0e1d4593a035c98c0d',
# which is 1 byte in size.

query TT
SELECT name, size FROM LIST_STAGE(location => '@ctas_stage')
----
_v_d77aa11285c22e0e1d4593a035c98c0d 1

statement ok
drop database ctas_test;
