Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Fix scanning cloud paths with spaces #17379

Merged
merged 8 commits into from
Jul 3, 2024
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 3 additions & 7 deletions crates/polars-io/src/cloud/glob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,13 +108,9 @@ impl CloudLocation {
(bucket, key)
};

let key = if parsed.scheme().starts_with("http") {
percent_encoding::percent_decode_str(key)
.decode_utf8()
.map_err(to_compute_err)?
} else {
std::borrow::Cow::Borrowed(key)
};
let key = percent_encoding::percent_decode_str(key)
Copy link
Collaborator Author

@nameexhaustion nameexhaustion Jul 3, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

revert wrong approach from previous PR for supporting % in paths

.decode_utf8()
.map_err(to_compute_err)?;
let (mut prefix, expansion) = extract_prefix_expansion(&key)?;
if is_local && key.starts_with(DELIMITER) {
prefix.insert(0, DELIMITER);
Expand Down
8 changes: 8 additions & 0 deletions crates/polars-io/src/cloud/object_store_setup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,14 @@ fn url_and_creds_to_key(url: &Url, options: Option<&CloudOptions>) -> String {
)
}

/// Construct an `object_store::path::Path` from an already-decoded string,
/// bypassing the crate's encoding/decoding constructors.
///
/// `Path::from(String)` percent-encodes characters it considers invalid and
/// `Path::from_url_path` percent-decodes, so neither round-trips a raw path
/// that itself contains `%` or spaces. Until `object_store` exposes a
/// verbatim constructor we transmute instead.
pub fn object_path_from_string(path: String) -> object_store::path::Path {
    // SAFETY: assumes `object_store::path::Path` is a newtype wrapping a
    // single `String` field with identical layout — TODO confirm against the
    // pinned `object_store` version on every upgrade; repr(Rust) gives no
    // layout guarantee. If this ever stops holding, fall back to
    // `Path::from_url_path(percent_encode(path))`.
    unsafe { std::mem::transmute::<String, object_store::path::Path>(path) }
}

/// Build an [`ObjectStore`] based on the URL and the passed-in options. Return the cloud location and an implementation of the object store.
pub async fn build_object_store(
url: &str,
Expand Down
10 changes: 0 additions & 10 deletions crates/polars-io/src/cloud/options.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,16 +131,6 @@ impl CloudType {
#[cfg(feature = "cloud")]
pub(crate) fn parse_url(input: &str) -> std::result::Result<url::Url, url::ParseError> {
Ok(if input.contains("://") {
let input = if input.starts_with("https://") {
std::borrow::Cow::Borrowed(input)
} else {
// Some paths may contain '%', we need to double-encode as it doesn't seem
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

revert wrong approach

// possible to construct `Url` without having it decode the path.
// TODO: Maybe we can avoid using `Url`.
const PERC: percent_encoding::AsciiSet = percent_encoding::CONTROLS.add(b'%');
std::borrow::Cow::<str>::from(percent_encoding::percent_encode(input.as_bytes(), &PERC))
};
let input = input.as_ref();
url::Url::parse(input)?
} else {
let path = std::path::Path::new(input);
Expand Down
9 changes: 5 additions & 4 deletions crates/polars-io/src/file_cache/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@ use std::sync::Arc;
use std::time::UNIX_EPOCH;

use once_cell::sync::Lazy;
use polars_error::{to_compute_err, PolarsError, PolarsResult};
use polars_error::{PolarsError, PolarsResult};

use super::cache::{get_env_file_cache_ttl, FILE_CACHE};
use super::entry::FileCacheEntry;
use super::file_fetcher::{CloudFileFetcher, LocalFileFetcher};
use crate::cloud::{build_object_store, CloudLocation, CloudOptions, PolarsObjectStore};
use crate::cloud::{
build_object_store, object_path_from_string, CloudLocation, CloudOptions, PolarsObjectStore,
};
use crate::pl_async;
use crate::prelude::{is_cloud_url, POLARS_TEMP_DIR_BASE_PATH};
use crate::utils::ensure_directory_init;
Expand Down Expand Up @@ -83,8 +85,7 @@ pub fn init_entries_from_uri_list<A: AsRef<[Arc<str>]>>(

let cloud_path = {
assert!(expansion.is_none(), "path should not contain wildcards");
object_store::path::Path::from_url_path(prefix)
.map_err(to_compute_err)?
object_path_from_string(prefix)
};

let object_store = object_store.clone();
Expand Down
6 changes: 4 additions & 2 deletions crates/polars-io/src/ipc/ipc_reader_async.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@ use polars_core::frame::DataFrame;
use polars_core::schema::Schema;
use polars_error::{polars_bail, polars_err, to_compute_err, PolarsResult};

use crate::cloud::{build_object_store, CloudLocation, CloudOptions, PolarsObjectStore};
use crate::cloud::{
build_object_store, object_path_from_string, CloudLocation, CloudOptions, PolarsObjectStore,
};
use crate::file_cache::{init_entries_from_uri_list, FileCacheEntry};
use crate::predicates::PhysicalIoExpr;
use crate::prelude::{materialize_projection, IpcReader};
Expand Down Expand Up @@ -76,7 +78,7 @@ impl IpcReaderAsync {
// Any wildcards should already have been resolved here. Without this assertion they would
// be ignored.
debug_assert!(expansion.is_none(), "path should not contain wildcards");
Path::from_url_path(prefix).map_err(to_compute_err)?
object_path_from_string(prefix)
};

Ok(Self {
Expand Down
7 changes: 4 additions & 3 deletions crates/polars-io/src/parquet/read/async_impl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ use arrow::datatypes::ArrowSchemaRef;
use bytes::Bytes;
use object_store::path::Path as ObjectPath;
use polars_core::config::{get_rg_prefetch_size, verbose};
use polars_core::error::to_compute_err;
use polars_core::prelude::*;
use polars_parquet::read::RowGroupMetaData;
use polars_parquet::write::FileMetaData;
Expand All @@ -16,7 +15,9 @@ use tokio::sync::Mutex;
use super::mmap::ColumnStore;
use super::predicates::read_this_row_group;
use super::read_impl::compute_row_group_range;
use crate::cloud::{build_object_store, CloudLocation, CloudOptions, PolarsObjectStore};
use crate::cloud::{
build_object_store, object_path_from_string, CloudLocation, CloudOptions, PolarsObjectStore,
};
use crate::parquet::metadata::FileMetaDataRef;
use crate::pl_async::get_runtime;
use crate::predicates::PhysicalIoExpr;
Expand Down Expand Up @@ -48,7 +49,7 @@ impl ParquetObjectStore {
// Any wildcards should already have been resolved here. Without this assertion they would
// be ignored.
debug_assert!(expansion.is_none(), "path should not contain wildcards");
let path = ObjectPath::from_url_path(prefix).map_err(to_compute_err)?;
let path = object_path_from_string(prefix);

Ok(ParquetObjectStore {
store: PolarsObjectStore::new(store),
Expand Down
4 changes: 3 additions & 1 deletion crates/polars-lazy/src/scan/file_list_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@ fn expand_paths(
if is_cloud || { cfg!(not(target_family = "windows")) && config::force_async() } {
#[cfg(feature = "async")]
{
use polars_io::cloud::object_path_from_string;

let format_path = |scheme: &str, bucket: &str, location: &str| {
if is_cloud {
format!("{}://{}/{}", scheme, bucket, location)
Expand All @@ -70,7 +72,7 @@ fn expand_paths(
let (cloud_location, store) =
polars_io::cloud::build_object_store(path, cloud_options).await?;

let prefix = cloud_location.prefix.clone().into();
let prefix = object_path_from_string(cloud_location.prefix.clone());

let out = if !path.ends_with("/")
&& cloud_location.expansion.is_none()
Expand Down
17 changes: 17 additions & 0 deletions py-polars/tests/unit/io/test_scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,3 +489,20 @@ def test_scan_glob_excludes_directories(tmp_path: Path) -> None:
pl.scan_parquet(tmp_path / "**/*").collect(), pl.concat(3 * [df])
)
assert_frame_equal(pl.scan_parquet(tmp_path / "*").collect(), df)


@pytest.mark.parametrize("file_name", ["a b", "a %25 b"])
def test_scan_async_whitespace_in_path(
    tmp_path: Path, monkeypatch: Any, file_name: str
) -> None:
    # Force the async (cloud) scan implementation even for local files.
    monkeypatch.setenv("POLARS_FORCE_ASYNC", "1")
    tmp_path.mkdir(exist_ok=True)

    parquet_file = tmp_path / f"{file_name}.parquet"
    expected = pl.DataFrame({"x": 1})
    expected.write_parquet(parquet_file)

    # The file must be discoverable via its exact path, the parent directory,
    # and glob patterns alike.
    for target in (parquet_file, tmp_path, tmp_path / "*", tmp_path / "*.parquet"):
        assert_frame_equal(pl.scan_parquet(target).collect(), expected)

    parquet_file.unlink()