diff --git a/tokenizers/src/tokenizer/pre_tokenizer.rs b/tokenizers/src/tokenizer/pre_tokenizer.rs
index e36dae592..3a243e809 100644
--- a/tokenizers/src/tokenizer/pre_tokenizer.rs
+++ b/tokenizers/src/tokenizer/pre_tokenizer.rs
@@ -20,7 +20,7 @@ pub struct Split {
     /// The underlying `NormalizedString`. Each SubString is represented by a `NormalizedString`
     /// and in the end we might be carrying a lot of SubString representing various parts of the
     /// original input string.
-    normalized: NormalizedString,
+    pub normalized: NormalizedString,
     /// Optional Tokens associated to this Split
     tokens: Option<Vec<Token>>,
 }
@@ -52,7 +52,7 @@ impl From<(NormalizedString, Option<Vec<Token>>)> for Split {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct PreTokenizedString {
     pub original: String,
-    splits: Vec<Split>,
+    pub splits: Vec<Split>,
 }
 
 impl PreTokenizedString {