Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HF Tokenizer: get charspans #1584

Merged
merged 6 commits into from
Apr 20, 2022
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 49 additions & 4 deletions extensions/tokenizers/rust/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,13 @@ extern crate tokenizers as tk;

use std::str::FromStr;
use tk::tokenizer::{EncodeInput, Encoding};
use tk::FromPretrainedParameters;
use tk::Tokenizer;
use tk::{FromPretrainedParameters, Offsets};

use jni::objects::{JObject, JString};
use jni::sys::{jboolean, jlong, jlongArray, jobjectArray, jsize, JNI_TRUE};
use jni::objects::{JClass, JMethodID, JObject, JString, JValue};
use jni::sys::{
jboolean, jint, jlong, jlongArray, jobjectArray, jsize, JNI_TRUE,
};
use jni::JNIEnv;

#[no_mangle]
Expand Down Expand Up @@ -256,7 +258,8 @@ pub extern "system" fn Java_ai_djl_huggingface_tokenizers_jni_TokenizersLibrary_
.unwrap();
for (i, token) in tokens.iter().enumerate() {
let item: JString = env.new_string(&token).unwrap();
env.set_object_array_element(array, i as jsize, item).unwrap();
env.set_object_array_element(array, i as jsize, item)
.unwrap();
}
array
}
Expand Down Expand Up @@ -299,6 +302,48 @@ pub extern "system" fn Java_ai_djl_huggingface_tokenizers_jni_TokenizersLibrary_
array
}

#[no_mangle]
pub extern "system" fn Java_ai_djl_huggingface_tokenizers_jni_TokenizersLibrary_getTokenCharSpans(
    env: JNIEnv,
    _: JObject,
    handle: jlong,
) -> jobjectArray {
    // Builds a Java CharSpan[] with one element per token of the native
    // Encoding. Tokens for which token_to_chars() returns None (e.g. special
    // tokens) leave their array slot as null, matching the Java-side tests.
    let encoding = cast_handle::<Encoding>(handle);
    let len = encoding.get_tokens().len();

    const CHAR_SPAN_CLASS: &str = "ai/djl/huggingface/tokenizers/jni/CharSpan";
    // Resolve the class and its (int, int) constructor once, outside the
    // loop: JNI find_class/get_method_id lookups are loop-invariant and
    // were previously repeated for every token.
    let cls: JClass = env.find_class(CHAR_SPAN_CLASS).unwrap();
    let constructor: JMethodID = env.get_method_id(cls, "<init>", "(II)V").unwrap();

    let array: jobjectArray = env
        .new_object_array(len as jsize, CHAR_SPAN_CLASS, JObject::null())
        .unwrap();
    for i in 0..len {
        // Offsets is a (usize, usize) pair; Option<(usize, Offsets)> also
        // carries the sequence index, which we do not need here.
        if let Some((_, offsets)) = encoding.token_to_chars(i) {
            let args = [
                JValue::Int(offsets.0 as jint),
                JValue::Int(offsets.1 as jint),
            ];
            let obj = env
                .new_object_unchecked(cls, constructor, &args[..])
                .unwrap();
            env.set_object_array_element(array, i as jsize, obj)
                .unwrap();
        }
    }
    array
}

fn to_handle<T: 'static>(val: T) -> jlong {
let handle = Box::into_raw(Box::new(val)) as jlong;
handle
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
*/
package ai.djl.huggingface.tokenizers;

import ai.djl.huggingface.tokenizers.jni.CharSpan;

/** A class holds token encoding information. */
public class Encoding {

Expand All @@ -21,20 +23,23 @@ public class Encoding {
private long[] wordIds;
private long[] attentionMask;
private long[] specialTokenMask;
private CharSpan[] charTokenSpans;

/**
 * Constructs a new {@code Encoding} instance holding the tokenizer output.
 *
 * @param ids the token ids
 * @param typeIds the token type ids
 * @param tokens the token strings
 * @param wordIds the word ids
 * @param attentionMask the attention mask
 * @param specialTokenMask the special token mask
 * @param charTokenSpans the char span of each token (entries may be null)
 */
Encoding(
        long[] ids,
        long[] typeIds,
        String[] tokens,
        long[] wordIds,
        long[] attentionMask,
        long[] specialTokenMask,
        CharSpan[] charTokenSpans) {
    // Plain field assignments; order is irrelevant.
    this.charTokenSpans = charTokenSpans;
    this.specialTokenMask = specialTokenMask;
    this.attentionMask = attentionMask;
    this.wordIds = wordIds;
    this.tokens = tokens;
    this.typeIds = typeIds;
    this.ids = ids;
}

/**
Expand Down Expand Up @@ -90,4 +95,13 @@ public long[] getAttentionMask() {
public long[] getSpecialTokenMask() {
    // Returns the stored mask directly (no defensive copy, like the other getters).
    return this.specialTokenMask;
}

/**
 * Returns the {@code CharSpan} of each token in the original text.
 *
 * <p>Entries may be {@code null} for tokens that have no character span.
 *
 * @return the char token spans
 */
public CharSpan[] getCharTokenSpans() {
    return this.charTokenSpans;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
*/
package ai.djl.huggingface.tokenizers;

import ai.djl.huggingface.tokenizers.jni.CharSpan;
import ai.djl.huggingface.tokenizers.jni.LibUtils;
import ai.djl.huggingface.tokenizers.jni.TokenizersLibrary;
import ai.djl.modality.nlp.preprocess.Tokenizer;
Expand Down Expand Up @@ -200,8 +201,10 @@ private Encoding toEncoding(long encoding) {
long[] wordIds = TokenizersLibrary.LIB.getWordIds(encoding);
long[] attentionMask = TokenizersLibrary.LIB.getAttentionMask(encoding);
long[] specialTokenMask = TokenizersLibrary.LIB.getSpecialTokenMask(encoding);
CharSpan[] charSpans = TokenizersLibrary.LIB.getTokenCharSpans(encoding);

TokenizersLibrary.LIB.deleteEncoding(encoding);
return new Encoding(ids, typeIds, tokens, wordIds, attentionMask, specialTokenMask);
return new Encoding(
ids, typeIds, tokens, wordIds, attentionMask, specialTokenMask, charSpans);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package ai.djl.huggingface.tokenizers.jni;
andreabrduque marked this conversation as resolved.
Show resolved Hide resolved

/**
 * A {@code CharSpan} marks the character positions of a token in the original input text.
 *
 * <p>The span is start-inclusive; judging from the adjacent spans produced in the tests
 * (e.g. (5, 6) followed by (7, 8)), the end offset is exclusive.
 */
public class CharSpan {

    private final int start;
    private final int end;

    /**
     * Constructs a new {@code CharSpan} instance.
     *
     * @param start the start character position
     * @param end the end character position
     */
    public CharSpan(int start, int end) {
        this.start = start;
        this.end = end;
    }

    /**
     * Returns the start character position.
     *
     * @return the start character position
     */
    public int getStart() {
        // Fix: previously declared as double although the field is int; returning int
        // keeps the accessor consistent with the constructor and avoids the ambiguous
        // TestNG assertEquals(double, double) overload at call sites.
        return start;
    }

    /**
     * Returns the end character position.
     *
     * @return the end character position
     */
    public int getEnd() {
        return end;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,4 +45,6 @@ private TokenizersLibrary() {}
/** Returns the attention mask of the native {@code Encoding} referenced by the handle. */
public native long[] getAttentionMask(long encoding);

/** Returns the special token mask of the native {@code Encoding} referenced by the handle. */
public native long[] getSpecialTokenMask(long encoding);

/**
 * Returns one {@code CharSpan} per token of the native {@code Encoding}; entries are left
 * {@code null} for tokens that have no char span (the native side skips them).
 */
public native CharSpan[] getTokenCharSpans(long encoding);
}
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

package ai.djl.huggingface.tokenizers;

import ai.djl.huggingface.tokenizers.jni.CharSpan;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -49,6 +50,34 @@ public void testTokenizer() {
Assert.assertEquals(attentionMask, encoding.getAttentionMask());
Assert.assertEquals(specialTokenMask, encoding.getSpecialTokenMask());

CharSpan[] charSpansExpected = {
null,
new CharSpan(0, 5),
new CharSpan(5, 6),
new CharSpan(7, 8),
new CharSpan(8, 9),
new CharSpan(9, 12),
new CharSpan(12, 13),
new CharSpan(14, 17),
new CharSpan(18, 21),
new CharSpan(22, 25),
new CharSpan(26, 30),
new CharSpan(31, 32),
null
};
int expectedLength = charSpansExpected.length;
CharSpan[] charSpansResult = encoding.getCharTokenSpans();

Assert.assertEquals(expectedLength, charSpansResult.length);
Assert.assertEquals(charSpansExpected[0], charSpansResult[0]);
Assert.assertEquals(
charSpansExpected[expectedLength - 1], charSpansResult[expectedLength - 1]);

for (int i = 1; i < expectedLength - 1; i++) {
Assert.assertEquals(charSpansExpected[i].getStart(), charSpansResult[i].getStart());
Assert.assertEquals(charSpansExpected[i].getEnd(), charSpansResult[i].getEnd());
}

Encoding[] encodings = tokenizer.batchEncode(Arrays.asList(inputs));
Assert.assertEquals(encodings.length, 2);
}
Expand Down