Merge pull request #20 from EricLBuehler/candle_bump
Bump candle version to 0.6.0
EricLBuehler authored Aug 20, 2024
2 parents 9dc75e1 + 956d967 commit 0a8ebe9
Showing 6 changed files with 19 additions and 19 deletions.
6 changes: 3 additions & 3 deletions Cargo.toml
@@ -17,9 +17,9 @@ categories = ["science"]
 license = "MIT OR Apache-2.0"

 [workspace.dependencies]
-candle-core = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
-candle-examples = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
-candle-nn = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
+candle-core = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
+candle-examples = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
+candle-nn = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
 either = "1.9.0"
 serde_json = "1.0.107"
 thiserror = "1.0.48"
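For context, the member crates in this workspace pick the bumped versions up automatically through Cargo's workspace inheritance (candle-lora-transformers already does this, e.g. `candle-nn.workspace = true` below), so each dependency only needs its pin changed here. A minimal member-crate sketch, assuming a crate that uses candle-core and candle-nn:

```toml
# Illustrative member Cargo.toml: inherit the workspace-pinned candle 0.6.0
# dependencies rather than repeating the git URL and version in every crate.
[dependencies]
candle-core.workspace = true
candle-nn.workspace = true
```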
4 changes: 2 additions & 2 deletions candle-lora-macro/Cargo.toml
@@ -16,8 +16,8 @@ proc-macro2 = "1.0.66"
 quote_into = "0.2.0"
 syn = { version = "2.0.32", features = ["full", "extra-traits"] }
 candle-lora = { path = "../candle-lora", package = "candle-lora", version = "0.2.0" }
-candle-core = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
-candle-nn = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
+candle-core = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
+candle-nn = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
 quote = "1.0.33"

 [lib]
16 changes: 8 additions & 8 deletions candle-lora-macro/src/lib.rs
@@ -591,16 +591,16 @@ pub fn auto_lora_convert(tokens: TokenStream1) -> TokenStream1 {
 #embed_option1_stream

 if !linear.is_empty() && linear_config.is_none() {
-    panic!("Config not speified for linear layers.");
+    panic!("Config not specified for linear layers.");
 }
 if !conv1d.is_empty() && conv1d_config.is_none() {
-    panic!("Config not speified for conv1d layers.");
+    panic!("Config not specified for conv1d layers.");
 }
 if !conv2d.is_empty() && conv2d_config.is_none() {
-    panic!("Config not speified for conv2d layers.");
+    panic!("Config not specified for conv2d layers.");
 }
 if !embed.is_empty() && embed_config.is_none() {
-    panic!("Config not speified for embedding layers.");
+    panic!("Config not specified for embedding layers.");
 }

 let mut builder = candle_lora::SelectedLayersBuilder::new();
@@ -650,16 +650,16 @@ pub fn auto_lora_convert(tokens: TokenStream1) -> TokenStream1 {
 #embed_option1_stream

 if !linear.is_empty() && linear_config.is_none() {
-    panic!("Config not speified for linear layers.");
+    panic!("Config not specified for linear layers.");
 }
 if !conv1d.is_empty() && conv1d_config.is_none() {
-    panic!("Config not speified for conv1d layers.");
+    panic!("Config not specified for conv1d layers.");
 }
 if !conv2d.is_empty() && conv2d_config.is_none() {
-    panic!("Config not speified for conv2d layers.");
+    panic!("Config not specified for conv2d layers.");
 }
 if !embed.is_empty() && embed_config.is_none() {
-    panic!("Config not speified for embedding layers.");
+    panic!("Config not specified for embedding layers.");
 }

 let mut builder = candle_lora::SelectedLayersBuilder::new();
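These guards live inside the code generated by `auto_lora_convert`, so the typo fix changes the panic message users actually see. To make the pattern easier to follow in isolation, here is a minimal standalone sketch, with candle-lora's per-layer config reduced to a placeholder type and the collected layer map simplified:

```rust
use std::collections::HashMap;

// Placeholder for candle-lora's per-layer config; the real type carries
// the LoRA hyperparameters (rank, alpha, dropout, ...).
#[allow(dead_code)]
struct LinearConfigPlaceholder;

fn check_linear_selection(
    linear: &HashMap<String, ()>,
    linear_config: &Option<LinearConfigPlaceholder>,
) {
    // Mirrors the generated guard: selecting linear layers for conversion
    // without supplying a linear config is a hard error at conversion time.
    if !linear.is_empty() && linear_config.is_none() {
        panic!("Config not specified for linear layers.");
    }
}

fn main() {
    // No layers selected and no config: the guard passes silently.
    check_linear_selection(&HashMap::new(), &None);
}
```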
8 changes: 4 additions & 4 deletions candle-lora-transformers/Cargo.toml
@@ -18,13 +18,13 @@ candle-examples.workspace = true
 candle-lora = { version = "0.2.0", path = "../candle-lora" }
 candle-lora-macro = { version = "0.2.0", path = "../candle-lora-macro" }
 candle-nn.workspace = true
-candle-transformers = { git = "https://github.com/huggingface/candle.git", version = "0.5.0" }
-candle-flash-attn = { git = "https://github.com/huggingface/candle.git", version = "0.5.0", optional = true }
-clap = "4.4.7"
+candle-transformers = { git = "https://github.com/huggingface/candle.git", version = "0.6.0" }
+candle-flash-attn = { git = "https://github.com/huggingface/candle.git", version = "0.6.0", optional = true }
+clap = { version = "4.4.7", features = ["derive"] }
 hf-hub = "0.3.2"
 serde = "1.0.189"
 serde_json.workspace = true
-tokenizers = "0.15.1"
+tokenizers = "0.19.1"
 tracing = "0.1.40"
 tracing-chrome = "0.7.1"
 tracing-subscriber = "0.3.17"
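Two of these changes go beyond the candle bump itself: clap gains the `derive` feature and tokenizers moves from 0.15.1 to 0.19.1. The `derive` feature is what enables clap's attribute-based CLI definitions; a minimal sketch of the style it unlocks (the `cpu` flag is illustrative, not taken from this diff):

```rust
use clap::Parser;

// This derive only compiles with clap's `derive` feature enabled, which is
// why the feature now appears explicitly in Cargo.toml.
#[derive(Parser, Debug)]
struct Args {
    /// Run on CPU instead of an accelerator (illustrative flag).
    #[arg(long)]
    cpu: bool,
}

fn main() {
    let args = Args::parse();
    println!("cpu = {}", args.cpu);
}
```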
2 changes: 1 addition & 1 deletion candle-lora-transformers/src/dinov2.rs
@@ -417,7 +417,7 @@ impl DinoVisionTransformer {
 let blocks = (0..depth)
     .map(|i| {
         Block::new(
-            vb_b.pp(&i.to_string()),
+            vb_b.pp(i.to_string()),
             embed_dim,
             num_heads,
             merge,
2 changes: 1 addition & 1 deletion candle-lora-transformers/src/t5.rs
@@ -653,7 +653,7 @@ impl T5Block {
     None
 };
 let ff_i = if cross_attn.is_some() { 2 } else { 1 };
-let ff = T5LayerFF::load(vb.pp(&ff_i.to_string()), cfg, merge, lora_config)?;
+let ff = T5LayerFF::load(vb.pp(ff_i.to_string()), cfg, merge, lora_config)?;
 Ok(Self {
     self_attn,
     cross_attn,
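The `&i.to_string()` → `i.to_string()` changes here and in dinov2.rs above look like needless-borrow cleanups: `VarBuilder::pp` is generic over `impl ToString`, so the owned `String` can be passed directly. A minimal sketch of the call pattern, assuming candle-core and candle-nn 0.6 as dependencies:

```rust
use candle_core::{DType, Device};
use candle_nn::VarBuilder;

fn main() {
    // An all-zeros VarBuilder is enough to demonstrate prefix scoping.
    let vb = VarBuilder::zeros(DType::F32, &Device::Cpu);
    let vb_blocks = vb.pp("blocks");
    for i in 0..3 {
        // `pp` accepts any `impl ToString`, so the owned String returned by
        // `i.to_string()` can be passed without the extra borrow.
        let _vb_i = vb_blocks.pp(i.to_string()); // scopes under "blocks.0", "blocks.1", ...
    }
}
```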
