
Commit

Use u8 tensors for masks. (#273)
LaurentMazare authored Jul 29, 2023
1 parent 50d8273 commit 4bf2ebf
Showing 6 changed files with 6 additions and 10 deletions.
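
The commit message doesn't spell it out, but the payoff is simple: these masks only ever hold 0 or 1, so a u8 element carries the same information in a quarter of the space of a u32. A back-of-the-envelope check (illustrative, not part of the diff):

fn main() {
    // A t-by-t mask stores t*t elements; u8 takes 1 byte each vs 4 for u32.
    let t = 4096usize;
    println!("u32 mask: {} bytes", 4 * t * t); // 67108864 (64 MiB)
    println!("u8 mask:  {} bytes", t * t); // 16777216 (16 MiB)
}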
2 changes: 1 addition & 1 deletion candle-examples/examples/bigcode/model.rs
@@ -24,7 +24,7 @@ fn layer_norm(size: usize, eps: f64, vb: VarBuilder) -> Result<LayerNorm> {
 
 fn make_causal_mask(t: usize, device: &Device) -> Result<Tensor> {
     let mask: Vec<_> = (0..t)
-        .flat_map(|i| (0..t).map(move |j| u32::from(j <= i)))
+        .flat_map(|i| (0..t).map(move |j| u8::from(j <= i)))
         .collect();
     let mask = Tensor::from_slice(&mask, (t, t), device)?;
     Ok(mask)
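
The bigcode mask uses the "keep" convention: entry (i, j) is 1 when query position i may attend to key position j (j <= i), i.e. a lower-triangular matrix of ones. A self-contained sketch of the new function (the body is taken from the hunk above; the main wrapper is illustrative, and the candle_core import path is an assumption since the examples import the crate as candle):

use candle_core::{Device, Result, Tensor};

fn make_causal_mask(t: usize, device: &Device) -> Result<Tensor> {
    // 1 where position i may attend to position j, i.e. j <= i.
    let mask: Vec<_> = (0..t)
        .flat_map(|i| (0..t).map(move |j| u8::from(j <= i)))
        .collect();
    Tensor::from_slice(&mask, (t, t), device)
}

fn main() -> Result<()> {
    // For t = 3 this yields a lower-triangular u8 matrix:
    // [1, 0, 0]
    // [1, 1, 0]
    // [1, 1, 1]
    let mask = make_causal_mask(3, &Device::Cpu)?;
    println!("{mask}");
    Ok(())
}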
2 changes: 1 addition & 1 deletion candle-examples/examples/falcon/model.rs
@@ -424,7 +424,7 @@ pub struct Falcon {
 
 fn make_causal_mask(t: usize) -> Result<Tensor> {
     let mask: Vec<_> = (0..t)
-        .flat_map(|i| (0..t).map(move |j| u32::from(j > i)))
+        .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
         .collect();
     let mask = Tensor::from_slice(&mask, (t, t), &Device::Cpu)?;
     Ok(mask)
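
The falcon mask (and the llama-family ones below) uses the opposite convention: 1 marks positions to be masked out (j > i, strictly upper-triangular). A 0/1 u8 mask in this convention is typically applied by replacing the marked attention scores with negative infinity before the softmax. A sketch in the style of the masked_fill helpers in these examples, assuming candle's Tensor::where_cond accepts a u8 condition tensor (which is what this commit relies on):

use candle_core::{Result, Tensor};

// Where `mask` is 1, substitute `on_true` (e.g. f32::NEG_INFINITY) so that
// softmax assigns the position zero weight; elsewhere keep `on_false`.
// `mask` and `on_false` are assumed to share a shape here.
fn masked_fill(on_false: &Tensor, mask: &Tensor, on_true: f32) -> Result<Tensor> {
    let on_true = Tensor::new(on_true, on_false.device())?.broadcast_as(mask.shape().dims())?;
    mask.where_cond(&on_true, on_false)
}

Called as masked_fill(&attn_scores, &mask, f32::NEG_INFINITY)? just before the softmax.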
3 changes: 1 addition & 2 deletions candle-examples/examples/llama/model.rs
@@ -91,9 +91,8 @@ impl Cache {
         if let Some(mask) = masks.get(&t) {
             Ok(mask.clone())
         } else {
-            // TODO: If we support bool or u8 tensors, this would be better.
             let mask: Vec<_> = (0..t)
-                .flat_map(|i| (0..t).map(move |j| u32::from(j > i)))
+                .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
                 .collect();
             let mask = Tensor::from_slice(&mask, (t, t), &self.device)?;
             masks.insert(t, mask.clone());
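
In the four llama-family files the change also removes the TODO above the loop ("If we support bool or u8 tensors, this would be better"): u8 is now exactly what the code builds, so the comment is obsolete. For context, the surrounding code caches one mask per sequence length so each mask is materialized only once; a self-contained sketch of that pattern (the struct and field names are illustrative, not copied from the example):

use std::collections::HashMap;
use candle_core::{Device, Result, Tensor};

struct MaskCache {
    masks: HashMap<usize, Tensor>,
    device: Device,
}

impl MaskCache {
    fn mask(&mut self, t: usize) -> Result<Tensor> {
        if let Some(mask) = self.masks.get(&t) {
            Ok(mask.clone())
        } else {
            // Build the strictly upper-triangular u8 mask once per length t.
            let mask: Vec<_> = (0..t)
                .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
                .collect();
            let mask = Tensor::from_slice(&mask, (t, t), &self.device)?;
            self.masks.insert(t, mask.clone());
            Ok(mask)
        }
    }
}

Handing out clones is cheap: candle tensors are reference-counted, so Tensor::clone shares the underlying storage rather than copying it.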
3 changes: 1 addition & 2 deletions candle-examples/examples/llama2-c/model.rs
@@ -47,9 +47,8 @@ impl Cache {
         if let Some(mask) = masks.get(&t) {
             Ok(mask.clone())
         } else {
-            // TODO: If we support bool or u8 tensors, this would be better.
             let mask: Vec<_> = (0..t)
-                .flat_map(|i| (0..t).map(move |j| u32::from(j > i)))
+                .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
                 .collect();
             let mask = Tensor::from_slice(&mask, (t, t), &self.device)?;
             masks.insert(t, mask.clone());
3 changes: 1 addition & 2 deletions candle-examples/examples/llama_multiprocess/model.rs
@@ -179,9 +179,8 @@ impl Cache {
         if let Some(mask) = masks.get(&t) {
             Ok(mask.clone())
         } else {
-            // TODO: If we support bool or u8 tensors, this would be better.
             let mask: Vec<_> = (0..t)
-                .flat_map(|i| (0..t).map(move |j| u32::from(j > i)))
+                .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
                 .collect();
             let mask = Tensor::from_slice(&mask, (t, t), &self.device)?;
             masks.insert(t, mask.clone());
3 changes: 1 addition & 2 deletions candle-wasm-examples/llama2-c/src/model.rs
@@ -47,9 +47,8 @@ impl Cache {
         if let Some(mask) = masks.get(&t) {
             Ok(mask.clone())
         } else {
-            // TODO: If we support bool or u8 tensors, this would be better.
             let mask: Vec<_> = (0..t)
-                .flat_map(|i| (0..t).map(move |j| u32::from(j > i)))
+                .flat_map(|i| (0..t).map(move |j| u8::from(j > i)))
                 .collect();
             let mask = Tensor::from_slice(&mask, (t, t), &self.device)?;
             masks.insert(t, mask.clone());
