Skip to content

Commit 71e468c

Browse files
anforowicz authored and BurntSushi committed
safety: introduce ALIGN_MASK based on core::mem::align_of
Fixes #194, Closes #197
1 parent 77e5fd7 commit 71e468c

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

src/ascii.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@
2424
#[cfg(any(test, miri, not(target_arch = "x86_64")))]
2525
const USIZE_BYTES: usize = core::mem::size_of::<usize>();
2626
#[cfg(any(test, miri, not(target_arch = "x86_64")))]
27+
const ALIGN_MASK: usize = core::mem::align_of::<usize>() - 1;
28+
#[cfg(any(test, miri, not(target_arch = "x86_64")))]
2729
const FALLBACK_LOOP_SIZE: usize = 2 * USIZE_BYTES;
2830

2931
// This is a mask where the most significant bit of each byte in the usize
@@ -53,7 +55,6 @@ pub fn first_non_ascii_byte(slice: &[u8]) -> usize {
5355

5456
#[cfg(any(test, miri, not(target_arch = "x86_64")))]
5557
fn first_non_ascii_byte_fallback(slice: &[u8]) -> usize {
56-
let align = USIZE_BYTES - 1;
5758
let start_ptr = slice.as_ptr();
5859
let end_ptr = slice[slice.len()..].as_ptr();
5960
let mut ptr = start_ptr;
@@ -69,7 +70,7 @@ fn first_non_ascii_byte_fallback(slice: &[u8]) -> usize {
6970
return first_non_ascii_byte_mask(mask);
7071
}
7172

72-
ptr = ptr_add(ptr, USIZE_BYTES - (start_ptr as usize & align));
73+
ptr = ptr_add(ptr, USIZE_BYTES - (start_ptr as usize & ALIGN_MASK));
7374
debug_assert!(ptr > start_ptr);
7475
debug_assert!(ptr_sub(end_ptr, USIZE_BYTES) >= start_ptr);
7576
if slice.len() >= FALLBACK_LOOP_SIZE {

src/byteset/scalar.rs

+3-4
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
use core::{cmp, usize};
66

77
const USIZE_BYTES: usize = core::mem::size_of::<usize>();
8+
const ALIGN_MASK: usize = core::mem::align_of::<usize>() - 1;
89

910
// The number of bytes to loop at in one iteration of memchr/memrchr.
1011
const LOOP_SIZE: usize = 2 * USIZE_BYTES;
@@ -22,7 +23,6 @@ pub fn inv_memchr(n1: u8, haystack: &[u8]) -> Option<usize> {
2223
let vn1 = repeat_byte(n1);
2324
let confirm = |byte| byte != n1;
2425
let loop_size = cmp::min(LOOP_SIZE, haystack.len());
25-
let align = USIZE_BYTES - 1;
2626
let start_ptr = haystack.as_ptr();
2727

2828
unsafe {
@@ -38,7 +38,7 @@ pub fn inv_memchr(n1: u8, haystack: &[u8]) -> Option<usize> {
3838
return forward_search(start_ptr, end_ptr, ptr, confirm);
3939
}
4040

41-
ptr = ptr.add(USIZE_BYTES - (start_ptr as usize & align));
41+
ptr = ptr.add(USIZE_BYTES - (start_ptr as usize & ALIGN_MASK));
4242
debug_assert!(ptr > start_ptr);
4343
debug_assert!(end_ptr.sub(USIZE_BYTES) >= start_ptr);
4444
while loop_size == LOOP_SIZE && ptr <= end_ptr.sub(loop_size) {
@@ -62,7 +62,6 @@ pub fn inv_memrchr(n1: u8, haystack: &[u8]) -> Option<usize> {
6262
let vn1 = repeat_byte(n1);
6363
let confirm = |byte| byte != n1;
6464
let loop_size = cmp::min(LOOP_SIZE, haystack.len());
65-
let align = USIZE_BYTES - 1;
6665
let start_ptr = haystack.as_ptr();
6766

6867
unsafe {
@@ -78,7 +77,7 @@ pub fn inv_memrchr(n1: u8, haystack: &[u8]) -> Option<usize> {
7877
return reverse_search(start_ptr, end_ptr, ptr, confirm);
7978
}
8079

81-
ptr = ptr.sub(end_ptr as usize & align);
80+
ptr = ptr.sub(end_ptr as usize & ALIGN_MASK);
8281
debug_assert!(start_ptr <= ptr && ptr <= end_ptr);
8382
while loop_size == LOOP_SIZE && ptr >= start_ptr.add(loop_size) {
8483
debug_assert_eq!(0, (ptr as usize) % USIZE_BYTES);

0 commit comments

Comments (0)