use crate::{Entry, Iter};
+use std::mem::size_of;
use std::sync::Arc;
type Error = Box<dyn std::error::Error>;
fn decode(&self, ridx: usize) -> Result<usize> {
match self {
Restarts::Restart32(r) => {
- let len = 4;
+ let len = size_of::<u32>();
let start = ridx * len;
let end = start + len;
if end > r.len() {
}
}
Restarts::Restart64(r) => {
- let len = 8;
+ let len = size_of::<u64>();
let start = ridx * len;
let end = start + len;
if end > r.len() {
impl<'b> Block<'b> {
pub(crate) fn new(data: &'b [u8]) -> Result<Self> {
- let mut dlen = data.len();
-
- if data.len() < 4 {
+ if data.len() < size_of::<u32>() {
return Err("block data too short".into());
}
- dlen -= 4;
+ let rc_off = data.len() - size_of::<u32>();
- let nrestarts = u32::from_be_bytes(data[dlen..].try_into()?) as usize;
+ let nrestarts = u32::from_be_bytes(data[rc_off..].try_into()?) as usize;
// try 32-bit restarts
- let mut len_restarts = nrestarts * 4;
- if len_restarts > dlen {
+ if (nrestarts * size_of::<u32>()) > rc_off {
return Err("block data too short 2".into());
}
- let mut dlen = dlen - len_restarts;
+ let mut r_off = rc_off - (nrestarts * size_of::<u32>());
- let restarts = if dlen <= u32::MAX as usize {
- Restarts::Restart32(&data[dlen..data.len() - 4])
+ let restarts = if r_off <= u32::MAX as usize {
+ Restarts::Restart32(&data[r_off..rc_off])
} else {
// try 64-bit restarts
- dlen -= len_restarts;
- len_restarts *= 2;
- if len_restarts > dlen {
+ if (nrestarts * size_of::<u64>()) > rc_off {
return Err("block data too short 3".into());
}
- Restarts::Restart64(&data[dlen..data.len() - 4])
+ r_off = rc_off - (nrestarts * size_of::<u64>());
+ Restarts::Restart64(&data[r_off..rc_off])
};
Ok(Block {
- data: &data[..dlen - 4], // XXX - debugme -- off by 4
+ data: &data[..r_off],
restarts,
})
}
+use std::mem::size_of;
+
pub struct BlockBuilder {
prev_key: Vec<u8>,
data: Vec<u8>,
fn restart_size(&self) -> usize {
if self.data.len() > u32::MAX as usize {
- 4
+ size_of::<u64>()
} else {
- 8
+ size_of::<u32>()
}
}
if self.finished {
self.data.len()
} else {
- self.data.len() + self.restarts.len() * self.restart_size() + 4
+ self.data.len() + self.restarts.len() * self.restart_size() + size_of::<u32>()
}
}
if self.finished {
return self.data.as_slice();
}
+ self.finished = true;
let num_restarts = self.restarts.len();
assert!(num_restarts <= u32::MAX as usize);
- self.data.reserve(num_restarts * self.restart_size() + 4);
+ self.data
+ .reserve(num_restarts * self.restart_size() + size_of::<u32>());
match self.restart_size() {
4 => {
for b in self.restarts.iter().map(|r| u32::to_be_bytes(*r as u32)) {