} else {
let start = idx * size_of::<u32>();
let end = start + size_of::<u32>();
- Ok(u32::from_be_bytes(restarts[start..end].try_into()?) as usize)
+ Ok(u32::from_le_bytes(restarts[start..end].try_into()?) as usize)
}
}
RestartType::U64 => {
} else {
let start = idx * size_of::<u64>();
let end = start + size_of::<u64>();
- Ok(u64::from_be_bytes(restarts[start..end].try_into()?) as usize)
+ Ok(u64::from_le_bytes(restarts[start..end].try_into()?) as usize)
}
}
}
}
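// the last four bytes of the block hold the restart count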
let rc_off = data.as_ref().len() - size_of::<u32>();
- let restart_count = u32::from_be_bytes(data.as_ref()[rc_off..].try_into()?) as usize;
+ let restart_count = u32::from_le_bytes(data.as_ref()[rc_off..].try_into()?) as usize;
// try 32-bit restarts first: restart_count four-byte offsets must fit in the bytes before the count
if (restart_count * size_of::<u32>()) > rc_off {
let mut bi = b.into_iter();
bi.seek(&u32::to_be_bytes(40));
assert_eq!(bi.next().unwrap().key.as_ref(), &u32::to_be_bytes(40));
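+ // seeking between existing keys should land on the next entry (40)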
+ bi.seek(&u32::to_be_bytes(32));
+ assert_eq!(bi.next().unwrap().key.as_ref(), &u32::to_be_bytes(40));
}
}
.reserve(num_restarts * self.restart_size() + size_of::<u32>());
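// append each restart offset at the configured width, then the u32 count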
match self.restart_size() {
4 => {
- for b in self.restarts.iter().map(|r| u32::to_be_bytes(*r as u32)) {
+ for b in self.restarts.iter().map(|r| u32::to_le_bytes(*r as u32)) {
self.data.extend_from_slice(&b);
}
}
8 => {
- for b in self.restarts.iter().map(|r| u64::to_be_bytes(*r as u64)) {
+ for b in self.restarts.iter().map(|r| u64::to_le_bytes(*r as u64)) {
self.data.extend_from_slice(&b);
}
}
_ => unreachable!(),
};
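// the count is written last so readers can locate the restart array from the block tail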
self.data
- .extend_from_slice(&u32::to_be_bytes(num_restarts as u32));
+ .extend_from_slice(&u32::to_le_bytes(num_restarts as u32));
self.data.as_slice()
}
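For reference, a minimal sketch of the little-endian tail layout these hunks converge on: fixed-width restart offsets followed by a u32 restart count. The helper names (encode_tail, decode_tail) are illustrative only and not part of the patch; the sketch assumes 32-bit offsets.

use std::mem::size_of;

// Append each restart offset as a little-endian u32, then the offset count.
fn encode_tail(data: &mut Vec<u8>, restarts: &[usize]) {
    for r in restarts {
        data.extend_from_slice(&u32::to_le_bytes(*r as u32));
    }
    data.extend_from_slice(&u32::to_le_bytes(restarts.len() as u32));
}

// Read the count from the last four bytes and return
// (restart_count, byte offset where the restart array begins).
fn decode_tail(data: &[u8]) -> (usize, usize) {
    let rc_off = data.len() - size_of::<u32>();
    let count = u32::from_le_bytes(data[rc_off..].try_into().unwrap()) as usize;
    (count, rc_off - count * size_of::<u32>())
}

fn main() {
    let mut data = Vec::new();
    // entry bytes would normally precede the tail; omitted here
    encode_tail(&mut data, &[0, 17, 42]);
    assert_eq!(decode_tail(&data), (3, 0));
}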