diff --git a/src/server/mod.rs b/src/server/mod.rs
index 6ecc75d1d..bffdf427a 100644
--- a/src/server/mod.rs
+++ b/src/server/mod.rs
@@ -219,10 +219,10 @@ pub trait IoStream: AsyncRead + AsyncWrite + 'static {
     fn read_available(&mut self, buf: &mut BytesMut) -> Poll<bool, io::Error> {
         let mut read_some = false;
         loop {
+            if buf.remaining_mut() < LW_BUFFER_SIZE {
+                buf.reserve(HW_BUFFER_SIZE);
+            }
             unsafe {
-                if buf.remaining_mut() < LW_BUFFER_SIZE {
-                    buf.reserve(HW_BUFFER_SIZE);
-                }
                 match self.read(buf.bytes_mut()) {
                     Ok(n) => {
                         if n == 0 {
diff --git a/src/server/settings.rs b/src/server/settings.rs
index cf58e4321..ca5acb917 100644
--- a/src/server/settings.rs
+++ b/src/server/settings.rs
@@ -230,14 +230,16 @@ impl<H> WorkerSettings<H> {
     }
 
     pub fn set_date(&self, dst: &mut BytesMut, full: bool) {
-        if full {
-            let mut buf: [u8; 39] = unsafe { mem::uninitialized() };
-            buf[..6].copy_from_slice(b"date: ");
-            buf[6..35].copy_from_slice(&(unsafe { &*self.date.get() }.bytes));
-            buf[35..].copy_from_slice(b"\r\n\r\n");
-            dst.extend_from_slice(&buf);
-        } else {
-            dst.extend_from_slice(&(unsafe { &*self.date.get() }.bytes));
+        unsafe {
+            if full {
+                let mut buf: [u8; 39] = mem::uninitialized();
+                buf[..6].copy_from_slice(b"date: ");
+                buf[6..35].copy_from_slice(&(*self.date.get()).bytes);
+                buf[35..].copy_from_slice(b"\r\n\r\n");
+                dst.extend_from_slice(&buf);
+            } else {
+                dst.extend_from_slice(&(*self.date.get()).bytes);
+            }
         }
     }
 }
diff --git a/src/ws/mask.rs b/src/ws/mask.rs
index e99b950c8..d5d5ee92d 100644
--- a/src/ws/mask.rs
+++ b/src/ws/mask.rs
@@ -10,15 +10,6 @@ pub fn apply_mask(buf: &mut [u8], mask: u32) {
     unsafe { apply_mask_fast32(buf, mask) }
 }
 
-/// A safe unoptimized mask application.
-#[inline]
-#[allow(dead_code)]
-fn apply_mask_fallback(buf: &mut [u8], mask: &[u8; 4]) {
-    for (i, byte) in buf.iter_mut().enumerate() {
-        *byte ^= mask[i & 3];
-    }
-}
-
 /// Faster version of `apply_mask()` which operates on 8-byte blocks.
 ///
 /// unsafe because uses pointer math and bit operations for performance
@@ -99,13 +90,20 @@ unsafe fn xor_mem(ptr: *mut u8, mask: u32, len: usize) {
 
 #[cfg(test)]
 mod tests {
-    use super::{apply_mask, apply_mask_fallback};
-    use std::ptr;
+    use super::apply_mask;
+    use byteorder::{ByteOrder, LittleEndian};
+
+    /// A safe unoptimized mask application.
+    fn apply_mask_fallback(buf: &mut [u8], mask: &[u8; 4]) {
+        for (i, byte) in buf.iter_mut().enumerate() {
+            *byte ^= mask[i & 3];
+        }
+    }
 
     #[test]
     fn test_apply_mask() {
         let mask = [0x6d, 0xb6, 0xb2, 0x80];
-        let mask_u32: u32 = unsafe { ptr::read_unaligned(mask.as_ptr() as *const u32) };
+        let mask_u32: u32 = LittleEndian::read_u32(&mask);
 
         let unmasked = vec![
             0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17,
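
The `read_available` hunk hoists the buffer reservation, which is ordinary safe code, out of the `unsafe` block so the block covers only the raw read into uninitialized memory. A minimal standalone sketch of the same low/high watermark pattern follows; the constant values and the helper name `ensure_spare_capacity` are illustrative assumptions, not taken from the source, and the check is written against spare capacity directly so it behaves the same on bytes 0.4 (used by the diff, where `remaining_mut()` reports spare capacity) and bytes 1.x (where it does not):

```rust
use bytes::BytesMut;

// Illustrative watermark values; the real LW_BUFFER_SIZE/HW_BUFFER_SIZE
// constants are defined in src/server/mod.rs and may differ.
const LW_BUFFER_SIZE: usize = 4096;
const HW_BUFFER_SIZE: usize = 32_768;

/// Keep at least the low watermark of spare room in `buf`, growing by the
/// high watermark in one step to amortize allocations across many reads.
fn ensure_spare_capacity(buf: &mut BytesMut) {
    if buf.capacity() - buf.len() < LW_BUFFER_SIZE {
        buf.reserve(HW_BUFFER_SIZE);
    }
}

fn main() {
    let mut buf = BytesMut::with_capacity(16);
    ensure_spare_capacity(&mut buf);
    assert!(buf.capacity() - buf.len() >= LW_BUFFER_SIZE);
}
```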
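
In `set_date`, the settings.rs hunk merges the three separate `unsafe` expressions into a single block without changing behavior. The magic number 39 is `b"date: "` (6 bytes) plus a 29-byte HTTP date such as `Sun, 06 Nov 1994 08:49:37 GMT` plus `b"\r\n\r\n"` (4 bytes): 6 + 29 + 4 = 39. A safe sketch of the same layout, assuming a pre-rendered 29-byte date and the hypothetical helper name `write_full_date`:

```rust
use bytes::BytesMut;

/// Append `date: <29-byte HTTP date>\r\n\r\n` to `dst` without touching
/// uninitialized memory: 6 + 29 + 4 = 39 bytes in total.
fn write_full_date(dst: &mut BytesMut, date: &[u8; 29]) {
    let mut buf = [0u8; 39];
    buf[..6].copy_from_slice(b"date: ");
    buf[6..35].copy_from_slice(date);
    buf[35..].copy_from_slice(b"\r\n\r\n");
    dst.extend_from_slice(&buf);
}

fn main() {
    let mut dst = BytesMut::new();
    write_full_date(&mut dst, b"Sun, 06 Nov 1994 08:49:37 GMT");
    assert_eq!(dst.len(), 39);
    assert_eq!(&dst[..6], b"date: ");
    assert_eq!(&dst[35..], b"\r\n\r\n");
}
```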
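
For `src/ws/mask.rs`, the fallback moves into the test module that is now its only user, and the test mask is decoded with an explicit byte order: `ptr::read_unaligned` reads in the host's native order, so the old test was implicitly little-endian, while `LittleEndian::read_u32` pins the byte order down on every target. A standalone sketch of that equivalence and of the fallback's round-trip property, assuming the byteorder crate the diff itself pulls into the tests:

```rust
use byteorder::{ByteOrder, LittleEndian};

/// The safe byte-wise fallback, as it now appears inside the test module.
fn apply_mask_fallback(buf: &mut [u8], mask: &[u8; 4]) {
    for (i, byte) in buf.iter_mut().enumerate() {
        *byte ^= mask[i & 3];
    }
}

fn main() {
    let mask: [u8; 4] = [0x6d, 0xb6, 0xb2, 0x80];

    // On a little-endian host this equals the old
    // `ptr::read_unaligned(mask.as_ptr() as *const u32)`; naming the byte
    // order keeps the value stable on big-endian targets as well.
    let mask_u32: u32 = LittleEndian::read_u32(&mask);
    assert_eq!(mask_u32, u32::from_le_bytes(mask));

    // XOR masking is an involution: applying the mask twice round-trips.
    let original = vec![0xf3, 0x00, 0x01, 0x02, 0x03];
    let mut data = original.clone();
    apply_mask_fallback(&mut data, &mask);
    assert_ne!(data, original);
    apply_mask_fallback(&mut data, &mask);
    assert_eq!(data, original);
}
```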