use libc::c_void;
use win_util::create_file_mapping;
use win_util::duplicate_handle;
use winapi::um::winnt::PAGE_READWRITE;
pub use super::mmap_platform::MemoryMappingArena;
use crate::AsRawDescriptor;
use crate::Descriptor;
use crate::FromRawDescriptor;
use crate::MappedRegion;
use crate::MemoryMapping as CrateMemoryMapping;
use crate::MemoryMappingBuilder;
use crate::MmapError as Error;
use crate::MmapResult as Result;
use crate::Protection;
use crate::RawDescriptor;
use crate::SafeDescriptor;
fn validate_includes_range(mmap_size: usize, offset: usize, range_size: usize) -> Result<()> {
let end_offset = offset
.checked_add(range_size)
.ok_or(Error::InvalidAddress)?;
if end_offset <= mmap_size {
Ok(())
} else {
Err(Error::InvalidAddress)
}
}
impl dyn MappedRegion {
    /// Flushes dirty pages in `[offset, offset + size)` of this mapping back
    /// to its backing file via `FlushViewOfFile`.
    ///
    /// # Errors
    ///
    /// * `Error::InvalidAddress` if the range overflows or does not fit
    ///   inside the mapping.
    /// * `Error::SystemCallFailed` if the OS flush fails.
    pub fn msync(&self, offset: usize, size: usize) -> Result<()> {
        validate_includes_range(self.size(), offset, size)?;
        // SAFETY: the range was validated above to lie entirely within this
        // mapping, so the pointer/length pair handed to the OS is valid.
        let flushed = unsafe {
            use winapi::um::memoryapi::FlushViewOfFile;
            // FlushViewOfFile returns a BOOL: nonzero on success, 0 on failure.
            // Test it directly instead of translating through a -1/0 sentinel.
            FlushViewOfFile((self.as_ptr() as usize + offset) as *mut libc::c_void, size) != 0
        };
        if flushed {
            Ok(())
        } else {
            // Capture the OS error immediately after the failed call.
            Err(Error::SystemCallFailed(super::Error::last()))
        }
    }
}
/// A view of mapped memory on Windows.
///
/// NOTE(review): construction lives in `super::mmap_platform`; `addr`
/// presumably comes from `MapViewOfFile` — confirm against that module.
#[derive(Debug)]
pub struct MemoryMapping {
    // Base address of the mapped view.
    pub(crate) addr: *mut c_void,
    // Length of the mapped view in bytes.
    pub(crate) size: usize,
}
// SAFETY: `MemoryMapping` only holds a raw pointer/size pair describing an
// OS-owned mapping; sending or sharing it across threads is assumed sound as
// long as the constructors' validity invariant holds — invariant is
// established in mmap_platform, not visible here (TODO confirm).
unsafe impl Send for MemoryMapping {}
unsafe impl Sync for MemoryMapping {}
impl MemoryMapping {
pub fn new(size: usize) -> Result<MemoryMapping> {
MemoryMapping::new_protection(size, Protection::read_write())
}
pub fn from_descriptor(
file_handle: &dyn AsRawDescriptor,
size: usize,
) -> Result<MemoryMapping> {
MemoryMapping::from_descriptor_offset(file_handle, size, 0)
}
pub fn from_raw_descriptor(file_handle: RawDescriptor, size: usize) -> Result<MemoryMapping> {
MemoryMapping::from_descriptor_offset(&Descriptor(file_handle), size, 0)
}
pub fn from_descriptor_offset(
file_handle: &dyn AsRawDescriptor,
size: usize,
offset: u64,
) -> Result<MemoryMapping> {
MemoryMapping::from_descriptor_offset_protection(
file_handle,
size,
offset,
Protection::read_write(),
)
}
pub(crate) fn range_end(&self, offset: usize, count: usize) -> Result<usize> {
let mem_end = offset.checked_add(count).ok_or(Error::InvalidAddress)?;
if mem_end > self.size() {
return Err(Error::InvalidAddress);
}
Ok(mem_end)
}
}
// SAFETY: `addr` and `size` describe the single region this struct owns, so
// the pointer/size pair reported here stays valid for the struct's lifetime
// (validity is established by the constructors in mmap_platform — TODO
// confirm).
unsafe impl MappedRegion for MemoryMapping {
    // Base address of the mapping.
    fn as_ptr(&self) -> *mut u8 {
        self.addr as *mut u8
    }
    // Length of the mapping in bytes.
    fn size(&self) -> usize {
        self.size
    }
}
impl CrateMemoryMapping {
    /// Builds a crate-level `MemoryMapping` over `size` bytes at `addr`,
    /// with no backing file descriptor retained.
    pub fn from_raw_ptr(addr: RawDescriptor, size: usize) -> Result<CrateMemoryMapping> {
        let mapping = MemoryMapping::from_raw_ptr(addr, size)?;
        Ok(CrateMemoryMapping {
            mapping,
            _file_descriptor: None,
        })
    }
}
/// Windows-only extension methods for [`MemoryMappingBuilder`].
pub trait MemoryMappingBuilderWindows<'a> {
    #[allow(clippy::wrong_self_convention)]
    /// Supplies the descriptor to map. Despite the `from_` prefix this is a
    /// builder setter, not a conversion (hence the clippy allow above).
    fn from_descriptor(self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder<'a>;
}
impl<'a> MemoryMappingBuilderWindows<'a> for MemoryMappingBuilder<'a> {
    /// Records `descriptor` as the source the eventual `build` will map.
    fn from_descriptor(self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder<'a> {
        let mut builder = self;
        builder.descriptor = Some(descriptor);
        builder
    }
}
impl<'a> MemoryMappingBuilder<'a> {
    /// Builds the memory mapping described by this builder.
    ///
    /// When a descriptor was supplied and `is_file_descriptor` is set, a
    /// file-mapping object is first created over the descriptor with
    /// `create_file_mapping` and the view is mapped from that object, with
    /// the original descriptor duplicated and retained (see `wrap`).
    /// Otherwise the descriptor is used for the mapping directly, and with
    /// no descriptor at all an anonymous mapping is created.
    pub fn build(self) -> Result<CrateMemoryMapping> {
        match self.descriptor {
            Some(descriptor) => {
                let mapping_descriptor = if self.is_file_descriptor {
                    // NOTE(review): safety relies on `descriptor` being a
                    // valid file handle for the duration of this call —
                    // guaranteed here by the borrow held in `self.descriptor`.
                    let mapping_handle = unsafe {
                        create_file_mapping(
                            Some(descriptor.as_raw_descriptor()),
                            self.size as u64,
                            PAGE_READWRITE,
                            None,
                        )
                    }
                    .map_err(Error::StdSyscallFailed)?;
                    // SAFETY: create_file_mapping succeeded, so
                    // `mapping_handle` is a freshly created handle owned by
                    // us and safe to hand to SafeDescriptor for closing.
                    Some(unsafe { SafeDescriptor::from_raw_descriptor(mapping_handle) })
                } else {
                    None
                };
                MemoryMappingBuilder::wrap(
                    MemoryMapping::from_descriptor_offset_protection(
                        // Map from the new file-mapping object when one was
                        // created; otherwise from the caller's descriptor.
                        match mapping_descriptor.as_ref() {
                            Some(descriptor) => descriptor as &dyn AsRawDescriptor,
                            None => descriptor,
                        },
                        self.size,
                        self.offset.unwrap_or(0),
                        self.protection.unwrap_or_else(Protection::read_write),
                    )?,
                    // Only the file-backed path keeps the original
                    // descriptor alive alongside the mapping.
                    if self.is_file_descriptor {
                        self.descriptor
                    } else {
                        None
                    },
                )
            }
            None => MemoryMappingBuilder::wrap(
                MemoryMapping::new_protection(
                    self.size,
                    self.protection.unwrap_or_else(Protection::read_write),
                )?,
                None,
            ),
        }
    }
    /// Wraps `mapping` in the crate-level `MemoryMapping`, duplicating
    /// `file_descriptor` (when given) so the returned value owns an
    /// independent handle to the backing file.
    pub fn wrap(
        mapping: MemoryMapping,
        file_descriptor: Option<&'a dyn AsRawDescriptor>,
    ) -> Result<CrateMemoryMapping> {
        let file_descriptor = match file_descriptor {
            // SAFETY: duplicate_handle returned a new handle owned by us, so
            // transferring it into a SafeDescriptor (which closes on drop)
            // is sound.
            Some(descriptor) => unsafe {
                Some(SafeDescriptor::from_raw_descriptor(
                    duplicate_handle(descriptor.as_raw_descriptor())
                        .map_err(Error::StdSyscallFailed)?,
                ))
            },
            None => None,
        };
        Ok(CrateMemoryMapping {
            mapping,
            _file_descriptor: file_descriptor,
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::SharedMemory;
    use crate::VolatileMemory;
    use crate::VolatileMemoryError;

    // Wraps a platform mapping in the crate-level type exercised by the
    // public API, without retaining a file descriptor.
    fn to_crate_mmap(mapping: MemoryMapping) -> crate::MemoryMapping {
        crate::MemoryMapping {
            mapping,
            _file_descriptor: None,
        }
    }

    #[test]
    fn basic_map() {
        // The mapping may be smaller than the shared memory backing it.
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 1024).unwrap());
        assert_eq!(mmap.size(), 1024);
    }

    #[test]
    fn test_write_past_end() {
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 5).unwrap());
        // Writing 6 bytes into a 5-byte mapping truncates to a partial write.
        let written = mmap.write_slice(&[1, 2, 3, 4, 5, 6], 0);
        assert!(written.is_ok());
        assert_eq!(written.unwrap(), 5);
    }

    #[test]
    fn slice_size() {
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 5).unwrap());
        let slice = mmap.get_slice(2, 3).unwrap();
        assert_eq!(slice.size(), 3);
    }

    #[test]
    fn slice_addr() {
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 5).unwrap());
        let slice = mmap.get_slice(2, 3).unwrap();
        // The slice aliases the mapping at the requested offset.
        assert_eq!(slice.as_ptr(), unsafe { mmap.as_ptr().offset(2) });
    }

    #[test]
    fn slice_overflow_error() {
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 5).unwrap());
        // usize::MAX + 3 overflows, which must surface as Overflow.
        let err = mmap.get_slice(usize::MAX, 3).unwrap_err();
        assert_eq!(
            err,
            VolatileMemoryError::Overflow {
                base: usize::MAX,
                offset: 3,
            }
        );
    }

    #[test]
    fn slice_oob_error() {
        let shm = SharedMemory::new("test", 1028).unwrap();
        let mmap = to_crate_mmap(MemoryMapping::from_descriptor(&shm, 5).unwrap());
        // 3 + 3 = 6 lands one byte past the 5-byte mapping.
        let err = mmap.get_slice(3, 3).unwrap_err();
        assert_eq!(err, VolatileMemoryError::OutOfBounds { addr: 6 });
    }
}