/// Helpers for moving values onto the heap and "tweaking" the resulting raw
/// pointers so that a pointer handed across the C FFI boundary is never NULL,
/// even for Zero-Sized-Types (for which `Box` performs no real allocation).
///
/// NOTE(review): the extracted source had every generic argument (`<T>`,
/// `::<u8>`) stripped by HTML mangling; they are restored here so the module
/// compiles. Behavior follows exactly what the surviving code and comments
/// describe.
pub(crate) mod ObjOps {
	#[inline]
	#[must_use = "returns new dangling pointer"]
	/// Moves `obj` onto the heap and returns the FFI-facing pointer
	/// (see `nonnull_ptr_to_inner` for the ZST tweak).
	///
	/// To free, first invert the tweak with `untweak_ptr`, then reconstruct
	/// the `Box` via `Box::from_raw`.
	pub(crate) fn heap_alloc<T>(obj: T) -> *mut T {
		let ptr = Box::into_raw(Box::new(obj));
		nonnull_ptr_to_inner(ptr)
	}
	#[inline]
	/// Converts a heap (or, for ZSTs, dangling) pointer into the form we
	/// expose across the FFI. Inverted by `untweak_ptr`.
	pub(crate) fn nonnull_ptr_to_inner<T>(ptr: *const T) -> *mut T {
		if core::mem::size_of::<T>() == 0 {
			// We map `None::<T>` as `T { inner: null, .. }` which works great for all
			// non-Zero-Sized-Types `T`.
			// For ZSTs, we need to differentiate between null implying `None` and null implying
			// `Some` with no allocation.
			// Thus, for ZSTs, we add one (usually) page here, which should always be aligned.
			// Note that this relies on undefined behavior! A pointer to NULL may be valid, but a
			// pointer to NULL + 4096 is almost certainly not. That said, Rust's existing use of
			// `(*mut T)1` for the pointer we're adding to is also not defined, so we should be
			// fine.
			// Note that we add 4095 here as at least the Java client assumes that the low bit on
			// any heap pointer is 0, which is generally provided by malloc, but which is not true
			// for ZSTs "allocated" by `Box::new`.
			debug_assert_eq!(ptr as usize, 1);
			unsafe { (ptr as *mut T).cast::<u8>().add(4096 - 1).cast::<T>() }
		} else {
			// In order to get better test coverage, also increment non-ZST pointers with
			// --cfg=test_mod_pointers, which is set in genbindings.sh for debug builds.
			#[cfg(test_mod_pointers)]
			unsafe { (ptr as *mut T).cast::<u8>().add(4096).cast::<T>() }
			#[cfg(not(test_mod_pointers))]
			unsafe { ptr as *mut T }
		}
	}
	#[inline]
	/// Invert nonnull_ptr_to_inner
	pub(crate) fn untweak_ptr<T>(ptr: *mut T) -> *mut T {
		if core::mem::size_of::<T>() == 0 {
			// Undo the 4095-byte ZST offset applied in `nonnull_ptr_to_inner`,
			// recovering the original `1`-valued dangling pointer.
			unsafe { ptr.cast::<u8>().sub(4096 - 1).cast::<T>() }
		} else {
			#[cfg(test_mod_pointers)]
			unsafe { ptr.cast::<u8>().sub(4096).cast::<T>() }
			#[cfg(not(test_mod_pointers))]
			ptr
		}
	}
}