// gstreamer/memory.rs

1// Take a look at the license at the top of the repository in the LICENSE file.
2
3use std::{
4    fmt,
5    marker::PhantomData,
6    mem,
7    ops::{Bound, Deref, DerefMut, RangeBounds},
8    ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{ffi, AllocationParams, Allocator, MemoryFlags};
14
// Generates the `Memory` (owned, refcounted) and `MemoryRef` (borrowed)
// wrapper types around `ffi::GstMemory`, with the GType supplied by
// `gst_memory_get_type()`.
mini_object_wrapper!(Memory, MemoryRef, ffi::GstMemory, || {
    ffi::gst_memory_get_type()
});
18
/// RAII guard over a mapped [`MemoryRef`].
///
/// Holds the FFI `GstMapInfo` for the duration of the mapping; the borrow of
/// the underlying `MemoryRef` tracked via `PhantomData` keeps the memory
/// alive and prevents conflicting mutation while mapped. `T` is a type-level
/// marker ([`Readable`] or [`Writable`]) selecting the allowed access.
pub struct MemoryMap<'a, T> {
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<(&'a MemoryRef, T)>,
}
23
/// Like [`MemoryMap`], but owns the underlying [`Memory`] instead of
/// borrowing it, so it carries no lifetime. Created by
/// `Memory::into_mapped_memory_readable()` / `..._writable()`; the memory can
/// be recovered with [`MappedMemory::into_memory`].
pub struct MappedMemory<T> {
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<(Memory, T)>,
}
28
impl fmt::Debug for Memory {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the `MemoryRef` impl; `self` deref-coerces to `&MemoryRef`.
        MemoryRef::fmt(self, f)
    }
}
34
35impl fmt::Debug for MemoryRef {
36    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
37        f.debug_struct("Memory")
38            .field("ptr", &self.as_ptr())
39            .field("allocator", &self.allocator())
40            .field("parent", &self.parent())
41            .field("maxsize", &self.maxsize())
42            .field("align", &self.align())
43            .field("offset", &self.offset())
44            .field("size", &self.size())
45            .field("flags", &self.flags())
46            .finish()
47    }
48}
49
/// Type-level marker for a read-only mapping; uninhabited, never instantiated.
pub enum Readable {}
/// Type-level marker for a read-write mapping; uninhabited, never instantiated.
pub enum Writable {}
52
impl Memory {
    // Allocates a new memory block of `size` bytes with the default allocator
    // and default allocation parameters.
    #[inline]
    pub fn with_size(size: usize) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
        }
    }

    // Allocates a new memory block of `size` bytes with the default allocator
    // and the given allocation parameters (alignment, padding, flags).
    #[inline]
    pub fn with_size_and_params(size: usize, params: &AllocationParams) -> Self {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::gst_allocator_alloc(
                ptr::null_mut(),
                size,
                params.as_ptr() as *mut _,
            ))
        }
    }

    /// Maps this memory for read access, consuming it.
    ///
    /// On success the returned [`MappedMemory`] owns the memory and keeps it
    /// mapped until it is dropped or converted back with
    /// [`MappedMemory::into_memory()`].
    ///
    /// # Errors
    ///
    /// If `gst_memory_map()` fails (e.g. the memory cannot be accessed
    /// readable), the original `Memory` is returned unchanged in `Err`.
    #[inline]
    pub fn into_mapped_memory_readable(self) -> Result<MappedMemory<Readable>, Self> {
        unsafe {
            // Ownership moves into the returned MappedMemory on success, so
            // suppress the normal Drop here.
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedMemory {
                    // SAFETY: gst_memory_map() filled map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                // Mapping failed: hand the untouched memory back to the caller.
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }

    /// Maps this memory for read and write access, consuming it.
    ///
    /// See [`into_mapped_memory_readable()`](Self::into_mapped_memory_readable)
    /// for details; on failure the original `Memory` is returned in `Err`.
    #[inline]
    pub fn into_mapped_memory_writable(self) -> Result<MappedMemory<Writable>, Self> {
        unsafe {
            // Same ownership handling as the readable variant above.
            let s = mem::ManuallyDrop::new(self);
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_memory_map(
                s.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedMemory {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(mem::ManuallyDrop::into_inner(s))
            }
        }
    }
}
141
impl MemoryRef {
    /// Returns the allocator this memory was allocated from, if any.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        unsafe {
            if self.0.allocator.is_null() {
                None
            } else {
                // SAFETY: `Allocator` wraps a `*mut ffi::GstAllocator`, so a
                // pointer to the non-null field can be reinterpreted as a
                // reference to `Allocator` with the same lifetime as `self`.
                Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator))
            }
        }
    }

    /// Returns the parent memory this memory is a sub-region of, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        unsafe {
            if self.0.parent.is_null() {
                None
            } else {
                Some(MemoryRef::from_ptr(self.0.parent))
            }
        }
    }

    /// Returns the maximum size this memory can be resized to.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the alignment of the memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the offset of the visible region within the allocated block.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the visible size of the memory.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns the memory flags (readonly, no-share, zero-prefixed, ...).
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    // Converts a Rust range over the *visible* size into the (offset, size)
    // pair expected by the C API.
    //
    // Panics if the range does not lie within `0..self.size()`. Note the
    // start bound must be strictly smaller than the current size.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let size = self.size();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < size, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => size,
        };
        assert!(end_offset <= size, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works exploiting wraparounds.
        let new_offset = start_offset as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    // Like `calculate_offset_size`, but the range is relative to the start of
    // the *allocated* block (maxsize), not the visible region; the current
    // offset is subtracted (with wraparound) so the C API sees a relative
    // adjustment.
    fn calculate_offset_size_maxsize(&self, range: impl RangeBounds<usize>) -> (isize, isize) {
        let maxsize = self.maxsize();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < maxsize, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => maxsize,
        };
        assert!(end_offset <= maxsize, "End offset after valid range");

        // Cast from usize to isize because that's literally how this works in the
        // implementation and how the upper half of the usize range can be made use of.
        //
        // The implementation works by exploiting wraparounds.
        let offset = self.offset();

        let new_offset = start_offset.wrapping_sub(offset) as isize;
        let new_size = end_offset.saturating_sub(start_offset) as isize;

        (new_offset, new_size)
    }

    /// Copies the given range (relative to the visible region) into a new,
    /// independent [`Memory`].
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Copies the given range (relative to the allocated block) into a new,
    /// independent [`Memory`].
    #[doc(alias = "gst_memory_copy")]
    pub fn copy_range_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_copy(self.as_mut_ptr(), offset, size)) }
    }

    /// Checks whether `self` and `mem2` are contiguous in the same parent
    /// memory; returns the offset of `self` in the parent if so.
    #[doc(alias = "gst_memory_is_span")]
    pub fn is_span(&self, mem2: &MemoryRef) -> Option<usize> {
        unsafe {
            let mut offset = mem::MaybeUninit::uninit();
            let res = from_glib(ffi::gst_memory_is_span(
                self.as_mut_ptr(),
                mem2.as_mut_ptr(),
                offset.as_mut_ptr(),
            ));
            if res {
                // SAFETY: gst_memory_is_span() wrote `offset` when returning TRUE.
                Some(offset.assume_init())
            } else {
                None
            }
        }
    }

    /// Returns `true` if this memory's allocator has the given memory type
    /// name.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_mut_ptr(),
                mem_type.to_glib_none().0,
            ))
        }
    }

    /// Maps the memory for read access; the map borrows `self` until dropped.
    #[inline]
    pub fn map_readable(&self) -> Result<MemoryMap<Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_memory_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    // SAFETY: map succeeded, so map_info is initialized.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory readable"))
            }
        }
    }

    /// Maps the memory for read/write access; requires a mutable borrow so no
    /// other Rust reference can observe the mutation.
    #[inline]
    pub fn map_writable(&mut self) -> Result<MemoryMap<Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_memory_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(MemoryMap {
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map memory writable"))
            }
        }
    }

    /// Creates a new memory sharing the given range (relative to the visible
    /// region) of `self` without copying.
    #[doc(alias = "gst_memory_share")]
    pub fn share(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Creates a new memory sharing the given range (relative to the
    /// allocated block) of `self` without copying.
    #[doc(alias = "gst_memory_share")]
    pub fn share_maxsize(&self, range: impl RangeBounds<usize>) -> Memory {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { from_glib_full(ffi::gst_memory_share(self.as_ptr() as *mut _, offset, size)) }
    }

    /// Resizes the visible region to the given range (relative to the current
    /// visible region).
    #[doc(alias = "gst_memory_resize")]
    pub fn resize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Resizes the visible region to the given range (relative to the
    /// allocated block).
    #[doc(alias = "gst_memory_resize")]
    pub fn resize_maxsize(&mut self, range: impl RangeBounds<usize>) {
        let (offset, size) = self.calculate_offset_size_maxsize(range);
        unsafe { ffi::gst_memory_resize(self.as_mut_ptr(), offset, size as usize) }
    }

    /// Returns a helper that hex-dumps the whole memory content when
    /// formatted with `{}` or `{:?}`.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump(&self) -> Dump {
        Dump {
            memory: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Like [`dump()`](Self::dump) but restricted to the given byte range.
    #[doc(alias = "gst_util_dump_mem")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump {
        Dump {
            memory: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
371
372impl<T> MemoryMap<'_, T> {
373    #[doc(alias = "get_size")]
374    #[inline]
375    pub fn size(&self) -> usize {
376        self.map_info.size
377    }
378
379    #[doc(alias = "get_memory")]
380    #[inline]
381    pub fn memory(&self) -> &MemoryRef {
382        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
383    }
384
385    #[inline]
386    pub fn as_slice(&self) -> &[u8] {
387        if self.map_info.size == 0 {
388            return &[];
389        }
390        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
391    }
392}
393
394impl MemoryMap<'_, Writable> {
395    #[inline]
396    pub fn as_mut_slice(&mut self) -> &mut [u8] {
397        if self.map_info.size == 0 {
398            return &mut [];
399        }
400        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
401    }
402}
403
impl<T> AsRef<[u8]> for MemoryMap<'_, T> {
    // Cheap byte-slice view of the mapped data.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
410
// Mutable slice access is only offered for writable maps.
impl AsMut<[u8]> for MemoryMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
417
// Lets a map be used wherever a byte slice is expected.
impl<T> Deref for MemoryMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
426
// Mutable deref is restricted to writable maps.
impl DerefMut for MemoryMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
433
434impl<T> fmt::Debug for MemoryMap<'_, T> {
435    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
436        f.debug_tuple("MemoryMap").field(&self.memory()).finish()
437    }
438}
439
440impl<'a, T> PartialEq for MemoryMap<'a, T> {
441    fn eq(&self, other: &MemoryMap<'a, T>) -> bool {
442        self.as_slice().eq(other.as_slice())
443    }
444}
445
// Byte-wise equality is a total equivalence relation, so `Eq` is sound.
impl<T> Eq for MemoryMap<'_, T> {}
447
impl<T> Drop for MemoryMap<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // Unmap on drop. The borrow tracked in `phantom` guarantees the
        // memory is still alive here; the map itself holds no reference.
        unsafe {
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
456
// SAFETY: the map holds only the raw map info plus a zero-sized marker; the
// mapped data is plain bytes with no thread-affine state. NOTE(review):
// relies on GStreamer permitting cross-thread access to mapped memory —
// consistent with the rest of the bindings.
unsafe impl<T> Send for MemoryMap<'_, T> {}
unsafe impl<T> Sync for MemoryMap<'_, T> {}
459
impl<T> MappedMemory<T> {
    /// Returns the mapped bytes as an immutable slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            // An empty mapping may have a null data pointer; don't build a
            // slice from it.
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    /// Returns the number of mapped bytes.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns a reference to the owned, mapped memory.
    #[doc(alias = "get_memory")]
    #[inline]
    pub fn memory(&self) -> &MemoryRef {
        unsafe { MemoryRef::from_ptr(self.map_info.memory) }
    }

    /// Unmaps and returns the underlying [`Memory`].
    #[inline]
    pub fn into_memory(self) -> Memory {
        // Suppress our Drop (which would unref the memory); instead transfer
        // the reference held via map_info into the returned `Memory` first,
        // then unmap.
        let mut s = mem::ManuallyDrop::new(self);
        let memory = unsafe { from_glib_full(s.map_info.memory) };
        unsafe {
            ffi::gst_memory_unmap(s.map_info.memory, &mut s.map_info);
        }

        memory
    }
}
492
493impl MappedMemory<Writable> {
494    #[inline]
495    pub fn as_mut_slice(&mut self) -> &mut [u8] {
496        if self.map_info.size == 0 {
497            return &mut [];
498        }
499        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
500    }
501}
502
impl<T> AsRef<[u8]> for MappedMemory<T> {
    // Cheap byte-slice view of the mapped data.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
509
// Mutable slice access is only offered for writable mappings.
impl AsMut<[u8]> for MappedMemory<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
516
// Lets a mapped memory be used wherever a byte slice is expected.
impl<T> Deref for MappedMemory<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
525
// Mutable deref is restricted to writable mappings.
impl DerefMut for MappedMemory<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
532
impl<T> Drop for MappedMemory<T> {
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // Take back ownership of the memory reference first, then unmap;
            // `_memory` is dropped at the end of the scope, releasing the
            // reference that the MappedMemory was holding.
            let _memory = Memory::from_glib_full(self.map_info.memory);
            ffi::gst_memory_unmap(self.map_info.memory, &mut self.map_info);
        }
    }
}
542
543impl<T> fmt::Debug for MappedMemory<T> {
544    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
545        f.debug_tuple("MappedMemory").field(&self.memory()).finish()
546    }
547}
548
549impl<T> PartialEq for MappedMemory<T> {
550    fn eq(&self, other: &MappedMemory<T>) -> bool {
551        self.as_slice().eq(other.as_slice())
552    }
553}
554
// Byte-wise equality is a total equivalence relation, so `Eq` is sound.
impl<T> Eq for MappedMemory<T> {}
556
// SAFETY: owns the memory and its map info; the mapped data is plain bytes
// with no thread-affine state (same reasoning as for `MemoryMap`).
unsafe impl<T> Send for MappedMemory<T> {}
unsafe impl<T> Sync for MappedMemory<T> {}
559
/// Helper returned by [`MemoryRef::dump`] / [`MemoryRef::dump_range`] that
/// hex-dumps (a range of) the memory contents when formatted with `{}` or
/// `{:?}`.
pub struct Dump<'a> {
    memory: &'a MemoryRef,
    start: Bound<usize>,
    end: Bound<usize>,
}
565
566impl Dump<'_> {
567    fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
568        let map = self.memory.map_readable().expect("Failed to map memory");
569        let data = map.as_slice();
570
571        let dump = crate::slice::Dump {
572            data,
573            start: self.start,
574            end: self.end,
575        };
576
577        if debug {
578            <crate::slice::Dump as fmt::Debug>::fmt(&dump, f)
579        } else {
580            <crate::slice::Dump as fmt::Display>::fmt(&dump, f)
581        }
582    }
583}
584
impl fmt::Display for Dump<'_> {
    // Plain hex dump.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
590
impl fmt::Debug for Dump<'_> {
    // Debug-formatted hex dump.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
596
/// Trait implemented by all memory types wrapping a `GstMemory`: the base
/// [`Memory`] and any specialized wrapper generated by
/// [`memory_object_wrapper!`].
///
/// # Safety
///
/// `check_memory_type` must return `true` only for memories that really are
/// instances of `Self`; the downcast helpers rely on this to reinterpret
/// pointers.
pub unsafe trait MemoryType: crate::prelude::IsMiniObject + AsRef<Memory>
where
    <Self as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    // Returns true if `mem` is an instance of this memory type.
    fn check_memory_type(mem: &MemoryRef) -> bool;
}
603
/// Error returned when extracting a typed memory from a `glib::Value` fails.
#[derive(Debug, thiserror::Error)]
pub enum MemoryTypeMismatchError {
    // The value did not contain a memory at all.
    #[error(transparent)]
    ValueTypeMismatch(#[from] glib::value::ValueTypeMismatchError),
    // The value contained a memory, but not of the requested specialized type.
    #[error("the memory is not of the requested type {requested}")]
    MemoryTypeMismatch { requested: &'static str },
}
611
/// `glib::value::ValueTypeChecker` verifying that a `glib::Value` holds a
/// memory of the specialized type `M`.
pub struct MemoryTypeValueTypeChecker<M>(PhantomData<M>);
613
unsafe impl<M> glib::value::ValueTypeChecker for MemoryTypeValueTypeChecker<M>
where
    M: MemoryType + glib::prelude::StaticType,
    <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
{
    type Error = glib::value::ValueTypeMismatchOrNoneError<MemoryTypeMismatchError>;

    // Two-step check: the value must hold a `Memory`, and that memory must
    // pass `M`'s type check.
    fn check(value: &glib::Value) -> Result<(), Self::Error> {
        skip_assert_initialized!();
        // Translate the generic value-type error into our richer error enum.
        let mem = value.get::<&Memory>().map_err(|err| match err {
            glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone => {
                glib::value::ValueTypeMismatchOrNoneError::UnexpectedNone
            }
            glib::value::ValueTypeMismatchOrNoneError::WrongValueType(err) => {
                glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                    MemoryTypeMismatchError::ValueTypeMismatch(err),
                )
            }
        })?;

        if mem.is_memory_type::<M>() {
            Ok(())
        } else {
            Err(glib::value::ValueTypeMismatchOrNoneError::WrongValueType(
                MemoryTypeMismatchError::MemoryTypeMismatch {
                    requested: std::any::type_name::<M>(),
                },
            ))
        }
    }
}
645
// Identity conversion, needed so the `MemoryType` ref-type bounds are
// satisfied by the base type itself.
impl AsRef<MemoryRef> for MemoryRef {
    #[inline]
    fn as_ref(&self) -> &MemoryRef {
        self
    }
}
652
// Identity conversion (mutable counterpart of the AsRef impl above).
impl AsMut<MemoryRef> for MemoryRef {
    #[inline]
    fn as_mut(&mut self) -> &mut MemoryRef {
        self
    }
}
659
// Identity conversion, required by the `MemoryType: AsRef<Memory>` supertrait.
impl AsRef<Memory> for Memory {
    #[inline]
    fn as_ref(&self) -> &Memory {
        self
    }
}
666
// The base `Memory` type accepts any `GstMemory`, so the check is trivially true.
unsafe impl MemoryType for Memory {
    #[inline]
    fn check_memory_type(_mem: &MemoryRef) -> bool {
        skip_assert_initialized!();
        true
    }
}
674
impl Memory {
    /// Downcasts `self` to the specialized memory type `M`, returning the
    /// original memory in `Err` when the type check fails.
    #[inline]
    pub fn downcast_memory<M: MemoryType>(self) -> Result<M, Self>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(&self) {
            // SAFETY: the type check passed, so reinterpreting the pointer as
            // `M`'s FFI type is valid per the `MemoryType` safety contract.
            unsafe { Ok(from_glib_full(self.into_glib_ptr() as *mut M::FfiType)) }
        } else {
            Err(self)
        }
    }
}
688
impl MemoryRef {
    /// Returns `true` if this memory is an instance of the specialized type `M`.
    #[inline]
    pub fn is_memory_type<M: MemoryType>(&self) -> bool
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        M::check_memory_type(self)
    }

    /// Downcasts this reference to the ref type of `M`, or `None` if the type
    /// check fails.
    #[inline]
    pub fn downcast_memory_ref<M: MemoryType>(&self) -> Option<&M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            // SAFETY: the type check passed; ref types are transparent
            // wrappers over the same underlying GstMemory.
            unsafe { Some(&*(self as *const Self as *const M::RefType)) }
        } else {
            None
        }
    }

    /// Mutable counterpart of [`downcast_memory_ref`](Self::downcast_memory_ref).
    #[inline]
    pub fn downcast_memory_mut<M: MemoryType>(&mut self) -> Option<&mut M::RefType>
    where
        <M as crate::prelude::IsMiniObject>::RefType: AsRef<MemoryRef> + AsMut<MemoryRef>,
    {
        if M::check_memory_type(self) {
            unsafe { Some(&mut *(self as *mut Self as *mut M::RefType)) }
        } else {
            None
        }
    }
}
722
/// Generates a specialized memory wrapper type around [`Memory`](crate::Memory).
///
/// Produces `$name`/`$ref_name` mini-object wrappers for `$ffi_name`,
/// implements [`MemoryType`](crate::memory::MemoryType) via `$mem_type_check`,
/// wires `Deref`/`AsRef`/`AsMut` up to the parent memory type(s), and adds the
/// GLib `Value` trait implementations. The second rule additionally generates
/// conversions to every grandparent type in a deeper hierarchy.
#[macro_export]
macro_rules! memory_object_wrapper {
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path) => {
        $crate::mini_object_wrapper!($name, $ref_name, $ffi_name);

        // SAFETY contract of MemoryType is delegated to the caller-supplied check.
        unsafe impl $crate::memory::MemoryType for $name {
            #[inline]
            fn check_memory_type(mem: &$crate::MemoryRef) -> bool {
                skip_assert_initialized!();
                $mem_type_check(mem)
            }
        }

        impl $name {
            // Fallible downcast to a more specialized memory type.
            #[inline]
            pub fn downcast_memory<M: $crate::memory::MemoryType>(self) -> Result<M, Self>
            where
                <M as $crate::miniobject::IsMiniObject>::RefType: AsRef<$crate::MemoryRef>
                    + AsMut<$crate::MemoryRef>
                    + AsRef<$ref_name>
                    + AsMut<$ref_name>,
            {
                if M::check_memory_type(&self) {
                    unsafe {
                        Ok($crate::glib::translate::from_glib_full(
                            self.into_glib_ptr() as *mut M::FfiType
                        ))
                    }
                } else {
                    Err(self)
                }
            }

            // Infallible upcast to a parent memory type (enforced by `Self: AsRef<M>`).
            #[inline]
            pub fn upcast_memory<M>(self) -> M
            where
                M: $crate::memory::MemoryType
                    + $crate::glib::translate::FromGlibPtrFull<
                        *const <M as $crate::miniobject::IsMiniObject>::FfiType,
                    >,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M>,
            {
                unsafe {
                    $crate::glib::translate::from_glib_full(
                        self.into_glib_ptr() as *const <M as $crate::miniobject::IsMiniObject>::FfiType
                    )
                }
            }
        }

        impl $ref_name {
            // Borrowed upcast to a parent ref type.
            #[inline]
            pub fn upcast_memory_ref<M>(&self) -> &M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_ref()
            }

            // Mutable counterpart of `upcast_memory_ref`.
            #[inline]
            pub fn upcast_memory_mut<M>(&mut self) -> &mut M::RefType
            where
                M: $crate::memory::MemoryType,
                <M as $crate::miniobject::IsMiniObject>::RefType:
                    AsRef<$crate::MemoryRef> + AsMut<$crate::MemoryRef>,
                Self: AsRef<M::RefType> + AsMut<M::RefType>
            {
                self.as_mut()
            }
        }

        // Deref to the parent ref type mirrors the C "inheritance" layout:
        // the specialized struct starts with the parent struct.
        impl std::ops::Deref for $ref_name {
            type Target = $parent_memory_ref_type;

            #[inline]
            fn deref(&self) -> &Self::Target {
                unsafe { &*(self as *const _ as *const Self::Target) }
            }
        }

        impl std::ops::DerefMut for $ref_name {
            #[inline]
            fn deref_mut(&mut self) -> &mut Self::Target {
                unsafe { &mut *(self as *mut _ as *mut Self::Target) }
            }
        }

        impl AsRef<$parent_memory_type> for $name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_type {
                unsafe { &*(self as *const _ as *const $parent_memory_type) }
            }
        }

        impl AsRef<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_ref(&self) -> &$parent_memory_ref_type {
                self
            }
        }

        impl AsMut<$parent_memory_ref_type> for $ref_name {
            #[inline]
            fn as_mut(&mut self) -> &mut $parent_memory_ref_type {
                &mut *self
            }
        }

        impl $crate::glib::types::StaticType for $name {
            #[inline]
            // NOTE(review): return type uses a bare `glib::` path (unlike the
            // `$crate::glib::` used elsewhere), so callers need `glib` in scope.
            fn static_type() -> glib::types::Type {
                $ref_name::static_type()
            }
        }

        impl $crate::glib::types::StaticType for $ref_name {
            #[inline]
            fn static_type() -> $crate::glib::types::Type {
                unsafe { $crate::glib::translate::from_glib($crate::ffi::gst_memory_get_type()) }
            }
        }

        impl $crate::glib::value::ValueType for $name {
            type Type = Self;
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<Self>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                $crate::glib::translate::from_glib_none($crate::glib::gobject_ffi::g_value_get_boxed(
                    $crate::glib::translate::ToGlibPtr::to_glib_none(value).0,
                ) as *mut $ffi_name)
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a $crate::glib::Value) -> Self {
                skip_assert_initialized!();
                // Borrowing directly from the GValue's storage only works if
                // the wrapper is pointer-sized.
                assert_eq!(
                    std::mem::size_of::<$name>(),
                    std::mem::size_of::<$crate::glib::ffi::gpointer>()
                );
                let value = &*(value as *const $crate::glib::Value as *const $crate::glib::gobject_ffi::GValue);
                let ptr = &value.data[0].v_pointer as *const $crate::glib::ffi::gpointer
                    as *const *const $ffi_name;
                debug_assert!(!(*ptr).is_null());
                &*(ptr as *const $name)
            }
        }

        impl $crate::glib::value::ToValue for $name {
            fn to_value(&self) -> $crate::glib::Value {
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(self).0
                            as *mut _,
                    )
                }
                value
            }

            fn value_type(&self) -> glib::Type {
                <Self as $crate::glib::prelude::StaticType>::static_type()
            }
        }

        impl $crate::glib::value::ToValueOptional for $name {
            fn to_value_optional(s: Option<&Self>) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<Self>();
                unsafe {
                    $crate::glib::gobject_ffi::g_value_set_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::ToGlibPtr::<*const $ffi_name>::to_glib_none(&s).0
                            as *mut _,
                    )
                }
                value
            }
        }

        impl From<$name> for $crate::glib::Value {
            fn from(v: $name) -> $crate::glib::Value {
                skip_assert_initialized!();
                let mut value = $crate::glib::Value::for_value_type::<$name>();
                unsafe {
                    // take_boxed transfers ownership of `v` into the value.
                    $crate::glib::gobject_ffi::g_value_take_boxed(
                        $crate::glib::translate::ToGlibPtrMut::to_glib_none_mut(&mut value).0,
                        $crate::glib::translate::IntoGlibPtr::<*mut $ffi_name>::into_glib_ptr(v) as *mut _,
                    )
                }
                value
            }
        }

        unsafe impl<'a> $crate::glib::value::FromValue<'a> for &'a $ref_name {
            type Checker = $crate::memory::MemoryTypeValueTypeChecker<$name>;

            unsafe fn from_value(value: &'a glib::Value) -> Self {
                skip_assert_initialized!();
                &*($crate::glib::gobject_ffi::g_value_get_boxed($crate::glib::translate::ToGlibPtr::to_glib_none(value).0)
                    as *const $ref_name)
            }
        }

        // Can't have SetValue/SetValueOptional impls as otherwise one could use it to get
        // immutable references from a mutable reference without borrowing via the value
    };
    ($name:ident, $ref_name:ident, $ffi_name:path, $mem_type_check:expr, $parent_memory_type:path, $parent_memory_ref_type:path, $($parent_parent_memory_type:path, $parent_parent_memory_ref_type:path),*) => {
        // Deeper hierarchies: expand the base rule, then add conversions to
        // every further ancestor type.
        $crate::memory_object_wrapper!($name, $ref_name, $ffi_name, $mem_type_check, $parent_memory_type, $parent_memory_ref_type);

        $(
            impl AsRef<$parent_parent_memory_type> for $name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_type {
                    unsafe { &*(self as *const _ as *const $parent_parent_memory_type) }
                }
            }

            impl AsRef<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_ref(&self) -> &$parent_parent_memory_ref_type {
                    self
                }
            }

            impl AsMut<$parent_parent_memory_ref_type> for $ref_name {
                #[inline]
                fn as_mut(&mut self) -> &mut $parent_parent_memory_ref_type {
                    &mut *self
                }
            }
        )*
    };
}
969
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
#[doc(alias = "GstMemory")]
/// Non-owning, read-only view of a `ffi::GstMemory` used by tracing hooks.
///
/// Instances are only ever materialized by reinterpreting a raw
/// `*mut ffi::GstMemory` (see `MemoryRefTrace::from_ptr`), so the struct must
/// be `#[repr(transparent)]`: without it the layout of a single-field newtype
/// is unspecified and the pointer cast would not be guaranteed sound.
#[repr(transparent)]
pub struct MemoryRefTrace(ffi::GstMemory);
#[cfg(feature = "v1_26")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_26")))]
impl MemoryRefTrace {
    /// Reborrows a raw `GstMemory` pointer as a `MemoryRefTrace`.
    ///
    /// Panics if `ptr` is null.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `GstMemory` that outlives the chosen
    /// lifetime `'a` and is not mutated through other aliases while the
    /// returned reference is live.
    pub unsafe fn from_ptr<'a>(ptr: *mut ffi::GstMemory) -> &'a MemoryRefTrace {
        assert!(!ptr.is_null());

        &*(ptr as *const MemoryRefTrace)
    }

    /// Returns a raw pointer to the underlying `GstMemory`.
    pub fn as_ptr(&self) -> *const ffi::GstMemory {
        self as *const MemoryRefTrace as *const ffi::GstMemory
    }

    /// Returns the allocator this memory belongs to, if one is set.
    #[doc(alias = "get_allocator")]
    #[inline]
    pub fn allocator(&self) -> Option<&Allocator> {
        if self.0.allocator.is_null() {
            return None;
        }

        // SAFETY: non-null was checked above; reinterpreting a reference to
        // the `allocator` pointer field as `&Allocator` is the same
        // representation cast used by the other memory accessors in this
        // module, and the resulting borrow is tied to `self`.
        unsafe { Some(&*(&self.0.allocator as *const *mut ffi::GstAllocator as *const Allocator)) }
    }

    /// Returns the parent memory this one was derived from, if any.
    #[doc(alias = "get_parent")]
    #[inline]
    pub fn parent(&self) -> Option<&MemoryRef> {
        if self.0.parent.is_null() {
            None
        } else {
            // SAFETY: the parent pointer is non-null and stays valid for at
            // least as long as `self` is borrowed.
            unsafe { Some(MemoryRef::from_ptr(self.0.parent)) }
        }
    }

    /// Returns the `maxsize` field of the underlying memory.
    #[doc(alias = "get_maxsize")]
    #[inline]
    pub fn maxsize(&self) -> usize {
        self.0.maxsize
    }

    /// Returns the `align` field of the underlying memory.
    #[doc(alias = "get_align")]
    #[inline]
    pub fn align(&self) -> usize {
        self.0.align
    }

    /// Returns the `offset` field of the underlying memory.
    #[doc(alias = "get_offset")]
    #[inline]
    pub fn offset(&self) -> usize {
        self.0.offset
    }

    /// Returns the `size` field of the underlying memory.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.0.size
    }

    /// Returns the memory flags, read from the embedded mini-object header.
    #[doc(alias = "get_flags")]
    #[inline]
    pub fn flags(&self) -> crate::MemoryFlags {
        unsafe { from_glib(self.0.mini_object.flags) }
    }

    /// Checks whether this memory is of the given memory type.
    #[doc(alias = "gst_memory_is_type")]
    pub fn is_type(&self, mem_type: &str) -> bool {
        let matches: bool = unsafe {
            from_glib(ffi::gst_memory_is_type(
                self.as_ptr() as *mut ffi::GstMemory,
                mem_type.to_glib_none().0,
            ))
        };
        matches
    }
}
1051
#[cfg(test)]
mod tests {
    // Maps a memory object for reading, both through a temporary `MemoryMap`
    // and by consuming the memory into a `MappedMemory` and converting back.
    #[test]
    fn test_map() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        // Consuming map: takes ownership of the memory while mapped...
        let mem = mem.into_mapped_memory_readable().unwrap();
        assert_eq!(mem.as_slice(), &[1, 2, 3, 4]);

        // ...and gives the (unmapped) memory back, which must still be
        // readable with the same contents.
        let mem = mem.into_memory();
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
    }

    // Sub-memories created via `share()` view a range of the parent, while
    // `share_maxsize()` can extend past the parent's offset/size up to maxsize.
    #[test]
    fn test_share() {
        crate::init().unwrap();

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let sub = mem.share(1..=2); // [2, 3]
        let sub_sub1 = sub.share(1..=1); // [3]
        let sub_sub2 = sub.share_maxsize(0..4); // [1, 2, 3, 4]

        // Sharing must not disturb the parent's view of the data.
        let map = mem.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);

        let map = sub.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[2, 3]);
        drop(map);

        let map = sub_sub1.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[3]);
        drop(map);

        let map = sub_sub2.map_readable().unwrap();
        assert_eq!(map.as_slice(), &[1, 2, 3, 4]);
        drop(map);
    }

    // Exercises the `dump()`/`dump_range()` formatting: `Debug` produces an
    // offset-prefixed hexdump with an ASCII column, `Display` plain hex bytes.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000:  01 02 03 04                                       ...."
        );
        s.clear();
        write!(&mut s, "{}", mem.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        // Range selection: full range, prefix, and an inclusive sub-range
        // (offsets in the dump are relative to the memory, not the range).
        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", mem.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000:  01 02 03 04                                       ...."
        );
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000:  01 02                                             .."
        );
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002:  03 04                                             .."
        );
        s.clear();
        // Out-of-range requests don't panic; they render a marker string.
        write!(&mut s, "{:?}", mem.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", mem.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // More than 16 bytes wraps onto additional 16-byte lines.
        let mem = crate::Memory::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", mem.dump()).unwrap();
        assert_eq!(
            s,
            "0000:  00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................\n\
             0010:  00 00 00                                          ..."
        );
        s.clear();
    }

    // Round-trips `Memory` through `glib::Value`, including the `None` case
    // for optional boxed values.
    #[test]
    fn test_value() {
        use glib::prelude::*;

        crate::init().unwrap();

        // A None memory stored in a Value must come back out as Ok(None).
        let v = None::<&crate::Memory>.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(None)));

        let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
        let v = mem.to_value();
        assert!(matches!(v.get::<Option<crate::Memory>>(), Ok(Some(_))));
        assert!(v.get::<crate::Memory>().is_ok());
    }
}