1use std::{
4 cmp, fmt,
5 marker::PhantomData,
6 mem, ops,
7 ops::{Bound, ControlFlow, Range, RangeBounds},
8 ptr, slice,
9};
10
11use glib::translate::*;
12
13use crate::{
14 ffi, meta::*, BufferCursor, BufferFlags, BufferRefCursor, ClockTime, Memory, MemoryRef,
15};
16
// Zero-variant marker types used as type-state parameters on `BufferMap`,
// `MappedBuffer` and `BufferCursor` to distinguish read-only from writable
// mappings at compile time. They are never instantiated.
pub enum Readable {}
pub enum Writable {}
19
/// Action returned from the closure passed to [`BufferRef::foreach_meta_mut`]:
/// keep the currently visited meta on the buffer, or remove it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BufferMetaForeachAction {
    Keep,
    Remove,
}
25
// Generates the refcounted `Buffer` wrapper and the borrowed `BufferRef` view
// around `GstBuffer`, including `get_mut()`, `as_mut_ptr()`, `is_writable()`
// and the glib translation traits used throughout this file.
mini_object_wrapper!(Buffer, BufferRef, ffi::GstBuffer, || {
    ffi::gst_buffer_get_type()
});
29
/// A borrowed mapping of a buffer's memory, unmapped on drop.
/// `T` is `Readable` or `Writable` and selects which slice accessors exist.
pub struct BufferMap<'a, T> {
    buffer: &'a BufferRef,
    // Filled in by gst_buffer_map(); required again for gst_buffer_unmap().
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
35
/// An owned buffer together with an active mapping of its memory.
/// Unlike `BufferMap` it keeps the `Buffer` alive itself; see `into_buffer()`.
pub struct MappedBuffer<T> {
    buffer: Buffer,
    map_info: ffi::GstMapInfo,
    phantom: PhantomData<T>,
}
41
impl Buffer {
    /// Creates a new, empty buffer with no memory attached.
    #[doc(alias = "gst_buffer_new")]
    #[inline]
    pub fn new() -> Self {
        assert_initialized_main_thread!();

        unsafe { from_glib_full(ffi::gst_buffer_new()) }
    }

    /// Creates a buffer backed by a single newly allocated memory block of
    /// `size` bytes using the default allocator.
    ///
    /// Returns an error if the allocation fails.
    #[doc(alias = "gst_buffer_new_allocate")]
    #[doc(alias = "gst_buffer_new_and_alloc")]
    #[inline]
    pub fn with_size(size: usize) -> Result<Self, glib::BoolError> {
        assert_initialized_main_thread!();

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_new_allocate(
                ptr::null_mut(),
                size,
                ptr::null_mut(),
            ))
            .ok_or_else(|| glib::bool_error!("Failed to allocate buffer"))
        }
    }

    /// Creates a buffer that wraps `slice` without copying it.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_mut_slice<T: AsMut<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_mut_slice(slice);
        let mut buffer = Buffer::new();
        {
            // A freshly created buffer is always writable, so this can't fail.
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            // Clear TAG_MEMORY: appending memory sets it, but for a brand-new
            // wrapping buffer the memory should not be flagged as changed.
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Creates a buffer that wraps `slice` read-only without copying it.
    #[doc(alias = "gst_buffer_new_wrapped")]
    #[doc(alias = "gst_buffer_new_wrapped_full")]
    #[inline]
    pub fn from_slice<T: AsRef<[u8]> + Send + 'static>(slice: T) -> Self {
        assert_initialized_main_thread!();

        let mem = Memory::from_slice(slice);
        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(mem);
            buffer.unset_flags(BufferFlags::TAG_MEMORY);
        }

        buffer
    }

    /// Tries to recover the value of type `T` previously wrapped via
    /// `from_slice` / `from_mut_slice`.
    ///
    /// Fails (returning the buffer back) unless the buffer is writable and
    /// holds exactly one memory block that wraps a `T`.
    #[inline]
    pub fn try_into_inner<T: 'static>(self) -> Result<T, (Self, crate::MemoryIntoInnerError)> {
        if self.n_memory() != 1 {
            return Err((self, crate::MemoryIntoInnerError::MultipleMemoryBlocks));
        }

        if !self.is_writable() {
            return Err((self, crate::MemoryIntoInnerError::NotWritable));
        }

        unsafe {
            let mem_ptr = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), 0);

            // n_memory() == 1 was checked above, so index 0 must exist.
            assert!(
                !mem_ptr.is_null(),
                "peek_memory returned null after validation - this is a bug"
            );

            match crate::memory_wrapped::try_into_from_memory_ptr(mem_ptr) {
                Ok(value) => {
                    // The inner value was extracted; detach the now-empty
                    // memory block from the buffer before returning.
                    ffi::gst_buffer_remove_memory(self.as_mut_ptr(), 0);

                    Ok(value)
                }
                Err(err) => Err((self, err)),
            }
        }
    }

    /// Consumes the buffer and maps it for reading; on failure the buffer is
    /// handed back unchanged.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_readable(self) -> Result<MappedBuffer<Readable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    // SAFETY: gst_buffer_map() initialized map_info on success.
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer and maps it for reading and writing; on failure
    /// the buffer is handed back unchanged.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn into_mapped_buffer_writable(self) -> Result<MappedBuffer<Writable>, Self> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res: bool = from_glib(ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            ));
            if res {
                Ok(MappedBuffer {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(self)
            }
        }
    }

    /// Consumes the buffer into a read cursor over its bytes.
    #[inline]
    pub fn into_cursor_readable(self) -> BufferCursor<Readable> {
        BufferCursor::new_readable(self)
    }

    /// Consumes the buffer into a write cursor; fails if the buffer's memory
    /// is not writable.
    #[inline]
    pub fn into_cursor_writable(self) -> Result<BufferCursor<Writable>, glib::BoolError> {
        BufferCursor::new_writable(self)
    }

    /// Appends all memory of `other` to this buffer, consuming `other`.
    #[doc(alias = "gst_buffer_append")]
    pub fn append(&mut self, other: Self) {
        unsafe {
            // gst_buffer_append() may return a different buffer instance
            // (e.g. when self was not writable), so adopt the returned ptr.
            let ptr = ffi::gst_buffer_append(self.as_mut_ptr(), other.into_glib_ptr());
            self.replace_ptr(ptr);
        }
    }
}
256
257impl Default for Buffer {
258 fn default() -> Self {
259 Self::new()
260 }
261}
262
impl BufferRef {
    /// Maps the buffer's combined memory for reading.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_readable(&self) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res =
                ffi::gst_buffer_map(self.as_mut_ptr(), map_info.as_mut_ptr(), ffi::GST_MAP_READ);
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    // SAFETY: map_info was initialized by gst_buffer_map().
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps the buffer's combined memory for reading and writing.
    #[doc(alias = "gst_buffer_map")]
    #[inline]
    pub fn map_writable(&mut self) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map(
                self.as_mut_ptr(),
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }

    // Converts a Rust range over *memory block indices* into the
    // (start index, length) pair expected by the C memory-range APIs,
    // validating the bounds against n_memory().
    fn memory_range_into_idx_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(u32, i32), glib::BoolError> {
        let n_memory = self.n_memory();
        debug_assert!(n_memory <= u32::MAX as usize);

        let start_idx = match range.start_bound() {
            ops::Bound::Included(idx) if *idx >= n_memory => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            // checked_add guards against usize::MAX overflowing to 0.
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= n_memory) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > n_memory) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > n_memory => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => n_memory,
        };

        Ok((
            start_idx as u32,
            i32::try_from(end_idx - start_idx).map_err(|_| glib::bool_error!("Too large range"))?,
        ))
    }

    /// Maps a range of the buffer's memory blocks for reading.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_readable(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<'_, Readable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READ,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer readable"))
            }
        }
    }

    /// Maps a range of the buffer's memory blocks for reading and writing.
    #[doc(alias = "gst_buffer_map_range")]
    #[inline]
    pub fn map_range_writable(
        &mut self,
        range: impl RangeBounds<usize>,
    ) -> Result<BufferMap<'_, Writable>, glib::BoolError> {
        let (idx, len) = self.memory_range_into_idx_len(range)?;
        unsafe {
            let mut map_info = mem::MaybeUninit::uninit();
            let res = ffi::gst_buffer_map_range(
                self.as_mut_ptr(),
                idx,
                len,
                map_info.as_mut_ptr(),
                ffi::GST_MAP_READWRITE,
            );
            if res == glib::ffi::GTRUE {
                Ok(BufferMap {
                    buffer: self,
                    map_info: map_info.assume_init(),
                    phantom: PhantomData,
                })
            } else {
                Err(glib::bool_error!("Failed to map buffer writable"))
            }
        }
    }

    // Converts a Rust range over *byte offsets* into the (offset, length)
    // pair expected by the C byte-range APIs, validating against size().
    pub(crate) fn byte_range_into_offset_len(
        &self,
        range: impl RangeBounds<usize>,
    ) -> Result<(usize, usize), glib::BoolError> {
        let size = self.size();

        let start_idx = match range.start_bound() {
            ops::Bound::Included(idx) if *idx >= size => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Included(idx) => *idx,
            ops::Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= size) => {
                return Err(glib::bool_error!("Invalid range start"));
            }
            ops::Bound::Excluded(idx) => *idx + 1,
            ops::Bound::Unbounded => 0,
        };

        let end_idx = match range.end_bound() {
            ops::Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > size) => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Included(idx) => *idx + 1,
            ops::Bound::Excluded(idx) if *idx > size => {
                return Err(glib::bool_error!("Invalid range end"));
            }
            ops::Bound::Excluded(idx) => *idx,
            ops::Bound::Unbounded => size,
        };

        Ok((start_idx, end_idx - start_idx))
    }

    /// Creates a new buffer containing a copy of the given byte range,
    /// copying the aspects selected by `flags` (memory, metas, timestamps, …).
    #[doc(alias = "gst_buffer_copy_region")]
    pub fn copy_region(
        &self,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<Buffer, glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_region(
                self.as_mut_ptr(),
                flags.into_glib(),
                offset,
                size,
            ))
            .ok_or_else(|| glib::bool_error!("Failed to copy region of buffer"))
        }
    }

    /// Copies the selected aspects of the given byte range into `dest`.
    #[doc(alias = "gst_buffer_copy_into")]
    pub fn copy_into(
        &self,
        dest: &mut BufferRef,
        flags: crate::BufferCopyFlags,
        range: impl RangeBounds<usize>,
    ) -> Result<(), glib::BoolError> {
        let (offset, size) = self.byte_range_into_offset_len(range)?;

        unsafe {
            glib::result_from_gboolean!(
                ffi::gst_buffer_copy_into(
                    dest.as_mut_ptr(),
                    self.as_mut_ptr(),
                    flags.into_glib(),
                    offset,
                    size,
                ),
                "Failed to copy into destination buffer",
            )
        }
    }

    /// Copies `slice` into the buffer starting at `offset`.
    ///
    /// # Panics
    ///
    /// Panics if the slice does not fit into the buffer's maxsize at `offset`.
    /// On a short write, returns the number of bytes actually copied as `Err`.
    #[doc(alias = "gst_buffer_fill")]
    pub fn copy_from_slice(&mut self, offset: usize, slice: &[u8]) -> Result<(), usize> {
        let maxsize = self.maxsize();
        let size = slice.len();

        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let src = slice.as_ptr();
            ffi::gst_buffer_fill(
                self.as_mut_ptr(),
                offset,
                src as glib::ffi::gconstpointer,
                size,
            )
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }

    /// Extracts bytes starting at `offset` into `slice`.
    ///
    /// # Panics
    ///
    /// Panics if the requested range exceeds the buffer's size.
    /// On a short read, returns the number of bytes actually copied as `Err`.
    #[doc(alias = "gst_buffer_extract")]
    pub fn copy_to_slice(&self, offset: usize, slice: &mut [u8]) -> Result<(), usize> {
        let maxsize = self.size();
        let size = slice.len();

        assert!(maxsize >= offset && maxsize - offset >= size);

        let copied = unsafe {
            let dest = slice.as_mut_ptr();
            ffi::gst_buffer_extract(self.as_mut_ptr(), offset, dest as glib::ffi::gpointer, size)
        };

        if copied == size {
            Ok(())
        } else {
            Err(copied)
        }
    }

    /// Creates a deep copy of the buffer: memory contents are duplicated,
    /// not shared.
    #[doc(alias = "gst_buffer_copy_deep")]
    pub fn copy_deep(&self) -> Result<Buffer, glib::BoolError> {
        unsafe {
            Option::<_>::from_glib_full(ffi::gst_buffer_copy_deep(self.as_ptr()))
                .ok_or_else(|| glib::bool_error!("Failed to deep copy buffer"))
        }
    }

    /// Returns the total size in bytes of all memory in the buffer.
    #[doc(alias = "get_size")]
    #[doc(alias = "gst_buffer_get_size")]
    pub fn size(&self) -> usize {
        unsafe { ffi::gst_buffer_get_size(self.as_mut_ptr()) }
    }

    /// Returns the maximum size the buffer can be resized to without
    /// reallocating its memory.
    #[doc(alias = "get_maxsize")]
    pub fn maxsize(&self) -> usize {
        unsafe {
            let mut maxsize = mem::MaybeUninit::uninit();
            // length -1 selects all memory blocks.
            ffi::gst_buffer_get_sizes_range(
                self.as_mut_ptr(),
                0,
                -1,
                ptr::null_mut(),
                maxsize.as_mut_ptr(),
            );

            maxsize.assume_init()
        }
    }

    /// Sets the buffer's total size.
    ///
    /// # Panics
    ///
    /// Panics if `size` exceeds `maxsize()`.
    #[doc(alias = "gst_buffer_set_size")]
    pub fn set_size(&mut self, size: usize) {
        assert!(self.maxsize() >= size);

        unsafe {
            ffi::gst_buffer_set_size(self.as_mut_ptr(), size as isize);
        }
    }

    /// Returns the buffer's media-specific offset (e.g. byte or sample offset).
    #[doc(alias = "get_offset")]
    #[doc(alias = "GST_BUFFER_OFFSET")]
    #[inline]
    pub fn offset(&self) -> u64 {
        self.0.offset
    }

    #[inline]
    pub fn set_offset(&mut self, offset: u64) {
        self.0.offset = offset;
    }

    /// Returns the buffer's media-specific end offset.
    #[doc(alias = "get_offset_end")]
    #[doc(alias = "GST_BUFFER_OFFSET_END")]
    #[inline]
    pub fn offset_end(&self) -> u64 {
        self.0.offset_end
    }

    #[inline]
    pub fn set_offset_end(&mut self, offset_end: u64) {
        self.0.offset_end = offset_end;
    }

    /// Returns the presentation timestamp, if set.
    #[doc(alias = "get_pts")]
    #[doc(alias = "GST_BUFFER_PTS")]
    #[inline]
    pub fn pts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.pts) }
    }

    #[inline]
    pub fn set_pts(&mut self, pts: impl Into<Option<ClockTime>>) {
        self.0.pts = pts.into().into_glib();
    }

    /// Returns the decoding timestamp, if set.
    #[doc(alias = "get_dts")]
    #[doc(alias = "GST_BUFFER_DTS")]
    #[inline]
    pub fn dts(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.dts) }
    }

    #[inline]
    pub fn set_dts(&mut self, dts: impl Into<Option<ClockTime>>) {
        self.0.dts = dts.into().into_glib();
    }

    /// Returns the DTS if set, otherwise the PTS.
    #[doc(alias = "get_dts_or_pts")]
    #[doc(alias = "GST_BUFFER_DTS_OR_PTS")]
    #[inline]
    pub fn dts_or_pts(&self) -> Option<ClockTime> {
        let val = self.dts();
        if val.is_none() {
            self.pts()
        } else {
            val
        }
    }

    /// Returns the buffer's duration, if set.
    #[doc(alias = "get_duration")]
    #[doc(alias = "GST_BUFFER_DURATION")]
    #[inline]
    pub fn duration(&self) -> Option<ClockTime> {
        unsafe { from_glib(self.0.duration) }
    }

    #[inline]
    pub fn set_duration(&mut self, duration: impl Into<Option<ClockTime>>) {
        self.0.duration = duration.into().into_glib();
    }

    /// Returns the buffer's flags.
    #[doc(alias = "get_flags")]
    #[doc(alias = "GST_BUFFER_FLAGS")]
    #[inline]
    pub fn flags(&self) -> BufferFlags {
        BufferFlags::from_bits_truncate(self.0.mini_object.flags)
    }

    /// Sets (ORs in) the given flags.
    #[doc(alias = "GST_BUFFER_FLAG_SET")]
    #[inline]
    pub fn set_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags |= flags.bits();
    }

    /// Clears the given flags.
    #[doc(alias = "GST_BUFFER_FLAG_UNSET")]
    #[inline]
    pub fn unset_flags(&mut self, flags: BufferFlags) {
        self.0.mini_object.flags &= !flags.bits();
    }

    /// Returns the first meta of type `T` attached to the buffer, if any.
    #[doc(alias = "get_meta")]
    #[doc(alias = "gst_buffer_get_meta")]
    #[inline]
    pub fn meta<T: MetaAPI>(&self) -> Option<MetaRef<'_, T>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_ptr(self, meta as *const <T as MetaAPI>::GstType))
            }
        }
    }

    /// Returns the first meta of type `T` attached to the buffer mutably,
    /// if any.
    #[doc(alias = "get_meta_mut")]
    #[inline]
    pub fn meta_mut<T: MetaAPI>(&mut self) -> Option<MetaRefMut<'_, T, crate::meta::Standalone>> {
        unsafe {
            let meta = ffi::gst_buffer_get_meta(self.as_mut_ptr(), T::meta_api().into_glib());
            if meta.is_null() {
                None
            } else {
                Some(T::from_mut_ptr(self, meta as *mut <T as MetaAPI>::GstType))
            }
        }
    }

    /// Iterates over all metas of type `T` attached to the buffer.
    pub fn iter_meta<T: MetaAPI>(&self) -> MetaIter<'_, T> {
        MetaIter::new(self)
    }

    /// Iterates mutably over all metas of type `T` attached to the buffer.
    pub fn iter_meta_mut<T: MetaAPI>(&mut self) -> MetaIterMut<'_, T> {
        MetaIterMut::new(self)
    }

    /// Calls `func` for every meta on the buffer. Returning
    /// `ControlFlow::Break` stops the iteration; the return value is `true`
    /// if iteration was not interrupted.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(&self, func: F) -> bool {
        // C callback shim: recovers the Rust closure from user_data and
        // translates its ControlFlow result into a gboolean "continue" flag.
        unsafe extern "C" fn trampoline<F: FnMut(MetaRef<Meta>) -> ControlFlow<(), ()>>(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_ptr(BufferRef::from_ptr(buffer), *meta));

            matches!(res, ControlFlow::Continue(_)).into_glib()
        }

        unsafe {
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }

    /// Calls `func` for every meta on the buffer, allowing each one to be
    /// kept or removed; `ControlFlow::Break` stops the iteration.
    #[doc(alias = "gst_buffer_foreach_meta")]
    pub fn foreach_meta_mut<
        F: FnMut(
            MetaRefMut<Meta, crate::meta::Iterated>,
        ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
    >(
        &mut self,
        func: F,
    ) -> bool {
        unsafe extern "C" fn trampoline<
            F: FnMut(
                MetaRefMut<Meta, crate::meta::Iterated>,
            ) -> ControlFlow<BufferMetaForeachAction, BufferMetaForeachAction>,
        >(
            buffer: *mut ffi::GstBuffer,
            meta: *mut *mut ffi::GstMeta,
            user_data: glib::ffi::gpointer,
        ) -> glib::ffi::gboolean {
            let func = user_data as *mut F;
            let res = (*func)(Meta::from_mut_ptr(BufferRef::from_mut_ptr(buffer), *meta));

            let (cont, action) = match res {
                ControlFlow::Continue(action) => (true, action),
                ControlFlow::Break(action) => (false, action),
            };

            // Nulling out *meta tells gst_buffer_foreach_meta() to remove
            // the current meta from the buffer.
            if action == BufferMetaForeachAction::Remove {
                *meta = ptr::null_mut();
            }

            cont.into_glib()
        }

        unsafe {
            let mut func = func;
            let func_ptr: &mut F = &mut func;

            from_glib(ffi::gst_buffer_foreach_meta(
                mut_override(self.as_ptr()),
                Some(trampoline::<F>),
                func_ptr as *mut _ as *mut _,
            ))
        }
    }

    /// Appends `mem` to the end of the buffer's memory list.
    #[doc(alias = "gst_buffer_append_memory")]
    pub fn append_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_append_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Finds which memory blocks span the given byte range.
    ///
    /// Returns the range of memory indices and the byte offset (`skip`) into
    /// the first of those blocks, or `None` if the range cannot be located.
    #[doc(alias = "gst_buffer_find_memory")]
    pub fn find_memory(&self, range: impl RangeBounds<usize>) -> Option<(Range<usize>, usize)> {
        let (offset, size) = self.byte_range_into_offset_len(range).ok()?;

        unsafe {
            let mut idx = mem::MaybeUninit::uninit();
            let mut length = mem::MaybeUninit::uninit();
            let mut skip = mem::MaybeUninit::uninit();

            let res = from_glib(ffi::gst_buffer_find_memory(
                self.as_mut_ptr(),
                offset,
                size,
                idx.as_mut_ptr(),
                length.as_mut_ptr(),
                skip.as_mut_ptr(),
            ));

            if res {
                let idx = idx.assume_init() as usize;
                let length = length.assume_init() as usize;
                let skip = skip.assume_init();
                Some((idx..(idx + length), skip))
            } else {
                None
            }
        }
    }

    /// Returns all of the buffer's memory merged into one `Memory`, if any.
    #[doc(alias = "get_all_memory")]
    #[doc(alias = "gst_buffer_get_all_memory")]
    pub fn all_memory(&self) -> Option<Memory> {
        unsafe { from_glib_full(ffi::gst_buffer_get_all_memory(self.as_mut_ptr())) }
    }

    /// Returns the global maximum number of memory blocks a buffer can hold.
    #[doc(alias = "get_max_memory")]
    #[doc(alias = "gst_buffer_get_max_memory")]
    pub fn max_memory() -> usize {
        unsafe { ffi::gst_buffer_get_max_memory() as usize }
    }

    /// Returns the memory block at `idx`, or `None` if out of range.
    #[doc(alias = "get_memory")]
    #[doc(alias = "gst_buffer_get_memory")]
    pub fn memory(&self, idx: usize) -> Option<Memory> {
        if idx >= self.n_memory() {
            return None;
        }
        unsafe {
            let res = ffi::gst_buffer_get_memory(self.as_mut_ptr(), idx as u32);
            Some(from_glib_full(res))
        }
    }

    /// Returns the given range of memory blocks merged into one `Memory`.
    #[doc(alias = "get_memory_range")]
    #[doc(alias = "gst_buffer_get_memory_range")]
    pub fn memory_range(&self, range: impl RangeBounds<usize>) -> Option<Memory> {
        let (idx, len) = self.memory_range_into_idx_len(range).ok()?;

        unsafe {
            let res = ffi::gst_buffer_get_memory_range(self.as_mut_ptr(), idx, len);
            from_glib_full(res)
        }
    }

    /// Inserts `mem` at position `idx`; `None` appends at the end.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is greater than `n_memory()`.
    #[doc(alias = "gst_buffer_insert_memory")]
    pub fn insert_memory(&mut self, idx: impl Into<Option<usize>>, mem: Memory) {
        let n_memory = self.n_memory();
        let idx = idx.into();
        let idx = idx.unwrap_or(n_memory);
        assert!(idx <= self.n_memory());
        unsafe { ffi::gst_buffer_insert_memory(self.as_mut_ptr(), idx as i32, mem.into_glib_ptr()) }
    }

    /// Returns `true` if every memory block in the buffer is writable.
    #[doc(alias = "gst_buffer_is_all_memory_writable")]
    pub fn is_all_memory_writable(&self) -> bool {
        unsafe { from_glib(ffi::gst_buffer_is_all_memory_writable(self.as_mut_ptr())) }
    }

    /// Returns `true` if every memory block in the given index range is
    /// writable; invalid ranges yield `false`.
    #[doc(alias = "gst_buffer_is_memory_range_writable")]
    pub fn is_memory_range_writable(&self, range: impl RangeBounds<usize>) -> bool {
        let Some((idx, len)) = self.memory_range_into_idx_len(range).ok() else {
            return false;
        };

        unsafe {
            from_glib(ffi::gst_buffer_is_memory_range_writable(
                self.as_mut_ptr(),
                idx,
                len,
            ))
        }
    }

    /// Returns the number of memory blocks in the buffer.
    #[doc(alias = "gst_buffer_n_memory")]
    pub fn n_memory(&self) -> usize {
        unsafe { ffi::gst_buffer_n_memory(self.as_ptr() as *mut _) as usize }
    }

    /// Borrows the memory block at `idx` without copying.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory(&self, idx: usize) -> &MemoryRef {
        assert!(idx < self.n_memory());
        unsafe { MemoryRef::from_ptr(ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32)) }
    }

    /// Mutably borrows the memory block at `idx`; fails if that memory
    /// is not writable.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_peek_memory")]
    pub fn peek_memory_mut(&mut self, idx: usize) -> Result<&mut MemoryRef, glib::BoolError> {
        assert!(idx < self.n_memory());
        unsafe {
            let mem = ffi::gst_buffer_peek_memory(self.as_mut_ptr(), idx as u32);
            if ffi::gst_mini_object_is_writable(mem as *mut _) == glib::ffi::GFALSE {
                Err(glib::bool_error!("Memory not writable"))
            } else {
                Ok(MemoryRef::from_mut_ptr(mem))
            }
        }
    }

    /// Prepends `mem` to the front of the buffer's memory list.
    #[doc(alias = "gst_buffer_prepend_memory")]
    pub fn prepend_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_prepend_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Removes all memory from the buffer.
    #[doc(alias = "gst_buffer_remove_all_memory")]
    pub fn remove_all_memory(&mut self) {
        unsafe { ffi::gst_buffer_remove_all_memory(self.as_mut_ptr()) }
    }

    /// Removes the memory block at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_remove_memory")]
    pub fn remove_memory(&mut self, idx: usize) {
        assert!(idx < self.n_memory());
        unsafe { ffi::gst_buffer_remove_memory(self.as_mut_ptr(), idx as u32) }
    }

    /// Removes the given range of memory blocks.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid.
    #[doc(alias = "gst_buffer_remove_memory_range")]
    pub fn remove_memory_range(&mut self, range: impl RangeBounds<usize>) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe { ffi::gst_buffer_remove_memory_range(self.as_mut_ptr(), idx, len) }
    }

    /// Replaces all of the buffer's memory with `mem`.
    #[doc(alias = "gst_buffer_replace_all_memory")]
    pub fn replace_all_memory(&mut self, mem: Memory) {
        unsafe { ffi::gst_buffer_replace_all_memory(self.as_mut_ptr(), mem.into_glib_ptr()) }
    }

    /// Replaces the memory block at `idx` with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of range.
    #[doc(alias = "gst_buffer_replace_memory")]
    pub fn replace_memory(&mut self, idx: usize, mem: Memory) {
        assert!(idx < self.n_memory());
        unsafe {
            ffi::gst_buffer_replace_memory(self.as_mut_ptr(), idx as u32, mem.into_glib_ptr())
        }
    }

    /// Replaces the given range of memory blocks with `mem`.
    ///
    /// # Panics
    ///
    /// Panics if the range is invalid.
    #[doc(alias = "gst_buffer_replace_memory_range")]
    pub fn replace_memory_range(&mut self, range: impl RangeBounds<usize>, mem: Memory) {
        let (idx, len) = self
            .memory_range_into_idx_len(range)
            .expect("Invalid memory range");

        unsafe {
            ffi::gst_buffer_replace_memory_range(self.as_mut_ptr(), idx, len, mem.into_glib_ptr())
        }
    }

    /// Iterates over the buffer's memory blocks by reference.
    pub fn iter_memories(&self) -> Iter<'_> {
        Iter::new(self)
    }

    /// Iterates mutably over the buffer's memory blocks; fails unless every
    /// block is writable.
    pub fn iter_memories_mut(&mut self) -> Result<IterMut<'_>, glib::BoolError> {
        if !self.is_all_memory_writable() {
            Err(glib::bool_error!("Not all memory are writable"))
        } else {
            Ok(IterMut::new(self))
        }
    }

    /// Iterates over the buffer's memory blocks, yielding owned references.
    pub fn iter_memories_owned(&self) -> IterOwned<'_> {
        IterOwned::new(self)
    }

    /// Returns a read cursor over the buffer's bytes.
    pub fn as_cursor_readable(&self) -> BufferRefCursor<&BufferRef> {
        BufferRefCursor::new_readable(self)
    }

    /// Returns a write cursor over the buffer's bytes; fails if the memory
    /// is not writable.
    pub fn as_cursor_writable(
        &mut self,
    ) -> Result<BufferRefCursor<&mut BufferRef>, glib::BoolError> {
        BufferRefCursor::new_writable(self)
    }

    /// Returns a lazy hex-dump helper covering the whole buffer.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump(&self) -> Dump<'_> {
        Dump {
            buffer: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
        }
    }

    /// Returns a lazy hex-dump helper covering the given byte range.
    #[doc(alias = "gst_util_dump_buffer")]
    pub fn dump_range(&self, range: impl RangeBounds<usize>) -> Dump<'_> {
        Dump {
            buffer: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
        }
    }
}
966
// Generates a meta iterator type over a buffer: `$typ` is the stored buffer
// reference (shared or mutable), `$mtyp` the yielded meta wrapper,
// `$prepare_buffer` re-derives a buffer reference from the raw pointer for
// each item, and `$from_ptr` wraps the raw meta pointer.
macro_rules! define_meta_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $prepare_buffer:expr, $from_ptr:expr) => {
    #[must_use = "iterators are lazy and do nothing unless consumed"]
    pub struct $name<'a, T: MetaAPI + 'a> {
        buffer: $typ,
        // Opaque iteration state owned by gst_buffer_iterate_meta().
        state: glib::ffi::gpointer,
        meta_api: glib::Type,
        items: PhantomData<$mtyp>,
    }

    // SAFETY(review): mirrors the thread-safety of the underlying buffer
    // reference; the iteration state is only touched through &mut self.
    unsafe impl<'a, T: MetaAPI> Send for $name<'a, T> { }
    unsafe impl<'a, T: MetaAPI> Sync for $name<'a, T> { }

    impl<'a, T: MetaAPI> fmt::Debug for $name<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.debug_struct(stringify!($name))
                .field("buffer", &self.buffer)
                .field("state", &self.state)
                .field("meta_api", &self.meta_api)
                .field("items", &self.items)
                .finish()
        }
    }

    impl<'a, T: MetaAPI> $name<'a, T> {
        fn new(buffer: $typ) -> $name<'a, T> {
            skip_assert_initialized!();

            $name {
                buffer,
                state: ptr::null_mut(),
                meta_api: T::meta_api(),
                items: PhantomData,
            }
        }
    }

    #[allow(clippy::redundant_closure_call)]
    impl<'a, T: MetaAPI> Iterator for $name<'a, T> {
        type Item = $mtyp;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                unsafe {
                    let meta = ffi::gst_buffer_iterate_meta(self.buffer.as_mut_ptr(), &mut self.state);

                    if meta.is_null() {
                        return None;
                    } else if self.meta_api == glib::Type::INVALID || glib::Type::from_glib((*(*meta).info).api) == self.meta_api {
                        // Either iterating all metas (INVALID) or this meta
                        // matches the requested API type; otherwise skip it.
                        let buffer = $prepare_buffer(self.buffer.as_mut_ptr());
                        let item = $from_ptr(buffer, meta);
                        return Some(item);
                    }
                }
            }
        }
    }

    impl<'a, T: MetaAPI> std::iter::FusedIterator for $name<'a, T> { }
    }
);
1029
// Shared-reference meta iterator yielding `MetaRef`.
define_meta_iter!(
    MetaIter,
    &'a BufferRef,
    MetaRef<'a, T>,
    |buffer: *const ffi::GstBuffer| BufferRef::from_ptr(buffer),
    |buffer, meta| T::from_ptr(buffer, meta as *const <T as MetaAPI>::GstType)
);
// Mutable meta iterator yielding `MetaRefMut` in the `Iterated` mode.
define_meta_iter!(
    MetaIterMut,
    &'a mut BufferRef,
    MetaRefMut<'a, T, crate::meta::Iterated>,
    |buffer: *mut ffi::GstBuffer| BufferRef::from_mut_ptr(buffer),
    |buffer: &'a mut BufferRef, meta| T::from_mut_ptr(buffer, meta as *mut <T as MetaAPI>::GstType)
);
1044
// Thin wrapper around the shared fixed-size iterator generator: iterates over
// a buffer's memory blocks, with n_memory() as the length.
macro_rules! define_iter(
    ($name:ident, $typ:ty, $mtyp:ty, $get_item:expr) => {
    crate::utils::define_fixed_size_iter!(
        $name, $typ, $mtyp,
        |buffer: &BufferRef| buffer.n_memory() as usize,
        $get_item
    );
    }
);
1054
// Iterator over `&MemoryRef` (borrowed memory blocks).
define_iter!(
    Iter,
    &'a BufferRef,
    &'a MemoryRef,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_ptr(ptr as *const ffi::GstMemory)
    }
);

// Iterator over `&mut MemoryRef`; constructed only via iter_memories_mut(),
// which checks that all memory is writable first.
define_iter!(
    IterMut,
    &'a mut BufferRef,
    &'a mut MemoryRef,
    |buffer: &mut BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_peek_memory(buffer.as_mut_ptr(), idx as u32);
        MemoryRef::from_mut_ptr(ptr)
    }
);
1074
1075impl<'a> IntoIterator for &'a BufferRef {
1076 type IntoIter = Iter<'a>;
1077 type Item = &'a MemoryRef;
1078
1079 fn into_iter(self) -> Self::IntoIter {
1080 self.iter_memories()
1081 }
1082}
1083
1084impl From<Memory> for Buffer {
1085 fn from(value: Memory) -> Self {
1086 skip_assert_initialized!();
1087
1088 let mut buffer = Buffer::new();
1089 {
1090 let buffer = buffer.get_mut().unwrap();
1091 buffer.append_memory(value);
1092 }
1093 buffer
1094 }
1095}
1096
1097impl<const N: usize> From<[Memory; N]> for Buffer {
1098 fn from(value: [Memory; N]) -> Self {
1099 skip_assert_initialized!();
1100
1101 let mut buffer = Buffer::new();
1102 {
1103 let buffer = buffer.get_mut().unwrap();
1104 value.into_iter().for_each(|b| buffer.append_memory(b));
1105 }
1106 buffer
1107 }
1108}
1109
1110impl std::iter::FromIterator<Memory> for Buffer {
1111 fn from_iter<T: IntoIterator<Item = Memory>>(iter: T) -> Self {
1112 skip_assert_initialized!();
1113 let iter = iter.into_iter();
1114
1115 let mut buffer = Buffer::new();
1116
1117 {
1118 let buffer = buffer.get_mut().unwrap();
1119 iter.for_each(|m| buffer.append_memory(m));
1120 }
1121
1122 buffer
1123 }
1124}
1125
1126impl std::iter::Extend<Memory> for BufferRef {
1127 fn extend<T: IntoIterator<Item = Memory>>(&mut self, iter: T) {
1128 iter.into_iter().for_each(|m| self.append_memory(m));
1129 }
1130}
1131
// Iterator over owned `Memory` values (each item takes a new reference).
define_iter!(
    IterOwned,
    &'a BufferRef,
    Memory,
    |buffer: &BufferRef, idx| unsafe {
        let ptr = ffi::gst_buffer_get_memory(buffer.as_mut_ptr(), idx as u32);
        from_glib_full(ptr)
    }
);
1141
impl fmt::Debug for Buffer {
    // Delegates to the BufferRef implementation via deref.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        BufferRef::fmt(self, f)
    }
}
1147
impl PartialEq for Buffer {
    // Content equality, delegated to BufferRef (byte-by-byte comparison).
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(self, other)
    }
}
1153
impl Eq for Buffer {}
1155
// Allow comparing an owned Buffer directly against a borrowed BufferRef.
impl PartialEq<BufferRef> for Buffer {
    fn eq(&self, other: &BufferRef) -> bool {
        BufferRef::eq(self, other)
    }
}
// Symmetric counterpart of the Buffer == BufferRef comparison above.
impl PartialEq<Buffer> for BufferRef {
    fn eq(&self, other: &Buffer) -> bool {
        BufferRef::eq(other, self)
    }
}
1166
impl fmt::Debug for BufferRef {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::cell::RefCell;

        use crate::utils::Displayable;

        // Adapter to format an iterator as a debug list; RefCell because
        // Debug::fmt only gives us &self but iteration needs &mut.
        struct DebugIter<I>(RefCell<I>);
        impl<I: Iterator> fmt::Debug for DebugIter<I>
        where
            I::Item: fmt::Debug,
        {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(&mut *self.0.borrow_mut()).finish()
            }
        }

        f.debug_struct("Buffer")
            .field("ptr", &self.as_ptr())
            .field("pts", &self.pts().display())
            .field("dts", &self.dts().display())
            .field("duration", &self.duration().display())
            .field("size", &self.size())
            .field("offset", &self.offset())
            .field("offset_end", &self.offset_end())
            .field("flags", &self.flags())
            .field(
                "metas",
                &DebugIter(RefCell::new(
                    self.iter_meta::<crate::Meta>().map(|m| m.api()),
                )),
            )
            .finish()
    }
}
1201
1202impl PartialEq for BufferRef {
1203 fn eq(&self, other: &BufferRef) -> bool {
1204 if self.size() != other.size() {
1205 return false;
1206 }
1207
1208 let self_map = self.map_readable();
1209 let other_map = other.map_readable();
1210
1211 match (self_map, other_map) {
1212 (Ok(self_map), Ok(other_map)) => self_map.as_slice().eq(other_map.as_slice()),
1213 _ => false,
1214 }
1215 }
1216}
1217
impl Eq for BufferRef {}
1219
impl<T> BufferMap<'_, T> {
    /// Returns the size in bytes of the mapped region.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    /// Returns the buffer this map was created from.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer
    }

    /// Returns the mapped bytes as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        // An empty mapping may have a null data pointer, which
        // slice::from_raw_parts does not allow.
        if self.map_info.size == 0 {
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }
}
1241
impl BufferMap<'_, Writable> {
    /// Returns the mapped bytes as a mutable slice (writable mappings only).
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        // Guard against a null data pointer for zero-sized mappings.
        if self.map_info.size == 0 {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
    }
}
1251
impl<T> AsRef<[u8]> for BufferMap<'_, T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1258
// Mutable slice access; only available for writable maps.
impl AsMut<[u8]> for BufferMap<'_, Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1265
// Deref to `[u8]` so slice methods can be called directly on the map.
impl<T> ops::Deref for BufferMap<'_, T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1274
// Mutable deref; only available for writable maps.
impl ops::DerefMut for BufferMap<'_, Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1281
// Debug output delegates to the underlying buffer's representation.
impl<T> fmt::Debug for BufferMap<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("BufferMap").field(&self.buffer()).finish()
    }
}
1287
1288impl<'a, T> PartialEq for BufferMap<'a, T> {
1289 fn eq(&self, other: &BufferMap<'a, T>) -> bool {
1290 self.as_slice().eq(other.as_slice())
1291 }
1292}
1293
// Byte-wise equality on mapped data is reflexive, so `Eq` holds.
impl<T> Eq for BufferMap<'_, T> {}
1295
impl<T> Drop for BufferMap<'_, T> {
    // Unmap on drop so the mapping is always released exactly once.
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled in by a successful map of this
            // buffer (presumably in the `map_*` constructors outside this
            // view) and has not been unmapped before.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1304
// SAFETY: the map only holds a buffer reference plus the plain-data
// `GstMapInfo`. NOTE(review): relies on GStreamer buffers being safe to
// reference and read from multiple threads — confirm against upstream docs.
unsafe impl<T> Send for BufferMap<'_, T> {}
unsafe impl<T> Sync for BufferMap<'_, T> {}
1307
impl<T> MappedBuffer<T> {
    // Byte-slice view of the mapped data.
    #[inline]
    pub fn as_slice(&self) -> &[u8] {
        if self.map_info.size == 0 {
            // An empty mapping may carry a null data pointer, which
            // `slice::from_raw_parts` does not accept.
            return &[];
        }
        unsafe { slice::from_raw_parts(self.map_info.data, self.map_info.size) }
    }

    // Number of bytes covered by this mapping.
    #[doc(alias = "get_size")]
    #[inline]
    pub fn size(&self) -> usize {
        self.map_info.size
    }

    // Borrow the underlying buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer(&self) -> &BufferRef {
        self.buffer.as_ref()
    }

    // Unmap and return the owned buffer.
    #[inline]
    pub fn into_buffer(self) -> Buffer {
        // Prevent `Drop::drop` from running: it would unmap a second time
        // and would also drop the buffer we are about to return.
        let mut s = mem::ManuallyDrop::new(self);
        // Move the buffer out of the shell; `s` is never used as a whole
        // value again, only its `map_info` field below.
        let buffer = unsafe { ptr::read(&s.buffer) };
        unsafe {
            // Release the mapping manually, mirroring what `Drop` does.
            ffi::gst_buffer_unmap(buffer.as_mut_ptr(), &mut s.map_info);
        }

        buffer
    }
}
1340
impl MappedBuffer<Readable> {
    // Get an additional owned reference to the mapped buffer. Only offered
    // for readable maps — presumably because a writable map must remain the
    // sole reference to the buffer.
    #[doc(alias = "get_buffer")]
    #[inline]
    pub fn buffer_owned(&self) -> Buffer {
        self.buffer.clone()
    }
}
1348
1349impl MappedBuffer<Writable> {
1350 #[inline]
1351 pub fn as_mut_slice(&mut self) -> &mut [u8] {
1352 if self.map_info.size == 0 {
1353 return &mut [];
1354 }
1355 unsafe { slice::from_raw_parts_mut(self.map_info.data, self.map_info.size) }
1356 }
1357}
1358
// Allow passing a mapped buffer anywhere a byte slice is accepted.
impl<T> AsRef<[u8]> for MappedBuffer<T> {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
1365
// Mutable slice access; only available for writable mappings.
impl AsMut<[u8]> for MappedBuffer<Writable> {
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1372
// Deref to `[u8]` so slice methods can be called directly.
impl<T> ops::Deref for MappedBuffer<T> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.as_slice()
    }
}
1381
// Mutable deref; only available for writable mappings.
impl ops::DerefMut for MappedBuffer<Writable> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
1388
impl<T> Drop for MappedBuffer<T> {
    // Unmap on drop so the mapping is always released exactly once
    // (`into_buffer` bypasses this via `ManuallyDrop`).
    #[inline]
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `map_info` was filled in by a successful map of this
            // buffer and has not been unmapped before.
            ffi::gst_buffer_unmap(self.buffer.as_mut_ptr(), &mut self.map_info);
        }
    }
}
1397
// Debug output delegates to the underlying buffer's representation.
impl<T> fmt::Debug for MappedBuffer<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("MappedBuffer").field(&self.buffer()).finish()
    }
}
1403
1404impl<T> PartialEq for MappedBuffer<T> {
1405 fn eq(&self, other: &MappedBuffer<T>) -> bool {
1406 self.as_slice().eq(other.as_slice())
1407 }
1408}
1409
// Byte-wise equality on mapped data is reflexive, so `Eq` holds.
impl<T> Eq for MappedBuffer<T> {}
1411
// SAFETY: the mapped buffer owns its `Buffer` plus the plain-data
// `GstMapInfo`. NOTE(review): relies on GStreamer buffers being safe to
// access from multiple threads — confirm against upstream docs.
unsafe impl<T> Send for MappedBuffer<T> {}
unsafe impl<T> Sync for MappedBuffer<T> {}
1414
// Preset `BufferCopyFlags` combinations mirroring the C-level constants;
// convenient for `copy_region`-style calls (see the tests below).
#[doc(alias = "GST_BUFFER_COPY_METADATA")]
pub const BUFFER_COPY_METADATA: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_METADATA);
#[doc(alias = "GST_BUFFER_COPY_ALL")]
pub const BUFFER_COPY_ALL: crate::BufferCopyFlags =
    crate::BufferCopyFlags::from_bits_truncate(ffi::GST_BUFFER_COPY_ALL);
1421
// Hex-dump helper for a buffer's contents; rendering happens through its
// `Display` (hex only) and `Debug` (offset + hex + ASCII) impls.
pub struct Dump<'a> {
    buffer: &'a BufferRef,
    // Requested byte range, stored as raw bounds and validated at format time.
    start: Bound<usize>,
    end: Bound<usize>,
}
1427
// Iterator over a buffer's bytes in chunks of at most 16, mapping one
// memory at a time and crossing memory boundaries transparently.
#[must_use = "iterators are lazy and do nothing unless consumed"]
struct BufferChunked16Iter<'a> {
    buffer: &'a BufferRef,
    // Index of the memory currently being read.
    mem_idx: usize,
    // Total number of memories (exclusive upper bound for `mem_idx`).
    mem_len: usize,
    // Read mapping of the current memory, created lazily on first use.
    map: Option<crate::memory::MemoryMap<'a, crate::memory::Readable>>,
    // Read offset into the current mapping.
    map_offset: usize,
    // Remaining number of bytes to yield in total.
    len: usize,
}
1437
impl Iterator for BufferChunked16Iter<'_> {
    // A chunk buffer plus the number of valid bytes in it; only the final
    // chunk can be shorter than 16.
    type Item = ([u8; 16], usize);

    fn next(&mut self) -> Option<Self::Item> {
        if self.mem_idx == self.mem_len || self.len == 0 {
            return None;
        }

        let mut item = [0u8; 16];
        let mut data = item.as_mut_slice();

        // Fill `item` from as many memories as needed, stopping at 16 bytes,
        // the last memory, or the end of the requested length.
        while !data.is_empty() && self.mem_idx < self.mem_len && self.len > 0 {
            if self.map.is_none() {
                let mem = self.buffer.peek_memory(self.mem_idx);
                self.map = Some(mem.map_readable().expect("failed to map memory"));
            }

            let map = self.map.as_ref().unwrap();
            debug_assert!(self.map_offset < map.len());
            // Copy the longest run available from the current mapping.
            let copy = cmp::min(cmp::min(map.len() - self.map_offset, data.len()), self.len);
            data[..copy].copy_from_slice(&map[self.map_offset..][..copy]);
            self.map_offset += copy;
            self.len -= copy;
            data = &mut data[copy..];

            // Current memory exhausted: drop its mapping and advance.
            if self.map_offset == map.len() {
                self.map = None;
                self.map_offset = 0;
                self.mem_idx += 1;
            }
        }

        // Number of valid bytes actually written into `item`.
        let copied = 16 - data.len();
        Some((item, copied))
    }
}
1475
1476impl Dump<'_> {
1477 fn fmt(&self, f: &mut fmt::Formatter, debug: bool) -> fmt::Result {
1478 let n_memory = self.buffer.n_memory();
1479 if n_memory == 0 {
1480 write!(f, "<empty>")?;
1481 return Ok(());
1482 }
1483
1484 use std::fmt::Write;
1485
1486 let len = self.buffer.size();
1487
1488 let mut start_idx = match self.start {
1491 Bound::Included(idx) if idx >= len => {
1492 write!(f, "<start out of range>")?;
1493 return Ok(());
1494 }
1495 Bound::Excluded(idx) if idx.checked_add(1).is_none_or(|idx| idx >= len) => {
1496 write!(f, "<start out of range>")?;
1497 return Ok(());
1498 }
1499 Bound::Included(idx) => idx,
1500 Bound::Excluded(idx) => idx + 1,
1501 Bound::Unbounded => 0,
1502 };
1503
1504 let end_idx = match self.end {
1505 Bound::Included(idx) if idx.checked_add(1).is_none_or(|idx| idx > len) => {
1506 write!(f, "<end out of range>")?;
1507 return Ok(());
1508 }
1509 Bound::Excluded(idx) if idx > len => {
1510 write!(f, "<end out of range>")?;
1511 return Ok(());
1512 }
1513 Bound::Included(idx) => idx + 1,
1514 Bound::Excluded(idx) => idx,
1515 Bound::Unbounded => len,
1516 };
1517
1518 if start_idx >= end_idx {
1519 write!(f, "<empty range>")?;
1520 return Ok(());
1521 }
1522
1523 let (memory_range, skip) = self
1525 .buffer
1526 .find_memory(start_idx..)
1527 .expect("can't find memory");
1528
1529 let chunks = BufferChunked16Iter {
1530 buffer: self.buffer,
1531 mem_idx: memory_range.start,
1532 mem_len: n_memory,
1533 map: None,
1534 map_offset: skip,
1535 len: end_idx - start_idx,
1536 };
1537
1538 if debug {
1539 for (line, line_len) in chunks {
1540 let line = &line[..line_len];
1541
1542 match end_idx {
1543 0x00_00..=0xff_ff => write!(f, "{start_idx:04x}: ")?,
1544 0x01_00_00..=0xff_ff_ff => write!(f, "{start_idx:06x}: ")?,
1545 0x01_00_00_00..=0xff_ff_ff_ff => write!(f, "{start_idx:08x}: ")?,
1546 _ => write!(f, "{start_idx:016x}: ")?,
1547 }
1548
1549 for (i, v) in line.iter().enumerate() {
1550 if i > 0 {
1551 write!(f, " {v:02x}")?;
1552 } else {
1553 write!(f, "{v:02x}")?;
1554 }
1555 }
1556
1557 for _ in line.len()..16 {
1558 write!(f, " ")?;
1559 }
1560 write!(f, " ")?;
1561
1562 for v in line {
1563 if v.is_ascii() && !v.is_ascii_control() {
1564 f.write_char((*v).into())?;
1565 } else {
1566 f.write_char('.')?;
1567 }
1568 }
1569
1570 start_idx = start_idx.saturating_add(16);
1571 if start_idx < end_idx {
1572 writeln!(f)?;
1573 }
1574 }
1575
1576 Ok(())
1577 } else {
1578 for (line, line_len) in chunks {
1579 let line = &line[..line_len];
1580
1581 for (i, v) in line.iter().enumerate() {
1582 if i > 0 {
1583 write!(f, " {v:02x}")?;
1584 } else {
1585 write!(f, "{v:02x}")?;
1586 }
1587 }
1588
1589 start_idx = start_idx.saturating_add(16);
1590 if start_idx < end_idx {
1591 writeln!(f)?;
1592 }
1593 }
1594
1595 Ok(())
1596 }
1597 }
1598}
1599
// Plain rendering: hex bytes only.
impl fmt::Display for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.fmt(f, false)
    }
}
1605
// Verbose rendering: offset column, hex bytes and ASCII column.
impl fmt::Debug for Dump<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.fmt(f, true)
    }
}
1611
1612#[cfg(test)]
1613mod tests {
1614 use super::*;
1615
    // Values written through the setters must be observable via the getters.
    #[test]
    fn test_fields() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();

        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(ClockTime::NSECOND);
            buffer.set_dts(2 * ClockTime::NSECOND);
            buffer.set_offset(3);
            buffer.set_offset_end(4);
            buffer.set_duration(Some(5 * ClockTime::NSECOND));
        }
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer.dts(), Some(2 * ClockTime::NSECOND));
        assert_eq!(buffer.offset(), 3);
        assert_eq!(buffer.offset_end(), 4);
        assert_eq!(buffer.duration(), Some(5 * ClockTime::NSECOND));
    }

    // Copy-on-write semantics: a uniquely-owned buffer is writable, a shared
    // one is not, and `make_mut` produces an independent copy.
    #[test]
    fn test_writability() {
        crate::init().unwrap();

        let mut buffer = Buffer::from_slice(vec![1, 2, 3, 4]);
        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
        }
        // Single reference: mutable access is available.
        assert_ne!(buffer.get_mut(), None);
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.set_pts(Some(ClockTime::NSECOND));
        }

        // After cloning there are two references, so `get_mut` fails.
        let mut buffer2 = buffer.clone();
        assert_eq!(buffer.get_mut(), None);

        assert_eq!(buffer2.as_ptr(), buffer.as_ptr());

        {
            // `make_mut` copies the shared buffer: new pointer, independent state.
            let buffer2 = buffer2.make_mut();
            assert_ne!(buffer2.as_ptr(), buffer.as_ptr());

            buffer2.set_pts(Some(2 * ClockTime::NSECOND));

            let mut data = buffer2.map_writable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());
            data.as_mut_slice()[0] = 0;
        }

        // The original buffer is unaffected by changes to the copy.
        assert_eq!(buffer.pts(), Some(ClockTime::NSECOND));
        assert_eq!(buffer2.pts(), Some(2 * ClockTime::NSECOND));

        {
            let data = buffer.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![1, 2, 3, 4].as_slice());

            let data = buffer2.map_readable().unwrap();
            assert_eq!(data.as_slice(), vec![0, 2, 3, 4].as_slice());
        }
    }
1679
    // Exercise all memory-access APIs (indexed, peeked, and the three
    // iterator flavors) on a buffer composed of five memories (4x5B + 1x10B).
    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_memories() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 5]));
            buffer.append_memory(crate::Memory::from_mut_slice(vec![0; 10]));
        }

        assert!(buffer.is_all_memory_writable());
        assert_eq!(buffer.n_memory(), 5);
        assert_eq!(buffer.size(), 30);

        // Indexed access: owned copy, peeked reference, and mutable peek.
        for i in 0..5 {
            {
                let mem = buffer.memory(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let mem = buffer.peek_memory(i);
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                let buffer = buffer.get_mut().unwrap();
                let mem = buffer.peek_memory_mut(i).unwrap();
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_writable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }
        }

        // Mutable iteration over all memories.
        {
            let buffer = buffer.get_mut().unwrap();
            let mut last = 0;
            for (i, mem) in buffer.iter_memories_mut().unwrap().enumerate() {
                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_readable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                {
                    assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                    let map = mem.map_writable().unwrap();
                    assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
                }

                last = i;
            }

            assert_eq!(last, 4);
        }

        // Borrowing iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);

        // Owning iteration.
        let mut last = 0;
        for (i, mem) in buffer.iter_memories_owned().enumerate() {
            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            {
                assert_eq!(mem.size(), if i < 4 { 5 } else { 10 });
                let map = mem.map_readable().unwrap();
                assert_eq!(map.size(), if i < 4 { 5 } else { 10 });
            }

            last = i;
        }

        assert_eq!(last, 4);
    }
1789
    // `foreach_meta` visits metas in insertion order.
    #[test]
    fn test_meta_foreach() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);
    }

    // `foreach_meta_mut` can selectively remove metas while iterating.
    #[test]
    fn test_meta_foreach_mut() {
        crate::init().unwrap();

        let mut buffer = Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::ZERO,
                ClockTime::NONE,
            );
            crate::ReferenceTimestampMeta::add(
                buffer,
                &crate::Caps::builder("foo/bar").build(),
                ClockTime::SECOND,
                ClockTime::NONE,
            );
        }

        // First pass: visit both metas, removing the one at 1s.
        let mut res = vec![];
        buffer.get_mut().unwrap().foreach_meta_mut(|mut meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            if meta.timestamp() == ClockTime::SECOND {
                ControlFlow::Continue(BufferMetaForeachAction::Remove)
            } else {
                ControlFlow::Continue(BufferMetaForeachAction::Keep)
            }
        });

        assert_eq!(&[ClockTime::ZERO, ClockTime::SECOND][..], &res[..]);

        // Second pass: only the kept meta remains.
        let mut res = vec![];
        buffer.foreach_meta(|meta| {
            let meta = meta
                .downcast_ref::<crate::ReferenceTimestampMeta>()
                .unwrap();
            res.push(meta.timestamp());
            ControlFlow::Continue(())
        });

        assert_eq!(&[ClockTime::ZERO][..], &res[..]);
    }
1870
1871 #[test]
1872 fn test_ptr_eq() {
1873 crate::init().unwrap();
1874
1875 let buffer1 = Buffer::new();
1876 assert!(BufferRef::ptr_eq(&buffer1, &buffer1));
1877 let buffer2 = Buffer::new();
1878 assert!(!BufferRef::ptr_eq(&buffer1, &buffer2));
1879 }
1880
    // `copy_region` accepts every std range flavor; out-of-bounds ranges
    // must fail instead of truncating.
    #[test]
    fn test_copy_region() {
        crate::init().unwrap();

        let buffer1 = Buffer::from_mut_slice(vec![0, 1, 2, 3, 4, 5, 6, 7]);
        // All of these ranges denote the full 8-byte buffer.
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=7).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..8).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 0..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[0, 1, 2, 3, 4, 5, 6, 7]
        );

        // Ranges that extend past the buffer must be rejected.
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 0..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=10).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 8..=8).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..).is_err());
        assert!(buffer1.copy_region(BUFFER_COPY_ALL, 10..100).is_err());

        // Partial regions.
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..=4).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[2, 3, 4]);

        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, 2..).unwrap();
        assert_eq!(
            buffer2.map_readable().unwrap().as_slice(),
            &[2, 3, 4, 5, 6, 7]
        );
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1]);
        let buffer2 = buffer1.copy_region(BUFFER_COPY_ALL, ..=2).unwrap();
        assert_eq!(buffer2.map_readable().unwrap().as_slice(), &[0, 1, 2]);
    }
1940
    // `dump`/`dump_range` formatting: Debug gives offset+hex+ASCII, Display
    // gives hex only; invalid ranges render marker strings.
    #[test]
    fn test_dump() {
        use std::fmt::Write;

        crate::init().unwrap();

        let mut s = String::new();
        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(s, "01 02 03 04");
        s.clear();

        let buffer = crate::Buffer::from_slice(vec![1, 2, 3, 4]);
        write!(&mut s, "{:?}", buffer.dump_range(..)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 ...."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..2)).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(2..=3)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 .."
        );
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(..100)).unwrap();
        assert_eq!(s, "<end out of range>",);
        s.clear();
        write!(&mut s, "{:?}", buffer.dump_range(90..100)).unwrap();
        assert_eq!(s, "<start out of range>",);
        s.clear();

        // More than 16 bytes wraps onto a second line.
        let buffer = crate::Buffer::from_slice(vec![0; 19]);
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................\n\
             0010: 00 00 00 ..."
        );
        s.clear();
    }
1994
    // Dumping must stitch together the bytes of multiple memories seamlessly.
    #[test]
    fn test_dump_multi_memories() {
        use std::fmt::Write;

        crate::init().unwrap();

        // 4+4+4+4+3 = 19 bytes spread over five memories.
        let mut buffer = crate::Buffer::new();
        {
            let buffer = buffer.get_mut().unwrap();

            let mem = crate::Memory::from_slice(vec![1, 2, 3, 4]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![5, 6, 7, 8]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![9, 10, 11, 12]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![13, 14, 15, 16]);
            buffer.append_memory(mem);

            let mem = crate::Memory::from_slice(vec![17, 18, 19]);
            buffer.append_memory(mem);
        }

        let mut s = String::new();
        write!(&mut s, "{:?}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "0000: 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 ................\n\
             0010: 11 12 13 ..."
        );
        s.clear();
        write!(&mut s, "{}", buffer.dump()).unwrap();
        assert_eq!(
            s,
            "01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10\n11 12 13"
        );
        s.clear();

        // A range starting mid-memory still lines up correctly.
        write!(&mut s, "{:?}", buffer.dump_range(2..)).unwrap();
        assert_eq!(
            s,
            "0002: 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f 10 11 12 ................\n\
             0012: 13 ."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..17)).unwrap();
        assert_eq!(
            s,
            "000e: 0f 10 11 ..."
        );
        s.clear();

        write!(&mut s, "{:?}", buffer.dump_range(14..20)).unwrap();
        assert_eq!(s, "<end out of range>");
        s.clear();

        #[allow(clippy::reversed_empty_ranges)]
        {
            write!(&mut s, "{:?}", buffer.dump_range(23..20)).unwrap();
            assert_eq!(s, "<start out of range>");
            s.clear();
        }
    }
2062
    // A `Vec<u8>` wrapped into a buffer can be recovered intact via
    // `try_into_inner`.
    #[test]
    fn test_buffer_wrap_vec_u8() {
        crate::init().unwrap();

        let data = vec![1u8, 2, 3, 4, 5];
        let expected = data.clone();

        let buf = Buffer::from_slice(data);
        assert_eq!(buf.size(), 5);
        assert_eq!(buf.n_memory(), 1);

        let converted: Vec<u8> = buf.try_into_inner().unwrap();
        assert_eq!(converted, expected);
    }

    // Extracting with the wrong inner type must fail with `TypeMismatch`
    // (and hand the buffer back).
    #[test]
    fn test_buffer_into_wrong_type() {
        crate::init().unwrap();

        let buf = Buffer::from_slice(vec![1u8, 2, 3, 4, 5]);
        assert_eq!(buf.size(), 5);
        assert_eq!(buf.n_memory(), 1);

        let res = buf.try_into_inner::<Vec<u32>>();
        assert!(res.is_err());
        let (_buf, err) = res.err().unwrap();
        assert!(matches!(
            err,
            crate::MemoryIntoInnerError::TypeMismatch { .. }
        ));
    }

    // Writes made through a writable map are visible in the extracted Vec.
    #[test]
    fn test_buffer_modify_and_extract() {
        crate::init().unwrap();

        let data = vec![0u8; 10];
        let mut buf = Buffer::from_mut_slice(data);

        {
            let bufref = buf.make_mut();
            let mut mapped = bufref.map_writable().unwrap();
            let slice = mapped.as_mut_slice();
            for (i, byte) in slice.iter_mut().enumerate() {
                *byte = (i * 2) as u8;
            }
        }

        let extracted: Vec<u8> = buf.try_into_inner().unwrap();
        assert_eq!(extracted, vec![0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
    }
2116}