/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//! Container for messages that are sent via binder.

use crate::binder::AsNative;
use crate::error::{status_result, Result, StatusCode};
use crate::proxy::SpIBinder;
use crate::sys;

use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::ManuallyDrop;
use std::ptr::{self, NonNull};

mod file_descriptor;
mod parcelable;
mod parcelable_holder;

pub use self::file_descriptor::ParcelFileDescriptor;
pub use self::parcelable::{
    Deserialize, DeserializeArray, DeserializeOption, Parcelable, Serialize, SerializeArray,
    SerializeOption, UnstructuredParcelable, NON_NULL_PARCELABLE_FLAG, NULL_PARCELABLE_FLAG,
};
pub use self::parcelable_holder::{ParcelableHolder, ParcelableMetadata};

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// A Parcel can contain both serialized data that will be deserialized on the
/// other side of the IPC, and references to live Binder objects that will
/// result in the other side receiving a proxy Binder connected with the
/// original Binder in the Parcel.
///
/// This type represents a parcel that is owned by Rust code.
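///
/// # Examples
///
/// For illustration only, a minimal sketch of a round trip through an owned
/// parcel, mirroring the tests at the end of this file:
///
/// ```no_run
/// # use binder::Parcel;
/// let mut parcel = Parcel::new();
/// parcel.write(&42i32).expect("write failed");
/// // SAFETY: position 0 is within the data we just wrote.
/// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
/// assert_eq!(parcel.read::<i32>().unwrap(), 42);
/// ```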
#[repr(transparent)]
pub struct Parcel {
    ptr: NonNull<sys::AParcel>,
}

/// Safety: This type guarantees that it owns the AParcel and that all access to
/// the AParcel happens through the Parcel, so it is ok to send across threads.
///
/// It would not be okay to implement Sync, because that would allow you to call
/// the reading methods from several threads in parallel, which would be a data
/// race on the cursor position inside the AParcel.
unsafe impl Send for Parcel {}

/// Container for a message (data and object references) that can be sent
/// through Binder.
///
/// This object is a borrowed variant of [`Parcel`]. It is a separate type from
/// `&mut Parcel` because it is not valid to `mem::swap` two parcels.
#[repr(transparent)]
pub struct BorrowedParcel<'a> {
    ptr: NonNull<sys::AParcel>,
    _lifetime: PhantomData<&'a mut Parcel>,
}

impl Parcel {
    /// Create a new empty `Parcel`.
    pub fn new() -> Parcel {
        // Safety: If `AParcel_create` succeeds, it always returns
        // a valid pointer. If it fails, the process will crash.
        let ptr = unsafe { sys::AParcel_create() };
        Self { ptr: NonNull::new(ptr).expect("AParcel_create returned null pointer") }
    }

    /// Create an owned reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object. The
    /// parcel object must be owned by the caller prior to this call, as this
    /// constructor takes ownership of the parcel and will destroy it on drop.
    ///
    /// Additionally, the caller must guarantee that it is valid to take
    /// ownership of the AParcel object. All future access to the AParcel
    /// must happen through this `Parcel`.
    ///
    /// Because `Parcel` implements `Send`, the pointer must never point to any
    /// thread-local data, e.g., a variable on the stack, either directly or
    /// indirectly.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<Parcel> {
        NonNull::new(ptr).map(|ptr| Self { ptr })
    }

    /// Consume the parcel, transferring ownership to the caller.
    pub(crate) fn into_raw(self) -> *mut sys::AParcel {
        let ptr = self.ptr.as_ptr();
        let _ = ManuallyDrop::new(self);
        ptr
    }

    /// Get a borrowed view into the contents of this `Parcel`.
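    ///
    /// # Examples
    ///
    /// For illustration only, a sketch of obtaining and using a borrowed view:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// let mut borrowed = parcel.borrowed();
    /// borrowed.write(&1i32).expect("write failed");
    /// ```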
    pub fn borrowed(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }

    /// Get an immutable borrowed view into the contents of this `Parcel`.
    pub fn borrowed_ref(&self) -> &BorrowedParcel<'_> {
        // Safety: Parcel and BorrowedParcel are both represented in the same
        // way as a NonNull<sys::AParcel> due to their use of repr(transparent),
        // so casting references as done here is valid.
        unsafe { &*(self as *const Parcel as *const BorrowedParcel<'_>) }
    }
}

impl Default for Parcel {
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for Parcel {
    fn clone(&self) -> Self {
        let mut new_parcel = Self::new();
        new_parcel
            .borrowed()
            .append_all_from(self.borrowed_ref())
            .expect("Failed to append from Parcel");
        new_parcel
    }
}

impl<'a> BorrowedParcel<'a> {
    /// Create a borrowed reference to a parcel object from a raw pointer.
    ///
    /// # Safety
    ///
    /// This constructor is safe if the raw pointer parameter is either null
    /// (resulting in `None`), or a valid pointer to an `AParcel` object.
    ///
    /// Since the raw pointer is not restricted by any lifetime, the lifetime on
    /// the returned `BorrowedParcel` object can be chosen arbitrarily by the
    /// caller. The caller must ensure it is valid to mutably borrow the AParcel
    /// for the duration of the lifetime that the caller chooses. Note that
    /// since this is a mutable borrow, it must have exclusive access to the
    /// AParcel for the duration of the borrow.
    pub unsafe fn from_raw(ptr: *mut sys::AParcel) -> Option<BorrowedParcel<'a>> {
        Some(Self { ptr: NonNull::new(ptr)?, _lifetime: PhantomData })
    }

    /// Get a sub-reference to this reference to the parcel.
    pub fn reborrow(&mut self) -> BorrowedParcel<'_> {
        // Safety: The raw pointer is a valid pointer to an AParcel, and the
        // lifetime of the returned `BorrowedParcel` is tied to `self`, so the
        // borrow checker will ensure that the `AParcel` can only be accessed
        // via the `BorrowedParcel` until it goes out of scope.
        BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData }
    }
}

/// Safety: The `Parcel` constructors guarantee that a `Parcel` object will
/// always contain a valid pointer to an `AParcel`.
unsafe impl AsNative<sys::AParcel> for Parcel {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

/// Safety: The `BorrowedParcel` constructors guarantee that a `BorrowedParcel`
/// object will always contain a valid pointer to an `AParcel`.
unsafe impl<'a> AsNative<sys::AParcel> for BorrowedParcel<'a> {
    fn as_native(&self) -> *const sys::AParcel {
        self.ptr.as_ptr()
    }

    fn as_native_mut(&mut self) -> *mut sys::AParcel {
        self.ptr.as_ptr()
    }
}

// Data serialization methods
impl<'a> BorrowedParcel<'a> {
    /// Data written to the parcel is zeroed before the backing memory is
    /// deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        // Safety: guaranteed to have a parcel object, and this method never fails
        unsafe { sys::AParcel_markSensitive(self.as_native()) }
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(self)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        if let Some(slice) = slice {
            let len: i32 = slice.len().try_into().or(Err(StatusCode::BAD_VALUE))?;
            self.write(&len)
        } else {
            self.write(&-1i32)
        }
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        self.write(&0i32)?;
        {
            let mut subparcel = WritableSubParcel(self.reborrow());
            f(&mut subparcel)?;
        }
        let end = self.get_data_position();
        // Safety: start is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(start)?;
        }
        assert!(end >= start);
        self.write(&(end - start))?;
        // Safety: end is less than the current size of the parcel data
        // buffer, because we just got it with `get_data_position`.
        unsafe {
            self.set_data_position(end)?;
        }
        Ok(())
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataPosition(self.as_native()) }
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        unsafe { sys::AParcel_getDataSize(self.as_native()) }
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and the caller guarantees that `pos` is within bounds.
        status_result(unsafe { sys::AParcel_setDataPosition(self.as_native(), pos) })
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        // Safety: `Parcel::appendFrom` from C++ checks that `start`
        // and `size` are in bounds, and returns an error otherwise.
        // Both `self` and `other` always contain valid pointers.
        let status = unsafe {
            sys::AParcel_appendFrom(other.as_native(), self.as_native_mut(), start, size)
        };
        status_result(status)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`, and this call is otherwise safe.
        let size = unsafe { sys::AParcel_getDataSize(other.as_native()) };
        self.append_from(other, 0, size)
    }
}

/// A segment of a writable parcel, used for [`BorrowedParcel::sized_write`].
pub struct WritableSubParcel<'a>(BorrowedParcel<'a>);

impl<'a> WritableSubParcel<'a> {
    /// Write a type that implements [`Serialize`] to the sub-parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        parcelable.serialize(&mut self.0)
    }
}

impl Parcel {
    /// Data written to the parcel is zeroed before the backing memory is
    /// deleted or reallocated.
    pub fn mark_sensitive(&mut self) {
        self.borrowed().mark_sensitive()
    }

    /// Write a type that implements [`Serialize`] to the parcel.
    pub fn write<S: Serialize + ?Sized>(&mut self, parcelable: &S) -> Result<()> {
        self.borrowed().write(parcelable)
    }

    /// Writes the length of a slice to the parcel.
    ///
    /// This is used in AIDL-generated client side code to indicate the
    /// allocated space for an output array parameter.
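    ///
    /// # Examples
    ///
    /// For illustration only, a sketch of writing a slice length:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// // Writes the length 3 as an i32; passing `None` would write -1 instead.
    /// parcel.write_slice_size(Some(&[0u8; 3][..])).expect("write failed");
    /// ```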
    pub fn write_slice_size<T>(&mut self, slice: Option<&[T]>) -> Result<()> {
        self.borrowed().write_slice_size(slice)
    }

    /// Perform a series of writes to the parcel, prepended with the length
    /// (in bytes) of the written data.
    ///
    /// The length `0i32` will be written to the parcel first, followed by the
    /// writes performed by the callback. The initial length will then be
    /// updated to the length of all data written by the callback, plus the
    /// size of the length element itself (4 bytes).
    ///
    /// # Examples
    ///
    /// After the following call:
    ///
    /// ```
    /// # use binder::{Binder, Interface, Parcel};
    /// # let mut parcel = Parcel::new();
    /// parcel.sized_write(|subparcel| {
    ///     subparcel.write(&1u32)?;
    ///     subparcel.write(&2u32)?;
    ///     subparcel.write(&3u32)
    /// });
    /// ```
    ///
    /// `parcel` will contain the following:
    ///
    /// ```ignore
    /// [16i32, 1u32, 2u32, 3u32]
    /// ```
    pub fn sized_write<F>(&mut self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(&'b mut WritableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed().sized_write(f)
    }

    /// Returns the current position in the parcel data.
    pub fn get_data_position(&self) -> i32 {
        self.borrowed_ref().get_data_position()
    }

    /// Returns the total size of the parcel.
    pub fn get_data_size(&self) -> i32 {
        self.borrowed_ref().get_data_size()
    }

    /// Move the current read/write position in the parcel.
    ///
    /// # Safety
    ///
    /// This method is safe if `pos` is less than the current size of the parcel
    /// data buffer. Otherwise, we are relying on correct bounds checking in the
    /// Parcel C++ code on every subsequent read or write to this parcel. If all
    /// accesses are bounds checked, this call is still safe, but we can't rely
    /// on that.
    pub unsafe fn set_data_position(&self, pos: i32) -> Result<()> {
        // Safety: We have the same safety requirements as
        // `BorrowedParcel::set_data_position`.
        unsafe { self.borrowed_ref().set_data_position(pos) }
    }

    /// Append a subset of another parcel.
    ///
    /// This appends `size` bytes of data from `other` starting at offset
    /// `start` to the current parcel, or returns an error if not possible.
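    ///
    /// # Examples
    ///
    /// For illustration only, a sketch of copying a single `i32` (4 bytes) from
    /// one parcel into another, mirroring the tests at the end of this file:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut source = Parcel::new();
    /// source.write(&42i32).expect("write failed");
    /// let mut dest = Parcel::new();
    /// dest.append_from(&source, 0, 4).expect("append failed");
    /// // SAFETY: position 0 is within the data we just appended.
    /// unsafe { dest.set_data_position(0).expect("rewind failed"); }
    /// assert_eq!(dest.read::<i32>().unwrap(), 42);
    /// ```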
    pub fn append_from(
        &mut self,
        other: &impl AsNative<sys::AParcel>,
        start: i32,
        size: i32,
    ) -> Result<()> {
        self.borrowed().append_from(other, start, size)
    }

    /// Append the contents of another parcel.
    pub fn append_all_from(&mut self, other: &impl AsNative<sys::AParcel>) -> Result<()> {
        self.borrowed().append_all_from(other)
    }
}

// Data deserialization methods
impl<'a> BorrowedParcel<'a> {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(self)
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        x.deserialize_from(self)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```ignore
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        let start = self.get_data_position();
        let parcelable_size: i32 = self.read()?;
        if parcelable_size < 4 {
            return Err(StatusCode::BAD_VALUE);
        }

        let end = start.checked_add(parcelable_size).ok_or(StatusCode::BAD_VALUE)?;
        if end > self.get_data_size() {
            return Err(StatusCode::NOT_ENOUGH_DATA);
        }

        let subparcel = ReadableSubParcel {
            parcel: BorrowedParcel { ptr: self.ptr, _lifetime: PhantomData },
            end_position: end,
        };
        f(subparcel)?;

        // Advance the data position to the actual end,
        // in case the closure read less data than was available.
        //
        // Safety: end must be less than the current size of the parcel, because
        // we checked above against `get_data_size`.
        unsafe {
            self.set_data_position(end)?;
        }

        Ok(())
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            return Err(StatusCode::UNEXPECTED_NULL);
        }

        // usize in Rust may be 16-bit, so i32 may not fit
        let len = len.try_into().unwrap();
        out_vec.resize_with(len, Default::default);

        Ok(())
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        let len: i32 = self.read()?;

        if len < 0 {
            *out_vec = None;
        } else {
            // usize in Rust may be 16-bit, so i32 may not fit
            let len = len.try_into().unwrap();
            let mut vec = Vec::with_capacity(len);
            vec.resize_with(len, Default::default);
            *out_vec = Some(vec);
        }

        Ok(())
    }
}

/// A segment of a readable parcel, used for [`Parcel::sized_read`].
pub struct ReadableSubParcel<'a> {
    parcel: BorrowedParcel<'a>,
    end_position: i32,
}

impl<'a> ReadableSubParcel<'a> {
    /// Read a type that implements [`Deserialize`] from the sub-parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        D::deserialize(&self.parcel)
    }

    /// Check if the sub-parcel has more data to read
    pub fn has_more_data(&self) -> bool {
        self.parcel.get_data_position() < self.end_position
    }
}

impl Parcel {
    /// Attempt to read a type that implements [`Deserialize`] from this parcel.
    pub fn read<D: Deserialize>(&self) -> Result<D> {
        self.borrowed_ref().read()
    }

    /// Attempt to read a type that implements [`Deserialize`] from this parcel
    /// onto an existing value. This operation will overwrite the old value
    /// partially or completely, depending on how much data is available.
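    ///
    /// # Examples
    ///
    /// For illustration only, a sketch of reading onto an existing value:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write(&7i32).expect("write failed");
    /// // SAFETY: position 0 is within the data we just wrote.
    /// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
    /// let mut value = 0i32;
    /// parcel.read_onto(&mut value).expect("read failed");
    /// assert_eq!(value, 7);
    /// ```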
    pub fn read_onto<D: Deserialize>(&self, x: &mut D) -> Result<()> {
        self.borrowed_ref().read_onto(x)
    }

    /// Safely read a sized parcelable.
    ///
    /// Read the size of a parcelable, compute the end position
    /// of that parcelable, then build a sized readable sub-parcel
    /// and call a closure with the sub-parcel as its parameter.
    /// The closure can keep reading data from the sub-parcel
    /// until it runs out of input data. The closure is responsible
    /// for calling `ReadableSubParcel::has_more_data` to check for
    /// more data before every read, at least until Rust generators
    /// are stabilized.
    /// After the closure returns, skip to the end of the current
    /// parcelable regardless of how much the closure has read.
    ///
    /// # Examples
    ///
    /// ```ignore
    /// let mut parcelable = Default::default();
    /// parcel.sized_read(|subparcel| {
    ///     if subparcel.has_more_data() {
    ///         parcelable.a = subparcel.read()?;
    ///     }
    ///     if subparcel.has_more_data() {
    ///         parcelable.b = subparcel.read()?;
    ///     }
    ///     Ok(())
    /// });
    /// ```
    ///
    pub fn sized_read<F>(&self, f: F) -> Result<()>
    where
        for<'b> F: FnOnce(ReadableSubParcel<'b>) -> Result<()>,
    {
        self.borrowed_ref().sized_read(f)
    }

    /// Read a vector size from the parcel and resize the given output vector to
    /// be correctly sized for that amount of data.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
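    ///
    /// # Examples
    ///
    /// For illustration only, a sketch pairing this with [`Parcel::write_slice_size`]:
    ///
    /// ```no_run
    /// # use binder::Parcel;
    /// let mut parcel = Parcel::new();
    /// parcel.write_slice_size(Some(&[0u8; 3][..])).expect("write failed");
    /// // SAFETY: position 0 is within the data we just wrote.
    /// unsafe { parcel.set_data_position(0).expect("rewind failed"); }
    /// let mut out: Vec<u32> = Vec::new();
    /// parcel.resize_out_vec(&mut out).expect("resize failed");
    /// assert_eq!(out.len(), 3);
    /// ```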
    pub fn resize_out_vec<D: Default + Deserialize>(&self, out_vec: &mut Vec<D>) -> Result<()> {
        self.borrowed_ref().resize_out_vec(out_vec)
    }

    /// Read a vector size from the parcel and either create a correctly sized
    /// vector for that amount of data or set the output parameter to None if
    /// the vector should be null.
    ///
    /// This method is used in AIDL-generated server side code for methods that
    /// take a mutable slice reference parameter.
    pub fn resize_nullable_out_vec<D: Default + Deserialize>(
        &self,
        out_vec: &mut Option<Vec<D>>,
    ) -> Result<()> {
        self.borrowed_ref().resize_nullable_out_vec(out_vec)
    }
}

// Internal APIs
impl<'a> BorrowedParcel<'a> {
    pub(crate) fn write_binder(&mut self, binder: Option<&SpIBinder>) -> Result<()> {
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. `AsNative` for `Option<SpIBinder>` will either return
        // null or a valid pointer to an `AIBinder`, both of which are
        // valid, safe inputs to `AParcel_writeStrongBinder`.
        //
        // This call does not take ownership of the binder. However, it does
        // require a mutable pointer, which we cannot extract from an
        // immutable reference, so we clone the binder, incrementing the
        // refcount before the call. The refcount will be immediately
        // decremented when this temporary is dropped.
        unsafe {
            status_result(sys::AParcel_writeStrongBinder(
                self.as_native_mut(),
                binder.cloned().as_native_mut(),
            ))
        }
    }

    pub(crate) fn read_binder(&self) -> Result<Option<SpIBinder>> {
        let mut binder = ptr::null_mut();
        // Safety: `BorrowedParcel` always contains a valid pointer to an
        // `AParcel`. We pass a valid, mutable out pointer to the `binder`
        // parameter. After this call, `binder` will be either null or a
        // valid pointer to an `AIBinder` owned by the caller.
        let status = unsafe { sys::AParcel_readStrongBinder(self.as_native(), &mut binder) };

        status_result(status)?;

        // Safety: `binder` is either null or a valid, owned pointer at this
        // point, so can be safely passed to `SpIBinder::from_raw`.
        Ok(unsafe { SpIBinder::from_raw(binder) })
    }
}

impl Drop for Parcel {
    fn drop(&mut self) {
        // Run the C++ Parcel complete object destructor
        //
        // Safety: `Parcel` always contains a valid pointer to an
        // `AParcel`. Since we own the parcel, we can safely delete it
        // here.
        unsafe { sys::AParcel_delete(self.ptr.as_ptr()) }
    }
}

impl fmt::Debug for Parcel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Parcel").finish()
    }
}

impl<'a> fmt::Debug for BorrowedParcel<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BorrowedParcel").finish()
    }
}

#[test]
fn test_read_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert_eq!(parcel.read::<bool>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i8>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u16>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<i64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<u64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f32>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<f64>(), Err(StatusCode::NOT_ENOUGH_DATA));
    assert_eq!(parcel.read::<Option<String>>(), Ok(None));
    assert_eq!(parcel.read::<String>(), Err(StatusCode::UNEXPECTED_NULL));

    assert_eq!(parcel.borrowed_ref().read_binder().err(), Some(StatusCode::BAD_TYPE));

    parcel.write(&1i32).unwrap();

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    let i: i32 = parcel.read().unwrap();
    assert_eq!(i, 1i32);
}

#[test]
#[allow(clippy::float_cmp)]
fn test_read_data() {
    let mut parcel = Parcel::new();
    let str_start = parcel.get_data_position();

    parcel.write(&b"Hello, Binder!\0"[..]).unwrap();
    // Skip over string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }
    assert_eq!(parcel.read::<i32>().unwrap(), 15);
    let start = parcel.get_data_position();

    assert!(parcel.read::<bool>().unwrap());

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i8>().unwrap(), 72i8);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u16>().unwrap(), 25928);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u32>().unwrap(), 1819043144);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<i64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<u64>().unwrap(), 4764857262830019912);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f32>().unwrap(), 1143139100000000000000000000.0);
    assert_eq!(parcel.read::<f32>().unwrap(), 40.043392);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<f64>().unwrap(), 34732488246.197815);

    // Skip back to before the string length
    // SAFETY: str_start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(str_start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<u8>>().unwrap(), b"Hello, Binder!\0");
}

#[test]
fn test_utf8_utf16_conversions() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    assert!(parcel.write("Hello, Binder!").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(parcel.read::<Option<String>>().unwrap().unwrap(), "Hello, Binder!",);
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write("Embedded null \0 inside a string").is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }
    assert_eq!(
        parcel.read::<Option<String>>().unwrap().unwrap(),
        "Embedded null \0 inside a string",
    );
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert!(parcel.write(&["str1", "str2", "str3"][..]).is_ok());
    assert!(parcel
        .write(&[String::from("str4"), String::from("str5"), String::from("str6"),][..])
        .is_ok());

    let s1 = "Hello, Binder!";
    let s2 = "This is a utf8 string.";
    let s3 = "Some more text here.";

    assert!(parcel.write(&[s1, s2, s3][..]).is_ok());
    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        assert!(parcel.set_data_position(start).is_ok());
    }

    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str1", "str2", "str3"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), ["str4", "str5", "str6"]);
    assert_eq!(parcel.read::<Vec<String>>().unwrap(), [s1, s2, s3]);
}

#[test]
fn test_sized_write() {
    let mut parcel = Parcel::new();
    let start = parcel.get_data_position();

    let arr = [1i32, 2i32, 3i32];

    parcel
        .sized_write(|subparcel| subparcel.write(&arr[..]))
        .expect("Could not perform sized write");

    // i32 sub-parcel length + i32 array length + 3 i32 elements
    let expected_len = 20i32;

    assert_eq!(parcel.get_data_position(), start + expected_len);

    // SAFETY: start is less than the current size of the parcel data buffer, because we haven't
    // made it any shorter since we got the position.
    unsafe {
        parcel.set_data_position(start).unwrap();
    }

    assert_eq!(expected_len, parcel.read().unwrap(),);

    assert_eq!(parcel.read::<Vec<i32>>().unwrap(), &arr,);
}

#[test]
fn test_append_from() {
    let mut parcel1 = Parcel::new();
    parcel1.write(&42i32).expect("Could not perform write");

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(4, parcel2.get_data_size());
    assert_eq!(Ok(()), parcel2.append_all_from(&parcel1));
    assert_eq!(8, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 2, 2));
    assert_eq!(4, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Ok(42), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Ok(()), parcel2.append_from(&parcel1, 0, 2));
    assert_eq!(2, parcel2.get_data_size());
    // SAFETY: 0 is less than the current size of the parcel data buffer, because the parcel is not
    // empty.
    unsafe {
        parcel2.set_data_position(0).unwrap();
    }
    assert_eq!(Err(StatusCode::NOT_ENOUGH_DATA), parcel2.read::<i32>());

    let mut parcel2 = Parcel::new();
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 4, 2));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, -1, 4));
    assert_eq!(Err(StatusCode::BAD_VALUE), parcel2.append_from(&parcel1, 2, -1));
}