rustls/msgs/deframer/buffers.rs

use alloc::vec::Vec;
use core::mem;
use core::ops::Range;
#[cfg(feature = "std")]
use std::io;

#[cfg(feature = "std")]
use crate::msgs::message::MAX_WIRE_SIZE;

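/// Conversion from a slice within a larger buffer into a `Range`
/// of byte offsets within that buffer.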
#[derive(Debug)]
pub(crate) struct Locator {
    bounds: Range<*const u8>,
}

impl Locator {
    #[inline]
    pub(crate) fn new(slice: &[u8]) -> Self {
        Self {
            bounds: slice.as_ptr_range(),
        }
    }

    /// Returns the range of byte offsets `slice` occupies within the
    /// buffer this `Locator` was created over.
    ///
    /// `slice` must lie entirely within that buffer.
    #[inline]
    pub(crate) fn locate(&self, slice: &[u8]) -> Range<usize> {
        let bounds = slice.as_ptr_range();
        debug_assert!(self.fully_contains(slice));
        let start = bounds.start as usize - self.bounds.start as usize;
        let len = bounds.end as usize - bounds.start as usize;
        Range {
            start,
            end: start + len,
        }
    }

    /// Returns true if `slice` lies entirely within the original buffer.
    #[inline]
    pub(crate) fn fully_contains(&self, slice: &[u8]) -> bool {
        let bounds = slice.as_ptr_range();
        bounds.start >= self.bounds.start && bounds.end <= self.bounds.end
    }
}

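/// Conversion from a `Range` of byte offsets back into a slice of the
/// underlying buffer; the inverse of [`Locator`].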
pub(crate) struct Delocator<'b> {
    slice: &'b [u8],
}

impl<'b> Delocator<'b> {
    #[inline]
    pub(crate) fn new(slice: &'b [u8]) -> Self {
        Self { slice }
    }

    /// Returns the sub-slice of the buffer described by `range`.
    ///
    /// Panics if `range` is out of bounds for the buffer.
    #[inline]
    pub(crate) fn slice_from_range(&'_ self, range: &Range<usize>) -> &'b [u8] {
        self.slice
            .get(range.clone())
            .unwrap()
    }

    /// Makes a [`Locator`] over the same buffer.
    #[inline]
    pub(crate) fn locator(self) -> Locator {
        Locator::new(self.slice)
    }
}

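/// Copies ranges of bytes within a single mutable buffer, so that
/// separate regions can be coalesced into one contiguous run.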
pub(crate) struct Coalescer<'b> {
    slice: &'b mut [u8],
}

impl<'b> Coalescer<'b> {
    #[inline]
    pub(crate) fn new(slice: &'b mut [u8]) -> Self {
        Self { slice }
    }

    /// Copies the bytes at `from` over the bytes at `to`.
    ///
    /// Both ranges must be in bounds and of the same length.
    #[inline]
    pub(crate) fn copy_within(&mut self, from: Range<usize>, to: Range<usize>) {
        debug_assert!(from.len() == to.len());
        debug_assert!(self.slice.get(from.clone()).is_some());
        debug_assert!(self.slice.get(to.clone()).is_some());
        self.slice.copy_within(from, to.start);
    }

    /// Converts back into a [`Delocator`] over the same buffer.
    #[inline]
    pub(crate) fn delocator(self) -> Delocator<'b> {
        Delocator::new(self.slice)
    }
}

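/// Tracks progress through a buffer: how many bytes have been
/// processed, and how many of those may be discarded from the front.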
#[derive(Clone, Debug)]
pub(crate) struct BufferProgress {
    /// Prefix of the buffer that has been processed so far.
    ///
    /// May exceed `discard`: bytes can be processed without yet
    /// becoming eligible for discarding.
    processed: usize,

    /// Prefix of the buffer that can be discarded.
    discard: usize,
}

impl BufferProgress {
    pub(super) fn new(processed: usize) -> Self {
        Self {
            processed,
            discard: 0,
        }
    }

    #[inline]
    pub(crate) fn add_discard(&mut self, discard: usize) {
        self.discard += discard;
    }

    #[inline]
    pub(crate) fn add_processed(&mut self, processed: usize) {
        self.processed += processed;
    }

    #[inline]
    pub(crate) fn take_discard(&mut self) -> usize {
        // The caller is about to discard this many bytes from the front
        // of the buffer, so the processed prefix shrinks by the same amount.
        self.processed = self
            .processed
            .saturating_sub(self.discard);
        mem::take(&mut self.discard)
    }

    #[inline]
    pub(crate) fn processed(&self) -> usize {
        self.processed
    }
}

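/// An owned deframer buffer: a growable `Vec<u8>` plus the length of
/// its valid prefix.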
#[derive(Default, Debug)]
pub(crate) struct DeframerVecBuffer {
    /// Buffer of data read from the socket, in the process of being
    /// parsed into messages.
    buf: Vec<u8>,

    /// What size prefix of `buf` is used.
    used: usize,
}

impl DeframerVecBuffer {
    /// Discard the first `taken` bytes, moving any remaining used
    /// bytes to the front of the buffer.
    pub(crate) fn discard(&mut self, taken: usize) {
        #[allow(clippy::comparison_chain)]
        if taken < self.used {
            /* Before:
             * +----------+----------+----------+
             * | taken    | pending  |xxxxxxxxxx|
             * +----------+----------+----------+
             * 0          ^ taken    ^ self.used
             *
             * After:
             * +----------+----------+----------+
             * | pending  |xxxxxxxxxxxxxxxxxxxxx|
             * +----------+----------+----------+
             * 0          ^ self.used
             */
            self.buf
                .copy_within(taken..self.used, 0);
            self.used -= taken;
        } else if taken >= self.used {
            self.used = 0;
        }
    }

    pub(crate) fn filled_mut(&mut self) -> &mut [u8] {
        &mut self.buf[..self.used]
    }

    pub(crate) fn filled(&self) -> &[u8] {
        &self.buf[..self.used]
    }
}

#[cfg(feature = "std")]
impl DeframerVecBuffer {
    /// Read some bytes from `rd`, adding them to the buffer.
    pub(crate) fn read(&mut self, rd: &mut dyn io::Read, in_handshake: bool) -> io::Result<usize> {
        if let Err(err) = self.prepare_read(in_handshake) {
            return Err(io::Error::new(io::ErrorKind::InvalidData, err));
        }

        // Try to do the largest reads possible. Note that if
        // we get a message with a length field out of range here,
        // we do a zero length read. That looks like an EOF to
        // the next layer up, which is fine.
        let new_bytes = rd.read(&mut self.buf[self.used..])?;
        self.used += new_bytes;
        Ok(new_bytes)
    }

    /// Resize the buffer ready for the next read, enforcing the
    /// relevant size limit.
    fn prepare_read(&mut self, is_joining_hs: bool) -> Result<(), &'static str> {
        /// TLS allows for handshake messages of up to 16MB.  We
        /// restrict that to 64KB to limit potential for denial-of-service.
        const MAX_HANDSHAKE_SIZE: u32 = 0xffff;

        const READ_SIZE: usize = 4096;

        // While joining handshake messages we may buffer up to
        // `MAX_HANDSHAKE_SIZE` bytes; otherwise the limit is one
        // maximum-sized wire message.
        let allow_max = match is_joining_hs {
            true => MAX_HANDSHAKE_SIZE as usize,
            false => MAX_WIRE_SIZE,
        };

        if self.used >= allow_max {
            return Err("message buffer full");
        }

        // Grow the buffer if needed to allow a full `READ_SIZE` read,
        // but shrink it back down once it is empty or over the limit.
        let need_capacity = Ord::min(allow_max, self.used + READ_SIZE);
        if need_capacity > self.buf.len() {
            self.buf.resize(need_capacity, 0);
        } else if self.used == 0 || self.buf.len() > allow_max {
            self.buf.resize(need_capacity, 0);
            self.buf.shrink_to(need_capacity);
        }

        Ok(())
    }

    /// Append `bytes` to the end of this buffer.
    ///
    /// Return a `Range` saying where it went.
    pub(crate) fn extend(&mut self, bytes: &[u8]) -> Range<usize> {
        let len = bytes.len();
        let start = self.used;
        let end = start + len;
        if self.buf.len() < end {
            self.buf.resize(end, 0);
        }
        self.buf[start..end].copy_from_slice(bytes);
        self.used += len;
        Range { start, end }
    }
}

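/// A borrowed version of [`DeframerVecBuffer`] that tracks pending
/// discard operations instead of applying them immediately.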
#[derive(Debug)]
pub(crate) struct DeframerSliceBuffer<'a> {
    // A fully initialized buffer that will be deframed.
    buf: &'a mut [u8],
    // Number of bytes to discard from the front of `buf` at a later time.
    discard: usize,
}

impl<'a> DeframerSliceBuffer<'a> {
    pub(crate) fn new(buf: &'a mut [u8]) -> Self {
        Self { buf, discard: 0 }
    }

    /// Tracks a pending discard operation of `num_bytes`.
    pub(crate) fn queue_discard(&mut self, num_bytes: usize) {
        self.discard += num_bytes;
    }

    /// Returns the number of bytes queued for discarding.
    pub(crate) fn pending_discard(&self) -> usize {
        self.discard
    }

    pub(crate) fn filled_mut(&mut self) -> &mut [u8] {
        &mut self.buf[self.discard..]
    }
}
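
// NOTE: the tests below are an illustrative sketch, not part of the
// upstream module. They show how the types above compose, using only
// APIs defined in this file: a `Locator`/`Delocator` round trip, and
// the `BufferProgress` discard bookkeeping.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn locate_and_delocate_round_trip() {
        let buf = [0u8; 16];
        let locator = Locator::new(&buf);

        // A sub-slice of `buf` maps to its offset range within `buf`...
        let slice = &buf[4..9];
        assert!(locator.fully_contains(slice));
        assert_eq!(locator.locate(slice), 4..9);

        // ...and a `Delocator` over the same buffer maps it back.
        let delocator = Delocator::new(&buf);
        assert_eq!(delocator.slice_from_range(&(4..9)), slice);
    }

    #[test]
    fn take_discard_shrinks_processed() {
        let mut progress = BufferProgress::new(0);
        progress.add_processed(10);
        progress.add_discard(4);

        // `take_discard()` hands back the queued discard and shifts the
        // processed prefix down by the same amount.
        assert_eq!(progress.take_discard(), 4);
        assert_eq!(progress.processed(), 6);
        assert_eq!(progress.take_discard(), 0);
    }
}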