std/io/impls.rs

#[cfg(test)]
mod tests;

use crate::alloc::Allocator;
use crate::collections::VecDeque;
use crate::io::{self, BorrowedCursor, BufRead, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write};
use crate::{cmp, fmt, mem, str};

// =============================================================================
// Forwarding implementations

#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for &mut R {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
        (**self).read_buf(cursor)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }

    #[inline]
    fn read_buf_exact(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
        (**self).read_buf_exact(cursor)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for &mut W {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        (**self).write_all_vectored(bufs)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for &mut S {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }

    #[inline]
    fn rewind(&mut self) -> io::Result<()> {
        (**self).rewind()
    }

    #[inline]
    fn stream_len(&mut self) -> io::Result<u64> {
        (**self).stream_len()
    }

    #[inline]
    fn stream_position(&mut self) -> io::Result<u64> {
        (**self).stream_position()
    }

    #[inline]
    fn seek_relative(&mut self, offset: i64) -> io::Result<()> {
        (**self).seek_relative(offset)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for &mut B {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn has_data_left(&mut self) -> io::Result<bool> {
        (**self).has_data_left()
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn skip_until(&mut self, byte: u8) -> io::Result<usize> {
        (**self).skip_until(byte)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for Box<R> {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
        (**self).read_buf(cursor)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }

    #[inline]
    fn read_buf_exact(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
        (**self).read_buf_exact(cursor)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for Box<W> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        (**self).write_all_vectored(bufs)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for Box<S> {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }

    #[inline]
    fn rewind(&mut self) -> io::Result<()> {
        (**self).rewind()
    }

    #[inline]
    fn stream_len(&mut self) -> io::Result<u64> {
        (**self).stream_len()
    }

    #[inline]
    fn stream_position(&mut self) -> io::Result<u64> {
        (**self).stream_position()
    }

    #[inline]
    fn seek_relative(&mut self, offset: i64) -> io::Result<()> {
        (**self).seek_relative(offset)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for Box<B> {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn has_data_left(&mut self) -> io::Result<bool> {
        (**self).has_data_left()
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn skip_until(&mut self, byte: u8) -> io::Result<usize> {
        (**self).skip_until(byte)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

// =============================================================================
// In-memory buffer implementations

/// Read is implemented for `&[u8]` by copying from the slice.
///
/// Note that reading updates the slice to point to the yet unread part.
/// The slice will be empty when EOF is reached.
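///
/// # Examples
///
/// An illustrative sketch (added here, not part of the original source; the
/// buffer contents and sizes are arbitrary) of how a read advances the slice
/// toward EOF:
///
/// ```
/// use std::io::Read;
///
/// let mut data: &[u8] = b"hello";
/// let mut buf = [0u8; 3];
///
/// assert_eq!(data.read(&mut buf).unwrap(), 3);
/// assert_eq!(&buf, b"hel");
/// // The slice now points at the unread tail.
/// assert_eq!(data, b"lo");
/// ```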
#[stable(feature = "rust1", since = "1.0.0")]
impl Read for &[u8] {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let amt = cmp::min(buf.len(), self.len());
        let (a, b) = self.split_at(amt);

        // First check if the amount of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if amt == 1 {
            buf[0] = a[0];
        } else {
            buf[..amt].copy_from_slice(a);
        }

        *self = b;
        Ok(amt)
    }

    #[inline]
    fn read_buf(&mut self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> {
        let amt = cmp::min(cursor.capacity(), self.len());
        let (a, b) = self.split_at(amt);

        cursor.append(a);

        *self = b;
        Ok(())
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        let mut nread = 0;
        for buf in bufs {
            nread += self.read(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nread)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        if buf.len() > self.len() {
            // `read_exact` makes no promise about the content of `buf` if it
            // fails so don't bother about that.
            *self = &self[self.len()..];
            return Err(io::Error::READ_EXACT_EOF);
        }
        let (a, b) = self.split_at(buf.len());

        // First check if the amount of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if buf.len() == 1 {
            buf[0] = a[0];
        } else {
            buf.copy_from_slice(a);
        }

        *self = b;
        Ok(())
    }

    #[inline]
    fn read_buf_exact(&mut self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> {
        if cursor.capacity() > self.len() {
            // Append everything we can to the cursor.
            cursor.append(*self);
            *self = &self[self.len()..];
            return Err(io::Error::READ_EXACT_EOF);
        }
        let (a, b) = self.split_at(cursor.capacity());

        cursor.append(a);

        *self = b;
        Ok(())
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        let len = self.len();
        buf.try_reserve(len)?;
        buf.extend_from_slice(*self);
        *self = &self[len..];
        Ok(len)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        let content = str::from_utf8(self).map_err(|_| io::Error::INVALID_UTF8)?;
        let len = self.len();
        buf.try_reserve(len)?;
        buf.push_str(content);
        *self = &self[len..];
        Ok(len)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl BufRead for &[u8] {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        Ok(*self)
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        *self = &self[amt..];
    }
}

/// Write is implemented for `&mut [u8]` by copying into the slice, overwriting
/// its data.
///
/// Note that writing updates the slice to point to the yet unwritten part.
/// The slice will be empty when it has been completely overwritten.
///
/// If the number of bytes to be written exceeds the size of the slice, write operations will
/// return short writes: ultimately, `Ok(0)`; in this situation, `write_all` returns an error of
/// kind `ErrorKind::WriteZero`.
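///
/// # Examples
///
/// A minimal sketch (added for illustration, not part of the original source;
/// the buffer sizes are arbitrary) of a short write into a fixed-size buffer:
///
/// ```
/// use std::io::Write;
///
/// let mut backing = [0u8; 4];
/// let mut buf: &mut [u8] = &mut backing;
///
/// // Only as many bytes as fit are written; the slice shrinks accordingly.
/// assert_eq!(buf.write(b"abcdef").unwrap(), 4);
/// assert!(buf.is_empty());
/// assert_eq!(&backing, b"abcd");
/// ```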
#[stable(feature = "rust1", since = "1.0.0")]
impl Write for &mut [u8] {
    #[inline]
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        let amt = cmp::min(data.len(), self.len());
        let (a, b) = mem::take(self).split_at_mut(amt);
        a.copy_from_slice(&data[..amt]);
        *self = b;
        Ok(amt)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let mut nwritten = 0;
        for buf in bufs {
            nwritten += self.write(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nwritten)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, data: &[u8]) -> io::Result<()> {
        if self.write(data)? < data.len() { Err(io::Error::WRITE_ALL_EOF) } else { Ok(()) }
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        for buf in bufs {
            if self.write(buf)? < buf.len() {
                return Err(io::Error::WRITE_ALL_EOF);
            }
        }
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

/// Write is implemented for `Vec<u8>` by appending to the vector.
/// The vector will grow as needed.
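///
/// # Examples
///
/// A short sketch (added for illustration, not part of the original source) of
/// appending to a growable buffer through the `Write` trait:
///
/// ```
/// use std::io::Write;
///
/// let mut out: Vec<u8> = Vec::new();
/// out.write_all(b"hello ").unwrap();
/// out.write_all(b"world").unwrap();
/// assert_eq!(out, b"hello world");
/// ```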
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Allocator> Write for Vec<u8, A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let len = bufs.iter().map(|b| b.len()).sum();
        self.reserve(len);
        for buf in bufs {
            self.extend_from_slice(buf);
        }
        Ok(len)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        self.write_vectored(bufs)?;
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

/// Read is implemented for `VecDeque<u8>` by consuming bytes from the front of the `VecDeque`.
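///
/// # Examples
///
/// An illustrative sketch (added here, not part of the original source; the
/// contents are arbitrary) showing that reads consume bytes from the front of
/// the deque:
///
/// ```
/// use std::collections::VecDeque;
/// use std::io::Read;
///
/// let mut deque: VecDeque<u8> = VecDeque::from(vec![1, 2, 3, 4]);
/// let mut buf = [0u8; 2];
/// deque.read_exact(&mut buf).unwrap();
/// assert_eq!(buf, [1, 2]);
/// // The bytes that were read are gone from the front.
/// assert_eq!(deque, [3, 4]);
/// ```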
#[stable(feature = "vecdeque_read_write", since = "1.63.0")]
impl<A: Allocator> Read for VecDeque<u8, A> {
    /// Fill `buf` with the contents of the "front" slice as returned by
    /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are
    /// discontiguous, multiple calls to `read` will be needed to read the entire content.
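    ///
    /// # Examples
    ///
    /// A sketch (added for illustration, not part of the original source) of
    /// draining a deque with a read loop; a single call may return only the
    /// front slice, so the loop keeps going until `read` returns `0`:
    ///
    /// ```
    /// use std::collections::VecDeque;
    /// use std::io::Read;
    ///
    /// let mut deque: VecDeque<u8> = VecDeque::from(vec![b'a', b'b', b'c']);
    /// let mut out: Vec<u8> = Vec::new();
    /// let mut chunk = [0u8; 2];
    /// loop {
    ///     let n = deque.read(&mut chunk).unwrap();
    ///     if n == 0 {
    ///         break;
    ///     }
    ///     out.extend_from_slice(&chunk[..n]);
    /// }
    /// assert_eq!(out, b"abc");
    /// ```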
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let (ref mut front, _) = self.as_slices();
        let n = Read::read(front, buf)?;
        self.drain(..n);
        Ok(n)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        let (front, back) = self.as_slices();

        // Use only the front buffer if it is big enough to fill `buf`, else use
        // the back buffer too.
        match buf.split_at_mut_checked(front.len()) {
            None => buf.copy_from_slice(&front[..buf.len()]),
            Some((buf_front, buf_back)) => match back.split_at_checked(buf_back.len()) {
                Some((back, _)) => {
                    buf_front.copy_from_slice(front);
                    buf_back.copy_from_slice(back);
                }
                None => {
                    self.clear();
                    return Err(io::Error::READ_EXACT_EOF);
                }
            },
        }

        self.drain(..buf.len());
        Ok(())
    }

    #[inline]
    fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
        let (ref mut front, _) = self.as_slices();
        let n = cmp::min(cursor.capacity(), front.len());
        Read::read_buf(front, cursor)?;
        self.drain(..n);
        Ok(())
    }

    #[inline]
    fn read_buf_exact(&mut self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> {
        let len = cursor.capacity();
        let (front, back) = self.as_slices();

        match front.split_at_checked(cursor.capacity()) {
            Some((front, _)) => cursor.append(front),
            None => {
                cursor.append(front);
                match back.split_at_checked(cursor.capacity()) {
                    Some((back, _)) => cursor.append(back),
                    None => {
                        cursor.append(back);
                        self.clear();
                        return Err(io::Error::READ_EXACT_EOF);
                    }
                }
            }
        }

        self.drain(..len);
        Ok(())
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        // The total len is known upfront so we can reserve it in a single call.
        let len = self.len();
        buf.try_reserve(len)?;

        let (front, back) = self.as_slices();
        buf.extend_from_slice(front);
        buf.extend_from_slice(back);
        self.clear();
        Ok(len)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        // SAFETY: We only append to the buffer
        unsafe { io::append_to_string(buf, |buf| self.read_to_end(buf)) }
    }
}

/// BufRead is implemented for `VecDeque<u8>` by reading bytes from the front of the `VecDeque`.
#[stable(feature = "vecdeque_buf_read", since = "1.75.0")]
impl<A: Allocator> BufRead for VecDeque<u8, A> {
    /// Returns the contents of the "front" slice as returned by
    /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are
    /// discontiguous, multiple calls to `fill_buf` will be needed to read the entire content.
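    ///
    /// # Examples
    ///
    /// A minimal sketch (added for illustration, not part of the original
    /// source) of the `fill_buf`/`consume` pairing:
    ///
    /// ```
    /// use std::collections::VecDeque;
    /// use std::io::BufRead;
    ///
    /// let mut deque: VecDeque<u8> = VecDeque::from(vec![1, 2, 3]);
    /// let available = deque.fill_buf().unwrap();
    /// let n = available.len();
    /// assert_eq!(available, [1, 2, 3]);
    /// // Tell the reader how much of the buffer was actually used.
    /// deque.consume(n);
    /// assert!(deque.is_empty());
    /// ```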
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        let (front, _) = self.as_slices();
        Ok(front)
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        self.drain(..amt);
    }
}

/// Write is implemented for `VecDeque<u8>` by appending to the `VecDeque`, growing it as needed.
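///
/// # Examples
///
/// A short sketch (added for illustration, not part of the original source) of
/// appending to the back of a deque through the `Write` trait:
///
/// ```
/// use std::collections::VecDeque;
/// use std::io::Write;
///
/// let mut deque: VecDeque<u8> = VecDeque::new();
/// deque.write_all(b"ab").unwrap();
/// deque.write_all(b"cd").unwrap();
/// assert_eq!(deque, b"abcd");
/// ```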
#[stable(feature = "vecdeque_read_write", since = "1.63.0")]
impl<A: Allocator> Write for VecDeque<u8, A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let len = bufs.iter().map(|b| b.len()).sum();
        self.reserve(len);
        for buf in bufs {
            self.extend(&**buf);
        }
        Ok(len)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend(buf);
        Ok(())
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        self.write_vectored(bufs)?;
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

#[unstable(feature = "read_buf", issue = "78485")]
impl<'a> io::Write for core::io::BorrowedCursor<'a> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let amt = cmp::min(buf.len(), self.capacity());
        self.append(&buf[..amt]);
        Ok(amt)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let mut nwritten = 0;
        for buf in bufs {
            let n = self.write(buf)?;
            nwritten += n;
            if n < buf.len() {
                break;
            }
        }
        Ok(nwritten)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        if self.write(buf)? < buf.len() { Err(io::Error::WRITE_ALL_EOF) } else { Ok(()) }
    }

    #[inline]
    fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> io::Result<()> {
        for buf in bufs {
            if self.write(buf)? < buf.len() {
                return Err(io::Error::WRITE_ALL_EOF);
            }
        }
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}