diff --git a/Cargo.lock b/Cargo.lock index 47fbf5169f491..b062b505cb248 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 4 [[package]] name = "addr2line" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9acbfca36652500c911ddb767ed433e3ed99b032b5d935be73c6923662db1d43" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", "rustc-std-workspace-alloc", @@ -49,9 +49,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" dependencies = [ "rustc-std-workspace-core", ] @@ -78,9 +78,9 @@ dependencies = [ [[package]] name = "dlmalloc" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa3a2dbee57b69fbb5dbe852fa9c0925697fb0c7fbcb1593e90e5ffaedf13d51" +checksum = "06cdfe340b16dd990c54cce79743613fa09fbb16774f33a77c9fd196f8f3fa30" dependencies = [ "cfg-if", "libc", @@ -109,9 +109,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.32.0" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93563d740bc9ef04104f9ed6f86f1e3275c2cdafb95664e26584b9ca807a8ffe" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" dependencies = [ "rustc-std-workspace-alloc", "rustc-std-workspace-core", @@ -139,18 +139,18 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.175" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" dependencies = [ "rustc-std-workspace-core", ] [[package]] name = "memchr" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" dependencies = [ "rustc-std-workspace-core", ] @@ -166,6 +166,16 @@ dependencies = [ "rustc-std-workspace-core", ] +[[package]] +name = "moto-rt" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058a2807a30527bee4c30df7ababe971cdde94372d4dbd1ff145bb403381436c" +dependencies = [ + "rustc-std-workspace-alloc", + "rustc-std-workspace-core", +] + [[package]] name = "object" version = "0.37.3" @@ -316,6 +326,7 @@ dependencies = [ "hermit-abi", "libc", "miniz_oxide", + "moto-rt", "object", "panic_abort", "panic_unwind", @@ -382,9 +393,9 @@ dependencies = [ [[package]] name = "vex-sdk" -version = "0.27.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89f74fce61d7a7ba1589da9634c6305a72befb7cc9150c1f872d87d8060f32b9" +checksum = "79e5fe15afde1305478b35e2cb717fff59f485428534cf49cfdbfa4723379bf6" dependencies = [ "rustc-std-workspace-core", ] @@ -410,13 +421,19 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" -version = "0.59.0" 
+version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.53.5", ] [[package]] @@ -425,10 +442,11 @@ version = "0.0.0" [[package]] name = "windows-targets" -version = "0.52.6" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ + "windows-link", "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", @@ -441,57 +459,57 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "wit-bindgen" -version = "0.45.0" +version = "0.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052283831dbae3d879dc7f51f3d92703a316ca49f91540417d38591826127814" +checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36" 
dependencies = [ "rustc-std-workspace-alloc", "rustc-std-workspace-core", diff --git a/alloc/src/alloc.rs b/alloc/src/alloc.rs index 65c8206e9d462..39450f69ce30a 100644 --- a/alloc/src/alloc.rs +++ b/alloc/src/alloc.rs @@ -361,7 +361,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { unsafe extern "Rust" { // This is the magic symbol to call the global alloc error handler. rustc generates // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the - // default implementations below (`__rdl_oom`) otherwise. + // default implementations below (`__rdl_alloc_error_handler`) otherwise. #[rustc_std_internal_symbol] fn __rust_alloc_error_handler(size: usize, align: usize) -> !; } @@ -425,7 +425,7 @@ pub mod __alloc_error_handler { // called via generated `__rust_alloc_error_handler` if there is no // `#[alloc_error_handler]`. #[rustc_std_internal_symbol] - pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! { + pub unsafe fn __rdl_alloc_error_handler(size: usize, _align: usize) -> ! { unsafe extern "Rust" { // This symbol is emitted by rustc next to __rust_alloc_error_handler. // Its value depends on the -Zoom={panic,abort} compiler option. diff --git a/alloc/src/borrow.rs b/alloc/src/borrow.rs index cb32896161e5c..aa973e0bb0240 100644 --- a/alloc/src/borrow.rs +++ b/alloc/src/borrow.rs @@ -16,12 +16,13 @@ use crate::fmt; #[cfg(not(no_global_oom_handling))] use crate::string::String; +// FIXME(inference): const bounds removed due to inference regressions found by crater; +// see https://github.com/rust-lang/rust/issues/147964 +// #[rustc_const_unstable(feature = "const_convert", issue = "143773")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_const_unstable(feature = "const_convert", issue = "143773")] -impl<'a, B: ?Sized> const Borrow for Cow<'a, B> -where - B: ToOwned, - B::Owned: [const] Borrow, +impl<'a, B: ?Sized + ToOwned> Borrow for Cow<'a, B> +// where +// B::Owned: [const] Borrow, { fn borrow(&self) -> &B { &**self @@ -214,6 +215,10 @@ impl Clone for Cow<'_, B> { impl Cow<'_, B> { /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work. /// + /// Note: this is an associated function, which means that you have to call + /// it as `Cow::is_borrowed(&c)` instead of `c.is_borrowed()`. This is so + /// that there is no conflict with a method on the inner type. + /// /// # Examples /// /// ``` @@ -221,14 +226,14 @@ impl Cow<'_, B> { /// use std::borrow::Cow; /// /// let cow = Cow::Borrowed("moo"); - /// assert!(cow.is_borrowed()); + /// assert!(Cow::is_borrowed(&cow)); /// /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string()); - /// assert!(!bull.is_borrowed()); + /// assert!(!Cow::is_borrowed(&bull)); /// ``` #[unstable(feature = "cow_is_borrowed", issue = "65143")] - pub const fn is_borrowed(&self) -> bool { - match *self { + pub const fn is_borrowed(c: &Self) -> bool { + match *c { Borrowed(_) => true, Owned(_) => false, } @@ -236,6 +241,10 @@ impl Cow<'_, B> { /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op. /// + /// Note: this is an associated function, which means that you have to call + /// it as `Cow::is_owned(&c)` instead of `c.is_owned()`. This is so that + /// there is no conflict with a method on the inner type. 
+ /// /// # Examples /// /// ``` @@ -243,14 +252,14 @@ impl Cow<'_, B> { /// use std::borrow::Cow; /// /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string()); - /// assert!(cow.is_owned()); + /// assert!(Cow::is_owned(&cow)); /// /// let bull = Cow::Borrowed("...moo?"); - /// assert!(!bull.is_owned()); + /// assert!(!Cow::is_owned(&bull)); /// ``` #[unstable(feature = "cow_is_borrowed", issue = "65143")] - pub const fn is_owned(&self) -> bool { - !self.is_borrowed() + pub const fn is_owned(c: &Self) -> bool { + !Cow::is_borrowed(c) } /// Acquires a mutable reference to the owned form of the data. @@ -327,11 +336,13 @@ impl Cow<'_, B> { } } +// FIXME(inference): const bounds removed due to inference regressions found by crater; +// see https://github.com/rust-lang/rust/issues/147964 +// #[rustc_const_unstable(feature = "const_convert", issue = "143773")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_const_unstable(feature = "const_convert", issue = "143773")] -impl const Deref for Cow<'_, B> -where - B::Owned: [const] Borrow, +impl Deref for Cow<'_, B> +// where +// B::Owned: [const] Borrow, { type Target = B; @@ -441,11 +452,13 @@ where } } +// FIXME(inference): const bounds removed due to inference regressions found by crater; +// see https://github.com/rust-lang/rust/issues/147964 +// #[rustc_const_unstable(feature = "const_convert", issue = "143773")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_const_unstable(feature = "const_convert", issue = "143773")] -impl const AsRef for Cow<'_, T> -where - T::Owned: [const] Borrow, +impl AsRef for Cow<'_, T> +// where +// T::Owned: [const] Borrow, { fn as_ref(&self) -> &T { self diff --git a/alloc/src/boxed.rs b/alloc/src/boxed.rs index 49ff768bed1b2..2b767ffe02bee 100644 --- a/alloc/src/boxed.rs +++ b/alloc/src/boxed.rs @@ -192,11 +192,15 @@ use core::fmt; use core::future::Future; use core::hash::{Hash, Hasher}; use core::marker::{Tuple, Unsize}; +#[cfg(not(no_global_oom_handling))] +use core::mem::MaybeUninit; use core::mem::{self, SizedTypeProperties}; use core::ops::{ AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver, }; +#[cfg(not(no_global_oom_handling))] +use core::ops::{Residual, Try}; use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, NonNull, Unique}; use core::task::{Context, Poll}; @@ -237,6 +241,7 @@ pub struct Box< /// the newly allocated memory. This is an intrinsic to avoid unnecessary copies. /// /// This is the surface syntax for `box ` expressions. +#[doc(hidden)] #[rustc_intrinsic] #[unstable(feature = "liballoc_internals", issue = "none")] pub fn box_new(x: T) -> Box; @@ -299,7 +304,7 @@ impl Box { /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] #[inline] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed() -> Box> { Self::new_zeroed_in(Global) @@ -385,6 +390,82 @@ impl Box { pub fn try_new_zeroed() -> Result>, AllocError> { Box::try_new_zeroed_in(Global) } + + /// Maps the value in a box, reusing the allocation if possible. + /// + /// `f` is called on the value in the box, and the result is returned, also boxed. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Box::map(b, f)` instead of `b.map(f)`. This + /// is so that there is no conflict with a method on the inner type. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// let b = Box::new(7); + /// let new = Box::map(b, |i| i + 7); + /// assert_eq!(*new, 14); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn map(this: Self, f: impl FnOnce(T) -> U) -> Box { + if size_of::() == size_of::() && align_of::() == align_of::() { + let (value, allocation) = Box::take(this); + Box::write( + unsafe { mem::transmute::>, Box>>(allocation) }, + f(value), + ) + } else { + Box::new(f(*this)) + } + } + + /// Attempts to map the value in a box, reusing the allocation if possible. + /// + /// `f` is called on the value in the box, and if the operation succeeds, the result is + /// returned, also boxed. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Box::try_map(b, f)` instead of `b.try_map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// let b = Box::new(7); + /// let new = Box::try_map(b, u32::try_from).unwrap(); + /// assert_eq!(*new, 7); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn try_map( + this: Self, + f: impl FnOnce(T) -> R, + ) -> >>::TryType + where + R: Try, + R::Residual: Residual>, + { + if size_of::() == size_of::() && align_of::() == align_of::() { + let (value, allocation) = Box::take(this); + try { + Box::write( + unsafe { + mem::transmute::>, Box>>( + allocation, + ) + }, + f(value)?, + ) + } + } else { + try { Box::new(f(*this)?) } + } + } } impl Box { @@ -644,9 +725,9 @@ impl Box { #[unstable(feature = "box_take", issue = "147212")] pub fn take(boxed: Self) -> (T, Box, A>) { unsafe { - let (raw, alloc) = Box::into_raw_with_allocator(boxed); + let (raw, alloc) = Box::into_non_null_with_allocator(boxed); let value = raw.read(); - let uninit = Box::from_raw_in(raw.cast::>(), alloc); + let uninit = Box::from_non_null_in(raw.cast_uninit(), alloc); (value, uninit) } } @@ -691,7 +772,7 @@ impl Box<[T]> { /// /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit]> { unsafe { RawVec::with_capacity_zeroed(len).into_box(len) } @@ -769,7 +850,7 @@ impl Box<[T]> { /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. 
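A brief illustrative sketch of the `Box::map` fast path introduced above (the same-layout check that reuses the allocation); it assumes the unstable `smart_pointer_try_map` gate exactly as declared in this patch, and the comments about allocation reuse restate the documented behaviour rather than asserting it:

```rust
#![feature(smart_pointer_try_map)]

// u32 -> f32 has identical size and alignment, so per the fast-path
// condition above the original allocation can be reused.
let n = Box::new(1u32);
let f = Box::map(n, |v| v as f32);
assert_eq!(*f, 1.0);

// u32 -> u64 differs in layout, so this takes the `Box::new(f(*this))`
// fallback instead.
let n = Box::new(7u32);
let wide = Box::map(n, u64::from);
assert_eq!(*wide, 7);
```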
- #[unstable(feature = "slice_as_array", issue = "133508")] + #[unstable(feature = "alloc_slice_into_array", issue = "148082")] #[inline] #[must_use] pub fn into_array(self) -> Option> { @@ -2160,3 +2241,55 @@ impl Error for Box { Error::provide(&**self, request); } } + +#[unstable(feature = "allocator_api", issue = "32838")] +unsafe impl Allocator for Box { + #[inline] + fn allocate(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate(layout) + } + + #[inline] + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate_zeroed(layout) + } + + #[inline] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).deallocate(ptr, layout) } + } + + #[inline] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).shrink(ptr, old_layout, new_layout) } + } +} diff --git a/alloc/src/boxed/convert.rs b/alloc/src/boxed/convert.rs index 45c46fb526365..73940db5d2f50 100644 --- a/alloc/src/boxed/convert.rs +++ b/alloc/src/boxed/convert.rs @@ -1,4 +1,6 @@ use core::any::Any; +#[cfg(not(no_global_oom_handling))] +use core::clone::TrivialClone; use core::error::Error; use core::mem; use core::pin::Pin; @@ -75,11 +77,13 @@ impl BoxFromSlice for Box<[T]> { } #[cfg(not(no_global_oom_handling))] -impl BoxFromSlice for Box<[T]> { +impl BoxFromSlice for Box<[T]> { #[inline] fn from_slice(slice: &[T]) -> Self { let len = slice.len(); let buf = RawVec::with_capacity(len); + // SAFETY: since `T` implements `TrivialClone`, this is sound and + // equivalent to the above. unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); buf.into_box(slice.len()).assume_init() diff --git a/alloc/src/collections/btree/map.rs b/alloc/src/collections/btree/map.rs index adcb444d08c66..766f4589177a8 100644 --- a/alloc/src/collections/btree/map.rs +++ b/alloc/src/collections/btree/map.rs @@ -1368,7 +1368,8 @@ impl BTreeMap { } /// Splits the collection into two at the given key. Returns everything after the given key, - /// including the key. + /// including the key. If the key is not present, the split will occur at the nearest + /// greater key, or return an empty map if no such key exists. /// /// # Examples /// @@ -1433,7 +1434,8 @@ impl BTreeMap { /// /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating /// or the iteration short-circuits, then the remaining elements will be retained. - /// Use [`retain`] with a negated predicate if you do not need the returned iterator. + /// Use `extract_if().for_each(drop)` if you do not need the returned iterator, + /// or [`retain`] with a negated predicate if you also do not need to restrict the range. /// /// [`retain`]: BTreeMap::retain /// @@ -1944,7 +1946,8 @@ impl Default for Values<'_, K, V> { /// An iterator produced by calling `extract_if` on BTreeMap. 
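As a side note on the new `Allocator` forwarding impl for `Box` added above: it lets a boxed allocator be handed straight to allocator-aware containers. A minimal sketch, assuming the unstable `allocator_api` surface together with this impl:

```rust
#![feature(allocator_api)]

use std::alloc::Global;

// `Box<Global>` now implements `Allocator` by delegating to `**self`,
// so it can back an allocator-aware collection directly.
let alloc: Box<Global> = Box::new(Global);
let mut v: Vec<u32, Box<Global>> = Vec::new_in(alloc);
v.push(1);
assert_eq!(v, [1]);
```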
#[stable(feature = "btree_extract_if", since = "1.91.0")] -#[must_use = "iterators are lazy and do nothing unless consumed"] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain` or `extract_if().for_each(drop)` to remove and discard elements"] pub struct ExtractIf< 'a, K, diff --git a/alloc/src/collections/btree/map/entry.rs b/alloc/src/collections/btree/map/entry.rs index df51be3de54b9..add8782a9499a 100644 --- a/alloc/src/collections/btree/map/entry.rs +++ b/alloc/src/collections/btree/map/entry.rs @@ -284,7 +284,7 @@ impl<'a, K: Ord, V, A: Allocator + Clone> Entry<'a, K, V, A> { /// assert_eq!(entry.key(), &"poneyland"); /// ``` #[inline] - #[stable(feature = "btree_entry_insert", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "btree_entry_insert", since = "1.92.0")] pub fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, A> { match self { Occupied(mut entry) => { @@ -394,7 +394,7 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> { /// } /// assert_eq!(map["poneyland"], 37); /// ``` - #[stable(feature = "btree_entry_insert", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "btree_entry_insert", since = "1.92.0")] pub fn insert_entry(mut self, value: V) -> OccupiedEntry<'a, K, V, A> { let handle = match self.handle { None => { diff --git a/alloc/src/collections/btree/node.rs b/alloc/src/collections/btree/node.rs index a87259e7c58f2..84dd4b7e49def 100644 --- a/alloc/src/collections/btree/node.rs +++ b/alloc/src/collections/btree/node.rs @@ -33,6 +33,7 @@ use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; +use core::num::NonZero; use core::ptr::{self, NonNull}; use core::slice::SliceIndex; @@ -143,7 +144,7 @@ type BoxedNode = NonNull>; /// /// A reference to a node. /// -/// This type has a number of parameters that controls how it acts: +/// This type has a number of parameters that control how it acts: /// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime. /// - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`. /// - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node` @@ -226,33 +227,27 @@ impl NodeRef { fn from_new_leaf(leaf: Box, A>) -> Self { // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`. - let (leaf, _alloc) = Box::into_raw_with_allocator(leaf); - // SAFETY: the node was just allocated. - let node = unsafe { NonNull::new_unchecked(leaf) }; + let (node, _alloc) = Box::into_non_null_with_allocator(leaf); NodeRef { height: 0, node, _marker: PhantomData } } } impl NodeRef { + /// Creates a new internal (height > 0) `NodeRef` fn new_internal(child: Root, alloc: A) -> Self { let mut new_node = unsafe { InternalNode::new(alloc) }; new_node.edges[0].write(child.node); - unsafe { NodeRef::from_new_internal(new_node, child.height + 1) } + NodeRef::from_new_internal(new_node, NonZero::new(child.height + 1).unwrap()) } - /// # Safety - /// `height` must not be zero. - unsafe fn from_new_internal( + /// Creates a new internal (height > 0) `NodeRef` from an existing internal node + fn from_new_internal( internal: Box, A>, - height: usize, + height: NonZero, ) -> Self { - debug_assert!(height > 0); // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`. - let (internal, _alloc) = Box::into_raw_with_allocator(internal); - // SAFETY: the node was just allocated. 
- let internal = unsafe { NonNull::new_unchecked(internal) }; - let node = internal.cast(); - let mut this = NodeRef { height, node, _marker: PhantomData }; + let (node, _alloc) = Box::into_non_null_with_allocator(internal); + let mut this = NodeRef { height: height.into(), node: node.cast(), _marker: PhantomData }; this.borrow_mut().correct_all_childrens_parent_links(); this } @@ -625,9 +620,8 @@ impl NodeRef { let top = self.node; // SAFETY: we asserted to be internal. - let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() }; - // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive. - let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) }; + let mut internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() }; + let internal_node = internal_self.as_internal_mut(); // SAFETY: the first edge is always initialized. self.node = unsafe { internal_node.edges[0].assume_init_read() }; self.height -= 1; @@ -1305,7 +1299,8 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, &mut new_node.edges[..new_len + 1], ); - let height = self.node.height; + // SAFETY: self is `marker::Internal`, so `self.node.height` is positive + let height = NonZero::new_unchecked(self.node.height); let right = NodeRef::from_new_internal(new_node, height); SplitResult { left: self.node, kv, right } diff --git a/alloc/src/collections/btree/set.rs b/alloc/src/collections/btree/set.rs index 6e6996bcbd69b..28d26699d7d2c 100644 --- a/alloc/src/collections/btree/set.rs +++ b/alloc/src/collections/btree/set.rs @@ -427,39 +427,35 @@ impl BTreeSet { where T: Ord, { - let (self_min, self_max) = - if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { - (self_min, self_max) - } else { - return Difference { inner: DifferenceInner::Iterate(self.iter()) }; - }; - let (other_min, other_max) = - if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { - (other_min, other_max) - } else { - return Difference { inner: DifferenceInner::Iterate(self.iter()) }; - }; - Difference { - inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { - (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()), - (Equal, _) => { - let mut self_iter = self.iter(); - self_iter.next(); - DifferenceInner::Iterate(self_iter) - } - (_, Equal) => { - let mut self_iter = self.iter(); - self_iter.next_back(); - DifferenceInner::Iterate(self_iter) - } - _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { - DifferenceInner::Search { self_iter: self.iter(), other_set: other } - } - _ => DifferenceInner::Stitch { - self_iter: self.iter(), - other_iter: other.iter().peekable(), + if let Some(self_min) = self.first() + && let Some(self_max) = self.last() + && let Some(other_min) = other.first() + && let Some(other_max) = other.last() + { + Difference { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()), + (Equal, _) => { + let mut self_iter = self.iter(); + self_iter.next(); + DifferenceInner::Iterate(self_iter) + } + (_, Equal) => { + let mut self_iter = self.iter(); + self_iter.next_back(); + DifferenceInner::Iterate(self_iter) + } + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + DifferenceInner::Search { self_iter: self.iter(), other_set: other } + } + _ => DifferenceInner::Stitch { + self_iter: self.iter(), + other_iter: other.iter().peekable(), + }, }, - }, + } + } else { + Difference { inner: 
DifferenceInner::Iterate(self.iter()) } } } @@ -519,31 +515,27 @@ impl BTreeSet { where T: Ord, { - let (self_min, self_max) = - if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { - (self_min, self_max) - } else { - return Intersection { inner: IntersectionInner::Answer(None) }; - }; - let (other_min, other_max) = - if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { - (other_min, other_max) - } else { - return Intersection { inner: IntersectionInner::Answer(None) }; - }; - Intersection { - inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { - (Greater, _) | (_, Less) => IntersectionInner::Answer(None), - (Equal, _) => IntersectionInner::Answer(Some(self_min)), - (_, Equal) => IntersectionInner::Answer(Some(self_max)), - _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { - IntersectionInner::Search { small_iter: self.iter(), large_set: other } - } - _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { - IntersectionInner::Search { small_iter: other.iter(), large_set: self } - } - _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() }, - }, + if let Some(self_min) = self.first() + && let Some(self_max) = self.last() + && let Some(other_min) = other.first() + && let Some(other_max) = other.last() + { + Intersection { + inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) { + (Greater, _) | (_, Less) => IntersectionInner::Answer(None), + (Equal, _) => IntersectionInner::Answer(Some(self_min)), + (_, Equal) => IntersectionInner::Answer(Some(self_max)), + _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { small_iter: self.iter(), large_set: other } + } + _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => { + IntersectionInner::Search { small_iter: other.iter(), large_set: self } + } + _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() }, + }, + } + } else { + Intersection { inner: IntersectionInner::Answer(None) } } } @@ -694,55 +686,56 @@ impl BTreeSet { // Same result as self.difference(other).next().is_none() // but the code below is faster (hugely in some cases). 
if self.len() > other.len() { - return false; + return false; // self has more elements than other } - let (self_min, self_max) = - if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) { - (self_min, self_max) - } else { - return true; // self is empty - }; - let (other_min, other_max) = - if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) { - (other_min, other_max) - } else { - return false; // other is empty - }; + let (Some(self_min), Some(self_max)) = (self.first(), self.last()) else { + return true; // self is empty + }; + let (Some(other_min), Some(other_max)) = (other.first(), other.last()) else { + return false; // other is empty + }; let mut self_iter = self.iter(); match self_min.cmp(other_min) { - Less => return false, + Less => return false, // other does not contain self_min Equal => { - self_iter.next(); + self_iter.next(); // self_min is contained in other, so remove it from consideration + // other_min is now not in self_iter (used below) } - Greater => (), - } + Greater => {} // other_min is not in self_iter (used below) + }; + match self_max.cmp(other_max) { - Greater => return false, + Greater => return false, // other does not contain self_max Equal => { - self_iter.next_back(); + self_iter.next_back(); // self_max is contained in other, so remove it from consideration + // other_max is now not in self_iter (used below) } - Less => (), - } + Less => {} // other_max is not in self_iter (used below) + }; if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF { - for next in self_iter { - if !other.contains(next) { - return false; - } - } + self_iter.all(|e| other.contains(e)) } else { let mut other_iter = other.iter(); - other_iter.next(); - other_iter.next_back(); - let mut self_next = self_iter.next(); - while let Some(self1) = self_next { - match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) { - Less => return false, - Equal => self_next = self_iter.next(), - Greater => (), - } + { + // remove other_min and other_max as they are not in self_iter (see above) + other_iter.next(); + other_iter.next_back(); } + // custom `self_iter.all(|e| other.contains(e))` + self_iter.all(|self1| { + while let Some(other1) = other_iter.next() { + match other1.cmp(self1) { + // happens up to `ITER_PERFORMANCE_TIPPING_SIZE_DIFF * self.len() - 1` times + Less => continue, // skip over elements that are smaller + // happens `self.len()` times + Equal => return true, // self1 is in other + // happens only once + Greater => return false, // self1 is not in other + } + } + false + }) } - true } /// Returns `true` if the set is a superset of another, @@ -1196,7 +1189,8 @@ impl BTreeSet { /// /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating /// or the iteration short-circuits, then the remaining elements will be retained. - /// Use [`retain`] with a negated predicate if you do not need the returned iterator. + /// Use `extract_if().for_each(drop)` if you do not need the returned iterator, + /// or [`retain`] with a negated predicate if you also do not need to restrict the range. /// /// [`retain`]: BTreeSet::retain /// # Examples @@ -1554,7 +1548,8 @@ impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet { /// An iterator produced by calling `extract_if` on BTreeSet. 
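To illustrate the retention behaviour called out in the `extract_if` documentation above (elements never yielded stay in the set when the iterator is dropped early), a small sketch using the stabilized `BTreeSet::extract_if` signature referenced in this patch:

```rust
use std::collections::BTreeSet;

let mut set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
// Stop after two matches; the iterator is dropped without being exhausted,
// so the even elements it never reached (here, 6) are retained.
let taken: Vec<_> = set.extract_if(.., |v| v % 2 == 0).take(2).collect();
assert_eq!(taken, [2, 4]);
assert_eq!(set, BTreeSet::from([1, 3, 5, 6]));
```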
#[stable(feature = "btree_extract_if", since = "1.91.0")] -#[must_use = "iterators are lazy and do nothing unless consumed"] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain` or `extract_if().for_each(drop)` to remove and discard elements"] pub struct ExtractIf< 'a, T, diff --git a/alloc/src/collections/linked_list.rs b/alloc/src/collections/linked_list.rs index 31dfe73fc7992..8bc0e08a4b26b 100644 --- a/alloc/src/collections/linked_list.rs +++ b/alloc/src/collections/linked_list.rs @@ -1943,7 +1943,8 @@ impl<'a, T, A: Allocator> CursorMut<'a, T, A> { /// An iterator produced by calling `extract_if` on LinkedList. #[stable(feature = "extract_if", since = "1.87.0")] -#[must_use = "iterators are lazy and do nothing unless consumed"] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `extract_if().for_each(drop)` to remove and discard elements"] pub struct ExtractIf< 'a, T: 'a, diff --git a/alloc/src/collections/vec_deque/extract_if.rs b/alloc/src/collections/vec_deque/extract_if.rs new file mode 100644 index 0000000000000..437f0d6dd5eb3 --- /dev/null +++ b/alloc/src/collections/vec_deque/extract_if.rs @@ -0,0 +1,150 @@ +use core::ops::{Range, RangeBounds}; +use core::{fmt, ptr, slice}; + +use super::VecDeque; +use crate::alloc::{Allocator, Global}; + +/// An iterator which uses a closure to determine if an element should be removed. +/// +/// This struct is created by [`VecDeque::extract_if`]. +/// See its documentation for more. +/// +/// # Example +/// +/// ``` +/// #![feature(vec_deque_extract_if)] +/// +/// use std::collections::vec_deque::ExtractIf; +/// use std::collections::vec_deque::VecDeque; +/// +/// let mut v = VecDeque::from([0, 1, 2]); +/// let iter: ExtractIf<'_, _, _> = v.extract_if(.., |x| *x % 2 == 0); +/// ``` +#[unstable(feature = "vec_deque_extract_if", issue = "147750")] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain_mut` or `extract_if().for_each(drop)` to remove and discard elements"] +pub struct ExtractIf< + 'a, + T, + F, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { + vec: &'a mut VecDeque, + /// The index of the item that will be inspected by the next call to `next`. + idx: usize, + /// Elements at and beyond this point will be retained. Must be equal or smaller than `old_len`. + end: usize, + /// The number of items that have been drained (removed) thus far. + del: usize, + /// The original length of `vec` prior to draining. + old_len: usize, + /// The filter test predicate. + pred: F, +} + +impl<'a, T, F, A: Allocator> ExtractIf<'a, T, F, A> { + pub(super) fn new>( + vec: &'a mut VecDeque, + pred: F, + range: R, + ) -> Self { + let old_len = vec.len(); + let Range { start, end } = slice::range(range, ..old_len); + + // Guard against the deque getting leaked (leak amplification) + vec.len = 0; + ExtractIf { vec, idx: start, del: 0, end, old_len, pred } + } + + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + self.vec.allocator() + } +} + +#[unstable(feature = "vec_deque_extract_if", issue = "147750")] +impl Iterator for ExtractIf<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + type Item = T; + + fn next(&mut self) -> Option { + while self.idx < self.end { + let i = self.idx; + // SAFETY: + // We know that `i < self.end` from the if guard and that `self.end <= self.old_len` from + // the validity of `Self`. 
Therefore `i` points to an element within `vec`. + // + // Additionally, the i-th element is valid because each element is visited at most once + // and it is the first time we access vec[i]. + // + // Note: we can't use `vec.get_mut(i).unwrap()` here since the precondition for that + // function is that i < vec.len, but we've set vec's length to zero. + let idx = self.vec.to_physical_idx(i); + let cur = unsafe { &mut *self.vec.ptr().add(idx) }; + let drained = (self.pred)(cur); + // Update the index *after* the predicate is called. If the index + // is updated prior and the predicate panics, the element at this + // index would be leaked. + self.idx += 1; + if drained { + self.del += 1; + // SAFETY: We never touch this element again after returning it. + return Some(unsafe { ptr::read(cur) }); + } else if self.del > 0 { + let hole_slot = self.vec.to_physical_idx(i - self.del); + // SAFETY: `self.del` > 0, so the hole slot must not overlap with current element. + // We use copy for move, and never touch this element again. + unsafe { self.vec.wrap_copy(idx, hole_slot, 1) }; + } + } + None + } + + fn size_hint(&self) -> (usize, Option) { + (0, Some(self.end - self.idx)) + } +} + +#[unstable(feature = "vec_deque_extract_if", issue = "147750")] +impl Drop for ExtractIf<'_, T, F, A> { + fn drop(&mut self) { + if self.del > 0 { + let src = self.vec.to_physical_idx(self.idx); + let dst = self.vec.to_physical_idx(self.idx - self.del); + let len = self.old_len - self.idx; + // SAFETY: Trailing unchecked items must be valid since we never touch them. + unsafe { self.vec.wrap_copy(src, dst, len) }; + } + self.vec.len = self.old_len - self.del; + } +} + +#[unstable(feature = "vec_deque_extract_if", issue = "147750")] +impl fmt::Debug for ExtractIf<'_, T, F, A> +where + T: fmt::Debug, + A: Allocator, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let peek = if self.idx < self.end { + let idx = self.vec.to_physical_idx(self.idx); + // This has to use pointer arithmetic as `self.vec[self.idx]` or + // `self.vec.get_unchecked(self.idx)` wouldn't work since we + // temporarily set the length of `self.vec` to zero. + // + // SAFETY: + // Since `self.idx` is smaller than `self.end` and `self.end` is + // smaller than `self.old_len`, `idx` is valid for indexing the + // buffer. Also, per the invariant of `self.idx`, this element + // has not been inspected/moved out yet. 
+ Some(unsafe { &*self.vec.ptr().add(idx) }) + } else { + None + }; + f.debug_struct("ExtractIf").field("peek", &peek).finish_non_exhaustive() + } +} diff --git a/alloc/src/collections/vec_deque/mod.rs b/alloc/src/collections/vec_deque/mod.rs index ac619a42d356d..52e079d3ae8e6 100644 --- a/alloc/src/collections/vec_deque/mod.rs +++ b/alloc/src/collections/vec_deque/mod.rs @@ -7,6 +7,8 @@ #![stable(feature = "rust1", since = "1.0.0")] +#[cfg(not(no_global_oom_handling))] +use core::clone::TrivialClone; use core::cmp::{self, Ordering}; use core::hash::{Hash, Hasher}; use core::iter::{ByRefSized, repeat_n, repeat_with}; @@ -32,6 +34,11 @@ pub use self::drain::Drain; mod drain; +#[unstable(feature = "vec_deque_extract_if", issue = "147750")] +pub use self::extract_if::ExtractIf; + +mod extract_if; + #[stable(feature = "rust1", since = "1.0.0")] pub use self::iter_mut::IterMut; @@ -47,7 +54,7 @@ pub use self::iter::Iter; mod iter; -use self::spec_extend::SpecExtend; +use self::spec_extend::{SpecExtend, SpecExtendFront}; mod spec_extend; @@ -174,6 +181,21 @@ impl VecDeque { self.len += 1; } + /// Prepends an element to the buffer. + /// + /// # Safety + /// + /// May only be called if `deque.len() < deque.capacity()` + #[inline] + unsafe fn push_front_unchecked(&mut self, element: T) { + self.head = self.wrap_sub(self.head, 1); + // SAFETY: Because of the precondition, it's guaranteed that there is space + // in the logical array before the first element (where self.head is now). + unsafe { self.buffer_write(self.head, element) }; + // This can't overflow because `deque.len() < deque.capacity() <= usize::MAX`. + self.len += 1; + } + /// Moves an element out of the buffer #[inline] unsafe fn buffer_read(&mut self, off: usize) -> T { @@ -227,6 +249,78 @@ impl VecDeque { wrap_index(idx.wrapping_sub(subtrahend).wrapping_add(self.capacity()), self.capacity()) } + /// Get source, destination and count (like the arguments to [`ptr::copy_nonoverlapping`]) + /// for copying `count` values from index `src` to index `dst`. + /// One of the ranges can wrap around the physical buffer, for this reason 2 triples are returned. + /// + /// Use of the word "ranges" specifically refers to `src..src + count` and `dst..dst + count`. + /// + /// # Safety + /// + /// - Ranges must not overlap: `src.abs_diff(dst) >= count`. + /// - Ranges must be in bounds of the logical buffer: `src + count <= self.capacity()` and `dst + count <= self.capacity()`. + /// - `head` must be in bounds: `head < self.capacity()`. + #[cfg(not(no_global_oom_handling))] + unsafe fn nonoverlapping_ranges( + &mut self, + src: usize, + dst: usize, + count: usize, + head: usize, + ) -> [(*const T, *mut T, usize); 2] { + // "`src` and `dst` must be at least as far apart as `count`" + debug_assert!( + src.abs_diff(dst) >= count, + "`src` and `dst` must not overlap. src={src} dst={dst} count={count}", + ); + debug_assert!( + src.max(dst) + count <= self.capacity(), + "ranges must be in bounds. src={src} dst={dst} count={count} cap={}", + self.capacity(), + ); + + let wrapped_src = self.wrap_add(head, src); + let wrapped_dst = self.wrap_add(head, dst); + + let room_after_src = self.capacity() - wrapped_src; + let room_after_dst = self.capacity() - wrapped_dst; + + let src_wraps = room_after_src < count; + let dst_wraps = room_after_dst < count; + + // Wrapping occurs if `capacity` is contained within `wrapped_src..wrapped_src + count` or `wrapped_dst..wrapped_dst + count`. 
+ // Since these two ranges must not overlap as per the safety invariants of this function, only one range can wrap. + debug_assert!( + !(src_wraps && dst_wraps), + "BUG: at most one of src and dst can wrap. src={src} dst={dst} count={count} cap={}", + self.capacity(), + ); + + unsafe { + let ptr = self.ptr(); + let src_ptr = ptr.add(wrapped_src); + let dst_ptr = ptr.add(wrapped_dst); + + if src_wraps { + [ + (src_ptr, dst_ptr, room_after_src), + (ptr, dst_ptr.add(room_after_src), count - room_after_src), + ] + } else if dst_wraps { + [ + (src_ptr, dst_ptr, room_after_dst), + (src_ptr.add(room_after_dst), ptr, count - room_after_dst), + ] + } else { + [ + (src_ptr, dst_ptr, count), + // null pointers are fine as long as the count is 0 + (ptr::null(), ptr::null_mut(), 0), + ] + } + } + } + /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy(&mut self, src: usize, dst: usize, len: usize) { @@ -428,6 +522,35 @@ impl VecDeque { } } + /// Copies all values from `src` to `dst` in reversed order, wrapping around if needed. + /// Assumes capacity is sufficient. + /// Equivalent to calling [`VecDeque::copy_slice`] with a [reversed](https://doc.rust-lang.org/std/primitive.slice.html#method.reverse) slice. + #[inline] + unsafe fn copy_slice_reversed(&mut self, dst: usize, src: &[T]) { + /// # Safety + /// + /// See [`ptr::copy_nonoverlapping`]. + unsafe fn copy_nonoverlapping_reversed(src: *const T, dst: *mut T, count: usize) { + for i in 0..count { + unsafe { ptr::copy_nonoverlapping(src.add(count - 1 - i), dst.add(i), 1) }; + } + } + + debug_assert!(src.len() <= self.capacity()); + let head_room = self.capacity() - dst; + if src.len() <= head_room { + unsafe { + copy_nonoverlapping_reversed(src.as_ptr(), self.ptr().add(dst), src.len()); + } + } else { + let (left, right) = src.split_at(src.len() - head_room); + unsafe { + copy_nonoverlapping_reversed(right.as_ptr(), self.ptr().add(dst), right.len()); + copy_nonoverlapping_reversed(left.as_ptr(), self.ptr(), left.len()); + } + } + } + /// Writes all values from `iter` to `dst`. /// /// # Safety @@ -542,6 +665,96 @@ impl VecDeque { } debug_assert!(self.head < self.capacity() || self.capacity() == 0); } + + /// Creates an iterator which uses a closure to determine if an element in the range should be removed. + /// + /// If the closure returns `true`, the element is removed from the deque and yielded. If the closure + /// returns `false`, or panics, the element remains in the deque and will not be yielded. + /// + /// Only elements that fall in the provided range are considered for extraction, but any elements + /// after the range will still have to be moved if any element has been extracted. + /// + /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating + /// or the iteration short-circuits, then the remaining elements will be retained. + /// Use `extract_if().for_each(drop)` if you do not need the returned iterator, + /// or [`retain_mut`] with a negated predicate if you also do not need to restrict the range. 
+ /// + /// [`retain_mut`]: VecDeque::retain_mut + /// + /// Using this method is equivalent to the following code: + /// + /// ``` + /// #![feature(vec_deque_extract_if)] + /// # use std::collections::VecDeque; + /// # let some_predicate = |x: &mut i32| { *x % 2 == 1 }; + /// # let mut deq: VecDeque<_> = (0..10).collect(); + /// # let mut deq2 = deq.clone(); + /// # let range = 1..5; + /// let mut i = range.start; + /// let end_items = deq.len() - range.end; + /// # let mut extracted = vec![]; + /// + /// while i < deq.len() - end_items { + /// if some_predicate(&mut deq[i]) { + /// let val = deq.remove(i).unwrap(); + /// // your code here + /// # extracted.push(val); + /// } else { + /// i += 1; + /// } + /// } + /// + /// # let extracted2: Vec<_> = deq2.extract_if(range, some_predicate).collect(); + /// # assert_eq!(deq, deq2); + /// # assert_eq!(extracted, extracted2); + /// ``` + /// + /// But `extract_if` is easier to use. `extract_if` is also more efficient, + /// because it can backshift the elements of the array in bulk. + /// + /// The iterator also lets you mutate the value of each element in the + /// closure, regardless of whether you choose to keep or remove it. + /// + /// # Panics + /// + /// If `range` is out of bounds. + /// + /// # Examples + /// + /// Splitting a deque into even and odd values, reusing the original deque: + /// + /// ``` + /// #![feature(vec_deque_extract_if)] + /// use std::collections::VecDeque; + /// + /// let mut numbers = VecDeque::from([1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]); + /// + /// let evens = numbers.extract_if(.., |x| *x % 2 == 0).collect::>(); + /// let odds = numbers; + /// + /// assert_eq!(evens, VecDeque::from([2, 4, 6, 8, 14])); + /// assert_eq!(odds, VecDeque::from([1, 3, 5, 9, 11, 13, 15])); + /// ``` + /// + /// Using the range argument to only process a part of the deque: + /// + /// ``` + /// #![feature(vec_deque_extract_if)] + /// use std::collections::VecDeque; + /// + /// let mut items = VecDeque::from([0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 2, 1, 2]); + /// let ones = items.extract_if(7.., |x| *x == 1).collect::>(); + /// assert_eq!(items, VecDeque::from([0, 0, 0, 0, 0, 0, 0, 2, 2, 2])); + /// assert_eq!(ones.len(), 3); + /// ``` + #[unstable(feature = "vec_deque_extract_if", issue = "147750")] + pub fn extract_if(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, A> + where + F: FnMut(&mut T) -> bool, + R: RangeBounds, + { + ExtractIf::new(self, filter, range) + } } impl VecDeque { @@ -1831,7 +2044,6 @@ impl VecDeque { /// # Examples /// /// ``` - /// #![feature(vec_deque_pop_if)] /// use std::collections::VecDeque; /// /// let mut deque: VecDeque = vec![0, 1, 2, 3, 4].into(); @@ -1841,7 +2053,7 @@ impl VecDeque { /// assert_eq!(deque, [1, 2, 3, 4]); /// assert_eq!(deque.pop_front_if(pred), None); /// ``` - #[unstable(feature = "vec_deque_pop_if", issue = "135889")] + #[stable(feature = "vec_deque_pop_if", since = "CURRENT_RUSTC_VERSION")] pub fn pop_front_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option { let first = self.front_mut()?; if predicate(first) { self.pop_front() } else { None } @@ -1854,7 +2066,6 @@ impl VecDeque { /// # Examples /// /// ``` - /// #![feature(vec_deque_pop_if)] /// use std::collections::VecDeque; /// /// let mut deque: VecDeque = vec![0, 1, 2, 3, 4].into(); @@ -1864,10 +2075,10 @@ impl VecDeque { /// assert_eq!(deque, [0, 1, 2, 3]); /// assert_eq!(deque.pop_back_if(pred), None); /// ``` - #[unstable(feature = "vec_deque_pop_if", issue = "135889")] + #[stable(feature = 
"vec_deque_pop_if", since = "CURRENT_RUSTC_VERSION")] pub fn pop_back_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option { - let first = self.back_mut()?; - if predicate(first) { self.pop_back() } else { None } + let last = self.back_mut()?; + if predicate(last) { self.pop_back() } else { None } } /// Prepends an element to the deque. @@ -1956,6 +2167,73 @@ impl VecDeque { unsafe { self.buffer_write(self.to_physical_idx(len), value) } } + /// Prepends all contents of the iterator to the front of the deque. + /// The order of the contents is preserved. + /// + /// To get behavior like [`append`][VecDeque::append] where elements are moved + /// from the other collection to this one, use `self.prepend(other.drain(..))`. + /// + /// # Examples + /// + /// ``` + /// #![feature(deque_extend_front)] + /// use std::collections::VecDeque; + /// + /// let mut deque = VecDeque::from([4, 5, 6]); + /// deque.prepend([1, 2, 3]); + /// assert_eq!(deque, [1, 2, 3, 4, 5, 6]); + /// ``` + /// + /// Move values between collections like [`append`][VecDeque::append] does but prepend to the front: + /// + /// ``` + /// #![feature(deque_extend_front)] + /// use std::collections::VecDeque; + /// + /// let mut deque1 = VecDeque::from([4, 5, 6]); + /// let mut deque2 = VecDeque::from([1, 2, 3]); + /// deque1.prepend(deque2.drain(..)); + /// assert_eq!(deque1, [1, 2, 3, 4, 5, 6]); + /// assert!(deque2.is_empty()); + /// ``` + #[unstable(feature = "deque_extend_front", issue = "146975")] + #[track_caller] + pub fn prepend>(&mut self, other: I) { + self.extend_front(other.into_iter().rev()) + } + + /// Prepends all contents of the iterator to the front of the deque, + /// as if [`push_front`][VecDeque::push_front] was called repeatedly with + /// the values yielded by the iterator. + /// + /// # Examples + /// + /// ``` + /// #![feature(deque_extend_front)] + /// use std::collections::VecDeque; + /// + /// let mut deque = VecDeque::from([4, 5, 6]); + /// deque.extend_front([3, 2, 1]); + /// assert_eq!(deque, [1, 2, 3, 4, 5, 6]); + /// ``` + /// + /// This behaves like [`push_front`][VecDeque::push_front] was called repeatedly: + /// + /// ``` + /// use std::collections::VecDeque; + /// + /// let mut deque = VecDeque::from([4, 5, 6]); + /// for v in [3, 2, 1] { + /// deque.push_front(v); + /// } + /// assert_eq!(deque, [1, 2, 3, 4, 5, 6]); + /// ``` + #[unstable(feature = "deque_extend_front", issue = "146975")] + #[track_caller] + pub fn extend_front>(&mut self, iter: I) { + >::spec_extend_front(self, iter.into_iter()); + } + #[inline] fn is_contiguous(&self) -> bool { // Do the calculation like this to avoid overflowing if len + head > usize::MAX @@ -2971,6 +3249,222 @@ impl VecDeque { self.truncate(new_len); } } + + /// Clones the elements at the range `src` and appends them to the end. + /// + /// # Panics + /// + /// Panics if the starting index is greater than the end index + /// or if either index is greater than the length of the vector. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(deque_extend_front)] + /// use std::collections::VecDeque; + /// + /// let mut characters = VecDeque::from(['a', 'b', 'c', 'd', 'e']); + /// characters.extend_from_within(2..); + /// assert_eq!(characters, ['a', 'b', 'c', 'd', 'e', 'c', 'd', 'e']); + /// + /// let mut numbers = VecDeque::from([0, 1, 2, 3, 4]); + /// numbers.extend_from_within(..2); + /// assert_eq!(numbers, [0, 1, 2, 3, 4, 0, 1]); + /// + /// let mut strings = VecDeque::from([String::from("hello"), String::from("world"), String::from("!")]); + /// strings.extend_from_within(1..=2); + /// assert_eq!(strings, ["hello", "world", "!", "world", "!"]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "deque_extend_front", issue = "146975")] + pub fn extend_from_within(&mut self, src: R) + where + R: RangeBounds, + { + let range = slice::range(src, ..self.len()); + self.reserve(range.len()); + + // SAFETY: + // - `slice::range` guarantees that the given range is valid for indexing self + // - at least `range.len()` additional space is available + unsafe { + self.spec_extend_from_within(range); + } + } + + /// Clones the elements at the range `src` and prepends them to the front. + /// + /// # Panics + /// + /// Panics if the starting index is greater than the end index + /// or if either index is greater than the length of the vector. + /// + /// # Examples + /// + /// ``` + /// #![feature(deque_extend_front)] + /// use std::collections::VecDeque; + /// + /// let mut characters = VecDeque::from(['a', 'b', 'c', 'd', 'e']); + /// characters.prepend_from_within(2..); + /// assert_eq!(characters, ['c', 'd', 'e', 'a', 'b', 'c', 'd', 'e']); + /// + /// let mut numbers = VecDeque::from([0, 1, 2, 3, 4]); + /// numbers.prepend_from_within(..2); + /// assert_eq!(numbers, [0, 1, 0, 1, 2, 3, 4]); + /// + /// let mut strings = VecDeque::from([String::from("hello"), String::from("world"), String::from("!")]); + /// strings.prepend_from_within(1..=2); + /// assert_eq!(strings, ["world", "!", "hello", "world", "!"]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "deque_extend_front", issue = "146975")] + pub fn prepend_from_within(&mut self, src: R) + where + R: RangeBounds, + { + let range = slice::range(src, ..self.len()); + self.reserve(range.len()); + + // SAFETY: + // - `slice::range` guarantees that the given range is valid for indexing self + // - at least `range.len()` additional space is available + unsafe { + self.spec_prepend_from_within(range); + } + } +} + +/// Associated functions have the following preconditions: +/// +/// - `src` needs to be a valid range: `src.start <= src.end <= self.len()`. +/// - The buffer must have enough spare capacity: `self.capacity() - self.len() >= src.len()`. +#[cfg(not(no_global_oom_handling))] +trait SpecExtendFromWithin { + unsafe fn spec_extend_from_within(&mut self, src: Range); + + unsafe fn spec_prepend_from_within(&mut self, src: Range); +} + +#[cfg(not(no_global_oom_handling))] +impl SpecExtendFromWithin for VecDeque { + default unsafe fn spec_extend_from_within(&mut self, src: Range) { + let dst = self.len(); + let count = src.end - src.start; + let src = src.start; + + unsafe { + // SAFETY: + // - Ranges do not overlap: src entirely spans initialized values, dst entirely spans uninitialized values. + // - Ranges are in bounds: guaranteed by the caller. 
+ let ranges = self.nonoverlapping_ranges(src, dst, count, self.head); + + // `len` is updated after every clone to prevent leaking and + // leave the deque in the right state when a clone implementation panics + + for (src, dst, count) in ranges { + for offset in 0..count { + dst.add(offset).write((*src.add(offset)).clone()); + self.len += 1; + } + } + } + } + + default unsafe fn spec_prepend_from_within(&mut self, src: Range) { + let dst = 0; + let count = src.end - src.start; + let src = src.start + count; + + let new_head = self.wrap_sub(self.head, count); + let cap = self.capacity(); + + unsafe { + // SAFETY: + // - Ranges do not overlap: src entirely spans initialized values, dst entirely spans uninitialized values. + // - Ranges are in bounds: guaranteed by the caller. + let ranges = self.nonoverlapping_ranges(src, dst, count, new_head); + + // Cloning is done in reverse because we prepend to the front of the deque, + // we can't get holes in the *logical* buffer. + // `head` and `len` are updated after every clone to prevent leaking and + // leave the deque in the right state when a clone implementation panics + + // Clone the first range + let (src, dst, count) = ranges[1]; + for offset in (0..count).rev() { + dst.add(offset).write((*src.add(offset)).clone()); + self.head -= 1; + self.len += 1; + } + + // Clone the second range + let (src, dst, count) = ranges[0]; + let mut iter = (0..count).rev(); + if let Some(offset) = iter.next() { + dst.add(offset).write((*src.add(offset)).clone()); + // After the first clone of the second range, wrap `head` around + if self.head == 0 { + self.head = cap; + } + self.head -= 1; + self.len += 1; + + // Continue like normal + for offset in iter { + dst.add(offset).write((*src.add(offset)).clone()); + self.head -= 1; + self.len += 1; + } + } + } + } +} + +#[cfg(not(no_global_oom_handling))] +impl SpecExtendFromWithin for VecDeque { + unsafe fn spec_extend_from_within(&mut self, src: Range) { + let dst = self.len(); + let count = src.end - src.start; + let src = src.start; + + unsafe { + // SAFETY: + // - Ranges do not overlap: src entirely spans initialized values, dst entirely spans uninitialized values. + // - Ranges are in bounds: guaranteed by the caller. + let ranges = self.nonoverlapping_ranges(src, dst, count, self.head); + for (src, dst, count) in ranges { + ptr::copy_nonoverlapping(src, dst, count); + } + } + + // SAFETY: + // - The elements were just initialized by `copy_nonoverlapping` + self.len += count; + } + + unsafe fn spec_prepend_from_within(&mut self, src: Range) { + let dst = 0; + let count = src.end - src.start; + let src = src.start + count; + + let new_head = self.wrap_sub(self.head, count); + + unsafe { + // SAFETY: + // - Ranges do not overlap: src entirely spans initialized values, dst entirely spans uninitialized values. + // - Ranges are in bounds: guaranteed by the caller. + let ranges = self.nonoverlapping_ranges(src, dst, count, new_head); + for (src, dst, count) in ranges { + ptr::copy_nonoverlapping(src, dst, count); + } + } + + // SAFETY: + // - The elements were just initialized by `copy_nonoverlapping` + self.head = new_head; + self.len += count; + } } /// Returns the index in the underlying buffer for a given logical element index. 
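Before moving on to the spec_extend changes, here is a short usage sketch of the two `*_from_within` methods added above, run on a deque whose head is no longer at physical index 0 (the situation the wrap-aware copy helpers have to handle); it assumes the unstable `deque_extend_front` gate from this patch:

```rust
#![feature(deque_extend_front)]
use std::collections::VecDeque;

let mut deque: VecDeque<u8> = VecDeque::with_capacity(8);
deque.extend([1, 2, 3, 4]);
assert_eq!(deque.pop_front(), Some(1));
assert_eq!(deque.pop_front(), Some(2));
deque.extend([5, 6]);            // logical contents: [3, 4, 5, 6], head != 0

deque.extend_from_within(1..3);  // appends clones of [4, 5]
assert_eq!(deque, [3, 4, 5, 6, 4, 5]);

deque.prepend_from_within(..2);  // prepends clones of [3, 4], order preserved
assert_eq!(deque, [3, 4, 3, 4, 5, 6, 4, 5]);
```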
diff --git a/alloc/src/collections/vec_deque/spec_extend.rs b/alloc/src/collections/vec_deque/spec_extend.rs index 6c2199135e08a..f73ba795cbea4 100644 --- a/alloc/src/collections/vec_deque/spec_extend.rs +++ b/alloc/src/collections/vec_deque/spec_extend.rs @@ -1,4 +1,4 @@ -use core::iter::TrustedLen; +use core::iter::{Copied, Rev, TrustedLen}; use core::slice; use super::VecDeque; @@ -114,3 +114,113 @@ where } } } + +// Specialization trait used for VecDeque::extend_front +pub(super) trait SpecExtendFront { + #[track_caller] + fn spec_extend_front(&mut self, iter: I); +} + +impl SpecExtendFront for VecDeque +where + I: Iterator, +{ + #[track_caller] + default fn spec_extend_front(&mut self, mut iter: I) { + // This function should be the moral equivalent of: + // + // for item in iter { + // self.push_front(item); + // } + + while let Some(element) = iter.next() { + let (lower, _) = iter.size_hint(); + self.reserve(lower.saturating_add(1)); + + // SAFETY: We just reserved space for at least one element. + unsafe { self.push_front_unchecked(element) }; + + // Inner loop to avoid repeatedly calling `reserve`. + while self.len < self.capacity() { + let Some(element) = iter.next() else { + return; + }; + // SAFETY: The loop condition guarantees that `self.len() < self.capacity()`. + unsafe { self.push_front_unchecked(element) }; + } + } + } +} + +#[cfg(not(test))] +impl SpecExtendFront> for VecDeque { + #[track_caller] + fn spec_extend_front(&mut self, mut iterator: vec::IntoIter) { + let slice = iterator.as_slice(); + // SAFETY: elements in the slice are forgotten after this call + unsafe { prepend_reversed(self, slice) }; + iterator.forget_remaining_elements(); + } +} + +#[cfg(not(test))] +impl SpecExtendFront>> for VecDeque { + #[track_caller] + fn spec_extend_front(&mut self, iterator: Rev>) { + let mut iterator = iterator.into_inner(); + let slice = iterator.as_slice(); + // SAFETY: elements in the slice are forgotten after this call + unsafe { prepend(self, slice) }; + iterator.forget_remaining_elements(); + } +} + +impl<'a, T, A: Allocator> SpecExtendFront>> for VecDeque +where + Copied>: Iterator, +{ + #[track_caller] + fn spec_extend_front(&mut self, iter: Copied>) { + let slice = iter.into_inner().as_slice(); + // SAFETY: T is Copy because Copied> is Iterator + unsafe { prepend_reversed(self, slice) }; + } +} + +impl<'a, T, A: Allocator> SpecExtendFront>>> for VecDeque +where + Rev>>: Iterator, +{ + #[track_caller] + fn spec_extend_front(&mut self, iter: Rev>>) { + let slice = iter.into_inner().into_inner().as_slice(); + // SAFETY: T is Copy because Rev>> is Iterator + unsafe { prepend(self, slice) }; + } +} + +/// # Safety +/// +/// Elements of `slice` will be copied into the deque, make sure to forget the items if `T` is not `Copy`. +unsafe fn prepend(deque: &mut VecDeque, slice: &[T]) { + deque.reserve(slice.len()); + + unsafe { + deque.head = deque.wrap_sub(deque.head, slice.len()); + deque.copy_slice(deque.head, slice); + deque.len += slice.len(); + } +} + +/// # Safety +/// +/// Elements of `slice` will be copied into the deque, make sure to forget the items if `T` is not `Copy`. 
+unsafe fn prepend_reversed(deque: &mut VecDeque, slice: &[T]) { + deque.reserve(slice.len()); + + unsafe { + deque.head = deque.wrap_sub(deque.head, slice.len()); + deque.copy_slice_reversed(deque.head, slice); + deque.len += slice.len(); + } +} diff --git a/alloc/src/collections/vec_deque/tests.rs b/alloc/src/collections/vec_deque/tests.rs index ad76cb14deb86..dc50cc34d9dac 100644 --- a/alloc/src/collections/vec_deque/tests.rs +++ b/alloc/src/collections/vec_deque/tests.rs @@ -1,6 +1,8 @@ -use core::iter::TrustedLen; +use std::iter::TrustedLen; +use std::panic::{AssertUnwindSafe, catch_unwind}; use super::*; +use crate::testing::crash_test::{CrashTestDummy, Panic}; use crate::testing::macros::struct_with_counted_drop; #[bench] @@ -367,7 +369,7 @@ fn test_rotate_right_panic() { #[test] fn test_binary_search() { - // If the givin VecDeque is not sorted, the returned result is unspecified and meaningless, + // If the given VecDeque is not sorted, the returned result is unspecified and meaningless, // as this method performs a binary search. let tester: VecDeque<_> = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); @@ -391,7 +393,7 @@ fn test_binary_search() { #[test] fn test_binary_search_by() { - // If the givin VecDeque is not sorted, the returned result is unspecified and meaningless, + // If the given VecDeque is not sorted, the returned result is unspecified and meaningless, // as this method performs a binary search. let tester: VecDeque<_> = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); @@ -406,7 +408,7 @@ fn test_binary_search_by() { #[test] fn test_binary_search_key() { - // If the givin VecDeque is not sorted, the returned result is unspecified and meaningless, + // If the given VecDeque is not sorted, the returned result is unspecified and meaningless, // as this method performs a binary search. 
let tester: VecDeque<_> = [ @@ -1161,3 +1163,271 @@ fn issue_80303() { assert_eq!(vda, vdb); assert_eq!(hash_code(vda), hash_code(vdb)); } + +#[test] +fn extract_if_test() { + let mut m: VecDeque = VecDeque::from([1, 2, 3, 4, 5, 6]); + let deleted = m.extract_if(.., |v| *v < 4).collect::>(); + + assert_eq!(deleted, &[1, 2, 3]); + assert_eq!(m, &[4, 5, 6]); +} + +#[test] +fn drain_to_empty_test() { + let mut m: VecDeque = VecDeque::from([1, 2, 3, 4, 5, 6]); + let deleted = m.extract_if(.., |_| true).collect::>(); + + assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]); + assert_eq!(m, &[]); +} + +#[test] +fn extract_if_empty() { + let mut list: VecDeque = VecDeque::new(); + + { + let mut iter = list.extract_if(.., |_| true); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + } + + assert_eq!(list.len(), 0); + assert_eq!(list, vec![]); +} + +#[test] +fn extract_if_zst() { + let mut list: VecDeque<_> = [(), (), (), (), ()].into_iter().collect(); + let initial_len = list.len(); + let mut count = 0; + + { + let mut iter = list.extract_if(.., |_| true); + assert_eq!(iter.size_hint(), (0, Some(initial_len))); + while let Some(_) = iter.next() { + count += 1; + assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); + } + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + } + + assert_eq!(count, initial_len); + assert_eq!(list.len(), 0); + assert_eq!(list, vec![]); +} + +#[test] +fn extract_if_false() { + let mut list: VecDeque<_> = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); + + let initial_len = list.len(); + let mut count = 0; + + { + let mut iter = list.extract_if(.., |_| false); + assert_eq!(iter.size_hint(), (0, Some(initial_len))); + for _ in iter.by_ref() { + count += 1; + } + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + } + + assert_eq!(count, 0); + assert_eq!(list.len(), initial_len); + assert_eq!(list, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); +} + +#[test] +fn extract_if_true() { + let mut list: VecDeque<_> = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect(); + + let initial_len = list.len(); + let mut count = 0; + + { + let mut iter = list.extract_if(.., |_| true); + assert_eq!(iter.size_hint(), (0, Some(initial_len))); + while let Some(_) = iter.next() { + count += 1; + assert_eq!(iter.size_hint(), (0, Some(initial_len - count))); + } + assert_eq!(iter.size_hint(), (0, Some(0))); + assert_eq!(iter.next(), None); + assert_eq!(iter.size_hint(), (0, Some(0))); + } + + assert_eq!(count, initial_len); + assert_eq!(list.len(), 0); + assert_eq!(list, vec![]); +} + +#[test] +fn extract_if_non_contiguous() { + let mut list = + [1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39] + .into_iter() + .collect::>(); + list.rotate_left(3); + + assert!(!list.is_contiguous()); + assert_eq!( + list, + [6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, 1, 2, 4] + ); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![6, 18, 20, 22, 24, 26, 34, 36, 2, 4]); + + assert_eq!(list.len(), 14); + assert_eq!(list, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39, 1]); +} + +#[test] +fn extract_if_complex() { + { + // [+xxx++++++xxxxx++++x+x++] + let mut 
list = [ + 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, + 39, + ] + .into_iter() + .collect::>(); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); + + assert_eq!(list.len(), 14); + assert_eq!(list, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); + } + + { + // [xxx++++++xxxxx++++x+x++] + let mut list = + [2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39] + .into_iter() + .collect::>(); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); + + assert_eq!(list.len(), 13); + assert_eq!(list, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]); + } + + { + // [xxx++++++xxxxx++++x+x] + let mut list = + [2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36] + .into_iter() + .collect::>(); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]); + + assert_eq!(list.len(), 11); + assert_eq!(list, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]); + } + + { + // [xxxxxxxxxx+++++++++++] + let mut list = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19] + .into_iter() + .collect::>(); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); + + assert_eq!(list.len(), 10); + assert_eq!(list, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); + } + + { + // [+++++++++++xxxxxxxxxx] + let mut list = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20] + .into_iter() + .collect::>(); + + let removed = list.extract_if(.., |x| *x % 2 == 0).collect::>(); + assert_eq!(removed.len(), 10); + assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]); + + assert_eq!(list.len(), 10); + assert_eq!(list, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]); + } +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn extract_if_drop_panic_leak() { + let d0 = CrashTestDummy::new(0); + let d1 = CrashTestDummy::new(1); + let d2 = CrashTestDummy::new(2); + let d3 = CrashTestDummy::new(3); + let d4 = CrashTestDummy::new(4); + let d5 = CrashTestDummy::new(5); + let d6 = CrashTestDummy::new(6); + let d7 = CrashTestDummy::new(7); + let mut q = VecDeque::new(); + q.push_back(d3.spawn(Panic::Never)); + q.push_back(d4.spawn(Panic::Never)); + q.push_back(d5.spawn(Panic::Never)); + q.push_back(d6.spawn(Panic::Never)); + q.push_back(d7.spawn(Panic::Never)); + q.push_front(d2.spawn(Panic::Never)); + q.push_front(d1.spawn(Panic::InDrop)); + q.push_front(d0.spawn(Panic::Never)); + + catch_unwind(AssertUnwindSafe(|| q.extract_if(.., |_| true).for_each(drop))).unwrap_err(); + + assert_eq!(d0.dropped(), 1); + assert_eq!(d1.dropped(), 1); + assert_eq!(d2.dropped(), 0); + assert_eq!(d3.dropped(), 0); + assert_eq!(d4.dropped(), 0); + assert_eq!(d5.dropped(), 0); + assert_eq!(d6.dropped(), 0); + assert_eq!(d7.dropped(), 0); + drop(q); + assert_eq!(d2.dropped(), 1); + assert_eq!(d3.dropped(), 1); + assert_eq!(d4.dropped(), 1); + assert_eq!(d5.dropped(), 1); + assert_eq!(d6.dropped(), 1); + assert_eq!(d7.dropped(), 1); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn 
extract_if_pred_panic_leak() { + struct_with_counted_drop!(D(u32), DROPS); + + let mut q = VecDeque::new(); + q.push_back(D(3)); + q.push_back(D(4)); + q.push_back(D(5)); + q.push_back(D(6)); + q.push_back(D(7)); + q.push_front(D(2)); + q.push_front(D(1)); + q.push_front(D(0)); + + _ = catch_unwind(AssertUnwindSafe(|| { + q.extract_if(.., |item| if item.0 >= 2 { panic!() } else { true }).for_each(drop) + })); + + assert_eq!(DROPS.get(), 2); // 0 and 1 + assert_eq!(q.len(), 6); +} diff --git a/alloc/src/fmt.rs b/alloc/src/fmt.rs index 82eaf7d87244d..4d6fe220a09ad 100644 --- a/alloc/src/fmt.rs +++ b/alloc/src/fmt.rs @@ -602,7 +602,7 @@ pub use core::fmt::{DebugAsHex, FormattingOptions, Sign}; pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{Formatter, Result, Write}; -#[unstable(feature = "debug_closure_helpers", issue = "117729")] +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] pub use core::fmt::{FromFn, from_fn}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{LowerExp, UpperExp}; diff --git a/alloc/src/lib.rs b/alloc/src/lib.rs index 87ad5b0ce30e6..3f391fe2c1de8 100644 --- a/alloc/src/lib.rs +++ b/alloc/src/lib.rs @@ -85,6 +85,7 @@ // // Library features: // tidy-alphabetical-start +#![cfg_attr(not(no_global_oom_handling), feature(string_replace_in_place))] #![feature(alloc_layout_extra)] #![feature(allocator_api)] #![feature(array_into_iter_constructors)] @@ -98,13 +99,13 @@ #![feature(cast_maybe_uninit)] #![feature(cell_get_cloned)] #![feature(char_internals)] -#![feature(char_max_len)] #![feature(clone_to_uninit)] #![feature(coerce_unsized)] #![feature(const_convert)] #![feature(const_default)] #![feature(const_eval_select)] #![feature(const_heap)] +#![feature(copied_into_inner)] #![feature(core_intrinsics)] #![feature(deprecated_suggestion)] #![feature(deref_pure_trait)] @@ -114,6 +115,7 @@ #![feature(exact_size_is_empty)] #![feature(extend_one)] #![feature(extend_one_unchecked)] +#![feature(fmt_arguments_from_str)] #![feature(fmt_internals)] #![feature(fn_traits)] #![feature(formatting_options)] @@ -133,6 +135,7 @@ #![feature(ptr_alignment_type)] #![feature(ptr_internals)] #![feature(ptr_metadata)] +#![feature(rev_into_inner)] #![feature(set_ptr_value)] #![feature(sized_type_properties)] #![feature(slice_from_ptr_range)] @@ -143,10 +146,14 @@ #![feature(std_internals)] #![feature(str_internals)] #![feature(temporary_niche_types)] +#![feature(transmutability)] +#![feature(trivial_clone)] #![feature(trusted_fused)] #![feature(trusted_len)] #![feature(trusted_random_access)] +#![feature(try_blocks)] #![feature(try_trait_v2)] +#![feature(try_trait_v2_residual)] #![feature(try_with_capacity)] #![feature(tuple_trait)] #![feature(ub_checks)] diff --git a/alloc/src/raw_vec/mod.rs b/alloc/src/raw_vec/mod.rs index bc9692f5b6c2f..236e33e2f450e 100644 --- a/alloc/src/raw_vec/mod.rs +++ b/alloc/src/raw_vec/mod.rs @@ -668,8 +668,7 @@ impl RawVecInner { /// - `elem_layout` must be valid for `self`, i.e. 
it must be the same `elem_layout` used to /// initially construct `self` /// - `elem_layout`'s size must be a multiple of its alignment - /// - The sum of `len` and `additional` must be greater than or equal to - /// `self.capacity(elem_layout.size())` + /// - The sum of `len` and `additional` must be greater than the current capacity unsafe fn grow_amortized( &mut self, len: usize, @@ -693,16 +692,12 @@ impl RawVecInner { let cap = cmp::max(self.cap.as_inner() * 2, required_cap); let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap); - let new_layout = layout_array(cap, elem_layout)?; - // SAFETY: - // - For the `current_memory` call: Precondition passed to caller - // - For the `finish_grow` call: Precondition passed to caller - // + `current_memory` does the right thing - let ptr = - unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? }; + // - cap >= len + additional + // - other preconditions passed to caller + let ptr = unsafe { self.finish_grow(cap, elem_layout)? }; - // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items + // SAFETY: `finish_grow` would have failed if `cap > isize::MAX` unsafe { self.set_ptr_and_cap(ptr, cap) }; Ok(()) } @@ -711,8 +706,7 @@ impl RawVecInner { /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to /// initially construct `self` /// - `elem_layout`'s size must be a multiple of its alignment - /// - The sum of `len` and `additional` must be greater than or equal to - /// `self.capacity(elem_layout.size())` + /// - The sum of `len` and `additional` must be greater than the current capacity unsafe fn grow_exact( &mut self, len: usize, @@ -726,21 +720,44 @@ impl RawVecInner { } let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; - let new_layout = layout_array(cap, elem_layout)?; - // SAFETY: - // - For the `current_memory` call: Precondition passed to caller - // - For the `finish_grow` call: Precondition passed to caller - // + `current_memory` does the right thing - let ptr = - unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? }; - // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items - unsafe { - self.set_ptr_and_cap(ptr, cap); - } + // SAFETY: preconditions passed to caller + let ptr = unsafe { self.finish_grow(cap, elem_layout)? }; + + // SAFETY: `finish_grow` would have failed if `cap > isize::MAX` + unsafe { self.set_ptr_and_cap(ptr, cap) }; Ok(()) } + /// # Safety + /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to + /// initially construct `self` + /// - `elem_layout`'s size must be a multiple of its alignment + /// - `cap` must be greater than the current capacity + // not marked inline(never) since we want optimizers to be able to observe the specifics of this + // function, see tests/codegen-llvm/vec-reserve-extend.rs. 
+ #[cold] + unsafe fn finish_grow( + &self, + cap: usize, + elem_layout: Layout, + ) -> Result, TryReserveError> { + let new_layout = layout_array(cap, elem_layout)?; + + let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } { + debug_assert_eq!(old_layout.align(), new_layout.align()); + unsafe { + // The allocator checks for alignment equality + hint::assert_unchecked(old_layout.align() == new_layout.align()); + self.alloc.grow(ptr, old_layout, new_layout) + } + } else { + self.alloc.allocate(new_layout) + }; + + memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) + } + /// # Safety /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to /// initially construct `self` @@ -820,38 +837,6 @@ impl RawVecInner { } } -/// # Safety -/// If `current_memory` matches `Some((ptr, old_layout))`: -/// - `ptr` must denote a block of memory *currently allocated* via `alloc` -/// - `old_layout` must *fit* that block of memory -/// - `new_layout` must have the same alignment as `old_layout` -/// - `new_layout.size()` must be greater than or equal to `old_layout.size()` -/// If `current_memory` is `None`, this function is safe. -// not marked inline(never) since we want optimizers to be able to observe the specifics of this -// function, see tests/codegen-llvm/vec-reserve-extend.rs. -#[cold] -unsafe fn finish_grow( - new_layout: Layout, - current_memory: Option<(NonNull, Layout)>, - alloc: &mut A, -) -> Result, TryReserveError> -where - A: Allocator, -{ - let memory = if let Some((ptr, old_layout)) = current_memory { - debug_assert_eq!(old_layout.align(), new_layout.align()); - unsafe { - // The allocator checks for alignment equality - hint::assert_unchecked(old_layout.align() == new_layout.align()); - alloc.grow(ptr, old_layout, new_layout) - } - } else { - alloc.allocate(new_layout) - }; - - memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) -} - // Central function for reserve error handling. #[cfg(not(no_global_oom_handling))] #[cold] diff --git a/alloc/src/rc.rs b/alloc/src/rc.rs index 2b62b92d43886..0ab019a68ea05 100644 --- a/alloc/src/rc.rs +++ b/alloc/src/rc.rs @@ -243,9 +243,9 @@ use core::any::Any; use core::cell::{Cell, CloneFromCell}; -#[cfg(not(no_global_oom_handling))] -use core::clone::CloneToUninit; use core::clone::UseCloned; +#[cfg(not(no_global_oom_handling))] +use core::clone::{CloneToUninit, TrivialClone}; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; use core::intrinsics::abort; @@ -255,6 +255,8 @@ use core::marker::{PhantomData, Unsize}; use core::mem::{self, ManuallyDrop, align_of_val_raw}; use core::num::NonZeroUsize; use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver}; +#[cfg(not(no_global_oom_handling))] +use core::ops::{Residual, Try}; use core::panic::{RefUnwindSafe, UnwindSafe}; #[cfg(not(no_global_oom_handling))] use core::pin::Pin; @@ -529,7 +531,7 @@ impl Rc { /// /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed() -> Rc> { unsafe { @@ -639,6 +641,93 @@ impl Rc { pub fn pin(value: T) -> Pin> { unsafe { Pin::new_unchecked(Rc::new(value)) } } + + /// Maps the value in an `Rc`, reusing the allocation if possible. 
+ /// + /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in + /// an `Rc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// use std::rc::Rc; + /// + /// let r = Rc::new(7); + /// let new = Rc::map(r, |i| i + 7); + /// assert_eq!(*new, 14); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn map(this: Self, f: impl FnOnce(&T) -> U) -> Rc { + if size_of::() == size_of::() + && align_of::() == align_of::() + && Rc::is_unique(&this) + { + unsafe { + let ptr = Rc::into_raw(this); + let value = ptr.read(); + let mut allocation = Rc::from_raw(ptr.cast::>()); + + Rc::get_mut_unchecked(&mut allocation).write(f(&value)); + allocation.assume_init() + } + } else { + Rc::new(f(&*this)) + } + } + + /// Attempts to map the value in an `Rc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the + /// result is returned, also in an `Rc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// use std::rc::Rc; + /// + /// let b = Rc::new(7); + /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap(); + /// assert_eq!(*new, 7); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn try_map( + this: Self, + f: impl FnOnce(&T) -> R, + ) -> >>::TryType + where + R: Try, + R::Residual: Residual>, + { + if size_of::() == size_of::() + && align_of::() == align_of::() + && Rc::is_unique(&this) + { + unsafe { + let ptr = Rc::into_raw(this); + let value = ptr.read(); + let mut allocation = Rc::from_raw(ptr.cast::>()); + + Rc::get_mut_unchecked(&mut allocation).write(f(&value)?); + try { allocation.assume_init() } + } + } else { + try { Rc::new(f(&*this)?) } + } + } } impl Rc { @@ -1057,7 +1146,7 @@ impl Rc<[T]> { /// /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { unsafe { @@ -1077,7 +1166,7 @@ impl Rc<[T]> { /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. - #[unstable(feature = "slice_as_array", issue = "133508")] + #[unstable(feature = "alloc_slice_into_array", issue = "148082")] #[inline] #[must_use] pub fn into_array(self) -> Option> { @@ -2135,7 +2224,8 @@ impl Rc<[T]> { /// Copy elements from slice into newly allocated `Rc<[T]>` /// - /// Unsafe because the caller must either take ownership or bind `T: Copy` + /// Unsafe because the caller must either take ownership, bind `T: Copy` or + /// bind `T: TrivialClone`. 
#[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { unsafe { @@ -2225,9 +2315,11 @@ impl RcFromSlice for Rc<[T]> { } #[cfg(not(no_global_oom_handling))] -impl RcFromSlice for Rc<[T]> { +impl RcFromSlice for Rc<[T]> { #[inline] fn from_slice(v: &[T]) -> Self { + // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent + // to the above. unsafe { Rc::copy_from_slice(v) } } } @@ -3991,6 +4083,128 @@ impl UniqueRc { pub fn new(value: T) -> Self { Self::new_in(value, Global) } + + /// Maps the value in a `UniqueRc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `UniqueRc`, and the result is returned, + /// also in a `UniqueRc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// #![feature(unique_rc_arc)] + /// + /// use std::rc::UniqueRc; + /// + /// let r = UniqueRc::new(7); + /// let new = UniqueRc::map(r, |i| i + 7); + /// assert_eq!(*new, 14); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn map(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc { + if size_of::() == size_of::() + && align_of::() == align_of::() + && UniqueRc::weak_count(&this) == 0 + { + unsafe { + let ptr = UniqueRc::into_raw(this); + let value = ptr.read(); + let mut allocation = UniqueRc::from_raw(ptr.cast::>()); + + allocation.write(f(value)); + allocation.assume_init() + } + } else { + UniqueRc::new(f(UniqueRc::unwrap(this))) + } + } + + /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `UniqueRc`, and if the operation succeeds, + /// the result is returned, also in a `UniqueRc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// #![feature(unique_rc_arc)] + /// + /// use std::rc::UniqueRc; + /// + /// let b = UniqueRc::new(7); + /// let new = UniqueRc::try_map(b, u32::try_from).unwrap(); + /// assert_eq!(*new, 7); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn try_map( + this: Self, + f: impl FnOnce(T) -> R, + ) -> >>::TryType + where + R: Try, + R::Residual: Residual>, + { + if size_of::() == size_of::() + && align_of::() == align_of::() + && UniqueRc::weak_count(&this) == 0 + { + unsafe { + let ptr = UniqueRc::into_raw(this); + let value = ptr.read(); + let mut allocation = UniqueRc::from_raw(ptr.cast::>()); + + allocation.write(f(value)?); + try { allocation.assume_init() } + } + } else { + try { UniqueRc::new(f(UniqueRc::unwrap(this))?) } + } + } + + #[cfg(not(no_global_oom_handling))] + fn unwrap(this: Self) -> T { + let this = ManuallyDrop::new(this); + let val: T = unsafe { ptr::read(&**this) }; + + let _weak = Weak { ptr: this.ptr, alloc: Global }; + + val + } +} + +impl UniqueRc { + #[cfg(not(no_global_oom_handling))] + unsafe fn from_raw(ptr: *const T) -> Self { + let offset = unsafe { data_offset(ptr) }; + + // Reverse the offset to find the original RcInner. 
+ let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner }; + + Self { + ptr: unsafe { NonNull::new_unchecked(rc_ptr) }, + _marker: PhantomData, + _marker2: PhantomData, + alloc: Global, + } + } + + #[cfg(not(no_global_oom_handling))] + fn into_raw(this: Self) -> *const T { + let this = ManuallyDrop::new(this); + Self::as_ptr(&*this) + } } impl UniqueRc { @@ -4041,6 +4255,40 @@ impl UniqueRc { Rc::from_inner_in(this.ptr, alloc) } } + + #[cfg(not(no_global_oom_handling))] + fn weak_count(this: &Self) -> usize { + this.inner().weak() - 1 + } + + #[cfg(not(no_global_oom_handling))] + fn inner(&self) -> &RcInner { + // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid. + unsafe { self.ptr.as_ref() } + } + + #[cfg(not(no_global_oom_handling))] + fn as_ptr(this: &Self) -> *const T { + let ptr: *mut RcInner = NonNull::as_ptr(this.ptr); + + // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because + // this is required to retain raw/mut provenance such that e.g. `get_mut` can + // write through the pointer after the Rc is recovered through `from_raw`. + unsafe { &raw mut (*ptr).value } + } + + #[inline] + #[cfg(not(no_global_oom_handling))] + fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { + let this = mem::ManuallyDrop::new(this); + (this.ptr, unsafe { ptr::read(&this.alloc) }) + } + + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { + Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc } + } } impl UniqueRc { @@ -4059,6 +4307,14 @@ impl UniqueRc { } } +#[cfg(not(no_global_oom_handling))] +impl UniqueRc, A> { + unsafe fn assume_init(self) -> UniqueRc { + let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self); + unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) } + } +} + #[unstable(feature = "unique_rc_arc", issue = "112566")] impl Deref for UniqueRc { type Target = T; @@ -4160,3 +4416,55 @@ impl Drop for UniqueRcUninit { } } } + +#[unstable(feature = "allocator_api", issue = "32838")] +unsafe impl Allocator for Rc { + #[inline] + fn allocate(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate(layout) + } + + #[inline] + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate_zeroed(layout) + } + + #[inline] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).deallocate(ptr, layout) } + } + + #[inline] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).shrink(ptr, old_layout, new_layout) } + } +} diff --git a/alloc/src/slice.rs b/alloc/src/slice.rs index ce9f967cc387a..a83b51ccb60c3 100644 --- a/alloc/src/slice.rs +++ b/alloc/src/slice.rs @@ -11,6 +11,8 @@ use core::borrow::{Borrow, BorrowMut}; #[cfg(not(no_global_oom_handling))] +use core::clone::TrivialClone; 
+#[cfg(not(no_global_oom_handling))] use core::cmp::Ordering::{self, Less}; #[cfg(not(no_global_oom_handling))] use core::mem::MaybeUninit; @@ -439,7 +441,7 @@ impl [T] { } } - impl ConvertVec for T { + impl ConvertVec for T { #[inline] fn to_vec(s: &[Self], alloc: A) -> Vec { let mut v = Vec::with_capacity_in(s.len(), alloc); @@ -822,7 +824,7 @@ impl SpecCloneIntoVec for [T] { } #[cfg(not(no_global_oom_handling))] -impl SpecCloneIntoVec for [T] { +impl SpecCloneIntoVec for [T] { fn clone_into(&self, target: &mut Vec) { target.clear(); target.extend_from_slice(self); diff --git a/alloc/src/string.rs b/alloc/src/string.rs index ae30cabf5af5b..4a2689e01ff17 100644 --- a/alloc/src/string.rs +++ b/alloc/src/string.rs @@ -265,18 +265,11 @@ use crate::vec::{self, Vec}; /// You can look at these with the [`as_ptr`], [`len`], and [`capacity`] /// methods: /// -// FIXME Update this when vec_into_raw_parts is stabilized /// ``` -/// use std::mem; -/// /// let story = String::from("Once upon a time..."); /// -/// // Prevent automatically dropping the String's data -/// let mut story = mem::ManuallyDrop::new(story); -/// -/// let ptr = story.as_mut_ptr(); -/// let len = story.len(); -/// let capacity = story.capacity(); +/// // Deconstruct the String into parts. +/// let (ptr, len, capacity) = story.into_raw_parts(); /// /// // story has nineteen bytes /// assert_eq!(19, len); @@ -932,7 +925,6 @@ impl String { /// # Examples /// /// ``` - /// #![feature(vec_into_raw_parts)] /// let s = String::from("hello"); /// /// let (ptr, len, cap) = s.into_raw_parts(); @@ -941,7 +933,7 @@ impl String { /// assert_eq!(rebuilt, "hello"); /// ``` #[must_use = "losing the pointer will leak memory"] - #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + #[stable(feature = "vec_into_raw_parts", since = "CURRENT_RUSTC_VERSION")] pub fn into_raw_parts(self) -> (*mut u8, usize, usize) { self.vec.into_raw_parts() } @@ -970,19 +962,12 @@ impl String { /// /// # Examples /// - // FIXME Update this when vec_into_raw_parts is stabilized /// ``` - /// use std::mem; - /// /// unsafe { /// let s = String::from("hello"); /// - /// // Prevent automatically dropping the String's data - /// let mut s = mem::ManuallyDrop::new(s); - /// - /// let ptr = s.as_mut_ptr(); - /// let len = s.len(); - /// let capacity = s.capacity(); + /// // Deconstruct the String into parts. + /// let (ptr, len, capacity) = s.into_raw_parts(); /// /// let s = String::from_raw_parts(ptr, len, capacity); /// @@ -2090,6 +2075,67 @@ impl String { unsafe { self.as_mut_vec() }.splice((start, end), replace_with.bytes()); } + /// Replaces the leftmost occurrence of a pattern with another string, in-place. + /// + /// This method can be preferred over [`string = string.replacen(..., 1);`][replacen], + /// as it can use the `String`'s existing capacity to prevent a reallocation if + /// sufficient space is available. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(string_replace_in_place)] + /// + /// let mut s = String::from("Test Results: ❌❌❌"); + /// + /// // Replace the leftmost ❌ with a ✅ + /// s.replace_first('❌', "✅"); + /// assert_eq!(s, "Test Results: ✅❌❌"); + /// ``` + /// + /// [replacen]: ../../std/primitive.str.html#method.replacen + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "string_replace_in_place", issue = "147949")] + pub fn replace_first(&mut self, from: P, to: &str) { + let range = match self.match_indices(from).next() { + Some((start, match_str)) => start..start + match_str.len(), + None => return, + }; + + self.replace_range(range, to); + } + + /// Replaces the rightmost occurrence of a pattern with another string, in-place. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(string_replace_in_place)] + /// + /// let mut s = String::from("Test Results: ❌❌❌"); + /// + /// // Replace the rightmost ❌ with a ✅ + /// s.replace_last('❌', "✅"); + /// assert_eq!(s, "Test Results: ❌❌✅"); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "string_replace_in_place", issue = "147949")] + pub fn replace_last(&mut self, from: P, to: &str) + where + for<'a> P::Searcher<'a>: core::str::pattern::ReverseSearcher<'a>, + { + let range = match self.rmatch_indices(from).next() { + Some((start, match_str)) => start..start + match_str.len(), + None => return, + }; + + self.replace_range(range, to); + } + /// Converts this `String` into a [Box]<[str]>. /// /// Before doing the conversion, this method discards excess capacity like [`shrink_to_fit`]. @@ -2378,6 +2424,28 @@ impl<'a> FromIterator> for String { } } +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "ascii_char", issue = "110998")] +impl FromIterator for String { + fn from_iter>(iter: T) -> Self { + let buf = iter.into_iter().map(core::ascii::Char::to_u8).collect(); + // SAFETY: `buf` is guaranteed to be valid UTF-8 because the `core::ascii::Char` type + // only contains ASCII values (0x00-0x7F), which are valid UTF-8. + unsafe { String::from_utf8_unchecked(buf) } + } +} + +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "ascii_char", issue = "110998")] +impl<'a> FromIterator<&'a core::ascii::Char> for String { + fn from_iter>(iter: T) -> Self { + let buf = iter.into_iter().copied().map(core::ascii::Char::to_u8).collect(); + // SAFETY: `buf` is guaranteed to be valid UTF-8 because the `core::ascii::Char` type + // only contains ASCII values (0x00-0x7F), which are valid UTF-8. + unsafe { String::from_utf8_unchecked(buf) } + } +} + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl Extend for String { @@ -3184,6 +3252,14 @@ impl<'a> FromIterator for Cow<'a, str> { } } +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "ascii_char", issue = "110998")] +impl<'a> FromIterator for Cow<'a, str> { + fn from_iter>(it: T) -> Self { + Cow::Owned(FromIterator::from_iter(it)) + } +} + #[stable(feature = "from_string_for_vec_u8", since = "1.14.0")] impl From for Vec { /// Converts the given [`String`] to a vector [`Vec`] that holds values of type [`u8`]. 
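Besides the in-place `replace_first` / `replace_last` methods, the `string.rs` hunks above add `FromIterator` impls for `core::ascii::Char`, which can skip the UTF-8 validation that collecting `char` or `&str` would otherwise need. A minimal nightly-only sketch of what that enables, assuming a toolchain that carries the new impl (it is gated behind the existing `ascii_char` feature as shown above):

```rust
#![feature(ascii_char)]

use core::ascii::Char;

fn main() {
    // ascii::Char values are always valid UTF-8, so the new FromIterator impl
    // can build the String without re-checking the bytes.
    let chars = [Char::CapitalH, Char::SmallI, Char::ExclamationMark];
    let s: String = chars.into_iter().collect();
    assert_eq!(s, "Hi!");
}
```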
diff --git a/alloc/src/sync.rs b/alloc/src/sync.rs index 5927d03646928..c302f35e5ed6e 100644 --- a/alloc/src/sync.rs +++ b/alloc/src/sync.rs @@ -12,6 +12,8 @@ use core::any::Any; use core::cell::CloneFromCell; #[cfg(not(no_global_oom_handling))] use core::clone::CloneToUninit; +#[cfg(not(no_global_oom_handling))] +use core::clone::TrivialClone; use core::clone::UseCloned; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; @@ -22,6 +24,8 @@ use core::marker::{PhantomData, Unsize}; use core::mem::{self, ManuallyDrop, align_of_val_raw}; use core::num::NonZeroUsize; use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver}; +#[cfg(not(no_global_oom_handling))] +use core::ops::{Residual, Try}; use core::panic::{RefUnwindSafe, UnwindSafe}; use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, NonNull}; @@ -536,7 +540,7 @@ impl Arc { /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] #[inline] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed() -> Arc> { unsafe { @@ -650,6 +654,93 @@ impl Arc { )?)) } } + + /// Maps the value in an `Arc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `Arc`, and the result is returned, also in + /// an `Arc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Arc::map(a, f)` instead of `r.map(a)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// use std::sync::Arc; + /// + /// let r = Arc::new(7); + /// let new = Arc::map(r, |i| i + 7); + /// assert_eq!(*new, 14); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn map(this: Self, f: impl FnOnce(&T) -> U) -> Arc { + if size_of::() == size_of::() + && align_of::() == align_of::() + && Arc::is_unique(&this) + { + unsafe { + let ptr = Arc::into_raw(this); + let value = ptr.read(); + let mut allocation = Arc::from_raw(ptr.cast::>()); + + Arc::get_mut_unchecked(&mut allocation).write(f(&value)); + allocation.assume_init() + } + } else { + Arc::new(f(&*this)) + } + } + + /// Attempts to map the value in an `Arc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `Arc`, and if the operation succeeds, the + /// result is returned, also in an `Arc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `Arc::try_map(a, f)` instead of `a.try_map(f)`. This + /// is so that there is no conflict with a method on the inner type. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// + /// use std::sync::Arc; + /// + /// let b = Arc::new(7); + /// let new = Arc::try_map(b, |&i| u32::try_from(i)).unwrap(); + /// assert_eq!(*new, 7); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn try_map( + this: Self, + f: impl FnOnce(&T) -> R, + ) -> >>::TryType + where + R: Try, + R::Residual: Residual>, + { + if size_of::() == size_of::() + && align_of::() == align_of::() + && Arc::is_unique(&this) + { + unsafe { + let ptr = Arc::into_raw(this); + let value = ptr.read(); + let mut allocation = Arc::from_raw(ptr.cast::>()); + + Arc::get_mut_unchecked(&mut allocation).write(f(&value)?); + try { allocation.assume_init() } + } + } else { + try { Arc::new(f(&*this)?) } + } + } } impl Arc { @@ -886,7 +977,6 @@ impl Arc { /// let five = Arc::try_new_in(5, System)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` - #[inline] #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new_in(data: T, alloc: A) -> Result, AllocError> { @@ -1206,7 +1296,7 @@ impl Arc<[T]> { /// [zeroed]: mem::MaybeUninit::zeroed #[cfg(not(no_global_oom_handling))] #[inline] - #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "new_zeroed_alloc", since = "1.92.0")] #[must_use] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { unsafe { @@ -1226,7 +1316,7 @@ impl Arc<[T]> { /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. - #[unstable(feature = "slice_as_array", issue = "133508")] + #[unstable(feature = "alloc_slice_into_array", issue = "148082")] #[inline] #[must_use] pub fn into_array(self) -> Option> { @@ -2068,7 +2158,8 @@ impl Arc<[T]> { /// Copy elements from slice into newly allocated `Arc<[T]>` /// - /// Unsafe because the caller must either take ownership or bind `T: Copy`. + /// Unsafe because the caller must either take ownership, bind `T: Copy` or + /// bind `T: TrivialClone`. #[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { unsafe { @@ -2160,9 +2251,11 @@ impl ArcFromSlice for Arc<[T]> { } #[cfg(not(no_global_oom_handling))] -impl ArcFromSlice for Arc<[T]> { +impl ArcFromSlice for Arc<[T]> { #[inline] fn from_slice(v: &[T]) -> Self { + // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent + // to the above. unsafe { Arc::copy_from_slice(v) } } } @@ -4404,6 +4497,128 @@ impl UniqueArc { pub fn new(value: T) -> Self { Self::new_in(value, Global) } + + /// Maps the value in a `UniqueArc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `UniqueArc`, and the result is returned, + /// also in a `UniqueArc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `UniqueArc::map(u, f)` instead of `u.map(f)`. This + /// is so that there is no conflict with a method on the inner type. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// #![feature(unique_rc_arc)] + /// + /// use std::sync::UniqueArc; + /// + /// let r = UniqueArc::new(7); + /// let new = UniqueArc::map(r, |i| i + 7); + /// assert_eq!(*new, 14); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn map(this: Self, f: impl FnOnce(T) -> U) -> UniqueArc { + if size_of::() == size_of::() + && align_of::() == align_of::() + && UniqueArc::weak_count(&this) == 0 + { + unsafe { + let ptr = UniqueArc::into_raw(this); + let value = ptr.read(); + let mut allocation = UniqueArc::from_raw(ptr.cast::>()); + + allocation.write(f(value)); + allocation.assume_init() + } + } else { + UniqueArc::new(f(UniqueArc::unwrap(this))) + } + } + + /// Attempts to map the value in a `UniqueArc`, reusing the allocation if possible. + /// + /// `f` is called on a reference to the value in the `UniqueArc`, and if the operation succeeds, + /// the result is returned, also in a `UniqueArc`. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `UniqueArc::try_map(u, f)` instead of `u.try_map(f)`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// ``` + /// #![feature(smart_pointer_try_map)] + /// #![feature(unique_rc_arc)] + /// + /// use std::sync::UniqueArc; + /// + /// let b = UniqueArc::new(7); + /// let new = UniqueArc::try_map(b, u32::try_from).unwrap(); + /// assert_eq!(*new, 7); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "smart_pointer_try_map", issue = "144419")] + pub fn try_map( + this: Self, + f: impl FnOnce(T) -> R, + ) -> >>::TryType + where + R: Try, + R::Residual: Residual>, + { + if size_of::() == size_of::() + && align_of::() == align_of::() + && UniqueArc::weak_count(&this) == 0 + { + unsafe { + let ptr = UniqueArc::into_raw(this); + let value = ptr.read(); + let mut allocation = UniqueArc::from_raw(ptr.cast::>()); + + allocation.write(f(value)?); + try { allocation.assume_init() } + } + } else { + try { UniqueArc::new(f(UniqueArc::unwrap(this))?) } + } + } + + #[cfg(not(no_global_oom_handling))] + fn unwrap(this: Self) -> T { + let this = ManuallyDrop::new(this); + let val: T = unsafe { ptr::read(&**this) }; + + let _weak = Weak { ptr: this.ptr, alloc: Global }; + + val + } +} + +impl UniqueArc { + #[cfg(not(no_global_oom_handling))] + unsafe fn from_raw(ptr: *const T) -> Self { + let offset = unsafe { data_offset(ptr) }; + + // Reverse the offset to find the original ArcInner. + let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut ArcInner }; + + Self { + ptr: unsafe { NonNull::new_unchecked(rc_ptr) }, + _marker: PhantomData, + _marker2: PhantomData, + alloc: Global, + } + } + + #[cfg(not(no_global_oom_handling))] + fn into_raw(this: Self) -> *const T { + let this = ManuallyDrop::new(this); + Self::as_ptr(&*this) + } } impl UniqueArc { @@ -4457,6 +4672,40 @@ impl UniqueArc { Arc::from_inner_in(this.ptr, alloc) } } + + #[cfg(not(no_global_oom_handling))] + fn weak_count(this: &Self) -> usize { + this.inner().weak.load(Acquire) - 1 + } + + #[cfg(not(no_global_oom_handling))] + fn inner(&self) -> &ArcInner { + // SAFETY: while this UniqueArc is alive we're guaranteed that the inner pointer is valid. 
+ unsafe { self.ptr.as_ref() } + } + + #[cfg(not(no_global_oom_handling))] + fn as_ptr(this: &Self) -> *const T { + let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); + + // SAFETY: This cannot go through Deref::deref or UniqueArc::inner because + // this is required to retain raw/mut provenance such that e.g. `get_mut` can + // write through the pointer after the Rc is recovered through `from_raw`. + unsafe { &raw mut (*ptr).data } + } + + #[inline] + #[cfg(not(no_global_oom_handling))] + fn into_inner_with_allocator(this: Self) -> (NonNull>, A) { + let this = mem::ManuallyDrop::new(this); + (this.ptr, unsafe { ptr::read(&this.alloc) }) + } + + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { + Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc } + } } impl UniqueArc { @@ -4487,6 +4736,14 @@ impl UniqueArc { } } +#[cfg(not(no_global_oom_handling))] +impl UniqueArc, A> { + unsafe fn assume_init(self) -> UniqueArc { + let (ptr, alloc) = UniqueArc::into_inner_with_allocator(self); + unsafe { UniqueArc::from_inner_in(ptr.cast(), alloc) } + } +} + #[unstable(feature = "unique_rc_arc", issue = "112566")] impl Deref for UniqueArc { type Target = T; @@ -4528,3 +4785,55 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueArc { unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) }; } } + +#[unstable(feature = "allocator_api", issue = "32838")] +unsafe impl Allocator for Arc { + #[inline] + fn allocate(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate(layout) + } + + #[inline] + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + (**self).allocate_zeroed(layout) + } + + #[inline] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).deallocate(ptr, layout) } + } + + #[inline] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) } + } + + #[inline] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + // SAFETY: the safety contract must be upheld by the caller + unsafe { (**self).shrink(ptr, old_layout, new_layout) } + } +} diff --git a/alloc/src/vec/extract_if.rs b/alloc/src/vec/extract_if.rs index cb9e14f554d41..014219f8d461c 100644 --- a/alloc/src/vec/extract_if.rs +++ b/alloc/src/vec/extract_if.rs @@ -16,7 +16,8 @@ use crate::alloc::{Allocator, Global}; /// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(.., |x| *x % 2 == 0); /// ``` #[stable(feature = "extract_if", since = "1.87.0")] -#[must_use = "iterators are lazy and do nothing unless consumed"] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain_mut` or `extract_if().for_each(drop)` to remove and discard elements"] pub struct ExtractIf< 'a, T, diff --git a/alloc/src/vec/into_iter.rs b/alloc/src/vec/into_iter.rs index 37df928228d9c..358bdeacae790 100644 --- a/alloc/src/vec/into_iter.rs +++ b/alloc/src/vec/into_iter.rs @@ -7,6 +7,7 @@ use core::mem::{ManuallyDrop, MaybeUninit, 
SizedTypeProperties}; use core::num::NonZero; #[cfg(not(no_global_oom_handling))] use core::ops::Deref; +use core::panic::UnwindSafe; use core::ptr::{self, NonNull}; use core::slice::{self}; use core::{array, fmt}; @@ -60,6 +61,11 @@ pub struct IntoIter< pub(super) end: *const T, } +// Manually mirroring what `Vec` has, +// because otherwise we get `T: RefUnwindSafe` from `NonNull`. +#[stable(feature = "catch_unwind", since = "1.9.0")] +impl UnwindSafe for IntoIter {} + #[stable(feature = "vec_intoiter_debug", since = "1.13.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/alloc/src/vec/is_zero.rs b/alloc/src/vec/is_zero.rs index a3ddd6f6e230e..04b50e5762986 100644 --- a/alloc/src/vec/is_zero.rs +++ b/alloc/src/vec/is_zero.rs @@ -1,3 +1,4 @@ +use core::mem::SizedTypeProperties; use core::num::{NonZero, Saturating, Wrapping}; use crate::boxed::Box; @@ -20,6 +21,8 @@ macro_rules! impl_is_zero { }; } +impl_is_zero!((), |_: ()| true); // It is needed to impl for arrays and tuples of (). + impl_is_zero!(i8, |x| x == 0); // It is needed to impl for arrays and tuples of i8. impl_is_zero!(i16, |x| x == 0); impl_is_zero!(i32, |x| x == 0); @@ -43,17 +46,38 @@ impl_is_zero!(f64, |x: f64| x.to_bits() == 0); // `IsZero` cannot be soundly implemented for pointers because of provenance // (see #135338). +unsafe impl IsZero for [T; N] { + #[inline] + default fn is_zero(&self) -> bool { + // If the array is of length zero, + // then it doesn't actually contain any `T`s, + // so `T::clone` doesn't need to be called, + // and we can "zero-initialize" all zero bytes of the array. + N == 0 + } +} + unsafe impl IsZero for [T; N] { #[inline] fn is_zero(&self) -> bool { - // Because this is generated as a runtime check, it's not obvious that - // it's worth doing if the array is really long. The threshold here - // is largely arbitrary, but was picked because as of 2022-07-01 LLVM - // fails to const-fold the check in `vec![[1; 32]; n]` - // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022 - // Feel free to tweak if you have better evidence. - - N <= 16 && self.iter().all(IsZero::is_zero) + if T::IS_ZST { + // If T is a ZST, then there is at most one possible value of `T`, + // so we only need to check one element for zeroness. + // We can't unconditionally return `true` here, since, e.g. + // `T = [NonTrivialCloneZst; 5]` is a ZST that implements `IsZero` + // due to the generic array impl, but `T::is_zero` returns `false` + // since the length is not 0. + self.get(0).is_none_or(IsZero::is_zero) + } else { + // Because this is generated as a runtime check, it's not obvious that + // it's worth doing if the array is really long. The threshold here + // is largely arbitrary, but was picked because as of 2022-07-01 LLVM + // fails to const-fold the check in `vec![[1; 32]; n]` + // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022 + // Feel free to tweak if you have better evidence. + + N <= 16 && self.iter().all(IsZero::is_zero) + } } } @@ -61,7 +85,7 @@ unsafe impl IsZero for [T; N] { macro_rules! impl_is_zero_tuples { // Stopper () => { - // No use for implementing for empty tuple because it is ZST. + // We already have an impl for () above. 
}; ($first_arg:ident $(,$rest:ident)*) => { unsafe impl <$first_arg: IsZero, $($rest: IsZero,)*> IsZero for ($first_arg, $($rest,)*){ diff --git a/alloc/src/vec/mod.rs b/alloc/src/vec/mod.rs index 45d6c28e186e4..13d38d3c9609a 100644 --- a/alloc/src/vec/mod.rs +++ b/alloc/src/vec/mod.rs @@ -73,6 +73,8 @@ #![stable(feature = "rust1", since = "1.0.0")] +#[cfg(not(no_global_oom_handling))] +use core::clone::TrivialClone; #[cfg(not(no_global_oom_handling))] use core::cmp; use core::cmp::Ordering; @@ -80,7 +82,7 @@ use core::hash::{Hash, Hasher}; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::PhantomData; -use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::mem::{self, Assume, ManuallyDrop, MaybeUninit, SizedTypeProperties, TransmuteFrom}; use core::ops::{self, Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; @@ -590,21 +592,13 @@ impl Vec { /// /// # Examples /// - // FIXME Update this when vec_into_raw_parts is stabilized /// ``` /// use std::ptr; - /// use std::mem; /// /// let v = vec![1, 2, 3]; /// - /// // Prevent running `v`'s destructor so we are in complete control - /// // of the allocation. - /// let mut v = mem::ManuallyDrop::new(v); - /// - /// // Pull out the various important pieces of information about `v` - /// let p = v.as_mut_ptr(); - /// let len = v.len(); - /// let cap = v.capacity(); + /// // Deconstruct the vector into parts. + /// let (p, len, cap) = v.into_raw_parts(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 @@ -698,23 +692,13 @@ impl Vec { /// /// # Examples /// - // FIXME Update this when vec_into_raw_parts is stabilized /// ``` /// #![feature(box_vec_non_null)] /// - /// use std::ptr::NonNull; - /// use std::mem; - /// /// let v = vec![1, 2, 3]; /// - /// // Prevent running `v`'s destructor so we are in complete control - /// // of the allocation. - /// let mut v = mem::ManuallyDrop::new(v); - /// - /// // Pull out the various important pieces of information about `v` - /// let p = unsafe { NonNull::new_unchecked(v.as_mut_ptr()) }; - /// let len = v.len(); - /// let cap = v.capacity(); + /// // Deconstruct the vector into parts. + /// let (p, len, cap) = v.into_parts(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 @@ -781,7 +765,6 @@ impl Vec { /// # Examples /// /// ``` - /// #![feature(vec_into_raw_parts)] /// let v: Vec = vec![-1, 0, 1]; /// /// let (ptr, len, cap) = v.into_raw_parts(); @@ -796,7 +779,7 @@ impl Vec { /// assert_eq!(rebuilt, [4294967295, 0, 1]); /// ``` #[must_use = "losing the pointer will leak memory"] - #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + #[stable(feature = "vec_into_raw_parts", since = "CURRENT_RUSTC_VERSION")] pub fn into_raw_parts(self) -> (*mut T, usize, usize) { let mut me = ManuallyDrop::new(self); (me.as_mut_ptr(), me.len(), me.capacity()) @@ -821,7 +804,7 @@ impl Vec { /// # Examples /// /// ``` - /// #![feature(vec_into_raw_parts, box_vec_non_null)] + /// #![feature(box_vec_non_null)] /// /// let v: Vec = vec![-1, 0, 1]; /// @@ -838,7 +821,6 @@ impl Vec { /// ``` #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")] - // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] pub fn into_parts(self) -> (NonNull, usize, usize) { let (ptr, len, capacity) = self.into_raw_parts(); // SAFETY: A `Vec` always has a non-null pointer. 
@@ -994,29 +976,20 @@ impl Vec { /// /// # Examples /// - // FIXME Update this when vec_into_raw_parts is stabilized /// ``` /// #![feature(allocator_api)] /// /// use std::alloc::System; /// /// use std::ptr; - /// use std::mem; /// /// let mut v = Vec::with_capacity_in(3, System); /// v.push(1); /// v.push(2); /// v.push(3); /// - /// // Prevent running `v`'s destructor so we are in complete control - /// // of the allocation. - /// let mut v = mem::ManuallyDrop::new(v); - /// - /// // Pull out the various important pieces of information about `v` - /// let p = v.as_mut_ptr(); - /// let len = v.len(); - /// let cap = v.capacity(); - /// let alloc = v.allocator(); + /// // Deconstruct the vector into parts. + /// let (p, len, cap, alloc) = v.into_raw_parts_with_alloc(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 @@ -1114,29 +1087,18 @@ impl Vec { /// /// # Examples /// - // FIXME Update this when vec_into_raw_parts is stabilized /// ``` /// #![feature(allocator_api, box_vec_non_null)] /// /// use std::alloc::System; /// - /// use std::ptr::NonNull; - /// use std::mem; - /// /// let mut v = Vec::with_capacity_in(3, System); /// v.push(1); /// v.push(2); /// v.push(3); /// - /// // Prevent running `v`'s destructor so we are in complete control - /// // of the allocation. - /// let mut v = mem::ManuallyDrop::new(v); - /// - /// // Pull out the various important pieces of information about `v` - /// let p = unsafe { NonNull::new_unchecked(v.as_mut_ptr()) }; - /// let len = v.len(); - /// let cap = v.capacity(); - /// let alloc = v.allocator(); + /// // Deconstruct the vector into parts. + /// let (p, len, cap, alloc) = v.into_parts_with_alloc(); /// /// unsafe { /// // Overwrite memory with 4, 5, 6 @@ -1204,7 +1166,7 @@ impl Vec { /// # Examples /// /// ``` - /// #![feature(allocator_api, vec_into_raw_parts)] + /// #![feature(allocator_api)] /// /// use std::alloc::System; /// @@ -1226,7 +1188,6 @@ impl Vec { /// ``` #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] - // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) { let mut me = ManuallyDrop::new(self); let len = me.len(); @@ -1254,7 +1215,7 @@ impl Vec { /// # Examples /// /// ``` - /// #![feature(allocator_api, vec_into_raw_parts, box_vec_non_null)] + /// #![feature(allocator_api, box_vec_non_null)] /// /// use std::alloc::System; /// @@ -1277,7 +1238,6 @@ impl Vec { #[must_use = "losing the pointer will leak memory"] #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "box_vec_non_null", reason = "new API", issue = "130364")] - // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] pub fn into_parts_with_alloc(self) -> (NonNull, usize, usize, A) { let (ptr, len, capacity, alloc) = self.into_raw_parts_with_alloc(); // SAFETY: A `Vec` always has a non-null pointer. @@ -2564,8 +2524,8 @@ impl Vec { let _ = self.push_mut(value); } - /// Appends an element if there is sufficient spare capacity, otherwise an error is returned - /// with the element. + /// Appends an element and returns a reference to it if there is sufficient spare capacity, + /// otherwise an error is returned with the element. /// /// Unlike [`push`] this method will not reallocate when there's insufficient capacity. /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity. 
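Aside: a usage sketch of the revised `push_within_capacity` signature described above, which now hands back `&mut T` on success. This still requires a nightly compiler with the `vec_push_within_capacity` feature:

```rust
#![feature(vec_push_within_capacity)]

fn main() {
    let mut v = Vec::with_capacity(2);

    // With spare capacity the call now returns `&mut T` to the stored value.
    let slot = v.push_within_capacity(1).unwrap();
    *slot += 10;
    v.push_within_capacity(2).unwrap();

    // At capacity the element is handed back unchanged instead of reallocating.
    assert_eq!(v.push_within_capacity(3), Err(3));
    assert_eq!(v, [11, 2]);
}
```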
@@ -2601,8 +2561,20 @@ impl Vec { /// Takes *O*(1) time. #[inline] #[unstable(feature = "vec_push_within_capacity", issue = "100486")] - pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> { - self.push_mut_within_capacity(value).map(|_| ()) + // #[unstable(feature = "push_mut", issue = "135974")] + pub fn push_within_capacity(&mut self, value: T) -> Result<&mut T, T> { + if self.len == self.buf.capacity() { + return Err(value); + } + + unsafe { + let end = self.as_mut_ptr().add(self.len); + ptr::write(end, value); + self.len += 1; + + // SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference. + Ok(&mut *end) + } } /// Appends an element to the back of a collection, returning a reference to it. @@ -2654,36 +2626,6 @@ impl Vec { } } - /// Appends an element and returns a reference to it if there is sufficient spare capacity, - /// otherwise an error is returned with the element. - /// - /// Unlike [`push_mut`] this method will not reallocate when there's insufficient capacity. - /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity. - /// - /// [`push_mut`]: Vec::push_mut - /// [`reserve`]: Vec::reserve - /// [`try_reserve`]: Vec::try_reserve - /// - /// # Time complexity - /// - /// Takes *O*(1) time. - #[unstable(feature = "push_mut", issue = "135974")] - // #[unstable(feature = "vec_push_within_capacity", issue = "100486")] - #[inline] - #[must_use = "if you don't need a reference to the value, use `Vec::push_within_capacity` instead"] - pub fn push_mut_within_capacity(&mut self, value: T) -> Result<&mut T, T> { - if self.len == self.buf.capacity() { - return Err(value); - } - unsafe { - let end = self.as_mut_ptr().add(self.len); - ptr::write(end, value); - self.len += 1; - // SAFETY: We just wrote a value to the pointer that will live the lifetime of the reference. - Ok(&mut *end) - } - } - /// Removes the last element from a vector and returns it, or [`None`] if it /// is empty. /// @@ -3259,6 +3201,92 @@ impl Vec { // - `cap / N` fits the size of the allocated memory after shrinking unsafe { Vec::from_raw_parts_in(ptr.cast(), len / N, cap / N, alloc) } } + + /// This clears out this `Vec` and recycles the allocation into a new `Vec`. + /// The item type of the resulting `Vec` needs to have the same size and + /// alignment as the item type of the original `Vec`. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(vec_recycle, transmutability)] + /// let a: Vec = vec![0; 100]; + /// let capacity = a.capacity(); + /// let addr = a.as_ptr().addr(); + /// let b: Vec = a.recycle(); + /// assert_eq!(b.len(), 0); + /// assert_eq!(b.capacity(), capacity); + /// assert_eq!(b.as_ptr().addr(), addr); + /// ``` + /// + /// The `Recyclable` bound prevents this method from being called when `T` and `U` have different sizes; e.g.: + /// + /// ```compile_fail,E0277 + /// #![feature(vec_recycle, transmutability)] + /// let vec: Vec<[u8; 2]> = Vec::new(); + /// let _: Vec<[u8; 1]> = vec.recycle(); + /// ``` + /// ...or different alignments: + /// + /// ```compile_fail,E0277 + /// #![feature(vec_recycle, transmutability)] + /// let vec: Vec<[u16; 0]> = Vec::new(); + /// let _: Vec<[u8; 0]> = vec.recycle(); + /// ``` + /// + /// However, due to temporary implementation limitations of `Recyclable`, + /// this method is not yet callable when `T` or `U` are slices, trait objects, + /// or other exotic types; e.g.: + /// + /// ```compile_fail,E0277 + /// #![feature(vec_recycle, transmutability)] + /// # let inputs = ["a b c", "d e f"]; + /// # fn process(_: &[&str]) {} + /// let mut storage: Vec<&[&str]> = Vec::new(); + /// + /// for input in inputs { + /// let mut buffer: Vec<&str> = storage.recycle(); + /// buffer.extend(input.split(" ")); + /// process(&buffer); + /// storage = buffer.recycle(); + /// } + /// ``` + #[unstable(feature = "vec_recycle", issue = "148227")] + #[expect(private_bounds)] + pub fn recycle(mut self) -> Vec + where + U: Recyclable, + { + self.clear(); + const { + // FIXME(const-hack, 146097): compare `Layout`s + assert!(size_of::() == size_of::()); + assert!(align_of::() == align_of::()); + }; + let (ptr, length, capacity, alloc) = self.into_parts_with_alloc(); + debug_assert_eq!(length, 0); + // SAFETY: + // - `ptr` and `alloc` were just returned from `self.into_raw_parts_with_alloc()` + // - `T` & `U` have the same layout, so `capacity` does not need to be changed and we can safely use `alloc.dealloc` later + // - the original vector was cleared, so there is no problem with "transmuting" the stored values + unsafe { Vec::from_parts_in(ptr.cast::(), length, capacity, alloc) } + } +} + +/// Denotes that an allocation of `From` can be recycled into an allocation of `Self`. +/// +/// # Safety +/// +/// `Self` is `Recyclable` if `Layout::new::() == Layout::new::()`. +unsafe trait Recyclable: Sized {} + +#[unstable_feature_bound(transmutability)] +// SAFETY: enforced by `TransmuteFrom` +unsafe impl Recyclable for To +where + for<'a> &'a MaybeUninit: TransmuteFrom<&'a MaybeUninit, { Assume::SAFETY }>, + for<'a> &'a MaybeUninit: TransmuteFrom<&'a MaybeUninit, { Assume::SAFETY }>, +{ } impl Vec { @@ -3512,7 +3540,7 @@ impl ExtendFromWithinSpec for Vec { } #[cfg(not(no_global_oom_handling))] -impl ExtendFromWithinSpec for Vec { +impl ExtendFromWithinSpec for Vec { unsafe fn spec_extend_from_within(&mut self, src: Range) { let count = src.len(); { @@ -3525,8 +3553,8 @@ impl ExtendFromWithinSpec for Vec { // SAFETY: // - Both pointers are created from unique slice references (`&mut [_]`) // so they are valid and do not overlap. - // - Elements are :Copy so it's OK to copy them, without doing - // anything with the original values + // - Elements implement `TrivialClone` so this is equivalent to calling + // `clone` on every one of them. 
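Aside: a usage sketch of the new `Vec::recycle`, reusing one allocation across element types with identical layout. As in the doc examples above, this assumes a nightly compiler with the `vec_recycle` and `transmutability` features:

```rust
#![feature(vec_recycle, transmutability)]

fn main() {
    // Reuse one allocation across buffers of layout-compatible element types.
    let scratch: Vec<u32> = Vec::with_capacity(64);
    let cap = scratch.capacity();
    let addr = scratch.as_ptr().addr();

    // `u32` and `f32` have the same size and alignment, so the allocation is kept.
    let mut floats: Vec<f32> = scratch.recycle();
    assert_eq!(floats.capacity(), cap);
    assert_eq!(floats.as_ptr().addr(), addr);
    floats.extend([1.0, 2.0, 3.0]);

    // Recycling clears the contents but keeps the allocation for the next round.
    let ints: Vec<u32> = floats.recycle();
    assert!(ints.is_empty());
    assert_eq!(ints.capacity(), cap);
}
```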
// - `count` is equal to the len of `source`, so source is valid for // `count` reads // - `.reserve(count)` guarantees that `spare.len() >= count` so spare @@ -3905,7 +3933,8 @@ impl Vec { /// /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating /// or the iteration short-circuits, then the remaining elements will be retained. - /// Use [`retain_mut`] with a negated predicate if you do not need the returned iterator. + /// Use `extract_if().for_each(drop)` if you do not need the returned iterator, + /// or [`retain_mut`] with a negated predicate if you also do not need to restrict the range. /// /// [`retain_mut`]: Vec::retain_mut /// diff --git a/alloc/src/vec/spec_extend.rs b/alloc/src/vec/spec_extend.rs index 7085bceef5baa..f5bcd3ec9d82d 100644 --- a/alloc/src/vec/spec_extend.rs +++ b/alloc/src/vec/spec_extend.rs @@ -1,3 +1,4 @@ +use core::clone::TrivialClone; use core::iter::TrustedLen; use core::slice::{self}; @@ -48,7 +49,7 @@ where impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec where - T: Copy, + T: TrivialClone, { fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { let slice = iterator.as_slice(); diff --git a/alloc/src/wtf8/mod.rs b/alloc/src/wtf8/mod.rs index 047994adc4486..e4834a24bf430 100644 --- a/alloc/src/wtf8/mod.rs +++ b/alloc/src/wtf8/mod.rs @@ -14,7 +14,7 @@ #[cfg(test)] mod tests; -use core::char::{MAX_LEN_UTF8, encode_utf8_raw}; +use core::char::encode_utf8_raw; use core::hash::{Hash, Hasher}; pub use core::wtf8::{CodePoint, Wtf8}; #[cfg(not(test))] @@ -166,7 +166,7 @@ impl Wtf8Buf { /// This does **not** include the WTF-8 concatenation check or `is_known_utf8` check. /// Copied from String::push. unsafe fn push_code_point_unchecked(&mut self, code_point: CodePoint) { - let mut bytes = [0; MAX_LEN_UTF8]; + let mut bytes = [0; char::MAX_LEN_UTF8]; let bytes = encode_utf8_raw(code_point.to_u32(), &mut bytes); self.bytes.extend_from_slice(bytes) } diff --git a/alloc/src/wtf8/tests.rs b/alloc/src/wtf8/tests.rs index 291f63f9f9e54..a72ad0837d11e 100644 --- a/alloc/src/wtf8/tests.rs +++ b/alloc/src/wtf8/tests.rs @@ -579,6 +579,17 @@ fn wtf8_encode_wide_size_hint() { assert!(iter.next().is_none()); } +#[test] +fn wtf8_encode_wide_debug() { + let mut string = Wtf8Buf::from_str("aé "); + string.push(CodePoint::from_u32(0xD83D).unwrap()); + string.push_char('💩'); + assert_eq!( + format!("{:?}", string.encode_wide()), + r#"EncodeWide(['a', 'é', ' ', 0xD83D, 0xD83D, 0xDCA9])"# + ); +} + #[test] fn wtf8_clone_into() { let mut string = Wtf8Buf::new(); diff --git a/alloctests/Cargo.toml b/alloctests/Cargo.toml index 07c45d1b82484..3b522bf80a217 100644 --- a/alloctests/Cargo.toml +++ b/alloctests/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/rust-lang/rust.git" description = "Tests for the Rust Allocation Library" autotests = false autobenches = false -edition = "2021" +edition = "2024" [lib] path = "lib.rs" diff --git a/alloctests/lib.rs b/alloctests/lib.rs index 0201c8752210c..73c25679d05ba 100644 --- a/alloctests/lib.rs +++ b/alloctests/lib.rs @@ -19,7 +19,7 @@ #![feature(array_into_iter_constructors)] #![feature(assert_matches)] #![feature(char_internals)] -#![feature(char_max_len)] +#![feature(copied_into_inner)] #![feature(core_intrinsics)] #![feature(exact_size_is_empty)] #![feature(extend_one)] @@ -32,12 +32,14 @@ #![feature(maybe_uninit_uninit_array_transpose)] #![feature(ptr_alignment_type)] #![feature(ptr_internals)] +#![feature(rev_into_inner)] #![feature(sized_type_properties)] 
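Aside: the specializations above now key on `TrivialClone` instead of `Copy`, but the observable behaviour is unchanged, so a stable-Rust example can only show what the fast paths compute:

```rust
fn main() {
    // `u8`'s `Clone` is a plain copy, so both operations below are eligible
    // for the bulk-copy fast path; the results are identical to calling
    // `clone` on every element.
    let mut v = vec![0u8, 1, 2, 3];
    v.extend_from_within(1..3);
    assert_eq!(v, [0, 1, 2, 3, 1, 2]);

    let src = [9u8, 8, 7];
    let mut w: Vec<u8> = Vec::new();
    w.extend(src.iter()); // extending from a slice iterator
    assert_eq!(w, [9, 8, 7]);
}
```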
#![feature(slice_iter_mut_as_mut_slice)] #![feature(slice_ptr_get)] #![feature(slice_range)] #![feature(std_internals)] #![feature(temporary_niche_types)] +#![feature(trivial_clone)] #![feature(trusted_fused)] #![feature(trusted_len)] #![feature(trusted_random_access)] diff --git a/alloctests/tests/borrow.rs b/alloctests/tests/borrow.rs index af7efb7d78223..19695d424db2d 100644 --- a/alloctests/tests/borrow.rs +++ b/alloctests/tests/borrow.rs @@ -52,9 +52,9 @@ fn cow_const() { const COW: Cow<'_, str> = Cow::Borrowed("moo"); - const IS_BORROWED: bool = COW.is_borrowed(); + const IS_BORROWED: bool = Cow::is_borrowed(&COW); assert!(IS_BORROWED); - const IS_OWNED: bool = COW.is_owned(); + const IS_OWNED: bool = Cow::is_owned(&COW); assert!(!IS_OWNED); } diff --git a/alloctests/tests/lib.rs b/alloctests/tests/lib.rs index 49fb21ef5f3ac..b4d3e75b09942 100644 --- a/alloctests/tests/lib.rs +++ b/alloctests/tests/lib.rs @@ -3,9 +3,9 @@ #![feature(iter_array_chunks)] #![feature(assert_matches)] #![feature(wtf8_internals)] -#![feature(char_max_len)] #![feature(cow_is_borrowed)] #![feature(core_intrinsics)] +#![feature(deque_extend_front)] #![feature(downcast_unchecked)] #![feature(exact_size_is_empty)] #![feature(hashmap_internals)] @@ -36,7 +36,7 @@ #![feature(local_waker)] #![feature(str_as_str)] #![feature(strict_provenance_lints)] -#![feature(vec_deque_pop_if)] +#![feature(string_replace_in_place)] #![feature(vec_deque_truncate_front)] #![feature(unique_rc_arc)] #![feature(macro_metavar_expr_concat)] diff --git a/alloctests/tests/str.rs b/alloctests/tests/str.rs index 906fa2d425e77..fbb3b01fd67f9 100644 --- a/alloctests/tests/str.rs +++ b/alloctests/tests/str.rs @@ -2,7 +2,6 @@ use std::assert_matches::assert_matches; use std::borrow::Cow; -use std::char::MAX_LEN_UTF8; use std::cmp::Ordering::{Equal, Greater, Less}; use std::str::{from_utf8, from_utf8_unchecked}; @@ -1232,7 +1231,7 @@ fn test_to_uppercase_rev_iterator() { #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn test_chars_decoding() { - let mut bytes = [0; MAX_LEN_UTF8]; + let mut bytes = [0; char::MAX_LEN_UTF8]; for c in (0..0x110000).filter_map(std::char::from_u32) { let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().next() { @@ -1244,7 +1243,7 @@ fn test_chars_decoding() { #[test] #[cfg_attr(miri, ignore)] // Miri is too slow fn test_chars_rev_decoding() { - let mut bytes = [0; MAX_LEN_UTF8]; + let mut bytes = [0; char::MAX_LEN_UTF8]; for c in (0..0x110000).filter_map(std::char::from_u32) { let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().rev().next() { diff --git a/alloctests/tests/string.rs b/alloctests/tests/string.rs index d996c55f94660..ecc5b9dc82ed0 100644 --- a/alloctests/tests/string.rs +++ b/alloctests/tests/string.rs @@ -719,6 +719,40 @@ fn test_replace_range_evil_end_bound() { assert_eq!(Ok(""), str::from_utf8(s.as_bytes())); } +#[test] +fn test_replace_first() { + let mut s = String::from("~ First ❌ Middle ❌ Last ❌ ~"); + s.replace_first("❌", "✅✅"); + assert_eq!(s, "~ First ✅✅ Middle ❌ Last ❌ ~"); + s.replace_first("🦀", "😳"); + assert_eq!(s, "~ First ✅✅ Middle ❌ Last ❌ ~"); + + let mut s = String::from("❌"); + s.replace_first('❌', "✅✅"); + assert_eq!(s, "✅✅"); + + let mut s = String::from(""); + s.replace_first('🌌', "❌"); + assert_eq!(s, ""); +} + +#[test] +fn test_replace_last() { + let mut s = String::from("~ First ❌ Middle ❌ Last ❌ ~"); + s.replace_last("❌", "✅✅"); + assert_eq!(s, "~ First ❌ Middle ❌ Last ✅✅ ~"); + s.replace_last("🦀", "😳"); + assert_eq!(s, "~ First ❌ Middle ❌ Last ✅✅ ~"); + + 
let mut s = String::from("❌"); + s.replace_last::('❌', "✅✅"); + assert_eq!(s, "✅✅"); + + let mut s = String::from(""); + s.replace_last::('🌌', "❌"); + assert_eq!(s, ""); +} + #[test] fn test_extend_ref() { let mut a = "foo".to_string(); diff --git a/alloctests/tests/vec.rs b/alloctests/tests/vec.rs index ea334ab0f143a..03b82fa82440e 100644 --- a/alloctests/tests/vec.rs +++ b/alloctests/tests/vec.rs @@ -2299,20 +2299,6 @@ fn test_vec_swap() { assert_eq!(n, 0); } -#[test] -fn test_extend_from_within_spec() { - #[derive(Copy)] - struct CopyOnly; - - impl Clone for CopyOnly { - fn clone(&self) -> Self { - panic!("extend_from_within must use specialization on copy"); - } - } - - vec![CopyOnly, CopyOnly].extend_from_within(..); -} - #[test] fn test_extend_from_within_clone() { let mut v = vec![String::from("sssss"), String::from("12334567890"), String::from("c")]; diff --git a/alloctests/tests/vec_deque.rs b/alloctests/tests/vec_deque.rs index a82906d55e5d0..cf31613577f74 100644 --- a/alloctests/tests/vec_deque.rs +++ b/alloctests/tests/vec_deque.rs @@ -1,3 +1,4 @@ +use core::cell::Cell; use core::num::NonZero; use std::assert_matches::assert_matches; use std::collections::TryReserveErrorKind::*; @@ -1849,3 +1850,308 @@ fn test_truncate_front() { v.truncate_front(5); assert_eq!(v.as_slices(), ([2, 3, 4, 5, 6].as_slice(), [].as_slice())); } + +#[test] +fn test_extend_from_within() { + let mut v = VecDeque::with_capacity(8); + v.extend(0..6); + v.truncate_front(4); + assert_eq!(v, [2, 3, 4, 5]); + v.extend_from_within(1..4); + assert_eq!(v, [2, 3, 4, 5, 3, 4, 5]); + // check it really wrapped + assert_eq!(v.as_slices(), ([2, 3, 4, 5, 3, 4].as_slice(), [5].as_slice())); + v.extend_from_within(1..=2); + assert_eq!(v, [2, 3, 4, 5, 3, 4, 5, 3, 4]); + v.extend_from_within(..3); + assert_eq!(v, [2, 3, 4, 5, 3, 4, 5, 3, 4, 2, 3, 4]); +} + +/// Struct that allows tracking clone and drop calls and can be set to panic on calling clone. +struct CloneTracker<'a> { + id: usize, + // Counters can be set to None if not needed. 
+ clone: Option<&'a Cell>, + drop: Option<&'a Cell>, + panic: bool, +} + +impl<'a> CloneTracker<'a> { + pub const DUMMY: Self = Self { id: 999, clone: None, drop: None, panic: false }; +} + +impl<'a> Clone for CloneTracker<'a> { + fn clone(&self) -> Self { + if self.panic { + panic!(); + } + + if let Some(clone_count) = self.clone { + clone_count.update(|c| c + 1); + } + + Self { id: self.id, clone: self.clone, drop: self.drop, panic: false } + } +} + +impl<'a> Drop for CloneTracker<'a> { + fn drop(&mut self) { + if let Some(drop_count) = self.drop { + drop_count.update(|c| c + 1); + } + } +} + +#[test] +fn test_extend_from_within_clone() { + let clone_counts = [const { Cell::new(0) }; 4]; + let mut v = VecDeque::with_capacity(10); + // insert 2 dummy elements to have the buffer wrap later + v.extend([CloneTracker::DUMMY; 2]); + v.extend(clone_counts.iter().enumerate().map(|(id, clone_count)| CloneTracker { + id, + clone: Some(clone_count), + drop: None, + panic: false, + })); + // remove the dummy elements + v.truncate_front(4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3]); + + v.extend_from_within(2..); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3, 2, 3]); + // elements at index 2 and 3 should have been cloned once + assert_eq!(clone_counts.each_ref().map(Cell::get), [0, 0, 1, 1]); + // it is important that the deque wraps because of this operation, we want to test if wrapping is handled correctly + v.extend_from_within(1..5); + // total length is 10, 8 in the first part and 2 in the second part + assert_eq!(v.as_slices().0.len(), 8); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3, 2, 3, 1, 2, 3, 2]); + // the new elements are from indices 1, 2, 3 and 2, those elements should have their clone count + // incremented (clone count at index 2 gets incremented twice so ends up at 3) + assert_eq!(clone_counts.each_ref().map(Cell::get), [0, 1, 3, 2]); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_extend_from_within_clone_panic() { + let clone_counts = [const { Cell::new(0) }; 4]; + let drop_count = Cell::new(0); + let mut v = VecDeque::with_capacity(8); + // insert 2 dummy elements to have the buffer wrap later + v.extend([CloneTracker::DUMMY; 2]); + v.extend(clone_counts.iter().enumerate().map(|(id, clone_count)| CloneTracker { + id, + clone: Some(clone_count), + drop: Some(&drop_count), + panic: false, + })); + // remove the dummy elements + v.truncate_front(4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3]); + + // panic after wrapping + v[2].panic = true; + catch_unwind(AssertUnwindSafe(|| { + v.extend_from_within(..); + })) + .unwrap_err(); + v[2].panic = false; + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3, 0, 1]); + // the first 2 elements were cloned + assert_eq!(clone_counts.each_ref().map(Cell::get), [1, 1, 0, 0]); + // nothing should have been dropped + assert_eq!(drop_count.get(), 0); + + v.truncate_front(2); + assert_eq!(drop_count.get(), 4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1]); + + // panic before wrapping + v[1].panic = true; + catch_unwind(AssertUnwindSafe(|| { + v.extend_from_within(..); + })) + .unwrap_err(); + v[1].panic = false; + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 0]); + // only the first element was cloned + assert_eq!(clone_counts.each_ref().map(Cell::get), [2, 1, 0, 0]); + // nothing more should have been dropped + assert_eq!(drop_count.get(), 4); +} + +#[test] +fn 
test_prepend_from_within() { + let mut v = VecDeque::with_capacity(8); + v.extend(0..6); + v.truncate_front(4); + v.prepend_from_within(..=0); + assert_eq!(v.as_slices(), ([2, 2, 3, 4, 5].as_slice(), [].as_slice())); + v.prepend_from_within(2..); + assert_eq!(v.as_slices(), ([3, 4].as_slice(), [5, 2, 2, 3, 4, 5].as_slice())); + v.prepend_from_within(..); + assert_eq!(v, [[3, 4, 5, 2, 2, 3, 4, 5]; 2].as_flattened()); +} + +#[test] +fn test_prepend_from_within_clone() { + let clone_counts = [const { Cell::new(0) }; 4]; + // insert 2 dummy elements to have the buffer wrap later + let mut v = VecDeque::with_capacity(10); + v.extend([CloneTracker::DUMMY; 2]); + v.extend(clone_counts.iter().enumerate().map(|(id, clone_count)| CloneTracker { + id, + clone: Some(clone_count), + drop: None, + panic: false, + })); + // remove the dummy elements + v.truncate_front(4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3]); + + v.prepend_from_within(..2); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 0, 1, 2, 3]); + v.prepend_from_within(1..5); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [1, 0, 1, 2, 0, 1, 0, 1, 2, 3]); + // count the number of each element and subtract one (clone should have been called n-1 times if we have n elements) + // example: 0 appears 3 times so should have been cloned twice, 1 appears 4 times so cloned 3 times, etc + assert_eq!(clone_counts.each_ref().map(Cell::get), [2, 3, 1, 0]); +} + +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn test_prepend_from_within_clone_panic() { + let clone_counts = [const { Cell::new(0) }; 4]; + let drop_count = Cell::new(0); + let mut v = VecDeque::with_capacity(8); + // insert 2 dummy elements to have the buffer wrap later + v.extend([CloneTracker::DUMMY; 2]); + v.extend(clone_counts.iter().enumerate().map(|(id, clone_count)| CloneTracker { + id, + clone: Some(clone_count), + drop: Some(&drop_count), + panic: false, + })); + // remove the dummy elements + v.truncate_front(4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [0, 1, 2, 3]); + + // panic after wrapping + v[1].panic = true; + catch_unwind(AssertUnwindSafe(|| { + v.prepend_from_within(..); + })) + .unwrap_err(); + v[1].panic = false; + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [2, 3, 0, 1, 2, 3]); + // the last 2 elements were cloned + assert_eq!(clone_counts.each_ref().map(Cell::get), [0, 0, 1, 1]); + // nothing should have been dropped + assert_eq!(drop_count.get(), 0); + + v.truncate_front(2); + assert_eq!(drop_count.get(), 4); + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [2, 3]); + + // panic before wrapping + v[0].panic = true; + catch_unwind(AssertUnwindSafe(|| { + v.prepend_from_within(..); + })) + .unwrap_err(); + v[0].panic = false; + assert_eq!(v.iter().map(|tr| tr.id).collect::>(), [3, 2, 3]); + // only the first element was cloned + assert_eq!(clone_counts.each_ref().map(Cell::get), [0, 0, 1, 2]); + // nothing more should have been dropped + assert_eq!(drop_count.get(), 4); +} + +#[test] +fn test_extend_and_prepend_from_within() { + let mut v = ('0'..='9').map(String::from).collect::>(); + v.truncate_front(5); + v.extend_from_within(4..); + v.prepend_from_within(..2); + assert_eq!(v.iter().map(|s| &**s).collect::(), "56567899"); + v.clear(); + v.extend(['1', '2', '3'].map(String::from)); + v.prepend_from_within(..); + v.extend_from_within(..); + assert_eq!(v.iter().map(|s| &**s).collect::(), "123123123123"); +} + +#[test] +fn test_extend_front() { + let mut v = 
VecDeque::new(); + v.extend_front(0..3); + assert_eq!(v, [2, 1, 0]); + v.extend_front(3..6); + assert_eq!(v, [5, 4, 3, 2, 1, 0]); + v.prepend([1; 4]); + assert_eq!(v, [1, 1, 1, 1, 5, 4, 3, 2, 1, 0]); + + let mut v = VecDeque::with_capacity(8); + let cap = v.capacity(); + v.extend(0..4); + v.truncate_front(2); + v.extend_front(4..8); + assert_eq!(v.as_slices(), ([7, 6].as_slice(), [5, 4, 2, 3].as_slice())); + assert_eq!(v.capacity(), cap); + + let mut v = VecDeque::new(); + v.extend_front([]); + v.extend_front(None); + v.extend_front(vec![]); + v.prepend([]); + v.prepend(None); + v.prepend(vec![]); + assert_eq!(v.capacity(), 0); + v.extend_front(Some(123)); + assert_eq!(v, [123]); +} + +#[test] +fn test_extend_front_specialization_vec_into_iter() { + // trigger 4 code paths: all combinations of prepend and extend_front, wrap and no wrap + let mut v = VecDeque::with_capacity(4); + v.prepend(vec![1, 2, 3]); + assert_eq!(v, [1, 2, 3]); + v.pop_back(); + // this should wrap around the physical buffer + v.prepend(vec![-1, 0]); + // check it really wrapped + assert_eq!(v.as_slices(), ([-1].as_slice(), [0, 1, 2].as_slice())); + + let mut v = VecDeque::with_capacity(4); + v.extend_front(vec![1, 2, 3]); + assert_eq!(v, [3, 2, 1]); + v.pop_back(); + // this should wrap around the physical buffer + v.extend_front(vec![4, 5]); + // check it really wrapped + assert_eq!(v.as_slices(), ([5].as_slice(), [4, 3, 2].as_slice())); +} + +#[test] +fn test_extend_front_specialization_copy_slice() { + // trigger 4 code paths: all combinations of prepend and extend_front, wrap and no wrap + let mut v = VecDeque::with_capacity(4); + v.prepend([1, 2, 3].as_slice().iter().copied()); + assert_eq!(v, [1, 2, 3]); + v.pop_back(); + // this should wrap around the physical buffer + v.prepend([-1, 0].as_slice().iter().copied()); + // check it really wrapped + assert_eq!(v.as_slices(), ([-1].as_slice(), [0, 1, 2].as_slice())); + + let mut v = VecDeque::with_capacity(4); + v.extend_front([1, 2, 3].as_slice().iter().copied()); + assert_eq!(v, [3, 2, 1]); + v.pop_back(); + // this should wrap around the physical buffer + v.extend_front([4, 5].as_slice().iter().copied()); + // check it really wrapped + assert_eq!(v.as_slices(), ([5].as_slice(), [4, 3, 2].as_slice())); +} diff --git a/core/Cargo.toml b/core/Cargo.toml index d094172b07659..8f435dd72d7a1 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -23,6 +23,7 @@ optimize_for_size = [] # Make `RefCell` store additional debugging information, which is printed out when # a borrow error occurs debug_refcell = [] +llvm_enzyme = [] [lints.rust.unexpected_cfgs] level = "warn" @@ -38,4 +39,6 @@ check-cfg = [ 'cfg(target_has_reliable_f16_math)', 'cfg(target_has_reliable_f128)', 'cfg(target_has_reliable_f128_math)', + 'cfg(llvm_enzyme)', + ] diff --git a/core/src/alloc/global.rs b/core/src/alloc/global.rs index 5bf6f143b4f82..e2413b619f9fa 100644 --- a/core/src/alloc/global.rs +++ b/core/src/alloc/global.rs @@ -124,7 +124,7 @@ pub unsafe trait GlobalAlloc { /// /// # Safety /// - /// `layout` must have non-zero size. Attempting to allocate for a zero-sized `layout` may + /// `layout` must have non-zero size. Attempting to allocate for a zero-sized `layout` will /// result in undefined behavior. /// /// (Extension subtraits might provide more specific bounds on @@ -163,7 +163,7 @@ pub unsafe trait GlobalAlloc { /// * `layout` is the same layout that was used to allocate that block of /// memory. /// - /// Otherwise undefined behavior can result. 
+ /// Otherwise the behavior is undefined. #[stable(feature = "global_alloc", since = "1.28.0")] unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); @@ -173,7 +173,7 @@ pub unsafe trait GlobalAlloc { /// # Safety /// /// The caller has to ensure that `layout` has non-zero size. Like `alloc` - /// zero sized `layout` can result in undefined behavior. + /// zero sized `layout` will result in undefined behavior. /// However the allocated block of memory is guaranteed to be initialized. /// /// # Errors @@ -234,7 +234,7 @@ pub unsafe trait GlobalAlloc { /// does not overflow `isize` (i.e., the rounded value must be less than or /// equal to `isize::MAX`). /// - /// If these are not followed, undefined behavior can result. + /// If these are not followed, the behavior is undefined. /// /// (Extension subtraits might provide more specific bounds on /// behavior, e.g., guarantee a sentinel address or a null pointer diff --git a/core/src/alloc/layout.rs b/core/src/alloc/layout.rs index cd5fd77f86597..1f37c978fecfd 100644 --- a/core/src/alloc/layout.rs +++ b/core/src/alloc/layout.rs @@ -316,8 +316,7 @@ impl Layout { // Size 1 Align MAX or Size isize::MAX Align 2 round up to `isize::MAX + 1`.) unsafe { let align_m1 = unchecked_sub(align.as_usize(), 1); - let size_rounded_up = unchecked_add(self.size, align_m1) & !align_m1; - size_rounded_up + unchecked_add(self.size, align_m1) & !align_m1 } } diff --git a/core/src/any.rs b/core/src/any.rs index 3ab95438c3ff3..ff55793340bd0 100644 --- a/core/src/any.rs +++ b/core/src/any.rs @@ -227,7 +227,7 @@ impl dyn Any { // SAFETY: just checked whether we are pointing to the correct type, and we can rely on // that check for memory safety because we have implemented Any for all types; no other // impls can exist as they would conflict with our impl. - unsafe { Some(self.downcast_ref_unchecked()) } + unsafe { Some(self.downcast_unchecked_ref()) } } else { None } @@ -263,7 +263,7 @@ impl dyn Any { // SAFETY: just checked whether we are pointing to the correct type, and we can rely on // that check for memory safety because we have implemented Any for all types; no other // impls can exist as they would conflict with our impl. - unsafe { Some(self.downcast_mut_unchecked()) } + unsafe { Some(self.downcast_unchecked_mut()) } } else { None } @@ -281,7 +281,7 @@ impl dyn Any { /// let x: Box = Box::new(1_usize); /// /// unsafe { - /// assert_eq!(*x.downcast_ref_unchecked::(), 1); + /// assert_eq!(*x.downcast_unchecked_ref::(), 1); /// } /// ``` /// @@ -291,7 +291,7 @@ impl dyn Any { /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_ref_unchecked(&self) -> &T { + pub unsafe fn downcast_unchecked_ref(&self) -> &T { debug_assert!(self.is::()); // SAFETY: caller guarantees that T is the correct type unsafe { &*(self as *const dyn Any as *const T) } @@ -309,7 +309,7 @@ impl dyn Any { /// let mut x: Box = Box::new(1_usize); /// /// unsafe { - /// *x.downcast_mut_unchecked::() += 1; + /// *x.downcast_unchecked_mut::() += 1; /// } /// /// assert_eq!(*x.downcast_ref::().unwrap(), 2); @@ -321,7 +321,7 @@ impl dyn Any { /// with the incorrect type is *undefined behavior*. 
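Aside: a usage sketch of the renamed unchecked downcast accessors. This only compiles with this patch applied; on released nightlies the method is still spelled `downcast_ref_unchecked` (gated behind `downcast_unchecked` either way):

```rust
#![feature(downcast_unchecked)]
use std::any::Any;

fn main() {
    let x: Box<dyn Any> = Box::new(7_usize);

    if x.is::<usize>() {
        // SAFETY: the concrete type was checked on the line above.
        let v = unsafe { x.downcast_unchecked_ref::<usize>() };
        assert_eq!(*v, 7);
    }
}
```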
#[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_mut_unchecked(&mut self) -> &mut T { + pub unsafe fn downcast_unchecked_mut(&mut self) -> &mut T { debug_assert!(self.is::()); // SAFETY: caller guarantees that T is the correct type unsafe { &mut *(self as *mut dyn Any as *mut T) } @@ -417,7 +417,7 @@ impl dyn Any + Send { /// let x: Box = Box::new(1_usize); /// /// unsafe { - /// assert_eq!(*x.downcast_ref_unchecked::(), 1); + /// assert_eq!(*x.downcast_unchecked_ref::(), 1); /// } /// ``` /// @@ -427,9 +427,9 @@ impl dyn Any + Send { /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_ref_unchecked(&self) -> &T { + pub unsafe fn downcast_unchecked_ref(&self) -> &T { // SAFETY: guaranteed by caller - unsafe { ::downcast_ref_unchecked::(self) } + unsafe { ::downcast_unchecked_ref::(self) } } /// Forwards to the method defined on the type `dyn Any`. @@ -444,7 +444,7 @@ impl dyn Any + Send { /// let mut x: Box = Box::new(1_usize); /// /// unsafe { - /// *x.downcast_mut_unchecked::() += 1; + /// *x.downcast_unchecked_mut::() += 1; /// } /// /// assert_eq!(*x.downcast_ref::().unwrap(), 2); @@ -456,9 +456,9 @@ impl dyn Any + Send { /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_mut_unchecked(&mut self) -> &mut T { + pub unsafe fn downcast_unchecked_mut(&mut self) -> &mut T { // SAFETY: guaranteed by caller - unsafe { ::downcast_mut_unchecked::(self) } + unsafe { ::downcast_unchecked_mut::(self) } } } @@ -551,7 +551,7 @@ impl dyn Any + Send + Sync { /// let x: Box = Box::new(1_usize); /// /// unsafe { - /// assert_eq!(*x.downcast_ref_unchecked::(), 1); + /// assert_eq!(*x.downcast_unchecked_ref::(), 1); /// } /// ``` /// # Safety @@ -560,9 +560,9 @@ impl dyn Any + Send + Sync { /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_ref_unchecked(&self) -> &T { + pub unsafe fn downcast_unchecked_ref(&self) -> &T { // SAFETY: guaranteed by caller - unsafe { ::downcast_ref_unchecked::(self) } + unsafe { ::downcast_unchecked_ref::(self) } } /// Forwards to the method defined on the type `Any`. @@ -577,7 +577,7 @@ impl dyn Any + Send + Sync { /// let mut x: Box = Box::new(1_usize); /// /// unsafe { - /// *x.downcast_mut_unchecked::() += 1; + /// *x.downcast_unchecked_mut::() += 1; /// } /// /// assert_eq!(*x.downcast_ref::().unwrap(), 2); @@ -588,9 +588,9 @@ impl dyn Any + Send + Sync { /// with the incorrect type is *undefined behavior*. #[unstable(feature = "downcast_unchecked", issue = "90850")] #[inline] - pub unsafe fn downcast_mut_unchecked(&mut self) -> &mut T { + pub unsafe fn downcast_unchecked_mut(&mut self) -> &mut T { // SAFETY: guaranteed by caller - unsafe { ::downcast_mut_unchecked::(self) } + unsafe { ::downcast_unchecked_mut::(self) } } } @@ -611,6 +611,15 @@ impl dyn Any + Send + Sync { /// noting that the hashes and ordering will vary between Rust releases. Beware /// of relying on them inside of your code! /// +/// # Layout +/// +/// Like other [`Rust`-representation][repr-rust] types, `TypeId`'s size and layout are unstable. +/// In particular, this means that you cannot rely on the size and layout of `TypeId` remaining the +/// same between Rust releases; they are subject to change without prior notice between Rust +/// releases. 
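Aside: a small example of what the new `Layout` section does and does not promise about `TypeId`:

```rust
use std::any::TypeId;

fn main() {
    // TypeIds are only meaningful as opaque values compared within one program run:
    assert_eq!(TypeId::of::<u32>(), TypeId::of::<u32>());
    assert_ne!(TypeId::of::<u32>(), TypeId::of::<i32>());

    // Per the new "Layout" section, nothing about the size or representation
    // is promised across releases, so this number may change at any time.
    println!("TypeId is currently {} bytes", std::mem::size_of::<TypeId>());
}
```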
+/// +/// [repr-rust]: https://doc.rust-lang.org/reference/type-layout.html#r-layout.repr.rust.unspecified +/// /// # Danger of Improper Variance /// /// You might think that subtyping is impossible between two static types, diff --git a/core/src/array/equality.rs b/core/src/array/equality.rs index c2c7ccf0daa23..ec79a657e58e2 100644 --- a/core/src/array/equality.rs +++ b/core/src/array/equality.rs @@ -132,9 +132,8 @@ where #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] impl const Eq for [T; N] {} -#[const_trait] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] -trait SpecArrayEq: Sized { +const trait SpecArrayEq: Sized { fn spec_eq(a: &[Self; N], b: &[Other; N]) -> bool; fn spec_ne(a: &[Self; N], b: &[Other; N]) -> bool; } diff --git a/core/src/array/iter.rs b/core/src/array/iter.rs index fdae5c08f1e8e..1c1f4d78c03fd 100644 --- a/core/src/array/iter.rs +++ b/core/src/array/iter.rs @@ -2,9 +2,9 @@ use crate::intrinsics::transmute_unchecked; use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce}; -use crate::mem::MaybeUninit; +use crate::mem::{ManuallyDrop, MaybeUninit}; use crate::num::NonZero; -use crate::ops::{IndexRange, Range, Try}; +use crate::ops::{Deref as _, DerefMut as _, IndexRange, Range, Try}; use crate::{fmt, ptr}; mod iter_inner; @@ -18,17 +18,17 @@ type InnerUnsized = iter_inner::PolymorphicIter<[MaybeUninit]>; #[rustc_diagnostic_item = "ArrayIntoIter"] #[derive(Clone)] pub struct IntoIter { - inner: InnerSized, + inner: ManuallyDrop>, } impl IntoIter { #[inline] fn unsize(&self) -> &InnerUnsized { - &self.inner + self.inner.deref() } #[inline] fn unsize_mut(&mut self) -> &mut InnerUnsized { - &mut self.inner + self.inner.deref_mut() } } @@ -69,7 +69,7 @@ impl IntoIterator for [T; N] { // SAFETY: The original array was entirely initialized and the the alive // range we're passing here represents that fact. let inner = unsafe { InnerSized::new_unchecked(IndexRange::zero_to(N), data) }; - IntoIter { inner } + IntoIter { inner: ManuallyDrop::new(inner) } } } @@ -146,7 +146,7 @@ impl IntoIter { let alive = unsafe { IndexRange::new_unchecked(initialized.start, initialized.end) }; // SAFETY: one of our safety condition is that these items are initialized. let inner = unsafe { InnerSized::new_unchecked(alive, buffer) }; - IntoIter { inner } + IntoIter { inner: ManuallyDrop::new(inner) } } /// Creates an iterator over `T` which returns no elements. @@ -205,7 +205,7 @@ impl IntoIter { #[inline] pub const fn empty() -> Self { let inner = InnerSized::empty(); - IntoIter { inner } + IntoIter { inner: ManuallyDrop::new(inner) } } /// Returns an immutable slice of all elements that have not been yielded @@ -320,11 +320,20 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] +// Even though all the Drop logic could be completely handled by +// PolymorphicIter, this impl still serves two purposes: +// - Drop has been part of the public API, so we can't remove it +// - the partial_drop function doesn't always get fully optimized away +// for !Drop types and ends up as dead code in the final binary. +// Branching on needs_drop higher in the call-tree allows it to be +// removed by earlier optimization passes. impl Drop for IntoIter { #[inline] fn drop(&mut self) { - // `inner` now handles this, but it'd technically be a breaking change - // to remove this `impl`, even though it's useless. + if crate::mem::needs_drop::() { + // SAFETY: This is the only place where we drop this field. 
+ unsafe { ManuallyDrop::drop(&mut self.inner) } + } } } diff --git a/core/src/array/mod.rs b/core/src/array/mod.rs index 0dc10758a8560..2dd639d68f0ea 100644 --- a/core/src/array/mod.rs +++ b/core/src/array/mod.rs @@ -5,10 +5,10 @@ #![stable(feature = "core_array", since = "1.35.0")] use crate::borrow::{Borrow, BorrowMut}; +use crate::clone::TrivialClone; use crate::cmp::Ordering; use crate::convert::Infallible; use crate::error::Error; -use crate::fmt; use crate::hash::{self, Hash}; use crate::intrinsics::transmute_unchecked; use crate::iter::{UncheckedIterator, repeat_n}; @@ -18,6 +18,7 @@ use crate::ops::{ }; use crate::ptr::{null, null_mut}; use crate::slice::{Iter, IterMut}; +use crate::{fmt, ptr}; mod ascii; mod drain; @@ -451,6 +452,10 @@ impl Clone for [T; N] { } } +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for [T; N] {} + trait SpecArrayClone: Clone { fn clone(array: &[Self; N]) -> [Self; N]; } @@ -462,10 +467,12 @@ impl SpecArrayClone for T { } } -impl SpecArrayClone for T { +impl SpecArrayClone for T { #[inline] fn clone(array: &[T; N]) -> [T; N] { - *array + // SAFETY: `TrivialClone` implies that this is equivalent to calling + // `Clone` on every element. + unsafe { ptr::read(array) } } } diff --git a/core/src/asserting.rs b/core/src/asserting.rs index 3015aa562e6c0..acaee72c93016 100644 --- a/core/src/asserting.rs +++ b/core/src/asserting.rs @@ -73,12 +73,14 @@ where // ***** Others ***** +//spellchecker:off /// All possible captured `assert!` elements /// /// # Types /// /// * `E`: **E**lement that is going to be displayed. /// * `M`: **M**arker used to differentiate [Capture]s in regards to [Debug]. +//spellchecker:on #[unstable(feature = "generic_assert_internals", issue = "44838")] pub struct Capture { // If None, then `E` does not implements [Printable] or `E` wasn't evaluated (`assert!( ... 
)` diff --git a/core/src/cell.rs b/core/src/cell.rs index aeac35e45a5d0..988c50795e299 100644 --- a/core/src/cell.rs +++ b/core/src/cell.rs @@ -252,7 +252,7 @@ use crate::cmp::Ordering; use crate::fmt::{self, Debug, Display}; -use crate::marker::{PhantomData, Unsize}; +use crate::marker::{Destruct, PhantomData, Unsize}; use crate::mem::{self, ManuallyDrop}; use crate::ops::{self, CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn}; use crate::panic::const_panic; @@ -429,7 +429,11 @@ impl Cell { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn set(&self, val: T) { + #[rustc_const_unstable(feature = "const_cell_traits", issue = "147787")] + pub const fn set(&self, val: T) + where + T: [const] Destruct, + { self.replace(val); } @@ -561,7 +565,12 @@ impl Cell { /// ``` #[inline] #[stable(feature = "cell_update", since = "1.88.0")] - pub fn update(&self, f: impl FnOnce(T) -> T) { + #[rustc_const_unstable(feature = "const_cell_traits", issue = "147787")] + pub const fn update(&self, f: impl [const] FnOnce(T) -> T) + where + // FIXME(const-hack): `Copy` should imply `const Destruct` + T: [const] Destruct, + { let old = self.get(); self.set(f(old)); } @@ -654,7 +663,11 @@ impl Cell { /// assert_eq!(c.into_inner(), 0); /// ``` #[stable(feature = "move_cell", since = "1.17.0")] - pub fn take(&self) -> T { + #[rustc_const_unstable(feature = "const_cell_traits", issue = "147787")] + pub const fn take(&self) -> T + where + T: [const] Default, + { self.replace(Default::default()) } } diff --git a/core/src/char/methods.rs b/core/src/char/methods.rs index 76f54db287079..d1de2c5606154 100644 --- a/core/src/char/methods.rs +++ b/core/src/char/methods.rs @@ -74,12 +74,12 @@ impl char { /// The maximum number of bytes required to [encode](char::encode_utf8) a `char` to /// UTF-8 encoding. - #[unstable(feature = "char_max_len", issue = "121714")] + #[stable(feature = "char_max_len_assoc", since = "CURRENT_RUSTC_VERSION")] pub const MAX_LEN_UTF8: usize = 4; /// The maximum number of two-byte units required to [encode](char::encode_utf16) a `char` /// to UTF-16 encoding. - #[unstable(feature = "char_max_len", issue = "121714")] + #[stable(feature = "char_max_len_assoc", since = "CURRENT_RUSTC_VERSION")] pub const MAX_LEN_UTF16: usize = 2; /// `U+FFFD REPLACEMENT CHARACTER` (�) is used in Unicode to represent a diff --git a/core/src/clone.rs b/core/src/clone.rs index 7f2a40f753fa6..bf8875098edfa 100644 --- a/core/src/clone.rs +++ b/core/src/clone.rs @@ -250,10 +250,42 @@ pub const trait Clone: Sized { } } +/// Indicates that the `Clone` implementation is identical to copying the value. +/// +/// This is used for some optimizations in the standard library, which specializes +/// on this trait to select faster implementations of functions such as +/// [`clone_from_slice`](slice::clone_from_slice). It is automatically implemented +/// when using `#[derive(Clone, Copy)]`. +/// +/// Note that this trait does not imply that the type is `Copy`, because e.g. +/// `core::ops::Range` could soundly implement this trait. +/// +/// # Safety +/// `Clone::clone` must be equivalent to copying the value, otherwise calling functions +/// such as `slice::clone_from_slice` can have undefined behaviour. 
+#[unstable( + feature = "trivial_clone", + reason = "this isn't part of any API guarantee", + issue = "none" +)] +#[rustc_const_unstable(feature = "const_clone", issue = "142757")] +#[lang = "trivial_clone"] +// SAFETY: +// It is sound to specialize on this because the `clone` implementation cannot be +// lifetime-dependent. Therefore, if `TrivialClone` is implemented for any lifetime, +// its invariant holds whenever `Clone` is implemented, even if the actual +// `TrivialClone` bound would not be satisfied because of lifetime bounds. +#[rustc_unsafe_specialization_marker] +// If `#[derive(Clone, Clone, Copy)]` is written, there will be multiple +// implementations of `TrivialClone`. To keep it from appearing in error +// messages, make it a `#[marker]` trait. +#[marker] +pub const unsafe trait TrivialClone: [const] Clone {} + /// Derive macro generating an impl of the trait `Clone`. #[rustc_builtin_macro] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] -#[allow_internal_unstable(core_intrinsics, derive_clone_copy)] +#[allow_internal_unstable(core_intrinsics, derive_clone_copy, trivial_clone)] pub macro Clone($item:item) { /* compiler built-in */ } @@ -569,18 +601,25 @@ unsafe impl CloneToUninit for crate::bstr::ByteStr { /// are implemented in `traits::SelectionContext::copy_clone_conditions()` /// in `rustc_trait_selection`. mod impls { + use super::TrivialClone; use crate::marker::PointeeSized; macro_rules! impl_clone { ($($t:ty)*) => { $( #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for $t { + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + impl const Clone for $t { #[inline(always)] fn clone(&self) -> Self { *self } } + + #[doc(hidden)] + #[unstable(feature = "trivial_clone", issue = "none")] + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + unsafe impl const TrivialClone for $t {} )* } } @@ -593,32 +632,51 @@ mod impls { } #[unstable(feature = "never_type", issue = "35121")] - impl Clone for ! { + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + impl const Clone for ! { #[inline] fn clone(&self) -> Self { *self } } + #[doc(hidden)] + #[unstable(feature = "trivial_clone", issue = "none")] + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + unsafe impl const TrivialClone for ! {} + #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for *const T { + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + impl const Clone for *const T { #[inline(always)] fn clone(&self) -> Self { *self } } + #[doc(hidden)] + #[unstable(feature = "trivial_clone", issue = "none")] + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + unsafe impl const TrivialClone for *const T {} + #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for *mut T { + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + impl const Clone for *mut T { #[inline(always)] fn clone(&self) -> Self { *self } } + #[doc(hidden)] + #[unstable(feature = "trivial_clone", issue = "none")] + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + unsafe impl const TrivialClone for *mut T {} + /// Shared references can be cloned, but mutable references *cannot*! 
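Aside: `TrivialClone` itself is internal to `core` and not nameable from user code, but the contract it captures can be illustrated with plain types. A `Copy` type may still have a `Clone` with observable side effects, which is presumably why the specializations (and the removed `CopyOnly` test further up) now key on this marker rather than on `Copy`:

```rust
// A `Copy` type whose `Clone` has a side effect: it satisfies `Copy`,
// but cloning it is *not* equivalent to a bitwise copy, so it must not
// be treated as "trivially cloneable".
#[derive(Copy)]
struct Loud(u32);

impl Clone for Loud {
    fn clone(&self) -> Self {
        println!("clone called"); // side effect
        *self
    }
}

// A plain `#[derive(Clone, Copy)]` type, by contrast, gets the bitwise
// `Clone` that the new marker trait is meant to capture.
#[derive(Clone, Copy)]
struct Meters(u32);

fn main() {
    let m = Meters(5);
    assert_eq!(m.clone().0, m.0);

    let l = Loud(1).clone(); // prints "clone called"
    assert_eq!(l.0, 1);
}
```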
#[stable(feature = "rust1", since = "1.0.0")] - impl Clone for &T { + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + impl const Clone for &T { #[inline(always)] #[rustc_diagnostic_item = "noop_method_clone"] fn clone(&self) -> Self { @@ -626,6 +684,11 @@ mod impls { } } + #[doc(hidden)] + #[unstable(feature = "trivial_clone", issue = "none")] + #[rustc_const_unstable(feature = "const_clone", issue = "142757")] + unsafe impl const TrivialClone for &T {} + /// Shared references can be cloned, but mutable references *cannot*! #[stable(feature = "rust1", since = "1.0.0")] impl !Clone for &mut T {} diff --git a/core/src/clone/uninit.rs b/core/src/clone/uninit.rs index 8b738bec796de..8d1185067eb88 100644 --- a/core/src/clone/uninit.rs +++ b/core/src/clone/uninit.rs @@ -1,3 +1,4 @@ +use super::TrivialClone; use crate::mem::{self, MaybeUninit}; use crate::ptr; @@ -49,9 +50,9 @@ unsafe impl CopySpec for T { } } -// Specialized implementation for types that are [`Copy`], not just [`Clone`], +// Specialized implementation for types that are [`TrivialClone`], not just [`Clone`], // and can therefore be copied bitwise. -unsafe impl CopySpec for T { +unsafe impl CopySpec for T { #[inline] unsafe fn clone_one(src: &Self, dst: *mut Self) { // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of diff --git a/core/src/cmp/bytewise.rs b/core/src/cmp/bytewise.rs index 2265fa7a3531c..f0f5f656405a9 100644 --- a/core/src/cmp/bytewise.rs +++ b/core/src/cmp/bytewise.rs @@ -17,8 +17,7 @@ use crate::num::NonZero; /// - Neither `Self` nor `Rhs` have provenance, so integer comparisons are correct. /// - `>::{eq,ne}` are equivalent to comparing the bytes. #[rustc_specialization_trait] -#[const_trait] // FIXME(const_trait_impl): Migrate to `const unsafe trait` once #146122 is fixed. -pub(crate) unsafe trait BytewiseEq: +pub(crate) const unsafe trait BytewiseEq: [const] PartialEq + Sized { } diff --git a/core/src/ffi/c_longlong.md b/core/src/ffi/c_longlong.md index 49c61bd61f4ad..234ab344409da 100644 --- a/core/src/ffi/c_longlong.md +++ b/core/src/ffi/c_longlong.md @@ -2,4 +2,4 @@ Equivalent to C's `signed long long` (`long long`) type. This type will almost always be [`i64`], but may differ on some systems. The C standard technically only requires that this type be a signed integer that is at least 64 bits and at least the size of a [`long`], although in practice, no system would have a `long long` that is not an `i64`, as most systems do not have a standardised [`i128`] type. -[`long`]: c_int +[`long`]: c_long diff --git a/core/src/ffi/c_str.rs b/core/src/ffi/c_str.rs index 09d9b160700ca..9a35ed07b89ac 100644 --- a/core/src/ffi/c_str.rs +++ b/core/src/ffi/c_str.rs @@ -15,18 +15,18 @@ use crate::{fmt, ops, slice, str}; // actually reference libstd or liballoc in intra-doc links. so, the best we can do is remove the // links to `CString` and `String` for now until a solution is developed -/// Representation of a borrowed C string. +/// A dynamically-sized view of a C string. /// -/// This type represents a borrowed reference to a nul-terminated +/// The type `&CStr` represents a reference to a borrowed nul-terminated /// array of bytes. It can be constructed safely from a &[[u8]] /// slice, or unsafely from a raw `*const c_char`. It can be expressed as a /// literal in the form `c"Hello world"`. 
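Aside: a small stable-Rust example of the `&CStr` usage the reworded docs describe:

```rust
use std::ffi::CStr;

fn main() {
    // A borrowed C string can be written as a literal and converted to &str
    // by UTF-8 validation.
    let c: &CStr = c"Hello world";
    assert_eq!(c.to_str().unwrap(), "Hello world");
    assert_eq!(c.to_bytes().len(), 11); // the nul terminator is not counted
}
```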
/// -/// The `CStr` can then be converted to a Rust &[str] by performing +/// The `&CStr` can then be converted to a Rust &[str] by performing /// UTF-8 validation, or into an owned `CString`. /// /// `&CStr` is to `CString` as &[str] is to `String`: the former -/// in each pair are borrowed references; the latter are owned +/// in each pair are borrowing references; the latter are owned /// strings. /// /// Note that this structure does **not** have a guaranteed layout (the `repr(transparent)` diff --git a/core/src/ffi/va_list.rs b/core/src/ffi/va_list.rs index 46ccf330d1c22..3c9587d383e30 100644 --- a/core/src/ffi/va_list.rs +++ b/core/src/ffi/va_list.rs @@ -299,3 +299,15 @@ impl<'f> Drop for VaListImpl<'f> { // This works for now, since `va_end` is a no-op on all current LLVM targets. } } + +// Checks (via an assert in `compiler/rustc_ty_utils/src/abi.rs`) that the C ABI for the current +// target correctly implements `rustc_pass_indirectly_in_non_rustic_abis`. +const _: () = { + #[repr(C)] + #[rustc_pass_indirectly_in_non_rustic_abis] + struct Type(usize); + + const extern "C" fn c(_: Type) {} + + c(Type(0)) +}; diff --git a/core/src/fmt/builders.rs b/core/src/fmt/builders.rs index 665b05b12ec07..4ea6c6ba8fb9c 100644 --- a/core/src/fmt/builders.rs +++ b/core/src/fmt/builders.rs @@ -1210,13 +1210,12 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { } } -/// Creates a type whose [`fmt::Debug`] and [`fmt::Display`] impls are provided with the function -/// `f`. +/// Creates a type whose [`fmt::Debug`] and [`fmt::Display`] impls are +/// forwarded to the provided closure. /// /// # Examples /// /// ``` -/// #![feature(debug_closure_helpers)] /// use std::fmt; /// /// let value = 'a'; @@ -1227,21 +1226,19 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { /// assert_eq!(format!("{}", wrapped), "'a'"); /// assert_eq!(format!("{:?}", wrapped), "'a'"); /// ``` -#[unstable(feature = "debug_closure_helpers", issue = "117729")] +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] #[must_use = "returns a type implementing Debug and Display, which do not have any effects unless they are used"] pub fn from_fn) -> fmt::Result>(f: F) -> FromFn { FromFn(f) } -/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. +/// Implements [`fmt::Debug`] and [`fmt::Display`] via the provided closure. /// /// Created with [`from_fn`]. 
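Aside: a usage sketch of `fmt::from_fn` as stabilized by this patch (on released compilers it is still gated behind `debug_closure_helpers`). The `joined` helper is hypothetical, shown only to illustrate building an ad-hoc `Display` value from a closure:

```rust
use std::fmt;

// Hypothetical helper: join a slice with a separator without allocating a
// String or defining a dedicated wrapper type.
fn joined<'a>(items: &'a [u32]) -> impl fmt::Display + 'a {
    fmt::from_fn(move |f| {
        for (i, item) in items.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            write!(f, "{item}")?;
        }
        Ok(())
    })
}

fn main() {
    let nums = [1, 2, 3];
    assert_eq!(format!("{}", joined(&nums)), "1, 2, 3");
}
```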
-#[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub struct FromFn(F) -where - F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result; +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] +pub struct FromFn(F); -#[unstable(feature = "debug_closure_helpers", issue = "117729")] +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] impl fmt::Debug for FromFn where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, @@ -1251,7 +1248,7 @@ where } } -#[unstable(feature = "debug_closure_helpers", issue = "117729")] +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] impl fmt::Display for FromFn where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, diff --git a/core/src/fmt/mod.rs b/core/src/fmt/mod.rs index 0f255e57fe585..4b1e013c2b445 100644 --- a/core/src/fmt/mod.rs +++ b/core/src/fmt/mod.rs @@ -3,11 +3,13 @@ #![stable(feature = "rust1", since = "1.0.0")] use crate::cell::{Cell, Ref, RefCell, RefMut, SyncUnsafeCell, UnsafeCell}; -use crate::char::{EscapeDebugExtArgs, MAX_LEN_UTF8}; +use crate::char::EscapeDebugExtArgs; +use crate::hint::assert_unchecked; use crate::marker::{PhantomData, PointeeSized}; use crate::num::fmt as numfmt; use crate::ops::Deref; -use crate::{iter, result, str}; +use crate::ptr::NonNull; +use crate::{iter, mem, result, str}; mod builders; #[cfg(not(no_fp_fmt_parse))] @@ -39,7 +41,7 @@ pub use num_buffer::{NumBuffer, NumBufferTrait}; #[stable(feature = "debug_builders", since = "1.2.0")] pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; -#[unstable(feature = "debug_closure_helpers", issue = "117729")] +#[stable(feature = "fmt_from_fn", since = "CURRENT_RUSTC_VERSION")] pub use self::builders::{FromFn, from_fn}; /// The type returned by formatter methods. @@ -179,7 +181,7 @@ pub trait Write { /// ``` #[stable(feature = "fmt_write_char", since = "1.1.0")] fn write_char(&mut self, c: char) -> Result { - self.write_str(c.encode_utf8(&mut [0; MAX_LEN_UTF8])) + self.write_str(c.encode_utf8(&mut [0; char::MAX_LEN_UTF8])) } /// Glue for usage of the [`write!`] macro with implementors of this trait. @@ -288,7 +290,7 @@ pub struct FormattingOptions { /// ```text /// 31 30 29 28 27 26 25 24 23 22 21 20 0 /// ┌───┬───────┬───┬───┬───┬───┬───┬───┬───┬───┬──────────────────────────────────┐ - /// │ 1 │ align │ p │ w │ X?│ x?│'0'│ # │ - │ + │ fill │ + /// │ 0 │ align │ p │ w │ X?│ x?│'0'│ # │ - │ + │ fill │ /// └───┴───────┴───┴───┴───┴───┴───┴───┴───┴───┴──────────────────────────────────┘ /// │ │ │ │ └─┬───────────────────┘ └─┬──────────────────────────────┘ /// │ │ │ │ │ └─ The fill character (21 bits char). @@ -299,12 +301,9 @@ pub struct FormattingOptions { /// │ ├─ 1: Align right. (>) /// │ ├─ 2: Align center. (^) /// │ └─ 3: Alignment not set. (default) - /// └─ Always set. - /// This makes it possible to distinguish formatting flags from - /// a &str size when stored in (the upper bits of) the same field. - /// (fmt::Arguments will make use of this property in the future.) + /// └─ Always zero. /// ``` - // Note: This could use a special niche type with range 0x8000_0000..=0xfdd0ffff. + // Note: This could use a pattern type with range 0x0000_0000..=0x7dd0ffff. // It's unclear if that's useful, though. flags: u32, /// Width if width flag (bit 27) above is set. Otherwise, always 0. 
@@ -328,7 +327,6 @@ mod flags { pub(super) const ALIGN_RIGHT: u32 = 1 << 29; pub(super) const ALIGN_CENTER: u32 = 2 << 29; pub(super) const ALIGN_UNKNOWN: u32 = 3 << 29; - pub(super) const ALWAYS_SET: u32 = 1 << 31; } impl FormattingOptions { @@ -344,11 +342,7 @@ impl FormattingOptions { /// - no [`DebugAsHex`] output mode. #[unstable(feature = "formatting_options", issue = "118117")] pub const fn new() -> Self { - Self { - flags: ' ' as u32 | flags::ALIGN_UNKNOWN | flags::ALWAYS_SET, - width: 0, - precision: 0, - } + Self { flags: ' ' as u32 | flags::ALIGN_UNKNOWN, width: 0, precision: 0 } } /// Sets or removes the sign (the `+` or the `-` flag). @@ -612,19 +606,141 @@ impl<'a> Formatter<'a> { /// ``` /// /// [`format()`]: ../../std/fmt/fn.format.html +// +// Internal representation: +// +// fmt::Arguments is represented in one of two ways: +// +// 1) String literal representation (e.g. format_args!("hello")) +// ┌────────────────────────────────┐ +// template: │ *const u8 │ ─▷ "hello" +// ├──────────────────────────────┬─┤ +// args: │ len │1│ (lowest bit is 1; field contains `len << 1 | 1`) +// └──────────────────────────────┴─┘ +// In this representation, there are no placeholders and `fmt::Arguments::as_str()` returns Some. +// The pointer points to the start of a static `str`. The length is given by `args as usize >> 1`. +// (The length of a `&str` is isize::MAX at most, so it always fits in a usize minus one bit.) +// +// `fmt::Arguments::from_str()` constructs this representation from a `&'static str`. +// +// 2) Placeholders representation (e.g. format_args!("hello {name}\n")) +// ┌────────────────────────────────┐ +// template: │ *const u8 │ ─▷ b"\x06hello \x80\x01\n\x00" +// ├────────────────────────────────┤ +// args: │ &'a [Argument<'a>; _] 0│ (lower bit is 0 due to alignment of Argument type) +// └────────────────────────────────┘ +// In this representation, the template is a byte sequence encoding both the literal string pieces +// and the placeholders (including their options/flags). +// +// The `args` pointer points to an array of `fmt::Argument<'a>` values, of sufficient length to +// match the placeholders in the template. +// +// `fmt::Arguments::new()` constructs this representation from a template byte slice and a slice +// of arguments. This function is unsafe, as the template is assumed to be valid and the args +// slice is assumed to have elements matching the template. +// +// The template byte sequence is the concatenation of parts of the following types: +// +// - Literal string piece: +// Pieces that must be formatted verbatim (e.g. "hello " and "\n" in "hello {name}\n") +// appear literally in the template byte sequence, prefixed by their length. +// +// For pieces of up to 127 bytes, these are represented as a single byte containing the +// length followed directly by the bytes of the string: +// ┌───┬────────────────────────────┐ +// │len│ `len` bytes (utf-8) │ (e.g. b"\x06hello ") +// └───┴────────────────────────────┘ +// +// For larger pieces up to u16::MAX bytes, these are represented as a 0x80 followed by +// their length in 16-bit little endian, followed by the bytes of the string: +// ┌────┬─────────┬───────────────────────────┐ +// │0x80│ len │ `len` bytes (utf-8) │ (e.g. b"\x80\x00\x01hello … ") +// └────┴─────────┴───────────────────────────┘ +// +// Longer pieces are split into multiple pieces of max u16::MAX bytes (at utf-8 boundaries). +// +// - Placeholder: +// Placeholders (e.g. 
`{name}` in "hello {name}") are represented as a byte with the highest +// two bits set, followed by zero or more fields depending on the flags in the first byte: +// ┌──────────┬┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┬┄┄┄┄┄┄┄┄┄┄┄┬┄┄┄┄┄┄┄┄┄┄┄┬┄┄┄┄┄┄┄┄┄┄┄┐ +// │0b11______│ flags ┊ width ┊ precision ┊ arg_index ┊ (e.g. b"\xC2\x05\0") +// └────││││││┴┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┴┄┄┄┄┄┄┄┄┄┄┄┴┄┄┄┄┄┄┄┄┄┄┄┴┄┄┄┄┄┄┄┄┄┄┄┘ +// ││││││ 32 bit 16 bit 16 bit 16 bit +// │││││└─ flags present +// ││││└─ width present +// │││└─ precision present +// ││└─ arg_index present +// │└─ width indirect +// └─ precision indirect +// +// All fields other than the first byte are optional and only present when their +// corresponding flag is set in the first byte. +// +// So, a fully default placeholder without any options is just a single byte: +// ┌──────────┐ +// │0b11000000│ (b"\xC0") +// └──────────┘ +// +// The fields are stored as little endian. +// +// The `flags` fields corresponds to the `flags` field of `FormattingOptions`. +// See doc comment of `FormattingOptions::flags` for details. +// +// The `width` and `precision` fields correspond to their respective fields in +// `FormattingOptions`. However, if their "indirect" flag is set, the field contains the +// index in the `args` array where the dynamic width or precision is stored, rather than the +// value directly. +// +// The `arg_index` field is the index into the `args` array for the argument to be +// formatted. +// +// If omitted, the flags, width and precision of the default FormattingOptions::new() are +// used. +// +// If the `arg_index` is omitted, the next argument in the `args` array is used (starting +// at 0). +// +// - End: +// A single zero byte marks the end of the template: +// ┌───┐ +// │ 0 │ ("\0") +// └───┘ +// +// (Note that a zero byte may also occur naturally as part of the string pieces or flags, +// width, precision and arg_index fields above. That is, the template byte sequence ends +// with a 0 byte, but isn't terminated by the first 0 byte.) +// #[lang = "format_arguments"] #[stable(feature = "rust1", since = "1.0.0")] #[derive(Copy, Clone)] pub struct Arguments<'a> { - // Format string pieces to print. - pieces: &'a [&'static str], + template: NonNull, + args: NonNull>, +} - // Placeholder specs, or `None` if all specs are default (as in "{}{}"). - fmt: Option<&'a [rt::Placeholder]>, +/// Used by the format_args!() macro to create a fmt::Arguments object. +#[doc(hidden)] +#[rustc_diagnostic_item = "FmtArgumentsNew"] +#[unstable(feature = "fmt_internals", issue = "none")] +impl<'a> Arguments<'a> { + // SAFETY: The caller must ensure that the provided template and args encode a valid + // fmt::Arguments, as documented above. + #[inline] + pub unsafe fn new( + template: &'a [u8; N], + args: &'a [rt::Argument<'a>; M], + ) -> Arguments<'a> { + // SAFETY: Responsibility of the caller. + unsafe { Arguments { template: mem::transmute(template), args: mem::transmute(args) } } + } - // Dynamic arguments for interpolation, to be interleaved with string - // pieces. (Every argument is preceded by a string piece.) - args: &'a [rt::Argument<'a>], + // Same as `from_str`, but not const. + // Used by format_args!() expansion when arguments are inlined, + // e.g. format_args!("{}", 123), which is not allowed in const. + #[inline] + pub fn from_str_nonconst(s: &'static str) -> Arguments<'a> { + Arguments::from_str(s) + } } #[doc(hidden)] @@ -636,25 +752,76 @@ impl<'a> Arguments<'a> { /// when using `format!`. Note: this is neither the lower nor upper bound. 
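The two representations described above hinge on a single trick: the `args` word either carries `len << 1 | 1` for a plain string literal, or a pointer to argument data whose 2-byte alignment guarantees a clear low bit. A minimal standalone sketch of that tagging idea (all names here are invented for illustration; this is not the real internals):

```rust
#![allow(dead_code)]

// Stand-in for `fmt::rt::Argument`: `repr(align(2))` is what keeps pointer low bits at 0.
#[repr(align(2))]
struct Arg(u8);

enum Repr<'a> {
    // The "string literal" case: the word held `len << 1 | 1`.
    Literal { len: usize },
    // The "placeholders" case: the word was an (even) pointer to the arguments.
    Placeholders(&'a Arg),
}

fn decode(word: usize, args: &Arg) -> Repr<'_> {
    if word & 1 == 1 { Repr::Literal { len: word >> 1 } } else { Repr::Placeholders(args) }
}

fn main() {
    let arg = Arg(0);

    // Literal case: "hello" has length 5, stored as (5 << 1) | 1 = 11.
    assert!(matches!(decode("hello".len() << 1 | 1, &arg), Repr::Literal { len: 5 }));

    // Placeholder case: an aligned pointer always has its low bit clear.
    let word = &arg as *const Arg as usize;
    assert_eq!(word & 1, 0);
    assert!(matches!(decode(word, &arg), Repr::Placeholders(_)));
}
```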
#[inline] pub fn estimated_capacity(&self) -> usize { - let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum(); + if let Some(s) = self.as_str() { + return s.len(); + } + // Iterate over the template, counting the length of literal pieces. + let mut length = 0usize; + let mut starts_with_placeholder = false; + let mut template = self.template; + loop { + // SAFETY: We can assume the template is valid. + unsafe { + let n = template.read(); + template = template.add(1); + if n == 0 { + // End of template. + break; + } else if n < 128 { + // Short literal string piece. + length += n as usize; + template = template.add(n as usize); + } else if n == 128 { + // Long literal string piece. + let len = usize::from(u16::from_le_bytes(template.cast_array().read())); + length += len; + template = template.add(2 + len); + } else { + assert_unchecked(n >= 0xC0); + // Placeholder piece. + if length == 0 { + starts_with_placeholder = true; + } + // Skip remainder of placeholder: + let skip = (n & 1 != 0) as usize * 4 // flags (32 bit) + + (n & 2 != 0) as usize * 2 // width (16 bit) + + (n & 4 != 0) as usize * 2 // precision (16 bit) + + (n & 8 != 0) as usize * 2; // arg_index (16 bit) + template = template.add(skip as usize); + } + } + } - if self.args.is_empty() { - pieces_length - } else if !self.pieces.is_empty() && self.pieces[0].is_empty() && pieces_length < 16 { - // If the format string starts with an argument, + if starts_with_placeholder && length < 16 { + // If the format string starts with a placeholder, // don't preallocate anything, unless length - // of pieces is significant. + // of literal pieces is significant. 0 } else { - // There are some arguments, so any additional push + // There are some placeholders, so any additional push // will reallocate the string. To avoid that, // we're "pre-doubling" the capacity here. - pieces_length.checked_mul(2).unwrap_or(0) + length.wrapping_mul(2) } } } impl<'a> Arguments<'a> { + /// Create a `fmt::Arguments` object for a single static string. + /// + /// Formatting this `fmt::Arguments` will just produce the string as-is. + #[inline] + #[unstable(feature = "fmt_arguments_from_str", issue = "148905")] + pub const fn from_str(s: &'static str) -> Arguments<'a> { + // SAFETY: This is the "static str" representation of fmt::Arguments; see above. + unsafe { + Arguments { + template: mem::transmute(s.as_ptr()), + args: mem::transmute(s.len() << 1 | 1), + } + } + } + /// Gets the formatted string, if it has no arguments to be formatted at runtime. /// /// This can be used to avoid allocations in some cases. @@ -702,10 +869,22 @@ impl<'a> Arguments<'a> { #[must_use] #[inline] pub const fn as_str(&self) -> Option<&'static str> { - match (self.pieces, self.args) { - ([], []) => Some(""), - ([s], []) => Some(s), - _ => None, + // SAFETY: During const eval, `self.args` must have come from a usize, + // not a pointer, because that's the only way to create a fmt::Arguments in const. + // (I.e. only fmt::Arguments::from_str is const, fmt::Arguments::new is not.) + // + // Outside const eval, transmuting a pointer to a usize is fine. + let bits: usize = unsafe { mem::transmute(self.args) }; + if bits & 1 == 1 { + // SAFETY: This fmt::Arguments stores a &'static str. See encoding documentation above. + Some(unsafe { + str::from_utf8_unchecked(crate::slice::from_raw_parts( + self.template.as_ptr(), + bits >> 1, + )) + }) + } else { + None } } @@ -1448,86 +1627,113 @@ pub trait UpperExp: PointeeSized { /// /// [`write!`]: crate::write! 
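`as_str()` is the user-visible payoff of the literal representation: when a `format_args!` invocation has no runtime placeholders, the text is available without running any formatting machinery. A small usage sketch (the `render` helper is invented for illustration):

```rust
use std::fmt;

// Avoids allocating when the Arguments is just a string literal.
fn render(args: fmt::Arguments<'_>) -> String {
    match args.as_str() {
        Some(s) => s.to_string(),
        None => args.to_string(),
    }
}

fn main() {
    assert_eq!(render(format_args!("hello")), "hello");

    let n = std::hint::black_box(3); // runtime value, so no static-str representation
    assert_eq!(render(format_args!("n = {n}")), "n = 3");
}
```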
#[stable(feature = "rust1", since = "1.0.0")] -pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result { - let mut formatter = Formatter::new(output, FormattingOptions::new()); - let mut idx = 0; - - match args.fmt { - None => { - // We can use default formatting parameters for all arguments. - for (i, arg) in args.args.iter().enumerate() { - // SAFETY: args.args and args.pieces come from the same Arguments, - // which guarantees the indexes are always within bounds. - let piece = unsafe { args.pieces.get_unchecked(i) }; - if !piece.is_empty() { - formatter.buf.write_str(*piece)?; - } - - // SAFETY: There are no formatting parameters and hence no - // count arguments. - unsafe { - arg.fmt(&mut formatter)?; - } - idx += 1; - } - } - Some(fmt) => { - // Every spec has a corresponding argument that is preceded by - // a string piece. - for (i, arg) in fmt.iter().enumerate() { - // SAFETY: fmt and args.pieces come from the same Arguments, - // which guarantees the indexes are always within bounds. - let piece = unsafe { args.pieces.get_unchecked(i) }; - if !piece.is_empty() { - formatter.buf.write_str(*piece)?; - } - // SAFETY: arg and args.args come from the same Arguments, - // which guarantees the indexes are always within bounds. - unsafe { run(&mut formatter, arg, args.args) }?; - idx += 1; - } - } +pub fn write(output: &mut dyn Write, fmt: Arguments<'_>) -> Result { + if let Some(s) = fmt.as_str() { + return output.write_str(s); } - // There can be only one trailing string piece left. - if let Some(piece) = args.pieces.get(idx) { - formatter.buf.write_str(*piece)?; - } + let mut template = fmt.template; + let args = fmt.args; - Ok(()) -} + let mut arg_index = 0; -unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::Placeholder, args: &[rt::Argument<'_>]) -> Result { - let (width, precision) = - // SAFETY: arg and args come from the same Arguments, - // which guarantees the indexes are always within bounds. - unsafe { (getcount(args, &arg.width), getcount(args, &arg.precision)) }; + // See comment on `fmt::Arguments` for the details of how the template is encoded. - let options = FormattingOptions { flags: arg.flags, width, precision }; + // This must match the encoding from `expand_format_args` in + // compiler/rustc_ast_lowering/src/format.rs. + loop { + // SAFETY: We can assume the template is valid. + let n = unsafe { + let n = template.read(); + template = template.add(1); + n + }; - // Extract the correct argument - debug_assert!(arg.position < args.len()); - // SAFETY: arg and args come from the same Arguments, - // which guarantees its index is always within bounds. - let value = unsafe { args.get_unchecked(arg.position) }; + if n == 0 { + // End of template. + return Ok(()); + } else if n < 0x80 { + // Literal string piece of length `n`. + + // SAFETY: We can assume the strings in the template are valid. + let s = unsafe { + let s = crate::str::from_raw_parts(template.as_ptr(), n as usize); + template = template.add(n as usize); + s + }; + output.write_str(s)?; + } else if n == 0x80 { + // Literal string piece with a 16-bit length. + + // SAFETY: We can assume the strings in the template are valid. + let s = unsafe { + let len = usize::from(u16::from_le_bytes(template.cast_array().read())); + template = template.add(2); + let s = crate::str::from_raw_parts(template.as_ptr(), len); + template = template.add(len); + s + }; + output.write_str(s)?; + } else if n == 0xC0 { + // Placeholder for next argument with default options. 
+ // + // Having this as a separate case improves performance for the common case. + + // SAFETY: We can assume the template only refers to arguments that exist. + unsafe { + args.add(arg_index) + .as_ref() + .fmt(&mut Formatter::new(output, FormattingOptions::new()))?; + } + arg_index += 1; + } else { + // SAFETY: We can assume the template is valid. + unsafe { assert_unchecked(n > 0xC0) }; - // Set all the formatting options. - fmt.options = options; + // Placeholder with custom options. - // Then actually do some printing - // SAFETY: this is a placeholder argument. - unsafe { value.fmt(fmt) } -} + let mut opt = FormattingOptions::new(); -unsafe fn getcount(args: &[rt::Argument<'_>], cnt: &rt::Count) -> u16 { - match *cnt { - rt::Count::Is(n) => n, - rt::Count::Implied => 0, - rt::Count::Param(i) => { - debug_assert!(i < args.len()); - // SAFETY: cnt and args come from the same Arguments, - // which guarantees this index is always within bounds. - unsafe { args.get_unchecked(i).as_u16().unwrap_unchecked() } + // SAFETY: We can assume the template is valid. + unsafe { + if n & 1 != 0 { + opt.flags = u32::from_le_bytes(template.cast_array().read()); + template = template.add(4); + } + if n & 2 != 0 { + opt.width = u16::from_le_bytes(template.cast_array().read()); + template = template.add(2); + } + if n & 4 != 0 { + opt.precision = u16::from_le_bytes(template.cast_array().read()); + template = template.add(2); + } + if n & 8 != 0 { + arg_index = usize::from(u16::from_le_bytes(template.cast_array().read())); + template = template.add(2); + } + } + if n & 16 != 0 { + // Dynamic width from a usize argument. + // SAFETY: We can assume the template only refers to arguments that exist. + unsafe { + opt.width = args.add(opt.width as usize).as_ref().as_u16().unwrap_unchecked(); + } + } + if n & 32 != 0 { + // Dynamic precision from a usize argument. + // SAFETY: We can assume the template only refers to arguments that exist. + unsafe { + opt.precision = + args.add(opt.precision as usize).as_ref().as_u16().unwrap_unchecked(); + } + } + + // SAFETY: We can assume the template only refers to arguments that exist. + unsafe { + args.add(arg_index).as_ref().fmt(&mut Formatter::new(output, opt))?; + } + arg_index += 1; } } } @@ -2775,7 +2981,7 @@ impl Display for char { if f.options.flags & (flags::WIDTH_FLAG | flags::PRECISION_FLAG) == 0 { f.write_char(*self) } else { - f.pad(self.encode_utf8(&mut [0; MAX_LEN_UTF8])) + f.pad(self.encode_utf8(&mut [0; char::MAX_LEN_UTF8])) } } } diff --git a/core/src/fmt/rt.rs b/core/src/fmt/rt.rs index fb858a0572612..5221783e72901 100644 --- a/core/src/fmt/rt.rs +++ b/core/src/fmt/rt.rs @@ -10,28 +10,6 @@ use super::*; use crate::hint::unreachable_unchecked; use crate::ptr::NonNull; -#[lang = "format_placeholder"] -#[derive(Copy, Clone)] -pub struct Placeholder { - pub position: usize, - pub flags: u32, - pub precision: Count, - pub width: Count, -} - -/// Used by [width](https://doc.rust-lang.org/std/fmt/#width) -/// and [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers. -#[lang = "format_count"] -#[derive(Copy, Clone)] -pub enum Count { - /// Specified with a literal number, stores the value - Is(u16), - /// Specified using `$` and `*` syntaxes, stores the index into `args` - Param(usize), - /// Not specified - Implied, -} - #[derive(Copy, Clone)] enum ArgumentType<'a> { Placeholder { @@ -56,6 +34,7 @@ enum ArgumentType<'a> { /// precision and width. 
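The public entry point rewritten above is `fmt::write`; its observable behaviour is unchanged, so existing callers keep working. A brief usage sketch: the literal-only call takes the new `as_str()` fast path, while the second call walks the encoded template.

```rust
use std::fmt;

fn main() -> fmt::Result {
    let mut out = String::new();

    // Literal-only arguments: a single write_str via the as_str() fast path.
    fmt::write(&mut out, format_args!("hello"))?;

    // With a placeholder: the template interpreter formats each argument in turn.
    fmt::write(&mut out, format_args!(" {}!", "world"))?;

    assert_eq!(out, "hello world!");
    Ok(())
}
```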
#[lang = "format_argument"] #[derive(Copy, Clone)] +#[repr(align(2))] // To ensure pointers to this struct always have their lowest bit cleared. pub struct Argument<'a> { ty: ArgumentType<'a>, } @@ -184,55 +163,3 @@ impl Argument<'_> { } } } - -/// Used by the format_args!() macro to create a fmt::Arguments object. -#[doc(hidden)] -#[unstable(feature = "fmt_internals", issue = "none")] -#[rustc_diagnostic_item = "FmtArgumentsNew"] -impl<'a> Arguments<'a> { - #[inline] - pub const fn new_const(pieces: &'a [&'static str; N]) -> Self { - const { assert!(N <= 1) }; - Arguments { pieces, fmt: None, args: &[] } - } - - /// When using the format_args!() macro, this function is used to generate the - /// Arguments structure. - /// - /// This function should _not_ be const, to make sure we don't accept - /// format_args!() and panic!() with arguments in const, even when not evaluated: - /// - /// ```compile_fail,E0015 - /// const _: () = if false { panic!("a {}", "a") }; - /// ``` - #[inline] - pub fn new_v1( - pieces: &'a [&'static str; P], - args: &'a [rt::Argument<'a>; A], - ) -> Arguments<'a> { - const { assert!(P >= A && P <= A + 1, "invalid args") } - Arguments { pieces, fmt: None, args } - } - - /// Specifies nonstandard formatting parameters. - /// - /// SAFETY: the following invariants must be held: - /// 1. The `pieces` slice must be at least as long as `fmt`. - /// 2. Every `rt::Placeholder::position` value within `fmt` must be a valid index of `args`. - /// 3. Every `rt::Count::Param` within `fmt` must contain a valid index of `args`. - /// - /// This function should _not_ be const, to make sure we don't accept - /// format_args!() and panic!() with arguments in const, even when not evaluated: - /// - /// ```compile_fail,E0015 - /// const _: () = if false { panic!("a {:1}", "a") }; - /// ``` - #[inline] - pub unsafe fn new_v1_formatted( - pieces: &'a [&'static str], - args: &'a [rt::Argument<'a>], - fmt: &'a [rt::Placeholder], - ) -> Arguments<'a> { - Arguments { pieces, fmt: Some(fmt), args } - } -} diff --git a/core/src/hash/mod.rs b/core/src/hash/mod.rs index a10c85640bbb6..c3f3cd7294254 100644 --- a/core/src/hash/mod.rs +++ b/core/src/hash/mod.rs @@ -633,6 +633,7 @@ impl Hasher for &mut H { /// /// [`build_hasher`]: BuildHasher::build_hasher /// [`HashMap`]: ../../std/collections/struct.HashMap.html +#[cfg_attr(not(test), rustc_diagnostic_item = "BuildHasher")] #[stable(since = "1.7.0", feature = "build_hasher")] pub trait BuildHasher { /// Type of the hasher that will be created. diff --git a/core/src/hint.rs b/core/src/hint.rs index 23cfdf5bfde2b..71acede7e3eb3 100644 --- a/core/src/hint.rs +++ b/core/src/hint.rs @@ -4,6 +4,7 @@ //! //! Hints may be compile time or runtime. +use crate::marker::Destruct; use crate::mem::MaybeUninit; use crate::{intrinsics, ub_checks}; @@ -270,11 +271,11 @@ pub fn spin_loop() { crate::cfg_select! { target_arch = "x86" => { // SAFETY: the `cfg` attr ensures that we only execute this on x86 targets. - unsafe { crate::arch::x86::_mm_pause() } + crate::arch::x86::_mm_pause() } target_arch = "x86_64" => { // SAFETY: the `cfg` attr ensures that we only execute this on x86_64 targets. 
- unsafe { crate::arch::x86_64::_mm_pause() } + crate::arch::x86_64::_mm_pause() } target_arch = "riscv32" => crate::arch::riscv32::pause(), target_arch = "riscv64" => crate::arch::riscv64::pause(), @@ -771,7 +772,11 @@ pub const fn cold_path() { /// ``` #[inline(always)] #[stable(feature = "select_unpredictable", since = "1.88.0")] -pub fn select_unpredictable(condition: bool, true_val: T, false_val: T) -> T { +#[rustc_const_unstable(feature = "const_select_unpredictable", issue = "145938")] +pub const fn select_unpredictable(condition: bool, true_val: T, false_val: T) -> T +where + T: [const] Destruct, +{ // FIXME(https://github.com/rust-lang/unsafe-code-guidelines/issues/245): // Change this to use ManuallyDrop instead. let mut true_val = MaybeUninit::new(true_val); diff --git a/core/src/index.rs b/core/src/index.rs new file mode 100644 index 0000000000000..3baefdf10cecb --- /dev/null +++ b/core/src/index.rs @@ -0,0 +1,472 @@ +#![unstable(feature = "sliceindex_wrappers", issue = "146179")] + +//! Helper types for indexing slices. + +use crate::intrinsics::slice_get_unchecked; +use crate::slice::SliceIndex; +use crate::{cmp, ops, range}; + +/// Clamps an index, guaranteeing that it will only access valid elements of the slice. +/// +/// # Examples +/// +/// ``` +/// #![feature(sliceindex_wrappers)] +/// +/// use core::index::Clamp; +/// +/// let s: &[usize] = &[0, 1, 2, 3]; +/// +/// assert_eq!(&3, &s[Clamp(6)]); +/// assert_eq!(&[1, 2, 3], &s[Clamp(1..6)]); +/// assert_eq!(&[] as &[usize], &s[Clamp(5..6)]); +/// assert_eq!(&[0, 1, 2, 3], &s[Clamp(..6)]); +/// assert_eq!(&[0, 1, 2, 3], &s[Clamp(..=6)]); +/// assert_eq!(&[] as &[usize], &s[Clamp(6..)]); +/// ``` +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +#[derive(Debug)] +pub struct Clamp(pub Idx); + +/// Always accesses the last element of the slice. 
+/// +/// # Examples +/// +/// ``` +/// #![feature(sliceindex_wrappers)] +/// #![feature(slice_index_methods)] +/// +/// use core::index::Last; +/// use core::slice::SliceIndex; +/// +/// let s = &[0, 1, 2, 3]; +/// +/// assert_eq!(&3, &s[Last]); +/// assert_eq!(None, Last.get(&[] as &[usize])); +/// +/// ``` +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +#[derive(Debug)] +pub struct Last; + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp { + type Output = T; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + slice.get(cmp::min(self.0, slice.len() - 1)) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + slice.get_mut(cmp::min(self.0, slice.len() - 1)) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { slice_get_unchecked(slice, cmp::min(self.0, slice.len() - 1)) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { slice_get_unchecked(slice, cmp::min(self.0, slice.len() - 1)) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + &(*slice)[cmp::min(self.0, slice.len() - 1)] + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + &mut (*slice)[cmp::min(self.0, slice.len() - 1)] + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + // SAFETY: a range ending before len is always valid + unsafe { (start..end).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + // SAFETY: a range ending before len is always valid + unsafe { (start..end).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = 
cmp::min(self.0.end, slice.len()); + // SAFETY: a range ending before len is always valid + unsafe { (start..end).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + // SAFETY: a range ending before len is always valid + unsafe { (start..end).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + let start = cmp::min(self.0.start, slice.len()); + let end = cmp::min(self.0.end, slice.len()); + (start..end).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + (start..=end).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + (start..=end).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (start..=end).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (start..=end).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + (start..=end).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.last, slice.len() - 1); + (start..=end).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + (start..=end).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + (start..=end).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (start..=end).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (start..=end).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + let start = cmp::min(self.0.start, 
slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + (start..=end).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + let start = cmp::min(self.0.start, slice.len() - 1); + let end = cmp::min(self.0.end, slice.len() - 1); + (start..=end).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + (cmp::min(self.0.start, slice.len())..).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (cmp::min(self.0.start, slice.len())..).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: a range starting at len is valid + unsafe { (cmp::min(self.0.start, slice.len())..).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: a range starting at len is valid + unsafe { (cmp::min(self.0.start, slice.len())..).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (cmp::min(self.0.start, slice.len())..).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (cmp::min(self.0.start, slice.len())..).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + (cmp::min(self.0.start, slice.len())..).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (cmp::min(self.0.start, slice.len())..).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: a range starting at len is valid + unsafe { (cmp::min(self.0.start, slice.len())..).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: a range starting at len is valid + unsafe { (cmp::min(self.0.start, slice.len())..).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (cmp::min(self.0.start, slice.len())..).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (cmp::min(self.0.start, slice.len())..).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + (..cmp::min(self.0.end, slice.len())).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (..cmp::min(self.0.end, slice.len())).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: a range ending before len is always valid + unsafe { (..cmp::min(self.0.end, slice.len())).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: a range ending before len is always valid + unsafe { (..cmp::min(self.0.end, slice.len())).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (..cmp::min(self.0.end, slice.len())).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (..cmp::min(self.0.end, slice.len())).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> 
Option<&Self::Output> { + (..=cmp::min(self.0.last, slice.len() - 1)).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (..=cmp::min(self.0.last, slice.len() - 1)).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (..=cmp::min(self.0.last, slice.len() - 1)).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (..=cmp::min(self.0.last, slice.len() - 1)).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (..=cmp::min(self.0.last, slice.len() - 1)).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (..=cmp::min(self.0.last, slice.len() - 1)).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp> { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + (..=cmp::min(self.0.end, slice.len() - 1)).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (..=cmp::min(self.0.end, slice.len() - 1)).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (..=cmp::min(self.0.end, slice.len() - 1)).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { (..=cmp::min(self.0.end, slice.len() - 1)).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (..=cmp::min(self.0.end, slice.len() - 1)).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (..=cmp::min(self.0.end, slice.len() - 1)).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Clamp { + type Output = [T]; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + (..).get(slice) + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + (..).get_mut(slice) + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: RangeFull just returns `slice` here + unsafe { (..).get_unchecked(slice) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: RangeFull just returns `slice` here + unsafe { (..).get_unchecked_mut(slice) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + (..).index(slice) + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + (..).index_mut(slice) + } +} + +#[unstable(feature = "sliceindex_wrappers", issue = "146179")] +unsafe impl SliceIndex<[T]> for Last { + type Output = T; + + fn get(self, slice: &[T]) -> Option<&Self::Output> { + slice.last() + } + + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output> { + slice.last_mut() + } + + unsafe fn get_unchecked(self, slice: *const [T]) -> *const Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { slice_get_unchecked(slice, slice.len() - 1) } + } + + unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut Self::Output { + // SAFETY: the caller ensures that the slice isn't empty + unsafe { slice_get_unchecked(slice, slice.len() - 1) } + } + + fn index(self, slice: &[T]) -> &Self::Output { + // N.B., use intrinsic 
indexing + &(*slice)[slice.len() - 1] + } + + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output { + // N.B., use intrinsic indexing + &mut (*slice)[slice.len() - 1] + } +} diff --git a/core/src/intrinsics/mir.rs b/core/src/intrinsics/mir.rs index a800ef1cb9375..5b3c924152552 100644 --- a/core/src/intrinsics/mir.rs +++ b/core/src/intrinsics/mir.rs @@ -227,13 +227,13 @@ //! //! #### Statements //! - Assign statements work via normal Rust assignment. -//! - [`Retag`], [`StorageLive`], [`StorageDead`], [`Deinit`] statements have an associated function. +//! - [`Retag`], [`StorageLive`], [`StorageDead`] statements have an associated function. //! //! #### Rvalues //! //! - Operands implicitly convert to `Use` rvalues. //! - `&`, `&mut`, `addr_of!`, and `addr_of_mut!` all work to create their associated rvalue. -//! - [`CopyForDeref`], [`CastTransmute`], [`CastPtrToPtr`], [`CastUnsize`], and [`Discriminant`] +//! - [`CastTransmute`], [`CastPtrToPtr`], [`CastUnsize`], and [`Discriminant`] //! have associated functions. //! - Unary and binary operations use their normal Rust syntax - `a * b`, `!c`, etc. //! - The binary operation `Offset` can be created via [`Offset`]. @@ -400,13 +400,11 @@ define!("mir_unwind_resume", define!("mir_storage_live", fn StorageLive(local: T)); define!("mir_storage_dead", fn StorageDead(local: T)); define!("mir_assume", fn Assume(operand: bool)); -define!("mir_deinit", fn Deinit(place: T)); define!("mir_checked", fn Checked(binop: T) -> (T, bool)); define!( "mir_ptr_metadata", fn PtrMetadata(place: *const P) ->

(&mut self, mut predicate: P) -> Option where Self: Sized, P: FnMut(Self::Item) -> bool, diff --git a/core/src/slice/mod.rs b/core/src/slice/mod.rs index f7f5ee819b2e4..f03f2045444df 100644 --- a/core/src/slice/mod.rs +++ b/core/src/slice/mod.rs @@ -6,6 +6,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::clone::TrivialClone; use crate::cmp::Ordering::{self, Equal, Greater, Less}; use crate::intrinsics::{exact_div, unchecked_sub}; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; @@ -841,7 +842,8 @@ impl [T] { /// Gets a reference to the underlying array. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. - #[unstable(feature = "slice_as_array", issue = "133508")] + #[stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const fn as_array(&self) -> Option<&[T; N]> { @@ -859,7 +861,8 @@ impl [T] { /// Gets a mutable reference to the slice's underlying array. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. - #[unstable(feature = "slice_as_array", issue = "133508")] + #[stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const fn as_mut_array(&mut self) -> Option<&mut [T; N]> { @@ -2725,6 +2728,38 @@ impl [T] { None } + /// Returns a subslice with the prefix and suffix removed. + /// + /// If the slice starts with `prefix` and ends with `suffix`, returns the subslice after the + /// prefix and before the suffix, wrapped in `Some`. + /// + /// If the slice does not start with `prefix` or does not end with `suffix`, returns `None`. + /// + /// # Examples + /// + /// ``` + /// #![feature(strip_circumfix)] + /// + /// let v = &[10, 50, 40, 30]; + /// assert_eq!(v.strip_circumfix(&[10], &[30]), Some(&[50, 40][..])); + /// assert_eq!(v.strip_circumfix(&[10], &[40, 30]), Some(&[50][..])); + /// assert_eq!(v.strip_circumfix(&[10, 50], &[40, 30]), Some(&[][..])); + /// assert_eq!(v.strip_circumfix(&[50], &[30]), None); + /// assert_eq!(v.strip_circumfix(&[10], &[40]), None); + /// assert_eq!(v.strip_circumfix(&[], &[40, 30]), Some(&[10, 50][..])); + /// assert_eq!(v.strip_circumfix(&[10, 50], &[]), Some(&[40, 30][..])); + /// ``` + #[must_use = "returns the subslice without modifying the original"] + #[unstable(feature = "strip_circumfix", issue = "147946")] + pub fn strip_circumfix(&self, prefix: &P, suffix: &S) -> Option<&[T]> + where + T: PartialEq, + S: SlicePattern + ?Sized, + P: SlicePattern + ?Sized, + { + self.strip_prefix(prefix)?.strip_suffix(suffix) + } + /// Returns a subslice with the optional prefix removed. /// /// If the slice starts with `prefix`, returns the subslice after the prefix. 
If `prefix` @@ -3629,7 +3664,7 @@ impl [T] { /// assert_eq!(a, ['a', 'c', 'd', 'e', 'b', 'f']); /// ``` #[stable(feature = "slice_rotate", since = "1.26.0")] - #[rustc_const_unstable(feature = "const_slice_rotate", issue = "143812")] + #[rustc_const_stable(feature = "const_slice_rotate", since = "1.92.0")] pub const fn rotate_left(&mut self, mid: usize) { assert!(mid <= self.len()); let k = self.len() - mid; @@ -3675,7 +3710,7 @@ impl [T] { /// assert_eq!(a, ['a', 'e', 'b', 'c', 'd', 'f']); /// ``` #[stable(feature = "slice_rotate", since = "1.26.0")] - #[rustc_const_unstable(feature = "const_slice_rotate", issue = "143812")] + #[rustc_const_stable(feature = "const_slice_rotate", since = "1.92.0")] pub const fn rotate_right(&mut self, k: usize) { assert!(k <= self.len()); let mid = self.len() - k; @@ -3856,30 +3891,8 @@ impl [T] { where T: Copy, { - // The panic code path was put into a cold function to not bloat the - // call site. - #[cfg_attr(not(panic = "immediate-abort"), inline(never), cold)] - #[cfg_attr(panic = "immediate-abort", inline)] - #[track_caller] - const fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { - const_panic!( - "copy_from_slice: source slice length does not match destination slice length", - "copy_from_slice: source slice length ({src_len}) does not match destination slice length ({dst_len})", - src_len: usize, - dst_len: usize, - ) - } - - if self.len() != src.len() { - len_mismatch_fail(self.len(), src.len()); - } - - // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was - // checked to have the same length. The slices cannot overlap because - // mutable references are exclusive. - unsafe { - ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); - } + // SAFETY: `T` implements `Copy`. + unsafe { copy_from_slice_impl(self, src) } } /// Copies elements from one part of the slice to another part of itself, @@ -5089,6 +5102,38 @@ impl [f64] { } } +/// Copies `src` to `dest`. +/// +/// # Safety +/// `T` must implement one of `Copy` or `TrivialClone`. +#[track_caller] +const unsafe fn copy_from_slice_impl(dest: &mut [T], src: &[T]) { + // The panic code path was put into a cold function to not bloat the + // call site. + #[cfg_attr(not(panic = "immediate-abort"), inline(never), cold)] + #[cfg_attr(panic = "immediate-abort", inline)] + #[track_caller] + const fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { + const_panic!( + "copy_from_slice: source slice length does not match destination slice length", + "copy_from_slice: source slice length ({src_len}) does not match destination slice length ({dst_len})", + src_len: usize, + dst_len: usize, + ) + } + + if dest.len() != src.len() { + len_mismatch_fail(dest.len(), src.len()); + } + + // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was + // checked to have the same length. The slices cannot overlap because + // mutable references are exclusive. + unsafe { + ptr::copy_nonoverlapping(src.as_ptr(), dest.as_mut_ptr(), dest.len()); + } +} + trait CloneFromSpec { fn spec_clone_from(&mut self, src: &[T]); } @@ -5113,11 +5158,14 @@ where impl CloneFromSpec for [T] where - T: Copy, + T: TrivialClone, { #[track_caller] fn spec_clone_from(&mut self, src: &[T]) { - self.copy_from_slice(src); + // SAFETY: `T` implements `TrivialClone`. 
+ unsafe { + copy_from_slice_impl(self, src); + } } } diff --git a/core/src/slice/specialize.rs b/core/src/slice/specialize.rs index 80eb590587f99..c44225b753642 100644 --- a/core/src/slice/specialize.rs +++ b/core/src/slice/specialize.rs @@ -1,3 +1,6 @@ +use crate::clone::TrivialClone; +use crate::ptr; + pub(super) trait SpecFill { fn spec_fill(&mut self, value: T); } @@ -14,10 +17,57 @@ impl SpecFill for [T] { } } -impl SpecFill for [T] { - fn spec_fill(&mut self, value: T) { +impl SpecFill for [T] { + default fn spec_fill(&mut self, value: T) { for item in self.iter_mut() { - *item = value; + // SAFETY: `TrivialClone` indicates that this is equivalent to + // calling `Clone::clone` + *item = unsafe { ptr::read(&value) }; + } + } +} + +impl SpecFill for [u8] { + fn spec_fill(&mut self, value: u8) { + // SAFETY: The pointer is derived from a reference, so it's writable. + unsafe { + crate::intrinsics::write_bytes(self.as_mut_ptr(), value, self.len()); } } } + +impl SpecFill for [i8] { + fn spec_fill(&mut self, value: i8) { + // SAFETY: The pointer is derived from a reference, so it's writable. + unsafe { + crate::intrinsics::write_bytes(self.as_mut_ptr(), value.cast_unsigned(), self.len()); + } + } +} + +macro spec_fill_int { + ($($type:ty)*) => {$( + impl SpecFill<$type> for [$type] { + #[inline] + fn spec_fill(&mut self, value: $type) { + // We always take this fastpath in Miri for long slices as the manual `for` + // loop can be prohibitively slow. + if (cfg!(miri) && self.len() > 32) || crate::intrinsics::is_val_statically_known(value) { + let bytes = value.to_ne_bytes(); + if value == <$type>::from_ne_bytes([bytes[0]; size_of::<$type>()]) { + // SAFETY: The pointer is derived from a reference, so it's writable. + unsafe { + crate::intrinsics::write_bytes(self.as_mut_ptr(), bytes[0], self.len()); + } + return; + } + } + for item in self.iter_mut() { + *item = value; + } + } + } + )*} +} + +spec_fill_int! { u16 i16 u32 i32 u64 i64 u128 i128 usize isize } diff --git a/core/src/str/lossy.rs b/core/src/str/lossy.rs index 8d4210c80827d..d2dc650910f63 100644 --- a/core/src/str/lossy.rs +++ b/core/src/str/lossy.rs @@ -1,3 +1,4 @@ +use super::char::EscapeDebugExtArgs; use super::from_utf8_unchecked; use super::validations::utf8_char_width; use crate::fmt; @@ -121,7 +122,11 @@ impl fmt::Debug for Debug<'_> { let valid = chunk.valid(); let mut from = 0; for (i, c) in valid.char_indices() { - let esc = c.escape_debug(); + let esc = c.escape_debug_ext(EscapeDebugExtArgs { + escape_grapheme_extended: true, + escape_single_quote: false, + escape_double_quote: true, + }); // If char needs escaping, flush backlog so far and write, else skip if esc.len() != 1 { f.write_str(&valid[from..i])?; diff --git a/core/src/str/mod.rs b/core/src/str/mod.rs index 3a5efa7d83511..ab7389a1300c5 100644 --- a/core/src/str/mod.rs +++ b/core/src/str/mod.rs @@ -1251,6 +1251,8 @@ impl str { /// ending will return the same lines as an otherwise identical string /// without a final line ending. /// + /// An empty string returns an empty iterator. 
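Stepping back to the `SpecFill` fast path added above: the trick is that an integer whose bytes are all identical can be written with a byte-wise `write_bytes` (memset). A hedged, standalone sketch of the same check using only public APIs (`fill_fast` is an invented name, not part of this diff):

```rust
use std::ptr;

// If every byte of `value` is the same, a byte-wise fill is equivalent to an
// element-wise fill, so the whole slice can be written with one memset.
fn fill_fast(dst: &mut [u32], value: u32) {
    let bytes = value.to_ne_bytes();
    if value == u32::from_ne_bytes([bytes[0]; 4]) {
        // SAFETY: `dst` is a valid, exclusive slice; write_bytes fills
        // `dst.len() * size_of::<u32>()` bytes, exactly the slice's storage.
        unsafe { ptr::write_bytes(dst.as_mut_ptr(), bytes[0], dst.len()) };
    } else {
        dst.fill(value);
    }
}

fn main() {
    let mut a = [0u32; 4];
    fill_fast(&mut a, 0x2a2a_2a2a); // all bytes equal: memset path
    assert_eq!(a, [0x2a2a_2a2a; 4]);

    fill_fast(&mut a, 0x0102_0304); // bytes differ: element-wise fallback
    assert_eq!(a, [0x0102_0304; 4]);
}
```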
+ /// /// # Examples /// /// Basic usage: @@ -1281,6 +1283,15 @@ impl str { /// /// assert_eq!(None, lines.next()); /// ``` + /// + /// An empty string returns an empty iterator: + /// + /// ``` + /// let text = ""; + /// let mut lines = text.lines(); + /// + /// assert_eq!(lines.next(), None); + /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn lines(&self) -> Lines<'_> { @@ -1953,6 +1964,7 @@ impl str { /// /// ``` /// assert_eq!("cfg".rsplit_once('='), None); + /// assert_eq!("cfg=".rsplit_once('='), Some(("cfg", ""))); /// assert_eq!("cfg=foo".rsplit_once('='), Some(("cfg", "foo"))); /// assert_eq!("cfg=foo=bar".rsplit_once('='), Some(("cfg=foo", "bar"))); /// ``` @@ -2447,6 +2459,42 @@ impl str { suffix.strip_suffix_of(self) } + /// Returns a string slice with the prefix and suffix removed. + /// + /// If the string starts with the pattern `prefix` and ends with the pattern `suffix`, returns + /// the substring after the prefix and before the suffix, wrapped in `Some`. + /// Unlike [`trim_start_matches`] and [`trim_end_matches`], this method removes both the prefix + /// and suffix exactly once. + /// + /// If the string does not start with `prefix` or does not end with `suffix`, returns `None`. + /// + /// Each [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a + /// function or closure that determines if a character matches. + /// + /// [`char`]: prim@char + /// [pattern]: self::pattern + /// [`trim_start_matches`]: Self::trim_start_matches + /// [`trim_end_matches`]: Self::trim_end_matches + /// + /// # Examples + /// + /// ``` + /// #![feature(strip_circumfix)] + /// + /// assert_eq!("bar:hello:foo".strip_circumfix("bar:", ":foo"), Some("hello")); + /// assert_eq!("bar:foo".strip_circumfix("foo", "foo"), None); + /// assert_eq!("foo:bar;".strip_circumfix("foo:", ';'), Some("bar")); + /// ``` + #[must_use = "this returns the remaining substring as a new slice, \ + without modifying the original"] + #[unstable(feature = "strip_circumfix", issue = "147946")] + pub fn strip_circumfix(&self, prefix: P, suffix: S) -> Option<&str> + where + for<'a> S::Searcher<'a>: ReverseSearcher<'a>, + { + self.strip_prefix(prefix)?.strip_suffix(suffix) + } + /// Returns a string slice with the optional prefix removed. /// /// If the string starts with the pattern `prefix`, returns the substring after the prefix. @@ -2704,6 +2752,8 @@ impl str { /// Checks if all characters in this string are within the ASCII range. /// + /// An empty string returns `true`. + /// /// # Examples /// /// ``` diff --git a/core/src/str/pattern.rs b/core/src/str/pattern.rs index e116b13838323..b54522fcc886f 100644 --- a/core/src/str/pattern.rs +++ b/core/src/str/pattern.rs @@ -38,7 +38,6 @@ issue = "27721" )] -use crate::char::MAX_LEN_UTF8; use crate::cmp::Ordering; use crate::convert::TryInto as _; use crate::slice::memchr; @@ -563,7 +562,7 @@ impl Pattern for char { #[inline] fn into_searcher<'a>(self, haystack: &'a str) -> Self::Searcher<'a> { - let mut utf8_encoded = [0; MAX_LEN_UTF8]; + let mut utf8_encoded = [0; char::MAX_LEN_UTF8]; let utf8_size = self .encode_utf8(&mut utf8_encoded) .len() diff --git a/core/src/sync/atomic.rs b/core/src/sync/atomic.rs index 30a42d4eb5e64..d7eaaf35b53a3 100644 --- a/core/src/sync/atomic.rs +++ b/core/src/sync/atomic.rs @@ -1558,6 +1558,8 @@ impl AtomicPtr { /// Gets atomic access to a pointer. 
/// + /// **Note:** This function is only available on targets where `AtomicPtr` has the same alignment as `*const T` + /// /// # Examples /// /// ``` @@ -1625,6 +1627,8 @@ impl AtomicPtr { /// Gets atomic access to a slice of pointers. /// + /// **Note:** This function is only available on targets where `AtomicPtr` has the same alignment as `*const T` + /// /// # Examples /// /// ```ignore-wasm @@ -2804,6 +2808,14 @@ macro_rules! atomic_int { #[doc = concat!("Get atomic access to a `&mut [", stringify!($int_type), "]` slice.")] /// + #[doc = if_8_bit! { + $int_type, + no = [ + "**Note:** This function is only available on targets where `", + stringify!($atomic_type), "` has the same alignment as `", stringify!($int_type), "`." + ], + }] + /// /// # Examples /// /// ```ignore-wasm @@ -4277,11 +4289,81 @@ unsafe fn atomic_umin(dst: *mut T, val: T, order: Ordering) -> T { /// threads. To achieve this, a fence prevents the compiler and CPU from reordering certain types of /// memory operations around it. /// -/// A fence 'A' which has (at least) [`Release`] ordering semantics, synchronizes -/// with a fence 'B' with (at least) [`Acquire`] semantics, if and only if there -/// exist operations X and Y, both operating on some atomic object 'm' such -/// that A is sequenced before X, Y is sequenced before B and Y observes -/// the change to m. This provides a happens-before dependence between A and B. +/// There are 3 different ways to use an atomic fence: +/// +/// - atomic - fence synchronization: an atomic operation with (at least) [`Release`] ordering +/// semantics synchronizes with a fence with (at least) [`Acquire`] ordering semantics. +/// - fence - atomic synchronization: a fence with (at least) [`Release`] ordering semantics +/// synchronizes with an atomic operation with (at least) [`Acquire`] ordering semantics. +/// - fence - fence synchronization: a fence with (at least) [`Release`] ordering semantics +/// synchronizes with a fence with (at least) [`Acquire`] ordering semantics. +/// +/// These 3 ways complement the regular, fence-less, atomic - atomic synchronization. +/// +/// ## Atomic - Fence +/// +/// An atomic operation on one thread will synchronize with a fence on another thread when: +/// +/// - on thread 1: +/// - an atomic operation 'X' with (at least) [`Release`] ordering semantics on some atomic +/// object 'm', +/// +/// - is paired on thread 2 with: +/// - an atomic read 'Y' with any order on 'm', +/// - followed by a fence 'B' with (at least) [`Acquire`] ordering semantics. +/// +/// This provides a happens-before dependence between X and B. +/// +/// ```text +/// Thread 1 Thread 2 +/// +/// m.store(3, Release); X --------- +/// | +/// | +/// -------------> Y if m.load(Relaxed) == 3 { +/// B fence(Acquire); +/// ... +/// } +/// ``` +/// +/// ## Fence - Atomic +/// +/// A fence on one thread will synchronize with an atomic operation on another thread when: +/// +/// - on thread: +/// - a fence 'A' with (at least) [`Release`] ordering semantics, +/// - followed by an atomic write 'X' with any ordering on some atomic object 'm', +/// +/// - is paired on thread 2 with: +/// - an atomic operation 'Y' with (at least) [`Acquire`] ordering semantics. +/// +/// This provides a happens-before dependence between A and Y. +/// +/// ```text +/// Thread 1 Thread 2 +/// +/// fence(Release); A +/// m.store(3, Relaxed); X --------- +/// | +/// | +/// -------------> Y if m.load(Acquire) == 3 { +/// ... 
+/// } +/// ``` +/// +/// ## Fence - Fence +/// +/// A fence on one thread will synchronize with a fence on another thread when: +/// +/// - on thread 1: +/// - a fence 'A' which has (at least) [`Release`] ordering semantics, +/// - followed by an atomic write 'X' with any ordering on some atomic object 'm', +/// +/// - is paired on thread 2 with: +/// - an atomic read 'Y' with any ordering on 'm', +/// - followed by a fence 'B' with (at least) [`Acquire`] ordering semantics. +/// +/// This provides a happens-before dependence between A and B. /// /// ```text /// Thread 1 Thread 2 @@ -4296,18 +4378,20 @@ unsafe fn atomic_umin(dst: *mut T, val: T, order: Ordering) -> T { /// } /// ``` /// -/// Note that in the example above, it is crucial that the accesses to `m` are atomic. Fences cannot -/// be used to establish synchronization among non-atomic accesses in different threads. However, -/// thanks to the happens-before relationship between A and B, any non-atomic accesses that -/// happen-before A are now also properly synchronized with any non-atomic accesses that -/// happen-after B. +/// ## Mandatory Atomic +/// +/// Note that in the examples above, it is crucial that the access to `m` are atomic. Fences cannot +/// be used to establish synchronization between non-atomic accesses in different threads. However, +/// thanks to the happens-before relationship, any non-atomic access that happen-before the atomic +/// operation or fence with (at least) [`Release`] ordering semantics are now also properly +/// synchronized with any non-atomic accesses that happen-after the atomic operation or fence with +/// (at least) [`Acquire`] ordering semantics. /// -/// Atomic operations with [`Release`] or [`Acquire`] semantics can also synchronize -/// with a fence. +/// ## Memory Ordering /// -/// A fence which has [`SeqCst`] ordering, in addition to having both [`Acquire`] -/// and [`Release`] semantics, participates in the global program order of the -/// other [`SeqCst`] operations and/or fences. +/// A fence which has [`SeqCst`] ordering, in addition to having both [`Acquire`] and [`Release`] +/// semantics, participates in the global program order of the other [`SeqCst`] operations and/or +/// fences. /// /// Accepts [`Acquire`], [`Release`], [`AcqRel`] and [`SeqCst`] orderings. /// diff --git a/core/src/sync/exclusive.rs b/core/src/sync/exclusive.rs index f181c5514f256..35b8120995187 100644 --- a/core/src/sync/exclusive.rs +++ b/core/src/sync/exclusive.rs @@ -1,5 +1,6 @@ //! Defines [`Exclusive`]. +use core::clone::TrivialClone; use core::cmp::Ordering; use core::fmt; use core::future::Future; @@ -261,6 +262,10 @@ where } } +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for Exclusive where T: Sync + TrivialClone {} + #[unstable(feature = "exclusive_wrapper", issue = "98407")] impl Copy for Exclusive where T: Sync + Copy {} diff --git a/core/src/task/wake.rs b/core/src/task/wake.rs index 97eb9ec7dc5b0..c22a9da0385b5 100644 --- a/core/src/task/wake.rs +++ b/core/src/task/wake.rs @@ -584,6 +584,28 @@ impl Waker { pub fn vtable(&self) -> &'static RawWakerVTable { self.waker.vtable } + + /// Constructs a `Waker` from a function pointer. + #[inline] + #[must_use] + #[unstable(feature = "waker_from_fn_ptr", issue = "148457")] + pub const fn from_fn_ptr(f: fn()) -> Self { + // SAFETY: Unsafe is used for transmutes, pointer came from `fn()` so it + // is sound to transmute it back to `fn()`. 
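Tying the fence documentation above to runnable code, here is a minimal fence-to-fence synchronization example in the shape of the Thread 1 / Thread 2 diagrams. It keeps the shared data atomic with `Relaxed` accesses so the sketch stays in safe Rust; the fences are what provide the happens-before edge.

```rust
use std::sync::atomic::{fence, AtomicBool, AtomicUsize, Ordering};
use std::thread;

static DATA: AtomicUsize = AtomicUsize::new(0);
static READY: AtomicBool = AtomicBool::new(false);

fn main() {
    let t1 = thread::spawn(|| {
        DATA.store(42, Ordering::Relaxed);
        fence(Ordering::Release); // A
        READY.store(true, Ordering::Relaxed); // X
    });
    let t2 = thread::spawn(|| {
        while !READY.load(Ordering::Relaxed) {} // Y (spins until it observes X)
        fence(Ordering::Acquire); // B
        // A happens-before B, so the store to DATA is guaranteed visible here.
        assert_eq!(DATA.load(Ordering::Relaxed), 42);
    });
    t1.join().unwrap();
    t2.join().unwrap();
}
```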
+ static VTABLE: RawWakerVTable = unsafe { + RawWakerVTable::new( + |this| RawWaker::new(this, &VTABLE), + |this| transmute::<*const (), fn()>(this)(), + |this| transmute::<*const (), fn()>(this)(), + |_| {}, + ) + }; + let raw = RawWaker::new(f as *const (), &VTABLE); + + // SAFETY: `clone` is just a copy, `drop` is a no-op while `wake` and + // `wake_by_ref` just call the function pointer. + unsafe { Self::from_raw(raw) } + } } #[stable(feature = "futures_api", since = "1.36.0")] @@ -879,6 +901,28 @@ impl LocalWaker { pub fn vtable(&self) -> &'static RawWakerVTable { self.waker.vtable } + + /// Constructs a `LocalWaker` from a function pointer. + #[inline] + #[must_use] + #[unstable(feature = "waker_from_fn_ptr", issue = "148457")] + pub const fn from_fn_ptr(f: fn()) -> Self { + // SAFETY: Unsafe is used for transmutes, pointer came from `fn()` so it + // is sound to transmute it back to `fn()`. + static VTABLE: RawWakerVTable = unsafe { + RawWakerVTable::new( + |this| RawWaker::new(this, &VTABLE), + |this| transmute::<*const (), fn()>(this)(), + |this| transmute::<*const (), fn()>(this)(), + |_| {}, + ) + }; + let raw = RawWaker::new(f as *const (), &VTABLE); + + // SAFETY: `clone` is just a copy, `drop` is a no-op while `wake` and + // `wake_by_ref` just call the function pointer. + unsafe { Self::from_raw(raw) } + } } #[unstable(feature = "local_waker", issue = "118959")] impl Clone for LocalWaker { diff --git a/core/src/time.rs b/core/src/time.rs index f721fcd6156cf..51a01545f5cf5 100644 --- a/core/src/time.rs +++ b/core/src/time.rs @@ -317,7 +317,6 @@ impl Duration { /// # Examples /// /// ``` - /// #![feature(duration_from_nanos_u128)] /// use std::time::Duration; /// /// let nanos = 10_u128.pow(24) + 321; @@ -326,12 +325,12 @@ impl Duration { /// assert_eq!(10_u64.pow(15), duration.as_secs()); /// assert_eq!(321, duration.subsec_nanos()); /// ``` - #[unstable(feature = "duration_from_nanos_u128", issue = "139201")] - // This is necessary because of const `try_from`, but can be removed if a trait-free impl is used instead - #[rustc_const_unstable(feature = "duration_from_nanos_u128", issue = "139201")] + #[stable(feature = "duration_from_nanos_u128", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "duration_from_nanos_u128", since = "CURRENT_RUSTC_VERSION")] #[must_use] #[inline] #[track_caller] + #[rustc_allow_const_fn_unstable(const_trait_impl, const_convert)] // for `u64::try_from` pub const fn from_nanos_u128(nanos: u128) -> Duration { const NANOS_PER_SEC: u128 = self::NANOS_PER_SEC as u128; let Ok(secs) = u64::try_from(nanos / NANOS_PER_SEC) else { diff --git a/core/src/ub_checks.rs b/core/src/ub_checks.rs index 514ff93c9820e..50e02320748b7 100644 --- a/core/src/ub_checks.rs +++ b/core/src/ub_checks.rs @@ -70,7 +70,7 @@ macro_rules! assert_unsafe_precondition { let msg = concat!("unsafe precondition(s) violated: ", $message, "\n\nThis indicates a bug in the program. 
\ This Undefined Behavior check is optional, and cannot be relied on for safety."); - ::core::panicking::panic_nounwind_fmt(::core::fmt::Arguments::new_const(&[msg]), false); + ::core::panicking::panic_nounwind_fmt(::core::fmt::Arguments::from_str(msg), false); } } diff --git a/core/src/wtf8.rs b/core/src/wtf8.rs index de0dfa560a3f3..7214918db6c39 100644 --- a/core/src/wtf8.rs +++ b/core/src/wtf8.rs @@ -19,7 +19,7 @@ // implementations, so, we'll have to add more doc(hidden)s anyway #![doc(hidden)] -use crate::char::{MAX_LEN_UTF16, encode_utf16_raw}; +use crate::char::{EscapeDebugExtArgs, encode_utf16_raw}; use crate::clone::CloneToUninit; use crate::fmt::{self, Write}; use crate::hash::{Hash, Hasher}; @@ -144,14 +144,20 @@ impl AsRef<[u8]> for Wtf8 { impl fmt::Debug for Wtf8 { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { fn write_str_escaped(f: &mut fmt::Formatter<'_>, s: &str) -> fmt::Result { - use crate::fmt::Write; - for c in s.chars().flat_map(|c| c.escape_debug()) { + use crate::fmt::Write as _; + for c in s.chars().flat_map(|c| { + c.escape_debug_ext(EscapeDebugExtArgs { + escape_grapheme_extended: true, + escape_single_quote: false, + escape_double_quote: true, + }) + }) { f.write_char(c)? } Ok(()) } - formatter.write_str("\"")?; + formatter.write_char('"')?; let mut pos = 0; while let Some((surrogate_pos, surrogate)) = self.next_surrogate(pos) { // SAFETY: next_surrogate provides an index for a range of valid UTF-8 bytes. @@ -164,7 +170,7 @@ impl fmt::Debug for Wtf8 { // SAFETY: after next_surrogate returns None, the remainder is valid UTF-8. write_str_escaped(formatter, unsafe { str::from_utf8_unchecked(&self.bytes[pos..]) })?; - formatter.write_str("\"") + formatter.write_char('"') } } @@ -541,7 +547,7 @@ impl Iterator for EncodeWide<'_> { return Some(tmp); } - let mut buf = [0; MAX_LEN_UTF16]; + let mut buf = [0; char::MAX_LEN_UTF16]; self.code_points.next().map(|code_point| { let n = encode_utf16_raw(code_point.to_u32(), &mut buf).len(); if n == 2 { @@ -562,15 +568,36 @@ impl Iterator for EncodeWide<'_> { } } +#[stable(feature = "encode_wide_fused_iterator", since = "1.62.0")] +impl FusedIterator for EncodeWide<'_> {} + +#[stable(feature = "encode_wide_debug", since = "1.92.0")] impl fmt::Debug for EncodeWide<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("EncodeWide").finish_non_exhaustive() + struct CodeUnit(u16); + impl fmt::Debug for CodeUnit { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // This output attempts to balance readability with precision. + // Render characters which take only one WTF-16 code unit using + // `char` syntax and everything else as code units with hex + // integer syntax (including paired and unpaired surrogate + // halves). Since Rust has no `char`-like type for WTF-16, this + // isn't perfect, so if this output isn't suitable, it is open + // to being changed (see #140153). 
+ match char::from_u32(self.0 as u32) { + Some(c) => write!(f, "{c:?}"), + None => write!(f, "0x{:04X}", self.0), + } + } + } + + write!(f, "EncodeWide(")?; + f.debug_list().entries(self.clone().map(CodeUnit)).finish()?; + write!(f, ")")?; + Ok(()) } } -#[stable(feature = "encode_wide_fused_iterator", since = "1.62.0")] -impl FusedIterator for EncodeWide<'_> {} - impl Hash for CodePoint { #[inline] fn hash(&self, state: &mut H) { diff --git a/coretests/tests/char.rs b/coretests/tests/char.rs index 6f94065b2d927..f0f6a24429284 100644 --- a/coretests/tests/char.rs +++ b/coretests/tests/char.rs @@ -1,4 +1,3 @@ -use std::char::MAX_LEN_UTF8; use std::str::FromStr; use std::{char, str}; @@ -259,7 +258,7 @@ fn test_escape_unicode() { #[test] fn test_encode_utf8() { fn check(input: char, expect: &[u8]) { - let mut buf = [0; MAX_LEN_UTF8]; + let mut buf = [0; char::MAX_LEN_UTF8]; let ptr = buf.as_ptr(); let s = input.encode_utf8(&mut buf); assert_eq!(s.as_ptr() as usize, ptr as usize); diff --git a/coretests/tests/floats/f128.rs b/coretests/tests/floats/f128.rs index 62278bf96c3c1..8e4f0c9899e1c 100644 --- a/coretests/tests/floats/f128.rs +++ b/coretests/tests/floats/f128.rs @@ -1,8 +1,6 @@ // FIXME(f16_f128): only tested on platforms that have symbols and aren't buggy #![cfg(target_has_reliable_f128)] -#[cfg(any(miri, target_has_reliable_f128_math))] -use super::assert_approx_eq; use super::assert_biteq; // Note these tolerances make sense around zero, but not for more extreme exponents. @@ -20,16 +18,6 @@ const TOL_PRECISE: f128 = 1e-28; // FIXME(f16_f128,miri): many of these have to be disabled since miri does not yet support // the intrinsics. -#[test] -#[cfg(any(miri, target_has_reliable_f128_math))] -fn test_max_recip() { - assert_approx_eq!( - f128::MAX.recip(), - 8.40525785778023376565669454330438228902076605e-4933, - 1e-4900 - ); -} - #[test] fn test_from() { assert_biteq!(f128::from(false), 0.0); diff --git a/coretests/tests/floats/f16.rs b/coretests/tests/floats/f16.rs index 7ffafd467a519..3cff4259de54f 100644 --- a/coretests/tests/floats/f16.rs +++ b/coretests/tests/floats/f16.rs @@ -1,7 +1,7 @@ // FIXME(f16_f128): only tested on platforms that have symbols and aren't buggy #![cfg(target_has_reliable_f16)] -use super::{assert_approx_eq, assert_biteq}; +use super::assert_biteq; /// Tolerance for results on the order of 10.0e-2 #[allow(unused)] @@ -22,12 +22,6 @@ const TOL_P4: f16 = 10.0; // FIXME(f16_f128,miri): many of these have to be disabled since miri does not yet support // the intrinsics. 
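To make the rendering rule spelled out in the `CodeUnit` comment above concrete, here is a Windows-only sketch; the output shape is assumed from that comment and is explicitly open to change (see #140153):

```rust
#[cfg(windows)]
fn demo() {
    use std::ffi::OsString;
    use std::os::windows::ffi::{OsStrExt, OsStringExt};

    // 'a' fits in one code unit and prints as a char; the lone surrogate 0xD800
    // has no char equivalent and is expected to stay in hex form.
    let s = OsString::from_wide(&[b'a' as u16, 0xD800]);
    // Expected to look roughly like: EncodeWide(['a', 0xD800])
    println!("{:?}", s.encode_wide());
}

fn main() {
    #[cfg(windows)]
    demo();
}
```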
-#[test] -#[cfg(any(miri, target_has_reliable_f16_math))] -fn test_max_recip() { - assert_approx_eq!(f16::MAX.recip(), 1.526624e-5f16, 1e-4); -} - #[test] fn test_from() { assert_biteq!(f16::from(false), 0.0); diff --git a/coretests/tests/floats/mod.rs b/coretests/tests/floats/mod.rs index 0348065d17fe3..63d5b8fb2c6e9 100644 --- a/coretests/tests/floats/mod.rs +++ b/coretests/tests/floats/mod.rs @@ -38,6 +38,8 @@ trait TestableFloat: Sized { const MUL_ADD_RESULT: Self; /// The result of (-12.3).mul_add(-4.5, -6.7) const NEG_MUL_ADD_RESULT: Self; + /// Reciprocal of the maximum val + const MAX_RECIP: Self; } impl TestableFloat for f16 { @@ -64,6 +66,7 @@ impl TestableFloat for f16 { const RAW_MINUS_14_DOT_25: Self = Self::from_bits(0xcb20); const MUL_ADD_RESULT: Self = 62.031; const NEG_MUL_ADD_RESULT: Self = 48.625; + const MAX_RECIP: Self = 1.526624e-5; } impl TestableFloat for f32 { @@ -92,6 +95,7 @@ impl TestableFloat for f32 { const RAW_MINUS_14_DOT_25: Self = Self::from_bits(0xc1640000); const MUL_ADD_RESULT: Self = 62.05; const NEG_MUL_ADD_RESULT: Self = 48.65; + const MAX_RECIP: Self = 2.938736e-39; } impl TestableFloat for f64 { @@ -116,6 +120,7 @@ impl TestableFloat for f64 { const RAW_MINUS_14_DOT_25: Self = Self::from_bits(0xc02c800000000000); const MUL_ADD_RESULT: Self = 62.050000000000004; const NEG_MUL_ADD_RESULT: Self = 48.650000000000006; + const MAX_RECIP: Self = 5.562684646268003e-309; } impl TestableFloat for f128 { @@ -140,6 +145,7 @@ impl TestableFloat for f128 { const RAW_MINUS_14_DOT_25: Self = Self::from_bits(0xc002c800000000000000000000000000); const MUL_ADD_RESULT: Self = 62.0500000000000000000000000000000037; const NEG_MUL_ADD_RESULT: Self = 48.6500000000000000000000000000000049; + const MAX_RECIP: Self = 8.40525785778023376565669454330438228902076605e-4933; } /// Determine the tolerance for values of the argument type. @@ -1425,6 +1431,7 @@ float_test! { let nan: Float = Float::NAN; let inf: Float = Float::INFINITY; let neg_inf: Float = Float::NEG_INFINITY; + let max: Float = Float::MAX; assert_biteq!((1.0 as Float).recip(), 1.0); assert_biteq!((2.0 as Float).recip(), 0.5); assert_biteq!((-0.4 as Float).recip(), -2.5); @@ -1432,6 +1439,7 @@ float_test! 
{ assert!(nan.recip().is_nan()); assert_biteq!(inf.recip(), 0.0); assert_biteq!(neg_inf.recip(), -0.0); + assert_biteq!(max.recip(), Float::MAX_RECIP); } } diff --git a/coretests/tests/hint.rs b/coretests/tests/hint.rs index 24de27b24b802..d15730823eb5b 100644 --- a/coretests/tests/hint.rs +++ b/coretests/tests/hint.rs @@ -1,25 +1,33 @@ #[test] fn select_unpredictable_drop() { use core::cell::Cell; + struct X<'a>(&'a Cell); - impl Drop for X<'_> { + impl const Drop for X<'_> { fn drop(&mut self) { self.0.set(true); } } - let a_dropped = Cell::new(false); - let b_dropped = Cell::new(false); - let a = X(&a_dropped); - let b = X(&b_dropped); - assert!(!a_dropped.get()); - assert!(!b_dropped.get()); - let selected = core::hint::select_unpredictable(core::hint::black_box(true), a, b); - assert!(!a_dropped.get()); - assert!(b_dropped.get()); - drop(selected); - assert!(a_dropped.get()); - assert!(b_dropped.get()); + const fn do_test() { + let a_dropped = Cell::new(false); + let b_dropped = Cell::new(false); + let a = X(&a_dropped); + let b = X(&b_dropped); + assert!(!a_dropped.get()); + assert!(!b_dropped.get()); + let selected = core::hint::select_unpredictable(core::hint::black_box(true), a, b); + assert!(!a_dropped.get()); + assert!(b_dropped.get()); + drop(selected); + assert!(a_dropped.get()); + assert!(b_dropped.get()); + } + + do_test(); + const { + do_test(); + } } #[test] diff --git a/coretests/tests/index.rs b/coretests/tests/index.rs new file mode 100644 index 0000000000000..68e4c841e3226 --- /dev/null +++ b/coretests/tests/index.rs @@ -0,0 +1,83 @@ +use core::index::Clamp; +use core::range; +use core::slice::SliceIndex; + +macro_rules! test_clamp { + ($range:expr, $(($slice:expr, $other:expr)),+) => { + $( + assert_eq!(Clamp($range.clone()).get(&$slice as &[_]), $other.get(&$slice as &[_])); + assert_eq!(Clamp($range.clone()).get_mut(&mut $slice as &mut [_]), $other.get_mut(&mut $slice as &mut [_])); + unsafe { + assert_eq!(&*Clamp($range.clone()).get_unchecked(&$slice as &[_]), &*$other.get_unchecked(&$slice as &[_])); + assert_eq!(&*Clamp($range.clone()).get_unchecked_mut(&mut $slice as &mut [_]), &*$other.get_unchecked_mut(&mut $slice as &mut [_])); + } + assert_eq!(Clamp($range.clone()).index(&$slice as &[_]), $other.index(&$slice as &[_])); + assert_eq!(Clamp($range.clone()).index_mut(&mut $slice as &mut [_]), $other.index_mut(&mut $slice as &mut [_])); + )+ + }; +} + +#[test] +fn test_clamp_usize() { + test_clamp!(2, ([0, 1], 1), ([0, 1, 2], 2)); +} + +#[test] +fn test_clamp_range_range() { + test_clamp!(range::Range::from(1..4), ([0, 1], 1..2), ([0, 1, 2, 3, 4], 1..4), ([0], 1..1)); +} + +#[test] +fn test_clamp_ops_range() { + test_clamp!(1..4, ([0, 1], 1..2), ([0, 1, 2, 3, 4], 1..4), ([0], 1..1)); +} + +#[test] +fn test_clamp_range_range_inclusive() { + test_clamp!( + range::RangeInclusive::from(1..=3), + ([0, 1], 1..=1), + ([0, 1, 2, 3, 4], 1..=3), + ([0], 0..=0) + ); +} + +#[test] +fn test_clamp_ops_range_inclusive() { + test_clamp!(1..=3, ([0, 1], 1..=1), ([0, 1, 2, 3, 4], 1..=3), ([0], 0..=0)); +} + +#[test] +fn test_clamp_range_range_from() { + test_clamp!(range::RangeFrom::from(1..), ([0, 1], 1..), ([0, 1, 2, 3, 4], 1..), ([0], 1..)); +} + +#[test] +fn test_clamp_ops_range_from() { + test_clamp!(1.., ([0, 1], 1..), ([0, 1, 2, 3, 4], 1..), ([0], 1..)); +} + +#[test] +fn test_clamp_range_to() { + test_clamp!(..4, ([0, 1], ..2), ([0, 1, 2, 3, 4], ..4), ([0], ..1)); +} + +#[test] +fn test_clamp_range_range_to_inclusive() { + test_clamp!( + 
range::RangeToInclusive::from(..=4), + ([0, 1], ..=1), + ([0, 1, 2, 3, 4], ..=4), + ([0], ..=0) + ); +} + +#[test] +fn test_clamp_ops_range_to_inclusive() { + test_clamp!(..=4, ([0, 1], ..=1), ([0, 1, 2, 3, 4], ..=4), ([0], ..=0)); +} + +#[test] +fn test_clamp_range_full() { + test_clamp!(.., ([0, 1], ..), ([0, 1, 2, 3, 4], ..), ([0], ..)); +} diff --git a/coretests/tests/iter/sources.rs b/coretests/tests/iter/sources.rs index 5a391cb67751d..420f3088e6ee4 100644 --- a/coretests/tests/iter/sources.rs +++ b/coretests/tests/iter/sources.rs @@ -37,6 +37,7 @@ fn test_repeat_count() { } #[test] +#[should_panic = "iterator is infinite"] fn test_repeat_last() { assert_eq!(repeat(42).last(), Some(42)); } diff --git a/coretests/tests/iter/traits/iterator.rs b/coretests/tests/iter/traits/iterator.rs index e31d2e15b6d7e..5ef1f797ae55d 100644 --- a/coretests/tests/iter/traits/iterator.rs +++ b/coretests/tests/iter/traits/iterator.rs @@ -1,3 +1,5 @@ +use core::cell::RefCell; +use core::iter::zip; use core::num::NonZero; /// A wrapper struct that implements `Eq` and `Ord` based on the wrapped @@ -642,6 +644,26 @@ fn test_collect_for_tuples() { assert!(e.2 == d); } +#[test] +fn test_extend_for_tuple_side_effects_order() { + struct TrackingExtender<'a, T>(&'static str, &'a RefCell)>>, Vec); + impl Extend for TrackingExtender<'_, T> { + fn extend>(&mut self, i: I) { + let items = Vec::from_iter(i); + self.1.borrow_mut().push((self.0, items.clone())); + self.2.extend(items); + } + } + + let effects = RefCell::new(vec![]); + let l = TrackingExtender("l", &effects, vec![]); + let r = TrackingExtender("r", &effects, vec![]); + let mut p = ((l, r), ()); + p.extend(zip([(1, 2), (3, 4)], [(), ()])); + let effects = effects.into_inner(); + assert_eq!(effects, [("l", vec![1]), ("r", vec![2]), ("l", vec![3]), ("r", vec![4])]); +} + // just tests by whether or not this compiles fn _empty_impl_all_auto_traits() { use std::panic::{RefUnwindSafe, UnwindSafe}; diff --git a/coretests/tests/lib.rs b/coretests/tests/lib.rs index a80d7f8b44d7d..e190536abcf9f 100644 --- a/coretests/tests/lib.rs +++ b/coretests/tests/lib.rs @@ -16,15 +16,18 @@ #![feature(char_internals)] #![feature(char_max_len)] #![feature(clone_to_uninit)] +#![feature(const_cell_traits)] #![feature(const_cmp)] #![feature(const_convert)] #![feature(const_destruct)] +#![feature(const_drop_in_place)] #![feature(const_eval_select)] #![feature(const_mul_add)] #![feature(const_ops)] #![feature(const_option_ops)] #![feature(const_ref_cell)] #![feature(const_result_trait_fn)] +#![feature(const_select_unpredictable)] #![feature(const_trait_impl)] #![feature(control_flow_ok)] #![feature(core_float_math)] @@ -39,7 +42,6 @@ #![feature(drop_guard)] #![feature(duration_constants)] #![feature(duration_constructors)] -#![feature(duration_from_nanos_u128)] #![feature(error_generic_member_access)] #![feature(exact_div)] #![feature(exact_size_is_empty)] @@ -82,6 +84,7 @@ #![feature(maybe_uninit_write_slice)] #![feature(min_specialization)] #![feature(never_type)] +#![feature(new_range_api)] #![feature(next_index)] #![feature(non_exhaustive_omitted_patterns_lint)] #![feature(numfmt)] @@ -94,9 +97,11 @@ #![feature(ptr_metadata)] #![feature(result_option_map_or_default)] #![feature(slice_from_ptr_range)] +#![feature(slice_index_methods)] #![feature(slice_internals)] #![feature(slice_partition_dedup)] #![feature(slice_split_once)] +#![feature(sliceindex_wrappers)] #![feature(split_array)] #![feature(split_as_slice)] #![feature(std_internals)] @@ -174,6 +179,7 @@ mod fmt; mod 
future; mod hash; mod hint; +mod index; mod intrinsics; mod io; mod iter; diff --git a/coretests/tests/manually_drop.rs b/coretests/tests/manually_drop.rs index bbf444471ad2a..1638c82b161b4 100644 --- a/coretests/tests/manually_drop.rs +++ b/coretests/tests/manually_drop.rs @@ -27,3 +27,41 @@ fn smoke() { drop(x); drop(y); } + +#[test] +fn const_drop_in_place() { + const COUNTER: usize = { + use core::cell::Cell; + + let counter = Cell::new(0); + + // only exists to make `Drop` indirect impl + #[allow(dead_code)] + struct Test<'a>(Dropped<'a>); + + struct Dropped<'a>(&'a Cell); + impl const Drop for Dropped<'_> { + fn drop(&mut self) { + self.0.set(self.0.get() + 1); + } + } + + let mut one = ManuallyDrop::new(Test(Dropped(&counter))); + let mut two = ManuallyDrop::new(Test(Dropped(&counter))); + let mut three = ManuallyDrop::new(Test(Dropped(&counter))); + assert!(counter.get() == 0); + unsafe { + ManuallyDrop::drop(&mut one); + } + assert!(counter.get() == 1); + unsafe { + ManuallyDrop::drop(&mut two); + } + assert!(counter.get() == 2); + unsafe { + ManuallyDrop::drop(&mut three); + } + counter.get() + }; + assert_eq!(COUNTER, 3); +} diff --git a/coretests/tests/nonzero.rs b/coretests/tests/nonzero.rs index 69e4ed9c36b3a..c368a2621740b 100644 --- a/coretests/tests/nonzero.rs +++ b/coretests/tests/nonzero.rs @@ -570,3 +570,21 @@ fn test_nonzero_lowest_one() { nonzero_int_impl!(i8, i16, i32, i64, i128, isize); nonzero_uint_impl!(u8, u16, u32, u64, u128, usize); } + +#[test] +fn test_nonzero_bit_width() { + macro_rules! nonzero_uint_impl { + ($($T:ty),+) => { + $( + { + assert_eq!(NonZero::<$T>::new(0b010_1100).unwrap().bit_width(), NonZero::new(6).unwrap()); + assert_eq!(NonZero::<$T>::new(0b111_1001).unwrap().bit_width(), NonZero::new(7).unwrap()); + assert_eq!(NonZero::<$T>::MIN.bit_width(), NonZero::new(1).unwrap()); + assert_eq!(NonZero::<$T>::MAX.bit_width(), NonZero::new(<$T>::BITS).unwrap()); + } + )+ + }; + } + + nonzero_uint_impl!(u8, u16, u32, u64, u128, usize); +} diff --git a/coretests/tests/num/int_macros.rs b/coretests/tests/num/int_macros.rs index 1611a6466f5ab..37336f49ef1b6 100644 --- a/coretests/tests/num/int_macros.rs +++ b/coretests/tests/num/int_macros.rs @@ -724,41 +724,42 @@ macro_rules! int_module { } } - const EXACT_DIV_SUCCESS_DIVIDEND1: $T = 42; - const EXACT_DIV_SUCCESS_DIVISOR1: $T = 6; - const EXACT_DIV_SUCCESS_QUOTIENT1: $T = 7; - const EXACT_DIV_SUCCESS_DIVIDEND2: $T = 18; - const EXACT_DIV_SUCCESS_DIVISOR2: $T = 3; - const EXACT_DIV_SUCCESS_QUOTIENT2: $T = 6; - const EXACT_DIV_SUCCESS_DIVIDEND3: $T = -91; - const EXACT_DIV_SUCCESS_DIVISOR3: $T = 13; - const EXACT_DIV_SUCCESS_QUOTIENT3: $T = -7; - const EXACT_DIV_SUCCESS_DIVIDEND4: $T = -57; - const EXACT_DIV_SUCCESS_DIVISOR4: $T = -3; - const EXACT_DIV_SUCCESS_QUOTIENT4: $T = 19; + const DIV_EXACT_SUCCESS_DIVIDEND1: $T = 42; + const DIV_EXACT_SUCCESS_DIVISOR1: $T = 6; + const DIV_EXACT_SUCCESS_QUOTIENT1: $T = 7; + const DIV_EXACT_SUCCESS_DIVIDEND2: $T = 18; + const DIV_EXACT_SUCCESS_DIVISOR2: $T = 3; + const DIV_EXACT_SUCCESS_QUOTIENT2: $T = 6; + const DIV_EXACT_SUCCESS_DIVIDEND3: $T = -91; + const DIV_EXACT_SUCCESS_DIVISOR3: $T = 13; + const DIV_EXACT_SUCCESS_QUOTIENT3: $T = -7; + const DIV_EXACT_SUCCESS_DIVIDEND4: $T = -57; + const DIV_EXACT_SUCCESS_DIVISOR4: $T = -3; + const DIV_EXACT_SUCCESS_QUOTIENT4: $T = 19; test_runtime_and_compiletime! 
{ - fn test_exact_div() { + fn test_div_exact() { // 42 / 6 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND1, EXACT_DIV_SUCCESS_DIVISOR1), Some(EXACT_DIV_SUCCESS_QUOTIENT1)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND1, EXACT_DIV_SUCCESS_DIVISOR1), EXACT_DIV_SUCCESS_QUOTIENT1); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND1, DIV_EXACT_SUCCESS_DIVISOR1), Some(DIV_EXACT_SUCCESS_QUOTIENT1)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND1, DIV_EXACT_SUCCESS_DIVISOR1), Some(DIV_EXACT_SUCCESS_QUOTIENT1)); // 18 / 3 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND2, EXACT_DIV_SUCCESS_DIVISOR2), Some(EXACT_DIV_SUCCESS_QUOTIENT2)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND2, EXACT_DIV_SUCCESS_DIVISOR2), EXACT_DIV_SUCCESS_QUOTIENT2); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND2, DIV_EXACT_SUCCESS_DIVISOR2), Some(DIV_EXACT_SUCCESS_QUOTIENT2)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND2, DIV_EXACT_SUCCESS_DIVISOR2), Some(DIV_EXACT_SUCCESS_QUOTIENT2)); // -91 / 13 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND3, EXACT_DIV_SUCCESS_DIVISOR3), Some(EXACT_DIV_SUCCESS_QUOTIENT3)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND3, EXACT_DIV_SUCCESS_DIVISOR3), EXACT_DIV_SUCCESS_QUOTIENT3); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND3, DIV_EXACT_SUCCESS_DIVISOR3), Some(DIV_EXACT_SUCCESS_QUOTIENT3)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND3, DIV_EXACT_SUCCESS_DIVISOR3), Some(DIV_EXACT_SUCCESS_QUOTIENT3)); // -57 / -3 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND4, EXACT_DIV_SUCCESS_DIVISOR4), Some(EXACT_DIV_SUCCESS_QUOTIENT4)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND4, EXACT_DIV_SUCCESS_DIVISOR4), EXACT_DIV_SUCCESS_QUOTIENT4); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND4, DIV_EXACT_SUCCESS_DIVISOR4), Some(DIV_EXACT_SUCCESS_QUOTIENT4)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND4, DIV_EXACT_SUCCESS_DIVISOR4), Some(DIV_EXACT_SUCCESS_QUOTIENT4)); // failures - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(1, 2), None); - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(<$T>::MIN, -1), None); - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(0, 0), None); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(1, 2), None); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(1, 2), None); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(<$T>::MIN, -1), None); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(0, 0), None); } } }; diff --git a/coretests/tests/num/uint_macros.rs b/coretests/tests/num/uint_macros.rs index 63be8a45b5cfd..b89a371efcc25 100644 --- a/coretests/tests/num/uint_macros.rs +++ b/coretests/tests/num/uint_macros.rs @@ -595,26 +595,27 @@ macro_rules! 
uint_module { } } - const EXACT_DIV_SUCCESS_DIVIDEND1: $T = 42; - const EXACT_DIV_SUCCESS_DIVISOR1: $T = 6; - const EXACT_DIV_SUCCESS_QUOTIENT1: $T = 7; - const EXACT_DIV_SUCCESS_DIVIDEND2: $T = 18; - const EXACT_DIV_SUCCESS_DIVISOR2: $T = 3; - const EXACT_DIV_SUCCESS_QUOTIENT2: $T = 6; + const DIV_EXACT_SUCCESS_DIVIDEND1: $T = 42; + const DIV_EXACT_SUCCESS_DIVISOR1: $T = 6; + const DIV_EXACT_SUCCESS_QUOTIENT1: $T = 7; + const DIV_EXACT_SUCCESS_DIVIDEND2: $T = 18; + const DIV_EXACT_SUCCESS_DIVISOR2: $T = 3; + const DIV_EXACT_SUCCESS_QUOTIENT2: $T = 6; test_runtime_and_compiletime! { - fn test_exact_div() { + fn test_div_exact() { // 42 / 6 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND1, EXACT_DIV_SUCCESS_DIVISOR1), Some(EXACT_DIV_SUCCESS_QUOTIENT1)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND1, EXACT_DIV_SUCCESS_DIVISOR1), EXACT_DIV_SUCCESS_QUOTIENT1); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND1, DIV_EXACT_SUCCESS_DIVISOR1), Some(DIV_EXACT_SUCCESS_QUOTIENT1)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND1, DIV_EXACT_SUCCESS_DIVISOR1), Some(DIV_EXACT_SUCCESS_QUOTIENT1)); // 18 / 3 - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(EXACT_DIV_SUCCESS_DIVIDEND2, EXACT_DIV_SUCCESS_DIVISOR2), Some(EXACT_DIV_SUCCESS_QUOTIENT2)); - assert_eq_const_safe!($T: <$T>::exact_div(EXACT_DIV_SUCCESS_DIVIDEND2, EXACT_DIV_SUCCESS_DIVISOR2), EXACT_DIV_SUCCESS_QUOTIENT2); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(DIV_EXACT_SUCCESS_DIVIDEND2, DIV_EXACT_SUCCESS_DIVISOR2), Some(DIV_EXACT_SUCCESS_QUOTIENT2)); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(DIV_EXACT_SUCCESS_DIVIDEND2, DIV_EXACT_SUCCESS_DIVISOR2), Some(DIV_EXACT_SUCCESS_QUOTIENT2)); // failures - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(1, 2), None); - assert_eq_const_safe!(Option<$T>: <$T>::checked_exact_div(0, 0), None); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(1, 2), None); + assert_eq_const_safe!(Option<$T>: <$T>::div_exact(1, 2), None); + assert_eq_const_safe!(Option<$T>: <$T>::checked_div_exact(0, 0), None); } } }; diff --git a/coretests/tests/ptr.rs b/coretests/tests/ptr.rs index 4d5138d539b95..93f9454d71378 100644 --- a/coretests/tests/ptr.rs +++ b/coretests/tests/ptr.rs @@ -565,6 +565,7 @@ fn ptr_metadata() { #[test] fn ptr_metadata_bounds() { + #[allow(unknown_lints, function_casts_as_integer)] fn metadata_eq_method_address() -> usize { // The `Metadata` associated type has an `Ord` bound, so this is valid: <::Metadata as PartialEq>::eq as usize @@ -944,13 +945,12 @@ fn test_const_swap_ptr() { assert!(*s1.0.ptr == 666); assert!(*s2.0.ptr == 1); - // Swap them back, again as an array. - // FIXME(#146291): we should be swapping back at type `u8` but that currently does not work. 
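Summarising the renamed `div_exact` tests above as a standalone sketch (the methods are unstable, the feature gate is not visible in these hunks, and the call shape is inferred from the test macro):

```rust
fn main() {
    // `div_exact` and `checked_div_exact` return `Some(quotient)` only when the
    // division leaves no remainder and cannot overflow or divide by zero.
    assert_eq!(42_u32.div_exact(6), Some(7));
    assert_eq!(42_u32.checked_div_exact(6), Some(7));
    assert_eq!(1_u32.div_exact(2), None);              // non-zero remainder
    assert_eq!(0_u32.checked_div_exact(0), None);      // division by zero
    assert_eq!(i32::MIN.checked_div_exact(-1), None);  // overflow
}
```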
+ // Swap them back, byte-for-byte unsafe { ptr::swap_nonoverlapping( - ptr::from_mut(&mut s1).cast::(), - ptr::from_mut(&mut s2).cast::(), - 1, + ptr::from_mut(&mut s1).cast::(), + ptr::from_mut(&mut s2).cast::(), + size_of::(), ); } diff --git a/coretests/tests/str_lossy.rs b/coretests/tests/str_lossy.rs index 6e70ea3e28574..820da38dd7466 100644 --- a/coretests/tests/str_lossy.rs +++ b/coretests/tests/str_lossy.rs @@ -80,4 +80,5 @@ fn debug() { b"Hello\xC0\x80 There\xE6\x83 Goodbye\xf4\x8d\x93\xaa".utf8_chunks().debug(), ), ); + assert_eq!("\"'\"", &format!("{:?}", b"'".utf8_chunks().debug())); } diff --git a/panic_unwind/src/lib.rs b/panic_unwind/src/lib.rs index 83311f3238012..1be19913f260f 100644 --- a/panic_unwind/src/lib.rs +++ b/panic_unwind/src/lib.rs @@ -24,8 +24,6 @@ #![feature(rustc_attrs)] #![panic_runtime] #![feature(panic_runtime)] -// `real_imp` is unused with Miri, so silence warnings. -#![cfg_attr(miri, allow(dead_code))] #![allow(internal_features)] #![warn(unreachable_pub)] #![deny(unsafe_op_in_unsafe_fn)] diff --git a/panic_unwind/src/seh.rs b/panic_unwind/src/seh.rs index a5d67dbb6a9f4..257916c4d5cdc 100644 --- a/panic_unwind/src/seh.rs +++ b/panic_unwind/src/seh.rs @@ -336,6 +336,7 @@ unsafe fn throw_exception(data: Option>) -> ! { // In any case, we basically need to do something like this until we can // express more operations in statics (and we may never be able to). unsafe { + #[allow(function_casts_as_integer)] atomic_store::<_, { AtomicOrdering::SeqCst }>( (&raw mut THROW_INFO.pmfnUnwind).cast(), ptr_t::new(exception_cleanup as *mut u8).raw(), @@ -352,6 +353,7 @@ unsafe fn throw_exception(data: Option>) -> ! { (&raw mut CATCHABLE_TYPE.pType).cast(), ptr_t::new((&raw mut TYPE_DESCRIPTOR).cast()).raw(), ); + #[allow(function_casts_as_integer)] atomic_store::<_, { AtomicOrdering::SeqCst }>( (&raw mut CATCHABLE_TYPE.copyFunction).cast(), ptr_t::new(exception_copy as *mut u8).raw(), diff --git a/portable-simd/crates/core_simd/src/masks/bitmask.rs b/portable-simd/crates/core_simd/src/masks/bitmask.rs index 8221d8f17e90e..32d37b5533926 100644 --- a/portable-simd/crates/core_simd/src/masks/bitmask.rs +++ b/portable-simd/crates/core_simd/src/masks/bitmask.rs @@ -170,7 +170,6 @@ where { type Output = Self; #[inline] - #[must_use = "method returns a new mask and does not mutate the original value"] fn bitand(mut self, rhs: Self) -> Self { for (l, r) in self.0.as_mut().iter_mut().zip(rhs.0.as_ref().iter()) { *l &= r; @@ -187,7 +186,6 @@ where { type Output = Self; #[inline] - #[must_use = "method returns a new mask and does not mutate the original value"] fn bitor(mut self, rhs: Self) -> Self { for (l, r) in self.0.as_mut().iter_mut().zip(rhs.0.as_ref().iter()) { *l |= r; @@ -203,7 +201,6 @@ where { type Output = Self; #[inline] - #[must_use = "method returns a new mask and does not mutate the original value"] fn bitxor(mut self, rhs: Self) -> Self::Output { for (l, r) in self.0.as_mut().iter_mut().zip(rhs.0.as_ref().iter()) { *l ^= r; @@ -219,7 +216,6 @@ where { type Output = Self; #[inline] - #[must_use = "method returns a new mask and does not mutate the original value"] fn not(mut self) -> Self::Output { for x in self.0.as_mut() { *x = !*x; diff --git a/portable-simd/crates/core_simd/src/vector.rs b/portable-simd/crates/core_simd/src/vector.rs index d76a6cd52bfc5..f40031f8c4da7 100644 --- a/portable-simd/crates/core_simd/src/vector.rs +++ b/portable-simd/crates/core_simd/src/vector.rs @@ -474,7 +474,14 @@ where or: Self, ) -> Self { // SAFETY: The safety of reading 
elements through `ptr` is ensured by the caller. - unsafe { core::intrinsics::simd::simd_masked_load(enable.to_int(), ptr, or) } + unsafe { + core::intrinsics::simd::simd_masked_load::< + _, + _, + _, + { core::intrinsics::simd::SimdAlign::Element }, + >(enable.to_int(), ptr, or) + } } /// Reads from potentially discontiguous indices in `slice` to construct a SIMD vector. @@ -723,7 +730,14 @@ where #[inline] pub unsafe fn store_select_ptr(self, ptr: *mut T, enable: Mask<::Mask, N>) { // SAFETY: The safety of writing elements through `ptr` is ensured by the caller. - unsafe { core::intrinsics::simd::simd_masked_store(enable.to_int(), ptr, self) } + unsafe { + core::intrinsics::simd::simd_masked_store::< + _, + _, + _, + { core::intrinsics::simd::SimdAlign::Element }, + >(enable.to_int(), ptr, self) + } } /// Writes the values in a SIMD vector to potentially discontiguous indices in `slice`. diff --git a/proc_macro/src/bridge/client.rs b/proc_macro/src/bridge/client.rs index 92558f2b7d9cc..bdaa865a998d6 100644 --- a/proc_macro/src/bridge/client.rs +++ b/proc_macro/src/bridge/client.rs @@ -58,7 +58,7 @@ macro_rules! define_client_handles { } } - impl DecodeMut<'_, '_, S> for $oty { + impl Decode<'_, '_, S> for $oty { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { $oty { handle: handle::Handle::decode(r, s), @@ -82,7 +82,7 @@ macro_rules! define_client_handles { } } - impl DecodeMut<'_, '_, S> for $ity { + impl Decode<'_, '_, S> for $ity { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { $ity { handle: handle::Handle::decode(r, s), @@ -276,7 +276,7 @@ fn maybe_install_panic_hook(force_show_panics: bool) { /// Client-side helper for handling client panics, entering the bridge, /// deserializing input and serializing output. // FIXME(eddyb) maybe replace `Bridge::enter` with this? -fn run_client DecodeMut<'a, 's, ()>, R: Encode<()>>( +fn run_client Decode<'a, 's, ()>, R: Encode<()>>( config: BridgeConfig<'_>, f: impl FnOnce(A) -> R, ) -> Buffer { diff --git a/proc_macro/src/bridge/mod.rs b/proc_macro/src/bridge/mod.rs index 582c43c78fcbb..b0ee9c0cc3027 100644 --- a/proc_macro/src/bridge/mod.rs +++ b/proc_macro/src/bridge/mod.rs @@ -143,7 +143,7 @@ mod symbol; use buffer::Buffer; pub use rpc::PanicMessage; -use rpc::{DecodeMut, Encode, Reader, Writer}; +use rpc::{Decode, Encode, Reader, Writer}; /// Configuration for establishing an active connection between a server and a /// client. The server creates the bridge config (`run_server` in `server.rs`), @@ -168,7 +168,7 @@ impl !Sync for BridgeConfig<'_> {} #[forbid(unsafe_code)] #[allow(non_camel_case_types)] mod api_tags { - use super::rpc::{DecodeMut, Encode, Reader, Writer}; + use super::rpc::{Decode, Encode, Reader, Writer}; macro_rules! declare_tags { ($($name:ident { diff --git a/proc_macro/src/bridge/rpc.rs b/proc_macro/src/bridge/rpc.rs index 7f4f5fc3a97d5..ed67674a74ab7 100644 --- a/proc_macro/src/bridge/rpc.rs +++ b/proc_macro/src/bridge/rpc.rs @@ -12,7 +12,7 @@ pub(super) trait Encode: Sized { pub(super) type Reader<'a> = &'a [u8]; -pub(super) trait DecodeMut<'a, 's, S>: Sized { +pub(super) trait Decode<'a, 's, S>: Sized { fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self; } @@ -24,7 +24,7 @@ macro_rules! rpc_encode_decode { } } - impl DecodeMut<'_, '_, S> for $ty { + impl Decode<'_, '_, S> for $ty { fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { const N: usize = size_of::<$ty>(); @@ -43,12 +43,12 @@ macro_rules! 
rpc_encode_decode { } } - impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> + impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S> for $name $(<$($T),+>)? { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { $name { - $($field: DecodeMut::decode(r, s)),* + $($field: Decode::decode(r, s)),* } } } @@ -58,23 +58,18 @@ macro_rules! rpc_encode_decode { fn encode(self, w: &mut Writer, s: &mut S) { // HACK(eddyb): `Tag` enum duplicated between the // two impls as there's no other place to stash it. - #[allow(non_upper_case_globals)] - mod tag { - #[repr(u8)] enum Tag { $($variant),* } - - $(pub(crate) const $variant: u8 = Tag::$variant as u8;)* - } + #[repr(u8)] enum Tag { $($variant),* } match self { $($name::$variant $(($field))* => { - tag::$variant.encode(w, s); + (Tag::$variant as u8).encode(w, s); $($field.encode(w, s);)* })* } } } - impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> + impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S> for $name $(<$($T),+>)? { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { @@ -89,7 +84,7 @@ macro_rules! rpc_encode_decode { match u8::decode(r, s) { $(tag::$variant => { - $(let $field = DecodeMut::decode(r, s);)* + $(let $field = Decode::decode(r, s);)* $name::$variant $(($field))* })* _ => unreachable!(), @@ -103,7 +98,7 @@ impl Encode for () { fn encode(self, _: &mut Writer, _: &mut S) {} } -impl DecodeMut<'_, '_, S> for () { +impl Decode<'_, '_, S> for () { fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {} } @@ -113,7 +108,7 @@ impl Encode for u8 { } } -impl DecodeMut<'_, '_, S> for u8 { +impl Decode<'_, '_, S> for u8 { fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { let x = r[0]; *r = &r[1..]; @@ -130,7 +125,7 @@ impl Encode for bool { } } -impl DecodeMut<'_, '_, S> for bool { +impl Decode<'_, '_, S> for bool { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { match u8::decode(r, s) { 0 => false, @@ -146,7 +141,7 @@ impl Encode for char { } } -impl DecodeMut<'_, '_, S> for char { +impl Decode<'_, '_, S> for char { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { char::from_u32(u32::decode(r, s)).unwrap() } @@ -158,7 +153,7 @@ impl Encode for NonZero { } } -impl DecodeMut<'_, '_, S> for NonZero { +impl Decode<'_, '_, S> for NonZero { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { Self::new(u32::decode(r, s)).unwrap() } @@ -171,11 +166,11 @@ impl, B: Encode> Encode for (A, B) { } } -impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> +impl<'a, S, A: for<'s> Decode<'a, 's, S>, B: for<'s> Decode<'a, 's, S>> Decode<'a, '_, S> for (A, B) { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - (DecodeMut::decode(r, s), DecodeMut::decode(r, s)) + (Decode::decode(r, s), Decode::decode(r, s)) } } @@ -186,7 +181,7 @@ impl Encode for &[u8] { } } -impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] { +impl<'a, S> Decode<'a, '_, S> for &'a [u8] { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { let len = usize::decode(r, s); let xs = &r[..len]; @@ -201,7 +196,7 @@ impl Encode for &str { } } -impl<'a, S> DecodeMut<'a, '_, S> for &'a str { +impl<'a, S> Decode<'a, '_, S> for &'a str { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { str::from_utf8(<&[u8]>::decode(r, s)).unwrap() } @@ -213,7 +208,7 @@ impl Encode for String { } } -impl DecodeMut<'_, '_, S> for String { +impl Decode<'_, '_, S> for String { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { <&str>::decode(r, s).to_string() } @@ -228,7 +223,7 @@ impl> Encode for 
Vec { } } -impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec { +impl<'a, S, T: for<'s> Decode<'a, 's, S>> Decode<'a, '_, S> for Vec { fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { let len = usize::decode(r, s); let mut vec = Vec::with_capacity(len); @@ -288,7 +283,7 @@ impl Encode for PanicMessage { } } -impl DecodeMut<'_, '_, S> for PanicMessage { +impl Decode<'_, '_, S> for PanicMessage { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { match Option::::decode(r, s) { Some(s) => PanicMessage::String(s), diff --git a/proc_macro/src/bridge/selfless_reify.rs b/proc_macro/src/bridge/selfless_reify.rs index b06434a5ffee2..a53550e0b9e0c 100644 --- a/proc_macro/src/bridge/selfless_reify.rs +++ b/proc_macro/src/bridge/selfless_reify.rs @@ -50,7 +50,7 @@ macro_rules! define_reify_functions { >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty { // FIXME(eddyb) describe the `F` type (e.g. via `type_name::`) once panic // formatting becomes possible in `const fn`. - assert!(size_of::() == 0, "selfless_reify: closure must be zero-sized"); + const { assert!(size_of::() == 0, "selfless_reify: closure must be zero-sized"); } $(extern $abi)? fn wrapper< $($($param,)*)? diff --git a/proc_macro/src/bridge/server.rs b/proc_macro/src/bridge/server.rs index 2850e1099b700..e9ef26c07f24f 100644 --- a/proc_macro/src/bridge/server.rs +++ b/proc_macro/src/bridge/server.rs @@ -32,7 +32,7 @@ macro_rules! define_server_handles { } } - impl DecodeMut<'_, '_, HandleStore>> + impl Decode<'_, '_, HandleStore>> for Marked { fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { @@ -40,7 +40,7 @@ macro_rules! define_server_handles { } } - impl<'s, S: Types> DecodeMut<'_, 's, HandleStore>> + impl<'s, S: Types> Decode<'_, 's, HandleStore>> for &'s Marked { fn decode(r: &mut Reader<'_>, s: &'s mut HandleStore>) -> Self { @@ -48,7 +48,7 @@ macro_rules! define_server_handles { } } - impl<'s, S: Types> DecodeMut<'_, 's, HandleStore>> + impl<'s, S: Types> Decode<'_, 's, HandleStore>> for &'s mut Marked { fn decode( @@ -67,7 +67,7 @@ macro_rules! define_server_handles { } } - impl DecodeMut<'_, '_, HandleStore>> + impl Decode<'_, '_, HandleStore>> for Marked { fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { @@ -355,7 +355,7 @@ pub trait MessagePipe: Sized { fn run_server< S: Server, I: Encode>>, - O: for<'a, 's> DecodeMut<'a, 's, HandleStore>>, + O: for<'a, 's> Decode<'a, 's, HandleStore>>, >( strategy: &impl ExecutionStrategy, handle_counters: &'static client::HandleCounters, diff --git a/proc_macro/src/bridge/symbol.rs b/proc_macro/src/bridge/symbol.rs index eb7d30f9a6cc9..0d6a725fddd98 100644 --- a/proc_macro/src/bridge/symbol.rs +++ b/proc_macro/src/bridge/symbol.rs @@ -102,7 +102,7 @@ impl Encode for Symbol { } } -impl DecodeMut<'_, '_, server::HandleStore>> +impl Decode<'_, '_, server::HandleStore>> for Marked { fn decode(r: &mut Reader<'_>, s: &mut server::HandleStore>) -> Self { @@ -118,7 +118,7 @@ impl Encode>> } } -impl DecodeMut<'_, '_, S> for Symbol { +impl Decode<'_, '_, S> for Symbol { fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { Symbol::new(<&str>::decode(r, s)) } diff --git a/proc_macro/src/lib.rs b/proc_macro/src/lib.rs index 613abd7024e3f..4efdfcad924b5 100644 --- a/proc_macro/src/lib.rs +++ b/proc_macro/src/lib.rs @@ -376,6 +376,21 @@ impl Extend for TokenStream { } } +macro_rules! 
extend_items { + ($($item:ident)*) => { + $( + #[stable(feature = "token_stream_extend_tt_items", since = "1.92.0")] + impl Extend<$item> for TokenStream { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(TokenTree::$item)); + } + } + )* + }; +} + +extend_items!(Group Literal Punct Ident); + /// Public implementation details for the `TokenStream` type, such as iterators. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] pub mod token_stream { diff --git a/std/Cargo.toml b/std/Cargo.toml index 779b07ce240a6..1ba9f7e32d91a 100644 --- a/std/Cargo.toml +++ b/std/Cargo.toml @@ -33,7 +33,7 @@ miniz_oxide = { version = "0.8.0", optional = true, default-features = false } addr2line = { version = "0.25.0", optional = true, default-features = false } [target.'cfg(not(all(windows, target_env = "msvc")))'.dependencies] -libc = { version = "0.2.172", default-features = false, features = [ +libc = { version = "0.2.177", default-features = false, features = [ 'rustc-dep-of-std', ], public = true } @@ -70,6 +70,9 @@ fortanix-sgx-abi = { version = "0.6.1", features = [ 'rustc-dep-of-std', ], public = true } +[target.'cfg(target_os = "motor")'.dependencies] +moto-rt = { version = "0.15", features = ['rustc-dep-of-std'], public = true } + [target.'cfg(target_os = "hermit")'.dependencies] hermit-abi = { version = "0.5.0", features = [ 'rustc-dep-of-std', @@ -123,6 +126,7 @@ optimize_for_size = ["core/optimize_for_size", "alloc/optimize_for_size"] # a borrow error occurs debug_refcell = ["core/debug_refcell"] +llvm_enzyme = ["core/llvm_enzyme"] # Enable std_detect features: std_detect_file_io = ["std_detect/std_detect_file_io"] diff --git a/std/benches/path.rs b/std/benches/path.rs index 094c00894a8ee..912c783b31e4c 100644 --- a/std/benches/path.rs +++ b/std/benches/path.rs @@ -55,6 +55,30 @@ fn bench_path_cmp_fast_path_short(b: &mut test::Bencher) { }); } +#[bench] +fn bench_path_components_iter(b: &mut test::Bencher) { + let p = Path::new("/my/home/is/my/castle/and/my/castle/has/a/rusty/workbench/"); + + b.iter(|| { + for c in black_box(p).components() { + black_box(c); + } + }) +} + +#[bench] +fn bench_path_file_name(b: &mut test::Bencher) { + let p1 = Path::new("foo.bar"); + let p2 = Path::new("foo/bar"); + let p3 = Path::new("/bar"); + + b.iter(|| { + black_box(black_box(p1).file_name()); + black_box(black_box(p2).file_name()); + black_box(black_box(p3).file_name()); + }) +} + #[bench] #[cfg_attr(miri, ignore)] // Miri isn't fast... fn bench_path_hashset(b: &mut test::Bencher) { diff --git a/std/build.rs b/std/build.rs index 8a5a785060c85..bee28e88491d0 100644 --- a/std/build.rs +++ b/std/build.rs @@ -30,6 +30,7 @@ fn main() { || target_os == "windows" || target_os == "fuchsia" || (target_vendor == "fortanix" && target_env == "sgx") + || target_os == "motor" || target_os == "hermit" || target_os == "trusty" || target_os == "l4re" diff --git a/std/src/alloc.rs b/std/src/alloc.rs index 1d61630269ac3..daa25c5a50dd6 100644 --- a/std/src/alloc.rs +++ b/std/src/alloc.rs @@ -358,9 +358,10 @@ fn default_alloc_error_hook(layout: Layout) { // This is the default path taken on OOM, and the only path taken on stable with std. // Crucially, it does *not* call any user-defined code, and therefore users do not have to // worry about allocation failure causing reentrancy issues. 
That makes it different from - // the default `__rdl_oom` defined in alloc (i.e., the default alloc error handler that is - // called when there is no `#[alloc_error_handler]`), which triggers a regular panic and - // thus can invoke a user-defined panic hook, executing arbitrary user-defined code. + // the default `__rdl_alloc_error_handler` defined in alloc (i.e., the default alloc error + // handler that is called when there is no `#[alloc_error_handler]`), which triggers a + // regular panic and thus can invoke a user-defined panic hook, executing arbitrary + // user-defined code. rtprintpanic!("memory allocation of {} bytes failed\n", layout.size()); } } diff --git a/std/src/backtrace.rs b/std/src/backtrace.rs index c3fcb0e2e42b0..99724e29e02b2 100644 --- a/std/src/backtrace.rs +++ b/std/src/backtrace.rs @@ -293,7 +293,7 @@ impl Backtrace { if !Backtrace::enabled() { return Backtrace { inner: Inner::Disabled }; } - Backtrace::create(Backtrace::capture as usize) + Backtrace::create(Backtrace::capture as fn() -> Backtrace as usize) } /// Forcibly captures a full backtrace, regardless of environment variable @@ -309,7 +309,7 @@ impl Backtrace { #[stable(feature = "backtrace", since = "1.65.0")] #[inline(never)] // want to make sure there's a frame here to remove pub fn force_capture() -> Backtrace { - Backtrace::create(Backtrace::force_capture as usize) + Backtrace::create(Backtrace::force_capture as fn() -> Backtrace as usize) } /// Forcibly captures a disabled backtrace, regardless of environment diff --git a/std/src/collections/hash/map.rs b/std/src/collections/hash/map.rs index fc0fef620e3b6..ab21e3b927e20 100644 --- a/std/src/collections/hash/map.rs +++ b/std/src/collections/hash/map.rs @@ -1685,7 +1685,8 @@ impl<'a, K, V> Drain<'a, K, V> { /// let iter = map.extract_if(|_k, v| *v % 2 == 0); /// ``` #[stable(feature = "hash_extract_if", since = "1.88.0")] -#[must_use = "iterators are lazy and do nothing unless consumed"] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain` to remove and discard elements"] pub struct ExtractIf<'a, K, V, F> { base: base::ExtractIf<'a, K, V, F>, } diff --git a/std/src/collections/hash/set.rs b/std/src/collections/hash/set.rs index 482d57b47f677..6795da80aacb6 100644 --- a/std/src/collections/hash/set.rs +++ b/std/src/collections/hash/set.rs @@ -1391,6 +1391,8 @@ pub struct Drain<'a, K: 'a> { /// let mut extract_ifed = a.extract_if(|v| v % 2 == 0); /// ``` #[stable(feature = "hash_extract_if", since = "1.88.0")] +#[must_use = "iterators are lazy and do nothing unless consumed; \ + use `retain` to remove and discard elements"] pub struct ExtractIf<'a, K, F> { base: base::ExtractIf<'a, K, F>, } diff --git a/std/src/env.rs b/std/src/env.rs index 6d716bd854433..fd662e8a663a9 100644 --- a/std/src/env.rs +++ b/std/src/env.rs @@ -170,7 +170,7 @@ impl Iterator for Vars { impl fmt::Debug for Vars { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let Self { inner: VarsOs { inner } } = self; - f.debug_struct("Vars").field("inner", &inner.str_debug()).finish() + f.debug_struct("Vars").field("inner", inner).finish() } } diff --git a/std/src/ffi/os_str.rs b/std/src/ffi/os_str.rs index 6c098034eea3b..09bd911aa769a 100644 --- a/std/src/ffi/os_str.rs +++ b/std/src/ffi/os_str.rs @@ -1215,6 +1215,8 @@ impl OsStr { /// Checks if all characters in this string are within the ASCII range. /// + /// An empty string returns `true`. 
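A brief sketch of the cast pattern behind the `Backtrace::capture as fn() -> Backtrace as usize` change above and the `#[allow(function_casts_as_integer)]` attributes elsewhere in this patch; the assumption here is that the lint only objects to casting a function item straight to an integer:

```rust
fn callback() {}

fn main() {
    // Direct function-item-to-integer cast; this is the form the lint flags:
    // let addr = callback as usize;

    // Going through an explicit fn-pointer type first keeps the intent visible
    // and sidesteps the lint, mirroring the backtrace.rs change.
    let addr = callback as fn() as usize;
    let _ = addr;
}
```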
+ /// /// # Examples /// /// ``` diff --git a/std/src/ffi/os_str/tests.rs index 2572b71fd9ac6..3474f0ab50684 100644 --- a/std/src/ffi/os_str/tests.rs +++ b/std/src/ffi/os_str/tests.rs @@ -303,3 +303,9 @@ fn clone_to_uninit() { unsafe { a.clone_to_uninit(ptr::from_mut::(&mut b).cast()) }; assert_eq!(a, &*b); } + +#[test] +fn debug() { + let s = "'single quotes'"; + assert_eq!(format!("{:?}", OsStr::new(s)), format!("{:?}", s)); +} diff --git a/std/src/fs.rs index 28b2c7173d321..b548eb4939d42 100644 --- a/std/src/fs.rs +++ b/std/src/fs.rs @@ -387,6 +387,87 @@ pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result inner(path.as_ref(), contents.as_ref()) } +/// Changes the timestamps of the file or directory at the specified path. +/// +/// This function will attempt to set the access and modification times +/// to the times specified. If the path refers to a symbolic link, this function +/// will follow the link and change the timestamps of the target file. +/// +/// # Platform-specific behavior +/// +/// This function currently corresponds to the `utimensat` function on Unix platforms, the +/// `setattrlist` function on Apple platforms, and the `SetFileTime` function on Windows. +/// +/// # Errors +/// +/// This function will return an error if the user lacks permission to change timestamps on the +/// target file or symlink. It may also return an error if the OS does not support it. +/// +/// # Examples +/// +/// ```no_run +/// #![feature(fs_set_times)] +/// use std::fs::{self, FileTimes}; +/// use std::time::SystemTime; +/// +/// fn main() -> std::io::Result<()> { +/// let now = SystemTime::now(); +/// let times = FileTimes::new() +/// .set_accessed(now) +/// .set_modified(now); +/// fs::set_times("foo.txt", times)?; +/// Ok(()) +/// } +/// ``` +#[unstable(feature = "fs_set_times", issue = "147455")] +#[doc(alias = "utimens")] +#[doc(alias = "utimes")] +#[doc(alias = "utime")] +pub fn set_times>(path: P, times: FileTimes) -> io::Result<()> { + fs_imp::set_times(path.as_ref(), times.0) +} + +/// Changes the timestamps of the file or symlink at the specified path. +/// +/// This function will attempt to set the access and modification times +/// to the times specified. Unlike `set_times`, if the path refers to a symbolic link, +/// this function will change the timestamps of the symlink itself, not the target file. +/// +/// # Platform-specific behavior +/// +/// This function currently corresponds to the `utimensat` function with `AT_SYMLINK_NOFOLLOW` on +/// Unix platforms, the `setattrlist` function with `FSOPT_NOFOLLOW` on Apple platforms, and the +/// `SetFileTime` function on Windows. +/// +/// # Errors +/// +/// This function will return an error if the user lacks permission to change timestamps on the +/// target file or symlink. It may also return an error if the OS does not support it.
+/// +/// # Examples +/// +/// ```no_run +/// #![feature(fs_set_times)] +/// use std::fs::{self, FileTimes}; +/// use std::time::SystemTime; +/// +/// fn main() -> std::io::Result<()> { +/// let now = SystemTime::now(); +/// let times = FileTimes::new() +/// .set_accessed(now) +/// .set_modified(now); +/// fs::set_times_nofollow("symlink.txt", times)?; +/// Ok(()) +/// } +/// ``` +#[unstable(feature = "fs_set_times", issue = "147455")] +#[doc(alias = "utimensat")] +#[doc(alias = "lutimens")] +#[doc(alias = "lutimes")] +pub fn set_times_nofollow>(path: P, times: FileTimes) -> io::Result<()> { + fs_imp::set_times_nofollow(path.as_ref(), times.0) +} + #[stable(feature = "file_lock", since = "1.89.0")] impl error::Error for TryLockError {} diff --git a/std/src/fs/tests.rs b/std/src/fs/tests.rs index f8dfb0d633400..9fd87e119906e 100644 --- a/std/src/fs/tests.rs +++ b/std/src/fs/tests.rs @@ -5,15 +5,16 @@ use rand::RngCore; target_os = "freebsd", target_os = "linux", target_os = "netbsd", + target_os = "illumos", target_vendor = "apple", ))] use crate::assert_matches::assert_matches; -use crate::char::MAX_LEN_UTF8; #[cfg(any( windows, target_os = "freebsd", target_os = "linux", target_os = "netbsd", + target_os = "illumos", target_vendor = "apple", ))] use crate::fs::TryLockError; @@ -172,7 +173,7 @@ fn file_test_io_non_positional_read() { #[test] fn file_test_io_seek_and_tell_smoke_test() { let message = "ten-four"; - let mut read_mem = [0; MAX_LEN_UTF8]; + let mut read_mem = [0; char::MAX_LEN_UTF8]; let set_cursor = 4 as u64; let tell_pos_pre_read; let tell_pos_post_read; @@ -227,6 +228,7 @@ fn file_test_io_seek_and_write() { target_os = "linux", target_os = "netbsd", target_os = "solaris", + target_os = "illumos", target_vendor = "apple", ))] fn file_lock_multiple_shared() { @@ -251,6 +253,7 @@ fn file_lock_multiple_shared() { target_os = "linux", target_os = "netbsd", target_os = "solaris", + target_os = "illumos", target_vendor = "apple", ))] fn file_lock_blocking() { @@ -276,6 +279,7 @@ fn file_lock_blocking() { target_os = "linux", target_os = "netbsd", target_os = "solaris", + target_os = "illumos", target_vendor = "apple", ))] fn file_lock_drop() { @@ -298,6 +302,7 @@ fn file_lock_drop() { target_os = "linux", target_os = "netbsd", target_os = "solaris", + target_os = "illumos", target_vendor = "apple", ))] fn file_lock_dup() { @@ -399,7 +404,7 @@ fn file_test_io_seek_shakedown() { let chunk_one: &str = "qwer"; let chunk_two: &str = "asdf"; let chunk_three: &str = "zxcv"; - let mut read_mem = [0; MAX_LEN_UTF8]; + let mut read_mem = [0; char::MAX_LEN_UTF8]; let tmpdir = tmpdir(); let filename = &tmpdir.join("file_rt_io_file_test_seek_shakedown.txt"); { @@ -776,7 +781,7 @@ fn file_test_directoryinfo_readdir() { check!(w.write(msg)); } let files = check!(fs::read_dir(dir)); - let mut mem = [0; MAX_LEN_UTF8]; + let mut mem = [0; char::MAX_LEN_UTF8]; for f in files { let f = f.unwrap().path(); { @@ -2226,3 +2231,222 @@ fn test_open_options_invalid_combinations() { assert_eq!(err.kind(), ErrorKind::InvalidInput); assert_eq!(err.to_string(), "must specify at least one of read, write, or append access"); } + +#[test] +fn test_fs_set_times() { + #[cfg(target_vendor = "apple")] + use crate::os::darwin::fs::FileTimesExt; + #[cfg(windows)] + use crate::os::windows::fs::FileTimesExt; + + let tmp = tmpdir(); + let path = tmp.join("foo"); + File::create(&path).unwrap(); + + let mut times = FileTimes::new(); + let accessed = SystemTime::UNIX_EPOCH + Duration::from_secs(12345); + let modified = 
SystemTime::UNIX_EPOCH + Duration::from_secs(54321); + times = times.set_accessed(accessed).set_modified(modified); + + #[cfg(any(windows, target_vendor = "apple"))] + let created = SystemTime::UNIX_EPOCH + Duration::from_secs(32123); + #[cfg(any(windows, target_vendor = "apple"))] + { + times = times.set_created(created); + } + + match fs::set_times(&path, times) { + // Allow unsupported errors on platforms which don't support setting times. + #[cfg(not(any( + windows, + all( + unix, + not(any( + target_os = "android", + target_os = "redox", + target_os = "espidf", + target_os = "horizon" + )) + ) + )))] + Err(e) if e.kind() == ErrorKind::Unsupported => return, + Err(e) => panic!("error setting file times: {e:?}"), + Ok(_) => {} + } + + let metadata = fs::metadata(&path).unwrap(); + assert_eq!(metadata.accessed().unwrap(), accessed); + assert_eq!(metadata.modified().unwrap(), modified); + #[cfg(any(windows, target_vendor = "apple"))] + { + assert_eq!(metadata.created().unwrap(), created); + } +} + +#[test] +fn test_fs_set_times_follows_symlink() { + #[cfg(target_vendor = "apple")] + use crate::os::darwin::fs::FileTimesExt; + #[cfg(windows)] + use crate::os::windows::fs::FileTimesExt; + + let tmp = tmpdir(); + + // Create a target file + let target = tmp.join("target"); + File::create(&target).unwrap(); + + // Create a symlink to the target + #[cfg(unix)] + let link = tmp.join("link"); + #[cfg(unix)] + crate::os::unix::fs::symlink(&target, &link).unwrap(); + + #[cfg(windows)] + let link = tmp.join("link.txt"); + #[cfg(windows)] + crate::os::windows::fs::symlink_file(&target, &link).unwrap(); + + // Get the symlink's own modified time BEFORE calling set_times (to compare later) + // We don't check accessed time because reading metadata may update atime on some platforms. + let link_metadata_before = fs::symlink_metadata(&link).unwrap(); + let link_modified_before = link_metadata_before.modified().unwrap(); + + let mut times = FileTimes::new(); + let accessed = SystemTime::UNIX_EPOCH + Duration::from_secs(12345); + let modified = SystemTime::UNIX_EPOCH + Duration::from_secs(54321); + times = times.set_accessed(accessed).set_modified(modified); + + #[cfg(any(windows, target_vendor = "apple"))] + let created = SystemTime::UNIX_EPOCH + Duration::from_secs(32123); + #[cfg(any(windows, target_vendor = "apple"))] + { + times = times.set_created(created); + } + + // Call fs::set_times on the symlink - it should follow the link and modify the target + match fs::set_times(&link, times) { + // Allow unsupported errors on platforms which don't support setting times. 
+ #[cfg(not(any( + windows, + all( + unix, + not(any( + target_os = "android", + target_os = "redox", + target_os = "espidf", + target_os = "horizon" + )) + ) + )))] + Err(e) if e.kind() == ErrorKind::Unsupported => return, + Err(e) => panic!("error setting file times through symlink: {e:?}"), + Ok(_) => {} + } + + // Verify that the TARGET file's times were changed (following the symlink) + let target_metadata = fs::metadata(&target).unwrap(); + assert_eq!( + target_metadata.accessed().unwrap(), + accessed, + "target file accessed time should match" + ); + assert_eq!( + target_metadata.modified().unwrap(), + modified, + "target file modified time should match" + ); + #[cfg(any(windows, target_vendor = "apple"))] + { + assert_eq!( + target_metadata.created().unwrap(), + created, + "target file created time should match" + ); + } + + // Also verify through the symlink (fs::metadata follows symlinks) + let link_followed_metadata = fs::metadata(&link).unwrap(); + assert_eq!(link_followed_metadata.accessed().unwrap(), accessed); + assert_eq!(link_followed_metadata.modified().unwrap(), modified); + + // Verify that the SYMLINK ITSELF was NOT modified + // Note: We only check modified time, not accessed time, because reading the symlink + // metadata may update its atime on some platforms (e.g., Linux). + let link_metadata_after = fs::symlink_metadata(&link).unwrap(); + assert_eq!( + link_metadata_after.modified().unwrap(), + link_modified_before, + "symlink's own modified time should not change" + ); +} + +#[test] +fn test_fs_set_times_nofollow() { + #[cfg(target_vendor = "apple")] + use crate::os::darwin::fs::FileTimesExt; + #[cfg(windows)] + use crate::os::windows::fs::FileTimesExt; + + let tmp = tmpdir(); + + // Create a target file and a symlink to it + let target = tmp.join("target"); + File::create(&target).unwrap(); + + #[cfg(unix)] + let link = tmp.join("link"); + #[cfg(unix)] + crate::os::unix::fs::symlink(&target, &link).unwrap(); + + #[cfg(windows)] + let link = tmp.join("link.txt"); + #[cfg(windows)] + crate::os::windows::fs::symlink_file(&target, &link).unwrap(); + + let mut times = FileTimes::new(); + let accessed = SystemTime::UNIX_EPOCH + Duration::from_secs(11111); + let modified = SystemTime::UNIX_EPOCH + Duration::from_secs(22222); + times = times.set_accessed(accessed).set_modified(modified); + + #[cfg(any(windows, target_vendor = "apple"))] + let created = SystemTime::UNIX_EPOCH + Duration::from_secs(33333); + #[cfg(any(windows, target_vendor = "apple"))] + { + times = times.set_created(created); + } + + // Set times on the symlink itself (not following it) + match fs::set_times_nofollow(&link, times) { + // Allow unsupported errors on platforms which don't support setting times. 
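The tests here exercise both entry points; as a quick illustration of the difference they verify (a minimal sketch, not part of the patch — the path is a placeholder and the `fs_set_times` feature is still unstable, so this only compiles on a matching nightly):

```rust
#![feature(fs_set_times)]
use std::fs::{self, FileTimes};
use std::time::{Duration, SystemTime};

fn main() -> std::io::Result<()> {
    let when = SystemTime::UNIX_EPOCH + Duration::from_secs(1_000_000_000);
    let times = FileTimes::new().set_accessed(when).set_modified(when);

    // Follows the symlink: the timestamps of the *target* file change.
    fs::set_times("link", times)?;

    // Does not follow: the timestamps of the symlink itself change,
    // which is what `fs::symlink_metadata("link")` reports.
    fs::set_times_nofollow("link", times)?;
    Ok(())
}
```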
+ #[cfg(not(any( + windows, + all( + unix, + not(any( + target_os = "android", + target_os = "redox", + target_os = "espidf", + target_os = "horizon" + )) + ) + )))] + Err(e) if e.kind() == ErrorKind::Unsupported => return, + Err(e) => panic!("error setting symlink times: {e:?}"), + Ok(_) => {} + } + + // Read symlink metadata (without following) + let metadata = fs::symlink_metadata(&link).unwrap(); + assert_eq!(metadata.accessed().unwrap(), accessed); + assert_eq!(metadata.modified().unwrap(), modified); + #[cfg(any(windows, target_vendor = "apple"))] + { + assert_eq!(metadata.created().unwrap(), created); + } + + // Verify that the target file's times were NOT changed + let target_metadata = fs::metadata(&target).unwrap(); + assert_ne!(target_metadata.accessed().unwrap(), accessed); + assert_ne!(target_metadata.modified().unwrap(), modified); +} diff --git a/std/src/io/copy.rs b/std/src/io/copy.rs index d060ad528973f..2b558efb8885e 100644 --- a/std/src/io/copy.rs +++ b/std/src/io/copy.rs @@ -4,6 +4,7 @@ use crate::cmp; use crate::collections::VecDeque; use crate::io::IoSlice; use crate::mem::MaybeUninit; +use crate::sys::io::{CopyState, kernel_copy}; #[cfg(test)] mod tests; @@ -63,19 +64,17 @@ where R: Read, W: Write, { - cfg_select! { - any(target_os = "linux", target_os = "android") => { - crate::sys::kernel_copy::copy_spec(reader, writer) - } - _ => { - generic_copy(reader, writer) + match kernel_copy(reader, writer)? { + CopyState::Ended(copied) => Ok(copied), + CopyState::Fallback(copied) => { + generic_copy(reader, writer).map(|additional| copied + additional) } } } /// The userspace read-write-loop implementation of `io::copy` that is used when /// OS-specific specializations for copy offloading are not available or not applicable. -pub(crate) fn generic_copy(reader: &mut R, writer: &mut W) -> Result +fn generic_copy(reader: &mut R, writer: &mut W) -> Result where R: Read, W: Write, @@ -269,7 +268,7 @@ impl BufferedWriterSpec for Vec { } } -pub fn stack_buffer_copy( +fn stack_buffer_copy( reader: &mut R, writer: &mut W, ) -> Result { diff --git a/std/src/io/mod.rs b/std/src/io/mod.rs index 25a4661a0bc9c..b7756befa11e9 100644 --- a/std/src/io/mod.rs +++ b/std/src/io/mod.rs @@ -330,7 +330,7 @@ pub use self::{ stdio::{Stderr, StderrLock, Stdin, StdinLock, Stdout, StdoutLock, stderr, stdin, stdout}, util::{Empty, Repeat, Sink, empty, repeat, sink}, }; -use crate::mem::take; +use crate::mem::{MaybeUninit, take}; use crate::ops::{Deref, DerefMut}; use crate::{cmp, fmt, slice, str, sys}; @@ -1242,6 +1242,46 @@ pub trait Read { { Take { inner: self, len: limit, limit } } + + /// Read and return a fixed array of bytes from this source. + /// + /// This function uses an array sized based on a const generic size known at compile time. You + /// can specify the size with turbofish (`reader.read_array::<8>()`), or let type inference + /// determine the number of bytes needed based on how the return value gets used. For instance, + /// this function works well with functions like [`u64::from_le_bytes`] to turn an array of + /// bytes into an integer of the same size. + /// + /// Like `read_exact`, if this function encounters an "end of file" before reading the desired + /// number of bytes, it returns an error of the kind [`ErrorKind::UnexpectedEof`]. 
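For context on the `io::copy` rework earlier in this hunk: `kernel_copy` now reports through a `CopyState` whether the OS-level fast path finished the job or only made partial progress, and the portable loop copies the remainder. Below is a self-contained sketch of that dispatch shape; the `CopyState` enum and `kernel_copy` function here are stand-ins for the private `std::sys::io` items, not the real implementation.

```rust
use std::io::{self, Read, Write};

// Stand-in for the private `CopyState` returned by the platform layer.
#[allow(dead_code)]
enum CopyState {
    Ended(u64),    // the specialized path copied everything
    Fallback(u64), // it copied this many bytes, then bailed out
}

// Stand-in for a platform fast path that cannot handle this pair of streams.
fn kernel_copy<R: Read, W: Write>(_r: &mut R, _w: &mut W) -> io::Result<CopyState> {
    Ok(CopyState::Fallback(0))
}

// Userspace read/write loop, in the spirit of `generic_copy`.
fn generic_copy<R: Read, W: Write>(r: &mut R, w: &mut W) -> io::Result<u64> {
    let mut buf = [0u8; 8192];
    let mut total = 0;
    loop {
        let n = r.read(&mut buf)?;
        if n == 0 {
            return Ok(total);
        }
        w.write_all(&buf[..n])?;
        total += n as u64;
    }
}

fn copy<R: Read, W: Write>(r: &mut R, w: &mut W) -> io::Result<u64> {
    match kernel_copy(r, w)? {
        CopyState::Ended(copied) => Ok(copied),
        // Count what the fast path already moved, then finish in userspace.
        CopyState::Fallback(copied) => generic_copy(r, w).map(|more| copied + more),
    }
}

fn main() -> io::Result<()> {
    let mut src = &b"hello world"[..];
    let mut dst = Vec::new();
    assert_eq!(copy(&mut src, &mut dst)?, 11);
    assert_eq!(dst, b"hello world");
    Ok(())
}
```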
+    ///
+    /// ```
+    /// #![feature(read_array)]
+    /// use std::io::Cursor;
+    /// use std::io::prelude::*;
+    ///
+    /// fn main() -> std::io::Result<()> {
+    ///     let mut buf = Cursor::new([1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4, 3, 2]);
+    ///     let x = u64::from_le_bytes(buf.read_array()?);
+    ///     let y = u32::from_be_bytes(buf.read_array()?);
+    ///     let z = u16::from_be_bytes(buf.read_array()?);
+    ///     assert_eq!(x, 0x807060504030201);
+    ///     assert_eq!(y, 0x9080706);
+    ///     assert_eq!(z, 0x504);
+    ///     Ok(())
+    /// }
+    /// ```
+    #[unstable(feature = "read_array", issue = "148848")]
+    fn read_array<const N: usize>(&mut self) -> Result<[u8; N]>
+    where
+        Self: Sized,
+    {
+        let mut buf = [MaybeUninit::uninit(); N];
+        let mut borrowed_buf = BorrowedBuf::from(buf.as_mut_slice());
+        self.read_buf_exact(borrowed_buf.unfilled())?;
+        // Guard against incorrect `read_buf_exact` implementations.
+        assert_eq!(borrowed_buf.len(), N);
+        Ok(unsafe { MaybeUninit::array_assume_init(buf) })
+    }
 }
 
 /// Reads all bytes from a [reader][Read] into a new [`String`].
diff --git a/std/src/lib.rs b/std/src/lib.rs
index da41c1216c4d5..07618550a9cb2 100644
--- a/std/src/lib.rs
+++ b/std/src/lib.rs
@@ -63,10 +63,10 @@
 //! type, but not the all-important methods.
 //!
 //! So for example there is a [page for the primitive type
-//! `i32`](primitive::i32) that lists all the methods that can be called on
-//! 32-bit integers (very useful), and there is a [page for the module
-//! `std::i32`] that documents the constant values [`MIN`] and [`MAX`] (rarely
-//! useful).
+//! `char`](primitive::char) that lists all the methods that can be called on
+//! characters (very useful), and there is a [page for the module
+//! `std::char`](crate::char) that documents iterator and error types created by these methods
+//! (rarely useful).
 //!
 //! Note the documentation for the primitives [`str`] and [`[T]`][prim@slice] (also
 //! called 'slice'). Many method calls on [`String`] and [`Vec`] are actually
@@ -180,9 +180,6 @@
 //!
 //!
 //! [I/O]: io
-//! [`MIN`]: i32::MIN
-//! [`MAX`]: i32::MAX
-//! [page for the module `std::i32`]: crate::i32
 //! [TCP]: net::TcpStream
 //! [The Rust Prelude]: prelude
 //!
[UDP]: net::UdpSocket @@ -279,7 +276,6 @@ #![feature(cfg_sanitizer_cfi)] #![feature(cfg_target_thread_local)] #![feature(cfi_encoding)] -#![feature(char_max_len)] #![feature(const_trait_impl)] #![feature(core_float_math)] #![feature(decl_macro)] @@ -293,8 +289,6 @@ #![feature(ffi_const)] #![feature(formatting_options)] #![feature(funnel_shifts)] -#![feature(hash_map_internals)] -#![feature(hash_map_macro)] #![feature(if_let_guard)] #![feature(intra_doc_pointers)] #![feature(iter_advance_by)] @@ -353,6 +347,7 @@ #![feature(int_from_ascii)] #![feature(ip)] #![feature(lazy_get)] +#![feature(maybe_uninit_array_assume_init)] #![feature(maybe_uninit_slice)] #![feature(maybe_uninit_write_slice)] #![feature(panic_can_unwind)] @@ -386,7 +381,6 @@ #![feature(try_reserve_kind)] #![feature(try_with_capacity)] #![feature(unique_rc_arc)] -#![feature(vec_into_raw_parts)] #![feature(wtf8_internals)] // tidy-alphabetical-end // @@ -677,7 +671,7 @@ pub mod arch { pub use std_detect::is_loongarch_feature_detected; #[unstable(feature = "is_riscv_feature_detected", issue = "111192")] pub use std_detect::is_riscv_feature_detected; - #[unstable(feature = "stdarch_s390x_feature_detection", issue = "135413")] + #[stable(feature = "stdarch_s390x_feature_detection", since = "CURRENT_RUSTC_VERSION")] pub use std_detect::is_s390x_feature_detected; #[stable(feature = "simd_x86", since = "1.27.0")] pub use std_detect::is_x86_feature_detected; diff --git a/std/src/macros.rs b/std/src/macros.rs index 254570ae9c836..25e2b7ea13703 100644 --- a/std/src/macros.rs +++ b/std/src/macros.rs @@ -379,77 +379,3 @@ macro_rules! dbg { ($($crate::dbg!($val)),+,) }; } - -#[doc(hidden)] -#[macro_export] -#[allow_internal_unstable(hash_map_internals)] -#[unstable(feature = "hash_map_internals", issue = "none")] -macro_rules! repetition_utils { - (@count $($tokens:tt),*) => {{ - [$($crate::repetition_utils!(@replace $tokens => ())),*].len() - }}; - - (@replace $x:tt => $y:tt) => { $y } -} - -/// Creates a [`HashMap`] containing the arguments. -/// -/// `hash_map!` allows specifying the entries that make -/// up the [`HashMap`] where the key and value are separated by a `=>`. -/// -/// The entries are separated by commas with a trailing comma being allowed. -/// -/// It is semantically equivalent to using repeated [`HashMap::insert`] -/// on a newly created hashmap. -/// -/// `hash_map!` will attempt to avoid repeated reallocations by -/// using [`HashMap::with_capacity`]. -/// -/// # Examples -/// -/// ```rust -/// #![feature(hash_map_macro)] -/// -/// let map = hash_map! { -/// "key" => "value", -/// "key1" => "value1" -/// }; -/// -/// assert_eq!(map.get("key"), Some(&"value")); -/// assert_eq!(map.get("key1"), Some(&"value1")); -/// assert!(map.get("brrrrrrooooommm").is_none()); -/// ``` -/// -/// And with a trailing comma -/// -///```rust -/// #![feature(hash_map_macro)] -/// -/// let map = hash_map! { -/// "key" => "value", // notice the , -/// }; -/// -/// assert_eq!(map.get("key"), Some(&"value")); -/// ``` -/// -/// The key and value are moved into the HashMap. -/// -/// [`HashMap`]: crate::collections::HashMap -/// [`HashMap::insert`]: crate::collections::HashMap::insert -/// [`HashMap::with_capacity`]: crate::collections::HashMap::with_capacity -#[macro_export] -#[allow_internal_unstable(hash_map_internals)] -#[unstable(feature = "hash_map_macro", issue = "144032")] -macro_rules! hash_map { - () => {{ - $crate::collections::HashMap::new() - }}; - - ( $( $key:expr => $value:expr ),* $(,)? 
) => {{ - let mut map = $crate::collections::HashMap::with_capacity( - const { $crate::repetition_utils!(@count $($key),*) } - ); - $( map.insert($key, $value); )* - map - }} -} diff --git a/std/src/num/f128.rs b/std/src/num/f128.rs index 40061d089284b..3b787713afa24 100644 --- a/std/src/num/f128.rs +++ b/std/src/num/f128.rs @@ -37,6 +37,7 @@ impl f128 { /// /// assert_eq!(f128::powf(1.0, f128::NAN), 1.0); /// assert_eq!(f128::powf(f128::NAN, 0.0), 1.0); + /// assert_eq!(f128::powf(0.0, 0.0), 1.0); /// # } /// ``` #[inline] diff --git a/std/src/num/f16.rs b/std/src/num/f16.rs index 0d43b60a62fea..4af21c95c9baf 100644 --- a/std/src/num/f16.rs +++ b/std/src/num/f16.rs @@ -37,6 +37,7 @@ impl f16 { /// /// assert_eq!(f16::powf(1.0, f16::NAN), 1.0); /// assert_eq!(f16::powf(f16::NAN, 0.0), 1.0); + /// assert_eq!(f16::powf(0.0, 0.0), 1.0); /// # } /// ``` #[inline] diff --git a/std/src/num/f32.rs b/std/src/num/f32.rs index c9e192201affc..09ced388a3399 100644 --- a/std/src/num/f32.rs +++ b/std/src/num/f32.rs @@ -308,6 +308,7 @@ impl f32 { /// assert!(abs_difference <= 1e-5); /// /// assert_eq!(f32::powi(f32::NAN, 0), 1.0); + /// assert_eq!(f32::powi(0.0, 0), 1.0); /// ``` #[rustc_allow_incoherent_impl] #[must_use = "method returns a new number and does not mutate the original value"] @@ -333,6 +334,7 @@ impl f32 { /// /// assert_eq!(f32::powf(1.0, f32::NAN), 1.0); /// assert_eq!(f32::powf(f32::NAN, 0.0), 1.0); + /// assert_eq!(f32::powf(0.0, 0.0), 1.0); /// ``` #[rustc_allow_incoherent_impl] #[must_use = "method returns a new number and does not mutate the original value"] diff --git a/std/src/num/f64.rs b/std/src/num/f64.rs index 11874f9280f02..79adf076e4b1a 100644 --- a/std/src/num/f64.rs +++ b/std/src/num/f64.rs @@ -308,6 +308,7 @@ impl f64 { /// assert!(abs_difference <= 1e-14); /// /// assert_eq!(f64::powi(f64::NAN, 0), 1.0); + /// assert_eq!(f64::powi(0.0, 0), 1.0); /// ``` #[rustc_allow_incoherent_impl] #[must_use = "method returns a new number and does not mutate the original value"] @@ -333,6 +334,7 @@ impl f64 { /// /// assert_eq!(f64::powf(1.0, f64::NAN), 1.0); /// assert_eq!(f64::powf(f64::NAN, 0.0), 1.0); + /// assert_eq!(f64::powf(0.0, 0.0), 1.0); /// ``` #[rustc_allow_incoherent_impl] #[must_use = "method returns a new number and does not mutate the original value"] diff --git a/std/src/os/fd/owned.rs b/std/src/os/fd/owned.rs index 10e1e73a115bd..6a0e7a640028b 100644 --- a/std/src/os/fd/owned.rs +++ b/std/src/os/fd/owned.rs @@ -3,6 +3,9 @@ #![stable(feature = "io_safety", since = "1.63.0")] #![deny(unsafe_op_in_unsafe_fn)] +#[cfg(target_os = "motor")] +use moto_rt::libc; + use super::raw::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; #[cfg(not(target_os = "trusty"))] use crate::fs; @@ -12,7 +15,8 @@ use crate::mem::ManuallyDrop; target_arch = "wasm32", target_env = "sgx", target_os = "hermit", - target_os = "trusty" + target_os = "trusty", + target_os = "motor" )))] use crate::sys::cvt; #[cfg(not(target_os = "trusty"))] @@ -95,7 +99,12 @@ impl OwnedFd { impl BorrowedFd<'_> { /// Creates a new `OwnedFd` instance that shares the same underlying file /// description as the existing `BorrowedFd` instance. 
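The `powf`/`powi` doc additions above all pin down the zero-exponent edge case. A standalone check of the same identities on the stable float types (plain illustration, not taken from the patch):

```rust
fn main() {
    // Anything raised to the power zero is 1.0, including 0.0 and NaN,
    // matching IEEE 754 `pow`/`pown` semantics.
    assert_eq!(f32::powf(0.0, 0.0), 1.0);
    assert_eq!(f32::powi(0.0, 0), 1.0);
    assert_eq!(f64::powf(f64::NAN, 0.0), 1.0);
    assert_eq!(f64::powi(f64::NAN, 0), 1.0);
}
```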
-    #[cfg(not(any(target_arch = "wasm32", target_os = "hermit", target_os = "trusty")))]
+    #[cfg(not(any(
+        target_arch = "wasm32",
+        target_os = "hermit",
+        target_os = "trusty",
+        target_os = "motor"
+    )))]
     #[stable(feature = "io_safety", since = "1.63.0")]
     pub fn try_clone_to_owned(&self) -> crate::io::Result<OwnedFd> {
         // We want to atomically duplicate this file descriptor and set the
@@ -123,6 +132,15 @@ impl BorrowedFd<'_> {
     pub fn try_clone_to_owned(&self) -> crate::io::Result<OwnedFd> {
         Err(crate::io::Error::UNSUPPORTED_PLATFORM)
     }
+
+    /// Creates a new `OwnedFd` instance that shares the same underlying file
+    /// description as the existing `BorrowedFd` instance.
+    #[cfg(target_os = "motor")]
+    #[stable(feature = "io_safety", since = "1.63.0")]
+    pub fn try_clone_to_owned(&self) -> crate::io::Result<OwnedFd> {
+        let fd = moto_rt::fs::duplicate(self.as_raw_fd()).map_err(crate::sys::map_motor_error)?;
+        Ok(unsafe { OwnedFd::from_raw_fd(fd) })
+    }
 }
 
 #[stable(feature = "io_safety", since = "1.63.0")]
diff --git a/std/src/os/fd/raw.rs b/std/src/os/fd/raw.rs
index 34a6cf1a8b84d..c01e6b83cd366 100644
--- a/std/src/os/fd/raw.rs
+++ b/std/src/os/fd/raw.rs
@@ -4,13 +4,17 @@
 #[cfg(target_os = "hermit")]
 use hermit_abi as libc;
+#[cfg(target_os = "motor")]
+use moto_rt::libc;
+#[cfg(target_os = "motor")]
+use super::owned::OwnedFd;
 #[cfg(not(target_os = "trusty"))]
 use crate::fs;
 use crate::io;
 #[cfg(target_os = "hermit")]
 use crate::os::hermit::io::OwnedFd;
-#[cfg(not(target_os = "hermit"))]
+#[cfg(all(not(target_os = "hermit"), not(target_os = "motor")))]
 use crate::os::raw;
 #[cfg(all(doc, not(target_arch = "wasm32")))]
 use crate::os::unix::io::AsFd;
@@ -23,10 +27,10 @@ use crate::sys_common::{AsInner, FromInner, IntoInner};
 
 /// Raw file descriptors.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg(not(target_os = "hermit"))]
+#[cfg(all(not(target_os = "hermit"), not(target_os = "motor")))]
 pub type RawFd = raw::c_int;
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg(target_os = "hermit")]
+#[cfg(any(target_os = "hermit", target_os = "motor"))]
 pub type RawFd = i32;
 
 /// A trait to extract the raw file descriptor from an underlying object.
diff --git a/std/src/os/mod.rs b/std/src/os/mod.rs
index fd7a11433af1b..76374402be4b3 100644
--- a/std/src/os/mod.rs
+++ b/std/src/os/mod.rs
@@ -155,6 +155,8 @@ pub mod ios;
 pub mod l4re;
 #[cfg(target_os = "macos")]
 pub mod macos;
+#[cfg(target_os = "motor")]
+pub mod motor;
 #[cfg(target_os = "netbsd")]
 pub mod netbsd;
 #[cfg(target_os = "nto")]
@@ -182,7 +184,14 @@ pub mod vxworks;
 #[cfg(target_os = "xous")]
 pub mod xous;
 
-#[cfg(any(unix, target_os = "hermit", target_os = "trusty", target_os = "wasi", doc))]
+#[cfg(any(
+    unix,
+    target_os = "hermit",
+    target_os = "trusty",
+    target_os = "wasi",
+    target_os = "motor",
+    doc
+))]
 pub mod fd;
 
 #[cfg(any(target_os = "linux", target_os = "android", target_os = "cygwin", doc))]
diff --git a/std/src/os/motor/ffi.rs b/std/src/os/motor/ffi.rs
new file mode 100644
index 0000000000000..10e8da392dcc5
--- /dev/null
+++ b/std/src/os/motor/ffi.rs
@@ -0,0 +1,42 @@
+//! Motor OS-specific extensions to primitives in the [`std::ffi`] module.
+#![unstable(feature = "motor_ext", issue = "147456")]
+
+use crate::ffi::{OsStr, OsString};
+use crate::sealed::Sealed;
+use crate::sys_common::{AsInner, IntoInner};
+
+/// Motor OS–specific extensions to [`OsString`].
+///
+/// This trait is sealed: it cannot be implemented outside the standard library.
+/// This is so that future additional methods are not breaking changes.
+pub trait OsStringExt: Sealed { + /// Yields the underlying UTF-8 string of this [`OsString`]. + /// + /// OS strings on Motor OS are guaranteed to be UTF-8, so are just strings. + fn into_string(self) -> String; +} + +impl OsStringExt for OsString { + #[inline] + fn into_string(self) -> String { + self.into_inner().inner + } +} + +/// Motor OS–specific extensions to [`OsString`]. +/// +/// This trait is sealed: it cannot be implemented outside the standard library. +/// This is so that future additional methods are not breaking changes. +pub trait OsStrExt: Sealed { + /// Gets the underlying UTF-8 string view of the [`OsStr`] slice. + /// + /// OS strings on Motor OS are guaranteed to be UTF-8, so are just strings. + fn as_str(&self) -> &str; +} + +impl OsStrExt for OsStr { + #[inline] + fn as_str(&self) -> &str { + &self.as_inner().inner + } +} diff --git a/std/src/os/motor/mod.rs b/std/src/os/motor/mod.rs new file mode 100644 index 0000000000000..18da079c74a15 --- /dev/null +++ b/std/src/os/motor/mod.rs @@ -0,0 +1,4 @@ +#![unstable(feature = "motor_ext", issue = "147456")] + +pub mod ffi; +pub mod process; diff --git a/std/src/os/motor/process.rs b/std/src/os/motor/process.rs new file mode 100644 index 0000000000000..015fbcb97f97d --- /dev/null +++ b/std/src/os/motor/process.rs @@ -0,0 +1,15 @@ +#![unstable(feature = "motor_ext", issue = "147456")] + +use crate::sealed::Sealed; +use crate::sys_common::AsInner; + +pub trait ChildExt: Sealed { + /// Extracts the main thread raw handle, without taking ownership + fn sys_handle(&self) -> u64; +} + +impl ChildExt for crate::process::Child { + fn sys_handle(&self) -> u64 { + self.as_inner().handle() + } +} diff --git a/std/src/os/unix/net/tests.rs b/std/src/os/unix/net/tests.rs index 4666b5e3c6c18..d88c97113efeb 100644 --- a/std/src/os/unix/net/tests.rs +++ b/std/src/os/unix/net/tests.rs @@ -24,6 +24,7 @@ macro_rules! 
or_panic { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn basic() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -51,6 +52,7 @@ fn basic() { } #[test] +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn vectored() { let (mut s1, mut s2) = or_panic!(UnixStream::pair()); @@ -71,6 +73,7 @@ fn vectored() { } #[test] +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn pair() { let msg1 = b"hello"; let msg2 = b"world!"; @@ -95,6 +98,7 @@ fn pair() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn try_clone() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -122,6 +126,7 @@ fn try_clone() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn iter() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -144,6 +149,7 @@ fn iter() { } #[test] +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn long_path() { let dir = tmpdir(); let socket_path = dir.path().join( @@ -173,6 +179,7 @@ fn long_path() { #[cfg(not(target_os = "nto"))] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin connect needs handshake +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn timeouts() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -202,6 +209,7 @@ fn timeouts() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin connect needs handshake +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_read_timeout() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -223,6 +231,7 @@ fn test_read_timeout() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin connect needs handshake +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_read_with_timeout() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -252,6 +261,7 @@ fn test_read_with_timeout() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin connect needs handshake +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_stream_timeout_zero_duration() { let dir = tmpdir(); let socket_path = dir.path().join("sock"); @@ -272,6 +282,7 @@ fn test_unix_stream_timeout_zero_duration() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram() { let dir = tmpdir(); let path1 = dir.path().join("sock1"); @@ -290,6 +301,7 @@ fn test_unix_datagram() { 
#[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin autobinds an address +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unnamed_unix_datagram() { let dir = tmpdir(); let path1 = dir.path().join("sock1"); @@ -308,6 +320,7 @@ fn test_unnamed_unix_datagram() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram_connect_to_recv_addr() { let dir = tmpdir(); let path1 = dir.path().join("sock1"); @@ -334,6 +347,7 @@ fn test_unix_datagram_connect_to_recv_addr() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets #[cfg_attr(target_os = "cygwin", ignore)] // Cygwin autobinds an address +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_connect_unix_datagram() { let dir = tmpdir(); let path1 = dir.path().join("sock1"); @@ -361,6 +375,7 @@ fn test_connect_unix_datagram() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram_recv() { let dir = tmpdir(); let path1 = dir.path().join("sock1"); @@ -378,6 +393,7 @@ fn test_unix_datagram_recv() { } #[test] +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn datagram_pair() { let msg1 = b"hello"; let msg2 = b"world!"; @@ -404,6 +420,7 @@ fn datagram_pair() { // when passed zero Durations #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram_timeout_zero_duration() { let dir = tmpdir(); let path = dir.path().join("sock"); @@ -562,6 +579,7 @@ fn test_abstract_no_pathname_and_not_unnamed() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_stream_peek() { let (txdone, rxdone) = crate::sync::mpsc::channel(); @@ -595,6 +613,7 @@ fn test_unix_stream_peek() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram_peek() { let dir = tmpdir(); let path1 = dir.path().join("sock"); @@ -620,6 +639,7 @@ fn test_unix_datagram_peek() { #[test] #[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets +#[cfg_attr(target_os = "vxworks", ignore = "Unix sockets are not implemented in VxWorks")] fn test_unix_datagram_peek_from() { let dir = tmpdir(); let path1 = dir.path().join("sock"); diff --git a/std/src/os/unix/process.rs b/std/src/os/unix/process.rs index 5b7b5a8ea803d..ee0c460f7dfa7 100644 --- a/std/src/os/unix/process.rs +++ b/std/src/os/unix/process.rs @@ -80,6 +80,9 @@ pub trait CommandExt: Sealed { /// or acquiring a mutex are not guaranteed to work (due to /// other threads perhaps still running when the `fork` was run). 
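The fork-safety caveats in this doc comment (and the note added just below about `Error::new`/`Error::other` allocating) translate into a fairly rigid usage pattern: the `pre_exec` closure must stay allocation-free and async-signal-safe. A minimal Unix-only sketch of what that looks like in practice; the `"true"` command is just a placeholder:

```rust
use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() -> std::io::Result<()> {
    let mut cmd = Command::new("true");
    unsafe {
        cmd.pre_exec(|| {
            // Only async-signal-safe work belongs here: raw syscalls, no
            // allocation. Building io::Error::new/other would allocate, so a
            // fatal problem in this closure is better reported by panicking.
            Ok(())
        });
    }
    cmd.status()?;
    Ok(())
}
```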
/// + /// Note that the list of allocating functions includes [`Error::new`] and + /// [`Error::other`]. To signal a non-trivial error, prefer [`panic!`]. + /// /// For further details refer to the [POSIX fork() specification] /// and the equivalent documentation for any targeted /// platform, especially the requirements around *async-signal-safety*. @@ -102,6 +105,8 @@ pub trait CommandExt: Sealed { /// [POSIX fork() specification]: /// https://pubs.opengroup.org/onlinepubs/9699919799/functions/fork.html /// [`std::env`]: mod@crate::env + /// [`Error::new`]: crate::io::Error::new + /// [`Error::other`]: crate::io::Error::other #[stable(feature = "process_pre_exec", since = "1.34.0")] unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command where diff --git a/std/src/os/windows/process.rs b/std/src/os/windows/process.rs index c223eee95b5f5..f21ed51606f6d 100644 --- a/std/src/os/windows/process.rs +++ b/std/src/os/windows/process.rs @@ -365,6 +365,20 @@ pub trait CommandExt: Sealed { /// [1]: https://learn.microsoft.com/en-us/windows/win32/api/processthreadsapi/ns-processthreadsapi-startupinfoa #[unstable(feature = "windows_process_extensions_startupinfo", issue = "141010")] fn startupinfo_force_feedback(&mut self, enabled: Option) -> &mut process::Command; + + /// If this flag is set to `true`, each inheritable handle in the calling + /// process is inherited by the new process. If the flag is `false`, the + /// handles are not inherited. + /// + /// The default value for this flag is `true`. + /// + /// **Note** that inherited handles have the same value and access rights + /// as the original handles. For additional discussion of inheritable handles, + /// see the [Remarks][1] section of the `CreateProcessW` documentation. + /// + /// [1]: https://learn.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessw#remarks + #[unstable(feature = "windows_process_extensions_inherit_handles", issue = "146407")] + fn inherit_handles(&mut self, inherit_handles: bool) -> &mut process::Command; } #[stable(feature = "windows_process_extensions", since = "1.16.0")] @@ -421,6 +435,11 @@ impl CommandExt for process::Command { self.as_inner_mut().startupinfo_force_feedback(enabled); self } + + fn inherit_handles(&mut self, inherit_handles: bool) -> &mut process::Command { + self.as_inner_mut().inherit_handles(inherit_handles); + self + } } #[unstable(feature = "windows_process_extensions_main_thread_handle", issue = "96723")] diff --git a/std/src/panicking.rs b/std/src/panicking.rs index b7be869c4eb48..7efb7ad8ee8b3 100644 --- a/std/src/panicking.rs +++ b/std/src/panicking.rs @@ -22,7 +22,7 @@ use crate::io::try_set_output_capture; use crate::mem::{self, ManuallyDrop}; use crate::panic::{BacktraceStyle, PanicHookInfo}; use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; -use crate::sync::{PoisonError, RwLock}; +use crate::sync::nonpoison::RwLock; use crate::sys::backtrace; use crate::sys::stdio::panic_output; use crate::{fmt, intrinsics, process, thread}; @@ -144,13 +144,9 @@ pub fn set_hook(hook: Box) + 'static + Sync + Send>) { panic!("cannot modify the panic hook from a panicking thread"); } - let new = Hook::Custom(hook); - let mut hook = HOOK.write().unwrap_or_else(PoisonError::into_inner); - let old = mem::replace(&mut *hook, new); - drop(hook); - // Only drop the old hook after releasing the lock to avoid deadlocking - // if its destructor panics. 
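The `inherit_handles` builder method added above for Windows `Command`s has no example in the diff itself; a hedged sketch of how it might be called on a nightly with the `windows_process_extensions_inherit_handles` gate enabled (the command line is a placeholder):

```rust
#![feature(windows_process_extensions_inherit_handles)]
use std::os::windows::process::CommandExt;
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("cmd")
        .args(["/C", "echo hello"])
        // Opt the child out of inheriting the parent's inheritable handles.
        .inherit_handles(false)
        .status()?;
    assert!(status.success());
    Ok(())
}
```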
- drop(old); + // Drop the old hook after changing the hook to avoid deadlocking if its + // destructor panics. + drop(HOOK.replace(Hook::Custom(hook))); } /// Unregisters the current panic hook and returns it, registering the default hook @@ -188,11 +184,7 @@ pub fn take_hook() -> Box) + 'static + Sync + Send> { panic!("cannot modify the panic hook from a panicking thread"); } - let mut hook = HOOK.write().unwrap_or_else(PoisonError::into_inner); - let old_hook = mem::take(&mut *hook); - drop(hook); - - old_hook.into_box() + HOOK.replace(Hook::Default).into_box() } /// Atomic combination of [`take_hook`] and [`set_hook`]. Use this to replace the panic handler with @@ -215,10 +207,10 @@ pub fn take_hook() -> Box) + 'static + Sync + Send> { /// /// // Equivalent to /// // let prev = panic::take_hook(); -/// // panic::set_hook(move |info| { +/// // panic::set_hook(Box::new(move |info| { /// // println!("..."); /// // prev(info); -/// // ); +/// // })); /// panic::update_hook(move |prev, info| { /// println!("Print custom message and execute panic handler as usual"); /// prev(info); @@ -238,7 +230,7 @@ where panic!("cannot modify the panic hook from a panicking thread"); } - let mut hook = HOOK.write().unwrap_or_else(PoisonError::into_inner); + let mut hook = HOOK.write(); let prev = mem::take(&mut *hook).into_box(); *hook = Hook::Custom(Box::new(move |info| hook_fn(&prev, info))); } @@ -822,7 +814,7 @@ fn panic_with_hook( crate::process::abort(); } - match *HOOK.read().unwrap_or_else(PoisonError::into_inner) { + match *HOOK.read() { // Some platforms (like wasm) know that printing to stderr won't ever actually // print anything, and if that's the case we can skip the default // hook. Since string formatting happens lazily when calling `payload` diff --git a/std/src/path.rs b/std/src/path.rs index 6e3b1e6e47d0e..114fcc796c525 100644 --- a/std/src/path.rs +++ b/std/src/path.rs @@ -79,7 +79,7 @@ use crate::ops::{self, Deref}; use crate::rc::Rc; use crate::str::FromStr; use crate::sync::Arc; -use crate::sys::path::{MAIN_SEP_STR, is_sep_byte, is_verbatim_sep, parse_prefix}; +use crate::sys::path::{HAS_PREFIXES, MAIN_SEP_STR, is_sep_byte, is_verbatim_sep, parse_prefix}; use crate::{cmp, fmt, fs, io, sys}; //////////////////////////////////////////////////////////////////////////////// @@ -643,17 +643,26 @@ impl<'a> Components<'a> { // how long is the prefix, if any? #[inline] fn prefix_len(&self) -> usize { + if !HAS_PREFIXES { + return 0; + } self.prefix.as_ref().map(Prefix::len).unwrap_or(0) } #[inline] fn prefix_verbatim(&self) -> bool { + if !HAS_PREFIXES { + return false; + } self.prefix.as_ref().map(Prefix::is_verbatim).unwrap_or(false) } /// how much of the prefix is left from the point of view of iteration? #[inline] fn prefix_remaining(&self) -> usize { + if !HAS_PREFIXES { + return 0; + } if self.front == State::Prefix { self.prefix_len() } else { 0 } } @@ -707,7 +716,7 @@ impl<'a> Components<'a> { if self.has_physical_root { return true; } - if let Some(p) = self.prefix { + if HAS_PREFIXES && let Some(p) = self.prefix { if p.has_implicit_root() { return true; } @@ -720,10 +729,10 @@ impl<'a> Components<'a> { if self.has_root() { return false; } - let mut iter = self.path[self.prefix_remaining()..].iter(); - match (iter.next(), iter.next()) { - (Some(&b'.'), None) => true, - (Some(&b'.'), Some(&b)) => self.is_sep_byte(b), + let slice = &self.path[self.prefix_remaining()..]; + match slice { + [b'.'] => true, + [b'.', b, ..] 
=> self.is_sep_byte(*b), _ => false, } } @@ -732,7 +741,7 @@ impl<'a> Components<'a> { // corresponding path component unsafe fn parse_single_component<'b>(&self, comp: &'b [u8]) -> Option> { match comp { - b"." if self.prefix_verbatim() => Some(Component::CurDir), + b"." if HAS_PREFIXES && self.prefix_verbatim() => Some(Component::CurDir), b"." => None, // . components are normalized away, except at // the beginning of a path, which is treated // separately via `include_cur_dir` @@ -889,18 +898,16 @@ impl<'a> Iterator for Components<'a> { fn next(&mut self) -> Option> { while !self.finished() { match self.front { - State::Prefix if self.prefix_len() > 0 => { - self.front = State::StartDir; - debug_assert!(self.prefix_len() <= self.path.len()); - let raw = &self.path[..self.prefix_len()]; - self.path = &self.path[self.prefix_len()..]; - return Some(Component::Prefix(PrefixComponent { - raw: unsafe { OsStr::from_encoded_bytes_unchecked(raw) }, - parsed: self.prefix.unwrap(), - })); + // most likely case first + State::Body if !self.path.is_empty() => { + let (size, comp) = self.parse_next_component(); + self.path = &self.path[size..]; + if comp.is_some() { + return comp; + } } - State::Prefix => { - self.front = State::StartDir; + State::Body => { + self.front = State::Done; } State::StartDir => { self.front = State::Body; @@ -908,7 +915,7 @@ impl<'a> Iterator for Components<'a> { debug_assert!(!self.path.is_empty()); self.path = &self.path[1..]; return Some(Component::RootDir); - } else if let Some(p) = self.prefix { + } else if HAS_PREFIXES && let Some(p) = self.prefix { if p.has_implicit_root() && !p.is_verbatim() { return Some(Component::RootDir); } @@ -918,15 +925,19 @@ impl<'a> Iterator for Components<'a> { return Some(Component::CurDir); } } - State::Body if !self.path.is_empty() => { - let (size, comp) = self.parse_next_component(); - self.path = &self.path[size..]; - if comp.is_some() { - return comp; - } + _ if const { !HAS_PREFIXES } => unreachable!(), + State::Prefix if self.prefix_len() == 0 => { + self.front = State::StartDir; } - State::Body => { - self.front = State::Done; + State::Prefix => { + self.front = State::StartDir; + debug_assert!(self.prefix_len() <= self.path.len()); + let raw = &self.path[..self.prefix_len()]; + self.path = &self.path[self.prefix_len()..]; + return Some(Component::Prefix(PrefixComponent { + raw: unsafe { OsStr::from_encoded_bytes_unchecked(raw) }, + parsed: self.prefix.unwrap(), + })); } State::Done => unreachable!(), } @@ -951,11 +962,11 @@ impl<'a> DoubleEndedIterator for Components<'a> { self.back = State::StartDir; } State::StartDir => { - self.back = State::Prefix; + self.back = if HAS_PREFIXES { State::Prefix } else { State::Done }; if self.has_physical_root { self.path = &self.path[..self.path.len() - 1]; return Some(Component::RootDir); - } else if let Some(p) = self.prefix { + } else if HAS_PREFIXES && let Some(p) = self.prefix { if p.has_implicit_root() && !p.is_verbatim() { return Some(Component::RootDir); } @@ -964,6 +975,7 @@ impl<'a> DoubleEndedIterator for Components<'a> { return Some(Component::CurDir); } } + _ if !HAS_PREFIXES => unreachable!(), State::Prefix if self.prefix_len() > 0 => { self.back = State::Done; return Some(Component::Prefix(PrefixComponent { @@ -2756,6 +2768,28 @@ impl Path { iter_after(self.components().rev(), child.components().rev()).is_some() } + /// Checks whether the `Path` is empty. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(path_is_empty)] + /// use std::path::Path; + /// + /// let path = Path::new(""); + /// assert!(path.is_empty()); + /// + /// let path = Path::new("foo"); + /// assert!(!path.is_empty()); + /// + /// let path = Path::new("."); + /// assert!(!path.is_empty()); + /// ``` + #[unstable(feature = "path_is_empty", issue = "148494")] + pub fn is_empty(&self) -> bool { + self.as_os_str().is_empty() + } + /// Extracts the stem (non-extension) portion of [`self.file_name`]. /// /// [`self.file_name`]: Path::file_name @@ -3116,7 +3150,9 @@ impl Path { path: self.as_u8_slice(), prefix, has_physical_root: has_physical_root(self.as_u8_slice(), prefix), - front: State::Prefix, + // use a platform-specific initial state to avoid one turn of + // the state-machine when the platform doesn't have a Prefix. + front: const { if HAS_PREFIXES { State::Prefix } else { State::StartDir } }, back: State::Body, } } diff --git a/std/src/sync/barrier.rs b/std/src/sync/barrier.rs index 8988126bd90c0..c2c18889dde7d 100644 --- a/std/src/sync/barrier.rs +++ b/std/src/sync/barrier.rs @@ -125,7 +125,7 @@ impl Barrier { let local_gen = lock.generation_id; lock.count += 1; if lock.count < self.num_threads { - let _guard = self.cvar.wait_while(lock, |state| local_gen == state.generation_id); + self.cvar.wait_while(&mut lock, |state| local_gen == state.generation_id); BarrierWaitResult(false) } else { lock.count = 0; diff --git a/std/src/sync/lazy_lock.rs b/std/src/sync/lazy_lock.rs index 3231125f7a13a..f1cae4b207c9a 100644 --- a/std/src/sync/lazy_lock.rs +++ b/std/src/sync/lazy_lock.rs @@ -1,4 +1,4 @@ -use super::poison::once::ExclusiveState; +use super::once::OnceExclusiveState; use crate::cell::UnsafeCell; use crate::mem::ManuallyDrop; use crate::ops::{Deref, DerefMut}; @@ -140,14 +140,18 @@ impl T> LazyLock { pub fn into_inner(mut this: Self) -> Result { let state = this.once.state(); match state { - ExclusiveState::Poisoned => panic_poisoned(), + OnceExclusiveState::Poisoned => panic_poisoned(), state => { let this = ManuallyDrop::new(this); let data = unsafe { ptr::read(&this.data) }.into_inner(); match state { - ExclusiveState::Incomplete => Err(ManuallyDrop::into_inner(unsafe { data.f })), - ExclusiveState::Complete => Ok(ManuallyDrop::into_inner(unsafe { data.value })), - ExclusiveState::Poisoned => unreachable!(), + OnceExclusiveState::Incomplete => { + Err(ManuallyDrop::into_inner(unsafe { data.f })) + } + OnceExclusiveState::Complete => { + Ok(ManuallyDrop::into_inner(unsafe { data.value })) + } + OnceExclusiveState::Poisoned => unreachable!(), } } } @@ -189,7 +193,7 @@ impl T> LazyLock { impl Drop for PoisonOnPanic<'_, T, F> { #[inline] fn drop(&mut self) { - self.0.once.set_state(ExclusiveState::Poisoned); + self.0.once.set_state(OnceExclusiveState::Poisoned); } } @@ -200,7 +204,7 @@ impl T> LazyLock { let guard = PoisonOnPanic(this); let data = f(); guard.0.data.get_mut().value = ManuallyDrop::new(data); - guard.0.once.set_state(ExclusiveState::Complete); + guard.0.once.set_state(OnceExclusiveState::Complete); core::mem::forget(guard); // SAFETY: We put the value there above. unsafe { &mut this.data.get_mut().value } @@ -208,11 +212,11 @@ impl T> LazyLock { let state = this.once.state(); match state { - ExclusiveState::Poisoned => panic_poisoned(), + OnceExclusiveState::Poisoned => panic_poisoned(), // SAFETY: The `Once` states we completed the initialization. 
- ExclusiveState::Complete => unsafe { &mut this.data.get_mut().value }, + OnceExclusiveState::Complete => unsafe { &mut this.data.get_mut().value }, // SAFETY: The state is `Incomplete`. - ExclusiveState::Incomplete => unsafe { really_init_mut(this) }, + OnceExclusiveState::Incomplete => unsafe { really_init_mut(this) }, } } @@ -293,7 +297,7 @@ impl LazyLock { match state { // SAFETY: // The closure has been run successfully, so `value` has been initialized. - ExclusiveState::Complete => Some(unsafe { &mut this.data.get_mut().value }), + OnceExclusiveState::Complete => Some(unsafe { &mut this.data.get_mut().value }), _ => None, } } @@ -332,11 +336,13 @@ impl LazyLock { impl Drop for LazyLock { fn drop(&mut self) { match self.once.state() { - ExclusiveState::Incomplete => unsafe { ManuallyDrop::drop(&mut self.data.get_mut().f) }, - ExclusiveState::Complete => unsafe { + OnceExclusiveState::Incomplete => unsafe { + ManuallyDrop::drop(&mut self.data.get_mut().f) + }, + OnceExclusiveState::Complete => unsafe { ManuallyDrop::drop(&mut self.data.get_mut().value) }, - ExclusiveState::Poisoned => {} + OnceExclusiveState::Poisoned => {} } } } diff --git a/std/src/sync/mod.rs b/std/src/sync/mod.rs index 97c04d07eaf1d..19b3040dcb279 100644 --- a/std/src/sync/mod.rs +++ b/std/src/sync/mod.rs @@ -142,7 +142,7 @@ //! most one thread at a time is able to access some data. //! //! - [`Once`]: Used for a thread-safe, one-time global initialization routine. -//! Mostly useful for implementing other types like `OnceLock`. +//! Mostly useful for implementing other types like [`OnceLock`]. //! //! - [`OnceLock`]: Used for thread-safe, one-time initialization of a //! variable, with potentially different initializers based on the caller. @@ -181,7 +181,24 @@ pub use alloc_crate::sync::UniqueArc; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::sync::{Arc, Weak}; -// FIXME(sync_nonpoison,sync_poison_mod): remove all `#[doc(inline)]` once the modules are stabilized. +#[unstable(feature = "mpmc_channel", issue = "126840")] +pub mod mpmc; +pub mod mpsc; + +pub(crate) mod once; // `pub(crate)` for the `sys::sync::once` implementations and `LazyLock`. + +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::once::{Once, OnceState}; + +#[stable(feature = "rust1", since = "1.0.0")] +#[doc(inline)] +#[expect(deprecated)] +pub use self::once::ONCE_INIT; + +mod barrier; +mod lazy_lock; +mod once_lock; +mod reentrant_lock; // These exist only in one flavor: no poisoning. #[stable(feature = "rust1", since = "1.0.0")] @@ -193,48 +210,37 @@ pub use self::once_lock::OnceLock; #[unstable(feature = "reentrant_lock", issue = "121440")] pub use self::reentrant_lock::{ReentrantLock, ReentrantLockGuard}; -// These make sense and exist only with poisoning. +// Note: in the future we will change the default version in `std::sync` to the non-poisoning +// version over an edition. +// See https://github.com/rust-lang/rust/issues/134645#issuecomment-3324577500 for more details. + +#[unstable(feature = "sync_nonpoison", issue = "134645")] +pub mod nonpoison; +#[unstable(feature = "sync_poison_mod", issue = "134646")] +pub mod poison; + +// FIXME(sync_poison_mod): remove all `#[doc(inline)]` once the modules are stabilized. + +// These exist only with poisoning. #[stable(feature = "rust1", since = "1.0.0")] #[doc(inline)] pub use self::poison::{LockResult, PoisonError}; -// These (should) exist in both flavors: with and without poisoning. 
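The reorganization here makes the split explicit: `std::sync` keeps re-exporting the poisoning primitives for now, while `std::sync::nonpoison` offers variants whose lock methods hand back guards directly. A rough sketch of the difference, assuming the gate names `sync_nonpoison` (used in this diff) and `nonpoison_mutex` (by analogy with the `nonpoison_condvar`/`nonpoison_rwlock` gates shown here):

```rust
#![feature(sync_nonpoison, nonpoison_mutex)]
use std::sync::Mutex; // today's default: the poisoning flavor
use std::sync::nonpoison;

fn main() {
    let poisoning = Mutex::new(0);
    // lock() yields a LockResult; a panic in another holder poisons the lock.
    *poisoning.lock().unwrap() += 1;

    let plain = nonpoison::Mutex::new(0);
    // lock() yields the guard directly; there is no poison state to check.
    *plain.lock() += 1;

    assert_eq!(*poisoning.lock().unwrap(), 1);
    assert_eq!(*plain.lock(), 1);
}
```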
-// FIXME(sync_nonpoison): implement nonpoison versions: -// * Mutex (nonpoison_mutex) -// * Condvar (nonpoison_condvar) -// * Once (nonpoison_once) -// * RwLock (nonpoison_rwlock) +// These exist in both flavors: with and without poisoning. // The historical default is the version with poisoning. #[stable(feature = "rust1", since = "1.0.0")] #[doc(inline)] pub use self::poison::{ - Mutex, MutexGuard, TryLockError, TryLockResult, - Condvar, - Once, OnceState, + TryLockError, TryLockResult, + Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, + Condvar, }; -#[stable(feature = "rust1", since = "1.0.0")] -#[doc(inline)] -#[expect(deprecated)] -pub use self::poison::ONCE_INIT; + #[unstable(feature = "mapped_lock_guards", issue = "117108")] #[doc(inline)] pub use self::poison::{MappedMutexGuard, MappedRwLockReadGuard, MappedRwLockWriteGuard}; -#[unstable(feature = "mpmc_channel", issue = "126840")] -pub mod mpmc; -pub mod mpsc; - -#[unstable(feature = "sync_nonpoison", issue = "134645")] -pub mod nonpoison; -#[unstable(feature = "sync_poison_mod", issue = "134646")] -pub mod poison; - -mod barrier; -mod lazy_lock; -mod once_lock; -mod reentrant_lock; - /// A type indicating whether a timed wait on a condition variable returned /// due to a time out or not. /// diff --git a/std/src/sync/nonpoison/condvar.rs b/std/src/sync/nonpoison/condvar.rs index 994fc6816a0d0..d2b251d7c44c1 100644 --- a/std/src/sync/nonpoison/condvar.rs +++ b/std/src/sync/nonpoison/condvar.rs @@ -1,4 +1,5 @@ use crate::fmt; +use crate::ops::DerefMut; use crate::sync::WaitTimeoutResult; use crate::sync::nonpoison::{MutexGuard, mutex}; use crate::sys::sync as sys; @@ -38,7 +39,7 @@ use crate::time::{Duration, Instant}; /// let (lock, cvar) = &*pair; /// let mut started = lock.lock(); /// while !*started { -/// started = cvar.wait(started); +/// cvar.wait(&mut started); /// } /// ``` /// @@ -115,16 +116,15 @@ impl Condvar { /// let mut started = lock.lock(); /// // As long as the value inside the `Mutex` is `false`, we wait. /// while !*started { - /// started = cvar.wait(started); + /// cvar.wait(&mut started); /// } /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] - pub fn wait<'a, T>(&self, guard: MutexGuard<'a, T>) -> MutexGuard<'a, T> { + pub fn wait(&self, guard: &mut MutexGuard<'_, T>) { unsafe { - let lock = mutex::guard_lock(&guard); + let lock = mutex::guard_lock(guard); self.inner.wait(lock); } - guard } /// Blocks the current thread until the provided condition becomes false. @@ -167,21 +167,17 @@ impl Condvar { /// // Wait for the thread to start up. /// let (lock, cvar) = &*pair; /// // As long as the value inside the `Mutex` is `true`, we wait. - /// let _guard = cvar.wait_while(lock.lock(), |pending| { *pending }); + /// let mut guard = lock.lock(); + /// cvar.wait_while(&mut guard, |pending| { *pending }); /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] - pub fn wait_while<'a, T, F>( - &self, - mut guard: MutexGuard<'a, T>, - mut condition: F, - ) -> MutexGuard<'a, T> + pub fn wait_while(&self, guard: &mut MutexGuard<'_, T>, mut condition: F) where F: FnMut(&mut T) -> bool, { - while condition(&mut *guard) { - guard = self.wait(guard); + while condition(guard.deref_mut()) { + self.wait(guard); } - guard } /// Waits on this condition variable for a notification, timing out after a @@ -206,7 +202,7 @@ impl Condvar { /// The returned [`WaitTimeoutResult`] value indicates if the timeout is /// known to have elapsed. 
/// - /// Like [`wait`], the lock specified will be re-acquired when this function + /// Like [`wait`], the lock specified will have been re-acquired when this function /// returns, regardless of whether the timeout elapsed or not. /// /// [`wait`]: Self::wait @@ -239,9 +235,8 @@ impl Condvar { /// let mut started = lock.lock(); /// // as long as the value inside the `Mutex` is `false`, we wait /// loop { - /// let result = cvar.wait_timeout(started, Duration::from_millis(10)); + /// let result = cvar.wait_timeout(&mut started, Duration::from_millis(10)); /// // 10 milliseconds have passed, or maybe the value changed! - /// started = result.0; /// if *started == true { /// // We received the notification and the value has been updated, we can leave. /// break @@ -249,16 +244,16 @@ impl Condvar { /// } /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] - pub fn wait_timeout<'a, T>( + pub fn wait_timeout( &self, - guard: MutexGuard<'a, T>, + guard: &mut MutexGuard<'_, T>, dur: Duration, - ) -> (MutexGuard<'a, T>, WaitTimeoutResult) { + ) -> WaitTimeoutResult { let success = unsafe { - let lock = mutex::guard_lock(&guard); + let lock = mutex::guard_lock(guard); self.inner.wait_timeout(lock, dur) }; - (guard, WaitTimeoutResult(!success)) + WaitTimeoutResult(!success) } /// Waits on this condition variable for a notification, timing out after a @@ -277,7 +272,7 @@ impl Condvar { /// The returned [`WaitTimeoutResult`] value indicates if the timeout is /// known to have elapsed without the condition being met. /// - /// Like [`wait_while`], the lock specified will be re-acquired when this + /// Like [`wait_while`], the lock specified will have been re-acquired when this /// function returns, regardless of whether the timeout elapsed or not. /// /// [`wait_while`]: Self::wait_while @@ -307,37 +302,39 @@ impl Condvar { /// /// // wait for the thread to start up /// let (lock, cvar) = &*pair; + /// let mut guard = lock.lock(); /// let result = cvar.wait_timeout_while( - /// lock.lock(), + /// &mut guard, /// Duration::from_millis(100), /// |&mut pending| pending, /// ); - /// if result.1.timed_out() { + /// if result.timed_out() { /// // timed-out without the condition ever evaluating to false. /// } - /// // access the locked mutex via result.0 + /// // access the locked mutex via guard /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] - pub fn wait_timeout_while<'a, T, F>( + pub fn wait_timeout_while( &self, - mut guard: MutexGuard<'a, T>, + guard: &mut MutexGuard<'_, T>, dur: Duration, mut condition: F, - ) -> (MutexGuard<'a, T>, WaitTimeoutResult) + ) -> WaitTimeoutResult where F: FnMut(&mut T) -> bool, { let start = Instant::now(); - loop { - if !condition(&mut *guard) { - return (guard, WaitTimeoutResult(false)); - } + + while condition(guard.deref_mut()) { let timeout = match dur.checked_sub(start.elapsed()) { Some(timeout) => timeout, - None => return (guard, WaitTimeoutResult(true)), + None => return WaitTimeoutResult(true), }; - guard = self.wait_timeout(guard, timeout).0; + + self.wait_timeout(guard, timeout); } + + WaitTimeoutResult(false) } /// Wakes up one blocked thread on this condvar. @@ -378,7 +375,7 @@ impl Condvar { /// let mut started = lock.lock(); /// // As long as the value inside the `Mutex` is `false`, we wait. 
/// while !*started { - /// started = cvar.wait(started); + /// cvar.wait(&mut started); /// } /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] @@ -422,7 +419,7 @@ impl Condvar { /// let mut started = lock.lock(); /// // As long as the value inside the `Mutex` is `false`, we wait. /// while !*started { - /// started = cvar.wait(started); + /// cvar.wait(&mut started); /// } /// ``` #[unstable(feature = "nonpoison_condvar", issue = "134645")] diff --git a/std/src/sync/nonpoison/rwlock.rs b/std/src/sync/nonpoison/rwlock.rs index 568c7f3868470..dc5d9479ba5a9 100644 --- a/std/src/sync/nonpoison/rwlock.rs +++ b/std/src/sync/nonpoison/rwlock.rs @@ -700,7 +700,6 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// /// ``` /// #![feature(nonpoison_rwlock)] - /// #![feature(rwlock_downgrade)] /// /// use std::sync::nonpoison::{RwLock, RwLockWriteGuard}; /// @@ -719,7 +718,6 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// /// ``` /// #![feature(nonpoison_rwlock)] - /// #![feature(rwlock_downgrade)] /// /// use std::sync::Arc; /// use std::sync::nonpoison::{RwLock, RwLockWriteGuard}; @@ -752,8 +750,7 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// # let final_check = rw.read(); /// # assert_eq!(*final_check, 3); /// ``` - #[unstable(feature = "rwlock_downgrade", issue = "128203")] - // #[unstable(feature = "nonpoison_rwlock", issue = "134645")] + #[unstable(feature = "nonpoison_rwlock", issue = "134645")] pub fn downgrade(s: Self) -> RwLockReadGuard<'rwlock, T> { let lock = s.lock; diff --git a/std/src/sync/poison/once.rs b/std/src/sync/once.rs similarity index 97% rename from std/src/sync/poison/once.rs rename to std/src/sync/once.rs index faf2913c54730..12cc32f381d18 100644 --- a/std/src/sync/poison/once.rs +++ b/std/src/sync/once.rs @@ -49,7 +49,9 @@ pub struct OnceState { pub(crate) inner: sys::OnceState, } -pub(crate) enum ExclusiveState { +/// Used for the internal implementation of `sys::sync::once` on different platforms and the +/// [`LazyLock`](crate::sync::LazyLock) implementation. +pub(crate) enum OnceExclusiveState { Incomplete, Poisoned, Complete, @@ -310,7 +312,7 @@ impl Once { /// be running, so the state must be either "incomplete", "poisoned" or /// "complete". #[inline] - pub(crate) fn state(&mut self) -> ExclusiveState { + pub(crate) fn state(&mut self) -> OnceExclusiveState { self.inner.state() } @@ -320,7 +322,7 @@ impl Once { /// be running, so the state must be either "incomplete", "poisoned" or /// "complete". #[inline] - pub(crate) fn set_state(&mut self, new_state: ExclusiveState) { + pub(crate) fn set_state(&mut self, new_state: OnceExclusiveState) { self.inner.set_state(new_state); } } diff --git a/std/src/sync/poison.rs b/std/src/sync/poison.rs index 17abdb9819bf9..9f40c01546632 100644 --- a/std/src/sync/poison.rs +++ b/std/src/sync/poison.rs @@ -13,8 +13,8 @@ //! the panics are recognized reliably or on a best-effort basis depend on the //! primitive. See [Overview](#overview) below. //! -//! For the alternative implementations that do not employ poisoning, -//! see [`std::sync::nonpoison`]. +//! The synchronization objects in this module have alternative implementations that do not employ +//! poisoning in the [`std::sync::nonpoison`] module. //! //! [`std::sync::nonpoison`]: crate::sync::nonpoison //! @@ -42,14 +42,6 @@ //! [`Mutex::lock()`] returns a [`LockResult`], providing a way to deal with //! the poisoned state. See [`Mutex`'s documentation](Mutex#poisoning) for more. //! -//! 
- [`Once`]: A thread-safe way to run a piece of code only once. -//! Mostly useful for implementing one-time global initialization. -//! -//! [`Once`] is reliably poisoned if the piece of code passed to -//! [`Once::call_once()`] or [`Once::call_once_force()`] panics. -//! When in poisoned state, subsequent calls to [`Once::call_once()`] will panic too. -//! [`Once::call_once_force()`] can be used to clear the poisoned state. -//! //! - [`RwLock`]: Provides a mutual exclusion mechanism which allows //! multiple readers at the same time, while allowing only one //! writer at a time. In some cases, this can be more efficient than @@ -59,6 +51,14 @@ //! Note, however, that an `RwLock` may only be poisoned if a panic occurs //! while it is locked exclusively (write mode). If a panic occurs in any reader, //! then the lock will not be poisoned. +//! +//! Note that the [`Once`] type also employs poisoning, but since it has non-poisoning `force` +//! methods available on it, there is no separate `nonpoison` and `poison` version. +//! +//! [`Once`]: crate::sync::Once + +// If we are not unwinding, `PoisonError` is uninhabited. +#![cfg_attr(not(panic = "unwind"), expect(unreachable_code))] #[stable(feature = "rust1", since = "1.0.0")] pub use self::condvar::Condvar; @@ -66,11 +66,6 @@ pub use self::condvar::Condvar; pub use self::mutex::MappedMutexGuard; #[stable(feature = "rust1", since = "1.0.0")] pub use self::mutex::{Mutex, MutexGuard}; -#[stable(feature = "rust1", since = "1.0.0")] -#[expect(deprecated)] -pub use self::once::ONCE_INIT; -#[stable(feature = "rust1", since = "1.0.0")] -pub use self::once::{Once, OnceState}; #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub use self::rwlock::{MappedRwLockReadGuard, MappedRwLockWriteGuard}; #[stable(feature = "rust1", since = "1.0.0")] @@ -85,7 +80,6 @@ use crate::thread; mod condvar; #[stable(feature = "rust1", since = "1.0.0")] mod mutex; -pub(crate) mod once; mod rwlock; pub(crate) struct Flag { diff --git a/std/src/sync/poison/rwlock.rs b/std/src/sync/poison/rwlock.rs index e3a72c73bf4ed..10e45bc8c11a3 100644 --- a/std/src/sync/poison/rwlock.rs +++ b/std/src/sync/poison/rwlock.rs @@ -813,8 +813,6 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// `downgrade` takes ownership of the `RwLockWriteGuard` and returns a [`RwLockReadGuard`]. /// /// ``` - /// #![feature(rwlock_downgrade)] - /// /// use std::sync::{RwLock, RwLockWriteGuard}; /// /// let rw = RwLock::new(0); @@ -831,8 +829,6 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// thread calling `downgrade` and any reads it performs after downgrading. /// /// ``` - /// #![feature(rwlock_downgrade)] - /// /// use std::sync::{Arc, RwLock, RwLockWriteGuard}; /// /// let rw = Arc::new(RwLock::new(1)); @@ -863,7 +859,7 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { /// # let final_check = rw.read().unwrap(); /// # assert_eq!(*final_check, 3); /// ``` - #[unstable(feature = "rwlock_downgrade", issue = "128203")] + #[stable(feature = "rwlock_downgrade", since = "1.92.0")] pub fn downgrade(s: Self) -> RwLockReadGuard<'rwlock, T> { let lock = s.lock; diff --git a/std/src/sys/alloc/mod.rs b/std/src/sys/alloc/mod.rs index 2045b2fecc6ac..f2f1d1c7feceb 100644 --- a/std/src/sys/alloc/mod.rs +++ b/std/src/sys/alloc/mod.rs @@ -83,6 +83,9 @@ cfg_select! 
{ target_os = "hermit" => { mod hermit; } + target_os = "motor" => { + mod motor; + } all(target_vendor = "fortanix", target_env = "sgx") => { mod sgx; } diff --git a/std/src/sys/alloc/motor.rs b/std/src/sys/alloc/motor.rs new file mode 100644 index 0000000000000..271e3c40c26ae --- /dev/null +++ b/std/src/sys/alloc/motor.rs @@ -0,0 +1,28 @@ +use crate::alloc::{GlobalAlloc, Layout, System}; + +#[stable(feature = "alloc_system_type", since = "1.28.0")] +unsafe impl GlobalAlloc for System { + #[inline] + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + // SAFETY: same requirements as in GlobalAlloc::alloc. + moto_rt::alloc::alloc(layout) + } + + #[inline] + unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { + // SAFETY: same requirements as in GlobalAlloc::alloc_zeroed. + moto_rt::alloc::alloc_zeroed(layout) + } + + #[inline] + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + // SAFETY: same requirements as in GlobalAlloc::dealloc. + unsafe { moto_rt::alloc::dealloc(ptr, layout) } + } + + #[inline] + unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { + // SAFETY: same requirements as in GlobalAlloc::realloc. + unsafe { moto_rt::alloc::realloc(ptr, layout, new_size) } + } +} diff --git a/std/src/sys/anonymous_pipe/mod.rs b/std/src/sys/anonymous_pipe/mod.rs index b6f464161ee2b..64b2c014b54fe 100644 --- a/std/src/sys/anonymous_pipe/mod.rs +++ b/std/src/sys/anonymous_pipe/mod.rs @@ -9,6 +9,10 @@ cfg_select! { mod windows; pub use windows::{AnonPipe, pipe}; } + target_os = "motor" => { + mod motor; + pub use motor::{AnonPipe, pipe}; + } _ => { mod unsupported; pub use unsupported::{AnonPipe, pipe}; diff --git a/std/src/sys/anonymous_pipe/motor.rs b/std/src/sys/anonymous_pipe/motor.rs new file mode 100644 index 0000000000000..dfe10f7fafe49 --- /dev/null +++ b/std/src/sys/anonymous_pipe/motor.rs @@ -0,0 +1,11 @@ +use crate::io; +use crate::sys::fd::FileDesc; +use crate::sys::pipe::anon_pipe; +use crate::sys_common::IntoInner; + +pub type AnonPipe = FileDesc; + +#[inline] +pub fn pipe() -> io::Result<(AnonPipe, AnonPipe)> { + anon_pipe().map(|(rx, wx)| (rx.into_inner(), wx.into_inner())) +} diff --git a/std/src/sys/args/mod.rs b/std/src/sys/args/mod.rs index 75c59da721e19..5424d40a15883 100644 --- a/std/src/sys/args/mod.rs +++ b/std/src/sys/args/mod.rs @@ -6,6 +6,7 @@ all(target_family = "unix", not(any(target_os = "espidf", target_os = "vita"))), target_family = "windows", target_os = "hermit", + target_os = "motor", target_os = "uefi", target_os = "wasi", target_os = "xous", @@ -28,6 +29,10 @@ cfg_select! { mod sgx; pub use sgx::*; } + target_os = "motor" => { + mod motor; + pub use motor::*; + } target_os = "uefi" => { mod uefi; pub use uefi::*; diff --git a/std/src/sys/args/motor.rs b/std/src/sys/args/motor.rs new file mode 100644 index 0000000000000..c3dbe87cec411 --- /dev/null +++ b/std/src/sys/args/motor.rs @@ -0,0 +1,13 @@ +pub use super::common::Args; +use crate::ffi::OsString; + +pub fn args() -> Args { + let motor_args: Vec = moto_rt::process::args(); + let mut rust_args = Vec::new(); + + for arg in motor_args { + rust_args.push(OsString::from(arg)); + } + + Args::new(rust_args) +} diff --git a/std/src/sys/env/common.rs b/std/src/sys/env/common.rs index f161ff073f3d5..87e86e2947fad 100644 --- a/std/src/sys/env/common.rs +++ b/std/src/sys/env/common.rs @@ -5,27 +5,10 @@ pub struct Env { iter: vec::IntoIter<(OsString, OsString)>, } -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. 
-pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list() - .entries(self.slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - impl Env { pub(super) fn new(env: Vec<(OsString, OsString)>) -> Self { Env { iter: env.into_iter() } } - - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - EnvStrDebug { slice: self.iter.as_slice() } - } } impl fmt::Debug for Env { diff --git a/std/src/sys/env/mod.rs b/std/src/sys/env/mod.rs index f211a9fc86b3b..89856516b6dce 100644 --- a/std/src/sys/env/mod.rs +++ b/std/src/sys/env/mod.rs @@ -5,6 +5,7 @@ #[cfg(any( target_family = "unix", target_os = "hermit", + target_os = "motor", all(target_vendor = "fortanix", target_env = "sgx"), target_os = "solid_asp3", target_os = "uefi", @@ -26,6 +27,10 @@ cfg_select! { mod hermit; pub use hermit::*; } + target_os = "motor" => { + mod motor; + pub use motor::*; + } all(target_vendor = "fortanix", target_env = "sgx") => { mod sgx; pub use sgx::*; diff --git a/std/src/sys/env/motor.rs b/std/src/sys/env/motor.rs new file mode 100644 index 0000000000000..1f756ccd3ee85 --- /dev/null +++ b/std/src/sys/env/motor.rs @@ -0,0 +1,27 @@ +pub use super::common::Env; +use crate::ffi::{OsStr, OsString}; +use crate::io; +use crate::os::motor::ffi::OsStrExt; + +pub fn env() -> Env { + let motor_env: Vec<(String, String)> = moto_rt::process::env(); + let mut rust_env = vec![]; + + for (k, v) in motor_env { + rust_env.push((OsString::from(k), OsString::from(v))); + } + + Env::new(rust_env) +} + +pub fn getenv(key: &OsStr) -> Option { + moto_rt::process::getenv(key.as_str()).map(|s| OsString::from(s)) +} + +pub unsafe fn setenv(key: &OsStr, val: &OsStr) -> io::Result<()> { + Ok(moto_rt::process::setenv(key.as_str(), val.as_str())) +} + +pub unsafe fn unsetenv(key: &OsStr) -> io::Result<()> { + Ok(moto_rt::process::unsetenv(key.as_str())) +} diff --git a/std/src/sys/env/unsupported.rs b/std/src/sys/env/unsupported.rs index 98905e6482747..a967ace95f02a 100644 --- a/std/src/sys/env/unsupported.rs +++ b/std/src/sys/env/unsupported.rs @@ -3,13 +3,6 @@ use crate::{fmt, io}; pub struct Env(!); -impl Env { - // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - self.0 - } -} - impl fmt::Debug for Env { fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { self.0 diff --git a/std/src/sys/env/windows.rs b/std/src/sys/env/windows.rs index 3c4d4a84cfd6b..219fcc4fb43f9 100644 --- a/std/src/sys/env/windows.rs +++ b/std/src/sys/env/windows.rs @@ -8,30 +8,6 @@ pub struct Env { iter: EnvIterator, } -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. 
-pub struct EnvStrDebug<'a> { - iter: &'a EnvIterator, -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - let iter: EnvIterator = (*iter).clone(); - let mut list = f.debug_list(); - for (a, b) in iter { - list.entry(&(a.to_str().unwrap(), b.to_str().unwrap())); - } - list.finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { base: _, iter } = self; - EnvStrDebug { iter } - } -} - impl fmt::Debug for Env { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let Self { base: _, iter } = self; diff --git a/std/src/sys/fd/mod.rs b/std/src/sys/fd/mod.rs index 7cb9dd1cba9d3..330499ecc18f6 100644 --- a/std/src/sys/fd/mod.rs +++ b/std/src/sys/fd/mod.rs @@ -11,6 +11,10 @@ cfg_select! { mod hermit; pub use hermit::*; } + target_os = "motor" => { + mod motor; + pub use motor::*; + } all(target_vendor = "fortanix", target_env = "sgx") => { mod sgx; pub use sgx::*; diff --git a/std/src/sys/fd/motor.rs b/std/src/sys/fd/motor.rs new file mode 100644 index 0000000000000..4211fef8007ab --- /dev/null +++ b/std/src/sys/fd/motor.rs @@ -0,0 +1,124 @@ +#![unstable(reason = "not public", issue = "none", feature = "fd")] + +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read}; +use crate::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; +use crate::sys::map_motor_error; +use crate::sys_common::{AsInner, FromInner, IntoInner}; + +#[derive(Debug)] +pub struct FileDesc(OwnedFd); + +impl FileDesc { + pub fn read(&self, buf: &mut [u8]) -> io::Result { + moto_rt::fs::read(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + crate::io::default_read_buf(|buf| self.read(buf), cursor) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + io::default_read_vectored(|b| self.read(b), bufs) + } + + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { + let mut me = self; + (&mut me).read_to_end(buf) + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + moto_rt::fs::write(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + crate::io::default_write_vectored(|b| self.write(b), bufs) + } + + pub fn is_write_vectored(&self) -> bool { + false + } + + #[inline] + pub fn is_read_vectored(&self) -> bool { + false + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + moto_rt::net::set_nonblocking(self.as_raw_fd(), nonblocking).map_err(map_motor_error) + } + + #[inline] + pub fn duplicate(&self) -> io::Result { + let fd = moto_rt::fs::duplicate(self.as_raw_fd()).map_err(map_motor_error)?; + // SAFETY: safe because we just got it from the OS runtime. 
+ unsafe { Ok(Self::from_raw_fd(fd)) } + } + + #[inline] + pub fn try_clone(&self) -> io::Result { + self.duplicate() + } +} + +impl<'a> Read for &'a FileDesc { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + (**self).read(buf) + } + + fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + (**self).read_buf(cursor) + } + + fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + (**self).read_vectored(bufs) + } + + #[inline] + fn is_read_vectored(&self) -> bool { + (**self).is_read_vectored() + } +} + +impl AsInner for FileDesc { + #[inline] + fn as_inner(&self) -> &OwnedFd { + &self.0 + } +} + +impl IntoInner for FileDesc { + fn into_inner(self) -> OwnedFd { + self.0 + } +} + +impl FromInner for FileDesc { + fn from_inner(owned_fd: OwnedFd) -> Self { + Self(owned_fd) + } +} + +impl AsFd for FileDesc { + fn as_fd(&self) -> BorrowedFd<'_> { + self.0.as_fd() + } +} + +impl AsRawFd for FileDesc { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.0.as_raw_fd() + } +} + +impl IntoRawFd for FileDesc { + fn into_raw_fd(self) -> RawFd { + self.0.into_raw_fd() + } +} + +impl FromRawFd for FileDesc { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + unsafe { Self(FromRawFd::from_raw_fd(raw_fd)) } + } +} diff --git a/std/src/sys/fs/hermit.rs b/std/src/sys/fs/hermit.rs index 175d919c289dd..21235bcfbd8c5 100644 --- a/std/src/sys/fs/hermit.rs +++ b/std/src/sys/fs/hermit.rs @@ -566,6 +566,14 @@ pub fn set_perm(_p: &Path, _perm: FilePermissions) -> io::Result<()> { Err(Error::from_raw_os_error(22)) } +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + Err(Error::from_raw_os_error(22)) +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + Err(Error::from_raw_os_error(22)) +} + pub fn rmdir(path: &Path) -> io::Result<()> { run_path_with_cstr(path, &|path| cvt(unsafe { hermit_abi::rmdir(path.as_ptr()) }).map(|_| ())) } diff --git a/std/src/sys/fs/mod.rs b/std/src/sys/fs/mod.rs index 64f5a6b36d3db..eaea28871241a 100644 --- a/std/src/sys/fs/mod.rs +++ b/std/src/sys/fs/mod.rs @@ -14,7 +14,7 @@ cfg_select! { pub use unix::chroot; pub(crate) use unix::debug_assert_fd_is_open; #[cfg(any(target_os = "linux", target_os = "android"))] - pub(crate) use unix::CachedFileMetadata; + pub(super) use unix::CachedFileMetadata; use crate::sys::common::small_c_string::run_path_with_cstr as with_native_path; } target_os = "windows" => { @@ -27,6 +27,10 @@ cfg_select! 
{ mod hermit; use hermit as imp; } + target_os = "motor" => { + mod motor; + use motor as imp; + } target_os = "solid_asp3" => { mod solid; use solid as imp; @@ -118,7 +122,7 @@ pub fn set_permissions(path: &Path, perm: FilePermissions) -> io::Result<()> { with_native_path(path, &|path| imp::set_perm(path, perm.clone())) } -#[cfg(unix)] +#[cfg(all(unix, not(target_os = "vxworks")))] pub fn set_permissions_nofollow(path: &Path, perm: crate::fs::Permissions) -> io::Result<()> { use crate::fs::OpenOptions; @@ -135,7 +139,7 @@ pub fn set_permissions_nofollow(path: &Path, perm: crate::fs::Permissions) -> io options.open(path)?.set_permissions(perm) } -#[cfg(not(unix))] +#[cfg(any(not(unix), target_os = "vxworks"))] pub fn set_permissions_nofollow(_path: &Path, _perm: crate::fs::Permissions) -> io::Result<()> { crate::unimplemented!( "`set_permissions_nofollow` is currently only implemented on Unix platforms" @@ -161,3 +165,11 @@ pub fn exists(path: &Path) -> io::Result { #[cfg(windows)] with_native_path(path, &imp::exists) } + +pub fn set_times(path: &Path, times: FileTimes) -> io::Result<()> { + with_native_path(path, &|path| imp::set_times(path, times.clone())) +} + +pub fn set_times_nofollow(path: &Path, times: FileTimes) -> io::Result<()> { + with_native_path(path, &|path| imp::set_times_nofollow(path, times.clone())) +} diff --git a/std/src/sys/fs/motor.rs b/std/src/sys/fs/motor.rs new file mode 100644 index 0000000000000..f723a23bd5dff --- /dev/null +++ b/std/src/sys/fs/motor.rs @@ -0,0 +1,486 @@ +use crate::ffi::OsString; +use crate::hash::Hash; +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; +use crate::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, RawFd}; +use crate::path::{Path, PathBuf}; +use crate::sys::fd::FileDesc; +pub use crate::sys::fs::common::exists; +use crate::sys::time::SystemTime; +use crate::sys::{map_motor_error, unsupported}; +use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct FileType { + rt_filetype: u8, +} + +impl FileType { + pub fn is_dir(&self) -> bool { + self.rt_filetype == moto_rt::fs::FILETYPE_DIRECTORY + } + + pub fn is_file(&self) -> bool { + self.rt_filetype == moto_rt::fs::FILETYPE_FILE + } + + pub fn is_symlink(&self) -> bool { + false + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FilePermissions { + rt_perm: u64, +} + +impl FilePermissions { + pub fn readonly(&self) -> bool { + (self.rt_perm & moto_rt::fs::PERM_WRITE == 0) + && (self.rt_perm & moto_rt::fs::PERM_READ != 0) + } + + pub fn set_readonly(&mut self, readonly: bool) { + if readonly { + self.rt_perm = moto_rt::fs::PERM_READ; + } else { + self.rt_perm = moto_rt::fs::PERM_READ | moto_rt::fs::PERM_WRITE; + } + } +} + +#[derive(Copy, Clone, Debug, Default)] +pub struct FileTimes { + modified: u128, + accessed: u128, +} + +impl FileTimes { + pub fn set_accessed(&mut self, t: SystemTime) { + self.accessed = t.as_u128(); + } + + pub fn set_modified(&mut self, t: SystemTime) { + self.modified = t.as_u128(); + } +} + +#[derive(Copy, Clone, PartialEq, Eq)] +pub struct FileAttr { + inner: moto_rt::fs::FileAttr, +} + +impl FileAttr { + pub fn size(&self) -> u64 { + self.inner.size + } + + pub fn perm(&self) -> FilePermissions { + FilePermissions { rt_perm: self.inner.perm } + } + + pub fn file_type(&self) -> FileType { + FileType { rt_filetype: self.inner.file_type } + } + + pub fn modified(&self) -> io::Result { + match self.inner.modified { + 0 => 
Err(crate::io::Error::from(crate::io::ErrorKind::Other)), + x => Ok(SystemTime::from_u128(x)), + } + } + + pub fn accessed(&self) -> io::Result { + match self.inner.accessed { + 0 => Err(crate::io::Error::from(crate::io::ErrorKind::Other)), + x => Ok(SystemTime::from_u128(x)), + } + } + + pub fn created(&self) -> io::Result { + match self.inner.created { + 0 => Err(crate::io::Error::from(crate::io::ErrorKind::Other)), + x => Ok(SystemTime::from_u128(x)), + } + } +} + +#[derive(Clone, Debug)] +pub struct OpenOptions { + rt_open_options: u32, +} + +impl OpenOptions { + pub fn new() -> OpenOptions { + OpenOptions { rt_open_options: 0 } + } + + pub fn read(&mut self, read: bool) { + if read { + self.rt_open_options |= moto_rt::fs::O_READ; + } else { + self.rt_open_options &= !moto_rt::fs::O_READ; + } + } + + pub fn write(&mut self, write: bool) { + if write { + self.rt_open_options |= moto_rt::fs::O_WRITE; + } else { + self.rt_open_options &= !moto_rt::fs::O_WRITE; + } + } + + pub fn append(&mut self, append: bool) { + if append { + self.rt_open_options |= moto_rt::fs::O_APPEND; + } else { + self.rt_open_options &= !moto_rt::fs::O_APPEND; + } + } + + pub fn truncate(&mut self, truncate: bool) { + if truncate { + self.rt_open_options |= moto_rt::fs::O_TRUNCATE; + } else { + self.rt_open_options &= !moto_rt::fs::O_TRUNCATE; + } + } + + pub fn create(&mut self, create: bool) { + if create { + self.rt_open_options |= moto_rt::fs::O_CREATE; + } else { + self.rt_open_options &= !moto_rt::fs::O_CREATE; + } + } + + pub fn create_new(&mut self, create_new: bool) { + if create_new { + self.rt_open_options |= moto_rt::fs::O_CREATE_NEW; + } else { + self.rt_open_options &= !moto_rt::fs::O_CREATE_NEW; + } + } +} + +#[derive(Debug)] +pub struct File(FileDesc); + +impl File { + pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::open(path, opts.rt_open_options) + .map(|fd| unsafe { Self::from_raw_fd(fd) }) + .map_err(map_motor_error) + } + + pub fn file_attr(&self) -> io::Result { + moto_rt::fs::get_file_attr(self.as_raw_fd()) + .map(|inner| -> FileAttr { FileAttr { inner } }) + .map_err(map_motor_error) + } + + pub fn fsync(&self) -> io::Result<()> { + moto_rt::fs::fsync(self.as_raw_fd()).map_err(map_motor_error) + } + + pub fn datasync(&self) -> io::Result<()> { + moto_rt::fs::datasync(self.as_raw_fd()).map_err(map_motor_error) + } + + pub fn truncate(&self, size: u64) -> io::Result<()> { + moto_rt::fs::truncate(self.as_raw_fd(), size).map_err(map_motor_error) + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + moto_rt::fs::read(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + crate::io::default_read_vectored(|b| self.read(b), bufs) + } + + pub fn is_read_vectored(&self) -> bool { + false + } + + pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + crate::io::default_read_buf(|buf| self.read(buf), cursor) + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + moto_rt::fs::write(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + crate::io::default_write_vectored(|b| self.write(b), bufs) + } + + pub fn is_write_vectored(&self) -> bool { + false + } + + pub fn flush(&self) -> io::Result<()> { + moto_rt::fs::flush(self.as_raw_fd()).map_err(map_motor_error) + } + + pub fn seek(&self, pos: SeekFrom) -> io::Result { + 
match pos { + SeekFrom::Start(offset) => { + moto_rt::fs::seek(self.as_raw_fd(), offset as i64, moto_rt::fs::SEEK_SET) + .map_err(map_motor_error) + } + SeekFrom::End(offset) => { + moto_rt::fs::seek(self.as_raw_fd(), offset, moto_rt::fs::SEEK_END) + .map_err(map_motor_error) + } + SeekFrom::Current(offset) => { + moto_rt::fs::seek(self.as_raw_fd(), offset, moto_rt::fs::SEEK_CUR) + .map_err(map_motor_error) + } + } + } + + pub fn tell(&self) -> io::Result { + self.seek(SeekFrom::Current(0)) + } + + pub fn duplicate(&self) -> io::Result { + moto_rt::fs::duplicate(self.as_raw_fd()) + .map(|fd| unsafe { Self::from_raw_fd(fd) }) + .map_err(map_motor_error) + } + + pub fn set_permissions(&self, perm: FilePermissions) -> io::Result<()> { + moto_rt::fs::set_file_perm(self.as_raw_fd(), perm.rt_perm).map_err(map_motor_error) + } + + pub fn set_times(&self, _times: FileTimes) -> io::Result<()> { + unsupported() // Let's not do that. + } + + pub fn lock(&self) -> io::Result<()> { + unsupported() + } + + pub fn lock_shared(&self) -> io::Result<()> { + unsupported() + } + + pub fn try_lock(&self) -> Result<(), crate::fs::TryLockError> { + Err(crate::fs::TryLockError::Error(io::Error::from(io::ErrorKind::Unsupported))) + } + + pub fn try_lock_shared(&self) -> Result<(), crate::fs::TryLockError> { + Err(crate::fs::TryLockError::Error(io::Error::from(io::ErrorKind::Unsupported))) + } + + pub fn unlock(&self) -> io::Result<()> { + unsupported() + } + + pub fn size(&self) -> Option> { + None + } +} + +#[derive(Debug)] +pub struct DirBuilder {} + +impl DirBuilder { + pub fn new() -> DirBuilder { + DirBuilder {} + } + + pub fn mkdir(&self, path: &Path) -> io::Result<()> { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::mkdir(path).map_err(map_motor_error) + } +} + +pub fn unlink(path: &Path) -> io::Result<()> { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::unlink(path).map_err(map_motor_error) +} + +pub fn rename(old: &Path, new: &Path) -> io::Result<()> { + let old = old.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + let new = new.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::rename(old, new).map_err(map_motor_error) +} + +pub fn rmdir(path: &Path) -> io::Result<()> { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::rmdir(path).map_err(map_motor_error) +} + +pub fn remove_dir_all(path: &Path) -> io::Result<()> { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::rmdir_all(path).map_err(map_motor_error) +} + +pub fn set_perm(path: &Path, perm: FilePermissions) -> io::Result<()> { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::set_perm(path, perm.rt_perm).map_err(map_motor_error) +} + +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn readlink(_p: &Path) -> io::Result { + unsupported() +} + +pub fn symlink(_original: &Path, _link: &Path) -> io::Result<()> { + unsupported() +} + +pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> { + unsupported() +} + +pub fn stat(path: &Path) -> io::Result { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + let inner = moto_rt::fs::stat(path).map_err(map_motor_error)?; + Ok(FileAttr { inner 
}) +} + +pub fn lstat(path: &Path) -> io::Result { + stat(path) +} + +pub fn canonicalize(path: &Path) -> io::Result { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + let path = moto_rt::fs::canonicalize(path).map_err(map_motor_error)?; + Ok(path.into()) +} + +pub fn copy(from: &Path, to: &Path) -> io::Result { + let from = from.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + let to = to.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + moto_rt::fs::copy(from, to).map_err(map_motor_error) +} + +#[derive(Debug)] +pub struct ReadDir { + rt_fd: moto_rt::RtFd, + path: String, +} + +impl Drop for ReadDir { + fn drop(&mut self) { + moto_rt::fs::closedir(self.rt_fd).unwrap(); + } +} + +pub fn readdir(path: &Path) -> io::Result { + let path = path.to_str().ok_or(io::Error::from(io::ErrorKind::InvalidFilename))?; + Ok(ReadDir { + rt_fd: moto_rt::fs::opendir(path).map_err(map_motor_error)?, + path: path.to_owned(), + }) +} + +impl Iterator for ReadDir { + type Item = io::Result; + + fn next(&mut self) -> Option> { + match moto_rt::fs::readdir(self.rt_fd).map_err(map_motor_error) { + Ok(maybe_item) => match maybe_item { + Some(inner) => Some(Ok(DirEntry { inner, parent_path: self.path.clone() })), + None => None, + }, + Err(err) => Some(Err(err)), + } + } +} + +pub struct DirEntry { + parent_path: String, + inner: moto_rt::fs::DirEntry, +} + +impl DirEntry { + fn filename(&self) -> &str { + core::str::from_utf8(unsafe { + core::slice::from_raw_parts(self.inner.fname.as_ptr(), self.inner.fname_size as usize) + }) + .unwrap() + } + + pub fn path(&self) -> PathBuf { + let mut path = self.parent_path.clone(); + path.push_str("/"); + path.push_str(self.filename()); + path.into() + } + + pub fn file_name(&self) -> OsString { + self.filename().to_owned().into() + } + + pub fn metadata(&self) -> io::Result { + Ok(FileAttr { inner: self.inner.attr }) + } + + pub fn file_type(&self) -> io::Result { + Ok(FileType { rt_filetype: self.inner.attr.file_type }) + } +} + +impl AsInner for File { + #[inline] + fn as_inner(&self) -> &FileDesc { + &self.0 + } +} + +impl AsInnerMut for File { + #[inline] + fn as_inner_mut(&mut self) -> &mut FileDesc { + &mut self.0 + } +} + +impl IntoInner for File { + fn into_inner(self) -> FileDesc { + self.0 + } +} + +impl FromInner for File { + fn from_inner(file_desc: FileDesc) -> Self { + Self(file_desc) + } +} + +impl AsFd for File { + #[inline] + fn as_fd(&self) -> BorrowedFd<'_> { + self.0.as_fd() + } +} + +impl AsRawFd for File { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.0.as_raw_fd() + } +} + +impl IntoRawFd for File { + fn into_raw_fd(self) -> RawFd { + self.0.into_raw_fd() + } +} + +impl FromRawFd for File { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + unsafe { Self(FromRawFd::from_raw_fd(raw_fd)) } + } +} diff --git a/std/src/sys/fs/solid.rs b/std/src/sys/fs/solid.rs index 808a95829114e..f6d5d3b784d3b 100644 --- a/std/src/sys/fs/solid.rs +++ b/std/src/sys/fs/solid.rs @@ -538,6 +538,14 @@ pub fn set_perm(p: &Path, perm: FilePermissions) -> io::Result<()> { Ok(()) } +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + pub fn rmdir(p: &Path) -> io::Result<()> { if stat(p)?.file_type().is_dir() { error::SolidError::err_if_negative(unsafe { abi::SOLID_FS_Unlink(cstr(p)?.as_ptr()) }) diff --git a/std/src/sys/fs/uefi.rs b/std/src/sys/fs/uefi.rs index 
5763d7862f5ae..18c1501a655fe 100644 --- a/std/src/sys/fs/uefi.rs +++ b/std/src/sys/fs/uefi.rs @@ -7,7 +7,7 @@ use crate::hash::Hash; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; use crate::path::{Path, PathBuf}; use crate::sys::time::SystemTime; -use crate::sys::unsupported; +use crate::sys::{helpers, unsupported}; #[expect(dead_code)] const FILE_PERMISSIONS_MASK: u64 = r_efi::protocols::file::READ_ONLY; @@ -18,6 +18,9 @@ pub struct File(!); pub struct FileAttr { attr: u64, size: u64, + accessed: SystemTime, + modified: SystemTime, + created: SystemTime, } pub struct ReadDir(!); @@ -33,7 +36,10 @@ pub struct OpenOptions { } #[derive(Copy, Clone, Debug, Default)] -pub struct FileTimes {} +pub struct FileTimes { + accessed: Option, + modified: Option, +} #[derive(Clone, PartialEq, Eq, Debug)] // Bool indicates if file is readonly @@ -60,15 +66,27 @@ impl FileAttr { } pub fn modified(&self) -> io::Result { - unsupported() + Ok(self.modified) } pub fn accessed(&self) -> io::Result { - unsupported() + Ok(self.accessed) } pub fn created(&self) -> io::Result { - unsupported() + Ok(self.created) + } + + fn from_uefi(info: helpers::UefiBox) -> Self { + unsafe { + Self { + attr: (*info.as_ptr()).attribute, + size: (*info.as_ptr()).size, + modified: uefi_fs::uefi_to_systemtime((*info.as_ptr()).modification_time), + accessed: uefi_fs::uefi_to_systemtime((*info.as_ptr()).last_access_time), + created: uefi_fs::uefi_to_systemtime((*info.as_ptr()).create_time), + } + } } } @@ -92,8 +110,13 @@ impl FilePermissions { } impl FileTimes { - pub fn set_accessed(&mut self, _t: SystemTime) {} - pub fn set_modified(&mut self, _t: SystemTime) {} + pub fn set_accessed(&mut self, t: SystemTime) { + self.accessed = Some(t); + } + + pub fn set_modified(&mut self, t: SystemTime) { + self.modified = Some(t); + } } impl FileType { @@ -333,6 +356,14 @@ pub fn set_perm(_p: &Path, _perm: FilePermissions) -> io::Result<()> { unsupported() } +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + pub fn rmdir(_p: &Path) -> io::Result<()> { unsupported() } @@ -362,8 +393,10 @@ pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> { unsupported() } -pub fn stat(_p: &Path) -> io::Result { - unsupported() +pub fn stat(p: &Path) -> io::Result { + let f = uefi_fs::File::from_path(p, r_efi::protocols::file::MODE_READ, 0)?; + let inf = f.file_info()?; + Ok(FileAttr::from_uefi(inf)) } pub fn lstat(p: &Path) -> io::Result { @@ -385,7 +418,8 @@ mod uefi_fs { use crate::io; use crate::path::Path; use crate::ptr::NonNull; - use crate::sys::helpers; + use crate::sys::helpers::{self, UefiBox}; + use crate::sys::time::{self, SystemTime}; pub(crate) struct File(NonNull); @@ -472,6 +506,37 @@ mod uefi_fs { let p = NonNull::new(file_opened).unwrap(); Ok(File(p)) } + + pub(crate) fn file_info(&self) -> io::Result> { + let file_ptr = self.0.as_ptr(); + let mut info_id = file::INFO_ID; + let mut buf_size = 0; + + let r = unsafe { + ((*file_ptr).get_info)( + file_ptr, + &mut info_id, + &mut buf_size, + crate::ptr::null_mut(), + ) + }; + assert!(r.is_error()); + if r != r_efi::efi::Status::BUFFER_TOO_SMALL { + return Err(io::Error::from_raw_os_error(r.as_usize())); + } + + let mut info: UefiBox = UefiBox::new(buf_size)?; + let r = unsafe { + ((*file_ptr).get_info)( + file_ptr, + &mut info_id, + &mut buf_size, + info.as_mut_ptr().cast(), + ) + }; + + if r.is_error() { 
Err(io::Error::from_raw_os_error(r.as_usize())) } else { Ok(info) } + } } impl Drop for File { @@ -533,4 +598,22 @@ mod uefi_fs { Ok(()) } + + /// EDK2 FAT driver uses EFI_UNSPECIFIED_TIMEZONE to represent localtime. So for proper + /// conversion to SystemTime, we use the current time to get the timezone in such cases. + pub(crate) fn uefi_to_systemtime(mut time: r_efi::efi::Time) -> SystemTime { + time.timezone = if time.timezone == r_efi::efi::UNSPECIFIED_TIMEZONE { + time::system_time_internal::now().unwrap().timezone + } else { + time.timezone + }; + SystemTime::from_uefi(time) + } + + /// Convert to UEFI Time with the current timezone. + #[expect(dead_code)] + fn systemtime_to_uefi(time: SystemTime) -> r_efi::efi::Time { + let now = time::system_time_internal::now().unwrap(); + time.to_uefi_loose(now.timezone, now.daylight) + } } diff --git a/std/src/sys/fs/unix.rs b/std/src/sys/fs/unix.rs index 33a1e7ff5e40e..47d9ee226653e 100644 --- a/std/src/sys/fs/unix.rs +++ b/std/src/sys/fs/unix.rs @@ -14,9 +14,10 @@ use libc::c_char; target_os = "fuchsia", target_os = "hurd", target_os = "illumos", + target_vendor = "apple", ))] use libc::dirfd; -#[cfg(any(target_os = "fuchsia", target_os = "illumos"))] +#[cfg(any(target_os = "fuchsia", target_os = "illumos", target_vendor = "apple"))] use libc::fstatat as fstatat64; #[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "hurd"))] use libc::fstatat64; @@ -907,6 +908,7 @@ impl DirEntry { target_os = "fuchsia", target_os = "hurd", target_os = "illumos", + target_vendor = "apple", ), not(miri) // no dirfd on Miri ))] @@ -937,6 +939,7 @@ impl DirEntry { target_os = "fuchsia", target_os = "hurd", target_os = "illumos", + target_vendor = "apple", )), miri ))] @@ -1292,6 +1295,8 @@ impl File { target_os = "netbsd", target_os = "openbsd", target_os = "cygwin", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", ))] pub fn lock(&self) -> io::Result<()> { @@ -1316,6 +1321,8 @@ impl File { target_os = "openbsd", target_os = "cygwin", target_os = "solaris", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", )))] pub fn lock(&self) -> io::Result<()> { @@ -1329,6 +1336,8 @@ impl File { target_os = "netbsd", target_os = "openbsd", target_os = "cygwin", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", ))] pub fn lock_shared(&self) -> io::Result<()> { @@ -1353,6 +1362,8 @@ impl File { target_os = "openbsd", target_os = "cygwin", target_os = "solaris", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", )))] pub fn lock_shared(&self) -> io::Result<()> { @@ -1366,6 +1377,8 @@ impl File { target_os = "netbsd", target_os = "openbsd", target_os = "cygwin", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", ))] pub fn try_lock(&self) -> Result<(), TryLockError> { @@ -1406,6 +1419,8 @@ impl File { target_os = "openbsd", target_os = "cygwin", target_os = "solaris", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", )))] pub fn try_lock(&self) -> Result<(), TryLockError> { @@ -1422,6 +1437,8 @@ impl File { target_os = "netbsd", target_os = "openbsd", target_os = "cygwin", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", ))] pub fn try_lock_shared(&self) -> Result<(), TryLockError> { @@ -1462,6 +1479,8 @@ impl File { target_os = "openbsd", target_os = "cygwin", target_os = "solaris", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", )))] pub fn try_lock_shared(&self) -> Result<(), 
TryLockError> { @@ -1478,6 +1497,8 @@ impl File { target_os = "netbsd", target_os = "openbsd", target_os = "cygwin", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", ))] pub fn unlock(&self) -> io::Result<()> { @@ -1502,6 +1523,8 @@ impl File { target_os = "openbsd", target_os = "cygwin", target_os = "solaris", + target_os = "illumos", + target_os = "aix", target_vendor = "apple", )))] pub fn unlock(&self) -> io::Result<()> { @@ -1604,24 +1627,6 @@ impl File { } pub fn set_times(&self, times: FileTimes) -> io::Result<()> { - #[cfg(not(any( - target_os = "redox", - target_os = "espidf", - target_os = "horizon", - target_os = "nuttx", - )))] - let to_timespec = |time: Option| match time { - Some(time) if let Some(ts) = time.t.to_timespec() => Ok(ts), - Some(time) if time > crate::sys::time::UNIX_EPOCH => Err(io::const_error!( - io::ErrorKind::InvalidInput, - "timestamp is too large to set as a file time", - )), - Some(_) => Err(io::const_error!( - io::ErrorKind::InvalidInput, - "timestamp is too small to set as a file time", - )), - None => Ok(libc::timespec { tv_sec: 0, tv_nsec: libc::UTIME_OMIT as _ }), - }; cfg_select! { any(target_os = "redox", target_os = "espidf", target_os = "horizon", target_os = "nuttx") => { // Redox doesn't appear to support `UTIME_OMIT`. @@ -1634,36 +1639,18 @@ impl File { )) } target_vendor = "apple" => { - let mut buf = [mem::MaybeUninit::::uninit(); 3]; - let mut num_times = 0; - let mut attrlist: libc::attrlist = unsafe { mem::zeroed() }; - attrlist.bitmapcount = libc::ATTR_BIT_MAP_COUNT; - if times.created.is_some() { - buf[num_times].write(to_timespec(times.created)?); - num_times += 1; - attrlist.commonattr |= libc::ATTR_CMN_CRTIME; - } - if times.modified.is_some() { - buf[num_times].write(to_timespec(times.modified)?); - num_times += 1; - attrlist.commonattr |= libc::ATTR_CMN_MODTIME; - } - if times.accessed.is_some() { - buf[num_times].write(to_timespec(times.accessed)?); - num_times += 1; - attrlist.commonattr |= libc::ATTR_CMN_ACCTIME; - } + let ta = TimesAttrlist::from_times(×)?; cvt(unsafe { libc::fsetattrlist( self.as_raw_fd(), - (&raw const attrlist).cast::().cast_mut(), - buf.as_ptr().cast::().cast_mut(), - num_times * size_of::(), + ta.attrlist(), + ta.times_buf(), + ta.times_buf_size(), 0 ) })?; Ok(()) } target_os = "android" => { - let times = [to_timespec(times.accessed)?, to_timespec(times.modified)?]; + let times = [file_time_to_timespec(times.accessed)?, file_time_to_timespec(times.modified)?]; // futimens requires Android API level 19 cvt(unsafe { weak!( @@ -1697,7 +1684,7 @@ impl File { return Ok(()); } } - let times = [to_timespec(times.accessed)?, to_timespec(times.modified)?]; + let times = [file_time_to_timespec(times.accessed)?, file_time_to_timespec(times.modified)?]; cvt(unsafe { libc::futimens(self.as_raw_fd(), times.as_ptr()) })?; Ok(()) } @@ -1705,6 +1692,74 @@ impl File { } } +#[cfg(not(any( + target_os = "redox", + target_os = "espidf", + target_os = "horizon", + target_os = "nuttx", +)))] +fn file_time_to_timespec(time: Option) -> io::Result { + match time { + Some(time) if let Some(ts) = time.t.to_timespec() => Ok(ts), + Some(time) if time > crate::sys::time::UNIX_EPOCH => Err(io::const_error!( + io::ErrorKind::InvalidInput, + "timestamp is too large to set as a file time", + )), + Some(_) => Err(io::const_error!( + io::ErrorKind::InvalidInput, + "timestamp is too small to set as a file time", + )), + None => Ok(libc::timespec { tv_sec: 0, tv_nsec: libc::UTIME_OMIT as _ }), + } +} + 
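+// For reference (illustrative only, not part of this change): the public API that
+// these helpers ultimately back is `File::set_times`, e.g.
+//
+//     use std::fs::{File, FileTimes};
+//     use std::time::SystemTime;
+//
+//     let file = File::options().write(true).open("foo.txt")?;
+//     file.set_times(FileTimes::new().set_modified(SystemTime::now()))?;
+
+/// Packs the timestamps of a `FileTimes` into the `attrlist`/`timespec` buffer layout
+/// expected by `fsetattrlist`/`setattrlist` on Apple platforms, so that `File::set_times`
+/// and the path-based `set_times`/`set_times_nofollow` below can share one implementation.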
+#[cfg(target_vendor = "apple")] +struct TimesAttrlist { + buf: [mem::MaybeUninit; 3], + attrlist: libc::attrlist, + num_times: usize, +} + +#[cfg(target_vendor = "apple")] +impl TimesAttrlist { + fn from_times(times: &FileTimes) -> io::Result { + let mut this = Self { + buf: [mem::MaybeUninit::::uninit(); 3], + attrlist: unsafe { mem::zeroed() }, + num_times: 0, + }; + this.attrlist.bitmapcount = libc::ATTR_BIT_MAP_COUNT; + if times.created.is_some() { + this.buf[this.num_times].write(file_time_to_timespec(times.created)?); + this.num_times += 1; + this.attrlist.commonattr |= libc::ATTR_CMN_CRTIME; + } + if times.modified.is_some() { + this.buf[this.num_times].write(file_time_to_timespec(times.modified)?); + this.num_times += 1; + this.attrlist.commonattr |= libc::ATTR_CMN_MODTIME; + } + if times.accessed.is_some() { + this.buf[this.num_times].write(file_time_to_timespec(times.accessed)?); + this.num_times += 1; + this.attrlist.commonattr |= libc::ATTR_CMN_ACCTIME; + } + Ok(this) + } + + fn attrlist(&self) -> *mut libc::c_void { + (&raw const self.attrlist).cast::().cast_mut() + } + + fn times_buf(&self) -> *mut libc::c_void { + self.buf.as_ptr().cast::().cast_mut() + } + + fn times_buf_size(&self) -> usize { + self.num_times * size_of::() + } +} + impl DirBuilder { pub fn new() -> DirBuilder { DirBuilder { mode: 0o777 } @@ -2081,6 +2136,87 @@ fn open_from(from: &Path) -> io::Result<(crate::fs::File, crate::fs::Metadata)> Ok((reader, metadata)) } +fn set_times_impl(p: &CStr, times: FileTimes, follow_symlinks: bool) -> io::Result<()> { + cfg_select! { + any(target_os = "redox", target_os = "espidf", target_os = "horizon", target_os = "nuttx") => { + let _ = (p, times, follow_symlinks); + Err(io::const_error!( + io::ErrorKind::Unsupported, + "setting file times not supported", + )) + } + target_vendor = "apple" => { + // Apple platforms use setattrlist which supports setting times on symlinks + let ta = TimesAttrlist::from_times(×)?; + let options = if follow_symlinks { + 0 + } else { + libc::FSOPT_NOFOLLOW + }; + + cvt(unsafe { libc::setattrlist( + p.as_ptr(), + ta.attrlist(), + ta.times_buf(), + ta.times_buf_size(), + options as u32 + ) })?; + Ok(()) + } + target_os = "android" => { + let times = [file_time_to_timespec(times.accessed)?, file_time_to_timespec(times.modified)?]; + let flags = if follow_symlinks { 0 } else { libc::AT_SYMLINK_NOFOLLOW }; + // utimensat requires Android API level 19 + cvt(unsafe { + weak!( + fn utimensat(dirfd: c_int, path: *const libc::c_char, times: *const libc::timespec, flags: c_int) -> c_int; + ); + match utimensat.get() { + Some(utimensat) => utimensat(libc::AT_FDCWD, p.as_ptr(), times.as_ptr(), flags), + None => return Err(io::const_error!( + io::ErrorKind::Unsupported, + "setting file times requires Android API level >= 19", + )), + } + })?; + Ok(()) + } + _ => { + let flags = if follow_symlinks { 0 } else { libc::AT_SYMLINK_NOFOLLOW }; + #[cfg(all(target_os = "linux", target_env = "gnu", target_pointer_width = "32", not(target_arch = "riscv32")))] + { + use crate::sys::{time::__timespec64, weak::weak}; + + // Added in glibc 2.34 + weak!( + fn __utimensat64(dirfd: c_int, path: *const c_char, times: *const __timespec64, flags: c_int) -> c_int; + ); + + if let Some(utimensat64) = __utimensat64.get() { + let to_timespec = |time: Option| time.map(|time| time.t.to_timespec64()) + .unwrap_or(__timespec64::new(0, libc::UTIME_OMIT as _)); + let times = [to_timespec(times.accessed), to_timespec(times.modified)]; + cvt(unsafe { utimensat64(libc::AT_FDCWD, 
p.as_ptr(), times.as_ptr(), flags) })?; + return Ok(()); + } + } + let times = [file_time_to_timespec(times.accessed)?, file_time_to_timespec(times.modified)?]; + cvt(unsafe { libc::utimensat(libc::AT_FDCWD, p.as_ptr(), times.as_ptr(), flags) })?; + Ok(()) + } + } +} + +#[inline(always)] +pub fn set_times(p: &CStr, times: FileTimes) -> io::Result<()> { + set_times_impl(p, times, true) +} + +#[inline(always)] +pub fn set_times_nofollow(p: &CStr, times: FileTimes) -> io::Result<()> { + set_times_impl(p, times, false) +} + #[cfg(target_os = "espidf")] fn open_to_and_set_permissions( to: &Path, @@ -2166,7 +2302,7 @@ mod cfm { } } #[cfg(any(target_os = "linux", target_os = "android"))] -pub(crate) use cfm::CachedFileMetadata; +pub(in crate::sys) use cfm::CachedFileMetadata; #[cfg(not(target_vendor = "apple"))] pub fn copy(from: &Path, to: &Path) -> io::Result { diff --git a/std/src/sys/fs/unsupported.rs b/std/src/sys/fs/unsupported.rs index efaddb51b3751..f222151d18e25 100644 --- a/std/src/sys/fs/unsupported.rs +++ b/std/src/sys/fs/unsupported.rs @@ -312,6 +312,14 @@ pub fn set_perm(_p: &Path, perm: FilePermissions) -> io::Result<()> { match perm.0 {} } +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + pub fn rmdir(_p: &Path) -> io::Result<()> { unsupported() } diff --git a/std/src/sys/fs/vexos.rs b/std/src/sys/fs/vexos.rs index f642e7cb074ec..381c87c62c688 100644 --- a/std/src/sys/fs/vexos.rs +++ b/std/src/sys/fs/vexos.rs @@ -106,7 +106,7 @@ impl FilePermissions { } pub fn set_readonly(&mut self, _readonly: bool) { - panic!("Perimissions do not exist") + panic!("Permissions do not exist") } } @@ -492,6 +492,14 @@ pub fn set_perm(_p: &Path, _perm: FilePermissions) -> io::Result<()> { unsupported() } +pub fn set_times(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + +pub fn set_times_nofollow(_p: &Path, _times: FileTimes) -> io::Result<()> { + unsupported() +} + pub fn exists(path: &Path) -> io::Result { run_path_with_cstr(path, &|path| Ok(unsafe { vex_sdk::vexFileStatus(path.as_ptr()) } != 0)) } diff --git a/std/src/sys/fs/wasi.rs b/std/src/sys/fs/wasi.rs index 0b65b9cb389df..92eb35317415f 100644 --- a/std/src/sys/fs/wasi.rs +++ b/std/src/sys/fs/wasi.rs @@ -536,17 +536,9 @@ impl File { } pub fn set_times(&self, times: FileTimes) -> io::Result<()> { - let to_timestamp = |time: Option| match time { - Some(time) if let Some(ts) = time.to_wasi_timestamp() => Ok(ts), - Some(_) => Err(io::const_error!( - io::ErrorKind::InvalidInput, - "timestamp is too large to set as a file time", - )), - None => Ok(0), - }; self.fd.filestat_set_times( - to_timestamp(times.accessed)?, - to_timestamp(times.modified)?, + to_wasi_timestamp_or_now(times.accessed)?, + to_wasi_timestamp_or_now(times.modified)?, times.accessed.map_or(0, |_| wasi::FSTFLAGS_ATIM) | times.modified.map_or(0, |_| wasi::FSTFLAGS_MTIM), ) @@ -643,6 +635,45 @@ pub fn set_perm(_p: &Path, _perm: FilePermissions) -> io::Result<()> { unsupported() } +#[inline(always)] +pub fn set_times(p: &Path, times: FileTimes) -> io::Result<()> { + let (dir, file) = open_parent(p)?; + set_times_impl(&dir, &file, times, wasi::LOOKUPFLAGS_SYMLINK_FOLLOW) +} + +#[inline(always)] +pub fn set_times_nofollow(p: &Path, times: FileTimes) -> io::Result<()> { + let (dir, file) = open_parent(p)?; + set_times_impl(&dir, &file, times, 0) +} + +fn to_wasi_timestamp_or_now(time: Option) -> io::Result { + match time { + 
Some(time) if let Some(ts) = time.to_wasi_timestamp() => Ok(ts), + Some(_) => Err(io::const_error!( + io::ErrorKind::InvalidInput, + "timestamp is too large to set as a file time", + )), + None => Ok(0), + } +} + +fn set_times_impl( + fd: &WasiFd, + path: &Path, + times: FileTimes, + flags: wasi::Lookupflags, +) -> io::Result<()> { + fd.path_filestat_set_times( + flags, + osstr2str(path.as_ref())?, + to_wasi_timestamp_or_now(times.accessed)?, + to_wasi_timestamp_or_now(times.modified)?, + times.accessed.map_or(0, |_| wasi::FSTFLAGS_ATIM) + | times.modified.map_or(0, |_| wasi::FSTFLAGS_MTIM), + ) +} + pub fn rmdir(p: &Path) -> io::Result<()> { let (dir, file) = open_parent(p)?; dir.remove_directory(osstr2str(file.as_ref())?) diff --git a/std/src/sys/fs/windows.rs b/std/src/sys/fs/windows.rs index ccfe410627f70..f2d325da35c7d 100644 --- a/std/src/sys/fs/windows.rs +++ b/std/src/sys/fs/windows.rs @@ -1514,6 +1514,23 @@ pub fn set_perm(p: &WCStr, perm: FilePermissions) -> io::Result<()> { } } +pub fn set_times(p: &WCStr, times: FileTimes) -> io::Result<()> { + let mut opts = OpenOptions::new(); + opts.write(true); + opts.custom_flags(c::FILE_FLAG_BACKUP_SEMANTICS); + let file = File::open_native(p, &opts)?; + file.set_times(times) +} + +pub fn set_times_nofollow(p: &WCStr, times: FileTimes) -> io::Result<()> { + let mut opts = OpenOptions::new(); + opts.write(true); + // `FILE_FLAG_OPEN_REPARSE_POINT` for no_follow behavior + opts.custom_flags(c::FILE_FLAG_BACKUP_SEMANTICS | c::FILE_FLAG_OPEN_REPARSE_POINT); + let file = File::open_native(p, &opts)?; + file.set_times(times) +} + fn get_path(f: &File) -> io::Result { fill_utf16_buf( |buf, sz| unsafe { diff --git a/std/src/sys/io/is_terminal/motor.rs b/std/src/sys/io/is_terminal/motor.rs new file mode 100644 index 0000000000000..0b70299adaaa2 --- /dev/null +++ b/std/src/sys/io/is_terminal/motor.rs @@ -0,0 +1,6 @@ +use crate::os::fd::{AsFd, AsRawFd}; + +pub fn is_terminal(fd: &impl AsFd) -> bool { + let fd = fd.as_fd(); + moto_rt::fs::is_terminal(fd.as_raw_fd()) +} diff --git a/std/src/sys/pal/unix/kernel_copy.rs b/std/src/sys/io/kernel_copy/linux.rs similarity index 97% rename from std/src/sys/pal/unix/kernel_copy.rs rename to std/src/sys/io/kernel_copy/linux.rs index b984afa149d06..1c00d317f2a52 100644 --- a/std/src/sys/pal/unix/kernel_copy.rs +++ b/std/src/sys/io/kernel_copy/linux.rs @@ -48,9 +48,9 @@ use libc::sendfile as sendfile64; use libc::sendfile64; use libc::{EBADF, EINVAL, ENOSYS, EOPNOTSUPP, EOVERFLOW, EPERM, EXDEV}; +use super::CopyState; use crate::cmp::min; use crate::fs::{File, Metadata}; -use crate::io::copy::generic_copy; use crate::io::{ BufRead, BufReader, BufWriter, Error, PipeReader, PipeWriter, Read, Result, StderrLock, StdinLock, StdoutLock, Take, Write, @@ -70,10 +70,10 @@ use crate::sys::weak::syscall; #[cfg(test)] mod tests; -pub(crate) fn copy_spec( +pub fn kernel_copy( read: &mut R, write: &mut W, -) -> Result { +) -> Result { let copier = Copier { read, write }; SpecCopy::copy(copier) } @@ -176,17 +176,17 @@ struct Copier<'a, 'b, R: Read + ?Sized, W: Write + ?Sized> { } trait SpecCopy { - fn copy(self) -> Result; + fn copy(self) -> Result; } impl SpecCopy for Copier<'_, '_, R, W> { - default fn copy(self) -> Result { - generic_copy(self.read, self.write) + default fn copy(self) -> Result { + Ok(CopyState::Fallback(0)) } } impl SpecCopy for Copier<'_, '_, R, W> { - fn copy(self) -> Result { + fn copy(self) -> Result { let (reader, writer) = (self.read, self.write); let r_cfg = reader.properties(); let w_cfg = 
writer.properties(); @@ -214,7 +214,9 @@ impl SpecCopy for Copier<'_, '_, R, W> { result.update_take(reader); match result { - CopyResult::Ended(bytes_copied) => return Ok(bytes_copied + written), + CopyResult::Ended(bytes_copied) => { + return Ok(CopyState::Ended(bytes_copied + written)); + } CopyResult::Error(e, _) => return Err(e), CopyResult::Fallback(bytes) => written += bytes, } @@ -231,7 +233,9 @@ impl SpecCopy for Copier<'_, '_, R, W> { result.update_take(reader); match result { - CopyResult::Ended(bytes_copied) => return Ok(bytes_copied + written), + CopyResult::Ended(bytes_copied) => { + return Ok(CopyState::Ended(bytes_copied + written)); + } CopyResult::Error(e, _) => return Err(e), CopyResult::Fallback(bytes) => written += bytes, } @@ -244,7 +248,9 @@ impl SpecCopy for Copier<'_, '_, R, W> { result.update_take(reader); match result { - CopyResult::Ended(bytes_copied) => return Ok(bytes_copied + written), + CopyResult::Ended(bytes_copied) => { + return Ok(CopyState::Ended(bytes_copied + written)); + } CopyResult::Error(e, _) => return Err(e), CopyResult::Fallback(0) => { /* use the fallback below */ } CopyResult::Fallback(_) => { @@ -255,10 +261,7 @@ impl SpecCopy for Copier<'_, '_, R, W> { } // fallback if none of the more specialized syscalls wants to work with these file descriptors - match generic_copy(reader, writer) { - Ok(bytes) => Ok(bytes + written), - err => err, - } + Ok(CopyState::Fallback(written)) } } @@ -558,7 +561,7 @@ fn fd_to_meta(fd: &T) -> FdMeta { } } -pub(super) enum CopyResult { +enum CopyResult { Ended(u64), Error(Error, u64), Fallback(u64), @@ -587,7 +590,7 @@ const INVALID_FD: RawFd = -1; /// Callers must handle fallback to a generic copy loop. /// `Fallback` may indicate non-zero number of bytes already written /// if one of the files' cursor +`max_len` would exceed u64::MAX (`EOVERFLOW`). -pub(super) fn copy_regular_files(reader: RawFd, writer: RawFd, max_len: u64) -> CopyResult { +fn copy_regular_files(reader: RawFd, writer: RawFd, max_len: u64) -> CopyResult { use crate::cmp; const NOT_PROBED: u8 = 0; diff --git a/std/src/sys/pal/unix/kernel_copy/tests.rs b/std/src/sys/io/kernel_copy/linux/tests.rs similarity index 99% rename from std/src/sys/pal/unix/kernel_copy/tests.rs rename to std/src/sys/io/kernel_copy/linux/tests.rs index 54d8f8ed2edd4..15dee768d928b 100644 --- a/std/src/sys/pal/unix/kernel_copy/tests.rs +++ b/std/src/sys/io/kernel_copy/linux/tests.rs @@ -50,7 +50,7 @@ fn copy_specialization() -> Result<()> { "inner Take allowed reading beyond end of file, some bytes should be left" ); - let mut sink = sink.into_inner()?; + let mut sink = sink.into_inner().map_err(io::Error::from)?; sink.seek(SeekFrom::Start(0))?; let mut copied = Vec::new(); sink.read_to_end(&mut copied)?; diff --git a/std/src/sys/io/kernel_copy/mod.rs b/std/src/sys/io/kernel_copy/mod.rs new file mode 100644 index 0000000000000..a89279412cf7f --- /dev/null +++ b/std/src/sys/io/kernel_copy/mod.rs @@ -0,0 +1,23 @@ +pub enum CopyState { + #[cfg_attr(not(any(target_os = "linux", target_os = "android")), expect(dead_code))] + Ended(u64), + Fallback(u64), +} + +cfg_select! 
{
+    any(target_os = "linux", target_os = "android") => {
+        mod linux;
+        pub use linux::kernel_copy;
+    }
+    _ => {
+        use crate::io::{Result, Read, Write};
+
+        pub fn kernel_copy<R, W>(_reader: &mut R, _writer: &mut W) -> Result<CopyState>
+        where
+            R: Read,
+            W: Write,
+        {
+            Ok(CopyState::Fallback(0))
+        }
+    }
+}
diff --git a/std/src/sys/io/mod.rs b/std/src/sys/io/mod.rs
index fe8ec1dbb7325..e2c5e7f88d492 100644
--- a/std/src/sys/io/mod.rs
+++ b/std/src/sys/io/mod.rs
@@ -39,6 +39,10 @@ mod is_terminal {
         mod hermit;
         pub use hermit::*;
     }
+    target_os = "motor" => {
+        mod motor;
+        pub use motor::*;
+    }
     _ => {
         mod unsupported;
         pub use unsupported::*;
@@ -46,8 +50,11 @@ mod is_terminal {
     }
 }
 
+mod kernel_copy;
+
 pub use io_slice::{IoSlice, IoSliceMut};
 pub use is_terminal::is_terminal;
+pub use kernel_copy::{CopyState, kernel_copy};
 
 // Bare metal platforms usually have very small amounts of RAM
 // (in the order of hundreds of KB)
diff --git a/std/src/sys/net/connection/mod.rs b/std/src/sys/net/connection/mod.rs
index 41e7159f909ae..2f064914a8317 100644
--- a/std/src/sys/net/connection/mod.rs
+++ b/std/src/sys/net/connection/mod.rs
@@ -17,6 +17,10 @@ cfg_select! {
         mod wasip1;
         pub use wasip1::*;
     }
+    target_os = "motor" => {
+        mod motor;
+        pub use motor::*;
+    }
     target_os = "xous" => {
         mod xous;
         pub use xous::*;
diff --git a/std/src/sys/net/connection/motor.rs b/std/src/sys/net/connection/motor.rs
new file mode 100644
index 0000000000000..e9bf29e34f90c
--- /dev/null
+++ b/std/src/sys/net/connection/motor.rs
@@ -0,0 +1,521 @@
+pub use moto_rt::netc;
+
+use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut};
+use crate::net::SocketAddr::{V4, V6};
+use crate::net::{Ipv4Addr, Ipv6Addr, Shutdown, SocketAddr, ToSocketAddrs};
+use crate::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, RawFd};
+use crate::sys::fd::FileDesc;
+use crate::sys::map_motor_error;
+use crate::sys_common::{AsInner, FromInner, IntoInner};
+use crate::time::Duration;
+
+// We want to re-use as much of Rust's stdlib code as possible,
+// and most of it is unixy, but with a lot of nesting.
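+//
+// The resulting layering for, e.g., a TCP socket is roughly (see `sys::fd::motor`):
+//   std::net::TcpStream -> sys TcpStream { inner: Socket } -> Socket(FileDesc)
+//   -> FileDesc(OwnedFd) -> moto_rt file descriptor (RawFd)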
+#[derive(Debug)] +pub struct Socket(FileDesc); + +#[derive(Debug)] +pub struct TcpStream { + inner: Socket, +} + +impl TcpStream { + pub fn socket(&self) -> &Socket { + &self.inner + } + + pub fn into_socket(self) -> Socket { + self.inner + } + + pub fn connect(addr: A) -> io::Result { + let addr = into_netc(&addr.to_socket_addrs()?.next().unwrap()); + moto_rt::net::tcp_connect(&addr, Duration::MAX, false) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result { + let addr = into_netc(addr); + moto_rt::net::tcp_connect(&addr, timeout, false) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { + moto_rt::net::set_read_timeout(self.inner.as_raw_fd(), timeout).map_err(map_motor_error) + } + + pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { + moto_rt::net::set_write_timeout(self.inner.as_raw_fd(), timeout).map_err(map_motor_error) + } + + pub fn read_timeout(&self) -> io::Result> { + moto_rt::net::read_timeout(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn write_timeout(&self) -> io::Result> { + moto_rt::net::write_timeout(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn peek(&self, buf: &mut [u8]) -> io::Result { + moto_rt::net::peek(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + moto_rt::fs::read(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + crate::io::default_read_buf(|buf| self.read(buf), cursor) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + let bufs: &mut [&mut [u8]] = unsafe { core::mem::transmute(bufs) }; + moto_rt::fs::read_vectored(self.inner.as_raw_fd(), bufs).map_err(map_motor_error) + } + + pub fn is_read_vectored(&self) -> bool { + true + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + moto_rt::fs::write(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + let bufs: &[&[u8]] = unsafe { core::mem::transmute(bufs) }; + moto_rt::fs::write_vectored(self.inner.as_raw_fd(), bufs).map_err(map_motor_error) + } + + pub fn is_write_vectored(&self) -> bool { + true + } + + pub fn peer_addr(&self) -> io::Result { + moto_rt::net::peer_addr(self.inner.as_raw_fd()) + .map(|addr| from_netc(&addr)) + .map_err(map_motor_error) + } + + pub fn socket_addr(&self) -> io::Result { + moto_rt::net::socket_addr(self.inner.as_raw_fd()) + .map(|addr| from_netc(&addr)) + .map_err(map_motor_error) + } + + pub fn shutdown(&self, shutdown: Shutdown) -> io::Result<()> { + let shutdown = match shutdown { + Shutdown::Read => moto_rt::net::SHUTDOWN_READ, + Shutdown::Write => moto_rt::net::SHUTDOWN_WRITE, + Shutdown::Both => moto_rt::net::SHUTDOWN_READ | moto_rt::net::SHUTDOWN_WRITE, + }; + + moto_rt::net::shutdown(self.inner.as_raw_fd(), shutdown).map_err(map_motor_error) + } + + pub fn duplicate(&self) -> io::Result { + moto_rt::fs::duplicate(self.inner.as_raw_fd()) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn set_linger(&self, timeout: Option) -> io::Result<()> { + moto_rt::net::set_linger(self.inner.as_raw_fd(), timeout).map_err(map_motor_error) + } + + pub fn linger(&self) -> io::Result> { + 
moto_rt::net::linger(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> { + moto_rt::net::set_nodelay(self.inner.as_raw_fd(), nodelay).map_err(map_motor_error) + } + + pub fn nodelay(&self) -> io::Result { + moto_rt::net::nodelay(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { + moto_rt::net::set_ttl(self.inner.as_raw_fd(), ttl).map_err(map_motor_error) + } + + pub fn ttl(&self) -> io::Result { + moto_rt::net::ttl(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn take_error(&self) -> io::Result> { + let e = moto_rt::net::take_error(self.inner.as_raw_fd()).map_err(map_motor_error)?; + if e == moto_rt::E_OK { Ok(None) } else { Ok(Some(map_motor_error(e))) } + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + moto_rt::net::set_nonblocking(self.inner.as_raw_fd(), nonblocking).map_err(map_motor_error) + } +} + +#[derive(Debug)] +pub struct TcpListener { + inner: Socket, +} + +impl TcpListener { + #[inline] + pub fn socket(&self) -> &Socket { + &self.inner + } + + pub fn into_socket(self) -> Socket { + self.inner + } + + pub fn bind(addr: A) -> io::Result { + let addr = into_netc(&addr.to_socket_addrs()?.next().unwrap()); + moto_rt::net::bind(moto_rt::net::PROTO_TCP, &addr) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn socket_addr(&self) -> io::Result { + moto_rt::net::socket_addr(self.inner.as_raw_fd()) + .map(|addr| from_netc(&addr)) + .map_err(map_motor_error) + } + + pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> { + moto_rt::net::accept(self.inner.as_raw_fd()) + .map(|(fd, addr)| { + (TcpStream { inner: unsafe { Socket::from_raw_fd(fd) } }, from_netc(&addr)) + }) + .map_err(map_motor_error) + } + + pub fn duplicate(&self) -> io::Result { + moto_rt::fs::duplicate(self.inner.as_raw_fd()) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { + moto_rt::net::set_ttl(self.inner.as_raw_fd(), ttl).map_err(map_motor_error) + } + + pub fn ttl(&self) -> io::Result { + moto_rt::net::ttl(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> { + moto_rt::net::set_only_v6(self.inner.as_raw_fd(), only_v6).map_err(map_motor_error) + } + + pub fn only_v6(&self) -> io::Result { + moto_rt::net::only_v6(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn take_error(&self) -> io::Result> { + let e = moto_rt::net::take_error(self.inner.as_raw_fd()).map_err(map_motor_error)?; + if e == moto_rt::E_OK { Ok(None) } else { Ok(Some(map_motor_error(e))) } + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + moto_rt::net::set_nonblocking(self.inner.as_raw_fd(), nonblocking).map_err(map_motor_error) + } +} + +#[derive(Debug)] +pub struct UdpSocket { + inner: Socket, +} + +impl UdpSocket { + pub fn socket(&self) -> &Socket { + &self.inner + } + + pub fn into_socket(self) -> Socket { + self.inner + } + + pub fn bind(addr: A) -> io::Result { + let addr = into_netc(&addr.to_socket_addrs()?.next().unwrap()); + moto_rt::net::bind(moto_rt::net::PROTO_UDP, &addr) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn peer_addr(&self) -> io::Result { + moto_rt::net::peer_addr(self.inner.as_raw_fd()) + .map(|addr| from_netc(&addr)) + 
.map_err(map_motor_error) + } + + pub fn socket_addr(&self) -> io::Result { + moto_rt::net::socket_addr(self.inner.as_raw_fd()) + .map(|addr| from_netc(&addr)) + .map_err(map_motor_error) + } + + pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { + moto_rt::net::udp_recv_from(self.inner.as_raw_fd(), buf) + .map(|(sz, addr)| (sz, from_netc(&addr))) + .map_err(map_motor_error) + } + + pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { + moto_rt::net::udp_peek_from(self.inner.as_raw_fd(), buf) + .map(|(sz, addr)| (sz, from_netc(&addr))) + .map_err(map_motor_error) + } + + pub fn send_to(&self, buf: &[u8], addr: &SocketAddr) -> io::Result { + let addr = into_netc(addr); + moto_rt::net::udp_send_to(self.inner.as_raw_fd(), buf, &addr).map_err(map_motor_error) + } + + pub fn duplicate(&self) -> io::Result { + moto_rt::fs::duplicate(self.inner.as_raw_fd()) + .map(|fd| Self { inner: unsafe { Socket::from_raw_fd(fd) } }) + .map_err(map_motor_error) + } + + pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { + moto_rt::net::set_read_timeout(self.inner.as_raw_fd(), timeout).map_err(map_motor_error) + } + + pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { + moto_rt::net::set_write_timeout(self.inner.as_raw_fd(), timeout).map_err(map_motor_error) + } + + pub fn read_timeout(&self) -> io::Result> { + moto_rt::net::read_timeout(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn write_timeout(&self) -> io::Result> { + moto_rt::net::write_timeout(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_broadcast(&self, broadcast: bool) -> io::Result<()> { + moto_rt::net::set_udp_broadcast(self.inner.as_raw_fd(), broadcast).map_err(map_motor_error) + } + + pub fn broadcast(&self) -> io::Result { + moto_rt::net::udp_broadcast(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_multicast_loop_v4(&self, val: bool) -> io::Result<()> { + moto_rt::net::set_udp_multicast_loop_v4(self.inner.as_raw_fd(), val) + .map_err(map_motor_error) + } + + pub fn multicast_loop_v4(&self) -> io::Result { + moto_rt::net::udp_multicast_loop_v4(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_multicast_ttl_v4(&self, val: u32) -> io::Result<()> { + moto_rt::net::set_udp_multicast_ttl_v4(self.inner.as_raw_fd(), val).map_err(map_motor_error) + } + + pub fn multicast_ttl_v4(&self) -> io::Result { + moto_rt::net::udp_multicast_ttl_v4(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn set_multicast_loop_v6(&self, val: bool) -> io::Result<()> { + moto_rt::net::set_udp_multicast_loop_v6(self.inner.as_raw_fd(), val) + .map_err(map_motor_error) + } + + pub fn multicast_loop_v6(&self) -> io::Result { + moto_rt::net::udp_multicast_loop_v6(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn join_multicast_v4(&self, addr: &Ipv4Addr, iface: &Ipv4Addr) -> io::Result<()> { + let addr = (*addr).into(); + let iface = (*iface).into(); + moto_rt::net::join_udp_multicast_v4(self.inner.as_raw_fd(), &addr, &iface) + .map_err(map_motor_error) + } + + pub fn join_multicast_v6(&self, addr: &Ipv6Addr, iface: u32) -> io::Result<()> { + let addr = (*addr).into(); + moto_rt::net::join_udp_multicast_v6(self.inner.as_raw_fd(), &addr, iface) + .map_err(map_motor_error) + } + + pub fn leave_multicast_v4(&self, addr: &Ipv4Addr, iface: &Ipv4Addr) -> io::Result<()> { + let addr = (*addr).into(); + let iface = (*iface).into(); + moto_rt::net::leave_udp_multicast_v4(self.inner.as_raw_fd(), &addr, 
&iface) + .map_err(map_motor_error) + } + + pub fn leave_multicast_v6(&self, addr: &Ipv6Addr, iface: u32) -> io::Result<()> { + let addr = (*addr).into(); + moto_rt::net::leave_udp_multicast_v6(self.inner.as_raw_fd(), &addr, iface) + .map_err(map_motor_error) + } + + pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { + moto_rt::net::set_ttl(self.inner.as_raw_fd(), ttl).map_err(map_motor_error) + } + + pub fn ttl(&self) -> io::Result { + moto_rt::net::ttl(self.inner.as_raw_fd()).map_err(map_motor_error) + } + + pub fn take_error(&self) -> io::Result> { + moto_rt::net::take_error(self.inner.as_raw_fd()) + .map(|e| match e { + moto_rt::E_OK => None, + e => Some(map_motor_error(e)), + }) + .map_err(map_motor_error) + } + + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + moto_rt::net::set_nonblocking(self.inner.as_raw_fd(), nonblocking).map_err(map_motor_error) + } + + pub fn recv(&self, buf: &mut [u8]) -> io::Result { + moto_rt::fs::read(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn peek(&self, buf: &mut [u8]) -> io::Result { + moto_rt::net::peek(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn send(&self, buf: &[u8]) -> io::Result { + moto_rt::fs::write(self.inner.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn connect(&self, addr: A) -> io::Result<()> { + let addr = into_netc(&addr.to_socket_addrs()?.next().unwrap()); + moto_rt::net::udp_connect(self.inner.as_raw_fd(), &addr).map_err(map_motor_error) + } +} + +pub struct LookupHost { + addresses: alloc::collections::VecDeque, +} + +pub fn lookup_host(host: &str, port: u16) -> io::Result { + let (_port, addresses) = moto_rt::net::lookup_host(host, port).map_err(map_motor_error)?; + Ok(LookupHost { addresses }) +} + +impl Iterator for LookupHost { + type Item = SocketAddr; + fn next(&mut self) -> Option { + self.addresses.pop_front().map(|addr| from_netc(&addr)) + } +} + +impl TryFrom<&str> for LookupHost { + type Error = io::Error; + + fn try_from(host_port: &str) -> io::Result { + let (host, port_str) = host_port + .rsplit_once(':') + .ok_or(moto_rt::E_INVALID_ARGUMENT) + .map_err(map_motor_error)?; + let port: u16 = + port_str.parse().map_err(|_| moto_rt::E_INVALID_ARGUMENT).map_err(map_motor_error)?; + (host, port).try_into() + } +} + +impl<'a> TryFrom<(&'a str, u16)> for LookupHost { + type Error = io::Error; + + fn try_from(host_port: (&'a str, u16)) -> io::Result { + let (host, port) = host_port; + + let (_port, addresses) = moto_rt::net::lookup_host(host, port).map_err(map_motor_error)?; + Ok(LookupHost { addresses }) + } +} + +fn into_netc(addr: &SocketAddr) -> netc::sockaddr { + match addr { + V4(addr4) => netc::sockaddr { v4: (*addr4).into() }, + V6(addr6) => netc::sockaddr { v6: (*addr6).into() }, + } +} + +fn from_netc(addr: &netc::sockaddr) -> SocketAddr { + // SAFETY: all variants of union netc::sockaddr have `sin_family` at the same offset. 
+ let family = unsafe { addr.v4.sin_family }; + match family { + netc::AF_INET => SocketAddr::V4(crate::net::SocketAddrV4::from(unsafe { addr.v4 })), + netc::AF_INET6 => SocketAddr::V6(crate::net::SocketAddrV6::from(unsafe { addr.v6 })), + _ => panic!("bad sin_family {family}"), + } +} + +impl AsInner for Socket { + #[inline] + fn as_inner(&self) -> &FileDesc { + &self.0 + } +} + +impl IntoInner for Socket { + fn into_inner(self) -> FileDesc { + self.0 + } +} + +impl FromInner for Socket { + fn from_inner(file_desc: FileDesc) -> Self { + Self(file_desc) + } +} + +impl AsFd for Socket { + fn as_fd(&self) -> BorrowedFd<'_> { + self.0.as_fd() + } +} + +impl AsRawFd for Socket { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.0.as_raw_fd() + } +} + +impl IntoRawFd for Socket { + fn into_raw_fd(self) -> RawFd { + self.0.into_raw_fd() + } +} + +impl FromRawFd for Socket { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + Self(FromRawFd::from_raw_fd(raw_fd)) + } +} + +impl AsInner for TcpStream { + #[inline] + fn as_inner(&self) -> &Socket { + &self.inner + } +} + +impl FromInner for TcpStream { + fn from_inner(socket: Socket) -> TcpStream { + TcpStream { inner: socket } + } +} + +impl FromInner for TcpListener { + fn from_inner(socket: Socket) -> TcpListener { + TcpListener { inner: socket } + } +} + +impl FromInner for UdpSocket { + fn from_inner(socket: Socket) -> UdpSocket { + UdpSocket { inner: socket } + } +} diff --git a/std/src/sys/net/connection/uefi/mod.rs b/std/src/sys/net/connection/uefi/mod.rs index 004f6d413a1f3..d76e3e576f330 100644 --- a/std/src/sys/net/connection/uefi/mod.rs +++ b/std/src/sys/net/connection/uefi/mod.rs @@ -82,12 +82,11 @@ impl TcpStream { } pub fn write_vectored(&self, buf: &[IoSlice<'_>]) -> io::Result { - // FIXME: UEFI does support vectored write, so implement that. - crate::io::default_write_vectored(|b| self.write(b), buf) + self.inner.write_vectored(buf, self.write_timeout()?) 
} pub fn is_write_vectored(&self) -> bool { - false + true } pub fn peer_addr(&self) -> io::Result { diff --git a/std/src/sys/net/connection/uefi/tcp.rs b/std/src/sys/net/connection/uefi/tcp.rs index aac97007bbfe5..16283e64fb35a 100644 --- a/std/src/sys/net/connection/uefi/tcp.rs +++ b/std/src/sys/net/connection/uefi/tcp.rs @@ -1,5 +1,5 @@ use super::tcp4; -use crate::io; +use crate::io::{self, IoSlice}; use crate::net::SocketAddr; use crate::ptr::NonNull; use crate::sys::{helpers, unsupported}; @@ -28,6 +28,16 @@ impl Tcp { } } + pub(crate) fn write_vectored( + &self, + buf: &[IoSlice<'_>], + timeout: Option, + ) -> io::Result { + match self { + Self::V4(client) => client.write_vectored(buf, timeout), + } + } + pub(crate) fn read(&self, buf: &mut [u8], timeout: Option) -> io::Result { match self { Self::V4(client) => client.read(buf, timeout), diff --git a/std/src/sys/net/connection/uefi/tcp4.rs b/std/src/sys/net/connection/uefi/tcp4.rs index 75862ff247b4f..ba0424454d738 100644 --- a/std/src/sys/net/connection/uefi/tcp4.rs +++ b/std/src/sys/net/connection/uefi/tcp4.rs @@ -1,7 +1,7 @@ use r_efi::efi::{self, Status}; use r_efi::protocols::tcp4; -use crate::io; +use crate::io::{self, IoSlice}; use crate::net::SocketAddrV4; use crate::ptr::NonNull; use crate::sync::atomic::{AtomicBool, Ordering}; @@ -108,11 +108,7 @@ impl Tcp4 { } pub(crate) fn write(&self, buf: &[u8], timeout: Option) -> io::Result { - let evt = unsafe { self.create_evt() }?; - let completion_token = - tcp4::CompletionToken { event: evt.as_ptr(), status: Status::SUCCESS }; let data_len = u32::try_from(buf.len()).unwrap_or(u32::MAX); - let fragment = tcp4::FragmentData { fragment_length: data_len, fragment_buffer: buf.as_ptr().cast::().cast_mut(), @@ -125,14 +121,63 @@ impl Tcp4 { fragment_table: [fragment], }; - let protocol = self.protocol.as_ptr(); - let mut token = tcp4::IoToken { - completion_token, - packet: tcp4::IoTokenPacket { - tx_data: (&raw mut tx_data).cast::>(), - }, + self.write_inner((&raw mut tx_data).cast(), timeout).map(|_| data_len as usize) + } + + pub(crate) fn write_vectored( + &self, + buf: &[IoSlice<'_>], + timeout: Option, + ) -> io::Result { + let mut data_length = 0u32; + let mut fragment_count = 0u32; + + // Calculate how many IoSlice in buf can be transmitted. + for i in buf { + // IoSlice length is always <= u32::MAX in UEFI. + match data_length + .checked_add(u32::try_from(i.as_slice().len()).expect("value is stored as a u32")) + { + Some(x) => data_length = x, + None => break, + } + fragment_count += 1; + } + + let tx_data_size = size_of::>() + + size_of::() * (fragment_count as usize); + let mut tx_data = helpers::UefiBox::::new(tx_data_size)?; + tx_data.write(tcp4::TransmitData { + push: r_efi::efi::Boolean::FALSE, + urgent: r_efi::efi::Boolean::FALSE, + data_length, + fragment_count, + fragment_table: [], + }); + unsafe { + // SAFETY: IoSlice and FragmentData are guaranteed to have same layout. 
+ crate::ptr::copy_nonoverlapping( + buf.as_ptr().cast(), + (*tx_data.as_mut_ptr()).fragment_table.as_mut_ptr(), + fragment_count as usize, + ); }; + self.write_inner(tx_data.as_mut_ptr(), timeout).map(|_| data_length as usize) + } + + fn write_inner( + &self, + tx_data: *mut tcp4::TransmitData, + timeout: Option, + ) -> io::Result<()> { + let evt = unsafe { self.create_evt() }?; + let completion_token = + tcp4::CompletionToken { event: evt.as_ptr(), status: Status::SUCCESS }; + + let protocol = self.protocol.as_ptr(); + let mut token = tcp4::IoToken { completion_token, packet: tcp4::IoTokenPacket { tx_data } }; + let r = unsafe { ((*protocol).transmit)(protocol, &mut token) }; if r.is_error() { return Err(io::Error::from_raw_os_error(r.as_usize())); @@ -143,7 +188,7 @@ impl Tcp4 { if completion_token.status.is_error() { Err(io::Error::from_raw_os_error(completion_token.status.as_usize())) } else { - Ok(data_len as usize) + Ok(()) } } diff --git a/std/src/sys/os_str/bytes.rs b/std/src/sys/os_str/bytes.rs index f8ab4543a3a52..9373982c455fa 100644 --- a/std/src/sys/os_str/bytes.rs +++ b/std/src/sys/os_str/bytes.rs @@ -176,17 +176,17 @@ impl Buf { #[inline] pub fn as_slice(&self) -> &Slice { - // SAFETY: Slice just wraps [u8], - // and &*self.inner is &[u8], therefore - // transmuting &[u8] to &Slice is safe. + // SAFETY: Slice is just a wrapper for [u8], + // and self.inner.as_slice() returns &[u8]. + // Therefore, transmuting &[u8] to &Slice is safe. unsafe { mem::transmute(self.inner.as_slice()) } } #[inline] pub fn as_mut_slice(&mut self) -> &mut Slice { - // SAFETY: Slice just wraps [u8], - // and &mut *self.inner is &mut [u8], therefore - // transmuting &mut [u8] to &mut Slice is safe. + // SAFETY: Slice is just a wrapper for [u8], + // and self.inner.as_mut_slice() returns &mut [u8]. + // Therefore, transmuting &mut [u8] to &mut Slice is safe. unsafe { mem::transmute(self.inner.as_mut_slice()) } } @@ -233,7 +233,9 @@ impl Buf { /// /// # Safety /// - /// This encoding has no safety requirements. + /// The slice must be valid for the platform encoding (as described in + /// `OsStr::from_encoded_bytes_unchecked`). This encoding has no safety + /// requirements. #[inline] pub unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { self.inner.extend_from_slice(other); diff --git a/std/src/sys/os_str/mod.rs b/std/src/sys/os_str/mod.rs index 65c90d880495d..f7007cbf18b4c 100644 --- a/std/src/sys/os_str/mod.rs +++ b/std/src/sys/os_str/mod.rs @@ -5,6 +5,10 @@ cfg_select! { mod wtf8; pub use wtf8::{Buf, Slice}; } + any(target_os = "motor") => { + mod utf8; + pub use utf8::{Buf, Slice}; + } _ => { mod bytes; pub use bytes::{Buf, Slice}; diff --git a/std/src/sys/os_str/utf8.rs b/std/src/sys/os_str/utf8.rs new file mode 100644 index 0000000000000..5dd24f67d3039 --- /dev/null +++ b/std/src/sys/os_str/utf8.rs @@ -0,0 +1,330 @@ +//! An OsString/OsStr implementation that is guaranteed to be UTF-8. 
+ +use core::clone::CloneToUninit; + +use crate::borrow::Cow; +use crate::collections::TryReserveError; +use crate::rc::Rc; +use crate::sync::Arc; +use crate::sys_common::{AsInner, FromInner, IntoInner}; +use crate::{fmt, mem}; + +#[derive(Hash)] +#[repr(transparent)] +pub struct Buf { + pub inner: String, +} + +#[repr(transparent)] +pub struct Slice { + pub inner: str, +} + +impl IntoInner for Buf { + fn into_inner(self) -> String { + self.inner + } +} + +impl FromInner for Buf { + fn from_inner(inner: String) -> Self { + Buf { inner } + } +} + +impl AsInner for Buf { + #[inline] + fn as_inner(&self) -> &str { + &self.inner + } +} + +impl fmt::Debug for Buf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&self.inner, f) + } +} + +impl fmt::Display for Buf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.inner, f) + } +} + +impl fmt::Debug for Slice { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&self.inner, f) + } +} + +impl fmt::Display for Slice { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.inner, f) + } +} + +impl Clone for Buf { + #[inline] + fn clone(&self) -> Self { + Buf { inner: self.inner.clone() } + } + + #[inline] + fn clone_from(&mut self, source: &Self) { + self.inner.clone_from(&source.inner) + } +} + +impl Buf { + #[inline] + pub fn into_encoded_bytes(self) -> Vec { + self.inner.into_bytes() + } + + #[inline] + pub unsafe fn from_encoded_bytes_unchecked(s: Vec) -> Self { + unsafe { Self { inner: String::from_utf8_unchecked(s) } } + } + + #[inline] + pub fn into_string(self) -> Result { + Ok(self.inner) + } + + #[inline] + pub const fn from_string(s: String) -> Buf { + Buf { inner: s } + } + + #[inline] + pub fn with_capacity(capacity: usize) -> Buf { + Buf { inner: String::with_capacity(capacity) } + } + + #[inline] + pub fn clear(&mut self) { + self.inner.clear() + } + + #[inline] + pub fn capacity(&self) -> usize { + self.inner.capacity() + } + + #[inline] + pub fn push_slice(&mut self, s: &Slice) { + self.inner.push_str(&s.inner) + } + + #[inline] + pub fn push_str(&mut self, s: &str) { + self.inner.push_str(s); + } + + #[inline] + pub fn reserve(&mut self, additional: usize) { + self.inner.reserve(additional) + } + + #[inline] + pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { + self.inner.try_reserve(additional) + } + + #[inline] + pub fn reserve_exact(&mut self, additional: usize) { + self.inner.reserve_exact(additional) + } + + #[inline] + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { + self.inner.try_reserve_exact(additional) + } + + #[inline] + pub fn shrink_to_fit(&mut self) { + self.inner.shrink_to_fit() + } + + #[inline] + pub fn shrink_to(&mut self, min_capacity: usize) { + self.inner.shrink_to(min_capacity) + } + + #[inline] + pub fn as_slice(&self) -> &Slice { + Slice::from_str(&self.inner) + } + + #[inline] + pub fn as_mut_slice(&mut self) -> &mut Slice { + Slice::from_mut_str(&mut self.inner) + } + + #[inline] + pub fn leak<'a>(self) -> &'a mut Slice { + Slice::from_mut_str(self.inner.leak()) + } + + #[inline] + pub fn into_box(self) -> Box { + unsafe { mem::transmute(self.inner.into_boxed_str()) } + } + + #[inline] + pub fn from_box(boxed: Box) -> Buf { + let inner: Box = unsafe { mem::transmute(boxed) }; + Buf { inner: inner.into_string() } + } + + #[inline] + pub fn into_arc(&self) -> Arc { + self.as_slice().into_arc() + } + + 
#[inline] + pub fn into_rc(&self) -> Rc { + self.as_slice().into_rc() + } + + /// Provides plumbing to `Vec::truncate` without giving full mutable access + /// to the `Vec`. + /// + /// # Safety + /// + /// The length must be at an `OsStr` boundary, according to + /// `Slice::check_public_boundary`. + #[inline] + pub unsafe fn truncate_unchecked(&mut self, len: usize) { + self.inner.truncate(len); + } + + /// Provides plumbing to `Vec::extend_from_slice` without giving full + /// mutable access to the `Vec`. + /// + /// # Safety + /// + /// The slice must be valid for the platform encoding (as described in + /// `OsStr::from_encoded_bytes_unchecked`). For this encoding, that means + /// `other` must be valid UTF-8. + #[inline] + pub unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { + self.inner.push_str(unsafe { str::from_utf8_unchecked(other) }); + } +} + +impl Slice { + #[inline] + pub fn as_encoded_bytes(&self) -> &[u8] { + self.inner.as_bytes() + } + + #[inline] + pub unsafe fn from_encoded_bytes_unchecked(s: &[u8]) -> &Slice { + Slice::from_str(unsafe { str::from_utf8_unchecked(s) }) + } + + #[track_caller] + #[inline] + pub fn check_public_boundary(&self, index: usize) { + if !self.inner.is_char_boundary(index) { + panic!("byte index {index} is not an OsStr boundary"); + } + } + + #[inline] + pub fn from_str(s: &str) -> &Slice { + // SAFETY: Slice is just a wrapper over str. + unsafe { mem::transmute(s) } + } + + #[inline] + fn from_mut_str(s: &mut str) -> &mut Slice { + // SAFETY: Slice is just a wrapper over str. + unsafe { mem::transmute(s) } + } + + #[inline] + pub fn to_str(&self) -> Result<&str, crate::str::Utf8Error> { + Ok(&self.inner) + } + + #[inline] + pub fn to_string_lossy(&self) -> Cow<'_, str> { + Cow::Borrowed(&self.inner) + } + + #[inline] + pub fn to_owned(&self) -> Buf { + Buf { inner: self.inner.to_owned() } + } + + #[inline] + pub fn clone_into(&self, buf: &mut Buf) { + self.inner.clone_into(&mut buf.inner) + } + + #[inline] + pub fn into_box(&self) -> Box { + let boxed: Box = self.inner.into(); + unsafe { mem::transmute(boxed) } + } + + #[inline] + pub fn empty_box() -> Box { + let boxed: Box = Default::default(); + unsafe { mem::transmute(boxed) } + } + + #[inline] + pub fn into_arc(&self) -> Arc { + let arc: Arc = Arc::from(&self.inner); + unsafe { Arc::from_raw(Arc::into_raw(arc) as *const Slice) } + } + + #[inline] + pub fn into_rc(&self) -> Rc { + let rc: Rc = Rc::from(&self.inner); + unsafe { Rc::from_raw(Rc::into_raw(rc) as *const Slice) } + } + + #[inline] + pub fn make_ascii_lowercase(&mut self) { + self.inner.make_ascii_lowercase() + } + + #[inline] + pub fn make_ascii_uppercase(&mut self) { + self.inner.make_ascii_uppercase() + } + + #[inline] + pub fn to_ascii_lowercase(&self) -> Buf { + Buf { inner: self.inner.to_ascii_lowercase() } + } + + #[inline] + pub fn to_ascii_uppercase(&self) -> Buf { + Buf { inner: self.inner.to_ascii_uppercase() } + } + + #[inline] + pub fn is_ascii(&self) -> bool { + self.inner.is_ascii() + } + + #[inline] + pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool { + self.inner.eq_ignore_ascii_case(&other.inner) + } +} + +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for Slice { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut u8) { + // SAFETY: we're just a transparent wrapper around [u8] + unsafe { self.inner.clone_to_uninit(dst) } + } +} diff --git a/std/src/sys/os_str/wtf8.rs b/std/src/sys/os_str/wtf8.rs 
index 96da891874ef0..208755cd5b9c7 100644 --- a/std/src/sys/os_str/wtf8.rs +++ b/std/src/sys/os_str/wtf8.rs @@ -1,5 +1,6 @@ //! The underlying OsString/OsStr implementation on Windows is a //! wrapper around the "WTF-8" encoding; see the `wtf8` module for more. + use alloc::wtf8::{Wtf8, Wtf8Buf}; use core::clone::CloneToUninit; @@ -11,6 +12,7 @@ use crate::sys_common::{AsInner, FromInner, IntoInner}; use crate::{fmt, mem}; #[derive(Hash)] +#[repr(transparent)] pub struct Buf { pub inner: Wtf8Buf, } @@ -213,11 +215,12 @@ impl Buf { /// # Safety /// /// The slice must be valid for the platform encoding (as described in - /// [`Slice::from_encoded_bytes_unchecked`]). + /// `OsStr::from_encoded_bytes_unchecked`). For this encoding, that means + /// `other` must be valid WTF-8. /// - /// This bypasses the WTF-8 surrogate joining, so either `self` must not - /// end with a leading surrogate half, or `other` must not start with a - /// trailing surrogate half. + /// Additionally, this method bypasses the WTF-8 surrogate joining, so + /// either `self` must not end with a leading surrogate half, or `other` + /// must not start with a trailing surrogate half. #[inline] pub unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { unsafe { diff --git a/std/src/sys/pal/mod.rs b/std/src/sys/pal/mod.rs index 9e964540a87c1..e11df38a8ee68 100644 --- a/std/src/sys/pal/mod.rs +++ b/std/src/sys/pal/mod.rs @@ -41,6 +41,10 @@ cfg_select! { mod hermit; pub use self::hermit::*; } + target_os = "motor" => { + mod motor; + pub use self::motor::*; + } target_os = "trusty" => { mod trusty; pub use self::trusty::*; diff --git a/std/src/sys/pal/motor/mod.rs b/std/src/sys/pal/motor/mod.rs new file mode 100644 index 0000000000000..c64f8ff7a8a83 --- /dev/null +++ b/std/src/sys/pal/motor/mod.rs @@ -0,0 +1,77 @@ +#![allow(unsafe_op_in_unsafe_fn)] + +pub mod os; +pub mod pipe; +pub mod time; + +pub use moto_rt::futex; + +use crate::io as std_io; +use crate::sys::RawOsError; + +pub(crate) fn map_motor_error(err: moto_rt::ErrorCode) -> crate::io::Error { + crate::io::Error::from_raw_os_error(err.into()) +} + +#[cfg(not(test))] +#[unsafe(no_mangle)] +pub extern "C" fn motor_start() -> ! { + // Initialize the runtime. + moto_rt::start(); + + // Call main. + unsafe extern "C" { + fn main(_: isize, _: *const *const u8, _: u8) -> i32; + } + let result = unsafe { main(0, core::ptr::null(), 0) }; + + // Terminate the process. + moto_rt::process::exit(result) +} + +// SAFETY: must be called only once during runtime initialization. +// NOTE: Motor OS uses moto_rt::start() to initialize runtime (see above). +pub unsafe fn init(_argc: isize, _argv: *const *const u8, _sigpipe: u8) {} + +// SAFETY: must be called only once during runtime cleanup. +// NOTE: this is not guaranteed to run, for example when the program aborts. +pub unsafe fn cleanup() {} + +pub fn unsupported() -> std_io::Result { + Err(unsupported_err()) +} + +pub fn unsupported_err() -> std_io::Error { + std_io::Error::UNSUPPORTED_PLATFORM +} + +pub fn is_interrupted(_code: RawOsError) -> bool { + false // Motor OS doesn't have signals. 
+} + +pub fn decode_error_kind(code: RawOsError) -> crate::io::ErrorKind { + use moto_rt::error::*; + use std_io::ErrorKind; + + if code < 0 || code > u16::MAX.into() { + return std_io::ErrorKind::Uncategorized; + } + + match code as moto_rt::ErrorCode /* u16 */ { + E_ALREADY_IN_USE => ErrorKind::AlreadyExists, + E_INVALID_FILENAME => ErrorKind::InvalidFilename, + E_NOT_FOUND => ErrorKind::NotFound, + E_TIMED_OUT => ErrorKind::TimedOut, + E_NOT_IMPLEMENTED => ErrorKind::Unsupported, + E_FILE_TOO_LARGE => ErrorKind::FileTooLarge, + E_UNEXPECTED_EOF => ErrorKind::UnexpectedEof, + E_INVALID_ARGUMENT => ErrorKind::InvalidInput, + E_NOT_READY => ErrorKind::WouldBlock, + E_NOT_CONNECTED => ErrorKind::NotConnected, + _ => crate::io::ErrorKind::Uncategorized, + } +} + +pub fn abort_internal() -> ! { + core::intrinsics::abort(); +} diff --git a/std/src/sys/pal/motor/os.rs b/std/src/sys/pal/motor/os.rs new file mode 100644 index 0000000000000..052e3b238b6af --- /dev/null +++ b/std/src/sys/pal/motor/os.rs @@ -0,0 +1,100 @@ +use super::map_motor_error; +use crate::error::Error as StdError; +use crate::ffi::{OsStr, OsString}; +use crate::marker::PhantomData; +use crate::os::motor::ffi::OsStrExt; +use crate::path::{self, PathBuf}; +use crate::sys::RawOsError; +use crate::{fmt, io}; + +pub fn errno() -> RawOsError { + // Not used in Motor OS because it is ambiguous: Motor OS + // is microkernel-based, and I/O happens via a shared-memory + // ring buffer, so an I/O operation that is a syscall on Unix + // may involve no syscalls on Motor OS at all, or a syscall + // that e.g. waits for a notification from the I/O driver + // (sys-io); and the wait syscall may succeed, but the + // driver may report an I/O error; or a bunch of results + // for several I/O operations, some successful and some + // not. + // + // Also I/O operations in a Motor OS process are handled by a + // separate runtime background/I/O thread, so it is really hard + // to define what "last system error in the current thread" + // actually means.
+ moto_rt::E_UNKNOWN.into() +} + +pub fn error_string(errno: RawOsError) -> String { + let error_code: moto_rt::ErrorCode = match errno { + x if x < 0 => moto_rt::E_UNKNOWN, + x if x > u16::MAX.into() => moto_rt::E_UNKNOWN, + x => x as moto_rt::ErrorCode, /* u16 */ + }; + format!("{}", moto_rt::Error::from(error_code)) +} + +pub fn getcwd() -> io::Result { + moto_rt::fs::getcwd().map(PathBuf::from).map_err(map_motor_error) +} + +pub fn chdir(path: &path::Path) -> io::Result<()> { + moto_rt::fs::chdir(path.as_os_str().as_str()).map_err(map_motor_error) +} + +pub struct SplitPaths<'a>(!, PhantomData<&'a ()>); + +pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> { + panic!("unsupported") +} + +impl<'a> Iterator for SplitPaths<'a> { + type Item = PathBuf; + fn next(&mut self) -> Option { + self.0 + } +} + +#[derive(Debug)] +pub struct JoinPathsError; + +pub fn join_paths(_paths: I) -> Result +where + I: Iterator, + T: AsRef, +{ + Err(JoinPathsError) +} + +impl fmt::Display for JoinPathsError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + "not supported on this platform yet".fmt(f) + } +} + +impl StdError for JoinPathsError { + #[allow(deprecated)] + fn description(&self) -> &str { + "not supported on this platform yet" + } +} + +pub fn current_exe() -> io::Result { + moto_rt::process::current_exe().map(PathBuf::from).map_err(map_motor_error) +} + +pub fn temp_dir() -> PathBuf { + PathBuf::from(moto_rt::fs::TEMP_DIR) +} + +pub fn home_dir() -> Option { + None +} + +pub fn exit(code: i32) -> ! { + moto_rt::process::exit(code) +} + +pub fn getpid() -> u32 { + panic!("Pids on Motor OS are u64.") +} diff --git a/std/src/sys/pal/motor/pipe.rs b/std/src/sys/pal/motor/pipe.rs new file mode 100644 index 0000000000000..d3be6ddf1573e --- /dev/null +++ b/std/src/sys/pal/motor/pipe.rs @@ -0,0 +1,121 @@ +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut}; +use crate::os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; +use crate::sys::fd::FileDesc; +use crate::sys::map_motor_error; +use crate::sys_common::{FromInner, IntoInner}; + +#[derive(Debug)] +pub struct AnonPipe(FileDesc); + +impl From for AnonPipe { + fn from(rt_fd: moto_rt::RtFd) -> AnonPipe { + unsafe { AnonPipe::from_raw_fd(rt_fd) } + } +} + +impl AnonPipe { + pub fn read(&self, buf: &mut [u8]) -> io::Result { + moto_rt::fs::read(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + crate::io::default_read_buf(|buf| self.read(buf), cursor) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + crate::io::default_read_vectored(|b| self.read(b), bufs) + } + + pub fn is_read_vectored(&self) -> bool { + false + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + moto_rt::fs::write(self.as_raw_fd(), buf).map_err(map_motor_error) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + crate::io::default_write_vectored(|b| self.write(b), bufs) + } + + pub fn is_write_vectored(&self) -> bool { + false + } + + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { + let mut temp_vec = Vec::new(); + let mut size = 0_usize; + loop { + temp_vec.resize(256, 0_u8); + match self.read(&mut temp_vec[..]) { + Ok(sz) => { + if sz == 0 { + return Ok(size); + } + size += sz; + temp_vec.truncate(sz); + buf.append(&mut temp_vec); + } + Err(err) => { + if size != 0 { + return Ok(size); + } else { + return Err(err); + } + } + } + } + } +} + +impl AsRawFd for AnonPipe { + fn 
as_raw_fd(&self) -> RawFd { + self.0.as_raw_fd() + } +} + +impl FromRawFd for AnonPipe { + unsafe fn from_raw_fd(fd: RawFd) -> Self { + let desc = FileDesc::from_raw_fd(fd); + Self(desc) + } +} + +impl IntoRawFd for AnonPipe { + fn into_raw_fd(self) -> RawFd { + self.0.into_raw_fd() + } +} + +impl AsFd for AnonPipe { + fn as_fd(&self) -> BorrowedFd<'_> { + self.0.as_fd() + } +} + +impl IntoInner for AnonPipe { + fn into_inner(self) -> OwnedFd { + self.0.into_inner() + } +} + +impl IntoInner for AnonPipe { + fn into_inner(self) -> FileDesc { + self.0 + } +} + +impl FromInner for AnonPipe { + fn from_inner(owned_fd: OwnedFd) -> Self { + Self(FileDesc::from_inner(owned_fd)) + } +} + +pub fn read2(_p1: AnonPipe, _v1: &mut Vec, _p2: AnonPipe, _v2: &mut Vec) -> io::Result<()> { + Err(io::Error::from_raw_os_error(moto_rt::E_NOT_IMPLEMENTED.into())) +} + +#[inline] +pub fn anon_pipe() -> io::Result<(AnonPipe, AnonPipe)> { + Err(io::Error::UNSUPPORTED_PLATFORM) +} diff --git a/std/src/sys/pal/motor/time.rs b/std/src/sys/pal/motor/time.rs new file mode 100644 index 0000000000000..e917fd466c2e4 --- /dev/null +++ b/std/src/sys/pal/motor/time.rs @@ -0,0 +1 @@ +pub use moto_rt::time::{Instant, SystemTime, UNIX_EPOCH}; diff --git a/std/src/sys/pal/uefi/helpers.rs b/std/src/sys/pal/uefi/helpers.rs index c0d69c3e0029a..bfad6491e3219 100644 --- a/std/src/sys/pal/uefi/helpers.rs +++ b/std/src/sys/pal/uefi/helpers.rs @@ -12,6 +12,7 @@ use r_efi::efi::{self, Guid}; use r_efi::protocols::{device_path, device_path_to_text, service_binding, shell}; +use crate::alloc::Layout; use crate::ffi::{OsStr, OsString}; use crate::io::{self, const_error}; use crate::marker::PhantomData; @@ -769,3 +770,43 @@ pub(crate) const fn ipv4_to_r_efi(addr: crate::net::Ipv4Addr) -> efi::Ipv4Addres pub(crate) const fn ipv4_from_r_efi(ip: efi::Ipv4Address) -> crate::net::Ipv4Addr { crate::net::Ipv4Addr::new(ip.addr[0], ip.addr[1], ip.addr[2], ip.addr[3]) } + +/// This type is intended for use with ZSTs. Since such types are unsized, a reference to such types +/// is not valid in Rust. Thus, only pointers should be used when interacting with such types. +pub(crate) struct UefiBox { + inner: NonNull, + size: usize, +} + +impl UefiBox { + pub(crate) fn new(len: usize) -> io::Result { + assert!(len >= size_of::()); + // UEFI always expects types to be 8 byte aligned. 
+ let layout = Layout::from_size_align(len, 8).unwrap(); + let ptr = unsafe { crate::alloc::alloc(layout) }; + + match NonNull::new(ptr.cast()) { + Some(inner) => Ok(Self { inner, size: len }), + None => Err(io::Error::new(io::ErrorKind::OutOfMemory, "Allocation failed")), + } + } + + pub(crate) fn write(&mut self, data: T) { + unsafe { self.inner.write(data) } + } + + pub(crate) fn as_mut_ptr(&mut self) -> *mut T { + self.inner.as_ptr().cast() + } + + pub(crate) fn as_ptr(&self) -> *const T { + self.inner.as_ptr().cast() + } +} + +impl Drop for UefiBox { + fn drop(&mut self) { + let layout = Layout::from_size_align(self.size, 8).unwrap(); + unsafe { crate::alloc::dealloc(self.inner.as_ptr().cast(), layout) }; + } +} diff --git a/std/src/sys/pal/uefi/tests.rs b/std/src/sys/pal/uefi/tests.rs index 56ca999cc7e99..df3344e2df346 100644 --- a/std/src/sys/pal/uefi/tests.rs +++ b/std/src/sys/pal/uefi/tests.rs @@ -8,6 +8,20 @@ use crate::time::Duration; const SECS_IN_MINUTE: u64 = 60; +const MAX_UEFI_TIME: Duration = from_uefi(r_efi::efi::Time { + year: 9999, + month: 12, + day: 31, + hour: 23, + minute: 59, + second: 59, + nanosecond: 999_999_999, + timezone: 1440, + daylight: 0, + pad1: 0, + pad2: 0, +}); + #[test] fn align() { // UEFI ABI specifies that allocation alignment minimum is always 8. So this can be @@ -28,6 +42,19 @@ fn align() { } } +// UEFI Time cannot implement Eq due to uninitialized pad1 and pad2 +fn uefi_time_cmp(t1: r_efi::efi::Time, t2: r_efi::efi::Time) -> bool { + t1.year == t2.year + && t1.month == t2.month + && t1.day == t2.day + && t1.hour == t2.hour + && t1.minute == t2.minute + && t1.second == t2.second + && t1.nanosecond == t2.nanosecond + && t1.timezone == t2.timezone + && t1.daylight == t2.daylight +} + #[test] fn systemtime_start() { let t = r_efi::efi::Time { @@ -37,14 +64,15 @@ fn systemtime_start() { hour: 0, minute: 0, second: 0, + pad1: 0, nanosecond: 0, timezone: -1440, daylight: 0, pad2: 0, }; assert_eq!(from_uefi(&t), Duration::new(0, 0)); - assert_eq!(t, to_uefi(&from_uefi(&t), -1440, 0).unwrap()); - assert!(to_uefi(&from_uefi(&t), 0, 0).is_none()); + assert!(uefi_time_cmp(t, to_uefi(&from_uefi(&t), -1440, 0).unwrap())); + assert!(to_uefi(&from_uefi(&t), 0, 0).is_err()); } #[test] @@ -63,8 +91,8 @@ fn systemtime_utc_start() { pad2: 0, }; assert_eq!(from_uefi(&t), Duration::new(1440 * SECS_IN_MINUTE, 0)); - assert_eq!(t, to_uefi(&from_uefi(&t), 0, 0).unwrap()); - assert!(to_uefi(&from_uefi(&t), -1440, 0).is_some()); + assert!(uefi_time_cmp(t, to_uefi(&from_uefi(&t), 0, 0).unwrap())); + assert!(to_uefi(&from_uefi(&t), -1440, 0).is_ok()); } #[test] @@ -82,8 +110,49 @@ fn systemtime_end() { daylight: 0, pad2: 0, }; - assert!(to_uefi(&from_uefi(&t), 1440, 0).is_some()); - assert!(to_uefi(&from_uefi(&t), 1439, 0).is_none()); + assert!(to_uefi(&from_uefi(&t), 1440, 0).is_ok()); + assert!(to_uefi(&from_uefi(&t), 1439, 0).is_err()); +} + +#[test] +fn min_time() { + let inp = Duration::from_secs(1440 * SECS_IN_MINUTE); + let new_tz = to_uefi(&inp, 1440, 0).err().unwrap(); + assert_eq!(new_tz, 0); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); + + let inp = Duration::from_secs(1450 * SECS_IN_MINUTE); + let new_tz = to_uefi(&inp, 1440, 0).err().unwrap(); + assert_eq!(new_tz, 10); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); + + let inp = Duration::from_secs(1450 * SECS_IN_MINUTE + 10); + let new_tz = to_uefi(&inp, 1440, 0).err().unwrap(); + assert_eq!(new_tz, 10); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); + + let inp = Duration::from_secs(1430 * SECS_IN_MINUTE); +
let new_tz = to_uefi(&inp, 1440, 0).err().unwrap(); + assert_eq!(new_tz, -10); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); +} + +#[test] +fn max_time() { + let inp = MAX_UEFI_TIME.0; + let new_tz = to_uefi(&inp, -1440, 0).err().unwrap(); + assert_eq!(new_tz, 1440); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); + + let inp = MAX_UEFI_TIME.0 - Duration::from_secs(1440 * SECS_IN_MINUTE); + let new_tz = to_uefi(&inp, -1440, 0).err().unwrap(); + assert_eq!(new_tz, 0); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); + + let inp = MAX_UEFI_TIME.0 - Duration::from_secs(1440 * SECS_IN_MINUTE + 10); + let new_tz = to_uefi(&inp, -1440, 0).err().unwrap(); + assert_eq!(new_tz, 0); + assert!(to_uefi(&inp, new_tz, 0).is_ok()); } // UEFI IoSlice and IoSliceMut Tests diff --git a/std/src/sys/pal/uefi/time.rs b/std/src/sys/pal/uefi/time.rs index c6636626fd58a..f9f90a454976a 100644 --- a/std/src/sys/pal/uefi/time.rs +++ b/std/src/sys/pal/uefi/time.rs @@ -1,5 +1,7 @@ use crate::time::Duration; +const SECS_IN_MINUTE: u64 = 60; + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] pub struct Instant(Duration); @@ -70,13 +72,32 @@ impl SystemTime { Self(system_time_internal::from_uefi(&t)) } - #[expect(dead_code)] - pub(crate) const fn to_uefi(self, timezone: i16, daylight: u8) -> Option { - system_time_internal::to_uefi(&self.0, timezone, daylight) + pub(crate) const fn to_uefi( + self, + timezone: i16, + daylight: u8, + ) -> Result { + // system_time_internal::to_uefi requires a valid timezone. In case of unspecified timezone, + // we just pass 0 since it is assumed that no timezone related adjustments are required. + if timezone == r_efi::efi::UNSPECIFIED_TIMEZONE { + system_time_internal::to_uefi(&self.0, 0, daylight) + } else { + system_time_internal::to_uefi(&self.0, timezone, daylight) + } + } + + /// Create UEFI Time with the closest timezone (minute offset) that still allows the time to be + /// represented. + pub(crate) fn to_uefi_loose(self, timezone: i16, daylight: u8) -> r_efi::efi::Time { + match self.to_uefi(timezone, daylight) { + Ok(x) => x, + Err(tz) => self.to_uefi(tz, daylight).unwrap(), + } } pub fn now() -> SystemTime { system_time_internal::now() + .map(Self::from_uefi) .unwrap_or_else(|| panic!("time not implemented on this platform")) } @@ -104,12 +125,11 @@ pub(crate) mod system_time_internal { use crate::mem::MaybeUninit; use crate::ptr::NonNull; - const SECS_IN_MINUTE: u64 = 60; const SECS_IN_HOUR: u64 = SECS_IN_MINUTE * 60; const SECS_IN_DAY: u64 = SECS_IN_HOUR * 24; - const TIMEZONE_DELTA: u64 = 1440 * SECS_IN_MINUTE; + const SYSTEMTIME_TIMEZONE: i64 = -1440 * SECS_IN_MINUTE as i64; - pub fn now() -> Option { + pub(crate) fn now() -> Option

::Metadata ); -define!("mir_copy_for_deref", fn CopyForDeref(place: T) -> T); define!("mir_retag", fn Retag(place: T)); define!("mir_move", fn Move(place: T) -> T); define!("mir_static", fn Static(s: T) -> &'static T); diff --git a/core/src/intrinsics/mod.rs b/core/src/intrinsics/mod.rs index cef700be9ea1f..34031d7775aca 100644 --- a/core/src/intrinsics/mod.rs +++ b/core/src/intrinsics/mod.rs @@ -5,16 +5,16 @@ //! intrinsics via stable wrapper functions. Use these instead. //! //! These are the imports making intrinsics available to Rust code. The actual implementations live in the compiler. -//! Some of these intrinsics are lowered to MIR in . -//! The remaining intrinsics are implemented for the LLVM backend in -//! and , -//! and for const evaluation in . +//! Some of these intrinsics are lowered to MIR in . +//! The remaining intrinsics are implemented for the LLVM backend in +//! and , +//! and for const evaluation in . //! //! # Const intrinsics //! //! In order to make an intrinsic unstable usable at compile-time, copy the implementation from //! to -//! +//! //! and make the intrinsic declaration below a `const fn`. This should be done in coordination with //! wg-const-eval. //! @@ -55,8 +55,8 @@ #![allow(missing_docs)] use crate::ffi::va_list::{VaArgSafe, VaListImpl}; -use crate::marker::{ConstParamTy, DiscriminantKind, PointeeSized, Tuple}; -use crate::ptr; +use crate::marker::{ConstParamTy, Destruct, DiscriminantKind, PointeeSized, Tuple}; +use crate::{mem, ptr}; mod bounds; pub mod fallback; @@ -477,11 +477,15 @@ pub const fn unlikely(b: bool) -> bool { /// However unlike the public form, the intrinsic will not drop the value that /// is not selected. #[unstable(feature = "core_intrinsics", issue = "none")] +#[rustc_const_unstable(feature = "const_select_unpredictable", issue = "145938")] #[rustc_intrinsic] #[rustc_nounwind] #[miri::intrinsic_fallback_is_spec] #[inline] -pub fn select_unpredictable(b: bool, true_val: T, false_val: T) -> T { +pub const fn select_unpredictable(b: bool, true_val: T, false_val: T) -> T +where + T: [const] Destruct, +{ if b { true_val } else { false_val } } @@ -618,7 +622,7 @@ pub const fn forget(_: T); /// // Crucially, we `as`-cast to a raw pointer before `transmute`ing to a function pointer. /// // This avoids an integer-to-pointer `transmute`, which can be problematic. /// // Transmuting between raw pointers and function pointers (i.e., two pointer types) is fine. -/// let pointer = foo as *const (); +/// let pointer = foo as fn() -> i32 as *const (); /// let function = unsafe { /// std::mem::transmute::<*const (), fn() -> i32>(pointer) /// }; @@ -765,13 +769,9 @@ pub const fn forget(_: T); /// // in terms of converting the original inner type (`&i32`) to the new one (`Option<&i32>`), /// // this has all the same caveats. Besides the information provided above, also consult the /// // [`from_raw_parts`] documentation. +/// let (ptr, len, capacity) = v_clone.into_raw_parts(); /// let v_from_raw = unsafe { -// FIXME Update this when vec_into_raw_parts is stabilized -/// // Ensure the original vector is not dropped. 
-/// let mut v_clone = std::mem::ManuallyDrop::new(v_clone); -/// Vec::from_raw_parts(v_clone.as_mut_ptr() as *mut Option<&i32>, -/// v_clone.len(), -/// v_clone.capacity()) +/// Vec::from_raw_parts(ptr.cast::<*mut Option<&i32>>(), len, capacity) /// }; /// ``` /// @@ -2013,7 +2013,14 @@ pub const unsafe fn unchecked_mul(x: T, y: T) -> T; #[rustc_intrinsic_const_stable_indirect] #[rustc_nounwind] #[rustc_intrinsic] -pub const fn rotate_left(x: T, shift: u32) -> T; +#[rustc_allow_const_fn_unstable(const_trait_impl, funnel_shifts)] +#[miri::intrinsic_fallback_is_spec] +pub const fn rotate_left(x: T, shift: u32) -> T { + // Make sure to call the intrinsic for `funnel_shl`, not the fallback impl. + // SAFETY: we modulo `shift` so that the result is definitely less than the size of + // `T` in bits. + unsafe { unchecked_funnel_shl(x, x, shift % (mem::size_of::() as u32 * 8)) } +} /// Performs rotate right. /// @@ -2028,7 +2035,14 @@ pub const fn rotate_left(x: T, shift: u32) -> T; #[rustc_intrinsic_const_stable_indirect] #[rustc_nounwind] #[rustc_intrinsic] -pub const fn rotate_right(x: T, shift: u32) -> T; +#[rustc_allow_const_fn_unstable(const_trait_impl, funnel_shifts)] +#[miri::intrinsic_fallback_is_spec] +pub const fn rotate_right(x: T, shift: u32) -> T { + // Make sure to call the intrinsic for `funnel_shr`, not the fallback impl. + // SAFETY: we modulo `shift` so that the result is definitely less than the size of + // `T` in bits. + unsafe { unchecked_funnel_shr(x, x, shift % (mem::size_of::() as u32 * 8)) } +} /// Returns (a + b) mod 2N, where N is the width of T in bits. /// @@ -2585,6 +2599,24 @@ pub const fn ub_checks() -> bool { cfg!(ub_checks) } +/// Returns whether we should perform some overflow-checking at runtime. This eventually evaluates to +/// `cfg!(overflow_checks)`, but behaves different from `cfg!` when mixing crates built with different +/// flags: if the crate has overflow checks enabled or carries the `#[rustc_inherit_overflow_checks]` +/// attribute, evaluation is delayed until monomorphization (or until the call gets inlined into +/// a crate that does not delay evaluation further); otherwise it can happen any time. +/// +/// The common case here is a user program built with overflow_checks linked against the distributed +/// sysroot which is built without overflow_checks but with `#[rustc_inherit_overflow_checks]`. +/// For code that gets monomorphized in the user crate (i.e., generic functions and functions with +/// `#[inline]`), gating assertions on `overflow_checks()` rather than `cfg!(overflow_checks)` means that +/// assertions are enabled whenever the *user crate* has overflow checks enabled. However if the +/// user has overflow checks disabled, the checks will still get optimized out. +#[inline(always)] +#[rustc_intrinsic] +pub const fn overflow_checks() -> bool { + cfg!(debug_assertions) +} + /// Allocates a block of memory at compile time. /// At runtime, just returns a null pointer. /// @@ -2631,23 +2663,6 @@ pub const unsafe fn const_make_global(ptr: *mut u8) -> *const u8 { ptr } -/// Returns whether we should perform contract-checking at runtime. -/// -/// This is meant to be similar to the ub_checks intrinsic, in terms -/// of not prematurely committing at compile-time to whether contract -/// checking is turned on, so that we can specify contracts in libstd -/// and let an end user opt into turning them on. 
-#[rustc_const_unstable(feature = "contracts_internals", issue = "128044" /* compiler-team#759 */)] -#[unstable(feature = "contracts_internals", issue = "128044" /* compiler-team#759 */)] -#[inline(always)] -#[rustc_intrinsic] -pub const fn contract_checks() -> bool { - // FIXME: should this be `false` or `cfg!(contract_checks)`? - - // cfg!(contract_checks) - false -} - /// Check if the pre-condition `cond` has been met. /// /// By default, if `contract_checks` is enabled, this will panic with no unwind if the condition @@ -2668,7 +2683,7 @@ pub const fn contract_check_requires bool + Copy>(cond: C) { if const { // Do nothing } else { - if contract_checks() && !cond() { + if !cond() { // Emit no unwind panic in case this was a safety requirement. crate::panicking::panic_nounwind("failed requires check"); } @@ -2681,6 +2696,8 @@ pub const fn contract_check_requires bool + Copy>(cond: C) { /// By default, if `contract_checks` is enabled, this will panic with no unwind if the condition /// returns false. /// +/// If `cond` is `None`, then no postcondition checking is performed. +/// /// Note that this function is a no-op during constant evaluation. #[unstable(feature = "contracts_internals", issue = "128044")] // Similar to `contract_check_requires`, we need to use the user-facing @@ -2689,16 +2706,24 @@ pub const fn contract_check_requires bool + Copy>(cond: C) { #[rustc_const_unstable(feature = "contracts", issue = "128044")] #[lang = "contract_check_ensures"] #[rustc_intrinsic] -pub const fn contract_check_ensures bool + Copy, Ret>(cond: C, ret: Ret) -> Ret { +pub const fn contract_check_ensures bool + Copy, Ret>( + cond: Option, + ret: Ret, +) -> Ret { const_eval_select!( - @capture[C: Fn(&Ret) -> bool + Copy, Ret] { cond: C, ret: Ret } -> Ret : + @capture[C: Fn(&Ret) -> bool + Copy, Ret] { cond: Option, ret: Ret } -> Ret : if const { // Do nothing ret } else { - if contract_checks() && !cond(&ret) { - // Emit no unwind panic in case this was a safety requirement. - crate::panicking::panic_nounwind("failed ensures check"); + match cond { + crate::option::Option::Some(cond) => { + if !cond(&ret) { + // Emit no unwind panic in case this was a safety requirement. + crate::panicking::panic_nounwind("failed ensures check"); + } + }, + crate::option::Option::None => {}, } ret } @@ -2735,6 +2760,11 @@ pub unsafe fn vtable_align(ptr: *const ()) -> usize; /// More specifically, this is the offset in bytes between successive /// items of the same type, including alignment padding. /// +/// Note that, unlike most intrinsics, this can only be called at compile-time +/// as backends do not have an implementation for it. The only caller (its +/// stable counterpart) wraps this intrinsic call in a `const` block so that +/// backends only see an evaluated constant. +/// /// The stabilized version of this intrinsic is [`core::mem::size_of`]. #[rustc_nounwind] #[unstable(feature = "core_intrinsics", issue = "none")] @@ -2749,6 +2779,11 @@ pub const fn size_of() -> usize; /// Therefore, implementations must not require the user to uphold /// any safety invariants. /// +/// Note that, unlike most intrinsics, this can only be called at compile-time +/// as backends do not have an implementation for it. The only caller (its +/// stable counterpart) wraps this intrinsic call in a `const` block so that +/// backends only see an evaluated constant. +/// /// The stabilized version of this intrinsic is [`core::mem::align_of`]. 
#[rustc_nounwind] #[unstable(feature = "core_intrinsics", issue = "none")] @@ -2756,6 +2791,26 @@ pub const fn size_of() -> usize; #[rustc_intrinsic] pub const fn align_of() -> usize; +/// The offset of a field inside a type. +/// +/// Note that, unlike most intrinsics, this is safe to call; +/// it does not require an `unsafe` block. +/// Therefore, implementations must not require the user to uphold +/// any safety invariants. +/// +/// This intrinsic can only be evaluated at compile-time, and should only appear in +/// constants or inline const blocks. +/// +/// The stabilized version of this intrinsic is [`core::mem::offset_of`]. +/// This intrinsic is also a lang item so `offset_of!` can desugar to calls to it. +#[rustc_nounwind] +#[unstable(feature = "core_intrinsics", issue = "none")] +#[rustc_const_unstable(feature = "core_intrinsics", issue = "none")] +#[rustc_intrinsic_const_stable_indirect] +#[rustc_intrinsic] +#[lang = "offset_of"] +pub const fn offset_of(variant: u32, field: u32) -> usize; + /// Returns the number of variants of the type `T` cast to a `usize`; /// if `T` has no variants, returns `0`. Uninhabited variants will be counted. /// diff --git a/core/src/intrinsics/simd.rs b/core/src/intrinsics/simd.rs index 19488082cc33d..722a765cd01ee 100644 --- a/core/src/intrinsics/simd.rs +++ b/core/src/intrinsics/simd.rs @@ -2,6 +2,8 @@ //! //! In this module, a "vector" is any `repr(simd)` type. +use crate::marker::ConstParamTy; + /// Inserts an element into a vector, returning the updated vector. /// /// `T` must be a vector with element type `U`, and `idx` must be `const`. @@ -62,21 +64,21 @@ pub unsafe fn simd_extract_dyn(x: T, idx: u32) -> U { /// `T` must be a vector of integers or floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_add(x: T, y: T) -> T; +pub const unsafe fn simd_add(x: T, y: T) -> T; /// Subtracts `rhs` from `lhs` elementwise. /// /// `T` must be a vector of integers or floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_sub(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_sub(lhs: T, rhs: T) -> T; /// Multiplies two simd vectors elementwise. /// /// `T` must be a vector of integers or floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_mul(x: T, y: T) -> T; +pub const unsafe fn simd_mul(x: T, y: T) -> T; /// Divides `lhs` by `rhs` elementwise. /// @@ -87,7 +89,7 @@ pub unsafe fn simd_mul(x: T, y: T) -> T; /// Additionally for signed integers, `::MIN / -1` is undefined behavior. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_div(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_div(lhs: T, rhs: T) -> T; /// Returns remainder of two vectors elementwise. /// @@ -98,7 +100,7 @@ pub unsafe fn simd_div(lhs: T, rhs: T) -> T; /// Additionally for signed integers, `::MIN / -1` is undefined behavior. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_rem(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_rem(lhs: T, rhs: T) -> T; /// Shifts vector left elementwise, with UB on overflow. /// @@ -111,7 +113,7 @@ pub unsafe fn simd_rem(lhs: T, rhs: T) -> T; /// Each element of `rhs` must be less than `::BITS`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_shl(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_shl(lhs: T, rhs: T) -> T; /// Shifts vector right elementwise, with UB on overflow. /// @@ -124,7 +126,7 @@ pub unsafe fn simd_shl(lhs: T, rhs: T) -> T; /// Each element of `rhs` must be less than `::BITS`. 
#[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_shr(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_shr(lhs: T, rhs: T) -> T; /// Funnel Shifts vector left elementwise, with UB on overflow. /// @@ -141,7 +143,7 @@ pub unsafe fn simd_shr(lhs: T, rhs: T) -> T; /// Each element of `shift` must be less than `::BITS`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_funnel_shl(a: T, b: T, shift: T) -> T; +pub const unsafe fn simd_funnel_shl(a: T, b: T, shift: T) -> T; /// Funnel Shifts vector right elementwise, with UB on overflow. /// @@ -158,28 +160,28 @@ pub unsafe fn simd_funnel_shl(a: T, b: T, shift: T) -> T; /// Each element of `shift` must be less than `::BITS`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_funnel_shr(a: T, b: T, shift: T) -> T; +pub const unsafe fn simd_funnel_shr(a: T, b: T, shift: T) -> T; /// "And"s vectors elementwise. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_and(x: T, y: T) -> T; +pub const unsafe fn simd_and(x: T, y: T) -> T; /// "Ors" vectors elementwise. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_or(x: T, y: T) -> T; +pub const unsafe fn simd_or(x: T, y: T) -> T; /// "Exclusive ors" vectors elementwise. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_xor(x: T, y: T) -> T; +pub const unsafe fn simd_xor(x: T, y: T) -> T; /// Numerically casts a vector, elementwise. /// @@ -200,7 +202,7 @@ pub unsafe fn simd_xor(x: T, y: T) -> T; /// * Be representable in the return type, after truncating off its fractional part #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_cast(x: T) -> U; +pub const unsafe fn simd_cast(x: T) -> U; /// Numerically casts a vector, elementwise. /// @@ -214,7 +216,7 @@ pub unsafe fn simd_cast(x: T) -> U; /// Otherwise, truncates or extends the value, maintaining the sign for signed integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_as(x: T) -> U; +pub const unsafe fn simd_as(x: T) -> U; /// Negates a vector elementwise. /// @@ -223,14 +225,14 @@ pub unsafe fn simd_as(x: T) -> U; /// Rust panics for `-::Min` due to overflow, but it is not UB with this intrinsic. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_neg(x: T) -> T; +pub const unsafe fn simd_neg(x: T) -> T; /// Returns absolute value of a vector, elementwise. /// /// `T` must be a vector of floating-point primitive types. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_fabs(x: T) -> T; +pub const unsafe fn simd_fabs(x: T) -> T; /// Returns the minimum of two vectors, elementwise. /// @@ -239,7 +241,7 @@ pub unsafe fn simd_fabs(x: T) -> T; /// Follows IEEE-754 `minNum` semantics. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_fmin(x: T, y: T) -> T; +pub const unsafe fn simd_fmin(x: T, y: T) -> T; /// Returns the maximum of two vectors, elementwise. /// @@ -248,7 +250,7 @@ pub unsafe fn simd_fmin(x: T, y: T) -> T; /// Follows IEEE-754 `maxNum` semantics. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_fmax(x: T, y: T) -> T; +pub const unsafe fn simd_fmax(x: T, y: T) -> T; /// Tests elementwise equality of two vectors. /// @@ -259,7 +261,7 @@ pub unsafe fn simd_fmax(x: T, y: T) -> T; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_eq(x: T, y: T) -> U; +pub const unsafe fn simd_eq(x: T, y: T) -> U; /// Tests elementwise inequality equality of two vectors. 
/// @@ -270,7 +272,7 @@ pub unsafe fn simd_eq(x: T, y: T) -> U; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_ne(x: T, y: T) -> U; +pub const unsafe fn simd_ne(x: T, y: T) -> U; /// Tests if `x` is less than `y`, elementwise. /// @@ -281,7 +283,7 @@ pub unsafe fn simd_ne(x: T, y: T) -> U; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_lt(x: T, y: T) -> U; +pub const unsafe fn simd_lt(x: T, y: T) -> U; /// Tests if `x` is less than or equal to `y`, elementwise. /// @@ -292,7 +294,7 @@ pub unsafe fn simd_lt(x: T, y: T) -> U; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_le(x: T, y: T) -> U; +pub const unsafe fn simd_le(x: T, y: T) -> U; /// Tests if `x` is greater than `y`, elementwise. /// @@ -303,7 +305,7 @@ pub unsafe fn simd_le(x: T, y: T) -> U; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_gt(x: T, y: T) -> U; +pub const unsafe fn simd_gt(x: T, y: T) -> U; /// Tests if `x` is greater than or equal to `y`, elementwise. /// @@ -314,7 +316,7 @@ pub unsafe fn simd_gt(x: T, y: T) -> U; /// Returns `0` for false and `!0` for true. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_ge(x: T, y: T) -> U; +pub const unsafe fn simd_ge(x: T, y: T) -> U; /// Shuffles two vectors by const indices. /// @@ -330,7 +332,7 @@ pub unsafe fn simd_ge(x: T, y: T) -> U; /// of `xy`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_shuffle(x: T, y: T, idx: U) -> V; +pub const unsafe fn simd_shuffle(x: T, y: T, idx: U) -> V; /// Reads a vector of pointers. /// @@ -351,7 +353,7 @@ pub unsafe fn simd_shuffle(x: T, y: T, idx: U) -> V; /// `mask` must only contain `0` or `!0` values. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_gather(val: T, ptr: U, mask: V) -> T; +pub const unsafe fn simd_gather(val: T, ptr: U, mask: V) -> T; /// Writes to a vector of pointers. /// @@ -375,7 +377,20 @@ pub unsafe fn simd_gather(val: T, ptr: U, mask: V) -> T; /// `mask` must only contain `0` or `!0` values. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_scatter(val: T, ptr: U, mask: V); +pub const unsafe fn simd_scatter(val: T, ptr: U, mask: V); + +/// A type for alignment options for SIMD masked load/store intrinsics. +#[derive(Debug, ConstParamTy, PartialEq, Eq)] +pub enum SimdAlign { + // These values must match the compiler's `SimdAlign` defined in + // `rustc_middle/src/ty/consts/int.rs`! + /// No alignment requirements on the pointer + Unaligned = 0, + /// The pointer must be aligned to the element type of the SIMD vector + Element = 1, + /// The pointer must be aligned to the SIMD vector type + Vector = 2, +} /// Reads a vector of pointers. /// @@ -392,13 +407,13 @@ pub unsafe fn simd_scatter(val: T, ptr: U, mask: V); /// `val`. /// /// # Safety -/// Unmasked values in `T` must be readable as if by `::read` (e.g. aligned to the element -/// type). +/// `ptr` must be aligned according to the `ALIGN` parameter, see [`SimdAlign`] for details. /// /// `mask` must only contain `0` or `!0` values. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_masked_load(mask: V, ptr: U, val: T) -> T; +pub const unsafe fn simd_masked_load(mask: V, ptr: U, val: T) +-> T; /// Writes to a vector of pointers. /// @@ -414,20 +429,19 @@ pub unsafe fn simd_masked_load(mask: V, ptr: U, val: T) -> T; /// Otherwise if the corresponding value in `mask` is `0`, do nothing. 
/// /// # Safety -/// Unmasked values in `T` must be writeable as if by `::write` (e.g. aligned to the element -/// type). +/// `ptr` must be aligned according to the `ALIGN` parameter, see [`SimdAlign`] for details. /// /// `mask` must only contain `0` or `!0` values. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_masked_store(mask: V, ptr: U, val: T); +pub const unsafe fn simd_masked_store(mask: V, ptr: U, val: T); /// Adds two simd vectors elementwise, with saturation. /// /// `T` must be a vector of integer primitive types. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_saturating_add(x: T, y: T) -> T; +pub const unsafe fn simd_saturating_add(x: T, y: T) -> T; /// Subtracts two simd vectors elementwise, with saturation. /// @@ -436,7 +450,7 @@ pub unsafe fn simd_saturating_add(x: T, y: T) -> T; /// Subtract `rhs` from `lhs`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_saturating_sub(lhs: T, rhs: T) -> T; +pub const unsafe fn simd_saturating_sub(lhs: T, rhs: T) -> T; /// Adds elements within a vector from left to right. /// @@ -447,7 +461,7 @@ pub unsafe fn simd_saturating_sub(lhs: T, rhs: T) -> T; /// Starting with the value `y`, add the elements of `x` and accumulate. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_add_ordered(x: T, y: U) -> U; +pub const unsafe fn simd_reduce_add_ordered(x: T, y: U) -> U; /// Adds elements within a vector in arbitrary order. May also be re-associated with /// unordered additions on the inputs/outputs. @@ -468,7 +482,7 @@ pub unsafe fn simd_reduce_add_unordered(x: T) -> U; /// Starting with the value `y`, multiply the elements of `x` and accumulate. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_mul_ordered(x: T, y: U) -> U; +pub const unsafe fn simd_reduce_mul_ordered(x: T, y: U) -> U; /// Multiplies elements within a vector in arbitrary order. May also be re-associated with /// unordered additions on the inputs/outputs. @@ -488,7 +502,7 @@ pub unsafe fn simd_reduce_mul_unordered(x: T) -> U; /// `x` must contain only `0` or `!0`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_all(x: T) -> bool; +pub const unsafe fn simd_reduce_all(x: T) -> bool; /// Checks if any mask value is true. /// @@ -498,7 +512,7 @@ pub unsafe fn simd_reduce_all(x: T) -> bool; /// `x` must contain only `0` or `!0`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_any(x: T) -> bool; +pub const unsafe fn simd_reduce_any(x: T) -> bool; /// Returns the maximum element of a vector. /// @@ -509,7 +523,7 @@ pub unsafe fn simd_reduce_any(x: T) -> bool; /// For floating-point values, uses IEEE-754 `maxNum`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_max(x: T) -> U; +pub const unsafe fn simd_reduce_max(x: T) -> U; /// Returns the minimum element of a vector. /// @@ -520,7 +534,7 @@ pub unsafe fn simd_reduce_max(x: T) -> U; /// For floating-point values, uses IEEE-754 `minNum`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_min(x: T) -> U; +pub const unsafe fn simd_reduce_min(x: T) -> U; /// Logical "and"s all elements together. /// @@ -529,7 +543,7 @@ pub unsafe fn simd_reduce_min(x: T) -> U; /// `U` must be the element type of `T`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_and(x: T) -> U; +pub const unsafe fn simd_reduce_and(x: T) -> U; /// Logical "ors" all elements together. /// @@ -538,7 +552,7 @@ pub unsafe fn simd_reduce_and(x: T) -> U; /// `U` must be the element type of `T`. 
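The ordered reductions above are specified as a left-to-right fold that starts from the extra accumulator argument. The scalar sketch below (illustrative names, not part of the patch) mirrors that; it is exactly the associativity that the `_unordered` variants are allowed to break, which matters for floating point.

```rust
/// Ordered reduction: fold the lanes left to right, starting from `y`.
fn reduce_add_ordered(x: &[f32], y: f32) -> f32 {
    x.iter().fold(y, |acc, &lane| acc + lane)
}

fn main() {
    // All values are exactly representable, so the comparison is exact.
    assert_eq!(reduce_add_ordered(&[1.0, 2.0, 3.0, 4.0], 10.0), 20.0);
}
```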
#[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_or(x: T) -> U; +pub const unsafe fn simd_reduce_or(x: T) -> U; /// Logical "exclusive ors" all elements together. /// @@ -547,7 +561,7 @@ pub unsafe fn simd_reduce_or(x: T) -> U; /// `U` must be the element type of `T`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_reduce_xor(x: T) -> U; +pub const unsafe fn simd_reduce_xor(x: T) -> U; /// Truncates an integer vector to a bitmask. /// @@ -584,7 +598,7 @@ pub unsafe fn simd_reduce_xor(x: T) -> U; /// `x` must contain only `0` and `!0`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_bitmask(x: T) -> U; +pub const unsafe fn simd_bitmask(x: T) -> U; /// Selects elements from a mask. /// @@ -600,7 +614,7 @@ pub unsafe fn simd_bitmask(x: T) -> U; /// `mask` must only contain `0` and `!0`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_select(mask: M, if_true: T, if_false: T) -> T; +pub const unsafe fn simd_select(mask: M, if_true: T, if_false: T) -> T; /// Selects elements from a bitmask. /// @@ -616,7 +630,7 @@ pub unsafe fn simd_select(mask: M, if_true: T, if_false: T) -> T; /// The bitmask bit order matches `simd_bitmask`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_select_bitmask(m: M, yes: T, no: T) -> T; +pub const unsafe fn simd_select_bitmask(m: M, yes: T, no: T) -> T; /// Calculates the offset from a pointer vector elementwise, potentially /// wrapping. @@ -628,14 +642,14 @@ pub unsafe fn simd_select_bitmask(m: M, yes: T, no: T) -> T; /// Operates as if by `::wrapping_offset`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_arith_offset(ptr: T, offset: U) -> T; +pub const unsafe fn simd_arith_offset(ptr: T, offset: U) -> T; /// Casts a vector of pointers. /// /// `T` and `U` must be vectors of pointers with the same number of elements. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_cast_ptr(ptr: T) -> U; +pub const unsafe fn simd_cast_ptr(ptr: T) -> U; /// Exposes a vector of pointers as a vector of addresses. /// @@ -653,56 +667,56 @@ pub unsafe fn simd_expose_provenance(ptr: T) -> U; /// `U` must be a vector of pointers, with the same length as `T`. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_with_exposed_provenance(addr: T) -> U; +pub const unsafe fn simd_with_exposed_provenance(addr: T) -> U; /// Swaps bytes of each element. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_bswap(x: T) -> T; +pub const unsafe fn simd_bswap(x: T) -> T; /// Reverses bits of each element. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_bitreverse(x: T) -> T; +pub const unsafe fn simd_bitreverse(x: T) -> T; /// Counts the leading zeros of each element. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_ctlz(x: T) -> T; +pub const unsafe fn simd_ctlz(x: T) -> T; /// Counts the number of ones in each element. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_ctpop(x: T) -> T; +pub const unsafe fn simd_ctpop(x: T) -> T; /// Counts the trailing zeros of each element. /// /// `T` must be a vector of integers. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_cttz(x: T) -> T; +pub const unsafe fn simd_cttz(x: T) -> T; /// Rounds up each element to the next highest integer-valued float. /// /// `T` must be a vector of floats. 
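Each of the bit-manipulation intrinsics above applies the corresponding scalar operation to every lane; for reference, the scalar equivalents (illustrative values, not part of the patch):

```rust
fn main() {
    // Scalar counterparts of the per-lane operations, one per intrinsic.
    assert_eq!(0x1234_5678_u32.swap_bytes(), 0x7856_3412); // simd_bswap
    assert_eq!(1_u32.reverse_bits(), 1 << 31);             // simd_bitreverse
    assert_eq!(0b0010_u32.leading_zeros(), 30);            // simd_ctlz
    assert_eq!(0xFF00_u32.count_ones(), 8);                // simd_ctpop
    assert_eq!(0b0010_u32.trailing_zeros(), 1);            // simd_cttz
}
```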
#[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_ceil(x: T) -> T; +pub const unsafe fn simd_ceil(x: T) -> T; /// Rounds down each element to the next lowest integer-valued float. /// /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_floor(x: T) -> T; +pub const unsafe fn simd_floor(x: T) -> T; /// Rounds each element to the closest integer-valued float. /// Ties are resolved by rounding away from 0. @@ -710,7 +724,7 @@ pub unsafe fn simd_floor(x: T) -> T; /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_round(x: T) -> T; +pub const unsafe fn simd_round(x: T) -> T; /// Rounds each element to the closest integer-valued float. /// Ties are resolved by rounding to the number with an even least significant digit @@ -718,7 +732,7 @@ pub unsafe fn simd_round(x: T) -> T; /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_round_ties_even(x: T) -> T; +pub const unsafe fn simd_round_ties_even(x: T) -> T; /// Returns the integer part of each element as an integer-valued float. /// In other words, non-integer values are truncated towards zero. @@ -726,7 +740,7 @@ pub unsafe fn simd_round_ties_even(x: T) -> T; /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_trunc(x: T) -> T; +pub const unsafe fn simd_trunc(x: T) -> T; /// Takes the square root of each element. /// @@ -740,7 +754,7 @@ pub unsafe fn simd_fsqrt(x: T) -> T; /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_fma(x: T, y: T, z: T) -> T; +pub const unsafe fn simd_fma(x: T, y: T, z: T) -> T; /// Computes `(x*y) + z` for each element, non-deterministically executing either /// a fused multiply-add or two operations with rounding of the intermediate result. @@ -755,7 +769,7 @@ pub unsafe fn simd_fma(x: T, y: T, z: T) -> T; /// `T` must be a vector of floats. #[rustc_intrinsic] #[rustc_nounwind] -pub unsafe fn simd_relaxed_fma(x: T, y: T, z: T) -> T; +pub const unsafe fn simd_relaxed_fma(x: T, y: T, z: T) -> T; // Computes the sine of each element. /// diff --git a/core/src/iter/adapters/copied.rs b/core/src/iter/adapters/copied.rs index 23e4e25ab5388..9627ace29795c 100644 --- a/core/src/iter/adapters/copied.rs +++ b/core/src/iter/adapters/copied.rs @@ -24,6 +24,12 @@ impl Copied { pub(in crate::iter) fn new(it: I) -> Copied { Copied { it } } + + #[doc(hidden)] + #[unstable(feature = "copied_into_inner", issue = "none")] + pub fn into_inner(self) -> I { + self.it + } } fn copy_fold(mut f: impl FnMut(Acc, T) -> Acc) -> impl FnMut(Acc, &T) -> Acc { diff --git a/core/src/iter/sources/repeat.rs b/core/src/iter/sources/repeat.rs index 4bcd5b16aea6a..f578ae86a9fce 100644 --- a/core/src/iter/sources/repeat.rs +++ b/core/src/iter/sources/repeat.rs @@ -8,6 +8,9 @@ use crate::num::NonZero; /// Infinite iterators like `repeat()` are often used with adapters like /// [`Iterator::take()`], in order to make them finite. /// +/// If you know the number of repetitions in advance, consider using [`repeat_n()`] +/// instead, as it is more efficient and conveys the intent more clearly. +/// /// Use [`str::repeat()`] instead of this function if you just want to repeat /// a char/string `n` times. /// @@ -15,6 +18,7 @@ use crate::num::NonZero; /// or if you do not want to keep the repeated element in memory, you can /// instead use the [`repeat_with()`] function. 
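Looking back at the SIMD rounding intrinsics earlier in this chunk: they map to the scalar float methods, which makes the tie-breaking difference between `simd_round` and `simd_round_ties_even` easy to check (illustrative values, not part of the patch):

```rust
fn main() {
    assert_eq!((2.5_f32).round(), 3.0);           // simd_round: ties away from zero
    assert_eq!((2.5_f32).round_ties_even(), 2.0); // simd_round_ties_even
    assert_eq!((-2.5_f32).ceil(), -2.0);          // simd_ceil
    assert_eq!((-2.5_f32).floor(), -3.0);         // simd_floor
    assert_eq!((-2.5_f32).trunc(), -2.0);         // simd_trunc
}
```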
/// +/// [`repeat_n()`]: crate::iter::repeat_n /// [`repeat_with()`]: crate::iter::repeat_with /// [`str::repeat()`]: ../../std/primitive.str.html#method.repeat /// @@ -97,8 +101,9 @@ impl Iterator for Repeat { Some(self.element.clone()) } + #[track_caller] fn last(self) -> Option { - Some(self.element) + panic!("iterator is infinite"); } #[track_caller] diff --git a/core/src/iter/traits/collect.rs b/core/src/iter/traits/collect.rs index ab27650067980..cdf81385bdafb 100644 --- a/core/src/iter/traits/collect.rs +++ b/core/src/iter/traits/collect.rs @@ -455,234 +455,274 @@ impl Extend<()> for () { fn extend_one(&mut self, _item: ()) {} } -macro_rules! spec_tuple_impl { - ( - ( - $ty_name:ident, $var_name:ident, $extend_ty_name: ident, - $trait_name:ident, $default_fn_name:ident, $cnt:tt - ), - ) => { - spec_tuple_impl!( - $trait_name, - $default_fn_name, - #[doc(fake_variadic)] - #[doc = "This trait is implemented for tuples up to twelve items long. The `impl`s for \ - 1- and 3- through 12-ary tuples were stabilized after 2-tuples, in \ - 1.85.0."] - => ($ty_name, $var_name, $extend_ty_name, $cnt), - ); - }; - ( - ( - $ty_name:ident, $var_name:ident, $extend_ty_name: ident, - $trait_name:ident, $default_fn_name:ident, $cnt:tt - ), - $( - ( - $ty_names:ident, $var_names:ident, $extend_ty_names:ident, - $trait_names:ident, $default_fn_names:ident, $cnts:tt - ), - )* - ) => { - spec_tuple_impl!( - $( - ( - $ty_names, $var_names, $extend_ty_names, - $trait_names, $default_fn_names, $cnts - ), - )* - ); - spec_tuple_impl!( - $trait_name, - $default_fn_name, - #[doc(hidden)] - => ( - $ty_name, $var_name, $extend_ty_name, $cnt - ), - $( - ( - $ty_names, $var_names, $extend_ty_names, $cnts - ), - )* - ); - }; - ( - $trait_name:ident, $default_fn_name:ident, #[$meta:meta] - $(#[$doctext:meta])? => $( - ( - $ty_names:ident, $var_names:ident, $extend_ty_names:ident, $cnts:tt - ), - )* - ) => { - #[$meta] - $(#[$doctext])? - #[stable(feature = "extend_for_tuple", since = "1.56.0")] - impl<$($ty_names,)* $($extend_ty_names,)*> Extend<($($ty_names,)*)> for ($($extend_ty_names,)*) - where - $($extend_ty_names: Extend<$ty_names>,)* - { - /// Allows to `extend` a tuple of collections that also implement `Extend`. - /// - /// See also: [`Iterator::unzip`] - /// - /// # Examples - /// ``` - /// // Example given for a 2-tuple, but 1- through 12-tuples are supported - /// let mut tuple = (vec![0], vec![1]); - /// tuple.extend([(2, 3), (4, 5), (6, 7)]); - /// assert_eq!(tuple.0, [0, 2, 4, 6]); - /// assert_eq!(tuple.1, [1, 3, 5, 7]); - /// - /// // also allows for arbitrarily nested tuples as elements - /// let mut nested_tuple = (vec![1], (vec![2], vec![3])); - /// nested_tuple.extend([(4, (5, 6)), (7, (8, 9))]); - /// - /// let (a, (b, c)) = nested_tuple; - /// assert_eq!(a, [1, 4, 7]); - /// assert_eq!(b, [2, 5, 8]); - /// assert_eq!(c, [3, 6, 9]); - /// ``` - fn extend>(&mut self, into_iter: T) { - let ($($var_names,)*) = self; - let iter = into_iter.into_iter(); - $trait_name::extend(iter, $($var_names,)*); - } +/// This trait is implemented for tuples up to twelve items long. The `impl`s for +/// 1- and 3- through 12-ary tuples were stabilized after 2-tuples, in 1.85.0. +#[doc(fake_variadic)] // the other implementations are below. +#[stable(feature = "extend_for_tuple", since = "1.56.0")] +impl Extend<(T,)> for (ExtendT,) +where + ExtendT: Extend, +{ + /// Allows to `extend` a tuple of collections that also implement `Extend`. 
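Returning to the `repeat()` documentation change above, a usage-level illustration of the new `repeat_n` recommendation and the `last` change (illustrative values, not part of the patch):

```rust
use std::iter::{repeat, repeat_n};

fn main() {
    // `repeat_n` states the repetition count up front...
    let fixed: Vec<i32> = repeat_n(7, 3).collect();
    // ...instead of truncating an infinite iterator with `take`.
    let truncated: Vec<i32> = repeat(7).take(3).collect();
    assert_eq!(fixed, truncated);
    // With the change above, `repeat(7).last()` now panics (the iterator is
    // infinite) instead of returning `Some(7)`.
}
```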
+ /// + /// See also: [`Iterator::unzip`] + /// + /// # Examples + /// ``` + /// // Example given for a 2-tuple, but 1- through 12-tuples are supported + /// let mut tuple = (vec![0], vec![1]); + /// tuple.extend([(2, 3), (4, 5), (6, 7)]); + /// assert_eq!(tuple.0, [0, 2, 4, 6]); + /// assert_eq!(tuple.1, [1, 3, 5, 7]); + /// + /// // also allows for arbitrarily nested tuples as elements + /// let mut nested_tuple = (vec![1], (vec![2], vec![3])); + /// nested_tuple.extend([(4, (5, 6)), (7, (8, 9))]); + /// + /// let (a, (b, c)) = nested_tuple; + /// assert_eq!(a, [1, 4, 7]); + /// assert_eq!(b, [2, 5, 8]); + /// assert_eq!(c, [3, 6, 9]); + /// ``` + fn extend>(&mut self, iter: I) { + self.0.extend(iter.into_iter().map(|t| t.0)); + } - fn extend_one(&mut self, item: ($($ty_names,)*)) { - $(self.$cnts.extend_one(item.$cnts);)* - } + fn extend_one(&mut self, item: (T,)) { + self.0.extend_one(item.0) + } - fn extend_reserve(&mut self, additional: usize) { - $(self.$cnts.extend_reserve(additional);)* - } + fn extend_reserve(&mut self, additional: usize) { + self.0.extend_reserve(additional) + } - unsafe fn extend_one_unchecked(&mut self, item: ($($ty_names,)*)) { - // SAFETY: Those are our safety preconditions, and we correctly forward `extend_reserve`. - unsafe { - $(self.$cnts.extend_one_unchecked(item.$cnts);)* - } - } - } + unsafe fn extend_one_unchecked(&mut self, item: (T,)) { + // SAFETY: the caller guarantees all preconditions. + unsafe { self.0.extend_one_unchecked(item.0) } + } +} - trait $trait_name<$($ty_names),*> { - fn extend(self, $($var_names: &mut $ty_names,)*); - } +/// This implementation turns an iterator of tuples into a tuple of types which implement +/// [`Default`] and [`Extend`]. +/// +/// This is similar to [`Iterator::unzip`], but is also composable with other [`FromIterator`] +/// implementations: +/// +/// ```rust +/// # fn main() -> Result<(), core::num::ParseIntError> { +/// let string = "1,2,123,4"; +/// +/// // Example given for a 2-tuple, but 1- through 12-tuples are supported +/// let (numbers, lengths): (Vec<_>, Vec<_>) = string +/// .split(',') +/// .map(|s| s.parse().map(|n: u32| (n, s.len()))) +/// .collect::>()?; +/// +/// assert_eq!(numbers, [1, 2, 123, 4]); +/// assert_eq!(lengths, [1, 1, 3, 1]); +/// # Ok(()) } +/// ``` +#[doc(fake_variadic)] // the other implementations are below. +#[stable(feature = "from_iterator_for_tuple", since = "1.79.0")] +impl FromIterator<(T,)> for (ExtendT,) +where + ExtendT: Default + Extend, +{ + fn from_iter>(iter: Iter) -> Self { + let mut res = ExtendT::default(); + res.extend(iter.into_iter().map(|t| t.0)); + (res,) + } +} - fn $default_fn_name<$($ty_names,)* $($extend_ty_names,)*>( - iter: impl Iterator, - $($var_names: &mut $extend_ty_names,)* - ) where - $($extend_ty_names: Extend<$ty_names>,)* - { - fn extend<'a, $($ty_names,)*>( - $($var_names: &'a mut impl Extend<$ty_names>,)* - ) -> impl FnMut((), ($($ty_names,)*)) + 'a { - #[allow(non_snake_case)] - move |(), ($($extend_ty_names,)*)| { - $($var_names.extend_one($extend_ty_names);)* - } - } +/// An implementation of [`extend`](Extend::extend) that calls `extend_one` or +/// `extend_one_unchecked` for each element of the iterator. +fn default_extend(collection: &mut ExtendT, iter: I) +where + ExtendT: Extend, + I: IntoIterator, +{ + // Specialize on `TrustedLen` and call `extend_one_unchecked` where + // applicable. 
+ trait SpecExtend { + fn extend(&mut self, iter: I); + } + // Extracting these to separate functions avoid monomorphising the closures + // for every iterator type. + fn extender(collection: &mut ExtendT) -> impl FnMut(T) + use<'_, ExtendT, T> + where + ExtendT: Extend, + { + move |item| collection.extend_one(item) + } + + unsafe fn unchecked_extender( + collection: &mut ExtendT, + ) -> impl FnMut(T) + use<'_, ExtendT, T> + where + ExtendT: Extend, + { + // SAFETY: we make sure that there is enough space at the callsite of + // this function. + move |item| unsafe { collection.extend_one_unchecked(item) } + } + + impl SpecExtend for ExtendT + where + ExtendT: Extend, + I: Iterator, + { + default fn extend(&mut self, iter: I) { let (lower_bound, _) = iter.size_hint(); if lower_bound > 0 { - $($var_names.extend_reserve(lower_bound);)* + self.extend_reserve(lower_bound); } - iter.fold((), extend($($var_names,)*)); + iter.for_each(extender(self)) } + } - impl<$($ty_names,)* $($extend_ty_names,)* Iter> $trait_name<$($extend_ty_names),*> for Iter - where - $($extend_ty_names: Extend<$ty_names>,)* - Iter: Iterator, - { - default fn extend(self, $($var_names: &mut $extend_ty_names),*) { - $default_fn_name(self, $($var_names),*); + impl SpecExtend for ExtendT + where + ExtendT: Extend, + I: TrustedLen, + { + fn extend(&mut self, iter: I) { + let (lower_bound, upper_bound) = iter.size_hint(); + if lower_bound > 0 { + self.extend_reserve(lower_bound); + } + + if upper_bound.is_none() { + // We cannot reserve more than `usize::MAX` items, and this is likely to go out of memory anyway. + iter.for_each(extender(self)) + } else { + // SAFETY: We reserve enough space for the `size_hint`, and the iterator is + // `TrustedLen` so its `size_hint` is exact. + iter.for_each(unsafe { unchecked_extender(self) }) } } + } - impl<$($ty_names,)* $($extend_ty_names,)* Iter> $trait_name<$($extend_ty_names),*> for Iter + SpecExtend::extend(collection, iter.into_iter()); +} + +// Implements `Extend` and `FromIterator` for tuples with length larger than one. +macro_rules! impl_extend_tuple { + ($(($ty:tt, $extend_ty:tt, $index:tt)),+) => { + #[doc(hidden)] + #[stable(feature = "extend_for_tuple", since = "1.56.0")] + impl<$($ty,)+ $($extend_ty,)+> Extend<($($ty,)+)> for ($($extend_ty,)+) where - $($extend_ty_names: Extend<$ty_names>,)* - Iter: TrustedLen, + $($extend_ty: Extend<$ty>,)+ { - fn extend(self, $($var_names: &mut $extend_ty_names,)*) { - fn extend<'a, $($ty_names,)*>( - $($var_names: &'a mut impl Extend<$ty_names>,)* - ) -> impl FnMut((), ($($ty_names,)*)) + 'a { - #[allow(non_snake_case)] - // SAFETY: We reserve enough space for the `size_hint`, and the iterator is - // `TrustedLen` so its `size_hint` is exact. - move |(), ($($extend_ty_names,)*)| unsafe { - $($var_names.extend_one_unchecked($extend_ty_names);)* - } - } + fn extend>(&mut self, iter: T) { + default_extend(self, iter) + } - let (lower_bound, upper_bound) = self.size_hint(); + fn extend_one(&mut self, item: ($($ty,)+)) { + $(self.$index.extend_one(item.$index);)+ + } - if upper_bound.is_none() { - // We cannot reserve more than `usize::MAX` items, and this is likely to go out of memory anyway. 
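A small usage-level check of the tuple `Extend` behaviour this collect.rs rewrite must keep providing, using a 3-tuple since the doc examples above use pairs (illustrative values, not part of the patch):

```rust
fn main() {
    // Fan a stream of 3-tuples out into three collections at once; `String`
    // takes part because it implements `Extend<char>`.
    let mut out: (Vec<i32>, Vec<f64>, String) = Default::default();
    out.extend([(1, 1.5, 'a'), (2, 2.5, 'b')]);
    assert_eq!(out.0, [1, 2]);
    assert_eq!(out.1, [1.5, 2.5]);
    assert_eq!(out.2, "ab");
}
```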
- $default_fn_name(self, $($var_names,)*); - return; - } + fn extend_reserve(&mut self, additional: usize) { + $(self.$index.extend_reserve(additional);)+ + } - if lower_bound > 0 { - $($var_names.extend_reserve(lower_bound);)* + unsafe fn extend_one_unchecked(&mut self, item: ($($ty,)+)) { + // SAFETY: Those are our safety preconditions, and we correctly forward `extend_reserve`. + unsafe { + $(self.$index.extend_one_unchecked(item.$index);)+ } - - self.fold((), extend($($var_names,)*)); } } - /// This implementation turns an iterator of tuples into a tuple of types which implement - /// [`Default`] and [`Extend`]. - /// - /// This is similar to [`Iterator::unzip`], but is also composable with other [`FromIterator`] - /// implementations: - /// - /// ```rust - /// # fn main() -> Result<(), core::num::ParseIntError> { - /// let string = "1,2,123,4"; - /// - /// // Example given for a 2-tuple, but 1- through 12-tuples are supported - /// let (numbers, lengths): (Vec<_>, Vec<_>) = string - /// .split(',') - /// .map(|s| s.parse().map(|n: u32| (n, s.len()))) - /// .collect::>()?; - /// - /// assert_eq!(numbers, [1, 2, 123, 4]); - /// assert_eq!(lengths, [1, 1, 3, 1]); - /// # Ok(()) } - /// ``` - #[$meta] - $(#[$doctext])? + #[doc(hidden)] #[stable(feature = "from_iterator_for_tuple", since = "1.79.0")] - impl<$($ty_names,)* $($extend_ty_names,)*> FromIterator<($($extend_ty_names,)*)> for ($($ty_names,)*) + impl<$($ty,)+ $($extend_ty,)+> FromIterator<($($ty,)+)> for ($($extend_ty,)+) where - $($ty_names: Default + Extend<$extend_ty_names>,)* + $($extend_ty: Default + Extend<$ty>,)+ { - fn from_iter>(iter: Iter) -> Self { - let mut res = <($($ty_names,)*)>::default(); + fn from_iter>(iter: Iter) -> Self { + let mut res = Self::default(); res.extend(iter); - res } } - }; } -spec_tuple_impl!( - (L, l, EL, TraitL, default_extend_tuple_l, 11), - (K, k, EK, TraitK, default_extend_tuple_k, 10), - (J, j, EJ, TraitJ, default_extend_tuple_j, 9), - (I, i, EI, TraitI, default_extend_tuple_i, 8), - (H, h, EH, TraitH, default_extend_tuple_h, 7), - (G, g, EG, TraitG, default_extend_tuple_g, 6), - (F, f, EF, TraitF, default_extend_tuple_f, 5), - (E, e, EE, TraitE, default_extend_tuple_e, 4), - (D, d, ED, TraitD, default_extend_tuple_d, 3), - (C, c, EC, TraitC, default_extend_tuple_c, 2), - (B, b, EB, TraitB, default_extend_tuple_b, 1), - (A, a, EA, TraitA, default_extend_tuple_a, 0), +impl_extend_tuple!((A, ExA, 0), (B, ExB, 1)); +impl_extend_tuple!((A, ExA, 0), (B, ExB, 1), (C, ExC, 2)); +impl_extend_tuple!((A, ExA, 0), (B, ExB, 1), (C, ExC, 2), (D, ExD, 3)); +impl_extend_tuple!((A, ExA, 0), (B, ExB, 1), (C, ExC, 2), (D, ExD, 3), (E, ExE, 4)); +impl_extend_tuple!((A, ExA, 0), (B, ExB, 1), (C, ExC, 2), (D, ExD, 3), (E, ExE, 4), (F, ExF, 5)); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6) +); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6), + (H, ExH, 7) +); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6), + (H, ExH, 7), + (I, ExI, 8) +); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6), + (H, ExH, 7), + (I, ExI, 8), + (J, ExJ, 9) +); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6), + (H, ExH, 7), + (I, ExI, 8), + (J, ExJ, 9), + 
(K, ExK, 10) +); +impl_extend_tuple!( + (A, ExA, 0), + (B, ExB, 1), + (C, ExC, 2), + (D, ExD, 3), + (E, ExE, 4), + (F, ExF, 5), + (G, ExG, 6), + (H, ExH, 7), + (I, ExI, 8), + (J, ExJ, 9), + (K, ExK, 10), + (L, ExL, 11) ); diff --git a/core/src/lib.rs b/core/src/lib.rs index 54adf97f10020..2dd48ef18369b 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -106,6 +106,7 @@ #![feature(const_cmp)] #![feature(const_destruct)] #![feature(const_eval_select)] +#![feature(const_select_unpredictable)] #![feature(core_intrinsics)] #![feature(coverage_attribute)] #![feature(disjoint_bitor)] @@ -116,10 +117,10 @@ #![feature(link_cfg)] #![feature(offset_of_enum)] #![feature(panic_internals)] +#![feature(pattern_type_macro)] #![feature(ptr_alignment_type)] #![feature(ptr_metadata)] #![feature(set_ptr_value)] -#![feature(slice_as_array)] #![feature(slice_ptr_get)] #![feature(str_internals)] #![feature(str_split_inclusive_remainder)] @@ -171,6 +172,7 @@ #![feature(never_type)] #![feature(no_core)] #![feature(optimize_attribute)] +#![feature(pattern_types)] #![feature(prelude_import)] #![feature(reborrow)] #![feature(repr_simd)] @@ -279,6 +281,8 @@ pub mod num; pub mod hint; pub mod intrinsics; pub mod mem; +#[unstable(feature = "profiling_marker_api", issue = "148197")] +pub mod profiling; pub mod ptr; #[unstable(feature = "ub_checks", issue = "none")] pub mod ub_checks; @@ -291,6 +295,7 @@ pub mod cmp; pub mod convert; pub mod default; pub mod error; +pub mod index; pub mod marker; pub mod ops; diff --git a/core/src/macros/mod.rs b/core/src/macros/mod.rs index df24dd43b82eb..f3386985bdf1b 100644 --- a/core/src/macros/mod.rs +++ b/core/src/macros/mod.rs @@ -991,7 +991,7 @@ pub(crate) mod builtin { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "format_args_macro"] #[allow_internal_unsafe] - #[allow_internal_unstable(fmt_internals)] + #[allow_internal_unstable(fmt_internals, fmt_arguments_from_str)] #[rustc_builtin_macro] #[macro_export] macro_rules! format_args { @@ -1005,7 +1005,7 @@ pub(crate) mod builtin { /// /// This macro will be removed once `format_args` is allowed in const contexts. #[unstable(feature = "const_format_args", issue = "none")] - #[allow_internal_unstable(fmt_internals, const_fmt_arguments_new)] + #[allow_internal_unstable(fmt_internals, fmt_arguments_from_str)] #[rustc_builtin_macro] #[macro_export] macro_rules! const_format_args { @@ -1020,7 +1020,7 @@ pub(crate) mod builtin { reason = "`format_args_nl` is only for internal \ language use and is subject to change" )] - #[allow_internal_unstable(fmt_internals)] + #[allow_internal_unstable(fmt_internals, fmt_arguments_from_str)] #[rustc_builtin_macro] #[doc(hidden)] #[macro_export] @@ -1499,6 +1499,55 @@ pub(crate) mod builtin { /// - `INPUT_ACTIVITIES`: Specifies one valid activity for each input parameter. /// - `OUTPUT_ACTIVITY`: Must not be set if the function implicitly returns nothing /// (or explicitly returns `-> ()`). Otherwise, it must be set to one of the allowed activities. + /// + /// ACTIVITIES might either be `Dual` or `Const`, more options will be exposed later. + /// + /// `Const` should be used on non-float arguments, or float-based arguments as an optimization + /// if we are not interested in computing the derivatives with respect to this argument. + /// + /// `Dual` can be used for float scalar values or for references, raw pointers, or other + /// indirect input arguments. It can also be used on a scalar float return value. 
+ /// If used on a return value, the generated function will return a tuple of two float scalars. + /// If used on an input argument, a new shadow argument of the same type will be created, + /// directly following the original argument. + /// + /// ### Usage examples: + /// + /// ```rust,ignore (autodiff requires a -Z flag as well as fat-lto for testing) + /// #![feature(autodiff)] + /// use std::autodiff::*; + /// #[autodiff_forward(rb_fwd1, Dual, Const, Dual)] + /// #[autodiff_forward(rb_fwd2, Const, Dual, Dual)] + /// #[autodiff_forward(rb_fwd3, Dual, Dual, Dual)] + /// fn rosenbrock(x: f64, y: f64) -> f64 { + /// (1.0 - x).powi(2) + 100.0 * (y - x.powi(2)).powi(2) + /// } + /// #[autodiff_forward(rb_inp_fwd, Dual, Dual, Dual)] + /// fn rosenbrock_inp(x: f64, y: f64, out: &mut f64) { + /// *out = (1.0 - x).powi(2) + 100.0 * (y - x.powi(2)).powi(2); + /// } + /// + /// fn main() { + /// let x0 = rosenbrock(1.0, 3.0); // 400.0 + /// let (x1, dx1) = rb_fwd1(1.0, 1.0, 3.0); // (400.0, -800.0) + /// let (x2, dy1) = rb_fwd2(1.0, 3.0, 1.0); // (400.0, 400.0) + /// // When seeding both arguments at once the tangent return is the sum of both. + /// let (x3, dxy) = rb_fwd3(1.0, 1.0, 3.0, 1.0); // (400.0, -400.0) + /// + /// let mut out = 0.0; + /// let mut dout = 0.0; + /// rb_inp_fwd(1.0, 1.0, 3.0, 1.0, &mut out, &mut dout); + /// // (out, dout) == (400.0, -400.0) + /// } + /// ``` + /// + /// We might want to track how one input float affects one or more output floats. In this case, + /// the shadow of one input should be initialized to `1.0`, while the shadows of the other + /// inputs should be initialized to `0.0`. The shadow of the output(s) should be initialized to + /// `0.0`. After calling the generated function, the shadow of the input will be zeroed, + /// while the shadow(s) of the output(s) will contain the derivatives. Forward mode is generally + /// more efficient if we have more output floats marked as `Dual` than input floats. + /// Related information can also be found under the term "Jacobian-vector product" (JVP). #[unstable(feature = "autodiff", issue = "124509")] #[allow_internal_unstable(rustc_attrs)] #[allow_internal_unstable(core_intrinsics)] @@ -1518,6 +1567,60 @@ pub(crate) mod builtin { /// - `INPUT_ACTIVITIES`: Specifies one valid activity for each input parameter. /// - `OUTPUT_ACTIVITY`: Must not be set if the function implicitly returns nothing /// (or explicitly returns `-> ()`). Otherwise, it must be set to one of the allowed activities. + /// + /// ACTIVITIES might either be `Active`, `Duplicated` or `Const`, more options will be exposed later. + /// + /// `Active` can be used for float scalar values. + /// If used on an input, a new float will be appended to the return tuple of the generated + /// function. If the function returns a float scalar, `Active` can be used for the return as + /// well. In this case a float scalar will be appended to the argument list, it works as seed. + /// + /// `Duplicated` can be used on references, raw pointers, or other indirect input + /// arguments. It creates a new shadow argument of the same type, following the original argument. + /// A const reference or pointer argument will receive a mutable reference or pointer as shadow. + /// + /// `Const` should be used on non-float arguments, or float-based arguments as an optimization + /// if we are not interested in computing the derivatives with respect to this argument.
+ /// + /// ### Usage examples: + /// + /// ```rust,ignore (autodiff requires a -Z flag as well as fat-lto for testing) + /// #![feature(autodiff)] + /// use std::autodiff::*; + /// #[autodiff_reverse(rb_rev, Active, Active, Active)] + /// fn rosenbrock(x: f64, y: f64) -> f64 { + /// (1.0 - x).powi(2) + 100.0 * (y - x.powi(2)).powi(2) + /// } + /// #[autodiff_reverse(rb_inp_rev, Active, Active, Duplicated)] + /// fn rosenbrock_inp(x: f64, y: f64, out: &mut f64) { + /// *out = (1.0 - x).powi(2) + 100.0 * (y - x.powi(2)).powi(2); + /// } + /// + /// fn main() { + /// let (output1, dx1, dy1) = rb_rev(1.0, 3.0, 1.0); + /// dbg!(output1, dx1, dy1); // (400.0, -800.0, 400.0) + /// let mut output2 = 0.0; + /// let mut seed = 1.0; + /// let (dx2, dy2) = rb_inp_rev(1.0, 3.0, &mut output2, &mut seed); + /// // (dx2, dy2, output2, seed) == (-800.0, 400.0, 400.0, 0.0) + /// } + /// ``` + /// + /// + /// We often want to track how one or more input floats affect one output float. This output can + /// be a scalar return value, or a mutable reference or pointer argument. In the latter case, the + /// mutable input should be marked as duplicated and its shadow initialized to `0.0`. The shadow of + /// the output should be marked as active or duplicated and initialized to `1.0`. After calling + /// the generated function, the shadow(s) of the input(s) will contain the derivatives. The + /// shadow of the outputs ("seed") will be reset to zero. + /// If the function has more than one output float marked as active or duplicated, users might want to + /// set one of them to `1.0` and the others to `0.0` to compute partial derivatives. + /// Unlike forward-mode, a call to the generated function does not reset the shadow of the + /// inputs. + /// Reverse mode is generally more efficient if we have more active/duplicated input than + /// output floats. + /// + /// Related information can also be found under the term "vector-Jacobian product" (VJP). #[unstable(feature = "autodiff", issue = "124509")] #[allow_internal_unstable(rustc_attrs)] #[allow_internal_unstable(core_intrinsics)] diff --git a/core/src/marker.rs b/core/src/marker.rs index 4b767d8d62218..e945cd77a75f7 100644 --- a/core/src/marker.rs +++ b/core/src/marker.rs @@ -14,6 +14,7 @@ pub use self::variance::{ PhantomInvariant, PhantomInvariantLifetime, Variance, variance, }; use crate::cell::UnsafeCell; +use crate::clone::TrivialClone; use crate::cmp; use crate::fmt::Debug; use crate::hash::{Hash, Hasher}; @@ -454,12 +455,8 @@ marker_impls! { /// [impls]: #implementors #[stable(feature = "rust1", since = "1.0.0")] #[lang = "copy"] -// FIXME(matthewjasper) This allows copying a type that doesn't implement -// `Copy` because of unsatisfied lifetime bounds (copying `A<'_>` when only -// `A<'static>: Copy` and `A<'_>: Clone`). -// We have this attribute here for now only because there are quite a few -// existing specializations on `Copy` that already exist in the standard -// library, and there's no way to safely have this behavior right now.
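Stepping back to the autodiff usage examples above: the quoted derivative values can be cross-checked numerically without the feature. A plain finite-difference sketch; the step size and tolerance are arbitrary editorial choices, not part of the patch:

```rust
fn rosenbrock(x: f64, y: f64) -> f64 {
    (1.0 - x).powi(2) + 100.0 * (y - x.powi(2)).powi(2)
}

fn main() {
    let (x, y, h) = (1.0, 3.0, 1e-6);
    // Central differences approximate the partial derivatives at (1.0, 3.0).
    let dx = (rosenbrock(x + h, y) - rosenbrock(x - h, y)) / (2.0 * h);
    let dy = (rosenbrock(x, y + h) - rosenbrock(x, y - h)) / (2.0 * h);
    assert!((dx - (-800.0)).abs() < 1e-3); // matches dx1 in `rb_fwd1` / `rb_rev`
    assert!((dy - 400.0).abs() < 1e-3);    // matches dy1 in `rb_fwd2` / `rb_rev`
    assert_eq!(rosenbrock(x, y), 400.0);   // matches the primal value
}
```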
+// This is unsound, but required by `hashbrown` +// FIXME(joboet): change `hashbrown` to use `TrivialClone` #[rustc_unsafe_specialization_marker] #[rustc_diagnostic_item = "Copy"] pub trait Copy: Clone { @@ -861,6 +858,10 @@ impl Clone for PhantomData { } } +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for PhantomData {} + #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_default", issue = "143894")] impl const Default for PhantomData { @@ -1057,7 +1058,7 @@ marker_impls! { #[rustc_on_unimplemented(message = "can't drop `{Self}`", append_const_msg)] #[rustc_deny_explicit_impl] #[rustc_do_not_implement_via_object] -pub const trait Destruct {} +pub const trait Destruct: PointeeSized {} /// A marker for tuple types. /// diff --git a/core/src/marker/variance.rs b/core/src/marker/variance.rs index 55fdacb014e67..5fc62a5ad7ac2 100644 --- a/core/src/marker/variance.rs +++ b/core/src/marker/variance.rs @@ -2,6 +2,7 @@ use super::PhantomData; use crate::any::type_name; +use crate::clone::TrivialClone; use crate::cmp::Ordering; use crate::fmt; use crate::hash::{Hash, Hasher}; @@ -60,6 +61,9 @@ macro_rules! phantom_type { impl Copy for $name where T: ?Sized {} + #[doc(hidden)] + unsafe impl TrivialClone for $name where T: ?Sized {} + impl PartialEq for $name where T: ?Sized { diff --git a/core/src/mem/manually_drop.rs b/core/src/mem/manually_drop.rs index 8868f05f1b98f..7d49da8509577 100644 --- a/core/src/mem/manually_drop.rs +++ b/core/src/mem/manually_drop.rs @@ -1,3 +1,4 @@ +use crate::marker::Destruct; use crate::ops::{Deref, DerefMut, DerefPure}; use crate::ptr; @@ -216,8 +217,9 @@ impl ManuallyDrop { /// #[must_use = "if you don't need the value, you can use `ManuallyDrop::drop` instead"] #[stable(feature = "manually_drop_take", since = "1.42.0")] + #[rustc_const_unstable(feature = "const_manually_drop_take", issue = "148773")] #[inline] - pub unsafe fn take(slot: &mut ManuallyDrop) -> T { + pub const unsafe fn take(slot: &mut ManuallyDrop) -> T { // SAFETY: we are reading from a reference, which is guaranteed // to be valid for reads. unsafe { ptr::read(&slot.value) } @@ -249,7 +251,11 @@ impl ManuallyDrop { /// [pinned]: crate::pin #[stable(feature = "manually_drop", since = "1.20.0")] #[inline] - pub unsafe fn drop(slot: &mut ManuallyDrop) { + #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] + pub const unsafe fn drop(slot: &mut ManuallyDrop) + where + T: [const] Destruct, + { // SAFETY: we are dropping the value pointed to by a mutable reference // which is guaranteed to be valid for writes. // It is up to the caller to make sure that `slot` isn't dropped again. diff --git a/core/src/mem/maybe_uninit.rs b/core/src/mem/maybe_uninit.rs index c160360cfacf9..3507d1a0a9a8c 100644 --- a/core/src/mem/maybe_uninit.rs +++ b/core/src/mem/maybe_uninit.rs @@ -1,4 +1,6 @@ use crate::any::type_name; +use crate::clone::TrivialClone; +use crate::marker::Destruct; use crate::mem::ManuallyDrop; use crate::{fmt, intrinsics, ptr, slice}; @@ -252,6 +254,89 @@ use crate::{fmt, intrinsics, ptr, slice}; /// std::process::exit(*code); // UB! Accessing uninitialized memory. /// } /// ``` +/// +/// # Validity +/// +/// `MaybeUninit` has no validity requirements –- any sequence of [bytes] of +/// the appropriate length, initialized or uninitialized, are a valid +/// representation. 
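For context on the `ManuallyDrop` hunk above: the new `const_manually_drop_take` gate extends the existing runtime contract of `take` to const contexts. A minimal (non-const) usage reminder, illustrative and not part of the patch:

```rust
use std::mem::ManuallyDrop;

fn main() {
    let mut slot = ManuallyDrop::new(String::from("hello"));
    // Moves the value out; `slot` must not be used or dropped again afterwards,
    // which is exactly the contract the (now const-capable) method documents.
    let s = unsafe { ManuallyDrop::take(&mut slot) };
    assert_eq!(s, "hello");
}
```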
+/// +/// Moving or copying a value of type `MaybeUninit` (i.e., performing a +/// "typed copy") will exactly preserve the contents, including the +/// [provenance], of all non-padding bytes of type `T` in the value's +/// representation. +/// +/// Therefore `MaybeUninit` can be used to perform a round trip of a value from +/// type `T` to type `MaybeUninit` then back to type `T`, while preserving +/// the original value, if two conditions are met. One, type `U` must have the +/// same size as type `T`. Two, for all byte offsets where type `U` has padding, +/// the corresponding bytes in the representation of the value must be +/// uninitialized. +/// +/// For example, due to the fact that the type `[u8; size_of::]` has no +/// padding, the following is sound for any type `T` and will return the +/// original value: +/// +/// ```rust,no_run +/// # use core::mem::{MaybeUninit, transmute}; +/// # struct T; +/// fn identity(t: T) -> T { +/// unsafe { +/// let u: MaybeUninit<[u8; size_of::()]> = transmute(t); +/// transmute(u) // OK. +/// } +/// } +/// ``` +/// +/// Note: Copying a value that contains references may implicitly reborrow them +/// causing the provenance of the returned value to differ from that of the +/// original. This applies equally to the trivial identity function: +/// +/// ```rust,no_run +/// fn trivial_identity(t: T) -> T { t } +/// ``` +/// +/// Note: Moving or copying a value whose representation has initialized bytes +/// at byte offsets where the type has padding may lose the value of those +/// bytes, so while the original value will be preserved, the original +/// *representation* of that value as bytes may not be. Again, this applies +/// equally to `trivial_identity`. +/// +/// Note: Performing this round trip when type `U` has padding at byte offsets +/// where the representation of the original value has initialized bytes may +/// produce undefined behavior or a different value. For example, the following +/// is unsound since `T` requires all bytes to be initialized: +/// +/// ```rust,no_run +/// # use core::mem::{MaybeUninit, transmute}; +/// #[repr(C)] struct T([u8; 4]); +/// #[repr(C)] struct U(u8, u16); +/// fn unsound_identity(t: T) -> T { +/// unsafe { +/// let u: MaybeUninit = transmute(t); +/// transmute(u) // UB. +/// } +/// } +/// ``` +/// +/// Conversely, the following is sound since `T` allows uninitialized bytes in +/// the representation of a value, but the round trip may alter the value: +/// +/// ```rust,no_run +/// # use core::mem::{MaybeUninit, transmute}; +/// #[repr(C)] struct T(MaybeUninit<[u8; 4]>); +/// #[repr(C)] struct U(u8, u16); +/// fn non_identity(t: T) -> T { +/// unsafe { +/// // May lose an initialized byte. +/// let u: MaybeUninit = transmute(t); +/// transmute(u) +/// } +/// } +/// ``` +/// +/// [bytes]: ../../reference/memory-model.html#bytes +/// [provenance]: crate::ptr#provenance #[stable(feature = "maybe_uninit", since = "1.36.0")] // Lang item so we can wrap other types in it. This is useful for coroutines. #[lang = "maybe_uninit"] @@ -272,6 +357,11 @@ impl Clone for MaybeUninit { } } +// SAFETY: the clone implementation is a copy, see above. 
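A concrete, runnable instance of the sound round trip described in the new validity docs, using `(u16, u16)` because it has no padding bytes (the choice of type is an editorial assumption, not part of the patch):

```rust
use std::mem::{MaybeUninit, transmute};

fn main() {
    // `(u16, u16)` has size 4 and no padding, and `[u8; 4]` has no padding
    // either, so the round trip described above preserves the value.
    let before: (u16, u16) = (0xBEEF, 0x1234);
    let bytes: MaybeUninit<[u8; 4]> = unsafe { transmute(before) };
    let after: (u16, u16) = unsafe { transmute(bytes) };
    assert_eq!(before, after);
}
```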
+#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for MaybeUninit where MaybeUninit: Clone {} + #[stable(feature = "maybe_uninit_debug", since = "1.41.0")] impl fmt::Debug for MaybeUninit { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -714,7 +804,11 @@ impl MaybeUninit { /// /// [`assume_init`]: MaybeUninit::assume_init #[stable(feature = "maybe_uninit_extra", since = "1.60.0")] - pub unsafe fn assume_init_drop(&mut self) { + #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] + pub const unsafe fn assume_init_drop(&mut self) + where + T: [const] Destruct, + { // SAFETY: the caller must guarantee that `self` is initialized and // satisfies all invariants of `T`. // Dropping the value in place is safe if that is the case. @@ -1390,7 +1484,11 @@ impl [MaybeUninit] { /// behaviour. #[unstable(feature = "maybe_uninit_slice", issue = "63569")] #[inline(always)] - pub unsafe fn assume_init_drop(&mut self) { + #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] + pub const unsafe fn assume_init_drop(&mut self) + where + T: [const] Destruct, + { if !self.is_empty() { // SAFETY: the caller must guarantee that every element of `self` // is initialized and satisfies all invariants of `T`. @@ -1507,8 +1605,12 @@ impl SpecFill for [MaybeUninit] { } } -impl SpecFill for [MaybeUninit] { +impl SpecFill for [MaybeUninit] { fn spec_fill(&mut self, value: T) { - self.fill(MaybeUninit::new(value)); + // SAFETY: because `T` is `TrivialClone`, this is equivalent to calling + // `T::clone` for every element. Notably, `TrivialClone` also implies + // that the `clone` implementation will not panic, so we can avoid + // initialization guards and such. + self.fill_with(|| MaybeUninit::new(unsafe { ptr::read(&value) })); } } diff --git a/core/src/mem/mod.rs b/core/src/mem/mod.rs index c484551187cc3..7d9d7f3f586cf 100644 --- a/core/src/mem/mod.rs +++ b/core/src/mem/mod.rs @@ -6,7 +6,8 @@ #![stable(feature = "rust1", since = "1.0.0")] use crate::alloc::Layout; -use crate::marker::DiscriminantKind; +use crate::clone::TrivialClone; +use crate::marker::{Destruct, DiscriminantKind}; use crate::panic::const_assert; use crate::{clone, cmp, fmt, hash, intrinsics, ptr}; @@ -333,7 +334,7 @@ pub fn forget_unsized(t: T) { #[rustc_const_stable(feature = "const_mem_size_of", since = "1.24.0")] #[rustc_diagnostic_item = "mem_size_of"] pub const fn size_of() -> usize { - intrinsics::size_of::() + ::SIZE } /// Returns the size of the pointed-to value in bytes. 
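The `assume_init_drop` change above only adds const-callability for `T: [const] Destruct`; its runtime contract is unchanged and looks like this (illustrative, not part of the patch):

```rust
use std::mem::MaybeUninit;

fn main() {
    let mut slot = MaybeUninit::new(String::from("drop me"));
    // Runs the destructor of the contained value in place; afterwards the
    // slot counts as uninitialized again and must not be read.
    unsafe { slot.assume_init_drop() };
}
```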
@@ -441,7 +442,7 @@ pub const unsafe fn size_of_val_raw(val: *const T) -> usize { #[stable(feature = "rust1", since = "1.0.0")] #[deprecated(note = "use `align_of` instead", since = "1.2.0", suggestion = "align_of")] pub fn min_align_of() -> usize { - intrinsics::align_of::() + ::ALIGN } /// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to in @@ -488,7 +489,7 @@ pub fn min_align_of_val(val: &T) -> usize { #[rustc_const_stable(feature = "const_align_of", since = "1.24.0")] #[rustc_diagnostic_item = "mem_align_of"] pub const fn align_of() -> usize { - intrinsics::align_of::() + ::ALIGN } /// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to in @@ -807,7 +808,8 @@ pub const fn swap(x: &mut T, y: &mut T) { /// ``` #[inline] #[stable(feature = "mem_take", since = "1.40.0")] -pub fn take(dest: &mut T) -> T { +#[rustc_const_unstable(feature = "const_default", issue = "143894")] +pub const fn take(dest: &mut T) -> T { replace(dest, T::default()) } @@ -958,8 +960,13 @@ pub const fn replace(dest: &mut T, src: T) -> T { /// [`RefCell`]: crate::cell::RefCell #[inline] #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_const_unstable(feature = "const_destruct", issue = "133214")] #[rustc_diagnostic_item = "mem_drop"] -pub fn drop(_x: T) {} +pub const fn drop(_x: T) +where + T: [const] Destruct, +{ +} /// Bitwise-copies a value. /// @@ -1065,6 +1072,10 @@ impl clone::Clone for Discriminant { } } +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for Discriminant {} + #[stable(feature = "discriminant_value", since = "1.21.0")] impl cmp::PartialEq for Discriminant { fn eq(&self, rhs: &Self) -> bool { @@ -1231,6 +1242,16 @@ pub const fn variant_count() -> usize { #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] pub trait SizedTypeProperties: Sized { + #[doc(hidden)] + #[unstable(feature = "sized_type_properties", issue = "none")] + #[lang = "mem_size_const"] + const SIZE: usize = intrinsics::size_of::(); + + #[doc(hidden)] + #[unstable(feature = "sized_type_properties", issue = "none")] + #[lang = "mem_align_const"] + const ALIGN: usize = intrinsics::align_of::(); + /// `true` if this type requires no storage. /// `false` if its [size](size_of) is greater than zero. /// @@ -1258,7 +1279,7 @@ pub trait SizedTypeProperties: Sized { /// ``` #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] - const IS_ZST: bool = size_of::() == 0; + const IS_ZST: bool = Self::SIZE == 0; #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] @@ -1270,7 +1291,7 @@ pub trait SizedTypeProperties: Sized { /// which is never allowed for a single object. #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] - const MAX_SLICE_LEN: usize = match size_of::() { + const MAX_SLICE_LEN: usize = match Self::SIZE { 0 => usize::MAX, n => (isize::MAX as usize) / n, }; @@ -1403,10 +1424,10 @@ impl SizedTypeProperties for T {} /// [`offset_of_enum`]: https://doc.rust-lang.org/nightly/unstable-book/language-features/offset-of-enum.html /// [`offset_of_slice`]: https://doc.rust-lang.org/nightly/unstable-book/language-features/offset-of-slice.html #[stable(feature = "offset_of", since = "1.77.0")] -#[allow_internal_unstable(builtin_syntax)] +#[allow_internal_unstable(builtin_syntax, core_intrinsics)] pub macro offset_of($Container:ty, $($fields:expr)+ $(,)?) 
{ // The `{}` is for better error messages - {builtin # offset_of($Container, $($fields)+)} + const {builtin # offset_of($Container, $($fields)+)} } /// Create a fresh instance of the inhabited ZST type `T`. diff --git a/core/src/num/bignum.rs b/core/src/num/bignum.rs index f21fe0b4438fb..95b49a38ded06 100644 --- a/core/src/num/bignum.rs +++ b/core/src/num/bignum.rs @@ -38,9 +38,8 @@ macro_rules! impl_full_ops { fn full_mul_add(self, other: $ty, other2: $ty, carry: $ty) -> ($ty, $ty) { // This cannot overflow; // the output is between `0` and `2^nbits * (2^nbits - 1)`. - let v = (self as $bigty) * (other as $bigty) + (other2 as $bigty) + - (carry as $bigty); - ((v >> <$ty>::BITS) as $ty, v as $ty) + let (lo, hi) = self.carrying_mul_add(other, other2, carry); + (hi, lo) } fn full_div_rem(self, other: $ty, borrow: $ty) -> ($ty, $ty) { @@ -59,8 +58,7 @@ impl_full_ops! { u8: add(intrinsics::u8_add_with_overflow), mul/div(u16); u16: add(intrinsics::u16_add_with_overflow), mul/div(u32); u32: add(intrinsics::u32_add_with_overflow), mul/div(u64); - // See RFC #521 for enabling this. - // u64: add(intrinsics::u64_add_with_overflow), mul/div(u128); + u64: add(intrinsics::u64_add_with_overflow), mul/div(u128); } /// Table of powers of 5 representable in digits. Specifically, the largest {u8, u16, u32} value diff --git a/core/src/num/f128.rs b/core/src/num/f128.rs index 4fe4735e304c9..2cf06b6d6a35a 100644 --- a/core/src/num/f128.rs +++ b/core/src/num/f128.rs @@ -18,6 +18,7 @@ use crate::{intrinsics, mem}; /// Basic mathematical constants. #[unstable(feature = "f128", issue = "116909")] +#[rustc_diagnostic_item = "f128_consts_mod"] pub mod consts { // FIXME: replace with mathematical constants from cmath. @@ -1769,6 +1770,7 @@ impl f128 { /// assert!(abs_difference <= f128::EPSILON); /// /// assert_eq!(f128::powi(f128::NAN, 0), 1.0); + /// assert_eq!(f128::powi(0.0, 0), 1.0); /// # } /// ``` #[inline] diff --git a/core/src/num/f16.rs b/core/src/num/f16.rs index 0bea6bc8801d8..51f803672e5c6 100644 --- a/core/src/num/f16.rs +++ b/core/src/num/f16.rs @@ -20,6 +20,7 @@ use crate::{intrinsics, mem}; /// Basic mathematical constants. #[unstable(feature = "f16", issue = "116909")] +#[rustc_diagnostic_item = "f16_consts_mod"] pub mod consts { // FIXME: replace with mathematical constants from cmath. @@ -1744,6 +1745,7 @@ impl f16 { /// assert!(abs_difference <= f16::EPSILON); /// /// assert_eq!(f16::powi(f16::NAN, 0), 1.0); + /// assert_eq!(f16::powi(0.0, 0), 1.0); /// # } /// ``` #[inline] diff --git a/core/src/num/f32.rs b/core/src/num/f32.rs index e380cc698f574..3070e1dedbe43 100644 --- a/core/src/num/f32.rs +++ b/core/src/num/f32.rs @@ -277,6 +277,7 @@ pub const NEG_INFINITY: f32 = f32::NEG_INFINITY; /// Basic mathematical constants. #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_diagnostic_item = "f32_consts_mod"] pub mod consts { // FIXME: replace with mathematical constants from cmath. diff --git a/core/src/num/f64.rs b/core/src/num/f64.rs index ff7449fd996ce..dc8ccc551b2da 100644 --- a/core/src/num/f64.rs +++ b/core/src/num/f64.rs @@ -277,6 +277,7 @@ pub const NEG_INFINITY: f64 = f64::NEG_INFINITY; /// Basic mathematical constants. #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_diagnostic_item = "f64_consts_mod"] pub mod consts { // FIXME: replace with mathematical constants from cmath. 
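Regarding the `offset_of!` change earlier in this chunk (wrapping the expansion in a `const` block): usage is unchanged and the macro keeps expanding to a compile-time constant. A sketch with an illustrative type, not part of the patch:

```rust
use std::mem::offset_of;

#[allow(dead_code)]
#[repr(C)]
struct Header {
    tag: u8,
    len: u32,
}

// The macro expands to a constant, so it can feed a `const` directly.
const LEN_OFFSET: usize = offset_of!(Header, len);

fn main() {
    // With `repr(C)`: `tag` at offset 0, three padding bytes, then `len` at 4.
    assert_eq!(LEN_OFFSET, 4);
}
```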
diff --git a/core/src/num/int_macros.rs b/core/src/num/int_macros.rs index c3460a6409069..93fdf2823aeb7 100644 --- a/core/src/num/int_macros.rs +++ b/core/src/num/int_macros.rs @@ -217,10 +217,10 @@ macro_rules! int_impl { /// ``` /// #![feature(int_lowest_highest_one)] /// - #[doc = concat!("assert_eq!(0x0_", stringify!($SelfT), ".highest_one(), None);")] - #[doc = concat!("assert_eq!(0x1_", stringify!($SelfT), ".highest_one(), Some(0));")] - #[doc = concat!("assert_eq!(0x10_", stringify!($SelfT), ".highest_one(), Some(4));")] - #[doc = concat!("assert_eq!(0x1f_", stringify!($SelfT), ".highest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b0_", stringify!($SelfT), ".highest_one(), None);")] + #[doc = concat!("assert_eq!(0b1_", stringify!($SelfT), ".highest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b1_0000_", stringify!($SelfT), ".highest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b1_1111_", stringify!($SelfT), ".highest_one(), Some(4));")] /// ``` #[unstable(feature = "int_lowest_highest_one", issue = "145203")] #[must_use = "this returns the result of the operation, \ @@ -238,10 +238,10 @@ macro_rules! int_impl { /// ``` /// #![feature(int_lowest_highest_one)] /// - #[doc = concat!("assert_eq!(0x0_", stringify!($SelfT), ".lowest_one(), None);")] - #[doc = concat!("assert_eq!(0x1_", stringify!($SelfT), ".lowest_one(), Some(0));")] - #[doc = concat!("assert_eq!(0x10_", stringify!($SelfT), ".lowest_one(), Some(4));")] - #[doc = concat!("assert_eq!(0x1f_", stringify!($SelfT), ".lowest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b0_", stringify!($SelfT), ".lowest_one(), None);")] + #[doc = concat!("assert_eq!(0b1_", stringify!($SelfT), ".lowest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b1_0000_", stringify!($SelfT), ".lowest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b1_1111_", stringify!($SelfT), ".lowest_one(), Some(0));")] /// ``` #[unstable(feature = "int_lowest_highest_one", issue = "145203")] #[must_use = "this returns the result of the operation, \ @@ -275,6 +275,10 @@ macro_rules! int_impl { /// Shifts the bits to the left by a specified amount, `n`, /// wrapping the truncated bits to the end of the resulting integer. /// + /// `rotate_left(n)` is equivalent to applying `rotate_left(1)` a total of `n` times. In + /// particular, a rotation by the number of bits in `self` returns the input value + /// unchanged. + /// /// Please note this isn't the same operation as the `<<` shifting operator! /// /// # Examples @@ -284,6 +288,7 @@ macro_rules! int_impl { #[doc = concat!("let m = ", $rot_result, ";")] /// #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] + #[doc = concat!("assert_eq!(n.rotate_left(1024), n);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] @@ -298,6 +303,10 @@ macro_rules! int_impl { /// wrapping the truncated bits to the beginning of the resulting /// integer. /// + /// `rotate_right(n)` is equivalent to applying `rotate_right(1)` a total of `n` times. In + /// particular, a rotation by the number of bits in `self` returns the input value + /// unchanged. + /// /// Please note this isn't the same operation as the `>>` shifting operator! /// /// # Examples @@ -307,6 +316,7 @@ macro_rules! 
int_impl { #[doc = concat!("let m = ", $rot_op, ";")] /// #[doc = concat!("assert_eq!(n.rotate_right(", $rot, "), m);")] + #[doc = concat!("assert_eq!(n.rotate_right(1024), n);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")] @@ -992,10 +1002,10 @@ macro_rules! int_impl { /// /// ``` /// #![feature(exact_div)] - #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MIN + 1).checked_exact_div(-1), Some(", stringify!($Max), "));")] - #[doc = concat!("assert_eq!((-5", stringify!($SelfT), ").checked_exact_div(2), None);")] - #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN.checked_exact_div(-1), None);")] - #[doc = concat!("assert_eq!((1", stringify!($SelfT), ").checked_exact_div(0), None);")] + #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MIN + 1).checked_div_exact(-1), Some(", stringify!($Max), "));")] + #[doc = concat!("assert_eq!((-5", stringify!($SelfT), ").checked_div_exact(2), None);")] + #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN.checked_div_exact(-1), None);")] + #[doc = concat!("assert_eq!((1", stringify!($SelfT), ").checked_div_exact(0), None);")] /// ``` #[unstable( feature = "exact_div", @@ -1004,7 +1014,7 @@ macro_rules! int_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn checked_exact_div(self, rhs: Self) -> Option { + pub const fn checked_div_exact(self, rhs: Self) -> Option { if intrinsics::unlikely(rhs == 0 || ((self == Self::MIN) && (rhs == -1))) { None } else { @@ -1019,29 +1029,33 @@ macro_rules! int_impl { } } - /// Checked integer division without remainder. Computes `self / rhs`. + /// Integer division without remainder. Computes `self / rhs`, returning `None` if `self % rhs != 0`. /// /// # Panics /// - /// This function will panic if `rhs == 0`, the division results in overflow, - /// or `self % rhs != 0`. + /// This function will panic if `rhs == 0`. + /// + /// ## Overflow behavior + /// + /// On overflow, this function will panic if overflow checks are enabled (default in debug + /// mode) and wrap if overflow checks are disabled (default in release mode). /// /// # Examples /// /// ``` /// #![feature(exact_div)] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".exact_div(2), 32);")] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".exact_div(32), 2);")] - #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MIN + 1).exact_div(-1), ", stringify!($Max), ");")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".div_exact(2), Some(32));")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".div_exact(32), Some(2));")] + #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MIN + 1).div_exact(-1), Some(", stringify!($Max), "));")] + #[doc = concat!("assert_eq!(65", stringify!($SelfT), ".div_exact(2), None);")] /// ``` - /// /// ```should_panic /// #![feature(exact_div)] - #[doc = concat!("let _ = 65", stringify!($SelfT), ".exact_div(2);")] + #[doc = concat!("let _ = 64", stringify!($SelfT),".div_exact(0);")] /// ``` /// ```should_panic /// #![feature(exact_div)] - #[doc = concat!("let _ = ", stringify!($SelfT), "::MIN.exact_div(-1);")] + #[doc = concat!("let _ = ", stringify!($SelfT), "::MIN.div_exact(-1);")] /// ``` #[unstable( feature = "exact_div", @@ -1050,10 +1064,12 @@ macro_rules! 
int_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_div(self, rhs: Self) -> Self { - match self.checked_exact_div(rhs) { - Some(x) => x, - None => panic!("Failed to divide without remainder"), + #[rustc_inherit_overflow_checks] + pub const fn div_exact(self, rhs: Self) -> Option { + if self % rhs != 0 { + None + } else { + Some(self / rhs) } } @@ -1063,7 +1079,7 @@ macro_rules! int_impl { /// /// This results in undefined behavior when `rhs == 0`, `self % rhs != 0`, or #[doc = concat!("`self == ", stringify!($SelfT), "::MIN && rhs == -1`,")] - /// i.e. when [`checked_exact_div`](Self::checked_exact_div) would return `None`. + /// i.e. when [`checked_div_exact`](Self::checked_div_exact) would return `None`. #[unstable( feature = "exact_div", issue = "139911", @@ -1071,10 +1087,10 @@ macro_rules! int_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_div(self, rhs: Self) -> Self { + pub const unsafe fn unchecked_div_exact(self, rhs: Self) -> Self { assert_unsafe_precondition!( check_language_ub, - concat!(stringify!($SelfT), "::unchecked_exact_div cannot overflow, divide by zero, or leave a remainder"), + concat!(stringify!($SelfT), "::unchecked_div_exact cannot overflow, divide by zero, or leave a remainder"), ( lhs: $SelfT = self, rhs: $SelfT = rhs, @@ -1425,17 +1441,17 @@ macro_rules! int_impl { /// ``` /// #![feature(exact_bitshifts)] /// - #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".exact_shl(4), Some(0x10));")] - #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".exact_shl(", stringify!($SelfT), "::BITS - 2), Some(1 << ", stringify!($SelfT), "::BITS - 2));")] - #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".exact_shl(", stringify!($SelfT), "::BITS - 1), None);")] - #[doc = concat!("assert_eq!((-0x2", stringify!($SelfT), ").exact_shl(", stringify!($SelfT), "::BITS - 2), Some(-0x2 << ", stringify!($SelfT), "::BITS - 2));")] - #[doc = concat!("assert_eq!((-0x2", stringify!($SelfT), ").exact_shl(", stringify!($SelfT), "::BITS - 1), None);")] + #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".shl_exact(4), Some(0x10));")] + #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".shl_exact(", stringify!($SelfT), "::BITS - 2), Some(1 << ", stringify!($SelfT), "::BITS - 2));")] + #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".shl_exact(", stringify!($SelfT), "::BITS - 1), None);")] + #[doc = concat!("assert_eq!((-0x2", stringify!($SelfT), ").shl_exact(", stringify!($SelfT), "::BITS - 2), Some(-0x2 << ", stringify!($SelfT), "::BITS - 2));")] + #[doc = concat!("assert_eq!((-0x2", stringify!($SelfT), ").shl_exact(", stringify!($SelfT), "::BITS - 1), None);")] /// ``` #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_shl(self, rhs: u32) -> Option<$SelfT> { + pub const fn shl_exact(self, rhs: u32) -> Option<$SelfT> { if rhs < self.leading_zeros() || rhs < self.leading_ones() { // SAFETY: rhs is checked above Some(unsafe { self.unchecked_shl(rhs) }) @@ -1452,16 +1468,16 @@ macro_rules! int_impl { /// /// This results in undefined behavior when `rhs >= self.leading_zeros() && rhs >= /// self.leading_ones()` i.e. 
when - #[doc = concat!("[`", stringify!($SelfT), "::exact_shl`]")] + #[doc = concat!("[`", stringify!($SelfT), "::shl_exact`]")] /// would return `None`. #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_shl(self, rhs: u32) -> $SelfT { + pub const unsafe fn unchecked_shl_exact(self, rhs: u32) -> $SelfT { assert_unsafe_precondition!( check_library_ub, - concat!(stringify!($SelfT), "::unchecked_exact_shl cannot shift out bits that would change the value of the first bit"), + concat!(stringify!($SelfT), "::unchecked_shl_exact cannot shift out bits that would change the value of the first bit"), ( zeros: u32 = self.leading_zeros(), ones: u32 = self.leading_ones(), @@ -1497,7 +1513,7 @@ macro_rules! int_impl { } } - /// Strict shift right. Computes `self >> rhs`, panicking `rhs` is + /// Strict shift right. Computes `self >> rhs`, panicking if `rhs` is /// larger than or equal to the number of bits in `self`. /// /// # Panics @@ -1605,14 +1621,14 @@ macro_rules! int_impl { /// ``` /// #![feature(exact_bitshifts)] /// - #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".exact_shr(4), Some(0x1));")] - #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".exact_shr(5), None);")] + #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".shr_exact(4), Some(0x1));")] + #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".shr_exact(5), None);")] /// ``` #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_shr(self, rhs: u32) -> Option<$SelfT> { + pub const fn shr_exact(self, rhs: u32) -> Option<$SelfT> { if rhs <= self.trailing_zeros() && rhs < <$SelfT>::BITS { // SAFETY: rhs is checked above Some(unsafe { self.unchecked_shr(rhs) }) @@ -1630,16 +1646,16 @@ macro_rules! int_impl { /// This results in undefined behavior when `rhs > self.trailing_zeros() || rhs >= #[doc = concat!(stringify!($SelfT), "::BITS`")] /// i.e. when - #[doc = concat!("[`", stringify!($SelfT), "::exact_shr`]")] + #[doc = concat!("[`", stringify!($SelfT), "::shr_exact`]")] /// would return `None`. #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_shr(self, rhs: u32) -> $SelfT { + pub const unsafe fn unchecked_shr_exact(self, rhs: u32) -> $SelfT { assert_unsafe_precondition!( check_library_ub, - concat!(stringify!($SelfT), "::unchecked_exact_shr cannot shift out non-zero bits"), + concat!(stringify!($SelfT), "::unchecked_shr_exact cannot shift out non-zero bits"), ( zeros: u32 = self.trailing_zeros(), bits: u32 = <$SelfT>::BITS, @@ -1714,6 +1730,7 @@ macro_rules! int_impl { /// /// ``` #[doc = concat!("assert_eq!(8", stringify!($SelfT), ".checked_pow(2), Some(64));")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".checked_pow(0), Some(1));")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.checked_pow(2), None);")] /// ``` @@ -1755,6 +1772,7 @@ macro_rules! int_impl { /// /// ``` #[doc = concat!("assert_eq!(8", stringify!($SelfT), ".strict_pow(2), 64);")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".strict_pow(0), 1);")] /// ``` /// /// The following panics because of overflow: @@ -2027,6 +2045,7 @@ macro_rules! 
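[Editor's aside, not part of the diff] The matching `shr_exact` check — `rhs <= self.trailing_zeros() && rhs < Self::BITS` — says a right shift is exact when it only discards zero bits. A stable sketch with a hypothetical helper:

```rust
// Stable sketch of the condition behind `shr_exact` from the diff.
fn shr_exact_u8(x: u8, rhs: u32) -> Option<u8> {
    if rhs <= x.trailing_zeros() && rhs < u8::BITS {
        Some(x >> rhs) // only zero bits are shifted out
    } else {
        None
    }
}

fn main() {
    assert_eq!(shr_exact_u8(0x10, 4), Some(0x1));
    assert_eq!(shr_exact_u8(0x10, 5), None); // would shift out the set bit
    assert_eq!(shr_exact_u8(0, 7), Some(0)); // all-zero value: any in-range shift is exact
}
```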
int_impl { /// /// ``` #[doc = concat!("assert_eq!((-4", stringify!($SelfT), ").saturating_pow(3), -64);")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".saturating_pow(0), 1);")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN.saturating_pow(2), ", stringify!($SelfT), "::MAX);")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN.saturating_pow(3), ", stringify!($SelfT), "::MIN);")] /// ``` @@ -2371,6 +2390,7 @@ macro_rules! int_impl { #[doc = concat!("assert_eq!(3", stringify!($SelfT), ".wrapping_pow(4), 81);")] /// assert_eq!(3i8.wrapping_pow(5), -13); /// assert_eq!(3i8.wrapping_pow(6), -39); + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".wrapping_pow(0), 1);")] /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] @@ -2705,7 +2725,7 @@ macro_rules! int_impl { Self::carrying_mul_add(self, rhs, carry, 0) } - /// Calculates the "full multiplication" `self * rhs + carry1 + carry2` + /// Calculates the "full multiplication" `self * rhs + carry + add` /// without the possibility to overflow. /// /// This returns the low-order (wrapping) bits and the high-order (overflow) bits @@ -2961,6 +2981,7 @@ macro_rules! int_impl { /// /// ``` #[doc = concat!("assert_eq!(3", stringify!($SelfT), ".overflowing_pow(4), (81, false));")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".overflowing_pow(0), (1, false));")] /// assert_eq!(3i8.overflowing_pow(5), (-13, true)); /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] @@ -3004,6 +3025,7 @@ macro_rules! int_impl { #[doc = concat!("let x: ", stringify!($SelfT), " = 2; // or any other integer type")] /// /// assert_eq!(x.pow(5), 32); + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".pow(0), 1);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] diff --git a/core/src/num/mod.rs b/core/src/num/mod.rs index c75ee11d15efe..35141dfeb3a6d 100644 --- a/core/src/num/mod.rs +++ b/core/src/num/mod.rs @@ -1422,6 +1422,10 @@ macro_rules! from_str_int_impl { /// whitespace) represent an error. Underscores (which are accepted in Rust literals) /// also represent an error. /// + /// # See also + /// For parsing numbers in other bases, such as binary or hexadecimal, + /// see [`from_str_radix`][Self::from_str_radix]. + /// /// # Examples /// /// ``` @@ -1467,6 +1471,14 @@ macro_rules! from_str_int_impl { /// /// This function panics if `radix` is not in the range from 2 to 36. /// + /// # See also + /// If the string to be parsed is in base 10 (decimal), + /// [`from_str`] or [`str::parse`] can also be used. + /// + // FIXME(#122566): These HTML links work around a rustdoc-json test failure. + /// [`from_str`]: #method.from_str + /// [`str::parse`]: primitive.str.html#method.parse + /// /// # Examples /// /// ``` diff --git a/core/src/num/nonzero.rs b/core/src/num/nonzero.rs index d9184e3c9c229..ee375dbaaab2d 100644 --- a/core/src/num/nonzero.rs +++ b/core/src/num/nonzero.rs @@ -1,7 +1,7 @@ //! Definitions of integer that is known not to equal zero. 
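[Editor's aside, not part of the diff] The doc examples added throughout these hunks pin down the `0.pow(0) == 1` convention for every `pow` flavor. The same identity already holds for the stable methods:

```rust
// Quick stable-Rust check of the 0^0 == 1 convention the new examples assert.
fn main() {
    assert_eq!(0_i32.pow(0), 1);
    assert_eq!(0_i32.checked_pow(0), Some(1));
    assert_eq!(0_i32.wrapping_pow(0), 1);
    assert_eq!(0_i32.saturating_pow(0), 1);
    assert_eq!(0_i32.overflowing_pow(0), (1, false));
}
```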
use super::{IntErrorKind, ParseIntError}; -use crate::clone::UseCloned; +use crate::clone::{TrivialClone, UseCloned}; use crate::cmp::Ordering; use crate::hash::{Hash, Hasher}; use crate::marker::{Destruct, Freeze, StructuralPartialEq}; @@ -199,6 +199,10 @@ impl UseCloned for NonZero where T: ZeroablePrimitive {} #[stable(feature = "nonzero", since = "1.28.0")] impl Copy for NonZero where T: ZeroablePrimitive {} +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for NonZero where T: ZeroablePrimitive {} + #[stable(feature = "nonzero", since = "1.28.0")] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] impl const PartialEq for NonZero @@ -660,12 +664,15 @@ macro_rules! nonzero_integer { without modifying the original"] #[inline(always)] pub const fn isolate_highest_one(self) -> Self { - let n = self.get() & (((1 as $Int) << (<$Int>::BITS - 1)).wrapping_shr(self.leading_zeros())); - // SAFETY: // `self` is non-zero, so masking to preserve only the most // significant set bit will result in a non-zero `n`. - unsafe { NonZero::new_unchecked(n) } + // and self.leading_zeros() is always < $INT::BITS since + // at least one of the bits in the number is not zero + unsafe { + let bit = (((1 as $Uint) << (<$Uint>::BITS - 1)).unchecked_shr(self.leading_zeros())); + NonZero::new_unchecked(bit as $Int) + } } /// Returns `self` with only the least significant bit set. @@ -708,9 +715,9 @@ macro_rules! nonzero_integer { /// # use core::num::NonZero; /// # fn main() { test().unwrap(); } /// # fn test() -> Option<()> { - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x1)?.highest_one(), 0);")] - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x10)?.highest_one(), 4);")] - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x1f)?.highest_one(), 4);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1)?.highest_one(), 0);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1_0000)?.highest_one(), 4);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1_1111)?.highest_one(), 4);")] /// # Some(()) /// # } /// ``` @@ -732,9 +739,9 @@ macro_rules! nonzero_integer { /// # use core::num::NonZero; /// # fn main() { test().unwrap(); } /// # fn test() -> Option<()> { - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x1)?.lowest_one(), 0);")] - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x10)?.lowest_one(), 4);")] - #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0x1f)?.lowest_one(), 0);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1)?.lowest_one(), 0);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1_0000)?.lowest_one(), 4);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1_1111)?.lowest_one(), 0);")] /// # Some(()) /// # } /// ``` @@ -1373,7 +1380,6 @@ macro_rules! nonzero_integer_signedness_dependent_impls { /// # Examples /// /// ``` - /// # #![feature(unsigned_nonzero_div_ceil)] /// # use std::num::NonZero; #[doc = concat!("let one = NonZero::new(1", stringify!($Int), ").unwrap();")] #[doc = concat!("let max = NonZero::new(", stringify!($Int), "::MAX).unwrap();")] @@ -1383,7 +1389,8 @@ macro_rules! 
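[Editor's aside, not part of the diff] The reworked `NonZero::isolate_highest_one` body computes the result by shifting the top bit right by `leading_zeros()`, which is always a valid shift for a non-zero input. A stable sketch on a plain integer (helper name hypothetical):

```rust
// Stable-Rust sketch of the new isolate_highest_one formula from the diff.
fn isolate_highest_one_u32(n: u32) -> u32 {
    assert_ne!(n, 0); // NonZero guarantees this statically
    (1u32 << (u32::BITS - 1)) >> n.leading_zeros()
}

fn main() {
    assert_eq!(isolate_highest_one_u32(0b1), 0b1);
    assert_eq!(isolate_highest_one_u32(0b1_0110), 0b1_0000);
    assert_eq!(isolate_highest_one_u32(u32::MAX), 1 << 31);
}
```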
nonzero_integer_signedness_dependent_impls { #[doc = concat!("let three = NonZero::new(3", stringify!($Int), ").unwrap();")] /// assert_eq!(three.div_ceil(two), two); /// ``` - #[unstable(feature = "unsigned_nonzero_div_ceil", issue = "132968")] + #[stable(feature = "unsigned_nonzero_div_ceil", since = "1.92.0")] + #[rustc_const_stable(feature = "unsigned_nonzero_div_ceil", since = "1.92.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] @@ -1774,6 +1781,33 @@ macro_rules! nonzero_integer_signedness_dependent_methods { // SAFETY: `self.get()` can't be zero unsafe { NonZero::new_unchecked(self.get().cast_signed()) } } + + /// Returns the minimum number of bits required to represent `self`. + /// + /// # Examples + /// + /// ``` + /// #![feature(uint_bit_width)] + /// + /// # use core::num::NonZero; + /// # + /// # fn main() { test().unwrap(); } + /// # fn test() -> Option<()> { + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::MIN.bit_width(), NonZero::new(1)?);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b111)?.bit_width(), NonZero::new(3)?);")] + #[doc = concat!("assert_eq!(NonZero::<", stringify!($Int), ">::new(0b1110)?.bit_width(), NonZero::new(4)?);")] + /// # Some(()) + /// # } + /// ``` + #[unstable(feature = "uint_bit_width", issue = "142326")] + #[must_use = "this returns the result of the operation, \ + without modifying the original"] + #[inline(always)] + pub const fn bit_width(self) -> NonZero { + // SAFETY: Since `self.leading_zeros()` is always less than + // `Self::BITS`, this subtraction can never be zero. + unsafe { NonZero::new_unchecked(Self::BITS - self.leading_zeros()) } + } }; // Associated items for signed nonzero types only. diff --git a/core/src/num/uint_macros.rs b/core/src/num/uint_macros.rs index 752498bfbd815..1d108cb0cf4a9 100644 --- a/core/src/num/uint_macros.rs +++ b/core/src/num/uint_macros.rs @@ -272,10 +272,10 @@ macro_rules! uint_impl { /// ``` /// #![feature(int_lowest_highest_one)] /// - #[doc = concat!("assert_eq!(0x0_", stringify!($SelfT), ".highest_one(), None);")] - #[doc = concat!("assert_eq!(0x1_", stringify!($SelfT), ".highest_one(), Some(0));")] - #[doc = concat!("assert_eq!(0x10_", stringify!($SelfT), ".highest_one(), Some(4));")] - #[doc = concat!("assert_eq!(0x1f_", stringify!($SelfT), ".highest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b0_", stringify!($SelfT), ".highest_one(), None);")] + #[doc = concat!("assert_eq!(0b1_", stringify!($SelfT), ".highest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b1_0000_", stringify!($SelfT), ".highest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b1_1111_", stringify!($SelfT), ".highest_one(), Some(4));")] /// ``` #[unstable(feature = "int_lowest_highest_one", issue = "145203")] #[must_use = "this returns the result of the operation, \ @@ -296,10 +296,10 @@ macro_rules! 
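[Editor's aside, not part of the diff] The new `NonZero::bit_width` (unstable `uint_bit_width` feature) is `BITS - leading_zeros()`, which is necessarily non-zero for a non-zero value. A stable sketch of the same formula, with a hypothetical helper name:

```rust
// Stable sketch of the bit_width formula introduced in the diff.
fn bit_width_u8(n: u8) -> u32 {
    assert_ne!(n, 0); // NonZero guarantees this statically
    u8::BITS - n.leading_zeros()
}

fn main() {
    assert_eq!(bit_width_u8(1), 1);     // NonZero::<u8>::MIN
    assert_eq!(bit_width_u8(0b111), 3);
    assert_eq!(bit_width_u8(0b1110), 4);
    assert_eq!(bit_width_u8(u8::MAX), 8);
}
```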
uint_impl { /// ``` /// #![feature(int_lowest_highest_one)] /// - #[doc = concat!("assert_eq!(0x0_", stringify!($SelfT), ".lowest_one(), None);")] - #[doc = concat!("assert_eq!(0x1_", stringify!($SelfT), ".lowest_one(), Some(0));")] - #[doc = concat!("assert_eq!(0x10_", stringify!($SelfT), ".lowest_one(), Some(4));")] - #[doc = concat!("assert_eq!(0x1f_", stringify!($SelfT), ".lowest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b0_", stringify!($SelfT), ".lowest_one(), None);")] + #[doc = concat!("assert_eq!(0b1_", stringify!($SelfT), ".lowest_one(), Some(0));")] + #[doc = concat!("assert_eq!(0b1_0000_", stringify!($SelfT), ".lowest_one(), Some(4));")] + #[doc = concat!("assert_eq!(0b1_1111_", stringify!($SelfT), ".lowest_one(), Some(0));")] /// ``` #[unstable(feature = "int_lowest_highest_one", issue = "145203")] #[must_use = "this returns the result of the operation, \ @@ -336,6 +336,10 @@ macro_rules! uint_impl { /// Shifts the bits to the left by a specified amount, `n`, /// wrapping the truncated bits to the end of the resulting integer. /// + /// `rotate_left(n)` is equivalent to applying `rotate_left(1)` a total of `n` times. In + /// particular, a rotation by the number of bits in `self` returns the input value + /// unchanged. + /// /// Please note this isn't the same operation as the `<<` shifting operator! /// /// # Examples @@ -345,12 +349,14 @@ macro_rules! uint_impl { #[doc = concat!("let m = ", $rot_result, ";")] /// #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] + #[doc = concat!("assert_eq!(n.rotate_left(1024), n);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] + #[rustc_allow_const_fn_unstable(const_trait_impl)] // for the intrinsic fallback pub const fn rotate_left(self, n: u32) -> Self { return intrinsics::rotate_left(self, n); } @@ -359,6 +365,10 @@ macro_rules! uint_impl { /// wrapping the truncated bits to the beginning of the resulting /// integer. /// + /// `rotate_right(n)` is equivalent to applying `rotate_right(1)` a total of `n` times. In + /// particular, a rotation by the number of bits in `self` returns the input value + /// unchanged. + /// /// Please note this isn't the same operation as the `>>` shifting operator! /// /// # Examples @@ -368,12 +378,14 @@ macro_rules! uint_impl { #[doc = concat!("let m = ", $rot_op, ";")] /// #[doc = concat!("assert_eq!(n.rotate_right(", $rot, "), m);")] + #[doc = concat!("assert_eq!(n.rotate_right(1024), n);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_math", since = "1.32.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] + #[rustc_allow_const_fn_unstable(const_trait_impl)] // for the intrinsic fallback pub const fn rotate_right(self, n: u32) -> Self { return intrinsics::rotate_right(self, n); } @@ -1222,10 +1234,10 @@ macro_rules! 
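[Editor's aside, not part of the diff] The new rotation doc text states that rotating by the bit width (or any multiple of it, such as 1024) returns the input unchanged, unlike `<<`/`>>`. This is already observable on stable:

```rust
// Rotation is modular in the bit width; shifts are not.
fn main() {
    let n: u8 = 0x82;
    assert_eq!(n.rotate_left(1024), n);  // 1024 is a multiple of 8
    assert_eq!(n.rotate_right(1024), n);
    assert_eq!(n.rotate_left(3), n.rotate_left(3 % u8::BITS));
}
```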
uint_impl { /// /// ``` /// #![feature(exact_div)] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_exact_div(2), Some(32));")] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_exact_div(32), Some(2));")] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_exact_div(0), None);")] - #[doc = concat!("assert_eq!(65", stringify!($SelfT), ".checked_exact_div(2), None);")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_div_exact(2), Some(32));")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_div_exact(32), Some(2));")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".checked_div_exact(0), None);")] + #[doc = concat!("assert_eq!(65", stringify!($SelfT), ".checked_div_exact(2), None);")] /// ``` #[unstable( feature = "exact_div", @@ -1234,7 +1246,7 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn checked_exact_div(self, rhs: Self) -> Option { + pub const fn checked_div_exact(self, rhs: Self) -> Option { if intrinsics::unlikely(rhs == 0) { None } else { @@ -1249,23 +1261,19 @@ macro_rules! uint_impl { } } - /// Checked integer division without remainder. Computes `self / rhs`. + /// Integer division without remainder. Computes `self / rhs`, returning `None` if `self % rhs != 0`. /// /// # Panics /// - /// This function will panic if `rhs == 0` or `self % rhs != 0`. + /// This function will panic if `rhs == 0`. /// /// # Examples /// /// ``` /// #![feature(exact_div)] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".exact_div(2), 32);")] - #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".exact_div(32), 2);")] - /// ``` - /// - /// ```should_panic - /// #![feature(exact_div)] - #[doc = concat!("let _ = 65", stringify!($SelfT), ".exact_div(2);")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".div_exact(2), Some(32));")] + #[doc = concat!("assert_eq!(64", stringify!($SelfT), ".div_exact(32), Some(2));")] + #[doc = concat!("assert_eq!(65", stringify!($SelfT), ".div_exact(2), None);")] /// ``` #[unstable( feature = "exact_div", @@ -1274,10 +1282,12 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_div(self, rhs: Self) -> Self { - match self.checked_exact_div(rhs) { - Some(x) => x, - None => panic!("Failed to divide without remainder"), + #[rustc_inherit_overflow_checks] + pub const fn div_exact(self, rhs: Self) -> Option { + if self % rhs != 0 { + None + } else { + Some(self / rhs) } } @@ -1286,7 +1296,7 @@ macro_rules! uint_impl { /// # Safety /// /// This results in undefined behavior when `rhs == 0` or `self % rhs != 0`, - /// i.e. when [`checked_exact_div`](Self::checked_exact_div) would return `None`. + /// i.e. when [`checked_div_exact`](Self::checked_div_exact) would return `None`. #[unstable( feature = "exact_div", issue = "139911", @@ -1294,10 +1304,10 @@ macro_rules! 
uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_div(self, rhs: Self) -> Self { + pub const unsafe fn unchecked_div_exact(self, rhs: Self) -> Self { assert_unsafe_precondition!( check_language_ub, - concat!(stringify!($SelfT), "::unchecked_exact_div divide by zero or leave a remainder"), + concat!(stringify!($SelfT), "::unchecked_div_exact divide by zero or leave a remainder"), ( lhs: $SelfT = self, rhs: $SelfT = rhs, @@ -1832,14 +1842,14 @@ macro_rules! uint_impl { /// ``` /// #![feature(exact_bitshifts)] /// - #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".exact_shl(4), Some(0x10));")] - #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".exact_shl(129), None);")] + #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".shl_exact(4), Some(0x10));")] + #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".shl_exact(129), None);")] /// ``` #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_shl(self, rhs: u32) -> Option<$SelfT> { + pub const fn shl_exact(self, rhs: u32) -> Option<$SelfT> { if rhs <= self.leading_zeros() && rhs < <$SelfT>::BITS { // SAFETY: rhs is checked above Some(unsafe { self.unchecked_shl(rhs) }) @@ -1857,16 +1867,16 @@ macro_rules! uint_impl { /// This results in undefined behavior when `rhs > self.leading_zeros() || rhs >= #[doc = concat!(stringify!($SelfT), "::BITS`")] /// i.e. when - #[doc = concat!("[`", stringify!($SelfT), "::exact_shl`]")] + #[doc = concat!("[`", stringify!($SelfT), "::shl_exact`]")] /// would return `None`. #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_shl(self, rhs: u32) -> $SelfT { + pub const unsafe fn unchecked_shl_exact(self, rhs: u32) -> $SelfT { assert_unsafe_precondition!( check_library_ub, - concat!(stringify!($SelfT), "::exact_shl_unchecked cannot shift out non-zero bits"), + concat!(stringify!($SelfT), "::unchecked_shl_exact cannot shift out non-zero bits"), ( zeros: u32 = self.leading_zeros(), bits: u32 = <$SelfT>::BITS, @@ -1902,7 +1912,7 @@ macro_rules! uint_impl { } } - /// Strict shift right. Computes `self >> rhs`, panicking `rhs` is + /// Strict shift right. Computes `self >> rhs`, panicking if `rhs` is /// larger than or equal to the number of bits in `self`. /// /// # Panics @@ -2004,14 +2014,14 @@ macro_rules! uint_impl { /// ``` /// #![feature(exact_bitshifts)] /// - #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".exact_shr(4), Some(0x1));")] - #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".exact_shr(5), None);")] + #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".shr_exact(4), Some(0x1));")] + #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".shr_exact(5), None);")] /// ``` #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const fn exact_shr(self, rhs: u32) -> Option<$SelfT> { + pub const fn shr_exact(self, rhs: u32) -> Option<$SelfT> { if rhs <= self.trailing_zeros() && rhs < <$SelfT>::BITS { // SAFETY: rhs is checked above Some(unsafe { self.unchecked_shr(rhs) }) @@ -2029,16 +2039,16 @@ macro_rules! 
uint_impl { /// This results in undefined behavior when `rhs > self.trailing_zeros() || rhs >= #[doc = concat!(stringify!($SelfT), "::BITS`")] /// i.e. when - #[doc = concat!("[`", stringify!($SelfT), "::exact_shr`]")] + #[doc = concat!("[`", stringify!($SelfT), "::shr_exact`]")] /// would return `None`. #[unstable(feature = "exact_bitshifts", issue = "144336")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] - pub const unsafe fn unchecked_exact_shr(self, rhs: u32) -> $SelfT { + pub const unsafe fn unchecked_shr_exact(self, rhs: u32) -> $SelfT { assert_unsafe_precondition!( check_library_ub, - concat!(stringify!($SelfT), "::exact_shr_unchecked cannot shift out non-zero bits"), + concat!(stringify!($SelfT), "::unchecked_shr_exact cannot shift out non-zero bits"), ( zeros: u32 = self.trailing_zeros(), bits: u32 = <$SelfT>::BITS, @@ -2057,6 +2067,7 @@ macro_rules! uint_impl { /// /// ``` #[doc = concat!("assert_eq!(2", stringify!($SelfT), ".checked_pow(5), Some(32));")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".checked_pow(0), Some(1));")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.checked_pow(2), None);")] /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] @@ -2097,6 +2108,7 @@ macro_rules! uint_impl { /// /// ``` #[doc = concat!("assert_eq!(2", stringify!($SelfT), ".strict_pow(5), 32);")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".strict_pow(0), 1);")] /// ``` /// /// The following panics because of overflow: @@ -2271,6 +2283,7 @@ macro_rules! uint_impl { /// /// ``` #[doc = concat!("assert_eq!(4", stringify!($SelfT), ".saturating_pow(3), 64);")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".saturating_pow(0), 1);")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_pow(2), ", stringify!($SelfT), "::MAX);")] /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] @@ -2580,6 +2593,7 @@ macro_rules! uint_impl { /// ``` #[doc = concat!("assert_eq!(3", stringify!($SelfT), ".wrapping_pow(5), 243);")] /// assert_eq!(3u8.wrapping_pow(6), 217); + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".wrapping_pow(0), 1);")] /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] @@ -3000,7 +3014,7 @@ macro_rules! uint_impl { Self::carrying_mul_add(self, rhs, carry, 0) } - /// Calculates the "full multiplication" `self * rhs + carry1 + carry2`. + /// Calculates the "full multiplication" `self * rhs + carry + add`. /// /// This returns the low-order (wrapping) bits and the high-order (overflow) bits /// of the result as two separate values, in that order. @@ -3254,6 +3268,7 @@ macro_rules! uint_impl { /// /// ``` #[doc = concat!("assert_eq!(3", stringify!($SelfT), ".overflowing_pow(5), (243, false));")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".overflowing_pow(0), (1, false));")] /// assert_eq!(3u8.overflowing_pow(6), (217, true)); /// ``` #[stable(feature = "no_panic_pow", since = "1.34.0")] @@ -3295,6 +3310,7 @@ macro_rules! uint_impl { /// /// ``` #[doc = concat!("assert_eq!(2", stringify!($SelfT), ".pow(5), 32);")] + #[doc = concat!("assert_eq!(0_", stringify!($SelfT), ".pow(0), 1);")] /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] @@ -3552,7 +3568,6 @@ macro_rules! 
uint_impl { #[rustc_const_stable(feature = "unsigned_is_multiple_of", since = "1.87.0")] #[must_use] #[inline] - #[rustc_inherit_overflow_checks] pub const fn is_multiple_of(self, rhs: Self) -> bool { match rhs { 0 => self == 0, diff --git a/core/src/ops/control_flow.rs b/core/src/ops/control_flow.rs index b760a7c4e21eb..3cc184f0ab75c 100644 --- a/core/src/ops/control_flow.rs +++ b/core/src/ops/control_flow.rs @@ -1,3 +1,4 @@ +use crate::marker::Destruct; use crate::{convert, ops}; /// Used to tell an operation whether it should exit early or go on as usual. @@ -150,7 +151,8 @@ impl ControlFlow { /// ``` #[inline] #[stable(feature = "control_flow_enum_is", since = "1.59.0")] - pub fn is_break(&self) -> bool { + #[rustc_const_unstable(feature = "min_const_control_flow", issue = "148738")] + pub const fn is_break(&self) -> bool { matches!(*self, ControlFlow::Break(_)) } @@ -166,7 +168,8 @@ impl ControlFlow { /// ``` #[inline] #[stable(feature = "control_flow_enum_is", since = "1.59.0")] - pub fn is_continue(&self) -> bool { + #[rustc_const_unstable(feature = "min_const_control_flow", issue = "148738")] + pub const fn is_continue(&self) -> bool { matches!(*self, ControlFlow::Continue(_)) } @@ -183,7 +186,11 @@ impl ControlFlow { /// ``` #[inline] #[stable(feature = "control_flow_enum", since = "1.83.0")] - pub fn break_value(self) -> Option { + #[rustc_const_unstable(feature = "const_control_flow", issue = "148739")] + pub const fn break_value(self) -> Option + where + Self: [const] Destruct, + { match self { ControlFlow::Continue(..) => None, ControlFlow::Break(x) => Some(x), @@ -257,7 +264,8 @@ impl ControlFlow { /// ``` #[inline] #[unstable(feature = "control_flow_ok", issue = "140266")] - pub fn break_ok(self) -> Result { + #[rustc_const_unstable(feature = "min_const_control_flow", issue = "148738")] + pub const fn break_ok(self) -> Result { match self { ControlFlow::Continue(c) => Err(c), ControlFlow::Break(b) => Ok(b), @@ -268,7 +276,11 @@ impl ControlFlow { /// to the break value in case it exists. #[inline] #[stable(feature = "control_flow_enum", since = "1.83.0")] - pub fn map_break(self, f: impl FnOnce(B) -> T) -> ControlFlow { + #[rustc_const_unstable(feature = "const_control_flow", issue = "148739")] + pub const fn map_break(self, f: F) -> ControlFlow + where + F: [const] FnOnce(B) -> T + [const] Destruct, + { match self { ControlFlow::Continue(x) => ControlFlow::Continue(x), ControlFlow::Break(x) => ControlFlow::Break(f(x)), @@ -288,7 +300,11 @@ impl ControlFlow { /// ``` #[inline] #[stable(feature = "control_flow_enum", since = "1.83.0")] - pub fn continue_value(self) -> Option { + #[rustc_const_unstable(feature = "const_control_flow", issue = "148739")] + pub const fn continue_value(self) -> Option + where + Self: [const] Destruct, + { match self { ControlFlow::Continue(x) => Some(x), ControlFlow::Break(..) => None, @@ -361,7 +377,8 @@ impl ControlFlow { /// ``` #[inline] #[unstable(feature = "control_flow_ok", issue = "140266")] - pub fn continue_ok(self) -> Result { + #[rustc_const_unstable(feature = "min_const_control_flow", issue = "148738")] + pub const fn continue_ok(self) -> Result { match self { ControlFlow::Continue(c) => Ok(c), ControlFlow::Break(b) => Err(b), @@ -372,7 +389,11 @@ impl ControlFlow { /// to the continue value in case it exists. 
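[Editor's aside, not part of the diff] The `ControlFlow` hunks only add `const` to already-stable accessors; their runtime behavior is unchanged. A stable reminder of what those accessors return:

```rust
use std::ops::ControlFlow;

fn main() {
    let b: ControlFlow<i32, &str> = ControlFlow::Break(3);
    assert!(b.is_break());
    assert_eq!(b.break_value(), Some(3));

    let c: ControlFlow<i32, &str> = ControlFlow::Continue("go on");
    assert!(c.is_continue());
    assert_eq!(c.continue_value(), Some("go on"));
}
```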
#[inline] #[stable(feature = "control_flow_enum", since = "1.83.0")] - pub fn map_continue(self, f: impl FnOnce(C) -> T) -> ControlFlow { + #[rustc_const_unstable(feature = "const_control_flow", issue = "148739")] + pub const fn map_continue(self, f: F) -> ControlFlow + where + F: [const] FnOnce(C) -> T + [const] Destruct, + { match self { ControlFlow::Continue(x) => ControlFlow::Continue(f(x)), ControlFlow::Break(x) => ControlFlow::Break(x), diff --git a/core/src/ops/index_range.rs b/core/src/ops/index_range.rs index 507fa9460bea6..84395ddadf2b7 100644 --- a/core/src/ops/index_range.rs +++ b/core/src/ops/index_range.rs @@ -9,7 +9,8 @@ use crate::ub_checks; /// /// (Normal `Range` code needs to handle degenerate ranges like `10..0`, /// which takes extra checks compared to only handling the canonical form.) -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Debug)] +#[derive_const(Clone, Eq, PartialEq)] pub(crate) struct IndexRange { start: usize, end: usize, @@ -54,7 +55,7 @@ impl IndexRange { /// # Safety /// - Can only be called when `start < end`, aka when `len > 0`. #[inline] - unsafe fn next_unchecked(&mut self) -> usize { + const unsafe fn next_unchecked(&mut self) -> usize { debug_assert!(self.start < self.end); let value = self.start; @@ -66,7 +67,7 @@ impl IndexRange { /// # Safety /// - Can only be called when `start < end`, aka when `len > 0`. #[inline] - unsafe fn next_back_unchecked(&mut self) -> usize { + const unsafe fn next_back_unchecked(&mut self) -> usize { debug_assert!(self.start < self.end); // SAFETY: The range isn't empty, so this cannot overflow @@ -116,7 +117,7 @@ impl IndexRange { } #[inline] - fn assume_range(&self) { + const fn assume_range(&self) { // SAFETY: This is the type invariant unsafe { crate::hint::assert_unchecked(self.start <= self.end) } } diff --git a/core/src/ops/range.rs b/core/src/ops/range.rs index 58a9431bd845d..a0b74ff383ea4 100644 --- a/core/src/ops/range.rs +++ b/core/src/ops/range.rs @@ -1,6 +1,6 @@ use crate::fmt; use crate::hash::Hash; - +use crate::marker::Destruct; /// An unbounded range (`..`). /// /// `RangeFull` is primarily used as a [slicing index], its shorthand is `..`. @@ -38,7 +38,8 @@ use crate::hash::Hash; /// [slicing index]: crate::slice::SliceIndex #[lang = "RangeFull"] #[doc(alias = "..")] -#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] +#[derive(Copy, Hash)] +#[derive_const(Clone, Default, Eq, PartialEq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct RangeFull; @@ -75,7 +76,8 @@ impl fmt::Debug for RangeFull { /// ``` #[lang = "Range"] #[doc(alias = "..")] -#[derive(Clone, Default, PartialEq, Eq, Hash)] // not Copy -- see #27186 +#[derive(Eq, Hash)] +#[derive_const(Clone, Default, PartialEq)] // not Copy -- see #27186 #[stable(feature = "rust1", since = "1.0.0")] pub struct Range { /// The lower bound of the range (inclusive). 
@@ -117,10 +119,11 @@ impl> Range { /// ``` #[inline] #[stable(feature = "range_contains", since = "1.35.0")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -144,7 +147,11 @@ impl> Range { /// ``` #[inline] #[stable(feature = "range_is_empty", since = "1.47.0")] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn is_empty(&self) -> bool + where + Idx: [const] PartialOrd, + { !(self.start < self.end) } } @@ -184,7 +191,8 @@ impl> Range { /// ``` #[lang = "RangeFrom"] #[doc(alias = "..")] -#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 +#[derive(Eq, Hash)] +#[derive_const(Clone, PartialEq)] // not Copy -- see #27186 #[stable(feature = "rust1", since = "1.0.0")] pub struct RangeFrom { /// The lower bound of the range (inclusive). @@ -217,10 +225,11 @@ impl> RangeFrom { /// ``` #[inline] #[stable(feature = "range_contains", since = "1.35.0")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -266,7 +275,8 @@ impl> RangeFrom { /// [slicing index]: crate::slice::SliceIndex #[lang = "RangeTo"] #[doc(alias = "..")] -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Eq, Hash)] +#[derive_const(Clone, PartialEq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct RangeTo { /// The upper bound of the range (exclusive). 
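[Editor's aside, not part of the diff] `Range::contains` and `Range::is_empty` likewise only gain `const` here; the semantics the signatures describe (half-open bounds) are long stable:

```rust
fn main() {
    assert!((3..5).contains(&4));
    assert!(!(3..5).contains(&5)); // upper bound is exclusive
    assert!((3..3).is_empty());
    assert!((3..=3).contains(&3)); // inclusive range includes its end
}
```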
@@ -299,10 +309,11 @@ impl> RangeTo { /// ``` #[inline] #[stable(feature = "range_contains", since = "1.35.0")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -340,7 +351,8 @@ impl> RangeTo { /// ``` #[lang = "RangeInclusive"] #[doc(alias = "..=")] -#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186 +#[derive(Clone, Hash)] +#[derive_const(Eq, PartialEq)] // not Copy -- see #27186 #[stable(feature = "inclusive_range", since = "1.26.0")] pub struct RangeInclusive { // Note that the fields here are not public to allow changing the @@ -506,10 +518,11 @@ impl> RangeInclusive { /// ``` #[inline] #[stable(feature = "range_contains", since = "1.35.0")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -542,7 +555,11 @@ impl> RangeInclusive { /// ``` #[stable(feature = "range_is_empty", since = "1.47.0")] #[inline] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn is_empty(&self) -> bool + where + Idx: [const] PartialOrd, + { self.exhausted || !(self.start <= self.end) } } @@ -587,7 +604,8 @@ impl> RangeInclusive { /// [slicing index]: crate::slice::SliceIndex #[lang = "RangeToInclusive"] #[doc(alias = "..=")] -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Hash)] +#[derive(Clone, PartialEq, Eq)] #[stable(feature = "inclusive_range", since = "1.26.0")] pub struct RangeToInclusive { /// The upper bound of the range (inclusive) @@ -620,10 +638,11 @@ impl> RangeToInclusive { /// ``` #[inline] #[stable(feature = "range_contains", since = "1.35.0")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -668,7 +687,8 @@ impl> RangeToInclusive { /// /// [`BTreeMap::range`]: ../../std/collections/btree_map/struct.BTreeMap.html#method.range #[stable(feature = "collections_bound", since = "1.17.0")] -#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +#[derive(Copy, Debug, Hash)] +#[derive_const(Clone, Eq, PartialEq)] pub enum Bound { /// An inclusive bound. #[stable(feature = "collections_bound", since = "1.17.0")] @@ -685,7 +705,8 @@ impl Bound { /// Converts from `&Bound` to `Bound<&T>`. #[inline] #[stable(feature = "bound_as_ref_shared", since = "1.65.0")] - pub fn as_ref(&self) -> Bound<&T> { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn as_ref(&self) -> Bound<&T> { match *self { Included(ref x) => Included(x), Excluded(ref x) => Excluded(x), @@ -696,7 +717,7 @@ impl Bound { /// Converts from `&mut Bound` to `Bound<&mut T>`. 
#[inline] #[unstable(feature = "bound_as_ref", issue = "80996")] - pub fn as_mut(&mut self) -> Bound<&mut T> { + pub const fn as_mut(&mut self) -> Bound<&mut T> { match *self { Included(ref mut x) => Included(x), Excluded(ref mut x) => Excluded(x), @@ -778,7 +799,11 @@ impl Bound<&T> { /// ``` #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "bound_cloned", since = "1.55.0")] - pub fn cloned(self) -> Bound { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn cloned(self) -> Bound + where + T: [const] Clone, + { match self { Bound::Unbounded => Bound::Unbounded, Bound::Included(x) => Bound::Included(x.clone()), @@ -791,7 +816,8 @@ impl Bound<&T> { /// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`. #[stable(feature = "collections_range", since = "1.28.0")] #[rustc_diagnostic_item = "RangeBounds"] -pub trait RangeBounds { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +pub const trait RangeBounds { /// Start index bound. /// /// Returns the start value as a `Bound`. @@ -841,8 +867,8 @@ pub trait RangeBounds { #[stable(feature = "range_contains", since = "1.35.0")] fn contains(&self, item: &U) -> bool where - T: PartialOrd, - U: ?Sized + PartialOrd, + T: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { (match self.start_bound() { Included(start) => start <= item, @@ -909,7 +935,7 @@ pub trait RangeBounds { #[unstable(feature = "range_bounds_is_empty", issue = "137300")] fn is_empty(&self) -> bool where - T: PartialOrd, + T: [const] PartialOrd, { !match (self.start_bound(), self.end_bound()) { (Unbounded, _) | (_, Unbounded) => true, @@ -927,7 +953,8 @@ pub trait RangeBounds { /// `IntoBounds` is implemented by Rust’s built-in range types, produced /// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`. #[unstable(feature = "range_into_bounds", issue = "136903")] -pub trait IntoBounds: RangeBounds { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +pub const trait IntoBounds: [const] RangeBounds { /// Convert this range into the start and end bounds. /// Returns `(start_bound, end_bound)`. 
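[Editor's aside, not part of the diff] The `RangeBounds` trait itself is being const-ified; at runtime it is already stable, so a generic helper like the hypothetical one below shows how the default `contains` unifies all the range syntaxes:

```rust
use std::ops::RangeBounds;

fn in_range<R: RangeBounds<u32>>(r: R, x: u32) -> bool {
    r.contains(&x)
}

fn main() {
    assert!(in_range(.., 7));
    assert!(in_range(3.., 7));
    assert!(!in_range(..7, 7));  // exclusive end
    assert!(in_range(..=7, 7));  // inclusive end
}
```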
/// @@ -973,8 +1000,8 @@ pub trait IntoBounds: RangeBounds { fn intersect(self, other: R) -> (Bound, Bound) where Self: Sized, - T: Ord, - R: Sized + IntoBounds, + T: [const] Ord + [const] Destruct, + R: Sized + [const] IntoBounds, { let (self_start, self_end) = IntoBounds::into_bounds(self); let (other_start, other_end) = IntoBounds::into_bounds(other); @@ -1017,7 +1044,8 @@ pub trait IntoBounds: RangeBounds { use self::Bound::{Excluded, Included, Unbounded}; #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeFull { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeFull { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -1027,14 +1055,16 @@ impl RangeBounds for RangeFull { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeFull { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeFull { fn into_bounds(self) -> (Bound, Bound) { (Unbounded, Unbounded) } } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeFrom { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeFrom { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -1044,14 +1074,16 @@ impl RangeBounds for RangeFrom { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeFrom { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeFrom { fn into_bounds(self) -> (Bound, Bound) { (Included(self.start), Unbounded) } } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeTo { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeTo { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -1061,14 +1093,16 @@ impl RangeBounds for RangeTo { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeTo { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeTo { fn into_bounds(self) -> (Bound, Bound) { (Unbounded, Excluded(self.end)) } } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for Range { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for Range { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -1078,14 +1112,16 @@ impl RangeBounds for Range { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for Range { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for Range { fn into_bounds(self) -> (Bound, Bound) { (Included(self.start), Excluded(self.end)) } } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeInclusive { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -1101,7 +1137,8 @@ impl RangeBounds for RangeInclusive { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeInclusive { fn into_bounds(self) -> (Bound, Bound) { ( Included(self.start), @@ -1117,7 +1154,8 @@ impl IntoBounds for RangeInclusive { } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for 
RangeToInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeToInclusive { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -1127,14 +1165,16 @@ impl RangeBounds for RangeToInclusive { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeToInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeToInclusive { fn into_bounds(self) -> (Bound, Bound) { (Unbounded, Included(self.end)) } } #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for (Bound, Bound) { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for (Bound, Bound) { fn start_bound(&self) -> Bound<&T> { match *self { (Included(ref start), _) => Included(start), @@ -1153,14 +1193,16 @@ impl RangeBounds for (Bound, Bound) { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for (Bound, Bound) { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for (Bound, Bound) { fn into_bounds(self) -> (Bound, Bound) { self } } #[stable(feature = "collections_range", since = "1.28.0")] -impl<'a, T: ?Sized + 'a> RangeBounds for (Bound<&'a T>, Bound<&'a T>) { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl<'a, T: ?Sized + 'a> const RangeBounds for (Bound<&'a T>, Bound<&'a T>) { fn start_bound(&self) -> Bound<&T> { self.0 } @@ -1177,7 +1219,8 @@ impl<'a, T: ?Sized + 'a> RangeBounds for (Bound<&'a T>, Bound<&'a T>) { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..` with `(Bound::Included(start), Bound::Unbounded)`. #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeFrom<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeFrom<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -1193,7 +1236,8 @@ impl RangeBounds for RangeFrom<&T> { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `..end` with `(Bound::Unbounded, Bound::Excluded(end))`. #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeTo<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeTo<&T> { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -1209,7 +1253,8 @@ impl RangeBounds for RangeTo<&T> { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..end` with `(Bound::Included(start), Bound::Excluded(end))`. #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for Range<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for Range<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -1225,7 +1270,8 @@ impl RangeBounds for Range<&T> { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..=end` with `(Bound::Included(start), Bound::Included(end))`. 
#[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeInclusive<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeInclusive<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -1241,7 +1287,8 @@ impl RangeBounds for RangeInclusive<&T> { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `..=end` with `(Bound::Unbounded, Bound::Included(end))`. #[stable(feature = "collections_range", since = "1.28.0")] -impl RangeBounds for RangeToInclusive<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeToInclusive<&T> { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -1270,14 +1317,16 @@ pub enum OneSidedRangeBound { /// Types that implement `OneSidedRange` must return `Bound::Unbounded` /// from one of `RangeBounds::start_bound` or `RangeBounds::end_bound`. #[unstable(feature = "one_sided_range", issue = "69780")] -pub trait OneSidedRange: RangeBounds { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +pub const trait OneSidedRange: RangeBounds { /// An internal-only helper function for `split_off` and /// `split_off_mut` that returns the bound of the one-sided range. fn bound(self) -> (OneSidedRangeBound, T); } #[unstable(feature = "one_sided_range", issue = "69780")] -impl OneSidedRange for RangeTo +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const OneSidedRange for RangeTo where Self: RangeBounds, { @@ -1287,7 +1336,8 @@ where } #[unstable(feature = "one_sided_range", issue = "69780")] -impl OneSidedRange for RangeFrom +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const OneSidedRange for RangeFrom where Self: RangeBounds, { @@ -1297,7 +1347,8 @@ where } #[unstable(feature = "one_sided_range", issue = "69780")] -impl OneSidedRange for RangeToInclusive +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const OneSidedRange for RangeToInclusive where Self: RangeBounds, { diff --git a/core/src/ops/try_trait.rs b/core/src/ops/try_trait.rs index e1f2ebcf4c289..f68782c804cdb 100644 --- a/core/src/ops/try_trait.rs +++ b/core/src/ops/try_trait.rs @@ -359,11 +359,24 @@ where /// and in the other direction, /// ` as Residual>::TryType = Result`. #[unstable(feature = "try_trait_v2_residual", issue = "91285")] -#[rustc_const_unstable(feature = "const_try", issue = "74935")] -pub const trait Residual { +#[rustc_const_unstable(feature = "const_try_residual", issue = "91285")] +pub const trait Residual: Sized { /// The "return" type of this meta-function. #[unstable(feature = "try_trait_v2_residual", issue = "91285")] - type TryType: Try; + type TryType: [const] Try; +} + +/// Used in `try {}` blocks so the type produced in the `?` desugaring +/// depends on the residual type `R` and the output type of the block `O`, +/// but importantly not on the contextual type the way it would be if +/// we called `<_ as FromResidual>::from_residual(r)` directly. 
+#[unstable(feature = "try_trait_v2_residual", issue = "91285")] +// needs to be `pub` to avoid `private type` errors +#[expect(unreachable_pub)] +#[inline] // FIXME: force would be nice, but fails -- see #148915 +#[lang = "into_try_type"] +pub fn residual_into_try_type, O>(r: R) -> >::TryType { + FromResidual::from_residual(r) } #[unstable(feature = "pub_crate_should_not_need_unstable_attr", issue = "none")] diff --git a/core/src/option.rs b/core/src/option.rs index 430ee3470ac3f..57098b95f641b 100644 --- a/core/src/option.rs +++ b/core/src/option.rs @@ -118,9 +118,14 @@ //! //! # Representation //! -//! Rust guarantees to optimize the following types `T` such that -//! [`Option`] has the same size, alignment, and [function call ABI] as `T`. In some -//! of these cases, Rust further guarantees the following: +//! Rust guarantees to optimize the following types `T` such that [`Option`] +//! has the same size, alignment, and [function call ABI] as `T`. It is +//! therefore sound, when `T` is one of these types, to transmute a value `t` of +//! type `T` to type `Option` (producing the value `Some(t)`) and to +//! transmute a value `Some(t)` of type `Option` to type `T` (producing the +//! value `t`). +//! +//! In some of these cases, Rust further guarantees the following: //! - `transmute::<_, Option>([0u8; size_of::()])` is sound and produces //! `Option::::None` //! - `transmute::<_, [u8; size_of::()]>(Option::::None)` is sound and produces @@ -576,6 +581,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::clone::TrivialClone; use crate::iter::{self, FusedIterator, TrustedLen}; use crate::marker::Destruct; use crate::ops::{self, ControlFlow, Deref, DerefMut}; @@ -2210,6 +2216,11 @@ where #[unstable(feature = "ergonomic_clones", issue = "132290")] impl crate::clone::UseCloned for Option where T: crate::clone::UseCloned {} +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +#[rustc_const_unstable(feature = "const_clone", issue = "142757")] +unsafe impl const TrivialClone for Option where T: [const] TrivialClone + [const] Destruct {} + #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_default", issue = "143894")] impl const Default for Option { diff --git a/core/src/panic/location.rs b/core/src/panic/location.rs index 5935849344475..8176af03d13a5 100644 --- a/core/src/panic/location.rs +++ b/core/src/panic/location.rs @@ -194,8 +194,8 @@ impl<'a> Location<'a> { /// `std::source_location::file_name`, both of which return a nul-terminated `const char*`. 
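[Editor's aside, not part of the diff] The expanded `Option` representation docs spell out the niche guarantee for the listed types. The size part of that guarantee is easy to check on stable (the transmute soundness claim itself is exactly what the new wording documents):

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<NonZeroU32>());
    assert_eq!(size_of::<Option<Box<u8>>>(), size_of::<Box<u8>>());
}
```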
#[must_use] #[inline] - #[stable(feature = "file_with_nul", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "file_with_nul", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "file_with_nul", since = "1.92.0")] + #[rustc_const_stable(feature = "file_with_nul", since = "1.92.0")] pub const fn file_as_c_str(&self) -> &'a CStr { let filename = self.filename.as_ptr(); diff --git a/core/src/panic/unwind_safe.rs b/core/src/panic/unwind_safe.rs index 722af55103839..21dbd09f49606 100644 --- a/core/src/panic/unwind_safe.rs +++ b/core/src/panic/unwind_safe.rs @@ -101,9 +101,9 @@ pub auto trait UnwindSafe {} #[rustc_diagnostic_item = "ref_unwind_safe_trait"] #[diagnostic::on_unimplemented( message = "the type `{Self}` may contain interior mutability and a reference may not be safely \ - transferrable across a catch_unwind boundary", + transferable across a catch_unwind boundary", label = "`{Self}` may contain interior mutability and a reference may not be safely \ - transferrable across a catch_unwind boundary" + transferable across a catch_unwind boundary" )] pub auto trait RefUnwindSafe {} diff --git a/core/src/panicking.rs b/core/src/panicking.rs index b5150837e6a94..3609dd1fe2e02 100644 --- a/core/src/panicking.rs +++ b/core/src/panicking.rs @@ -36,7 +36,8 @@ use crate::panic::{Location, PanicInfo}; compile_error!( "panic_immediate_abort is now a real panic strategy! \ Enable it with `panic = \"immediate-abort\"` in Cargo.toml, \ - or with the compiler flags `-Zunstable-options -Cpanic=immediate-abort`" + or with the compiler flags `-Zunstable-options -Cpanic=immediate-abort`. \ + In both cases, you still need to build core, e.g. with `-Zbuild-std`" ); // First we define the two main entry points that all panics go through. @@ -135,18 +136,18 @@ pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: boo #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable #[lang = "panic"] // used by lints and miri for panics pub const fn panic(expr: &'static str) -> ! { - // Use Arguments::new_const instead of format_args!("{expr}") to potentially + // Use Arguments::from_str instead of format_args!("{expr}") to potentially // reduce size overhead. The format_args! macro uses str's Display trait to // write expr, which calls Formatter::pad, which must accommodate string // truncation and padding (even though none is used here). Using - // Arguments::new_const may allow the compiler to omit Formatter::pad from the + // Arguments::from_str may allow the compiler to omit Formatter::pad from the // output binary, saving up to a few kilobytes. - // However, this optimization only works for `'static` strings: `new_const` also makes this + // However, this optimization only works for `'static` strings: `from_str` also makes this // message return `Some` from `Arguments::as_str`, which means it can become part of the panic // payload without any allocation or copying. Shorter-lived strings would become invalid as // stack frames get popped during unwinding, and couldn't be directly referenced from the // payload. - panic_fmt(fmt::Arguments::new_const(&[expr])); + panic_fmt(fmt::Arguments::from_str(expr)); } // We generate functions for usage by compiler-generated assertions. @@ -170,13 +171,8 @@ macro_rules! panic_const { #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable #[lang = stringify!($lang)] pub const fn $lang() -> ! 
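[Editor's aside, not part of the diff] `Location::file_as_c_str` is what this hunk stabilizes (at 1.92.0 per the diff); the plain `file()`/`line()` accessors have long been stable and show the same `#[track_caller]` machinery:

```rust
use std::panic::Location;

#[track_caller]
fn who_called_me() -> &'static Location<'static> {
    Location::caller()
}

fn main() {
    let loc = who_called_me();
    println!("called from {}:{}", loc.file(), loc.line());
}
```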
{ - // Use Arguments::new_const instead of format_args!("{expr}") to potentially - // reduce size overhead. The format_args! macro uses str's Display trait to - // write expr, which calls Formatter::pad, which must accommodate string - // truncation and padding (even though none is used here). Using - // Arguments::new_const may allow the compiler to omit Formatter::pad from the - // output binary, saving up to a few kilobytes. - panic_fmt(fmt::Arguments::new_const(&[$message])); + // See the comment in `panic(&'static str)` for why we use `Arguments::from_str` here. + panic_fmt(fmt::Arguments::from_str($message)); } )+ } @@ -226,7 +222,7 @@ pub mod panic_const { #[rustc_nounwind] #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable pub const fn panic_nounwind(expr: &'static str) -> ! { - panic_nounwind_fmt(fmt::Arguments::new_const(&[expr]), /* force_no_backtrace */ false); + panic_nounwind_fmt(fmt::Arguments::from_str(expr), /* force_no_backtrace */ false); } /// Like `panic_nounwind`, but also inhibits showing a backtrace. @@ -234,7 +230,7 @@ pub const fn panic_nounwind(expr: &'static str) -> ! { #[cfg_attr(panic = "immediate-abort", inline)] #[rustc_nounwind] pub fn panic_nounwind_nobacktrace(expr: &'static str) -> ! { - panic_nounwind_fmt(fmt::Arguments::new_const(&[expr]), /* force_no_backtrace */ true); + panic_nounwind_fmt(fmt::Arguments::from_str(expr), /* force_no_backtrace */ true); } #[inline] diff --git a/core/src/pat.rs b/core/src/pat.rs index a13eea3fb585c..2670c2614198c 100644 --- a/core/src/pat.rs +++ b/core/src/pat.rs @@ -1,5 +1,8 @@ //! Helper module for exporting the `pattern_type` macro +use crate::marker::{Freeze, PointeeSized, Unsize}; +use crate::ops::{CoerceUnsized, DispatchFromDyn}; + /// Creates a pattern type. /// ```ignore (cannot test this from within core yet) /// type Positive = std::pat::pattern_type!(i32 is 1..); @@ -73,3 +76,16 @@ impl const RangePattern for char { } } } + +impl CoerceUnsized for pattern_type!(*const T is !null) where + T: Unsize +{ +} + +impl, U> DispatchFromDyn for pattern_type!(T is !null) {} + +impl Unpin for pattern_type!(*const T is !null) {} + +unsafe impl Freeze for pattern_type!(*const T is !null) {} + +unsafe impl Freeze for pattern_type!(*mut T is !null) {} diff --git a/core/src/primitive_docs.rs b/core/src/primitive_docs.rs index 1c824e336bed7..15ba72bccaa9b 100644 --- a/core/src/primitive_docs.rs +++ b/core/src/primitive_docs.rs @@ -1531,9 +1531,8 @@ mod prim_usize {} /// `&mut T` references can be freely coerced into `&T` references with the same referent type, and /// references with longer lifetimes can be freely coerced into references with shorter ones. /// -/// Reference equality by address, instead of comparing the values pointed to, is accomplished via -/// implicit reference-pointer coercion and raw pointer equality via [`ptr::eq`], while -/// [`PartialEq`] compares values. +/// [`PartialEq`] will compare referenced values. It is possible to compare the reference address +/// using reference-pointer coercion and raw pointer equality via [`ptr::eq`]. /// /// ``` /// use std::ptr; @@ -1648,7 +1647,7 @@ mod prim_usize {} /// For the other direction, things are more complicated: when unsafe code passes arguments /// to safe functions or returns values from safe functions, they generally must *at least* /// not violate these invariants. The full requirements are stronger, as the reference generally -/// must point to data that is safe to use at type `T`. 
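[Editor's aside, not part of the diff] The rewritten reference docs in `primitive_docs.rs` distinguish value equality (`PartialEq`) from address equality (`ptr::eq`). A stable demonstration:

```rust
use std::ptr;

fn main() {
    let a = [1, 2, 3];
    let b = [1, 2, 3];
    assert!(a == b);          // PartialEq compares the referenced values
    assert!(ptr::eq(&a, &a)); // ptr::eq compares addresses instead
    // Two distinct locals normally live at distinct addresses:
    println!("same storage? {}", ptr::eq(&a, &b));
}
```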
+/// must point to data that is safe to use as type `T`. /// /// It is not decided yet whether unsafe code may violate these invariants temporarily on internal /// data. As a consequence, unsafe code which violates these invariants temporarily on internal data diff --git a/core/src/profiling.rs b/core/src/profiling.rs new file mode 100644 index 0000000000000..db4a62480a3a1 --- /dev/null +++ b/core/src/profiling.rs @@ -0,0 +1,33 @@ +//! Profiling markers for compiler instrumentation. + +/// Profiling marker for move operations. +/// +/// This function is never called at runtime. When `-Z annotate-moves` is enabled, +/// the compiler creates synthetic debug info that makes move operations appear as +/// calls to this function in profilers. +/// +/// The `SIZE` parameter encodes the size of the type being copied. It's the same as +/// `size_of::()`, and is only present for convenience. +#[unstable(feature = "profiling_marker_api", issue = "148197")] +#[lang = "compiler_move"] +pub fn compiler_move(_src: *const T, _dst: *mut T) { + unreachable!( + "compiler_move marks where the compiler-generated a memcpy for moves. It is never actually called." + ) +} + +/// Profiling marker for copy operations. +/// +/// This function is never called at runtime. When `-Z annotate-moves` is enabled, +/// the compiler creates synthetic debug info that makes copy operations appear as +/// calls to this function in profilers. +/// +/// The `SIZE` parameter encodes the size of the type being copied. It's the same as +/// `size_of::()`, and is only present for convenience. +#[unstable(feature = "profiling_marker_api", issue = "148197")] +#[lang = "compiler_copy"] +pub fn compiler_copy(_src: *const T, _dst: *mut T) { + unreachable!( + "compiler_copy marks where the compiler-generated a memcpy for Copies. It is never actually called." + ) +} diff --git a/core/src/ptr/const_ptr.rs b/core/src/ptr/const_ptr.rs index 451092709443b..84a6982d56805 100644 --- a/core/src/ptr/const_ptr.rs +++ b/core/src/ptr/const_ptr.rs @@ -1462,7 +1462,8 @@ impl *const [T] { /// Gets a raw pointer to the underlying array. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. 
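A minimal sketch of the length check `as_array` performs on a raw slice pointer, written against stable APIs; the helper name `as_array_sketch` is hypothetical and not part of std.

```rust
fn as_array_sketch<T, const N: usize>(p: *const [T]) -> Option<*const [T; N]> {
    if p.len() == N {
        // Same data pointer; only the pointee type (with its statically known length) changes.
        Some(p.cast::<[T; N]>())
    } else {
        None
    }
}

fn main() {
    let xs = [1, 2, 3];
    let p: *const [i32] = &xs[..];
    assert!(as_array_sketch::<i32, 3>(p).is_some());
    assert!(as_array_sketch::<i32, 4>(p).is_none());
}
```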
- #[unstable(feature = "slice_as_array", issue = "133508")] + #[stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const fn as_array(self) -> Option<*const [T; N]> { diff --git a/core/src/ptr/metadata.rs b/core/src/ptr/metadata.rs index dc3ec3fd19945..998a5b031c280 100644 --- a/core/src/ptr/metadata.rs +++ b/core/src/ptr/metadata.rs @@ -1,5 +1,6 @@ #![unstable(feature = "ptr_metadata", issue = "81513")] +use crate::clone::TrivialClone; use crate::fmt; use crate::hash::{Hash, Hasher}; use crate::intrinsics::{aggregate_raw_ptr, ptr_metadata}; @@ -231,6 +232,9 @@ impl Clone for DynMetadata { } } +#[doc(hidden)] +unsafe impl TrivialClone for DynMetadata {} + impl Eq for DynMetadata {} impl PartialEq for DynMetadata { diff --git a/core/src/ptr/mod.rs b/core/src/ptr/mod.rs index b29d267654252..ea0514f405f1e 100644 --- a/core/src/ptr/mod.rs +++ b/core/src/ptr/mod.rs @@ -403,7 +403,7 @@ use crate::cmp::Ordering; use crate::intrinsics::const_eval_select; -use crate::marker::{FnPtr, PointeeSized}; +use crate::marker::{Destruct, FnPtr, PointeeSized}; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::{fmt, hash, intrinsics, ub_checks}; @@ -801,7 +801,11 @@ pub const unsafe fn write_bytes(dst: *mut T, val: u8, count: usize) { #[lang = "drop_in_place"] #[allow(unconditional_recursion)] #[rustc_diagnostic_item = "ptr_drop_in_place"] -pub unsafe fn drop_in_place(to_drop: *mut T) { +#[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] +pub const unsafe fn drop_in_place(to_drop: *mut T) +where + T: [const] Destruct, +{ // Code here does not matter - this is replaced by the // real drop glue by the compiler. @@ -1348,40 +1352,6 @@ pub const unsafe fn swap(x: *mut T, y: *mut T) { /// assert_eq!(x, [7, 8, 3, 4]); /// assert_eq!(y, [1, 2, 9]); /// ``` -/// -/// # Const evaluation limitations -/// -/// If this function is invoked during const-evaluation, the current implementation has a small (and -/// rarely relevant) limitation: if `count` is at least 2 and the data pointed to by `x` or `y` -/// contains a pointer that crosses the boundary of two `T`-sized chunks of memory, the function may -/// fail to evaluate (similar to a panic during const-evaluation). This behavior may change in the -/// future. -/// -/// The limitation is illustrated by the following example: -/// -/// ``` -/// use std::mem::size_of; -/// use std::ptr; -/// -/// const { unsafe { -/// const PTR_SIZE: usize = size_of::<*const i32>(); -/// let mut data1 = [0u8; PTR_SIZE]; -/// let mut data2 = [0u8; PTR_SIZE]; -/// // Store a pointer in `data1`. -/// data1.as_mut_ptr().cast::<*const i32>().write_unaligned(&42); -/// // Swap the contents of `data1` and `data2` by swapping `PTR_SIZE` many `u8`-sized chunks. -/// // This call will fail, because the pointer in `data1` crosses the boundary -/// // between several of the 1-byte chunks that are being swapped here. -/// //ptr::swap_nonoverlapping(data1.as_mut_ptr(), data2.as_mut_ptr(), PTR_SIZE); -/// // Swap the contents of `data1` and `data2` by swapping a single chunk of size -/// // `[u8; PTR_SIZE]`. That works, as there is no pointer crossing the boundary between -/// // two chunks. -/// ptr::swap_nonoverlapping(&mut data1, &mut data2, 1); -/// // Read the pointer from `data2` and dereference it. 
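For context on the const-evaluation limitation text removed above, a plain runtime usage of `ptr::swap_nonoverlapping` (stable since 1.27.0), swapping two non-overlapping prefixes:

```rust
use std::ptr;

fn main() {
    let mut x = [1u8, 2, 3, 4];
    let mut y = [7u8, 8, 9, 10];
    // SAFETY: both pointers are valid for reads and writes of 2 elements,
    // properly aligned, and the two regions do not overlap.
    unsafe { ptr::swap_nonoverlapping(x.as_mut_ptr(), y.as_mut_ptr(), 2) };
    assert_eq!(x, [7, 8, 3, 4]);
    assert_eq!(y, [1, 2, 9, 10]);
}
```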
-/// let ptr = data2.as_ptr().cast::<*const i32>().read_unaligned(); -/// assert!(*ptr == 42); -/// } } -/// ``` #[inline] #[stable(feature = "swap_nonoverlapping", since = "1.27.0")] #[rustc_const_stable(feature = "const_swap_nonoverlapping", since = "1.88.0")] @@ -1410,9 +1380,7 @@ pub const unsafe fn swap_nonoverlapping(x: *mut T, y: *mut T, count: usize) { const_eval_select!( @capture[T] { x: *mut T, y: *mut T, count: usize }: if const { - // At compile-time we want to always copy this in chunks of `T`, to ensure that if there - // are pointers inside `T` we will copy them in one go rather than trying to copy a part - // of a pointer (which would not work). + // At compile-time we don't need all the special code below. // SAFETY: Same preconditions as this function unsafe { swap_nonoverlapping_const(x, y, count) } } else { diff --git a/core/src/ptr/mut_ptr.rs b/core/src/ptr/mut_ptr.rs index ba78afc7ea114..85d54b4d3b9b3 100644 --- a/core/src/ptr/mut_ptr.rs +++ b/core/src/ptr/mut_ptr.rs @@ -1,7 +1,7 @@ use super::*; use crate::cmp::Ordering::{Equal, Greater, Less}; use crate::intrinsics::const_eval_select; -use crate::marker::PointeeSized; +use crate::marker::{Destruct, PointeeSized}; use crate::mem::{self, SizedTypeProperties}; use crate::slice::{self, SliceIndex}; @@ -1390,8 +1390,12 @@ impl *mut T { /// /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() #[stable(feature = "pointer_methods", since = "1.26.0")] + #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] #[inline(always)] - pub unsafe fn drop_in_place(self) { + pub const unsafe fn drop_in_place(self) + where + T: [const] Destruct, + { // SAFETY: the caller must uphold the safety contract for `drop_in_place`. unsafe { drop_in_place(self) } } @@ -1708,7 +1712,8 @@ impl *mut [T] { /// Gets a raw, mutable pointer to the underlying array. /// /// If `N` is not exactly equal to the length of `self`, then this method returns `None`. - #[unstable(feature = "slice_as_array", issue = "133508")] + #[stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "core_slice_as_array", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const fn as_mut_array(self) -> Option<*mut [T; N]> { diff --git a/core/src/ptr/non_null.rs b/core/src/ptr/non_null.rs index 10f83120428b9..aa3af2f185287 100644 --- a/core/src/ptr/non_null.rs +++ b/core/src/ptr/non_null.rs @@ -1,5 +1,6 @@ +use crate::clone::TrivialClone; use crate::cmp::Ordering; -use crate::marker::{PointeeSized, Unsize}; +use crate::marker::{Destruct, PointeeSized, Unsize}; use crate::mem::{MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::ops::{CoerceUnsized, DispatchFromDyn}; @@ -1118,7 +1119,11 @@ impl NonNull { /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place() #[inline(always)] #[stable(feature = "non_null_convenience", since = "1.80.0")] - pub unsafe fn drop_in_place(self) { + #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")] + pub const unsafe fn drop_in_place(self) + where + T: [const] Destruct, + { // SAFETY: the caller must uphold the safety contract for `drop_in_place`. 
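The methods being constified here forward to `ptr::drop_in_place`, which is already usable from stable runtime code. A short example of running a destructor in place without moving the value:

```rust
use std::mem::MaybeUninit;
use std::ptr;

fn main() {
    let mut slot: MaybeUninit<String> = MaybeUninit::new(String::from("owned"));
    // SAFETY: `slot` was just initialized, the pointer is valid and aligned, and the
    // value is not used (or dropped) again after its destructor runs here.
    unsafe { ptr::drop_in_place(slot.as_mut_ptr()) };
    // `slot` now holds a dropped `String`; it must not be read or dropped again.
}
```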
unsafe { ptr::drop_in_place(self.as_ptr()) } } @@ -1649,6 +1654,10 @@ impl Clone for NonNull { #[stable(feature = "nonnull", since = "1.25.0")] impl Copy for NonNull {} +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for NonNull {} + #[unstable(feature = "coerce_unsized", issue = "18598")] impl CoerceUnsized> for NonNull where T: Unsize {} diff --git a/core/src/ptr/unique.rs b/core/src/ptr/unique.rs index cdc8b6cc936df..5e7b1f7038024 100644 --- a/core/src/ptr/unique.rs +++ b/core/src/ptr/unique.rs @@ -1,3 +1,4 @@ +use crate::clone::TrivialClone; use crate::fmt; use crate::marker::{PhantomData, PointeeSized, Unsize}; use crate::ops::{CoerceUnsized, DispatchFromDyn}; @@ -165,6 +166,10 @@ impl Clone for Unique { #[unstable(feature = "ptr_internals", issue = "none")] impl Copy for Unique {} +#[doc(hidden)] +#[unstable(feature = "trivial_clone", issue = "none")] +unsafe impl TrivialClone for Unique {} + #[unstable(feature = "ptr_internals", issue = "none")] impl CoerceUnsized> for Unique where T: Unsize {} diff --git a/core/src/range.rs b/core/src/range.rs index a096a8ceafc87..2df520d35b398 100644 --- a/core/src/range.rs +++ b/core/src/range.rs @@ -118,10 +118,11 @@ impl> Range { /// ``` #[inline] #[unstable(feature = "new_range_api", issue = "125687")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -151,13 +152,18 @@ impl> Range { /// ``` #[inline] #[unstable(feature = "new_range_api", issue = "125687")] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn is_empty(&self) -> bool + where + Idx: [const] PartialOrd, + { !(self.start < self.end) } } #[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for Range { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for Range { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -173,7 +179,8 @@ impl RangeBounds for Range { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..end` with `(Bound::Included(start), Bound::Excluded(end))`. 
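The `new_range_api` types above are unstable, but the `contains`/`is_empty` semantics they make `const` mirror the stable `ops` ranges, sketched here:

```rust
fn main() {
    assert!((3..3).is_empty());    // `!(start < end)` for an exclusive range
    assert!(!(1..5).contains(&5)); // the end bound is excluded
    assert!((1..=5).contains(&5)); // an inclusive range includes its end
}
```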
#[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for Range<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for Range<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -184,7 +191,8 @@ impl RangeBounds for Range<&T> { // #[unstable(feature = "range_into_bounds", issue = "136903")] #[unstable(feature = "new_range_api", issue = "125687")] -impl IntoBounds for Range { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for Range { fn into_bounds(self) -> (Bound, Bound) { (Included(self.start), Excluded(self.end)) } @@ -271,10 +279,11 @@ impl> RangeInclusive { /// ``` #[inline] #[unstable(feature = "new_range_api", issue = "125687")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } @@ -304,7 +313,11 @@ impl> RangeInclusive { /// ``` #[unstable(feature = "new_range_api", issue = "125687")] #[inline] - pub fn is_empty(&self) -> bool { + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn is_empty(&self) -> bool + where + Idx: [const] PartialOrd, + { !(self.start <= self.last) } } @@ -342,7 +355,8 @@ impl RangeInclusive { } #[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for RangeInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeInclusive { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -358,7 +372,8 @@ impl RangeBounds for RangeInclusive { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..=end` with `(Bound::Included(start), Bound::Included(end))`. #[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for RangeInclusive<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeInclusive<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -369,7 +384,8 @@ impl RangeBounds for RangeInclusive<&T> { // #[unstable(feature = "range_into_bounds", issue = "136903")] #[unstable(feature = "new_range_api", issue = "125687")] -impl IntoBounds for RangeInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeInclusive { fn into_bounds(self) -> (Bound, Bound) { (Included(self.start), Included(self.last)) } @@ -485,17 +501,19 @@ impl> RangeFrom { /// ``` #[inline] #[unstable(feature = "new_range_api", issue = "125687")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } } #[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for RangeFrom { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeFrom { fn start_bound(&self) -> Bound<&T> { Included(&self.start) } @@ -511,7 +529,8 @@ impl RangeBounds for RangeFrom { /// consider using the `RangeBounds` impl for a 2-tuple of [`Bound<&T>`][Bound], /// i.e. replace `start..` with `(Bound::Included(start), Bound::Unbounded)`. 
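A stable-Rust sketch of what these `RangeBounds` impls (now additionally `const`) provide: any range form can be queried uniformly for its two bounds. The helper name `describe` is made up for illustration.

```rust
use std::ops::{Bound, RangeBounds};

fn describe<R: RangeBounds<u32>>(r: R) -> (Bound<u32>, Bound<u32>) {
    (r.start_bound().cloned(), r.end_bound().cloned())
}

fn main() {
    assert_eq!(describe(1..5), (Bound::Included(1), Bound::Excluded(5)));
    assert_eq!(describe(..=7), (Bound::Unbounded, Bound::Included(7)));
    assert_eq!(describe(2..), (Bound::Included(2), Bound::Unbounded));
}
```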
#[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for RangeFrom<&T> { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeFrom<&T> { fn start_bound(&self) -> Bound<&T> { Included(self.start) } @@ -522,7 +541,8 @@ impl RangeBounds for RangeFrom<&T> { // #[unstable(feature = "range_into_bounds", issue = "136903")] #[unstable(feature = "new_range_api", issue = "125687")] -impl IntoBounds for RangeFrom { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeFrom { fn into_bounds(self) -> (Bound, Bound) { (Included(self.start), Unbounded) } @@ -620,20 +640,34 @@ impl> RangeToInclusive { /// ``` #[inline] #[unstable(feature = "new_range_api", issue = "125687")] - pub fn contains(&self, item: &U) -> bool + #[rustc_const_unstable(feature = "const_range", issue = "none")] + pub const fn contains(&self, item: &U) -> bool where - Idx: PartialOrd, - U: ?Sized + PartialOrd, + Idx: [const] PartialOrd, + U: ?Sized + [const] PartialOrd, { >::contains(self, item) } } +impl From> for RangeToInclusive { + fn from(value: legacy::RangeToInclusive) -> Self { + Self { last: value.end } + } +} + +impl From> for legacy::RangeToInclusive { + fn from(value: RangeToInclusive) -> Self { + Self { end: value.last } + } +} + // RangeToInclusive cannot impl From> // because underflow would be possible with (..0).into() #[unstable(feature = "new_range_api", issue = "125687")] -impl RangeBounds for RangeToInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const RangeBounds for RangeToInclusive { fn start_bound(&self) -> Bound<&T> { Unbounded } @@ -643,7 +677,8 @@ impl RangeBounds for RangeToInclusive { } #[unstable(feature = "range_into_bounds", issue = "136903")] -impl IntoBounds for RangeToInclusive { +#[rustc_const_unstable(feature = "const_range", issue = "none")] +impl const IntoBounds for RangeToInclusive { fn into_bounds(self) -> (Bound, Bound) { (Unbounded, Included(self.last)) } diff --git a/core/src/range/iter.rs b/core/src/range/iter.rs index 24efd4a204a5f..9a8824baefe4e 100644 --- a/core/src/range/iter.rs +++ b/core/src/range/iter.rs @@ -3,6 +3,7 @@ use crate::iter::{ }; use crate::num::NonZero; use crate::range::{Range, RangeFrom, RangeInclusive, legacy}; +use crate::{intrinsics, mem}; /// By-value [`Range`] iterator. #[unstable(feature = "new_range_api", issue = "125687")] @@ -168,7 +169,7 @@ impl IterRangeInclusive { } } -#[unstable(feature = "trusted_random_access", issue = "none")] +#[unstable(feature = "new_range_api", issue = "125687")] impl Iterator for IterRangeInclusive { type Item = A; @@ -293,32 +294,74 @@ range_incl_exact_iter_impl! { /// By-value [`RangeFrom`] iterator. #[unstable(feature = "new_range_api", issue = "125687")] #[derive(Debug, Clone)] -pub struct IterRangeFrom(legacy::RangeFrom); +pub struct IterRangeFrom { + start: A, + /// Whether the first element of the iterator has yielded. + /// Only used when overflow checks are enabled. + first: bool, +} -impl IterRangeFrom { +impl IterRangeFrom { /// Returns the remainder of the range being iterated over. 
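A minimal sketch, over `u32` in stable Rust, of the release-mode `next` path of the reworked `IterRangeFrom`: advance `start` to its successor and hand back the previous value, mirroring the `mem::replace` used in the non-overflow-checked branch. `Step::forward` is stood in for by a plain `+ 1`, and the overflow-checked `first`-flag path is omitted.

```rust
fn next_sketch(start: &mut u32) -> Option<u32> {
    let successor = *start + 1;
    // Store the successor and return the old value in one step.
    Some(std::mem::replace(start, successor))
}

fn main() {
    let mut s = 10;
    assert_eq!(next_sketch(&mut s), Some(10));
    assert_eq!(next_sketch(&mut s), Some(11));
    assert_eq!(s, 12);
}
```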
+ #[inline] + #[rustc_inherit_overflow_checks] pub fn remainder(self) -> RangeFrom { - RangeFrom { start: self.0.start } + if intrinsics::overflow_checks() { + if !self.first { + return RangeFrom { start: Step::forward(self.start, 1) }; + } + } + + RangeFrom { start: self.start } } } -#[unstable(feature = "trusted_random_access", issue = "none")] +#[unstable(feature = "new_range_api", issue = "125687")] impl Iterator for IterRangeFrom { type Item = A; #[inline] + #[rustc_inherit_overflow_checks] fn next(&mut self) -> Option { - self.0.next() + if intrinsics::overflow_checks() { + if self.first { + self.first = false; + return Some(self.start.clone()); + } + + self.start = Step::forward(self.start.clone(), 1); + return Some(self.start.clone()); + } + + let n = Step::forward(self.start.clone(), 1); + Some(mem::replace(&mut self.start, n)) } #[inline] fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() + (usize::MAX, None) } #[inline] + #[rustc_inherit_overflow_checks] fn nth(&mut self, n: usize) -> Option { - self.0.nth(n) + if intrinsics::overflow_checks() { + if self.first { + self.first = false; + + let plus_n = Step::forward(self.start.clone(), n); + self.start = plus_n.clone(); + return Some(plus_n); + } + + let plus_n = Step::forward(self.start.clone(), n); + self.start = Step::forward(plus_n.clone(), 1); + return Some(self.start.clone()); + } + + let plus_n = Step::forward(self.start.clone(), n); + self.start = Step::forward(plus_n.clone(), 1); + Some(plus_n) } } @@ -334,6 +377,6 @@ impl IntoIterator for RangeFrom { type IntoIter = IterRangeFrom; fn into_iter(self) -> Self::IntoIter { - IterRangeFrom(self.into()) + IterRangeFrom { start: self.start, first: true } } } diff --git a/core/src/result.rs b/core/src/result.rs index c69762a728598..9afa71ec0f117 100644 --- a/core/src/result.rs +++ b/core/src/result.rs @@ -230,24 +230,31 @@ //! //! # Representation //! -//! In some cases, [`Result`] will gain the same size, alignment, and ABI -//! guarantees as [`Option`] has. One of either the `T` or `E` type must be a -//! type that qualifies for the `Option` [representation guarantees][opt-rep], -//! and the *other* type must meet all of the following conditions: -//! * Is a zero-sized type with alignment 1 (a "1-ZST"). -//! * Has no fields. -//! * Does not have the `#[non_exhaustive]` attribute. +//! In some cases, [`Result`] comes with size, alignment, and ABI +//! guarantees. Specifically, one of either the `T` or `E` type must be a type +//! that qualifies for the `Option` [representation guarantees][opt-rep] (let's +//! call that type `I`), and the *other* type is a zero-sized type with +//! alignment 1 (a "1-ZST"). +//! +//! If that is the case, then `Result` has the same size, alignment, and +//! [function call ABI] as `I` (and therefore, as `Option`). If `I` is `T`, +//! it is therefore sound to transmute a value `t` of type `I` to type +//! `Result` (producing the value `Ok(t)`) and to transmute a value +//! `Ok(t)` of type `Result` to type `I` (producing the value `t`). If `I` +//! is `E`, the same applies with `Ok` replaced by `Err`. //! //! For example, `NonZeroI32` qualifies for the `Option` representation -//! guarantees, and `()` is a zero-sized type with alignment 1, no fields, and -//! it isn't `non_exhaustive`. This means that both `Result` and -//! `Result<(), NonZeroI32>` have the same size, alignment, and ABI guarantees -//! as `Option`. The only difference is the implied semantics: +//! guarantees and `()` is a zero-sized type with alignment 1. 
This means that +//! both `Result` and `Result<(), NonZeroI32>` have the same +//! size, alignment, and ABI as `NonZeroI32` (and `Option`). The +//! only difference between these is in the implied semantics: +//! //! * `Option` is "a non-zero i32 might be present" //! * `Result` is "a non-zero i32 success result, if any" //! * `Result<(), NonZeroI32>` is "a non-zero i32 error result, if any" //! //! [opt-rep]: ../option/index.html#representation "Option Representation" +//! [function call ABI]: ../primitive.fn.html#abi-compatibility //! //! # Method overview //! @@ -1639,11 +1646,16 @@ impl Result { #[inline] #[track_caller] #[stable(feature = "option_result_unwrap_unchecked", since = "1.58.0")] - pub unsafe fn unwrap_unchecked(self) -> T { + #[rustc_const_unstable(feature = "const_result_unwrap_unchecked", issue = "148714")] + pub const unsafe fn unwrap_unchecked(self) -> T { match self { Ok(t) => t, - // SAFETY: the safety contract must be upheld by the caller. - Err(_) => unsafe { hint::unreachable_unchecked() }, + Err(e) => { + // FIXME(const-hack): to avoid E: const Destruct bound + super::mem::forget(e); + // SAFETY: the safety contract must be upheld by the caller. + unsafe { hint::unreachable_unchecked() } + } } } diff --git a/core/src/slice/ascii.rs b/core/src/slice/ascii.rs index e17a2e03d2dc4..3e8c553f9f159 100644 --- a/core/src/slice/ascii.rs +++ b/core/src/slice/ascii.rs @@ -3,12 +3,17 @@ use core::ascii::EscapeDefault; use crate::fmt::{self, Write}; -#[cfg(not(all(target_arch = "x86_64", target_feature = "sse2")))] +#[cfg(not(any( + all(target_arch = "x86_64", target_feature = "sse2"), + all(target_arch = "loongarch64", target_feature = "lsx") +)))] use crate::intrinsics::const_eval_select; use crate::{ascii, iter, ops}; impl [u8] { /// Checks if all bytes in this slice are within the ASCII range. + /// + /// An empty slice returns `true`. #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")] #[rustc_const_stable(feature = "const_slice_is_ascii", since = "1.74.0")] #[must_use] @@ -357,7 +362,10 @@ pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool { /// /// If any of these loads produces something for which `contains_nonascii` /// (above) returns true, then we know the answer is false. -#[cfg(not(all(target_arch = "x86_64", target_feature = "sse2")))] +#[cfg(not(any( + all(target_arch = "x86_64", target_feature = "sse2"), + all(target_arch = "loongarch64", target_feature = "lsx") +)))] #[inline] #[rustc_allow_const_fn_unstable(const_eval_select)] // fallback impl has same behavior const fn is_ascii(s: &[u8]) -> bool { @@ -455,12 +463,15 @@ const fn is_ascii(s: &[u8]) -> bool { ) } -/// ASCII test optimized to use the `pmovmskb` instruction available on `x86-64` -/// platforms. +/// ASCII test optimized to use the `pmovmskb` instruction on `x86-64` and the +/// `vmskltz.b` instruction on `loongarch64`. /// /// Other platforms are not likely to benefit from this code structure, so they /// use SWAR techniques to test for ASCII in `usize`-sized chunks. 
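The representation guarantee restated in the `Result` docs above can be checked with stable size and alignment queries (the transmute-soundness part is not exercised here):

```rust
use std::mem::{align_of, size_of};
use std::num::NonZeroI32;

fn main() {
    // With a 1-ZST on the other side, `Result` matches the non-zero type and its `Option`.
    assert_eq!(size_of::<Result<NonZeroI32, ()>>(), size_of::<NonZeroI32>());
    assert_eq!(size_of::<Result<(), NonZeroI32>>(), size_of::<Option<NonZeroI32>>());
    assert_eq!(align_of::<Result<NonZeroI32, ()>>(), align_of::<NonZeroI32>());
}
```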
-#[cfg(all(target_arch = "x86_64", target_feature = "sse2"))] +#[cfg(any( + all(target_arch = "x86_64", target_feature = "sse2"), + all(target_arch = "loongarch64", target_feature = "lsx") +))] #[inline] const fn is_ascii(bytes: &[u8]) -> bool { // Process chunks of 32 bytes at a time in the fast path to enable diff --git a/core/src/slice/cmp.rs b/core/src/slice/cmp.rs index 103630aba0f79..fd1ca23fb79c5 100644 --- a/core/src/slice/cmp.rs +++ b/core/src/slice/cmp.rs @@ -155,18 +155,16 @@ where } #[doc(hidden)] -#[const_trait] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's PartialOrd -trait SlicePartialOrd: Sized { +const trait SlicePartialOrd: Sized { fn partial_compare(left: &[Self], right: &[Self]) -> Option; } #[doc(hidden)] -#[const_trait] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's PartialOrd chaining methods -trait SliceChain: Sized { +const trait SliceChain: Sized { fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow; fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow; fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow; @@ -244,9 +242,8 @@ impl const SlicePartialOrd for A { } #[rustc_specialization_trait] -#[const_trait] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] -trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {} +const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {} macro_rules! always_applicable_ord { ($([$($p:tt)*] $t:ty,)*) => { @@ -265,10 +262,9 @@ always_applicable_ord! { } #[doc(hidden)] -#[const_trait] #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] // intermediate trait for specialization of slice's Ord -trait SliceOrd: Sized { +const trait SliceOrd: Sized { fn compare(left: &[Self], right: &[Self]) -> Ordering; } @@ -292,8 +288,7 @@ impl SliceOrd for A { /// * For every `x` and `y` of this type, `Ord(x, y)` must return the same /// value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`. #[rustc_specialization_trait] -#[const_trait] -unsafe trait UnsignedBytewiseOrd: [const] Ord {} +const unsafe trait UnsignedBytewiseOrd: [const] Ord {} #[rustc_const_unstable(feature = "const_cmp", issue = "143800")] unsafe impl const UnsignedBytewiseOrd for bool {} diff --git a/core/src/slice/index.rs b/core/src/slice/index.rs index de220e7e38a4b..d8ed521f44353 100644 --- a/core/src/slice/index.rs +++ b/core/src/slice/index.rs @@ -1,6 +1,7 @@ //! Indexing implementations for `[T]`. use crate::intrinsics::slice_get_unchecked; +use crate::marker::Destruct; use crate::panic::const_panic; use crate::ub_checks::assert_unsafe_precondition; use crate::{ops, range}; @@ -134,6 +135,11 @@ mod private_slice_index { impl Sealed for range::RangeFrom {} impl Sealed for ops::IndexRange {} + + #[unstable(feature = "sliceindex_wrappers", issue = "146179")] + impl Sealed for crate::index::Last {} + #[unstable(feature = "sliceindex_wrappers", issue = "146179")] + impl Sealed for crate::index::Clamp where T: Sealed {} } /// A helper trait used for indexing operations. @@ -153,9 +159,8 @@ mod private_slice_index { message = "the type `{T}` cannot be indexed by `{Self}`", label = "slice indices are of type `usize` or ranges of `usize`" )] -#[const_trait] // FIXME(const_trait_impl): Migrate to `const unsafe trait` once #146122 is fixed. 
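A stable-Rust sketch of what the `SliceIndex` trait made `const` here abstracts over: a single generic helper that accepts either a position or a range. The helper name `get_it` is illustrative only.

```rust
use std::slice::SliceIndex;

fn get_it<T, I: SliceIndex<[T]>>(s: &[T], i: I) -> Option<&I::Output> {
    s.get(i)
}

fn main() {
    let s = [10, 20, 30, 40];
    assert_eq!(get_it(&s, 1), Some(&20));              // usize index: Output = T
    assert_eq!(get_it(&s, 1..3), Some(&[20, 30][..])); // range index: Output = [T]
    assert_eq!(get_it(&s, 2..9), None);                // out of bounds
}
```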
#[rustc_const_unstable(feature = "const_index", issue = "143775")] -pub unsafe trait SliceIndex: private_slice_index::Sealed { +pub const unsafe trait SliceIndex: private_slice_index::Sealed { /// The output type returned by methods. #[stable(feature = "slice_get_slice", since = "1.28.0")] type Output: ?Sized; @@ -564,7 +569,10 @@ unsafe impl const SliceIndex<[T]> for ops::RangeFrom { slice_index_fail(self.start, slice.len(), slice.len()) } // SAFETY: `self` is checked to be valid and in bounds above. - unsafe { &*self.get_unchecked(slice) } + unsafe { + let new_len = crate::intrinsics::unchecked_sub(slice.len(), self.start); + &*get_offset_len_noubcheck(slice, self.start, new_len) + } } #[inline] @@ -573,7 +581,10 @@ unsafe impl const SliceIndex<[T]> for ops::RangeFrom { slice_index_fail(self.start, slice.len(), slice.len()) } // SAFETY: `self` is checked to be valid and in bounds above. - unsafe { &mut *self.get_unchecked_mut(slice) } + unsafe { + let new_len = crate::intrinsics::unchecked_sub(slice.len(), self.start); + &mut *get_offset_len_mut_noubcheck(slice, self.start, new_len) + } } } @@ -893,9 +904,10 @@ unsafe impl const SliceIndex<[T]> for range::RangeToInclusive { #[track_caller] #[unstable(feature = "slice_range", issue = "76393")] #[must_use] -pub fn range(range: R, bounds: ops::RangeTo) -> ops::Range +#[rustc_const_unstable(feature = "const_range", issue = "none")] +pub const fn range(range: R, bounds: ops::RangeTo) -> ops::Range where - R: ops::RangeBounds, + R: [const] ops::RangeBounds + [const] Destruct, { let len = bounds.end; @@ -978,7 +990,7 @@ where /// Converts a pair of `ops::Bound`s into `ops::Range` without performing any /// bounds checking or (in debug) overflow checking. -pub(crate) fn into_range_unchecked( +pub(crate) const fn into_range_unchecked( len: usize, (start, end): (ops::Bound, ops::Bound), ) -> ops::Range { @@ -998,7 +1010,8 @@ pub(crate) fn into_range_unchecked( /// Converts pair of `ops::Bound`s into `ops::Range`. /// Returns `None` on overflowing indices. -pub(crate) fn into_range( +#[rustc_const_unstable(feature = "const_range", issue = "none")] +pub(crate) const fn into_range( len: usize, (start, end): (ops::Bound, ops::Bound), ) -> Option> { diff --git a/core/src/slice/iter/macros.rs b/core/src/slice/iter/macros.rs index 7c1ed3fe8a246..c46b7c797aab6 100644 --- a/core/src/slice/iter/macros.rs +++ b/core/src/slice/iter/macros.rs @@ -350,7 +350,6 @@ macro_rules! iterator { // because this simple implementation generates less LLVM IR and is // faster to compile. Also, the `assume` avoids a bounds check. #[inline] - #[rustc_inherit_overflow_checks] fn position
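A minimal stable-Rust sketch of the suffix logic the `RangeFrom` indexing paths above compute (`new_len = len - start` after the bounds check); the helper name `tail` is hypothetical, and the real code uses internal no-UB-check constructors rather than `get_unchecked`.

```rust
fn tail<T>(s: &[T], start: usize) -> Option<&[T]> {
    if start > s.len() {
        return None;
    }
    // SAFETY: `start <= s.len()`, so the suffix `start..` is in bounds and its
    // length `s.len() - start` cannot underflow.
    Some(unsafe { s.get_unchecked(start..) })
}

fn main() {
    let xs = [1, 2, 3, 4];
    assert_eq!(tail(&xs, 1), Some(&xs[1..]));
    assert_eq!(tail(&xs, 4), Some(&xs[4..]));
    assert_eq!(tail(&xs, 5), None);
}
```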