Auto merge of #128253 - tgross35:rollup-rpmoebz, r=tgross35
Rollup of 9 pull requests

Successful merges:

 - #124941 (Stabilize const `{integer}::from_str_radix` i.e. `const_int_from_str`)
 - #128201 (Implement `Copy`/`Clone` for async closures)
 - #128210 (rustdoc: change title of search results)
 - #128223 (Refactor complex conditions in `collect_tokens_trailing_token`)
 - #128224 (Remove unnecessary range replacements)
 - #128226 (Remove redundant option that was just encoding that a slice was empty)
 - #128227 (CI: do not respect custom try jobs for unrolled perf builds)
 - #128229 (Improve `extern "<abi>" unsafe fn()` error message)
 - #128235 (Fix `Iterator::filter` docs)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Jul 27, 2024
2 parents 7c2012d + 8385f3b commit 8b6b857
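
For context on #128201 in the list above, a minimal sketch (my illustration, not the PR's test suite) of what it enables on a nightly toolchain that includes this rollup, with the unstable `async_closure` feature: an async closure whose captures are `Clone` becomes `Clone` itself.

    #![feature(async_closure)]

    fn main() {
        let name = String::from("rollup");
        // After #128201, this async closure implements `Clone` because its
        // captured upvar (`name: String`) does.
        let greet = async move || { println!("hello, {name}"); };
        let _greet2 = greet.clone();
    }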
Showing 32 changed files with 265 additions and 133 deletions.
6 changes: 3 additions & 3 deletions compiler/rustc_ast_lowering/src/item.rs
@@ -172,7 +172,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
id: NodeId,
hir_id: hir::HirId,
ident: &mut Ident,
attrs: Option<&'hir [Attribute]>,
attrs: &'hir [Attribute],
vis_span: Span,
i: &ItemKind,
) -> hir::ItemKind<'hir> {
@@ -488,7 +488,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
id: NodeId,
vis_span: Span,
ident: &mut Ident,
attrs: Option<&'hir [Attribute]>,
attrs: &'hir [Attribute],
) -> hir::ItemKind<'hir> {
let path = &tree.prefix;
let segments = prefix.segments.iter().chain(path.segments.iter()).cloned().collect();
@@ -566,7 +566,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// `ItemLocalId` and the new owner. (See `lower_node_id`)
let kind =
this.lower_use_tree(use_tree, &prefix, id, vis_span, &mut ident, attrs);
if let Some(attrs) = attrs {
if !attrs.is_empty() {
this.attrs.insert(hir::ItemLocalId::ZERO, attrs);
}

6 changes: 3 additions & 3 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -913,15 +913,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ret
}

fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> Option<&'hir [Attribute]> {
fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> &'hir [Attribute] {
if attrs.is_empty() {
None
&[]
} else {
debug_assert_eq!(id.owner, self.current_hir_id_owner);
let ret = self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a)));
debug_assert!(!ret.is_empty());
self.attrs.insert(id.local_id, ret);
Some(ret)
ret
}
}
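
The `lower_attrs` change above is the pattern behind #128226: when `None` carries no more information than an empty slice, the `Option` wrapper can be dropped. A standalone sketch of the same refactor (hypothetical `first_doc_*` helpers, not rustc code):

    // Before: `None` and an empty slice mean the same thing.
    fn first_doc_before(attrs: Option<&[String]>) -> Option<&String> {
        attrs?.first()
    }

    // After: the empty slice *is* the "no attributes" case; callers just
    // check `is_empty()`, as in the `lower_use_tree` hunk above.
    fn first_doc_after(attrs: &[String]) -> Option<&String> {
        attrs.first()
    }

    fn main() {
        assert_eq!(first_doc_before(None), first_doc_after(&[]));
    }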

5 changes: 5 additions & 0 deletions compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -1442,9 +1442,14 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
// See `tests/ui/moves/needs-clone-through-deref.rs`
return false;
}
// We don't want to suggest `.clone()` in a move closure, since the value has already been captured.
if self.in_move_closure(expr) {
return false;
}
// We also don't want to suggest cloning a closure itself, since the value has already been captured.
if let hir::ExprKind::Closure(_) = expr.kind {
return false;
}
// Try to find predicates on *generic params* that would allow copying `ty`
let mut suggestion =
if let Some(symbol) = tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) {
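
A rough sketch of the pattern these two early returns guard against (my own illustration, not the PR's test case): once a value has been captured by a `move` closure, appending `.clone()` at the move site cannot fix a later use of that value.

    fn main() {
        let s = String::from("hi");
        let f = move || drop(s);   // `s` is captured (moved) into the closure here
        f();
        // println!("{s}");        // error[E0382]: borrow of moved value: `s`
        //                         // the move site is the closure above, so a `.clone()`
        //                         // suggestion on or inside it would not help
    }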
3 changes: 3 additions & 0 deletions compiler/rustc_mir_transform/src/shim.rs
@@ -435,6 +435,9 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -
match self_ty.kind() {
ty::FnDef(..) | ty::FnPtr(_) => builder.copy_shim(),
ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()),
ty::CoroutineClosure(_, args) => {
builder.tuple_like_shim(dest, src, args.as_coroutine_closure().upvar_tys())
}
ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()),
ty::Coroutine(coroutine_def_id, args) => {
assert_eq!(tcx.coroutine_movability(*coroutine_def_id), hir::Movability::Movable);
@@ -217,7 +217,10 @@ where
// impl Copy/Clone for Closure where Self::TupledUpvars: Copy/Clone
ty::Closure(_, args) => Ok(vec![ty::Binder::dummy(args.as_closure().tupled_upvars_ty())]),

ty::CoroutineClosure(..) => Err(NoSolution),
// impl Copy/Clone for CoroutineClosure where Self::TupledUpvars: Copy/Clone
ty::CoroutineClosure(_, args) => {
Ok(vec![ty::Binder::dummy(args.as_coroutine_closure().tupled_upvars_ty())])
}

// only when `coroutine_clone` is enabled and the coroutine is movable
// impl Copy/Clone for Coroutine where T: Copy/Clone forall T in (upvars, witnesses)
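
This hunk and the shim.rs hunk above extend to `ty::CoroutineClosure` the rule that already applies to ordinary closures: the closure is `Copy`/`Clone` exactly when the tuple of its captured upvars is. For reference, the same rule on stable Rust with ordinary closures:

    fn assert_clone<T: Clone>(_: &T) {}

    fn main() {
        let n = 5u32;
        let s = String::from("hi");

        let by_copy = move || n + 1;    // upvars: (u32,)
        let by_clone = move || s.len(); // upvars: (String,)

        assert_clone(&by_copy);  // (u32,) is Clone, so the closure is Clone
        assert_clone(&by_clone); // (String,) is Clone, so the closure is Clone
    }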
11 changes: 0 additions & 11 deletions compiler/rustc_parse/src/parser/attr.rs
@@ -457,14 +457,3 @@ impl<'a> Parser<'a> {
Err(self.dcx().create_err(err))
}
}

/// The attributes are complete if all attributes are either a doc comment or a
/// builtin attribute other than `cfg_attr`.
pub fn is_complete(attrs: &[ast::Attribute]) -> bool {
attrs.iter().all(|attr| {
attr.is_doc_comment()
|| attr.ident().is_some_and(|ident| {
ident.name != sym::cfg_attr && rustc_feature::is_builtin_attr_name(ident.name)
})
})
}
123 changes: 64 additions & 59 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -60,10 +60,6 @@ impl AttrWrapper {
pub fn is_empty(&self) -> bool {
self.attrs.is_empty()
}

pub fn is_complete(&self) -> bool {
crate::parser::attr::is_complete(&self.attrs)
}
}

/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
@@ -114,17 +110,15 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
replace_ranges.sort_by_key(|(range, _)| range.start);

#[cfg(debug_assertions)]
{
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
assert!(
range.end <= next_range.start || range.end >= next_range.end,
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
range,
tokens,
next_range,
next_tokens,
);
}
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
assert!(
range.end <= next_range.start || range.end >= next_range.end,
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
range,
tokens,
next_range,
next_tokens,
);
}

// Process the replace ranges, starting from the highest start
@@ -137,9 +131,9 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// By starting processing from the replace range with the greatest
// start position, we ensure that any replace range which encloses
// another replace range will capture the *replaced* tokens for the inner
// range, not the original tokens.
// start position, we ensure that any (outer) replace range which
// encloses another (inner) replace range will fully overwrite the
// inner range's replacement.
for (range, target) in replace_ranges.into_iter().rev() {
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");

@@ -199,20 +193,20 @@ impl<'a> Parser<'a> {
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, bool)>,
) -> PResult<'a, R> {
// Skip collection when nothing could observe the collected tokens, i.e.
// all of the following conditions hold.
// - We are not force collecting tokens (because force collection
// requires tokens by definition).
if matches!(force_collect, ForceCollect::No)
// - None of our outer attributes require tokens.
&& attrs.is_complete()
// - Our target doesn't support custom inner attributes (custom
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
// - We are force collecting tokens (because force collection requires
// tokens by definition).
let needs_collection = matches!(force_collect, ForceCollect::Yes)
// - Any of our outer attributes require tokens.
|| needs_tokens(&attrs.attrs)
// - Our target supports custom inner attributes (custom
// inner attribute invocation might require token capturing).
&& !R::SUPPORTS_CUSTOM_INNER_ATTRS
// - We are not in `capture_cfg` mode (which requires tokens if
|| R::SUPPORTS_CUSTOM_INNER_ATTRS
// - We are in `capture_cfg` mode (which requires tokens if
// the parsed node has `#[cfg]` or `#[cfg_attr]` attributes).
&& !self.capture_cfg
{
|| self.capture_cfg;
if !needs_collection {
return Ok(f(self, attrs.attrs)?.0);
}

@@ -250,28 +244,28 @@ impl<'a> Parser<'a> {
return Ok(ret);
}

// This is similar to the "skip collection" check at the start of this
// function, but now that we've parsed an AST node we have more
// This is similar to the `needs_collection` check at the start of this
// function, but now that we've parsed an AST node we have complete
// information available. (If we return early here that means the
// setup, such as cloning the token cursor, was unnecessary. That's
// hard to avoid.)
//
// Skip collection when nothing could observe the collected tokens, i.e.
// all of the following conditions hold.
// - We are not force collecting tokens.
if matches!(force_collect, ForceCollect::No)
// - None of our outer *or* inner attributes require tokens.
// (`attrs` was just outer attributes, but `ret.attrs()` is outer
// and inner attributes. That makes this check more precise than
// `attrs.is_complete()` at the start of the function, and we can
// skip the subsequent check on `R::SUPPORTS_CUSTOM_INNER_ATTRS`.
&& crate::parser::attr::is_complete(ret.attrs())
// - We are not in `capture_cfg` mode, or we are but there are no
// `#[cfg]` or `#[cfg_attr]` attributes. (During normal
// non-`capture_cfg` parsing, we don't need any special capturing
// for those attributes, because they're builtin.)
&& (!self.capture_cfg || !has_cfg_or_cfg_attr(ret.attrs()))
{
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
// - We are force collecting tokens.
let needs_collection = matches!(force_collect, ForceCollect::Yes)
// - Any of our outer *or* inner attributes require tokens.
// (`attr.attrs` was just outer attributes, but `ret.attrs()` is
// outer and inner attributes. So this check is more precise than
// the earlier `needs_tokens` check, and we don't need to
// check `R::SUPPORTS_CUSTOM_INNER_ATTRS`.)
|| needs_tokens(ret.attrs())
// - We are in `capture_cfg` mode and there are `#[cfg]` or
// `#[cfg_attr]` attributes. (During normal non-`capture_cfg`
// parsing, we don't need any special capturing for those
// attributes, because they're builtin.)
|| (self.capture_cfg && has_cfg_or_cfg_attr(ret.attrs()));
if !needs_collection {
return Ok(ret);
}

@@ -297,11 +291,13 @@ impl<'a> Parser<'a> {
// with `None`, which means the relevant tokens will be removed. (More
// details below.)
let mut inner_attr_replace_ranges = Vec::new();
for inner_attr in ret.attrs().iter().filter(|a| a.style == ast::AttrStyle::Inner) {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
inner_attr_replace_ranges.push((attr_range, None));
} else {
self.dcx().span_delayed_bug(inner_attr.span, "Missing token range for attribute");
for attr in ret.attrs() {
if attr.style == ast::AttrStyle::Inner {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&attr.id) {
inner_attr_replace_ranges.push((attr_range, None));
} else {
self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute");
}
}
}

Expand Down Expand Up @@ -337,8 +333,7 @@ impl<'a> Parser<'a> {
// When parsing `m`:
// - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute).
// - `inner_attr_replace_ranges` is empty.
// - `replace_range_start..replace_ranges_end` has two entries.
// - One to delete the inner attribute (`17..27`), obtained when parsing `g` (see above).
// - `replace_range_start..replace_ranges_end` has one entry.
// - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`,
// including its outer attribute), with:
// - `attrs`: includes the outer and the inner attr.
@@ -369,12 +364,10 @@ impl<'a> Parser<'a> {

// What is the status here when parsing the example code at the top of this method?
//
// When parsing `g`, we add two entries:
// When parsing `g`, we add one entry:
// - The `start_pos..end_pos` (`3..33`) entry has a new `AttrsTarget` with:
// - `attrs`: includes the outer and the inner attr.
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
// - `inner_attr_replace_ranges` contains the one entry to delete the inner attr's
// tokens (`17..27`).
//
// When parsing `m`, we do nothing here.

@@ -384,7 +377,6 @@ impl<'a> Parser<'a> {
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
} else if matches!(self.capture_state.capturing, Capturing::No) {
// Only clear the ranges once we've finished capturing entirely, i.e. we've finished
// the outermost call to this method.
@@ -461,6 +453,19 @@ fn make_attr_token_stream(
AttrTokenStream::new(stack_top.inner)
}

/// Tokens are needed if:
/// - any non-single-segment attributes (other than doc comments) are present; or
/// - any `cfg_attr` attributes are present;
/// - any single-segment, non-builtin attributes are present.
fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
attrs.iter().any(|attr| match attr.ident() {
None => !attr.is_doc_comment(),
Some(ident) => {
ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
}
})
}

// Some types are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
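
The new `needs_tokens` predicate above is the inverse of the `is_complete` helper deleted from attr.rs earlier in this diff. A rough truth table, based on my reading of the predicate rather than anything stated in the commit:

    // #[inline], #[derive(Debug)], /// doc comment    -> false (builtin or doc; no token capture)
    // #[cfg_attr(test, allow(dead_code))]             -> true  (cfg_attr expansion needs the tokens)
    // #[serde(rename = "x")]                          -> true  (single-segment but not builtin)
    // #[rustfmt::skip]                                -> true  (multi-segment path, not a doc comment)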
9 changes: 6 additions & 3 deletions compiler/rustc_parse/src/parser/item.rs
@@ -2483,12 +2483,15 @@ impl<'a> Parser<'a> {
/// `check_pub` adds additional `pub` to the checks in case users place it
/// wrongly, can be used to ensure `pub` never comes after `default`.
pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
const ALL_QUALS: &[Symbol] =
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern];

// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got confused with the ordering like `async pub fn`,
// only if it wasn't preceded by `default` as `default pub` is invalid.
let quals: &[Symbol] = if check_pub {
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
ALL_QUALS
} else {
&[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
};
@@ -2518,9 +2521,9 @@ impl<'a> Parser<'a> {
|| self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_string_literal())
&& (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
// this branch is only for better diagnostic in later, `pub` is not allowed here
// this branch is only for better diagnostics; `pub`, `unsafe`, etc. are not allowed here
(self.may_recover()
&& self.look_ahead(2, |t| t.is_keyword(kw::Pub))
&& self.look_ahead(2, |t| ALL_QUALS.iter().any(|&kw| t.is_keyword(kw)))
&& self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
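
The widened lookahead above supports #128229: after `extern "<abi>"`, any misplaced qualifier from `ALL_QUALS` (not just `pub`) is now recognized, so the parser can recover and point at the accepted ordering. A sketch of the shape of input involved; the exact diagnostic wording comes from the PR, not from this snippet:

    // Invalid: qualifiers must come before `extern`.
    //     extern "C" unsafe fn foo();
    //
    // Accepted form that the improved error should steer users toward:
    unsafe extern "C" fn foo() {}

    fn main() {
        unsafe { foo() }
    }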
}

Expand Down
17 changes: 15 additions & 2 deletions compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -2262,8 +2262,21 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
}
}

// FIXME(async_closures): These are never clone, for now.
ty::CoroutineClosure(_, _) => None,
ty::CoroutineClosure(_, args) => {
// (*) binder moved here
let ty = self.infcx.shallow_resolve(args.as_coroutine_closure().tupled_upvars_ty());
if let ty::Infer(ty::TyVar(_)) = ty.kind() {
// Not yet resolved.
Ambiguous
} else {
Where(
obligation
.predicate
.rebind(args.as_coroutine_closure().upvar_tys().to_vec()),
)
}
}

// `Copy` and `Clone` are automatically implemented for an anonymous adt
// if all of its fields are `Copy` and `Clone`
ty::Adt(adt, args) if adt.is_anonymous() => {
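
The new selection arm mirrors the existing closure logic: stay `Ambiguous` while the upvars are still an unresolved inference variable, otherwise reduce `Copy`/`Clone` to the tuple of upvar types. The user-visible effect, sketched for a nightly with the unstable `async_closure` feature:

    #![feature(async_closure)]

    fn assert_copy<T: Copy>(_: T) {}

    fn main() {
        let n = 7u32;
        let make_fut = async move || { n + 1 }; // captures only a `Copy` value
        assert_copy(make_fut);                  // so the async closure itself is `Copy`
        assert_copy(make_fut);                  // usable twice: it was copied, not moved
    }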
2 changes: 1 addition & 1 deletion library/core/src/iter/traits/iterator.rs
@@ -823,7 +823,7 @@ pub trait Iterator {
///
/// Given an element the closure must return `true` or `false`. The returned
/// iterator will yield only the elements for which the closure returns
/// true.
/// `true`.
///
/// # Examples
///
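
The fixed sentence only gains code formatting around `true`; for reference, minimal `filter` usage matching the description (the doc's own examples are truncated in this view):

    fn main() {
        let evens: Vec<i32> = (1..=6).filter(|n| n % 2 == 0).collect();
        assert_eq!(evens, [2, 4, 6]);
    }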
1 change: 0 additions & 1 deletion library/core/src/lib.rs
@@ -127,7 +127,6 @@
#![feature(const_hash)]
#![feature(const_heap)]
#![feature(const_index_range_slice_index)]
#![feature(const_int_from_str)]
#![feature(const_intrinsic_copy)]
#![feature(const_intrinsic_forget)]
#![feature(const_ipv4)]
2 changes: 1 addition & 1 deletion library/core/src/num/error.rs
@@ -113,7 +113,7 @@ pub enum IntErrorKind {
impl ParseIntError {
/// Outputs the detailed cause of parsing an integer failing.
#[must_use]
#[rustc_const_unstable(feature = "const_int_from_str", issue = "59133")]
#[rustc_const_stable(feature = "const_int_from_str", since = "CURRENT_RUSTC_VERSION")]
#[stable(feature = "int_error_matching", since = "1.55.0")]
pub const fn kind(&self) -> &IntErrorKind {
&self.kind
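
With #124941, `from_str_radix` and `ParseIntError::kind` become callable in const context (the release number replaces the `CURRENT_RUSTC_VERSION` placeholder at publish time). A minimal sketch of what that allows:

    // `from_str_radix` in a const item; `ParseIntError::kind` is likewise
    // const-callable after this change.
    const FLAGS: u32 = match u32::from_str_radix("ff", 16) {
        Ok(v) => v,
        Err(_) => panic!("invalid hex literal"),
    };

    fn main() {
        assert_eq!(FLAGS, 0xff);
    }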