This trait is not \
dyn compatible.
\
In older versions of Rust, dyn compatibility was called \"object safety\", \
@@ -1225,14 +1225,14 @@ fn item_type_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &c
w,
cx,
Some(&t.generics),
- &variants,
+ variants,
variants_count,
has_stripped_entries,
*is_non_exhaustive,
enum_def_id,
)
});
- item_variants(w, cx, it, &variants, enum_def_id);
+ item_variants(w, cx, it, variants, enum_def_id);
}
clean::TypeAliasInnerType::Union { fields } => {
wrap_item(w, |w| {
@@ -1824,7 +1824,7 @@ fn item_constant(
name = it.name.unwrap(),
generics = generics.print(cx),
typ = ty.print(cx),
- where_clause = print_where_clause(&generics, cx, 0, Ending::NoNewline),
+ where_clause = print_where_clause(generics, cx, 0, Ending::NoNewline),
);
// FIXME: The code below now prints
@@ -2194,7 +2194,7 @@ fn render_union<'a, 'cx: 'a>(
f.write_str(" ")?;
}
- write!(f, "{{\n")?;
+ writeln!(f, "{{")?;
let count_fields =
fields.iter().filter(|field| matches!(field.kind, clean::StructFieldItem(..))).count();
let toggle = should_hide_fields(count_fields);
@@ -2204,9 +2204,9 @@ fn render_union<'a, 'cx: 'a>(
for field in fields {
if let clean::StructFieldItem(ref ty) = field.kind {
- write!(
+ writeln!(
f,
- " {}{}: {},\n",
+ " {}{}: {},",
visibility_print_with_space(field, cx),
field.name.unwrap(),
ty.print(cx)
@@ -2215,7 +2215,7 @@ fn render_union<'a, 'cx: 'a>(
}
if it.has_stripped_entries().unwrap() {
- write!(f, " \n")?;
+ writeln!(f, " ")?;
}
if toggle {
toggle_close(&mut f);
@@ -2355,7 +2355,7 @@ fn document_non_exhaustive_header(item: &clean::Item) -> &str {
if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" }
}
-fn document_non_exhaustive<'a>(item: &'a clean::Item) -> impl fmt::Display + 'a {
+fn document_non_exhaustive(item: &clean::Item) -> impl fmt::Display + '_ {
display_fn(|f| {
if item.is_non_exhaustive() {
write!(
diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs
index f91fdfa1fb5f5..cfb62c3ca1640 100644
--- a/src/librustdoc/html/render/search_index.rs
+++ b/src/librustdoc/html/render/search_index.rs
@@ -55,10 +55,10 @@ pub(crate) struct SerializedSearchIndex {
const DESC_INDEX_SHARD_LEN: usize = 128 * 1024;
/// Builds the search index from the collected metadata
-pub(crate) fn build_index<'tcx>(
+pub(crate) fn build_index(
krate: &clean::Crate,
cache: &mut Cache,
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxt<'_>,
) -> SerializedSearchIndex {
// Maps from ID to position in the `crate_paths` array.
let mut itemid_to_pathid = FxHashMap::default();
@@ -126,7 +126,7 @@ pub(crate) fn build_index<'tcx>(
let mut lastpathid = 0isize;
// First, on function signatures
- let mut search_index = std::mem::replace(&mut cache.search_index, Vec::new());
+ let mut search_index = std::mem::take(&mut cache.search_index);
for item in search_index.iter_mut() {
fn insert_into_map(
map: &mut FxHashMap,
@@ -194,7 +194,7 @@ pub(crate) fn build_index<'tcx>(
{
let exact_fqp = exact_paths
.get(&defid)
- .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp))
+ .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
// Re-exports only count if the name is exactly the same.
// This is a size optimization, since it means we only need
// to store the name once (and the path is re-used for everything
@@ -298,7 +298,7 @@ pub(crate) fn build_index<'tcx>(
true
});
}
- let Some(id) = ty.id.clone() else {
+ let Some(id) = ty.id else {
assert!(ty.generics.is_some());
return;
};
@@ -372,7 +372,7 @@ pub(crate) fn build_index<'tcx>(
if let Some(&(ref fqp, short)) = paths.get(&defid) {
let exact_fqp = exact_paths
.get(&defid)
- .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp))
+ .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
.filter(|exact_fqp| {
exact_fqp.last() == Some(&item.name) && *exact_fqp != fqp
});
@@ -397,7 +397,7 @@ pub(crate) fn build_index<'tcx>(
// Their parent carries the exact fqp instead.
let exact_fqp = exact_paths
.get(&defid)
- .or_else(|| external_paths.get(&defid).map(|&(ref fqp, _)| fqp));
+ .or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp));
item.exact_path = exact_fqp.and_then(|fqp| {
// Re-exports only count if the name is exactly the same.
// This is a size optimization, since it means we only need
@@ -426,7 +426,7 @@ pub(crate) fn build_index<'tcx>(
}
// Omit the parent path if it is same to that of the prior item.
- if lastpath == &item.path {
+ if lastpath == item.path {
item.path.clear();
} else {
lastpath = &item.path;
@@ -512,7 +512,7 @@ pub(crate) fn build_index<'tcx>(
}
}
- impl<'a> Serialize for CrateData<'a> {
+ impl Serialize for CrateData<'_> {
fn serialize(&self, serializer: S) -> Result
where
S: Serializer,
@@ -640,7 +640,7 @@ pub(crate) fn build_index<'tcx>(
let mut last_name = None;
for (index, item) in self.items.iter().enumerate() {
let n = item.ty as u8;
- let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
+ let c = char::from(n + b'A');
assert!(c <= 'z', "item types must fit within ASCII printables");
types.push(c);
@@ -741,22 +741,22 @@ pub(crate) fn build_index<'tcx>(
let mut len: usize = 0;
let mut item_index: u32 = 0;
for desc in std::iter::once(&crate_doc).chain(crate_items.iter().map(|item| &item.desc)) {
- if desc == "" {
+ if desc.is_empty() {
empty_desc.push(item_index);
item_index += 1;
continue;
}
if set.len() >= DESC_INDEX_SHARD_LEN {
- result.push((len, std::mem::replace(&mut set, String::new())));
+ result.push((len, std::mem::take(&mut set)));
len = 0;
} else if len != 0 {
set.push('\n');
}
- set.push_str(&desc);
+ set.push_str(desc);
len += 1;
item_index += 1;
}
- result.push((len, std::mem::replace(&mut set, String::new())));
+ result.push((len, std::mem::take(&mut set)));
(empty_desc, result)
};
@@ -792,9 +792,9 @@ pub(crate) fn build_index<'tcx>(
SerializedSearchIndex { index, desc }
}
-pub(crate) fn get_function_type_for_search<'tcx>(
+pub(crate) fn get_function_type_for_search(
item: &clean::Item,
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxt<'_>,
impl_generics: Option<&(clean::Type, clean::Generics)>,
parent: Option,
cache: &Cache,
@@ -861,7 +861,7 @@ fn get_index_type_id(
match *clean_type {
clean::Type::Path { ref path, .. } => Some(RenderTypeId::DefId(path.def_id())),
clean::DynTrait(ref bounds, _) => {
- bounds.get(0).map(|b| RenderTypeId::DefId(b.trait_.def_id()))
+ bounds.first().map(|b| RenderTypeId::DefId(b.trait_.def_id()))
}
clean::Primitive(p) => Some(RenderTypeId::Primitive(p)),
clean::BorrowedRef { .. } => Some(RenderTypeId::Primitive(clean::PrimitiveType::Reference)),
@@ -953,7 +953,7 @@ fn simplify_fn_type<'a, 'tcx>(
WherePredicate::BoundPredicate { ty, .. } => *ty == *arg,
_ => false,
}) {
- let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
+ let bounds = where_pred.get_bounds().unwrap_or(&[]);
for bound in bounds.iter() {
if let Some(path) = bound.get_trait_path() {
let ty = Type::Path { path };
@@ -1043,7 +1043,7 @@ fn simplify_fn_type<'a, 'tcx>(
simplify_fn_type(
self_,
generics,
- &ty,
+ ty,
tcx,
recurse + 1,
&mut ty_generics,
@@ -1058,7 +1058,7 @@ fn simplify_fn_type<'a, 'tcx>(
simplify_fn_type(
self_,
generics,
- &ty,
+ ty,
tcx,
recurse + 1,
&mut ty_generics,
@@ -1074,7 +1074,7 @@ fn simplify_fn_type<'a, 'tcx>(
simplify_fn_type(
self_,
generics,
- &ty,
+ ty,
tcx,
recurse + 1,
&mut ty_generics,
@@ -1117,7 +1117,7 @@ fn simplify_fn_type<'a, 'tcx>(
);
let ty_bindings = vec![(RenderTypeId::AssociatedType(sym::Output), ty_output)];
res.push(RenderType {
- id: get_index_type_id(&arg, rgen),
+ id: get_index_type_id(arg, rgen),
bindings: Some(ty_bindings),
generics: Some(ty_generics),
});
@@ -1134,7 +1134,7 @@ fn simplify_fn_type<'a, 'tcx>(
simplify_fn_type(
self_,
generics,
- &type_,
+ type_,
tcx,
recurse + 1,
&mut ty_generics,
@@ -1249,7 +1249,7 @@ fn simplify_fn_type<'a, 'tcx>(
}
}
}
- let id = get_index_type_id(&arg, rgen);
+ let id = get_index_type_id(arg, rgen);
if id.is_some() || !ty_generics.is_empty() {
res.push(RenderType {
id,
@@ -1261,11 +1261,11 @@ fn simplify_fn_type<'a, 'tcx>(
}
}
-fn simplify_fn_constraint<'a, 'tcx>(
+fn simplify_fn_constraint<'a>(
self_: Option<&'a Type>,
generics: &Generics,
constraint: &'a clean::AssocItemConstraint,
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxt<'_>,
recurse: usize,
res: &mut Vec<(RenderTypeId, Vec)>,
rgen: &mut FxIndexMap)>,
@@ -1347,9 +1347,9 @@ fn simplify_fn_constraint<'a, 'tcx>(
///
/// i.e. `fn foo>(x: u32, y: B)` will return
/// `[u32, Display, Option]`.
-fn get_fn_inputs_and_outputs<'tcx>(
+fn get_fn_inputs_and_outputs(
func: &Function,
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxt<'_>,
impl_or_trait_generics: Option<&(clean::Type, clean::Generics)>,
cache: &Cache,
) -> (Vec, Vec, Vec, Vec>) {
diff --git a/src/librustdoc/html/render/search_index/encode.rs b/src/librustdoc/html/render/search_index/encode.rs
index 8d715814faad7..8816ea650593b 100644
--- a/src/librustdoc/html/render/search_index/encode.rs
+++ b/src/librustdoc/html/render/search_index/encode.rs
@@ -25,7 +25,7 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
break;
}
shift = shift.wrapping_sub(4);
- mask = mask >> 4;
+ mask >>= 4;
}
// now write the rest
while shift < 32 {
@@ -33,7 +33,7 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
let hex = char::try_from(if shift == 0 { '`' } else { '@' } as u32 + hexit).unwrap();
string.push(hex);
shift = shift.wrapping_sub(4);
- mask = mask >> 4;
+ mask >>= 4;
}
}
@@ -64,7 +64,7 @@ impl Container {
Container::Array(array) => {
array.push(value);
if array.len() >= 4096 {
- let array = std::mem::replace(array, Vec::new());
+ let array = std::mem::take(array);
*self = Container::Bits(Box::new([0; 1024]));
for value in array {
self.push(value);
@@ -123,7 +123,7 @@ impl Container {
if 2 + 4 * r >= 2 * array.len() + 2 {
return false;
}
- let array = std::mem::replace(array, Vec::new());
+ let array = std::mem::take(array);
*self = Container::Run(Vec::new());
for value in array {
self.push(value);
@@ -145,7 +145,7 @@ pub(crate) fn write_bitmap_to_bytes(
let mut keys = Vec::::new();
let mut containers = Vec::::new();
let mut key: u16;
- let mut domain_iter = domain.into_iter().copied().peekable();
+ let mut domain_iter = domain.iter().copied().peekable();
let mut has_run = false;
while let Some(entry) = domain_iter.next() {
key = (entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
@@ -236,7 +236,7 @@ pub(crate) fn write_bitmap_to_bytes(
pub(crate) fn bitmap_to_string(domain: &[u32]) -> String {
let mut buf = Vec::new();
let mut strbuf = String::new();
- write_bitmap_to_bytes(&domain, &mut buf).unwrap();
+ write_bitmap_to_bytes(domain, &mut buf).unwrap();
BASE64_STANDARD.encode_string(&buf, &mut strbuf);
strbuf
}
diff --git a/src/librustdoc/html/render/sidebar.rs b/src/librustdoc/html/render/sidebar.rs
index 6df9486e65809..76de8d872311e 100644
--- a/src/librustdoc/html/render/sidebar.rs
+++ b/src/librustdoc/html/render/sidebar.rs
@@ -44,7 +44,7 @@ pub(super) struct Sidebar<'a> {
pub(super) path: String,
}
-impl<'a> Sidebar<'a> {
+impl Sidebar<'_> {
/// Only create a `` if there are any blocks
/// which should actually be rendered.
pub fn should_render_blocks(&self) -> bool {
@@ -564,9 +564,9 @@ pub(crate) fn sidebar_module_like(
.filter(|sec| item_sections_in_use.contains(sec))
.map(|sec| Link::new(ids.derive(sec.id()), sec.name()))
.collect();
- let header = if let Some(first_section) = item_sections.get(0) {
+ let header = if let Some(first_section) = item_sections.first() {
Link::new(
- first_section.href.to_owned(),
+ first_section.href.clone(),
if module_like.is_crate() { "Crate Items" } else { "Module Items" },
)
} else {
diff --git a/src/librustdoc/html/render/sorted_template.rs b/src/librustdoc/html/render/sorted_template.rs
index 28f7766d7c7ac..dc894840f92a0 100644
--- a/src/librustdoc/html/render/sorted_template.rs
+++ b/src/librustdoc/html/render/sorted_template.rs
@@ -84,7 +84,7 @@ impl FromStr for SortedTemplate {
let offset = offset
.strip_suffix(F::COMMENT_END)
.ok_or(Error("last line expected to end with a comment"))?;
- let offset: Offset = serde_json::from_str(&offset).map_err(|_| {
+ let offset: Offset = serde_json::from_str(offset).map_err(|_| {
Error("could not find insertion location descriptor object on last line")
})?;
let (before, mut s) =
diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs
index d4cca562d6c4a..81d79a6be9683 100644
--- a/src/librustdoc/html/render/span_map.rs
+++ b/src/librustdoc/html/render/span_map.rs
@@ -63,7 +63,7 @@ struct SpanMapVisitor<'tcx> {
pub(crate) matches: FxHashMap,
}
-impl<'tcx> SpanMapVisitor<'tcx> {
+impl SpanMapVisitor<'_> {
/// This function is where we handle `hir::Path` elements and add them into the "span map".
fn handle_path(&mut self, path: &rustc_hir::Path<'_>) {
match path.res {
diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs
index c82f7e9aaf927..7c676469597dc 100644
--- a/src/librustdoc/html/render/write_shared.rs
+++ b/src/librustdoc/html/render/write_shared.rs
@@ -68,8 +68,8 @@ pub(crate) fn write_shared(
let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file);
let SerializedSearchIndex { index, desc } =
- build_index(&krate, &mut Rc::get_mut(&mut cx.shared).unwrap().cache, tcx);
- write_search_desc(cx, &krate, &desc)?; // does not need to be merged
+ build_index(krate, &mut Rc::get_mut(&mut cx.shared).unwrap().cache, tcx);
+ write_search_desc(cx, krate, &desc)?; // does not need to be merged
let crate_name = krate.name(cx.tcx());
let crate_name = crate_name.as_str(); // rand
@@ -80,7 +80,7 @@ pub(crate) fn write_shared(
src_files_js: SourcesPart::get(cx, &crate_name_json)?,
search_index_js: SearchIndexPart::get(index, &cx.shared.resource_suffix)?,
all_crates: AllCratesPart::get(crate_name_json.clone(), &cx.shared.resource_suffix)?,
- crates_index: CratesIndexPart::get(&crate_name, &external_crates)?,
+ crates_index: CratesIndexPart::get(crate_name, &external_crates)?,
trait_impl: TraitAliasPart::get(cx, &crate_name_json)?,
type_impl: TypeAliasPart::get(cx, krate, &crate_name_json)?,
};
@@ -112,7 +112,7 @@ pub(crate) fn write_shared(
md_opts.output = cx.dst.clone();
md_opts.external_html = cx.shared.layout.external_html.clone();
try_err!(
- crate::markdown::render_and_write(&index_page, md_opts, cx.shared.edition()),
+ crate::markdown::render_and_write(index_page, md_opts, cx.shared.edition()),
&index_page
);
}
@@ -158,13 +158,13 @@ fn write_rendered_cross_crate_info(
let m = &opt.should_merge;
if opt.emit.is_empty() || opt.emit.contains(&EmitType::InvocationSpecific) {
if include_sources {
- write_rendered_cci::(SourcesPart::blank, dst, &crates, m)?;
+ write_rendered_cci::(SourcesPart::blank, dst, crates, m)?;
}
- write_rendered_cci::(SearchIndexPart::blank, dst, &crates, m)?;
- write_rendered_cci::(AllCratesPart::blank, dst, &crates, m)?;
+ write_rendered_cci::(SearchIndexPart::blank, dst, crates, m)?;
+ write_rendered_cci::(AllCratesPart::blank, dst, crates, m)?;
}
- write_rendered_cci::(TraitAliasPart::blank, dst, &crates, m)?;
- write_rendered_cci::(TypeAliasPart::blank, dst, &crates, m)?;
+ write_rendered_cci::(TraitAliasPart::blank, dst, crates, m)?;
+ write_rendered_cci::(TypeAliasPart::blank, dst, crates, m)?;
Ok(())
}
@@ -234,7 +234,7 @@ fn write_search_desc(
&cx.shared.resource_suffix,
);
let path = path.join(filename);
- let part = OrderedJson::serialize(&part).unwrap();
+ let part = OrderedJson::serialize(part).unwrap();
let part = format!("searchState.loadedDescShard({encoded_crate_name}, {i}, {part})");
create_parents(&path)?;
try_err!(fs::write(&path, part), &path);
@@ -261,7 +261,7 @@ impl CrateInfo {
.iter()
.map(|parts_path| {
let path = &parts_path.0;
- let parts = try_err!(fs::read(&path), &path);
+ let parts = try_err!(fs::read(path), &path);
let parts: CrateInfo = try_err!(serde_json::from_slice(&parts), &path);
Ok::<_, Error>(parts)
})
@@ -439,7 +439,7 @@ impl CratesIndexPart {
const DELIMITER: &str = "\u{FFFC}"; // users are being naughty if they have this
let content =
format!("List of all crates
");
- let template = layout::render(layout, &page, "", content, &style_files);
+ let template = layout::render(layout, &page, "", content, style_files);
match SortedTemplate::from_template(&template, DELIMITER) {
Ok(template) => template,
Err(e) => panic!(
@@ -534,7 +534,7 @@ impl Hierarchy {
}
fn add_path(self: &Rc, path: &Path) {
- let mut h = Rc::clone(&self);
+ let mut h = Rc::clone(self);
let mut elems = path
.components()
.filter_map(|s| match s {
@@ -606,7 +606,7 @@ impl TypeAliasPart {
cache,
cx,
};
- DocVisitor::visit_crate(&mut type_impl_collector, &krate);
+ DocVisitor::visit_crate(&mut type_impl_collector, krate);
let cx = type_impl_collector.cx;
let aliased_types = type_impl_collector.aliased_types;
for aliased_type in aliased_types.values() {
@@ -623,7 +623,7 @@ impl TypeAliasPart {
// render_impl will filter out "impossible-to-call" methods
// to make that functionality work here, it needs to be called with
// each type alias, and if it gives a different result, split the impl
- for &(type_alias_fqp, ref type_alias_item) in type_aliases {
+ for &(type_alias_fqp, type_alias_item) in type_aliases {
let mut buf = Buffer::html();
cx.id_map = Default::default();
cx.deref_id_map = Default::default();
@@ -643,8 +643,8 @@ impl TypeAliasPart {
super::render_impl(
&mut buf,
cx,
- *impl_,
- &type_alias_item,
+ impl_,
+ type_alias_item,
assoc_link,
RenderMode::Normal,
None,
@@ -680,7 +680,7 @@ impl TypeAliasPart {
path.push(component.as_str());
}
let aliased_item_type = aliased_type.target_type;
- path.push(&format!(
+ path.push(format!(
"{aliased_item_type}.{}.js",
aliased_type.target_fqp[aliased_type.target_fqp.len() - 1]
));
@@ -781,7 +781,7 @@ impl TraitAliasPart {
for component in &remote_path[..remote_path.len() - 1] {
path.push(component.as_str());
}
- path.push(&format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
+ path.push(format!("{remote_item_type}.{}.js", remote_path[remote_path.len() - 1]));
let part = OrderedJson::array_sorted(
implementors
@@ -865,7 +865,7 @@ struct AliasedTypeImpl<'cache, 'item> {
type_aliases: Vec<(&'cache [Symbol], &'item Item)>,
}
-impl<'cx, 'cache, 'item> DocVisitor<'item> for TypeImplCollector<'cx, 'cache, 'item> {
+impl<'item> DocVisitor<'item> for TypeImplCollector<'_, '_, 'item> {
fn visit_item(&mut self, it: &'item Item) {
self.visit_item_recur(it);
let cache = self.cache;
@@ -963,15 +963,13 @@ fn get_path_parts(
crates_info: &[CrateInfo],
) -> FxIndexMap> {
let mut templates: FxIndexMap> = FxIndexMap::default();
- crates_info
- .iter()
- .map(|crate_info| T::from_crate_info(crate_info).parts.iter())
- .flatten()
- .for_each(|(path, part)| {
- let path = dst.join(&path);
+ crates_info.iter().flat_map(|crate_info| T::from_crate_info(crate_info).parts.iter()).for_each(
+ |(path, part)| {
+ let path = dst.join(path);
let part = part.to_string();
templates.entry(path).or_default().push(part);
- });
+ },
+ );
templates
}
@@ -994,10 +992,10 @@ where
if !should_merge.read_rendered_cci {
return Ok(make_blank());
}
- match fs::read_to_string(&path) {
+ match fs::read_to_string(path) {
Ok(template) => Ok(try_err!(SortedTemplate::from_str(&template), &path)),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(make_blank()),
- Err(e) => Err(Error::new(e, &path)),
+ Err(e) => Err(Error::new(e, path)),
}
}
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs
index f4a0ef01c253b..2fe9364c259c3 100644
--- a/src/librustdoc/html/sources.rs
+++ b/src/librustdoc/html/sources.rs
@@ -35,8 +35,8 @@ pub(crate) fn render(cx: &mut Context<'_>, krate: &clean::Crate) -> Result<(), E
Ok(())
}
-pub(crate) fn collect_local_sources<'tcx>(
- tcx: TyCtxt<'tcx>,
+pub(crate) fn collect_local_sources(
+ tcx: TyCtxt<'_>,
src_root: &Path,
krate: &clean::Crate,
) -> FxIndexMap {
@@ -80,7 +80,7 @@ impl LocalSourcesCollector<'_, '_> {
let href = RefCell::new(PathBuf::new());
clean_path(
- &self.src_root,
+ self.src_root,
&p,
|component| {
href.borrow_mut().push(component);
diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs
index a4dc8cd1ed912..6457ac731cb76 100644
--- a/src/librustdoc/html/static_files.rs
+++ b/src/librustdoc/html/static_files.rs
@@ -57,7 +57,7 @@ pub(crate) fn suffix_path(filename: &str, suffix: &str) -> PathBuf {
pub(crate) fn static_filename(filename: &str, sha256: &str) -> PathBuf {
let filename = filename.rsplit('/').next().unwrap();
- suffix_path(filename, &sha256)
+ suffix_path(filename, sha256)
}
macro_rules! static_files {
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index df97c5ea2634a..560ed872ef3af 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -73,7 +73,7 @@ impl<'tcx> JsonRenderer<'tcx> {
.map(|i| {
let item = &i.impl_item;
self.item(item.clone()).unwrap();
- self.id_from_item(&item)
+ self.id_from_item(item)
})
.collect()
})
@@ -104,7 +104,7 @@ impl<'tcx> JsonRenderer<'tcx> {
if item.item_id.is_local() || is_primitive_impl {
self.item(item.clone()).unwrap();
- Some(self.id_from_item(&item))
+ Some(self.id_from_item(item))
} else {
None
}
@@ -223,7 +223,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
| types::ItemEnum::Macro(_)
| types::ItemEnum::ProcMacro(_) => false,
};
- let removed = self.index.borrow_mut().insert(new_item.id.clone(), new_item.clone());
+ let removed = self.index.borrow_mut().insert(new_item.id, new_item.clone());
// FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check
// to make sure the items are unique. The main place this happens is when an item, is
@@ -289,7 +289,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
format_version: types::FORMAT_VERSION,
};
if let Some(ref out_dir) = self.out_dir {
- try_err!(create_dir_all(&out_dir), out_dir);
+ try_err!(create_dir_all(out_dir), out_dir);
let mut p = out_dir.clone();
p.push(output_crate.index.get(&output_crate.root).unwrap().name.clone().unwrap());
diff --git a/src/librustdoc/lint.rs b/src/librustdoc/lint.rs
index 2afb9e549d902..dcc27cd62e389 100644
--- a/src/librustdoc/lint.rs
+++ b/src/librustdoc/lint.rs
@@ -222,7 +222,7 @@ pub(crate) static RUSTDOC_LINTS: Lazy> = Lazy::new(|| {
});
pub(crate) fn register_lints(_sess: &Session, lint_store: &mut LintStore) {
- lint_store.register_lints(&**RUSTDOC_LINTS);
+ lint_store.register_lints(&RUSTDOC_LINTS);
lint_store.register_group(
true,
"rustdoc::all",
diff --git a/src/librustdoc/passes/calculate_doc_coverage.rs b/src/librustdoc/passes/calculate_doc_coverage.rs
index 9f9a093da8a38..135aa79906010 100644
--- a/src/librustdoc/passes/calculate_doc_coverage.rs
+++ b/src/librustdoc/passes/calculate_doc_coverage.rs
@@ -118,7 +118,7 @@ fn limit_filename_len(filename: String) -> String {
}
}
-impl<'a, 'b> CoverageCalculator<'a, 'b> {
+impl CoverageCalculator<'_, '_> {
fn to_json(&self) -> String {
serde_json::to_string(
&self
@@ -188,7 +188,7 @@ impl<'a, 'b> CoverageCalculator<'a, 'b> {
}
}
-impl<'a, 'b> DocVisitor<'_> for CoverageCalculator<'a, 'b> {
+impl DocVisitor<'_> for CoverageCalculator<'_, '_> {
fn visit_item(&mut self, i: &clean::Item) {
if !i.item_id.is_local() {
// non-local items are skipped because they can be out of the users control,
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 484bdb5627c37..bf851b278b82a 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -34,7 +34,7 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -
krate
}
-impl<'a, 'tcx> DocVisitor<'_> for DocTestVisibilityLinter<'a, 'tcx> {
+impl DocVisitor<'_> for DocTestVisibilityLinter<'_, '_> {
fn visit_item(&mut self, item: &Item) {
look_for_tests(self.cx, &item.doc_value(), item);
@@ -106,7 +106,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
level != lint::Level::Allow || matches!(source, LintLevelSource::Default)
}
-pub(crate) fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) {
+pub(crate) fn look_for_tests(cx: &DocContext<'_>, dox: &str, item: &Item) {
let Some(hir_id) = DocContext::as_local_hir_id(cx.tcx, item.item_id) else {
// If non-local, no need to check anything.
return;
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 140fda7091885..c9d1ceb0a91a9 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -53,12 +53,12 @@ pub(crate) fn collect_intra_doc_links<'a, 'tcx>(
(krate, collector)
}
-fn filter_assoc_items_by_name_and_namespace<'a>(
- tcx: TyCtxt<'a>,
+fn filter_assoc_items_by_name_and_namespace(
+ tcx: TyCtxt<'_>,
assoc_items_of: DefId,
ident: Ident,
ns: Namespace,
-) -> impl Iterator- + 'a {
+) -> impl Iterator
- + '_ {
tcx.associated_items(assoc_items_of).filter_by_name_unhygienic(ident.name).filter(move |item| {
item.kind.namespace() == ns && tcx.hygienic_eq(ident, item.ident(tcx), assoc_items_of)
})
@@ -232,7 +232,7 @@ impl UrlFragment {
s.push_str(kind);
s.push_str(tcx.item_name(def_id).as_str());
}
- UrlFragment::UserWritten(raw) => s.push_str(&raw),
+ UrlFragment::UserWritten(raw) => s.push_str(raw),
}
}
}
@@ -307,7 +307,7 @@ pub(crate) struct AmbiguousLinks {
resolved: Vec<(Res, Option)>,
}
-impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
+impl<'tcx> LinkCollector<'_, 'tcx> {
/// Given a full link, parse it as an [enum struct variant].
///
/// In particular, this will return an error whenever there aren't three
@@ -339,7 +339,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// If there's no third component, we saw `[a::b]` before and it failed to resolve.
// So there's no partial res.
let path = split.next().ok_or_else(no_res)?;
- let ty_res = self.resolve_path(&path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
+ let ty_res = self.resolve_path(path, TypeNS, item_id, module_id).ok_or_else(no_res)?;
match ty_res {
Res::Def(DefKind::Enum, did) => match tcx.type_of(did).instantiate_identity().kind() {
@@ -628,7 +628,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
.map(|item| (root_res, item.def_id))
.collect::>()
})
- .unwrap_or(Vec::new())
+ .unwrap_or_default()
}
}
Res::Def(DefKind::TyAlias, did) => {
@@ -693,7 +693,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// Checks if item_name belongs to `impl SomeItem`
let mut assoc_items: Vec<_> = tcx
.inherent_impls(did)
- .into_iter()
+ .iter()
.flat_map(|&imp| {
filter_assoc_items_by_name_and_namespace(
tcx,
@@ -878,7 +878,7 @@ fn is_derive_trait_collision(ns: &PerNS, ResolutionFailu
}
}
-impl<'a, 'tcx> DocVisitor<'_> for LinkCollector<'a, 'tcx> {
+impl DocVisitor<'_> for LinkCollector<'_, '_> {
fn visit_item(&mut self, item: &Item) {
self.resolve_links(item);
self.visit_item_recur(item)
@@ -1152,7 +1152,7 @@ impl LinkCollector<'_, '_> {
}
cache.paths.get(&did).is_some()
- || cache.external_paths.get(&did).is_some()
+ || cache.external_paths.contains_key(&did)
|| !did.is_local()
}
@@ -1271,7 +1271,7 @@ impl LinkCollector<'_, '_> {
}
res.def_id(self.cx.tcx).map(|page_id| ItemLink {
- link: Box::::from(&*diag_info.ori_link),
+ link: Box::::from(diag_info.ori_link),
link_text: link_text.clone(),
page_id,
fragment,
@@ -1293,7 +1293,7 @@ impl LinkCollector<'_, '_> {
let page_id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));
Some(ItemLink {
- link: Box::::from(&*diag_info.ori_link),
+ link: Box::::from(diag_info.ori_link),
link_text: link_text.clone(),
page_id,
fragment,
@@ -1387,7 +1387,7 @@ impl LinkCollector<'_, '_> {
)
.unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
- &self.cx.tcx.sess,
+ self.cx.tcx.sess,
sym::intra_doc_pointers,
span,
"linking to associated items of raw pointers is experimental",
@@ -1414,7 +1414,7 @@ impl LinkCollector<'_, '_> {
// FIXME: it would be nice to check that the feature gate was enabled in the original crate, not just ignore it altogether.
// However I'm not sure how to check that across crates.
- if let Some(candidate) = candidates.get(0)
+ if let Some(candidate) = candidates.first()
&& candidate.0 == Res::Primitive(PrimitiveType::RawPointer)
&& key.path_str.contains("::")
// We only want to check this if this is an associated item.
@@ -1493,7 +1493,7 @@ impl LinkCollector<'_, '_> {
}
}
resolution_failure(self, diag, path_str, disambiguator, smallvec![err]);
- return vec![];
+ vec![]
}
}
}
@@ -1509,15 +1509,12 @@ impl LinkCollector<'_, '_> {
type_ns: candidate(TypeNS),
value_ns: candidate(ValueNS).and_then(|v_res| {
for (res, _) in v_res.iter() {
- match res {
- // Constructors are picked up in the type namespace.
- Res::Def(DefKind::Ctor(..), _) => {
- return Err(ResolutionFailure::WrongNamespace {
- res: *res,
- expected_ns: TypeNS,
- });
- }
- _ => {}
+ // Constructors are picked up in the type namespace.
+ if let Res::Def(DefKind::Ctor(..), _) = res {
+ return Err(ResolutionFailure::WrongNamespace {
+ res: *res,
+ expected_ns: TypeNS,
+ });
}
}
Ok(v_res)
@@ -1536,7 +1533,7 @@ impl LinkCollector<'_, '_> {
disambiguator,
candidates.into_iter().filter_map(|res| res.err()).collect(),
);
- return vec![];
+ vec![]
} else if len == 1 {
candidates.into_iter().filter_map(|res| res.ok()).flatten().collect::>()
} else {
@@ -1850,7 +1847,7 @@ fn report_diagnostic(
(sp, MarkdownLinkRange::Destination(md_range))
}
MarkdownLinkRange::WholeLink(md_range) => (
- source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs.doc_strings),
+ source_span_for_markdown_range(tcx, dox, md_range, &item.attrs.doc_strings),
link_range.clone(),
),
};
@@ -1985,8 +1982,7 @@ fn resolution_failure(
.tcx
.resolutions(())
.all_macro_rules
- .get(&Symbol::intern(path_str))
- .is_some()
+ .contains_key(&Symbol::intern(path_str))
{
diag.note(format!(
"`macro_rules` named `{path_str}` exists in this crate, \
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index f358908032285..87f85c5731528 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -229,7 +229,7 @@ struct SyntheticImplCollector<'a, 'tcx> {
impls: Vec
- ,
}
-impl<'a, 'tcx> DocVisitor<'_> for SyntheticImplCollector<'a, 'tcx> {
+impl DocVisitor<'_> for SyntheticImplCollector<'_, '_> {
fn visit_item(&mut self, i: &Item) {
if i.is_struct() || i.is_enum() || i.is_union() {
// FIXME(eddyb) is this `doc(hidden)` check needed?
@@ -256,7 +256,7 @@ impl<'cache> ItemAndAliasCollector<'cache> {
}
}
-impl<'cache> DocVisitor<'_> for ItemAndAliasCollector<'cache> {
+impl DocVisitor<'_> for ItemAndAliasCollector<'_> {
fn visit_item(&mut self, i: &Item) {
self.items.insert(i.item_id);
@@ -276,7 +276,7 @@ struct BadImplStripper<'a> {
cache: &'a Cache,
}
-impl<'a> BadImplStripper<'a> {
+impl BadImplStripper<'_> {
fn keep_impl(&self, ty: &Type, is_deref: bool) -> bool {
if let Generic(_) = ty {
// keep impls made on generics
diff --git a/src/librustdoc/passes/lint.rs b/src/librustdoc/passes/lint.rs
index 35b62370abb29..1ecb53e61ac39 100644
--- a/src/librustdoc/passes/lint.rs
+++ b/src/librustdoc/passes/lint.rs
@@ -25,7 +25,7 @@ pub(crate) fn run_lints(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
krate
}
-impl<'a, 'tcx> DocVisitor<'_> for Linter<'a, 'tcx> {
+impl DocVisitor<'_> for Linter<'_, '_> {
fn visit_item(&mut self, item: &Item) {
let Some(hir_id) = DocContext::as_local_hir_id(self.cx.tcx, item.item_id) else {
// If non-local, no need to check anything.
@@ -34,7 +34,7 @@ impl<'a, 'tcx> DocVisitor<'_> for Linter<'a, 'tcx> {
let dox = item.doc_value();
if !dox.is_empty() {
let may_have_link = dox.contains(&[':', '['][..]);
- let may_have_block_comment_or_html = dox.contains(&['<', '>']);
+ let may_have_block_comment_or_html = dox.contains(['<', '>']);
// ~~~rust
// // This is a real, supported commonmark syntax for block code
// ~~~
diff --git a/src/librustdoc/passes/lint/bare_urls.rs b/src/librustdoc/passes/lint/bare_urls.rs
index 1397eadb2884c..77d7cf5772d67 100644
--- a/src/librustdoc/passes/lint/bare_urls.rs
+++ b/src/librustdoc/passes/lint/bare_urls.rs
@@ -18,7 +18,7 @@ use crate::html::markdown::main_body_opts;
pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
let report_diag = |cx: &DocContext<'_>, msg: &'static str, range: Range| {
- let sp = source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs.doc_strings)
+ let sp = source_span_for_markdown_range(cx.tcx, dox, &range, &item.attrs.doc_strings)
.unwrap_or_else(|| item.attr_span(cx.tcx));
cx.tcx.node_span_lint(crate::lint::BARE_URLS, hir_id, sp, |lint| {
lint.primary_message(msg)
@@ -34,14 +34,14 @@ pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
});
};
- let mut p = Parser::new_ext(&dox, main_body_opts()).into_offset_iter();
+ let mut p = Parser::new_ext(dox, main_body_opts()).into_offset_iter();
while let Some((event, range)) = p.next() {
match event {
Event::Text(s) => find_raw_urls(cx, &s, range, &report_diag),
// We don't want to check the text inside code blocks or links.
Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link { .. })) => {
- while let Some((event, _)) = p.next() {
+ for (event, _) in p.by_ref() {
match event {
Event::End(end)
if mem::discriminant(&end) == mem::discriminant(&tag.to_end()) =>
diff --git a/src/librustdoc/passes/lint/check_code_block_syntax.rs b/src/librustdoc/passes/lint/check_code_block_syntax.rs
index e0dc5b4c51333..20d65d8cd9f02 100644
--- a/src/librustdoc/passes/lint/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/lint/check_code_block_syntax.rs
@@ -150,7 +150,7 @@ impl Translate for BufferEmitter {
}
fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle {
- &**self.fallback_bundle
+ &self.fallback_bundle
}
}
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 223174838ade0..3fb154dc51549 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -15,7 +15,7 @@ use crate::html::markdown::main_body_opts;
pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
let tcx = cx.tcx;
let report_diag = |msg: String, range: &Range, is_open_tag: bool| {
- let sp = match source_span_for_markdown_range(tcx, &dox, range, &item.attrs.doc_strings) {
+ let sp = match source_span_for_markdown_range(tcx, dox, range, &item.attrs.doc_strings) {
Some(sp) => sp,
None => item.attr_span(tcx),
};
@@ -30,7 +30,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
let mut generics_end = range.end;
if let Some(Some(mut generics_start)) = (is_open_tag
&& dox[..generics_end].ends_with('>'))
- .then(|| extract_path_backwards(&dox, range.start))
+ .then(|| extract_path_backwards(dox, range.start))
{
while generics_start != 0
&& generics_end < dox.len()
@@ -39,19 +39,19 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
{
generics_end += 1;
generics_start -= 1;
- if let Some(new_start) = extract_path_backwards(&dox, generics_start) {
+ if let Some(new_start) = extract_path_backwards(dox, generics_start) {
generics_start = new_start;
}
- if let Some(new_end) = extract_path_forward(&dox, generics_end) {
+ if let Some(new_end) = extract_path_forward(dox, generics_end) {
generics_end = new_end;
}
}
- if let Some(new_end) = extract_path_forward(&dox, generics_end) {
+ if let Some(new_end) = extract_path_forward(dox, generics_end) {
generics_end = new_end;
}
let generics_sp = match source_span_for_markdown_range(
tcx,
- &dox,
+ dox,
&(generics_start..generics_end),
&item.attrs.doc_strings,
) {
@@ -125,7 +125,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
}
};
- let p = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ let p = Parser::new_with_broken_link_callback(dox, main_body_opts(), Some(&mut replacer))
.into_offset_iter();
for (event, range) in p {
@@ -233,7 +233,7 @@ fn extract_path_forward(text: &str, start_pos: usize) -> Option<usize> {
break;
}
}
- while let Some(c) = chars.next() {
+ for c in chars {
if is_id_continue(c) {
current_pos += c.len_utf8();
} else {
diff --git a/src/librustdoc/passes/lint/redundant_explicit_links.rs b/src/librustdoc/passes/lint/redundant_explicit_links.rs
index f3599688454b2..6bc4374c06b1d 100644
--- a/src/librustdoc/passes/lint/redundant_explicit_links.rs
+++ b/src/librustdoc/passes/lint/redundant_explicit_links.rs
@@ -35,12 +35,12 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId) {
}
}
-fn check_redundant_explicit_link_for_did<'md>(
+fn check_redundant_explicit_link_for_did(
cx: &DocContext<'_>,
item: &Item,
did: DefId,
hir_id: HirId,
- doc: &'md str,
+ doc: &str,
) {
let Some(local_item_id) = did.as_local() else {
return;
@@ -71,7 +71,7 @@ fn check_redundant_explicit_link_for_did<'md>(
return;
};
- check_redundant_explicit_link(cx, item, hir_id, &doc, &resolutions);
+ check_redundant_explicit_link(cx, item, hir_id, doc, resolutions);
}
fn check_redundant_explicit_link<'md>(
@@ -90,60 +90,52 @@ fn check_redundant_explicit_link<'md>(
.into_offset_iter();
while let Some((event, link_range)) = offset_iter.next() {
- match event {
- Event::Start(Tag::Link { link_type, dest_url, .. }) => {
- let link_data = collect_link_data(&mut offset_iter);
-
- if let Some(resolvable_link) = link_data.resolvable_link.as_ref() {
- if &link_data.display_link.replace('`', "") != resolvable_link {
- // Skips if display link does not match to actual
- // resolvable link, usually happens if display link
- // has several segments, e.g.
- // [this is just an `Option`](Option)
- continue;
- }
+ if let Event::Start(Tag::Link { link_type, dest_url, .. }) = event {
+ let link_data = collect_link_data(&mut offset_iter);
+
+ if let Some(resolvable_link) = link_data.resolvable_link.as_ref() {
+ if &link_data.display_link.replace('`', "") != resolvable_link {
+ // Skips if display link does not match to actual
+ // resolvable link, usually happens if display link
+ // has several segments, e.g.
+ // [this is just an `Option`](Option)
+ continue;
}
+ }
- let explicit_link = dest_url.to_string();
- let display_link = link_data.resolvable_link.clone()?;
-
- if explicit_link.ends_with(&display_link) || display_link.ends_with(&explicit_link)
- {
- match link_type {
- LinkType::Inline | LinkType::ReferenceUnknown => {
- check_inline_or_reference_unknown_redundancy(
- cx,
- item,
- hir_id,
- doc,
- resolutions,
- link_range,
- dest_url.to_string(),
- link_data,
- if link_type == LinkType::Inline {
- (b'(', b')')
- } else {
- (b'[', b']')
- },
- );
- }
- LinkType::Reference => {
- check_reference_redundancy(
- cx,
- item,
- hir_id,
- doc,
- resolutions,
- link_range,
- &dest_url,
- link_data,
- );
- }
- _ => {}
+ let explicit_link = dest_url.to_string();
+ let display_link = link_data.resolvable_link.clone()?;
+
+ if explicit_link.ends_with(&display_link) || display_link.ends_with(&explicit_link) {
+ match link_type {
+ LinkType::Inline | LinkType::ReferenceUnknown => {
+ check_inline_or_reference_unknown_redundancy(
+ cx,
+ item,
+ hir_id,
+ doc,
+ resolutions,
+ link_range,
+ dest_url.to_string(),
+ link_data,
+ if link_type == LinkType::Inline { (b'(', b')') } else { (b'[', b']') },
+ );
+ }
+ LinkType::Reference => {
+ check_reference_redundancy(
+ cx,
+ item,
+ hir_id,
+ doc,
+ resolutions,
+ link_range,
+ &dest_url,
+ link_data,
+ );
}
+ _ => {}
}
}
- _ => {}
}
}
@@ -169,18 +161,18 @@ fn check_inline_or_reference_unknown_redundancy(
if dest_res == display_res {
let link_span =
- source_span_for_markdown_range(cx.tcx, &doc, &link_range, &item.attrs.doc_strings)
+ source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
cx.tcx,
- &doc,
+ doc,
&offset_explicit_range(doc, link_range, open, close),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
cx.tcx,
- &doc,
- &resolvable_link_range,
+ doc,
+ resolvable_link_range,
&item.attrs.doc_strings,
)?;
@@ -210,27 +202,27 @@ fn check_reference_redundancy(
let (resolvable_link, resolvable_link_range) =
(&link_data.resolvable_link?, &link_data.resolvable_link_range?);
let (dest_res, display_res) =
- (find_resolution(resolutions, &dest)?, find_resolution(resolutions, resolvable_link)?);
+ (find_resolution(resolutions, dest)?, find_resolution(resolutions, resolvable_link)?);
if dest_res == display_res {
let link_span =
- source_span_for_markdown_range(cx.tcx, &doc, &link_range, &item.attrs.doc_strings)
+ source_span_for_markdown_range(cx.tcx, doc, &link_range, &item.attrs.doc_strings)
.unwrap_or(item.attr_span(cx.tcx));
let explicit_span = source_span_for_markdown_range(
cx.tcx,
- &doc,
+ doc,
&offset_explicit_range(doc, link_range.clone(), b'[', b']'),
&item.attrs.doc_strings,
)?;
let display_span = source_span_for_markdown_range(
cx.tcx,
- &doc,
- &resolvable_link_range,
+ doc,
+ resolvable_link_range,
&item.attrs.doc_strings,
)?;
let def_span = source_span_for_markdown_range(
cx.tcx,
- &doc,
+ doc,
&offset_reference_def_range(doc, dest, link_range),
&item.attrs.doc_strings,
)?;
@@ -263,7 +255,7 @@ fn collect_link_data<'input, F: BrokenLinkCallback<'input>>(
let mut display_link = String::new();
let mut is_resolvable = true;
- while let Some((event, range)) = offset_iter.next() {
+ for (event, range) in offset_iter.by_ref() {
match event {
Event::Text(code) => {
let code = code.to_string();
diff --git a/src/librustdoc/passes/lint/unescaped_backticks.rs b/src/librustdoc/passes/lint/unescaped_backticks.rs
index d79f682a580f8..88f4c3ac1cd79 100644
--- a/src/librustdoc/passes/lint/unescaped_backticks.rs
+++ b/src/librustdoc/passes/lint/unescaped_backticks.rs
@@ -22,7 +22,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
.find(|link| *link.original_text == *broken_link.reference)
.map(|link| ((*link.href).into(), (*link.new_text).into()))
};
- let parser = Parser::new_with_broken_link_callback(&dox, main_body_opts(), Some(&mut replacer))
+ let parser = Parser::new_with_broken_link_callback(dox, main_body_opts(), Some(&mut replacer))
.into_offset_iter();
let mut element_stack = Vec::new();
@@ -44,7 +44,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// use the span of the entire attribute as a fallback.
let span = source_span_for_markdown_range(
tcx,
- &dox,
+ dox,
&(backtick_index..backtick_index + 1),
&item.attrs.doc_strings,
)
@@ -61,12 +61,12 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// "foo` `bar`" -> "`foo` `bar`"
if let Some(suggest_index) =
clamp_start(guess, &element.suggestible_ranges)
- && can_suggest_backtick(&dox, suggest_index)
+ && can_suggest_backtick(dox, suggest_index)
{
suggest_insertion(
cx,
item,
- &dox,
+ dox,
lint,
suggest_index,
'`',
@@ -80,11 +80,11 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// Don't `clamp_end` here, because the suggestion is guaranteed to be inside
// an inline code node and we intentionally "break" the inline code here.
let suggest_index = guess;
- if can_suggest_backtick(&dox, suggest_index) {
+ if can_suggest_backtick(dox, suggest_index) {
suggest_insertion(
cx,
item,
- &dox,
+ dox,
lint,
suggest_index,
'`',
@@ -98,15 +98,15 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
if !element.prev_code_guess.is_confident() {
// "`foo` bar`" -> "`foo` `bar`"
if let Some(guess) =
- guess_start_of_code(&dox, element.element_range.start..backtick_index)
+ guess_start_of_code(dox, element.element_range.start..backtick_index)
&& let Some(suggest_index) =
clamp_start(guess, &element.suggestible_ranges)
- && can_suggest_backtick(&dox, suggest_index)
+ && can_suggest_backtick(dox, suggest_index)
{
suggest_insertion(
cx,
item,
- &dox,
+ dox,
lint,
suggest_index,
'`',
@@ -120,16 +120,16 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// if we already suggested opening backtick. For example:
// "foo`." -> "`foo`." or "foo`s" -> "`foo`s".
if let Some(guess) =
- guess_end_of_code(&dox, backtick_index + 1..element.element_range.end)
+ guess_end_of_code(dox, backtick_index + 1..element.element_range.end)
&& let Some(suggest_index) =
clamp_end(guess, &element.suggestible_ranges)
- && can_suggest_backtick(&dox, suggest_index)
+ && can_suggest_backtick(dox, suggest_index)
&& (!help_emitted || suggest_index - backtick_index > 2)
{
suggest_insertion(
cx,
item,
- &dox,
+ dox,
lint,
suggest_index,
'`',
@@ -148,7 +148,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
suggest_insertion(
cx,
item,
- &dox,
+ dox,
lint,
backtick_index,
'\\',
@@ -177,13 +177,13 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
let is_confident = text_inside.starts_with(char::is_whitespace)
|| text_inside.ends_with(char::is_whitespace);
- if let Some(guess) = guess_end_of_code(&dox, range_inside) {
+ if let Some(guess) = guess_end_of_code(dox, range_inside) {
// Find earlier end of code.
element.prev_code_guess = PrevCodeGuess::End { guess, is_confident };
} else {
// Find alternate start of code.
let range_before = element.element_range.start..event_range.start;
- if let Some(guess) = guess_start_of_code(&dox, range_before) {
+ if let Some(guess) = guess_start_of_code(dox, range_before) {
element.prev_code_guess = PrevCodeGuess::Start { guess, is_confident };
}
}
@@ -421,7 +421,7 @@ fn suggest_insertion(
if let Some(span) = source_span_for_markdown_range(
cx.tcx,
- &dox,
+ dox,
&(insert_index..insert_index),
&item.attrs.doc_strings,
) {
diff --git a/src/librustdoc/passes/lint/unportable_markdown.rs b/src/librustdoc/passes/lint/unportable_markdown.rs
index f8368a866c882..a3c3134f4c2c5 100644
--- a/src/librustdoc/passes/lint/unportable_markdown.rs
+++ b/src/librustdoc/passes/lint/unportable_markdown.rs
@@ -49,8 +49,8 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
| cmarkn::Options::ENABLE_TASKLISTS
| cmarkn::Options::ENABLE_SMART_PUNCTUATION
}
- let mut parser_new = cmarkn::Parser::new_ext(&dox, main_body_opts_new()).into_offset_iter();
- while let Some((event, span)) = parser_new.next() {
+ let parser_new = cmarkn::Parser::new_ext(dox, main_body_opts_new()).into_offset_iter();
+ for (event, span) in parser_new {
if let cmarkn::Event::Start(cmarkn::Tag::BlockQuote(_)) = event {
if !dox[span.clone()].starts_with("> ") {
spaceless_block_quotes.insert(span.start);
@@ -71,8 +71,8 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
| cmarko::Options::ENABLE_TASKLISTS
| cmarko::Options::ENABLE_SMART_PUNCTUATION
}
- let mut parser_old = cmarko::Parser::new_ext(&dox, main_body_opts_old()).into_offset_iter();
- while let Some((event, span)) = parser_old.next() {
+ let parser_old = cmarko::Parser::new_ext(dox, main_body_opts_old()).into_offset_iter();
+ for (event, span) in parser_old {
if let cmarko::Event::Start(cmarko::Tag::BlockQuote) = event {
if !dox[span.clone()].starts_with("> ") {
spaceless_block_quotes.remove(&span.start);
@@ -88,13 +88,13 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
for start in spaceless_block_quotes {
let (span, precise) =
- source_span_for_markdown_range(tcx, &dox, &(start..start + 1), &item.attrs.doc_strings)
+ source_span_for_markdown_range(tcx, dox, &(start..start + 1), &item.attrs.doc_strings)
.map(|span| (span, true))
.unwrap_or_else(|| (item.attr_span(tcx), false));
tcx.node_span_lint(crate::lint::UNPORTABLE_MARKDOWN, hir_id, span, |lint| {
lint.primary_message("unportable markdown");
- lint.help(format!("confusing block quote with no space after the `>` marker"));
+ lint.help("confusing block quote with no space after the `>` marker".to_string());
if precise {
lint.span_suggestion(
span.shrink_to_hi(),
@@ -113,7 +113,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
}
for (_caret, span) in missing_footnote_references {
let (ref_span, precise) =
- source_span_for_markdown_range(tcx, &dox, &span, &item.attrs.doc_strings)
+ source_span_for_markdown_range(tcx, dox, &span, &item.attrs.doc_strings)
.map(|span| (span, true))
.unwrap_or_else(|| (item.attr_span(tcx), false));
diff --git a/src/librustdoc/passes/propagate_doc_cfg.rs b/src/librustdoc/passes/propagate_doc_cfg.rs
index 350be37f553da..572c9bf7552fa 100644
--- a/src/librustdoc/passes/propagate_doc_cfg.rs
+++ b/src/librustdoc/passes/propagate_doc_cfg.rs
@@ -27,7 +27,7 @@ struct CfgPropagator<'a, 'tcx> {
cx: &'a mut DocContext<'tcx>,
}
-impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
+impl CfgPropagator<'_, '_> {
// Some items need to merge their attributes with their parents' otherwise a few of them
// (mostly `cfg` ones) will be missing.
fn merge_with_parent_attributes(&mut self, item: &mut Item) {
@@ -65,7 +65,7 @@ impl<'a, 'tcx> CfgPropagator<'a, 'tcx> {
}
}
-impl<'a, 'tcx> DocFolder for CfgPropagator<'a, 'tcx> {
+impl DocFolder for CfgPropagator<'_, '_> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
let old_parent_cfg = self.parent_cfg.clone();
diff --git a/src/librustdoc/passes/propagate_stability.rs b/src/librustdoc/passes/propagate_stability.rs
index a28487cc79e5d..a81b130a218b3 100644
--- a/src/librustdoc/passes/propagate_stability.rs
+++ b/src/librustdoc/passes/propagate_stability.rs
@@ -30,7 +30,7 @@ struct StabilityPropagator<'a, 'tcx> {
cx: &'a mut DocContext<'tcx>,
}
-impl<'a, 'tcx> DocFolder for StabilityPropagator<'a, 'tcx> {
+impl DocFolder for StabilityPropagator<'_, '_> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
let parent_stability = self.parent_stability;
diff --git a/src/librustdoc/passes/strip_aliased_non_local.rs b/src/librustdoc/passes/strip_aliased_non_local.rs
index a078eec048ece..fa7737bc14386 100644
--- a/src/librustdoc/passes/strip_aliased_non_local.rs
+++ b/src/librustdoc/passes/strip_aliased_non_local.rs
@@ -21,7 +21,7 @@ struct AliasedNonLocalStripper<'tcx> {
tcx: TyCtxt<'tcx>,
}
-impl<'tcx> DocFolder for AliasedNonLocalStripper<'tcx> {
+impl DocFolder for AliasedNonLocalStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
Some(match i.kind {
clean::TypeAliasItem(..) => {
@@ -39,7 +39,7 @@ struct NonLocalStripper<'tcx> {
tcx: TyCtxt<'tcx>,
}
-impl<'tcx> DocFolder for NonLocalStripper<'tcx> {
+impl DocFolder for NonLocalStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
// If not local, we want to respect the original visibility of
// the field and not the one given by the user for the currrent crate.
@@ -50,7 +50,7 @@ impl<'tcx> DocFolder for NonLocalStripper<'tcx> {
{
if i.is_doc_hidden()
// Default to *not* stripping items with inherited visibility.
- || i.visibility(self.tcx).map_or(false, |viz| viz != Visibility::Public)
+ || i.visibility(self.tcx).is_some_and(|viz| viz != Visibility::Public)
{
return Some(strip_item(i));
}
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
index 4ef5f7f20a917..a71bb62e56c74 100644
--- a/src/librustdoc/passes/strip_hidden.rs
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -57,7 +57,7 @@ struct Stripper<'a, 'tcx> {
last_reexport: Option<LocalDefId>,
}
-impl<'a, 'tcx> Stripper<'a, 'tcx> {
+impl Stripper<'_, '_> {
fn set_last_reexport_then_fold_item(&mut self, i: Item) -> Item {
let prev_from_reexport = self.last_reexport;
if i.inline_stmt_id.is_some() {
@@ -86,7 +86,7 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
}
}
-impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
+impl DocFolder for Stripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
let has_doc_hidden = i.is_doc_hidden();
let is_impl_or_exported_macro = match i.kind {
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index 98b3446c26d23..60909754b3330 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -37,7 +37,7 @@ fn is_item_reachable(
}
}
-impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
+impl DocFolder for Stripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match i.kind {
clean::StrippedItem(..) => {
@@ -171,7 +171,7 @@ pub(crate) struct ImplStripper<'a, 'tcx> {
pub(crate) document_hidden: bool,
}
-impl<'a> ImplStripper<'a, '_> {
+impl ImplStripper<'_, '_> {
#[inline]
fn should_keep_impl(&self, item: &Item, for_def_id: DefId) -> bool {
if !for_def_id.is_local() || self.retained.contains(&for_def_id.into()) {
@@ -193,7 +193,7 @@ impl<'a> ImplStripper<'a, '_> {
}
}
-impl<'a> DocFolder for ImplStripper<'a, '_> {
+impl DocFolder for ImplStripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
if let clean::ImplItem(ref imp) = i.kind {
// Impl blocks can be skipped if they are: empty; not a trait impl; and have no
@@ -259,7 +259,7 @@ pub(crate) struct ImportStripper<'tcx> {
pub(crate) document_hidden: bool,
}
-impl<'tcx> ImportStripper<'tcx> {
+impl ImportStripper<'_> {
fn import_should_be_hidden(&self, i: &Item, imp: &clean::Import) -> bool {
if self.is_json_output {
// FIXME: This should be handled the same way as for HTML output.
@@ -270,11 +270,11 @@ impl<'tcx> ImportStripper<'tcx> {
}
}
-impl<'tcx> DocFolder for ImportStripper<'tcx> {
+impl DocFolder for ImportStripper<'_> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match &i.kind {
clean::ImportItem(imp)
- if !self.document_hidden && self.import_should_be_hidden(&i, &imp) =>
+ if !self.document_hidden && self.import_should_be_hidden(&i, imp) =>
{
None
}
diff --git a/src/librustdoc/theme.rs b/src/librustdoc/theme.rs
index 2c00bb7e13295..a49fb06bde35f 100644
--- a/src/librustdoc/theme.rs
+++ b/src/librustdoc/theme.rs
@@ -54,7 +54,7 @@ fn skip_comment(iter: &mut Peekable<Chars<'_>>) {
/// Skips a line comment (`//`).
fn skip_line_comment(iter: &mut Peekable<Chars<'_>>) {
- while let Some(c) = iter.next() {
+ for c in iter.by_ref() {
if c == '\n' {
break;
}
diff --git a/src/librustdoc/visit.rs b/src/librustdoc/visit.rs
index bfa285c57fa93..c2e8ffd7665b8 100644
--- a/src/librustdoc/visit.rs
+++ b/src/librustdoc/visit.rs
@@ -55,7 +55,7 @@ pub(crate) trait DocVisitor<'a>: Sized {
/// Don't override!
fn visit_item_recur(&mut self, item: &'a Item) {
match &item.kind {
- StrippedItem(i) => self.visit_inner_recur(&*i),
+ StrippedItem(i) => self.visit_inner_recur(i),
_ => self.visit_inner_recur(&item.kind),
}
}
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 31c33fbf49737..11f06f7d917c5 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -312,7 +312,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
Node::Item(_) if is_bang_macro && !please_inline && renamed.is_some() && is_hidden => {
return false;
}
- Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
+ Node::Item(&hir::Item { kind: hir::ItemKind::Mod(m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for &i in m.item_ids {
let i = tcx.hir().item(i);
@@ -476,7 +476,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
self.add_to_current_mod(item, renamed, import_id);
}
}
- hir::ItemKind::Macro(ref macro_def, _) => {
+ hir::ItemKind::Macro(macro_def, _) => {
// `#[macro_export] macro_rules!` items are handled separately in `visit()`,
// above, since they need to be documented at the module top level. Accordingly,
// we only want to handle macros if one of three conditions holds:
@@ -496,7 +496,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
self.add_to_current_mod(item, renamed, import_id);
}
}
- hir::ItemKind::Mod(ref m) => {
+ hir::ItemKind::Mod(m) => {
self.enter_mod(item.owner_id.def_id, m, name, renamed, import_id);
}
hir::ItemKind::Fn(..)
@@ -560,7 +560,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
// We need to implement this visitor so it'll go everywhere and retrieve items we're interested in
// such as impl blocks in const blocks.
-impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
+impl<'tcx> Visitor<'tcx> for RustdocVisitor<'_, 'tcx> {
type NestedFilter = nested_filter::All;
fn nested_visit_map(&mut self) -> Self::Map {
diff --git a/tests/crashes/126646.rs b/tests/crashes/126646.rs
deleted file mode 100644
index 24e3530320a89..0000000000000
--- a/tests/crashes/126646.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-//@ known-bug: rust-lang/rust#126646
-mod foo {
- pub trait Callable {
- type Output;
- fn call() -> Self::Output;
- }
-
- impl<'a, V: ?Sized> Callable for &'a () {
- type Output = ();
- }
-}
-use foo::*;
-
-fn test<'a>() -> impl Sized {
- <&'a () as Callable>::call()
-}
-
-fn main() {}
diff --git a/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.rs b/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.rs
new file mode 100644
index 0000000000000..30ca3d271b815
--- /dev/null
+++ b/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.rs
@@ -0,0 +1,30 @@
+// Make sure that we don't accidentally collect an RPITIT hidden type that does not
+// hold for all instantiations of the trait signature.
+
+trait MkStatic {
+ fn mk_static(self) -> &'static str;
+}
+
+impl MkStatic for &'static str {
+ fn mk_static(self) -> &'static str { self }
+}
+
+trait Foo {
+ fn foo<'a: 'static, 'late>(&'late self) -> impl MkStatic;
+}
+
+impl Foo for str {
+ fn foo<'a: 'static>(&'a self) -> impl MkStatic + 'static {
+ //~^ ERROR method not compatible with trait
+ self
+ }
+}
+
+fn call_foo<T: Foo + ?Sized>(t: &T) -> &'static str {
+ t.foo().mk_static()
+}
+
+fn main() {
+ let s = call_foo(String::from("hello, world").as_str());
+ println!("> {s}");
+}
diff --git a/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.stderr b/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.stderr
new file mode 100644
index 0000000000000..95d8699e19ff1
--- /dev/null
+++ b/tests/ui/impl-trait/in-trait/do-not-imply-from-trait-impl.stderr
@@ -0,0 +1,22 @@
+error[E0308]: method not compatible with trait
+ --> $DIR/do-not-imply-from-trait-impl.rs:17:38
+ |
+LL | fn foo<'a: 'static>(&'a self) -> impl MkStatic + 'static {
+ | ^^^^^^^^^^^^^^^^^^^^^^^ lifetime mismatch
+ |
+ = note: expected signature `fn(&'late _) -> _`
+ found signature `fn(&'a _) -> _`
+note: the lifetime `'late` as defined here...
+ --> $DIR/do-not-imply-from-trait-impl.rs:13:25
+ |
+LL | fn foo<'a: 'static, 'late>(&'late self) -> impl MkStatic;
+ | ^^^^^
+note: ...does not necessarily outlive the lifetime `'a` as defined here
+ --> $DIR/do-not-imply-from-trait-impl.rs:17:12
+ |
+LL | fn foo<'a: 'static>(&'a self) -> impl MkStatic + 'static {
+ | ^^
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/impl-trait/in-trait/method-signature-matches.lt.stderr b/tests/ui/impl-trait/in-trait/method-signature-matches.lt.stderr
index 6f6b787b6fe1b..a23879eb6c376 100644
--- a/tests/ui/impl-trait/in-trait/method-signature-matches.lt.stderr
+++ b/tests/ui/impl-trait/in-trait/method-signature-matches.lt.stderr
@@ -11,12 +11,12 @@ note: type in trait
|
LL | fn early<'early, T>(x: &'early T) -> impl Sized;
| ^^^^^^^^^
- = note: expected signature `fn(&T)`
- found signature `fn(&'late ())`
+ = note: expected signature `fn(&'early T)`
+ found signature `fn(&())`
help: change the parameter type to match the trait
|
-LL | fn early<'late, T>(_: &T) {}
- | ~~
+LL | fn early<'late, T>(_: &'early T) {}
+ | ~~~~~~~~~
error: aborting due to 1 previous error
diff --git a/tests/ui/impl-trait/in-trait/rpitit-hidden-types-self-implied-wf.stderr b/tests/ui/impl-trait/in-trait/rpitit-hidden-types-self-implied-wf.stderr
index 3430055dab171..4c10422f985f1 100644
--- a/tests/ui/impl-trait/in-trait/rpitit-hidden-types-self-implied-wf.stderr
+++ b/tests/ui/impl-trait/in-trait/rpitit-hidden-types-self-implied-wf.stderr
@@ -6,9 +6,9 @@ LL | fn extend(s: &str) -> (Option<&'static &'_ ()>, &'static str) {
|
= note: the pointer is valid for the static lifetime
note: but the referenced data is only valid for the anonymous lifetime defined here
- --> $DIR/rpitit-hidden-types-self-implied-wf.rs:6:18
+ --> $DIR/rpitit-hidden-types-self-implied-wf.rs:2:18
|
-LL | fn extend(s: &str) -> (Option<&'static &'_ ()>, &'static str) {
+LL | fn extend(_: &str) -> (impl Sized + '_, &'static str);
| ^^^^
error: aborting due to 1 previous error
diff --git a/tests/ui/impl-trait/in-trait/signature-mismatch.failure.stderr b/tests/ui/impl-trait/in-trait/signature-mismatch.failure.stderr
index 56b83cbca77ac..b27d7870955ed 100644
--- a/tests/ui/impl-trait/in-trait/signature-mismatch.failure.stderr
+++ b/tests/ui/impl-trait/in-trait/signature-mismatch.failure.stderr
@@ -1,14 +1,15 @@
-error[E0623]: lifetime mismatch
+error[E0477]: the type `impl Future