Skip to content

Commit 72fd41a

Browse files
committed
Auto merge of #98335 - JohnTitor:rollup-j2zudxv, r=JohnTitor
Rollup of 11 pull requests Successful merges: - #94033 (Improve docs for `is_running` to explain use case) - #97269 (adjust transmute const stabilization version) - #97805 (Add proper tracing spans to rustc_trait_selection::traits::error_reporting) - #98022 (Fix erroneous span for borrowck error) - #98124 (Improve loading of crates.js and sidebar-items.js) - #98278 (Some token stream cleanups) - #98306 (`try_fold_unevaluated` for infallible folders) - #98313 (Remove lies in comments.) - #98323 (:arrow_up: rust-analyzer) - #98329 (Avoid an ICE and instead let the compiler report a useful error) - #98330 (update ioslice docs to use shared slices) Failed merges: r? `@ghost` `@rustbot` modify labels: rollup
2 parents a25b131 + e509242 commit 72fd41a

File tree

32 files changed

+359
-334
lines changed

32 files changed

+359
-334
lines changed

compiler/rustc_ast/src/tokenstream.rs

+57-95
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
2525
use rustc_span::{Span, DUMMY_SP};
2626
use smallvec::{smallvec, SmallVec};
2727

28-
use std::{fmt, iter, mem};
28+
use std::{fmt, iter};
2929

3030
/// When the main Rust parser encounters a syntax-extension invocation, it
3131
/// parses the arguments to the invocation as a token tree. This is a very
@@ -399,45 +399,6 @@ impl TokenStream {
399399
self.0.len()
400400
}
401401

402-
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
403-
match streams.len() {
404-
0 => TokenStream::default(),
405-
1 => streams.pop().unwrap(),
406-
_ => {
407-
// We are going to extend the first stream in `streams` with
408-
// the elements from the subsequent streams. This requires
409-
// using `make_mut()` on the first stream, and in practice this
410-
// doesn't cause cloning 99.9% of the time.
411-
//
412-
// One very common use case is when `streams` has two elements,
413-
// where the first stream has any number of elements within
414-
// (often 1, but sometimes many more) and the second stream has
415-
// a single element within.
416-
417-
// Determine how much the first stream will be extended.
418-
// Needed to avoid quadratic blow up from on-the-fly
419-
// reallocations (#57735).
420-
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
421-
422-
// Get the first stream. If it's `None`, create an empty
423-
// stream.
424-
let mut iter = streams.drain(..);
425-
let mut first_stream_lrc = iter.next().unwrap().0;
426-
427-
// Append the elements to the first stream, after reserving
428-
// space for them.
429-
let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
430-
first_vec_mut.reserve(num_appends);
431-
for stream in iter {
432-
first_vec_mut.extend(stream.0.iter().cloned());
433-
}
434-
435-
// Create the final `TokenStream`.
436-
TokenStream(first_stream_lrc)
437-
}
438-
}
439-
}
440-
441402
pub fn trees(&self) -> CursorRef<'_> {
442403
CursorRef::new(self)
443404
}
@@ -562,50 +523,65 @@ impl TokenStreamBuilder {
562523
}
563524

564525
pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
565-
let mut stream = stream.into();
566-
567-
// If `self` is not empty and the last tree within the last stream is a
568-
// token tree marked with `Joint`...
569-
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut()
570-
&& let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last()
571-
// ...and `stream` is not empty and the first tree within it is
572-
// a token tree...
573-
&& let TokenStream(ref mut stream_lrc) = stream
574-
&& let Some((TokenTree::Token(token), spacing)) = stream_lrc.first()
575-
// ...and the two tokens can be glued together...
576-
&& let Some(glued_tok) = last_token.glue(&token)
577-
{
578-
// ...then do so, by overwriting the last token
579-
// tree in `self` and removing the first token tree
580-
// from `stream`. This requires using `make_mut()`
581-
// on the last stream in `self` and on `stream`,
582-
// and in practice this doesn't cause cloning 99.9%
583-
// of the time.
584-
585-
// Overwrite the last token tree with the merged
586-
// token.
587-
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
588-
*last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
589-
590-
// Remove the first token tree from `stream`. (This
591-
// is almost always the only tree in `stream`.)
592-
let stream_vec_mut = Lrc::make_mut(stream_lrc);
593-
stream_vec_mut.remove(0);
594-
595-
// Don't push `stream` if it's empty -- that could
596-
// block subsequent token gluing, by getting
597-
// between two token trees that should be glued
598-
// together.
599-
if !stream.is_empty() {
600-
self.0.push(stream);
601-
}
602-
return;
603-
}
604-
self.0.push(stream);
526+
self.0.push(stream.into());
605527
}
606528

607529
pub fn build(self) -> TokenStream {
608-
TokenStream::from_streams(self.0)
530+
let mut streams = self.0;
531+
match streams.len() {
532+
0 => TokenStream::default(),
533+
1 => streams.pop().unwrap(),
534+
_ => {
535+
// We will extend the first stream in `streams` with the
536+
// elements from the subsequent streams. This requires using
537+
// `make_mut()` on the first stream, and in practice this
538+
// doesn't cause cloning 99.9% of the time.
539+
//
540+
// One very common use case is when `streams` has two elements,
541+
// where the first stream has any number of elements within
542+
// (often 1, but sometimes many more) and the second stream has
543+
// a single element within.
544+
545+
// Determine how much the first stream will be extended.
546+
// Needed to avoid quadratic blow up from on-the-fly
547+
// reallocations (#57735).
548+
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
549+
550+
// Get the first stream, which will become the result stream.
551+
// If it's `None`, create an empty stream.
552+
let mut iter = streams.drain(..);
553+
let mut res_stream_lrc = iter.next().unwrap().0;
554+
555+
// Append the subsequent elements to the result stream, after
556+
// reserving space for them.
557+
let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc);
558+
res_vec_mut.reserve(num_appends);
559+
for stream in iter {
560+
let stream_iter = stream.0.iter().cloned();
561+
562+
// If (a) `res_mut_vec` is not empty and the last tree
563+
// within it is a token tree marked with `Joint`, and (b)
564+
// `stream` is not empty and the first tree within it is a
565+
// token tree, and (c) the two tokens can be glued
566+
// together...
567+
if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
568+
&& let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
569+
&& let Some(glued_tok) = last_tok.glue(&tok)
570+
{
571+
// ...then overwrite the last token tree in
572+
// `res_vec_mut` with the glued token, and skip the
573+
// first token tree from `stream`.
574+
*res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
575+
res_vec_mut.extend(stream_iter.skip(1));
576+
} else {
577+
// Append all of `stream`.
578+
res_vec_mut.extend(stream_iter);
579+
}
580+
}
581+
582+
TokenStream(res_stream_lrc)
583+
}
584+
}
609585
}
610586
}
611587

@@ -679,20 +655,6 @@ impl Cursor {
679655
})
680656
}
681657

682-
pub fn index(&self) -> usize {
683-
self.index
684-
}
685-
686-
pub fn append(&mut self, new_stream: TokenStream) {
687-
if new_stream.is_empty() {
688-
return;
689-
}
690-
let index = self.index;
691-
let stream = mem::take(&mut self.stream);
692-
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
693-
self.index = index;
694-
}
695-
696658
pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
697659
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
698660
}

compiler/rustc_borrowck/src/type_check/mod.rs

+14-10
Original file line numberDiff line numberDiff line change
@@ -357,12 +357,20 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
357357
.add_element(live_region_vid, location);
358358
});
359359

360+
// HACK(compiler-errors): Constants that are gathered into Body.required_consts
361+
// have their locations erased...
362+
let locations = if location != Location::START {
363+
location.to_locations()
364+
} else {
365+
Locations::All(constant.span)
366+
};
367+
360368
if let Some(annotation_index) = constant.user_ty {
361369
if let Err(terr) = self.cx.relate_type_and_user_type(
362370
constant.literal.ty(),
363371
ty::Variance::Invariant,
364372
&UserTypeProjection { base: annotation_index, projs: vec![] },
365-
location.to_locations(),
373+
locations,
366374
ConstraintCategory::Boring,
367375
) {
368376
let annotation = &self.cx.user_type_annotations[annotation_index];
@@ -390,12 +398,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
390398
promoted: &Body<'tcx>,
391399
ty,
392400
san_ty| {
393-
if let Err(terr) = verifier.cx.eq_types(
394-
ty,
395-
san_ty,
396-
location.to_locations(),
397-
ConstraintCategory::Boring,
398-
) {
401+
if let Err(terr) =
402+
verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring)
403+
{
399404
span_mirbug!(
400405
verifier,
401406
promoted,
@@ -416,7 +421,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
416421
}
417422
} else {
418423
if let Err(terr) = self.cx.fully_perform_op(
419-
location.to_locations(),
424+
locations,
420425
ConstraintCategory::Boring,
421426
self.cx.param_env.and(type_op::ascribe_user_type::AscribeUserType::new(
422427
constant.literal.ty(),
@@ -435,7 +440,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
435440
}
436441
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
437442
let unnormalized_ty = tcx.type_of(static_def_id);
438-
let locations = location.to_locations();
439443
let normalized_ty = self.cx.normalize(unnormalized_ty, locations);
440444
let literal_ty = constant.literal.ty().builtin_deref(true).unwrap().ty;
441445

@@ -454,7 +458,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
454458
self.cx.normalize_and_prove_instantiated_predicates(
455459
def_id,
456460
instantiated_predicates,
457-
location.to_locations(),
461+
locations,
458462
);
459463
}
460464
}

0 commit comments

Comments (0)