Auto merge of #139525 - Zalathar:rollup-5t5xsrw, r=Zalathar

Rollup of 10 pull requests

Successful merges:

 - #138676 (Implement overflow for infinite implied lifetime bounds)
 - #139024 (Make error message for missing fields with `..` and without `..` more consistent)
 - #139098 (Tell LLVM about impossible niche tags)
 - #139124 (compiler: report error when trait object type param reference self)
 - #139321 (Update to new rinja version (askama))
 - #139346 (Don't construct preds w escaping bound vars in `diagnostic_hir_wf_check`)
 - #139386 (make it possible to use stage0 libtest on compiletest)
 - #139421 (Fix trait upcasting to dyn type with no principal when there are projections)
 - #139464 (Allow for reparsing failure when reparsing a pasted metavar.)
 - #139490 (Update some comment/docs related to "extern intrinsic" removal)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-04-08 12:05:54 +00:00
commit f820b75fee
52 changed files with 1003 additions and 184 deletions

View file

@ -186,6 +186,48 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "askama"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a4e46abb203e00ef226442d452769233142bbfdd79c3941e84c8e61c4112543"
dependencies = [
"askama_derive",
"itoa",
"percent-encoding",
"serde",
"serde_json",
]
[[package]]
name = "askama_derive"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54398906821fd32c728135f7b351f0c7494ab95ae421d41b6f5a020e158f28a6"
dependencies = [
"askama_parser",
"basic-toml",
"memchr",
"proc-macro2",
"quote",
"rustc-hash 2.1.1",
"serde",
"serde_derive",
"syn 2.0.100",
]
[[package]]
name = "askama_parser"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f"
dependencies = [
"memchr",
"serde",
"serde_derive",
"winnow 0.7.4",
]
[[package]]
name = "autocfg"
version = "1.4.0"
@ -1345,8 +1387,8 @@ name = "generate-copyright"
version = "0.1.0"
dependencies = [
"anyhow",
"askama",
"cargo_metadata 0.18.1",
"rinja",
"serde",
"serde_json",
"thiserror 1.0.69",
@ -3069,9 +3111,7 @@ version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5"
dependencies = [
"humansize",
"itoa",
"percent-encoding",
"rinja_derive",
]
@ -4628,6 +4668,7 @@ name = "rustdoc"
version = "0.0.0"
dependencies = [
"arrayvec",
"askama",
"base64",
"expect-test",
"indexmap",
@ -4636,7 +4677,6 @@ dependencies = [
"pulldown-cmark 0.9.6",
"pulldown-cmark-escape",
"regex",
"rinja",
"rustdoc-json-types",
"serde",
"serde_json",
@ -5426,7 +5466,7 @@ dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
"winnow 0.5.40",
]
[[package]]
@ -6437,6 +6477,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "winnow"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e97b544156e9bebe1a0ffbc03484fc1ffe3100cbce3ffb17eac35f7cdd7ab36"
dependencies = [
"memchr",
]
[[package]]
name = "winsplit"
version = "0.1.0"

View file

@ -28,7 +28,7 @@
# - A new option
# - A change in the default values
#
# If the change-id does not match the version currently in use, x.py will
# If the change-id does not match the version currently in use, x.py will
# display the changes made to the bootstrap.
# To suppress these warnings, you can set change-id = "ignore".
#change-id = <latest change id in src/bootstrap/src/utils/change_tracker.rs>
@ -442,6 +442,9 @@
# What custom diff tool to use for displaying compiletest tests.
#compiletest-diff-tool = <none>
# Whether to use the precompiled stage0 libtest with compiletest.
#compiletest-use-stage0-libtest = true
# Indicates whether ccache is used when building certain artifacts (e.g. LLVM).
# Set to `true` to use the first `ccache` in PATH, or set an absolute path to use
# a specific version.

View file

@ -9,6 +9,7 @@ use rustc_middle::mir::{self, ConstValue};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use tracing::{debug, instrument};
use super::place::{PlaceRef, PlaceValue};
@ -496,6 +497,18 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
_ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)),
};
// Layout ensures that we only get here for cases where the discriminant
// value and the variant index match, since that's all `Niche` can encode.
// But for emphasis and debugging, let's double-check one anyway.
debug_assert_eq!(
self.layout
.ty
.discriminant_for_variant(bx.tcx(), untagged_variant)
.unwrap()
.val,
u128::from(untagged_variant.as_u32()),
);
let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
// We have a subrange `niche_start..=niche_end` inside `range`.
@ -537,6 +550,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
relative_discr,
bx.cx().const_uint(tag_llty, relative_max as u64),
);
// Thanks to parameter attributes and load metadata, LLVM already knows
// the general valid range of the tag. It's possible, though, for there
// to be an impossible value *in the middle*, which those ranges don't
// communicate, so it's worth an `assume` to let the optimizer know.
if niche_variants.contains(&untagged_variant)
&& bx.cx().sess().opts.optimize != OptLevel::No
{
let impossible =
u64::from(untagged_variant.as_u32() - niche_variants.start().as_u32());
let impossible = bx.cx().const_uint(tag_llty, impossible);
let ne = bx.icmp(IntPredicate::IntNE, relative_discr, impossible);
bx.assume(ne);
}
(is_niche, cast_tag, niche_variants.start().as_u32() as u128)
};
@ -553,7 +581,9 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
);
// In principle we could insert assumes on the possible range of `discr`, but
// currently in LLVM this seems to be a pessimization.
// currently in LLVM this isn't worth it because the original `tag` will
// have either a `range` parameter attribute or `!range` metadata,
// or come from a `transmute` that already `assume`d it.
discr
}

View file

@ -486,6 +486,9 @@ hir_analysis_self_in_impl_self =
`Self` is not valid in the self type of an impl block
.note = replace `Self` with a different type
hir_analysis_self_in_type_alias = `Self` is not allowed in type aliases
.label = `Self` is only available in impls, traits, and concrete type definitions
hir_analysis_self_ty_not_captured = `impl Trait` must mention the `Self` type of the trait in `use<...>`
.label = `Self` type parameter is implicitly captured by this `impl Trait`
.note = currently, all type parameters are required to be mentioned in the precise captures list

View file

@ -1707,3 +1707,11 @@ pub(crate) enum SupertraitItemShadowee {
traits: DiagSymbolList,
},
}
/// Diagnostic for E0411: `Self` appeared in a type alias, where it has no
/// meaning. Emitted during HIR ty lowering when a projection bound's term
/// still references the dummy `Self` type after resolution — with trait
/// aliases and type aliases combined, the resolver may not catch every
/// illegal `Self` usage (see issue 139082).
#[derive(Diagnostic)]
#[diag(hir_analysis_self_in_type_alias, code = E0411)]
pub(crate) struct SelfInTypeAlias {
#[primary_span]
#[label]
// Span of the offending usage; carries both the primary span and the label
// ("`Self` is only available in impls, traits, and concrete type definitions").
pub span: Span,
}

View file

@ -16,6 +16,7 @@ use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};
use super::HirTyLowerer;
use crate::errors::SelfInTypeAlias;
use crate::hir_ty_lowering::{
GenericArgCountMismatch, GenericArgCountResult, PredicateFilter, RegionInferReason,
};
@ -125,6 +126,19 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
// ```
let mut projection_bounds = FxIndexMap::default();
for (proj, proj_span) in elaborated_projection_bounds {
let proj = proj.map_bound(|mut b| {
if let Some(term_ty) = &b.term.as_type() {
let references_self = term_ty.walk().any(|arg| arg == dummy_self.into());
if references_self {
// With trait alias and type alias combined, type resolver
// may not be able to catch all illegal `Self` usages (issue 139082)
let guar = tcx.dcx().emit_err(SelfInTypeAlias { span });
b.term = replace_dummy_self_with_error(tcx, b.term, guar);
}
}
b
});
let key = (
proj.skip_binder().projection_term.def_id,
tcx.anonymize_bound_vars(

View file

@ -4,7 +4,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{ObligationCause, WellFormedLoc};
use rustc_middle::bug;
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypingMode, fold_regions};
use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt, TypingMode, fold_regions};
use rustc_span::def_id::LocalDefId;
use rustc_trait_selection::traits::{self, ObligationCtxt};
use tracing::debug;
@ -77,6 +77,15 @@ fn diagnostic_hir_wf_check<'tcx>(
let tcx_ty = fold_regions(self.tcx, tcx_ty, |r, _| {
if r.is_bound() { self.tcx.lifetimes.re_erased } else { r }
});
// We may be checking the WFness of a type in an opaque with a non-lifetime bound.
// Perhaps we could rebind all the escaping bound vars, but they're coming from
// arbitrary de Bruijn indices and aren't particularly important, since they
// only arise from `feature(non_lifetime_binders)` anyway.
if tcx_ty.has_escaping_bound_vars() {
return;
}
let cause = traits::ObligationCause::new(
ty.span,
self.def_id,

View file

@ -24,8 +24,8 @@ pub(super) fn infer_predicates(
// If new predicates were added then we need to re-calculate
// all crates since there could be new implied predicates.
loop {
let mut predicates_added = false;
for i in 0.. {
let mut predicates_added = vec![];
// Visit all the crates and infer predicates
for id in tcx.hir_free_items() {
@ -83,14 +83,27 @@ pub(super) fn infer_predicates(
.get(&item_did.to_def_id())
.map_or(0, |p| p.as_ref().skip_binder().len());
if item_required_predicates.len() > item_predicates_len {
predicates_added = true;
predicates_added.push(item_did);
global_inferred_outlives
.insert(item_did.to_def_id(), ty::EarlyBinder::bind(item_required_predicates));
}
}
if !predicates_added {
if predicates_added.is_empty() {
// We've reached a fixed point.
break;
} else if !tcx.recursion_limit().value_within_limit(i) {
let msg = if let &[id] = &predicates_added[..] {
format!("overflow computing implied lifetime bounds for `{}`", tcx.def_path_str(id),)
} else {
"overflow computing implied lifetime bounds".to_string()
};
tcx.dcx()
.struct_span_fatal(
predicates_added.iter().map(|id| tcx.def_span(*id)).collect::<Vec<_>>(),
msg,
)
.emit();
}
}

View file

@ -2205,8 +2205,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let fields = listify(&missing_mandatory_fields, |f| format!("`{f}`")).unwrap();
self.dcx()
.struct_span_err(
span.shrink_to_hi(),
format!("missing mandatory field{s} {fields}"),
span.shrink_to_lo(),
format!("missing field{s} {fields} in initializer"),
)
.with_span_label(
span.shrink_to_lo(),
"fields that do not have a defaulted value must be provided explicitly",
)
.emit();
return;

View file

@ -782,9 +782,16 @@ impl<'a> Parser<'a> {
// Recovery is disabled when parsing macro arguments, so it must
// also be disabled when reparsing pasted macro arguments,
// otherwise we get inconsistent results (e.g. #137874).
let res = self.with_recovery(Recovery::Forbidden, |this| {
f(this).expect("failed to reparse {mv_kind:?}")
});
let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
let res = match res {
Ok(res) => res,
Err(err) => {
// This can occur in unusual error cases, e.g. #139445.
err.delay_as_bug();
return None;
}
};
if let token::CloseDelim(delim) = self.token.kind
&& let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
@ -793,7 +800,12 @@ impl<'a> Parser<'a> {
self.bump();
Some(res)
} else {
panic!("no close delim when reparsing {mv_kind:?}");
// This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
// where the reparse attempt of an invalid expr consumed the trailing invisible
// delimiter.
self.dcx()
.span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}");
None
}
} else {
None

View file

@ -1090,26 +1090,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
{
// See `assemble_candidates_for_unsizing` for more info.
// We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`.
let iter = data_a
.principal()
.filter(|_| {
// optionally drop the principal, if we're unsizing to no principal
data_b.principal().is_some()
})
.map(|b| b.map_bound(ty::ExistentialPredicate::Trait))
.into_iter()
.chain(
let existential_predicates = if data_b.principal().is_some() {
tcx.mk_poly_existential_predicates_from_iter(
data_a
.projection_bounds()
.map(|b| b.map_bound(ty::ExistentialPredicate::Projection)),
.principal()
.map(|b| b.map_bound(ty::ExistentialPredicate::Trait))
.into_iter()
.chain(
data_a
.projection_bounds()
.map(|b| b.map_bound(ty::ExistentialPredicate::Projection)),
)
.chain(
data_b
.auto_traits()
.map(ty::ExistentialPredicate::AutoTrait)
.map(ty::Binder::dummy),
),
)
.chain(
} else {
// If we're unsizing to a dyn type that has no principal, then drop
// the principal and projections from the type. We use the auto traits
// from the RHS type since as we noted that we've checked for auto
// trait compatibility during unsizing.
tcx.mk_poly_existential_predicates_from_iter(
data_b
.auto_traits()
.map(ty::ExistentialPredicate::AutoTrait)
.map(ty::Binder::dummy),
);
let existential_predicates = tcx.mk_poly_existential_predicates_from_iter(iter);
)
};
let source_trait = Ty::new_dynamic(tcx, existential_predicates, r_b, dyn_a);
// Require that the traits involved in this upcast are **equal**;

View file

@ -5,15 +5,11 @@
//!
//! # Const intrinsics
//!
//! Note: any changes to the constness of intrinsics should be discussed with the language team.
//! This includes changes in the stability of the constness.
//!
//! //FIXME(#132735) "old" style intrinsics support has been removed
//! In order to make an intrinsic usable at compile-time, it needs to be declared in the "new"
//! style, i.e. as a `#[rustc_intrinsic]` function, not inside an `extern` block. Then copy the
//! implementation from <https://github.com/rust-lang/miri/blob/master/src/intrinsics> to
//! In order to make an unstable intrinsic usable at compile-time, copy the implementation from
//! <https://github.com/rust-lang/miri/blob/master/src/intrinsics> to
//! <https://github.com/rust-lang/rust/blob/master/compiler/rustc_const_eval/src/interpret/intrinsics.rs>
//! and make the intrinsic declaration a `const fn`.
//! and make the intrinsic declaration below a `const fn`. This should be done in coordination with
//! wg-const-eval.
//!
//! If an intrinsic is supposed to be used from a `const fn` with a `rustc_const_stable` attribute,
//! `#[rustc_intrinsic_const_stable_indirect]` needs to be added to the intrinsic. Such a change requires

View file

@ -7,6 +7,8 @@ test-stage = 2
doc-stage = 2
# When compiling from source, you usually want all tools.
extended = true
# Use libtest built from the source tree instead of the precompiled one from stage 0.
compiletest-use-stage0-libtest = false
# Most users installing from source want to build all parts of the project from source.
[llvm]

View file

@ -369,6 +369,69 @@ impl Step for RustAnalyzer {
}
}
/// Compiletest is implicitly "checked" when it gets built in order to run tests,
/// so this is mainly for people working on compiletest to run locally.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Compiletest {
/// Target to check compiletest for. This is a host-only step (`ONLY_HOSTS`
/// is set on the `Step` impl), so this is effectively a host triple.
pub target: TargetSelection,
}
impl Step for Compiletest {
    type Output = ();
    const ONLY_HOSTS: bool = true;
    const DEFAULT: bool = false;

    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
        run.path("src/tools/compiletest")
    }

    fn make_run(run: RunConfig<'_>) {
        run.builder.ensure(Compiletest { target: run.target });
    }

    /// Type-checks compiletest, either against the precompiled stage 0
    /// (bootstrap) libtest or against the in-tree std, depending on the
    /// `build.compiletest-use-stage0-libtest` setting.
    fn run(self, builder: &Builder<'_>) {
        // Pick the sysroot flavor to check against.
        let use_stage0 = builder.config.compiletest_use_stage0_libtest;
        let mode = if use_stage0 { Mode::ToolBootstrap } else { Mode::ToolStd };

        // Bootstrap tools always use the stage 0 compiler; otherwise check
        // with the top-stage compiler for the build host.
        let stage = if use_stage0 { 0 } else { builder.top_stage };
        let compiler = builder.compiler(stage, builder.config.build);

        // Checking against the in-tree std requires rustc to be available first.
        if !use_stage0 {
            builder.ensure(Rustc::new(self.target, builder));
        }

        let mut check_cargo = prepare_tool_cargo(
            builder,
            compiler,
            mode,
            self.target,
            builder.kind,
            "src/tools/compiletest",
            SourceType::InTree,
            &[],
        );
        check_cargo.allow_features("test");

        // For `./x.py clippy`, skip `--all-targets`: linting tests and
        // benchmarks can produce very noisy results.
        if builder.kind != Kind::Clippy {
            check_cargo.arg("--all-targets");
        }

        let stamp = BuildStamp::new(&builder.cargo_out(compiler, mode, self.target))
            .with_prefix("compiletest-check");

        let _guard = builder.msg_check("compiletest artifacts", self.target);
        run_cargo(builder, check_cargo, builder.config.free_args.clone(), &stamp, vec![], true, false);
    }
}
macro_rules! tool_check_step {
(
$name:ident {
@ -464,7 +527,3 @@ tool_check_step!(Bootstrap { path: "src/bootstrap", default: false });
// `run-make-support` will be built as part of suitable run-make compiletest test steps, but support
// check to make it easier to work on.
tool_check_step!(RunMakeSupport { path: "src/tools/run-make-support", default: false });
// Compiletest is implicitly "checked" when it gets built in order to run tests,
// so this is mainly for people working on compiletest to run locally.
tool_check_step!(Compiletest { path: "src/tools/compiletest", default: false });

View file

@ -2398,7 +2398,9 @@ pub fn run_cargo(
// Ok now we need to actually find all the files listed in `toplevel`. We've
// got a list of prefix/extensions and we basically just need to find the
// most recent file in the `deps` folder corresponding to each one.
let contents = t!(target_deps_dir.read_dir())
let contents = target_deps_dir
.read_dir()
.unwrap_or_else(|e| panic!("Couldn't read {}: {}", target_deps_dir.display(), e))
.map(|e| t!(e))
.map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata())))
.collect::<Vec<_>>();

View file

@ -425,11 +425,14 @@ macro_rules! bootstrap_tool {
}
)*
let is_unstable = false $(|| $unstable)*;
let compiletest_wants_stage0 = $tool_name == "compiletest" && builder.config.compiletest_use_stage0_libtest;
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: $tool_name,
mode: if false $(|| $unstable)* {
mode: if is_unstable && !compiletest_wants_stage0 {
// use in-tree libraries for unstable features
Mode::ToolStd
} else {

View file

@ -417,6 +417,9 @@ pub struct Config {
/// Command for visual diff display, e.g. `diff-tool --color=always`.
pub compiletest_diff_tool: Option<String>,
/// Whether to use the precompiled stage0 libtest with compiletest.
pub compiletest_use_stage0_libtest: bool,
pub is_running_on_ci: bool,
}
@ -983,6 +986,7 @@ define_config! {
optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
jobs: Option<u32> = "jobs",
compiletest_diff_tool: Option<String> = "compiletest-diff-tool",
compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest",
ccache: Option<StringOrBool> = "ccache",
exclude: Option<Vec<PathBuf>> = "exclude",
}
@ -1682,6 +1686,7 @@ impl Config {
optimized_compiler_builtins,
jobs,
compiletest_diff_tool,
compiletest_use_stage0_libtest,
mut ccache,
exclude,
} = toml.build.unwrap_or_default();
@ -2415,6 +2420,7 @@ impl Config {
config.optimized_compiler_builtins =
optimized_compiler_builtins.unwrap_or(config.channel != "dev");
config.compiletest_diff_tool = compiletest_diff_tool;
config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true);
let download_rustc = config.download_rustc_commit.is_some();
config.explicit_stage_from_cli = flags.stage.is_some();

View file

@ -391,4 +391,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
severity: ChangeSeverity::Info,
summary: "You can now use `change-id = \"ignore\"` to suppress `change-id ` warnings in the console.",
},
ChangeInfo {
change_id: 139386,
severity: ChangeSeverity::Info,
summary: "Added a new option `build.compiletest-use-stage0-libtest` to force `compiletest` to use the stage 0 libtest.",
},
];

View file

@ -5,6 +5,7 @@ pub static CRATES: &[&str] = &[
// tidy-alphabetical-start
"annotate-snippets",
"anstyle",
"askama_parser",
"basic-toml",
"block-buffer",
"bumpalo",
@ -64,6 +65,7 @@ pub static CRATES: &[&str] = &[
"wasm-bindgen-backend",
"wasm-bindgen-macro-support",
"wasm-bindgen-shared",
"winnow",
"yoke",
"zerofrom",
"zerovec",

View file

@ -27,7 +27,7 @@ runners:
<<: *base-job
envs:
env-x86_64-apple-tests: &env-x86_64-apple-tests
SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact
SCRIPT: ./x.py check compiletest --set build.compiletest-use-stage0-libtest=true && ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact
RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
# Ensure that host tooling is tested on our minimum supported macOS version.

View file

@ -47,6 +47,7 @@ COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/
ENV SCRIPT \
python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
/scripts/check-default-config-profiles.sh && \
python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \
python3 ../x.py check --target=x86_64-pc-windows-gnu --host=x86_64-pc-windows-gnu && \
python3 ../x.py clippy ci && \
python3 ../x.py build --stage 0 src/tools/build-manifest && \

View file

@ -101,4 +101,5 @@ COPY scripts/shared.sh /scripts/
# the local version of the package is different than the one used by the CI.
ENV SCRIPT /tmp/checktools.sh ../x.py && \
npm install browser-ui-test@$(head -n 1 /tmp/browser-ui-test.version) --unsafe-perm=true && \
python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \
python3 ../x.py test tests/rustdoc-gui --stage 2 --test-args "'--jobs 1'"

View file

@ -58,7 +58,7 @@ runners:
<<: *base-job
envs:
env-x86_64-apple-tests: &env-x86_64-apple-tests
SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact
SCRIPT: ./x.py check compiletest --set build.compiletest-use-stage0-libtest=true && ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact
RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
# Ensure that host tooling is tested on our minimum supported macOS version.

View file

@ -53,7 +53,8 @@ Various intrinsics have native MIR operations that they correspond to. Instead o
backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass
will convert the calls to the MIR operation. Backends do not need to know about these intrinsics
at all. These intrinsics only make sense without a body, and can be declared as a `#[rustc_intrinsic]`.
The body is never used, as calls to the intrinsic do not exist anymore after MIR analyses.
The body is never used as the lowering pass implements support for all backends, so we never have to
use the fallback logic.
## Intrinsics without fallback logic

View file

@ -9,7 +9,7 @@ path = "lib.rs"
[dependencies]
arrayvec = { version = "0.7", default-features = false }
rinja = { version = "0.3", default-features = false, features = ["config"] }
askama = { version = "0.13", default-features = false, features = ["alloc", "config", "derive"] }
base64 = "0.21.7"
itertools = "0.12"
indexmap = "2"

View file

@ -1,7 +1,7 @@
use std::fmt::{self, Display};
use std::path::PathBuf;
use rinja::Template;
use askama::Template;
use rustc_data_structures::fx::FxIndexMap;
use super::static_files::{STATIC_FILES, StaticFiles};

View file

@ -5,7 +5,7 @@ use std::io;
use std::path::{Path, PathBuf};
use std::sync::mpsc::{Receiver, channel};
use rinja::Template;
use askama::Template;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE};
use rustc_middle::ty::TyCtxt;

View file

@ -43,7 +43,7 @@ use std::iter::Peekable;
use std::path::PathBuf;
use std::{fs, str};
use rinja::Template;
use askama::Template;
use rustc_attr_parsing::{
ConstStability, DeprecatedSince, Deprecation, RustcVersion, StabilityLevel, StableSince,
};

View file

@ -2,7 +2,7 @@ use std::cmp::Ordering;
use std::fmt::{self, Display, Write as _};
use std::iter;
use rinja::Template;
use askama::Template;
use rustc_abi::VariantIdx;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_hir as hir;
@ -37,7 +37,7 @@ use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
use crate::html::render::{document_full, document_item_info};
use crate::html::url_parts_builder::UrlPartsBuilder;
/// Generates a Rinja template struct for rendering items with common methods.
/// Generates an Askama template struct for rendering items with common methods.
///
/// Usage:
/// ```ignore (illustrative)
@ -301,7 +301,7 @@ fn toggle_close(mut w: impl fmt::Write) {
w.write_str("</details>").unwrap();
}
trait ItemTemplate<'a, 'cx: 'a>: rinja::Template + Display {
trait ItemTemplate<'a, 'cx: 'a>: askama::Template + Display {
fn item_and_cx(&self) -> (&'a clean::Item, &'a Context<'cx>);
}
@ -1867,7 +1867,7 @@ fn item_proc_macro(cx: &Context<'_>, it: &clean::Item, m: &clean::ProcMacro) ->
}
}
}
Ok(())
fmt::Result::Ok(())
})?;
write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
})
@ -1944,7 +1944,7 @@ fn item_constant(
}
}
}
Ok(())
Ok::<(), fmt::Error>(())
})?;
write!(w, "{}", document(cx, it, None, HeadingOffset::H2))

View file

@ -1,7 +1,7 @@
use std::borrow::Cow;
use std::cmp::Ordering;
use rinja::Template;
use askama::Template;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::{DefIdMap, DefIdSet};
@ -123,10 +123,10 @@ impl<'a> Link<'a> {
pub(crate) mod filters {
use std::fmt::{self, Display};
use rinja::filters::Safe;
use askama::filters::Safe;
use crate::html::escape::EscapeBodyTextWithWbr;
pub(crate) fn wrapped<T>(v: T) -> rinja::Result<Safe<impl Display>>
pub(crate) fn wrapped<T>(v: T) -> askama::Result<Safe<impl Display>>
where
T: Display,
{

View file

@ -1,6 +1,6 @@
use std::fmt;
use rinja::Template;
use askama::Template;
use rustc_abi::{Primitive, TagEncoding, Variants};
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;

View file

@ -3,7 +3,7 @@ use std::ffi::OsStr;
use std::path::{Component, Path, PathBuf};
use std::{fmt, fs};
use rinja::Template;
use askama::Template;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::ty::TyCtxt;

View file

@ -1,13 +1,13 @@
# Style for Templates
This directory has templates in the [Rinja templating language][rinjadoc], which is very
This directory has templates in the [Askama templating language][askamadoc], which is very
similar to [Jinja2][jinjadoc].
[jinjadoc]: https://jinja.palletsprojects.com/en/3.1.x/templates/
[rinjadoc]: https://docs.rs/rinja/latest/rinja/
[askamadoc]: https://docs.rs/askama/latest/askama/
We want our rendered output to have as little unnecessary whitespace as
possible, so that pages load quickly. To achieve that we use Rinja's
possible, so that pages load quickly. To achieve that we use Askama's
[whitespace control] features. By default, whitespace characters are removed
around jinja tags (`{% %}` for example). At the end of most lines, we put an
empty comment tag: `{# #}`. This causes all whitespace between the end of the
@ -18,7 +18,7 @@ remove following whitespace but not preceding. We also use the whitespace
control characters in most instances of tags with control flow, for example
`{% if foo %}`.
[whitespace control]: https://rinja.readthedocs.io/en/stable/configuration.html#whitespace-control
[whitespace control]: https://askama.readthedocs.io/en/stable/configuration.html#whitespace-control
We want our templates to be readable, so we use indentation and newlines
liberally. We indent by four spaces after opening an HTML tag _or_ a Jinja
@ -26,11 +26,11 @@ tag. In most cases an HTML tag should be followed by a newline, but if the
tag has simple contents and fits with its close tag on a single line, the
contents don't necessarily need a new line.
Rinja templates support quite sophisticated control flow. To keep our templates
Askama templates support quite sophisticated control flow. To keep our templates
simple and understandable, we use only a subset: `if` and `for`. In particular
we avoid [assignments in the template logic][assignments] and [Rinja
we avoid [assignments in the template logic][assignments] and [Askama
macros][macros]. This also may make things easier if we switch to a different
Jinja-style template system in the future.
[assignments]: https://rinja.readthedocs.io/en/stable/template_syntax.html#assignments
[macros]: https://rinja.readthedocs.io/en/stable/template_syntax.html#macros
[assignments]: https://askama.readthedocs.io/en/stable/template_syntax.html#assignments
[macros]: https://askama.readthedocs.io/en/stable/template_syntax.html#macros

View file

@ -8,8 +8,8 @@ description = "Produces a manifest of all the copyrighted materials in the Rust
[dependencies]
anyhow = "1.0.65"
askama = "0.13.0"
cargo_metadata = "0.18.1"
rinja = "0.3.0"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.85"
thiserror = "1"

View file

@ -2,7 +2,7 @@ use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use anyhow::Error;
use rinja::Template;
use askama::Template;
mod cargo_metadata;
@ -117,7 +117,7 @@ struct Metadata {
}
/// Describes one node in our metadata tree
#[derive(serde::Deserialize, rinja::Template, Clone, Debug, PartialEq, Eq)]
#[derive(serde::Deserialize, Template, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", tag = "type")]
#[template(path = "Node.html")]
pub(crate) enum Node {

View file

@ -6882,85 +6882,14 @@ pub fn foo() {}
#[test]
fn hover_feature() {
check(
r#"#![feature(intrinsics$0)]"#,
expect![[r#"
*intrinsics*
```
intrinsics
```
___
# `intrinsics`
The tracking issue for this feature is: None.
Intrinsics are rarely intended to be stable directly, but are usually
exported in some sort of stable manner. Prefer using the stable interfaces to
the intrinsic directly when you can.
------------------------
## Intrinsics with fallback logic
Many intrinsics can be written in pure rust, albeit inefficiently or without supporting
some features that only exist on some backends. Backends can simply not implement those
intrinsics without causing any code miscompilations or failures to compile.
All intrinsic fallback bodies are automatically made cross-crate inlineable (like `#[inline]`)
by the codegen backend, but not the MIR inliner.
```rust
#![feature(intrinsics)]
#![allow(internal_features)]
#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}
```
Since these are just regular functions, it is perfectly ok to create the intrinsic twice:
```rust
#![feature(intrinsics)]
#![allow(internal_features)]
#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {}
mod foo {
#[rustc_intrinsic]
const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {
panic!("noisy const dealloc")
}
}
```
The behaviour on backends that override the intrinsic is exactly the same. On other
backends, the intrinsic behaviour depends on which implementation is called, just like
with any regular function.
## Intrinsics lowered to MIR instructions
Various intrinsics have native MIR operations that they correspond to. Instead of requiring
backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass
will convert the calls to the MIR operation. Backends do not need to know about these intrinsics
at all. These intrinsics only make sense without a body, and can be as a `#[rustc_intrinsic]`.
The body is never used, as calls to the intrinsic do not exist anymore after MIR analyses.
## Intrinsics without fallback logic
These must be implemented by all backends.
### `#[rustc_intrinsic]` declarations
These are written like intrinsics with fallback bodies, but the body is irrelevant.
Use `loop {}` for the body or call the intrinsic recursively and add
`#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't
invoke the body.
"#]],
)
let (analysis, position) = fixture::position(r#"#![feature(intrinsics$0)]"#);
analysis
.hover(
&HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG },
FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
)
.unwrap()
.unwrap();
}
#[test]

View file

@ -1,21 +1,26 @@
//@ compile-flags: -Copt-level=1
//@ only-x86_64
//@ only-64bit
#![crate_type = "lib"]
#![feature(core_intrinsics)]
// Check each of the 3 cases for `codegen_get_discr`.
// FIXME: once our min-bar LLVM has `range` attributes, update the various
// tests here to no longer have the `range`s and `nsw`s as optional.
// Case 0: One tagged variant.
pub enum Enum0 {
A(bool),
B,
}
// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match0{{.*}}
// CHECK-LABEL: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match0(i8{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %1 = icmp eq i8 %0, 2
// CHECK-NEXT: %2 = and i8 %0, 1
// CHECK-NEXT: %{{.+}} = select i1 %1, i8 13, i8 %2
// CHECK-NEXT: %[[IS_B:.+]] = icmp eq i8 %0, 2
// CHECK-NEXT: %[[TRUNC:.+]] = and i8 %0, 1
// CHECK-NEXT: %[[R:.+]] = select i1 %[[IS_B]], i8 13, i8 %[[TRUNC]]
// CHECK-NEXT: ret i8 %[[R]]
#[no_mangle]
pub fn match0(e: Enum0) -> u8 {
use Enum0::*;
@ -32,13 +37,14 @@ pub enum Enum1 {
C,
}
// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match1{{.*}}
// CHECK-LABEL: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match1(i8{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %1 = add{{( nsw)?}} i8 %0, -2
// CHECK-NEXT: %2 = zext i8 %1 to i64
// CHECK-NEXT: %3 = icmp ult i8 %1, 2
// CHECK-NEXT: %4 = add nuw nsw i64 %2, 1
// CHECK-NEXT: %_2 = select i1 %3, i64 %4, i64 0
// CHECK-NEXT: %[[REL_VAR:.+]] = add{{( nsw)?}} i8 %0, -2
// CHECK-NEXT: %[[REL_VAR_WIDE:.+]] = zext i8 %[[REL_VAR]] to i64
// CHECK-NEXT: %[[IS_NICHE:.+]] = icmp ult i8 %[[REL_VAR]], 2
// CHECK-NEXT: %[[NICHE_DISCR:.+]] = add nuw nsw i64 %[[REL_VAR_WIDE]], 1
// CHECK-NEXT: %[[DISCR:.+]] = select i1 %[[IS_NICHE]], i64 %[[NICHE_DISCR]], i64 0
// CHECK-NEXT: switch i64 %[[DISCR]]
#[no_mangle]
pub fn match1(e: Enum1) -> u8 {
use Enum1::*;
@ -92,14 +98,14 @@ pub enum Enum2 {
E,
}
// CHECK: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match2{{.*}}
// CHECK-LABEL: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match2(i8{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %1 = add i8 %0, 2
// CHECK-NEXT: %2 = zext i8 %1 to i64
// CHECK-NEXT: %3 = icmp ult i8 %1, 4
// CHECK-NEXT: %4 = add nuw nsw i64 %2, 1
// CHECK-NEXT: %_2 = select i1 %3, i64 %4, i64 0
// CHECK-NEXT: switch i64 %_2, label {{.*}} [
// CHECK-NEXT: %[[REL_VAR:.+]] = add i8 %0, 2
// CHECK-NEXT: %[[REL_VAR_WIDE:.+]] = zext i8 %[[REL_VAR]] to i64
// CHECK-NEXT: %[[IS_NICHE:.+]] = icmp ult i8 %[[REL_VAR]], 4
// CHECK-NEXT: %[[NICHE_DISCR:.+]] = add nuw nsw i64 %[[REL_VAR_WIDE]], 1
// CHECK-NEXT: %[[DISCR:.+]] = select i1 %[[IS_NICHE]], i64 %[[NICHE_DISCR]], i64 0
// CHECK-NEXT: switch i64 %[[DISCR]]
#[no_mangle]
pub fn match2(e: Enum2) -> u8 {
use Enum2::*;
@ -111,3 +117,357 @@ pub fn match2(e: Enum2) -> u8 {
E => 250,
}
}
// And make sure it works even if the niched scalar is a pointer.
// (For example, that we don't try to `sub` on pointers.)
// CHECK-LABEL: define noundef{{( range\(i16 -?[0-9]+, -?[0-9]+\))?}} i16 @match3(ptr{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %[[IS_NULL:.+]] = icmp eq ptr %0, null
// CHECK-NEXT: br i1 %[[IS_NULL]]
#[no_mangle]
pub fn match3(e: Option<&u8>) -> i16 {
match e {
Some(r) => *r as _,
None => -1,
}
}
// If the untagged variant is in the middle, there's an impossible value that's
// not reflected in the `range` parameter attribute, so we assume it away.
#[derive(PartialEq)]
pub enum MiddleNiche {
A,
B,
C(bool),
D,
E,
}
// CHECK-LABEL: define noundef{{( range\(i8 -?[0-9]+, -?[0-9]+\))?}} i8 @match4(i8{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %[[REL_VAR:.+]] = add{{( nsw)?}} i8 %0, -2
// CHECK-NEXT: %[[IS_NICHE:.+]] = icmp ult i8 %[[REL_VAR]], 5
// CHECK-NEXT: %[[NOT_IMPOSSIBLE:.+]] = icmp ne i8 %[[REL_VAR]], 2
// CHECK-NEXT: call void @llvm.assume(i1 %[[NOT_IMPOSSIBLE]])
// CHECK-NEXT: %[[DISCR:.+]] = select i1 %[[IS_NICHE]], i8 %[[REL_VAR]], i8 2
// CHECK-NEXT: switch i8 %[[DISCR]]
#[no_mangle]
pub fn match4(e: MiddleNiche) -> u8 {
use MiddleNiche::*;
match e {
A => 13,
B => 100,
C(b) => b as u8,
D => 200,
E => 250,
}
}
// CHECK-LABEL: define{{.+}}i1 @match4_is_c(i8{{.+}}%e)
// CHECK-NEXT: start
// CHECK-NEXT: %[[REL_VAR:.+]] = add{{( nsw)?}} i8 %e, -2
// CHECK-NEXT: %[[NOT_NICHE:.+]] = icmp ugt i8 %[[REL_VAR]], 4
// CHECK-NEXT: %[[NOT_IMPOSSIBLE:.+]] = icmp ne i8 %[[REL_VAR]], 2
// CHECK-NEXT: call void @llvm.assume(i1 %[[NOT_IMPOSSIBLE]])
// CHECK-NEXT: ret i1 %[[NOT_NICHE]]
#[no_mangle]
pub fn match4_is_c(e: MiddleNiche) -> bool {
// Before #139098, this couldn't optimize out the `select` because it looked
// like it was possible for a `2` to be produced on both sides.
std::intrinsics::discriminant_value(&e) == 2
}
// You have to do something pretty obnoxious to get a variant index that doesn't
// fit in the tag size, but it's possible
pub enum Never {}
pub enum HugeVariantIndex {
V000(Never),
V001(Never),
V002(Never),
V003(Never),
V004(Never),
V005(Never),
V006(Never),
V007(Never),
V008(Never),
V009(Never),
V010(Never),
V011(Never),
V012(Never),
V013(Never),
V014(Never),
V015(Never),
V016(Never),
V017(Never),
V018(Never),
V019(Never),
V020(Never),
V021(Never),
V022(Never),
V023(Never),
V024(Never),
V025(Never),
V026(Never),
V027(Never),
V028(Never),
V029(Never),
V030(Never),
V031(Never),
V032(Never),
V033(Never),
V034(Never),
V035(Never),
V036(Never),
V037(Never),
V038(Never),
V039(Never),
V040(Never),
V041(Never),
V042(Never),
V043(Never),
V044(Never),
V045(Never),
V046(Never),
V047(Never),
V048(Never),
V049(Never),
V050(Never),
V051(Never),
V052(Never),
V053(Never),
V054(Never),
V055(Never),
V056(Never),
V057(Never),
V058(Never),
V059(Never),
V060(Never),
V061(Never),
V062(Never),
V063(Never),
V064(Never),
V065(Never),
V066(Never),
V067(Never),
V068(Never),
V069(Never),
V070(Never),
V071(Never),
V072(Never),
V073(Never),
V074(Never),
V075(Never),
V076(Never),
V077(Never),
V078(Never),
V079(Never),
V080(Never),
V081(Never),
V082(Never),
V083(Never),
V084(Never),
V085(Never),
V086(Never),
V087(Never),
V088(Never),
V089(Never),
V090(Never),
V091(Never),
V092(Never),
V093(Never),
V094(Never),
V095(Never),
V096(Never),
V097(Never),
V098(Never),
V099(Never),
V100(Never),
V101(Never),
V102(Never),
V103(Never),
V104(Never),
V105(Never),
V106(Never),
V107(Never),
V108(Never),
V109(Never),
V110(Never),
V111(Never),
V112(Never),
V113(Never),
V114(Never),
V115(Never),
V116(Never),
V117(Never),
V118(Never),
V119(Never),
V120(Never),
V121(Never),
V122(Never),
V123(Never),
V124(Never),
V125(Never),
V126(Never),
V127(Never),
V128(Never),
V129(Never),
V130(Never),
V131(Never),
V132(Never),
V133(Never),
V134(Never),
V135(Never),
V136(Never),
V137(Never),
V138(Never),
V139(Never),
V140(Never),
V141(Never),
V142(Never),
V143(Never),
V144(Never),
V145(Never),
V146(Never),
V147(Never),
V148(Never),
V149(Never),
V150(Never),
V151(Never),
V152(Never),
V153(Never),
V154(Never),
V155(Never),
V156(Never),
V157(Never),
V158(Never),
V159(Never),
V160(Never),
V161(Never),
V162(Never),
V163(Never),
V164(Never),
V165(Never),
V166(Never),
V167(Never),
V168(Never),
V169(Never),
V170(Never),
V171(Never),
V172(Never),
V173(Never),
V174(Never),
V175(Never),
V176(Never),
V177(Never),
V178(Never),
V179(Never),
V180(Never),
V181(Never),
V182(Never),
V183(Never),
V184(Never),
V185(Never),
V186(Never),
V187(Never),
V188(Never),
V189(Never),
V190(Never),
V191(Never),
V192(Never),
V193(Never),
V194(Never),
V195(Never),
V196(Never),
V197(Never),
V198(Never),
V199(Never),
V200(Never),
V201(Never),
V202(Never),
V203(Never),
V204(Never),
V205(Never),
V206(Never),
V207(Never),
V208(Never),
V209(Never),
V210(Never),
V211(Never),
V212(Never),
V213(Never),
V214(Never),
V215(Never),
V216(Never),
V217(Never),
V218(Never),
V219(Never),
V220(Never),
V221(Never),
V222(Never),
V223(Never),
V224(Never),
V225(Never),
V226(Never),
V227(Never),
V228(Never),
V229(Never),
V230(Never),
V231(Never),
V232(Never),
V233(Never),
V234(Never),
V235(Never),
V236(Never),
V237(Never),
V238(Never),
V239(Never),
V240(Never),
V241(Never),
V242(Never),
V243(Never),
V244(Never),
V245(Never),
V246(Never),
V247(Never),
V248(Never),
V249(Never),
V250(Never),
V251(Never),
V252(Never),
V253(Never),
V254(Never),
V255(Never),
V256(Never),
Possible257,
Bool258(bool),
Possible259,
}
// CHECK-LABEL: define noundef{{( range\(i8 [0-9]+, [0-9]+\))?}} i8 @match5(i8{{.+}}%0)
// CHECK-NEXT: start:
// CHECK-NEXT: %[[REL_VAR:.+]] = add{{( nsw)?}} i8 %0, -2
// CHECK-NEXT: %[[REL_VAR_WIDE:.+]] = zext i8 %[[REL_VAR]] to i64
// CHECK-NEXT: %[[IS_NICHE:.+]] = icmp ult i8 %[[REL_VAR]], 3
// CHECK-NEXT: %[[NOT_IMPOSSIBLE:.+]] = icmp ne i8 %[[REL_VAR]], 1
// CHECK-NEXT: call void @llvm.assume(i1 %[[NOT_IMPOSSIBLE]])
// CHECK-NEXT: %[[NICHE_DISCR:.+]] = add nuw nsw i64 %[[REL_VAR_WIDE]], 257
// CHECK-NEXT: %[[DISCR:.+]] = select i1 %[[IS_NICHE]], i64 %[[NICHE_DISCR]], i64 258
// CHECK-NEXT: switch i64 %[[DISCR]],
// CHECK-NEXT: i64 257,
// CHECK-NEXT: i64 258,
// CHECK-NEXT: i64 259,
#[no_mangle]
pub fn match5(e: HugeVariantIndex) -> u8 {
use HugeVariantIndex::*;
match e {
Possible257 => 13,
Bool258(b) => b as u8,
Possible259 => 100,
}
}

View file

@ -1,8 +1,12 @@
//@ compile-flags: -Copt-level=3 -C no-prepopulate-passes
//@ only-x86_64 (because these discriminants are isize)
//@ only-64bit (because these discriminants are isize)
#![crate_type = "lib"]
// This directly tests what we emit for these matches, rather than what happens
// after optimization, so it doesn't need to worry about extra flags on the
// instructions and is less susceptible to being broken on LLVM updates.
// CHECK-LABEL: @option_match
#[no_mangle]
pub fn option_match(x: Option<i32>) -> u16 {
@ -51,3 +55,76 @@ pub fn result_match(x: Result<u64, i64>) -> u16 {
Ok(_) => 42,
}
}
// CHECK-LABEL: @option_bool_match(
#[no_mangle]
pub fn option_bool_match(x: Option<bool>) -> char {
// CHECK: %[[RAW:.+]] = load i8, ptr %x
// CHECK: %[[IS_NONE:.+]] = icmp eq i8 %[[RAW]], 2
// CHECK: %[[OPT_DISCR:.+]] = select i1 %[[IS_NONE]], i64 0, i64 1
// CHECK: %[[OPT_DISCR_T:.+]] = trunc nuw i64 %[[OPT_DISCR]] to i1
// CHECK: br i1 %[[OPT_DISCR_T]], label %[[BB_SOME:.+]], label %[[BB_NONE:.+]]
// CHECK: [[BB_SOME]]:
// CHECK: %[[FIELD:.+]] = load i8, ptr %x
// CHECK: %[[FIELD_T:.+]] = trunc nuw i8 %[[FIELD]] to i1
// CHECK: br i1 %[[FIELD_T]]
match x {
None => 'n',
Some(false) => 'f',
Some(true) => 't',
}
}
use std::cmp::Ordering::{self, *};
// CHECK-LABEL: @option_ordering_match(
#[no_mangle]
pub fn option_ordering_match(x: Option<Ordering>) -> char {
// CHECK: %[[RAW:.+]] = load i8, ptr %x
// CHECK: %[[IS_NONE:.+]] = icmp eq i8 %[[RAW]], 2
// CHECK: %[[OPT_DISCR:.+]] = select i1 %[[IS_NONE]], i64 0, i64 1
// CHECK: %[[OPT_DISCR_T:.+]] = trunc nuw i64 %[[OPT_DISCR]] to i1
// CHECK: br i1 %[[OPT_DISCR_T]], label %[[BB_SOME:.+]], label %[[BB_NONE:.+]]
// CHECK: [[BB_SOME]]:
// CHECK: %[[FIELD:.+]] = load i8, ptr %x
// CHECK: switch i8 %[[FIELD]], label %[[UNREACHABLE:.+]] [
// CHECK-NEXT: i8 -1, label
// CHECK-NEXT: i8 0, label
// CHECK-NEXT: i8 1, label
// CHECK-NEXT: ]
// CHECK: [[UNREACHABLE]]:
// CHECK-NEXT: unreachable
match x {
None => '?',
Some(Less) => '<',
Some(Equal) => '=',
Some(Greater) => '>',
}
}
// CHECK-LABEL: @option_nonzero_match(
#[no_mangle]
pub fn option_nonzero_match(x: Option<std::num::NonZero<u16>>) -> u16 {
// CHECK: %[[OUT:.+]] = alloca [2 x i8]
// CHECK: %[[IS_NONE:.+]] = icmp eq i16 %x, 0
// CHECK: %[[OPT_DISCR:.+]] = select i1 %[[IS_NONE]], i64 0, i64 1
// CHECK: %[[OPT_DISCR_T:.+]] = trunc nuw i64 %[[OPT_DISCR]] to i1
// CHECK: br i1 %[[OPT_DISCR_T]], label %[[BB_SOME:.+]], label %[[BB_NONE:.+]]
// CHECK: [[BB_SOME]]:
// CHECK: store i16 987, ptr %[[OUT]]
// CHECK: [[BB_NONE]]:
// CHECK: store i16 123, ptr %[[OUT]]
// CHECK: %[[RET:.+]] = load i16, ptr %[[OUT]]
// CHECK: ret i16 %[[RET]]
match x {
None => 123,
Some(_) => 987,
}
}

View file

@ -0,0 +1,12 @@
#![feature(trait_alias)]
trait B = Fn() -> Self;
type D = &'static dyn B;
//~^ ERROR E0411
fn a() -> D {
unreachable!();
}
fn main() {
_ = a();
}

View file

@ -0,0 +1,9 @@
error[E0411]: `Self` is not allowed in type aliases
--> $DIR/trait-alias-self-projection.rs:3:19
|
LL | type D = &'static dyn B;
| ^^^^^ `Self` is only available in impls, traits, and concrete type definitions
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0411`.

View file

@ -0,0 +1,11 @@
trait Tailed<'a>: 'a {
type Tail: Tailed<'a>;
}
struct List<'a, T: Tailed<'a>> {
//~^ ERROR overflow computing implied lifetime bounds for `List`
next: Box<List<'a, T::Tail>>,
node: &'a T,
}
fn main() {}

View file

@ -0,0 +1,8 @@
error: overflow computing implied lifetime bounds for `List`
--> $DIR/overflow.rs:5:1
|
LL | struct List<'a, T: Tailed<'a>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 1 previous error

View file

@ -0,0 +1,6 @@
fn main() {
assert_eq!(3, 'a,)
//~^ ERROR expected `while`, `for`, `loop` or `{` after a label
//~| ERROR expected `while`, `for`, `loop` or `{` after a label
//~| ERROR expected expression, found ``
}

View file

@ -0,0 +1,24 @@
error: expected `while`, `for`, `loop` or `{` after a label
--> $DIR/failed-to-reparse-issue-139445.rs:2:21
|
LL | assert_eq!(3, 'a,)
| ^ expected `while`, `for`, `loop` or `{` after a label
error: expected `while`, `for`, `loop` or `{` after a label
--> $DIR/failed-to-reparse-issue-139445.rs:2:5
|
LL | assert_eq!(3, 'a,)
| ^^^^^^^^^^^^^^^^^^ expected `while`, `for`, `loop` or `{` after a label
|
= note: this error originates in the macro `assert_eq` (in Nightly builds, run with -Z macro-backtrace for more info)
error: expected expression, found ``
--> $DIR/failed-to-reparse-issue-139445.rs:2:5
|
LL | assert_eq!(3, 'a,)
| ^^^^^^^^^^^^^^^^^^ expected expression
|
= note: this error originates in the macro `assert_eq` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 3 previous errors

View file

@ -0,0 +1,14 @@
// This code caused a "no close delim when reparsing Expr" ICE in #139248.
macro_rules! m {
(static a : () = $e:expr) => {
static a : () = $e;
//~^ ERROR macro expansion ends with an incomplete expression: expected expression
}
}
m! { static a : () = (if b) }
//~^ ERROR expected `{`, found `)`
//~| ERROR expected `{`, found `)`
fn main() {}

View file

@ -0,0 +1,33 @@
error: expected `{`, found `)`
--> $DIR/no-close-delim-issue-139248.rs:10:27
|
LL | m! { static a : () = (if b) }
| ^ expected `{`
|
note: the `if` expression is missing a block after this condition
--> $DIR/no-close-delim-issue-139248.rs:10:26
|
LL | m! { static a : () = (if b) }
| ^
error: expected `{`, found `)`
--> $DIR/no-close-delim-issue-139248.rs:10:27
|
LL | m! { static a : () = (if b) }
| ^ expected `{`
|
note: the `if` expression is missing a block after this condition
--> $DIR/no-close-delim-issue-139248.rs:10:26
|
LL | m! { static a : () = (if b) }
| ^
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error: macro expansion ends with an incomplete expression: expected expression
--> $DIR/no-close-delim-issue-139248.rs:5:28
|
LL | static a : () = $e;
| ^ expected expression
error: aborting due to 3 previous errors

View file

@ -1,4 +1,4 @@
#![feature(default_field_values)]
#![feature(default_field_values)]
#[derive(Debug)]
pub struct S;
@ -50,7 +50,8 @@ enum E {
fn main () {
let _ = Foo { .. }; // ok
let _ = Foo::default(); // ok
let _ = Bar { .. }; //~ ERROR mandatory field
let _ = Bar { .. }; //~ ERROR missing field
let _ = Bar { baz: 0, .. }; //~ ERROR missing field
let _ = Bar::default(); // silenced
let _ = Bar { bar: S, .. }; // ok
let _ = Qux::<4> { .. };

View file

@ -27,14 +27,20 @@ LL + #[derive(Default)]
LL | pub struct S;
|
error: missing mandatory field `bar`
--> $DIR/failures.rs:53:21
error: missing field `bar` in initializer
--> $DIR/failures.rs:53:19
|
LL | let _ = Bar { .. };
| ^
| ^ fields that do not have a defaulted value must be provided explicitly
error: missing field `bar` in initializer
--> $DIR/failures.rs:54:27
|
LL | let _ = Bar { baz: 0, .. };
| ^ fields that do not have a defaulted value must be provided explicitly
error[E0308]: mismatched types
--> $DIR/failures.rs:57:17
--> $DIR/failures.rs:58:17
|
LL | let _ = Rak(..);
| --- ^^ expected `i32`, found `RangeFull`
@ -47,19 +53,19 @@ note: tuple struct defined here
LL | pub struct Rak(i32 = 42);
| ^^^
help: you might have meant to use `..` to skip providing a value for expected fields, but this is only supported on non-tuple struct literals; it is instead interpreted as a `std::ops::RangeFull` literal
--> $DIR/failures.rs:57:17
--> $DIR/failures.rs:58:17
|
LL | let _ = Rak(..);
| ^^
error[E0061]: this struct takes 1 argument but 2 arguments were supplied
--> $DIR/failures.rs:59:13
--> $DIR/failures.rs:60:13
|
LL | let _ = Rak(0, ..);
| ^^^ -- unexpected argument #2 of type `RangeFull`
|
help: you might have meant to use `..` to skip providing a value for expected fields, but this is only supported on non-tuple struct literals; it is instead interpreted as a `std::ops::RangeFull` literal
--> $DIR/failures.rs:59:20
--> $DIR/failures.rs:60:20
|
LL | let _ = Rak(0, ..);
| ^^
@ -75,13 +81,13 @@ LL + let _ = Rak(0);
|
error[E0061]: this struct takes 1 argument but 2 arguments were supplied
--> $DIR/failures.rs:61:13
--> $DIR/failures.rs:62:13
|
LL | let _ = Rak(.., 0);
| ^^^ -- unexpected argument #1 of type `RangeFull`
|
help: you might have meant to use `..` to skip providing a value for expected fields, but this is only supported on non-tuple struct literals; it is instead interpreted as a `std::ops::RangeFull` literal
--> $DIR/failures.rs:61:17
--> $DIR/failures.rs:62:17
|
LL | let _ = Rak(.., 0);
| ^^
@ -96,7 +102,7 @@ LL - let _ = Rak(.., 0);
LL + let _ = Rak(0);
|
error: aborting due to 7 previous errors
error: aborting due to 8 previous errors
Some errors have detailed explanations: E0061, E0277, E0308.
For more information about an error, try `rustc --explain E0061`.

View file

@ -0,0 +1,13 @@
//@ check-pass
trait Tr {
type Assoc;
}
impl Tr for () {
type Assoc = ();
}
fn main() {
let x = &() as &(dyn Tr<Assoc = ()> + Send) as &dyn Send;
}

View file

@ -0,0 +1,18 @@
// Make sure not to construct predicates with escaping bound vars in `diagnostic_hir_wf_check`.
// Regression test for <https://github.com/rust-lang/rust/issues/139330>.
#![feature(non_lifetime_binders)]
//~^ WARN the feature `non_lifetime_binders` is incomplete
trait A<T: ?Sized> {}
impl<T: ?Sized> A<T> for () {}
trait B {}
struct W<T: B>(T);
fn b() -> (W<()>, impl for<C> A<C>) { (W(()), ()) }
//~^ ERROR the trait bound `(): B` is not satisfied
//~| ERROR the trait bound `(): B` is not satisfied
//~| ERROR the trait bound `(): B` is not satisfied
fn main() {}

View file

@ -0,0 +1,65 @@
warning: the feature `non_lifetime_binders` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/diagnostic-hir-wf-check.rs:4:12
|
LL | #![feature(non_lifetime_binders)]
| ^^^^^^^^^^^^^^^^^^^^
|
= note: see issue #108185 <https://github.com/rust-lang/rust/issues/108185> for more information
= note: `#[warn(incomplete_features)]` on by default
error[E0277]: the trait bound `(): B` is not satisfied
--> $DIR/diagnostic-hir-wf-check.rs:13:12
|
LL | fn b() -> (W<()>, impl for<C> A<C>) { (W(()), ()) }
| ^^^^^ the trait `B` is not implemented for `()`
|
help: this trait has no implementations, consider adding one
--> $DIR/diagnostic-hir-wf-check.rs:10:1
|
LL | trait B {}
| ^^^^^^^
note: required by a bound in `W`
--> $DIR/diagnostic-hir-wf-check.rs:11:13
|
LL | struct W<T: B>(T);
| ^ required by this bound in `W`
error[E0277]: the trait bound `(): B` is not satisfied
--> $DIR/diagnostic-hir-wf-check.rs:13:42
|
LL | fn b() -> (W<()>, impl for<C> A<C>) { (W(()), ()) }
| - ^^ the trait `B` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/diagnostic-hir-wf-check.rs:10:1
|
LL | trait B {}
| ^^^^^^^
note: required by a bound in `W`
--> $DIR/diagnostic-hir-wf-check.rs:11:13
|
LL | struct W<T: B>(T);
| ^ required by this bound in `W`
error[E0277]: the trait bound `(): B` is not satisfied
--> $DIR/diagnostic-hir-wf-check.rs:13:40
|
LL | fn b() -> (W<()>, impl for<C> A<C>) { (W(()), ()) }
| ^^^^^ the trait `B` is not implemented for `()`
|
help: this trait has no implementations, consider adding one
--> $DIR/diagnostic-hir-wf-check.rs:10:1
|
LL | trait B {}
| ^^^^^^^
note: required by a bound in `W`
--> $DIR/diagnostic-hir-wf-check.rs:11:13
|
LL | struct W<T: B>(T);
| ^ required by this bound in `W`
error: aborting due to 3 previous errors; 1 warning emitted
For more information about this error, try `rustc --explain E0277`.