mirror of https://github.com/rust-lang/rust.git
Auto merge of #132594 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer`

r? `@ghost`
commit fbab78289d
@@ -51,6 +51,7 @@ jobs:
cargo workspaces rename --from proc-macro-api proc_macro_api
cargo workspaces rename --from proc-macro-srv proc_macro_srv
cargo workspaces rename --from project-model project_model
cargo workspaces rename --from test-fixture test_fixture
cargo workspaces rename --from test-utils test_utils
cargo workspaces rename --from text-edit text_edit
# Remove library crates from the workspaces so we don't auto-publish them as well
@@ -1492,9 +1492,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5bc2cfc7264d84215a08875ef90a1d35f76b5c9ad1993515d2da7e4e40b2b4b"
checksum = "709fde78db053c78c87776ec738677649f791645883f82ff145f68caf9f18e1a"
dependencies = [
"bitflags 2.6.0",
"ra-ap-rustc_index",

@@ -1503,9 +1503,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8929140697812e5dd09e19cf446d85146332363f0dbc125d4214834c34ead96"
checksum = "da115d496e5abd65e2dceb6883d7597593badfe23fea3439202b8da5a11ea250"
dependencies = [
"arrayvec",
"ra-ap-rustc_index_macros",

@@ -1514,9 +1514,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "514a3f5d04c8b4a2750f29746cc9abb1f78deb7e72e4ad1dc95bbc608f3db157"
checksum = "be86d06a75a8125c1ace197d5030e6e02721348d32e572baea35c891669ad1e2"
dependencies = [
"proc-macro2",
"quote",

@@ -1525,9 +1525,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "276fcb1205da071a0cd64416f3f0e198043c11f176c5b501a45dbf0cb33979f2"
checksum = "b64b46ae0d8f59acc32e64e0085532b831f0d6182d870a7cd86c046c2c46e722"
dependencies = [
"unicode-properties",
"unicode-xid",

@@ -1535,9 +1535,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "961b30b22cfac296b14b72e9f95e79c16cebc8c926872755fb1568a6c4243a62"
checksum = "dbdaad19ddbd0ff46e947ca8dbb6ae678a112d3938669fb3ad6bfd244917e24b"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",

@@ -1545,9 +1545,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.75.0"
version = "0.76.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "614232513814a4b714fea7f11345d31c0c277bca3089bb6ca1ec20870bfc022a"
checksum = "dc5761e37c78d98ede9f20f6b66526093d0be66aa256d5cbdf214495843ba74d"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@@ -84,11 +84,11 @@ tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }

ra-ap-rustc_lexer = { version = "0.75", default-features = false }
ra-ap-rustc_parse_format = { version = "0.75", default-features = false }
ra-ap-rustc_index = { version = "0.75", default-features = false }
ra-ap-rustc_abi = { version = "0.75", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.75", default-features = false }
ra-ap-rustc_lexer = { version = "0.76", default-features = false }
ra-ap-rustc_parse_format = { version = "0.76", default-features = false }
ra-ap-rustc_index = { version = "0.76", default-features = false }
ra-ap-rustc_abi = { version = "0.76", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.76", default-features = false }

# local crates that aren't published to crates.io. These should not have versions.
test-fixture = { path = "./crates/test-fixture" }
@@ -407,7 +407,7 @@ impl ExprCollector<'_> {
let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
let generic_args = e
.generic_arg_list()
.and_then(|it| GenericArgs::from_ast(&self.ctx(), it))
.and_then(|it| GenericArgs::from_ast(&mut self.ctx(), it))
.map(Box::new);
self.alloc_expr(
Expr::MethodCall { receiver, method_name, args, generic_args },

@@ -533,7 +533,7 @@ impl ExprCollector<'_> {
ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e),
ast::Expr::CastExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.ty());
let type_ref = TypeRef::from_ast_opt(&mut self.ctx(), e.ty());
self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
}
ast::Expr::RefExpr(e) => {

@@ -572,13 +572,15 @@ impl ExprCollector<'_> {
arg_types.reserve_exact(num_params);
for param in pl.params() {
let pat = this.collect_pat_top(param.pat());
let type_ref = param.ty().map(|it| TypeRef::from_ast(&this.ctx(), it));
let type_ref = param.ty().map(|it| TypeRef::from_ast(&mut this.ctx(), it));
args.push(pat);
arg_types.push(type_ref);
}
}
let ret_type =
e.ret_type().and_then(|r| r.ty()).map(|it| TypeRef::from_ast(&this.ctx(), it));
let ret_type = e
.ret_type()
.and_then(|r| r.ty())
.map(|it| TypeRef::from_ast(&mut this.ctx(), it));

let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine);
let prev_try_block_label = this.current_try_block_label.take();

@@ -705,7 +707,7 @@ impl ExprCollector<'_> {
ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
ast::Expr::AsmExpr(e) => self.lower_inline_asm(e, syntax_ptr),
ast::Expr::OffsetOfExpr(e) => {
let container = TypeRef::from_ast_opt(&self.ctx(), e.ty());
let container = TypeRef::from_ast_opt(&mut self.ctx(), e.ty());
let fields = e.fields().map(|it| it.as_name()).collect();
self.alloc_expr(Expr::OffsetOf(OffsetOf { container, fields }), syntax_ptr)
}

@@ -1317,7 +1319,7 @@ impl ExprCollector<'_> {
return;
}
let pat = self.collect_pat_top(stmt.pat());
let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it));
let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&mut self.ctx(), it));
let initializer = stmt.initializer().map(|e| self.collect_expr(e));
let else_branch = stmt
.let_else()
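Note on the pattern above: the repeated change from `&self.ctx()` / `&this.ctx()` to `&mut self.ctx()` reflects that the lowering context now hands out `TypeRefId`s by pushing into storage it owns, so helpers need an exclusive borrow. A minimal self-contained sketch of that shape (the names here are illustrative stand-ins, not the real rust-analyzer types):

    // Stand-ins: the real code allocates TypeRefs into an arena owned by LowerCtx.
    struct TypeRef;
    type TypeRefId = usize;

    struct LowerCtx {
        types: Vec<TypeRef>, // arena-like storage the context allocates into
    }

    impl LowerCtx {
        // Allocation mutates the storage, hence `&mut self`; this is why call
        // sites now pass the context as `&mut` instead of `&`.
        fn alloc_type_ref(&mut self, ty: TypeRef) -> TypeRefId {
            self.types.push(ty);
            self.types.len() - 1
        }
    }

    fn type_ref_from_ast(ctx: &mut LowerCtx) -> TypeRefId {
        ctx.alloc_type_ref(TypeRef)
    }

    fn main() {
        let mut ctx = LowerCtx { types: Vec::new() };
        let _id = type_ref_from_ast(&mut ctx);
    }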
@@ -37,6 +37,7 @@ pub struct FunctionData {
pub name: Name,
pub params: Box<[TypeRefId]>,
pub ret_type: TypeRefId,
// FIXME: why are these stored here? They should be accessed via the query
pub attrs: Attrs,
pub visibility: RawVisibility,
pub abi: Option<Symbol>,
@ -21,7 +21,79 @@
|
|||
//!
|
||||
//! This is a work of fiction. Any similarities to Kotlin's `BindingContext` are
|
||||
//! a coincidence.
|
||||
pub mod keys;
|
||||
|
||||
pub mod keys {
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use hir_expand::{attrs::AttrId, MacroCallId};
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{ast, AstNode, AstPtr};
|
||||
|
||||
use crate::{
|
||||
dyn_map::{DynMap, Policy},
|
||||
BlockId, ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId,
|
||||
LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId,
|
||||
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
|
||||
};
|
||||
|
||||
pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
|
||||
|
||||
pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
|
||||
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
|
||||
pub const CONST: Key<ast::Const, ConstId> = Key::new();
|
||||
pub const STATIC: Key<ast::Static, StaticId> = Key::new();
|
||||
pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
|
||||
pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
|
||||
pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
|
||||
pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
|
||||
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
|
||||
pub const UNION: Key<ast::Union, UnionId> = Key::new();
|
||||
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
|
||||
pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
|
||||
pub const USE: Key<ast::Use, UseId> = Key::new();
|
||||
|
||||
pub const ENUM_VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
|
||||
pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
|
||||
pub const RECORD_FIELD: Key<ast::RecordField, FieldId> = Key::new();
|
||||
pub const TYPE_PARAM: Key<ast::TypeParam, TypeOrConstParamId> = Key::new();
|
||||
pub const CONST_PARAM: Key<ast::ConstParam, TypeOrConstParamId> = Key::new();
|
||||
pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
|
||||
|
||||
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
|
||||
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
|
||||
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
|
||||
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
|
||||
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
|
||||
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
|
||||
Key::new();
|
||||
|
||||
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
|
||||
/// equal if they point to exactly the same object.
|
||||
///
|
||||
/// In general, we do not guarantee that we have exactly one instance of a
|
||||
/// syntax tree for each file. We probably should add such guarantee, but, for
|
||||
/// the time being, we will use identity-less AstPtr comparison.
|
||||
pub struct AstPtrPolicy<AST, ID> {
|
||||
_phantom: PhantomData<(AST, ID)>,
|
||||
}
|
||||
|
||||
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
|
||||
type K = AstPtr<AST>;
|
||||
type V = ID;
|
||||
fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
|
||||
map.map
|
||||
.entry::<FxHashMap<AstPtr<AST>, ID>>()
|
||||
.or_insert_with(Default::default)
|
||||
.insert(key, value);
|
||||
}
|
||||
fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
|
||||
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
|
||||
}
|
||||
fn is_empty(map: &DynMap) -> bool {
|
||||
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use std::{
|
||||
hash::Hash,
|
||||
|
|
|
@ -1,72 +0,0 @@
|
|||
//! keys to be used with `DynMap`
|
||||
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use hir_expand::{attrs::AttrId, MacroCallId};
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{ast, AstNode, AstPtr};
|
||||
|
||||
use crate::{
|
||||
dyn_map::{DynMap, Policy},
|
||||
BlockId, ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId,
|
||||
LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId,
|
||||
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
|
||||
};
|
||||
|
||||
pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
|
||||
|
||||
pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
|
||||
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
|
||||
pub const CONST: Key<ast::Const, ConstId> = Key::new();
|
||||
pub const STATIC: Key<ast::Static, StaticId> = Key::new();
|
||||
pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
|
||||
pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
|
||||
pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
|
||||
pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
|
||||
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
|
||||
pub const UNION: Key<ast::Union, UnionId> = Key::new();
|
||||
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
|
||||
pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
|
||||
pub const USE: Key<ast::Use, UseId> = Key::new();
|
||||
|
||||
pub const ENUM_VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
|
||||
pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
|
||||
pub const RECORD_FIELD: Key<ast::RecordField, FieldId> = Key::new();
|
||||
pub const TYPE_PARAM: Key<ast::TypeParam, TypeOrConstParamId> = Key::new();
|
||||
pub const CONST_PARAM: Key<ast::ConstParam, TypeOrConstParamId> = Key::new();
|
||||
pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
|
||||
|
||||
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
|
||||
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
|
||||
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
|
||||
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
|
||||
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
|
||||
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
|
||||
Key::new();
|
||||
|
||||
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
|
||||
/// equal if they point to exactly the same object.
|
||||
///
|
||||
/// In general, we do not guarantee that we have exactly one instance of a
|
||||
/// syntax tree for each file. We probably should add such guarantee, but, for
|
||||
/// the time being, we will use identity-less AstPtr comparison.
|
||||
pub struct AstPtrPolicy<AST, ID> {
|
||||
_phantom: PhantomData<(AST, ID)>,
|
||||
}
|
||||
|
||||
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
|
||||
type K = AstPtr<AST>;
|
||||
type V = ID;
|
||||
fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
|
||||
map.map
|
||||
.entry::<FxHashMap<AstPtr<AST>, ID>>()
|
||||
.or_insert_with(Default::default)
|
||||
.insert(key, value);
|
||||
}
|
||||
fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
|
||||
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
|
||||
}
|
||||
fn is_empty(map: &DynMap) -> bool {
|
||||
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
|
||||
}
|
||||
}
|
|
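The two blocks above replace the out-of-line `pub mod keys;` with an inline `pub mod keys { ... }` and delete the old standalone file; the constants themselves are unchanged. For readers unfamiliar with the idea behind `DynMap` and its typed `Key`s, here is a tiny type-keyed map built only on `std`. It illustrates the general technique, not the actual `DynMap` API:

    use std::any::{Any, TypeId};
    use std::collections::HashMap;

    // One slot per value type, looked up by TypeId. DynMap generalises this so
    // a Key carries both the lookup type and the value type.
    #[derive(Default)]
    struct TypeMap {
        map: HashMap<TypeId, Box<dyn Any>>,
    }

    impl TypeMap {
        fn insert<T: Any>(&mut self, value: T) {
            self.map.insert(TypeId::of::<T>(), Box::new(value));
        }
        fn get<T: Any>(&self) -> Option<&T> {
            self.map.get(&TypeId::of::<T>())?.downcast_ref::<T>()
        }
    }

    fn main() {
        let mut map = TypeMap::default();
        map.insert(42u32);
        map.insert("block".to_string());
        assert_eq!(map.get::<u32>(), Some(&42));
        assert_eq!(map.get::<String>().map(String::as_str), Some("block"));
    }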
@@ -161,14 +161,14 @@ impl Expander {
types_map: &mut TypesMap,
types_source_map: &mut TypesSourceMap,
) -> Option<Path> {
let ctx = LowerCtx::with_span_map_cell(
let mut ctx = LowerCtx::with_span_map_cell(
db,
self.current_file_id,
self.span_map.clone(),
types_map,
types_source_map,
);
Path::from_src(&ctx, path)
Path::from_src(&mut ctx, path)
}

fn within_limit<F, T: ast::AstNode>(
@@ -451,7 +451,7 @@ pub(crate) struct GenericParamsCollector {
impl GenericParamsCollector {
pub(crate) fn fill(
&mut self,
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(
Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,

@@ -468,7 +468,7 @@ impl GenericParamsCollector {
pub(crate) fn fill_bounds(
&mut self,
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
type_bounds: Option<ast::TypeBoundList>,
target: Either<TypeRefId, LifetimeRef>,
) {

@@ -479,7 +479,7 @@ impl GenericParamsCollector {
fn fill_params(
&mut self,
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(
Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,

@@ -535,7 +535,11 @@ impl GenericParamsCollector {
}
}

fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx<'_>, where_clause: ast::WhereClause) {
fn fill_where_predicates(
&mut self,
lower_ctx: &mut LowerCtx<'_>,
where_clause: ast::WhereClause,
) {
for pred in where_clause.predicates() {
let target = if let Some(type_ref) = pred.ty() {
Either::Left(TypeRef::from_ast(lower_ctx, type_ref))

@@ -569,7 +573,7 @@ impl GenericParamsCollector {
fn add_where_predicate_from_bound(
&mut self,
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
bound: ast::TypeBound,
hrtb_lifetimes: Option<&[Name]>,
target: Either<TypeRefId, LifetimeRef>,

@@ -670,8 +674,9 @@ impl GenericParamsCollector {
{
let (mut macro_types_map, mut macro_types_source_map) =
(TypesMap::default(), TypesSourceMap::default());
let ctx = expander.ctx(db, &mut macro_types_map, &mut macro_types_source_map);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
let mut ctx =
expander.ctx(db, &mut macro_types_map, &mut macro_types_source_map);
let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree());
self.fill_implicit_impl_trait_args(
db,
generics_types_map,
@@ -98,7 +98,7 @@ pub struct TraitRef {
impl TraitRef {
/// Converts an `ast::PathType` to a `hir::TraitRef`.
pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Option<Self> {
pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> Option<Self> {
// FIXME: Use `Path::from_src`
match node {
ast::Type::PathType(path) => {

@@ -240,7 +240,7 @@ pub enum TraitBoundModifier {
impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> TypeRefId {
pub fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> TypeRefId {
let ty = match &node {
ast::Type::ParenType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()),
ast::Type::TupleType(inner) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(

@@ -321,8 +321,9 @@ impl TypeRef {
// Disallow nested impl traits
TypeRef::Error
} else {
let _guard = ctx.outer_impl_trait_scope(true);
TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
ctx.with_outer_impl_trait_scope(true, |ctx| {
TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
})
}
}
ast::Type::DynTraitType(inner) => {

@@ -336,7 +337,7 @@ impl TypeRef {
ctx.alloc_type_ref(ty, AstPtr::new(&node))
}

pub(crate) fn from_ast_opt(ctx: &LowerCtx<'_>, node: Option<ast::Type>) -> TypeRefId {
pub(crate) fn from_ast_opt(ctx: &mut LowerCtx<'_>, node: Option<ast::Type>) -> TypeRefId {
match node {
Some(node) => TypeRef::from_ast(ctx, node),
None => ctx.alloc_error_type(),

@@ -410,7 +411,7 @@ impl TypeRef {
}

pub(crate) fn type_bounds_from_ast(
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
type_bounds_opt: Option<ast::TypeBoundList>,
) -> ThinVec<TypeBound> {
if let Some(type_bounds) = type_bounds_opt {

@@ -423,8 +424,8 @@ pub(crate) fn type_bounds_from_ast(
}

impl TypeBound {
pub(crate) fn from_ast(ctx: &LowerCtx<'_>, node: ast::TypeBound) -> Self {
let lower_path_type = |path_type: ast::PathType| ctx.lower_path(path_type.path()?);
pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::TypeBound) -> Self {
let mut lower_path_type = |path_type: ast::PathType| ctx.lower_path(path_type.path()?);

match node.kind() {
ast::TypeBoundKind::PathType(path_type) => {
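In the `impl Trait` arm above, the RAII-style `let _guard = ctx.outer_impl_trait_scope(true)` becomes a closure-based `ctx.with_outer_impl_trait_scope(true, |ctx| ...)`: once the context is passed by `&mut`, a guard object holding that borrow would block any further use of `ctx` inside the scope. A stand-alone version of the closure-scoped flag, with the same shape as the method this commit introduces and the surrounding types trimmed away:

    use std::mem;

    struct Ctx {
        outer_impl_trait: bool,
    }

    impl Ctx {
        // Set the flag for the duration of `f`, then restore the previous value.
        fn with_outer_impl_trait_scope<R>(
            &mut self,
            value: bool,
            f: impl FnOnce(&mut Self) -> R,
        ) -> R {
            let old = mem::replace(&mut self.outer_impl_trait, value);
            let result = f(self);
            self.outer_impl_trait = old;
            result
        }
    }

    fn main() {
        let mut ctx = Ctx { outer_impl_trait: false };
        ctx.with_outer_impl_trait_scope(true, |ctx| assert!(ctx.outer_impl_trait));
        assert!(!ctx.outer_impl_trait);
    }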
@@ -361,9 +361,7 @@ impl ItemScope {
self.macro_invocations.get(&call).copied()
}

pub(crate) fn iter_macro_invoc(
&self,
) -> impl Iterator<Item = (&AstId<ast::MacroCall>, &MacroCallId)> {
pub fn iter_macro_invoc(&self) -> impl Iterator<Item = (&AstId<ast::MacroCall>, &MacroCallId)> {
self.macro_invocations.iter()
}
}

@@ -401,9 +399,7 @@ impl ItemScope {
self.macro_invocations.insert(call, call_id);
}

pub(crate) fn attr_macro_invocs(
&self,
) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
pub fn attr_macro_invocs(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
self.attr_macros.iter().map(|(k, v)| (*k, *v))
}

@@ -440,7 +436,7 @@ impl ItemScope {
});
}

pub(crate) fn derive_macro_invocs(
pub fn derive_macro_invocs(
&self,
) -> impl Iterator<
Item = (
@ -234,11 +234,11 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let visibility = self.lower_visibility(strukt);
|
||||
let name = strukt.name()?.as_name();
|
||||
let ast_id = self.source_ast_id_map.ast_id(strukt);
|
||||
let (fields, kind, attrs) = self.lower_fields(&strukt.kind(), &body_ctx);
|
||||
let (fields, kind, attrs) = self.lower_fields(&strukt.kind(), &mut body_ctx);
|
||||
let (generic_params, generics_source_map) =
|
||||
self.lower_generic_params(HasImplicitSelf::No, strukt);
|
||||
types_map.shrink_to_fit();
|
||||
|
@ -273,7 +273,7 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_fields(
|
||||
&mut self,
|
||||
strukt_kind: &ast::StructKind,
|
||||
body_ctx: &LowerCtx<'_>,
|
||||
body_ctx: &mut LowerCtx<'_>,
|
||||
) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) {
|
||||
match strukt_kind {
|
||||
ast::StructKind::Record(it) => {
|
||||
|
@ -308,7 +308,11 @@ impl<'a> Ctx<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_record_field(&mut self, field: &ast::RecordField, body_ctx: &LowerCtx<'_>) -> Field {
|
||||
fn lower_record_field(
|
||||
&mut self,
|
||||
field: &ast::RecordField,
|
||||
body_ctx: &mut LowerCtx<'_>,
|
||||
) -> Field {
|
||||
let name = match field.name() {
|
||||
Some(name) => name.as_name(),
|
||||
None => Name::missing(),
|
||||
|
@ -323,7 +327,7 @@ impl<'a> Ctx<'a> {
|
|||
&mut self,
|
||||
idx: usize,
|
||||
field: &ast::TupleField,
|
||||
body_ctx: &LowerCtx<'_>,
|
||||
body_ctx: &mut LowerCtx<'_>,
|
||||
) -> Field {
|
||||
let name = Name::new_tuple_field(idx);
|
||||
let visibility = self.lower_visibility(field);
|
||||
|
@ -334,13 +338,13 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let visibility = self.lower_visibility(union);
|
||||
let name = union.name()?.as_name();
|
||||
let ast_id = self.source_ast_id_map.ast_id(union);
|
||||
let (fields, _, attrs) = match union.record_field_list() {
|
||||
Some(record_field_list) => {
|
||||
self.lower_fields(&StructKind::Record(record_field_list), &body_ctx)
|
||||
self.lower_fields(&StructKind::Record(record_field_list), &mut body_ctx)
|
||||
}
|
||||
None => (Box::default(), FieldsShape::Record, Vec::default()),
|
||||
};
|
||||
|
@ -409,12 +413,12 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_variant(&mut self, variant: &ast::Variant) -> Idx<Variant> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let name = match variant.name() {
|
||||
Some(name) => name.as_name(),
|
||||
None => Name::missing(),
|
||||
};
|
||||
let (fields, kind, attrs) = self.lower_fields(&variant.kind(), &body_ctx);
|
||||
let (fields, kind, attrs) = self.lower_fields(&variant.kind(), &mut body_ctx);
|
||||
let ast_id = self.source_ast_id_map.ast_id(variant);
|
||||
types_map.shrink_to_fit();
|
||||
types_source_map.shrink_to_fit();
|
||||
|
@ -436,7 +440,7 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
|
||||
let visibility = self.lower_visibility(func);
|
||||
let name = func.name()?.as_name();
|
||||
|
@ -457,7 +461,7 @@ impl<'a> Ctx<'a> {
|
|||
RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
|
||||
);
|
||||
let self_type = match self_param.ty() {
|
||||
Some(type_ref) => TypeRef::from_ast(&body_ctx, type_ref),
|
||||
Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref),
|
||||
None => {
|
||||
let self_type = body_ctx.alloc_type_ref_desugared(TypeRef::Path(
|
||||
Name::new_symbol_root(sym::Self_.clone()).into(),
|
||||
|
@ -492,7 +496,7 @@ impl<'a> Ctx<'a> {
|
|||
Param { type_ref: None }
|
||||
}
|
||||
None => {
|
||||
let type_ref = TypeRef::from_ast_opt(&body_ctx, param.ty());
|
||||
let type_ref = TypeRef::from_ast_opt(&mut body_ctx, param.ty());
|
||||
Param { type_ref: Some(type_ref) }
|
||||
}
|
||||
};
|
||||
|
@ -502,7 +506,7 @@ impl<'a> Ctx<'a> {
|
|||
|
||||
let ret_type = match func.ret_type() {
|
||||
Some(rt) => match rt.ty() {
|
||||
Some(type_ref) => TypeRef::from_ast(&body_ctx, type_ref),
|
||||
Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref),
|
||||
None if rt.thin_arrow_token().is_some() => body_ctx.alloc_error_type(),
|
||||
None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()),
|
||||
},
|
||||
|
@ -581,11 +585,11 @@ impl<'a> Ctx<'a> {
|
|||
) -> Option<FileItemTreeId<TypeAlias>> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let name = type_alias.name()?.as_name();
|
||||
let type_ref = type_alias.ty().map(|it| TypeRef::from_ast(&body_ctx, it));
|
||||
let type_ref = type_alias.ty().map(|it| TypeRef::from_ast(&mut body_ctx, it));
|
||||
let visibility = self.lower_visibility(type_alias);
|
||||
let bounds = self.lower_type_bounds(type_alias, &body_ctx);
|
||||
let bounds = self.lower_type_bounds(type_alias, &mut body_ctx);
|
||||
let ast_id = self.source_ast_id_map.ast_id(type_alias);
|
||||
let (generic_params, generics_source_map) =
|
||||
self.lower_generic_params(HasImplicitSelf::No, type_alias);
|
||||
|
@ -612,9 +616,9 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let name = static_.name()?.as_name();
|
||||
let type_ref = TypeRef::from_ast_opt(&body_ctx, static_.ty());
|
||||
let type_ref = TypeRef::from_ast_opt(&mut body_ctx, static_.ty());
|
||||
let visibility = self.lower_visibility(static_);
|
||||
let mutable = static_.mut_token().is_some();
|
||||
let has_safe_kw = static_.safe_token().is_some();
|
||||
|
@ -639,9 +643,9 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let name = konst.name().map(|it| it.as_name());
|
||||
let type_ref = TypeRef::from_ast_opt(&body_ctx, konst.ty());
|
||||
let type_ref = TypeRef::from_ast_opt(&mut body_ctx, konst.ty());
|
||||
let visibility = self.lower_visibility(konst);
|
||||
let ast_id = self.source_ast_id_map.ast_id(konst);
|
||||
types_map.shrink_to_fit();
|
||||
|
@ -724,14 +728,14 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_impl(&mut self, impl_def: &ast::Impl) -> FileItemTreeId<Impl> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
|
||||
let ast_id = self.source_ast_id_map.ast_id(impl_def);
|
||||
// FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
|
||||
// as if it was an non-trait impl. Ideally we want to create a unique missing ref that only
|
||||
// equals itself.
|
||||
let self_ty = TypeRef::from_ast_opt(&body_ctx, impl_def.self_ty());
|
||||
let target_trait = impl_def.trait_().and_then(|tr| TraitRef::from_ast(&body_ctx, tr));
|
||||
let self_ty = TypeRef::from_ast_opt(&mut body_ctx, impl_def.self_ty());
|
||||
let target_trait = impl_def.trait_().and_then(|tr| TraitRef::from_ast(&mut body_ctx, tr));
|
||||
let is_negative = impl_def.excl_token().is_some();
|
||||
let is_unsafe = impl_def.unsafe_token().is_some();
|
||||
|
||||
|
@ -870,13 +874,8 @@ impl<'a> Ctx<'a> {
|
|||
) -> (Arc<GenericParams>, TypesSourceMap) {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map);
|
||||
debug_assert!(self.generic_param_attr_buffer.is_empty(),);
|
||||
let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
|
||||
param| {
|
||||
let attrs = RawAttrs::new(self.db.upcast(), ¶m, body_ctx.span_map());
|
||||
debug_assert!(self.generic_param_attr_buffer.insert(item, attrs).is_none());
|
||||
};
|
||||
body_ctx.take_impl_traits_bounds();
|
||||
let mut generics = GenericParamsCollector::default();
|
||||
|
||||
|
@ -892,16 +891,19 @@ impl<'a> Ctx<'a> {
|
|||
);
|
||||
// add super traits as bounds on Self
|
||||
// i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar`
|
||||
generics.fill_bounds(
|
||||
&body_ctx,
|
||||
bounds,
|
||||
Either::Left(body_ctx.alloc_type_ref_desugared(TypeRef::Path(
|
||||
Name::new_symbol_root(sym::Self_.clone()).into(),
|
||||
))),
|
||||
);
|
||||
let bound_target = Either::Left(body_ctx.alloc_type_ref_desugared(TypeRef::Path(
|
||||
Name::new_symbol_root(sym::Self_.clone()).into(),
|
||||
)));
|
||||
generics.fill_bounds(&mut body_ctx, bounds, bound_target);
|
||||
}
|
||||
|
||||
generics.fill(&body_ctx, node, add_param_attrs);
|
||||
let span_map = body_ctx.span_map().clone();
|
||||
let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
|
||||
param| {
|
||||
let attrs = RawAttrs::new(self.db.upcast(), ¶m, span_map.as_ref());
|
||||
debug_assert!(self.generic_param_attr_buffer.insert(item, attrs).is_none());
|
||||
};
|
||||
generics.fill(&mut body_ctx, node, add_param_attrs);
|
||||
|
||||
let generics = generics.finish(types_map, &mut types_source_map);
|
||||
(generics, types_source_map)
|
||||
|
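The hunk above shows a knock-on effect of exclusive borrows: the `Self` bound target is now computed into `bound_target` (and the span map cloned out) before `generics.fill_bounds(&mut body_ctx, ...)` and `generics.fill(&mut body_ctx, ...)` run, because an expression in the argument list can no longer borrow `body_ctx` while `&mut body_ctx` is being passed. A reduced example of the hoisting pattern (illustrative names only):

    struct Ctx {
        data: Vec<u32>,
    }

    impl Ctx {
        fn alloc(&mut self, value: u32) -> usize {
            self.data.push(value);
            self.data.len() - 1
        }
    }

    fn fill_bounds(ctx: &mut Ctx, target: usize) {
        ctx.data[target] += 1;
    }

    fn main() {
        let mut ctx = Ctx { data: Vec::new() };
        // Does not compile: `ctx.alloc(0)` needs a second mutable borrow while
        // `&mut ctx` is already being taken for the call.
        // fill_bounds(&mut ctx, ctx.alloc(0));

        // Hoist the allocation, then make the call:
        let target = ctx.alloc(0);
        fill_bounds(&mut ctx, target);
    }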
@ -910,7 +912,7 @@ impl<'a> Ctx<'a> {
|
|||
fn lower_type_bounds(
|
||||
&mut self,
|
||||
node: &dyn ast::HasTypeBounds,
|
||||
body_ctx: &LowerCtx<'_>,
|
||||
body_ctx: &mut LowerCtx<'_>,
|
||||
) -> Box<[TypeBound]> {
|
||||
match node.type_bound_list() {
|
||||
Some(bound_list) => {
|
||||
|
|
|
@ -47,7 +47,6 @@ pub mod resolver;
|
|||
|
||||
pub mod nameres;
|
||||
|
||||
pub mod child_by_source;
|
||||
pub mod src;
|
||||
|
||||
pub mod find_path;
|
||||
|
@ -354,9 +353,9 @@ impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId);
|
|||
pub struct BlockId(ra_salsa::InternId);
|
||||
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
|
||||
pub struct BlockLoc {
|
||||
ast_id: AstId<ast::BlockExpr>,
|
||||
pub ast_id: AstId<ast::BlockExpr>,
|
||||
/// The containing module.
|
||||
module: ModuleId,
|
||||
pub module: ModuleId,
|
||||
}
|
||||
impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
|
||||
|
||||
|
@ -838,16 +837,18 @@ impl InTypeConstId {
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum GeneralConstId {
|
||||
ConstId(ConstId),
|
||||
StaticId(StaticId),
|
||||
ConstBlockId(ConstBlockId),
|
||||
InTypeConstId(InTypeConstId),
|
||||
}
|
||||
|
||||
impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId);
|
||||
impl_from!(ConstId, StaticId, ConstBlockId, InTypeConstId for GeneralConstId);
|
||||
|
||||
impl GeneralConstId {
|
||||
pub fn generic_def(self, db: &dyn DefDatabase) -> Option<GenericDefId> {
|
||||
match self {
|
||||
GeneralConstId::ConstId(it) => Some(it.into()),
|
||||
GeneralConstId::StaticId(_) => None,
|
||||
GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db),
|
||||
GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db),
|
||||
}
|
||||
|
@ -855,6 +856,9 @@ impl GeneralConstId {
|
|||
|
||||
pub fn name(self, db: &dyn DefDatabase) -> String {
|
||||
match self {
|
||||
GeneralConstId::StaticId(it) => {
|
||||
db.static_data(it).name.display(db.upcast(), Edition::CURRENT).to_string()
|
||||
}
|
||||
GeneralConstId::ConstId(const_id) => db
|
||||
.const_data(const_id)
|
||||
.name
|
||||
|
@ -935,7 +939,7 @@ impl_from!(
|
|||
);
|
||||
|
||||
impl GenericDefId {
|
||||
fn file_id_and_params_of(
|
||||
pub fn file_id_and_params_of(
|
||||
self,
|
||||
db: &dyn DefDatabase,
|
||||
) -> (HirFileId, Option<ast::GenericParamList>) {
|
||||
|
|
|
@ -1,10 +1,7 @@
|
|||
//! Context for lowering paths.
|
||||
use std::cell::{OnceCell, RefCell};
|
||||
use std::{cell::OnceCell, mem};
|
||||
|
||||
use hir_expand::{
|
||||
span_map::{SpanMap, SpanMapRef},
|
||||
AstId, HirFileId, InFile,
|
||||
};
|
||||
use hir_expand::{span_map::SpanMap, AstId, HirFileId, InFile};
|
||||
use span::{AstIdMap, AstIdNode};
|
||||
use stdx::thin_vec::ThinVec;
|
||||
use syntax::ast;
|
||||
|
@ -21,28 +18,11 @@ pub struct LowerCtx<'a> {
|
|||
file_id: HirFileId,
|
||||
span_map: OnceCell<SpanMap>,
|
||||
ast_id_map: OnceCell<Arc<AstIdMap>>,
|
||||
impl_trait_bounds: RefCell<Vec<ThinVec<TypeBound>>>,
|
||||
impl_trait_bounds: Vec<ThinVec<TypeBound>>,
|
||||
// Prevent nested impl traits like `impl Foo<impl Bar>`.
|
||||
outer_impl_trait: RefCell<bool>,
|
||||
types_map: RefCell<(&'a mut TypesMap, &'a mut TypesSourceMap)>,
|
||||
}
|
||||
|
||||
pub(crate) struct OuterImplTraitGuard<'a, 'b> {
|
||||
ctx: &'a LowerCtx<'b>,
|
||||
old: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'b> OuterImplTraitGuard<'a, 'b> {
|
||||
fn new(ctx: &'a LowerCtx<'b>, impl_trait: bool) -> Self {
|
||||
let old = ctx.outer_impl_trait.replace(impl_trait);
|
||||
Self { ctx, old }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for OuterImplTraitGuard<'_, '_> {
|
||||
fn drop(&mut self) {
|
||||
self.ctx.outer_impl_trait.replace(self.old);
|
||||
}
|
||||
outer_impl_trait: bool,
|
||||
types_map: &'a mut TypesMap,
|
||||
types_source_map: &'a mut TypesSourceMap,
|
||||
}
|
||||
|
||||
impl<'a> LowerCtx<'a> {
|
||||
|
@ -57,9 +37,10 @@ impl<'a> LowerCtx<'a> {
|
|||
file_id,
|
||||
span_map: OnceCell::new(),
|
||||
ast_id_map: OnceCell::new(),
|
||||
impl_trait_bounds: RefCell::new(Vec::new()),
|
||||
outer_impl_trait: RefCell::default(),
|
||||
types_map: RefCell::new((types_map, types_source_map)),
|
||||
impl_trait_bounds: Vec::new(),
|
||||
outer_impl_trait: false,
|
||||
types_map,
|
||||
types_source_map,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,17 +56,18 @@ impl<'a> LowerCtx<'a> {
|
|||
file_id,
|
||||
span_map,
|
||||
ast_id_map: OnceCell::new(),
|
||||
impl_trait_bounds: RefCell::new(Vec::new()),
|
||||
outer_impl_trait: RefCell::default(),
|
||||
types_map: RefCell::new((types_map, types_source_map)),
|
||||
impl_trait_bounds: Vec::new(),
|
||||
outer_impl_trait: false,
|
||||
types_map,
|
||||
types_source_map,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
|
||||
self.span_map.get_or_init(|| self.db.span_map(self.file_id)).as_ref()
|
||||
pub(crate) fn span_map(&self) -> &SpanMap {
|
||||
self.span_map.get_or_init(|| self.db.span_map(self.file_id))
|
||||
}
|
||||
|
||||
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
|
||||
pub(crate) fn lower_path(&mut self, ast: ast::Path) -> Option<Path> {
|
||||
Path::from_src(self, ast)
|
||||
}
|
||||
|
||||
|
@ -96,44 +78,44 @@ impl<'a> LowerCtx<'a> {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn update_impl_traits_bounds(&self, bounds: ThinVec<TypeBound>) {
|
||||
self.impl_trait_bounds.borrow_mut().push(bounds);
|
||||
pub fn update_impl_traits_bounds_from_type_ref(&mut self, type_ref: TypeRefId) {
|
||||
TypeRef::walk(type_ref, self.types_map, &mut |tr| {
|
||||
if let TypeRef::ImplTrait(bounds) = tr {
|
||||
self.impl_trait_bounds.push(bounds.clone());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn take_impl_traits_bounds(&self) -> Vec<ThinVec<TypeBound>> {
|
||||
self.impl_trait_bounds.take()
|
||||
pub fn take_impl_traits_bounds(&mut self) -> Vec<ThinVec<TypeBound>> {
|
||||
mem::take(&mut self.impl_trait_bounds)
|
||||
}
|
||||
|
||||
pub(crate) fn outer_impl_trait(&self) -> bool {
|
||||
*self.outer_impl_trait.borrow()
|
||||
self.outer_impl_trait
|
||||
}
|
||||
|
||||
pub(crate) fn outer_impl_trait_scope<'b>(
|
||||
&'b self,
|
||||
pub(crate) fn with_outer_impl_trait_scope<R>(
|
||||
&mut self,
|
||||
impl_trait: bool,
|
||||
) -> OuterImplTraitGuard<'b, 'a> {
|
||||
OuterImplTraitGuard::new(self, impl_trait)
|
||||
f: impl FnOnce(&mut Self) -> R,
|
||||
) -> R {
|
||||
let old = mem::replace(&mut self.outer_impl_trait, impl_trait);
|
||||
let result = f(self);
|
||||
self.outer_impl_trait = old;
|
||||
result
|
||||
}
|
||||
|
||||
pub(crate) fn alloc_type_ref(&self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
|
||||
let mut types_map = self.types_map.borrow_mut();
|
||||
let (types_map, types_source_map) = &mut *types_map;
|
||||
let id = types_map.types.alloc(type_ref);
|
||||
types_source_map.types_map_back.insert(id, InFile::new(self.file_id, node));
|
||||
pub(crate) fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
|
||||
let id = self.types_map.types.alloc(type_ref);
|
||||
self.types_source_map.types_map_back.insert(id, InFile::new(self.file_id, node));
|
||||
id
|
||||
}
|
||||
|
||||
pub(crate) fn alloc_type_ref_desugared(&self, type_ref: TypeRef) -> TypeRefId {
|
||||
self.types_map.borrow_mut().0.types.alloc(type_ref)
|
||||
pub(crate) fn alloc_type_ref_desugared(&mut self, type_ref: TypeRef) -> TypeRefId {
|
||||
self.types_map.types.alloc(type_ref)
|
||||
}
|
||||
|
||||
pub(crate) fn alloc_error_type(&self) -> TypeRefId {
|
||||
self.types_map.borrow_mut().0.types.alloc(TypeRef::Error)
|
||||
}
|
||||
|
||||
// FIXME: If we alloc while holding this, well... Bad Things will happen. Need to change this
|
||||
// to use proper mutability instead of interior mutability.
|
||||
pub(crate) fn types_map(&self) -> std::cell::Ref<'_, TypesMap> {
|
||||
std::cell::Ref::map(self.types_map.borrow(), |it| &*it.0)
|
||||
pub(crate) fn alloc_error_type(&mut self) -> TypeRefId {
|
||||
self.types_map.types.alloc(TypeRef::Error)
|
||||
}
|
||||
}
|
||||
|
|
|
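The `LowerCtx` hunks above are the heart of the refactor: the `RefCell`-based fields (`impl_trait_bounds`, `outer_impl_trait`, `types_map`) become plain fields behind `&mut self` methods, removing the runtime borrow bookkeeping and the hazard called out in the old FIXME about allocating while a borrow is held. A condensed before/after sketch, with field names shortened and types simplified:

    use std::cell::RefCell;

    // Before: shared references everywhere, mutation through RefCell,
    // misuse only caught at runtime as a borrow panic.
    struct CtxBefore {
        impl_trait_bounds: RefCell<Vec<String>>,
    }

    impl CtxBefore {
        fn push_bound(&self, bound: String) {
            self.impl_trait_bounds.borrow_mut().push(bound);
        }
    }

    // After: plain data plus `&mut self`, checked at compile time.
    struct CtxAfter {
        impl_trait_bounds: Vec<String>,
    }

    impl CtxAfter {
        fn push_bound(&mut self, bound: String) {
            self.impl_trait_bounds.push(bound);
        }
    }

    fn main() {
        let before = CtxBefore { impl_trait_bounds: RefCell::new(Vec::new()) };
        before.push_bound("Send".into());

        let mut after = CtxAfter { impl_trait_bounds: Vec::new() };
        after.push_bound("Send".into());

        assert_eq!(before.impl_trait_bounds.borrow().len(), after.impl_trait_bounds.len());
    }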
@@ -121,7 +121,7 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
pub fn from_src(ctx: &mut LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(ctx, path)
}

@@ -284,7 +284,7 @@ impl<'a> PathSegments<'a> {
impl GenericArgs {
pub(crate) fn from_ast(
lower_ctx: &LowerCtx<'_>,
lower_ctx: &mut LowerCtx<'_>,
node: ast::GenericArgList,
) -> Option<GenericArgs> {
lower::lower_generic_args(lower_ctx, node)
@ -19,12 +19,11 @@ use crate::{
|
|||
|
||||
/// Converts an `ast::Path` to `Path`. Works with use trees.
|
||||
/// It correctly handles `$crate` based path from macro call.
|
||||
pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
|
||||
pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
|
||||
let mut kind = PathKind::Plain;
|
||||
let mut type_anchor = None;
|
||||
let mut segments = Vec::new();
|
||||
let mut generic_args = Vec::new();
|
||||
let span_map = ctx.span_map();
|
||||
loop {
|
||||
let segment = path.segment()?;
|
||||
|
||||
|
@ -37,7 +36,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
|
|||
if name_ref.text() == "$crate" {
|
||||
break kind = resolve_crate_root(
|
||||
ctx.db.upcast(),
|
||||
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
|
||||
ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx,
|
||||
)
|
||||
.map(PathKind::DollarCrate)
|
||||
.unwrap_or(PathKind::Crate);
|
||||
|
@ -151,7 +150,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
|
|||
// We follow what it did anyway :)
|
||||
if segments.len() == 1 && kind == PathKind::Plain {
|
||||
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
|
||||
let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
|
||||
let syn_ctxt = ctx.span_map().span_for_range(path.segment()?.syntax().text_range()).ctx;
|
||||
if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
|
||||
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
|
||||
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
|
||||
|
@ -183,7 +182,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
|
|||
}
|
||||
|
||||
pub(super) fn lower_generic_args(
|
||||
lower_ctx: &LowerCtx<'_>,
|
||||
lower_ctx: &mut LowerCtx<'_>,
|
||||
node: ast::GenericArgList,
|
||||
) -> Option<GenericArgs> {
|
||||
let mut args = Vec::new();
|
||||
|
@ -192,13 +191,7 @@ pub(super) fn lower_generic_args(
|
|||
match generic_arg {
|
||||
ast::GenericArg::TypeArg(type_arg) => {
|
||||
let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty());
|
||||
let types_map = lower_ctx.types_map();
|
||||
TypeRef::walk(type_ref, &types_map, &mut |tr| {
|
||||
if let TypeRef::ImplTrait(bounds) = tr {
|
||||
lower_ctx.update_impl_traits_bounds(bounds.clone());
|
||||
}
|
||||
});
|
||||
drop(types_map);
|
||||
lower_ctx.update_impl_traits_bounds_from_type_ref(type_ref);
|
||||
args.push(GenericArg::Type(type_ref));
|
||||
}
|
||||
ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
|
||||
|
@ -208,27 +201,22 @@ pub(super) fn lower_generic_args(
|
|||
}
|
||||
if let Some(name_ref) = assoc_type_arg.name_ref() {
|
||||
// Nested impl traits like `impl Foo<Assoc = impl Bar>` are allowed
|
||||
let _guard = lower_ctx.outer_impl_trait_scope(false);
|
||||
let name = name_ref.as_name();
|
||||
let args = assoc_type_arg
|
||||
.generic_arg_list()
|
||||
.and_then(|args| lower_generic_args(lower_ctx, args));
|
||||
let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
|
||||
let type_ref = type_ref.inspect(|&tr| {
|
||||
let types_map = lower_ctx.types_map();
|
||||
TypeRef::walk(tr, &types_map, &mut |tr| {
|
||||
if let TypeRef::ImplTrait(bounds) = tr {
|
||||
lower_ctx.update_impl_traits_bounds(bounds.clone());
|
||||
}
|
||||
});
|
||||
drop(types_map);
|
||||
lower_ctx.with_outer_impl_trait_scope(false, |lower_ctx| {
|
||||
let name = name_ref.as_name();
|
||||
let args = assoc_type_arg
|
||||
.generic_arg_list()
|
||||
.and_then(|args| lower_generic_args(lower_ctx, args));
|
||||
let type_ref =
|
||||
assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
|
||||
let type_ref = type_ref
|
||||
.inspect(|&tr| lower_ctx.update_impl_traits_bounds_from_type_ref(tr));
|
||||
let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
|
||||
l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
|
||||
} else {
|
||||
Box::default()
|
||||
};
|
||||
bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
|
||||
});
|
||||
let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
|
||||
l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
|
||||
} else {
|
||||
Box::default()
|
||||
};
|
||||
bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
|
||||
}
|
||||
}
|
||||
ast::GenericArg::LifetimeArg(lifetime_arg) => {
|
||||
|
@ -258,7 +246,7 @@ pub(super) fn lower_generic_args(
|
|||
/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
|
||||
/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
|
||||
fn lower_generic_args_from_fn_path(
|
||||
ctx: &LowerCtx<'_>,
|
||||
ctx: &mut LowerCtx<'_>,
|
||||
params: Option<ast::ParamList>,
|
||||
ret_type: Option<ast::RetType>,
|
||||
) -> Option<GenericArgs> {
|
||||
|
|
|
@ -615,7 +615,7 @@ pub(crate) fn associated_ty_data_query(
|
|||
let type_alias_data = db.type_alias_data(type_alias);
|
||||
let generic_params = generics(db.upcast(), type_alias.into());
|
||||
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
|
||||
let ctx =
|
||||
let mut ctx =
|
||||
crate::TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, type_alias.into())
|
||||
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
|
||||
|
||||
|
@ -627,14 +627,16 @@ pub(crate) fn associated_ty_data_query(
|
|||
.build();
|
||||
let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
|
||||
|
||||
let mut bounds: Vec<_> = type_alias_data
|
||||
.bounds
|
||||
.iter()
|
||||
.flat_map(|bound| ctx.lower_type_bound(bound, self_ty.clone(), false))
|
||||
.filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
|
||||
.collect();
|
||||
let mut bounds = Vec::new();
|
||||
for bound in &type_alias_data.bounds {
|
||||
ctx.lower_type_bound(bound, self_ty.clone(), false).for_each(|pred| {
|
||||
if let Some(pred) = generic_predicate_to_inline_bound(db, &pred, &self_ty) {
|
||||
bounds.push(pred);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if !ctx.unsized_types.borrow().contains(&self_ty) {
|
||||
if !ctx.unsized_types.contains(&self_ty) {
|
||||
let sized_trait = db
|
||||
.lang_item(resolver.krate(), LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
|
||||
|
|
|
@ -8,7 +8,7 @@ use hir_def::{
|
|||
path::Path,
|
||||
resolver::{Resolver, ValueNs},
|
||||
type_ref::LiteralConstRef,
|
||||
ConstBlockLoc, EnumVariantId, GeneralConstId, StaticId,
|
||||
ConstBlockLoc, EnumVariantId, GeneralConstId, HasModule as _, StaticId,
|
||||
};
|
||||
use hir_expand::Lookup;
|
||||
use stdx::never;
|
||||
|
@ -56,6 +56,21 @@ pub enum ConstEvalError {
|
|||
MirEvalError(MirEvalError),
|
||||
}
|
||||
|
||||
impl ConstEvalError {
|
||||
pub fn pretty_print(
|
||||
&self,
|
||||
f: &mut String,
|
||||
db: &dyn HirDatabase,
|
||||
span_formatter: impl Fn(span::FileId, span::TextRange) -> String,
|
||||
edition: span::Edition,
|
||||
) -> std::result::Result<(), std::fmt::Error> {
|
||||
match self {
|
||||
ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter, edition),
|
||||
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter, edition),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<MirLowerError> for ConstEvalError {
|
||||
fn from(value: MirLowerError) -> Self {
|
||||
match value {
|
||||
|
@ -236,6 +251,10 @@ pub(crate) fn const_eval_query(
|
|||
GeneralConstId::ConstId(c) => {
|
||||
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
|
||||
}
|
||||
GeneralConstId::StaticId(s) => {
|
||||
let krate = s.module(db.upcast()).krate();
|
||||
db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
|
||||
}
|
||||
GeneralConstId::ConstBlockId(c) => {
|
||||
let ConstBlockLoc { parent, root } = db.lookup_intern_anonymous_const(c);
|
||||
let body = db.body(parent);
|
||||
|
@ -249,7 +268,7 @@ pub(crate) fn const_eval_query(
|
|||
}
|
||||
GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
|
||||
};
|
||||
let c = interpret_mir(db, body, false, trait_env).0?;
|
||||
let c = interpret_mir(db, body, false, trait_env)?.0?;
|
||||
Ok(c)
|
||||
}
|
||||
|
||||
|
@ -262,7 +281,7 @@ pub(crate) fn const_eval_static_query(
|
|||
Substitution::empty(Interner),
|
||||
db.trait_environment_for_body(def.into()),
|
||||
)?;
|
||||
let c = interpret_mir(db, body, false, None).0?;
|
||||
let c = interpret_mir(db, body, false, None)?.0?;
|
||||
Ok(c)
|
||||
}
|
||||
|
||||
|
@ -294,7 +313,7 @@ pub(crate) fn const_eval_discriminant_variant(
|
|||
Substitution::empty(Interner),
|
||||
db.trait_environment_for_body(def),
|
||||
)?;
|
||||
let c = interpret_mir(db, mir_body, false, None).0?;
|
||||
let c = interpret_mir(db, mir_body, false, None)?.0?;
|
||||
let c = if is_signed {
|
||||
try_const_isize(db, &c).unwrap()
|
||||
} else {
|
||||
|
@ -335,7 +354,7 @@ pub(crate) fn eval_to_const(
|
|||
}
|
||||
}
|
||||
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) {
|
||||
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
|
||||
if let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
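In the const-eval hunks above, `interpret_mir(...).0?` turns into `interpret_mir(...)?.0?` and the `if let` now destructures `(Ok(result), _)`: judging from these call sites, `interpret_mir` appears to return an outer `Result` around a tuple whose first element is itself a `Result`, so callers unwrap both layers. A small sketch of that shape, where the element types are placeholders rather than the real MIR types:

    #[derive(Debug)]
    struct EvalError;

    // Assumed shape inferred from the call sites: outer Result for setup failures,
    // inner Result for the evaluated value, plus some extra output in the tuple.
    fn interpret_mir(ok: bool) -> Result<(Result<u32, EvalError>, &'static str), EvalError> {
        if ok {
            Ok((Ok(7), "trace"))
        } else {
            Err(EvalError)
        }
    }

    fn const_eval() -> Result<u32, EvalError> {
        // `?` unwraps the outer Result, `.0` selects the value slot, and the
        // second `?` unwraps the inner Result: the `interpret_mir(...)?.0?` shape.
        let value = interpret_mir(true)?.0?;
        Ok(value)
    }

    fn main() {
        assert_eq!(const_eval().unwrap(), 7);
    }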
@@ -2033,7 +2033,7 @@ impl HirDisplayWithTypesMap for TypeRefId {
TypeRef::Macro(macro_call) => {
let (mut types_map, mut types_source_map) =
(TypesMap::default(), TypesSourceMap::default());
let ctx = hir_def::lower::LowerCtx::new(
let mut ctx = hir_def::lower::LowerCtx::new(
f.db.upcast(),
macro_call.file_id,
&mut types_map,

@@ -2041,7 +2041,7 @@ impl HirDisplayWithTypesMap for TypeRefId {
);
let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
Some(path) => match Path::from_src(&ctx, path) {
Some(path) => match Path::from_src(&mut ctx, path) {
Some(path) => path.hir_fmt(f, &types_map)?,
None => write!(f, "{{macro}}")?,
},
@@ -1420,7 +1420,7 @@ impl<'a> InferenceContext<'a> {
Some(path) => path,
None => return (self.err_ty(), None),
};
let ctx = crate::lower::TyLoweringContext::new(
let mut ctx = crate::lower::TyLoweringContext::new(
self.db,
&self.resolver,
&self.body.types,

@@ -151,7 +151,7 @@ impl InferenceContext<'_> {
let last = path.segments().last()?;

// Don't use `self.make_ty()` here as we need `orig_ns`.
let ctx = crate::lower::TyLoweringContext::new(
let mut ctx = crate::lower::TyLoweringContext::new(
self.db,
&self.resolver,
&self.body.types,
@@ -6,8 +6,8 @@
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
BackendRepr, FieldsShape, Float, Integer, LayoutCalculator, LayoutCalculatorError, LayoutData,
Primitive, ReprOptions, Scalar, Size, StructKind, TargetDataLayout, WrappingRange,
BackendRepr, FieldsShape, Float, Integer, LayoutCalculator, LayoutCalculatorError,
LayoutData, Primitive, ReprOptions, Scalar, Size, StructKind, TargetDataLayout,
WrappingRange,
},
LocalFieldId, StructId,
};

@@ -294,11 +295,12 @@ pub fn layout_of_ty_query(
.checked_mul(count, dl)
.ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;

let backend_repr = if count != 0 && matches!(element.backend_repr, BackendRepr::Uninhabited) {
BackendRepr::Uninhabited
} else {
BackendRepr::Memory { sized: true }
};
let backend_repr =
    if count != 0 && matches!(element.backend_repr, BackendRepr::Uninhabited) {
        BackendRepr::Uninhabited
    } else {
        BackendRepr::Memory { sized: true }
    };

let largest_niche = if count != 0 { element.largest_niche } else { None };
@ -6,8 +6,8 @@
|
|||
//!
|
||||
//! This usually involves resolving names, collecting generic arguments etc.
|
||||
use std::{
|
||||
cell::{Cell, OnceCell, RefCell, RefMut},
|
||||
iter,
|
||||
cell::OnceCell,
|
||||
iter, mem,
|
||||
ops::{self, Not as _},
|
||||
};
|
||||
|
||||
|
@ -72,47 +72,32 @@ use crate::{
|
|||
TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ImplTraitLoweringState {
|
||||
#[derive(Debug, Default)]
|
||||
struct ImplTraitLoweringState {
|
||||
/// When turning `impl Trait` into opaque types, we have to collect the
|
||||
/// bounds at the same time to get the IDs correct (without becoming too
|
||||
/// complicated). I don't like using interior mutability (as for the
|
||||
/// counter), but I've tried and failed to make the lifetimes work for
|
||||
/// passing around a `&mut TyLoweringContext`. The core problem is that
|
||||
/// we're grouping the mutable data (the counter and this field) together
|
||||
/// with the immutable context (the references to the DB and resolver).
|
||||
/// Splitting this up would be a possible fix.
|
||||
Opaque(RefCell<Arena<ImplTrait>>),
|
||||
Param(Cell<u16>),
|
||||
Variable(Cell<u16>),
|
||||
Disallowed,
|
||||
/// complicated).
|
||||
mode: ImplTraitLoweringMode,
|
||||
// This is structured as a struct with fields and not as an enum because it helps with the borrow checker.
|
||||
opaque_type_data: Arena<ImplTrait>,
|
||||
param_and_variable_counter: u16,
|
||||
}
|
||||
impl ImplTraitLoweringState {
|
||||
fn new(impl_trait_mode: ImplTraitLoweringMode) -> ImplTraitLoweringState {
|
||||
match impl_trait_mode {
|
||||
ImplTraitLoweringMode::Opaque => Self::Opaque(RefCell::new(Arena::new())),
|
||||
ImplTraitLoweringMode::Param => Self::Param(Cell::new(0)),
|
||||
ImplTraitLoweringMode::Variable => Self::Variable(Cell::new(0)),
|
||||
ImplTraitLoweringMode::Disallowed => Self::Disallowed,
|
||||
fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState {
|
||||
Self { mode, opaque_type_data: Arena::new(), param_and_variable_counter: 0 }
|
||||
}
|
||||
fn param(counter: u16) -> Self {
|
||||
Self {
|
||||
mode: ImplTraitLoweringMode::Param,
|
||||
opaque_type_data: Arena::new(),
|
||||
param_and_variable_counter: counter,
|
||||
}
|
||||
}
|
||||
|
||||
fn take(&self) -> Self {
|
||||
match self {
|
||||
Self::Opaque(x) => Self::Opaque(RefCell::new(x.take())),
|
||||
Self::Param(x) => Self::Param(Cell::new(x.get())),
|
||||
Self::Variable(x) => Self::Variable(Cell::new(x.get())),
|
||||
Self::Disallowed => Self::Disallowed,
|
||||
}
|
||||
}
|
||||
|
||||
fn swap(&self, impl_trait_mode: &Self) {
|
||||
match (self, impl_trait_mode) {
|
||||
(Self::Opaque(x), Self::Opaque(y)) => x.swap(y),
|
||||
(Self::Param(x), Self::Param(y)) => x.swap(y),
|
||||
(Self::Variable(x), Self::Variable(y)) => x.swap(y),
|
||||
(Self::Disallowed, Self::Disallowed) => (),
|
||||
_ => panic!("mismatched lowering mode"),
|
||||
fn variable(counter: u16) -> Self {
|
||||
Self {
|
||||
mode: ImplTraitLoweringMode::Variable,
|
||||
opaque_type_data: Arena::new(),
|
||||
param_and_variable_counter: counter,
|
||||
}
|
||||
}
|
||||
}
|
||||
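The hunk above replaces the enum-with-interior-mutability state (`Opaque(RefCell<..>)`, `Param(Cell<u16>)`, ...) with a plain struct carrying a `mode` tag next to the mutable data, which is what lets the lowering context work through `&mut self`. A minimal sketch of the before/after shape, with simplified stand-in types rather than the real rust-analyzer ones:

```rust
use std::cell::Cell;

// Before: one enum variant per lowering mode, with `Cell` so the counter
// could be bumped behind a shared reference.
#[allow(dead_code)]
enum StateBefore {
    Param(Cell<u16>),
    Variable(Cell<u16>),
    Disallowed,
}

// After: a struct that always carries the counter plus a plain mode tag.
// With `&mut` access there is no interior mutability, and the borrow
// checker can see that the fields are disjoint.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Default)]
enum Mode {
    Param,
    Variable,
    #[default]
    Disallowed,
}

#[derive(Debug, Default)]
struct StateAfter {
    mode: Mode,
    counter: u16,
}

fn bump(state: &mut StateAfter) -> u16 {
    let idx = state.counter;
    state.counter += 1;
    idx
}

fn main() {
    // Old shape: mutation has to go through the Cell.
    let before = StateBefore::Param(Cell::new(0));
    if let StateBefore::Param(c) = &before {
        c.set(c.get() + 1);
    }

    // New shape: ordinary field access through &mut.
    let mut after = StateAfter { mode: Mode::Param, ..StateAfter::default() };
    assert_eq!(bump(&mut after), 0);
    assert_eq!(after.counter, 1);
    assert!(matches!(after.mode, Mode::Param));
}
```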
|
@ -137,9 +122,9 @@ pub struct TyLoweringContext<'a> {
|
|||
/// possible currently, so this should be fine for now.
|
||||
pub type_param_mode: ParamLoweringMode,
|
||||
impl_trait_mode: ImplTraitLoweringState,
|
||||
expander: RefCell<Option<Expander>>,
|
||||
expander: Option<Expander>,
|
||||
/// Tracks types with explicit `?Sized` bounds.
|
||||
pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
|
||||
pub(crate) unsized_types: FxHashSet<Ty>,
|
||||
}
|
||||
|
||||
impl<'a> TyLoweringContext<'a> {
|
||||
|
@ -159,7 +144,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
types_source_map: Option<&'a TypesSourceMap>,
|
||||
owner: Option<TypeOwnerId>,
|
||||
) -> Self {
|
||||
let impl_trait_mode = ImplTraitLoweringState::Disallowed;
|
||||
let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed);
|
||||
let type_param_mode = ParamLoweringMode::Placeholder;
|
||||
let in_binders = DebruijnIndex::INNERMOST;
|
||||
Self {
|
||||
|
@ -172,38 +157,26 @@ impl<'a> TyLoweringContext<'a> {
|
|||
in_binders,
|
||||
impl_trait_mode,
|
||||
type_param_mode,
|
||||
expander: RefCell::new(None),
|
||||
unsized_types: RefCell::default(),
|
||||
expander: None,
|
||||
unsized_types: FxHashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_debruijn<T>(
|
||||
&self,
|
||||
&mut self,
|
||||
debruijn: DebruijnIndex,
|
||||
f: impl FnOnce(&TyLoweringContext<'_>) -> T,
|
||||
f: impl FnOnce(&mut TyLoweringContext<'_>) -> T,
|
||||
) -> T {
|
||||
let impl_trait_mode = self.impl_trait_mode.take();
|
||||
let expander = self.expander.take();
|
||||
let unsized_types = self.unsized_types.take();
|
||||
let new_ctx = Self {
|
||||
in_binders: debruijn,
|
||||
impl_trait_mode,
|
||||
expander: RefCell::new(expander),
|
||||
unsized_types: RefCell::new(unsized_types),
|
||||
generics: self.generics.clone(),
|
||||
..*self
|
||||
};
|
||||
let result = f(&new_ctx);
|
||||
self.impl_trait_mode.swap(&new_ctx.impl_trait_mode);
|
||||
self.expander.replace(new_ctx.expander.into_inner());
|
||||
self.unsized_types.replace(new_ctx.unsized_types.into_inner());
|
||||
let old_debruijn = mem::replace(&mut self.in_binders, debruijn);
|
||||
let result = f(self);
|
||||
self.in_binders = old_debruijn;
|
||||
result
|
||||
}
|
||||
|
||||
pub fn with_shifted_in<T>(
|
||||
&self,
|
||||
&mut self,
|
||||
debruijn: DebruijnIndex,
|
||||
f: impl FnOnce(&TyLoweringContext<'_>) -> T,
|
||||
f: impl FnOnce(&mut TyLoweringContext<'_>) -> T,
|
||||
) -> T {
|
||||
self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
|
||||
}
|
||||
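With the context now `&mut`, `with_debruijn` mutates `self` in place and restores the old binder depth afterwards instead of cloning fields into a temporary context and swapping them back. A small sketch of that save-run-restore pattern with `mem::replace` (illustrative names; note the restore is skipped if the closure panics, which the surrounding code appears to accept):

```rust
use std::mem;

struct Ctx {
    depth: u32,
}

impl Ctx {
    fn with_depth<T>(&mut self, depth: u32, f: impl FnOnce(&mut Ctx) -> T) -> T {
        // Save the old value and install the new one in a single step.
        let old = mem::replace(&mut self.depth, depth);
        let result = f(self);
        // Restore on the way out so callers never observe the temporary value.
        self.depth = old;
        result
    }
}

fn main() {
    let mut ctx = Ctx { depth: 0 };
    let seen = ctx.with_depth(3, |c| c.depth);
    assert_eq!(seen, 3);
    assert_eq!(ctx.depth, 0);
}
```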
|
@ -227,7 +200,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
|
||||
pub enum ImplTraitLoweringMode {
|
||||
/// `impl Trait` gets lowered into an opaque type that doesn't unify with
|
||||
/// anything except itself. This is used in places where values flow 'out',
|
||||
|
@ -244,6 +217,7 @@ pub enum ImplTraitLoweringMode {
|
|||
/// currently checking.
|
||||
Variable,
|
||||
/// `impl Trait` is disallowed and will be an error.
|
||||
#[default]
|
||||
Disallowed,
|
||||
}
|
||||
|
||||
|
@ -254,12 +228,13 @@ pub enum ParamLoweringMode {
|
|||
}
|
||||
|
||||
impl<'a> TyLoweringContext<'a> {
|
||||
pub fn lower_ty(&self, type_ref: TypeRefId) -> Ty {
|
||||
pub fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty {
|
||||
self.lower_ty_ext(type_ref).0
|
||||
}
|
||||
|
||||
pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
|
||||
pub fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const {
|
||||
let Some(owner) = self.owner else { return unknown_const(const_type) };
|
||||
let debruijn = self.in_binders;
|
||||
const_or_path_to_chalk(
|
||||
self.db,
|
||||
self.resolver,
|
||||
|
@ -268,7 +243,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
const_ref,
|
||||
self.type_param_mode,
|
||||
|| self.generics(),
|
||||
self.in_binders,
|
||||
debruijn,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -278,7 +253,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
.as_ref()
|
||||
}
|
||||
|
||||
pub fn lower_ty_ext(&self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
|
||||
pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
|
||||
let mut res = None;
|
||||
let type_ref = &self.types_map[type_ref_id];
|
||||
let ty = match type_ref {
|
||||
|
@ -337,8 +312,8 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
|
||||
TypeRef::ImplTrait(bounds) => {
|
||||
match &self.impl_trait_mode {
|
||||
ImplTraitLoweringState::Opaque(opaque_type_data) => {
|
||||
match self.impl_trait_mode.mode {
|
||||
ImplTraitLoweringMode::Opaque => {
|
||||
let origin = match self.resolver.generic_def() {
|
||||
Some(GenericDefId::FunctionId(it)) => Either::Left(it),
|
||||
Some(GenericDefId::TypeAliasId(it)) => Either::Right(it),
|
||||
|
@ -350,7 +325,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
// this dance is to make sure the data is in the right
|
||||
// place even if we encounter more opaque types while
|
||||
// lowering the bounds
|
||||
let idx = opaque_type_data.borrow_mut().alloc(ImplTrait {
|
||||
let idx = self.impl_trait_mode.opaque_type_data.alloc(ImplTrait {
|
||||
bounds: crate::make_single_type_binders(Vec::default()),
|
||||
});
|
||||
// We don't want to lower the bounds inside the binders
|
||||
|
@ -366,7 +341,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
.with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
|
||||
ctx.lower_impl_trait(bounds, self.resolver.krate())
|
||||
});
|
||||
opaque_type_data.borrow_mut()[idx] = actual_opaque_type_data;
|
||||
self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data;
|
||||
|
||||
let impl_trait_id = origin.either(
|
||||
|f| ImplTraitId::ReturnTypeImplTrait(f, idx),
|
||||
|
@ -378,11 +353,13 @@ impl<'a> TyLoweringContext<'a> {
|
|||
let parameters = generics.bound_vars_subst(self.db, self.in_binders);
|
||||
TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
|
||||
}
|
||||
ImplTraitLoweringState::Param(counter) => {
|
||||
let idx = counter.get();
|
||||
ImplTraitLoweringMode::Param => {
|
||||
let idx = self.impl_trait_mode.param_and_variable_counter;
|
||||
// Count the number of `impl Trait` things that appear within our bounds.
|
||||
// Since those have been emitted as implicit type args already.
|
||||
counter.set(idx + self.count_impl_traits(type_ref_id) as u16);
|
||||
self.impl_trait_mode.param_and_variable_counter =
|
||||
idx + self.count_impl_traits(type_ref_id) as u16;
|
||||
let db = self.db;
|
||||
let kind = self
|
||||
.generics()
|
||||
.expect("param impl trait lowering must be in a generic def")
|
||||
|
@ -398,15 +375,17 @@ impl<'a> TyLoweringContext<'a> {
|
|||
})
|
||||
.nth(idx as usize)
|
||||
.map_or(TyKind::Error, |id| {
|
||||
TyKind::Placeholder(to_placeholder_idx(self.db, id.into()))
|
||||
TyKind::Placeholder(to_placeholder_idx(db, id.into()))
|
||||
});
|
||||
kind.intern(Interner)
|
||||
}
|
||||
ImplTraitLoweringState::Variable(counter) => {
|
||||
let idx = counter.get();
|
||||
ImplTraitLoweringMode::Variable => {
|
||||
let idx = self.impl_trait_mode.param_and_variable_counter;
|
||||
// Count the number of `impl Trait` things that appear within our bounds.
|
||||
// Since those have been emitted as implicit type args already.
|
||||
counter.set(idx + self.count_impl_traits(type_ref_id) as u16);
|
||||
self.impl_trait_mode.param_and_variable_counter =
|
||||
idx + self.count_impl_traits(type_ref_id) as u16;
|
||||
let debruijn = self.in_binders;
|
||||
let kind = self
|
||||
.generics()
|
||||
.expect("variable impl trait lowering must be in a generic def")
|
||||
|
@ -423,33 +402,31 @@ impl<'a> TyLoweringContext<'a> {
|
|||
})
|
||||
.nth(idx as usize)
|
||||
.map_or(TyKind::Error, |id| {
|
||||
TyKind::BoundVar(BoundVar { debruijn: self.in_binders, index: id })
|
||||
TyKind::BoundVar(BoundVar { debruijn, index: id })
|
||||
});
|
||||
kind.intern(Interner)
|
||||
}
|
||||
ImplTraitLoweringState::Disallowed => {
|
||||
ImplTraitLoweringMode::Disallowed => {
|
||||
// FIXME: report error
|
||||
TyKind::Error.intern(Interner)
|
||||
}
|
||||
}
|
||||
}
|
||||
TypeRef::Macro(macro_call) => {
|
||||
let (mut expander, recursion_start) = {
|
||||
match RefMut::filter_map(self.expander.borrow_mut(), Option::as_mut) {
|
||||
let (expander, recursion_start) = {
|
||||
match &mut self.expander {
|
||||
// There already is an expander here, this means we are already recursing
|
||||
Ok(expander) => (expander, false),
|
||||
Some(expander) => (expander, false),
|
||||
// No expander was created yet, so we are at the start of the expansion recursion
|
||||
// and therefore have to create an expander.
|
||||
Err(expander) => (
|
||||
RefMut::map(expander, |it| {
|
||||
it.insert(Expander::new(
|
||||
self.db.upcast(),
|
||||
macro_call.file_id,
|
||||
self.resolver.module(),
|
||||
))
|
||||
}),
|
||||
true,
|
||||
),
|
||||
None => {
|
||||
let expander = self.expander.insert(Expander::new(
|
||||
self.db.upcast(),
|
||||
macro_call.file_id,
|
||||
self.resolver.module(),
|
||||
));
|
||||
(expander, true)
|
||||
}
|
||||
}
|
||||
};
|
||||
let ty = {
|
||||
|
@ -465,19 +442,16 @@ impl<'a> TyLoweringContext<'a> {
|
|||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
|
||||
let ctx = expander.ctx(
|
||||
let mut ctx = expander.ctx(
|
||||
self.db.upcast(),
|
||||
&mut types_map,
|
||||
&mut types_source_map,
|
||||
);
|
||||
// FIXME: Report syntax errors in expansion here
|
||||
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
|
||||
let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree());
|
||||
|
||||
drop(expander);
|
||||
|
||||
// FIXME: That may be better served by mutating `self` then restoring, but this requires
|
||||
// making it `&mut self`.
|
||||
let inner_ctx = TyLoweringContext {
|
||||
// Can't mutate `self`, must create a new instance, because of the lifetimes.
|
||||
let mut inner_ctx = TyLoweringContext {
|
||||
db: self.db,
|
||||
resolver: self.resolver,
|
||||
generics: self.generics.clone(),
|
||||
|
@ -486,30 +460,27 @@ impl<'a> TyLoweringContext<'a> {
|
|||
in_binders: self.in_binders,
|
||||
owner: self.owner,
|
||||
type_param_mode: self.type_param_mode,
|
||||
impl_trait_mode: self.impl_trait_mode.take(),
|
||||
expander: RefCell::new(self.expander.take()),
|
||||
unsized_types: RefCell::new(self.unsized_types.take()),
|
||||
impl_trait_mode: mem::take(&mut self.impl_trait_mode),
|
||||
expander: self.expander.take(),
|
||||
unsized_types: mem::take(&mut self.unsized_types),
|
||||
};
|
||||
|
||||
let ty = inner_ctx.lower_ty(type_ref);
|
||||
|
||||
self.impl_trait_mode.swap(&inner_ctx.impl_trait_mode);
|
||||
*self.expander.borrow_mut() = inner_ctx.expander.into_inner();
|
||||
*self.unsized_types.borrow_mut() = inner_ctx.unsized_types.into_inner();
|
||||
self.impl_trait_mode = inner_ctx.impl_trait_mode;
|
||||
self.expander = inner_ctx.expander;
|
||||
self.unsized_types = inner_ctx.unsized_types;
|
||||
|
||||
self.expander.borrow_mut().as_mut().unwrap().exit(mark);
|
||||
self.expander.as_mut().unwrap().exit(mark);
|
||||
Some(ty)
|
||||
}
|
||||
_ => {
|
||||
drop(expander);
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
};
|
||||
|
||||
// drop the expander, resetting it to pre-recursion state
|
||||
if recursion_start {
|
||||
*self.expander.borrow_mut() = None;
|
||||
self.expander = None;
|
||||
}
|
||||
ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
|
||||
}
|
||||
|
@ -544,7 +515,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
pub(crate) fn lower_ty_relative_path(
|
||||
&self,
|
||||
&mut self,
|
||||
ty: Ty,
|
||||
// We need the original resolution to lower `Self::AssocTy` correctly
|
||||
res: Option<TypeNs>,
|
||||
|
@ -565,7 +536,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
pub(crate) fn lower_partly_resolved_path(
|
||||
&self,
|
||||
&mut self,
|
||||
resolution: TypeNs,
|
||||
resolved_segment: PathSegment<'_>,
|
||||
remaining_segments: PathSegments<'_>,
|
||||
|
@ -706,7 +677,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
|
||||
pub(crate) fn lower_path(&mut self, path: &Path) -> (Ty, Option<TypeNs>) {
|
||||
// Resolve the path (in type namespace)
|
||||
if let Some(type_ref) = path.type_anchor() {
|
||||
let (ty, res) = self.lower_ty_ext(type_ref);
|
||||
|
@ -736,7 +707,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
|
||||
}
|
||||
|
||||
fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
|
||||
fn select_associated_type(&mut self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
|
||||
let Some((generics, res)) = self.generics().zip(res) else {
|
||||
return TyKind::Error.intern(Interner);
|
||||
};
|
||||
|
@ -746,6 +717,8 @@ impl<'a> TyLoweringContext<'a> {
|
|||
res,
|
||||
Some(segment.name.clone()),
|
||||
move |name, t, associated_ty| {
|
||||
let generics = self.generics().unwrap();
|
||||
|
||||
if name != segment.name {
|
||||
return None;
|
||||
}
|
||||
|
@ -797,7 +770,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
fn lower_path_inner(
|
||||
&self,
|
||||
&mut self,
|
||||
segment: PathSegment<'_>,
|
||||
typeable: TyDefId,
|
||||
infer_args: bool,
|
||||
|
@ -814,7 +787,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
/// Collect generic arguments from a path into a `Substs`. See also
|
||||
/// `create_substs_for_ast_path` and `def_to_ty` in rustc.
|
||||
pub(super) fn substs_from_path(
|
||||
&self,
|
||||
&mut self,
|
||||
path: &Path,
|
||||
// Note that we don't call `db.value_type(resolved)` here,
|
||||
// `ValueTyDefId` is just a convenient way to pass generics and
|
||||
|
@ -855,7 +828,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
pub(super) fn substs_from_path_segment(
|
||||
&self,
|
||||
&mut self,
|
||||
segment: PathSegment<'_>,
|
||||
def: Option<GenericDefId>,
|
||||
infer_args: bool,
|
||||
|
@ -870,7 +843,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
fn substs_from_args_and_bindings(
|
||||
&self,
|
||||
&mut self,
|
||||
args_and_bindings: Option<&GenericArgs>,
|
||||
def: Option<GenericDefId>,
|
||||
infer_args: bool,
|
||||
|
@ -959,11 +932,11 @@ impl<'a> TyLoweringContext<'a> {
|
|||
self.db,
|
||||
id,
|
||||
arg,
|
||||
&mut (),
|
||||
self,
|
||||
self.types_map,
|
||||
|_, type_ref| self.lower_ty(type_ref),
|
||||
|_, const_ref, ty| self.lower_const(const_ref, ty),
|
||||
|_, lifetime_ref| self.lower_lifetime(lifetime_ref),
|
||||
|this, type_ref| this.lower_ty(type_ref),
|
||||
|this, const_ref, ty| this.lower_const(const_ref, ty),
|
||||
|this, lifetime_ref| this.lower_lifetime(lifetime_ref),
|
||||
);
|
||||
substs.push(arg);
|
||||
}
|
||||
|
@ -1016,7 +989,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
pub(crate) fn lower_trait_ref_from_resolved_path(
|
||||
&self,
|
||||
&mut self,
|
||||
resolved: TraitId,
|
||||
segment: PathSegment<'_>,
|
||||
explicit_self_ty: Ty,
|
||||
|
@ -1025,7 +998,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
|
||||
}
|
||||
|
||||
fn lower_trait_ref_from_path(&self, path: &Path, explicit_self_ty: Ty) -> Option<TraitRef> {
|
||||
fn lower_trait_ref_from_path(&mut self, path: &Path, explicit_self_ty: Ty) -> Option<TraitRef> {
|
||||
let resolved = match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? {
|
||||
// FIXME(trait_alias): We need to handle trait alias here.
|
||||
TypeNs::TraitId(tr) => tr,
|
||||
|
@ -1035,12 +1008,16 @@ impl<'a> TyLoweringContext<'a> {
|
|||
Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
|
||||
}
|
||||
|
||||
fn lower_trait_ref(&self, trait_ref: &HirTraitRef, explicit_self_ty: Ty) -> Option<TraitRef> {
|
||||
fn lower_trait_ref(
|
||||
&mut self,
|
||||
trait_ref: &HirTraitRef,
|
||||
explicit_self_ty: Ty,
|
||||
) -> Option<TraitRef> {
|
||||
self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
|
||||
}
|
||||
|
||||
fn trait_ref_substs_from_path(
|
||||
&self,
|
||||
&mut self,
|
||||
segment: PathSegment<'_>,
|
||||
resolved: TraitId,
|
||||
explicit_self_ty: Ty,
|
||||
|
@ -1049,11 +1026,11 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
pub(crate) fn lower_where_predicate<'b>(
|
||||
&'b self,
|
||||
&'b mut self,
|
||||
where_predicate: &'b WherePredicate,
|
||||
&def: &GenericDefId,
|
||||
ignore_bindings: bool,
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + 'b {
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b> {
|
||||
match where_predicate {
|
||||
WherePredicate::ForLifetime { target, bound, .. }
|
||||
| WherePredicate::TypeBound { target, bound } => {
|
||||
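The return type switches from a plain lifetime bound to the precise-capturing `use<'a, 'b>` syntax, naming the lifetimes the returned `impl Iterator` may capture now that the receiver is `&'b mut self` on a context with lifetime `'a`. A self-contained sketch of the same shape with simplified types (assumes a toolchain where `use<..>` is stable, i.e. Rust 1.82 or later):

```rust
struct Lowerer<'a> {
    source: &'a [u32],
    scratch: Vec<u32>,
}

impl<'a> Lowerer<'a> {
    // The returned iterator borrows `self.scratch` for 'b and reads the
    // 'a-lived slice, so the opaque type must be allowed to capture both.
    fn lowered<'b>(&'b mut self) -> impl Iterator<Item = u32> + use<'a, 'b> {
        self.scratch.push(0);
        self.source.iter().copied().chain(self.scratch.iter().copied())
    }
}

fn main() {
    let data = [1, 2, 3];
    let mut lowerer = Lowerer { source: &data, scratch: Vec::new() };
    let total: u32 = lowerer.lowered().sum();
    assert_eq!(total, 6);
}
```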
|
@ -1087,12 +1064,12 @@ impl<'a> TyLoweringContext<'a> {
|
|||
.into_iter()
|
||||
}
|
||||
|
||||
pub(crate) fn lower_type_bound(
|
||||
&'a self,
|
||||
bound: &'a TypeBound,
|
||||
pub(crate) fn lower_type_bound<'b>(
|
||||
&'b mut self,
|
||||
bound: &'b TypeBound,
|
||||
self_ty: Ty,
|
||||
ignore_bindings: bool,
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> {
|
||||
let mut trait_ref = None;
|
||||
let clause = match bound {
|
||||
TypeBound::Path(path, TraitBoundModifier::None) => {
|
||||
|
@ -1111,7 +1088,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
.lower_trait_ref_from_path(path, self_ty.clone())
|
||||
.map(|trait_ref| trait_ref.hir_trait_id());
|
||||
if trait_id == sized_trait {
|
||||
self.unsized_types.borrow_mut().insert(self_ty);
|
||||
self.unsized_types.insert(self_ty);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
@ -1131,17 +1108,18 @@ impl<'a> TyLoweringContext<'a> {
|
|||
};
|
||||
clause.into_iter().chain(
|
||||
trait_ref
|
||||
.into_iter()
|
||||
.filter(move |_| !ignore_bindings)
|
||||
.flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
|
||||
.map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr))
|
||||
.into_iter()
|
||||
.flatten(),
|
||||
)
|
||||
}
|
||||
|
||||
fn assoc_type_bindings_from_type_bound(
|
||||
&'a self,
|
||||
bound: &'a TypeBound,
|
||||
fn assoc_type_bindings_from_type_bound<'b>(
|
||||
&'b mut self,
|
||||
bound: &'b TypeBound,
|
||||
trait_ref: TraitRef,
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
|
||||
) -> impl Iterator<Item = QuantifiedWhereClause> + use<'b, 'a> {
|
||||
let last_segment = match bound {
|
||||
TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
|
||||
path.segments().last()
|
||||
|
@ -1192,22 +1170,16 @@ impl<'a> TyLoweringContext<'a> {
|
|||
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
|
||||
);
|
||||
if let Some(type_ref) = binding.type_ref {
|
||||
match (&self.types_map[type_ref], &self.impl_trait_mode) {
|
||||
(TypeRef::ImplTrait(_), ImplTraitLoweringState::Disallowed) => (),
|
||||
(
|
||||
_,
|
||||
ImplTraitLoweringState::Disallowed | ImplTraitLoweringState::Opaque(_),
|
||||
) => {
|
||||
match (&self.types_map[type_ref], self.impl_trait_mode.mode) {
|
||||
(TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
|
||||
(_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
|
||||
let ty = self.lower_ty(type_ref);
|
||||
let alias_eq =
|
||||
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
|
||||
predicates
|
||||
.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
|
||||
}
|
||||
(
|
||||
_,
|
||||
ImplTraitLoweringState::Param(_) | ImplTraitLoweringState::Variable(_),
|
||||
) => {
|
||||
(_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
|
||||
// Find the generic index for the target of our `bound`
|
||||
let target_param_idx = self
|
||||
.resolver
|
||||
|
@ -1244,14 +1216,14 @@ impl<'a> TyLoweringContext<'a> {
|
|||
self.owner,
|
||||
)
|
||||
.with_type_param_mode(self.type_param_mode);
|
||||
match &self.impl_trait_mode {
|
||||
ImplTraitLoweringState::Param(_) => {
|
||||
match self.impl_trait_mode.mode {
|
||||
ImplTraitLoweringMode::Param => {
|
||||
ext.impl_trait_mode =
|
||||
ImplTraitLoweringState::Param(Cell::new(counter));
|
||||
ImplTraitLoweringState::param(counter);
|
||||
}
|
||||
ImplTraitLoweringState::Variable(_) => {
|
||||
ImplTraitLoweringMode::Variable => {
|
||||
ext.impl_trait_mode =
|
||||
ImplTraitLoweringState::Variable(Cell::new(counter));
|
||||
ImplTraitLoweringState::variable(counter);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
@ -1278,7 +1250,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
fn lower_dyn_trait(&self, bounds: &[TypeBound]) -> Ty {
|
||||
fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty {
|
||||
let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
|
||||
// INVARIANT: The principal trait bound, if present, must come first. Others may be in any
|
||||
// order but should be in the same order for the same set but possibly different order of
|
||||
|
@ -1287,22 +1259,26 @@ impl<'a> TyLoweringContext<'a> {
|
|||
// These invariants are utilized by `TyExt::dyn_trait()` and chalk.
|
||||
let mut lifetime = None;
|
||||
let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
|
||||
let mut bounds: Vec<_> = bounds
|
||||
.iter()
|
||||
.flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
|
||||
.filter(|b| match b.skip_binders() {
|
||||
WhereClause::Implemented(_) | WhereClause::AliasEq(_) => true,
|
||||
WhereClause::LifetimeOutlives(_) => false,
|
||||
WhereClause::TypeOutlives(t) => {
|
||||
lifetime = Some(t.lifetime.clone());
|
||||
false
|
||||
let mut lowered_bounds = Vec::new();
|
||||
for b in bounds {
|
||||
ctx.lower_type_bound(b, self_ty.clone(), false).for_each(|b| {
|
||||
let filter = match b.skip_binders() {
|
||||
WhereClause::Implemented(_) | WhereClause::AliasEq(_) => true,
|
||||
WhereClause::LifetimeOutlives(_) => false,
|
||||
WhereClause::TypeOutlives(t) => {
|
||||
lifetime = Some(t.lifetime.clone());
|
||||
false
|
||||
}
|
||||
};
|
||||
if filter {
|
||||
lowered_bounds.push(b);
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
});
|
||||
}
|
||||
|
||||
let mut multiple_regular_traits = false;
|
||||
let mut multiple_same_projection = false;
|
||||
bounds.sort_unstable_by(|lhs, rhs| {
|
||||
lowered_bounds.sort_unstable_by(|lhs, rhs| {
|
||||
use std::cmp::Ordering;
|
||||
match (lhs.skip_binders(), rhs.skip_binders()) {
|
||||
(WhereClause::Implemented(lhs), WhereClause::Implemented(rhs)) => {
|
||||
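`lower_dyn_trait` trades the `flat_map`/`filter` chain for an explicit loop that pushes into `lowered_bounds`: once `lower_type_bound` takes `&mut self` and returns an iterator that still borrows the context, each returned iterator has to be drained before the next call, which an adaptor chain cannot express. A minimal sketch of that constraint with stand-in types:

```rust
struct Ctx {
    prefix: Vec<u32>,
}

impl Ctx {
    // Stand-in for `lower_type_bound`: takes `&mut self` and returns an
    // iterator that keeps borrowing the context.
    fn lower<'b>(&'b mut self, bound: u32) -> impl Iterator<Item = u32> + 'b {
        self.prefix.push(bound);
        self.prefix.iter().map(move |&p| p * 100 + bound)
    }
}

fn lower_all(ctx: &mut Ctx, bounds: &[u32]) -> Vec<u32> {
    // `bounds.iter().flat_map(|b| ctx.lower(*b))` cannot work here: each
    // returned iterator still borrows `ctx` mutably, so the adaptor would
    // have to hold that borrow while also calling the closure again.
    // Draining each iterator before the next call is the loop equivalent.
    let mut lowered = Vec::new();
    for &b in bounds {
        lowered.extend(ctx.lower(b));
    }
    lowered
}

fn main() {
    let mut ctx = Ctx { prefix: Vec::new() };
    let out = lower_all(&mut ctx, &[1, 2]);
    assert_eq!(out, vec![101, 102, 202]);
}
```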
|
@ -1344,13 +1320,13 @@ impl<'a> TyLoweringContext<'a> {
|
|||
return None;
|
||||
}
|
||||
|
||||
bounds.first().and_then(|b| b.trait_id())?;
|
||||
lowered_bounds.first().and_then(|b| b.trait_id())?;
|
||||
|
||||
// As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
|
||||
// bounds. We shouldn't have repeated elements besides auto traits at this point.
|
||||
bounds.dedup();
|
||||
lowered_bounds.dedup();
|
||||
|
||||
Some(QuantifiedWhereClauses::from_iter(Interner, bounds))
|
||||
Some(QuantifiedWhereClauses::from_iter(Interner, lowered_bounds))
|
||||
});
|
||||
|
||||
if let Some(bounds) = bounds {
|
||||
|
@ -1376,16 +1352,16 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_impl_trait(&self, bounds: &[TypeBound], krate: CrateId) -> ImplTrait {
|
||||
fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: CrateId) -> ImplTrait {
|
||||
cov_mark::hit!(lower_rpit);
|
||||
let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
|
||||
let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
|
||||
let mut predicates: Vec<_> = bounds
|
||||
.iter()
|
||||
.flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
|
||||
.collect();
|
||||
let mut predicates = Vec::new();
|
||||
for b in bounds {
|
||||
predicates.extend(ctx.lower_type_bound(b, self_ty.clone(), false));
|
||||
}
|
||||
|
||||
if !ctx.unsized_types.borrow().contains(&self_ty) {
|
||||
if !ctx.unsized_types.contains(&self_ty) {
|
||||
let sized_trait = ctx
|
||||
.db
|
||||
.lang_item(krate, LangItem::Sized)
|
||||
|
@ -1562,7 +1538,7 @@ pub(crate) fn field_types_query(
|
|||
};
|
||||
let generics = generics(db.upcast(), def);
|
||||
let mut res = ArenaMap::default();
|
||||
let ctx = TyLoweringContext::new(db, &resolver, var_data.types_map(), def.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, var_data.types_map(), def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
for (field_id, field_data) in var_data.fields().iter() {
|
||||
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref)));
|
||||
|
@ -1596,7 +1572,7 @@ pub(crate) fn generic_predicates_for_param_query(
|
|||
let generics = generics(db.upcast(), def);
|
||||
|
||||
// we have to filter out all other predicates *first*, before attempting to lower them
|
||||
let predicate = |pred: &_, def: &_, ctx: &TyLoweringContext<'_>| match pred {
|
||||
let predicate = |pred: &_, def: &_, ctx: &mut TyLoweringContext<'_>| match pred {
|
||||
WherePredicate::ForLifetime { target, bound, .. }
|
||||
| WherePredicate::TypeBound { target, bound, .. } => {
|
||||
let invalid_target = match target {
|
||||
|
@ -1642,16 +1618,19 @@ pub(crate) fn generic_predicates_for_param_query(
|
|||
let mut predicates = Vec::new();
|
||||
for (params, def) in resolver.all_generic_params() {
|
||||
ctx.types_map = ¶ms.types_map;
|
||||
predicates.extend(
|
||||
params.where_predicates().filter(|pred| predicate(pred, def, &ctx)).flat_map(|pred| {
|
||||
ctx.lower_where_predicate(pred, def, true).map(|p| make_binders(db, &generics, p))
|
||||
}),
|
||||
);
|
||||
for pred in params.where_predicates() {
|
||||
if predicate(pred, def, &mut ctx) {
|
||||
predicates.extend(
|
||||
ctx.lower_where_predicate(pred, def, true)
|
||||
.map(|p| make_binders(db, &generics, p)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
if !subst.is_empty(Interner) {
|
||||
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
|
||||
let explicitly_unsized_tys = ctx.unsized_types;
|
||||
if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(
|
||||
db,
|
||||
param_id.parent,
|
||||
|
@ -1731,7 +1710,7 @@ pub(crate) fn trait_environment_query(
|
|||
|
||||
let subst = generics(db.upcast(), def).placeholder_subst(db);
|
||||
if !subst.is_empty(Interner) {
|
||||
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
|
||||
let explicitly_unsized_tys = ctx.unsized_types;
|
||||
if let Some(implicitly_sized_clauses) =
|
||||
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
|
||||
{
|
||||
|
@ -1801,16 +1780,19 @@ where
|
|||
let mut predicates = Vec::new();
|
||||
for (params, def) in resolver.all_generic_params() {
|
||||
ctx.types_map = ¶ms.types_map;
|
||||
predicates.extend(params.where_predicates().filter(|pred| filter(pred, def)).flat_map(
|
||||
|pred| {
|
||||
ctx.lower_where_predicate(pred, def, false).map(|p| make_binders(db, &generics, p))
|
||||
},
|
||||
));
|
||||
for pred in params.where_predicates() {
|
||||
if filter(pred, def) {
|
||||
predicates.extend(
|
||||
ctx.lower_where_predicate(pred, def, false)
|
||||
.map(|p| make_binders(db, &generics, p)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if generics.len() > 0 {
|
||||
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
let explicitly_unsized_tys = ctx.unsized_types.into_inner();
|
||||
let explicitly_unsized_tys = ctx.unsized_types;
|
||||
if let Some(implicitly_sized_predicates) =
|
||||
implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
|
||||
{
|
||||
|
@ -1906,7 +1888,8 @@ pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) ->
|
|||
let mut val = p.default.as_ref().map_or_else(
|
||||
|| unknown_const_as_generic(db.const_param_ty(id)),
|
||||
|c| {
|
||||
let c = ctx.lower_const(c, ctx.lower_ty(p.ty));
|
||||
let param_ty = ctx.lower_ty(p.ty);
|
||||
let c = ctx.lower_const(c, param_ty);
|
||||
c.cast(Interner)
|
||||
},
|
||||
);
|
||||
|
@ -1946,11 +1929,11 @@ pub(crate) fn generic_defaults_recover(
|
|||
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
|
||||
let data = db.function_data(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx_params = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
let mut ctx_params = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr));
|
||||
let ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let ret = ctx_ret.lower_ty(data.ret_type);
|
||||
|
@ -1982,7 +1965,7 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
|
|||
let data = db.const_data(def);
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
||||
make_binders(db, &generics, ctx.lower_ty(data.type_ref))
|
||||
|
@ -1992,7 +1975,7 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
|
|||
fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
|
||||
let data = db.static_data(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into());
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into());
|
||||
|
||||
Binders::empty(Interner, ctx.lower_ty(data.type_ref))
|
||||
}
|
||||
|
@ -2001,7 +1984,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
|
|||
let struct_data = db.struct_data(def);
|
||||
let fields = struct_data.variant_data.fields();
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(
|
||||
let mut ctx = TyLoweringContext::new(
|
||||
db,
|
||||
&resolver,
|
||||
struct_data.variant_data.types_map(),
|
||||
|
@ -2038,7 +2021,7 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId)
|
|||
let var_data = db.enum_variant_data(def);
|
||||
let fields = var_data.variant_data.fields();
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(
|
||||
let mut ctx = TyLoweringContext::new(
|
||||
db,
|
||||
&resolver,
|
||||
var_data.variant_data.types_map(),
|
||||
|
@ -2087,7 +2070,7 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
|
|||
let generics = generics(db.upcast(), t.into());
|
||||
let resolver = t.resolver(db.upcast());
|
||||
let type_alias_data = db.type_alias_data(t);
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let inner = if type_alias_data.is_extern {
|
||||
|
@ -2169,7 +2152,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde
|
|||
let impl_data = db.impl_data(impl_id);
|
||||
let resolver = impl_id.resolver(db.upcast());
|
||||
let generics = generics(db.upcast(), impl_id.into());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
make_binders(db, &generics, ctx.lower_ty(impl_data.self_ty))
|
||||
}
|
||||
|
@ -2179,7 +2162,8 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
|
|||
let parent_data = db.generic_params(def.parent());
|
||||
let data = &parent_data[def.local_id()];
|
||||
let resolver = def.parent().resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &parent_data.types_map, def.parent().into());
|
||||
let mut ctx =
|
||||
TyLoweringContext::new(db, &resolver, &parent_data.types_map, def.parent().into());
|
||||
match data {
|
||||
TypeOrConstParamData::TypeParamData(_) => {
|
||||
never!();
|
||||
|
@ -2201,7 +2185,7 @@ pub(crate) fn impl_self_ty_recover(
|
|||
pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
|
||||
let impl_data = db.impl_data(impl_id);
|
||||
let resolver = impl_id.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
|
||||
let target_trait = impl_data.target_trait.as_ref()?;
|
||||
|
@ -2215,17 +2199,13 @@ pub(crate) fn return_type_impl_traits(
|
|||
// FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
|
||||
let data = db.function_data(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let _ret = ctx_ret.lower_ty(data.ret_type);
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let return_type_impl_traits = ImplTraits {
|
||||
impl_traits: match ctx_ret.impl_trait_mode {
|
||||
ImplTraitLoweringState::Opaque(x) => x.into_inner(),
|
||||
_ => unreachable!(),
|
||||
},
|
||||
};
|
||||
let return_type_impl_traits =
|
||||
ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
|
||||
if return_type_impl_traits.impl_traits.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
@ -2239,18 +2219,13 @@ pub(crate) fn type_alias_impl_traits(
|
|||
) -> Option<Arc<Binders<ImplTraits>>> {
|
||||
let data = db.type_alias_data(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
if let Some(type_ref) = data.type_ref {
|
||||
let _ty = ctx.lower_ty(type_ref);
|
||||
}
|
||||
let type_alias_impl_traits = ImplTraits {
|
||||
impl_traits: match ctx.impl_trait_mode {
|
||||
ImplTraitLoweringState::Opaque(x) => x.into_inner(),
|
||||
_ => unreachable!(),
|
||||
},
|
||||
};
|
||||
let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
|
||||
if type_alias_impl_traits.impl_traits.is_empty() {
|
||||
None
|
||||
} else {
|
||||
|
|
|
@ -12,8 +12,8 @@ use hir_def::{
|
|||
lang_item::LangItem,
|
||||
layout::{TagEncoding, Variants},
|
||||
resolver::{HasResolver, TypeNs, ValueNs},
|
||||
AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
|
||||
StaticId, VariantId,
|
||||
AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
|
||||
VariantId,
|
||||
};
|
||||
use hir_expand::{mod_path::path, name::Name, HirFileIdExt, InFile};
|
||||
use intern::sym;
|
||||
|
@ -40,8 +40,8 @@ use crate::{
|
|||
static_lifetime,
|
||||
traits::FnTrait,
|
||||
utils::{detect_variant_from_bytes, ClosureSubst},
|
||||
CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstScalar, FnDefId, Interner, MemoryMap,
|
||||
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
|
||||
CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
|
||||
MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
|
||||
};
|
||||
|
||||
use super::{
|
||||
|
@ -585,13 +585,9 @@ pub fn interpret_mir(
|
|||
// (and probably should) do better here, for example by excluding bindings outside of the target expression.
|
||||
assert_placeholder_ty_is_unused: bool,
|
||||
trait_env: Option<Arc<TraitEnvironment>>,
|
||||
) -> (Result<Const>, MirOutput) {
|
||||
) -> Result<(Result<Const>, MirOutput)> {
|
||||
let ty = body.locals[return_slot()].ty.clone();
|
||||
let mut evaluator =
|
||||
match Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env) {
|
||||
Ok(it) => it,
|
||||
Err(e) => return (Err(e), MirOutput { stdout: vec![], stderr: vec![] }),
|
||||
};
|
||||
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
|
||||
let it: Result<Const> = (|| {
|
||||
if evaluator.ptr_size() != std::mem::size_of::<usize>() {
|
||||
not_supported!("targets with different pointer size from host");
|
||||
|
@ -613,7 +609,7 @@ pub fn interpret_mir(
|
|||
};
|
||||
Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty))
|
||||
})();
|
||||
(it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr })
|
||||
Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))
|
||||
}
|
||||
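`interpret_mir` now returns `Result<(Result<Const>, MirOutput)>`: the outer `Result` covers failing to even build the evaluator (propagated with `?`, so no output exists yet), while the inner one still pairs an evaluation failure with the captured stdout/stderr. A small sketch of that nested-`Result` shape with illustrative types:

```rust
#[derive(Debug)]
struct Output {
    stdout: Vec<u8>,
}

#[derive(Debug)]
enum EvalError {
    Setup(&'static str),
    Runtime(&'static str),
}

fn build_evaluator(fail_setup: bool) -> Result<(), EvalError> {
    if fail_setup {
        return Err(EvalError::Setup("could not build evaluator"));
    }
    Ok(())
}

fn interpret(
    fail_setup: bool,
    fail_run: bool,
) -> Result<(Result<u32, EvalError>, Output), EvalError> {
    // Setup errors propagate with `?`; there is no Output to return yet.
    build_evaluator(fail_setup)?;
    let mut stdout = Vec::new();
    stdout.extend_from_slice(b"hello\n");
    // Runtime errors are paired with whatever output was produced.
    let result = if fail_run { Err(EvalError::Runtime("overflow")) } else { Ok(42) };
    Ok((result, Output { stdout }))
}

fn main() {
    assert!(interpret(true, false).is_err());
    let (result, output) = interpret(false, true).unwrap();
    assert!(result.is_err());
    assert_eq!(output.stdout, b"hello\n".to_vec());
    let (result, _) = interpret(false, false).unwrap();
    assert_eq!(result.unwrap(), 42);
}
```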
|
||||
#[cfg(test)]
|
||||
|
@ -1899,8 +1895,8 @@ impl Evaluator<'_> {
|
|||
|
||||
#[allow(clippy::double_parens)]
|
||||
fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
|
||||
let ty = &konst.data(Interner).ty;
|
||||
let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else {
|
||||
let ConstData { ty, value: chalk_ir::ConstValue::Concrete(c) } = &konst.data(Interner)
|
||||
else {
|
||||
not_supported!("evaluating non concrete constant");
|
||||
};
|
||||
let result_owner;
|
||||
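The `let ... else` above destructures the constant's type and its concrete value in one pattern, bailing out when the value is not concrete. The same shape as a sketch with made-up types:

```rust
struct ConstData {
    ty: &'static str,
    value: ConstValue,
}

enum ConstValue {
    Concrete(u64),
    Placeholder,
}

fn describe(konst: &ConstData) -> Result<String, &'static str> {
    // Both bindings or neither: if `value` is not `Concrete`, the else arm
    // must diverge (here by returning an error).
    let ConstData { ty, value: ConstValue::Concrete(c) } = konst else {
        return Err("evaluating non concrete constant");
    };
    Ok(format!("{c}: {ty}"))
}

fn main() {
    let ok = ConstData { ty: "u64", value: ConstValue::Concrete(7) };
    assert_eq!(describe(&ok).unwrap(), "7: u64");
    let bad = ConstData { ty: "u64", value: ConstValue::Placeholder };
    assert!(describe(&bad).is_err());
}
```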
|
@ -2908,14 +2904,14 @@ impl Evaluator<'_> {
|
|||
|
||||
pub fn render_const_using_debug_impl(
|
||||
db: &dyn HirDatabase,
|
||||
owner: ConstId,
|
||||
owner: DefWithBodyId,
|
||||
c: &Const,
|
||||
) -> Result<String> {
|
||||
let mut evaluator = Evaluator::new(db, owner.into(), false, None)?;
|
||||
let mut evaluator = Evaluator::new(db, owner, false, None)?;
|
||||
let locals = &Locals {
|
||||
ptr: ArenaMap::new(),
|
||||
body: db
|
||||
.mir_body(owner.into())
|
||||
.mir_body(owner)
|
||||
.map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?,
|
||||
drop_flags: DropFlags::default(),
|
||||
};
|
||||
|
|
|
@ -32,7 +32,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
|
|||
)
|
||||
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
|
||||
|
||||
let (result, output) = interpret_mir(db, body, false, None);
|
||||
let (result, output) = interpret_mir(db, body, false, None)?;
|
||||
result?;
|
||||
Ok((output.stdout().into_owned(), output.stderr().into_owned()))
|
||||
}
|
||||
|
|
|
@ -34,7 +34,10 @@ pub mod term_search;
|
|||
|
||||
mod display;
|
||||
|
||||
use std::{mem::discriminant, ops::ControlFlow};
|
||||
use std::{
|
||||
mem::discriminant,
|
||||
ops::{ControlFlow, Not},
|
||||
};
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
use base_db::{CrateDisplayName, CrateId, CrateOrigin};
|
||||
|
@ -2303,22 +2306,15 @@ impl Function {
|
|||
self,
|
||||
db: &dyn HirDatabase,
|
||||
span_formatter: impl Fn(FileId, TextRange) -> String,
|
||||
) -> String {
|
||||
) -> Result<String, ConstEvalError> {
|
||||
let krate = HasModule::krate(&self.id, db.upcast());
|
||||
let edition = db.crate_graph()[krate].edition;
|
||||
let body = match db.monomorphized_mir_body(
|
||||
let body = db.monomorphized_mir_body(
|
||||
self.id.into(),
|
||||
Substitution::empty(Interner),
|
||||
db.trait_environment(self.id.into()),
|
||||
) {
|
||||
Ok(body) => body,
|
||||
Err(e) => {
|
||||
let mut r = String::new();
|
||||
_ = e.pretty_print(&mut r, db, &span_formatter, edition);
|
||||
return r;
|
||||
}
|
||||
};
|
||||
let (result, output) = interpret_mir(db, body, false, None);
|
||||
)?;
|
||||
let (result, output) = interpret_mir(db, body, false, None)?;
|
||||
let mut text = match result {
|
||||
Ok(_) => "pass".to_owned(),
|
||||
Err(e) => {
|
||||
|
@ -2337,7 +2333,7 @@ impl Function {
|
|||
text += "\n--------- stderr ---------\n";
|
||||
text += &stderr;
|
||||
}
|
||||
text
|
||||
Ok(text)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2560,9 +2556,9 @@ impl Const {
|
|||
/// Evaluate the constant and return the result as a string.
|
||||
///
|
||||
/// This function is intended for IDE assistance, different from [`Const::render_eval`].
|
||||
pub fn eval(self, db: &dyn HirDatabase, edition: Edition) -> Result<String, ConstEvalError> {
|
||||
pub fn eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
|
||||
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
|
||||
Ok(format!("{}", c.display(db, edition)))
|
||||
Ok(format!("{}", c.display(db, self.krate(db).edition(db))))
|
||||
}
|
||||
|
||||
/// Evaluate the constant and return the result as a string, with more detailed information.
|
||||
|
@ -2597,7 +2593,7 @@ impl Const {
|
|||
}
|
||||
}
|
||||
}
|
||||
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
|
||||
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id.into(), &c) {
|
||||
Ok(s)
|
||||
} else {
|
||||
Ok(format!("{}", c.display(db, edition)))
|
||||
|
@ -2636,6 +2632,53 @@ impl Static {
|
|||
pub fn ty(self, db: &dyn HirDatabase) -> Type {
|
||||
Type::from_value_def(db, self.id)
|
||||
}
|
||||
|
||||
/// Evaluate the static and return the result as a string.
|
||||
///
|
||||
/// This function is intended for IDE assistance, different from [`Static::render_eval`].
|
||||
pub fn eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
|
||||
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
|
||||
Ok(format!("{}", c.display(db, self.krate(db).edition(db))))
|
||||
}
|
||||
|
||||
/// Evaluate the static and return the result as a string, with more detailed information.
|
||||
///
|
||||
/// This function is intended for user-facing display.
|
||||
pub fn render_eval(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
edition: Edition,
|
||||
) -> Result<String, ConstEvalError> {
|
||||
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
|
||||
let data = &c.data(Interner);
|
||||
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
|
||||
if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
|
||||
if let hir_ty::ConstValue::Concrete(c) = &data.value {
|
||||
if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned {
|
||||
let value = u128::from_le_bytes(mir::pad16(b, false));
|
||||
let value_signed =
|
||||
i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
|
||||
let mut result = if let Scalar::Int(_) = s {
|
||||
value_signed.to_string()
|
||||
} else {
|
||||
value.to_string()
|
||||
};
|
||||
if value >= 10 {
|
||||
format_to!(result, " ({value:#X})");
|
||||
return Ok(result);
|
||||
} else {
|
||||
return Ok(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id.into(), &c) {
|
||||
Ok(s)
|
||||
} else {
|
||||
Ok(format!("{}", c.display(db, edition)))
|
||||
}
|
||||
}
|
||||
}
|
||||
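The new `Static::render_eval` mirrors `Const::render_eval`: it pads the constant's little-endian bytes to 16 bytes (sign-extending for signed scalars), prints the decimal value, and appends a hex form for values of 10 or more. A standalone sketch of that byte handling; `pad16` is written out here only to keep the example self-contained, the real helper lives in the `mir` module:

```rust
// Pad little-endian bytes to 16 bytes, optionally sign-extending.
fn pad16(bytes: &[u8], is_signed: bool) -> [u8; 16] {
    let fill = if is_signed && bytes.last().is_some_and(|b| b & 0x80 != 0) {
        0xFF // sign-extend negative values
    } else {
        0x00
    };
    let mut out = [fill; 16];
    out[..bytes.len()].copy_from_slice(bytes);
    out
}

fn render(bytes: &[u8], is_signed: bool) -> String {
    let unsigned = u128::from_le_bytes(pad16(bytes, false));
    let signed = i128::from_le_bytes(pad16(bytes, is_signed));
    let mut result = if is_signed { signed.to_string() } else { unsigned.to_string() };
    // Small values read fine in decimal; larger ones also get a hex form.
    if unsigned >= 10 {
        result.push_str(&format!(" ({unsigned:#X})"));
    }
    result
}

fn main() {
    assert_eq!(render(&7u32.to_le_bytes(), false), "7");
    assert_eq!(render(&255u32.to_le_bytes(), false), "255 (0xFF)");
    assert_eq!(render(&(-1i8).to_le_bytes(), true), "-1 (0xFF)");
}
```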
|
||||
impl HasVisibility for Static {
|
||||
|
@ -2697,6 +2740,18 @@ impl Trait {
|
|||
hir_ty::dyn_compatibility::dyn_compatibility(db, self.id)
|
||||
}
|
||||
|
||||
pub fn dyn_compatibility_all_violations(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
) -> Option<Vec<DynCompatibilityViolation>> {
|
||||
let mut violations = vec![];
|
||||
hir_ty::dyn_compatibility::dyn_compatibility_with_callback(db, self.id, &mut |violation| {
|
||||
violations.push(violation);
|
||||
ControlFlow::Continue(())
|
||||
});
|
||||
violations.is_empty().not().then_some(violations)
|
||||
}
|
||||
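`dyn_compatibility_all_violations` collects every violation by handing the checker a callback that always returns `ControlFlow::Continue`, then maps an empty list to `None`. A sketch of that collect-everything adapter over a hypothetical callback API:

```rust
use std::ops::{ControlFlow, Not};

// Stand-in for the checker: reports findings one by one and stops early
// if the callback asks it to.
fn visit_violations(cb: &mut dyn FnMut(&'static str) -> ControlFlow<()>) {
    for v in ["missing Sized bound", "generic method", "uses Self in return"] {
        if cb(v).is_break() {
            return;
        }
    }
}

fn all_violations() -> Option<Vec<&'static str>> {
    let mut violations = vec![];
    // Always continue, so every violation is recorded instead of only the first.
    visit_violations(&mut |violation| {
        violations.push(violation);
        ControlFlow::Continue(())
    });
    // An empty list means "dyn compatible": surface that as None.
    violations.is_empty().not().then_some(violations)
}

fn main() {
    assert_eq!(all_violations().map(|v| v.len()), Some(3));
}
```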
|
||||
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
|
||||
db.trait_data(self.id)
|
||||
.macro_calls
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
//! See `Semantics`.
|
||||
|
||||
mod child_by_source;
|
||||
mod source_to_def;
|
||||
|
||||
use std::{
|
||||
|
@ -1271,9 +1272,9 @@ impl<'db> SemanticsImpl<'db> {
|
|||
let analyze = self.analyze(ty.syntax())?;
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let ctx =
|
||||
let mut ctx =
|
||||
LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map);
|
||||
let type_ref = crate::TypeRef::from_ast(&ctx, ty.clone());
|
||||
let type_ref = crate::TypeRef::from_ast(&mut ctx, ty.clone());
|
||||
let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
|
||||
self.db,
|
||||
&analyze.resolver,
|
||||
|
@ -1289,9 +1290,9 @@ impl<'db> SemanticsImpl<'db> {
|
|||
let analyze = self.analyze(path.syntax())?;
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let ctx =
|
||||
let mut ctx =
|
||||
LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map);
|
||||
let hir_path = Path::from_src(&ctx, path.clone())?;
|
||||
let hir_path = Path::from_src(&mut ctx, path.clone())?;
|
||||
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
|
||||
TypeNs::TraitId(id) => Some(Trait { id }),
|
||||
_ => None,
|
||||
|
@ -1974,9 +1975,9 @@ impl SemanticsScope<'_> {
|
|||
pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let ctx =
|
||||
let mut ctx =
|
||||
LowerCtx::new(self.db.upcast(), self.file_id, &mut types_map, &mut types_source_map);
|
||||
let path = Path::from_src(&ctx, ast_path.clone())?;
|
||||
let path = Path::from_src(&mut ctx, ast_path.clone())?;
|
||||
resolve_hir_path(
|
||||
self.db,
|
||||
&self.resolver,
|
||||
|
|
|
@ -8,7 +8,7 @@ use either::Either;
|
|||
use hir_expand::{attrs::collect_attrs, HirFileId};
|
||||
use syntax::{ast, AstPtr};
|
||||
|
||||
use crate::{
|
||||
use hir_def::{
|
||||
db::DefDatabase,
|
||||
dyn_map::{
|
||||
keys::{self, Key},
|
||||
|
@ -23,7 +23,7 @@ use crate::{
|
|||
VariantId,
|
||||
};
|
||||
|
||||
pub trait ChildBySource {
|
||||
pub(crate) trait ChildBySource {
|
||||
fn child_by_source(&self, db: &dyn DefDatabase, file_id: HirFileId) -> DynMap {
|
||||
let mut res = DynMap::default();
|
||||
self.child_by_source_to(db, &mut res, file_id);
|
|
@ -87,7 +87,6 @@
|
|||
|
||||
use either::Either;
|
||||
use hir_def::{
|
||||
child_by_source::ChildBySource,
|
||||
dyn_map::{
|
||||
keys::{self, Key},
|
||||
DynMap,
|
||||
|
@ -111,7 +110,10 @@ use syntax::{
|
|||
AstNode, AstPtr, SyntaxNode,
|
||||
};
|
||||
|
||||
use crate::{db::HirDatabase, InFile, InlineAsmOperand, SemanticsImpl};
|
||||
use crate::{
|
||||
db::HirDatabase, semantics::child_by_source::ChildBySource, InFile, InlineAsmOperand,
|
||||
SemanticsImpl,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(super) struct SourceToDefCache {
|
||||
|
|
|
@ -616,9 +616,9 @@ impl SourceAnalyzer {
|
|||
) -> Option<Macro> {
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let ctx =
|
||||
let mut ctx =
|
||||
LowerCtx::new(db.upcast(), macro_call.file_id, &mut types_map, &mut types_source_map);
|
||||
let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
|
||||
let path = macro_call.value.path().and_then(|ast| Path::from_src(&mut ctx, ast))?;
|
||||
self.resolver
|
||||
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
|
||||
.map(|(it, _)| it.into())
|
||||
|
@ -731,8 +731,9 @@ impl SourceAnalyzer {
|
|||
|
||||
let (mut types_map, mut types_source_map) =
|
||||
(TypesMap::default(), TypesSourceMap::default());
|
||||
let ctx = LowerCtx::new(db.upcast(), self.file_id, &mut types_map, &mut types_source_map);
|
||||
let hir_path = Path::from_src(&ctx, path.clone())?;
|
||||
let mut ctx =
|
||||
LowerCtx::new(db.upcast(), self.file_id, &mut types_map, &mut types_source_map);
|
||||
let hir_path = Path::from_src(&mut ctx, path.clone())?;
|
||||
|
||||
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
|
||||
// trying to resolve foo::bar.
|
||||
|
|
|
@ -104,7 +104,7 @@ fn edit_struct_def(
|
|||
ast::make::tokens::single_newline().text(),
|
||||
);
|
||||
edit.insert(tuple_fields_text_range.start(), w.syntax().text());
|
||||
if !w.syntax().last_token().is_some_and(|t| t.kind() == SyntaxKind::COMMA) {
|
||||
if w.syntax().last_token().is_none_or(|t| t.kind() != SyntaxKind::COMMA) {
|
||||
edit.insert(tuple_fields_text_range.start(), ",");
|
||||
}
|
||||
edit.insert(
|
||||
|
|
|
@ -0,0 +1,430 @@
|
|||
use either::Either;
|
||||
use ide_db::assists::{AssistId, AssistKind, GroupLabel};
|
||||
use syntax::{
|
||||
ast::{self, edit::IndentLevel, make, HasGenericParams, HasName},
|
||||
syntax_editor, AstNode,
|
||||
};
|
||||
|
||||
use crate::{AssistContext, Assists};
|
||||
|
||||
// Assist: generate_fn_type_alias_named
|
||||
//
|
||||
// Generate a type alias for the function with named parameters.
|
||||
//
|
||||
// ```
|
||||
// unsafe fn fo$0o(n: i32) -> i32 { 42i32 }
|
||||
// ```
|
||||
// ->
|
||||
// ```
|
||||
// type ${0:FooFn} = unsafe fn(n: i32) -> i32;
|
||||
//
|
||||
// unsafe fn foo(n: i32) -> i32 { 42i32 }
|
||||
// ```
|
||||
|
||||
// Assist: generate_fn_type_alias_unnamed
|
||||
//
|
||||
// Generate a type alias for the function with unnamed parameters.
|
||||
//
|
||||
// ```
|
||||
// unsafe fn fo$0o(n: i32) -> i32 { 42i32 }
|
||||
// ```
|
||||
// ->
|
||||
// ```
|
||||
// type ${0:FooFn} = unsafe fn(i32) -> i32;
|
||||
//
|
||||
// unsafe fn foo(n: i32) -> i32 { 42i32 }
|
||||
// ```
|
||||
|
||||
pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
let name = ctx.find_node_at_offset::<ast::Name>()?;
|
||||
let func = &name.syntax().parent()?;
|
||||
let func_node = ast::Fn::cast(func.clone())?;
|
||||
let param_list = func_node.param_list()?;
|
||||
|
||||
let assoc_owner = func.ancestors().nth(2).and_then(Either::<ast::Trait, ast::Impl>::cast);
|
||||
// This is where we'll insert the type alias, since type aliases in `impl`s or `trait`s are not supported
|
||||
let insertion_node = assoc_owner
|
||||
.as_ref()
|
||||
.map_or_else(|| func, |impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax));
|
||||
|
||||
for style in ParamStyle::ALL {
|
||||
acc.add_group(
|
||||
&GroupLabel("Generate a type alias for function...".into()),
|
||||
style.assist_id(),
|
||||
style.label(),
|
||||
func_node.syntax().text_range(),
|
||||
|builder| {
|
||||
let mut edit = builder.make_editor(func);
|
||||
|
||||
let alias_name = format!("{}Fn", stdx::to_camel_case(&name.to_string()));
|
||||
|
||||
let mut fn_params_vec = Vec::new();
|
||||
|
||||
if let Some(self_ty) =
|
||||
param_list.self_param().and_then(|p| ctx.sema.type_of_self(&p))
|
||||
{
|
||||
let is_ref = self_ty.is_reference();
|
||||
let is_mut = self_ty.is_mutable_reference();
|
||||
|
||||
if let Some(adt) = self_ty.strip_references().as_adt() {
|
||||
let inner_type = make::ty(adt.name(ctx.db()).as_str());
|
||||
|
||||
let ast_self_ty =
|
||||
if is_ref { make::ty_ref(inner_type, is_mut) } else { inner_type };
|
||||
|
||||
fn_params_vec.push(make::unnamed_param(ast_self_ty));
|
||||
}
|
||||
}
|
||||
|
||||
fn_params_vec.extend(param_list.params().filter_map(|p| match style {
|
||||
ParamStyle::Named => Some(p),
|
||||
ParamStyle::Unnamed => p.ty().map(make::unnamed_param),
|
||||
}));
|
||||
|
||||
let generic_params = func_node.generic_param_list();
|
||||
|
||||
let is_unsafe = func_node.unsafe_token().is_some();
|
||||
let ty = make::ty_fn_ptr(
|
||||
None,
|
||||
is_unsafe,
|
||||
func_node.abi(),
|
||||
fn_params_vec.into_iter(),
|
||||
func_node.ret_type(),
|
||||
);
|
||||
|
||||
// Insert new alias
|
||||
let ty_alias = make::ty_alias(
|
||||
&alias_name,
|
||||
generic_params,
|
||||
None,
|
||||
None,
|
||||
Some((ast::Type::FnPtrType(ty), None)),
|
||||
)
|
||||
.clone_for_update();
|
||||
|
||||
let indent = IndentLevel::from_node(insertion_node);
|
||||
edit.insert_all(
|
||||
syntax_editor::Position::before(insertion_node),
|
||||
vec![
|
||||
ty_alias.syntax().clone().into(),
|
||||
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
|
||||
],
|
||||
);
|
||||
|
||||
if let Some(cap) = ctx.config.snippet_cap {
|
||||
if let Some(name) = ty_alias.name() {
|
||||
edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap));
|
||||
}
|
||||
}
|
||||
|
||||
builder.add_file_edits(ctx.file_id(), edit);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Some(())
|
||||
}
|
||||
|
||||
enum ParamStyle {
|
||||
Named,
|
||||
Unnamed,
|
||||
}
|
||||
|
||||
impl ParamStyle {
|
||||
const ALL: &'static [ParamStyle] = &[ParamStyle::Named, ParamStyle::Unnamed];
|
||||
|
||||
fn assist_id(&self) -> AssistId {
|
||||
let s = match self {
|
||||
ParamStyle::Named => "generate_fn_type_alias_named",
|
||||
ParamStyle::Unnamed => "generate_fn_type_alias_unnamed",
|
||||
};
|
||||
|
||||
AssistId(s, AssistKind::Generate)
|
||||
}
|
||||
|
||||
fn label(&self) -> &'static str {
|
||||
match self {
|
||||
ParamStyle::Named => "Generate a type alias for function with named params",
|
||||
ParamStyle::Unnamed => "Generate a type alias for function with unnamed params",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::check_assist_by_label;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_simple() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = fn(u32) -> i32;
|
||||
|
||||
fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_unsafe() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
unsafe fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = unsafe fn(u32) -> i32;
|
||||
|
||||
unsafe fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_extern() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
extern fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = extern fn(u32) -> i32;
|
||||
|
||||
extern fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_type_unnamed_extern_abi() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
extern "FooABI" fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = extern "FooABI" fn(u32) -> i32;
|
||||
|
||||
extern "FooABI" fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_unsafe_extern_abi() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
unsafe extern "FooABI" fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = unsafe extern "FooABI" fn(u32) -> i32;
|
||||
|
||||
unsafe extern "FooABI" fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_generics() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o<A, B>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn}<A, B> = fn(A, B) -> i32;
|
||||
|
||||
fn foo<A, B>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_generics_bounds() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o<A: Trait, B: Trait>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn}<A: Trait, B: Trait> = fn(A, B) -> i32;
|
||||
|
||||
fn foo<A: Trait, B: Trait>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_unnamed_self() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
struct S;
|
||||
|
||||
impl S {
|
||||
fn fo$0o(&mut self, param: u32) -> i32 { return 42; }
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct S;
|
||||
|
||||
type ${0:FooFn} = fn(&mut S, u32) -> i32;
|
||||
|
||||
impl S {
|
||||
fn foo(&mut self, param: u32) -> i32 { return 42; }
|
||||
}
|
||||
"#,
|
||||
ParamStyle::Unnamed.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_simple() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = fn(param: u32) -> i32;
|
||||
|
||||
fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_unsafe() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
unsafe fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = unsafe fn(param: u32) -> i32;
|
||||
|
||||
unsafe fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_extern() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
extern fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = extern fn(param: u32) -> i32;
|
||||
|
||||
extern fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_type_named_extern_abi() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
extern "FooABI" fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = extern "FooABI" fn(param: u32) -> i32;
|
||||
|
||||
extern "FooABI" fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_unsafe_extern_abi() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
unsafe extern "FooABI" fn fo$0o(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn} = unsafe extern "FooABI" fn(param: u32) -> i32;
|
||||
|
||||
unsafe extern "FooABI" fn foo(param: u32) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_generics() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o<A, B>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn}<A, B> = fn(a: A, b: B) -> i32;
|
||||
|
||||
fn foo<A, B>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_generics_bounds() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
fn fo$0o<A: Trait, B: Trait>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
r#"
|
||||
type ${0:FooFn}<A: Trait, B: Trait> = fn(a: A, b: B) -> i32;
|
||||
|
||||
fn foo<A: Trait, B: Trait>(a: A, b: B) -> i32 { return 42; }
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_fn_alias_named_self() {
|
||||
check_assist_by_label(
|
||||
generate_fn_type_alias,
|
||||
r#"
|
||||
struct S;
|
||||
|
||||
impl S {
|
||||
fn fo$0o(&mut self, param: u32) -> i32 { return 42; }
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct S;
|
||||
|
||||
type ${0:FooFn} = fn(&mut S, param: u32) -> i32;
|
||||
|
||||
impl S {
|
||||
fn foo(&mut self, param: u32) -> i32 { return 42; }
|
||||
}
|
||||
"#,
|
||||
ParamStyle::Named.label(),
|
||||
);
|
||||
}
|
||||
}
|
|
@ -51,10 +51,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
        | ast::Expr::MatchExpr(_)
        | ast::Expr::MacroExpr(_)
        | ast::Expr::BinExpr(_)
        | ast::Expr::CallExpr(_) => {
            let edition = ctx.sema.scope(variable.syntax())?.krate().edition(ctx.db());
            konst.eval(ctx.sema.db, edition).ok()?
        }
        | ast::Expr::CallExpr(_) => konst.eval(ctx.sema.db).ok()?,
        _ => return None,
    };
|
|
|
@ -161,6 +161,7 @@ mod handlers {
    mod generate_enum_is_method;
    mod generate_enum_projection_method;
    mod generate_enum_variant;
    mod generate_fn_type_alias;
    mod generate_from_impl_for_enum;
    mod generate_function;
    mod generate_getter_or_setter;

@ -289,6 +290,7 @@ mod handlers {
        generate_enum_projection_method::generate_enum_as_method,
        generate_enum_projection_method::generate_enum_try_into_method,
        generate_enum_variant::generate_enum_variant,
        generate_fn_type_alias::generate_fn_type_alias,
        generate_from_impl_for_enum::generate_from_impl_for_enum,
        generate_function::generate_function,
        generate_impl::generate_impl,
|
|
|
@ -1548,6 +1548,36 @@ fn main() {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_fn_type_alias_named() {
|
||||
check_doc_test(
|
||||
"generate_fn_type_alias_named",
|
||||
r#####"
|
||||
unsafe fn fo$0o(n: i32) -> i32 { 42i32 }
|
||||
"#####,
|
||||
r#####"
|
||||
type ${0:FooFn} = unsafe fn(n: i32) -> i32;
|
||||
|
||||
unsafe fn foo(n: i32) -> i32 { 42i32 }
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_fn_type_alias_unnamed() {
|
||||
check_doc_test(
|
||||
"generate_fn_type_alias_unnamed",
|
||||
r#####"
|
||||
unsafe fn fo$0o(n: i32) -> i32 { 42i32 }
|
||||
"#####,
|
||||
r#####"
|
||||
type ${0:FooFn} = unsafe fn(i32) -> i32;
|
||||
|
||||
unsafe fn foo(n: i32) -> i32 { 42i32 }
|
||||
"#####,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn doctest_generate_from_impl_for_enum() {
|
||||
check_doc_test(
|
||||
|
|
|
@ -281,8 +281,8 @@ pub(crate) fn render_resolution_with_import(
|
|||
import_edit: LocatedImport,
|
||||
) -> Option<Builder> {
|
||||
let resolution = ScopeDef::from(import_edit.original_item);
|
||||
let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
|
||||
//this now just renders the alias text, but we need to find the aliases earlier and call this with the alias instead
|
||||
let local_name = get_import_name(resolution, &ctx, &import_edit)?;
|
||||
// This now just renders the alias text, but we need to find the aliases earlier and call this with the alias instead.
|
||||
let doc_aliases = ctx.completion.doc_aliases_in_scope(resolution);
|
||||
let ctx = ctx.doc_aliases(doc_aliases);
|
||||
Some(render_resolution_path(ctx, path_ctx, local_name, Some(import_edit), resolution))
|
||||
|
@ -294,7 +294,7 @@ pub(crate) fn render_resolution_with_import_pat(
|
|||
import_edit: LocatedImport,
|
||||
) -> Option<Builder> {
|
||||
let resolution = ScopeDef::from(import_edit.original_item);
|
||||
let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
|
||||
let local_name = get_import_name(resolution, &ctx, &import_edit)?;
|
||||
Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
|
||||
}
|
||||
|
||||
|
@ -357,6 +357,24 @@ pub(crate) fn render_expr(
|
|||
Some(item)
|
||||
}
|
||||
|
||||
fn get_import_name(
|
||||
resolution: ScopeDef,
|
||||
ctx: &RenderContext<'_>,
|
||||
import_edit: &LocatedImport,
|
||||
) -> Option<hir::Name> {
|
||||
// FIXME: Temporary workaround for handling aliased import.
|
||||
// This should be removed after we have proper support for importing alias.
|
||||
// <https://github.com/rust-lang/rust-analyzer/issues/14079>
|
||||
|
||||
// If `item_to_import` matches `original_item`, we are importing the item itself (not its parent module).
|
||||
// In this case, we can use the last segment of `import_path`, as it accounts for the aliased name.
|
||||
if import_edit.item_to_import == import_edit.original_item {
|
||||
import_edit.import_path.segments().last().cloned()
|
||||
} else {
|
||||
scope_def_to_name(resolution, ctx, import_edit)
|
||||
}
|
||||
}
|
||||
|
||||
fn scope_def_to_name(
|
||||
resolution: ScopeDef,
|
||||
ctx: &RenderContext<'_>,
|
||||
|
|
|
@ -1669,3 +1669,54 @@ mod module {
|
|||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn re_export_aliased() {
|
||||
check(
|
||||
r#"
|
||||
mod outer {
|
||||
mod inner {
|
||||
pub struct BarStruct;
|
||||
pub fn bar_fun() {}
|
||||
pub mod bar {}
|
||||
}
|
||||
pub use inner::bar as foo;
|
||||
pub use inner::bar_fun as foo_fun;
|
||||
pub use inner::BarStruct as FooStruct;
|
||||
}
|
||||
fn function() {
|
||||
foo$0
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
st FooStruct (use outer::FooStruct) BarStruct
|
||||
md foo (use outer::foo)
|
||||
fn foo_fun() (use outer::foo_fun) fn()
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn re_export_aliased_pattern() {
|
||||
check(
|
||||
r#"
|
||||
mod outer {
|
||||
mod inner {
|
||||
pub struct BarStruct;
|
||||
pub fn bar_fun() {}
|
||||
pub mod bar {}
|
||||
}
|
||||
pub use inner::bar as foo;
|
||||
pub use inner::bar_fun as foo_fun;
|
||||
pub use inner::BarStruct as FooStruct;
|
||||
}
|
||||
fn function() {
|
||||
let foo$0
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
st FooStruct (use outer::FooStruct)
|
||||
md foo (use outer::foo)
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
|
|
@ -382,7 +382,7 @@ pub fn semantic_diagnostics(
        // A bunch of parse errors in a file indicate some bigger structural parse changes in the
        // file, so we skip semantic diagnostics so we can show these faster.
        Some(m) => {
            if !db.parse_errors(file_id).as_deref().is_some_and(|es| es.len() >= 16) {
            if db.parse_errors(file_id).as_deref().is_none_or(|es| es.len() < 16) {
                m.diagnostics(db, &mut diags, config.style_lints);
            }
        }
|
|
|
@ -158,7 +158,7 @@ fn hover_offset(
|
|||
if let Some(doc_comment) = token_as_doc_comment(&original_token) {
|
||||
cov_mark::hit!(no_highlight_on_comment_hover);
|
||||
return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
|
||||
let res = hover_for_definition(sema, file_id, def, &node, None, config, edition);
|
||||
let res = hover_for_definition(sema, file_id, def, &node, None, false, config, edition);
|
||||
Some(RangeInfo::new(range, res))
|
||||
});
|
||||
}
|
||||
|
@ -172,6 +172,7 @@ fn hover_offset(
|
|||
Definition::from(resolution?),
|
||||
&original_token.parent()?,
|
||||
None,
|
||||
false,
|
||||
config,
|
||||
edition,
|
||||
);
|
||||
|
@ -218,6 +219,7 @@ fn hover_offset(
|
|||
break 'a vec![(
|
||||
Definition::Macro(macro_),
|
||||
sema.resolve_macro_call_arm(¯o_call),
|
||||
false,
|
||||
node,
|
||||
)];
|
||||
}
|
||||
|
@ -234,19 +236,34 @@ fn hover_offset(
|
|||
decl,
|
||||
..
|
||||
}) => {
|
||||
vec![(Definition::ExternCrateDecl(decl), None, node)]
|
||||
vec![(Definition::ExternCrateDecl(decl), None, false, node)]
|
||||
}
|
||||
|
||||
class => {
|
||||
multizip((class.definitions(), iter::repeat(None), iter::repeat(node)))
|
||||
.collect::<Vec<_>>()
|
||||
let is_def = matches!(class, IdentClass::NameClass(_));
|
||||
multizip((
|
||||
class.definitions(),
|
||||
iter::repeat(None),
|
||||
iter::repeat(is_def),
|
||||
iter::repeat(node),
|
||||
))
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
}
|
||||
}
|
||||
.into_iter()
|
||||
.unique_by(|&(def, _, _)| def)
|
||||
.map(|(def, macro_arm, node)| {
|
||||
hover_for_definition(sema, file_id, def, &node, macro_arm, config, edition)
|
||||
.unique_by(|&(def, _, _, _)| def)
|
||||
.map(|(def, macro_arm, hovered_definition, node)| {
|
||||
hover_for_definition(
|
||||
sema,
|
||||
file_id,
|
||||
def,
|
||||
&node,
|
||||
macro_arm,
|
||||
hovered_definition,
|
||||
config,
|
||||
edition,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
|
@ -366,6 +383,7 @@ pub(crate) fn hover_for_definition(
|
|||
def: Definition,
|
||||
scope_node: &SyntaxNode,
|
||||
macro_arm: Option<u32>,
|
||||
hovered_definition: bool,
|
||||
config: &HoverConfig,
|
||||
edition: Edition,
|
||||
) -> HoverResult {
|
||||
|
@ -397,6 +415,7 @@ pub(crate) fn hover_for_definition(
|
|||
famous_defs.as_ref(),
|
||||
¬able_traits,
|
||||
macro_arm,
|
||||
hovered_definition,
|
||||
config,
|
||||
edition,
|
||||
);
|
||||
|
|
|
@ -273,7 +273,7 @@ pub(super) fn keyword(
|
|||
let markup = process_markup(
|
||||
sema.db,
|
||||
Definition::Module(doc_owner),
|
||||
&markup(Some(docs.into()), description, None),
|
||||
&markup(Some(docs.into()), description, None, None),
|
||||
config,
|
||||
);
|
||||
Some(HoverResult { markup, actions })
|
||||
|
@ -419,6 +419,7 @@ pub(super) fn definition(
|
|||
famous_defs: Option<&FamousDefs<'_, '_>>,
|
||||
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
|
||||
macro_arm: Option<u32>,
|
||||
hovered_definition: bool,
|
||||
config: &HoverConfig,
|
||||
edition: Edition,
|
||||
) -> Markup {
|
||||
|
@ -456,7 +457,7 @@ pub(super) fn definition(
|
|||
_ => def.label(db, edition),
|
||||
};
|
||||
let docs = def.docs(db, famous_defs, edition);
|
||||
let value = (|| match def {
|
||||
let value = || match def {
|
||||
Definition::Variant(it) => {
|
||||
if !it.parent_enum(db).is_data_carrying(db) {
|
||||
match it.eval(db) {
|
||||
|
@ -485,18 +486,24 @@ pub(super) fn definition(
|
|||
}
|
||||
}
|
||||
Definition::Static(it) => {
|
||||
let source = it.source(db)?;
|
||||
let mut body = source.value.body()?.syntax().clone();
|
||||
if let Some(macro_file) = source.file_id.macro_file() {
|
||||
let span_map = db.expansion_span_map(macro_file);
|
||||
body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
|
||||
let body = it.render_eval(db, edition);
|
||||
match body {
|
||||
Ok(it) => Some(it),
|
||||
Err(_) => {
|
||||
let source = it.source(db)?;
|
||||
let mut body = source.value.body()?.syntax().clone();
|
||||
if let Some(macro_file) = source.file_id.macro_file() {
|
||||
let span_map = db.expansion_span_map(macro_file);
|
||||
body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
|
||||
}
|
||||
Some(body.to_string())
|
||||
}
|
||||
}
|
||||
Some(body.to_string())
|
||||
}
|
||||
_ => None,
|
||||
})();
|
||||
};
|
||||
|
||||
let layout_info = match def {
|
||||
let layout_info = || match def {
|
||||
Definition::Field(it) => render_memory_layout(
|
||||
config.memory_layout,
|
||||
|| it.layout(db),
|
||||
|
@ -529,34 +536,38 @@ pub(super) fn definition(
|
|||
_ => None,
|
||||
};
|
||||
|
||||
let dyn_compatibility_info = if let Definition::Trait(it) = def {
|
||||
let mut dyn_compatibility_info = String::new();
|
||||
render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db));
|
||||
Some(dyn_compatibility_info)
|
||||
} else {
|
||||
None
|
||||
let dyn_compatibility_info = || match def {
|
||||
Definition::Trait(it) => {
|
||||
let mut dyn_compatibility_info = String::new();
|
||||
render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db));
|
||||
Some(dyn_compatibility_info)
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let mut extra = String::new();
|
||||
if hovered_definition {
|
||||
if let Some(notable_traits) = render_notable_trait(db, notable_traits, edition) {
|
||||
extra.push_str("\n___\n");
|
||||
extra.push_str(¬able_traits);
|
||||
}
|
||||
if let Some(layout_info) = layout_info() {
|
||||
extra.push_str("\n___\n");
|
||||
extra.push_str(&layout_info);
|
||||
}
|
||||
if let Some(dyn_compatibility_info) = dyn_compatibility_info() {
|
||||
extra.push_str("\n___\n");
|
||||
extra.push_str(&dyn_compatibility_info);
|
||||
}
|
||||
}
|
||||
let mut desc = String::new();
|
||||
if let Some(notable_traits) = render_notable_trait_comment(db, notable_traits, edition) {
|
||||
desc.push_str(¬able_traits);
|
||||
desc.push('\n');
|
||||
}
|
||||
if let Some(layout_info) = layout_info {
|
||||
desc.push_str(&layout_info);
|
||||
desc.push('\n');
|
||||
}
|
||||
if let Some(dyn_compatibility_info) = dyn_compatibility_info {
|
||||
desc.push_str(&dyn_compatibility_info);
|
||||
desc.push('\n');
|
||||
}
|
||||
desc.push_str(&label);
|
||||
if let Some(value) = value {
|
||||
if let Some(value) = value() {
|
||||
desc.push_str(" = ");
|
||||
desc.push_str(&value);
|
||||
}
|
||||
|
||||
markup(docs.map(Into::into), desc, mod_path)
|
||||
markup(docs.map(Into::into), desc, extra.is_empty().not().then_some(extra), mod_path)
|
||||
}
|
||||
|
||||
pub(super) fn literal(
|
||||
|
@ -626,7 +637,7 @@ pub(super) fn literal(
|
|||
Some(s.into())
|
||||
}
|
||||
|
||||
fn render_notable_trait_comment(
|
||||
fn render_notable_trait(
|
||||
db: &RootDatabase,
|
||||
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
|
||||
edition: Edition,
|
||||
|
@ -635,7 +646,7 @@ fn render_notable_trait_comment(
|
|||
let mut needs_impl_header = true;
|
||||
for (trait_, assoc_types) in notable_traits {
|
||||
desc.push_str(if mem::take(&mut needs_impl_header) {
|
||||
"// Implements notable traits: "
|
||||
"Implements notable traits: "
|
||||
} else {
|
||||
", "
|
||||
});
|
||||
|
@ -728,13 +739,12 @@ fn type_info(
|
|||
)
|
||||
.into()
|
||||
} else {
|
||||
let mut desc =
|
||||
match render_notable_trait_comment(db, ¬able_traits(db, &original), edition) {
|
||||
Some(desc) => desc + "\n",
|
||||
None => String::new(),
|
||||
};
|
||||
format_to!(desc, "{}", original.display(db, edition));
|
||||
Markup::fenced_block(&desc)
|
||||
let mut desc = format!("```rust\n{}\n```", original.display(db, edition));
|
||||
if let Some(extra) = render_notable_trait(db, ¬able_traits(db, &original), edition) {
|
||||
desc.push_str("\n___\n");
|
||||
desc.push_str(&extra);
|
||||
};
|
||||
desc.into()
|
||||
};
|
||||
if let Some(actions) = HoverAction::goto_type_from_targets(db, targets, edition) {
|
||||
res.actions.push(actions);
|
||||
|
@ -786,20 +796,16 @@ fn closure_ty(
|
|||
};
|
||||
let mut markup = format!("```rust\n{}", c.display_with_id(sema.db, edition));
|
||||
|
||||
if let Some(layout) =
|
||||
render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None)
|
||||
{
|
||||
format_to!(markup, " {layout}");
|
||||
}
|
||||
if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) {
|
||||
push_new_def(hir::Trait::from(trait_).into())
|
||||
}
|
||||
format_to!(
|
||||
markup,
|
||||
"\n{}\n```{adjusted}\n\n## Captures\n{}",
|
||||
c.display_with_impl(sema.db, edition),
|
||||
captures_rendered,
|
||||
);
|
||||
format_to!(markup, "\n{}\n```", c.display_with_impl(sema.db, edition),);
|
||||
if let Some(layout) =
|
||||
render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None)
|
||||
{
|
||||
format_to!(markup, "\n___\n{layout}");
|
||||
}
|
||||
format_to!(markup, "{adjusted}\n\n## Captures\n{}", captures_rendered,);
|
||||
|
||||
let mut res = HoverResult::default();
|
||||
if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) {
|
||||
|
@ -824,7 +830,12 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) ->
|
|||
.map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
|
||||
}
|
||||
|
||||
fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup {
|
||||
fn markup(
|
||||
docs: Option<String>,
|
||||
rust: String,
|
||||
extra: Option<String>,
|
||||
mod_path: Option<String>,
|
||||
) -> Markup {
|
||||
let mut buf = String::new();
|
||||
|
||||
if let Some(mod_path) = mod_path {
|
||||
|
@ -832,7 +843,11 @@ fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Marku
|
|||
format_to!(buf, "```rust\n{}\n```\n\n", mod_path);
|
||||
}
|
||||
}
|
||||
format_to!(buf, "```rust\n{}\n```", desc);
|
||||
format_to!(buf, "```rust\n{}\n```", rust);
|
||||
|
||||
if let Some(extra) = extra {
|
||||
buf.push_str(&extra);
|
||||
}
|
||||
|
||||
if let Some(doc) = docs {
|
||||
format_to!(buf, "\n___\n\n{}", doc);
|
||||
|
@ -862,7 +877,7 @@ fn render_memory_layout(
|
|||
let config = config?;
|
||||
let layout = layout().ok()?;
|
||||
|
||||
let mut label = String::from("// ");
|
||||
let mut label = String::new();
|
||||
|
||||
if let Some(render) = config.size {
|
||||
let size = match tag(&layout) {
|
||||
|
@ -994,55 +1009,53 @@ fn render_dyn_compatibility(
|
|||
safety: Option<DynCompatibilityViolation>,
|
||||
) {
|
||||
let Some(osv) = safety else {
|
||||
buf.push_str("// Dyn Compatible: Yes");
|
||||
buf.push_str("Is Dyn compatible");
|
||||
return;
|
||||
};
|
||||
buf.push_str("// Dyn Compatible: No\n// - Reason: ");
|
||||
buf.push_str("Is not Dyn compatible due to ");
|
||||
match osv {
|
||||
DynCompatibilityViolation::SizedSelf => {
|
||||
buf.push_str("has a `Self: Sized` bound");
|
||||
buf.push_str("having a `Self: Sized` bound");
|
||||
}
|
||||
DynCompatibilityViolation::SelfReferential => {
|
||||
buf.push_str("has a bound that references `Self`");
|
||||
buf.push_str("having a bound that references `Self`");
|
||||
}
|
||||
DynCompatibilityViolation::Method(func, mvc) => {
|
||||
let name = hir::Function::from(func).name(db);
|
||||
format_to!(
|
||||
buf,
|
||||
"has a method `{}` that is non dispatchable because of:\n// - ",
|
||||
name.as_str()
|
||||
);
|
||||
format_to!(buf, "having a method `{}` that is not dispatchable due to ", name.as_str());
|
||||
let desc = match mvc {
|
||||
MethodViolationCode::StaticMethod => "missing a receiver",
|
||||
MethodViolationCode::ReferencesSelfInput => "a parameter references `Self`",
|
||||
MethodViolationCode::ReferencesSelfOutput => "the return type references `Self`",
|
||||
MethodViolationCode::ReferencesSelfInput => "having a parameter referencing `Self`",
|
||||
MethodViolationCode::ReferencesSelfOutput => "the return type referencing `Self`",
|
||||
MethodViolationCode::ReferencesImplTraitInTrait => {
|
||||
"the return type contains `impl Trait`"
|
||||
"the return type containing `impl Trait`"
|
||||
}
|
||||
MethodViolationCode::AsyncFn => "being async",
|
||||
MethodViolationCode::WhereClauseReferencesSelf => {
|
||||
"a where clause references `Self`"
|
||||
"a where clause referencing `Self`"
|
||||
}
|
||||
MethodViolationCode::Generic => "having a const or type generic parameter",
|
||||
MethodViolationCode::UndispatchableReceiver => {
|
||||
"having a non-dispatchable receiver type"
|
||||
}
|
||||
MethodViolationCode::Generic => "a non-lifetime generic parameter",
|
||||
MethodViolationCode::UndispatchableReceiver => "a non-dispatchable receiver type",
|
||||
};
|
||||
buf.push_str(desc);
|
||||
}
|
||||
DynCompatibilityViolation::AssocConst(const_) => {
|
||||
let name = hir::Const::from(const_).name(db);
|
||||
if let Some(name) = name {
|
||||
format_to!(buf, "has an associated constant `{}`", name.as_str());
|
||||
format_to!(buf, "having an associated constant `{}`", name.as_str());
|
||||
} else {
|
||||
buf.push_str("has an associated constant");
|
||||
buf.push_str("having an associated constant");
|
||||
}
|
||||
}
|
||||
DynCompatibilityViolation::GAT(alias) => {
|
||||
let name = hir::TypeAlias::from(alias).name(db);
|
||||
format_to!(buf, "has a generic associated type `{}`", name.as_str());
|
||||
format_to!(buf, "having a generic associated type `{}`", name.as_str());
|
||||
}
|
||||
DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => {
|
||||
let name = hir::Trait::from(super_trait).name(db);
|
||||
format_to!(buf, "has a dyn incompatible supertrait `{}`", name.as_str());
|
||||
format_to!(buf, "having a dyn incompatible supertrait `{}`", name.as_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
@ -0,0 +1,64 @@
use hir::{DefWithBody, Semantics};
use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
use std::time::{Duration, Instant};
use stdx::format_to;
use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};

// Feature: Interpret A Function, Static Or Const.
//
// |===
// | Editor  | Action Name
//
// | VS Code | **rust-analyzer: Interpret**
// |===
pub(crate) fn interpret(db: &RootDatabase, position: FilePosition) -> String {
    match find_and_interpret(db, position) {
        Some((duration, mut result)) => {
            result.push('\n');
            format_to!(result, "----------------------\n");
            format_to!(result, " Finished in {}s\n", duration.as_secs_f32());
            result
        }
        _ => "Not inside a function, const or static".to_owned(),
    }
}

fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Duration, String)> {
    let sema = Semantics::new(db);
    let source_file = sema.parse_guess_edition(position.file_id);

    let item = ancestors_at_offset(source_file.syntax(), position.offset)
        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
        .find_map(ast::Item::cast)?;
    let def: DefWithBody = match item {
        ast::Item::Fn(it) => sema.to_def(&it)?.into(),
        ast::Item::Const(it) => sema.to_def(&it)?.into(),
        ast::Item::Static(it) => sema.to_def(&it)?.into(),
        _ => return None,
    };
    let span_formatter = |file_id, text_range: TextRange| {
        let path = &db
            .source_root(db.file_source_root(file_id))
            .path_for_file(&file_id)
            .map(|x| x.to_string());
        let path = path.as_deref().unwrap_or("<unknown file>");
        match db.line_index(file_id).try_line_col(text_range.start()) {
            Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
            None => format!("file://{path} range {text_range:?}"),
        }
    };
    let start_time = Instant::now();
    let res = match def {
        DefWithBody::Function(it) => it.eval(db, span_formatter),
        DefWithBody::Static(it) => it.eval(db),
        DefWithBody::Const(it) => it.eval(db),
        _ => unreachable!(),
    };
    let res = res.unwrap_or_else(|e| {
        let mut r = String::new();
        _ = e.pretty_print(&mut r, db, span_formatter, def.module(db).krate().edition(db));
        r
    });
    let duration = Instant::now() - start_time;
    Some((duration, res))
}
|
|
@ -1,47 +0,0 @@
|
|||
use hir::Semantics;
|
||||
use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
|
||||
use std::{fmt::Write, time::Instant};
|
||||
use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
|
||||
|
||||
// Feature: Interpret Function
|
||||
//
|
||||
// |===
|
||||
// | Editor | Action Name
|
||||
//
|
||||
// | VS Code | **rust-analyzer: Interpret Function**
|
||||
// |===
|
||||
pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> String {
|
||||
let start_time = Instant::now();
|
||||
let mut result =
|
||||
find_and_interpret(db, position).unwrap_or_else(|| "Not inside a function body".to_owned());
|
||||
let duration = Instant::now() - start_time;
|
||||
writeln!(result).unwrap();
|
||||
writeln!(result, "----------------------").unwrap();
|
||||
writeln!(result, " Finished in {}s", duration.as_secs_f32()).unwrap();
|
||||
result
|
||||
}
|
||||
|
||||
fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<String> {
|
||||
let sema = Semantics::new(db);
|
||||
let source_file = sema.parse_guess_edition(position.file_id);
|
||||
|
||||
let item = ancestors_at_offset(source_file.syntax(), position.offset)
|
||||
.filter(|it| !ast::MacroCall::can_cast(it.kind()))
|
||||
.find_map(ast::Item::cast)?;
|
||||
let def = match item {
|
||||
ast::Item::Fn(it) => sema.to_def(&it)?,
|
||||
_ => return None,
|
||||
};
|
||||
let span_formatter = |file_id, text_range: TextRange| {
|
||||
let path = &db
|
||||
.source_root(db.file_source_root(file_id))
|
||||
.path_for_file(&file_id)
|
||||
.map(|x| x.to_string());
|
||||
let path = path.as_deref().unwrap_or("<unknown file>");
|
||||
match db.line_index(file_id).try_line_col(text_range.start()) {
|
||||
Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col),
|
||||
None => format!("file://{path} range {text_range:?}"),
|
||||
}
|
||||
};
|
||||
Some(def.eval(db, span_formatter))
|
||||
}
|
|
@ -33,7 +33,7 @@ mod goto_type_definition;
mod highlight_related;
mod hover;
mod inlay_hints;
mod interpret_function;
mod interpret;
mod join_lines;
mod markdown_remove;
mod matching_brace;

@ -350,7 +350,7 @@ impl Analysis {
    }

    pub fn interpret_function(&self, position: FilePosition) -> Cancellable<String> {
        self.with_db(|db| interpret_function::interpret_function(db, position))
        self.with_db(|db| interpret::interpret(db, position))
    }

    pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
|
|
|
@ -212,6 +212,7 @@ impl StaticIndex<'_> {
                    def,
                    &node,
                    None,
                    false,
                    &hover_config,
                    edition,
                )),
|
|
|
@ -398,6 +398,8 @@ define_symbols! {
    rustc_const_panic_str,
    rustc_deprecated_safe_2024,
    rustc_has_incoherent_inherent_impls,
    rustc_intrinsic,
    rustc_intrinsic_must_be_overridden,
    rustc_layout_scalar_valid_range_end,
    rustc_layout_scalar_valid_range_start,
    rustc_legacy_const_generics,
|
|
|
@ -67,7 +67,7 @@ pub(crate) mod entry {
|
|||
}
|
||||
|
||||
pub(crate) fn pat_top(p: &mut Parser<'_>) {
|
||||
patterns::pattern_top(p);
|
||||
patterns::pattern(p);
|
||||
}
|
||||
|
||||
pub(crate) fn ty(p: &mut Parser<'_>) {
|
||||
|
@ -80,7 +80,8 @@ pub(crate) mod entry {
|
|||
paths::type_path(p);
|
||||
}
|
||||
pub(crate) fn item(p: &mut Parser<'_>) {
|
||||
items::item_or_macro(p, true);
|
||||
// We can set `is_in_extern=true`, because it only allows `safe fn`, and there is no ambiguity here.
|
||||
items::item_or_macro(p, true, true);
|
||||
}
|
||||
// Parse a meta item , which excluded [], e.g : #[ MetaItem ]
|
||||
pub(crate) fn meta_item(p: &mut Parser<'_>) {
|
||||
|
@ -116,7 +117,7 @@ pub(crate) mod entry {
|
|||
|
||||
pub(crate) fn pattern(p: &mut Parser<'_>) {
|
||||
let m = p.start();
|
||||
patterns::pattern_top(p);
|
||||
patterns::pattern(p);
|
||||
if p.at(EOF) {
|
||||
m.abandon(p);
|
||||
return;
|
||||
|
|
|
@ -66,7 +66,7 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
|
|||
|
||||
// test block_items
|
||||
// fn a() { fn b() {} }
|
||||
let m = match items::opt_item(p, m) {
|
||||
let m = match items::opt_item(p, m, false) {
|
||||
Ok(()) => return,
|
||||
Err(m) => m,
|
||||
};
|
||||
|
|
|
@ -660,7 +660,7 @@ fn for_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
|
|||
fn let_expr(p: &mut Parser<'_>) -> CompletedMarker {
|
||||
let m = p.start();
|
||||
p.bump(T![let]);
|
||||
patterns::pattern_top(p);
|
||||
patterns::pattern(p);
|
||||
p.expect(T![=]);
|
||||
expr_let(p);
|
||||
m.complete(p, LET_EXPR)
|
||||
|
|
|
@ -20,7 +20,8 @@ use super::*;
|
|||
pub(super) fn mod_contents(p: &mut Parser<'_>, stop_on_r_curly: bool) {
|
||||
attributes::inner_attrs(p);
|
||||
while !(p.at(EOF) || (p.at(T!['}']) && stop_on_r_curly)) {
|
||||
item_or_macro(p, stop_on_r_curly);
|
||||
// We can set `is_in_extern=true`, because it only allows `safe fn`, and there is no ambiguity here.
|
||||
item_or_macro(p, stop_on_r_curly, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -41,11 +42,11 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
|
|||
T![;],
|
||||
]);
|
||||
|
||||
pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
|
||||
pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool, is_in_extern: bool) {
|
||||
let m = p.start();
|
||||
attributes::outer_attrs(p);
|
||||
|
||||
let m = match opt_item(p, m) {
|
||||
let m = match opt_item(p, m, is_in_extern) {
|
||||
Ok(()) => {
|
||||
if p.at(T![;]) {
|
||||
p.err_and_bump(
|
||||
|
@ -91,7 +92,7 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
|
|||
}
|
||||
|
||||
/// Try to parse an item, completing `m` in case of success.
|
||||
pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
|
||||
pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker, is_in_extern: bool) -> Result<(), Marker> {
|
||||
// test_err pub_expr
|
||||
// fn foo() { pub 92; }
|
||||
let has_visibility = opt_visibility(p, false);
|
||||
|
@ -135,7 +136,9 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
|
|||
has_mods = true;
|
||||
}
|
||||
|
||||
if p.at_contextual_kw(T![safe]) {
|
||||
// test safe_outside_of_extern
|
||||
// fn foo() { safe = true; }
|
||||
if is_in_extern && p.at_contextual_kw(T![safe]) {
|
||||
p.eat_contextual_kw(T![safe]);
|
||||
has_mods = true;
|
||||
}
|
||||
|
|
|
@ -94,7 +94,7 @@ pub(crate) fn assoc_item_list(p: &mut Parser<'_>) {
|
|||
error_block(p, "expected an item");
|
||||
continue;
|
||||
}
|
||||
item_or_macro(p, true);
|
||||
item_or_macro(p, true, false);
|
||||
}
|
||||
p.expect(T!['}']);
|
||||
m.complete(p, ASSOC_ITEM_LIST);
|
||||
|
|
|
@ -20,14 +20,9 @@ const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));
|
|||
const RANGE_PAT_END_FIRST: TokenSet =
|
||||
expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[T![-], T![const]]));
|
||||
|
||||
pub(crate) fn pattern(p: &mut Parser<'_>) {
|
||||
let m = p.start();
|
||||
pattern_r(p, m, false, PAT_RECOVERY_SET);
|
||||
}
|
||||
|
||||
/// Parses a pattern list separated by pipes `|`.
|
||||
pub(super) fn pattern_top(p: &mut Parser<'_>) {
|
||||
pattern_top_r(p, PAT_RECOVERY_SET);
|
||||
pub(crate) fn pattern(p: &mut Parser<'_>) {
|
||||
pattern_r(p, PAT_RECOVERY_SET);
|
||||
}
|
||||
|
||||
pub(crate) fn pattern_single(p: &mut Parser<'_>) {
|
||||
|
@ -37,9 +32,7 @@ pub(crate) fn pattern_single(p: &mut Parser<'_>) {
|
|||
/// Parses a pattern list separated by pipes `|`
|
||||
/// using the given `recovery_set`.
|
||||
pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
|
||||
let m = p.start();
|
||||
let has_leading_pipe = p.eat(T![|]);
|
||||
pattern_r(p, m, has_leading_pipe, recovery_set);
|
||||
pattern_r(p, recovery_set);
|
||||
}
|
||||
|
||||
// test or_pattern
|
||||
|
@ -53,7 +46,10 @@ pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
|
|||
// }
|
||||
/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the
|
||||
/// given `recovery_set`.
|
||||
fn pattern_r(p: &mut Parser<'_>, m: Marker, has_leading_pipe: bool, recovery_set: TokenSet) {
|
||||
fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
|
||||
let m = p.start();
|
||||
let has_leading_pipe = p.eat(T![|]);
|
||||
|
||||
pattern_single_r(p, recovery_set);
|
||||
|
||||
if !p.at(T![|]) && !has_leading_pipe {
|
||||
|
@ -319,6 +315,8 @@ fn record_pat_field(p: &mut Parser<'_>) {
|
|||
IDENT | INT_NUMBER if p.nth(1) == T![:] => {
|
||||
name_ref_or_index(p);
|
||||
p.bump(T![:]);
|
||||
// test record_field_pat_leading_or
|
||||
// fn foo() { let R { a: | 1 | 2 } = 0; }
|
||||
pattern(p);
|
||||
}
|
||||
// test_err record_pat_field_eq_recovery
|
||||
|
@ -438,7 +436,7 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
|
|||
}
|
||||
has_rest |= p.at(T![..]);
|
||||
|
||||
pattern_top(p);
|
||||
pattern(p);
|
||||
if !p.at(T![')']) {
|
||||
has_comma = true;
|
||||
p.expect(T![,]);
|
||||
|
@ -465,7 +463,7 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
|
|||
|
||||
fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
|
||||
while !p.at(EOF) && !p.at(ket) {
|
||||
pattern_top(p);
|
||||
pattern(p);
|
||||
if !p.eat(T![,]) {
|
||||
if p.at_ts(PAT_TOP_FIRST) {
|
||||
p.error(format!("expected {:?}, got {:?}", T![,], p.current()));
|
||||
|
|
|
@ -493,6 +493,10 @@ mod ok {
|
|||
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_list.rs");
|
||||
}
|
||||
#[test]
|
||||
fn record_field_pat_leading_or() {
|
||||
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_pat_leading_or.rs");
|
||||
}
|
||||
#[test]
|
||||
fn record_lit() { run_and_expect_no_errors("test_data/parser/inline/ok/record_lit.rs"); }
|
||||
#[test]
|
||||
fn record_literal_field_with_attr() {
|
||||
|
@ -527,6 +531,10 @@ mod ok {
|
|||
run_and_expect_no_errors("test_data/parser/inline/ok/return_type_syntax_in_path.rs");
|
||||
}
|
||||
#[test]
|
||||
fn safe_outside_of_extern() {
|
||||
run_and_expect_no_errors("test_data/parser/inline/ok/safe_outside_of_extern.rs");
|
||||
}
|
||||
#[test]
|
||||
fn self_param() { run_and_expect_no_errors("test_data/parser/inline/ok/self_param.rs"); }
|
||||
#[test]
|
||||
fn self_param_outer_attr() {
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
SOURCE_FILE
|
||||
FN
|
||||
FN_KW "fn"
|
||||
WHITESPACE " "
|
||||
NAME
|
||||
IDENT "foo"
|
||||
PARAM_LIST
|
||||
L_PAREN "("
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
BLOCK_EXPR
|
||||
STMT_LIST
|
||||
L_CURLY "{"
|
||||
WHITESPACE " "
|
||||
LET_STMT
|
||||
LET_KW "let"
|
||||
WHITESPACE " "
|
||||
RECORD_PAT
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "R"
|
||||
WHITESPACE " "
|
||||
RECORD_PAT_FIELD_LIST
|
||||
L_CURLY "{"
|
||||
WHITESPACE " "
|
||||
RECORD_PAT_FIELD
|
||||
NAME_REF
|
||||
IDENT "a"
|
||||
COLON ":"
|
||||
WHITESPACE " "
|
||||
OR_PAT
|
||||
PIPE "|"
|
||||
WHITESPACE " "
|
||||
LITERAL_PAT
|
||||
LITERAL
|
||||
INT_NUMBER "1"
|
||||
WHITESPACE " "
|
||||
PIPE "|"
|
||||
WHITESPACE " "
|
||||
LITERAL_PAT
|
||||
LITERAL
|
||||
INT_NUMBER "2"
|
||||
WHITESPACE " "
|
||||
R_CURLY "}"
|
||||
WHITESPACE " "
|
||||
EQ "="
|
||||
WHITESPACE " "
|
||||
LITERAL
|
||||
INT_NUMBER "0"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE " "
|
||||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
|
@ -0,0 +1 @@
|
|||
fn foo() { let R { a: | 1 | 2 } = 0; }
|
|
@ -0,0 +1,30 @@
|
|||
SOURCE_FILE
|
||||
FN
|
||||
FN_KW "fn"
|
||||
WHITESPACE " "
|
||||
NAME
|
||||
IDENT "foo"
|
||||
PARAM_LIST
|
||||
L_PAREN "("
|
||||
R_PAREN ")"
|
||||
WHITESPACE " "
|
||||
BLOCK_EXPR
|
||||
STMT_LIST
|
||||
L_CURLY "{"
|
||||
WHITESPACE " "
|
||||
EXPR_STMT
|
||||
BIN_EXPR
|
||||
PATH_EXPR
|
||||
PATH
|
||||
PATH_SEGMENT
|
||||
NAME_REF
|
||||
IDENT "safe"
|
||||
WHITESPACE " "
|
||||
EQ "="
|
||||
WHITESPACE " "
|
||||
LITERAL
|
||||
TRUE_KW "true"
|
||||
SEMICOLON ";"
|
||||
WHITESPACE " "
|
||||
R_CURLY "}"
|
||||
WHITESPACE "\n"
|
|
@ -0,0 +1 @@
|
|||
fn foo() { safe = true; }
|
|
@ -56,8 +56,8 @@ impl ProcMacroProcessSrv {
        match srv.version_check() {
            Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
                io::ErrorKind::Other,
                format!(
                    "proc-macro server's api version ({v}) is newer than rust-analyzer's ({CURRENT_API_VERSION})"
                format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
 This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
                ),
            )),
            Ok(v) => {
|
|
|
@ -172,7 +172,6 @@ impl flags::AnalysisStats {
|
|||
let mut num_decls = 0;
|
||||
let mut bodies = Vec::new();
|
||||
let mut adts = Vec::new();
|
||||
let mut consts = Vec::new();
|
||||
let mut file_ids = Vec::new();
|
||||
while let Some(module) = visit_queue.pop() {
|
||||
if visited_modules.insert(module) {
|
||||
|
@ -193,7 +192,6 @@ impl flags::AnalysisStats {
|
|||
}
|
||||
ModuleDef::Const(c) => {
|
||||
bodies.push(DefWithBody::from(c));
|
||||
consts.push(c)
|
||||
}
|
||||
ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
|
||||
_ => (),
|
||||
|
@ -207,7 +205,6 @@ impl flags::AnalysisStats {
|
|||
AssocItem::Function(f) => bodies.push(DefWithBody::from(f)),
|
||||
AssocItem::Const(c) => {
|
||||
bodies.push(DefWithBody::from(c));
|
||||
consts.push(c);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
@ -220,7 +217,10 @@ impl flags::AnalysisStats {
|
|||
visited_modules.len(),
|
||||
bodies.len(),
|
||||
adts.len(),
|
||||
consts.len(),
|
||||
bodies
|
||||
.iter()
|
||||
.filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
|
||||
.count(),
|
||||
);
|
||||
let crate_def_map_time = crate_def_map_sw.elapsed();
|
||||
eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
|
||||
|
@ -247,7 +247,7 @@ impl flags::AnalysisStats {
|
|||
}
|
||||
|
||||
if !self.skip_const_eval {
|
||||
self.run_const_eval(db, &consts, verbosity);
|
||||
self.run_const_eval(db, &bodies, verbosity);
|
||||
}
|
||||
|
||||
if self.run_all_ide_things {
|
||||
|
@ -320,18 +320,23 @@ impl flags::AnalysisStats {
|
|||
report_metric("data layout time", data_layout_time.time.as_millis() as u64, "ms");
|
||||
}
|
||||
|
||||
fn run_const_eval(&self, db: &RootDatabase, consts: &[hir::Const], verbosity: Verbosity) {
|
||||
fn run_const_eval(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
|
||||
let mut sw = self.stop_watch();
|
||||
let mut all = 0;
|
||||
let mut fail = 0;
|
||||
for &c in consts {
|
||||
for &b in bodies {
|
||||
let res = match b {
|
||||
DefWithBody::Const(c) => c.render_eval(db, Edition::LATEST),
|
||||
DefWithBody::Static(s) => s.render_eval(db, Edition::LATEST),
|
||||
_ => continue,
|
||||
};
|
||||
all += 1;
|
||||
let Err(error) = c.render_eval(db, Edition::LATEST) else {
|
||||
let Err(error) = res else {
|
||||
continue;
|
||||
};
|
||||
if verbosity.is_spammy() {
|
||||
let full_name =
|
||||
full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing()));
|
||||
full_name_of_item(db, b.module(db), b.name(db).unwrap_or(Name::missing()));
|
||||
println!("Const eval for {full_name} failed due {error:?}");
|
||||
}
|
||||
fail += 1;
|
||||
|
|
|
@ -61,12 +61,11 @@ impl flags::RunTests {
            }
            let mut sw_one = StopWatch::start();
            let result = test.eval(db, span_formatter);
            if result.trim() == "pass" {
                pass_count += 1;
            } else {
                fail_count += 1;
            match &result {
                Ok(result) if result.trim() == "pass" => pass_count += 1,
                _ => fail_count += 1,
            }
            println!("{result}");
            println!("{result:?}");
            eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed());
        }
        println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored");
|
|
|
@ -463,6 +463,11 @@ impl ClientCapabilities {
            .unwrap_or_default()
    }

    pub fn diagnostics_refresh(&self) -> bool {
        (|| -> _ { self.0.workspace.as_ref()?.diagnostic.as_ref()?.refresh_support })()
            .unwrap_or_default()
    }

    pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<&str> {
        self.0
            .text_document
|
|
|
@ -417,8 +417,6 @@ impl GlobalState {
|
|||
}
|
||||
}
|
||||
|
||||
let supports_diagnostic_pull_model = self.config.text_document_diagnostic();
|
||||
|
||||
let client_refresh = became_quiescent || state_changed;
|
||||
if client_refresh {
|
||||
// Refresh semantic tokens if the client supports it.
|
||||
|
@ -437,7 +435,7 @@ impl GlobalState {
|
|||
self.send_request::<lsp_types::request::InlayHintRefreshRequest>((), |_, _| ());
|
||||
}
|
||||
|
||||
if supports_diagnostic_pull_model {
|
||||
if self.config.diagnostics_refresh() {
|
||||
self.send_request::<lsp_types::request::WorkspaceDiagnosticRefresh>(
|
||||
(),
|
||||
|_, _| (),
|
||||
|
@ -448,7 +446,7 @@ impl GlobalState {
|
|||
let project_or_mem_docs_changed =
|
||||
became_quiescent || state_changed || memdocs_added_or_removed;
|
||||
if project_or_mem_docs_changed
|
||||
&& !supports_diagnostic_pull_model
|
||||
&& !self.config.text_document_diagnostic()
|
||||
&& self.config.publish_diagnostics(None)
|
||||
{
|
||||
self.update_diagnostics();
|
||||
|
|
|
@ -15,7 +15,11 @@ use parser::{Edition, T};
|
|||
use rowan::NodeOrToken;
|
||||
use stdx::{format_to, format_to_acc, never};
|
||||
|
||||
use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
|
||||
use crate::{
|
||||
ast::{self, Param},
|
||||
utils::is_raw_identifier,
|
||||
AstNode, SourceFile, SyntaxKind, SyntaxToken,
|
||||
};
|
||||
|
||||
/// While the parent module defines basic atomic "constructors", the `ext`
|
||||
/// module defines shortcuts for common things.
|
||||
|
@ -198,6 +202,38 @@ pub fn ty_alias(
|
|||
ast_from_text(&s)
|
||||
}
|
||||
|
||||
pub fn ty_fn_ptr<I: Iterator<Item = Param>>(
|
||||
for_lifetime_list: Option<ast::GenericParamList>,
|
||||
is_unsafe: bool,
|
||||
abi: Option<ast::Abi>,
|
||||
params: I,
|
||||
ret_type: Option<ast::RetType>,
|
||||
) -> ast::FnPtrType {
|
||||
let mut s = String::from("type __ = ");
|
||||
|
||||
if let Some(list) = for_lifetime_list {
|
||||
format_to!(s, "for{} ", list);
|
||||
}
|
||||
|
||||
if is_unsafe {
|
||||
s.push_str("unsafe ");
|
||||
}
|
||||
|
||||
if let Some(abi) = abi {
|
||||
format_to!(s, "{} ", abi)
|
||||
}
|
||||
|
||||
s.push_str("fn");
|
||||
|
||||
format_to!(s, "({})", params.map(|p| p.to_string()).join(", "));
|
||||
|
||||
if let Some(ret_type) = ret_type {
|
||||
format_to!(s, " {}", ret_type);
|
||||
}
|
||||
|
||||
ast_from_text(&s)
|
||||
}
|
||||
|
||||
pub fn assoc_item_list() -> ast::AssocItemList {
|
||||
ast_from_text("impl C for D {}")
|
||||
}
|
||||
|
@ -862,6 +898,10 @@ pub fn item_const(
|
|||
ast_from_text(&format!("{visibility} const {name}: {ty} = {expr};"))
|
||||
}
|
||||
|
||||
pub fn unnamed_param(ty: ast::Type) -> ast::Param {
|
||||
ast_from_text(&format!("fn f({ty}) {{ }}"))
|
||||
}
|
||||
|
||||
pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
|
||||
ast_from_text(&format!("fn f({pat}: {ty}) {{ }}"))
|
||||
}
|
||||
|
|
|
@ -5,7 +5,6 @@ rust-version.workspace = true
edition.workspace = true
license.workspace = true
authors.workspace = true
publish = false

[dependencies]
hir-expand.workspace = true
|
|
|
@ -0,0 +1,57 @@
# Setup Guide

This guide gives a simplified, opinionated setup for developers contributing to rust-analyzer using Visual Studio Code to make changes and Visual Studio Code Insiders to test those changes. This guide assumes that you have Visual Studio Code and Visual Studio Code Insiders installed.

## Prerequisites

Since rust-analyzer is a Rust project, you will need to install Rust. You can download and install the latest stable version of Rust [here](https://www.rust-lang.org/tools/install).
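
If you want to double-check that the toolchain is ready before building, the usual commands are shown below (a minimal, optional sanity check):

```sh
# Confirm that the Rust toolchain and Cargo are installed and on PATH.
rustup --version
cargo --version
```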

## Step-by-Step Setup

**Step 01**: Fork the rust-analyzer repository and clone the fork to your local machine.

**Step 02**: Open the project in Visual Studio Code.

**Step 03**: Open a terminal and run `cargo build` to build the project.

**Step 04**: Install the language server locally by running the following command:

```sh
cargo xtask install --server --code-bin code-insiders --dev-rel
```

In the output of this command, there should be a file path provided to the installed binary on your local machine.
It should look something like the following:

```
Installing <path-to-rust-analyzer-binary>
Installed package `rust-analyzer v0.0.0 (<path-to-rust-analyzer-binary>)` (executable `rust-analyzer.exe`)
```

In Visual Studio Code Insiders, you will want to open your User Settings (JSON) from the Command Palette. From there you should ensure that the `rust-analyzer.server.path` key is set to the `<path-to-rust-analyzer-binary>`. This tells Visual Studio Code Insiders to use the locally installed version that you can debug.

The User Settings (JSON) file should contain the following:

```json
{
    "rust-analyzer.server.path": "<path-to-rust-analyzer-binary>"
}
```

Now you should be able to make changes to rust-analyzer in Visual Studio Code and then view the changes in Visual Studio Code Insiders.
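
After each change, the loop below is roughly what you repeat; the restart step assumes the standard "rust-analyzer: Restart server" command is available in your Code Insiders installation:

```sh
# Rebuild your edited sources.
cargo build
# Reinstall the locally built server for Code Insiders.
cargo xtask install --server --code-bin code-insiders --dev-rel
# Finally, restart the rust-analyzer server in Code Insiders
# (Command Palette: "rust-analyzer: Restart server") to pick up the new binary.
```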

## Debugging rust-analyzer

The simplest way to debug rust-analyzer is to use the `eprintln!` macro. The reason we use `eprintln!` instead of `println!` is that the language server uses `stdout` to send messages, so we debug via `stderr` instead.

An example debugging statement could go into the `main_loop.rs` file, which can be found at `crates/rust-analyzer/src/main_loop.rs`. Inside the `main_loop` we will add the following `eprintln!` to test debugging rust-analyzer:

```rs
eprintln!("Hello, world!");
```

Now we run `cargo build` and `cargo xtask install --server --code-bin code-insiders --dev-rel` to reinstall the server.

Now, in Visual Studio Code Insiders, we should be able to open the Output tab in the terminal panel and switch to Rust Analyzer Language Server to see the `eprintln!` statement we just wrote.

If you are able to see your output, you now have a complete workflow for debugging rust-analyzer.
@ -125,7 +125,7 @@
        },
        {
            "command": "rust-analyzer.interpretFunction",
            "title": "Interpret Function",
            "title": "Interpret",
            "category": "rust-analyzer (debug command)"
        },
        {
|
|
|
@ -1 +1 @@
a9d17627d241645a54c1134a20f1596127fedb60
145f9cf95de1fbde3fa11e98461310e0373253e6
|
|
|
@ -135,7 +135,6 @@ Apache-2.0 WITH LLVM-exception
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-2-Clause OR Apache-2.0 OR MIT
BSD-3-Clause
CC0-1.0
ISC
MIT