Merge remote-tracking branch 'origin/master'

Dmitry 2020-08-15 01:32:05 +07:00
commit 178c3e135a
1185 changed files with 4251 additions and 3672 deletions

.gitattributes (vendored): 2 lines changed
View file

@ -1,5 +1,5 @@
* text=auto eol=lf
crates/ra_syntax/test_data/** -text eof=LF
crates/syntax/test_data/** -text eof=LF
# Older git versions try to fix line endings on images, this prevents it.
*.png binary
*.jpg binary

View file

@ -16,20 +16,6 @@ env:
RUSTUP_MAX_RETRIES: 10
jobs:
# rust-audit:
# name: Audit Rust vulnerabilities
# runs-on: ubuntu-latest
# steps:
# - name: Checkout repository
# uses: actions/checkout@v2
# - uses: actions-rs/install@v0.1
# with:
# crate: cargo-audit
# use-tool-cache: true
# - run: cargo audit
rust:
name: Rust
runs-on: ${{ matrix.os }}
@ -39,7 +25,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
os: [ubuntu-latest, windows-latest] #, macos-latest]
steps:
- name: Checkout repository
@ -88,11 +74,14 @@ jobs:
if: matrix.os == 'windows-latest'
run: Remove-Item ./target/debug/xtask.exe, ./target/debug/deps/xtask.exe
# Weird target to catch non-portable code
rust-power:
name: Rust Power
# Weird targets to catch non-portable code
rust-cross:
name: Rust Cross
runs-on: ubuntu-latest
env:
targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl"
steps:
- name: Checkout repository
uses: actions/checkout@v2
@ -103,7 +92,9 @@ jobs:
toolchain: stable
profile: minimal
override: true
target: 'powerpc-unknown-linux-gnu'
- name: Install Rust targets
run: rustup target add ${{ env.targets }}
- name: Cache cargo directories
uses: actions/cache@v2
@ -114,14 +105,17 @@ jobs:
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Check
run: cargo check --target=powerpc-unknown-linux-gnu --all-targets
run: |
for target in ${{ env.targets }}; do
cargo check --target=$target --all-targets
done
typescript:
name: TypeScript
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}

Cargo.lock (generated): 763 lines changed

File diff suppressed because it is too large.

View file

@ -39,7 +39,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0
* Website: https://rust-analyzer.github.io/
* Metrics: https://rust-analyzer.github.io/metrics/
* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide/
* API docs: https://rust-analyzer.github.io/rust-analyzer/ide/
## License

View file

@ -1,9 +1,8 @@
status = [
"Rust (ubuntu-latest)",
"Rust (windows-latest)",
"Rust (macos-latest)",
# "Rust (macos-latest)",
"TypeScript (ubuntu-latest)",
"TypeScript (windows-latest)",
"TypeScript (macos-latest)",
]
delete_merged_branches = true

View file

@ -1,9 +1,9 @@
[package]
edition = "2018"
name = "ra_arena"
version = "0.1.0"
authors = ["rust-analyzer developers"]
name = "arena"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false

crates/assists/Cargo.toml (new file): 23 lines added
View file

@ -0,0 +1,23 @@
[package]
name = "assists"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
[dependencies]
rustc-hash = "1.1.0"
itertools = "0.9.0"
either = "1.5.3"
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
text_edit = { path = "../text_edit" }
profile = { path = "../profile" }
base_db = { path = "../base_db" }
ide_db = { path = "../ide_db" }
hir = { path = "../hir" }
test_utils = { path = "../test_utils" }

View file

@ -3,19 +3,18 @@
use std::mem;
use algo::find_covering_element;
use base_db::{FileId, FileRange};
use hir::Semantics;
use ra_db::{FileId, FileRange};
use ra_fmt::{leading_indent, reindent};
use ra_ide_db::{
use ide_db::{
source_change::{SourceChange, SourceFileEdit},
RootDatabase,
};
use ra_syntax::{
use syntax::{
algo::{self, find_node_at_offset, SyntaxRewriter},
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize,
TokenAtOffset,
};
use ra_text_edit::TextEditBuilder;
use text_edit::{TextEdit, TextEditBuilder};
use crate::{
assist_config::{AssistConfig, SnippetCap},
@ -214,7 +213,7 @@ pub(crate) struct AssistBuilder {
impl AssistBuilder {
pub(crate) fn new(file_id: FileId) -> AssistBuilder {
AssistBuilder {
edit: TextEditBuilder::default(),
edit: TextEdit::builder(),
file_id,
is_snippet: false,
change: SourceChange::default(),
@ -269,20 +268,6 @@ pub(crate) fn replace_snippet(
pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
}
/// Replaces specified `node` of text with a given string, reindenting the
/// string to maintain `node`'s existing indent.
// FIXME: remove in favor of ra_syntax::edit::IndentLevel::increase_indent
pub(crate) fn replace_node_and_indent(
&mut self,
node: &SyntaxNode,
replace_with: impl Into<String>,
) {
let mut replace_with = replace_with.into();
if let Some(indent) = leading_indent(node) {
replace_with = reindent(&replace_with, &indent)
}
self.replace(node.text_range(), replace_with)
}
pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) {
let node = rewriter.rewrite_root().unwrap();
let new = rewriter.rewrite(&node);

View file

@ -2,13 +2,13 @@
use rustc_hash::FxHashMap;
use hir::{HirDisplay, PathResolution, SemanticsScope};
use ra_syntax::{
use syntax::{
algo::SyntaxRewriter,
ast::{self, AstNode},
};
pub trait AstTransform<'a> {
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode>;
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>;
fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a>
@ -22,7 +22,7 @@ fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a
struct NullTransformer;
impl<'a> AstTransform<'a> for NullTransformer {
fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, _node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> {
None
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@ -51,7 +51,7 @@ pub fn for_trait_impl(
// this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
.skip(1)
// The actual list of trait type parameters may be longer than the one
// used in the `impl` block due to trailing default type parametrs.
// used in the `impl` block due to trailing default type parameters.
// For that case we extend the `substs` with an empty iterator so we
// can still hit those trailing values and check if they actually have
// a default type. If they do, go for that type from `hir` to `ast` so
@ -101,10 +101,7 @@ fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::Type>> {
Some(result)
}
}
fn get_substitution_inner(
&self,
node: &ra_syntax::SyntaxNode,
) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution_inner(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> {
let type_ref = ast::Type::cast(node.clone())?;
let path = match &type_ref {
ast::Type::PathType(path_type) => path_type.path()?,
@ -122,7 +119,7 @@ fn get_substitution_inner(
}
impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> {
self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@ -141,10 +138,7 @@ pub fn new(target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScop
Self { target_scope, source_scope, previous: Box::new(NullTransformer) }
}
fn get_substitution_inner(
&self,
node: &ra_syntax::SyntaxNode,
) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution_inner(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> {
// FIXME handle value ns?
let from = self.target_scope.module()?;
let p = ast::Path::cast(node.clone())?;
@ -183,7 +177,7 @@ fn get_substitution_inner(
pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
SyntaxRewriter::from_fn(|element| match element {
ra_syntax::SyntaxElement::Node(n) => {
syntax::SyntaxElement::Node(n) => {
let replacement = transformer.get_substitution(&n)?;
Some(replacement.into())
}
@ -193,7 +187,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
}
impl<'a> AstTransform<'a> for QualifyPaths<'a> {
fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
fn get_substitution(&self, node: &syntax::SyntaxNode) -> Option<syntax::SyntaxNode> {
self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
}
fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {

View file

@ -1,10 +1,10 @@
use ra_syntax::{
use itertools::Itertools;
use syntax::{
ast::{self, AstNode},
Direction, SmolStr,
SyntaxKind::{IDENT, WHITESPACE},
TextRange, TextSize,
};
use stdx::SepBy;
use crate::{
assist_context::{AssistContext, Assists},
@ -61,9 +61,9 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
.filter(|t| t != trait_token.text())
.collect::<Vec<SmolStr>>();
let has_more_derives = !new_attr_input.is_empty();
let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string();
if has_more_derives {
let new_attr_input = format!("({})", new_attr_input.iter().format(", "));
builder.replace(input.syntax().text_range(), new_attr_input);
} else {
let attr_range = attr.syntax().text_range();
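Not part of the diff: a minimal standalone sketch of the itertools::Itertools::format call that replaces the removed stdx::SepBy helper here. The derive list below is made up for illustration.

    use itertools::Itertools;

    fn main() {
        let remaining_derives = ["Clone", "Copy"];
        // `format` joins the items with the separator lazily, at Display time,
        // so wrapping it in `format!("({})", ...)` reproduces the old
        // `.sep_by(", ").surround_with("(", ")")` output.
        let new_attr_input = format!("({})", remaining_derives.iter().format(", "));
        assert_eq!(new_attr_input, "(Clone, Copy)");
    }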

View file

@ -1,5 +1,5 @@
use hir::HirDisplay;
use ra_syntax::{
use syntax::{
ast::{self, AstNode, LetStmt, NameOwner},
TextRange,
};

View file

@ -1,5 +1,5 @@
use hir::HasSource;
use ra_syntax::{
use syntax::{
ast::{
self,
edit::{self, AstNodeEdit, IndentLevel},
@ -110,7 +110,7 @@ fn add_missing_impl_members_inner(
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
let _p = ra_prof::profile("add_missing_impl_members_inner");
let _p = profile::span("add_missing_impl_members_inner");
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
let impl_item_list = impl_def.assoc_item_list()?;

View file

@ -1,5 +1,5 @@
use ra_ide_db::defs::{classify_name_ref, Definition, NameRefClass};
use ra_syntax::{ast, AstNode, SyntaxKind, T};
use ide_db::defs::{classify_name_ref, Definition, NameRefClass};
use syntax::{ast, AstNode, SyntaxKind, T};
use test_utils::mark;
use crate::{
@ -41,7 +41,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
let name_ref = ast::NameRef::cast(ident.parent())?;
let def = match classify_name_ref(&ctx.sema, &name_ref)? {
NameRefClass::Definition(def) => def,
NameRefClass::FieldShorthand { .. } => return None,
NameRefClass::ExternCrate(_) | NameRefClass::FieldShorthand { .. } => return None,
};
let fun = match def {
Definition::ModuleDef(hir::ModuleDef::Function(it)) => it,

View file

@ -1,10 +1,10 @@
use ra_syntax::ast::{self, AstNode};
use syntax::ast::{self, AstNode};
use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
// Assist: apply_demorgan
//
// Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law].
// This transforms expressions of the form `!l || !r` into `!(l && r)`.
// This also works with `&&`. This assist can only be applied with the cursor
// on either `||` or `&&`, with both operands being a negation of some kind.
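For reference (not from the commit), a self-contained check of the equivalences this assist relies on:

    fn demorgan_holds(l: bool, r: bool) -> bool {
        // De Morgan: `!l || !r` == `!(l && r)` and `!l && !r` == `!(l || r)`.
        (!l || !r) == !(l && r) && (!l && !r) == !(l || r)
    }

    fn main() {
        assert!(demorgan_holds(false, false));
        assert!(demorgan_holds(false, true));
        assert!(demorgan_holds(true, false));
        assert!(demorgan_holds(true, true));
    }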

View file

@ -5,13 +5,12 @@
AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
Type,
};
use ra_ide_db::{imports_locator, RootDatabase};
use ra_prof::profile;
use ra_syntax::{
use ide_db::{imports_locator, RootDatabase};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, AstNode},
SyntaxNode,
};
use rustc_hash::FxHashSet;
use crate::{
utils::insert_use_statement, AssistContext, AssistId, AssistKind, Assists, GroupLabel,
@ -130,7 +129,7 @@ fn get_import_group_message(&self) -> GroupLabel {
}
fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet<ModPath> {
let _p = profile("auto_import::search_for_imports");
let _p = profile::span("auto_import::search_for_imports");
let db = ctx.db();
let current_crate = self.module_with_name_to_import.krate();
imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query())

View file

@ -1,10 +1,12 @@
use ra_syntax::{
ast::{self, BlockExpr, Expr, LoopBodyOwner},
use std::iter;
use syntax::{
ast::{self, make, BlockExpr, Expr, LoopBodyOwner},
AstNode, SyntaxNode,
};
use test_utils::mark;
use crate::{AssistContext, AssistId, AssistKind, Assists};
use test_utils::mark;
// Assist: change_return_type_to_result
//
@ -44,7 +46,13 @@ pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContex
tail_return_expr_collector.collect_tail_exprs(block_expr);
for ret_expr_arg in tail_return_expr_collector.exprs_to_wrap {
builder.replace_node_and_indent(&ret_expr_arg, format!("Ok({})", ret_expr_arg));
let ok_wrapped = make::expr_call(
make::expr_path(make::path_unqualified(make::path_segment(make::name_ref(
"Ok",
)))),
make::arg_list(iter::once(ret_expr_arg.clone())),
);
builder.replace_ast(ret_expr_arg, ok_wrapped);
}
match ctx.config.snippet_cap {
@ -60,7 +68,7 @@ pub(crate) fn change_return_type_to_result(acc: &mut Assists, ctx: &AssistContex
}
struct TailReturnCollector {
exprs_to_wrap: Vec<SyntaxNode>,
exprs_to_wrap: Vec<ast::Expr>,
}
impl TailReturnCollector {
@ -86,7 +94,8 @@ fn collect_jump_exprs(&mut self, block_expr: &BlockExpr, collect_break: bool) {
if let Some(last_exprs) = get_tail_expr_from_block(&expr) {
for last_expr in last_exprs {
let last_expr = match last_expr {
NodeType::Node(expr) | NodeType::Leaf(expr) => expr,
NodeType::Node(expr) => expr,
NodeType::Leaf(expr) => expr.syntax().clone(),
};
if let Some(last_expr) = Expr::cast(last_expr.clone()) {
@ -113,12 +122,12 @@ fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) {
}
Expr::ReturnExpr(ret_expr) => {
if let Some(ret_expr_arg) = &ret_expr.expr() {
self.exprs_to_wrap.push(ret_expr_arg.syntax().clone());
self.exprs_to_wrap.push(ret_expr_arg.clone());
}
}
Expr::BreakExpr(break_expr) if collect_break => {
if let Some(break_expr_arg) = &break_expr.expr() {
self.exprs_to_wrap.push(break_expr_arg.syntax().clone());
self.exprs_to_wrap.push(break_expr_arg.clone());
}
}
Expr::IfExpr(if_expr) => {
@ -166,14 +175,11 @@ fn fetch_tail_exprs(&mut self, expr: &Expr) {
NodeType::Leaf(expr) => {
self.exprs_to_wrap.push(expr.clone());
}
NodeType::Node(expr) => match &Expr::cast(expr.clone()) {
Some(last_expr) => {
self.fetch_tail_exprs(last_expr);
NodeType::Node(expr) => {
if let Some(last_expr) = Expr::cast(expr.clone()) {
self.fetch_tail_exprs(&last_expr);
}
None => {
self.exprs_to_wrap.push(expr.clone());
}
},
}
}
}
}
@ -182,7 +188,7 @@ fn fetch_tail_exprs(&mut self, expr: &Expr) {
#[derive(Debug)]
enum NodeType {
Leaf(SyntaxNode),
Leaf(ast::Expr),
Node(SyntaxNode),
}
@ -233,25 +239,26 @@ fn get_tail_expr_from_block(expr: &Expr) -> Option<Vec<NodeType>> {
Some(arms)
}
Expr::BreakExpr(expr) => expr.expr().map(|e| vec![NodeType::Leaf(e.syntax().clone())]),
Expr::BreakExpr(expr) => expr.expr().map(|e| vec![NodeType::Leaf(e)]),
Expr::ReturnExpr(ret_expr) => Some(vec![NodeType::Node(ret_expr.syntax().clone())]),
Expr::CallExpr(call_expr) => Some(vec![NodeType::Leaf(call_expr.syntax().clone())]),
Expr::Literal(lit_expr) => Some(vec![NodeType::Leaf(lit_expr.syntax().clone())]),
Expr::TupleExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::ArrayExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::ParenExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::PathExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::RecordExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::IndexExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::MethodCallExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::AwaitExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::CastExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::RefExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::PrefixExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::RangeExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::BinExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::MacroCall(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::BoxExpr(expr) => Some(vec![NodeType::Leaf(expr.syntax().clone())]),
Expr::CallExpr(_)
| Expr::Literal(_)
| Expr::TupleExpr(_)
| Expr::ArrayExpr(_)
| Expr::ParenExpr(_)
| Expr::PathExpr(_)
| Expr::RecordExpr(_)
| Expr::IndexExpr(_)
| Expr::MethodCallExpr(_)
| Expr::AwaitExpr(_)
| Expr::CastExpr(_)
| Expr::RefExpr(_)
| Expr::PrefixExpr(_)
| Expr::RangeExpr(_)
| Expr::BinExpr(_)
| Expr::MacroCall(_)
| Expr::BoxExpr(_) => Some(vec![NodeType::Leaf(expr.clone())]),
_ => None,
}
}
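To illustrate what this assist does to user code, a hedged sketch (not from the commit; the real assist inserts a snippet placeholder for the error type, simplified to `()` here):

    // Before `change_return_type_to_result` (cursor on the return type):
    fn double(x: i32) -> i32 {
        if x < 0 {
            return -x;
        }
        x * 2
    }

    // After: the return type becomes a `Result` and every tail/`return`
    // expression is wrapped in `Ok(..)`, which is what the `make::expr_call`
    // construction above builds.
    fn double_after(x: i32) -> Result<i32, ()> {
        if x < 0 {
            return Ok(-x);
        }
        Ok(x * 2)
    }

    fn main() {
        assert_eq!(double(3), 6);
        assert_eq!(double_after(3), Ok(6));
    }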

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
ast::{self, NameOwner, VisibilityOwner},
AstNode,
SyntaxKind::{CONST, ENUM, FN, MODULE, STATIC, STRUCT, TRAIT, VISIBILITY},

View file

@ -1,6 +1,6 @@
use std::{iter::once, ops::RangeInclusive};
use ra_syntax::{
use syntax::{
algo::replace_children,
ast::{
self,

View file

@ -1,9 +1,9 @@
use hir::{AssocItem, MacroDef, ModuleDef, Name, PathResolution, ScopeDef, SemanticsScope};
use ra_ide_db::{
use ide_db::{
defs::{classify_name_ref, Definition, NameRefClass},
RootDatabase,
};
use ra_syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T};
use syntax::{algo, ast, match_ast, AstNode, SyntaxNode, SyntaxToken, T};
use crate::{
assist_context::{AssistBuilder, AssistContext, Assists},

View file

@ -1,13 +1,12 @@
use base_db::FileId;
use hir::{EnumVariant, Module, ModuleDef, Name};
use ra_db::FileId;
use ra_fmt::leading_indent;
use ra_ide_db::{defs::Definition, search::Reference, RootDatabase};
use ra_syntax::{
algo::find_node_at_offset,
ast::{self, ArgListOwner, AstNode, NameOwner, VisibilityOwner},
SourceFile, SyntaxNode, TextRange, TextSize,
};
use ide_db::{defs::Definition, search::Reference, RootDatabase};
use rustc_hash::FxHashSet;
use syntax::{
algo::find_node_at_offset,
ast::{self, edit::IndentLevel, ArgListOwner, AstNode, NameOwner, VisibilityOwner},
SourceFile, TextRange, TextSize,
};
use crate::{
assist_context::AssistBuilder, utils::insert_use_statement, AssistContext, AssistId,
@ -72,7 +71,7 @@ pub(crate) fn extract_struct_from_enum_variant(
}
extract_struct_def(
builder,
enum_ast.syntax(),
&enum_ast,
&variant_name,
&field_list.to_string(),
start_offset,
@ -112,9 +111,10 @@ fn insert_import(
Some(())
}
// FIXME: this should use strongly-typed `make`, rather than string manipulation.
fn extract_struct_def(
builder: &mut AssistBuilder,
enum_ast: &SyntaxNode,
enum_: &ast::Enum,
variant_name: &str,
variant_list: &str,
start_offset: TextSize,
@ -126,11 +126,7 @@ fn extract_struct_def(
} else {
"".to_string()
};
let indent = if let Some(indent) = leading_indent(enum_ast) {
indent.to_string()
} else {
"".to_string()
};
let indent = IndentLevel::from_node(enum_.syntax());
let struct_def = format!(
r#"{}struct {}{};

View file

@ -1,11 +1,11 @@
use ra_syntax::{
use stdx::format_to;
use syntax::{
ast::{self, AstNode},
SyntaxKind::{
BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, PATH_EXPR, RETURN_EXPR,
},
SyntaxNode,
};
use stdx::format_to;
use test_utils::mark;
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,9 +1,9 @@
use std::iter;
use hir::{Adt, HasSource, ModuleDef, Semantics};
use ide_db::RootDatabase;
use itertools::Itertools;
use ra_ide_db::RootDatabase;
use ra_syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat};
use syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat};
use test_utils::mark;
use crate::{

View file

@ -1,6 +1,6 @@
use base_db::FileId;
use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution};
use ra_db::FileId;
use ra_syntax::{ast, AstNode, TextRange, TextSize};
use syntax::{ast, AstNode, TextRange, TextSize};
use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
use ast::VisibilityOwner;
@ -121,7 +121,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
Some(cap) => match current_visibility {
Some(current_visibility) => builder.replace_snippet(
cap,
dbg!(current_visibility.syntax()).text_range(),
current_visibility.syntax().text_range(),
format!("$0{}", missing_visibility),
),
None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)),
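Side note on the `dbg!` wrapper removed above (example not from the commit): `dbg!` prints its argument to stderr and passes it through, so a leftover call still compiles and behaves correctly but leaves debug noise behind.

    fn main() {
        let x = 2;
        // Prints `[src/main.rs:4] x = 2` to stderr and evaluates to `x`.
        let y = dbg!(x) * 3;
        assert_eq!(y, 6);
    }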

View file

@ -1,4 +1,4 @@
use ra_syntax::ast::{AstNode, BinExpr, BinOp};
use syntax::ast::{AstNode, BinExpr, BinOp};
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,4 +1,4 @@
use ra_syntax::{algo::non_trivia_sibling, Direction, T};
use syntax::{algo::non_trivia_sibling, Direction, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
algo::non_trivia_sibling,
ast::{self, AstNode},
Direction, T,

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
ast::{self, AstNode, AttrsOwner},
SyntaxKind::{COMMENT, WHITESPACE},
TextSize,

View file

@ -1,5 +1,5 @@
use ra_ide_db::RootDatabase;
use ra_syntax::ast::{self, AstNode, NameOwner};
use ide_db::RootDatabase;
use syntax::ast::{self, AstNode, NameOwner};
use test_utils::mark;
use crate::{utils::FamousDefs, AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,6 +1,7 @@
use base_db::FileId;
use hir::HirDisplay;
use ra_db::FileId;
use ra_syntax::{
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
@ -8,7 +9,6 @@
},
SyntaxKind, SyntaxNode, TextSize,
};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
assist_config::SnippetCap,

View file

@ -1,5 +1,6 @@
use ra_syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner};
use stdx::{format_to, SepBy};
use itertools::Itertools;
use stdx::format_to;
use syntax::ast::{self, AstNode, GenericParamsOwner, NameOwner};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -50,7 +51,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
.filter_map(|it| it.name())
.map(|it| it.text().clone());
let generic_params = lifetime_params.chain(type_params).sep_by(", ");
let generic_params = lifetime_params.chain(type_params).format(", ");
format_to!(buf, "<{}>", generic_params)
}
match ctx.config.snippet_cap {

View file

@ -1,9 +1,10 @@
use hir::Adt;
use ra_syntax::{
use itertools::Itertools;
use stdx::format_to;
use syntax::{
ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
T,
};
use stdx::{format_to, SepBy};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -52,8 +53,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
let params = field_list
.fields()
.filter_map(|f| Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax())))
.sep_by(", ");
let fields = field_list.fields().filter_map(|f| f.name()).sep_by(", ");
.format(", ");
let fields = field_list.fields().filter_map(|f| f.name()).format(", ");
format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields);
@ -102,7 +103,7 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
.map(|it| it.text().clone());
let type_params =
type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
format_to!(buf, "<{}>", lifetime_params.chain(type_params).sep_by(", "))
format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
}
format_to!(buf, " {{\n{}\n}}\n", code);

View file

@ -1,5 +1,5 @@
use ra_ide_db::defs::Definition;
use ra_syntax::{
use ide_db::defs::Definition;
use syntax::{
ast::{self, AstNode, AstToken},
TextRange,
};

View file

@ -1,8 +1,8 @@
use ra_syntax::{
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, GenericParamsOwner, NameOwner},
AstNode, SyntaxKind, TextRange, TextSize,
};
use rustc_hash::FxHashSet;
use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
ast::{self, AstNode},
T,
};

View file

@ -1,6 +1,6 @@
use std::iter::successors;
use ra_syntax::{
use syntax::{
algo::{neighbor, skip_trivia_token, SyntaxRewriter},
ast::{self, edit::AstNodeEdit, make},
AstNode, Direction, InsertPosition, SyntaxElement, T,
@ -164,6 +164,33 @@ fn test_merge_second() {
);
}
#[test]
fn merge_self1() {
check_assist(
merge_imports,
r"
use std::fmt<|>;
use std::fmt::Display;
",
r"
use std::fmt::{self, Display};
",
);
}
#[test]
fn merge_self2() {
check_assist(
merge_imports,
r"
use std::{fmt, <|>fmt::Display};
",
r"
use std::{fmt::{Display, self}};
",
);
}
#[test]
fn test_merge_nested() {
check_assist(

View file

@ -1,6 +1,6 @@
use std::iter::successors;
use ra_syntax::{
use syntax::{
algo::neighbor,
ast::{self, AstNode},
Direction,

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
ast::{self, edit::AstNodeEdit, make, AstNode, NameOwner, TypeBoundsOwner},
match_ast,
SyntaxKind::*,

View file

@ -1,5 +1,5 @@
use ra_syntax::{
ast::{AstNode, IfExpr, MatchArm},
use syntax::{
ast::{edit::AstNodeEdit, make, AstNode, IfExpr, MatchArm},
SyntaxKind::WHITESPACE,
};
@ -25,7 +25,9 @@
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } => if distance > 10 { foo() },
// Action::Move { distance } => if distance > 10 {
// foo()
// },
// _ => (),
// }
// }
@ -35,9 +37,13 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) ->
let guard = match_arm.guard()?;
let space_before_guard = guard.syntax().prev_sibling_or_token();
let guard_conditions = guard.expr()?;
let guard_condition = guard.expr()?;
let arm_expr = match_arm.expr()?;
let buf = format!("if {} {{ {} }}", guard_conditions.syntax().text(), arm_expr.syntax().text());
let if_expr = make::expr_if(
make::condition(guard_condition, None),
make::block_expr(None, Some(arm_expr.clone())),
)
.indent(arm_expr.indent_level());
let target = guard.syntax().text_range();
acc.add(
@ -53,7 +59,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) ->
};
edit.delete(guard.syntax().text_range());
edit.replace_node_and_indent(arm_expr.syntax(), buf);
edit.replace_ast(arm_expr, if_expr);
},
)
}
@ -134,16 +140,14 @@ fn move_guard_to_arm_body_target() {
check_assist_target(
move_guard_to_arm_body,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' <|>if chars.clone().next() == Some('\n') => false,
_ => true
}
}
"#,
r#"if chars.clone().next() == Some('\n')"#,
fn main() {
match 92 {
x <|>if x > 10 => false,
_ => true
}
}
"#,
r#"if x > 10"#,
);
}
@ -152,25 +156,23 @@ fn move_guard_to_arm_body_works() {
check_assist(
move_guard_to_arm_body,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' <|>if chars.clone().next() == Some('\n') => false,
_ => true
}
}
"#,
fn main() {
match 92 {
x <|>if x > 10 => false,
_ => true
}
}
"#,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' => if chars.clone().next() == Some('\n') { false },
_ => true
}
}
"#,
fn main() {
match 92 {
x => if x > 10 {
false
},
_ => true
}
}
"#,
);
}
@ -179,21 +181,23 @@ fn move_guard_to_arm_body_works_complex_match() {
check_assist(
move_guard_to_arm_body,
r#"
fn f() {
match x {
<|>y @ 4 | y @ 5 if y > 5 => true,
_ => false
}
}
"#,
fn main() {
match 92 {
<|>x @ 4 | x @ 5 if x > 5 => true,
_ => false
}
}
"#,
r#"
fn f() {
match x {
y @ 4 | y @ 5 => if y > 5 { true },
_ => false
}
}
"#,
fn main() {
match 92 {
x @ 4 | x @ 5 => if x > 5 {
true
},
_ => false
}
}
"#,
);
}
@ -202,25 +206,21 @@ fn move_arm_cond_to_match_guard_works() {
check_assist(
move_arm_cond_to_match_guard,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' => if chars.clone().next() == Some('\n') { <|>false },
_ => true
}
}
"#,
fn main() {
match 92 {
x => if x > 10 { <|>false },
_ => true
}
}
"#,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' if chars.clone().next() == Some('\n') => false,
_ => true
}
}
"#,
fn main() {
match 92 {
x if x > 10 => false,
_ => true
}
}
"#,
);
}
@ -229,15 +229,13 @@ fn move_arm_cond_to_match_guard_if_let_not_works() {
check_assist_not_applicable(
move_arm_cond_to_match_guard,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' => if let Some(_) = chars.clone().next() { <|>false },
_ => true
}
}
"#,
fn main() {
match 92 {
x => if let 62 = x { <|>false },
_ => true
}
}
"#,
);
}
@ -246,25 +244,21 @@ fn move_arm_cond_to_match_guard_if_empty_body_works() {
check_assist(
move_arm_cond_to_match_guard,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' => if chars.clone().next().is_some() { <|> },
_ => true
}
}
"#,
fn main() {
match 92 {
x => if x > 10 { <|> },
_ => true
}
}
"#,
r#"
fn f() {
let t = 'a';
let chars = "abcd";
match t {
'\r' if chars.clone().next().is_some() => { },
_ => true
}
}
"#,
fn main() {
match 92 {
x if x > 10 => { },
_ => true
}
}
"#,
);
}
@ -273,31 +267,27 @@ fn move_arm_cond_to_match_guard_if_multiline_body_works() {
check_assist(
move_arm_cond_to_match_guard,
r#"
fn f() {
let mut t = 'a';
let chars = "abcd";
match t {
'\r' => if chars.clone().next().is_some() {
t = 'e';<|>
false
},
_ => true
}
}
"#,
fn main() {
match 92 {
x => if x > 10 {
92;<|>
false
},
_ => true
}
}
"#,
r#"
fn f() {
let mut t = 'a';
let chars = "abcd";
match t {
'\r' if chars.clone().next().is_some() => {
t = 'e';
false
},
_ => true
}
}
"#,
fn main() {
match 92 {
x if x > 10 => {
92;
false
},
_ => true
}
}
"#,
);
}
}

View file

@ -1,6 +1,6 @@
use std::borrow::Cow;
use ra_syntax::{
use syntax::{
ast::{self, HasQuotes, HasStringValue},
AstToken,
SyntaxKind::{RAW_STRING, STRING},

View file

@ -1,4 +1,4 @@
use ra_syntax::{
use syntax::{
ast::{self, AstNode},
TextRange, TextSize, T,
};

View file

@ -1,4 +1,4 @@
use ra_syntax::{SyntaxKind, TextRange, T};
use syntax::{SyntaxKind, TextRange, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -2,8 +2,8 @@
use rustc_hash::FxHashMap;
use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct};
use ra_ide_db::RootDatabase;
use ra_syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode};
use ide_db::RootDatabase;
use syntax::{algo, ast, match_ast, AstNode, SyntaxKind, SyntaxKind::*, SyntaxNode};
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,5 +1,4 @@
use ra_fmt::unwrap_trivial_block;
use ra_syntax::{
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
@ -8,7 +7,10 @@
AstNode,
};
use crate::{utils::TryEnum, AssistContext, AssistId, AssistKind, Assists};
use crate::{
utils::{unwrap_trivial_block, TryEnum},
AssistContext, AssistId, AssistKind, Assists,
};
// Assist: replace_if_let_with_match
//

View file

@ -1,6 +1,6 @@
use std::iter::once;
use ra_syntax::{
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},

View file

@ -1,5 +1,5 @@
use hir;
use ra_syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode};
use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode};
use crate::{
utils::{find_insert_use_container, insert_use_statement},

View file

@ -1,6 +1,6 @@
use std::iter;
use ra_syntax::{
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},

View file

@ -1,6 +1,6 @@
use std::iter::successors;
use ra_syntax::{ast, AstNode, T};
use syntax::{ast, AstNode, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};

View file

@ -1,5 +1,4 @@
use ra_fmt::unwrap_trivial_block;
use ra_syntax::{
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
@ -7,7 +6,7 @@
AstNode, TextRange, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
use crate::{utils::unwrap_trivial_block, AssistContext, AssistId, AssistKind, Assists};
// Assist: unwrap_block
//

View file

@ -1,4 +1,4 @@
//! `ra_assists` crate provides a bunch of code assists, also known as code
//! `assists` crate provides a bunch of code assists, also known as code
//! actions (in LSP) or intentions (in IntelliJ).
//!
//! An assist is a micro-refactoring, which is automatically activated in
@ -17,10 +17,10 @@ macro_rules! eprintln {
pub mod utils;
pub mod ast_transform;
use base_db::FileRange;
use hir::Semantics;
use ra_db::FileRange;
use ra_ide_db::{source_change::SourceChange, RootDatabase};
use ra_syntax::TextRange;
use ide_db::{source_change::SourceChange, RootDatabase};
use syntax::TextRange;
pub(crate) use crate::assist_context::{AssistContext, Assists};
@ -66,13 +66,13 @@ pub fn contains(self, other: AssistKind) -> bool {
#[derive(Debug, Clone)]
pub struct Assist {
pub id: AssistId,
id: AssistId,
/// Short description of the assist, as shown in the UI.
pub label: String,
pub group: Option<GroupLabel>,
label: String,
group: Option<GroupLabel>,
/// Target ranges are used to sort assists: the smaller the target range,
/// the more specific assist is, and so it should be sorted first.
pub target: TextRange,
target: TextRange,
}
#[derive(Debug, Clone)]
@ -120,10 +120,25 @@ pub(crate) fn new(
group: Option<GroupLabel>,
target: TextRange,
) -> Assist {
// FIXME: make fields private, so that this invariant can't be broken
assert!(label.starts_with(|c: char| c.is_uppercase()));
Assist { id, label, group, target }
}
pub fn id(&self) -> AssistId {
self.id
}
pub fn label(&self) -> String {
self.label.clone()
}
pub fn group(&self) -> Option<GroupLabel> {
self.group.clone()
}
pub fn target(&self) -> TextRange {
self.target
}
}
mod handlers {
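A hedged sketch (not the crate's actual definitions) of the encapsulation pattern adopted above: the `Assist` fields become private and are read through small getters, so the uppercase-label invariant asserted in `new` can no longer be bypassed from outside the crate.

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub struct AssistId(pub &'static str);

    #[derive(Debug, Clone)]
    pub struct Assist {
        id: AssistId,
        label: String,
    }

    impl Assist {
        pub fn new(id: AssistId, label: String) -> Assist {
            // The invariant is checked once, at construction time.
            assert!(label.starts_with(|c: char| c.is_uppercase()));
            Assist { id, label }
        }
        pub fn id(&self) -> AssistId {
            self.id
        }
        pub fn label(&self) -> String {
            self.label.clone()
        }
    }

    fn main() {
        let assist = Assist::new(AssistId("add_turbo_fish"), "Add `::<>`".to_string());
        assert_eq!(assist.id(), AssistId("add_turbo_fish"));
        assert_eq!(assist.label(), "Add `::<>`");
    }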

View file

@ -1,9 +1,9 @@
mod generated;
use base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
use hir::Semantics;
use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
use ra_ide_db::RootDatabase;
use ra_syntax::TextRange;
use ide_db::RootDatabase;
use syntax::TextRange;
use test_utils::{assert_eq_text, extract_offset, extract_range};
use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists};
@ -20,7 +20,7 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_
// FIXME: instead of having a separate function here, maybe use
// `extract_ranges` and mark the target as `<target> </target>` in the
// fixuture?
// fixture?
pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
check(assist, ra_fixture, ExpectedResult::Target(target));
}

View file

@ -690,7 +690,9 @@ enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } => if distance > 10 { foo() },
Action::Move { distance } => if distance > 10 {
foo()
},
_ => (),
}
}

View file

@ -4,19 +4,57 @@
use std::{iter, ops};
use hir::{Adt, Crate, Enum, ScopeDef, Semantics, Trait, Type};
use ra_ide_db::RootDatabase;
use ra_syntax::{
use ide_db::RootDatabase;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, make, NameOwner},
AstNode,
SyntaxKind::*,
SyntaxNode, TextSize, T,
};
use rustc_hash::FxHashSet;
use crate::assist_config::SnippetCap;
pub(crate) use insert_use::{find_insert_use_container, insert_use_statement};
pub(crate) fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr {
extract_trivial_expression(&block)
.filter(|expr| !expr.syntax().text().contains_char('\n'))
.unwrap_or_else(|| block.into())
}
pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
let has_anything_else = |thing: &SyntaxNode| -> bool {
let mut non_trivial_children =
block.syntax().children_with_tokens().filter(|it| match it.kind() {
WHITESPACE | T!['{'] | T!['}'] => false,
_ => it.as_node() != Some(thing),
});
non_trivial_children.next().is_some()
};
if let Some(expr) = block.expr() {
if has_anything_else(expr.syntax()) {
return None;
}
return Some(expr);
}
// Unwrap `{ continue; }`
let (stmt,) = block.statements().next_tuple()?;
if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
if has_anything_else(expr_stmt.syntax()) {
return None;
}
let expr = expr_stmt.expr()?;
match expr.syntax().kind() {
CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr),
_ => (),
}
}
None
}
#[derive(Clone, Copy, Debug)]
pub(crate) enum Cursor<'a> {
Replace(&'a SyntaxNode),
@ -257,7 +295,7 @@ fn find_def(&self, path: &str) -> Option<ScopeDef> {
.find(|dep| &dep.name.to_string() == std_crate)?
.krate;
let mut module = std_crate.root_module(db)?;
let mut module = std_crate.root_module(db);
for segment in path {
module = module.children(db).find_map(|child| {
let name = child.name(db)?;

View file

@ -2,17 +2,19 @@
// FIXME: rewrite according to the plan, outlined in
// https://github.com/rust-analyzer/rust-analyzer/issues/3301#issuecomment-592931553
use std::iter::successors;
use either::Either;
use hir::{self, ModPath};
use ra_syntax::{
use syntax::{
ast::{self, NameOwner, VisibilityOwner},
AstNode, Direction, SmolStr,
AstNode, AstToken, Direction, SmolStr,
SyntaxKind::{PATH, PATH_SEGMENT},
SyntaxNode, T,
SyntaxNode, SyntaxToken, T,
};
use ra_text_edit::TextEditBuilder;
use text_edit::TextEditBuilder;
use crate::assist_context::AssistContext;
use either::Either;
/// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
pub(crate) fn find_insert_use_container(
@ -442,7 +444,7 @@ fn make_assist_add_new_use(
edit: &mut TextEditBuilder,
) {
if let Some(anchor) = anchor {
let indent = ra_fmt::leading_indent(anchor);
let indent = leading_indent(anchor);
let mut buf = String::new();
if after {
buf.push_str("\n");
@ -524,3 +526,22 @@ fn make_assist_add_nested_import(
edit.insert(end, "}".to_string());
}
}
/// If the node is on the beginning of the line, calculate indent.
fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
for token in prev_tokens(node.first_token()?) {
if let Some(ws) = ast::Whitespace::cast(token.clone()) {
let ws_text = ws.text();
if let Some(pos) = ws_text.rfind('\n') {
return Some(ws_text[pos + 1..].into());
}
}
if token.text().contains('\n') {
break;
}
}
return None;
fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
successors(token.prev_token(), |token| token.prev_token())
}
}
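A standalone sketch (not the commit's code) of what `leading_indent` computes: the indentation of a node is whatever follows the last newline in the whitespace that precedes it.

    fn leading_indent_of(ws_before_node: &str) -> Option<&str> {
        // Mirror of the logic above, on a plain string instead of syntax tokens.
        ws_before_node.rfind('\n').map(|pos| &ws_before_node[pos + 1..])
    }

    fn main() {
        assert_eq!(leading_indent_of("\n    "), Some("    "));
        assert_eq!(leading_indent_of("\n"), Some(""));
        // No newline in the preceding whitespace: no indent is reported here.
        assert_eq!(leading_indent_of(" "), None);
    }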

View file

@ -1,9 +1,9 @@
[package]
edition = "2018"
name = "ra_db"
version = "0.1.0"
authors = ["rust-analyzer developers"]
name = "base_db"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
@ -12,10 +12,10 @@ doctest = false
salsa = "0.15.2"
rustc-hash = "1.1.0"
ra_syntax = { path = "../ra_syntax" }
ra_cfg = { path = "../ra_cfg" }
ra_prof = { path = "../ra_prof" }
ra_tt = { path = "../ra_tt" }
syntax = { path = "../syntax" }
cfg = { path = "../cfg" }
profile = { path = "../profile" }
tt = { path = "../tt" }
test_utils = { path = "../test_utils" }
vfs = { path = "../vfs" }
stdx = { path = "../stdx" }

View file

@ -59,7 +59,7 @@
//! ```
use std::{str::FromStr, sync::Arc};
use ra_cfg::CfgOptions;
use cfg::CfgOptions;
use rustc_hash::FxHashMap;
use test_utils::{extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER};
use vfs::{file_set::FileSet, VfsPath};

View file

@ -8,10 +8,10 @@
use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc};
use ra_cfg::CfgOptions;
use ra_syntax::SmolStr;
use ra_tt::TokenExpander;
use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use tt::TokenExpander;
use vfs::file_set::FileSet;
pub use vfs::FileId;
@ -156,7 +156,7 @@ pub fn add_crate_root(
display_name: Option<String>,
cfg_options: CfgOptions,
env: Env,
proc_macro: Vec<(SmolStr, Arc<dyn ra_tt::TokenExpander>)>,
proc_macro: Vec<(SmolStr, Arc<dyn tt::TokenExpander>)>,
) -> CrateId {
let proc_macro =
proc_macro.into_iter().map(|(name, it)| ProcMacro { name, expander: it }).collect();

View file

@ -1,13 +1,12 @@
//! ra_db defines basic database traits. The concrete DB is defined by ra_ide.
//! base_db defines basic database traits. The concrete DB is defined by ide.
mod cancellation;
mod input;
pub mod fixture;
use std::{panic, sync::Arc};
use ra_prof::profile;
use ra_syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
pub use crate::{
cancellation::Canceled,
@ -113,7 +112,7 @@ pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile("parse_query").detail(|| format!("{:?}", file_id));
let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
let text = db.file_text(file_id);
SourceFile::parse(&*text)
}

View file

@ -1,9 +1,9 @@
[package]
edition = "2018"
name = "ra_cfg"
version = "0.1.0"
authors = ["rust-analyzer developers"]
name = "cfg"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
@ -11,8 +11,8 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
ra_syntax = { path = "../ra_syntax" }
tt = { path = "../ra_tt", package = "ra_tt" }
tt = { path = "../tt" }
[dev-dependencies]
mbe = { path = "../ra_mbe", package = "ra_mbe" }
mbe = { path = "../mbe" }
syntax = { path = "../syntax" }

View file

@ -4,7 +4,7 @@
use std::slice::Iter as SliceIter;
use ra_syntax::SmolStr;
use tt::SmolStr;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum CfgExpr {
@ -86,17 +86,15 @@ fn next_cfg_expr(it: &mut SliceIter<tt::TokenTree>) -> Option<CfgExpr> {
mod tests {
use super::*;
use mbe::{ast_to_token_tree, TokenMap};
use ra_syntax::ast::{self, AstNode};
fn get_token_tree_generated(input: &str) -> (tt::Subtree, TokenMap) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap()
}
use mbe::ast_to_token_tree;
use syntax::ast::{self, AstNode};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let (tt, _) = get_token_tree_generated(input);
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
ast_to_token_tree(&tt).unwrap()
};
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}

View file

@ -1,9 +1,9 @@
//! ra_cfg defines conditional compiling options, `cfg` attibute parser and evaluator
//! cfg defines conditional compiling options, `cfg` attibute parser and evaluator
mod cfg_expr;
use ra_syntax::SmolStr;
use rustc_hash::FxHashSet;
use tt::SmolStr;
pub use cfg_expr::CfgExpr;

View file

@ -1,9 +1,9 @@
[package]
name = "expect"
version = "0.1.0"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
license = "MIT OR Apache-2.0"
[lib]
doctest = false
@ -11,4 +11,5 @@ doctest = false
[dependencies]
once_cell = "1"
difference = "2"
stdx = { path = "../stdx" }

View file

@ -74,7 +74,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl Expect {
pub fn assert_eq(&self, actual: &str) {
let trimmed = self.trimmed();
if &trimmed == actual {
if trimmed == actual {
return;
}
Runtime::fail_expect(self, &trimmed, actual);
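The borrow dropped above is unnecessary because the standard library provides `PartialEq<&str> for String`; a tiny illustration (not from the commit):

    fn main() {
        let trimmed = String::from("expected output");
        let actual: &str = "expected output";
        // `String: PartialEq<&str>`, so no `&trimmed` is needed on the left.
        assert!(trimmed == actual);
    }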

View file

@ -1,9 +1,9 @@
[package]
edition = "2018"
name = "flycheck"
version = "0.1.0"
authors = ["rust-analyzer developers"]
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
@ -14,4 +14,5 @@ log = "0.4.8"
cargo_metadata = "0.11.1"
serde_json = "1.0.48"
jod-thread = "0.1.1"
ra_toolchain = { path = "../ra_toolchain" }
toolchain = { path = "../toolchain" }

View file

@ -1,4 +1,4 @@
//! cargo_check provides the functionality needed to run `cargo check` or
//! Flycheck provides the functionality needed to run `cargo check` or
//! another compatible command (f.x. clippy) in a background thread and provide
//! LSP diagnostics based on the output of the command.
@ -147,6 +147,12 @@ fn run(mut self, inbox: Receiver<Restart>) {
// avoid busy-waiting.
let cargo_handle = self.cargo_handle.take().unwrap();
let res = cargo_handle.join();
if res.is_err() {
log::error!(
"Flycheck failed to run the following command: {:?}",
self.check_command()
)
}
self.send(Message::Progress(Progress::DidFinish(res)));
}
Event::CheckEvent(Some(message)) => match message {
@ -187,7 +193,7 @@ fn check_command(&self) -> Command {
extra_args,
features,
} => {
let mut cmd = Command::new(ra_toolchain::cargo());
let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command);
cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
.arg(self.workspace_root.join("Cargo.toml"));
@ -253,7 +259,7 @@ fn join(mut self) -> io::Result<()> {
return Err(io::Error::new(
io::ErrorKind::Other,
format!(
"Cargo watcher failed,the command produced no valid metadata (exit code: {:?})",
"Cargo watcher failed, the command produced no valid metadata (exit code: {:?})",
exit_status
),
));

crates/hir/Cargo.toml (new file): 24 lines added
View file

@ -0,0 +1,24 @@
[package]
name = "hir"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
[dependencies]
log = "0.4.8"
rustc-hash = "1.1.0"
either = "1.5.3"
arrayvec = "0.5.1"
itertools = "0.9.0"
stdx = { path = "../stdx" }
syntax = { path = "../syntax" }
base_db = { path = "../base_db" }
profile = { path = "../profile" }
hir_expand = { path = "../hir_expand" }
hir_def = { path = "../hir_def" }
hir_ty = { path = "../hir_ty" }

View file

@ -2,8 +2,10 @@
use std::{iter, sync::Arc};
use arrayvec::ArrayVec;
use base_db::{CrateId, Edition, FileId};
use either::Either;
use hir_def::{
adt::ReprKind,
adt::StructKind,
adt::VariantData,
builtin_type::BuiltinType,
@ -29,14 +31,12 @@
method_resolution, ApplicationTy, CallableDefId, Canonical, FnSig, GenericPredicate,
InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor,
};
use ra_db::{CrateId, Edition, FileId};
use ra_prof::profile;
use ra_syntax::{
use rustc_hash::FxHashSet;
use stdx::impl_from;
use syntax::{
ast::{self, AttrsOwner, NameOwner},
AstNode,
};
use rustc_hash::FxHashSet;
use stdx::impl_from;
use crate::{
db::{DefDatabase, HirDatabase},
@ -83,9 +83,9 @@ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
.collect()
}
pub fn root_module(self, db: &dyn HirDatabase) -> Option<Module> {
pub fn root_module(self, db: &dyn HirDatabase) -> Module {
let module_id = db.crate_def_map(self.id).root;
Some(Module::new(self, module_id))
Module::new(self, module_id)
}
pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
@ -303,7 +303,7 @@ pub fn visibility_of(self, db: &dyn HirDatabase, def: &ModuleDef) -> Option<Visi
}
pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
let _p = profile("Module::diagnostics");
let _p = profile::span("Module::diagnostics");
let crate_def_map = db.crate_def_map(self.id.krate);
crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
for decl in self.declarations(db) {
@ -431,6 +431,10 @@ pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id)
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
db.struct_data(self.id).repr.clone()
}
fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.struct_data(self.id).variant_data.clone()
}
@ -811,7 +815,7 @@ pub struct MacroDef {
impl MacroDef {
/// FIXME: right now, this just returns the root module of the crate that
/// defines this macro. The reasons for this is that macros are expanded
/// early, in `ra_hir_expand`, where modules simply do not exist yet.
/// early, in `hir_expand`, where modules simply do not exist yet.
pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
let krate = self.id.krate?;
let module_id = db.crate_def_map(krate).root;
@ -879,6 +883,13 @@ fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -
}
impl AssocItem {
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
match self {
AssocItem::Function(it) => Some(it.name(db)),
AssocItem::Const(it) => it.name(db),
AssocItem::TypeAlias(it) => Some(it.name(db)),
}
}
pub fn module(self, db: &dyn HirDatabase) -> Module {
match self {
AssocItem::Function(f) => f.module(db),
@ -1253,6 +1264,19 @@ pub fn is_fn(&self) -> bool {
)
}
pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
let adt_id = match self.ty.value {
Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_id), .. }) => adt_id,
_ => return false,
};
let adt = adt_id.into();
match adt {
Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
_ => false,
}
}
pub fn is_raw_ptr(&self) -> bool {
matches!(&self.ty.value, Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }))
}

View file

@ -1,8 +1,6 @@
//! FIXME: write short doc here
pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{
AstDiagnostic, Diagnostic, DiagnosticSink, DiagnosticSinkBuilder,
};
pub use hir_expand::diagnostics::{Diagnostic, DiagnosticSink, DiagnosticSinkBuilder};
pub use hir_ty::diagnostics::{
MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField,
};

View file

@ -29,7 +29,7 @@ fn from(ty: $ty) -> $id {
}
from_id![
(ra_db::CrateId, crate::Crate),
(base_db::CrateId, crate::Crate),
(hir_def::ModuleId, crate::Module),
(hir_def::StructId, crate::Struct),
(hir_def::UnionId, crate::Union),

View file

@ -1,4 +1,4 @@
//! Provides set of implementation for hir's objects that allows get back location in file.
//! FIXME: write short doc here
use either::Either;
use hir_def::{
@ -6,7 +6,7 @@
src::{HasChildSource, HasSource as _},
Lookup, VariantId,
};
use ra_syntax::ast;
use syntax::ast;
use crate::{
db::HirDatabase, Const, Enum, EnumVariant, Field, FieldSource, Function, ImplDef, MacroDef,

View file

@ -9,11 +9,11 @@
//! It is written in "OO" style. Each type is self contained (as in, it knows it's
//! parents and full context). It should be "clean code".
//!
//! `ra_hir_*` crates are the implementation of the compiler logic.
//! `hir_*` crates are the implementation of the compiler logic.
//! They are written in "ECS" style, with relatively little abstractions.
//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
//!
//! `ra_hir` is what insulates the "we don't know how to actually write an incremental compiler"
//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! https://www.tedinski.com/2018/02/06/system-boundaries.html.
@ -49,11 +49,15 @@
docs::Documentation,
nameres::ModuleSource,
path::{ModPath, Path, PathKind},
type_ref::Mutability,
type_ref::{Mutability, TypeRef},
};
pub use hir_expand::{
hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc,
MacroDefId, /* FIXME */
name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, /* FIXME */ MacroDefId,
MacroFile, Origin,
};
pub use hir_ty::display::HirDisplay;
// These are negative re-exports: pub using these names is forbidden, they
// should remain private to hir internals.
#[allow(unused)]
use hir_expand::hygiene::Hygiene;

View file

@ -4,28 +4,28 @@
use std::{cell::RefCell, fmt, iter::successors};
use base_db::{FileId, FileRange};
use hir_def::{
resolver::{self, HasResolver, Resolver},
AsMacroCall, FunctionId, TraitId, VariantId,
};
use hir_expand::{diagnostics::AstDiagnostic, hygiene::Hygiene, ExpansionInfo};
use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
use hir_ty::associated_type_shorthand_candidates;
use itertools::Itertools;
use ra_db::{FileId, FileRange};
use ra_prof::profile;
use ra_syntax::{
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
algo::{find_node_at_offset, skip_trivia_token},
ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
db::HirDatabase,
diagnostics::Diagnostic,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer},
AssocItem, Callable, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module,
ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef,
VariantDef,
};
use resolver::TypeNs;
@ -109,24 +109,16 @@ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
self.imp.parse(file_id)
}
pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
let file_id = d.source().file_id;
let root = self.db.parse_or_expand(file_id).unwrap();
self.imp.cache(root, file_id);
d.ast(self.db.upcast())
}
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
self.imp.expand(macro_call)
}
pub fn expand_hypothetical(
pub fn speculative_expand(
&self,
actual_macro_call: &ast::MacroCall,
hypothetical_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map)
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@ -145,8 +137,8 @@ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
self.imp.original_range(node)
}
pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
self.imp.diagnostics_range(diagnostics)
pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
self.imp.diagnostics_display_range(diagnostics)
}
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
@ -228,6 +220,10 @@ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.imp.resolve_path(path)
}
pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
self.imp.resolve_extern_crate(extern_crate)
}
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}
@ -275,6 +271,18 @@ pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.imp.assert_contains_node(node)
}
pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
self.imp.is_unsafe_method_call(method_call_expr)
}
pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
self.imp.is_unsafe_ref_expr(ref_expr)
}
pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
self.imp.is_unsafe_ident_pat(ident_pat)
}
}
impl<'db> SemanticsImpl<'db> {
@ -302,7 +310,7 @@ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
Some(node)
}
fn expand_hypothetical(
fn speculative_expand(
&self,
actual_macro_call: &ast::MacroCall,
hypothetical_args: &ast::TokenTree,
@ -324,7 +332,7 @@ fn expand_hypothetical(
}
fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
let _p = profile("descend_into_macros");
let _p = profile::span("descend_into_macros");
let parent = token.parent();
let parent = self.find_file(parent);
let sa = self.analyze2(parent.as_ref(), None);
@ -372,10 +380,11 @@ fn original_range(&self, node: &SyntaxNode) -> FileRange {
original_range(self.db, node.as_ref())
}
fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
let src = diagnostics.source();
fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
let src = diagnostics.display_source();
let root = self.db.parse_or_expand(src.file_id).unwrap();
let node = src.value.to_node(&root);
self.cache(root, src.file_id);
original_range(self.db, src.with_value(&node))
}
@ -443,6 +452,17 @@ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.analyze(path.syntax()).resolve_path(self.db, path)
}
fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
let krate = self.scope(extern_crate.syntax()).krate()?;
krate.dependencies(self.db).into_iter().find_map(|dep| {
if dep.name == extern_crate.name_ref()?.as_name() {
Some(dep.krate)
} else {
None
}
})
}
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
}
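An illustrative caller of the `resolve_extern_crate` query added in this hunk (not from this commit); `sema` is assumed to be a `Semantics` instance and `extern_crate` an `ast::ExternCrate` node such as `extern crate foo;`:

if let Some(krate) = sema.resolve_extern_crate(&extern_crate) {
    // `krate` is the dependency of the declaring crate whose dependency name
    // matches the name written after `extern crate`.
}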
@ -481,18 +501,19 @@ fn to_module_def(&self, file: FileId) -> Option<Module> {
fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
let node = self.find_file(node.clone());
let resolver = self.analyze2(node.as_ref(), None).resolver;
SemanticsScope { db: self.db, resolver }
SemanticsScope { db: self.db, file_id: node.file_id, resolver }
}
fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
let node = self.find_file(node.clone());
let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
SemanticsScope { db: self.db, resolver }
SemanticsScope { db: self.db, file_id: node.file_id, resolver }
}
fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
let file_id = self.db.lookup_intern_trait(def.id).id.file_id;
let resolver = def.id.resolver(self.db.upcast());
SemanticsScope { db: self.db, resolver }
SemanticsScope { db: self.db, file_id, resolver }
}
fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
@ -501,7 +522,7 @@ fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
}
fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
let _p = profile("Semantics::analyze2");
let _p = profile::span("Semantics::analyze2");
let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
Some(it) => it,
@ -559,6 +580,90 @@ fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
});
InFile::new(file_id, node)
}
pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
method_call_expr
.expr()
.and_then(|expr| {
let field_expr = if let ast::Expr::FieldExpr(field_expr) = expr {
field_expr
} else {
return None;
};
let ty = self.type_of_expr(&field_expr.expr()?)?;
if !ty.is_packed(self.db) {
return None;
}
let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
let is_unsafe = func.has_self_param(self.db)
&& matches!(func.params(self.db).first(), Some(TypeRef::Reference(..)));
Some(is_unsafe)
})
.unwrap_or(false)
}
pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
ref_expr
.expr()
.and_then(|expr| {
let field_expr = match expr {
ast::Expr::FieldExpr(field_expr) => field_expr,
_ => return None,
};
let expr = field_expr.expr()?;
self.type_of_expr(&expr)
})
// Binding a reference to a packed type is possibly unsafe.
.map(|ty| ty.is_packed(self.db))
.unwrap_or(false)
// FIXME This needs layout computation to be correct. It will highlight
// more than it should with the current implementation.
}
pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
if ident_pat.ref_token().is_none() {
return false;
}
ident_pat
.syntax()
.parent()
.and_then(|parent| {
// `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
// `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
// so this tries to look up the `IdentPat` anywhere along that structure to the
// `RecordPat` so we can get the containing type.
let record_pat = ast::RecordPatField::cast(parent.clone())
.and_then(|record_pat| record_pat.syntax().parent())
.or_else(|| Some(parent.clone()))
.and_then(|parent| {
ast::RecordPatFieldList::cast(parent)?
.syntax()
.parent()
.and_then(ast::RecordPat::cast)
});
// If this doesn't match a `RecordPat`, fall back to a `LetStmt` to see if
// this is initialized from a `FieldExpr`.
if let Some(record_pat) = record_pat {
self.type_of_pat(&ast::Pat::RecordPat(record_pat))
} else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
let field_expr = match let_stmt.initializer()? {
ast::Expr::FieldExpr(field_expr) => field_expr,
_ => return None,
};
self.type_of_expr(&field_expr.expr()?)
} else {
None
}
})
// Binding a reference to a packed type is possibly unsafe.
.map(|ty| ty.is_packed(self.db))
.unwrap_or(false)
}
}
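The comments above describe why these helpers exist: creating a reference to a field of a `#[repr(packed)]` type may be undefined behaviour because the field can be misaligned. A minimal standalone example (hypothetical code, not from this commit) of the patterns the three heuristics try to catch:

#[repr(packed)]
struct Packed {
    flag: u8,
    value: u32, // may end up misaligned because of `packed`
}

fn examples(p: Packed) {
    let by_value = p.value;               // plain copy of the field: fine
    // let r = &p.value;                  // `is_unsafe_ref_expr`: reference to a packed field
    // let Packed { ref value, .. } = p;  // `is_unsafe_ident_pat`: `ref` binding to a packed field
    // p.value.clone();                   // `is_unsafe_method_call`: the `&self` receiver auto-refs the field
    let _ = by_value;
}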
pub trait ToDef: AstNode + Clone {
@ -604,6 +709,7 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode {
#[derive(Debug)]
pub struct SemanticsScope<'a> {
pub db: &'a dyn HirDatabase,
file_id: HirFileId,
resolver: Resolver,
}
@ -612,6 +718,10 @@ pub fn module(&self) -> Option<Module> {
Some(Module { id: self.resolver.module()? })
}
pub fn krate(&self) -> Option<Crate> {
Some(Crate { id: self.resolver.krate()? })
}
/// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
// FIXME: rename to visible_traits to not repeat scope?
pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
@ -643,6 +753,14 @@ pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
})
}
/// Resolve a path as if it were written at the given scope. This is
/// necessarily a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let hygiene = Hygiene::new(self.db.upcast(), self.file_id);
let path = Path::from_src(path.clone(), &hygiene)?;
self.resolve_hir_path(&path)
}
pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
resolve_hir_path(self.db, &self.resolver, path)
}
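A rough sketch of how a caller (an assist, say) might drive the new `speculative_resolve`, reusing the parse-a-`use`-item trick that appears in the `find_path` tests later in this diff; `scope` is assumed to be a `SemanticsScope` obtained via `Semantics::scope`:

let parse = syntax::SourceFile::parse("use foo::Bar;");
let path = parse.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
// Resolve `foo::Bar` as if it were written at `scope`; per the doc comment,
// hygiene is only approximated from the scope's `file_id`.
if let Some(resolution) = scope.speculative_resolve(&path) {
    let _ = resolution; // e.g. PathResolution::Def(..) when `foo::Bar` is in scope
}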

View file

@ -1,5 +1,6 @@
//! Maps *syntax* of various definitions to their semantic ids.
use base_db::FileId;
use hir_def::{
child_by_source::ChildBySource,
dyn_map::DynMap,
@ -9,14 +10,12 @@
ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
};
use hir_expand::{name::AsName, AstId, MacroDefKind};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::{
use rustc_hash::FxHashMap;
use stdx::impl_from;
use syntax::{
ast::{self, NameOwner},
match_ast, AstNode, SyntaxNode,
};
use rustc_hash::FxHashMap;
use stdx::impl_from;
use crate::{db::HirDatabase, InFile, MacroDefId};
@ -29,7 +28,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&mut self, file: FileId) -> Option<ModuleId> {
let _p = profile("SourceBinder::to_module_def");
let _p = profile::span("SourceBinder::to_module_def");
let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| {
let crate_def_map = self.db.crate_def_map(crate_id);
let local_id = crate_def_map.modules_for_file(file).next()?;
@ -39,7 +38,7 @@ pub(super) fn file_to_def(&mut self, file: FileId) -> Option<ModuleId> {
}
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
let _p = profile("module_to_def");
let _p = profile::span("module_to_def");
let parent_declaration = src
.as_ref()
.map(|it| it.syntax())

View file

@ -21,7 +21,7 @@
diagnostics::{record_literal_missing_fields, record_pattern_missing_fields},
InferenceResult, Substs, Ty,
};
use ra_syntax::{
use syntax::{
ast::{self, AstNode},
SyntaxNode, TextRange, TextSize,
};
@ -31,7 +31,7 @@
MacroDef, ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias,
TypeParam,
};
use ra_db::CrateId;
use base_db::CrateId;
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself.
@ -265,8 +265,7 @@ pub(crate) fn resolve_path(
}
// This must be a normal source file rather than macro file.
let hir_path =
crate::Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
let hir_path = Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
// Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
// trying to resolve foo::bar.
@ -451,7 +450,7 @@ fn adjust(
pub(crate) fn resolve_hir_path(
db: &dyn HirDatabase,
resolver: &Resolver,
path: &crate::Path,
path: &Path,
) -> Option<PathResolution> {
let types =
resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty {
@ -512,7 +511,7 @@ pub(crate) fn resolve_hir_path(
pub(crate) fn resolve_hir_path_qualifier(
db: &dyn HirDatabase,
resolver: &Resolver,
path: &crate::Path,
path: &Path,
) -> Option<PathResolution> {
let items = resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path())

View file

@ -1,9 +1,9 @@
[package]
edition = "2018"
name = "ra_hir_def"
version = "0.1.0"
authors = ["rust-analyzer developers"]
name = "hir_def"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[lib]
doctest = false
@ -21,16 +21,15 @@ indexmap = "1.4.0"
smallvec = "1.4.0"
stdx = { path = "../stdx" }
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_prof = { path = "../ra_prof" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
arena = { path = "../arena" }
base_db = { path = "../base_db" }
syntax = { path = "../syntax" }
profile = { path = "../profile" }
hir_expand = { path = "../hir_expand" }
test_utils = { path = "../test_utils" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }
ra_cfg = { path = "../ra_cfg" }
tt = { path = "../ra_tt", package = "ra_tt" }
mbe = { path = "../mbe" }
cfg = { path = "../cfg" }
tt = { path = "../tt" }
[dev-dependencies]
expect = { path = "../expect" }

View file

@ -2,18 +2,19 @@
use std::sync::Arc;
use arena::{map::ArenaMap, Arena};
use either::Either;
use hir_expand::{
name::{AsName, Name},
InFile,
};
use ra_arena::{map::ArenaMap, Arena};
use ra_syntax::ast::{self, NameOwner, VisibilityOwner};
use syntax::ast::{self, NameOwner, VisibilityOwner};
use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
use crate::{
body::{CfgExpander, LowerCtx},
db::DefDatabase,
item_tree::{Field, Fields, ItemTree},
item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem},
src::HasChildSource,
src::HasSource,
trace::Trace,
@ -22,13 +23,14 @@
EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId,
VariantId,
};
use ra_cfg::CfgOptions;
use cfg::CfgOptions;
/// Note that we use `StructData` for unions as well!
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructData {
pub name: Name,
pub variant_data: Arc<VariantData>,
pub repr: Option<ReprKind>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -58,26 +60,58 @@ pub struct FieldData {
pub visibility: RawVisibility,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ReprKind {
Packed,
Other,
}
fn repr_from_value(item_tree: &ItemTree, of: AttrOwner) -> Option<ReprKind> {
item_tree.attrs(of).by_key("repr").tt_values().find_map(parse_repr_tt)
}
fn parse_repr_tt(tt: &Subtree) -> Option<ReprKind> {
match tt.delimiter {
Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
_ => return None,
}
let mut it = tt.token_trees.iter();
match it.next()? {
TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed),
_ => Some(ReprKind::Other),
}
}
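For reference, the mapping `parse_repr_tt` above produces for a few common attribute shapes (read off the code: only parenthesised token trees are considered, and only the first token is inspected):

// #[repr(packed)]     -> Some(ReprKind::Packed)
// #[repr(packed(2))]  -> Some(ReprKind::Packed)   (first token is still `packed`)
// #[repr(C)]          -> Some(ReprKind::Other)
// #[repr(align(8))]   -> Some(ReprKind::Other)
// #[repr(C, packed)]  -> Some(ReprKind::Other)    (only the first token is inspected)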
impl StructData {
pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
let loc = id.lookup(db);
let item_tree = db.item_tree(loc.id.file_id);
let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone();
let strukt = &item_tree[loc.id.value];
let variant_data = lower_fields(&item_tree, &cfg_options, &strukt.fields);
Arc::new(StructData { name: strukt.name.clone(), variant_data: Arc::new(variant_data) })
Arc::new(StructData {
name: strukt.name.clone(),
variant_data: Arc::new(variant_data),
repr,
})
}
pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
let loc = id.lookup(db);
let item_tree = db.item_tree(loc.id.file_id);
let repr = repr_from_value(&item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.module(db).krate].cfg_options.clone();
let union = &item_tree[loc.id.value];
let variant_data = lower_fields(&item_tree, &cfg_options, &union.fields);
Arc::new(StructData { name: union.name.clone(), variant_data: Arc::new(variant_data) })
Arc::new(StructData {
name: union.name.clone(),
variant_data: Arc::new(variant_data),
repr,
})
}
}

View file

@ -2,11 +2,11 @@
use std::{ops, sync::Arc};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{hygiene::Hygiene, AstId, InFile};
use mbe::ast_to_token_tree;
use ra_cfg::{CfgExpr, CfgOptions};
use ra_syntax::{
use syntax::{
ast::{self, AstNode, AttrsOwner},
SmolStr,
};

View file

@ -5,15 +5,14 @@
use std::{mem, ops::Index, sync::Arc};
use arena::{map::ArenaMap, Arena};
use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use either::Either;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
use ra_arena::{map::ArenaMap, Arena};
use ra_cfg::CfgOptions;
use ra_db::CrateId;
use ra_prof::profile;
use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, AstPtr};
use test_utils::mark;
pub(crate) use lower::LowerCtx;
@ -228,7 +227,7 @@ pub(crate) fn body_with_source_map_query(
db: &dyn DefDatabase,
def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) {
let _p = profile("body_with_source_map_query");
let _p = profile::span("body_with_source_map_query");
let mut params = None;
let (file_id, module, body) = match def {
@ -321,7 +320,7 @@ pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::Rec
#[cfg(test)]
mod tests {
use ra_db::{fixture::WithFixture, SourceDatabase};
use base_db::{fixture::WithFixture, SourceDatabase};
use test_utils::mark;
use crate::ModuleDefId;

View file

@ -3,21 +3,21 @@
use std::{any::type_name, sync::Arc};
use arena::Arena;
use either::Either;
use hir_expand::{
hygiene::Hygiene,
name::{name, AsName, Name},
HirFileId, MacroDefId, MacroDefKind,
};
use ra_arena::Arena;
use ra_syntax::{
use rustc_hash::FxHashMap;
use syntax::{
ast::{
self, ArgListOwner, ArrayExprKind, AstChildren, LiteralKind, LoopBodyOwner, NameOwner,
SlicePatComponents,
},
AstNode, AstPtr,
};
use rustc_hash::FxHashMap;
use test_utils::mark;
use crate::{

View file

@ -1,8 +1,8 @@
//! Name resolution for expressions.
use std::sync::Arc;
use arena::{Arena, Idx};
use hir_expand::name::Name;
use ra_arena::{Arena, Idx};
use rustc_hash::FxHashMap;
use crate::{
@ -169,9 +169,9 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
#[cfg(test)]
mod tests {
use base_db::{fixture::WithFixture, FileId, SourceDatabase};
use hir_expand::{name::AsName, InFile};
use ra_db::{fixture::WithFixture, FileId, SourceDatabase};
use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
use syntax::{algo::find_node_at_offset, ast, AstNode};
use test_utils::{assert_eq_text, extract_offset, mark};
use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId};

View file

@ -3,8 +3,7 @@
use std::sync::Arc;
use hir_expand::{name::Name, InFile};
use ra_prof::profile;
use ra_syntax::ast;
use syntax::ast;
use crate::{
attr::Attrs,
@ -133,7 +132,7 @@ pub struct ImplData {
impl ImplData {
pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
let _p = profile("impl_data_query");
let _p = profile::span("impl_data_query");
let impl_loc = id.lookup(db);
let item_tree = db.item_tree(impl_loc.id.file_id);

View file

@ -1,10 +1,9 @@
//! Defines database & queries for name resolution.
use std::sync::Arc;
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, CrateId, SourceDatabase, Upcast};
use ra_prof::profile;
use ra_syntax::SmolStr;
use syntax::SmolStr;
use crate::{
adt::{EnumData, StructData},
@ -116,6 +115,6 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
}
fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map:wait");
let _p = profile::span("crate_def_map:wait");
db.crate_def_map_query(krate)
}

View file

@ -3,7 +3,7 @@
use std::any::Any;
use hir_expand::diagnostics::Diagnostic;
use ra_syntax::{ast, AstPtr, SyntaxNodePtr};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use hir_expand::{HirFileId, InFile};
@ -18,7 +18,7 @@ impl Diagnostic for UnresolvedModule {
fn message(&self) -> String {
"unresolved module".to_string()
}
fn source(&self) -> InFile<SyntaxNodePtr> {
fn display_source(&self) -> InFile<SyntaxNodePtr> {
InFile::new(self.file, self.decl.clone().into())
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {

View file

@ -6,7 +6,7 @@
use std::sync::Arc;
use either::Either;
use ra_syntax::ast;
use syntax::ast;
use crate::{
db::DefDatabase,

View file

@ -12,9 +12,9 @@
//!
//! See also a neighboring `body` module.
use arena::{Idx, RawId};
use hir_expand::name::Name;
use ra_arena::{Idx, RawId};
use ra_syntax::ast::RangeOp;
use syntax::ast::RangeOp;
use crate::{
builtin_type::{BuiltinFloat, BuiltinInt},
@ -197,7 +197,7 @@ pub enum ArithOp {
BitAnd,
}
pub use ra_syntax::ast::PrefixOp as UnaryOp;
pub use syntax::ast::PrefixOp as UnaryOp;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Array {
ElementList(Vec<ExprId>),

View file

@ -1,7 +1,6 @@
//! An algorithm to find a path to refer to a certain item.
use hir_expand::name::{known, AsName, Name};
use ra_prof::profile;
use rustc_hash::FxHashSet;
use test_utils::mark;
@ -18,7 +17,7 @@
/// Find a path that can be used to refer to a certain item. This can depend on
/// *from where* you're referring to the item, hence the `from` parameter.
pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
let _p = profile("find_path");
let _p = profile::span("find_path");
find_path_inner(db, item, from, MAX_PATH_LEN)
}
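An illustrative call of the query documented above (not from this diff); `db`, `item` and `from` are assumed to be a `&dyn DefDatabase`, the `ItemInNs` to refer to, and the `ModuleId` the path will be written in:

// Yields e.g. `std::collections::HashMap`, or just `HashMap` if `from`
// already has it in scope; `None` if the item is not reachable from `from`.
let path: Option<ModPath> = find_path(db, item, from);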
@ -215,7 +214,7 @@ fn find_local_import_locations(
item: ItemInNs,
from: ModuleId,
) -> Vec<(ModuleId, Name)> {
let _p = profile("find_local_import_locations");
let _p = profile::span("find_local_import_locations");
// `from` can import anything below `from` with visibility of at least `from`, and anything
// above `from` with any visibility. That means we do not need to descend into private siblings
@ -293,9 +292,9 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
use hir_expand::hygiene::Hygiene;
use ra_db::fixture::WithFixture;
use ra_syntax::ast::AstNode;
use syntax::ast::AstNode;
use test_utils::mark;
use crate::test_db::TestDB;
@ -308,12 +307,9 @@ mod tests {
fn check_found_path(ra_fixture: &str, path: &str) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_for_file(pos.file_id);
let parsed_path_file = ra_syntax::SourceFile::parse(&format!("use {};", path));
let ast_path = parsed_path_file
.syntax_node()
.descendants()
.find_map(ra_syntax::ast::Path::cast)
.unwrap();
let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(ast_path, &Hygiene::new_unhygienic()).unwrap();
let crate_def_map = db.crate_def_map(module.krate);
@ -442,12 +438,12 @@ fn partially_imported() {
// already in scope.
check_found_path(
r#"
//- /main.rs crate:main deps:ra_syntax
//- /main.rs crate:main deps:syntax
use ra_syntax::ast;
use syntax::ast;
<|>
//- /lib.rs crate:ra_syntax
//- /lib.rs crate:syntax
pub mod ast {
pub enum ModuleItem {
A, B, C,
@ -459,18 +455,18 @@ pub enum ModuleItem {
check_found_path(
r#"
//- /main.rs crate:main deps:ra_syntax
//- /main.rs crate:main deps:syntax
<|>
//- /lib.rs crate:ra_syntax
//- /lib.rs crate:syntax
pub mod ast {
pub enum ModuleItem {
A, B, C,
}
}
"#,
"ra_syntax::ast::ModuleItem",
"syntax::ast::ModuleItem",
);
}

View file

@ -4,15 +4,14 @@
//! in rustc.
use std::sync::Arc;
use arena::{map::ArenaMap, Arena};
use base_db::FileId;
use either::Either;
use hir_expand::{
name::{name, AsName, Name},
InFile,
};
use ra_arena::{map::ArenaMap, Arena};
use ra_db::FileId;
use ra_prof::profile;
use ra_syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner};
use syntax::ast::{self, GenericParamsOwner, NameOwner, TypeBoundsOwner};
use crate::{
body::LowerCtx,
@ -73,7 +72,7 @@ pub(crate) fn generic_params_query(
db: &dyn DefDatabase,
def: GenericDefId,
) -> Arc<GenericParams> {
let _p = profile("generic_params_query");
let _p = profile::span("generic_params_query");
let generics = match def {
GenericDefId::FunctionId(id) => {

View file

@ -2,12 +2,12 @@
use std::{cmp::Ordering, fmt, hash::BuildHasherDefault, sync::Arc};
use base_db::CrateId;
use fst::{self, Streamer};
use indexmap::{map::Entry, IndexMap};
use ra_db::CrateId;
use ra_syntax::SmolStr;
use rustc_hash::{FxHashMap, FxHasher};
use smallvec::SmallVec;
use syntax::SmolStr;
use crate::{
db::DefDatabase,
@ -56,7 +56,7 @@ pub struct ImportMap {
impl ImportMap {
pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = ra_prof::profile("import_map_query");
let _p = profile::span("import_map_query");
let def_map = db.crate_def_map(krate);
let mut import_map = Self::default();
@ -254,7 +254,7 @@ pub fn search_dependencies<'a>(
krate: CrateId,
query: Query,
) -> Vec<ItemInNs> {
let _p = ra_prof::profile("search_dependencies").detail(|| format!("{:?}", query));
let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query));
let graph = db.crate_graph();
let import_maps: Vec<_> =
@ -327,8 +327,8 @@ pub fn search_dependencies<'a>(
#[cfg(test)]
mod tests {
use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
use expect::{expect, Expect};
use ra_db::{fixture::WithFixture, SourceDatabase, Upcast};
use crate::{test_db::TestDB, AssocContainerId, Lookup};

View file

@ -3,9 +3,9 @@
use std::collections::hash_map::Entry;
use base_db::CrateId;
use hir_expand::name::Name;
use once_cell::sync::Lazy;
use ra_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use test_utils::mark;

View file

@ -13,6 +13,7 @@
sync::Arc,
};
use arena::{Arena, Idx, RawId};
use ast::{AstNode, AttrsOwner, NameOwner, StructKind};
use either::Either;
use hir_expand::{
@ -21,10 +22,9 @@
name::{name, AsName, Name},
HirFileId, InFile,
};
use ra_arena::{Arena, Idx, RawId};
use ra_syntax::{ast, match_ast};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use syntax::{ast, match_ast};
use test_utils::mark;
use crate::{
@ -77,7 +77,7 @@ pub struct ItemTree {
impl ItemTree {
pub fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = ra_prof::profile("item_tree_query").detail(|| format!("{:?}", file_id));
let _p = profile::span("item_tree_query").detail(|| format!("{:?}", file_id));
let syntax = if let Some(node) = db.parse_or_expand(file_id) {
node
} else {

View file

@ -2,13 +2,13 @@
use std::{collections::hash_map::Entry, mem, sync::Arc};
use arena::map::ArenaMap;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
use ra_arena::map::ArenaMap;
use ra_syntax::{
use smallvec::SmallVec;
use syntax::{
ast::{self, ModuleItemOwner},
SyntaxNode,
};
use smallvec::SmallVec;
use crate::{
attr::Attrs,

View file

@ -1,10 +1,10 @@
use base_db::fixture::WithFixture;
use expect::{expect, Expect};
use hir_expand::{db::AstDatabase, HirFileId, InFile};
use ra_db::fixture::WithFixture;
use ra_syntax::{ast, AstNode};
use rustc_hash::FxHashSet;
use std::sync::Arc;
use stdx::format_to;
use syntax::{ast, AstNode};
use crate::{db::DefDatabase, test_db::TestDB};
@ -228,31 +228,31 @@ union Un {
top-level items:
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }]
Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Use>(0) }
Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::<syntax::ast::generated::nodes::Use>(0) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }]
Import { path: ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Use>(0) }
Import { path: ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::<syntax::ast::generated::nodes::Use>(0) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("ext_crate"))] }, input: None }]) }]
ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::ExternCrate>(1) }
ExternCrate { path: ModPath { kind: Plain, segments: [Name(Text("krate"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::<syntax::ast::generated::nodes::ExternCrate>(1) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_trait"))] }, input: None }]) }]
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::<TypeAlias>(0)), Const(Idx::<Const>(0)), Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Trait>(2) }
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::<TypeAlias>(0)), Const(Idx::<Const>(0)), Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Trait>(2) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_ty"))] }, input: None }]) }]
> TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::TypeAlias>(8) }
> TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, ast_id: FileAstId::<syntax::ast::generated::nodes::TypeAlias>(8) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_const"))] }, input: None }]) }]
> Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Const>(9) }
> Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::<syntax::ast::generated::nodes::Const>(9) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_method"))] }, input: None }]) }]
> Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(10) }
> Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(10) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_dfl_method"))] }, input: None }]) }]
> Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(11) }
> Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(11) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }]
Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(3), kind: Unit }
Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(3), kind: Unit }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }]
Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<ra_hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(4), kind: Tuple }
Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(4), kind: Tuple }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }]
Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Struct>(5), kind: Record }
Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(5), kind: Record }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }]
Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<ra_hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Enum>(6) }
Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<syntax::ast::generated::nodes::Enum>(6) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("un"))] }, input: None }]) }]
Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<ra_hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Union>(7) }
Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<syntax::ast::generated::nodes::Union>(7) }
"##]],
);
}
@ -274,13 +274,13 @@ fn end<W: Write>() {
inner attrs: Attrs { entries: None }
top-level items:
Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
> Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) }
> Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) }
inner items:
for AST FileAstId::<ra_syntax::ast::generated::nodes::Item>(2):
Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
for AST FileAstId::<syntax::ast::generated::nodes::Item>(2):
Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) }
"#]],
);
@ -303,9 +303,9 @@ fn b() {}
top-level items:
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }]
Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) }
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }]
Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) }
"##]],
);
}
@ -327,11 +327,11 @@ fn b() {}
top-level items:
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("trait_attr"))] }, input: None }]) }]
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Trait>(0) }
Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Trait>(0) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }]
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }]
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) }
"##]],
);
}
@ -353,11 +353,11 @@ fn b() {}
top-level items:
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("impl_attr"))] }, input: None }]) }]
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }]
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
> Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) }
> #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }]
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(2) }
> Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) }
"##]],
);
}
@ -408,13 +408,13 @@ fn inner() {}
inner attrs: Attrs { entries: None }
top-level items:
Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(0) }
Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(0) }
inner items:
for AST FileAstId::<ra_syntax::ast::generated::nodes::Item>(1):
for AST FileAstId::<syntax::ast::generated::nodes::Item>(1):
#[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_inner"))] }, input: None }]) }]
Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Fn>(1) }
Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) }
"##]],
);
@ -432,8 +432,8 @@ impl S {
inner attrs: Attrs { entries: None }
top-level items:
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::<MacroCall>(0))], ast_id: FileAstId::<ra_syntax::ast::generated::nodes::Impl>(0) }
> MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::<ra_syntax::ast::generated::nodes::MacroCall>(1) }
Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::<MacroCall>(0))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) }
> MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::<syntax::ast::generated::nodes::MacroCall>(1) }
"#]],
);
}

View file

@ -3,8 +3,8 @@
use std::marker::PhantomData;
use hir_expand::{InFile, MacroDefId};
use ra_syntax::{ast, AstNode, AstPtr};
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, AstPtr};
use crate::{
dyn_map::{DynMap, Policy},

Some files were not shown because too many files have changed in this diff