mirror of https://github.com/rust-lang/rust.git
Auto merge of #101220 - JohnTitor:rollup-ov7upr7, r=JohnTitor
Rollup of 10 pull requests

Successful merges:

 - #100804 (Fix search results color on hover for ayu theme)
 - #100892 (Add `AsFd` implementations for stdio types on WASI.)
 - #100927 (Adding new Fuchsia rustup docs... reworking walkthrough)
 - #101088 (Set DebuginfoKind::Pdb in msvc_base)
 - #101159 (add tracking issue number to const_slice_split_at_not_mut)
 - #101192 (Remove path string)
 - #101193 (Avoid zeroing large stack buffers in stdio on Windows)
 - #101197 (⬆️ rust-analyzer)
 - #101200 (Add test for issue #85872)
 - #101219 (Update books)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
commit
7f442f8ba1
@@ -2055,22 +2055,22 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
             (exp_found.expected.kind(), exp_found.found.kind())
         {
             if let ty::Adt(found_def, found_substs) = *found_ty.kind() {
-                let path_str = format!("{:?}", exp_def);
                 if exp_def == &found_def {
-                    let opt_msg = "you can convert from `&Option<T>` to `Option<&T>` using \
-                                   `.as_ref()`";
-                    let result_msg = "you can convert from `&Result<T, E>` to \
-                                     `Result<&T, &E>` using `.as_ref()`";
                     let have_as_ref = &[
-                        ("std::option::Option", opt_msg),
-                        ("core::option::Option", opt_msg),
-                        ("std::result::Result", result_msg),
-                        ("core::result::Result", result_msg),
+                        (
+                            sym::Option,
+                            "you can convert from `&Option<T>` to `Option<&T>` using \
+                             `.as_ref()`",
+                        ),
+                        (
+                            sym::Result,
+                            "you can convert from `&Result<T, E>` to \
+                             `Result<&T, &E>` using `.as_ref()`",
+                        ),
                     ];
-                    if let Some(msg) = have_as_ref
-                        .iter()
-                        .find_map(|(path, msg)| (&path_str == path).then_some(msg))
-                    {
+                    if let Some(msg) = have_as_ref.iter().find_map(|(name, msg)| {
+                        self.tcx.is_diagnostic_item(*name, exp_def.did()).then_some(msg)
+                    }) {
                         let mut show_suggestion = true;
                         for (exp_ty, found_ty) in
                             iter::zip(exp_substs.types(), found_substs.types())
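The suggestion this diagnostic emits is ordinary library usage; a minimal, self-contained sketch of the two conversions it recommends (independent of the compiler internals above):

```rust
fn main() {
    let name: Option<String> = Some("ayu".to_string());
    // `.as_ref()` turns `&Option<String>` into `Option<&String>`
    // without consuming the original value.
    let borrowed: Option<&String> = name.as_ref();
    assert_eq!(borrowed.map(|s| s.len()), Some(3));

    let outcome: Result<String, String> = Ok("done".to_string());
    // Likewise, `&Result<T, E>` becomes `Result<&T, &E>`.
    let borrowed: Result<&String, &String> = outcome.as_ref();
    assert!(borrowed.is_ok());
}
```

Matching on the `sym::Option`/`sym::Result` diagnostic items instead of a stringified `{:?}` path keeps the check working across re-exports and path changes, which is the point of PR #101192.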
@@ -1,4 +1,4 @@
-use crate::spec::{LinkerFlavor, LldFlavor, SplitDebuginfo, TargetOptions};
+use crate::spec::{DebuginfoKind, LinkerFlavor, LldFlavor, SplitDebuginfo, TargetOptions};
 use std::borrow::Cow;
 
 pub fn opts() -> TargetOptions {
@@ -20,6 +20,7 @@ pub fn opts() -> TargetOptions {
         // where `*.pdb` files show up next to the final artifact.
         split_debuginfo: SplitDebuginfo::Packed,
         supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Packed]),
+        debuginfo_kind: DebuginfoKind::Pdb,
 
         ..Default::default()
     }
@@ -1,4 +1,4 @@
-use crate::spec::{cvs, DebuginfoKind, TargetOptions};
+use crate::spec::{cvs, TargetOptions};
 
 pub fn opts() -> TargetOptions {
     let base = super::msvc_base::opts();
@@ -28,7 +28,6 @@ pub fn opts() -> TargetOptions {
         // not ever be possible for us to pass this flag.
         no_default_libraries: false,
         has_thread_local: true,
-        debuginfo_kind: DebuginfoKind::Pdb,
 
         ..base
    }
@@ -1541,7 +1541,7 @@ impl<T> [T] {
     /// }
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[rustc_const_unstable(feature = "const_slice_split_at_not_mut", issue = "none")]
+    #[rustc_const_unstable(feature = "const_slice_split_at_not_mut", issue = "101158")]
     #[inline]
     #[track_caller]
    #[must_use]
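The attribute change above only attaches the tracking issue (#101158) for calling `split_at` in `const` contexts; the method itself is stable. A quick usage sketch:

```rust
fn main() {
    let v = [1, 2, 3, 4, 5, 6];

    // Splits one slice into two at the given index:
    // `left` is `&v[..2]`, `right` is `&v[2..]`.
    let (left, right) = v.split_at(2);
    assert_eq!(left, [1, 2]);
    assert_eq!(right, [3, 4, 5, 6]);
}
```

Using it in a `const fn` still requires the nightly `const_slice_split_at_not_mut` feature named in the attribute.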
@@ -4,7 +4,7 @@ use super::fd::WasiFd;
 use crate::io::{self, IoSlice, IoSliceMut};
 use crate::mem::ManuallyDrop;
 use crate::os::raw;
-use crate::os::wasi::io::{AsRawFd, FromRawFd};
+use crate::os::wasi::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd};
 
 pub struct Stdin;
 pub struct Stdout;
@@ -23,6 +23,13 @@ impl AsRawFd for Stdin {
     }
 }
 
+impl AsFd for Stdin {
+    #[inline]
+    fn as_fd(&self) -> BorrowedFd<'_> {
+        unsafe { BorrowedFd::borrow_raw(0) }
+    }
+}
+
 impl io::Read for Stdin {
     fn read(&mut self, data: &mut [u8]) -> io::Result<usize> {
         self.read_vectored(&mut [IoSliceMut::new(data)])
@@ -51,6 +58,13 @@ impl AsRawFd for Stdout {
     }
 }
 
+impl AsFd for Stdout {
+    #[inline]
+    fn as_fd(&self) -> BorrowedFd<'_> {
+        unsafe { BorrowedFd::borrow_raw(1) }
+    }
+}
+
 impl io::Write for Stdout {
     fn write(&mut self, data: &[u8]) -> io::Result<usize> {
         self.write_vectored(&[IoSlice::new(data)])
@@ -82,6 +96,13 @@ impl AsRawFd for Stderr {
     }
 }
 
+impl AsFd for Stderr {
+    #[inline]
+    fn as_fd(&self) -> BorrowedFd<'_> {
+        unsafe { BorrowedFd::borrow_raw(2) }
+    }
+}
+
 impl io::Write for Stderr {
     fn write(&mut self, data: &[u8]) -> io::Result<usize> {
         self.write_vectored(&[IoSlice::new(data)])
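These impls plug the WASI stdio types into the io-safety API, so callers can take `impl AsFd` instead of raw integer descriptors. A minimal sketch of the caller side, written against the portable `std::os::fd` module (it compiles the same way on Unix-like hosts, where `io::stdin()` has the equivalent impl):

```rust
use std::io;
use std::os::fd::{AsFd, BorrowedFd};

// Accept anything that can lend out a file descriptor. The new impls let
// Stdin/Stdout/Stderr satisfy this bound directly.
fn describe(io: &impl AsFd) {
    // `BorrowedFd` is lifetime-bound and guaranteed valid, unlike a raw fd.
    let fd: BorrowedFd<'_> = io.as_fd();
    println!("borrowed fd: {fd:?}");
}

fn main() {
    let stdin = io::stdin();
    describe(&stdin);
}
```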
@@ -3,6 +3,7 @@
 use crate::char::decode_utf16;
 use crate::cmp;
 use crate::io;
+use crate::mem::MaybeUninit;
 use crate::os::windows::io::{FromRawHandle, IntoRawHandle};
 use crate::ptr;
 use crate::str;
@@ -169,13 +170,14 @@ fn write(
 }
 
 fn write_valid_utf8_to_console(handle: c::HANDLE, utf8: &str) -> io::Result<usize> {
-    let mut utf16 = [0u16; MAX_BUFFER_SIZE / 2];
+    let mut utf16 = [MaybeUninit::<u16>::uninit(); MAX_BUFFER_SIZE / 2];
     let mut len_utf16 = 0;
     for (chr, dest) in utf8.encode_utf16().zip(utf16.iter_mut()) {
-        *dest = chr;
+        *dest = MaybeUninit::new(chr);
         len_utf16 += 1;
     }
-    let utf16 = &utf16[..len_utf16];
+    // Safety: We've initialized `len_utf16` values.
+    let utf16: &[u16] = unsafe { MaybeUninit::slice_assume_init_ref(&utf16[..len_utf16]) };
 
     let mut written = write_u16s(handle, &utf16)?;
 
@@ -250,11 +252,14 @@ impl io::Read for Stdin {
             return Ok(bytes_copied);
         } else if buf.len() - bytes_copied < 4 {
             // Not enough space to get a UTF-8 byte. We will use the incomplete UTF8.
-            let mut utf16_buf = [0u16; 1];
+            let mut utf16_buf = [MaybeUninit::new(0); 1];
             // Read one u16 character.
             let read = read_u16s_fixup_surrogates(handle, &mut utf16_buf, 1, &mut self.surrogate)?;
             // Read bytes, using the (now-empty) self.incomplete_utf8 as extra space.
-            let read_bytes = utf16_to_utf8(&utf16_buf[..read], &mut self.incomplete_utf8.bytes)?;
+            let read_bytes = utf16_to_utf8(
+                unsafe { MaybeUninit::slice_assume_init_ref(&utf16_buf[..read]) },
+                &mut self.incomplete_utf8.bytes,
+            )?;
 
             // Read in the bytes from incomplete_utf8 until the buffer is full.
             self.incomplete_utf8.len = read_bytes as u8;
@@ -262,15 +267,18 @@ impl io::Read for Stdin {
             bytes_copied += self.incomplete_utf8.read(&mut buf[bytes_copied..]);
             Ok(bytes_copied)
         } else {
-            let mut utf16_buf = [0u16; MAX_BUFFER_SIZE / 2];
+            let mut utf16_buf = [MaybeUninit::<u16>::uninit(); MAX_BUFFER_SIZE / 2];
 
             // In the worst case, a UTF-8 string can take 3 bytes for every `u16` of a UTF-16. So
             // we can read at most a third of `buf.len()` chars and uphold the guarantee no data gets
             // lost.
             let amount = cmp::min(buf.len() / 3, utf16_buf.len());
             let read =
                 read_u16s_fixup_surrogates(handle, &mut utf16_buf, amount, &mut self.surrogate)?;
 
-            match utf16_to_utf8(&utf16_buf[..read], buf) {
+            // Safety: `read_u16s_fixup_surrogates` returns the number of items
+            // initialized.
+            let utf16s = unsafe { MaybeUninit::slice_assume_init_ref(&utf16_buf[..read]) };
+            match utf16_to_utf8(utf16s, buf) {
                 Ok(value) => return Ok(bytes_copied + value),
                 Err(e) => return Err(e),
             }
@@ -283,14 +291,14 @@ impl io::Read for Stdin {
 // This is a best effort, and might not work if we are not the only reader on Stdin.
 fn read_u16s_fixup_surrogates(
     handle: c::HANDLE,
-    buf: &mut [u16],
+    buf: &mut [MaybeUninit<u16>],
     mut amount: usize,
     surrogate: &mut u16,
 ) -> io::Result<usize> {
     // Insert possibly remaining unpaired surrogate from last read.
     let mut start = 0;
     if *surrogate != 0 {
-        buf[0] = *surrogate;
+        buf[0] = MaybeUninit::new(*surrogate);
         *surrogate = 0;
         start = 1;
         if amount == 1 {
@@ -303,7 +311,10 @@ fn read_u16s_fixup_surrogates(
     let mut amount = read_u16s(handle, &mut buf[start..amount])? + start;
 
     if amount > 0 {
-        let last_char = buf[amount - 1];
+        // Safety: The returned `amount` is the number of values initialized,
+        // and it is not 0, so we know that `buf[amount - 1]` have been
+        // initialized.
+        let last_char = unsafe { buf[amount - 1].assume_init() };
         if last_char >= 0xD800 && last_char <= 0xDBFF {
             // high surrogate
             *surrogate = last_char;
@@ -313,7 +324,8 @@ fn read_u16s_fixup_surrogates(
     Ok(amount)
 }
 
-fn read_u16s(handle: c::HANDLE, buf: &mut [u16]) -> io::Result<usize> {
+// Returns `Ok(n)` if it initialized `n` values in `buf`.
+fn read_u16s(handle: c::HANDLE, buf: &mut [MaybeUninit<u16>]) -> io::Result<usize> {
     // Configure the `pInputControl` parameter to not only return on `\r\n` but also Ctrl-Z, the
     // traditional DOS method to indicate end of character stream / user input (SUB).
     // See #38274 and https://stackoverflow.com/questions/43836040/win-api-readconsole.
@@ -346,8 +358,9 @@ fn read_u16s(handle: c::HANDLE, buf: &mut [u16]) -> io::Result<usize> {
         }
         break;
     }
 
-    if amount > 0 && buf[amount as usize - 1] == CTRL_Z {
+    // Safety: if `amount > 0`, then that many bytes were written, so
+    // `buf[amount as usize - 1]` has been initialized.
+    if amount > 0 && unsafe { buf[amount as usize - 1].assume_init() } == CTRL_Z {
         amount -= 1;
     }
     Ok(amount as usize)
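The recurring pattern in this change: declare the buffer as `[MaybeUninit<u16>; N]`, let the read routine report how many elements it wrote, and `assume_init` only that prefix. That avoids zeroing `MAX_BUFFER_SIZE` bytes of stack on every call. The `MaybeUninit::slice_assume_init_ref` helper used above is nightly-only, so here is a stable, self-contained sketch of the same idea using per-element `assume_init`:

```rust
use std::mem::MaybeUninit;

// Stand-in for `read_u16s`: fill at most `n` slots of an uninitialized
// buffer and return how many were initialized.
fn fill_prefix(buf: &mut [MaybeUninit<u16>], n: usize) -> usize {
    let count = n.min(buf.len());
    for (i, slot) in buf.iter_mut().take(count).enumerate() {
        // Writing through `MaybeUninit::new` never reads the old bytes,
        // so the buffer never has to be zeroed up front.
        *slot = MaybeUninit::new(i as u16);
    }
    count
}

fn main() {
    let mut buf = [MaybeUninit::<u16>::uninit(); 8];
    let len = fill_prefix(&mut buf, 3);
    // Safety: exactly `len` elements were initialized above.
    let init: Vec<u16> = buf[..len].iter().map(|v| unsafe { v.assume_init() }).collect();
    assert_eq!(init, vec![0, 1, 2]);
}
```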
@@ -1 +1 @@
-Subproject commit 42ca0ef484fcc8437a0682cee23abe4b7c407d52
+Subproject commit 0a5421ceb238357b3634fb75234eba4d1dad643c
@@ -1 +1 @@
-Subproject commit 8e6aa3448515a0654e347b5e2510f1d4bc4d5a64
+Subproject commit d880e6ac2acf133dce640da24b9fb692844f02d4
@@ -1 +1 @@
-Subproject commit e647eb102890e8927f488bea12672b079eff8d9d
+Subproject commit f62e93c28323ed9637d0a205a0c256498674a509
@@ -1 +1 @@
-Subproject commit d3daa1f28e169087becbc5e2b49ac91ca0405a44
+Subproject commit 04892c1a6fc145602ac7367945fda9d4ee83c9fb
@@ -5,14 +5,10 @@
 [Fuchsia] is a modern open source operating system that's simple, secure,
 updatable, and performant.
 
-[Fuchsia]: https://fuchsia.dev/
-
 ## Target maintainers
 
 The [Fuchsia team]:
 
-[Fuchsia team]: https://team-api.infra.rust-lang.org/v1/teams/fuchsia.json
-
 - Tyler Mandry ([@tmandry](https://github.com/tmandry))
 - Dan Johnson ([@computerdruid](https://github.com/computerdruid))
 - David Koloski ([@djkoloski](https://github.com/djkoloski))
@@ -24,27 +20,162 @@ the members reported by the API. The API should be considered to be
 authoritative if this occurs. Instead of pinging individual members, use
 `@rustbot ping fuchsia` to contact the team on GitHub.
 
+## Table of contents
+
+1. [Requirements](#requirements)
+1. [Walkthrough structure](#walkthrough-structure)
+1. [Compiling a Rust binary targeting Fuchsia](#compiling-a-rust-binary-targeting-fuchsia)
+    1. [Targeting Fuchsia with rustup and cargo](#targeting-fuchsia-with-rustup-and-cargo)
+    1. [Targeting Fuchsia with a compiler built from source](#targeting-fuchsia-with-a-compiler-built-from-source)
+1. [Creating a Fuchsia package](#creating-a-fuchsia-package)
+    1. [Creating a Fuchsia component](#creating-a-fuchsia-component)
+    1. [Building a Fuchsia package](#building-a-fuchsia-package)
+1. [Publishing a Fuchsia package](#publishing-a-fuchsia-package)
+    1. [Creating a Fuchsia package repository](#creating-a-fuchsia-package-repository)
+    1. [Publishing Fuchsia package to repository](#publishing-fuchsia-package-to-repository)
+1. [Running a Fuchsia component on an emulator](#running-a-fuchsia-component-on-an-emulator)
+    1. [Starting the Fuchsia emulator](#starting-the-fuchsia-emulator)
+    1. [Watching emulator logs](#watching-emulator-logs)
+    1. [Serving a Fuchsia package](#serving-a-fuchsia-package)
+    1. [Running a Fuchsia component](#running-a-fuchsia-component)
+1. [`.gitignore` extensions](#gitignore-extensions)
+1. [Testing](#testing)
+    1. [Running unit tests](#running-unit-tests)
+    1. [Running the compiler test suite](#running-the-compiler-test-suite)
+
 ## Requirements
 
-This target is cross-compiled from a host environment. Development may be done
-from the [source tree] or using the Fuchsia SDK.
+This target is cross-compiled from a host environment. You will need a recent
+copy of the [Fuchsia SDK], which provides the tools, libraries, and binaries
+required to build and link programs for Fuchsia.
 
-[source tree]: https://fuchsia.dev/fuchsia-src/get-started/learn/build
+Development may also be done from the [source tree].
 
-Fuchsia targets support std and follow the `sysv64` calling convention on
+Fuchsia targets support `std` and follow the `sysv64` calling convention on
 x86_64. Fuchsia binaries use the ELF file format.
 
-## Building the target
+## Walkthrough structure
+
+This walkthrough will cover:
+
+1. Compiling a Rust binary targeting Fuchsia.
+1. Building a Fuchsia package.
+1. Publishing and running a Fuchsia package to a Fuchsia emulator.
+
+For the purposes of this walkthrough, we will only target `x86_64-fuchsia`.
+
+## Compiling a Rust binary targeting Fuchsia
+
+Today, there are two main ways to build a Rust binary targeting Fuchsia
+using the Fuchsia SDK:
+1. Allow [rustup] to handle the installation of Fuchsia targets for you.
+1. Build a toolchain locally that can target Fuchsia.
+
+### Targeting Fuchsia with rustup and cargo
+
+The easiest way to build a Rust binary targeting Fuchsia is by allowing [rustup]
+to handle the installation of Fuchsia targets for you. This can be done by issuing
+the following commands:
+
+```sh
+rustup target add x86_64-fuchsia
+rustup target add aarch64-fuchsia
+```
+
+After installing our Fuchsia targets, we can now compile a Rust binary that targets
+Fuchsia.
+
+To create our Rust project, we can issue a standard `cargo` command as follows:
+
+**From base working directory**
+```sh
+cargo new hello_fuchsia
+```
+
+The rest of this walkthrough will take place from `hello_fuchsia`, so we can
+change into that directory now:
+
+```sh
+cd hello_fuchsia
+```
+
+*Note: From this point onwards, all commands will be issued from the `hello_fuchsia/`
+directory, and all `hello_fuchsia/` prefixes will be removed from references for sake of brevity.*
+
+We can edit our `src/main.rs` to include a test as follows:
+
+**`src/main.rs`**
+```rust
+fn main() {
+    println!("Hello Fuchsia!");
+}
+
+#[test]
+fn it_works() {
+    assert_eq!(2 + 2, 4);
+}
+```
+
+In addition to the standard workspace created, we will want to create a
+`.cargo/config.toml` file to link necessary libraries
+during compilation:
+
+**`.cargo/config.toml`**
+```txt
+[target.x86_64-fuchsia]
+
+rustflags = [
+    "-Lnative", "<SDK_PATH>/arch/x64/sysroot/lib",
+    "-Lnative", "<SDK_PATH>/arch/x64/lib"
+]
+```
+
+*Note: Make sure to fill out `<SDK_PATH>` with the path to the downloaded [Fuchsia SDK].*
+
+In total, our new project will look like:
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┣━ src/
+┃  ┗━ main.rs
+┣━ Cargo.toml
+┗━ .cargo/
+   ┗━ config.toml
+```
+
+Finally, we can build our Rust binary as:
+
+```sh
+cargo build --target x86_64-fuchsia
+```
+
+Now we have a Rust binary at `target/x86_64-fuchsia/debug/hello_fuchsia`,
+targeting our desired Fuchsia target.
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┣━ src/
+┃  ┗━ main.rs
+┣━ target/
+┃  ┗━ x86_64-fuchsia/
+┃     ┗━ debug/
+┃        ┗━ hello_fuchsia
+┣━ Cargo.toml
+┗━ .cargo/
+   ┗━ config.toml
+```
+
+### Targeting Fuchsia with a compiler built from source
+
+An alternative to the first workflow is to target Fuchsia by using
+`rustc` built from source.
 
 Before building Rust for Fuchsia, you'll need a clang toolchain that supports
 Fuchsia as well. A recent version (14+) of clang should be sufficient to compile
 Rust for Fuchsia.
 
-You'll also need a recent copy of the [Fuchsia SDK], which provides the tools
-and binaries required to build and link programs for Fuchsia.
-
-[Fuchsia SDK]: https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/core
-
 x86-64 and AArch64 Fuchsia targets can be enabled using the following
 configuration.
@@ -75,15 +206,21 @@ export CARGO_TARGET_X86_64_FUCHSIA_RUSTFLAGS="-C link-arg=--sysroot=${SDK_PATH}/
 These can be run together in a shell environment by executing
 `(source config-env.sh && ./x.py install)`.
 
-## Building Rust programs
+Once `rustc` is installed, we can create a new working directory to work from,
+`hello_fuchsia` along with `hello_fuchsia/src`:
 
-After compiling Rust binaries, you'll need to build a component, package it, and
-serve it to a Fuchsia device or emulator. All of this can be done using the
-Fuchsia SDK.
+```sh
+mkdir hello_fuchsia
+cd hello_fuchsia
+mkdir src
+```
 
-As an example, we'll compile and run this simple program on a Fuchsia emulator:
+*Note: From this point onwards, all commands will be issued from the `hello_fuchsia/`
+directory, and all `hello_fuchsia/` prefixes will be removed from references for sake of brevity.*
 
-**`hello_fuchsia.rs`**
+There, we can create a new file named `src/hello_fuchsia.rs`:
+
+**`src/hello_fuchsia.rs`**
 ```rust
 fn main() {
     println!("Hello Fuchsia!");
@@ -95,62 +232,14 @@ fn it_works() {
 }
 ```
 
-Create a new file named `hello_fuchsia.rs` and fill out its contents with that
-code.
-
-### Create a package
-
-On Fuchsia, a package is the unit of distribution for software. We'll need to
-create a new package directory where we will place files like our finished
-binary and any data it may need. The working directory will have this layout:
-
 **Current directory structure**
 ```txt
-hello_fuchsia.rs
-hello_fuchsia.cml
-package
-┣━ bin
-┃  ┗━ hello_fuchsia
-┣━ meta
-┃  ┣━ package
-┃  ┗━ hello_fuchsia.cm
-┗━ hello_fuchsia.manifest
+hello_fuchsia/
+┗━ src/
+   ┗━ hello_fuchsia.rs
 ```
-
-Make the `package`, `package/bin`, and `package/meta` directories and create the
-following files inside:
-
-**`package/meta/package`**
-```json
-{
-    "name": "hello_fuchsia",
-    "version": "0"
-}
-```
-
-The `package` file describes our package's name and version number. Every
-package must contain one.
-
-**`package/hello_fuchsia.manifest`**
-```txt
-bin/hello_fuchsia=package/bin/hello_fuchsia
-lib/ld.so.1=<SDK_PATH>/arch/x64/sysroot/dist/lib/ld.so.1
-lib/libfdio.so=<SDK_PATH>/arch/x64/dist/libfdio.so
-meta/package=package/meta/package
-meta/hello_fuchsia.cm=package/meta/hello_fuchsia.cm
-```
-
-*Note: Relative manifest paths are resolved starting from the working directory
-of `pm`. Make sure to fill out `<SDK_PATH>` with the path to the downloaded
-SDK.*
-
-The `.manifest` file will be used to describe the contents of the package by
-relating their location when installed to their location on the file system. You
-can use this to make a package pull files from other places, but for this
-example we'll just be placing everything in the `package` directory.
 
 ### Compiling a binary
 
-Using your freshly compiled `rustc`, you can compile a binary for Fuchsia using
+Using your freshly installed `rustc`, you can compile a binary for Fuchsia using
 the following options:
 
 * `--target x86_64-fuchsia`/`--target aarch64-fuchsia`: Targets the Fuchsia
@@ -167,19 +256,108 @@ Putting it all together:
 TARGET_ARCH="<x86_64-fuchsia|aarch64-fuchsia>"
 ARCH="<x64|aarch64>"
 
-rustc --target ${TARGET_ARCH} -Lnative=${SDK_PATH}/arch/${ARCH}/lib -Lnative=${SDK_PATH}/arch/${ARCH}/sysroot/lib -o package/bin/hello_fuchsia hello_fuchsia.rs
+rustc \
+    --target ${TARGET_ARCH} \
+    -Lnative=${SDK_PATH}/arch/${ARCH}/lib \
+    -Lnative=${SDK_PATH}/arch/${ARCH}/sysroot/lib \
+    --out-dir bin src/hello_fuchsia.rs
 ```
 
-### Bulding a component
+**Current directory structure**
+```txt
+hello_fuchsia/
+┣━ src/
+┃  ┗━ hello_fuchsia.rs
+┗━ bin/
+   ┗━ hello_fuchsia
+```
 
-On Fuchsia, components require a component manifest written in Fuchia's markup
+## Creating a Fuchsia package
+
+Before moving on, double check your directory structure:
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┣━ src/                     (if using rustc)
+┃  ┗━ hello_fuchsia.rs      ...
+┣━ bin/                     ...
+┃  ┗━ hello_fuchsia         ...
+┣━ src/                     (if using cargo)
+┃  ┗━ main.rs               ...
+┗━ target/                  ...
+   ┗━ x86_64-fuchsia/       ...
+      ┗━ debug/             ...
+         ┗━ hello_fuchsia   ...
+```
+
+With our Rust binary built, we can move to creating a Fuchsia package.
+On Fuchsia, a package is the unit of distribution for software. We'll need to
+create a new package directory where we will place files like our finished
+binary and any data it may need.
+
+To start, make the `pkg`, and `pkg/meta` directories:
+
+```sh
+mkdir pkg
+mkdir pkg/meta
+```
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┗━ meta/
+```
+
+Now, create the following files inside:
+
+**`pkg/meta/package`**
+```json
+{
+    "name": "hello_fuchsia",
+    "version": "0"
+}
+```
+
+The `package` file describes our package's name and version number. Every
+package must contain one.
+
+**`pkg/hello_fuchsia.manifest`**
+```txt
+bin/hello_fuchsia=target/x86_64-fuchsia/debug/hello_fuchsia     # If using cargo...
+bin/hello_fuchsia=bin/hello_fuchsia                             # If using rustc...
+lib/ld.so.1=<SDK_PATH>/arch/x64/sysroot/dist/lib/ld.so.1
+lib/libfdio.so=<SDK_PATH>/arch/x64/dist/libfdio.so
+meta/package=pkg/meta/package
+meta/hello_fuchsia.cm=pkg/meta/hello_fuchsia.cm
+```
+
+*Note: Relative manifest paths are resolved starting from the working directory
+of `pm`. Make sure to fill out `<SDK_PATH>` with the path to the downloaded
+SDK.*
+
+The `.manifest` file will be used to describe the contents of the package by
+relating their location when installed to their location on the file system. The
+`bin/hello_fuchsia=` entry will be different depending on how your Rust binary
+was built, so choose accordingly.
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┗━ package
+   ┗━ hello_fuchsia.manifest
+```
+
+### Creating a Fuchsia component
+
+On Fuchsia, components require a component manifest written in Fuchsia's markup
 language called CML. The Fuchsia devsite contains an [overview of CML] and a
 [reference for the file format]. Here's a basic one that can run our single binary:
 
 [overview of CML]: https://fuchsia.dev/fuchsia-src/concepts/components/v2/component_manifests
 [reference for the file format]: https://fuchsia.dev/reference/cml
 
-**`hello_fuchsia.cml`**
+**`pkg/hello_fuchsia.cml`**
 ```txt
 {
     include: [ "syslog/client.shard.cml" ],
@@ -190,43 +368,152 @@ language called CML. The Fuchsia devsite contains an [overview of CML] and a
 }
 ```
 
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┗━ package
+   ┣━ hello_fuchsia.manifest
+   ┗━ hello_fuchsia.cml
+```
+
 Now we can compile that CML into a component manifest:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/cmc compile hello_fuchsia.cml --includepath ${SDK_PATH}/pkg -o package/meta/hello_fuchsia.cm
+${SDK_PATH}/tools/${ARCH}/cmc compile \
+    pkg/hello_fuchsia.cml \
+    --includepath ${SDK_PATH}/pkg \
+    -o pkg/meta/hello_fuchsia.cm
 ```
 
-`--includepath` tells the compiler where to look for `include`s from our CML.
-In our case, we're only using `syslog/client.shard.cml`.
+**Current directory structure**
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┣━ package
+   ┃  ┗━ hello_fuchsia.cm
+   ┣━ hello_fuchsia.manifest
+   ┗━ hello_fuchsia.cml
+```
 
-### Building and publishing a package
+*Note: `--includepath` tells the compiler where to look for `include`s from our CML.
+In our case, we're only using `syslog/client.shard.cml`.*
 
-Next, we'll build our package as defined by our manifest:
+### Building a Fuchsia package
+
+Next, we'll build a package manifest as defined by our manifest:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/pm -o hello_fuchsia -m package/hello_fuchsia.manifest build -output-package-manifest hello_fuchsia_manifest
+${SDK_PATH}/tools/${ARCH}/pm \
+    -o hello_fuchsia_manifest \
+    -m pkg/hello_fuchsia.manifest \
+    build \
+    -output-package-manifest hello_fuchsia_package_manifest
 ```
 
-This will produce `hello_fuchsia_manifest` which is a package manifest we can
-publish directly to a repository. We can set up that repository with:
+This will produce `pkg/hello_fuchsia_manifest/` which is a package manifest we can
+publish directly to a repository.
+
+**Current directory structure**
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┣━ package
+   ┃  ┗━ hello_fuchsia.cm
+   ┣━ hello_fuchsia_manifest/
+   ┃  ┗━ ...
+   ┣━ hello_fuchsia.manifest
+   ┣━ hello_fuchsia.cml
+   ┗━ hello_fuchsia_package_manifest
+```
+
+We are now ready to publish the package.
+
+## Publishing a Fuchsia package
+
+With our package and component manifests setup,
+we can now publish our package. The first step will
+be to create a Fuchsia package repository to publish
+to.
+
+### Creating a Fuchsia package repository
+
+We can set up our repository with:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/pm newrepo -repo repo
+${SDK_PATH}/tools/${ARCH}/pm newrepo \
+    -repo pkg/repo
 ```
 
-And then publish our new package to that repository with:
+**Current directory structure**
+```txt
+hello_fuchsia/
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┣━ package
+   ┃  ┗━ hello_fuchsia.cm
+   ┣━ hello_fuchsia_manifest/
+   ┃  ┗━ ...
+   ┣━ repo/
+   ┃  ┗━ ...
+   ┣━ hello_fuchsia.manifest
+   ┣━ hello_fuchsia.cml
+   ┗━ hello_fuchsia_package_manifest
+```
+
+## Publishing Fuchsia package to repository
+
+We can publish our new package to that repository with:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/pm publish -repo repo -lp -f <(echo "hello_fuchsia_manifest")
+${SDK_PATH}/tools/${ARCH}/pm publish \
+    -repo repo \
+    -lp -f <(echo "hello_fuchsia_package_manifest")
 ```
 
-Then we can add it to `ffx`'s package server as `hello-fuchsia` using:
+Then we can add the repository to `ffx`'s package server as `hello-fuchsia` using:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/ffx repository add-from-pm repo -r hello-fuchsia
+${SDK_PATH}/tools/${ARCH}/ffx repository add-from-pm \
+    repo \
+    -r hello-fuchsia
 ```
 
-### Starting the emulator
+## Running a Fuchsia component on an emulator
+
+At this point, we are ready to run our Fuchsia
+component. For reference, our final directory
+structure will look like:
+
+**Final directory structure**
+```txt
+hello_fuchsia/
+┣━ src/                     (if using rustc)
+┃  ┗━ hello_fuchsia.rs      ...
+┣━ bin/                     ...
+┃  ┗━ hello_fuchsia         ...
+┣━ src/                     (if using cargo)
+┃  ┗━ main.rs               ...
+┣━ target/                  ...
+┃  ┗━ x86_64-fuchsia/       ...
+┃     ┗━ debug/             ...
+┃        ┗━ hello_fuchsia   ...
+┗━ pkg/
+   ┣━ meta/
+   ┃  ┣━ package
+   ┃  ┗━ hello_fuchsia.cm
+   ┣━ hello_fuchsia_manifest/
+   ┃  ┗━ ...
+   ┣━ repo/
+   ┃  ┗━ ...
+   ┣━ hello_fuchsia.manifest
+   ┣━ hello_fuchsia.cml
+   ┗━ hello_fuchsia_package_manifest
+```
+
+### Starting the Fuchsia emulator
 
 Start a Fuchsia emulator in a new terminal using:
@@ -235,50 +522,83 @@ ${SDK_PATH}/tools/${ARCH}/ffx product-bundle get workstation_eng.qemu-${ARCH}
 ${SDK_PATH}/tools/${ARCH}/ffx emu start workstation_eng.qemu-${ARCH} --headless
 ```
 
-Once the emulator is running, start a package repository server to serve our
+### Watching emulator logs
+
+Once the emulator is running, open a separate terminal to watch the emulator logs:
+
+**In separate terminal**
+```sh
+${SDK_PATH}/tools/${ARCH}/ffx log \
+    --since now
+```
+
+### Serving a Fuchsia package
+
+Now, start a package repository server to serve our
 package to the emulator:
 
 ```sh
 ${SDK_PATH}/tools/${ARCH}/ffx repository server start
 ```
 
-Once the repository server is up and running, register our repository:
+Once the repository server is up and running, register it with the target Fuchsia system running in the emulator:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/ffx target repository register --repository hello-fuchsia
+${SDK_PATH}/tools/${ARCH}/ffx target repository register \
+    --repository hello-fuchsia
 ```
 
-And watch the logs from the emulator in a separate terminal:
-
-```sh
-${SDK_PATH}/tools/${ARCH}/ffx log --since now
-```
+### Running a Fuchsia component
 
 Finally, run the component:
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/ffx component run fuchsia-pkg://hello-fuchsia/hello_fuchsia#meta/hello_fuchsia.cm
+${SDK_PATH}/tools/${ARCH}/ffx component run \
+    fuchsia-pkg://hello-fuchsia/hello_fuchsia_manifest#meta/hello_fuchsia.cm
 ```
 
 On reruns of the component, the `--recreate` argument may also need to be
 passed.
 
 ```sh
-${SDK_PATH}/tools/${ARCH}/ffx component run --recreate fuchsia-pkg://hello-fuchsia/hello_fuchsia#meta/hello_fuchsia.cm
+${SDK_PATH}/tools/${ARCH}/ffx component run \
+    --recreate \
+    fuchsia-pkg://hello-fuchsia/hello_fuchsia_manifest#meta/hello_fuchsia.cm
 ```
 
+## `.gitignore` extensions
+
+Optionally, we can create/extend our `.gitignore` file to ignore files and
+directories that are not helpful to track:
+
+```txt
+pkg/repo
+pkg/meta/hello_fuchsia.cm
+pkg/hello_fuchsia_manifest
+pkg/hello_fuchsia_package_manifest
+```
+
 ## Testing
 
 ### Running unit tests
 
-Tests can be run in the same way as a regular binary, simply by passing `--test`
-to the `rustc` invocation and then repackaging and rerunning. The test harness
-will run the applicable unit tests.
+Tests can be run in the same way as a regular binary.
+
+* If using `cargo`, you can simply pass `test --no-run`
+to the `cargo` invocation and then repackage and rerun the Fuchsia package. From our previous example,
+this would look like `cargo test --target x86_64-fuchsia --no-run`, and moving the executable
+binary path found from the line `Executable unittests src/main.rs (target/x86_64-fuchsia/debug/deps/hello_fuchsia-<HASH>)`
+into `pkg/hello_fuchsia.manifest`.
+
+* If using the compiled `rustc`, you can simply pass `--test`
+to the `rustc` invocation and then repackage and rerun the Fuchsia package.
+
+The test harness will run the applicable unit tests.
 
 Often when testing, you may want to pass additional command line arguments to
 your binary. Additional arguments can be set in the component manifest:
 
-**`hello_fuchsia.cml`**
+**`pkg/hello_fuchsia.cml`**
 ```txt
 {
     include: [ "syslog/client.shard.cml" ],
@@ -293,11 +613,20 @@ your binary. Additional arguments can be set in the component manifest:
 This will pass the argument `it_works` to the binary, filtering the tests to
 only those tests that match the pattern. There are many more configuration
 options available in CML including environment variables. More documentation is
-available on the [Fuchsia devsite](https://fuchsia.dev/reference/cml).
+available on the [Fuchsia devsite].
 
 ### Running the compiler test suite
 
 Running the Rust test suite on Fuchsia is [not currently supported], but work is
 underway to enable it.
+
+[Fuchsia team]: https://team-api.infra.rust-lang.org/v1/teams/fuchsia.json
+[Fuchsia]: https://fuchsia.dev/
+[source tree]: https://fuchsia.dev/fuchsia-src/get-started/learn/build
+[rustup]: https://rustup.rs/
+[cargo]: https://doc.rust-lang.org/cargo/
+[Fuchsia SDK]: https://chrome-infra-packages.appspot.com/p/fuchsia/sdk/core
+[overview of CML]: https://fuchsia.dev/fuchsia-src/concepts/components/v2/component_manifests
+[reference for the file format]: https://fuchsia.dev/reference/cml
+[Fuchsia devsite]: https://fuchsia.dev/reference/cml
+[not currently supported]: https://fxbug.dev/105393
@@ -83,12 +83,13 @@ pre, .rustdoc.source .example-wrap {
 }
 
 .search-results a:hover {
-	background-color: #777;
+	color: #fff !important;
+	background-color: #3c3c3c;
 }
 
 .search-results a:focus {
-	color: #000 !important;
-	background-color: #c6afb3;
+	color: #fff !important;
+	background-color: #3c3c3c;
 }
 .search-results a {
 	color: #0096cf;
@@ -0,0 +1,20 @@
+// min-llvm-version: 15.0.0
+// compile-flags: -O
+
+#![crate_type = "lib"]
+
+#[no_mangle]
+pub fn u16_be_to_arch(mut data: [u8; 2]) -> [u8; 2] {
+    // CHECK-LABEL: @u16_be_to_arch
+    // CHECK: @llvm.bswap.i16
+    data.reverse();
+    data
+}
+
+#[no_mangle]
+pub fn u32_be_to_arch(mut data: [u8; 4]) -> [u8; 4] {
+    // CHECK-LABEL: @u32_be_to_arch
+    // CHECK: @llvm.bswap.i32
+    data.reverse();
+    data
+}
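This regression test for #85872 pins down that reversing a small fixed-size byte array compiles to a single `llvm.bswap` intrinsic rather than element-by-element shuffling. The user-visible form of that operation is the standard endianness helpers; a small demonstration:

```rust
fn main() {
    // Decoding big-endian bytes must byte-swap on little-endian hosts;
    // the codegen test above guarantees that swap is one bswap instruction.
    assert_eq!(u16::from_be_bytes([0x12, 0x34]), 0x1234);
    assert_eq!(u32::from_be_bytes([0xDE, 0xAD, 0xBE, 0xEF]), 0xDEAD_BEEF);

    // Reversing the array by hand, as the test functions do, is the same
    // operation regardless of host byte order.
    let mut bytes = 0x1234u16.to_be_bytes();
    bytes.reverse();
    assert_eq!(bytes, 0x1234u16.to_le_bytes());
}
```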
@@ -29,6 +29,23 @@ assert-css: (
     {"color": "rgb(120, 135, 151)"},
 )
 
+// Checking the `<a>` container.
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(0, 150, 207)", "background-color": "rgba(0, 0, 0, 0)"},
+)
+
+// Checking color and background on hover.
+move-cursor-to: "//*[@class='desc']//*[text()='Just a normal struct.']"
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']",
+    {"color": "rgb(255, 255, 255)"},
+)
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(255, 255, 255)", "background-color": "rgb(60, 60, 60)"},
+)
+
 // Dark theme
 local-storage: {
     "rustdoc-theme": "dark",
@@ -54,6 +71,23 @@ assert-css: (
     {"color": "rgb(221, 221, 221)"},
 )
 
+// Checking the `<a>` container.
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(221, 221, 221)", "background-color": "rgba(0, 0, 0, 0)"},
+)
+
+// Checking color and background on hover.
+move-cursor-to: "//*[@class='desc']//*[text()='Just a normal struct.']"
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']",
+    {"color": "rgb(221, 221, 221)"},
+)
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(221, 221, 221)", "background-color": "rgb(119, 119, 119)"},
+)
+
 // Light theme
 local-storage: {"rustdoc-theme": "light", "rustdoc-use-system-theme": "false"}
 reload:
@@ -75,6 +109,23 @@ assert-css: (
     {"color": "rgb(0, 0, 0)"},
 )
 
+// Checking the `<a>` container.
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(0, 0, 0)", "background-color": "rgba(0, 0, 0, 0)"},
+)
+
+// Checking color and background on hover.
+move-cursor-to: "//*[@class='desc']//*[text()='Just a normal struct.']"
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']",
+    {"color": "rgb(0, 0, 0)"},
+)
+assert-css: (
+    "//*[@class='result-name']/*[text()='test_docs::']/ancestor::a",
+    {"color": "rgb(0, 0, 0)", "background-color": "rgb(221, 221, 221)"},
+)
+
 // Check the alias more specifically in the dark theme.
 goto: file://|DOC_PATH|/test_docs/index.html
 // We set the theme so we're sure that the correct values will be used, whatever the computer
@@ -247,20 +247,6 @@ dependencies = [
  "cfg-if",
 ]
 
-[[package]]
-name = "crossbeam"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
-dependencies = [
- "cfg-if",
- "crossbeam-channel",
- "crossbeam-deque",
- "crossbeam-epoch",
- "crossbeam-queue",
- "crossbeam-utils",
-]
-
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.6"
@@ -296,16 +282,6 @@ dependencies = [
  "scopeguard",
 ]
 
-[[package]]
-name = "crossbeam-queue"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7"
-dependencies = [
- "cfg-if",
- "crossbeam-utils",
-]
-
 [[package]]
 name = "crossbeam-utils"
 version = "0.8.11"
@@ -728,6 +704,7 @@ dependencies = [
  "ide-db",
  "itertools",
  "parser",
+ "stdx",
  "syntax",
  "test-utils",
  "text-edit",
@@ -895,9 +872,9 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.93.0"
+version = "0.93.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212"
+checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
 dependencies = [
  "bitflags",
  "serde",
@@ -1178,7 +1155,6 @@ dependencies = [
 name = "proc-macro-srv"
 version = "0.0.0"
 dependencies = [
- "crossbeam",
  "expect-test",
  "libloading",
  "mbe",
@@ -1254,6 +1230,26 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "protobuf"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
+dependencies = [
+ "once_cell",
+ "protobuf-support",
+ "thiserror",
+]
+
+[[package]]
+name = "protobuf-support"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
+dependencies = [
+ "thiserror",
+]
+
 [[package]]
 name = "pulldown-cmark"
 version = "0.9.2"
@@ -1385,6 +1381,7 @@ dependencies = [
  "project-model",
  "rayon",
  "rustc-hash",
+ "scip",
  "serde",
  "serde_json",
  "sourcegen",
@@ -1471,6 +1468,15 @@ dependencies = [
  "winapi-util",
 ]
 
+[[package]]
+name = "scip"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
+dependencies = [
+ "protobuf",
+]
+
 [[package]]
 name = "scoped-tls"
 version = "1.0.0"
@@ -1656,6 +1662,26 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
 
+[[package]]
+name = "thiserror"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "thread_local"
 version = "1.1.4"
@@ -1896,6 +1922,7 @@ dependencies = [
  "indexmap",
  "paths",
  "rustc-hash",
+ "stdx",
 ]
 
 [[package]]
@@ -9,10 +9,11 @@
 use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
 
 use cfg::CfgOptions;
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
+use stdx::hash::{NoHashHashMap, NoHashHashSet};
 use syntax::SmolStr;
 use tt::Subtree;
-use vfs::{file_set::FileSet, FileId, VfsPath};
+use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};
 
 /// Files are grouped into source roots. A source root is a directory on the
 /// file systems which is watched for changes. Typically it corresponds to a
@@ -31,22 +32,30 @@ pub struct SourceRoot {
     /// Libraries are considered mostly immutable, this assumption is used to
     /// optimize salsa's query structure
     pub is_library: bool,
-    pub(crate) file_set: FileSet,
+    file_set: FileSet,
 }
 
 impl SourceRoot {
     pub fn new_local(file_set: FileSet) -> SourceRoot {
         SourceRoot { is_library: false, file_set }
     }
 
     pub fn new_library(file_set: FileSet) -> SourceRoot {
         SourceRoot { is_library: true, file_set }
     }
 
     pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
         self.file_set.path_for_file(file)
     }
 
+    pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
+        self.file_set.file_for_path(path)
+    }
+
+    pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+        self.file_set.resolve_path(path)
+    }
+
     pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
         self.file_set.iter()
     }
@@ -72,12 +81,19 @@ impl SourceRoot {
 /// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
 #[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
 pub struct CrateGraph {
-    arena: FxHashMap<CrateId, CrateData>,
+    arena: NoHashHashMap<CrateId, CrateData>,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 pub struct CrateId(pub u32);
 
+impl stdx::hash::NoHashHashable for CrateId {}
+impl std::hash::Hash for CrateId {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.0.hash(state);
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CrateName(SmolStr);
 
@@ -342,7 +358,7 @@ impl CrateGraph {
         // Check if adding a dep from `from` to `to` creates a cycle. To figure
         // that out, look for a path in the *opposite* direction, from `to` to
         // `from`.
-        if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
+        if let Some(path) = self.find_path(&mut NoHashHashSet::default(), dep.crate_id, from) {
             let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
             let err = CyclicDependenciesError { path };
             assert!(err.from().0 == from && err.to().0 == dep.crate_id);
@@ -365,7 +381,7 @@ impl CrateGraph {
     /// including the crate itself.
     pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
         let mut worklist = vec![of];
-        let mut deps = FxHashSet::default();
+        let mut deps = NoHashHashSet::default();
 
         while let Some(krate) = worklist.pop() {
             if !deps.insert(krate) {
@@ -382,10 +398,10 @@ impl CrateGraph {
     /// including the crate itself.
     pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
         let mut worklist = vec![of];
-        let mut rev_deps = FxHashSet::default();
+        let mut rev_deps = NoHashHashSet::default();
         rev_deps.insert(of);
 
-        let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
+        let mut inverted_graph = NoHashHashMap::<_, Vec<_>>::default();
         self.arena.iter().for_each(|(&krate, data)| {
             data.dependencies
                 .iter()
@@ -409,7 +425,7 @@ impl CrateGraph {
     /// come before the crate itself).
     pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
         let mut res = Vec::new();
-        let mut visited = FxHashSet::default();
+        let mut visited = NoHashHashSet::default();
 
         for krate in self.arena.keys().copied() {
             go(self, &mut visited, &mut res, krate);
@@ -419,7 +435,7 @@ impl CrateGraph {
 
         fn go(
             graph: &CrateGraph,
-            visited: &mut FxHashSet<CrateId>,
+            visited: &mut NoHashHashSet<CrateId>,
             res: &mut Vec<CrateId>,
             source: CrateId,
         ) {
@@ -459,7 +475,7 @@ impl CrateGraph {
 
     fn find_path(
         &self,
-        visited: &mut FxHashSet<CrateId>,
+        visited: &mut NoHashHashSet<CrateId>,
         from: CrateId,
         to: CrateId,
     ) -> Option<Vec<CrateId>> {
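`NoHashHashMap`/`NoHashHashSet` come from rust-analyzer's `stdx` crate: for keys like `CrateId(pub u32)` that are already small and well-distributed, running FxHash over them is pure overhead, so the "hash" is just the integer itself. A self-contained sketch of that kind of identity hasher using only std (the `stdx` types wrap essentially this idea):

```rust
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hasher};

// Passes a u32 key through unchanged instead of hashing it.
#[derive(Default)]
struct IdentityHasher(u64);

impl Hasher for IdentityHasher {
    fn finish(&self) -> u64 {
        self.0
    }
    fn write(&mut self, _bytes: &[u8]) {
        unimplemented!("only u32 keys are supported");
    }
    fn write_u32(&mut self, n: u32) {
        // `u32`'s Hash impl calls `write_u32`, so this is the whole "hash".
        self.0 = n as u64;
    }
}

type NoHashMap<V> = HashMap<u32, V, BuildHasherDefault<IdentityHasher>>;

fn main() {
    let mut crate_names: NoHashMap<&str> = NoHashMap::default();
    crate_names.insert(0, "core");
    crate_names.insert(1, "std");
    assert_eq!(crate_names.get(&1), Some(&"std"));
}
```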
@@ -8,7 +8,7 @@ pub mod fixture;
 
 use std::{panic, sync::Arc};
 
-use rustc_hash::FxHashSet;
+use stdx::hash::NoHashHashSet;
 use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
 
 pub use crate::{
@@ -58,7 +58,7 @@ pub trait FileLoader {
     /// Text of the file.
     fn file_text(&self, file_id: FileId) -> Arc<String>;
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
-    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
+    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>>;
 }
 
 /// Database which stores all significant input facts: source code and project
@@ -94,10 +94,10 @@ pub trait SourceDatabaseExt: SourceDatabase {
     #[salsa::input]
     fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
 
-    fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
+    fn source_root_crates(&self, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>>;
 }
 
-fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>> {
     let graph = db.crate_graph();
     let res = graph
         .iter()
@@ -120,10 +120,10 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
         // FIXME: this *somehow* should be platform agnostic...
         let source_root = self.0.file_source_root(path.anchor);
         let source_root = self.0.source_root(source_root);
-        source_root.file_set.resolve_path(path)
+        source_root.resolve_path(path)
     }
 
-    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
         let _p = profile::span("relevant_crates");
         let source_root = self.0.file_source_root(file_id);
         self.0.source_root_crates(source_root)
@@ -125,6 +125,7 @@ pub enum Progress {
     DidCheckCrate(String),
     DidFinish(io::Result<()>),
     DidCancel,
+    DidFailToRestart(String),
 }
 
 enum Restart {
@@ -193,10 +194,11 @@ impl FlycheckActor {
                 self.progress(Progress::DidStart);
             }
             Err(error) => {
-                tracing::error!(
-                    command = ?self.check_command(),
-                    %error, "failed to restart flycheck"
-                );
+                self.progress(Progress::DidFailToRestart(format!(
+                    "Failed to run the following command: {:?} error={}",
+                    self.check_command(),
+                    error
+                )));
             }
         }
     }
@ -2,7 +2,7 @@
|
|||
|
||||
use std::sync::Arc;
|
||||
|
||||
use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind};
|
||||
use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind};
|
||||
use smallvec::SmallVec;
|
||||
use syntax::ast;
|
||||
|
||||
|
@ -12,7 +12,10 @@ use crate::{
|
|||
db::DefDatabase,
|
||||
intern::Interned,
|
||||
item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
|
||||
nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap},
|
||||
nameres::{
|
||||
attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind,
|
||||
DefMap,
|
||||
},
|
||||
type_ref::{TraitRef, TypeBound, TypeRef},
|
||||
visibility::RawVisibility,
|
||||
AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
|
||||
|
@ -210,6 +213,13 @@ pub struct TraitData {
|
|||
|
||||
impl TraitData {
|
||||
pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
|
||||
db.trait_data_with_diagnostics(tr).0
|
||||
}
|
||||
|
||||
pub(crate) fn trait_data_with_diagnostics_query(
|
||||
db: &dyn DefDatabase,
|
||||
tr: TraitId,
|
||||
) -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>) {
|
||||
let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
|
||||
let item_tree = tree_id.item_tree(db);
|
||||
let tr_def = &item_tree[tree_id.value];
|
||||
|
@ -229,17 +239,20 @@ impl TraitData {
|
|||
let mut collector =
|
||||
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
|
||||
        collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);

        let (items, attribute_calls) = collector.finish();
        let (items, attribute_calls, diagnostics) = collector.finish();

        Arc::new(TraitData {
            name,
            attribute_calls,
            items,
            is_auto,
            is_unsafe,
            visibility,
            skip_array_during_method_dispatch,
        })
        (
            Arc::new(TraitData {
                name,
                attribute_calls,
                items,
                is_auto,
                is_unsafe,
                visibility,
                skip_array_during_method_dispatch,
            }),
            Arc::new(diagnostics),
        )
    }

    pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {

@ -280,7 +293,14 @@ pub struct ImplData {

impl ImplData {
    pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
        let _p = profile::span("impl_data_query");
        db.impl_data_with_diagnostics(id).0
    }

    pub(crate) fn impl_data_with_diagnostics_query(
        db: &dyn DefDatabase,
        id: ImplId,
    ) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>) {
        let _p = profile::span("impl_data_with_diagnostics_query");
        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);

        let item_tree = tree_id.item_tree(db);

@ -293,10 +313,13 @@ impl ImplData {
            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
        collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);

        let (items, attribute_calls) = collector.finish();
        let (items, attribute_calls, diagnostics) = collector.finish();
        let items = items.into_iter().map(|(_, item)| item).collect();

        Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls })
        (
            Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
            Arc::new(diagnostics),
        )
    }

    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {

@ -437,6 +460,7 @@ struct AssocItemCollector<'a> {
    db: &'a dyn DefDatabase,
    module_id: ModuleId,
    def_map: Arc<DefMap>,
    inactive_diagnostics: Vec<DefDiagnostic>,
    container: ItemContainerId,
    expander: Expander,

@ -459,15 +483,21 @@ impl<'a> AssocItemCollector<'a> {
            expander: Expander::new(db, file_id, module_id),
            items: Vec::new(),
            attr_calls: Vec::new(),
            inactive_diagnostics: Vec::new(),
        }
    }

    fn finish(
        self,
    ) -> (Vec<(Name, AssocItemId)>, Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>) {
    ) -> (
        Vec<(Name, AssocItemId)>,
        Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
        Vec<DefDiagnostic>,
    ) {
        (
            self.items,
            if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
            self.inactive_diagnostics,
        )
    }

@ -479,6 +509,12 @@ impl<'a> AssocItemCollector<'a> {
        'items: for &item in assoc_items {
            let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
            if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
                self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
                    self.module_id.local_id,
                    InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
                    attrs.cfg().unwrap(),
                    self.expander.cfg_options().clone(),
                ));
                continue;
            }

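The split above is this change's recurring pattern: the expensive query (`trait_data_with_diagnostics_query`, `impl_data_with_diagnostics_query`) computes both the data and its `DefDiagnostic`s, while the old cheap query keeps its signature and merely projects out the first tuple field. A minimal sketch of that shape outside of salsa — `Data` and `Diagnostic` here are illustrative stand-ins, not the real types:

    use std::sync::Arc;

    struct Data;
    struct Diagnostic;

    // The expensive computation returns both the data and its diagnostics...
    fn data_with_diagnostics() -> (Arc<Data>, Arc<Vec<Diagnostic>>) {
        (Arc::new(Data), Arc::new(Vec::new()))
    }

    // ...and the cheap accessor just drops the diagnostics, exactly like
    // `impl_data_query` delegating to `db.impl_data_with_diagnostics(id).0`.
    fn data() -> Arc<Data> {
        data_with_diagnostics().0
    }

    fn main() {
        let _ = data();
        let (_data, diagnostics) = data_with_diagnostics();
        assert!(diagnostics.is_empty());
    }
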
@ -20,7 +20,7 @@ use crate::{
    intern::Interned,
    item_tree::{AttrOwner, ItemTree},
    lang_item::{LangItemTarget, LangItems},
    nameres::DefMap,
    nameres::{diagnostics::DefDiagnostic, DefMap},
    visibility::{self, Visibility},
    AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
    ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,

@ -106,9 +106,16 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
    #[salsa::invoke(ImplData::impl_data_query)]
    fn impl_data(&self, e: ImplId) -> Arc<ImplData>;

    #[salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
    fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>);

    #[salsa::invoke(TraitData::trait_data_query)]
    fn trait_data(&self, e: TraitId) -> Arc<TraitData>;

    #[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
    fn trait_data_with_diagnostics(&self, tr: TraitId)
        -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>);

    #[salsa::invoke(TypeAliasData::type_alias_data_query)]
    fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;

@ -73,7 +73,7 @@ impl DefDiagnostic {
        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
    }

    pub(super) fn unconfigured_code(
    pub fn unconfigured_code(
        container: LocalModuleId,
        ast: AstId<ast::Item>,
        cfg: CfgExpr,

@ -10,7 +10,7 @@ use base_db::{
    SourceDatabase, Upcast,
};
use hir_expand::{db::AstDatabase, InFile};
use rustc_hash::FxHashSet;
use stdx::hash::NoHashHashSet;
use syntax::{algo, ast, AstNode};

use crate::{

@ -76,7 +76,7 @@ impl FileLoader for TestDB {
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

@ -6,7 +6,7 @@
//!
//! This usually involves resolving names, collecting generic arguments etc.
use std::{
    cell::{Cell, RefCell},
    cell::{Cell, RefCell, RefMut},
    iter,
    sync::Arc,
};

@ -330,26 +330,26 @@ impl<'a> TyLoweringContext<'a> {
                }
            }
            TypeRef::Macro(macro_call) => {
                let (expander, recursion_start) = {
                    let mut expander = self.expander.borrow_mut();
                    if expander.is_some() {
                        (Some(expander), false)
                    } else {
                        *expander = Some(Expander::new(
                            self.db.upcast(),
                            macro_call.file_id,
                            self.resolver.module(),
                        ));
                        (Some(expander), true)
                let (mut expander, recursion_start) = {
                    match RefMut::filter_map(self.expander.borrow_mut(), Option::as_mut) {
                        Ok(expander) => (expander, false),
                        Err(expander) => (
                            RefMut::map(expander, |it| {
                                it.insert(Expander::new(
                                    self.db.upcast(),
                                    macro_call.file_id,
                                    self.resolver.module(),
                                ))
                            }),
                            true,
                        ),
                    }
                };
                let ty = if let Some(mut expander) = expander {
                    let expander_mut = expander.as_mut().unwrap();
                let ty = {
                    let macro_call = macro_call.to_node(self.db.upcast());
                    match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
                    match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
                        Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
                            let ctx =
                                LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
                            let ctx = LowerCtx::new(self.db.upcast(), expander.current_file_id());
                            let type_ref = TypeRef::from_ast(&ctx, expanded);

                            drop(expander);

@ -364,8 +364,6 @@ impl<'a> TyLoweringContext<'a> {
                        }
                        _ => None,
                    }
                } else {
                    None
                };
                if recursion_start {
                    *self.expander.borrow_mut() = None;

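The rewritten initialization leans on `RefMut::filter_map` and `RefMut::map` (std, stable since Rust 1.63) to hand out a `RefMut` to the inner `Expander` whether or not the `RefCell<Option<Expander>>` was already populated. A self-contained sketch of that borrow-narrowing pattern, with a placeholder `Expander`:

    use std::cell::{RefCell, RefMut};

    struct Expander(u32); // stand-in for the real hir-ty Expander

    fn get_or_init(cell: &RefCell<Option<Expander>>) -> (RefMut<'_, Expander>, bool) {
        match RefMut::filter_map(cell.borrow_mut(), Option::as_mut) {
            // Already initialized: narrow the borrow to the inner value.
            Ok(expander) => (expander, false),
            // Empty: insert a fresh value, then narrow the borrow to it.
            Err(empty) => (RefMut::map(empty, |slot| slot.insert(Expander(0))), true),
        }
    }

    fn main() {
        let cell = RefCell::new(None);
        let (expander, first_use) = get_or_init(&cell);
        assert!(first_use);
        assert_eq!(expander.0, 0);
    }
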
@ -479,7 +477,14 @@ impl<'a> TyLoweringContext<'a> {
                        TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
                    }
                    ParamLoweringMode::Variable => {
                        let idx = generics.param_idx(param_id.into()).expect("matching generics");
                        let idx = match generics.param_idx(param_id.into()) {
                            None => {
                                never!("no matching generics");
                                return (TyKind::Error.intern(Interner), None);
                            }
                            Some(idx) => idx,
                        };

                        TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
                    }
                }

@ -10,7 +10,7 @@ use base_db::{
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::AstDatabase;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;

@ -80,7 +80,7 @@ impl FileLoader for TestDB {
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

@ -102,7 +102,7 @@ impl TestDB {
        self.module_for_file_opt(file_id).unwrap()
    }

    pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
    pub(crate) fn extract_annotations(&self) -> NoHashHashMap<FileId, Vec<(TextRange, String)>> {
        let mut files = Vec::new();
        let crate_graph = self.crate_graph();
        for krate in crate_graph.iter() {

@ -1526,6 +1526,34 @@ unsafe impl Storage for InlineStorage {
    );
}

#[test]
fn gat_crash_3() {
    // FIXME: This test currently crashes rust analyzer in a debug build but not in a
    // release build (i.e. for the user). With the assumption that tests will always be run
    // in debug mode, we catch the unwind and expect that it panicked. See the
    // [`crate::utils::generics`] function for more information.
    cov_mark::check!(ignore_gats);
    std::panic::catch_unwind(|| {
        check_no_mismatches(
            r#"
trait Collection {
    type Item;
    type Member<T>: Collection<Item = T>;
    fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
}
struct ConstGen<T, const N: usize> {
    data: [T; N],
}
impl<T, const N: usize> Collection for ConstGen<T, N> {
    type Item = T;
    type Member<U> = ConstGen<U, N>;
}
        "#,
        );
    })
    .expect_err("must panic");
}

#[test]
fn cfgd_out_self_param() {
    cov_mark::check!(cfgd_out_self_param);

@ -176,10 +176,19 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
    let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
    if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
        let params = db.generic_params(def);
        let parent_params = &parent_generics.as_ref().unwrap().params;
        let has_consts =
            params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
        return if has_consts {
            // XXX: treat const generic associated types as not existing to avoid crashes (#11769)
        let parent_has_consts =
            parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
        return if has_consts || parent_has_consts {
            // XXX: treat const generic associated types as not existing to avoid crashes
            // (#11769)
            //
            // Note: Also crashes when the parent has const generics (also even if the GAT
            // doesn't use them), see `tests::regression::gat_crash_3` for an example.
            // Avoids that by disabling GATs when the parent (i.e. `impl` block) has
            // const generics (#12193).
            //
            // Chalk expects the inner associated type's parameters to come
            // *before*, not after the trait's generics as we've always done it.

@ -264,12 +273,8 @@ impl Generics {

    fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
        if param.parent == self.def {
            let (idx, (_local_id, data)) = self
                .params
                .iter()
                .enumerate()
                .find(|(_, (idx, _))| *idx == param.local_id)
                .unwrap();
            let (idx, (_local_id, data)) =
                self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
            let parent_len = self.parent_generics().map_or(0, Generics::len);
            Some((parent_len + idx, data))
        } else {

@ -511,6 +511,7 @@ impl Module {
            .collect()
    }

    /// Fills `acc` with the module's diagnostics.
    pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
        let _p = profile::span("Module::diagnostics").detail(|| {
            format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))

@ -531,11 +532,21 @@
                        m.diagnostics(db, acc)
                    }
                }
                ModuleDef::Trait(t) => {
                    for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
                        emit_def_diagnostic(db, acc, diag);
                    }
                    acc.extend(decl.diagnostics(db))
                }
                _ => acc.extend(decl.diagnostics(db)),
            }
        }

        for impl_def in self.impl_defs(db) {
            for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
                emit_def_diagnostic(db, acc, diag);
            }

            for item in impl_def.items(db) {
                let def: DefWithBody = match item {
                    AssocItem::Function(it) => it.into(),

@ -171,6 +171,25 @@ fn collect_used_generics<'gp>(
            ast::Type::RefType(ref_) => generics.extend(
                ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
            ),
            ast::Type::ArrayType(ar) => {
                if let Some(expr) = ar.expr() {
                    if let ast::Expr::PathExpr(p) = expr {
                        if let Some(path) = p.path() {
                            if let Some(name_ref) = path.as_single_name_ref() {
                                if let Some(param) = known_generics.iter().find(|gp| {
                                    if let ast::GenericParam::ConstParam(cp) = gp {
                                        cp.name().map_or(false, |n| n.text() == name_ref.text())
                                    } else {
                                        false
                                    }
                                }) {
                                    generics.push(param);
                                }
                            }
                        }
                    }
                }
            }
            _ => (),
        });
    // stable resort to lifetime, type, const

@ -357,4 +376,29 @@ impl<'outer, Outer, const OUTER: usize> () {
"#,
        );
    }

    #[test]
    fn issue_11197() {
        check_assist(
            extract_type_alias,
            r#"
struct Foo<T, const N: usize>
where
    [T; N]: Sized,
{
    arr: $0[T; N]$0,
}
            "#,
            r#"
type $0Type<T, const N: usize> = [T; N];

struct Foo<T, const N: usize>
where
    [T; N]: Sized,
{
    arr: Type<T, N>,
}
            "#,
        );
    }
}

@ -311,12 +311,16 @@ fn inline(
    } else {
        fn_body.clone_for_update()
    };
    if let Some(t) = body.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty()) {
        body.syntax()
            .descendants_with_tokens()
            .filter_map(NodeOrToken::into_token)
            .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
            .for_each(|tok| ted::replace(tok, t.syntax()));
    if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
        if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
            if let Some(t) = imp.self_ty() {
                body.syntax()
                    .descendants_with_tokens()
                    .filter_map(NodeOrToken::into_token)
                    .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
                    .for_each(|tok| ted::replace(tok, t.syntax()));
            }
        }
    }
    let usages_for_locals = |local| {
        Definition::Local(local)

@ -1221,6 +1225,31 @@ impl A {
fn main() {
    A(114514);
}
"#,
        )
    }

    #[test]
    fn inline_call_with_self_type_but_within_same_impl() {
        check_assist(
            inline_call,
            r#"
struct A(u32);
impl A {
    fn f() -> Self { Self(1919810) }
    fn main() {
        Self::f$0();
    }
}
"#,
            r#"
struct A(u32);
impl A {
    fn f() -> Self { Self(1919810) }
    fn main() {
        Self(1919810);
    }
}
"#,
        )
    }

@ -64,8 +64,11 @@ pub(crate) struct PathCompletionCtx {
    pub(super) qualified: Qualified,
    /// The parent of the path we are completing.
    pub(super) parent: Option<ast::Path>,
    #[allow(dead_code)]
    /// The path of which we are completing the segment
    pub(super) path: ast::Path,
    /// The path of which we are completing the segment in the original file
    pub(crate) original_path: Option<ast::Path>,
    pub(super) kind: PathKind,
    /// Whether the path segment has type args or not.
    pub(super) has_type_args: bool,

@ -588,12 +588,15 @@ impl<'a> CompletionContext<'a> {
        };

        let path = segment.parent_path();
        let original_path = find_node_in_file_compensated(sema, original_file, &path);

        let mut path_ctx = PathCompletionCtx {
            has_call_parens: false,
            has_macro_bang: false,
            qualified: Qualified::No,
            parent: None,
            path: path.clone(),
            original_path,
            kind: PathKind::Item { kind: ItemListKind::SourceFile },
            has_type_args: false,
            use_tree_parent: false,

@ -323,9 +323,7 @@ fn render_resolution_path(
            ..CompletionRelevance::default()
        });

        if let Some(ref_match) = compute_ref_match(completion, &ty) {
            item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
        }
        path_ref_match(completion, path_ctx, &ty, &mut item);
    };
    item
}

@ -453,6 +451,29 @@ fn compute_ref_match(
    None
}

fn path_ref_match(
    completion: &CompletionContext<'_>,
    path_ctx: &PathCompletionCtx,
    ty: &hir::Type,
    item: &mut Builder,
) {
    if let Some(original_path) = &path_ctx.original_path {
        // At least one char was typed by the user already, in that case look for the original path
        if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) {
            if let Some(ref_match) = compute_ref_match(completion, ty) {
                item.ref_match(ref_match, original_path.syntax().text_range().start());
            }
        }
    } else {
        // completion requested on an empty identifier, there is no path here yet.
        // FIXME: This might create inconsistent completions where we show a ref match in macro inputs
        // as long as nothing was typed yet
        if let Some(ref_match) = compute_ref_match(completion, ty) {
            item.ref_match(ref_match, completion.position.offset);
        }
    }
}

#[cfg(test)]
mod tests {
    use std::cmp;

@ -79,18 +79,18 @@ fn render(
        ..ctx.completion_relevance()
    });

    if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
        match func_kind {
            FuncKind::Function(path_ctx) => {
                item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
            }
            FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
                if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
    match func_kind {
        FuncKind::Function(path_ctx) => {
            super::path_ref_match(completion, path_ctx, &ret_type, &mut item);
        }
        FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
            if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
                if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
                    item.ref_match(ref_match, original_expr.syntax().text_range().start());
                }
            }
            _ => (),
        }
        _ => (),
    }

    item.set_documentation(ctx.docs(func))

@ -2,13 +2,12 @@

use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
use ide_db::SymbolKind;
use syntax::AstNode;

use crate::{
    context::{CompletionContext, PathCompletionCtx, PathKind},
    item::{Builder, CompletionItem},
    render::{
        compute_ref_match, compute_type_match,
        compute_type_match,
        variant::{
            format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
            visible_fields, RenderedLiteral,

@ -125,9 +124,8 @@ fn render(
        type_match: compute_type_match(ctx.completion, &ty),
        ..ctx.completion_relevance()
    });
    if let Some(ref_match) = compute_ref_match(completion, &ty) {
        item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
    }

    super::path_ref_match(completion, path_ctx, &ty, &mut item);

    if let Some(import_to_add) = ctx.import_to_add {
        item.add_import(import_to_add);

@ -52,6 +52,7 @@ use hir::{
    db::{AstDatabase, DefDatabase, HirDatabase},
    symbols::FileSymbolKind,
};
use stdx::hash::NoHashHashSet;

use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};

@ -118,7 +119,7 @@ impl FileLoader for RootDatabase {
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

@ -2,7 +2,7 @@
//! representation.
use std::{iter, mem};

use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use syntax::{TextRange, TextSize};

#[derive(Clone, Debug, PartialEq, Eq)]

@ -10,7 +10,7 @@ pub struct LineIndex {
    /// Offset of the beginning of each line, zero-based
    pub(crate) newlines: Vec<TextSize>,
    /// List of non-ASCII characters on each line
    pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
    pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]

@ -55,7 +55,7 @@ impl Utf16Char {

impl LineIndex {
    pub fn new(text: &str) -> LineIndex {
        let mut utf16_lines = FxHashMap::default();
        let mut utf16_lines = NoHashHashMap::default();
        let mut utf16_chars = Vec::new();

        let mut newlines = vec![0.into()];

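The `FxHashMap`/`FxHashSet` → `NoHashHashMap`/`NoHashHashSet` swaps throughout this commit all target keys that are already small, well-distributed integers (`FileId`, `CrateId`, line numbers), where hashing is pure overhead. A minimal sketch of the idea behind such a map, assuming it is a std `HashMap` parameterized with an identity hasher — the real `stdx::hash` implementation may differ in detail:

    use std::collections::HashMap;
    use std::hash::{BuildHasherDefault, Hasher};

    // Identity "hasher" for keys that are already well-distributed integers.
    #[derive(Default)]
    struct NoHashHasher(u64);

    impl Hasher for NoHashHasher {
        fn finish(&self) -> u64 {
            self.0
        }
        fn write(&mut self, _: &[u8]) {
            unimplemented!("only integer keys are supported")
        }
        fn write_u32(&mut self, n: u32) {
            self.0 = n as u64; // e.g. FileId / CrateId are u32 newtypes
        }
    }

    type NoHashHashMap<K, V> = HashMap<K, V, BuildHasherDefault<NoHashHasher>>;

    fn main() {
        let mut m: NoHashHashMap<u32, &str> = NoHashHashMap::default();
        m.insert(1, "one");
        assert_eq!(m.get(&1), Some(&"one"));
    }
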
@ -9,7 +9,7 @@ use std::{mem, sync::Arc};
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};

use crate::{

@ -20,7 +20,7 @@ use crate::{

#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
    pub references: FxHashMap<FileId, Vec<FileReference>>,
    pub references: NoHashHashMap<FileId, Vec<FileReference>>,
}

impl UsageSearchResult {

@ -45,7 +45,7 @@ impl UsageSearchResult {

impl IntoIterator for UsageSearchResult {
    type Item = (FileId, Vec<FileReference>);
    type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
    type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.references.into_iter()

@ -78,17 +78,17 @@ pub enum ReferenceCategory {
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
    entries: FxHashMap<FileId, Option<TextRange>>,
    entries: NoHashHashMap<FileId, Option<TextRange>>,
}

impl SearchScope {
    fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
    fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
        SearchScope { entries }
    }

    /// Build a search scope spanning the entire crate graph of files.
    fn crate_graph(db: &RootDatabase) -> SearchScope {
        let mut entries = FxHashMap::default();
        let mut entries = NoHashHashMap::default();

        let graph = db.crate_graph();
        for krate in graph.iter() {

@ -102,7 +102,7 @@ impl SearchScope {

    /// Build a search scope spanning all the reverse dependencies of the given crate.
    fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
        let mut entries = FxHashMap::default();
        let mut entries = NoHashHashMap::default();
        for rev_dep in of.transitive_reverse_dependencies(db) {
            let root_file = rev_dep.root_file(db);
            let source_root_id = db.file_source_root(root_file);

@ -117,14 +117,12 @@ impl SearchScope {
        let root_file = of.root_file(db);
        let source_root_id = db.file_source_root(root_file);
        let source_root = db.source_root(source_root_id);
        SearchScope {
            entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
        }
        SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
    }

    /// Build a search scope spanning the given module and all its submodules.
    fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
        let mut entries = FxHashMap::default();
        let mut entries = NoHashHashMap::default();

        let (file_id, range) = {
            let InFile { file_id, value } = module.definition_source(db);

@ -157,7 +155,7 @@ impl SearchScope {

    /// Build an empty search scope.
    pub fn empty() -> SearchScope {
        SearchScope::new(FxHashMap::default())
        SearchScope::new(NoHashHashMap::default())
    }

    /// Build an empty search scope spanning the given file.

@ -6,8 +6,7 @@
use std::{collections::hash_map::Entry, iter, mem};

use base_db::{AnchoredPathBuf, FileId};
use rustc_hash::FxHashMap;
use stdx::never;
use stdx::{hash::NoHashHashMap, never};
use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
use text_edit::{TextEdit, TextEditBuilder};

@ -15,7 +14,7 @@ use crate::SnippetCap;

#[derive(Default, Debug, Clone)]
pub struct SourceChange {
    pub source_file_edits: FxHashMap<FileId, TextEdit>,
    pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
    pub file_system_edits: Vec<FileSystemEdit>,
    pub is_snippet: bool,
}

@ -24,7 +23,7 @@ impl SourceChange {
    /// Creates a new SourceChange with the given label
    /// from the edits.
    pub fn from_edits(
        source_file_edits: FxHashMap<FileId, TextEdit>,
        source_file_edits: NoHashHashMap<FileId, TextEdit>,
        file_system_edits: Vec<FileSystemEdit>,
    ) -> Self {
        SourceChange { source_file_edits, file_system_edits, is_snippet: false }

@ -78,8 +77,8 @@ impl Extend<FileSystemEdit> for SourceChange {
    }
}

impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
    fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
    fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
        SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
    }
}

@ -106,18 +106,17 @@ fn f() {

#[test]
fn inactive_assoc_item() {
    // FIXME these currently don't work, hence the *
    check(
        r#"
struct Foo;
impl Foo {
    #[cfg(any())] pub fn f() {}
    //*************************** weak: code is inactive due to #[cfg] directives
    //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
}

trait Bar {
    #[cfg(any())] pub fn f() {}
    //*************************** weak: code is inactive due to #[cfg] directives
    //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
}
"#,
    );

@ -20,6 +20,7 @@ parser = { path = "../parser", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
ide-db = { path = "../ide-db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }

[dev-dependencies]
test-utils = { path = "../test-utils" }

@ -86,11 +86,9 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc

use crate::{errors::bail, matching::MatchFailureReason};
use hir::Semantics;
use ide_db::{
    base_db::{FileId, FilePosition, FileRange},
    FxHashMap,
};
use ide_db::base_db::{FileId, FilePosition, FileRange};
use resolving::ResolvedRule;
use stdx::hash::NoHashHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;

@ -170,9 +168,9 @@ impl<'db> MatchFinder<'db> {
    }

    /// Finds matches for all added rules and returns edits for all found matches.
    pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
    pub fn edits(&self) -> NoHashHashMap<FileId, TextEdit> {
        use ide_db::base_db::SourceDatabaseExt;
        let mut matches_by_file = FxHashMap::default();
        let mut matches_by_file = NoHashHashMap::default();
        for m in self.matches().matches {
            matches_by_file
                .entry(m.range.file_id)

@ -184,10 +184,10 @@ pub(crate) fn resolve_doc_path_for_def(
        Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
        Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
        Definition::Field(it) => it.resolve_doc_path(db, link, ns),
        Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
        Definition::BuiltinAttr(_)
        | Definition::ToolModule(_)
        | Definition::BuiltinType(_)
        | Definition::SelfType(_)
        | Definition::Local(_)
        | Definition::GenericParam(_)
        | Definition::Label(_)

@ -87,7 +87,7 @@ pub use crate::{
    },
    join_lines::JoinLinesConfig,
    markup::Markup,
    moniker::{MonikerKind, MonikerResult, PackageInformation},
    moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
    move_item::Direction,
    navigation_target::NavigationTarget,
    prime_caches::ParallelPrimeCachesProgress,

@ -98,7 +98,7 @@ pub use crate::{
    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
    syntax_highlighting::{
        tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
        HlRange,
        HighlightConfig, HlRange,
    },
};
pub use hir::{Documentation, Semantics};

@ -517,8 +517,12 @@ impl Analysis {
    }

    /// Computes syntax highlighting for the given file
    pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
        self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
    pub fn highlight(
        &self,
        highlight_config: HighlightConfig,
        file_id: FileId,
    ) -> Cancellable<Vec<HlRange>> {
        self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
    }

    /// Computes all ranges to highlight for a given item in a file.

@ -533,9 +537,13 @@ impl Analysis {
    }

    /// Computes syntax highlighting for the given file range.
    pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
    pub fn highlight_range(
        &self,
        highlight_config: HighlightConfig,
        frange: FileRange,
    ) -> Cancellable<Vec<HlRange>> {
        self.with_db(|db| {
            syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
            syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
        })
    }

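Every caller of `Analysis::highlight` now has to pass a `HighlightConfig` explicitly, so the booleans that used to be threaded through as bare parameters carry names. A hedged usage sketch, assuming it lives inside the rust-analyzer workspace with `ide::{Analysis, Cancellable, FileId, HighlightConfig, HlRange}` in scope (field values mirror the `HL_CONFIG` constant the tests define further down):

    use ide::{Analysis, Cancellable, FileId, HighlightConfig, HlRange};

    // Fetch all highlight ranges for a file with "everything on" settings.
    fn all_ranges(analysis: &Analysis, file_id: FileId) -> Cancellable<Vec<HlRange>> {
        analysis.highlight(
            HighlightConfig {
                strings: true,
                punctuation: true,
                specialize_punctuation: true,
                operator: true,
                specialize_operator: true,
                inject_doc_comment: true,
                macro_bang: true,
                syntactic_name_ref_highlighting: false,
            },
            file_id,
        )
    }
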
@ -13,17 +13,39 @@ use syntax::{AstNode, SyntaxKind::*, T};

use crate::{doc_links::token_as_doc_comment, RangeInfo};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MonikerDescriptorKind {
    Namespace,
    Type,
    Term,
    Method,
    TypeParameter,
    Parameter,
    Macro,
    Meta,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerDescriptor {
    pub name: Name,
    pub desc: MonikerDescriptorKind,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerIdentifier {
    crate_name: String,
    path: Vec<Name>,
    pub crate_name: String,
    pub description: Vec<MonikerDescriptor>,
}

impl ToString for MonikerIdentifier {
    fn to_string(&self) -> String {
        match self {
            MonikerIdentifier { path, crate_name } => {
                format!("{}::{}", crate_name, path.iter().map(|x| x.to_string()).join("::"))
            MonikerIdentifier { description, crate_name } => {
                format!(
                    "{}::{}",
                    crate_name,
                    description.iter().map(|x| x.name.to_string()).join("::")
                )
            }
        }
    }

@ -42,6 +64,12 @@ pub struct MonikerResult {
    pub package_information: PackageInformation,
}

impl MonikerResult {
    pub fn from_def(db: &RootDatabase, def: Definition, from_crate: Crate) -> Option<Self> {
        def_to_moniker(db, def, from_crate)
    }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PackageInformation {
    pub name: String,

@ -105,13 +133,23 @@ pub(crate) fn def_to_moniker(
    def: Definition,
    from_crate: Crate,
) -> Option<MonikerResult> {
    if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
    if matches!(
        def,
        Definition::GenericParam(_)
            | Definition::Label(_)
            | Definition::DeriveHelper(_)
            | Definition::BuiltinAttr(_)
            | Definition::ToolModule(_)
    ) {
        return None;
    }

    let module = def.module(db)?;
    let krate = module.krate();
    let mut path = vec![];
    path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
    let mut description = vec![];
    description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
        Some(MonikerDescriptor { name: x.name(db)?, desc: MonikerDescriptorKind::Namespace })
    }));

    // Handle associated items within a trait
    if let Some(assoc) = def.as_assoc_item(db) {

@ -120,31 +158,98 @@ pub(crate) fn def_to_moniker(
            AssocItemContainer::Trait(trait_) => {
                // Because different traits can have functions with the same name,
                // we have to include the trait name as part of the moniker for uniqueness.
                path.push(trait_.name(db));
                description.push(MonikerDescriptor {
                    name: trait_.name(db),
                    desc: MonikerDescriptorKind::Type,
                });
            }
            AssocItemContainer::Impl(impl_) => {
                // Because a struct can implement multiple traits, for implementations
                // we add both the struct name and the trait name to the path
                if let Some(adt) = impl_.self_ty(db).as_adt() {
                    path.push(adt.name(db));
                    description.push(MonikerDescriptor {
                        name: adt.name(db),
                        desc: MonikerDescriptorKind::Type,
                    });
                }

                if let Some(trait_) = impl_.trait_(db) {
                    path.push(trait_.name(db));
                    description.push(MonikerDescriptor {
                        name: trait_.name(db),
                        desc: MonikerDescriptorKind::Type,
                    });
                }
            }
        }
    }

    if let Definition::Field(it) = def {
        path.push(it.parent_def(db).name(db));
        description.push(MonikerDescriptor {
            name: it.parent_def(db).name(db),
            desc: MonikerDescriptorKind::Type,
        });
    }

    path.push(def.name(db)?);
    let name_desc = match def {
        // These are handled by top-level guard (for performance).
        Definition::GenericParam(_)
        | Definition::Label(_)
        | Definition::DeriveHelper(_)
        | Definition::BuiltinAttr(_)
        | Definition::ToolModule(_) => return None,

        Definition::Local(local) => {
            if !local.is_param(db) {
                return None;
            }

            MonikerDescriptor { name: local.name(db), desc: MonikerDescriptorKind::Parameter }
        }
        Definition::Macro(m) => {
            MonikerDescriptor { name: m.name(db), desc: MonikerDescriptorKind::Macro }
        }
        Definition::Function(f) => {
            MonikerDescriptor { name: f.name(db), desc: MonikerDescriptorKind::Method }
        }
        Definition::Variant(v) => {
            MonikerDescriptor { name: v.name(db), desc: MonikerDescriptorKind::Type }
        }
        Definition::Const(c) => {
            MonikerDescriptor { name: c.name(db)?, desc: MonikerDescriptorKind::Term }
        }
        Definition::Trait(trait_) => {
            MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
        }
        Definition::TypeAlias(ta) => {
            MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
        }
        Definition::Module(m) => {
            MonikerDescriptor { name: m.name(db)?, desc: MonikerDescriptorKind::Namespace }
        }
        Definition::BuiltinType(b) => {
            MonikerDescriptor { name: b.name(), desc: MonikerDescriptorKind::Type }
        }
        Definition::SelfType(imp) => MonikerDescriptor {
            name: imp.self_ty(db).as_adt()?.name(db),
            desc: MonikerDescriptorKind::Type,
        },
        Definition::Field(it) => {
            MonikerDescriptor { name: it.name(db), desc: MonikerDescriptorKind::Term }
        }
        Definition::Adt(adt) => {
            MonikerDescriptor { name: adt.name(db), desc: MonikerDescriptorKind::Type }
        }
        Definition::Static(s) => {
            MonikerDescriptor { name: s.name(db), desc: MonikerDescriptorKind::Meta }
        }
    };

    description.push(name_desc);

    Some(MonikerResult {
        identifier: MonikerIdentifier {
            crate_name: krate.display_name(db)?.crate_name().to_string(),
            path,
            description,
        },
        kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
        package_information: {

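Even with the richer descriptor list, the rendered moniker string keeps the same shape: the crate name followed by each descriptor name joined with `::`. A small illustration of that rendering, using plain strings in place of hir `Name`s (a simplification of the `ToString` impl above, not the real types):

    struct Descriptor {
        name: String, // stand-in for hir::Name
    }

    // Mirrors MonikerIdentifier::to_string: crate name, then descriptor names.
    fn render(crate_name: &str, description: &[Descriptor]) -> String {
        let path: Vec<&str> = description.iter().map(|d| d.name.as_str()).collect();
        format!("{}::{}", crate_name, path.join("::"))
    }

    fn main() {
        let desc = vec![
            Descriptor { name: "module".into() },  // Namespace
            Descriptor { name: "MyTrait".into() }, // Type
            Descriptor { name: "method".into() },  // Method
        ];
        assert_eq!(render("my_crate", &desc), "my_crate::module::MyTrait::method");
    }
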
@ -12,8 +12,9 @@ use ide_db::{
        salsa::{Database, ParallelDatabase, Snapshot},
        Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
    },
    FxHashSet, FxIndexMap,
    FxIndexMap,
};
use stdx::hash::NoHashHashSet;

use crate::RootDatabase;

@ -141,7 +142,7 @@ pub(crate) fn parallel_prime_caches(
    }
}

fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> NoHashHashSet<CrateId> {
    // We're only interested in the workspace crates and the `ImportMap`s of their direct
    // dependencies, though in practice the latter also compute the `DefMap`s.
    // We don't prime transitive dependencies because they're generally not visible in

@ -14,8 +14,9 @@ use ide_db::{
    base_db::FileId,
    defs::{Definition, NameClass, NameRefClass},
    search::{ReferenceCategory, SearchScope, UsageSearchResult},
    FxHashMap, RootDatabase,
    RootDatabase,
};
use stdx::hash::NoHashHashMap;
use syntax::{
    algo::find_node_at_offset,
    ast::{self, HasName},

@ -29,7 +30,7 @@ use crate::{FilePosition, NavigationTarget, TryToNav};
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
    pub declaration: Option<Declaration>,
    pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
    pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
}

#[derive(Debug, Clone)]

@ -14,7 +14,7 @@ mod html;
mod tests;

use hir::{Name, Semantics};
use ide_db::{FxHashMap, RootDatabase};
use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
    ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
};

@ -24,7 +24,7 @@ use crate::{
        escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
        macro_::MacroHighlighter, tags::Highlight,
    },
    FileId, HlMod, HlTag,
    FileId, HlMod, HlOperator, HlPunct, HlTag,
};

pub(crate) use html::highlight_as_html;

@ -36,6 +36,26 @@ pub struct HlRange {
    pub binding_hash: Option<u64>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct HighlightConfig {
    /// Whether to highlight strings
    pub strings: bool,
    /// Whether to highlight punctuation
    pub punctuation: bool,
    /// Whether to specialize punctuation highlights
    pub specialize_punctuation: bool,
    /// Whether to highlight operators
    pub operator: bool,
    /// Whether to specialize operator highlights
    pub specialize_operator: bool,
    /// Whether to inject highlights into doc comments
    pub inject_doc_comment: bool,
    /// Whether to highlight the macro call bang
    pub macro_bang: bool,
    /// Whether to highlight unresolved things by their syntax
    pub syntactic_name_ref_highlighting: bool,
}

// Feature: Semantic Syntax Highlighting
//
// rust-analyzer highlights the code semantically.

@ -155,9 +175,9 @@ pub struct HlRange {
// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
pub(crate) fn highlight(
    db: &RootDatabase,
    config: HighlightConfig,
    file_id: FileId,
    range_to_highlight: Option<TextRange>,
    syntactic_name_ref_highlighting: bool,
) -> Vec<HlRange> {
    let _p = profile::span("highlight");
    let sema = Semantics::new(db);

@ -183,26 +203,18 @@ pub(crate) fn highlight(
        Some(it) => it.krate(),
        None => return hl.to_vec(),
    };
    traverse(
        &mut hl,
        &sema,
        file_id,
        &root,
        krate,
        range_to_highlight,
        syntactic_name_ref_highlighting,
    );
    traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
    hl.to_vec()
}

fn traverse(
    hl: &mut Highlights,
    sema: &Semantics<'_, RootDatabase>,
    config: HighlightConfig,
    file_id: FileId,
    root: &SyntaxNode,
    krate: hir::Crate,
    range_to_highlight: TextRange,
    syntactic_name_ref_highlighting: bool,
) {
    let is_unlinked = sema.to_module_def(file_id).is_none();
    let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();

@ -323,9 +335,11 @@ fn traverse(
            Enter(it) => it,
            Leave(NodeOrToken::Token(_)) => continue,
            Leave(NodeOrToken::Node(node)) => {
                // Doc comment highlighting injection, we do this when leaving the node
                // so that we overwrite the highlighting of the doc comment itself.
                inject::doc_comment(hl, sema, file_id, &node);
                if config.inject_doc_comment {
                    // Doc comment highlighting injection, we do this when leaving the node
                    // so that we overwrite the highlighting of the doc comment itself.
                    inject::doc_comment(hl, sema, config, file_id, &node);
                }
                continue;
            }
        };

@ -400,7 +414,8 @@ fn traverse(
                let string_to_highlight = ast::String::cast(descended_token.clone());
                if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
                    if string.is_raw() {
                        if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
                        if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
                        {
                            continue;
                        }
                    }

@ -421,7 +436,7 @@ fn traverse(
                sema,
                krate,
                &mut bindings_shadow_count,
                syntactic_name_ref_highlighting,
                config.syntactic_name_ref_highlighting,
                name_like,
            ),
            NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),

@ -439,6 +454,29 @@ fn traverse(
                // something unresolvable. FIXME: There should be a way to prevent that
                continue;
            }

            // apply config filtering
            match &mut highlight.tag {
                HlTag::StringLiteral if !config.strings => continue,
                // If punctuation is disabled, make the macro bang part of the macro call again.
                tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
                    if !config.macro_bang {
                        *tag = HlTag::Symbol(SymbolKind::Macro);
                    } else if !config.specialize_punctuation {
                        *tag = HlTag::Punctuation(HlPunct::Other);
                    }
                }
                HlTag::Punctuation(_) if !config.punctuation => continue,
                tag @ HlTag::Punctuation(_) if !config.specialize_punctuation => {
                    *tag = HlTag::Punctuation(HlPunct::Other);
                }
                HlTag::Operator(_) if !config.operator && highlight.mods.is_empty() => continue,
                tag @ HlTag::Operator(_) if !config.specialize_operator => {
                    *tag = HlTag::Operator(HlOperator::Other);
                }
                _ => (),
            }

            if inside_attribute {
                highlight |= HlMod::Attribute
            }

@ -5,7 +5,10 @@ use oorandom::Rand32;
use stdx::format_to;
use syntax::AstNode;

use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
use crate::{
    syntax_highlighting::{highlight, HighlightConfig},
    FileId, RootDatabase,
};

pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
    let parse = db.parse(file_id);

@ -20,7 +23,21 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
        )
    }

    let hl_ranges = highlight(db, file_id, None, false);
    let hl_ranges = highlight(
        db,
        HighlightConfig {
            strings: true,
            punctuation: true,
            specialize_punctuation: true,
            specialize_operator: true,
            operator: true,
            inject_doc_comment: true,
            macro_bang: true,
            syntactic_name_ref_highlighting: false,
        },
        file_id,
        None,
    );
    let text = parse.tree().syntax().to_string();
    let mut buf = String::new();
    buf.push_str(STYLE);

@ -15,13 +15,14 @@ use syntax::{

use crate::{
    doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
    syntax_highlighting::{highlights::Highlights, injector::Injector},
    syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
    Analysis, HlMod, HlRange, HlTag, RootDatabase,
};

pub(super) fn ra_fixture(
    hl: &mut Highlights,
    sema: &Semantics<'_, RootDatabase>,
    config: HighlightConfig,
    literal: &ast::String,
    expanded: &ast::String,
) -> Option<()> {

@ -63,7 +64,13 @@ pub(super) fn ra_fixture(

    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());

    for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
    for mut hl_range in analysis
        .highlight(
            HighlightConfig { syntactic_name_ref_highlighting: false, ..config },
            tmp_file_id,
        )
        .unwrap()
    {
        for range in inj.map_range_up(hl_range.range) {
            if let Some(range) = literal.map_range_up(range) {
                hl_range.range = range;

@ -86,6 +93,7 @@ const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub(super) fn doc_comment(
    hl: &mut Highlights,
    sema: &Semantics<'_, RootDatabase>,
    config: HighlightConfig,
    src_file_id: FileId,
    node: &SyntaxNode,
) {

@ -206,7 +214,14 @@ pub(super) fn doc_comment(

    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());

    if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
    if let Ok(ranges) = analysis.with_db(|db| {
        super::highlight(
            db,
            HighlightConfig { syntactic_name_ref_highlighting: true, ..config },
            tmp_file_id,
            None,
        )
    }) {
        for HlRange { range, highlight, binding_hash } in ranges {
            for range in inj.map_range_up(range) {
                hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });

@ -199,7 +199,7 @@ impl fmt::Display for HlTag {
}

impl HlMod {
    const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
    const ALL: &'static [HlMod; 19] = &[
        HlMod::Associated,
        HlMod::Async,
        HlMod::Attribute,

@ -296,7 +296,7 @@ impl Highlight {
        Highlight { tag, mods: HlMods::default() }
    }
    pub fn is_empty(&self) -> bool {
        self.tag == HlTag::None && self.mods == HlMods::default()
        self.tag == HlTag::None && self.mods.is_empty()
    }
}

@ -330,6 +330,10 @@ impl ops::BitOr<HlMod> for Highlight {
}

impl HlMods {
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }

    pub fn contains(self, m: HlMod) -> bool {
        self.0 & m.mask() == m.mask()
    }

@ -56,7 +56,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
<span class="brace">}</span>

<span class="comment documentation">/// This is an impl with a code block.</span>
<span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
<span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>

@ -4,7 +4,18 @@ use expect_test::{expect_file, ExpectFile};
use ide_db::SymbolKind;
use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};

use crate::{fixture, FileRange, HlTag, TextRange};
use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};

const HL_CONFIG: HighlightConfig = HighlightConfig {
    strings: true,
    punctuation: true,
    specialize_punctuation: true,
    specialize_operator: true,
    operator: true,
    inject_doc_comment: true,
    macro_bang: true,
    syntactic_name_ref_highlighting: false,
};

#[test]
fn attributes() {

@ -613,7 +624,7 @@ struct Foo {
    bar: bool,
}

/// This is an impl with a code block.
/// This is an impl of [`Foo`] with a code block.
///
/// ```
/// fn foo() {

@ -996,7 +1007,10 @@ struct Foo {

    // The "x"
    let highlights = &analysis
        .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
        .highlight_range(
            HL_CONFIG,
            FileRange { file_id, range: TextRange::at(45.into(), 1.into()) },
        )
        .unwrap();

    assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");

@ -1011,7 +1025,7 @@ macro_rules! test {}
}"#
        .trim(),
    );
    let _ = analysis.highlight(file_id).unwrap();
    let _ = analysis.highlight(HL_CONFIG, file_id).unwrap();
}

/// Highlights the code given by the `ra_fixture` argument, renders the

@ -1035,7 +1049,7 @@ fn benchmark_syntax_highlighting_long_struct() {
    let hash = {
        let _pt = bench("syntax highlighting long struct");
        analysis
            .highlight(file_id)
            .highlight(HL_CONFIG, file_id)
            .unwrap()
            .iter()
            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))

@ -1061,7 +1075,7 @@ fn syntax_highlighting_not_quadratic() {
        let time = Instant::now();

        let hash = analysis
            .highlight(file_id)
            .highlight(HL_CONFIG, file_id)
            .unwrap()
            .iter()
            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))

@ -1086,7 +1100,7 @@ fn benchmark_syntax_highlighting_parser() {
    let hash = {
        let _pt = bench("syntax highlighting parser");
        analysis
            .highlight(file_id)
            .highlight(HL_CONFIG, file_id)
            .unwrap()
            .iter()
            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))

@ -3,8 +3,9 @@ use std::sync::Arc;
use dot::{Id, LabelText};
use ide_db::{
    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
    FxHashSet, RootDatabase,
    RootDatabase,
};
use stdx::hash::NoHashHashSet;

// Feature: View Crate Graph
//

@ -41,7 +42,7 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,

struct DotCrateGraph {
    graph: Arc<CrateGraph>,
    crates_to_render: FxHashSet<CrateId>,
    crates_to_render: NoHashHashSet<CrateId>,
}

type Edge<'a> = (CrateId, &'a Dependency);

@ -13,6 +13,8 @@ pub(super) const PATTERN_FIRST: TokenSet =
        T![.],
    ]));

const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));

pub(crate) fn pattern(p: &mut Parser<'_>) {
    pattern_r(p, PAT_RECOVERY_SET);
}

@ -228,6 +230,7 @@ fn path_or_macro_pat(p: &mut Parser<'_>) -> CompletedMarker {
//     let S(_) = ();
//     let S(_,) = ();
//     let S(_, .. , x) = ();
//     let S(| a) = ();
// }
fn tuple_pat_fields(p: &mut Parser<'_>) {
    assert!(p.at(T!['(']));

@ -363,6 +366,7 @@ fn ref_pat(p: &mut Parser<'_>) -> CompletedMarker {
//     let (a,) = ();
//     let (..) = ();
//     let () = ();
//     let (| a | a, | b) = ((),());
// }
fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
    assert!(p.at(T!['(']));

@ -373,13 +377,13 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
    let mut has_rest = false;
    while !p.at(EOF) && !p.at(T![')']) {
        has_pat = true;
        if !p.at_ts(PATTERN_FIRST) {
        if !p.at_ts(PAT_TOP_FIRST) {
            p.error("expected a pattern");
            break;
        }
        has_rest |= p.at(T![..]);

        pattern(p);
        pattern_top(p);
        if !p.at(T![')']) {
            has_comma = true;
            p.expect(T![,]);

@ -393,6 +397,7 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
// test slice_pat
// fn main() {
//     let [a, b, ..] = [];
//     let [| a, ..] = [];
// }
fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
    assert!(p.at(T!['[']));

@ -405,12 +410,12 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {

fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
    while !p.at(EOF) && !p.at(ket) {
        if !p.at_ts(PATTERN_FIRST) {
        if !p.at_ts(PAT_TOP_FIRST) {
            p.error("expected a pattern");
            break;
        }

        pattern(p);
        pattern_top(p);
        if !p.at(ket) {
            p.expect(T![,]);
        }

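This parser change lets tuple, tuple-struct, and slice patterns start an alternative list with a leading `|`, matching what rustc already accepts. A small hedged illustration of source that now parses cleanly (plain Rust, independent of the parser internals; the exact lines mirror the inline test fixtures above):

    fn main() {
        // Each pattern below opens with a leading `|`, which PATTERN_FIRST
        // used to reject and PAT_TOP_FIRST now accepts.
        let (| a | a, | b) = ((), ());
        let [| _x, ..] = [0u8; 4];
        let _ = (a, b);
    }
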
@ -37,6 +37,29 @@ SOURCE_FILE
            L_BRACK "["
            R_BRACK "]"
          SEMICOLON ";"
        WHITESPACE "\n    "
        LET_STMT
          LET_KW "let"
          WHITESPACE " "
          SLICE_PAT
            L_BRACK "["
            PIPE "|"
            WHITESPACE " "
            IDENT_PAT
              NAME
                IDENT "a"
            COMMA ","
            WHITESPACE " "
            REST_PAT
              DOT2 ".."
            R_BRACK "]"
          WHITESPACE " "
          EQ "="
          WHITESPACE " "
          ARRAY_EXPR
            L_BRACK "["
            R_BRACK "]"
          SEMICOLON ";"
        WHITESPACE "\n"
        R_CURLY "}"
  WHITESPACE "\n"

@ -1,3 +1,4 @@
fn main() {
    let [a, b, ..] = [];
    let [| a, ..] = [];
}

@ -100,6 +100,29 @@ SOURCE_FILE
            L_PAREN "("
            R_PAREN ")"
          SEMICOLON ";"
        WHITESPACE "\n    "
        LET_STMT
          LET_KW "let"
          WHITESPACE " "
          TUPLE_STRUCT_PAT
            PATH
              PATH_SEGMENT
                NAME_REF
                  IDENT "S"
            L_PAREN "("
            PIPE "|"
            WHITESPACE " "
            IDENT_PAT
              NAME
                IDENT "a"
            R_PAREN ")"
          WHITESPACE " "
          EQ "="
          WHITESPACE " "
          TUPLE_EXPR
            L_PAREN "("
            R_PAREN ")"
          SEMICOLON ";"
        WHITESPACE "\n"
        R_CURLY "}"
  WHITESPACE "\n"

@ -3,4 +3,5 @@ fn foo() {
    let S(_) = ();
    let S(_,) = ();
    let S(_, .. , x) = ();
    let S(| a) = ();
}

@ -85,6 +85,46 @@ SOURCE_FILE
            L_PAREN "("
            R_PAREN ")"
          SEMICOLON ";"
        WHITESPACE "\n    "
        LET_STMT
          LET_KW "let"
          WHITESPACE " "
          TUPLE_PAT
            L_PAREN "("
            PIPE "|"
            WHITESPACE " "
            OR_PAT
              IDENT_PAT
                NAME
                  IDENT "a"
              WHITESPACE " "
              PIPE "|"
              WHITESPACE " "
              IDENT_PAT
                NAME
                  IDENT "a"
            COMMA ","
            WHITESPACE " "
            PIPE "|"
            WHITESPACE " "
            IDENT_PAT
              NAME
                IDENT "b"
            R_PAREN ")"
          WHITESPACE " "
          EQ "="
          WHITESPACE " "
          TUPLE_EXPR
            L_PAREN "("
            TUPLE_EXPR
              L_PAREN "("
              R_PAREN ")"
            COMMA ","
            TUPLE_EXPR
              L_PAREN "("
              R_PAREN ")"
            R_PAREN ")"
          SEMICOLON ";"
        WHITESPACE "\n"
        R_CURLY "}"
  WHITESPACE "\n"

@ -3,4 +3,5 @@ fn main() {
    let (a,) = ();
    let (..) = ();
    let () = ();
    let (| a | a, | b) = ((),());
}

@ -24,7 +24,6 @@ tt = { path = "../tt", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" }
proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
crossbeam = "0.8.1"

[dev-dependencies]
expect-test = "1.4.0"

@ -26,6 +26,7 @@ use std::{
    ffi::OsString,
    fs,
    path::{Path, PathBuf},
+    thread,
    time::SystemTime,
};

@ -65,18 +66,16 @@ impl ProcMacroSrv {

        let macro_body = task.macro_body.to_subtree();
        let attributes = task.attributes.map(|it| it.to_subtree());
-        // FIXME: replace this with std's scoped threads once they stabilize
-        // (then remove dependency on crossbeam)
-        let result = crossbeam::scope(|s| {
-            let res = match s
-                .builder()
+        let result = thread::scope(|s| {
+            let thread = thread::Builder::new()
                .stack_size(EXPANDER_STACK_SIZE)
                .name(task.macro_name.clone())
-                .spawn(|_| {
+                .spawn_scoped(s, || {
                    expander
                        .expand(&task.macro_name, &macro_body, attributes.as_ref())
                        .map(|it| FlatTree::new(&it))
-                }) {
+                });
+            let res = match thread {
+                Ok(handle) => handle.join(),
+                Err(e) => std::panic::resume_unwind(Box::new(e)),
+            };

@ -86,10 +85,6 @@ impl ProcMacroSrv {
                Err(e) => std::panic::resume_unwind(e),
            }
        });
-        let result = match result {
-            Ok(result) => result,
-            Err(e) => std::panic::resume_unwind(e),
-        };

        prev_env.rollback();
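
For context, the hunk above replaces crossbeam::scope with std's scoped threads (std::thread::scope, stable since Rust 1.63), which is also why the Cargo.toml hunk earlier drops the crossbeam dependency. A minimal standalone sketch of the same spawn/join shape (the thread name and workload here are hypothetical):

use std::thread;

fn main() {
    let data = vec![1, 2, 3];
    // Scoped threads may borrow from the enclosing scope, and the scope's
    // return value propagates out, mirroring the expander code above.
    let sum = thread::scope(|s| {
        let handle = thread::Builder::new()
            .name("worker".into())
            .spawn_scoped(s, || data.iter().sum::<i32>())
            .expect("failed to spawn thread");
        handle.join().expect("worker panicked")
    });
    assert_eq!(sum, 6);
}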
File diff suppressed because it is too large
@ -13,7 +13,7 @@ use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
-use stdx::always;
+use stdx::{always, hash::NoHashHashMap};

use crate::{
    build_scripts::BuildScriptOutput,

@ -471,7 +471,7 @@ fn project_json_to_crate_graph(
        .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));

    let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
-    let crates: FxHashMap<CrateId, CrateId> = project
+    let crates: NoHashHashMap<CrateId, CrateId> = project
        .crates()
        .filter_map(|(crate_id, krate)| {
            let file_path = &krate.root_module;
@ -22,7 +22,8 @@ anyhow = "1.0.57"
crossbeam-channel = "0.5.5"
dissimilar = "1.0.4"
itertools = "0.10.3"
-lsp-types = { version = "0.93.0", features = ["proposed"] }
+scip = "0.1.1"
+lsp-types = { version = "0.93.1", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.2.4"
oorandom = "11.1.3"

@ -88,5 +89,5 @@ in-rust-tree = [
    "proc-macro-srv/sysroot-abi",
    "sourcegen/in-rust-tree",
    "ide/in-rust-tree",
-    "syntax/in-rust-tree"
+    "syntax/in-rust-tree",
]
@ -93,6 +93,7 @@ fn try_main() -> Result<()> {
        flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
        flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
        flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
+        flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
    }
    Ok(())
}

@ -9,6 +9,7 @@ mod analysis_stats;
mod diagnostics;
mod ssr;
mod lsif;
+mod scip;

mod progress_report;

@ -112,6 +112,10 @@ xflags::xflags! {
        cmd lsif
            required path: PathBuf
        {}
+
+        cmd scip
+            required path: PathBuf
+        {}
    }
}

@ -140,6 +144,7 @@ pub enum RustAnalyzerCmd {
    Search(Search),
    ProcMacro(ProcMacro),
    Lsif(Lsif),
+    Scip(Scip),
}

#[derive(Debug)]

@ -207,6 +212,11 @@ pub struct Lsif {
    pub path: PathBuf,
}

+#[derive(Debug)]
+pub struct Scip {
+    pub path: PathBuf,
+}
+
impl RustAnalyzer {
    pub const HELP: &'static str = Self::HELP_;
@ -0,0 +1,448 @@
//! SCIP generator

use std::{
    collections::{HashMap, HashSet},
    time::Instant,
};

use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
use hir::Name;
use ide::{
    LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, TextRange,
    TokenId,
};
use ide_db::LineIndexDatabase;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
use scip::types as scip_types;
use std::env;

use crate::cli::{
    flags,
    load_cargo::{load_workspace, LoadCargoConfig},
    Result,
};

impl flags::Scip {
    pub fn run(self) -> Result<()> {
        eprintln!("Generating SCIP start...");
        let now = Instant::now();
        let cargo_config = CargoConfig::default();

        let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: true,
            with_proc_macro: true,
            prefill_caches: true,
        };
        let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
        let rootpath = path.normalize();
        let manifest = ProjectManifest::discover_single(&path)?;

        let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;

        let (host, vfs, _) = load_workspace(workspace, &load_cargo_config)?;
        let db = host.raw_database();
        let analysis = host.analysis();

        let si = StaticIndex::compute(&analysis);

        let mut index = scip_types::Index {
            metadata: Some(scip_types::Metadata {
                version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
                tool_info: Some(scip_types::ToolInfo {
                    name: "rust-analyzer".to_owned(),
                    version: "0.1".to_owned(),
                    arguments: vec![],
                    ..Default::default()
                })
                .into(),
                project_root: format!(
                    "file://{}",
                    path.normalize()
                        .as_os_str()
                        .to_str()
                        .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
                        .to_string()
                ),
                text_document_encoding: scip_types::TextEncoding::UTF8.into(),
                ..Default::default()
            })
            .into(),
            ..Default::default()
        };

        let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
        let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();

        for file in si.files {
            let mut local_count = 0;
            let mut new_local_symbol = || {
                let new_symbol = scip::types::Symbol::new_local(local_count);
                local_count += 1;

                new_symbol
            };

            let StaticIndexedFile { file_id, tokens, .. } = file;
            let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
                Some(relative_path) => relative_path,
                None => continue,
            };

            let line_index = LineIndex {
                index: db.line_index(file_id),
                encoding: OffsetEncoding::Utf8,
                endings: LineEndings::Unix,
            };

            let mut doc = scip_types::Document {
                relative_path,
                language: "rust".to_string(),
                ..Default::default()
            };

            tokens.into_iter().for_each(|(range, id)| {
                let token = si.tokens.get(id).unwrap();

                let mut occurrence = scip_types::Occurrence::default();
                occurrence.range = text_range_to_scip_range(&line_index, range);
                occurrence.symbol = match tokens_to_symbol.get(&id) {
                    Some(symbol) => symbol.clone(),
                    None => {
                        let symbol = match &token.moniker {
                            Some(moniker) => moniker_to_symbol(&moniker),
                            None => new_local_symbol(),
                        };

                        let symbol = scip::symbol::format_symbol(symbol);
                        tokens_to_symbol.insert(id, symbol.clone());
                        symbol
                    }
                };

                if let Some(def) = token.definition {
                    if def.range == range {
                        occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
                    }

                    if !symbols_emitted.contains(&id) {
                        symbols_emitted.insert(id);

                        let mut symbol_info = scip_types::SymbolInformation::default();
                        symbol_info.symbol = occurrence.symbol.clone();
                        if let Some(hover) = &token.hover {
                            if !hover.markup.as_str().is_empty() {
                                symbol_info.documentation = vec![hover.markup.as_str().to_string()];
                            }
                        }

                        doc.symbols.push(symbol_info)
                    }
                }

                doc.occurrences.push(occurrence);
            });

            if doc.occurrences.is_empty() {
                continue;
            }

            index.documents.push(doc);
        }

        scip::write_message_to_file("index.scip", index)
            .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;

        eprintln!("Generating SCIP finished {:?}", now.elapsed());
        Ok(())
    }
}

fn get_relative_filepath(
    vfs: &vfs::Vfs,
    rootpath: &vfs::AbsPathBuf,
    file_id: ide::FileId,
) -> Option<String> {
    Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
}

// SCIP Ranges have a (very large) optimization that ranges if they are on the same line
// only encode as a vector of [start_line, start_col, end_col].
//
// This transforms a line index into the optimized SCIP Range.
fn text_range_to_scip_range(line_index: &LineIndex, range: TextRange) -> Vec<i32> {
    let LineCol { line: start_line, col: start_col } = line_index.index.line_col(range.start());
    let LineCol { line: end_line, col: end_col } = line_index.index.line_col(range.end());

    if start_line == end_line {
        vec![start_line as i32, start_col as i32, end_col as i32]
    } else {
        vec![start_line as i32, start_col as i32, end_line as i32, end_col as i32]
    }
}

fn new_descriptor_str(
    name: &str,
    suffix: scip_types::descriptor::Suffix,
) -> scip_types::Descriptor {
    scip_types::Descriptor {
        name: name.to_string(),
        disambiguator: "".to_string(),
        suffix: suffix.into(),
        ..Default::default()
    }
}

fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
    let mut name = name.to_string();
    if name.contains("'") {
        name = format!("`{}`", name);
    }

    new_descriptor_str(name.as_str(), suffix)
}

/// Loosely based on `def_to_moniker`
///
/// Only returns a Symbol when it's a non-local symbol.
/// So if the visibility isn't outside of a document, then it will return None
fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
    use scip_types::descriptor::Suffix::*;

    let package_name = moniker.package_information.name.clone();
    let version = moniker.package_information.version.clone();
    let descriptors = moniker
        .identifier
        .description
        .iter()
        .map(|desc| {
            new_descriptor(
                desc.name.clone(),
                match desc.desc {
                    MonikerDescriptorKind::Namespace => Namespace,
                    MonikerDescriptorKind::Type => Type,
                    MonikerDescriptorKind::Term => Term,
                    MonikerDescriptorKind::Method => Method,
                    MonikerDescriptorKind::TypeParameter => TypeParameter,
                    MonikerDescriptorKind::Parameter => Parameter,
                    MonikerDescriptorKind::Macro => Macro,
                    MonikerDescriptorKind::Meta => Meta,
                },
            )
        })
        .collect();

    scip_types::Symbol {
        scheme: "rust-analyzer".into(),
        package: Some(scip_types::Package {
            manager: "cargo".to_string(),
            name: package_name,
            version,
            ..Default::default()
        })
        .into(),
        descriptors,
        ..Default::default()
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use hir::Semantics;
    use ide::{AnalysisHost, FilePosition};
    use ide_db::defs::IdentClass;
    use ide_db::{base_db::fixture::ChangeFixture, helpers::pick_best_token};
    use scip::symbol::format_symbol;
    use syntax::SyntaxKind::*;
    use syntax::{AstNode, T};

    fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
        let mut host = AnalysisHost::default();
        let change_fixture = ChangeFixture::parse(ra_fixture);
        host.raw_database_mut().apply_change(change_fixture.change);
        let (file_id, range_or_offset) =
            change_fixture.file_position.expect("expected a marker ($0)");
        let offset = range_or_offset.expect_offset();
        (host, FilePosition { file_id, offset })
    }

    /// If expected == "", then assert that there are no symbols (this is basically local symbol)
    #[track_caller]
    fn check_symbol(ra_fixture: &str, expected: &str) {
        let (host, position) = position(ra_fixture);

        let FilePosition { file_id, offset } = position;

        let db = host.raw_database();
        let sema = &Semantics::new(db);
        let file = sema.parse(file_id).syntax().clone();
        let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
            IDENT
            | INT_NUMBER
            | LIFETIME_IDENT
            | T![self]
            | T![super]
            | T![crate]
            | T![Self]
            | COMMENT => 2,
            kind if kind.is_trivia() => 0,
            _ => 1,
        })
        .expect("OK OK");

        let navs = sema
            .descend_into_macros(original_token.clone())
            .into_iter()
            .filter_map(|token| {
                IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
                    it.into_iter().flat_map(|def| {
                        let module = def.module(db).unwrap();
                        let current_crate = module.krate();

                        match MonikerResult::from_def(sema.db, def, current_crate) {
                            Some(moniker_result) => Some(moniker_to_symbol(&moniker_result)),
                            None => None,
                        }
                    })
                })
            })
            .flatten()
            .collect::<Vec<_>>();

        if expected == "" {
            assert_eq!(0, navs.len(), "must have no symbols {:?}", navs);
            return;
        }

        assert_eq!(1, navs.len(), "must have one symbol {:?}", navs);

        let res = navs.get(0).unwrap();
        let formatted = format_symbol(res.clone());
        assert_eq!(formatted, expected);
    }

    #[test]
    fn basic() {
        check_symbol(
            r#"
//- /lib.rs crate:main deps:foo
use foo::example_mod::func;
fn main() {
    func$0();
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod example_mod {
    pub fn func() {}
}
"#,
            "rust-analyzer cargo foo 0.1.0 example_mod/func().",
        );
    }

    #[test]
    fn symbol_for_trait() {
        check_symbol(
            r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub trait MyTrait {
        pub fn func$0() {}
    }
}
"#,
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#func().",
        );
    }

    #[test]
    fn symbol_for_trait_constant() {
        check_symbol(
            r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub trait MyTrait {
        const MY_CONST$0: u8;
    }
}
"#,
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#MY_CONST.",
        );
    }

    #[test]
    fn symbol_for_trait_type() {
        check_symbol(
            r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub trait MyTrait {
        type MyType$0;
    }
}
"#,
            // "foo::module::MyTrait::MyType",
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#[MyType]",
        );
    }

    #[test]
    fn symbol_for_trait_impl_function() {
        check_symbol(
            r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub trait MyTrait {
        pub fn func() {}
    }

    struct MyStruct {}

    impl MyTrait for MyStruct {
        pub fn func$0() {}
    }
}
"#,
            // "foo::module::MyStruct::MyTrait::func",
            "rust-analyzer cargo foo 0.1.0 module/MyStruct#MyTrait#func().",
        );
    }

    #[test]
    fn symbol_for_field() {
        check_symbol(
            r#"
//- /lib.rs crate:main deps:foo
use foo::St;
fn main() {
    let x = St { a$0: 2 };
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub struct St {
    pub a: i32,
}
"#,
            "rust-analyzer cargo foo 0.1.0 St#a.",
        );
    }

    #[test]
    fn local_symbol_for_local() {
        check_symbol(
            r#"
//- /lib.rs crate:main deps:foo
use foo::module::func;
fn main() {
    func();
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub fn func() {
        let x$0 = 2;
    }
}
"#,
            "",
        );
    }
}
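
The new module above implements the `scip` subcommand end to end: `rust-analyzer scip <path>` loads the workspace, walks the static index, and writes `index.scip` to the current directory. Its range encoding is easy to verify in isolation; a standalone sketch (coordinates are hypothetical) of the same logic as text_range_to_scip_range:

// Single-line ranges collapse to [line, start_col, end_col]; multi-line
// ranges keep all four fields.
fn encode(start: (i32, i32), end: (i32, i32)) -> Vec<i32> {
    if start.0 == end.0 {
        vec![start.0, start.1, end.1]
    } else {
        vec![start.0, start.1, end.0, end.1]
    }
}

fn main() {
    assert_eq!(encode((3, 4), (3, 9)), vec![3, 4, 9]);
    assert_eq!(encode((3, 4), (5, 1)), vec![3, 4, 5, 1]);
}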
@ -12,8 +12,8 @@ use std::{ffi::OsString, fmt, iter, path::PathBuf};
use flycheck::FlycheckConfig;
use ide::{
    AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
-    HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
-    Snippet, SnippetScope,
+    HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
+    JoinLinesConfig, Snippet, SnippetScope,
};
use ide_db::{
    imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},

@ -385,6 +385,34 @@ config_data! {
        /// available on a nightly build.
        rustfmt_rangeFormatting_enable: bool = "false",

+        /// Inject additional highlighting into doc comments.
+        ///
+        /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
+        /// doc links.
+        semanticHighlighting_doc_comment_inject_enable: bool = "true",
+        /// Use semantic tokens for operators.
+        ///
+        /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
+        /// they are tagged with modifiers.
+        semanticHighlighting_operator_enable: bool = "true",
+        /// Use specialized semantic tokens for operators.
+        ///
+        /// When enabled, rust-analyzer will emit special token types for operator tokens instead
+        /// of the generic `operator` token type.
+        semanticHighlighting_operator_specialization_enable: bool = "false",
+        /// Use semantic tokens for punctuations.
+        ///
+        /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
+        /// they are tagged with modifiers or have a special role.
+        semanticHighlighting_punctuation_enable: bool = "false",
+        /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
+        /// calls.
+        semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
+        /// Use specialized semantic tokens for punctuations.
+        ///
+        /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
+        /// of the generic `punctuation` token type.
+        semanticHighlighting_punctuation_specialization_enable: bool = "false",
        /// Use semantic tokens for strings.
        ///
        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.

@ -1171,8 +1199,19 @@ impl Config {
        }
    }

-    pub fn highlighting_strings(&self) -> bool {
-        self.data.semanticHighlighting_strings_enable
+    pub fn highlighting_config(&self) -> HighlightConfig {
+        HighlightConfig {
+            strings: self.data.semanticHighlighting_strings_enable,
+            punctuation: self.data.semanticHighlighting_punctuation_enable,
+            specialize_punctuation: self
+                .data
+                .semanticHighlighting_punctuation_specialization_enable,
+            macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
+            operator: self.data.semanticHighlighting_operator_enable,
+            specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
+            inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
+            syntactic_name_ref_highlighting: false,
+        }
    }

    pub fn hover(&self) -> HoverConfig {
@ -4,11 +4,12 @@ pub(crate) mod to_proto;
use std::{mem, sync::Arc};

use ide::FileId;
-use rustc_hash::{FxHashMap, FxHashSet};
+use ide_db::FxHashMap;
+use stdx::hash::{NoHashHashMap, NoHashHashSet};

use crate::lsp_ext;

-pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;
+pub(crate) type CheckFixes = Arc<NoHashHashMap<usize, NoHashHashMap<FileId, Vec<Fix>>>>;

#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {

@ -19,12 +20,12 @@ pub struct DiagnosticsMapConfig {

#[derive(Debug, Default, Clone)]
pub(crate) struct DiagnosticCollection {
-    // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
-    pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+    // FIXME: should be NoHashHashMap<FileId, Vec<ra_id::Diagnostic>>
+    pub(crate) native: NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>,
    // FIXME: should be Vec<flycheck::Diagnostic>
-    pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
+    pub(crate) check: NoHashHashMap<usize, NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
    pub(crate) check_fixes: CheckFixes,
-    changes: FxHashSet<FileId>,
+    changes: NoHashHashSet<FileId>,
}

#[derive(Debug, Clone)]

@ -105,7 +106,7 @@ impl DiagnosticCollection {
        native.chain(check)
    }

-    pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
+    pub(crate) fn take_changes(&mut self) -> Option<NoHashHashSet<FileId>> {
        if self.changes.is_empty() {
            return None;
        }
@ -14,6 +14,7 @@ use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;
use vfs::AnchoredPathBuf;

use crate::{

@ -67,7 +68,7 @@ pub(crate) struct GlobalState {
    pub(crate) flycheck_sender: Sender<flycheck::Message>,
    pub(crate) flycheck_receiver: Receiver<flycheck::Message>,

-    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
    pub(crate) vfs_config_version: u32,
    pub(crate) vfs_progress_config_version: u32,
    pub(crate) vfs_progress_n_total: usize,

@ -113,7 +114,7 @@ pub(crate) struct GlobalStateSnapshot {
    pub(crate) check_fixes: CheckFixes,
    mem_docs: MemDocs,
    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
-    vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
}

@ -157,7 +158,7 @@ impl GlobalState {
            flycheck_sender,
            flycheck_receiver,

-            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
+            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
            vfs_config_version: 0,
            vfs_progress_config_version: 0,
            vfs_progress_n_total: 0,
@ -1504,10 +1504,8 @@ pub(crate) fn handle_semantic_tokens_full(
    let text = snap.analysis.file_text(file_id)?;
    let line_index = snap.file_line_index(file_id)?;

-    let highlights = snap.analysis.highlight(file_id)?;
-    let highlight_strings = snap.config.highlighting_strings();
-    let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+    let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);

    // Unconditionally cache the tokens
    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());

@ -1525,10 +1523,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
    let text = snap.analysis.file_text(file_id)?;
    let line_index = snap.file_line_index(file_id)?;

-    let highlights = snap.analysis.highlight(file_id)?;
-    let highlight_strings = snap.config.highlighting_strings();
-    let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+    let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);

    let mut cache = snap.semantic_tokens_cache.lock();
    let cached_tokens = cache.entry(params.text_document.uri).or_default();

@ -1556,10 +1552,8 @@ pub(crate) fn handle_semantic_tokens_range(
    let text = snap.analysis.file_text(frange.file_id)?;
    let line_index = snap.file_line_index(frange.file_id)?;

-    let highlights = snap.analysis.highlight_range(frange)?;
-    let highlight_strings = snap.config.highlighting_strings();
-    let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+    let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
+    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
    Ok(Some(semantic_tokens.into()))
}
@ -328,8 +328,33 @@ impl GlobalState {
        }

        let uri = file_id_to_url(&self.vfs.read().0, file_id);
-        let diagnostics =
+        let mut diagnostics =
            self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+
+        // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
+        // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
+        // diagnostic messages are actually allowed to be empty or not and patching this
+        // in the VSCode client does not work as the assertion happens in the protocol
+        // conversion. So this hack is here to stay, and will be considered a hack
+        // until the LSP decides to state that empty messages are allowed.
+
+        // See https://github.com/rust-lang/rust-analyzer/issues/11404
+        // See https://github.com/rust-lang/rust-analyzer/issues/13130
+        let patch_empty = |message: &mut String| {
+            if message.is_empty() {
+                *message = " ".to_string();
+            }
+        };
+
+        for d in &mut diagnostics {
+            patch_empty(&mut d.message);
+            if let Some(dri) = &mut d.related_information {
+                for dri in dri {
+                    patch_empty(&mut dri.message);
+                }
+            }
+        }
+
        let version = from_proto::vfs_path(&uri)
            .map(|path| self.mem_docs.get(&path).map(|it| it.version))
            .unwrap_or_default();

@ -529,6 +554,13 @@ impl GlobalState {
            }
            flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
            flycheck::Progress::DidCancel => (Progress::End, None),
+            flycheck::Progress::DidFailToRestart(err) => {
+                self.show_and_log_error(
+                    "cargo check failed".to_string(),
+                    Some(err.to_string()),
+                );
+                return;
+            }
            flycheck::Progress::DidFinish(result) => {
                if let Err(err) = result {
                    self.show_and_log_error(
@ -8,107 +8,130 @@ use lsp_types::{
};

macro_rules! define_semantic_token_types {
-    ($(($ident:ident, $string:literal)),*$(,)?) => {
-        $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
+    (
+        standard {
+            $($standard:ident),*$(,)?
+        }
+        custom {
+            $(($custom:ident, $string:literal)),*$(,)?
+        }
+
+    ) => {
+        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
+        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
-            SemanticTokenType::COMMENT,
-            SemanticTokenType::KEYWORD,
-            SemanticTokenType::STRING,
-            SemanticTokenType::NUMBER,
-            SemanticTokenType::REGEXP,
-            SemanticTokenType::OPERATOR,
-            SemanticTokenType::NAMESPACE,
-            SemanticTokenType::TYPE,
-            SemanticTokenType::STRUCT,
-            SemanticTokenType::CLASS,
-            SemanticTokenType::INTERFACE,
-            SemanticTokenType::ENUM,
-            SemanticTokenType::ENUM_MEMBER,
-            SemanticTokenType::TYPE_PARAMETER,
-            SemanticTokenType::FUNCTION,
-            SemanticTokenType::METHOD,
-            SemanticTokenType::PROPERTY,
-            SemanticTokenType::MACRO,
-            SemanticTokenType::VARIABLE,
-            SemanticTokenType::PARAMETER,
-            $($ident),*
+            $(SemanticTokenType::$standard,)*
+            $($custom),*
        ];
    };
}

define_semantic_token_types![
-    (ANGLE, "angle"),
-    (ARITHMETIC, "arithmetic"),
-    (ATTRIBUTE, "attribute"),
-    (ATTRIBUTE_BRACKET, "attributeBracket"),
-    (BITWISE, "bitwise"),
-    (BOOLEAN, "boolean"),
-    (BRACE, "brace"),
-    (BRACKET, "bracket"),
-    (BUILTIN_ATTRIBUTE, "builtinAttribute"),
-    (BUILTIN_TYPE, "builtinType"),
-    (CHAR, "character"),
-    (COLON, "colon"),
-    (COMMA, "comma"),
-    (COMPARISON, "comparison"),
-    (CONST_PARAMETER, "constParameter"),
-    (DERIVE, "derive"),
-    (DERIVE_HELPER, "deriveHelper"),
-    (DOT, "dot"),
-    (ESCAPE_SEQUENCE, "escapeSequence"),
-    (FORMAT_SPECIFIER, "formatSpecifier"),
-    (GENERIC, "generic"),
-    (LABEL, "label"),
-    (LIFETIME, "lifetime"),
-    (LOGICAL, "logical"),
-    (MACRO_BANG, "macroBang"),
-    (OPERATOR, "operator"),
-    (PARENTHESIS, "parenthesis"),
-    (PUNCTUATION, "punctuation"),
-    (SELF_KEYWORD, "selfKeyword"),
-    (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
-    (SEMICOLON, "semicolon"),
-    (TYPE_ALIAS, "typeAlias"),
-    (TOOL_MODULE, "toolModule"),
-    (UNION, "union"),
-    (UNRESOLVED_REFERENCE, "unresolvedReference"),
+    standard {
+        COMMENT,
+        DECORATOR,
+        ENUM_MEMBER,
+        ENUM,
+        FUNCTION,
+        INTERFACE,
+        KEYWORD,
+        MACRO,
+        METHOD,
+        NAMESPACE,
+        NUMBER,
+        OPERATOR,
+        PARAMETER,
+        PROPERTY,
+        STRING,
+        STRUCT,
+        TYPE_PARAMETER,
+        VARIABLE,
+    }
+
+    custom {
+        (ANGLE, "angle"),
+        (ARITHMETIC, "arithmetic"),
+        (ATTRIBUTE, "attribute"),
+        (ATTRIBUTE_BRACKET, "attributeBracket"),
+        (BITWISE, "bitwise"),
+        (BOOLEAN, "boolean"),
+        (BRACE, "brace"),
+        (BRACKET, "bracket"),
+        (BUILTIN_ATTRIBUTE, "builtinAttribute"),
+        (BUILTIN_TYPE, "builtinType"),
+        (CHAR, "character"),
+        (COLON, "colon"),
+        (COMMA, "comma"),
+        (COMPARISON, "comparison"),
+        (CONST_PARAMETER, "constParameter"),
+        (DERIVE, "derive"),
+        (DERIVE_HELPER, "deriveHelper"),
+        (DOT, "dot"),
+        (ESCAPE_SEQUENCE, "escapeSequence"),
+        (FORMAT_SPECIFIER, "formatSpecifier"),
+        (GENERIC, "generic"),
+        (LABEL, "label"),
+        (LIFETIME, "lifetime"),
+        (LOGICAL, "logical"),
+        (MACRO_BANG, "macroBang"),
+        (PARENTHESIS, "parenthesis"),
+        (PUNCTUATION, "punctuation"),
+        (SELF_KEYWORD, "selfKeyword"),
+        (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
+        (SEMICOLON, "semicolon"),
+        (TYPE_ALIAS, "typeAlias"),
+        (TOOL_MODULE, "toolModule"),
+        (UNION, "union"),
+        (UNRESOLVED_REFERENCE, "unresolvedReference"),
+    }
];

macro_rules! define_semantic_token_modifiers {
-    ($(($ident:ident, $string:literal)),*$(,)?) => {
-        $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
+    (
+        standard {
+            $($standard:ident),*$(,)?
+        }
+        custom {
+            $(($custom:ident, $string:literal)),*$(,)?
+        }
+
+    ) => {
+
+        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
+        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
-            SemanticTokenModifier::DOCUMENTATION,
-            SemanticTokenModifier::DECLARATION,
-            SemanticTokenModifier::DEFINITION,
-            SemanticTokenModifier::STATIC,
-            SemanticTokenModifier::ABSTRACT,
-            SemanticTokenModifier::DEPRECATED,
-            SemanticTokenModifier::READONLY,
-            SemanticTokenModifier::DEFAULT_LIBRARY,
-            $($ident),*
+            $(SemanticTokenModifier::$standard,)*
+            $($custom),*
        ];
    };
}

define_semantic_token_modifiers![
-    (ASYNC, "async"),
-    (ATTRIBUTE_MODIFIER, "attribute"),
-    (CALLABLE, "callable"),
-    (CONSTANT, "constant"),
-    (CONSUMING, "consuming"),
-    (CONTROL_FLOW, "controlFlow"),
-    (CRATE_ROOT, "crateRoot"),
-    (INJECTED, "injected"),
-    (INTRA_DOC_LINK, "intraDocLink"),
-    (LIBRARY, "library"),
-    (MUTABLE, "mutable"),
-    (PUBLIC, "public"),
-    (REFERENCE, "reference"),
-    (TRAIT_MODIFIER, "trait"),
-    (UNSAFE, "unsafe"),
+    standard {
+        DOCUMENTATION,
+        DECLARATION,
+        STATIC,
+        DEFAULT_LIBRARY,
+    }
+    custom {
+        (ASYNC, "async"),
+        (ATTRIBUTE_MODIFIER, "attribute"),
+        (CALLABLE, "callable"),
+        (CONSTANT, "constant"),
+        (CONSUMING, "consuming"),
+        (CONTROL_FLOW, "controlFlow"),
+        (CRATE_ROOT, "crateRoot"),
+        (INJECTED, "injected"),
+        (INTRA_DOC_LINK, "intraDocLink"),
+        (LIBRARY, "library"),
+        (MUTABLE, "mutable"),
+        (PUBLIC, "public"),
+        (REFERENCE, "reference"),
+        (TRAIT_MODIFIER, "trait"),
+        (UNSAFE, "unsafe"),
+    }
];

#[derive(Default)]
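
To make the macro change above concrete, here is a rough sketch (not the literal macro expansion) of what one standard and one custom entry produce: standard names reuse the constants lsp-types already defines, while custom names mint new token types by string.

use lsp_types::SemanticTokenType;

// standard { KEYWORD, ... } reuses the predefined constant:
pub(crate) const KEYWORD: SemanticTokenType = SemanticTokenType::KEYWORD;
// custom { (MACRO_BANG, "macroBang"), ... } mints a new token type:
pub(crate) const MACRO_BANG: SemanticTokenType = SemanticTokenType::new("macroBang");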
@ -517,7 +517,6 @@ pub(crate) fn semantic_tokens(
    text: &str,
    line_index: &LineIndex,
    highlights: Vec<HlRange>,
-    highlight_strings: bool,
) -> lsp_types::SemanticTokens {
    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);

@ -526,10 +525,8 @@ pub(crate) fn semantic_tokens(
        if highlight_range.highlight.is_empty() {
            continue;
        }

        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
-            continue;
-        }
        let token_index = semantic_tokens::type_index(ty);
        let modifier_bitset = mods.0;

@ -561,55 +558,55 @@ fn semantic_token_type_and_modifiers(
    let mut mods = semantic_tokens::ModifierSet::default();
    let type_ = match highlight.tag {
        HlTag::Symbol(symbol) => match symbol {
-            SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
+            SymbolKind::Attribute => semantic_tokens::DECORATOR,
            SymbolKind::Derive => semantic_tokens::DERIVE,
            SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
-            SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
+            SymbolKind::Module => semantic_tokens::NAMESPACE,
            SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
-            SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
-            SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
+            SymbolKind::Field => semantic_tokens::PROPERTY,
+            SymbolKind::TypeParam => semantic_tokens::TYPE_PARAMETER,
            SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
            SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
            SymbolKind::Label => semantic_tokens::LABEL,
-            SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER,
+            SymbolKind::ValueParam => semantic_tokens::PARAMETER,
            SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
            SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
-            SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE,
+            SymbolKind::Local => semantic_tokens::VARIABLE,
            SymbolKind::Function => {
                if highlight.mods.contains(HlMod::Associated) {
-                    lsp_types::SemanticTokenType::METHOD
+                    semantic_tokens::METHOD
                } else {
-                    lsp_types::SemanticTokenType::FUNCTION
+                    semantic_tokens::FUNCTION
                }
            }
            SymbolKind::Const => {
                mods |= semantic_tokens::CONSTANT;
-                mods |= lsp_types::SemanticTokenModifier::STATIC;
-                lsp_types::SemanticTokenType::VARIABLE
+                mods |= semantic_tokens::STATIC;
+                semantic_tokens::VARIABLE
            }
            SymbolKind::Static => {
-                mods |= lsp_types::SemanticTokenModifier::STATIC;
-                lsp_types::SemanticTokenType::VARIABLE
+                mods |= semantic_tokens::STATIC;
+                semantic_tokens::VARIABLE
            }
-            SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT,
-            SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM,
-            SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER,
+            SymbolKind::Struct => semantic_tokens::STRUCT,
+            SymbolKind::Enum => semantic_tokens::ENUM,
+            SymbolKind::Variant => semantic_tokens::ENUM_MEMBER,
            SymbolKind::Union => semantic_tokens::UNION,
            SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
-            SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE,
-            SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO,
+            SymbolKind::Trait => semantic_tokens::INTERFACE,
+            SymbolKind::Macro => semantic_tokens::MACRO,
            SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
            SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
        },
        HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
        HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
        HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
-        HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
+        HlTag::ByteLiteral | HlTag::NumericLiteral => semantic_tokens::NUMBER,
        HlTag::CharLiteral => semantic_tokens::CHAR,
-        HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
+        HlTag::Comment => semantic_tokens::COMMENT,
        HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
        HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
-        HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
+        HlTag::Keyword => semantic_tokens::KEYWORD,
        HlTag::None => semantic_tokens::GENERIC,
        HlTag::Operator(op) => match op {
            HlOperator::Bitwise => semantic_tokens::BITWISE,

@ -618,7 +615,7 @@ fn semantic_token_type_and_modifiers(
            HlOperator::Comparison => semantic_tokens::COMPARISON,
            HlOperator::Other => semantic_tokens::OPERATOR,
        },
-        HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
+        HlTag::StringLiteral => semantic_tokens::STRING,
        HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
        HlTag::Punctuation(punct) => match punct {
            HlPunct::Bracket => semantic_tokens::BRACKET,

@ -643,16 +640,16 @@ fn semantic_token_type_and_modifiers(
            HlMod::Consuming => semantic_tokens::CONSUMING,
            HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
            HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
-            HlMod::DefaultLibrary => lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY,
-            HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
-            HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION,
+            HlMod::DefaultLibrary => semantic_tokens::DEFAULT_LIBRARY,
+            HlMod::Definition => semantic_tokens::DECLARATION,
+            HlMod::Documentation => semantic_tokens::DOCUMENTATION,
            HlMod::Injected => semantic_tokens::INJECTED,
            HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
            HlMod::Library => semantic_tokens::LIBRARY,
            HlMod::Mutable => semantic_tokens::MUTABLE,
            HlMod::Public => semantic_tokens::PUBLIC,
            HlMod::Reference => semantic_tokens::REFERENCE,
-            HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
+            HlMod::Static => semantic_tokens::STATIC,
            HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
            HlMod::Unsafe => semantic_tokens::UNSAFE,
        };
@ -0,0 +1,80 @@
//! A none hashing [`Hasher`] implementation.
use std::{
    hash::{BuildHasher, Hasher},
    marker::PhantomData,
};

pub type NoHashHashMap<K, V> = std::collections::HashMap<K, V, NoHashHasherBuilder<K>>;
pub type NoHashHashSet<K> = std::collections::HashSet<K, NoHashHasherBuilder<K>>;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct NoHashHasherBuilder<T>(PhantomData<T>);

impl<T> Default for NoHashHasherBuilder<T> {
    fn default() -> Self {
        Self(Default::default())
    }
}

pub trait NoHashHashable {}
impl NoHashHashable for usize {}
impl NoHashHashable for u32 {}

pub struct NoHashHasher(u64);

impl<T: NoHashHashable> BuildHasher for NoHashHasherBuilder<T> {
    type Hasher = NoHashHasher;
    fn build_hasher(&self) -> Self::Hasher {
        NoHashHasher(0)
    }
}

impl Hasher for NoHashHasher {
    fn finish(&self) -> u64 {
        self.0
    }

    fn write(&mut self, _: &[u8]) {
        unimplemented!("NoHashHasher should only be used for hashing primitive integers")
    }

    fn write_u8(&mut self, i: u8) {
        self.0 = i as u64;
    }

    fn write_u16(&mut self, i: u16) {
        self.0 = i as u64;
    }

    fn write_u32(&mut self, i: u32) {
        self.0 = i as u64;
    }

    fn write_u64(&mut self, i: u64) {
        self.0 = i as u64;
    }

    fn write_usize(&mut self, i: usize) {
        self.0 = i as u64;
    }

    fn write_i8(&mut self, i: i8) {
        self.0 = i as u64;
    }

    fn write_i16(&mut self, i: i16) {
        self.0 = i as u64;
    }

    fn write_i32(&mut self, i: i32) {
        self.0 = i as u64;
    }

    fn write_i64(&mut self, i: i64) {
        self.0 = i as u64;
    }

    fn write_isize(&mut self, i: isize) {
        self.0 = i as u64;
    }
}
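
A short usage sketch for the pass-through hasher above (it assumes the new module is reachable as stdx::hash): keys that are already well-distributed integers skip the hashing step entirely, and FileId opts in further below via the NoHashHashable marker.

use stdx::hash::NoHashHashMap;

fn main() {
    // u32 implements NoHashHashable, so the key value itself is the hash.
    let mut map: NoHashHashMap<u32, &str> = NoHashHashMap::default();
    map.insert(1, "one");
    map.insert(2, "two");
    assert_eq!(map.get(&1), Some(&"one"));
}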
@ -7,6 +7,7 @@ use std::{cmp::Ordering, ops, time::Instant};
use std::{io as sio, iter};

mod macros;
+pub mod hash;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec;

@ -12,6 +12,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
-indexmap = "1.9.1"

paths = { path = "../paths", version = "0.0.0" }
+indexmap = "1.9.1"
+stdx = { path = "../stdx", version = "0.0.0" }
@ -6,6 +6,7 @@ use std::fmt;

use fst::{IntoStreamer, Streamer};
use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;

use crate::{AnchoredPath, FileId, Vfs, VfsPath};

@ -13,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
    files: FxHashMap<VfsPath, FileId>,
-    paths: FxHashMap<FileId, VfsPath>,
+    paths: NoHashHashMap<FileId, VfsPath>,
}

impl FileSet {
@ -59,9 +59,16 @@ pub use paths::{AbsPath, AbsPathBuf};
/// Handle to a file in [`Vfs`]
///
/// Most functions in rust-analyzer use this when they need to refer to a file.
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct FileId(pub u32);

+impl stdx::hash::NoHashHashable for FileId {}
+impl std::hash::Hash for FileId {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.0.hash(state);
+    }
+}
+
/// Storage for all files read by rust-analyzer.
///
/// For more information see the [crate-level](crate) documentation.
@ -587,6 +587,52 @@ Enables the use of rustfmt's unstable range formatting command for the
`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
available on a nightly build.
--
[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
+
--
Inject additional highlighting into doc comments.

When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
doc links.
--
[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
+
--
Use semantic tokens for operators.

When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
they are tagged with modifiers.
--
[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for operators.

When enabled, rust-analyzer will emit special token types for operator tokens instead
of the generic `operator` token type.
--
[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
+
--
Use semantic tokens for punctuations.

When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
they are tagged with modifiers or have a special role.
--
[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
+
--
When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
calls.
--
[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for punctuations.

When enabled, rust-analyzer will emit special token types for punctuation tokens instead
of the generic `punctuation` token type.
--
[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
+
--
@ -861,3 +861,14 @@ For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watc
    "isBackground": true
}
```

==== Live Share

VS Code Live Share has partial support for rust-analyzer.

Live Share _requires_ the official Microsoft build of VS Code; OSS builds will not work correctly.

The host's rust-analyzer instance will be shared with all guests joining the session.
The guests do not have to have the rust-analyzer extension installed for this to work.

If you are joining a Live Share session and _do_ have rust-analyzer installed locally, commands from the command palette will not work correctly since they will attempt to communicate with the local server.
@ -1084,6 +1084,36 @@
                "default": false,
                "type": "boolean"
            },
+            "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
+                "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
+                "default": true,
+                "type": "boolean"
+            },
+            "rust-analyzer.semanticHighlighting.operator.enable": {
+                "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
+                "default": true,
+                "type": "boolean"
+            },
+            "rust-analyzer.semanticHighlighting.operator.specialization.enable": {
+                "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
+                "default": false,
+                "type": "boolean"
+            },
+            "rust-analyzer.semanticHighlighting.punctuation.enable": {
+                "markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
+                "default": false,
+                "type": "boolean"
+            },
+            "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
+                "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
+                "default": false,
+                "type": "boolean"
+            },
+            "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
+                "markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
+                "default": false,
+                "type": "boolean"
+            },
            "rust-analyzer.semanticHighlighting.strings.enable": {
                "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
                "default": true,
@ -5,7 +5,6 @@ import * as Is from "vscode-languageclient/lib/common/utils/is";
import { assert } from "./util";
import { WorkspaceEdit } from "vscode";
import { Workspace } from "./ctx";
-import { updateConfig } from "./config";
import { substituteVariablesInEnv } from "./config";
import { outputChannel, traceOutputChannel } from "./main";
import { randomUUID } from "crypto";

@ -86,11 +85,6 @@ export async function createClient(

    let initializationOptions = vscode.workspace.getConfiguration("rust-analyzer");

-    // Update outdated user configs
-    await updateConfig(initializationOptions).catch((err) => {
-        void vscode.window.showErrorMessage(`Failed updating old config keys: ${err.message}`);
-    });
-
    if (workspace.kind === "Detached Files") {
        initializationOptions = {
            detachedFiles: workspace.files.map((file) => file.uri.fsPath),

@ -105,22 +99,6 @@ export async function createClient(
        traceOutputChannel: traceOutputChannel(),
        outputChannel: outputChannel(),
        middleware: {
-            async handleDiagnostics(uri, diagnostics, next) {
-                // Workaround for https://github.com/microsoft/vscode/issues/155531
-                for (const diagnostic of diagnostics) {
-                    if (!diagnostic.message) {
-                        diagnostic.message = " ";
-                    }
-                    if (diagnostic.relatedInformation) {
-                        for (const relatedInformation of diagnostic.relatedInformation) {
-                            if (!relatedInformation.message) {
-                                relatedInformation.message = " ";
-                            }
-                        }
-                    }
-                }
-                next(uri, diagnostics);
-            },
            async provideHover(
                document: vscode.TextDocument,
                position: vscode.Position,
@ -3,8 +3,6 @@ import * as vscode from "vscode";
|
|||
import { Env } from "./client";
|
||||
import { log } from "./util";
|
||||
|
||||
export type UpdatesChannel = "stable" | "nightly";
|
||||
|
||||
export type RunnableEnvCfg =
|
||||
| undefined
|
||||
| Record<string, string>
|
||||
|
@ -175,100 +173,6 @@ export class Config {
|
|||
}
|
||||
}
|
||||
|
||||
export async function updateConfig(config: vscode.WorkspaceConfiguration) {
|
||||
const renames = [
|
||||
["assist.allowMergingIntoGlobImports", "imports.merge.glob"],
|
||||
["assist.exprFillDefault", "assist.expressionFillDefault"],
|
||||
["assist.importEnforceGranularity", "imports.granularity.enforce"],
|
||||
["assist.importGranularity", "imports.granularity.group"],
|
||||
["assist.importMergeBehavior", "imports.granularity.group"],
|
||||
["assist.importMergeBehaviour", "imports.granularity.group"],
|
||||
["assist.importGroup", "imports.group.enable"],
|
||||
["assist.importPrefix", "imports.prefix"],
|
||||
["primeCaches.enable", "cachePriming.enable"],
|
||||
["cache.warmup", "cachePriming.enable"],
|
||||
["cargo.loadOutDirsFromCheck", "cargo.buildScripts.enable"],
|
||||
["cargo.runBuildScripts", "cargo.buildScripts.enable"],
|
||||
["cargo.runBuildScriptsCommand", "cargo.buildScripts.overrideCommand"],
|
||||
["cargo.useRustcWrapperForBuildScripts", "cargo.buildScripts.useRustcWrapper"],
|
||||
["completion.snippets", "completion.snippets.custom"],
|
||||
["diagnostics.enableExperimental", "diagnostics.experimental.enable"],
|
||||
["experimental.procAttrMacros", "procMacro.attributes.enable"],
|
||||
["highlighting.strings", "semanticHighlighting.strings.enable"],
|
||||
["highlightRelated.breakPoints", "highlightRelated.breakPoints.enable"],
|
||||
["highlightRelated.exitPoints", "highlightRelated.exitPoints.enable"],
|
||||
["highlightRelated.yieldPoints", "highlightRelated.yieldPoints.enable"],
|
||||
["highlightRelated.references", "highlightRelated.references.enable"],
|
||||
["hover.documentation", "hover.documentation.enable"],
|
||||
["hover.linksInHover", "hover.links.enable"],
|
||||
["hoverActions.linksInHover", "hover.links.enable"],
|
||||
["hoverActions.debug", "hover.actions.debug.enable"],
|
||||
["hoverActions.enable", "hover.actions.enable.enable"],
|
||||
["hoverActions.gotoTypeDef", "hover.actions.gotoTypeDef.enable"],
|
||||
["hoverActions.implementations", "hover.actions.implementations.enable"],
|
||||
["hoverActions.references", "hover.actions.references.enable"],
|
||||
["hoverActions.run", "hover.actions.run.enable"],
|
||||
["inlayHints.chainingHints", "inlayHints.chainingHints.enable"],
|
||||
["inlayHints.closureReturnTypeHints", "inlayHints.closureReturnTypeHints.enable"],
|
||||
["inlayHints.hideNamedConstructorHints", "inlayHints.typeHints.hideNamedConstructor"],
|
||||
["inlayHints.parameterHints", "inlayHints.parameterHints.enable"],
|
||||
["inlayHints.reborrowHints", "inlayHints.reborrowHints.enable"],
|
||||
["inlayHints.typeHints", "inlayHints.typeHints.enable"],
|
||||
["lruCapacity", "lru.capacity"],
|
||||
["runnables.cargoExtraArgs", "runnables.extraArgs"],
|
||||
["runnables.overrideCargo", "runnables.command"],
|
||||
["rustcSource", "rustc.source"],
|
||||
["rustfmt.enableRangeFormatting", "rustfmt.rangeFormatting.enable"],
|
||||
];
|
||||
|
||||
    for (const [oldKey, newKey] of renames) {
        const inspect = config.inspect(oldKey);
        if (inspect !== undefined) {
            const valMatrix = [
                {
                    val: inspect.globalValue,
                    langVal: inspect.globalLanguageValue,
                    target: vscode.ConfigurationTarget.Global,
                },
                {
                    val: inspect.workspaceFolderValue,
                    langVal: inspect.workspaceFolderLanguageValue,
                    target: vscode.ConfigurationTarget.WorkspaceFolder,
                },
                {
                    val: inspect.workspaceValue,
                    langVal: inspect.workspaceLanguageValue,
                    target: vscode.ConfigurationTarget.Workspace,
                },
            ];
            for (const { val, langVal, target } of valMatrix) {
                const patch = (val: unknown) => {
                    // Some of these updates only append "enable" or "custom" to the key.
                    // On the next run we would find those values again, but as objects
                    // with these properties, and migrating them a second time would
                    // destroy the config, so filter the already-updated ones out.
                    return (
                        val !== undefined &&
                        !(
                            typeof val === "object" &&
                            val !== null &&
                            (oldKey === "completion.snippets" || !val.hasOwnProperty("custom"))
                        )
                    );
                };
                if (patch(val)) {
                    await config.update(newKey, val, target, false);
                    await config.update(oldKey, undefined, target, false);
                }
                if (patch(langVal)) {
                    await config.update(newKey, langVal, target, true);
                    await config.update(oldKey, undefined, target, true);
                }
            }
        }
    }
}

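The comments inside `patch` carry the key invariant of this migration: renames that merely append `.enable` or `.custom` would be re-detected on a later run, now as objects, and a second rewrite would destroy them. Below is a minimal standalone sketch of that guard, assuming it mirrors `patch` above; the name `shouldMigrate` and the sample values are illustrative, not part of the extension.

// Sketch of the idempotency guard: migrate a value only if it is not
// already the object produced by a previous migration pass.
function shouldMigrate(oldKey: string, val: unknown): boolean {
    return (
        val !== undefined &&
        !(
            typeof val === "object" &&
            val !== null &&
            (oldKey === "completion.snippets" || !val.hasOwnProperty("custom"))
        )
    );
}

// A plain value under the old key still needs migrating:
console.assert(shouldMigrate("hover.documentation", true));
// The object a previous run wrote must be left alone, or its nested
// `enable` value would be clobbered:
console.assert(!shouldMigrate("hover.documentation", { enable: true }));
// `completion.snippets` is special-cased: its old value was itself an
// object, so any object under that key counts as already migrated:
console.assert(!shouldMigrate("completion.snippets", { custom: {} }));
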
export function substituteVariablesInEnv(env: Env): Env {
    const missingDeps = new Set<string>();
    // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier

@ -33,7 +33,7 @@ export function outputChannel() {
}

export interface RustAnalyzerExtensionApi {
    client: lc.LanguageClient;
    client?: lc.LanguageClient;
}

export async function activate(
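Making `client` optional changes the contract for anything consuming the exported extension API: `activate()` can now resolve to an empty object. A sketch of defensive use from a hypothetical downstream extension follows; the extension id shown is an assumption, and only the optional `client` shape comes from the interface above.

import * as vscode from "vscode";

async function withRustAnalyzerClient(): Promise<void> {
    // Assumed extension id; adjust to whatever id is actually published.
    const ext = vscode.extensions.getExtension("rust-lang.rust-analyzer");
    const api = await ext?.activate();
    if (api?.client === undefined) {
        // activate() resolved to {}: no local folder or Rust document was
        // open, so no language client was started.
        return;
    }
    // The client is known to exist here; e.g. inspect server capabilities.
    console.log(api.client.initializeResult?.capabilities);
}
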
@ -48,6 +48,23 @@ export async function activate(
}

async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
    // We only support local folders, not e.g. Live Share (`vlsl:` scheme), so don't activate
    // if only those are in use.
    // (r-a still somewhat works with Live Share, because commands are tunneled to the host.)
    const folders = (vscode.workspace.workspaceFolders || []).filter(
        (folder) => folder.uri.scheme === "file"
    );
    const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
        isRustDocument(document)
    );

    if (folders.length === 0 && rustDocuments.length === 0) {
        // FIXME: Ideally we would choose not to activate at all (and avoid registering
        // non-functional editor commands), but VS Code doesn't seem to have a good way of
        // doing that.
        return {};
    }

    const config = new Config(context);
    const state = new PersistentState(context.globalState);
    const serverPath = await bootstrap(context, config, state).catch((err) => {
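The new early return encodes the rule spelled out in the comments: only start when there is a local (`file:` scheme) folder or at least one open Rust document. Restated as a standalone predicate for clarity; `shouldStartServer` and the `languageId` check (standing in for `isRustDocument`) are illustrative, not extension code.

import * as vscode from "vscode";

function shouldStartServer(): boolean {
    // Live Share folders use the `vlsl:` scheme and are excluded; guest
    // commands still work because they are tunneled to the host.
    const localFolders = (vscode.workspace.workspaceFolders || []).filter(
        (folder) => folder.uri.scheme === "file"
    );
    const rustDocuments = vscode.workspace.textDocuments.filter(
        (document) => document.languageId === "rust"
    );
    return localFolders.length > 0 || rustDocuments.length > 0;
}
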
@ -60,18 +77,11 @@ async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
        throw new Error(message);
    });

    if ((vscode.workspace.workspaceFolders || []).length === 0) {
        const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
            isRustDocument(document)
        );
        if (rustDocuments.length > 0) {
            ctx = await Ctx.create(config, context, serverPath, {
                kind: "Detached Files",
                files: rustDocuments,
            });
        } else {
            throw new Error("no rust files are opened");
        }
    if (folders.length === 0) {
        ctx = await Ctx.create(config, context, serverPath, {
            kind: "Detached Files",
            files: rustDocuments,
        });
    } else {
        // Note: we try to start the server before we activate type hints so that it
        // registers its `onDidChangeDocument` handler before us.
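The trailing note relies on VS Code firing `onDidChangeTextDocument` listeners in the order they were registered, which holds in practice but is not a documented guarantee. A small sketch of why the ordering matters; the handler bodies are illustrative.

import * as vscode from "vscode";

const seenBy: string[] = [];
// Registered first, so the server-bound handler observes each edit first,
// and later handlers never act on stale server state.
vscode.workspace.onDidChangeTextDocument(() => seenBy.push("server"));
vscode.workspace.onDidChangeTextDocument(() => seenBy.push("type hints"));
// After a single edit, seenBy is ["server", "type hints"].
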
@ -15,7 +15,7 @@ pub(crate) fn socket_transport(
    stream: TcpStream,
) -> (Sender<Message>, Receiver<Message>, IoThreads) {
    let (reader_receiver, reader) = make_reader(stream.try_clone().unwrap());
    let (writer_sender, writer) = make_write(stream.try_clone().unwrap());
    let (writer_sender, writer) = make_write(stream);
    let io_threads = make_io_threads(reader, writer);
    (writer_sender, reader_receiver, io_threads)
}