Merge pull request #1775 from Mark-Simulacrum/fix-rls

Fix breakage due to upstream rustc changes
This commit is contained in:
Mark Rousskov 2022-05-17 10:58:49 -04:00 committed by GitHub
commit 1acff66049
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
47 changed files with 12643 additions and 132 deletions

229
Cargo.lock generated
View File

@ -19,10 +19,11 @@ dependencies = [
[[package]]
name = "annotate-snippets"
version = "0.8.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d78ea013094e5ea606b1c05fe35f1dd7ea1eb1ea259908d040b25bd5ec677ee5"
checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
dependencies = [
"unicode-width",
"yansi-term",
]
@ -125,13 +126,13 @@ dependencies = [
[[package]]
name = "cargo"
version = "0.62.0"
source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8"
version = "0.63.0"
source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1"
dependencies = [
"anyhow",
"atty",
"bytesize",
"cargo-platform 0.1.2 (git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8)",
"cargo-platform 0.1.2 (git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1)",
"cargo-util",
"clap 3.1.2",
"crates-io",
@ -150,7 +151,8 @@ dependencies = [
"humantime 2.1.0",
"ignore",
"im-rc",
"itertools 0.10.1",
"indexmap",
"itertools",
"jobserver",
"lazy_static",
"lazycell",
@ -160,6 +162,7 @@ dependencies = [
"memchr",
"opener",
"os_info",
"pathdiff",
"percent-encoding 2.1.0",
"rustc-workspace-hack",
"rustfix",
@ -192,15 +195,15 @@ dependencies = [
[[package]]
name = "cargo-platform"
version = "0.1.2"
source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8"
source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1"
dependencies = [
"serde",
]
[[package]]
name = "cargo-util"
version = "0.1.2"
source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8"
version = "0.1.3"
source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1"
dependencies = [
"anyhow",
"core-foundation",
@ -240,12 +243,6 @@ dependencies = [
"jobserver",
]
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -275,13 +272,28 @@ checksum = "5177fac1ab67102d8989464efd043c6ff44191b1557ec1ddd489b4f7e1447e77"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"indexmap",
"lazy_static",
"os_str_bytes",
"strsim 0.10.0",
"termcolor",
"textwrap 0.14.2",
]
[[package]]
name = "clap_derive"
version = "3.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
dependencies = [
"heck 0.4.0",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clippy_lints"
version = "0.1.60"
@ -290,7 +302,7 @@ dependencies = [
"cargo_metadata",
"clippy_utils",
"if_chain",
"itertools 0.10.1",
"itertools",
"pulldown-cmark",
"quine-mc_cluskey",
"regex-syntax",
@ -366,7 +378,7 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
[[package]]
name = "crates-io"
version = "0.34.0"
source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8"
source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1"
dependencies = [
"anyhow",
"curl",
@ -382,7 +394,7 @@ version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3825b1e8580894917dc4468cb634a1b4e9745fddc854edad72d9c04644c0319f"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
]
[[package]]
@ -391,7 +403,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"crossbeam-utils",
]
@ -401,7 +413,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"crossbeam-epoch",
"crossbeam-utils",
]
@ -412,7 +424,7 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"crossbeam-utils",
"lazy_static",
"memoffset",
@ -425,7 +437,7 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"lazy_static",
]
@ -510,14 +522,23 @@ checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
[[package]]
name = "dirs"
version = "2.0.2"
version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3"
checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
dependencies = [
"cfg-if 0.1.10",
"dirs-sys",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys"
version = "0.3.6"
@ -529,6 +550,17 @@ dependencies = [
"winapi",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi",
]
[[package]]
name = "either"
version = "1.6.1"
@ -548,19 +580,6 @@ dependencies = [
"termcolor",
]
[[package]]
name = "env_logger"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3"
dependencies = [
"atty",
"humantime 2.1.0",
"log",
"regex",
"termcolor",
]
[[package]]
name = "env_logger"
version = "0.9.0"
@ -580,7 +599,7 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"libc",
"redox_syscall",
"winapi",
@ -592,7 +611,7 @@ version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"crc32fast",
"libc",
"libz-sys",
@ -758,7 +777,7 @@ version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
@ -769,7 +788,7 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"libc",
"wasi 0.10.2+wasi-snapshot-preview1",
]
@ -835,6 +854,12 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "heck"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
[[package]]
name = "hermit-abi"
version = "0.1.19"
@ -956,16 +981,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "itertools"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
dependencies = [
"either",
"cfg-if",
]
[[package]]
@ -1104,11 +1120,11 @@ dependencies = [
[[package]]
name = "kstring"
version = "1.0.6"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b310ccceade8121d7d77fee406160e457c2f4e7c7982d589da3499bc7ea4526"
checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747"
dependencies = [
"serde",
"static_assertions",
]
[[package]]
@ -1194,7 +1210,7 @@ version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
]
[[package]]
@ -1316,7 +1332,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
@ -1399,7 +1415,7 @@ version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"instant",
"libc",
"redox_syscall",
@ -1407,6 +1423,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "pathdiff"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "percent-encoding"
version = "1.0.1"
@ -1529,8 +1551,6 @@ dependencies = [
[[package]]
name = "racer"
version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64954e44fc0d1dcc64e0b9f2b155249ad62849eba25354b76ae1598d1e8f0fa0"
dependencies = [
"bitflags",
"clap 2.33.3",
@ -1538,10 +1558,28 @@ dependencies = [
"env_logger 0.7.1",
"humantime 2.1.0",
"lazy_static",
"lazycell",
"log",
"racer-cargo-metadata",
"rls-span",
]
[[package]]
name = "racer-cargo-metadata"
version = "0.1.2"
dependencies = [
"racer-interner",
"serde",
"serde_json",
]
[[package]]
name = "racer-interner"
version = "0.1.0"
dependencies = [
"serde",
]
[[package]]
name = "rand"
version = "0.7.3"
@ -1721,9 +1759,9 @@ dependencies = [
"difference",
"env_logger 0.9.0",
"futures 0.3.18",
"heck",
"heck 0.3.3",
"home",
"itertools 0.10.1",
"itertools",
"jsonrpc-core",
"lazy_static",
"log",
@ -1764,7 +1802,7 @@ version = "0.18.3"
dependencies = [
"derive-new",
"fst",
"itertools 0.10.1",
"itertools",
"json",
"log",
"rls-data",
@ -1871,7 +1909,7 @@ dependencies = [
[[package]]
name = "rustfmt-config_proc_macro"
version = "0.2.0"
source = "git+https://github.com/rust-lang/rustfmt?rev=5056f4cfb311a084420f1828cd58af94d143f5e0#5056f4cfb311a084420f1828cd58af94d143f5e0"
source = "git+https://github.com/rust-lang/rustfmt?rev=8a4c05865be17bac75b8d53eae5be18d749a0f5c#8a4c05865be17bac75b8d53eae5be18d749a0f5c"
dependencies = [
"proc-macro2",
"quote",
@ -1881,19 +1919,20 @@ dependencies = [
[[package]]
name = "rustfmt-nightly"
version = "1.4.38"
source = "git+https://github.com/rust-lang/rustfmt?rev=5056f4cfb311a084420f1828cd58af94d143f5e0#5056f4cfb311a084420f1828cd58af94d143f5e0"
source = "git+https://github.com/rust-lang/rustfmt?rev=8a4c05865be17bac75b8d53eae5be18d749a0f5c#8a4c05865be17bac75b8d53eae5be18d749a0f5c"
dependencies = [
"annotate-snippets",
"anyhow",
"bytecount",
"cargo_metadata",
"clap 3.1.2",
"derive-new",
"diff",
"dirs",
"env_logger 0.8.4",
"env_logger 0.9.0",
"getopts",
"ignore",
"itertools 0.9.0",
"itertools",
"lazy_static",
"log",
"regex",
@ -1901,7 +1940,6 @@ dependencies = [
"rustfmt-config_proc_macro",
"serde",
"serde_json",
"structopt",
"term",
"thiserror",
"toml",
@ -1910,6 +1948,12 @@ dependencies = [
"unicode_categories",
]
[[package]]
name = "rustversion"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
[[package]]
name = "ryu"
version = "1.0.5"
@ -2066,6 +2110,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strip-ansi-escapes"
version = "0.1.1"
@ -2087,30 +2137,6 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "structopt"
version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40b9788f4202aa75c240ecc9c15c65185e6a39ccdeb0fd5d008b98825464c87c"
dependencies = [
"clap 2.33.3",
"lazy_static",
"structopt-derive",
]
[[package]]
name = "structopt-derive"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "syn"
version = "1.0.81"
@ -2138,7 +2164,7 @@ version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
dependencies = [
"cfg-if 1.0.0",
"cfg-if",
"libc",
"rand 0.8.4",
"redox_syscall",
@ -2148,11 +2174,12 @@ dependencies = [
[[package]]
name = "term"
version = "0.6.1"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0863a3345e70f61d613eab32ee046ccd1bcc5f9105fe402c61fcd0c13eeb8b5"
checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
dependencies = [
"dirs",
"dirs-next",
"rustversion",
"winapi",
]
@ -2278,13 +2305,13 @@ dependencies = [
[[package]]
name = "toml_edit"
version = "0.13.4"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "744e9ed5b352340aa47ce033716991b5589e23781acb97cad37d4ea70560f55b"
checksum = "5376256e44f2443f8896ac012507c19a012df0fe8758b55246ae51a2279db51f"
dependencies = [
"combine",
"indexmap",
"itertools 0.10.1",
"itertools",
"kstring",
"serde",
]
@ -2339,9 +2366,9 @@ checksum = "58dd944fd05f2f0b5c674917aea8a4df6af84f2d8de3fe8d988b95d28fb8fb09"
[[package]]
name = "unicode-segmentation"
version = "1.8.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b"
checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
[[package]]
name = "unicode-width"

View File

@ -31,8 +31,8 @@ rls-vfs = "0.8"
rls-ipc = { version = "0.1.0", path = "rls-ipc", optional = true }
anyhow = "1.0.26"
cargo = { git = "https://github.com/rust-lang/cargo", rev = "1ef1e0a12723ce9548d7da2b63119de9002bead8" }
cargo-util = { git = "https://github.com/rust-lang/cargo", rev = "1ef1e0a12723ce9548d7da2b63119de9002bead8" }
cargo = { git = "https://github.com/rust-lang/cargo", rev = "3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" }
cargo-util = { git = "https://github.com/rust-lang/cargo", rev = "3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" }
cargo_metadata = "0.14"
clippy_lints = { git = "https://github.com/rust-lang/rust-clippy", version = "0.1.60", optional = true }
env_logger = "0.9"
@ -43,11 +43,11 @@ lsp-types = { version = "0.60", features = ["proposed"] }
lazy_static = "1"
log = "0.4"
num_cpus = "1"
racer = { version = "2.2", default-features = false }
racer = { path = "racer" }
rand = "0.8"
rayon = "1"
rustc_tools_util = "0.2"
rustfmt-nightly = { git = "https://github.com/rust-lang/rustfmt", rev = "5056f4cfb311a084420f1828cd58af94d143f5e0" }
rustfmt-nightly = { git = "https://github.com/rust-lang/rustfmt", rev = "8a4c05865be17bac75b8d53eae5be18d749a0f5c" }
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
@ -58,7 +58,7 @@ regex = "1"
ordslice = "0.3"
crossbeam-channel = "0.5"
toml = "0.5"
toml_edit = { version = "0.13.4", features = ["easy"] }
toml_edit = { version = "0.14.3", features = ["easy"] }
heck = "0.3"
# A noop dependency that changes in the Rust repository, it's a bit of a hack.

49
racer/.github/workflows/ci.yml vendored Normal file
View File

@ -0,0 +1,49 @@
name: CI
on:
pull_request:
types: [opened, synchronize, reopened]
push:
branches:
- master
- '*'
schedule:
- cron: '0 0 * * *' # Nightly at 00:00 UTC
jobs:
build_and_test:
strategy:
fail-fast: false
matrix:
toolchain:
- x86_64-unknown-linux-gnu
- x86_64-apple-darwin
- x86_64-pc-windows-msvc
- i686-pc-windows-msvc
include:
- toolchain: x86_64-unknown-linux-gnu
builder: ubuntu-latest
os: linux
- toolchain: x86_64-apple-darwin
builder: macos-latest
os: macos
- toolchain: x86_64-pc-windows-msvc
builder: windows-latest
os: windows
- toolchain: i686-pc-windows-msvc
builder: windows-latest
os: windows
name: nightly - ${{ matrix.toolchain }}
runs-on: ${{ matrix.builder }}
steps:
- uses: actions/checkout@v2
- name: Use latest nightly on scheduled builds
if: github.event_name == 'schedule'
run: echo "nightly" > rust-toolchain
- run: rustup set default-host ${{ matrix.toolchain }}
- run: rustup component add rust-src
- run: rustc -vV
- run: cargo build --verbose --all
- run: cargo test --all

9
racer/.gitignore vendored Normal file
View File

@ -0,0 +1,9 @@
\#*
src/scopes
!.travis.yml
*tmpfile*
*.racertmp
target/
*.py[cod]
.vscode/**
*.log

0
racer/.rustfmt.toml Normal file
View File

235
racer/CHANGELOG.md Normal file
View File

@ -0,0 +1,235 @@
Change Log
==========
All notable changes to this project will be documented in this file. This
project adheres to [Semantic Versioning](https://semver.org/).
# 2.1.37
- Bump rustc-ap-* version to 677.0
- Account for new standard library source directory layout
# 2.1.37
- Bump rustc-ap-* version to 671.0
# 2.1.36
- Bump rustc-ap-* version to 669.0
# 2.1.35
- Bump rustc-ap-* version to 664.0
# 2.1.34
- Bump rustc-ap-* version to 659.0
- Fix submodule search (#1107)
# 2.1.33
- Bump rustc-ap-* version to 654.0
# 2.1.32
- Bump rustc-ap-* version to 651.0
# 2.1.31
- Bump rustc-ap-* version to 642.0
# 2.1.30
- Support for union(#1086)
# 2.1.29
- Support async/await syntax(#1083, #1085)
# 2.1.28
- Update the version of rustc-ap-syntax
# 2.1.27
- Update the version of rustc-ap-syntax
# 2.1.26
- Update the version of rustc-ap-syntax
# 2.1.25
- Update the version of rustc-ap-syntax
# 2.1.24
- Rust 2018 (#1051)
- Update the version of rustc-ap-syntax
# 2.1.22
- Fix completion for `super::super::...`(#1053)
# 2.1.20, 2.1.21
- Fix completion in testdir for Rust 2018(#1022)
- Fix enum variant completion for pub(crate) enum(#1025)
# 2.1.18, 2.1.19
- Update rustc-ap-syntax
# 2.1.17, 2.1.18
- Fix doc comment parsing(#1010)
# 2.1.15, 2.1.16
- Handle CRLF correctly(#1007)
# 2.1.14
- Completion for binary operation(#976)
# 2.1.10, 2.1.11, 2.1.12, 2.1.13
- Completion for impl trait(#985, #986)
- Completion for use as(#988)
# 2.1.8, 2.1.9
- Completion for trait objects(#972)
- Completion for simple closure return types(#973)
# 2.1.7
- Lots of refactoring(#961, #963, #965)
- Add `is_use_statement` for RLS(#965)
# 2.1.6
- Completion based on impl<T: Bound> #948
- Fix for argument completion #943
- Trait bound in where clause #937
# 2.1.5
- migrate to cargo metadata #930
# 2.1.3
- Make cargo optional for RLS #910
## 2.1.2
- Fix bug around getting `use` context #906
- Update rustc-ap-syntax to fix build in current nightly #911
## 2.1.1
- Fix coordinate bug
- Get doc string for macro #905
## 2.1.0
- Support completions for stdlib macros #902
- Support extern "~" block #895
- Support `crate_in_paths` #891
- Fix bug of getting completion context from `use` statement #886
- Handle const unsafe fn #879
- Limit recursion depth through glob imports #875
- Enable completion based on trait bound for function args #871
- Fix bug in search_closure_args #862
- Replace cargo.rs with cargo crate #855
- Migrate over to rustc_ap_syntax #854
- Make RUST_SRC_PATH optional #808
- Refactor based on clippy #860
## 2.0.14
- Cache generic impls #839
- Cache parsed TOML file and cargo crate roots #838
- Skip `pub` keyword as a temporary fix for #624 #850
- Remove complex generic type by impl trait #848
- Fix bug for array expression #841
- Support completion for enum variants without type annotation #825
- Fix bug for raw string #822
## 2.0.13
- Fix bug for finding the start of match statement #819
## 2.0.12
- Fix bug that broke completions in previous release #807
## 2.0.11
- Use `rustup` to find libstd path even when used as library #799
## 2.0.10
- Support resolving `use as` aliases declared in multi-element `use` statements #753
- Provide suggestions for global paths in more cases #765
- Suggestions imported via `use as` statements now return their in-scope alias as the match string #767
- Add new commands for converting between points and coordinates in files #776
- Return fewer duplicate suggestions #778
- Handle cases where mod names and trait methods collide, such as `fmt` #781
## 2.0.9
- Support completion after using try operator `?` #726
- Find methods on cooked string literals #728
- Fix bug caused by closure completions feature #734
- Find static methods on enums #737
- Find doc comments on named and indexed struct fields #739
- Find `pub(restricted)` items #748
## 2.0.8
- Fix bug finding definitions where impl contains bang #717
- Find definition for closures #697
- Resolve types for tuple struct fields #722
- Resolve types for let patterns #724
- Fix completions for reference fields #723
## 2.0.7
- Fix panic with macros called `impl*` #701
- Relax semver specs
## 2.0.6
- resolve Self (e.g. in-impl function calls like Self::myfunction())
- Fix stack overflow issue on unresolvable imports :tada: #698
## 2.0.5
- Chained completions on separate lines now work #686
## 2.0.4
- Fix for find-doc not always returning full doc string #675
## 2.0.3
- Fix for recursion in certain `use foo::{self, ..}` cases #669
## 2.0.2
- Internal fixes so we can publish on crates.io
## 2.0.1
- Syntex 0.52 #643
- Fix `racer --help` bug from 2.0 refactor #662
- Support short revision identifiers for git checkout paths #664
- Handle self resolution when using `use mymod::{self, Thing}` #665
- Fix type alias resolution #666
## 2.0
- Rework public API to hide many implementation details and allow the project to
move forward without breaking changes.
- Many fixes that didn't make it into the changelog, but we're going to work on
that in the future!
## 1.2
- Added basic 'daemon' mode, racer process can be kept running between
invocations
- now uses clap to parse command line options
- Adds caching of file source and code indices
- Adds an alternative 'tabbed' mode where inputs and outputs can be tab
separated for easier parsing
- emacs and vim support split out into their own git projects [emacs-racer] and
[vim-racer], respectively.
- Fix issue resolving some `std::*` modules in latest rust source: (rust std lib
implicitly imports core with `#![no_std]`)
- Searches multirust overrides when locating cargo src directories
## 1.0.0 2015-07-29
- First release
[vim-racer]: https://github.com/racer-rust/vim-racer
[emacs-racer]: https://github.com/racer-rust/emacs-racer

43
racer/Cargo.toml Normal file
View File

@ -0,0 +1,43 @@
[package]
name = "racer"
version = "2.2.2"
license = "MIT"
description = "Code completion for Rust"
authors = ["Phil Dawes <phil@phildawes.net>", "The Racer developers"]
homepage = "https://github.com/racer-rust/racer"
repository = "https://github.com/racer-rust/racer"
edition = "2018"
[lib]
name = "racer"
path = "src/racer/lib.rs"
[profile.release]
debug = false # because of #1005
[dependencies]
bitflags = "1.0"
log = "0.4"
env_logger = "0.7.1"
clap = "2.32"
lazy_static = "1.2"
humantime = "2.0.0"
derive_more = "0.99.2"
rls-span = "0.5.1"
lazycell = { version = "1.2", optional = true }
[dependencies.racer-cargo-metadata]
version = "0.1"
optional = true
path = "metadata"
[features]
default = ["metadata"]
metadata = ["lazycell", "racer-cargo-metadata"]
[workspace]
members = ["interner", "metadata"]
[package.metadata.rust-analyzer]
# This package uses #[feature(rustc_private)]
rustc_private = true

25
racer/LICENSE-MIT Normal file
View File

@ -0,0 +1,25 @@
Copyright (c) 2014 Phil Dawes
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

137
racer/README.md Normal file
View File

@ -0,0 +1,137 @@
# *Racer* - code completion for [Rust](https://www.rust-lang.org/)
[![Build Status](https://github.com/racer-rust/racer/workflows/CI/badge.svg?branch=master)](https://github.com/racer-rust/racer/actions?query=workflow%3ACI+branch%3Amaster)
![racer completion screenshot](images/racer_completion.png)
![racer eldoc screenshot](images/racer_eldoc.png)
*RACER* = *R*ust *A*uto-*C*omplete-*er*. A utility intended to provide Rust code completion for editors and IDEs. Maybe one day the 'er' bit will be exploring + refactoring or something.
## **DISCLAIMER**
Racer is **not** actively developed now.
Please consider using newer software such as
[rust-analyzer](https://rust-analyzer.github.io/).
## Installation
**NOTE**
From 2.1, racer needs **nightly rust**
### Requirements
#### Current nightly Rust
If you're using rustup, run
```
rustup toolchain install nightly
rustup component add rustc-dev --toolchain=nightly
```
_Note: The second command adds the `rustc-dev` component to the nightly
toolchain, which is necessary to compile Racer._
#### Cargo
Internally, racer calls cargo as a CLI tool, so please make sure cargo is installed
### With `cargo install`
Simply run:
```cargo +nightly install racer```
As mentioned in the command output, don't forget to add the installation directory to your `PATH`.
### From sources
1. Clone the repository: ```git clone https://github.com/racer-rust/racer.git```
2. ```cd racer; cargo +nightly build --release```. The binary will now be in ```./target/release/racer```
3. Add the binary to your `PATH`. This can be done by moving it to a directory already in your `PATH` (i.e. `/usr/local/bin`) or by adding the `./target/release/` directory to your `PATH`
## Configuration
1. Fetch the Rust sourcecode
1. automatically via [rustup](https://www.rustup.rs/) and run `rustup component add rust-src` in order to install the source to `$(rustc --print sysroot)/lib/rustlib/src/rust/library` (or `$(rustc --print sysroot)/lib/rustlib/src/rust/src` in older toolchains). Rustup will keep the sources in sync with the toolchain if you run `rustup update`.
2. manually from git: https://github.com/rust-lang/rust
**Note**
If you want to use `racer` with multiple release channels (Rust has 3 release channels: `stable`, `beta` and `nightly`), you have to also download Rust source code for each release channel you install.
e.g. (rustup case) Add a nightly toolchain build and install nightly sources too
`rustup toolchain add nightly`
`rustup component add rust-src`
2. (Optional) Set `RUST_SRC_PATH` environment variable to point to the 'src' dir in the Rust source installation
e.g. `% export RUST_SRC_PATH=$(rustc --print sysroot)/lib/rustlib/src/rust/library` or `% export RUST_SRC_PATH="$(rustc --print sysroot)/lib/rustlib/src/rust/src"` (older)
It's recommended to set `RUST_SRC_PATH` for speed up, but racer detects it automatically if you don't set it.
3. Test on the command line:
`racer complete std::io::B ` (should show some completions)
**Note**
To complete names in external crates, Racer needs `Cargo.lock`.
So, when you add a dependency in your `Cargo.toml`, you have to run a build command
such as `cargo build` or `cargo test`, to get completions.
## Editors/IDEs Supported
### RLS
Racer is used as a static library in [RLS](https://github.com/rust-lang-nursery/rls)
### Eclipse integration
Racer can be used with Eclipse through the use of [RustDT](https://github.com/RustDT/RustDT). (User guide is [linked](https://rustdt.github.io/) in repo description)
### Emacs integration
Emacs integration has been moved to a separate project: [emacs-racer](https://github.com/racer-rust/emacs-racer).
### Gedit integration
Gedit integration can be found [here](https://github.com/isamert/gracer).
### Builder integration
Gnome Builder integration can be found [here](https://github.com/deikatsuo/bracer)
### Kate integration
The Kate community maintains a [plugin](https://cgit.kde.org/kate.git/tree/addons/rustcompletion). It is bundled with recent releases of Kate (tested with 16.08 - read more [here](https://blogs.kde.org/2015/05/22/updates-kates-rust-plugin-syntax-highlighting-and-rust-source-mime-type)).
1. Enable 'Rust code completion' in the plugin list in the Kate config dialog;
2. On the new 'Rust code completion' dialog page, make sure 'Racer command' and 'Rust source tree location' are set correctly.
### Sublime Text integration
The Sublime Text community maintains some packages that integrates Racer
* [RustAutoComplete](https://github.com/defuz/RustAutoComplete) that offers auto completion and goto definition.
* [AnacondaRUST](https://github.com/DamnWidget/anaconda_rust) from the [anaconda](https://github.com/DamnWidget/anaconda) plugins family that offers auto completion, goto definition and show documentation
### Vim integration
Vim integration has been moved to a separate project: [vim-racer](https://github.com/racer-rust/vim-racer).
### Visual Studio Code extension
Racer recommends the official [`Rust (rls)` extension](https://github.com/rust-lang-nursery/rls-vscode) based on RLS, which uses Racer for completion.
### Atom integration
You can find the racer package for Atom [here](https://atom.io/packages/autocomplete-racer)
### Kakoune integration
[Kakoune](https://github.com/mawww/kakoune) comes with a builtin integration for racer auto completion.

View File

@ -0,0 +1 @@
paths = ["./arst"]

View File

@ -0,0 +1,6 @@
[package]
name = "arst"
version = "0.1.0"
authors = ["Joe Wilm <joe@jwilm.com>"]
[dependencies]

View File

@ -0,0 +1,6 @@
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
}

View File

@ -0,0 +1,3 @@
pub fn hello_submodule() {
println!("Hello from submodule.");
}

13
racer/interner/Cargo.toml Normal file
View File

@ -0,0 +1,13 @@
[package]
name = "racer-interner"
version = "0.1.0"
authors = ["Yuji Kanagawa <yuji.kngw.80s.revive@gmail.com>"]
license = "MIT"
description = "thread-local string interner for racer-rust"
homepage = "https://github.com/racer-rust/racer"
repository = "https://github.com/racer-rust/racer"
edition = "2018"
workspace = ".."
[dependencies]
serde = "1.0"

100
racer/interner/src/lib.rs Normal file
View File

@ -0,0 +1,100 @@
//! string interner
//! same as cargo::core::interning.rs, but thread local and Deserializable
use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
use std::cell::RefCell;
use std::collections::HashSet;
use std::error::Error;
use std::fmt;
use std::ops::Deref;
use std::ptr;
use std::str;
/// Moves `s` onto the heap permanently and hands back a `'static` slice.
/// The allocation is intentionally never freed — it backs the interner.
fn leak(s: String) -> &'static str {
    let boxed: Box<str> = s.into_boxed_str();
    Box::leak(boxed)
}
thread_local! {
    // Per-thread set of every string interned so far. Entries are leaked
    // (`leak`), so they live for the rest of the process.
    static STRING_CACHE: RefCell<HashSet<&'static str>> = Default::default();
}
/// Handle to a thread-locally interned, leaked (hence `'static`) string.
///
/// NOTE(review): the cache is thread-local, so handles created on different
/// threads compare unequal even for identical contents — confirm racer only
/// interns on one thread.
#[derive(Clone, Copy, PartialOrd, Ord, Eq, Hash)]
pub struct InternedString {
    inner: &'static str,
}
impl PartialEq for InternedString {
    // Pointer comparison suffices because the interner stores at most one
    // leaked allocation per distinct string content (per thread). `ptr::eq`
    // on `&str` compares both the address and the length of the fat pointer.
    fn eq(&self, other: &InternedString) -> bool {
        ptr::eq(self.as_str(), other.as_str())
    }
}
impl InternedString {
    /// Interns `st`: returns the existing handle if this thread has seen
    /// the string before, otherwise leaks a copy and caches it.
    pub fn new(st: &str) -> InternedString {
        STRING_CACHE.with(|cache| {
            let mut cache = cache.borrow_mut();
            // Idiomatic `.copied()` instead of `.map(|&s| s)`.
            let s = cache.get(st).copied().unwrap_or_else(|| {
                let s = leak(st.to_string());
                cache.insert(s);
                s
            });
            InternedString { inner: s }
        })
    }
    /// Returns a handle only if `st` was already interned on this thread;
    /// never allocates or inserts.
    pub fn new_if_exists(st: &str) -> Option<InternedString> {
        STRING_CACHE.with(|cache| {
            cache
                .borrow()
                .get(st)
                .copied()
                .map(|s| InternedString { inner: s })
        })
    }
    /// Returns the interned slice; `'static` because the storage is leaked.
    pub fn as_str(&self) -> &'static str {
        self.inner
    }
}
impl Deref for InternedString {
    type Target = str;
    /// Dereferences to the interned (and therefore `'static`) slice.
    fn deref(&self) -> &'static str {
        self.inner
    }
}
impl fmt::Debug for InternedString {
    /// Delegates to `&str`'s `Debug` impl, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.inner, f)
    }
}
impl fmt::Display for InternedString {
    /// Delegates to `&str`'s `Display` impl, preserving formatter flags.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.inner, f)
    }
}
// Serializes as a plain string (the interned contents), so the wire format
// is indistinguishable from an ordinary `String`.
impl Serialize for InternedString {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.inner)
    }
}
// Deserializes a string and interns it on the current thread.
//
// NOTE(review): only `visit_borrowed_str` is implemented, so this relies on
// the deserializer borrowing from its input (true for `serde_json::from_slice`
// as used in this crate); non-borrowing sources (e.g. readers) would hit the
// visitor's default error path — confirm that is intended.
impl<'de> Deserialize<'de> for InternedString {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct VisStr;
        impl<'de> Visitor<'de> for VisStr {
            type Value = InternedString;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "expecting string")
            }
            fn visit_borrowed_str<E: Error>(self, v: &'de str) -> Result<InternedString, E> {
                Ok(InternedString::new(v))
            }
        }
        deserializer.deserialize_str(VisStr {})
    }
}

22
racer/metadata/Cargo.toml Normal file
View File

@ -0,0 +1,22 @@
[package]
name = "racer-cargo-metadata"
version = "0.1.2"
authors = ["Yuji Kanagawa <yuji.kngw.80s.revive@gmail.com>"]
license = "MIT"
description = "light-weight cargo metadata parser for racer"
homepage = "https://github.com/racer-rust/racer"
repository = "https://github.com/racer-rust/racer"
edition = "2018"
workspace = ".."
[dependencies]
serde_json = "1.0"
[dependencies.serde]
version = "1.0"
features = ["derive"]
[dependencies.racer-interner]
version = "0.1"
path = "../interner"

93
racer/metadata/src/lib.rs Normal file
View File

@ -0,0 +1,93 @@
extern crate racer_interner;
#[macro_use]
extern crate serde;
extern crate serde_json;
pub mod mapping;
pub mod metadata;
use crate::metadata::Metadata;
use std::env;
use std::error::Error;
use std::fmt;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::Utf8Error;
/// Errors produced while invoking `cargo metadata` and parsing its output.
#[derive(Debug)]
pub enum ErrorKind {
    /// Cargo's stderr was not valid UTF-8.
    Encode(Utf8Error),
    /// The metadata JSON could not be parsed.
    Json(serde_json::Error),
    /// Spawning or waiting on the cargo subprocess failed.
    Io(io::Error),
    /// Cargo exited unsuccessfully; payload is its captured stderr.
    Subprocess(String),
}
impl fmt::Display for ErrorKind {
    // Wrapped errors display as themselves; subprocess failures are
    // prefixed so the stderr dump is identifiable.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ErrorKind::Encode(e) => fmt::Display::fmt(e, f),
            ErrorKind::Json(e) => fmt::Display::fmt(e, f),
            ErrorKind::Io(e) => fmt::Display::fmt(e, f),
            ErrorKind::Subprocess(s) => write!(f, "stderr: {}", s),
        }
    }
}
impl Error for ErrorKind {}
// `From` impls let `?` convert the underlying errors in `run` below.
impl From<Utf8Error> for ErrorKind {
    fn from(e: Utf8Error) -> ErrorKind {
        ErrorKind::Encode(e)
    }
}
impl From<serde_json::Error> for ErrorKind {
    fn from(e: serde_json::Error) -> ErrorKind {
        ErrorKind::Json(e)
    }
}
impl From<io::Error> for ErrorKind {
    fn from(e: io::Error) -> ErrorKind {
        ErrorKind::Io(e)
    }
}
/// Walks up from `current` (itself included) and returns the first
/// `Cargo.toml` found, or `None` when no ancestor directory has one.
pub fn find_manifest(current: &Path) -> Option<PathBuf> {
    // `ancestors()` yields `current` first, then each parent in turn.
    // Joining "Cargo.toml" onto a file path simply yields a non-existent
    // candidate, so no explicit `is_dir` check is needed.
    current
        .ancestors()
        .map(|dir| dir.join("Cargo.toml"))
        .find(|candidate| candidate.exists())
}
/// Runs `cargo metadata --all-features --format-version 1 --color never`
/// for `manifest_path` and parses the JSON it prints.
///
/// Honors the `CARGO` environment variable (falling back to `cargo` on
/// `PATH`). When `frozen` is true, passes `--frozen` so cargo will not
/// touch the network or update the lockfile.
///
/// # Errors
/// `ErrorKind::Io` if the process cannot be spawned, `ErrorKind::Encode`
/// if cargo's stderr is not UTF-8, `ErrorKind::Subprocess` if cargo exits
/// unsuccessfully, and `ErrorKind::Json` if the output cannot be parsed.
pub fn run(manifest_path: &Path, frozen: bool) -> Result<Metadata, ErrorKind> {
    let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
    let mut cmd = Command::new(cargo);
    cmd.arg("metadata");
    cmd.arg("--all-features");
    cmd.args(&["--format-version", "1"]);
    cmd.args(&["--color", "never"]);
    cmd.arg("--manifest-path");
    cmd.arg(manifest_path.as_os_str());
    if frozen {
        cmd.arg("--frozen");
    }
    let op = cmd.output()?;
    if !op.status.success() {
        // Surface cargo's own diagnostics on failure.
        let stderr = String::from_utf8(op.stderr).map_err(|e| e.utf8_error())?;
        return Err(ErrorKind::Subprocess(stderr));
    }
    serde_json::from_slice(&op.stdout).map_err(From::from)
}

View File

@ -0,0 +1,139 @@
use crate::metadata::{Metadata, Package, PackageId, Resolve, ResolveNode, Target};
use racer_interner::InternedString;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
/// Cached dependencies for racer
#[derive(Clone, Debug)]
pub struct PackageMap {
manifest_to_idx: HashMap<PathBuf, PackageIdx>,
id_to_idx: HashMap<PackageId, PackageIdx>,
packages: Vec<PackageInner>,
}
/// Rust edition of a package, ordered oldest-to-newest so editions can be
/// compared with `<` / `>`.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum Edition {
    Ed2015,
    Ed2018,
    Ed2021,
}
impl Edition {
    /// Parses the edition string as reported by `cargo metadata`.
    ///
    /// Panics on anything other than "2015", "2018" or "2021".
    pub fn from_str(s: &str) -> Self {
        if s == "2015" {
            Edition::Ed2015
        } else if s == "2018" {
            Edition::Ed2018
        } else if s == "2021" {
            Edition::Ed2021
        } else {
            unreachable!("got unexpected edition {}", s)
        }
    }
}
/// Per-package record cached by `PackageMap`.
#[derive(Clone, Debug)]
struct PackageInner {
    // Parsed edition of the package itself.
    edition: Edition,
    // (library name, library src path) for each resolved dependency;
    // filled in afterwards by `construct_deps`.
    deps: Vec<(InternedString, PathBuf)>,
    // The package's library target, if it has one.
    lib: Option<Target>,
    id: PackageId,
}
impl PackageInner {
    /// Creates a record with an empty dependency list.
    ///
    /// Panics (via `Edition::from_str`) if `ed` is not one of
    /// "2015"/"2018"/"2021".
    fn new(ed: InternedString, id: PackageId, lib: Option<Target>) -> Self {
        PackageInner {
            edition: Edition::from_str(ed.as_str()),
            deps: Vec::new(),
            id,
            lib,
        }
    }
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct PackageIdx(usize);
impl PackageMap {
    /// Builds the map from a complete `cargo metadata` result.
    pub fn from_metadata(meta: Metadata) -> Self {
        let Metadata {
            packages, resolve, ..
        } = meta;
        PackageMap::new(packages, resolve)
    }
    /// Indexes `packages` by id and by manifest path, then (when resolve
    /// data is present) records each package's resolved library deps.
    pub fn new(packages: Vec<Package>, resolve: Option<Resolve>) -> Self {
        let mut manifest_to_idx = HashMap::new();
        let mut id_to_idx = HashMap::new();
        let mut inner = Vec::new();
        for (i, package) in packages.into_iter().enumerate() {
            let Package {
                id,
                targets,
                manifest_path,
                edition,
                ..
            } = package;
            id_to_idx.insert(id, PackageIdx(i));
            manifest_to_idx.insert(manifest_path, PackageIdx(i));
            // `find` already yields an owned `Option<Target>`; the previous
            // `.to_owned()` here was a redundant clone.
            let lib = targets.into_iter().find(|t| t.is_lib());
            inner.push(PackageInner::new(edition, id, lib));
        }
        if let Some(res) = resolve {
            construct_deps(res.nodes, &id_to_idx, &mut inner);
        }
        PackageMap {
            manifest_to_idx,
            id_to_idx,
            packages: inner,
        }
    }
    /// Iterates over the ids of every known package.
    pub fn ids<'a>(&'a self) -> impl 'a + Iterator<Item = PackageId> {
        self.packages.iter().map(|p| p.id)
    }
    /// Looks up a package's index by its id.
    pub fn id_to_idx(&self, id: PackageId) -> Option<PackageIdx> {
        // `.copied()` replaces the old `.map(|&x| x)`.
        self.id_to_idx.get(&id).copied()
    }
    /// Looks up a package's index by its manifest (`Cargo.toml`) path.
    pub fn get_idx(&self, path: &Path) -> Option<PackageIdx> {
        self.manifest_to_idx.get(path).copied()
    }
    /// Returns the id of the package at `idx`. Panics on a stale index.
    pub fn get_id(&self, idx: PackageIdx) -> PackageId {
        self.packages[idx.0].id
    }
    /// Returns the edition of the package at `idx`.
    pub fn get_edition(&self, idx: PackageIdx) -> Edition {
        self.packages[idx.0].edition
    }
    /// Returns the package's library target, if it has one.
    pub fn get_lib(&self, idx: PackageIdx) -> Option<&Target> {
        self.packages[idx.0].lib.as_ref()
    }
    /// Returns the src path of the package's library target, if any.
    pub fn get_lib_src_path(&self, idx: PackageIdx) -> Option<&Path> {
        self.get_lib(idx).map(|t| t.src_path.as_ref())
    }
    /// Returns the resolved `(lib name, lib src path)` dependency list.
    pub fn get_dependencies(&self, idx: PackageIdx) -> &[(InternedString, PathBuf)] {
        self.packages[idx.0].deps.as_ref()
    }
    /// Resolves a dependency's src path by library name; returns `None`
    /// immediately when `s` was never interned on this thread.
    pub fn get_src_path_from_libname(&self, id: PackageIdx, s: &str) -> Option<&Path> {
        let deps = self.get_dependencies(id);
        let query_str = InternedString::new_if_exists(s)?;
        deps.iter().find(|t| t.0 == query_str).map(|t| t.1.as_ref())
    }
}
/// Records, for every resolve node whose package id is known, the
/// `(library name, library src path)` of each dependency that has a
/// library target.
///
/// A dependency with an unknown id or no library target is silently
/// skipped. By contrast, an unknown *node* id aborts the whole pass with
/// `None`, leaving the deps of earlier nodes already written into `res`.
fn construct_deps(
    nodes: Vec<ResolveNode>,
    id_to_idx: &HashMap<PackageId, PackageIdx>,
    res: &mut [PackageInner],
) -> Option<()> {
    for node in nodes {
        // `?` here returns from the function, not the loop body.
        let idx = id_to_idx.get(&node.id)?;
        let deps: Vec<_> = node
            .dependencies
            .into_iter()
            .filter_map(|id| {
                // `?` inside the closure only skips this dependency.
                let idx = id_to_idx.get(&id)?;
                res[idx.0]
                    .lib
                    .as_ref()
                    .map(|l| (l.name, l.src_path.clone()))
            })
            .collect();
        res[idx.0].deps.extend(deps);
    }
    Some(())
}

View File

@ -0,0 +1,77 @@
//! Data structures for metadata
use racer_interner::InternedString;
use std::path::PathBuf;
/// Top-level result of `cargo metadata --format-version 1`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Metadata {
    pub packages: Vec<Package>,
    pub workspace_members: Vec<PackageId>,
    /// Dependency resolution graph; may be absent in the JSON.
    pub resolve: Option<Resolve>,
    #[serde(default)]
    pub workspace_root: PathBuf,
    pub target_directory: PathBuf,
    version: usize,
    // Private zero-sized field: prevents struct-literal construction
    // outside this module while being invisible to serde.
    #[serde(skip)]
    __guard: (),
}
/// One entry of the metadata `packages` array.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Package {
    pub id: PackageId,
    pub targets: Vec<Target>,
    pub manifest_path: PathBuf,
    // Missing `edition` fields default to "2015" (see `edition_default`).
    #[serde(default = "edition_default")]
    pub edition: InternedString,
    #[serde(skip)]
    __guard: (),
}
/// The `resolve` section: the resolved dependency graph.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Resolve {
    pub nodes: Vec<ResolveNode>,
    #[serde(skip)]
    __guard: (),
}
/// One node of the resolve graph: a package and its resolved dependencies.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ResolveNode {
    pub id: PackageId,
    pub dependencies: Vec<PackageId>,
}
/// A build target (lib, bin, test, ...) of a package.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Target {
    pub name: InternedString,
    pub kind: Vec<InternedString>,
    pub src_path: PathBuf,
    #[serde(default = "edition_default")]
    pub edition: InternedString,
    #[serde(skip)]
    __guard: (),
}
// Target kinds that `is_lib` treats as a library.
const LIB_KINDS: [&'static str; 4] = ["lib", "rlib", "dylib", "proc-macro"];
impl Target {
    /// True if any of this target's kinds is one of `LIB_KINDS`.
    pub fn is_lib(&self) -> bool {
        self.kind.iter().any(|k| LIB_KINDS.contains(&k.as_str()))
    }
    /// True if this target is on the 2015 edition.
    pub fn is_2015(&self) -> bool {
        self.edition.as_str() == "2015"
    }
}
/// Opaque cargo package id, stored as the interned raw id string.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageId(InternedString);
impl PackageId {
    /// Returns the text before the first space of the id.
    ///
    /// Panics if the id contains no space — NOTE(review): this assumes
    /// cargo renders ids as space-separated fields with the name first;
    /// confirm against the metadata format version in use.
    pub fn name(&self) -> &str {
        let idx = self.0.find(' ').expect("Whitespace not found");
        &self.0[..idx]
    }
}
/// Serde default for a missing `edition` field: "2015".
#[inline(always)]
fn edition_default() -> InternedString {
    InternedString::new("2015")
}

View File

@ -0,0 +1,3 @@
[toolchain]
channel = "nightly-2022-04-06"
components = ["rust-src", "rustc-dev"]

1372
racer/src/racer/ast.rs Normal file

File diff suppressed because it is too large Load Diff

1068
racer/src/racer/ast_types.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,63 @@
extern crate test;
use std::env::var;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use codecleaner::code_chunks;
use codeiter::StmtIndicesIter;
use core::IndexedSource;
use scopes::{mask_comments, mask_sub_scopes};
use self::test::Bencher;
fn get_rust_file_str(path: &[&str]) -> String {
let mut src_path = match var("RUST_SRC_PATH") {
Ok(env) => PathBuf::from(&env),
_ => panic!("Cannot find $RUST_SRC_PATH"),
};
for &s in path.iter() {
src_path.push(s);
}
let mut s = String::new();
File::open(&src_path)
.unwrap()
.read_to_string(&mut s)
.unwrap();
s
}
#[bench]
fn bench_code_chunks(b: &mut Bencher) {
let src = &get_rust_file_str(&["liballoc", "vec.rs"]);
b.iter(|| {
test::black_box(code_chunks(src).collect::<Vec<_>>());
});
}
#[bench]
fn bench_iter_stmts(b: &mut Bencher) {
let src = &get_rust_file_str(&["liballoc", "vec.rs"]);
b.iter(|| {
test::black_box(StmtIndicesIter::from_parts(src, code_chunks(src)).collect::<Vec<_>>());
});
}
#[bench]
fn bench_mask_comments(b: &mut Bencher) {
let src_indexed = IndexedSource::new(get_rust_file_str(&["liballoc", "vec.rs"]));
let src = src_indexed.as_src();
b.iter(|| {
test::black_box(mask_comments(src));
});
}
#[bench]
fn bench_mask_sub_scopes(b: &mut Bencher) {
let src = &get_rust_file_str(&["liballoc", "vec.rs"]);
b.iter(|| {
test::black_box(mask_sub_scopes(src));
});
}

View File

@ -0,0 +1,460 @@
use crate::core::{BytePos, ByteRange};
/// Type of the string
#[derive(Clone, Copy, Debug)]
enum StrStyle {
    /// normal string starts with "
    Cooked,
    /// Raw(n) => raw string started with n #s
    Raw(usize),
}
/// Lexer state: which kind of region the scanner is currently inside.
#[derive(Clone, Copy)]
enum State {
    /// Plain code.
    Code,
    /// Inside a `//` line comment.
    Comment,
    /// Inside a (possibly nested) `/* ... */` block comment.
    CommentBlock,
    /// Inside a string literal of the given style.
    String(StrStyle),
    /// Inside a character literal.
    Char,
    /// Reached the end of the source; iteration stops.
    Finished,
}
/// Iterator over the byte ranges of `src` that are code — comments and the
/// contents of string/char literals are excluded (see `code_chunks`).
#[derive(Clone, Copy)]
pub struct CodeIndicesIter<'a> {
    src: &'a str,
    // Current scan position (byte offset into `src`).
    pos: BytePos,
    state: State,
}
impl<'a> Iterator for CodeIndicesIter<'a> {
    type Item = ByteRange;
    /// Dispatches on the current lexer state: each handler consumes its
    /// region (comment, string or char literal) and returns the byte range
    /// of the code chunk that follows it.
    fn next(&mut self) -> Option<ByteRange> {
        match self.state {
            State::Code => Some(self.code()),
            State::Comment => Some(self.comment()),
            State::CommentBlock => Some(self.comment_block()),
            State::String(style) => Some(self.string(style)),
            State::Char => Some(self.char()),
            State::Finished => None,
        }
    }
}
impl<'a> CodeIndicesIter<'a> {
fn code(&mut self) -> ByteRange {
let mut pos = self.pos;
let start = match self.state {
State::String(_) | State::Char => pos.decrement(), // include quote
_ => pos,
};
let src_bytes = self.src.as_bytes();
for &b in &src_bytes[pos.0..] {
pos = pos.increment();
match b {
b'/' if src_bytes.len() > pos.0 => match src_bytes[pos.0] {
b'/' => {
self.state = State::Comment;
self.pos = pos.increment();
return ByteRange::new(start, pos.decrement());
}
b'*' => {
self.state = State::CommentBlock;
self.pos = pos.increment();
return ByteRange::new(start, pos.decrement());
}
_ => {}
},
b'"' => {
// "
let str_type = self.detect_str_type(pos);
self.state = State::String(str_type);
self.pos = pos;
return ByteRange::new(start, pos); // include dblquotes
}
b'\'' => {
// single quotes are also used for lifetimes, so we need to
// be confident that this is not a lifetime.
// Look for backslash starting the escape, or a closing quote:
if src_bytes.len() > pos.increment().0
&& (src_bytes[pos.0] == b'\\' || src_bytes[pos.increment().0] == b'\'')
{
self.state = State::Char;
self.pos = pos;
return ByteRange::new(start, pos); // include single quote
}
}
_ => {}
}
}
self.state = State::Finished;
ByteRange::new(start, self.src.len().into())
}
fn comment(&mut self) -> ByteRange {
let mut pos = self.pos;
let src_bytes = self.src.as_bytes();
for &b in &src_bytes[pos.0..] {
pos = pos.increment();
if b == b'\n' {
if pos.0 + 2 <= src_bytes.len() && src_bytes[pos.0..pos.0 + 2] == [b'/', b'/'] {
continue;
}
break;
}
}
self.pos = pos;
self.code()
}
fn comment_block(&mut self) -> ByteRange {
let mut nesting_level = 0usize;
let mut prev = b' ';
let mut pos = self.pos;
for &b in &self.src.as_bytes()[pos.0..] {
pos = pos.increment();
match b {
b'/' if prev == b'*' => {
prev = b' ';
if nesting_level == 0 {
break;
} else {
nesting_level -= 1;
}
}
b'*' if prev == b'/' => {
prev = b' ';
nesting_level += 1;
}
_ => {
prev = b;
}
}
}
self.pos = pos;
self.code()
}
fn string(&mut self, str_type: StrStyle) -> ByteRange {
let src_bytes = self.src.as_bytes();
let mut pos = self.pos;
match str_type {
StrStyle::Raw(level) => {
// raw string (e.g. br#"\"#)
#[derive(Debug)]
enum SharpState {
Sharp {
// number of preceding #s
num_sharps: usize,
// Position of last "
quote_pos: BytePos,
},
None, // No preceding "##...
}
let mut cur_state = SharpState::None;
let mut end_was_found = false;
// detect corresponding end(if start is r##", "##) greedily
for (i, &b) in src_bytes[self.pos.0..].iter().enumerate() {
match cur_state {
SharpState::Sharp {
num_sharps,
quote_pos,
} => {
cur_state = match b {
b'#' => SharpState::Sharp {
num_sharps: num_sharps + 1,
quote_pos,
},
b'"' => SharpState::Sharp {
num_sharps: 0,
quote_pos: BytePos(i),
},
_ => SharpState::None,
}
}
SharpState::None => {
if b == b'"' {
cur_state = SharpState::Sharp {
num_sharps: 0,
quote_pos: BytePos(i),
};
}
}
}
if let SharpState::Sharp {
num_sharps,
quote_pos,
} = cur_state
{
if num_sharps == level {
end_was_found = true;
pos += quote_pos.increment();
break;
}
}
}
if !end_was_found {
pos = src_bytes.len().into();
}
}
StrStyle::Cooked => {
let mut is_not_escaped = true;
for &b in &src_bytes[pos.0..] {
pos = pos.increment();
match b {
b'"' if is_not_escaped => {
break;
} // "
b'\\' => {
is_not_escaped = !is_not_escaped;
}
_ => {
is_not_escaped = true;
}
}
}
}
};
self.pos = pos;
self.code()
}
fn char(&mut self) -> ByteRange {
let mut is_not_escaped = true;
let mut pos = self.pos;
for &b in &self.src.as_bytes()[pos.0..] {
pos = pos.increment();
match b {
b'\'' if is_not_escaped => {
break;
}
b'\\' => {
is_not_escaped = !is_not_escaped;
}
_ => {
is_not_escaped = true;
}
}
}
self.pos = pos;
self.code()
}
fn detect_str_type(&self, pos: BytePos) -> StrStyle {
let src_bytes = self.src.as_bytes();
let mut sharp = 0;
if pos == BytePos::ZERO {
return StrStyle::Cooked;
}
// now pos is at one byte after ", so we have to start at pos - 2
for &b in src_bytes[..pos.decrement().0].iter().rev() {
match b {
b'#' => sharp += 1,
b'r' => return StrStyle::Raw(sharp),
_ => return StrStyle::Cooked,
}
}
StrStyle::Cooked
}
}
/// Returns indices of chunks of code (minus comments and string contents)
///
/// The string/char delimiters themselves stay inside the chunks; only the
/// contents between them are masked out (see the tests below).
pub fn code_chunks(src: &str) -> CodeIndicesIter<'_> {
    CodeIndicesIter {
        src,
        state: State::Code,
        pos: BytePos::ZERO,
    }
}
#[cfg(test)]
mod code_indices_iter_test {
use super::*;
use crate::testutils::{rejustify, slice};
#[test]
fn removes_a_comment() {
let src = &rejustify(
"
this is some code // this is a comment
some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!("some more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_consecutive_comments() {
let src = &rejustify(
"
this is some code // this is a comment
// this is more comment
// another comment
some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!("some more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_string_contents() {
let src = &rejustify(
"
this is some code \"this is a string\" more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code \"", slice(src, it.next().unwrap()));
assert_eq!("\" more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_char_contents() {
let src = &rejustify(
"
this is some code \'\"\' more code \'\\x00\' and \'\\\'\' that\'s it
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code \'", slice(src, it.next().unwrap()));
assert_eq!("\' more code \'", slice(src, it.next().unwrap()));
assert_eq!("\' and \'", slice(src, it.next().unwrap()));
assert_eq!("\' that\'s it", slice(src, it.next().unwrap()));
}
#[test]
fn removes_string_contents_with_a_comment_in_it() {
let src = &rejustify(
"
this is some code \"string with a // fake comment \" more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code \"", slice(src, it.next().unwrap()));
assert_eq!("\" more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_a_comment_with_a_dbl_quote_in_it() {
let src = &rejustify(
"
this is some code // comment with \" double quote
some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!("some more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_multiline_comment() {
let src = &rejustify(
"
this is some code /* this is a
\"multiline\" comment */some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!("some more code", slice(src, it.next().unwrap()));
}
#[test]
fn handles_nesting_of_block_comments() {
let src = &rejustify(
"
this is some code /* nested /* block */ comment */ some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!(" some more code", slice(src, it.next().unwrap()));
}
#[test]
fn handles_documentation_block_comments_nested_into_block_comments() {
let src = &rejustify(
"
this is some code /* nested /** documentation block */ comment */ some more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code ", slice(src, it.next().unwrap()));
assert_eq!(" some more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_string_with_escaped_dblquote_in_it() {
let src = &rejustify(
"
this is some code \"string with a \\\" escaped dblquote fake comment \" more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code \"", slice(src, it.next().unwrap()));
assert_eq!("\" more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_raw_string_with_dangling_escape_in_it() {
let src = &rejustify(
"
this is some code br\" escaped dblquote raw string \\\" more code
",
);
let mut it = code_chunks(src);
assert_eq!("this is some code br\"", slice(src, it.next().unwrap()));
assert_eq!("\" more code", slice(src, it.next().unwrap()));
}
#[test]
fn removes_string_with_escaped_slash_before_dblquote_in_it() {
let src = &rejustify("
this is some code \"string with an escaped slash, so dbl quote does end the string after all \\\\\" more code
");
let mut it = code_chunks(src);
assert_eq!("this is some code \"", slice(src, it.next().unwrap()));
assert_eq!("\" more code", slice(src, it.next().unwrap()));
}
#[test]
fn handles_tricky_bit_from_str_rs() {
let src = &rejustify(
"
before(\"\\\\\'\\\\\\\"\\\\\\\\\");
more_code(\" skip me \")
",
);
for range in code_chunks(src) {
let range = || range.to_range();
println!("BLOB |{}|", &src[range()]);
if src[range()].contains("skip me") {
panic!("{}", &src[range()]);
}
}
}
#[test]
fn removes_nested_rawstr() {
let src = &rejustify(
r####"
this is some code br###""" r##""##"### more code
"####,
);
let mut it = code_chunks(src);
assert_eq!("this is some code br###\"", slice(src, it.next().unwrap()));
assert_eq!("\"### more code", slice(src, it.next().unwrap()));
}
}

367
racer/src/racer/codeiter.rs Normal file
View File

@ -0,0 +1,367 @@
use std::iter::{Fuse, Iterator};
use crate::core::{BytePos, ByteRange};
use crate::scopes;
use crate::util::is_whitespace_byte;
/// An iterator which iterates statements.
/// e.g. for "let a = 5; let b = 4;" it returns "let a = 5;" and then "let b = 4;"
/// This iterator only works for comment-masked source codes.
pub struct StmtIndicesIter<'a> {
    src: &'a str,
    // Byte offset where the next `next()` call resumes scanning.
    pos: BytePos,
    // Scanning stops at this byte offset (the end of `src`).
    end: BytePos,
}
impl<'a> Iterator for StmtIndicesIter<'a> {
    type Item = ByteRange;
    /// Scans bytes from `self.pos` for the end of the next statement and
    /// returns its half-open byte range. Returns `None` at end of input or
    /// when the enclosing scope's `}` is reached.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let src_bytes = self.src.as_bytes();
        // Byte that terminates the current statement at nesting depth 0;
        // `;` by default, adjusted below for attributes and macros.
        let mut enddelim = b';';
        let mut bracelevel = 0isize;
        let mut parenlevel = 0isize;
        let mut bracketlevel = 0isize;
        let mut pos = self.pos;
        // Skip leading whitespace so `start` lands on the first real byte.
        for &b in &src_bytes[pos.0..self.end.0] {
            match b {
                b' ' | b'\r' | b'\n' | b'\t' => {
                    pos += BytePos(1);
                }
                _ => {
                    break;
                }
            }
        }
        let start = pos;
        // test attribute #[foo = bar]
        if pos < self.end && src_bytes[pos.0] == b'#' {
            enddelim = b']'
        };
        // iterate through the chunk, looking for stmt end
        for &b in &src_bytes[pos.0..self.end.0] {
            pos += BytePos(1);
            match b {
                b'(' => {
                    parenlevel += 1;
                }
                b')' => {
                    parenlevel -= 1;
                }
                b'[' => {
                    bracketlevel += 1;
                }
                b']' => {
                    bracketlevel -= 1;
                }
                b'{' => {
                    // if we are top level and stmt is not a 'use' or 'let' then
                    // closebrace finishes the stmt
                    if bracelevel == 0
                        && parenlevel == 0
                        && !(is_a_use_stmt(src_bytes, start, pos)
                            || is_a_let_stmt(src_bytes, start, pos))
                    {
                        enddelim = b'}';
                    }
                    bracelevel += 1;
                }
                b'}' => {
                    // have we reached the end of the scope?
                    if bracelevel == 0 {
                        self.pos = pos;
                        return None;
                    }
                    bracelevel -= 1;
                }
                b'!' => {
                    // macro if followed by at least one space or (
                    // FIXME: test with boolean 'not' expression
                    if parenlevel == 0 && bracelevel == 0 && pos < self.end && (pos - start).0 > 1 {
                        match src_bytes[pos.0] {
                            b' ' | b'\r' | b'\n' | b'\t' | b'(' => {
                                enddelim = b')';
                            }
                            _ => {}
                        }
                    }
                }
                _ => {}
            }
            // A statement ends when the byte just consumed is the expected
            // terminator at depth 0, or when any level goes negative (we
            // consumed the closing delimiter of an enclosing construct).
            if parenlevel < 0
                || bracelevel < 0
                || bracketlevel < 0
                || (enddelim == b && bracelevel == 0 && parenlevel == 0 && bracketlevel == 0)
            {
                self.pos = pos;
                return Some(ByteRange::new(start, pos));
            }
        }
        // Trailing, unterminated text still yields one final range.
        if start < self.end {
            self.pos = pos;
            return Some(ByteRange::new(start, self.end));
        }
        None
    }
}
/// Returns true if the bytes in `start..pos` begin a `use` item.
///
/// The slice boundaries come from ASCII byte scanning, so they can in
/// principle land inside a multi-byte UTF-8 sequence; the previous
/// `from_utf8_unchecked` would be undefined behavior in that case. The
/// checked conversion treats a non-UTF-8 slice as "not a use stmt".
fn is_a_use_stmt(src_bytes: &[u8], start: BytePos, pos: BytePos) -> bool {
    match ::std::str::from_utf8(&src_bytes[start.0..pos.0]) {
        Ok(src) => scopes::use_stmt_start(&src).is_some(),
        Err(_) => false,
    }
}
/// Returns true if the statement starting at `start` begins with the
/// keyword `let` followed by a whitespace byte.
///
/// Adds a bounds guard: the original indexed `src_bytes[start.0 + 3]`
/// based only on `pos.0 > 3`, which can panic when a short statement
/// starts near the end of the buffer.
fn is_a_let_stmt(src_bytes: &[u8], start: BytePos, pos: BytePos) -> bool {
    pos.0 > 3
        && src_bytes.len() > start.0 + 3
        && &src_bytes[start.0..start.0 + 3] == b"let"
        && is_whitespace_byte(src_bytes[start.0 + 3])
}
impl<'a> StmtIndicesIter<'a> {
    /// Builds a fused statement iterator covering the whole of `src`.
    pub fn from_parts(src: &str) -> Fuse<StmtIndicesIter<'_>> {
        let iter = StmtIndicesIter {
            src,
            pos: BytePos::ZERO,
            end: BytePos(src.len()),
        };
        iter.fuse()
    }
}
#[cfg(test)]
mod test {
use std::iter::Fuse;
use crate::codecleaner;
use crate::testutils::{rejustify, slice};
use super::*;
fn iter_stmts(src: &str) -> Fuse<StmtIndicesIter<'_>> {
let idx: Vec<_> = codecleaner::code_chunks(&src).collect();
let code = scopes::mask_comments(src, &idx);
let code: &'static str = Box::leak(code.into_boxed_str());
StmtIndicesIter::from_parts(code)
}
#[test]
fn iterates_single_use_stmts() {
let src = rejustify(
"
use std::Foo; // a comment
use std::Bar;
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!("use std::Foo;", slice(&src, it.next().unwrap()));
assert_eq!("use std::Bar;", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_array_stmts() {
let src = rejustify(
"
let a: [i32; 2] = [1, 2];
let b = [[0], [1], [2]];
let c = ([1, 2, 3])[1];
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!("let a: [i32; 2] = [1, 2];", slice(&src, it.next().unwrap()));
assert_eq!("let b = [[0], [1], [2]];", slice(&src, it.next().unwrap()));
assert_eq!("let c = ([1, 2, 3])[1];", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_use_stmt_over_two_lines() {
let src = rejustify(
"
use std::{Foo,
Bar}; // a comment
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!(
"use std::{Foo,
Bar};",
slice(&src, it.next().unwrap())
);
}
#[test]
fn iterates_use_stmt_without_the_prefix() {
let src = rejustify(
"
pub use {Foo,
Bar}; // this is also legit apparently
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!(
"pub use {Foo,
Bar};",
slice(&src, it.next().unwrap())
);
}
#[test]
fn iterates_while_stmt() {
let src = rejustify(
"
while self.pos < 3 { }
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!("while self.pos < 3 { }", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_lambda_arg() {
let src = rejustify(
"
myfn(|n|{});
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!("myfn(|n|{});", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_macro() {
let src = "
mod foo;
macro_rules! otry(
($e:expr) => (match $e { Some(e) => e, None => return })
)
mod bar;
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("mod foo;", slice(&src, it.next().unwrap()));
assert_eq!(
"macro_rules! otry(
($e:expr) => (match $e { Some(e) => e, None => return })
)",
slice(&src, it.next().unwrap())
);
assert_eq!("mod bar;", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_macro_invocation() {
let src = "
mod foo;
local_data_key!(local_stdout: Box<Writer + Send>) // no ';'
mod bar;
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("mod foo;", slice(&src, it.next().unwrap()));
assert_eq!(
"local_data_key!(local_stdout: Box<Writer + Send>)",
slice(&src, it.next().unwrap())
);
assert_eq!("mod bar;", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_if_else_stmt() {
let src = "
if self.pos < 3 { } else { }
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("if self.pos < 3 { }", slice(&src, it.next().unwrap()));
assert_eq!("else { }", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_inner_scope() {
let src = &"
while(self.pos < 3 {
let a = 35;
return a + 35; // should iterate this
}
{
b = foo; // but not this
}
"[29..];
let mut it = iter_stmts(src.as_ref());
assert_eq!("let a = 35;", slice(&src, it.next().unwrap()));
assert_eq!("return a + 35;", slice(&src, it.next().unwrap()));
assert_eq!(None, it.next());
}
#[test]
fn iterates_module_attribute() {
let src = rejustify(
"
#![license = \"BSD\"]
#[test]
",
);
let mut it = iter_stmts(src.as_ref());
assert_eq!("#![license = \"BSD\"]", slice(&src, it.next().unwrap()));
assert_eq!("#[test]", slice(&src, it.next().unwrap()));
}
#[test]
fn iterates_half_open_subscope_if_is_the_last_thing() {
let src = "
let something = 35;
while self.pos < 3 {
let a = 35;
return a + 35; // should iterate this
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("let something = 35;", slice(&src, it.next().unwrap()));
assert_eq!(
"while self.pos < 3 {
let a = 35;
return a + 35; // should iterate this
",
slice(&src, it.next().unwrap())
);
}
#[test]
fn iterates_ndarray() {
let src = "
let a = [[f64; 5]; 5];
pub struct Matrix44f(pub [[f64; 4]; 4]);
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("let a = [[f64; 5]; 5];", slice(&src, it.next().unwrap()));
assert_eq!(
"pub struct Matrix44f(pub [[f64; 4]; 4]);",
slice(&src, it.next().unwrap())
);
}
#[test]
#[ignore]
fn iterates_for_struct() {
let src = "
let a = 5;
for St { a, b } in iter() {
let b = a;
}
while let St { a, b } = iter().next() {
}
if let St(a) = hoge() {
}
";
let mut it = iter_stmts(src.as_ref());
assert_eq!("let a = 5;", slice(&src, it.next().unwrap()));
assert_eq!(
r"for St { a, b } in iter() {
let b = a;
}",
slice(&src, it.next().unwrap())
);
}
}

1414
racer/src/racer/core.rs Normal file

File diff suppressed because it is too large Load Diff

106
racer/src/racer/fileres.rs Normal file
View File

@ -0,0 +1,106 @@
use crate::core::{BytePos, Coordinate, Match, MatchType, SearchType, Session, SessionExt};
use crate::matchers;
use crate::nameres::RUST_SRC_PATH;
use crate::project_model::Edition;
use std::path::{Path, PathBuf};
/// get crate file from current path & crate name
///
/// Checks the standard-library source tree first (`get_std_file`), then
/// falls back to the project's external dependencies (`get_outer_crates`).
pub fn get_crate_file(name: &str, from_path: &Path, session: &Session<'_>) -> Option<PathBuf> {
    debug!("get_crate_file {}, {:?}", name, from_path);
    get_std_file(name, session).or_else(|| get_outer_crates(name, from_path, session))
}
/// Looks up crate `name` in the standard-library source tree (when
/// `RUST_SRC_PATH` was resolved), trying the legacy `lib<name>/lib.rs`
/// layout first and then the current `<name>/src/lib.rs` layout. Files
/// cached in the session count as existing.
pub fn get_std_file(name: &str, session: &Session<'_>) -> Option<PathBuf> {
    if let Some(ref std_path) = *RUST_SRC_PATH {
        // try lib<name>/lib.rs, like in the rust source dir
        let cratelibname = format!("lib{}", name);
        let filepath = std_path.join(cratelibname).join("lib.rs");
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
        // If not found, try using the new standard library directory layout
        let filepath = std_path.join(name).join("src").join("lib.rs");
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
    }
    // Tail expression instead of the previous explicit `return None;`.
    None
}
/// 2018 style crate name resolution
///
/// Finds extern crates whose library name matches `searchstr` among the
/// dependencies of the cargo project that owns `file_path`. With
/// `only_2018`, returns an empty list when the containing package is on an
/// edition older than 2018.
pub fn search_crate_names(
    searchstr: &str,
    search_type: SearchType,
    file_path: &Path,
    only_2018: bool,
    session: &Session<'_>,
) -> Vec<Match> {
    let manifest_path = try_vec!(session.project_model.discover_project_manifest(file_path));
    if only_2018 {
        // Unknown edition is treated as 2015 (the conservative choice).
        let edition = session
            .project_model
            .edition(&manifest_path)
            .unwrap_or(Edition::Ed2015);
        if edition < Edition::Ed2018 {
            return Vec::new();
        }
    }
    // Crate names may be spelled with `_` in code but `-` in Cargo.toml;
    // match against both spellings.
    let hyphenated = searchstr.replace('_', "-");
    let searchstr = searchstr.to_owned();
    session
        .project_model
        .search_dependencies(
            &manifest_path,
            Box::new(move |libname| match search_type {
                SearchType::ExactMatch => libname == hyphenated || libname == searchstr,
                SearchType::StartsWith => {
                    libname.starts_with(&hyphenated) || libname.starts_with(&searchstr)
                }
            }),
        )
        .into_iter()
        .map(|(name, path)| {
            // In-code identifiers always use underscores.
            let name = name.replace('-', "_");
            let raw_src = session.load_raw_file(&path);
            // Each dependency becomes a Crate match anchored at the top of
            // its lib.rs, with the module doc (if any) attached.
            Match {
                matchstr: name,
                filepath: path,
                point: BytePos::ZERO,
                coords: Some(Coordinate::start()),
                local: false,
                mtype: MatchType::Crate,
                contextstr: String::new(),
                docs: matchers::find_mod_doc(&raw_src, BytePos::ZERO),
            }
        })
        .collect()
}
/// Resolves module `name` relative to `parentdir`, trying `<name>.rs`
/// first and then `<name>/mod.rs`. Files cached in the session count as
/// existing.
pub fn get_module_file(name: &str, parentdir: &Path, session: &Session<'_>) -> Option<PathBuf> {
    let candidates = [
        parentdir.join(format!("{}.rs", name)),
        parentdir.join(name).join("mod.rs"),
    ];
    candidates
        .iter()
        .find(|filepath| filepath.exists() || session.contains_file(filepath))
        .cloned()
}
/// try to get outer crates
/// if we have dependencies in cache, use it.
/// else, call cargo-metadata(default) or fall back to rls
fn get_outer_crates(libname: &str, from_path: &Path, session: &Session<'_>) -> Option<PathBuf> {
    debug!(
        "[get_outer_crates] lib name: {:?}, from_path: {:?}",
        libname, from_path
    );
    let manifest = session.project_model.discover_project_manifest(from_path)?;
    // Return the resolution directly instead of binding it to a temporary
    // (`let res = ...; res`).
    session.project_model.resolve_dependency(&manifest, libname)
}

60
racer/src/racer/lib.rs Executable file
View File

@ -0,0 +1,60 @@
#![cfg_attr(feature = "nightly", feature(test))]
#![feature(control_flow_enum)]
#![feature(try_trait_v2)]
#![feature(rustc_private)]
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate derive_more;
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_errors;
extern crate rustc_parse;
extern crate rustc_session;
extern crate rustc_span;
#[macro_use]
mod testutils;
#[macro_use]
mod util;
mod ast;
mod ast_types;
mod codecleaner;
mod codeiter;
mod core;
mod fileres;
mod matchers;
#[cfg(feature = "metadata")]
mod metadata;
mod nameres;
mod primitive;
mod project_model;
mod scopes;
mod snippets;
mod typeinf;
pub use crate::ast_types::PathSearch;
pub use crate::core::{
complete_from_file, complete_fully_qualified_name, find_definition, is_use_stmt, to_coords,
to_point,
};
pub use crate::core::{
BytePos, ByteRange, Coordinate, FileCache, FileLoader, Location, Match, MatchType, Session,
};
pub use crate::primitive::PrimKind;
pub use crate::project_model::{Edition, ProjectModelProvider};
pub use crate::snippets::snippet_for_match;
pub use crate::util::expand_ident;
pub use crate::util::{get_rust_src_path, RustSrcPathError};
#[cfg(all(feature = "nightly", test))]
mod benches;

914
racer/src/racer/matchers.rs Normal file
View File

@ -0,0 +1,914 @@
use crate::ast_types::{ImplHeader, PathAlias, PathAliasKind, PathSegment};
use crate::core::MatchType::{
self, Const, Enum, EnumVariant, For, Function, IfLet, Let, Macro, Module, Static, Struct,
Trait, Type, WhileLet,
};
use crate::core::Namespace;
use crate::core::SearchType::{self, ExactMatch, StartsWith};
use crate::core::{BytePos, ByteRange, Coordinate, Match, Session, SessionExt, Src};
use crate::fileres::{get_crate_file, get_module_file};
use crate::nameres::resolve_path;
use crate::util::*;
use crate::{ast, scopes, typeinf};
use std::path::Path;
use std::{str, vec};
/// The location of an import (`use` item) currently being resolved.
#[derive(PartialEq, Eq)]
struct PendingImport<'fp> {
    // File containing the `use` item.
    filepath: &'fp Path,
    // Byte range of the `use` item within that file.
    range: ByteRange,
}
/// A stack of imports (`use` items) currently being resolved.
type PendingImports<'stack, 'fp> = StackLinkedListNode<'stack, PendingImport<'fp>>;
/// Maximum number of consecutive glob (`use foo::*`) expansions followed
/// while resolving a single search (see `PathAliasKind::Glob` handling).
const GLOB_LIMIT: usize = 2;
/// Import information (pending imports, glob budget, etc.)
pub struct ImportInfo<'stack, 'fp> {
    /// A stack of imports currently being resolved
    imports: PendingImports<'stack, 'fp>,
    /// the max number of times where we can go through glob continuously
    /// if current search path isn't constructed via glob, it's none
    glob_limit: Option<usize>,
}
impl<'stack, 'fp: 'stack> Default for ImportInfo<'stack, 'fp> {
    fn default() -> Self {
        ImportInfo {
            imports: PendingImports::empty(),
            glob_limit: None,
        }
    }
}
/// Everything a matcher needs to know about the current search.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct MatchCxt<'s, 'p> {
    /// File being searched.
    pub filepath: &'p Path,
    /// Identifier (or identifier prefix) being searched for.
    pub search_str: &'s str,
    /// Byte range of the code blob to inspect.
    pub range: ByteRange,
    /// Exact-match vs prefix (completion) search.
    pub search_type: SearchType,
    /// Whether items without an explicit visibility modifier may match
    /// (see `find_keyword_impl`).
    pub is_local: bool,
}
impl<'s, 'p> MatchCxt<'s, 'p> {
    /// Find `keyword` followed by the search string in `blob`, returning
    /// the identifier's start offset and its full text.
    ///
    /// For an exact search the identifier is just `search_str`; for a
    /// prefix (completion) search the complete identifier is read out of
    /// the blob starting at the match point.
    fn get_key_ident(
        &self,
        blob: &str,
        keyword: &str,
        ignore: &[&str],
    ) -> Option<(BytePos, String)> {
        find_keyword(blob, keyword, ignore, self).map(|start| {
            let s = match self.search_type {
                ExactMatch => self.search_str.to_owned(),
                StartsWith => {
                    // Extend past the searched prefix to the end of the
                    // identifier actually present in the source.
                    let end = find_ident_end(blob, start + BytePos(self.search_str.len()));
                    blob[start.0..end.0].to_owned()
                }
            };
            (start, s)
        })
    }
}
pub(crate) fn find_keyword(
src: &str,
pattern: &str,
ignore: &[&str],
context: &MatchCxt<'_, '_>,
) -> Option<BytePos> {
find_keyword_impl(
src,
pattern,
context.search_str,
ignore,
context.search_type,
context.is_local,
)
}
/// Core keyword scanner: find `pattern` (e.g. `fn`) followed by at least
/// one whitespace character and then `search_str`, after optionally
/// stripping a visibility modifier and the skippable words in `ignore`
/// (e.g. `unsafe`, `const`).
///
/// Returns the byte offset where `search_str` begins, or `None`.
fn find_keyword_impl(
    src: &str,
    pattern: &str,
    search_str: &str,
    ignore: &[&str],
    search_type: SearchType,
    is_local: bool,
) -> Option<BytePos> {
    let mut start = BytePos::ZERO;
    if let Some(offset) = strip_visibility(&src[..]) {
        start += offset;
    } else if !is_local {
        // Non-local items must carry an explicit visibility modifier
        // (`pub` etc.) to be reachable from the search site.
        // TODO(review): possibly too conservative — confirm.
        return None;
    }
    if ignore.len() > 0 {
        start += strip_words(&src[start.0..], ignore);
    }
    // mandatory pattern\s+
    if !src[start.0..].starts_with(pattern) {
        return None;
    }
    // remove whitespaces ... must have one at least
    start += pattern.len().into();
    let oldstart = start;
    for &b in src[start.0..].as_bytes() {
        match b {
            b if is_whitespace_byte(b) => start = start.increment(),
            _ => break,
        }
    }
    if start == oldstart {
        // No whitespace between the keyword and the identifier.
        return None;
    }
    let search_str_len = search_str.len();
    if src[start.0..].starts_with(search_str) {
        match search_type {
            StartsWith => Some(start),
            ExactMatch => {
                // The character right after `search_str` must exist and
                // must not be an identifier character.
                if src.len() > start.0 + search_str_len
                    && !is_ident_char(char_at(src, start.0 + search_str_len))
                {
                    Some(start)
                } else {
                    None
                }
            }
        }
    } else {
        None
    }
}
/// Does the blob at `blob_range` begin a `const fn` (or `const unsafe fn`)
/// rather than a `const` item?
fn is_const_fn(src: &str, blob_range: ByteRange) -> bool {
    strip_word(&src[blob_range.to_range()], "const").map_or(false, |offset| {
        let after_const = src[(blob_range.start + offset).0..].trim_start();
        after_const.starts_with("fn") || after_const.starts_with("unsafe")
    })
}
/// Shared matcher for `const`/`static`-style items: find `pattern` plus an
/// identifier at the start of the blob and build a `Match` of type `mtype`.
///
/// The AST does not currently expose the ident coordinates, so the ident
/// is located with a (hacky) string search instead.
fn match_pattern_start(
    src: &str,
    context: &MatchCxt<'_, '_>,
    pattern: &str,
    ignore: &[&str],
    mtype: MatchType,
) -> Option<Match> {
    let blob = &src[context.range.to_range()];
    let start = find_keyword(blob, pattern, ignore, context)?;
    // The identifier runs up to the first `:` or whitespace; the next
    // non-whitespace character must be the `:` of the type ascription.
    let end = blob[start.0..].find(|c: char| c == ':' || c.is_whitespace())?;
    if blob[start.0 + end..].trim_start().chars().next() != Some(':') {
        return None;
    }
    let ident = &blob[start.0..start.0 + end];
    Some(Match {
        matchstr: ident.to_owned(),
        filepath: context.filepath.to_path_buf(),
        point: context.range.start + start,
        coords: None,
        local: context.is_local,
        mtype,
        contextstr: first_line(blob),
        docs: String::new(),
    })
}
/// Match a `const NAME: …` item in the context's range.
pub fn match_const(msrc: &str, context: &MatchCxt<'_, '_>) -> Option<Match> {
    // `const fn` is a function, not a constant item; leave it to the
    // function matcher.
    if is_const_fn(msrc, context.range) {
        return None;
    }
    // Here we don't have to ignore "unsafe"
    match_pattern_start(msrc, context, "const", &[], Const)
}
/// Match a `static NAME: …` item in the context's range.
pub fn match_static(msrc: &str, context: &MatchCxt<'_, '_>) -> Option<Match> {
    // Here we don't have to ignore "unsafe"
    match_pattern_start(msrc, context, "static", &[], Static)
}
/// Shared body for `let`/`if let`/`while let`: parse the pattern bindings
/// in `msrc` and emit a `Match` of type `mtype` for each matching binding.
fn match_let_impl(msrc: &str, context: &MatchCxt<'_, '_>, mtype: MatchType) -> Vec<Match> {
    let mut out = Vec::new();
    let coords = ast::parse_pat_bind_stmt(msrc.to_owned());
    for pat_range in coords {
        let s = &msrc[pat_range.to_range()];
        if symbol_matches(context.search_type, context.search_str, s) {
            let start = context.range.start + pat_range.start;
            debug!("match_pattern_let point is {:?}", start);
            out.push(Match {
                matchstr: s.to_owned(),
                filepath: context.filepath.to_path_buf(),
                point: start,
                coords: None,
                local: context.is_local,
                mtype: mtype.clone(),
                contextstr: msrc.to_owned(),
                docs: String::new(),
            });
            // An exact search needs only the first binding.
            if context.search_type == ExactMatch {
                break;
            }
        }
    }
    out
}
/// Match bindings introduced by an `if let` whose body starts at `start`.
pub fn match_if_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec<Match> {
    match_let_impl(msrc, context, IfLet(start))
}
/// Match bindings introduced by a `while let` whose body starts at `start`.
pub fn match_while_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec<Match> {
    match_let_impl(msrc, context, WhileLet(start))
}
/// Match bindings introduced by a `let` statement beginning at `start`.
pub fn match_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec<Match> {
    let blob = &msrc[context.range.to_range()];
    // Cheap textual pre-filter before invoking the pattern parser.
    if blob.starts_with("let ") && txt_matches(context.search_type, context.search_str, blob) {
        match_let_impl(blob, context, Let(start))
    } else {
        Vec::new()
    }
}
/// Match bindings introduced by a `for` loop pattern; `for_start` is the
/// position of the `for` keyword.
pub fn match_for(msrc: &str, for_start: BytePos, context: &MatchCxt<'_, '_>) -> Vec<Match> {
    let mut out = Vec::new();
    let blob = &msrc[context.range.to_range()];
    let coords = ast::parse_pat_bind_stmt(blob.to_owned());
    for pat_range in coords {
        let s = &blob[pat_range.to_range()];
        if symbol_matches(context.search_type, context.search_str, s) {
            let start = pat_range.start + context.range.start;
            debug!("match_for point is {:?}, found ident {}", start, s);
            out.push(Match {
                matchstr: s.to_owned(),
                filepath: context.filepath.to_path_buf(),
                point: start, // it's 'for ~' start
                coords: None,
                local: context.is_local,
                mtype: For(for_start),
                contextstr: blob.to_owned(),
                docs: String::new(),
            });
        }
    }
    out
}
/// Return the first line of `blob`: everything before the first `\n`, or
/// the whole string if it contains no newline.
pub fn first_line(blob: &str) -> String {
    // `split('\n')` always yields at least one item, so the fallback arm
    // is unreachable in practice.
    blob.split('\n').next().unwrap_or(blob).to_owned()
}
/// Get the match's cleaned up context string
///
/// Truncate `blob` at the first occurrence of `context_end` (or take it
/// whole), then collapse all whitespace — including newlines — so the
/// result is a single line.
pub fn get_context(blob: &str, context_end: &str) -> String {
    let end = match blob.find(context_end) {
        Some(pos) => pos,
        None => blob.len(),
    };
    let words: Vec<&str> = blob[..end].split_whitespace().collect();
    words.join(" ")
}
/// Match an `extern crate foo;` / `extern crate foo as bar;` item and
/// resolve it to the crate's root source file.
pub fn match_extern_crate(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let mut res = None;
    let mut blob = &msrc[context.range.to_range()];
    // Temporary fix to parse reexported crates by skipping pub
    // keyword until racer understands crate visibility.
    if let Some(offset) = strip_visibility(blob) {
        blob = &blob[offset.0..];
    }
    // Match either `extern crate <searchstr>` (but not when it is renamed
    // with `as`, since the original name is then not in scope) or an
    // `extern crate … as <searchstr>` rename.
    if txt_matches(
        context.search_type,
        &format!("extern crate {}", context.search_str),
        blob,
    ) && !(txt_matches(
        context.search_type,
        &format!("extern crate {} as", context.search_str),
        blob,
    )) || (blob.starts_with("extern crate")
        && txt_matches(
            context.search_type,
            &format!("as {}", context.search_str),
            blob,
        ))
    {
        debug!("found an extern crate: |{}|", blob);
        let extern_crate = ast::parse_extern_crate(blob.to_owned());
        if let Some(ref name) = extern_crate.name {
            // Resolve using the crate's real name, but report the in-scope
            // (possibly renamed) name.
            let realname = extern_crate.realname.as_ref().unwrap_or(name);
            if let Some(cratepath) = get_crate_file(realname, context.filepath, session) {
                let raw_src = session.load_raw_file(&cratepath);
                res = Some(Match {
                    matchstr: name.clone(),
                    filepath: cratepath.to_path_buf(),
                    point: BytePos::ZERO,
                    coords: Some(Coordinate::start()),
                    local: false,
                    mtype: Module,
                    contextstr: cratepath.to_str().unwrap().to_owned(),
                    docs: find_mod_doc(&raw_src, BytePos::ZERO),
                });
            }
        }
    }
    res
}
/// Match a `mod` item: either an inline `mod foo { … }` (reported at its
/// declaration) or a `mod foo;` declaration (resolved to its file).
pub fn match_mod(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "mod", &[])?;
    if blob.find('{').is_some() {
        debug!("found a module inline: |{}|", blob);
        return Some(Match {
            matchstr: s,
            filepath: context.filepath.to_path_buf(),
            point: context.range.start + start,
            coords: None,
            local: false,
            mtype: Module,
            contextstr: context.filepath.to_str().unwrap().to_owned(),
            docs: String::new(),
        });
    } else {
        debug!("found a module declaration: |{}|", blob);
        // the name of the file where we found the module declaration (foo.rs)
        // without its extension!
        let filename = context.filepath.file_stem()?;
        let parent_path = context.filepath.parent()?;
        // if we found the declaration in `src/foo.rs`, then let's look for the
        // submodule in `src/foo/` as well!
        let filename_subdir = parent_path.join(filename);
        // if we are looking for "foo::bar", we have two cases:
        // 1. we found `pub mod bar;` in either `src/foo/mod.rs`
        //    (or `src/lib.rs`). As such we are going to search for `bar.rs` in
        //    the same directory (`src/foo/`, or `src/` respectively).
        // 2. we found `pub mod bar;` in `src/foo.rs`. This means that we also
        //    need to seach in `src/foo/` if it exists!
        let search_path = if filename_subdir.exists() {
            filename_subdir.as_path()
        } else {
            parent_path
        };
        match_mod_inner(msrc, context, session, search_path, s)
    }
}
/// Resolve a `mod foo;` declaration to its backing file, honoring a
/// `#[path = "…"]` attribute first, then nested inline modules, then the
/// conventional `foo.rs` / `foo/mod.rs` layout.
fn match_mod_inner(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
    search_path: &Path,
    s: String,
) -> Option<Match> {
    let ranged_raw = session.load_raw_src_ranged(&msrc, context.filepath);
    // get module from path attribute
    if let Some(modpath) =
        scopes::get_module_file_from_path(msrc, context.range.start, search_path, ranged_raw)
    {
        let doc_src = session.load_raw_file(&modpath);
        return Some(Match {
            matchstr: s,
            filepath: modpath.to_path_buf(),
            point: BytePos::ZERO,
            coords: Some(Coordinate::start()),
            local: false,
            mtype: Module,
            contextstr: modpath.to_str().unwrap().to_owned(),
            docs: find_mod_doc(&doc_src, BytePos::ZERO),
        });
    }
    // get internal module nesting
    // e.g. is this in an inline submodule?  mod foo{ mod bar; }
    // because if it is then we need to search further down the
    // directory hierarchy - e.g. <cwd>/foo/bar.rs
    let internalpath = scopes::get_local_module_path(msrc, context.range.start);
    let mut searchdir = (*search_path).to_owned();
    for s in internalpath {
        searchdir.push(&s);
    }
    if let Some(modpath) = get_module_file(&s, &searchdir, session) {
        let doc_src = session.load_raw_file(&modpath);
        let context = modpath.to_str().unwrap().to_owned();
        return Some(Match {
            matchstr: s,
            filepath: modpath,
            point: BytePos::ZERO,
            coords: Some(Coordinate::start()),
            local: false,
            mtype: Module,
            contextstr: context,
            docs: find_mod_doc(&doc_src, BytePos::ZERO),
        });
    }
    None
}
/// Find the byte offset of the `>` that closes a generic parameter list
/// at the start of `blob`, skipping over `#[...]` attributes (which may
/// themselves contain `>`); returns `None` if `{`, `(` or `;` is reached
/// first (i.e. there is no generic list).
fn find_generics_end(blob: &str) -> Option<BytePos> {
    // Naive version that attempts to skip over attributes
    let bytes = blob.as_bytes();
    let mut in_attr = false;
    let mut attr_level = 0;
    let mut level = 0;
    for (i, &b) in bytes.iter().enumerate() {
        // Naively skip attributes `#[...]`
        if in_attr {
            match b {
                b'[' => attr_level += 1,
                b']' => {
                    attr_level -= 1;
                    if attr_level == 0 {
                        in_attr = false;
                        continue;
                    }
                }
                _ => continue,
            }
        }
        // ...otherwise just try to find the last `>`
        match b {
            b'{' | b'(' | b';' => return None,
            b'<' => level += 1,
            b'>' => {
                level -= 1;
                if level == 0 {
                    return Some(i.into());
                }
            }
            // BUGFIX: was `blob.bytes().nth(i + 1)`, which walks the string
            // from the start on every `#` — O(n) per lookahead, O(n^2)
            // overall. `get` is a constant-time slice access.
            b'#' if bytes.get(i + 1) == Some(&b'[') => in_attr = true,
            _ => {}
        }
    }
    None
}
/// Match a `struct` definition in the context's range.
pub fn match_struct(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "struct", &[])?;
    debug!("found a struct |{}|", s);
    // Re-parse just the name + generic list as a synthetic `struct X<…>();`
    // header to extract the generic parameters.
    let generics =
        find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| {
            let header = format!("struct {}();", &blob[start.0..=(start + generics_end).0]);
            ast::parse_generics(header, context.filepath)
        });
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: Struct(Box::new(generics)),
        contextstr: get_context(blob, "{"),
        docs: find_doc(&doc_src, start),
    })
}
/// Match a `union` definition in the context's range (same strategy as
/// `match_struct`).
pub fn match_union(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "union", &[])?;
    debug!("found a union |{}|", s);
    // Same synthetic-header trick as in `match_struct`.
    let generics =
        find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| {
            let header = format!("union {}();", &blob[start.0..=(start + generics_end).0]);
            ast::parse_generics(header, context.filepath)
        });
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: MatchType::Union(Box::new(generics)),
        contextstr: get_context(blob, "{"),
        docs: find_doc(&doc_src, start),
    })
}
/// Match a `type` alias in the context's range.
pub fn match_type(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "type", &[])?;
    debug!("found!! a type {}", s);
    // parse type here
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: Type,
        contextstr: first_line(blob),
        docs: find_doc(&doc_src, start),
    })
}
/// Match a `trait` definition in the context's range (an `unsafe`
/// modifier before `trait` is skipped).
pub fn match_trait(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "trait", &["unsafe"])?;
    debug!("found!! a trait {}", s);
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: Trait,
        contextstr: get_context(blob, "{"),
        docs: find_doc(&doc_src, start),
    })
}
/// Collect the variants of the enum in the context's range whose names
/// start with `search_str`.
///
/// NOTE(review): this always uses a prefix test and never consults
/// `context.search_type`, so exact-match searches also get prefix hits —
/// confirm whether that is intended.
pub fn match_enum_variants(msrc: &str, context: &MatchCxt<'_, '_>) -> Vec<Match> {
    let blob = &msrc[context.range.to_range()];
    let mut out = Vec::new();
    let parsed_enum = ast::parse_enum(blob.to_owned());
    for (name, offset) in parsed_enum.values {
        if name.starts_with(context.search_str) {
            let start = context.range.start + offset;
            let m = Match {
                matchstr: name,
                filepath: context.filepath.to_path_buf(),
                point: start,
                coords: None,
                local: context.is_local,
                mtype: EnumVariant(None),
                contextstr: first_line(&blob[offset.0..]),
                docs: find_doc(msrc, start),
            };
            out.push(m);
        }
    }
    out
}
/// Match an `enum` definition in the context's range.
pub fn match_enum(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    let (start, s) = context.get_key_ident(blob, "enum", &[])?;
    debug!("found!! an enum |{}|", s);
    // Re-parse the name + generic list as a synthetic `enum X<…>{}` header
    // to extract the generic parameters (same trick as `match_struct`).
    let generics =
        find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| {
            let header = format!("enum {}{{}}", &blob[start.0..=(start + generics_end).0]);
            ast::parse_generics(header, context.filepath)
        });
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: Enum(Box::new(generics)),
        contextstr: first_line(blob),
        docs: find_doc(&doc_src, start),
    })
}
/// Match the search string against a `use` item, resolving plain paths,
/// `self` imports, renames (`use a as b`) and globs (`use a::*`).
///
/// Cycle protection: each `use` item being resolved is pushed onto the
/// `ImportInfo` stack; re-entering the same item returns no matches.
/// Glob expansion is additionally bounded by `GLOB_LIMIT`.
pub fn match_use(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
    import_info: &ImportInfo<'_, '_>,
) -> Vec<Match> {
    let import = PendingImport {
        filepath: context.filepath,
        range: context.range,
    };
    let blob = &msrc[context.range.to_range()];
    // If we're trying to resolve the same import recursively,
    // do not return any matches this time.
    if import_info.imports.contains(&import) {
        debug!("import {} involved in a cycle; ignoring", blob);
        return Vec::new();
    }
    // Push this import on the stack of pending imports.
    let pending_imports = import_info.imports.push(import);
    let mut out = Vec::new();
    // Bail out unless the blob really is a (visible) `use` item.
    if find_keyword_impl(blob, "use", "", &[], StartsWith, context.is_local).is_none() {
        return out;
    }
    let use_item = ast::parse_use(blob.to_owned());
    debug!(
        "[match_use] found item: {:?}, searchstr: {}",
        use_item, context.search_str
    );
    // for speed up!
    if !use_item.contains_glob && !txt_matches(context.search_type, context.search_str, blob) {
        return out;
    }
    let mut import_info = ImportInfo {
        imports: pending_imports,
        glob_limit: import_info.glob_limit,
    };
    // Builds the Match for a renamed import (`use a as b`), wrapping the
    // resolved inner match.
    let alias_match = |ident, start, inner, cstr| Match {
        matchstr: ident,
        filepath: context.filepath.to_owned(),
        point: context.range.start + start,
        coords: None,
        local: context.is_local,
        mtype: MatchType::UseAlias(Box::new(inner)),
        contextstr: cstr,
        docs: String::new(),
    };
    // common utilities
    macro_rules! with_match {
        ($path:expr, $ns: expr, $f:expr) => {
            let path_iter = resolve_path(
                $path,
                context.filepath,
                context.range.start,
                ExactMatch,
                $ns,
                session,
                &import_info,
            );
            for m in path_iter {
                out.push($f(m));
                if context.search_type == ExactMatch {
                    return out;
                }
            }
        };
    }
    // let's find searchstr using path_aliases
    for path_alias in use_item.path_list {
        let PathAlias {
            path: mut alias_path,
            kind: alias_kind,
            range: alias_range,
        } = path_alias;
        alias_path.set_prefix();
        match alias_kind {
            PathAliasKind::Ident(ref ident, rename_start) => {
                if !symbol_matches(context.search_type, context.search_str, &ident) {
                    continue;
                }
                with_match!(&alias_path, Namespace::Path, |m: Match| {
                    debug!("[match_use] PathAliasKind::Ident {:?} was found", ident);
                    let rename_start = match rename_start {
                        Some(r) => r,
                        None => return m,
                    };
                    // if use A as B found, we treat this type as type alias
                    let context_str = &msrc[alias_range.shift(context.range.start).to_range()];
                    alias_match(ident.clone(), rename_start, m, context_str.to_owned())
                });
            }
            PathAliasKind::Self_(ref ident, rename_start) => {
                // `use foo::{self}` / `use foo::{self as bar}`: match on the
                // last path segment unless it is renamed.
                if let Some(last_seg) = alias_path.segments.last() {
                    let search_name = if rename_start.is_some() {
                        ident
                    } else {
                        &last_seg.name
                    };
                    if !symbol_matches(context.search_type, context.search_str, search_name) {
                        continue;
                    }
                    with_match!(&alias_path, Namespace::PathParen, |m: Match| {
                        debug!("[match_use] PathAliasKind::Self_ {:?} was found", ident);
                        let rename_start = match rename_start {
                            Some(r) => r,
                            None => return m,
                        };
                        // if use A as B found, we treat this type as type alias
                        let context_str = &msrc[alias_range.shift(context.range.start).to_range()];
                        alias_match(ident.clone(), rename_start, m, context_str.to_owned())
                    });
                }
            }
            PathAliasKind::Glob => {
                // Spend one unit of the glob budget (or start a fresh
                // budget of GLOB_LIMIT - 1); restore it after resolving.
                let glob_depth_reserved = if let Some(ref mut d) = import_info.glob_limit {
                    if *d == 0 {
                        continue;
                    }
                    *d -= 1;
                    Some(*d + 1)
                } else {
                    // heuristics for issue #844
                    import_info.glob_limit = Some(GLOB_LIMIT - 1);
                    None
                };
                let mut search_path = alias_path;
                search_path.segments.push(PathSegment::new(
                    context.search_str.to_owned(),
                    vec![],
                    None,
                ));
                let path_iter = resolve_path(
                    &search_path,
                    context.filepath,
                    context.range.start,
                    context.search_type,
                    Namespace::Path,
                    session,
                    &import_info,
                );
                import_info.glob_limit = glob_depth_reserved;
                debug!("[match_use] resolve_path returned {:?} for Glob", path_iter,);
                out.extend(path_iter);
            }
        }
    }
    out
}
/// TODO: Handle `extern` functions
/// Match a free (or associated) function — one whose first parameter is
/// NOT `self`; methods are handled by `match_method`.
pub fn match_fn(msrc: Src<'_>, context: &MatchCxt<'_, '_>, session: &Session<'_>) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    if typeinf::first_param_is_self(blob) {
        return None;
    }
    match_fn_common(blob, msrc, context, session)
}
/// Match a method (first parameter is `self`); with `include_assoc_fn`
/// set, associated functions without `self` are accepted as well.
pub fn match_method(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    include_assoc_fn: bool,
    session: &Session<'_>,
) -> Option<Match> {
    let blob = &msrc[context.range.to_range()];
    if !include_assoc_fn && !typeinf::first_param_is_self(blob) {
        return None;
    }
    match_fn_common(blob, msrc, context, session)
}
/// Shared tail of `match_fn`/`match_method`: locate the `fn` keyword
/// (skipping `const`/`unsafe`/`async` modifiers) and build the Match.
fn match_fn_common(
    blob: &str,
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    let (start, s) = context.get_key_ident(blob, "fn", &["const", "unsafe", "async"])?;
    let start = context.range.start + start;
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_path_buf(),
        point: start,
        coords: None,
        local: context.is_local,
        mtype: Function,
        contextstr: get_context(blob, "{"),
        docs: find_doc(&doc_src, start),
    })
}
/// Match a `macro_rules!` definition; the search string may or may not
/// carry a trailing `!`, and the reported match name always does.
pub fn match_macro(
    msrc: Src<'_>,
    context: &MatchCxt<'_, '_>,
    session: &Session<'_>,
) -> Option<Match> {
    // Search without the `!` so `get_key_ident` sees a plain identifier.
    let trimed = context.search_str.trim_end_matches('!');
    let mut context = context.clone();
    context.search_str = trimed;
    let blob = &msrc[context.range.to_range()];
    let (start, mut s) = context.get_key_ident(blob, "macro_rules!", &[])?;
    s.push('!');
    debug!("found a macro {}", s);
    let doc_src = session.load_raw_src_ranged(&msrc, context.filepath);
    Some(Match {
        matchstr: s,
        filepath: context.filepath.to_owned(),
        point: context.range.start + start,
        coords: None,
        local: context.is_local,
        mtype: Macro,
        // NOTE(review): docs are looked up at the blob start rather than at
        // `point` (range.start + start) as the other matchers do — confirm.
        contextstr: first_line(blob),
        docs: find_doc(&doc_src, context.range.start),
    })
}
/// Collect the `///` doc comment lines immediately preceding the item at
/// `match_point` and join them (in source order) with newlines.
pub fn find_doc(msrc: &str, match_point: BytePos) -> String {
    let preceding = &msrc[0..match_point.0];
    let mut doc_lines: Vec<&str> = Vec::new();
    // Walk backwards from the line just above the match, accepting the
    // contiguous run of doc comments; `#[...]` attribute lines and blank
    // lines may appear inside the run but contribute no text.
    for line in preceding.lines().rev().skip(1).map(str::trim) {
        let is_doc = line.starts_with("///");
        if !(is_doc || line.starts_with("#[") || line.is_empty()) {
            break;
        }
        if is_doc {
            // Remove the "/// " prefix; short lines like "///" become "".
            doc_lines.push(if line.len() >= 4 { &line[4..] } else { "" });
        }
    }
    // Collected bottom-up; restore source order before joining.
    doc_lines.reverse();
    doc_lines.join("\n")
}
/// Collect a module's inner doc comments (`//!`) starting at `blobstart`
/// into a single newline-joined string.
pub(crate) fn find_mod_doc(msrc: &str, blobstart: BytePos) -> String {
    let module_doc_lines = msrc[blobstart.0..]
        .lines()
        .map(str::trim)
        // Stop at the first line that is neither a comment nor blank; this
        // lets us skip over a leading copyright notice of plain `//` lines
        // before the module docs (which may run to end of file).
        .take_while(|line| line.starts_with("//") || line.is_empty())
        .filter(|line| line.starts_with("//!"));
    // Build the result in one buffer instead of collect+join.
    let mut doc = String::new();
    let mut first = true;
    for line in module_doc_lines {
        if !first {
            doc.push('\n');
        }
        first = false;
        // Remove the "//! " prefix; bare "//!" lines contribute nothing.
        doc.push_str(if line.len() >= 4 { &line[4..] } else { "" });
    }
    doc
}
// DON'T USE MatchCxt's range
/// Match the search string against the type parameters of an `impl`
/// header (`decl`); `offset` is the header's position in the file.
pub fn match_impl(decl: String, context: &MatchCxt<'_, '_>, offset: BytePos) -> Option<Vec<Match>> {
    let ImplHeader { generics, .. } =
        ast::parse_impl(decl, context.filepath, offset, true, offset)?;
    let mut out = Vec::new();
    for type_param in generics.0 {
        if !symbol_matches(context.search_type, context.search_str, &type_param.name) {
            continue;
        }
        out.push(type_param.into_match());
    }
    Some(out)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `find_generics_end` must return the offset of the `>` closing the
    /// generic list, skipping over `#[...]` attributes that contain `>`.
    #[test]
    fn find_generics_end() {
        use super::find_generics_end;
        assert_eq!(
            find_generics_end("Vec<T, #[unstable(feature = \"\", issue = \"\"] A: AllocRef = Global>"),
            Some(BytePos(64))
        );
        assert_eq!(
            find_generics_end("Vec<T, A: AllocRef = Global>"),
            Some(BytePos(27))
        );
        assert_eq!(
            find_generics_end("Result<Vec<String>, Option<&str>>"),
            Some(BytePos(32))
        );
    }
}

125
racer/src/racer/metadata.rs Normal file
View File

@ -0,0 +1,125 @@
use lazycell;
extern crate racer_cargo_metadata as metadata;
use self::lazycell::LazyCell;
use self::metadata::mapping::{Edition as Ed, PackageIdx, PackageMap};
use crate::project_model::{Edition, ProjectModelProvider};
use std::cell::Cell;
use std::path::{Path, PathBuf};
/// Lazily-initialized cache of `cargo metadata` output for one workspace.
struct MetadataCache {
    // Package graph, filled on the first successful `cargo metadata` run.
    pkg_map: LazyCell<PackageMap>,
    // Manifest discovered at startup, if any (see `project_model`).
    manifest_path: Option<PathBuf>,
    // Set when `cargo metadata` failed so we don't retry on every query.
    failed_to_fill: Cell<bool>,
}
impl MetadataCache {
    /// Create an empty cache; nothing is run until the first query.
    fn new(manifest_path: Option<PathBuf>) -> Self {
        MetadataCache {
            pkg_map: LazyCell::new(),
            manifest_path,
            failed_to_fill: Cell::new(false),
        }
    }
    /// Run `cargo metadata` for `manifest` and store the resulting package
    /// map in the lazy cell. Errors are logged and reported as `Err(())`.
    fn fill_impl(&self, manifest: &Path) -> Result<(), ()> {
        let meta = metadata::run(manifest, true)
            .or_else(|e| {
                if let metadata::ErrorKind::Subprocess(ref s) = e {
                    // HACK: if --frozen failed, try again without --frozen
                    // see https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/registry.rs#L344
                    if s.contains("--frozen") {
                        info!("MetadataCache: try again without --frozen");
                        return metadata::run(manifest, false);
                    }
                }
                Err(e)
            })
            .map_err(|e| {
                warn!("Error in cargo metadata: {}", e);
            })?;
        let pkg_map = PackageMap::from_metadata(meta);
        self.pkg_map.fill(pkg_map).map_err(|_| {
            warn!("Error in initialize lazy cell");
        })
    }
    /// Fill the cache at most once; a failed attempt is remembered via
    /// `failed_to_fill` so we don't rerun `cargo metadata` on every query.
    fn fill(&self, manifest: &Path) {
        if !self.pkg_map.filled() && !self.failed_to_fill.get() {
            self.failed_to_fill.set(self.fill_impl(manifest).is_err());
        }
    }
    /// Ensure the cache is filled and resolve `manifest` to its package
    /// index. Returns `None` if metadata is unavailable or the manifest is
    /// not part of the package graph.
    fn setup(&self, manifest: &Path) -> Option<(&PackageMap, PackageIdx)> {
        self.fill(manifest);
        // BUGFIX: `fill` can fail (cargo metadata errored — that is what
        // `failed_to_fill` tracks), leaving the cell empty; this used to
        // `unwrap()` and panic. Report "no metadata" as None instead.
        let pkg_map: &PackageMap = self.pkg_map.borrow()?;
        let idx = if manifest.is_relative() {
            // The package map is keyed on absolute paths.
            let path = manifest.canonicalize().ok()?;
            pkg_map.get_idx(&path)?
        } else {
            pkg_map.get_idx(manifest)?
        };
        Some((pkg_map, idx))
    }
}
impl ProjectModelProvider for MetadataCache {
    /// Look up the Rust edition of the package owning `manifest`.
    fn edition(&self, manifest: &Path) -> Option<Edition> {
        let (pkg_map, idx) = self.setup(manifest)?;
        let edition = pkg_map.get_edition(idx);
        Some(match edition {
            Ed::Ed2015 => Edition::Ed2015,
            Ed::Ed2018 => Edition::Ed2018,
            Ed::Ed2021 => Edition::Ed2021,
        })
    }
    /// Find the Cargo.toml governing `path`, warming the cache for the
    /// startup manifest as a side effect.
    fn discover_project_manifest(&self, path: &Path) -> Option<PathBuf> {
        let cur_manifest = metadata::find_manifest(path)?;
        let manifest = self.manifest_path.as_ref()?;
        self.fill(manifest);
        Some(cur_manifest)
    }
    /// Return `(name, src_path)` for every dependency of the package at
    /// `manifest` accepted by `search_fn`, plus the package's own lib
    /// target if it matches.
    fn search_dependencies(
        &self,
        manifest: &Path,
        search_fn: Box<dyn Fn(&str) -> bool>,
    ) -> Vec<(String, PathBuf)> {
        let (pkg_map, idx) = match self.setup(manifest) {
            Some(x) => x,
            None => return vec![],
        };
        let deps = pkg_map
            .get_dependencies(idx)
            .iter()
            .filter(|(s, _)| search_fn(s))
            .map(|(s, p)| (s.to_string(), p.to_path_buf()));
        let lib = pkg_map
            .get_lib(idx)
            .filter(|t| search_fn(&t.name))
            .map(|t| (t.name.to_string(), t.src_path.to_path_buf()));
        deps.chain(lib).collect()
    }
    /// Resolve `libname` to the dependency's entry-point source file,
    /// trying the name as given, its hyphenated form, and finally the
    /// package's own lib target.
    fn resolve_dependency(&self, manifest: &Path, libname: &str) -> Option<PathBuf> {
        debug!(
            "MetadataCache::resolve_dependency manifest: {:?} libname: {}",
            manifest, libname
        );
        let (pkg_map, idx) = self.setup(manifest)?;
        pkg_map
            .get_src_path_from_libname(idx, libname)
            .or_else(|| {
                // Crate names use `-` on crates.io but `_` in source.
                let hyphnated = libname.replace('_', "-");
                pkg_map.get_src_path_from_libname(idx, &hyphnated)
            })
            .or_else(|| {
                // The search may be for this package's own lib target.
                let target = pkg_map.get_lib(idx)?;
                if target.name.replace('-', "_") == libname {
                    Some(&target.src_path)
                } else {
                    None
                }
            })
            .map(|p| p.to_owned())
    }
}
/// Build the default `cargo metadata`-backed project model, seeded with
/// the manifest discovered from `project_path` (if any).
pub fn project_model(project_path: Option<&Path>) -> Box<dyn ProjectModelProvider> {
    let manifest = match project_path {
        Some(path) => metadata::find_manifest(path),
        None => None,
    };
    Box::new(MetadataCache::new(manifest))
}

2749
racer/src/racer/nameres.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,231 @@
use crate::core::{BytePos, Match, MatchType, Namespace, SearchType, Session};
use crate::matchers::ImportInfo;
use crate::nameres::{self, RUST_SRC_PATH};
use rustc_ast::ast::{IntTy, LitIntType, UintTy};
use std::path::PathBuf;
const PRIM_DOC: &str = "std/src/primitive_docs.rs";
const KEY_DOC: &str = "std/src/keyword_docs.rs";
/// Primitive types (plus a few primitive-like keywords such as `await`)
/// that racer can complete and document.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum PrimKind {
    Bool,
    Never,
    Char,
    Unit,
    Pointer,
    Array,
    Slice,
    Str,
    Tuple,
    F32,
    F64,
    I8,
    I16,
    I32,
    I64,
    I128,
    U8,
    U16,
    U32,
    U64,
    U128,
    Isize,
    Usize,
    Ref,
    Fn,
    Await,
}
/// The primitives offered as name-completion candidates (the numeric
/// types plus `bool`, `char` and `str`); see `get_primitive_docs` /
/// `get_primitive_mods`.
const PRIM_MATCHES: [PrimKind; 17] = [
    PrimKind::Bool,
    PrimKind::Char,
    PrimKind::Str,
    PrimKind::F32,
    PrimKind::F64,
    PrimKind::I8,
    PrimKind::I16,
    PrimKind::I32,
    PrimKind::I64,
    PrimKind::I128,
    PrimKind::U8,
    PrimKind::U16,
    PrimKind::U32,
    PrimKind::U64,
    PrimKind::U128,
    PrimKind::Isize,
    PrimKind::Usize,
];
impl PrimKind {
    /// Map a rustc integer-literal type to the corresponding primitive.
    /// Unsuffixed literals are treated as `u32`.
    pub(crate) fn from_litint(lit: LitIntType) -> Self {
        match lit {
            LitIntType::Signed(i) => match i {
                IntTy::I8 => PrimKind::I8,
                IntTy::I16 => PrimKind::I16,
                IntTy::I32 => PrimKind::I32,
                IntTy::I64 => PrimKind::I64,
                IntTy::I128 => PrimKind::I128,
                IntTy::Isize => PrimKind::Isize,
            },
            LitIntType::Unsigned(u) => match u {
                UintTy::U8 => PrimKind::U8,
                UintTy::U16 => PrimKind::U16,
                UintTy::U32 => PrimKind::U32,
                UintTy::U64 => PrimKind::U64,
                UintTy::U128 => PrimKind::U128,
                UintTy::Usize => PrimKind::Usize,
            },
            LitIntType::Unsuffixed => PrimKind::U32,
        }
    }
    /// The rust-src relative file(s) that hold this primitive's inherent
    /// impls, or `None` for primitives without impl blocks.
    fn impl_files(self) -> Option<&'static [&'static str]> {
        match self {
            PrimKind::Bool => None,
            PrimKind::Never => None,
            PrimKind::Char => Some(&["core/src/char/methods.rs"]),
            PrimKind::Unit => None,
            PrimKind::Pointer => Some(&["core/src/ptr.rs"]),
            PrimKind::Array => None,
            PrimKind::Slice => Some(&["core/src/slice/mod.rs", "alloc/src/slice.rs"]),
            PrimKind::Str => Some(&["core/src/str/mod.rs", "alloc/src/str.rs"]),
            PrimKind::Tuple => None,
            PrimKind::F32 => Some(&["std/src/f32.rs", "core/src/num/f32.rs"]),
            PrimKind::F64 => Some(&["std/src/f64.rs", "core/src/num/f64.rs"]),
            PrimKind::I8 => Some(&["core/src/num/mod.rs"]),
            PrimKind::I16 => Some(&["core/src/num/mod.rs"]),
            PrimKind::I32 => Some(&["core/src/num/mod.rs"]),
            PrimKind::I64 => Some(&["core/src/num/mod.rs"]),
            PrimKind::I128 => Some(&["core/src/num/mod.rs"]),
            PrimKind::U8 => Some(&["core/src/num/mod.rs"]),
            PrimKind::U16 => Some(&["core/src/num/mod.rs"]),
            PrimKind::U32 => Some(&["core/src/num/mod.rs"]),
            PrimKind::U64 => Some(&["core/src/num/mod.rs"]),
            PrimKind::U128 => Some(&["core/src/num/mod.rs"]),
            PrimKind::Isize => Some(&["core/src/num/mod.rs"]),
            PrimKind::Usize => Some(&["core/src/num/mod.rs"]),
            PrimKind::Ref => None,
            PrimKind::Fn => None,
            PrimKind::Await => None,
        }
    }
    /// Whether this is a keyword (documented in `keyword_docs.rs`) rather
    /// than a true primitive type.
    fn is_keyword(self) -> bool {
        match self {
            PrimKind::Await => true,
            _ => false,
        }
    }
    /// The user-visible name of the primitive.
    fn match_name(self) -> &'static str {
        match self {
            PrimKind::Bool => "bool",
            PrimKind::Never => "never",
            PrimKind::Char => "char",
            PrimKind::Unit => "unit",
            PrimKind::Pointer => "pointer",
            PrimKind::Array => "array",
            PrimKind::Slice => "slice",
            PrimKind::Str => "str",
            PrimKind::Tuple => "tuple",
            PrimKind::F32 => "f32",
            PrimKind::F64 => "f64",
            PrimKind::I8 => "i8",
            PrimKind::I16 => "i16",
            PrimKind::I32 => "i32",
            PrimKind::I64 => "i64",
            PrimKind::I128 => "i128",
            PrimKind::U8 => "u8",
            PrimKind::U16 => "u16",
            PrimKind::U32 => "u32",
            PrimKind::U64 => "u64",
            PrimKind::U128 => "u128",
            PrimKind::Isize => "isize",
            PrimKind::Usize => "usize",
            PrimKind::Ref => "ref",
            PrimKind::Fn => "fn",
            PrimKind::Await => "await",
        }
    }
    /// Absolute paths of this primitive's impl files under the configured
    /// rust-src checkout; `None` if rust-src is missing or there are none.
    pub(crate) fn get_impl_files(&self) -> Option<Vec<PathBuf>> {
        let src_path = RUST_SRC_PATH.as_ref()?;
        let impls = self.impl_files()?;
        Some(impls.iter().map(|file| src_path.join(file)).collect())
    }
    /// Build a module-like `Match` for the primitive (only for primitives
    /// that have impl files); the filepath is left empty.
    pub fn to_module_match(self) -> Option<Match> {
        let _impl_files = self.impl_files()?;
        Some(Match {
            matchstr: self.match_name().to_owned(),
            filepath: PathBuf::new(),
            point: BytePos::ZERO,
            coords: None,
            local: false,
            mtype: MatchType::Builtin(self),
            contextstr: String::new(),
            docs: String::new(),
        })
    }
    /// Resolve the primitive's documentation entry in rust-src
    /// (`primitive_docs.rs`, or `keyword_docs.rs` for keywords) and turn
    /// it into a `Match`.
    pub fn to_doc_match(self, session: &Session<'_>) -> Option<Match> {
        let src_path = RUST_SRC_PATH.as_ref()?;
        // The docs live in dummy modules named `prim_<name>` /
        // `<name>_keyword`.
        let (path, seg) = if self.is_keyword() {
            (
                src_path.join(KEY_DOC),
                format!("{}_keyword", self.match_name()),
            )
        } else {
            (
                src_path.join(PRIM_DOC),
                format!("prim_{}", self.match_name()),
            )
        };
        let mut m = nameres::resolve_name(
            &seg.into(),
            &path,
            BytePos::ZERO,
            SearchType::ExactMatch,
            Namespace::Mod,
            session,
            &ImportInfo::default(),
        )
        .into_iter()
        .next()?;
        m.mtype = MatchType::Builtin(self);
        m.matchstr = self.match_name().to_owned();
        Some(m)
    }
}
/// Appends doc matches for every primitive whose name satisfies the query;
/// an exact-match search stops after the first successful hit.
pub fn get_primitive_docs(
    searchstr: &str,
    stype: SearchType,
    session: &Session<'_>,
    out: &mut Vec<Match>,
) {
    for prim in PRIM_MATCHES.iter() {
        let name = prim.match_name();
        let matched = (stype == SearchType::StartsWith && name.starts_with(searchstr))
            || (stype == SearchType::ExactMatch && name == searchstr);
        if !matched {
            continue;
        }
        if let Some(doc_match) = prim.to_doc_match(session) {
            out.push(doc_match);
            if stype == SearchType::ExactMatch {
                return;
            }
        }
    }
}
/// Appends module matches for every primitive whose name satisfies the query;
/// an exact-match search stops after the first successful hit.
pub fn get_primitive_mods(searchstr: &str, stype: SearchType, out: &mut Vec<Match>) {
    for prim in PRIM_MATCHES.iter() {
        let name = prim.match_name();
        let matched = (stype == SearchType::StartsWith && name.starts_with(searchstr))
            || (stype == SearchType::ExactMatch && name == searchstr);
        if !matched {
            continue;
        }
        if let Some(module_match) = prim.to_module_match() {
            out.push(module_match);
            if stype == SearchType::ExactMatch {
                return;
            }
        }
    }
}

View File

@ -0,0 +1,19 @@
use std::path::{Path, PathBuf};
/// Rust language edition of a crate, as reported by the project model.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum Edition {
    /// Rust 2015.
    Ed2015,
    /// Rust 2018.
    Ed2018,
    /// Rust 2021.
    Ed2021,
}
/// Abstraction over the build system that racer queries for project
/// structure (manifests, editions, dependency locations).
pub trait ProjectModelProvider {
    /// Edition of the crate described by `manifest`, if determinable.
    fn edition(&self, manifest: &Path) -> Option<Edition>;
    /// Locates the manifest file governing `path`.
    fn discover_project_manifest(&self, path: &Path) -> Option<PathBuf>;
    /// Dependencies of `manifest` whose name passes `search_fn`, as
    /// (name, path) pairs. NOTE(review): presumably the path is the
    /// dependency's source entry point — confirm against implementors.
    fn search_dependencies(
        &self,
        manifest: &Path,
        search_fn: Box<dyn Fn(&str) -> bool>,
    ) -> Vec<(String, PathBuf)>;
    /// Resolves the dependency named `dep_name` of `manifest` to a path.
    fn resolve_dependency(&self, manifest: &Path, dep_name: &str) -> Option<PathBuf>;
}

903
racer/src/racer/scopes.rs Normal file
View File

@ -0,0 +1,903 @@
use crate::ast_types::Path as RacerPath;
#[cfg(test)]
use crate::core::{self, Coordinate};
use crate::core::{BytePos, ByteRange, CompletionType, Namespace, RangedRawSrc, Src};
use crate::util::{self, char_at};
use std::iter::Iterator;
use std::path::{Path, PathBuf};
use std::str::from_utf8;
/// Scans `iter` for the `close` byte that sits at nesting depth `level_end`
/// (depth counted via matching `open` bytes), returning its offset within the
/// iterator. Returns `None` when a `close` at depth 0 other than the target is
/// hit, or when the iterator is exhausted.
fn find_close<'a, A>(iter: A, open: u8, close: u8, level_end: u32) -> Option<BytePos>
where
    A: Iterator<Item = &'a u8>,
{
    let mut depth = 0u32;
    for (idx, &byte) in iter.enumerate() {
        match byte {
            // `close` is tested before `open`, matching the original ordering.
            b if b == close && depth == level_end => return Some(idx.into()),
            b if b == close && depth == 0 => return None,
            b if b == close => depth -= 1,
            b if b == open => depth += 1,
            _ => {}
        }
    }
    None
}
// expected to use with
/// Like `find_close`, but takes a pre-enumerated iterator and returns the
/// position one past the matching byte (for compatibility with `find_close`
/// when scanning a partially-consumed iterator).
fn find_close_with_pos<'a>(
    iter: impl Iterator<Item = (usize, &'a u8)>,
    open: u8,
    close: u8,
    level_end: u32,
) -> Option<BytePos> {
    let mut depth = 0u32;
    for (pos, &byte) in iter {
        match byte {
            // +1 for compatibility with find_close
            b if b == close && depth == level_end => return Some(BytePos(pos).increment()),
            b if b == close && depth == 0 => return None,
            b if b == close => depth -= 1,
            b if b == open => depth += 1,
            _ => {}
        }
    }
    None
}
/// Offset of the `)` matching an already-open parenthesis, scanning from
/// `pos`. Falls back to the end of `src` when no closing paren is found.
pub fn find_closing_paren(src: &str, pos: BytePos) -> BytePos {
    match find_close(src.as_bytes()[pos.0..].iter(), b'(', b')', 0) {
        Some(count) => pos + count,
        None => src.len().into(),
    }
}
/// If the parenthesized region opened at `parentheses_open_pos` is a valid
/// closure argument list, the closure's scope starts at that `(`.
pub fn find_closure_scope_start(
    src: Src<'_>,
    point: BytePos,
    parentheses_open_pos: BytePos,
) -> Option<BytePos> {
    // Restrict the check to the text up to the matching `)`.
    let closing = find_closing_paren(&src[..], point - parentheses_open_pos);
    let candidate = &src[..closing.0];
    if util::closure_valid_arg_scope(candidate).is_some() {
        Some(parentheses_open_pos)
    } else {
        None
    }
}
/// Finds the start of the innermost scope containing `point` by scanning
/// backwards for an unbalanced `{` or a `(` that opens a closure arg list.
pub fn scope_start(src: Src<'_>, point: BytePos) -> BytePos {
    // Only the text before `point` matters.
    let src = src.change_length(point);
    // Closing `}` / `)` still owed while scanning right-to-left.
    let (mut clev, mut plev) = (0u32, 0u32);
    // Kept mutable: after `break`, the partially-consumed iterator is reused
    // by the fallback search below.
    let mut iter = src[..].as_bytes().into_iter().enumerate().rev();
    for (pos, b) in &mut iter {
        match b {
            b'{' => {
                // !!! found { earlier than (
                if clev == 0 {
                    return BytePos(pos).increment();
                }
                clev -= 1;
            }
            b'}' => clev += 1,
            b'(' => {
                // !!! found ( earlier than {
                if plev == 0 {
                    // An unbalanced `(` may open a closure's argument list.
                    if let Some(scope_pos) =
                        find_closure_scope_start(src, point, BytePos(pos).increment())
                    {
                        return scope_pos;
                    } else {
                        break;
                    }
                }
                plev -= 1;
            }
            b')' => plev += 1,
            _ => {}
        }
    }
    // fallback: return curly_parent_open_pos
    find_close_with_pos(iter, b'}', b'{', 0).unwrap_or(BytePos::ZERO)
}
/// Start of the statement surrounding `point`, searched within the
/// innermost scope that contains it.
pub fn find_stmt_start(msrc: Src<'_>, point: BytePos) -> Option<BytePos> {
    find_stmt_start_given_scope(msrc, point, scope_start(msrc, point))
}
/// Start of the statement within `scope_start`'s scope whose range covers
/// `point`, or `None` when no statement contains it.
fn find_stmt_start_given_scope(
    msrc: Src<'_>,
    point: BytePos,
    scope_start: BytePos,
) -> Option<BytePos> {
    debug!(
        "[find_stmt_start] now we are in scope {:?} ~ {:?}",
        scope_start, point,
    );
    for stmt in msrc.shift_start(scope_start).iter_stmts() {
        // Statement ranges are scope-relative; shift back to absolute.
        let range = stmt.shift(scope_start);
        if range.contains(point) {
            return Some(range.start);
        }
    }
    None
}
/// Finds a statement start or panics.
pub fn expect_stmt_start(msrc: Src<'_>, point: BytePos) -> BytePos {
    match find_stmt_start(msrc, point) {
        Some(start) => start,
        None => panic!("Statement does not have a beginning"),
    }
}
/// Names of the inline `mod` blocks enclosing `point`, outermost first.
pub fn get_local_module_path(msrc: Src<'_>, point: BytePos) -> Vec<String> {
    let mut segments = Vec::new();
    get_local_module_path_(msrc, point, &mut segments);
    segments
}
/// Recursive worker for `get_local_module_path`: descends into each
/// `mod <name> {` statement containing `point`, pushing the name on the way.
fn get_local_module_path_(msrc: Src<'_>, point: BytePos, out: &mut Vec<String>) {
    for range in msrc.iter_stmts() {
        if range.contains_exclusive(point) {
            let blob = msrc.shift_range(range);
            // Skip a leading visibility modifier (e.g. `pub`).
            let start = util::strip_visibility(&blob).unwrap_or(BytePos::ZERO);
            if !blob[start.0..].starts_with("mod") {
                continue;
            }
            if let Some(newstart) = blob[start.0 + 3..].find('{') {
                // +3 skips "mod", +1 more steps past the `{` itself.
                let newstart = newstart + start.0 + 4;
                // Module name sits between "mod" and the `{`.
                out.push(blob[start.0 + 3..newstart - 1].trim().to_owned());
                // Recurse with positions rebased to the module body.
                get_local_module_path_(
                    blob.shift_start(newstart.into()),
                    point - range.start - newstart.into(),
                    out,
                );
            }
        }
    }
}
/// Scans statements for a `#[path = "..."]` attribute whose following `mod`
/// item contains `point`, returning the referenced file when it exists on
/// disk.
pub fn get_module_file_from_path(
    msrc: Src<'_>,
    point: BytePos,
    parentdir: &Path,
    raw_src: RangedRawSrc,
) -> Option<PathBuf> {
    let mut iter = msrc.iter_stmts();
    // `iter.next()` is also called inside the body (to consume the `mod`
    // item following the attribute), so this cannot be a plain `for` loop.
    while let Some(range) = iter.next() {
        let blob = &raw_src[range.to_range()];
        let start = range.start;
        if blob.starts_with("#[path ") {
            if let Some(ByteRange {
                start: _,
                end: modend,
            }) = iter.next()
            {
                if start < point && modend > point {
                    let pathstart = blob.find('"')? + 1;
                    // Fixed: was `.unwrap()`, which panicked on a malformed
                    // attribute missing the closing quote; `?` now bails out
                    // with `None`, matching the opening-quote lookup above.
                    let pathend = blob[pathstart..].find('"')?;
                    let path = &blob[pathstart..pathstart + pathend];
                    debug!("found a path attribute, path = |{}|", path);
                    let filepath = parentdir.join(path);
                    if filepath.exists() {
                        return Some(filepath);
                    }
                }
            }
        }
    }
    None
}
// TODO(kngwyu): this functions shouldn't be generic
/// Start of the `impl` (or `trait`) item enclosing `point`, searching from
/// `scopestart` and recursing into non-impl items that contain the point.
pub fn find_impl_start(msrc: Src<'_>, point: BytePos, scopestart: BytePos) -> Option<BytePos> {
    // Position of `point` relative to the scope being searched.
    let len = point - scopestart;
    msrc.shift_start(scopestart)
        .iter_stmts()
        .find(|range| range.end > len)
        .and_then(|range| {
            let blob = msrc.shift_start(scopestart + range.start);
            if blob.starts_with("impl") || util::trim_visibility(&blob[..]).starts_with("trait") {
                Some(scopestart + range.start)
            } else {
                // Not an impl/trait: descend past its opening brace and retry.
                let newstart = blob.find('{')? + 1;
                find_impl_start(msrc, point, scopestart + range.start + newstart.into())
            }
        })
}
#[test]
fn finds_subnested_module() {
    // A point inside `bar` reports both enclosing modules, outermost first;
    // a point directly inside `foo` reports only `foo`.
    use crate::core;
    let src = "
pub mod foo {
    pub mod bar {
        here
    }
}";
    let raw_src = core::RawSource::new(src.to_owned());
    let src = core::MaskedSource::new(src);
    let point = raw_src.coords_to_point(&Coordinate::new(4, 12)).unwrap();
    let v = get_local_module_path(src.as_src(), point);
    assert_eq!("foo", &v[0][..]);
    assert_eq!("bar", &v[1][..]);
    let point = raw_src.coords_to_point(&Coordinate::new(3, 8)).unwrap();
    let v = get_local_module_path(src.as_src(), point);
    assert_eq!("foo", &v[0][..]);
}
// TODO: This function can't handle use_nested_groups
/// Splits `s` at the last non-identifier character into (context,
/// completion word), classifying the completion as a field access (`.`)
/// or a path segment (`::` or anything else).
pub fn split_into_context_and_completion(s: &str) -> (&str, &str, CompletionType) {
    match s
        .char_indices()
        .rev()
        .find(|&(_, c)| !util::is_ident_char(c))
    {
        Some((i, c)) => match c {
            '.' => (&s[..i], &s[(i + 1)..], CompletionType::Field),
            // Fixed guard: was `s.len() > 1`, which still matched when the
            // `:` sat at i == 0 (e.g. ":x"), making `i - 1` underflow and
            // panic. `i > 0` keeps the `::` behavior and routes a leading
            // lone `:` to the default arm instead.
            ':' if i > 0 => (&s[..(i - 1)], &s[(i + 1)..], CompletionType::Path),
            _ => (&s[..(i + 1)], &s[(i + 1)..], CompletionType::Path),
        },
        None => ("", s, CompletionType::Path),
    }
}
/// search in reverse for the start of the current expression
/// allow . and :: to be surrounded by white chars to enable multi line call chains
pub fn get_start_of_search_expr(src: &str, point: BytePos) -> BytePos {
    // Backwards byte-level scanner; `State` records what we are currently
    // inside while moving right-to-left from `point`. Arm order below is
    // significant: earlier arms win for the same (byte, state) pair.
    #[derive(Debug)]
    enum State {
        /// In parentheses; the value inside identifies depth.
        Paren(usize),
        /// in bracket
        Bracket(usize),
        /// In a string
        StringLiteral,
        /// In char
        CharLiteral,
        // The last significant byte to our right was a `.`.
        StartsWithDot,
        // Whitespace seen: the chain may only continue via `.`/`::`; the
        // payload is the candidate expression start if it does not.
        StartsWithCol(usize),
        // Saw one `:`; a second `:` keeps the chain alive.
        MustEndsWithDot(usize),
        None,
        // Expression start located at the payload index.
        Result(usize),
    }
    let mut ws_ok = State::None;
    for (i, c) in src.as_bytes()[..point.0].iter().enumerate().rev() {
        ws_ok = match (*c, ws_ok) {
            (b'(', State::None) => State::Result(i + 1),
            (b'(', State::Paren(1)) => State::None,
            (b'(', State::Paren(lev)) => State::Paren(lev - 1),
            (b')', State::Paren(lev)) => State::Paren(lev + 1),
            (b')', State::None) | (b')', State::StartsWithDot) => State::Paren(1),
            (b'[', State::None) => State::Result(i + 1),
            (b'[', State::Bracket(1)) => State::None,
            (b'[', State::Bracket(lev)) => State::Bracket(lev - 1),
            (b']', State::Bracket(lev)) => State::Bracket(lev + 1),
            (b']', State::StartsWithDot) => State::Bracket(1),
            (b'.', State::None) => State::StartsWithDot,
            (b'.', State::StartsWithDot) => State::Result(i + 2),
            (b'.', State::MustEndsWithDot(_)) => State::None,
            (b':', State::MustEndsWithDot(index)) => State::StartsWithCol(index),
            (b':', State::StartsWithCol(_)) => State::None,
            (b'"', State::None) | (b'"', State::StartsWithDot) => State::StringLiteral,
            (b'"', State::StringLiteral) => State::None,
            (b'?', State::StartsWithDot) => State::None,
            (b'\'', State::None) | (b'\'', State::StartsWithDot) => State::CharLiteral,
            (b'\'', State::StringLiteral) => State::StringLiteral,
            (b'\'', State::CharLiteral) => State::None,
            (_, State::CharLiteral) => State::CharLiteral,
            (_, State::StringLiteral) => State::StringLiteral,
            // A single `:` not followed (leftwards) by another `:` ends the chain.
            (_, State::StartsWithCol(index)) => State::Result(index),
            (_, State::None) if char_at(src, i).is_whitespace() => State::MustEndsWithDot(i + 1),
            (_, State::MustEndsWithDot(index)) if char_at(src, i).is_whitespace() => {
                State::MustEndsWithDot(index)
            }
            (_, State::StartsWithDot) if char_at(src, i).is_whitespace() => State::StartsWithDot,
            (_, State::MustEndsWithDot(index)) => State::Result(index),
            (_, State::None) if !util::is_search_expr_char(char_at(src, i)) => State::Result(i + 1),
            (_, State::None) => State::None,
            (_, s @ State::Paren(_)) => s,
            (_, s @ State::Bracket(_)) => s,
            (_, State::StartsWithDot) if util::is_search_expr_char(char_at(src, i)) => State::None,
            (_, State::StartsWithDot) => State::Result(i + 1),
            (_, State::Result(_)) => unreachable!(),
        };
        if let State::Result(index) = ws_ok {
            return index.into();
        }
    }
    BytePos::ZERO
}
/// Walks backwards from `point` to locate the start of the match pattern:
/// just after the first unbalanced `(` or the first non-pattern character
/// at paren depth zero.
pub fn get_start_of_pattern(src: &str, point: BytePos) -> BytePos {
    let mut depth = 0u32;
    for (idx, &byte) in src[..point.0].as_bytes().iter().enumerate().rev() {
        match byte {
            b'(' if depth == 0 => return BytePos(idx).increment(),
            b'(' => depth -= 1,
            b')' => depth += 1,
            _ if depth == 0 && !util::is_pattern_char(byte as char) => {
                return BytePos(idx).increment();
            }
            _ => {}
        }
    }
    BytePos::ZERO
}
#[cfg(test)]
mod test_get_start_of_pattern {
    use super::{get_start_of_pattern, BytePos};
    // Thin wrapper working on plain usizes for terser assertions.
    fn get_start_of_pattern_(s: &str, u: usize) -> usize {
        get_start_of_pattern(s, BytePos(u)).0
    }
    #[test]
    fn handles_variant() {
        assert_eq!(4, get_start_of_pattern_("foo, Some(a) =>", 13));
    }
    #[test]
    fn handles_variant2() {
        assert_eq!(
            4,
            get_start_of_pattern_("bla, ast::PatTup(ref tuple_elements) => {", 36)
        );
    }
}
/// Full byte range of the search expression around `point`: from the
/// computed expression start to the end of the identifier under the cursor.
pub fn expand_search_expr(msrc: &str, point: BytePos) -> ByteRange {
    let begin = get_start_of_search_expr(msrc, point);
    let finish = util::find_ident_end(msrc, point);
    ByteRange::new(begin, finish)
}
#[cfg(test)]
// Renamed from `test_expand_seacrh_expr` — fixed the "seacrh" typo; the
// module is cfg(test)-private and referenced nowhere else.
mod test_expand_search_expr {
    use super::{expand_search_expr, BytePos};
    // Wrapper returning plain (start, end) usizes for terse assertions.
    fn expand_search_expr_(s: &str, u: usize) -> (usize, usize) {
        let res = expand_search_expr(s, BytePos(u));
        (res.start.0, res.end.0)
    }
    #[test]
    fn finds_ident() {
        assert_eq!((0, 7), expand_search_expr_("foo.bar", 5))
    }
    #[test]
    fn ignores_bang_at_start() {
        assert_eq!((1, 4), expand_search_expr_("!foo", 1))
    }
    #[test]
    fn handles_chained_calls() {
        assert_eq!((0, 20), expand_search_expr_("yeah::blah.foo().bar", 18))
    }
    #[test]
    fn handles_inline_closures() {
        assert_eq!(
            (0, 29),
            expand_search_expr_("yeah::blah.foo(|x:foo|{}).bar", 27)
        )
    }
    #[test]
    fn handles_a_function_arg() {
        assert_eq!(
            (5, 25),
            expand_search_expr_("myfn(foo::new().baz().com)", 23)
        )
    }
    #[test]
    fn handles_macros() {
        assert_eq!((0, 9), expand_search_expr_("my_macro!()", 8))
    }
    #[test]
    fn handles_pos_at_end_of_search_str() {
        assert_eq!((0, 7), expand_search_expr_("foo.bar", 7))
    }
    #[test]
    fn handles_type_definition() {
        assert_eq!((4, 7), expand_search_expr_("x : foo", 7))
    }
    #[test]
    fn handles_ws_before_dot() {
        assert_eq!((0, 8), expand_search_expr_("foo .bar", 7))
    }
    #[test]
    fn handles_ws_after_dot() {
        assert_eq!((0, 8), expand_search_expr_("foo. bar", 7))
    }
    #[test]
    fn handles_ws_dot() {
        assert_eq!((0, 13), expand_search_expr_("foo. bar .foo", 12))
    }
    #[test]
    fn handles_let() {
        assert_eq!((8, 11), expand_search_expr_("let b = foo", 10))
    }
    #[test]
    fn handles_double_dot() {
        assert_eq!((2, 5), expand_search_expr_("..foo", 4))
    }
}
/// Appends `start - prev` bytes of filler to `result` by repeating `buffer`
/// whole (integer-division count) and then a prefix for the remainder.
fn fill_gaps(buffer: &str, result: &mut String, start: usize, prev: usize) {
    let gap = start - prev;
    let (whole, rest) = (gap / buffer.len(), gap % buffer.len());
    for _ in 0..whole {
        result.push_str(buffer);
    }
    result.push_str(&buffer[..rest]);
}
/// Rebuilds `src` keeping only the code `chunks` and replacing everything
/// between them (the comments) with spaces, so byte offsets stay identical
/// to the original source.
pub fn mask_comments(src: &str, chunks: &[ByteRange]) -> String {
    let mut masked = String::with_capacity(src.len());
    let space_bytes = [b' '; 128];
    let spaces = from_utf8(&space_bytes).unwrap();
    let mut cursor = BytePos::ZERO;
    for chunk in chunks {
        fill_gaps(spaces, &mut masked, chunk.start.0, cursor.0);
        masked.push_str(&src[chunk.to_range()]);
        cursor = chunk.end;
    }
    // Fill up if the comment was at the end
    if src.len() > cursor.0 {
        fill_gaps(spaces, &mut masked, src.len(), cursor.0);
    }
    // Masking must be length-preserving by construction.
    assert_eq!(src.len(), masked.len());
    masked
}
/// Blanks out the interiors of all brace-delimited sub-scopes of `src` with
/// spaces (preserving newlines and the braces themselves) so that offsets
/// are kept while nested bodies are hidden.
pub fn mask_sub_scopes(src: &str) -> String {
    let mut result = String::with_capacity(src.len());
    let buf_byte = [b' '; 128];
    let buffer = from_utf8(&buf_byte).unwrap();
    // Current brace nesting depth; text at depth 0 is copied verbatim.
    let mut levels = 0i32;
    // `start` marks where the current verbatim/blanked span begins;
    // `pos` is one past the byte being examined.
    let mut start = 0usize;
    let mut pos = 0usize;
    for &b in src.as_bytes() {
        pos += 1;
        match b {
            b'{' => {
                if levels == 0 {
                    // Emit top-level text up to and including this `{`.
                    result.push_str(&src[start..(pos)]);
                    start = pos + 1;
                }
                levels += 1;
            }
            b'}' => {
                if levels == 1 {
                    // Closing the outermost sub-scope: pad its body, keep `}`.
                    fill_gaps(buffer, &mut result, pos, start);
                    result.push_str("}");
                    start = pos;
                }
                levels -= 1;
            }
            b'\n' if levels > 0 => {
                // Preserve line structure inside masked scopes.
                fill_gaps(buffer, &mut result, pos, start);
                result.push('\n');
                start = pos + 1;
            }
            _ => {}
        }
    }
    if start > pos {
        start = pos;
    }
    if levels > 0 {
        // Unterminated scope: pad the remainder.
        fill_gaps(buffer, &mut result, pos, start);
    } else {
        result.push_str(&src[start..pos]);
    }
    result
}
/// Offset of the `}` that closes the next `{` opened within `src`, or `None`
/// when the braces never balance.
pub fn end_of_next_scope(src: &str) -> Option<BytePos> {
    let bytes = src.as_bytes();
    find_close(bytes.iter(), b'{', b'}', 1)
}
#[test]
fn test_scope_start() {
    // The scope around line 4 begins right after `fn myfn() {`.
    let src = String::from(
        "
fn myfn() {
    let a = 3;
    print(a);
}
",
    );
    let src = core::MaskedSource::new(&src);
    let raw_src = core::RawSource::new(src.to_string());
    let point = raw_src.coords_to_point(&Coordinate::new(4, 10)).unwrap();
    let start = scope_start(src.as_src(), point);
    assert_eq!(start, BytePos(12));
}
#[test]
fn test_scope_start_handles_sub_scopes() {
    // A point after a closed inner block still resolves to the function's
    // own scope, not the inner one.
    let src = String::from(
        "
fn myfn() {
    let a = 3;
    {
        let b = 4;
    }
    print(a);
}
",
    );
    let src = core::MaskedSource::new(&src);
    let raw_src = core::RawSource::new(src.to_string());
    let point = raw_src.coords_to_point(&Coordinate::new(7, 10)).unwrap();
    let start = scope_start(src.as_src(), point);
    assert_eq!(start, BytePos(12));
}
#[test]
fn masks_out_comments() {
    // Masking keeps length and code bytes but replaces comment bytes with
    // spaces.
    let src = String::from(
        "
this is some code
this is a line // with a comment
some more
",
    );
    let raw = core::RawSource::new(src.to_string());
    let src = core::MaskedSource::new(&src);
    assert!(src.len() == raw.len());
    // characters at the start are the same
    assert!(src.as_bytes()[5] == raw.as_bytes()[5]);
    // characters in the comments are masked
    let commentoffset = raw.coords_to_point(&Coordinate::new(3, 23)).unwrap();
    assert!(char_at(&src, commentoffset.0) == ' ');
    assert!(src.as_bytes()[commentoffset.0] != raw.as_bytes()[commentoffset.0]);
    // characters afterwards are the same
    assert!(src.as_bytes()[src.len() - 3] == raw.as_bytes()[src.len() - 3]);
}
#[test]
fn finds_end_of_struct_scope() {
    // `end_of_next_scope` stops at the brace closing the struct, dropping
    // the trailing junk.
    let src = "
struct foo {
   a: usize,
   blah: ~str
}
Some other junk";
    let expected = "
struct foo {
   a: usize,
   blah: ~str
}";
    let end = end_of_next_scope(src).unwrap();
    assert_eq!(expected, &src[..=end.0]);
}
/// get start of path from use statements
/// e.g. get Some(16) from "pub(crate) use a"
pub(crate) fn use_stmt_start(line_str: &str) -> Option<BytePos> {
    // Skip any leading visibility modifier, then require the `use` keyword.
    let vis_end = match util::strip_visibility(line_str) {
        Some(pos) => pos,
        None => BytePos::ZERO,
    };
    util::strip_word(&line_str[vis_end.0..], "use").map(|after_use| after_use + vis_end)
}
/// Whether `line_str` is an `extern crate` item, allowing an optional
/// leading visibility modifier.
pub(crate) fn is_extern_crate(line_str: &str) -> bool {
    let vis_end = util::strip_visibility(line_str).unwrap_or(BytePos::ZERO);
    match util::strip_word(&line_str[vis_end.0..], "extern") {
        Some(after_extern) => line_str[(vis_end + after_extern).0..].starts_with("crate "),
        None => false,
    }
}
#[inline(always)]
/// Scanning `expr` right-to-left, returns the index of the first `:` of the
/// nearest `::` separator, skipping back over comma-separated use-tree
/// siblings (up to their opening `{`). `None` when no separator applies.
fn next_use_item(expr: &str) -> Option<usize> {
    let bytes = expr.as_bytes();
    let mut idx = bytes.len();
    let mut right_neighbor = b' ';
    while idx > 0 {
        idx -= 1;
        let cur = bytes[idx];
        if cur == b':' && right_neighbor == b':' {
            return Some(idx);
        }
        if cur == b',' {
            // Skip the whole preceding sibling item of this `{...}` group.
            while idx > 0 && bytes[idx] != b'{' {
                idx -= 1;
            }
        }
        right_neighbor = cur;
    }
    None
}
/// get path from use statement, supposing completion point is end of expr
/// e.g. "use std::collections::{hash_map, Hash" -> P["std", "collections", "Hash"]
// NOTE(review): `bytes.len() - 1` assumes `expr` is non-empty — confirm callers.
pub(crate) fn construct_path_from_use_tree(expr: &str) -> RacerPath {
    // Segments are collected right-to-left and reversed at the end.
    let mut segments = Vec::new();
    let bytes = expr.as_bytes();
    let mut i = bytes.len();
    // Inclusive end of the identifier currently being scanned; `None` while
    // between identifiers.
    let mut ident_end = Some(i - 1);
    while i > 0 {
        i -= 1;
        if util::is_ident_char(bytes[i] as char) {
            if ident_end.is_none() {
                ident_end = Some(i)
            }
        } else {
            if let Some(end) = ident_end {
                segments.push(&expr[i + 1..=end]);
                ident_end = None;
            }
            // Jump to the `::` owning this use-tree item, skipping any
            // comma-separated siblings; stop when there is none.
            if let Some(point) = next_use_item(&expr[..=i]) {
                i = point;
                continue;
            }
            break;
        }
    }
    if let Some(end) = ident_end {
        // The remaining prefix is entirely one identifier.
        segments.push(&expr[0..=end]);
    }
    segments.reverse();
    let is_global = expr.starts_with("::");
    RacerPath::from_vec(is_global, segments)
}
/// get current statement for completion context
/// Returns (statement start, statement text up to `pos`, last `;`-separated
/// piece only).
pub(crate) fn get_current_stmt<'c>(src: Src<'c>, pos: BytePos) -> (BytePos, String) {
    let mut scopestart = scope_start(src, pos);
    // for use statement: a scope opening with `::{` means we are inside a
    // use tree, so widen to the scope of the enclosing `use` keyword.
    if scopestart > BytePos::ZERO && src[..scopestart.0].ends_with("::{") {
        if let Some(pos) = src[..pos.0].rfind("use") {
            scopestart = scope_start(src, pos.into());
        }
    }
    let linestart = find_stmt_start_given_scope(src, pos, scopestart).unwrap_or(scopestart);
    (
        linestart,
        // `rsplit` always yields at least one piece, so `next().unwrap()`
        // cannot fail.
        (&src[linestart.0..pos.0])
            .trim()
            .rsplit(';')
            .next()
            .unwrap()
            .to_owned(),
    )
}
/// Parses a `::`-separated expression into a `RacerPath` plus the namespace
/// to search: a single segment may also name a global item.
pub(crate) fn expr_to_path(expr: &str) -> (RacerPath, Namespace) {
    let is_global = expr.starts_with("::");
    let trimmed = if is_global { &expr[2..] } else { expr };
    let segments: Vec<_> = trimmed.split("::").collect();
    let path = RacerPath::from_vec(is_global, segments);
    let namespace = match path.len() {
        1 => Namespace::Global | Namespace::Path,
        _ => Namespace::Path,
    };
    (path, namespace)
}
/// Decides whether `pos` sits in the field position of a struct-literal
/// constructor (`Name { fie<cursor> }`), returning the byte range of the
/// struct name when it does.
pub(crate) fn is_in_struct_ctor(
    src: Src<'_>,
    stmt_start: BytePos,
    pos: BytePos,
) -> Option<ByteRange> {
    // Bytes that may legally precede the struct name.
    const ALLOW_SYMBOL: [u8; 5] = [b'{', b'(', b'|', b';', b','];
    // Keywords that may precede the name (e.g. in `let`/`ref` bindings).
    const ALLOW_KEYWORDS: [&'static str; 3] = ["let", "mut", "ref"];
    // Keywords that introduce plain blocks, not constructors.
    const INIHIBIT_KEYWORDS: [&'static str; 2] = ["unsafe", "async"];
    // Must be directly inside a `{` and have room for a name before it.
    if stmt_start.0 <= 3 || src.as_bytes()[stmt_start.0 - 1] != b'{' || pos <= stmt_start {
        return None;
    }
    {
        // Between the last `,` and `pos` there must be no `:` — otherwise we
        // are in a field *value*, not a field name.
        for &b in src[stmt_start.0..pos.0].as_bytes().iter().rev() {
            match b {
                b',' => break,
                b':' => return None,
                _ => continue,
            }
        }
    }
    // Scan backwards through the text before the `{` for the struct name.
    let src = &src[..stmt_start.0 - 1];
    #[derive(Clone, Copy, Debug)]
    enum State {
        Initial,
        // Inside a candidate name; payload is its inclusive end index.
        Name(usize),
        // Name found; verifying what precedes it.
        End,
    }
    let mut state = State::Initial;
    let mut result = None;
    let bytes = src.as_bytes();
    for (i, b) in bytes.iter().enumerate().rev() {
        match (state, *b) {
            (State::Initial, b) if util::is_whitespace_byte(b) => continue,
            (State::Initial, b) if util::is_ident_char(b.into()) => state = State::Name(i),
            (State::Initial, _) => return None,
            (State::Name(_), b) if b == b':' || util::is_ident_char(b.into()) => continue,
            (State::Name(end), b) if util::is_whitespace_byte(b) => {
                result = Some(ByteRange::new(i + 1, end + 1));
                if INIHIBIT_KEYWORDS.contains(&&src[i + 1..=end]) {
                    return None;
                }
                state = State::End;
            }
            (State::Name(end), b) if ALLOW_SYMBOL.contains(&b) => {
                result = Some(ByteRange::new(i + 1, end + 1));
                break;
            }
            (State::End, b) if util::is_ident_char(b.into()) => {
                // A word before the name is only acceptable if it is one of
                // the allowed binding keywords.
                let bytes = &bytes[..=i];
                if !ALLOW_KEYWORDS.iter().any(|s| bytes.ends_with(s.as_bytes())) {
                    return None;
                } else {
                    break;
                }
            }
            (State::End, b) if util::is_whitespace_byte(b) => continue,
            (State::End, b) if ALLOW_SYMBOL.contains(&b) => break,
            (_, _) => return None,
        }
    }
    match state {
        State::Initial => None,
        State::Name(end) => {
            // Name ran to the start of the text; still reject block keywords.
            if INIHIBIT_KEYWORDS.contains(&&src[0..=end]) {
                None
            } else {
                Some(ByteRange::new(0, end + 1))
            }
        }
        State::End => result,
    }
}
#[cfg(test)]
mod use_tree_test {
    use super::*;
    #[test]
    fn test_use_stmt_start() {
        assert_eq!(use_stmt_start("pub(crate) use some::").unwrap().0, 19);
    }
    #[test]
    fn test_is_extern_crate() {
        assert!(is_extern_crate("extern crate "));
        assert!(is_extern_crate("pub extern crate abc"));
        assert!(!is_extern_crate("pub extern crat"));
    }
    #[test]
    fn test_construct_path_from_use_tree() {
        // Reduce the constructed path to its plain segment names.
        let get_path_idents = |s| {
            let s = construct_path_from_use_tree(s);
            s.segments
                .into_iter()
                .map(|seg| seg.name)
                .collect::<Vec<_>>()
        };
        assert_eq!(
            get_path_idents("std::collections::HashMa"),
            vec!["std", "collections", "HashMa"],
        );
        assert_eq!(
            get_path_idents("std::{collections::{HashMap, hash_ma"),
            vec!["std", "collections", "hash_ma"],
        );
        assert_eq!(
            get_path_idents("std::{collections::{HashMap, "),
            vec!["std", "collections", ""],
        );
        assert_eq!(
            get_path_idents("std::collections::{"),
            vec!["std", "collections", ""],
        );
        assert_eq!(
            get_path_idents("std::{collections::HashMap, sync::Arc"),
            vec!["std", "sync", "Arc"],
        );
        assert_eq!(get_path_idents("{Str1, module::Str2, Str3"), vec!["Str3"],);
    }
}
#[cfg(test)]
mod ctor_test {
    use super::{is_in_struct_ctor, scope_start};
    use crate::core::{ByteRange, MaskedSource};
    // Each fixture marks the completion point with `~`; `check` resolves the
    // scope there and asks whether the point is in struct-ctor field position.
    fn check(src: &str) -> Option<ByteRange> {
        let source = MaskedSource::new(src);
        let point = src.find("~").unwrap();
        let scope_start = scope_start(source.as_src(), point.into());
        is_in_struct_ctor(source.as_src(), scope_start, point.into())
    }
    #[test]
    fn first_line() {
        let src = "
    struct UserData {
        name: String,
        id: usize,
    }
    fn main() {
        UserData {
            na~
        }
    }";
        assert!(check(src).is_some())
    }
    #[test]
    fn second_line() {
        let src = r#"
    fn main() {
        UserData {
            name: "ahkj".to_owned(),
            i~d:
        }
    }"#;
        assert!(check(src).is_some())
    }
    #[test]
    fn tuple() {
        let src = r#"
    fn main() {
        let (a,
             UserData {
                name: "ahkj".to_owned(),
                i~d:
             }
        ) = f();
    }"#;
        assert!(check(src).is_some())
    }
    #[test]
    fn expr_pos() {
        // A point after `:` is a field value, not a field name.
        let src = r#"
    fn main() {
        UserData {
            name: ~
        }
    }"#;
        assert!(check(src).is_none())
    }
    #[test]
    fn fnarg() {
        let src = r#"
    func(UserData {
        name~
    })
    "#;
        assert!(check(src).is_some())
    }
    #[test]
    fn closure() {
        let src = r#"
    let f = || UserData {
        name~
    };
    "#;
        assert!(check(src).is_some())
    }
    #[test]
    fn unsafe_() {
        // `unsafe { ... }` is a block, not a constructor.
        let src = r#"
    unsafe {
        name~
    }
    "#;
        assert!(check(src).is_none())
    }
}

123
racer/src/racer/snippets.rs Normal file
View File

@ -0,0 +1,123 @@
use crate::ast::with_error_checking_parse;
use crate::core::{Match, Session};
use crate::typeinf::get_function_declaration;
use rustc_ast::ast::AssocItemKind;
use rustc_parse::parser::ForceCollect;
/// Returns completion snippets usable by some editors
///
/// For a function match, the declaration is parsed and rendered with
/// "${n:name}" placeholders so editors can tab through the arguments; any
/// other match completes as its plain name.
///
/// # Examples
///
/// ```no_run
/// extern crate racer;
///
/// use std::path::Path;
///
/// let path = Path::new(".");
/// let cache = racer::FileCache::default();
/// let session = racer::Session::new(&cache, Some(path));
///
/// let m = racer::complete_fully_qualified_name(
///     "std::fs::canonicalize",
///     &path,
///     &session
/// ).next().unwrap();
///
/// let snip = racer::snippet_for_match(&m, &session);
/// assert_eq!(snip, "canonicalize(${1:path})");
/// ```
pub fn snippet_for_match(m: &Match, session: &Session<'_>) -> String {
    if !m.mtype.is_function() {
        return m.matchstr.clone();
    }
    let declaration = get_function_declaration(m, session);
    match MethodInfo::from_source_str(&declaration) {
        Some(info) => info.snippet(),
        // Unparseable declarations degrade to an empty snippet.
        None => "".into(),
    }
}
/// Data extracted from a parsed method declaration, sufficient to build a
/// completion snippet.
struct MethodInfo {
    // Method name as written in the declaration.
    name: String,
    // One entry per parameter, formatted "pattern: Type" (just the pattern
    // when no type text is available).
    args: Vec<String>,
}
impl MethodInfo {
    ///Parses method declaration as string and returns relevant data
    fn from_source_str(source: &str) -> Option<MethodInfo> {
        // Strip trailing brace/whitespace and append "{}()" so the text
        // parses as one complete impl item.
        let trim: &[_] = &['\n', '\r', '{', ' '];
        let decorated = format!("{} {{}}()", source.trim_end_matches(trim));
        trace!("MethodInfo::from_source_str: {:?}", decorated);
        with_error_checking_parse(decorated, |p| {
            if let Ok(Some(Some(method))) = p.parse_impl_item(ForceCollect::No) {
                if let AssocItemKind::Fn(ref fn_kind) = method.kind {
                    let decl = &fn_kind.sig.decl;
                    return Some(MethodInfo {
                        // ident.as_str calls Ident.name.as_str
                        name: method.ident.name.to_string(),
                        args: decl
                            .inputs
                            .iter()
                            .map(|arg| {
                                // Recover the literal pattern/type text from
                                // the parser's source map spans.
                                let source_map = &p.sess.source_map();
                                let var_name = match source_map.span_to_snippet(arg.pat.span) {
                                    Ok(name) => name,
                                    _ => "".into(),
                                };
                                match source_map.span_to_snippet(arg.ty.span) {
                                    Ok(ref type_name) if !type_name.is_empty() => {
                                        format!("{}: {}", var_name, type_name)
                                    }
                                    _ => var_name,
                                }
                            })
                            .collect(),
                    });
                }
            }
            debug!("Unable to parse method declaration. |{}|", source);
            None
        })
    }
    ///Returns completion snippets usable by some editors
    fn snippet(&self) -> String {
        // Drop `self`-like receivers; remaining args become "${n:arg}"
        // tab stops joined with ", ".
        format!(
            "{}({})",
            self.name,
            &self
                .args
                .iter()
                .filter(|&s| !s.ends_with("self"))
                .enumerate()
                .fold(String::new(), |cur, (i, ref s)| {
                    let arg = format!("${{{}:{}}}", i + 1, s);
                    let delim = if i > 0 { ", " } else { "" };
                    cur + delim + &arg
                })
        )
    }
}
#[test]
fn method_info_test() {
    let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap();
    assert_eq!(info.name, "new");
    assert_eq!(info.args.len(), 0);
    assert_eq!(info.snippet(), "new()");
    let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: usize)").unwrap();
    assert_eq!(info.name, "reserve");
    assert_eq!(info.args.len(), 2);
    // The doubled text looks odd, but that is fine: clients only ever see
    // the generated snippet, which filters `self` out.
    assert_eq!(info.args[0], "&mut self: &mut self");
    assert_eq!(info.snippet(), "reserve(${1:additional: usize})");
}

View File

@ -0,0 +1,21 @@
#![cfg(test)]
use crate::core::ByteRange;
/// Normalizes a test fixture: drops the leading newline, removes four
/// columns of indentation from every line, keeps a newline after each
/// non-empty line, and trims the final trailing newline.
pub fn rejustify(src: &str) -> String {
    let mut out = String::new();
    for line in src[1..].lines() {
        let dedented = &line[4..];
        out.push_str(dedented);
        if !dedented.is_empty() {
            out.push('\n');
        }
    }
    // Remove the trailing newline.
    let trimmed_len = out.len() - 1;
    out.truncate(trimmed_len);
    out
}
/// Borrows the sub-slice of `src` covered by `range`.
pub fn slice(src: &str, range: ByteRange) -> &str {
    &src[range.to_range()]
}

630
racer/src/racer/typeinf.rs Normal file
View File

@ -0,0 +1,630 @@
//! Type inference
//! THIS MODULE IS ENTIRELY TOO UGLY SO REALLY NEADS REFACTORING(kngwyu)
use crate::ast;
use crate::ast_types::{Pat, Ty};
use crate::core;
use crate::core::{
BytePos, ByteRange, Match, MatchType, Namespace, Scope, SearchType, Session, SessionExt, Src,
};
use crate::matchers;
use crate::nameres;
use crate::primitive::PrimKind;
use crate::scopes;
use crate::util::{self, txt_matches};
use rustc_ast::ast::BinOpKind;
use std::path::Path;
// Removes the body of the statement (anything in the braces {...}), leaving just
// the header
pub fn generate_skeleton_for_parsing(src: &str) -> Option<String> {
    let brace = src.find('{')?;
    let mut skeleton = src[..=brace].to_owned();
    skeleton.push('}');
    Some(skeleton)
}
/// Get the trait name implementing which overrides the operator `op`
/// For comparison operators, it is `bool`
pub(crate) fn get_operator_trait(op: BinOpKind) -> &'static str {
    match op {
        BinOpKind::Add => "Add",
        BinOpKind::Sub => "Sub",
        BinOpKind::Mul => "Mul",
        BinOpKind::Div => "Div",
        BinOpKind::Rem => "Rem",
        BinOpKind::And => "And",
        BinOpKind::Or => "Or",
        BinOpKind::BitXor => "BitXor",
        BinOpKind::BitAnd => "BitAnd",
        BinOpKind::BitOr => "BitOr",
        BinOpKind::Shl => "Shl",
        BinOpKind::Shr => "Shr",
        // Remaining operators (comparisons) have no overload trait that
        // matters here; they evaluate to `bool`.
        _ => "bool",
    }
}
// TODO(kngwyu): use libsyntax parser
/// Textual check whether the first parameter of the method declared in
/// `blob` is `self` (i.e. the item is a method, not an associated function).
pub fn first_param_is_self(blob: &str) -> bool {
    // Restricted visibility introduces the possibility of `pub(in ...)` at the start
    // of a method declaration. To counteract this, we restrict the search to only
    // look at text _after_ the visibility declaration.
    //
    // Having found the end of the visibility declaration, we now start the search
    // for method parameters.
    let blob = util::trim_visibility(blob);
    // skip generic arg
    // consider 'pub fn map<U, F: FnOnce(T) -> U>(self, f: F)'
    // we have to match the '>'
    match blob.find('(') {
        None => false,
        Some(probable_param_start) => {
            let skip_generic = match blob.find('<') {
                None => 0,
                Some(generic_start) if generic_start < probable_param_start => {
                    // Walk the generic list balancing <...>, ignoring the
                    // `>` of `->` inside e.g. `FnOnce(T) -> U`.
                    let mut level = 0;
                    let mut prev = ' ';
                    let mut skip_generic = 0;
                    for (i, c) in blob[generic_start..].char_indices() {
                        match c {
                            '<' => level += 1,
                            '>' if prev == '-' => (),
                            '>' => level -= 1,
                            _ => (),
                        }
                        prev = c;
                        if level == 0 {
                            skip_generic = i;
                            break;
                        }
                    }
                    skip_generic
                }
                Some(..) => 0,
            };
            if let Some(start) = blob[skip_generic..].find('(') {
                // Search only inside the parameter parentheses for `self`.
                let start = BytePos::from(skip_generic + start).increment();
                let end = scopes::find_closing_paren(blob, start);
                let is_self = txt_matches(SearchType::ExactMatch, "self", &blob[start.0..end.0]);
                trace!(
                    "searching fn args for self: |{}| {}",
                    &blob[start.0..end.0],
                    is_self
                );
                return is_self;
            }
            false
        }
    }
}
#[test]
fn generates_skeleton_for_mod() {
    // The braces' contents are dropped; only the header plus `{}` remains.
    let src = "mod foo { blah }";
    let out = generate_skeleton_for_parsing(src).unwrap();
    assert_eq!("mod foo {}", out);
}
fn get_type_of_self_arg(m: &Match, msrc: Src<'_>, session: &Session<'_>) -> Option<Ty> {
debug!("get_type_of_self_arg {:?}", m);
get_type_of_self(m.point, &m.filepath, m.local, msrc, session)
}
// TODO(kngwyu): parse correctly
/// Determines the concrete type behind `self` at `point` by locating and
/// parsing the enclosing `impl` (or `trait`) header.
pub fn get_type_of_self(
    point: BytePos,
    filepath: &Path,
    local: bool,
    msrc: Src<'_>,
    session: &Session<'_>,
) -> Option<Ty> {
    let start = scopes::find_impl_start(msrc, point, BytePos::ZERO)?;
    // Parse just the item header with an empty body.
    let decl = generate_skeleton_for_parsing(&msrc.shift_start(start))?;
    debug!("get_type_of_self_arg impl skeleton |{}|", decl);
    if decl.starts_with("impl") {
        // we have to do 2 operations around generics here
        // 1. Checks if self's type is T
        // 2. Checks if self's type contains T
        let scope_start = start + decl.len().into();
        let implres = ast::parse_impl(decl, filepath, start, local, scope_start)?;
        // Case 1: the impl target itself is a generic parameter.
        if let Some((_, param)) = implres.generics().search_param_by_path(implres.self_path()) {
            if let Some(resolved) = param.resolved() {
                return Some(resolved.to_owned());
            }
            let mut m = param.to_owned().into_match();
            m.local = local;
            return Some(Ty::Match(m));
        }
        debug!("get_type_of_self_arg implres |{:?}|", implres);
        // Case 2: resolve the impl target path, then attach the impl's
        // generic bounds to the resulting enum/struct match.
        nameres::resolve_path(
            implres.self_path(),
            filepath,
            start,
            SearchType::ExactMatch,
            Namespace::Type,
            session,
            &matchers::ImportInfo::default(),
        )
        .into_iter()
        .nth(0)
        .map(|mut m| {
            match &mut m.mtype {
                MatchType::Enum(gen) | MatchType::Struct(gen) => {
                    for (i, param) in implres.generics.0.into_iter().enumerate() {
                        gen.add_bound(i, param.bounds);
                    }
                }
                _ => {}
            }
            Ty::Match(m)
        })
    } else {
        // // must be a trait
        ast::parse_trait(decl).name.and_then(|name| {
            Some(Ty::Match(Match {
                matchstr: name,
                filepath: filepath.into(),
                point: start,
                coords: None,
                local: local,
                mtype: core::MatchType::Trait,
                contextstr: matchers::first_line(&msrc[start.0..]),
                docs: String::new(),
            }))
        })
    }
}
/// Determines the type of a binding introduced as a function argument.
///
/// Only meaningful for `MatchType::FnArg` matches, whose payload carries the
/// argument pattern plus its optional declared type.
fn get_type_of_fnarg(m: Match, session: &Session<'_>) -> Option<Ty> {
    let Match {
        matchstr,
        filepath,
        point,
        mtype,
        ..
    } = m;
    if let MatchType::FnArg(arg) = mtype {
        let (pat, ty) = *arg;
        resolve_lvalue_ty(pat, ty, &matchstr, &filepath, point, session)
    } else {
        None
    }
}
/// Determines the type of a binding introduced by a `let` statement by
/// re-parsing the statement text.
fn get_type_of_let_expr(m: Match, session: &Session<'_>) -> Option<Ty> {
    let Match {
        mtype,
        contextstr,
        filepath,
        point,
        ..
    } = m;
    let let_start = if let MatchType::Let(start) = mtype {
        start
    } else {
        return None;
    };
    debug!("get_type_of_let_expr calling parse_let |{}|", contextstr);
    // The parser works on `contextstr` alone, so translate the match point
    // into an offset relative to the start of the `let` statement.
    let scope = Scope {
        filepath,
        point: let_start,
    };
    ast::get_let_type(contextstr, point - let_start, scope, session)
}
/// Decide l_value's type given r_value and ident query
///
/// Recursively destructures the pattern `l_value`, pairing each sub-pattern
/// with the matching piece of `r_value`'s type, and returns the type of the
/// binding named `query` (or `None` if it isn't bound by this pattern).
pub(crate) fn resolve_lvalue_ty<'a>(
    l_value: Pat,
    r_value: Option<Ty>,
    query: &str,
    fpath: &Path,
    pos: BytePos,
    session: &Session<'_>,
) -> Option<Ty> {
    match l_value {
        // Plain binding: the whole r-value type belongs to it if the name matches.
        Pat::Ident(_bi, name) => {
            if name != query {
                return None;
            }
            r_value
        }
        // Tuple pattern: recurse element-wise. `try_continue!` skips elements
        // where `query` isn't bound, so we return on the first element that
        // resolves it.
        Pat::Tuple(pats) => {
            if let Ty::Tuple(ty) = r_value? {
                for (p, t) in pats.into_iter().zip(ty) {
                    let ret = try_continue!(resolve_lvalue_ty(p, t, query, fpath, pos, session,));
                    return Some(ret);
                }
            }
            None
        }
        // Reference pattern: peel one level of `&` from the r-value type if present.
        Pat::Ref(pat, _) => {
            if let Some(ty) = r_value {
                if let Ty::RefPtr(ty, _) = ty {
                    resolve_lvalue_ty(*pat, Some(*ty), query, fpath, pos, session)
                } else {
                    resolve_lvalue_ty(*pat, Some(ty), query, fpath, pos, session)
                }
            } else {
                resolve_lvalue_ty(*pat, None, query, fpath, pos, session)
            }
        }
        // Tuple-struct / tuple enum-variant pattern: look up the type's
        // positional fields and recurse into each sub-pattern.
        Pat::TupleStruct(path, pats) => {
            let ma = ast::find_type_match(&path, fpath, pos, session)?;
            match &ma.mtype {
                MatchType::Struct(_generics) => {
                    for (pat, (_, _, t)) in
                        pats.into_iter().zip(get_tuplestruct_fields(&ma, session))
                    {
                        let ret =
                            try_continue!(resolve_lvalue_ty(pat, t, query, fpath, pos, session));
                        return Some(ret);
                    }
                    None
                }
                MatchType::EnumVariant(enum_) => {
                    // Prefer generics carried by the concrete r-value type;
                    // fall back to the enum definition's own generics.
                    let generics = if let Some(Ty::Match(match_)) = r_value.map(Ty::dereference) {
                        match_.into_generics()
                    } else {
                        enum_.to_owned().and_then(|ma| ma.into_generics())
                    };
                    for (pat, (_, _, mut t)) in
                        pats.into_iter().zip(get_tuplestruct_fields(&ma, session))
                    {
                        debug!(
                            "Hi! I'm in enum and l: {:?}\n r: {:?}\n gen: {:?}",
                            pat, t, generics
                        );
                        // Substitute resolved generic parameters into the field type.
                        if let Some(ref gen) = generics {
                            t = t.map(|ty| ty.replace_by_resolved_generics(&gen));
                        }
                        let ret =
                            try_continue!(resolve_lvalue_ty(pat, t, query, fpath, pos, session));
                        return Some(ret);
                    }
                    None
                }
                _ => None,
            }
        }
        // Let's implement after #946 solved
        Pat::Struct(path, _) => {
            let item = ast::find_type_match(&path, fpath, pos, session)?;
            if !item.mtype.is_struct() {
                return None;
            }
            None
        }
        _ => None,
    }
}
/// Determines the type of a binding introduced by a `for` loop pattern:
/// the item type yielded by iterating the `in` expression.
fn get_type_of_for_arg(m: &Match, session: &Session<'_>) -> Option<Ty> {
    let for_start = match &m.mtype {
        MatchType::For(pos) => *pos,
        _ => {
            warn!("[get_type_of_for_expr] invalid match type: {:?}", m.mtype);
            return None;
        }
    };
    // HACK: use outer scope when getting in ~ expr's type
    let scope = Scope::new(m.filepath.clone(), for_start);
    let ast::ForStmtVisitor {
        for_pat, in_expr, ..
    } = ast::parse_for_stmt(m.contextstr.clone(), scope, session);
    debug!(
        "[get_type_of_for_expr] match: {:?}, for: {:?}, in: {:?},",
        m, for_pat, in_expr
    );
    // What does one iteration of `ty` yield? Peels references and resolves
    // path searches before asking for the iterator item type.
    fn get_item(ty: Ty, session: &Session<'_>) -> Option<Ty> {
        match ty {
            Ty::Match(ma) => nameres::get_iter_item(&ma, session),
            Ty::PathSearch(paths) => {
                nameres::get_iter_item(&paths.resolve_as_match(session)?, session)
            }
            Ty::RefPtr(ty, _) => get_item(*ty, session),
            _ => None,
        }
    }
    resolve_lvalue_ty(
        for_pat?,
        in_expr.and_then(|ty| get_item(ty, session)),
        &m.matchstr,
        &m.filepath,
        m.point,
        session,
    )
}
/// Determines the type of a binding introduced by `if let` / `while let`
/// by matching the pattern against the right-hand expression's type.
fn get_type_of_if_let(m: &Match, session: &Session<'_>, start: BytePos) -> Option<Ty> {
    // HACK: use outer scope when getting r-value's type
    let outer_scope = Scope::new(m.filepath.clone(), start);
    let visitor = ast::parse_if_let(m.contextstr.clone(), outer_scope, session);
    debug!(
        "[get_type_of_if_let] match: {:?}\n let: {:?}\n rh: {:?},",
        m, visitor.let_pat, visitor.rh_expr,
    );
    let ast::IfLetVisitor {
        let_pat, rh_expr, ..
    } = visitor;
    resolve_lvalue_ty(
        let_pat?,
        rh_expr,
        &m.matchstr,
        &m.filepath,
        m.point,
        session,
    )
}
/// Looks up the declared type of field `fieldname` on the struct `structmatch`.
///
/// Returns `None` when the match isn't a struct, the field doesn't exist, or
/// its type annotation couldn't be parsed.
pub fn get_struct_field_type(
    fieldname: &str,
    structmatch: &Match,
    session: &Session<'_>,
) -> Option<Ty> {
    // temporary fix for https://github.com/rust-lang-nursery/rls/issues/783
    if !structmatch.mtype.is_struct() {
        // NOTE: the log text previously said "get_struct_filed_type" (typo).
        warn!(
            "get_struct_field_type is called for {:?}",
            structmatch.mtype
        );
        return None;
    }
    debug!("[get_struct_field_type]{}, {:?}", fieldname, structmatch);
    let src = session.load_source_file(&structmatch.filepath);
    let opoint = scopes::expect_stmt_start(src.as_src(), structmatch.point);
    // HACK: if scopes::end_of_next_scope returns empty struct, it's maybe tuple struct
    let structsrc = if let Some(end) = scopes::end_of_next_scope(&src[opoint.0..]) {
        src[opoint.0..=(opoint + end).0].to_owned()
    } else {
        (*get_first_stmt(src.as_src().shift_start(opoint))).to_owned()
    };
    // `structsrc` is already an owned String — the extra `.to_owned()` the old
    // code did here just cloned it for nothing.
    let fields = ast::parse_struct_fields(structsrc, Scope::from_match(structmatch));
    fields
        .into_iter()
        .find(|(field, _, _)| field.as_str() == fieldname)
        .and_then(|(_, _, ty)| ty)
}
/// Returns the positional fields `(name, span, type)` of a tuple struct or
/// a tuple enum variant.
pub(crate) fn get_tuplestruct_fields(
    structmatch: &Match,
    session: &Session<'_>,
) -> Vec<(String, ByteRange, Option<Ty>)> {
    let src = session.load_source_file(&structmatch.filepath);
    let structsrc = if let core::MatchType::EnumVariant(_) = structmatch.mtype {
        // decorate the enum variant src to make it look like a tuple struct
        // (prepend `struct `, keep up to the closing paren, append `;`)
        let to = src[structmatch.point.0..]
            .find('(')
            .map(|n| {
                scopes::find_closing_paren(&src, structmatch.point + BytePos::from(n).increment())
            })
            .expect("Tuple enum variant should have `(` in definition");
        "struct ".to_owned() + &src[structmatch.point.0..to.increment().0] + ";"
    } else {
        assert!(structmatch.mtype.is_struct());
        let opoint = scopes::expect_stmt_start(src.as_src(), structmatch.point);
        (*get_first_stmt(src.as_src().shift_start(opoint))).to_owned()
    };
    debug!("[tuplestruct_fields] structsrc=|{}|", structsrc);
    ast::parse_struct_fields(structsrc, Scope::from_match(structmatch))
}
/// Looks up the declared type of the `fieldnum`-th positional field of a
/// tuple struct (or tuple enum variant).
pub fn get_tuplestruct_field_type(
    fieldnum: usize,
    structmatch: &Match,
    session: &Session<'_>,
) -> Option<Ty> {
    // Tuple-struct fields are positional, so just pick the n-th entry.
    get_tuplestruct_fields(structmatch, session)
        .into_iter()
        .nth(fieldnum)
        .and_then(|(_, _, ty)| ty)
}
/// Narrows `src` to its first statement, or returns it unchanged when no
/// statement is found.
pub fn get_first_stmt(src: Src<'_>) -> Src<'_> {
    if let Some(range) = src.iter_stmts().next() {
        src.shift_range(range)
    } else {
        src
    }
}
/// Dispatches on the kind of `m` to determine the type of the matched binding.
pub fn get_type_of_match(m: Match, msrc: Src<'_>, session: &Session<'_>) -> Option<Ty> {
    debug!("get_type_of match {:?} ", m);
    match m.mtype {
        core::MatchType::Let(_) => get_type_of_let_expr(m, session),
        core::MatchType::IfLet(start) | core::MatchType::WhileLet(start) => {
            get_type_of_if_let(&m, session, start)
        }
        core::MatchType::For(_) => get_type_of_for_arg(&m, session),
        core::MatchType::FnArg(_) => get_type_of_fnarg(m, session),
        core::MatchType::MatchArm => get_type_from_match_arm(&m, msrc, session),
        // These items *are* their own type.
        core::MatchType::Struct(_)
        | core::MatchType::Union(_)
        | core::MatchType::Enum(_)
        | core::MatchType::Function
        | core::MatchType::Method(_)
        | core::MatchType::Module => Some(Ty::Match(m)),
        core::MatchType::Const | core::MatchType::Static => get_type_of_static(m),
        // An enum variant's type is its parent enum, when it's recorded.
        core::MatchType::EnumVariant(Some(boxed_enum)) => {
            if boxed_enum.mtype.is_enum() {
                Some(Ty::Match(*boxed_enum))
            } else {
                debug!("EnumVariant has not-enum type: {:?}", boxed_enum.mtype);
                None
            }
        }
        _ => {
            debug!("!!! WARNING !!! Can't get type of {:?}", m.mtype);
            None
        }
    }
}
/// Infers the type of a binding introduced in a match arm's pattern.
pub fn get_type_from_match_arm(m: &Match, msrc: Src<'_>, session: &Session<'_>) -> Option<Ty> {
    // We construct a faux match stmt and then parse it. This is because the
    // match stmt may be incomplete (half written) in the real code
    // skip to end of match arm pattern so we can search backwards
    let arm = BytePos(msrc[m.point.0..].find("=>")?) + m.point;
    let scopestart = scopes::scope_start(msrc, arm);
    let stmtstart = scopes::find_stmt_start(msrc, scopestart.decrement())?;
    debug!("PHIL preblock is {:?} {:?}", stmtstart, scopestart);
    let preblock = &msrc[stmtstart.0..scopestart.0];
    let matchstart = stmtstart + preblock.rfind("match ")?.into();
    // the arm's pattern — everything left of the `=>`
    let lhs_start = scopes::get_start_of_pattern(&msrc, arm);
    let lhs = &msrc[lhs_start.0..arm.0];
    // construct faux match statement and recreate point
    let mut fauxmatchstmt = msrc[matchstart.0..scopestart.0].to_owned();
    let faux_prefix_size = BytePos::from(fauxmatchstmt.len());
    fauxmatchstmt = fauxmatchstmt + lhs + " => () };";
    let faux_point = faux_prefix_size + (m.point - lhs_start);
    debug!(
        "fauxmatchstmt for parsing is pt:{:?} src:|{}|",
        faux_point, fauxmatchstmt
    );
    ast::get_match_arm_type(
        fauxmatchstmt,
        faux_point,
        // scope is used to locate expression, so send
        // it the start of the match expr
        Scope {
            filepath: m.filepath.clone(),
            point: matchstart,
        },
        session,
    )
}
/// Returns the source text of `fnmatch`'s declaration: everything from the
/// start of the statement up to (excluding) the body `{` or terminating `;`.
pub fn get_function_declaration(fnmatch: &Match, session: &Session<'_>) -> String {
    let src = session.load_source_file(&fnmatch.filepath);
    let start = scopes::expect_stmt_start(src.as_src(), fnmatch.point);
    let terminators: &[_] = &['{', ';'];
    let len = src[start.0..]
        .find(terminators)
        .expect("Definition should have an end (`{` or `;`)");
    src[start.0..start.0 + len].to_owned()
}
/// Determines the declared return type of `fnmatch`, resolving `Self` against
/// the enclosing impl and bare generic parameters against `contextm`, and
/// wrapping an `async fn`'s result in a `Future`.
pub fn get_return_type_of_function(
    fnmatch: &Match,
    contextm: &Match,
    session: &Session<'_>,
) -> Option<Ty> {
    let src = session.load_source_file(&fnmatch.filepath);
    let point = scopes::expect_stmt_start(src.as_src(), fnmatch.point);
    let block_start = src[point.0..].find('{')?;
    // Wrap the signature in a faux `impl` block so the parser accepts it in isolation.
    let decl = "impl b{".to_string() + &src[point.0..point.0 + block_start + 1] + "}}";
    debug!("get_return_type_of_function: passing in |{}|", decl);
    let mut scope = Scope::from_match(fnmatch);
    // TODO(kngwyu): if point <= 5 scope is incorrect
    // shift the scope back to compensate for the "impl b{" prefix we added
    scope.point = point.checked_sub("impl b{".len()).unwrap_or(BytePos::ZERO);
    let (ty, is_async) = ast::parse_fn_output(decl, scope);
    // `Self` resolves via the enclosing impl; a bare single-segment path that
    // names one of the fn's generic params resolves to the context match.
    let resolve_ty = |ty| {
        if let Some(Ty::PathSearch(ref paths)) = ty {
            let path = &paths.path;
            if let Some(ref path_seg) = path.segments.get(0) {
                if "Self" == path_seg.name {
                    return get_type_of_self_arg(fnmatch, src.as_src(), session);
                }
                if path.segments.len() == 1 && path_seg.generics.is_empty() {
                    for type_param in fnmatch.generics() {
                        if type_param.name() == &path_seg.name {
                            return Some(Ty::Match(contextm.clone()));
                        }
                    }
                }
            }
        }
        ty
    };
    resolve_ty(ty).map(|ty| {
        if is_async {
            // an `async fn` actually returns a future of its written type
            Ty::Future(Box::new(ty), Scope::from_match(fnmatch))
        } else {
            ty
        }
    })
}
/// Determines the element type produced by indexing a value of type `body`
/// (i.e. what `body[..]` yields), peeling references first.
pub(crate) fn get_type_of_indexed_value(body: Ty, session: &Session<'_>) -> Option<Ty> {
    match body.dereference() {
        Ty::Match(m) => nameres::get_index_output(&m, session),
        Ty::PathSearch(p) => {
            let resolved = p.resolve_as_match(session)?;
            nameres::get_index_output(&resolved, session)
        }
        // Arrays and slices index straight to their element type.
        Ty::Array(elem, _) | Ty::Slice(elem) => Some(*elem),
        _ => None,
    }
}
/// Resolves the underlying type of a `type` alias match, returning the match
/// for the aliased type (primitive aliases map to primitive module matches).
pub(crate) fn get_type_of_typedef(m: &Match, session: &Session<'_>) -> Option<Match> {
    debug!("get_type_of_typedef match is {:?}", m);
    let msrc = session.load_source_file(&m.filepath);
    let blobstart = m.point - BytePos(5); // 5 == "type ".len()
    let blob = msrc.get_src_from_start(blobstart);
    // parse the whole `type X = …;` statement to get the right-hand side type
    let type_ = blob.iter_stmts().nth(0).and_then(|range| {
        let range = range.shift(blobstart);
        let blob = msrc[range.to_range()].to_owned();
        debug!("get_type_of_typedef blob string {}", blob);
        let scope = Scope::new(m.filepath.clone(), range.start);
        ast::parse_type(blob, &scope).type_
    })?;
    match type_.dereference() {
        Ty::Match(m) => Some(m),
        Ty::Ptr(_, _) => PrimKind::Pointer.to_module_match(),
        Ty::Array(_, _) => PrimKind::Array.to_module_match(),
        Ty::Slice(_) => PrimKind::Slice.to_module_match(),
        Ty::PathSearch(paths) => {
            let src = session.load_source_file(&m.filepath);
            let scope_start = scopes::scope_start(src.as_src(), m.point);
            // Type of TypeDef cannot be inside the impl block so look outside
            let outer_scope_start = scope_start
                .0
                .checked_sub(1)
                .map(|sub| scopes::scope_start(src.as_src(), sub.into()))
                .and_then(|s| {
                    // only step outward when the enclosing scope really is an
                    // impl or trait block
                    let blob = src.get_src_from_start(s);
                    let blob = blob.trim_start();
                    if blob.starts_with("impl") || util::trim_visibility(blob).starts_with("trait")
                    {
                        Some(s)
                    } else {
                        None
                    }
                });
            // resolve the aliased path, skipping the typedef itself (same name,
            // same file, different point)
            nameres::resolve_path_with_primitive(
                &paths.path,
                &paths.filepath,
                outer_scope_start.unwrap_or(scope_start),
                core::SearchType::StartsWith,
                core::Namespace::Type,
                session,
            )
            .into_iter()
            .filter(|m_| Some(m_.matchstr.as_ref()) == paths.path.name() && m_.point != m.point)
            .next()
        }
        _ => None,
    }
}
/// Determines the declared type of a `static`/`const` item by re-parsing its
/// statement text, with the scope rewound by the keyword's length so the
/// parser sees the item from its start.
fn get_type_of_static(m: Match) -> Option<Ty> {
    let Match {
        filepath,
        point,
        contextstr,
        ..
    } = m;
    let scope = Scope::new(filepath, point - "static".len().into());
    ast::parse_static(contextstr, scope).ty
}

856
racer/src/racer/util.rs Normal file
View File

@ -0,0 +1,856 @@
// Small functions of utility
use std::rc::Rc;
use std::{cmp, error, fmt, path};
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use crate::core::SearchType::{self, ExactMatch, StartsWith};
use crate::core::{BytePos, ByteRange, Location, LocationExt, RawSource, Session, SessionExt};
/// Separator between entries of the `RUST_SRC_PATH` environment variable.
#[cfg(unix)]
pub const PATH_SEP: char = ':';
/// Separator between entries of the `RUST_SRC_PATH` environment variable.
#[cfg(windows)]
pub const PATH_SEP: char = ';';
/// Returns `true` for characters that may appear inside a match pattern
/// we scan over: alphanumerics, whitespace, `_`, `:` and `.`.
#[inline]
pub(crate) fn is_pattern_char(c: char) -> bool {
    c.is_alphanumeric() || c.is_whitespace() || matches!(c, '_' | ':' | '.')
}
/// Returns `true` for characters that may appear inside a search expression:
/// alphanumerics, `_`, `:` and `.` (no whitespace, unlike patterns).
#[inline]
pub(crate) fn is_search_expr_char(c: char) -> bool {
    c.is_alphanumeric() || matches!(c, '_' | ':' | '.')
}
/// Returns `true` for characters that may appear inside an identifier.
/// `!` is included so macro names (`println!`) count as identifiers.
#[inline]
pub(crate) fn is_ident_char(c: char) -> bool {
    c.is_alphanumeric() || matches!(c, '_' | '!')
}
/// Byte-level whitespace test (space, CR, LF, tab) for ASCII-oriented scans.
#[inline(always)]
pub(crate) fn is_whitespace_byte(b: u8) -> bool {
    matches!(b, b' ' | b'\r' | b'\n' | b'\t')
}
/// Searches for `needle` as a standalone identifier in `haystack`. To be considered a match,
/// the `needle` must occur either at the beginning of `haystack` or after a non-identifier
/// character.
///
/// Convenience wrapper around [`txt_matches_with_pos`] that discards the position.
pub fn txt_matches(stype: SearchType, needle: &str, haystack: &str) -> bool {
    txt_matches_with_pos(stype, needle, haystack).is_some()
}
pub fn txt_matches_with_pos(stype: SearchType, needle: &str, haystack: &str) -> Option<usize> {
if needle.is_empty() {
return Some(0);
}
match stype {
ExactMatch => {
let n_len = needle.len();
let h_len = haystack.len();
for (n, _) in haystack.match_indices(needle) {
if (n == 0 || !is_ident_char(char_before(haystack, n)))
&& (n + n_len == h_len || !is_ident_char(char_at(haystack, n + n_len)))
{
return Some(n);
}
}
}
StartsWith => {
for (n, _) in haystack.match_indices(needle) {
if n == 0 || !is_ident_char(char_before(haystack, n)) {
return Some(n);
}
}
}
}
None
}
/// Whole-symbol comparison: `ExactMatch` requires full equality, while
/// `StartsWith` only requires `candidate` to begin with `searchstr`.
pub fn symbol_matches(stype: SearchType, searchstr: &str, candidate: &str) -> bool {
    if let ExactMatch = stype {
        searchstr == candidate
    } else {
        candidate.starts_with(searchstr)
    }
}
/// Finds a closure in `src`, returning the range of its `|…|` argument list
/// and the range of its body.
///
/// The body starts after `{` for brace-delimited bodies and ends at the
/// matching `}`, at a `;` (expression bodies), or after an unbalanced `)`
/// (closure written as a call argument).
///
/// NOTE(review): `enumerate()` below counts chars while `pipe_range` holds
/// byte offsets — this lines up only for ASCII prefixes; confirm.
pub fn find_closure(src: &str) -> Option<(ByteRange, ByteRange)> {
    let (pipe_range, _) = closure_valid_arg_scope(src)?;
    // skip whitespace after the closing `|` to find where the body starts
    let mut chars = src
        .chars()
        .enumerate()
        .skip(pipe_range.end.0)
        .skip_while(|(_, c)| c.is_whitespace());
    // braced bodies start just after the `{`
    let (start, start_char) = chars
        .next()
        .map(|(i, c)| (if c == '{' { i + 1 } else { i }, c))?;
    // depth counters: `clevel` for braces, `plevel` for parens
    let mut clevel = if start_char == '{' { 1 } else { 0 };
    let mut plevel = 0;
    let mut last = None;
    for (i, current) in chars {
        match current {
            '{' => clevel += 1,
            '(' => plevel += 1,
            '}' => {
                clevel -= 1;
                // end of a braced body, or a `}` belonging to an enclosing scope
                if (clevel == 0 && start_char == '{') || (clevel == -1) {
                    last = Some(i);
                    break;
                }
            }
            ';' => {
                // a `;` terminates an un-braced closure body
                if start_char != '{' {
                    last = Some(i);
                    break;
                }
            }
            ')' => {
                plevel -= 1;
                // a balanced `)` is a candidate end; an unbalanced one closes
                // the call the closure was passed to
                if plevel == 0 {
                    last = Some(i + 1);
                }
                if plevel == -1 {
                    last = Some(i + 1);
                    break;
                }
            }
            _ => {}
        }
    }
    if let Some(last) = last {
        Some((pipe_range, ByteRange::new(BytePos(start), BytePos(last))))
    } else {
        None
    }
}
#[test]
fn test_find_closure() {
    // Fixtures cover: bare expression body, braced body, body ending in `;`,
    // chained calls, nested closures, and a closure passed as a call argument.
    let src = "|a, b, c| something()";
    let src2 = "|a, b, c| { something() }";
    let src3 = "let a = |a, b, c|something();";
    let src4 = "let a = |a, b, c| something().second().third();";
    let src5 = "| x: i32 | y.map(|z| z~)";
    let src6 = "| x: i32 | Struct { x };";
    let src7 = "y.map(| x: i32 | y.map(|z| z) )";
    let src8 = "|z| z)";
    let src9 = "let p = |z| something() + 5;";
    // helpers build the expected ranges by locating marker chars in the fixture
    let get_range = |a, b| ByteRange::new(BytePos(a as usize), BytePos(b as usize));
    let find = |src: &str, a, off1: i32, b, off2: i32| {
        get_range(
            src.find(a).unwrap() as i32 + off1,
            src.rfind(b).unwrap() as i32 + 1 + off2,
        )
    };
    let get_pipe = |src| find(src, '|', 0, '|', 0);
    assert_eq!(
        Some((get_pipe(src), find(src, 's', 0, ')', 0))),
        find_closure(src)
    );
    assert_eq!(
        Some((get_pipe(src2), find(src2, '{', 1, '}', -1))),
        find_closure(src2)
    );
    assert_eq!(
        Some((get_pipe(src3), find(src3, 's', 0, ')', 0))),
        find_closure(src3)
    );
    assert_eq!(
        Some((get_pipe(src4), find(src4, 's', 0, ')', 0))),
        find_closure(src4)
    );
    assert_eq!(
        Some((find(src5, '|', 0, 'y', -2), find(src5, 'y', 0, ')', 0))),
        find_closure(src5)
    );
    assert_eq!(
        Some((get_pipe(src6), find(src6, 'S', 0, ';', -1))),
        find_closure(src6)
    );
    assert_eq!(
        Some((find(src7, '|', 0, 'y', -2), find(src7, '2', 4, ')', 0))),
        find_closure(src7)
    );
    assert_eq!(
        Some((get_pipe(src8), find(src8, ' ', 1, ')', 0))),
        find_closure(src8)
    );
    assert_eq!(
        Some((get_pipe(src9), find(src9, 's', 0, '5', 0))),
        find_closure(src9)
    );
}
/// Tries to validate that `scope_src` contains a closure argument scope;
/// on success, returns the byte range of the `|…|` section and its text.
///
/// NOTE(review): `i` below counts chars, not bytes — correct only while the
/// scanned prefix is ASCII; confirm.
pub fn closure_valid_arg_scope(scope_src: &str) -> Option<(ByteRange, &str)> {
    // Try to find the left and right pipe, if one or both are not present, this is not a valid
    // closure definition
    let left_pipe = scope_src.find('|')?;
    let candidate = &scope_src[left_pipe..];
    let mut brace_level = 0;
    for (i, c) in candidate.chars().skip(1).enumerate() {
        match c {
            '{' => brace_level += 1,
            '}' => brace_level -= 1,
            '|' => {
                let right_pipe = left_pipe + 1 + i;
                // now we find right |
                // only accept it outside braces — this rejects `|` used as an
                // or-pattern inside a nested block (e.g. match arms)
                if brace_level == 0 {
                    let range = ByteRange::new(left_pipe, right_pipe + 1);
                    return Some((range, &scope_src[range.to_range()]));
                }
                break;
            }
            // a statement boundary before the closing `|` means no closure
            ';' => break,
            _ => {}
        }
        // fell out of the scope we started in
        if brace_level < 0 {
            break;
        }
    }
    None
}
#[test]
fn test_closure_valid_arg_scope() {
    // a plain closure is recognized and its pipe span returned
    let valid = r#"
    let a = |int, int| int * int;
    "#;
    assert_eq!(
        closure_valid_arg_scope(valid),
        Some((ByteRange::new(BytePos(13), BytePos(23)), "|int, int|"))
    );
    // `|` used as an or-pattern inside match arms must NOT be mistaken for a
    // closure argument list
    let confusing = r#"
    match a {
        EnumA::A => match b {
            EnumB::A(u) | EnumB::B(u) => println!("u: {}", u),
        },
        EnumA::B => match b {
            EnumB::A(u) | EnumB::B(u) => println!("u: {}", u),
        },
    }
    "#;
    assert_eq!(closure_valid_arg_scope(confusing), None);
}
#[test]
fn txt_matches_matches_stuff() {
    // ExactMatch needs clean boundaries on both sides; StartsWith only on the left.
    assert!(txt_matches(ExactMatch, "Vec", "Vec"));
    assert!(txt_matches(ExactMatch, "Vec", "use Vec"));
    assert!(!txt_matches(ExactMatch, "Vec", "use Vecä"));
    assert!(txt_matches(StartsWith, "Vec", "Vector"));
    assert!(txt_matches(StartsWith, "Vec", "use Vector"));
    assert!(txt_matches(StartsWith, "Vec", "use Vec"));
    assert!(!txt_matches(StartsWith, "Vec", "use äVector"));
}
#[test]
fn txt_matches_matches_methods() {
    // visibility qualifiers before `fn` must not break identifier matching
    assert!(txt_matches(StartsWith, "do_st", "fn do_stuff"));
    assert!(txt_matches(StartsWith, "do_st", "pub fn do_stuff"));
    assert!(txt_matches(StartsWith, "do_st", "pub(crate) fn do_stuff"));
    assert!(txt_matches(StartsWith, "do_st", "pub(in codegen) fn do_stuff"));
}
/// Given a string and index, return span of identifier
///
/// `pos` is coerced to be within `s`. Note that `expand_ident` only backtracks.
/// If the provided `pos` is in the middle of an identifier, the returned
/// `(start, end)` will have `end` = `pos`.
///
/// # Examples
///
/// ```
/// extern crate racer;
///
/// let src = "let x = this_is_an_identifier;";
/// let pos = racer::Location::from(29);
/// let path = "lib.rs";
///
/// let cache = racer::FileCache::default();
/// let session = racer::Session::new(&cache, None);
///
/// session.cache_file_contents(path, src);
///
/// let expanded = racer::expand_ident(path, pos, &session).unwrap();
/// assert_eq!("this_is_an_identifier", expanded.ident());
/// ```
pub fn expand_ident<P, C>(filepath: P, cursor: C, session: &Session<'_>) -> Option<ExpandedIdent>
where
    P: AsRef<path::Path>,
    C: Into<Location>,
{
    let cursor = cursor.into();
    let indexed_source = session.load_raw_file(filepath.as_ref());
    let (start, pos) = {
        let s = &indexed_source.code[..];
        let pos = match cursor.to_point(&indexed_source) {
            Some(pos) => pos,
            None => {
                debug!("Failed to convert cursor to point");
                return None;
            }
        };
        // TODO: Would this better be an assertion ? Why are out-of-bound values getting here ?
        // They are coming from the command-line, question is, if they should be handled beforehand
        // clamp pos into allowed range
        let pos = cmp::min(s.len().into(), pos);
        let sb = &s[..pos.0];
        let mut start = pos;
        // backtrack to find start of word: walk left one char at a time and
        // stop at the first non-identifier char
        for (i, c) in sb.char_indices().rev() {
            if !is_ident_char(c) {
                break;
            }
            start = i.into();
        }
        (start, pos)
    };
    Some(ExpandedIdent {
        src: indexed_source,
        start,
        pos,
    })
}
/// An identifier span located by [`expand_ident`].
pub struct ExpandedIdent {
    // the cached file contents the span indexes into
    src: Rc<RawSource>,
    // byte offset where the identifier starts
    start: BytePos,
    // byte offset of the (clamped) cursor — the end of the span
    pos: BytePos,
}
impl ExpandedIdent {
    /// The identifier text itself.
    pub fn ident(&self) -> &str {
        &self.src.code[self.start.0..self.pos.0]
    }

    /// Byte offset where the identifier begins.
    pub fn start(&self) -> BytePos {
        self.start
    }

    /// Byte offset of the cursor (end of the expanded span).
    pub fn pos(&self) -> BytePos {
        self.pos
    }
}
/// Scans forward from `pos` and returns the byte offset where the identifier
/// ends: the first non-identifier char, or the end of `s` if none follows.
pub fn find_ident_end(s: &str, pos: BytePos) -> BytePos {
    match s[pos.0..].find(|c: char| !is_ident_char(c)) {
        Some(offset) => pos + offset.into(),
        None => s.len().into(),
    }
}
#[cfg(test)]
mod test_find_ident_end {
    use super::{find_ident_end, BytePos};
    // thin shim so the asserts can work in plain usize
    fn find_ident_end_(s: &str, pos: usize) -> usize {
        find_ident_end(s, BytePos(pos)).0
    }

    #[test]
    fn ascii() {
        assert_eq!(5, find_ident_end_("ident", 0));
        assert_eq!(6, find_ident_end_("(ident)", 1));
        assert_eq!(17, find_ident_end_("let an_identifier = 100;", 4));
    }

    #[test]
    fn unicode() {
        // returned offsets are byte offsets, so multi-byte chars count > 1
        assert_eq!(7, find_ident_end_("num_µs", 0));
        assert_eq!(10, find_ident_end_("ends_in_µ", 0));
    }
}
/// Returns the last char whose start offset is below `i`, or `'\0'` when
/// there is none (`i == 0`). Tolerates `i` past the end of `src` and `i`
/// landing in the middle of a multi-byte char.
fn char_before(src: &str, i: usize) -> char {
    src.char_indices()
        .take_while(|&(ii, _)| ii < i)
        .last()
        .map_or('\0', |(_, ch)| ch)
}
#[test]
fn test_char_before() {
    // indices need not fall on char boundaries (2 is inside 'ä') and may
    // exceed the string length (100 yields the final char)
    assert_eq!('ä', char_before("täst", 3));
    assert_eq!('ä', char_before("täst", 2));
    assert_eq!('s', char_before("täst", 4));
    assert_eq!('t', char_before("täst", 100));
}
/// Returns the char starting at byte offset `i` of `src`.
///
/// # Panics
/// Panics if `i` is out of bounds or not on a char boundary.
pub fn char_at(src: &str, i: usize) -> char {
    src[i..].chars().next().unwrap()
}
/// Error type returned from validate_rust_src_path()
#[derive(Debug, PartialEq)]
pub enum RustSrcPathError {
    /// `RUST_SRC_PATH` is unset and no fallback location worked.
    Missing,
    /// The configured path does not exist on disk.
    DoesNotExist(path::PathBuf),
    /// The path exists but does not look like a stdlib source tree.
    NotRustSourceTree(path::PathBuf),
}

impl error::Error for RustSrcPathError {}
impl fmt::Display for RustSrcPathError {
    // User-facing explanations; each message tells the user how to repair
    // their RUST_SRC_PATH configuration.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            RustSrcPathError::Missing => write!(
                f,
                "RUST_SRC_PATH environment variable must be set to \
                 point to the src directory of a rust checkout. \
                 E.g. \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains)"
            ),
            RustSrcPathError::DoesNotExist(ref path) => write!(
                f,
                "racer can't find the directory pointed to by the \
                 RUST_SRC_PATH variable \"{:?}\". Try using an \
                 absolute fully qualified path and make sure it \
                 points to the src directory of a rust checkout - \
                 e.g. \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains).",
                path
            ),
            RustSrcPathError::NotRustSourceTree(ref path) => write!(
                f,
                "Unable to find libstd under RUST_SRC_PATH. N.B. \
                 RUST_SRC_PATH variable needs to point to the *src* \
                 directory inside a rust checkout e.g. \
                 \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains). \
                 Current value \"{:?}\"",
                path
            ),
        }
    }
}
/// Asks the active toolchain (`rustc --print sysroot`) where its sysroot is,
/// then probes the two historical locations of the rust-src component under
/// it: `library` (newer toolchains) and `src` (older ones).
fn check_rust_sysroot() -> Option<path::PathBuf> {
    use std::process::Command;
    let output = Command::new("rustc")
        .arg("--print")
        .arg("sysroot")
        .output()
        .ok()?;
    let stdout = String::from_utf8(output.stdout).ok()?;
    let sysroot = path::Path::new(stdout.trim());
    for suffix in &["lib/rustlib/src/rust/library", "lib/rustlib/src/rust/src"] {
        let srcpath = sysroot.join(suffix);
        if srcpath.exists() {
            return Some(srcpath);
        }
    }
    None
}
/// Get the path for Rust standard library source code.
/// Checks first the paths in the `RUST_SRC_PATH` environment variable.
///
/// If the environment variable is _not_ set, it checks the rust sys
/// root for the `rust-src` component.
///
/// If that isn't available, checks `/usr/local/src/rust/src` and
/// `/usr/src/rust/src` as default values.
///
/// If the Rust standard library source code cannot be found, returns
/// `Err(racer::RustSrcPathError::Missing)`.
///
/// If the path in `RUST_SRC_PATH` or the path in rust sys root is invalid,
/// returns a corresponding error. If a valid path is found, returns that path.
///
/// # Examples
///
/// ```
/// extern crate racer;
///
/// match racer::get_rust_src_path() {
///     Ok(_path) => {
///         // RUST_SRC_PATH is valid
///     },
///     Err(racer::RustSrcPathError::Missing) => {
///         // path is not set
///     },
///     Err(racer::RustSrcPathError::DoesNotExist(_path)) => {
///         // provided path doesnt point to valid file
///     },
///     Err(racer::RustSrcPathError::NotRustSourceTree(_path)) => {
///         // provided path doesn't have rustc src
///     }
/// }
/// ```
pub fn get_rust_src_path() -> Result<path::PathBuf, RustSrcPathError> {
    use std::env;
    debug!("Getting rust source path. Trying env var RUST_SRC_PATH.");
    // 1) a non-empty RUST_SRC_PATH wins; only its first entry is used
    match env::var("RUST_SRC_PATH") {
        Ok(ref srcpaths) if !srcpaths.is_empty() => {
            if let Some(path) = srcpaths.split(PATH_SEP).next() {
                return validate_rust_src_path(path::PathBuf::from(path));
            }
        }
        _ => {}
    }
    debug!("Nope. Trying rustc --print sysroot and appending lib/rustlib/src/rust/{{src, library}} to that.");
    // 2) the rust-src component under the toolchain's sysroot
    if let Some(path) = check_rust_sysroot() {
        return validate_rust_src_path(path);
    }
    debug!("Nope. Trying default paths: /usr/local/src/rust/src and /usr/src/rust/src");
    // 3) historical system-wide fallback locations
    for path in &["/usr/local/src/rust/src", "/usr/src/rust/src"] {
        if let Ok(path) = validate_rust_src_path(path::PathBuf::from(path)) {
            return Ok(path);
        }
    }
    warn!("Rust stdlib source path not found!");
    Err(RustSrcPathError::Missing)
}
/// Checks that `path` exists and looks like the root of a Rust stdlib source
/// tree; returns it on success, or a descriptive error otherwise.
fn validate_rust_src_path(path: path::PathBuf) -> Result<path::PathBuf, RustSrcPathError> {
    if !path.exists() {
        return Err(RustSrcPathError::DoesNotExist(path));
    }
    // Historically, the Rust standard library was distributed under "libstd"
    // but was later renamed to "std" when the library was moved under "library/"
    // in https://github.com/rust-lang/rust/pull/73265.
    let looks_like_src_tree =
        path.join("libstd").exists() || path.join("std").join("src").exists();
    if looks_like_src_tree {
        Ok(path)
    } else {
        Err(RustSrcPathError::NotRustSourceTree(path.join("libstd")))
    }
}
// Serializes the env-var-mutating tests below: they all read and rewrite
// RUST_SRC_PATH (and sometimes PATH) and would race if run concurrently.
#[cfg(test)]
lazy_static! {
    static ref TEST_SEMAPHORE: ::std::sync::Mutex<()> = Default::default();
}
#[test]
fn test_get_rust_src_path_env_ok() {
    use std::env;
    // serialize with the other env-mutating tests
    let _guard = TEST_SEMAPHORE.lock().unwrap();
    let original = env::var_os("RUST_SRC_PATH");
    // if unset, point RUST_SRC_PATH at the sysroot sources so lookup can succeed
    if env::var_os("RUST_SRC_PATH").is_none() {
        env::set_var("RUST_SRC_PATH", check_rust_sysroot().unwrap());
    }
    let result = get_rust_src_path();
    // restore the caller's environment before asserting
    match original {
        Some(path) => env::set_var("RUST_SRC_PATH", path),
        None => env::remove_var("RUST_SRC_PATH"),
    }
    assert!(result.is_ok());
}
#[test]
fn test_get_rust_src_path_does_not_exist() {
    use std::env;
    // serialize with the other env-mutating tests
    let _guard = TEST_SEMAPHORE.lock().unwrap();
    let original = env::var_os("RUST_SRC_PATH");
    // a bogus path must surface as DoesNotExist
    env::set_var("RUST_SRC_PATH", "test_path");
    let result = get_rust_src_path();
    // restore the caller's environment before asserting
    match original {
        Some(path) => env::set_var("RUST_SRC_PATH", path),
        None => env::remove_var("RUST_SRC_PATH"),
    }
    assert_eq!(
        Err(RustSrcPathError::DoesNotExist(path::PathBuf::from(
            "test_path"
        ))),
        result
    );
}
#[test]
fn test_get_rust_src_path_not_rust_source_tree() {
    use std::env;
    // serialize with the other env-mutating tests
    let _guard = TEST_SEMAPHORE.lock().unwrap();
    let original = env::var_os("RUST_SRC_PATH");
    // "/" exists but has no stdlib sources → NotRustSourceTree
    env::set_var("RUST_SRC_PATH", "/");
    let result = get_rust_src_path();
    // restore the caller's environment before asserting
    match original {
        Some(path) => env::set_var("RUST_SRC_PATH", path),
        None => env::remove_var("RUST_SRC_PATH"),
    }
    assert_eq!(
        Err(RustSrcPathError::NotRustSourceTree(path::PathBuf::from(
            "/libstd"
        ))),
        result
    );
}
#[test]
fn test_get_rust_src_path_missing() {
    use std::env;
    // serialize with the other env-mutating tests
    let _guard = TEST_SEMAPHORE.lock().unwrap();
    let path = env::var_os("PATH").unwrap();
    let original = env::var_os("RUST_SRC_PATH");
    // with neither RUST_SRC_PATH nor PATH set, every lookup strategy fails
    env::remove_var("RUST_SRC_PATH");
    env::remove_var("PATH");
    let result = get_rust_src_path();
    // restore the caller's environment before asserting
    env::set_var("PATH", path);
    match original {
        Some(path) => env::set_var("RUST_SRC_PATH", path),
        None => env::remove_var("RUST_SRC_PATH"),
    }
    assert_eq!(Err(RustSrcPathError::Missing), result);
}
#[test]
fn test_get_rust_src_path_rustup_ok() {
    use std::env;
    // serialize with the other env-mutating tests
    let _guard = TEST_SEMAPHORE.lock().unwrap();
    let original = env::var_os("RUST_SRC_PATH");
    // force the sysroot/rustup discovery path by unsetting the env var
    env::remove_var("RUST_SRC_PATH");
    let result = get_rust_src_path();
    // restore the caller's environment before asserting
    match original {
        Some(path) => env::set_var("RUST_SRC_PATH", path),
        None => env::remove_var("RUST_SRC_PATH"),
    }
    match result {
        Ok(_) => (),
        Err(_) => panic!(
            "Couldn't get the path via rustup! \
             Rustup and the component rust-src needs to be installed for this test to pass!"
        ),
    }
}
/// An immutable stack implemented as a linked list backed by a thread's stack.
// TODO: this implementation is fast, but if we want to run racer in multiple threads,
// we have to rewrite it using std::sync::Arc.
pub struct StackLinkedListNode<'stack, T>(Option<StackLinkedListNodeData<'stack, T>>);

// One stack frame: the stored item plus a link to the node below it.
struct StackLinkedListNodeData<'stack, T> {
    item: T,
    previous: &'stack StackLinkedListNode<'stack, T>,
}
impl<'stack, T> StackLinkedListNode<'stack, T> {
/// Returns an empty node.
pub fn empty() -> Self {
StackLinkedListNode(None)
}
/// Pushes a new node on the stack. Returns the new node.
pub fn push(&'stack self, item: T) -> Self {
StackLinkedListNode(Some(StackLinkedListNodeData {
item,
previous: self,
}))
}
}
impl<'stack, T: PartialEq> StackLinkedListNode<'stack, T> {
/// Check if the stack contains the specified item.
/// Returns `true` if the item is found, or `false` if it's not found.
pub fn contains(&self, item: &T) -> bool {
let mut current = self;
while let StackLinkedListNode(Some(StackLinkedListNodeData {
item: ref current_item,
previous,
})) = *current
{
if current_item == item {
return true;
}
current = previous;
}
false
}
}
// internal helper for strip_visibility / strip_word — don't call directly
/// Given `src` positioned just after a stripped keyword, returns the offset
/// of the next token, skipping the keyword's trailing whitespace (and, with
/// `allow_paren`, a balanced `(...)` group such as the `(crate)` of
/// `pub(crate)`). Returns `None` when the keyword isn't followed by a
/// separator (i.e. it wasn't a standalone word) or only whitespace remains.
fn strip_word_impl(src: &str, allow_paren: bool) -> Option<BytePos> {
    let mut level = 0;
    for (i, &b) in src.as_bytes().into_iter().enumerate() {
        match b {
            b'(' if allow_paren => level += 1,
            b')' if allow_paren => level -= 1,
            // everything inside parens is part of the word
            _ if level >= 1 => (),
            // stop on the first thing that isn't whitespace
            _ if !is_whitespace_byte(b) => {
                // no separating whitespace at all → not a standalone word
                if i == 0 {
                    break;
                }
                return Some(BytePos(i));
            }
            _ => continue,
        }
    }
    None
}
/// Strips a leading visibility qualifier (`pub`, `pub(crate)`, `crate`, …),
/// returning the offset just past it, or `None` when `src` has none.
pub(crate) fn strip_visibility(src: &str) -> Option<BytePos> {
    // `pub` may carry a parenthesized restriction, so parens are allowed
    // while skipping past it; plain `crate` takes none.
    if let Some(rest) = src.strip_prefix("pub") {
        strip_word_impl(rest, true).map(|p| p + BytePos(3))
    } else if let Some(rest) = src.strip_prefix("crate") {
        strip_word_impl(rest, false).map(|p| p + BytePos(5))
    } else {
        None
    }
}
/// Strips a leading keyword `word` (e.g. `unsafe`) plus its trailing
/// whitespace, returning the offset just past it, or `None` if absent.
pub(crate) fn strip_word(src: &str, word: &str) -> Option<BytePos> {
    let rest = src.strip_prefix(word)?;
    strip_word_impl(rest, false).map(|pos| pos + BytePos(word.len()))
}
/// Strips each of `words` in turn from the front of `src`, accumulating the
/// total offset; words that aren't present contribute nothing.
pub(crate) fn strip_words(src: &str, words: &[&str]) -> BytePos {
    words.iter().fold(BytePos::ZERO, |start, word| {
        start + strip_word(&src[start.0..], word).unwrap_or(BytePos::ZERO)
    })
}
#[test]
fn test_strip_words() {
    // offsets are byte positions just past the stripped keywords + whitespace;
    // absent keywords are simply skipped
    assert_eq!(
        strip_words("const unsafe fn", &["const", "unsafe"]),
        BytePos(15)
    );
    assert_eq!(strip_words("unsafe fn", &["const", "unsafe"]), BytePos(8));
    assert_eq!(strip_words("const fn", &["const", "unsafe"]), BytePos(8));
    assert_eq!(strip_words("fn", &["const", "unsafe"]), BytePos(0));
}
/// Removes a leading `pub(...)` (or `crate`) qualifier from the start of a
/// blob so that other code can assess the struct/trait/fn without worrying
/// about restricted visibility.
pub(crate) fn trim_visibility(blob: &str) -> &str {
    match strip_visibility(blob) {
        Some(start) => &blob[start.0..],
        None => blob,
    }
}
#[test]
fn test_trim_visibility() {
    // plain, parenthesized, and spaced restriction forms are all stripped
    assert_eq!(trim_visibility("pub fn"), "fn");
    assert_eq!(trim_visibility("pub(crate) struct"), "struct");
    assert_eq!(trim_visibility("pub (in super) const fn"), "const fn");
}
/// Checks if the completion point is in a function declaration by looking
/// to see if the second-to-last word is `fn`.
pub fn in_fn_name(line_before_point: &str) -> bool {
// Determine if the cursor is sitting in the whitespace after typing `fn ` before
// typing a name.
let has_started_name = !line_before_point.ends_with(|c: char| c.is_whitespace());
let mut words = line_before_point.split_whitespace().rev();
// Make sure we haven't finished the name and started generics or arguments
if has_started_name {
if let Some(ident) = words.next() {
if ident.chars().any(|c| !is_ident_char(c)) {
return false;
}
}
}
words.next().map(|word| word == "fn").unwrap_or_default()
}
#[test]
fn test_in_fn_name() {
    // Positions that are still inside the function-name slot.
    for inside in ["fn foo", "    fn foo", "fn "] {
        assert!(in_fn_name(inside));
    }
    // Past the name (arguments started) or before the name slot exists.
    for outside in ["fn foo(b", "fn"] {
        assert!(!in_fn_name(outside));
    }
}
/// Computes a 64-bit hash of `s` using the standard library's
/// `DefaultHasher` (deterministic for a fixed std version).
pub fn calculate_str_hash(s: &str) -> u64 {
    let mut state = DefaultHasher::default();
    s.hash(&mut state);
    state.finish()
}
/// Unwraps a `Try` value (e.g. `Option`/`Result`): yields the success value,
/// or `continue`s the enclosing loop on the break/failure case.
///
/// NOTE(review): relies on the unstable `std::ops::Try::branch` API
/// (`try_trait_v2`), so this only compiles on nightly toolchains.
#[macro_export]
macro_rules! try_continue {
    ($res: expr) => {
        match ::std::ops::Try::branch($res) {
            ::std::ops::ControlFlow::Continue(o) => o,
            ::std::ops::ControlFlow::Break(_) => continue,
        }
    };
}
/// Unwraps a `Try` value (e.g. `Option`/`Result`): yields the success value,
/// or returns an empty `Vec` from the enclosing function on the
/// break/failure case.
///
/// NOTE(review): relies on the unstable `std::ops::Try::branch` API
/// (`try_trait_v2`), so this only compiles on nightly toolchains.
#[macro_export]
macro_rules! try_vec {
    ($res: expr) => {
        match ::std::ops::Try::branch($res) {
            ::std::ops::ControlFlow::Continue(o) => o,
            ::std::ops::ControlFlow::Break(_) => return Vec::new(),
        }
    };
}
/// Yields the first `min(u, 16)` tuple-field names: `"0"`, `"1"`, ….
///
/// Capped at 16 because that is the largest arity covered by the static
/// name table; callers asking for more simply get all 16.
pub(crate) fn gen_tuple_fields(u: usize) -> impl Iterator<Item = &'static str> {
    // `'static` is implied on const items (clippy: redundant_static_lifetimes);
    // `.copied()` replaces the manual `.map(|x| *x)` deref.
    const NUM: [&str; 16] = [
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15",
    ];
    NUM.iter().take(u.min(16)).copied()
}

View File

@ -75,4 +75,6 @@ pub enum Edition {
Edition2018,
/// Rust 2021
Edition2021,
/// Rust 2024
Edition2024,
}

View File

@ -137,6 +137,7 @@ impl Callbacks for ShimCalls {
rustc_span::edition::Edition::Edition2015 => Edition::Edition2015,
rustc_span::edition::Edition::Edition2018 => Edition::Edition2018,
rustc_span::edition::Edition::Edition2021 => Edition::Edition2021,
rustc_span::edition::Edition::Edition2024 => Edition::Edition2024,
},
};

View File

@ -697,6 +697,7 @@ fn reformat(
Edition::Edition2015 => RustfmtEdition::Edition2015,
Edition::Edition2018 => RustfmtEdition::Edition2018,
Edition::Edition2021 => RustfmtEdition::Edition2021,
Edition::Edition2024 => RustfmtEdition::Edition2024,
};
config.set().edition(edition);
trace!("Detected edition {:?} for file `{}`", edition, path.display());

View File

@ -408,17 +408,18 @@ impl Executor for RlsExecutor {
// Enforce JSON output so that we can parse the rustc output by
// stripping --error-format if it was specified (e.g. Cargo pipelined
// build)
let filtered_args = filter_arg(cargo_cmd.get_args(), "--error-format");
let filtered_args =
filter_arg(&*cargo_cmd.get_args().collect::<Vec<_>>(), "--error-format");
cargo_cmd.args_replace(&filtered_args);
cargo_cmd.arg("--error-format=json");
// Delete any stale data. We try and remove any json files with
// the same crate name as Cargo would emit. This includes files
// with the same crate name but different hashes, e.g., those
// made with a different compiler.
let cargo_args = cargo_cmd.get_args();
let cargo_args = cargo_cmd.get_args().collect::<Vec<_>>();
let crate_name =
parse_arg(cargo_args, "--crate-name").expect("no crate-name in rustc command line");
let cfg_test = cargo_args.iter().any(|arg| arg == "--test");
parse_arg(&cargo_args, "--crate-name").expect("no crate-name in rustc command line");
let cfg_test = cargo_args.iter().any(|arg| *arg == "--test");
trace!("exec: {} {:?}", crate_name, cargo_cmd);
// Send off a window/progress notification for this compile target.
@ -435,7 +436,8 @@ impl Executor for RlsExecutor {
.expect("failed to send progress update");
}
let out_dir = parse_arg(cargo_args, "--out-dir").expect("no out-dir in rustc command line");
let out_dir =
parse_arg(&cargo_args, "--out-dir").expect("no out-dir in rustc command line");
let analysis_dir = Path::new(&out_dir).join("save-analysis");
if let Ok(dir_contents) = read_dir(&analysis_dir) {
let lib_crate_name = "lib".to_owned() + &crate_name;
@ -478,7 +480,7 @@ impl Executor for RlsExecutor {
// Add args and envs to cmd.
let mut args: Vec<_> =
cargo_args.iter().map(|a| a.clone().into_string().unwrap()).collect();
cargo_args.iter().map(|a| (*a).to_owned().into_string().unwrap()).collect();
let envs = cargo_cmd.get_envs().clone();
let sysroot = super::rustc::current_sysroot()
@ -508,7 +510,7 @@ impl Executor for RlsExecutor {
"rustc not intercepted - {}{} - args: {:?} envs: {:?}",
id.name(),
build_script_notice,
cmd.get_args(),
cmd.get_args().collect::<Vec<_>>(),
cmd.get_envs(),
);
@ -712,9 +714,9 @@ pub fn make_cargo_config(
config
}
fn parse_arg(args: &[OsString], arg: &str) -> Option<String> {
fn parse_arg(args: &[&OsString], arg: &str) -> Option<String> {
for (i, a) in args.iter().enumerate() {
if a == arg {
if *a == arg {
return Some(args[i + 1].clone().into_string().unwrap());
}
}
@ -780,7 +782,7 @@ fn dedup_flags(flag_str: &str) -> String {
}
/// Removes a selected flag of a `--flag=VALUE` or `--flag VALUE` shape from `args` (command line args for Rust).
fn filter_arg(args: &[OsString], key: &str) -> Vec<String> {
fn filter_arg(args: &[&OsString], key: &str) -> Vec<String> {
let key_as_prefix = key.to_owned() + "=";
let mut ret = vec![];
@ -934,12 +936,47 @@ mod test {
input.split_whitespace().map(OsString::from).collect()
}
assert!(filter_arg(&args("--error-format=json"), "--error-format").is_empty());
assert!(filter_arg(&args("--error-format json"), "--error-format").is_empty());
assert_eq!(filter_arg(&args("-a --error-format=json"), "--error-format"), ["-a"]);
assert_eq!(filter_arg(&args("-a --error-format json"), "--error-format"), ["-a"]);
assert_eq!(filter_arg(&args("-a --error-format=json -b"), "--error-format"), ["-a", "-b"]);
assert_eq!(filter_arg(&args("-a --error-format json -b"), "--error-format"), ["-a", "-b"]);
assert_eq!(filter_arg(&args("-a -b -x"), "--error-format"), ["-a", "-b", "-x"]);
assert!(filter_arg(
&args("--error-format=json").iter().collect::<Vec<_>>(),
"--error-format"
)
.is_empty());
assert!(filter_arg(
&args("--error-format json").iter().collect::<Vec<_>>(),
"--error-format"
)
.is_empty());
assert_eq!(
filter_arg(
&args("-a --error-format=json").iter().collect::<Vec<_>>(),
"--error-format"
),
["-a"]
);
assert_eq!(
filter_arg(
&args("-a --error-format json").iter().collect::<Vec<_>>(),
"--error-format"
),
["-a"]
);
assert_eq!(
filter_arg(
&args("-a --error-format=json -b").iter().collect::<Vec<_>>(),
"--error-format"
),
["-a", "-b"]
);
assert_eq!(
filter_arg(
&args("-a --error-format json -b").iter().collect::<Vec<_>>(),
"--error-format"
),
["-a", "-b"]
);
assert_eq!(
filter_arg(&args("-a -b -x").iter().collect::<Vec<_>>(), "--error-format"),
["-a", "-b", "-x"]
);
}
}

View File

@ -197,7 +197,7 @@ impl BuildKey for Invocation {
let mut hash = DefaultHasher::new();
self.command.get_program().hash(&mut hash);
let /*mut*/ args = self.command.get_args().to_owned();
let /*mut*/ args = self.command.get_args().map(|a| a.to_owned()).collect::<Vec<_>>();
// args.sort(); // TODO: parse 2-part args (e.g., `["--extern", "a=b"]`)
args.hash(&mut hash);
let mut envs: Vec<_> = self.command.get_envs().iter().collect();
@ -417,10 +417,8 @@ fn guess_rustc_src_path(build_dir: &Path, cmd: &ProcessBuilder) -> Option<PathBu
let cwd = cmd.get_cwd().or_else(|| Some(build_dir));
let file = cmd
.get_args()
.iter()
.find(|&a| Path::new(a).extension().map(|e| e == "rs").unwrap_or(false))?;
let file =
cmd.get_args().find(|&a| Path::new(a).extension().map(|e| e == "rs").unwrap_or(false))?;
src_path(cwd, file)
}

View File

@ -101,6 +101,7 @@ mod callbacks {
rls_ipc::rpc::Edition::Edition2015 => crate::build::plan::Edition::Edition2015,
rls_ipc::rpc::Edition::Edition2018 => crate::build::plan::Edition::Edition2018,
rls_ipc::rpc::Edition::Edition2021 => crate::build::plan::Edition::Edition2021,
rls_ipc::rpc::Edition::Edition2024 => crate::build::plan::Edition::Edition2024,
},
disambiguator: krate.disambiguator,
}

View File

@ -84,7 +84,7 @@ pub(crate) struct JobQueue(Vec<ProcessBuilder>);
/// For example, if `[.., "--crate-name", "rls", ...]` arguments are specified,
/// then proc_arg(prc, "--crate-name") returns Some(&OsStr::new("rls"));
fn proc_argument_value<T: AsRef<OsStr>>(prc: &ProcessBuilder, key: T) -> Option<&std::ffi::OsStr> {
let args = prc.get_args();
let args = prc.get_args().collect::<Vec<_>>();
let (idx, _) = args.iter().enumerate().find(|(_, arg)| arg.as_os_str() == key.as_ref())?;
Some(args.get(idx + 1)?.as_os_str())
@ -125,7 +125,6 @@ impl JobQueue {
trace!("Executing: {:#?}", job);
let mut args: Vec<_> = job
.get_args()
.iter()
.cloned()
.map(|x| x.into_string().expect("cannot stringify job args"))
.collect();
@ -155,7 +154,7 @@ impl JobQueue {
let crate_name = proc_argument_value(&job, "--crate-name").and_then(OsStr::to_str);
let update = match crate_name {
Some(name) => {
let cfg_test = job.get_args().iter().any(|arg| arg == "--test");
let cfg_test = job.get_args().any(|arg| *arg == "--test");
ProgressUpdate::Message(if cfg_test {
format!("{} cfg(test)", name)
} else {
@ -237,6 +236,7 @@ pub enum Edition {
Edition2015,
Edition2018,
Edition2021,
Edition2024,
}
impl Default for Edition {
@ -253,6 +253,7 @@ impl std::convert::TryFrom<&str> for Edition {
"2015" => Edition::Edition2015,
"2018" => Edition::Edition2018,
"2021" => Edition::Edition2021,
"2024" => Edition::Edition2024,
_ => return Err("unknown"),
})
}

View File

@ -255,6 +255,7 @@ impl rustc_driver::Callbacks for RlsRustcCalls {
RustcEdition::Edition2015 => Edition::Edition2015,
RustcEdition::Edition2018 => Edition::Edition2018,
RustcEdition::Edition2021 => Edition::Edition2021,
RustcEdition::Edition2024 => Edition::Edition2024,
},
};

View File

@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2022-01-13"
channel = "nightly-2022-05-16"
components = ["rust-src", "rustc-dev", "llvm-tools-preview"]