rust-analyzer: 2020-10-12 -> 2020-10-19

wip/yesman
oxalica 4 years ago committed by Jon
parent 6317616bee
commit dc12101886
  1. pkgs/development/tools/rust/rust-analyzer/default.nix (6 lines changed)
  2. pkgs/development/tools/rust/rust-analyzer/downgrade-smol_str.patch (82 lines changed)
  3. pkgs/development/tools/rust/rust-analyzer/generic.nix (11 lines changed)
  4. pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch (223 lines changed)
  5. pkgs/development/tools/rust/rust-analyzer/no-match-unsizing-in-const-fn.patch (30 lines changed)
  6. pkgs/development/tools/rust/rust-analyzer/no-option-zip.patch (52 lines changed)

pkgs/development/tools/rust/rust-analyzer/default.nix
@@ -2,10 +2,10 @@
 {
   rust-analyzer-unwrapped = callPackage ./generic.nix rec {
-    rev = "2020-10-12";
+    rev = "2020-10-19";
     version = "unstable-${rev}";
-    sha256 = "194xax87pwdh3p8zx46igvqwznlpnl4jp8lj987616gyldfgall0";
-    cargoSha256 = "1rvf3a2fpqpf4q52pi676qzq7h0xfqlcbp15sc5vqc8nbbs7c7vw";
+    sha256 = "1xvyk1d26zn1d9i42h78qsm6bg57nhn1rgr46jwa46gsb31nabjh";
+    cargoSha256 = "18s5yrc9fdj2ndylwyf07l0kmwxka7mnbj254xmq3g7ragw71xjw";
   };
   rust-analyzer = callPackage ./wrapper.nix {} {

pkgs/development/tools/rust/rust-analyzer/downgrade-smol_str.patch (deleted)
@@ -1,82 +0,0 @@
This patch reverts 875ad9b5c410200f5072515ae91b4ff51cff0448 (Bump smol_str from 0.1.16 to 0.1.17).
diff --git a/Cargo.lock b/Cargo.lock
index 477af57aa..8b9055879 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1470,9 +1470,9 @@ checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252"
[[package]]
name = "smol_str"
-version = "0.1.17"
+version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ca0f7ce3a29234210f0f4f0b56f8be2e722488b95cb522077943212da3b32eb"
+checksum = "2f7909a1d8bc166a862124d84fdc11bda0ea4ed3157ccca662296919c2972db1"
dependencies = [
"serde",
]
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index a5750d829..49841c7a1 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -43,8 +43,8 @@ impl Name {
}
/// Shortcut to create inline plain text name
- const fn new_inline(text: &str) -> Name {
- Name::new_text(SmolStr::new_inline(text))
+ const fn new_inline_ascii(text: &[u8]) -> Name {
+ Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text))
}
/// Resolve a name from the text of token.
@@ -127,7 +127,7 @@ pub mod known {
$(
#[allow(bad_style)]
pub const $ident: super::Name =
- super::Name::new_inline(stringify!($ident));
+ super::Name::new_inline_ascii(stringify!($ident).as_bytes());
)*
};
}
@@ -210,8 +210,8 @@ pub mod known {
);
// self/Self cannot be used as an identifier
- pub const SELF_PARAM: super::Name = super::Name::new_inline("self");
- pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
+ pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self");
+ pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self");
#[macro_export]
macro_rules! name {
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs
index 9a7785c76..2b53b8297 100644
--- a/crates/hir_ty/src/infer.rs
+++ b/crates/hir_ty/src/infer.rs
@@ -555,7 +555,7 @@ impl<'a> InferenceContext<'a> {
fn resolve_lang_item(&self, name: &str) -> Option<LangItemTarget> {
let krate = self.resolver.krate()?;
- let name = SmolStr::new_inline(name);
+ let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes());
self.db.lang_item(krate, name)
}
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index d987b2500..a8ad917fb 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -636,10 +636,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
let (text, id) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
tt::Leaf::Punct(punct) => {
- assert!(punct.char.is_ascii());
- let char = &(punct.char as u8);
- let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap();
- (SmolStr::new_inline(text), punct.id)
+ (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id)
}
tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
};
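
For context: this downgrade patch existed because smol_str 0.1.17 turned SmolStr::new_inline into a const fn taking &str, and rust-analyzer started calling it from const contexts; per the "# Requires rustc 1.46.0" note in generic.nix that needs a newer compiler than nixpkgs carried, while 0.1.16 only offers the ASCII-specific new_inline_from_ascii(len, bytes) shown above. A minimal sketch of the two call shapes, assuming smol_str >= 0.1.17 in Cargo.toml (the Name wrapper below is a simplified stand-in for hir_expand::name::Name, not the real type):

    use smol_str::SmolStr;

    // Simplified stand-in for hir_expand::name::Name, only to show the const usage.
    struct Name(SmolStr);

    // smol_str 0.1.17: const fn taking &str (relies on rustc 1.46 const-fn features).
    const SELF_PARAM: Name = Name(SmolStr::new_inline("self"));

    // smol_str 0.1.16 equivalent, usable on the older rustc, as the patch restores:
    //     const SELF_PARAM: Name = Name(SmolStr::new_inline_from_ascii(4, b"self"));

    fn main() {
        assert_eq!(SELF_PARAM.0.as_str(), "self");
    }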

pkgs/development/tools/rust/rust-analyzer/generic.nix
@@ -16,16 +16,9 @@ rustPlatform.buildRustPackage {
     inherit rev sha256;
   };
-  # FIXME: Temporary fixes for our rust 1.45.0
-  cargoPatches = [
-    ./downgrade-smol_str.patch # Requires rustc 1.46.0
-  ];
+  # FIXME: Temporary fixes for our rust 1.46.0
   patches = [
-    ./no-track_env_var.patch # Requires rustc 1.47.0
-    ./no-match-unsizing-in-const-fn.patch # Requires rustc 1.46.0
-    ./no-loop-in-const-fn.patch # Requires rustc 1.46.0
-    ./no-option-zip.patch # Requires rustc 1.46.0
+    ./no-track_env_var.patch # Requires rustc 1.47.0
   ];
   buildAndTestSubdir = "crates/rust-analyzer";

pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch (deleted)
@@ -1,223 +0,0 @@
This patch reverts 4b989009e3839cfc6f021d1552a46561cee6cde2 (CONST LOOPS ARE HERE).
diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index 5f885edfd..e72929f8c 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -316,7 +316,7 @@ fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMark
}
const LHS_FIRST: TokenSet =
- atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
+ atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]);
fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
let m;
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 66a92a4e1..ba6dd2fbc 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -15,7 +15,7 @@ use super::*;
// let _ = b"e";
// let _ = br"f";
// }
-pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
+pub(crate) const LITERAL_FIRST: TokenSet = token_set![
TRUE_KW,
FALSE_KW,
INT_NUMBER,
@@ -25,8 +25,8 @@ pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
STRING,
RAW_STRING,
BYTE_STRING,
- RAW_BYTE_STRING,
-]);
+ RAW_BYTE_STRING
+];
pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
if !p.at_ts(LITERAL_FIRST) {
@@ -39,7 +39,7 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
// E.g. for after the break in `if break {}`, this should not match
pub(super) const ATOM_EXPR_FIRST: TokenSet =
- LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+ LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
T!['('],
T!['{'],
T!['['],
@@ -59,9 +59,9 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
T![loop],
T![for],
LIFETIME,
- ]));
+ ]);
-const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[LET_KW, R_DOLLAR]);
+const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
if let Some(m) = literal(p) {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index 22810e6fb..8fd8f3b80 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -26,7 +26,7 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
}
}
-pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
+pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
FN_KW,
STRUCT_KW,
ENUM_KW,
@@ -41,7 +41,7 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
USE_KW,
MACRO_KW,
T![;],
-]);
+];
pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) {
let m = p.start();
diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs
index 5d297e2d6..52562afa4 100644
--- a/crates/parser/src/grammar/paths.rs
+++ b/crates/parser/src/grammar/paths.rs
@@ -3,7 +3,7 @@
use super::*;
pub(super) const PATH_FIRST: TokenSet =
- TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![:], T![<]]);
+ token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]];
pub(super) fn is_path_start(p: &Parser) -> bool {
is_use_path_start(p) || p.at(T![<])
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
index 796f206e1..07b1d6dd5 100644
--- a/crates/parser/src/grammar/patterns.rs
+++ b/crates/parser/src/grammar/patterns.rs
@@ -2,18 +2,9 @@
use super::*;
-pub(super) const PATTERN_FIRST: TokenSet =
- expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
- T![box],
- T![ref],
- T![mut],
- T!['('],
- T!['['],
- T![&],
- T![_],
- T![-],
- T![.],
- ]));
+pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
+ .union(paths::PATH_FIRST)
+ .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]);
pub(crate) fn pattern(p: &mut Parser) {
pattern_r(p, PAT_RECOVERY_SET);
@@ -83,7 +74,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
}
const PAT_RECOVERY_SET: TokenSet =
- TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
+ token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
let m = match p.nth(0) {
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 1ea130ac5..9d00eb9b9 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -2,7 +2,7 @@
use super::*;
-pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
T!['('],
T!['['],
T![<],
@@ -16,16 +16,16 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
T![for],
T![impl],
T![dyn],
-]));
+]);
-const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
+const TYPE_RECOVERY_SET: TokenSet = token_set![
T![')'],
T![,],
L_DOLLAR,
// test_err struct_field_recover
// struct S { f pub g: () }
T![pub],
-]);
+];
pub(crate) fn type_(p: &mut Parser) {
type_with_bounds_cond(p, true);
diff --git a/crates/parser/src/token_set.rs b/crates/parser/src/token_set.rs
index a68f0144e..994017acf 100644
--- a/crates/parser/src/token_set.rs
+++ b/crates/parser/src/token_set.rs
@@ -9,21 +9,15 @@ pub(crate) struct TokenSet(u128);
impl TokenSet {
pub(crate) const EMPTY: TokenSet = TokenSet(0);
- pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
- let mut res = 0u128;
- let mut i = 0;
- while i < kinds.len() {
- res |= mask(kinds[i]);
- i += 1
- }
- TokenSet(res)
+ pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
+ TokenSet(mask(kind))
}
pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
TokenSet(self.0 | other.0)
}
- pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
+ pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
self.0 & mask(kind) != 0
}
}
@@ -32,10 +26,16 @@ const fn mask(kind: SyntaxKind) -> u128 {
1u128 << (kind as usize)
}
+#[macro_export]
+macro_rules! token_set {
+ ($($t:expr),*) => { TokenSet::EMPTY$(.union(TokenSet::singleton($t)))* };
+ ($($t:expr),* ,) => { token_set!($($t),*) };
+}
+
#[test]
fn token_set_works_for_tokens() {
use crate::SyntaxKind::*;
- let ts = TokenSet::new(&[EOF, SHEBANG]);
+ let ts = token_set![EOF, SHEBANG];
assert!(ts.contains(EOF));
assert!(ts.contains(SHEBANG));
assert!(!ts.contains(PLUS));
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index d829790d7..b25a6e301 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -7,7 +7,7 @@ use anyhow::{bail, format_err, Context, Result};
use crate::not_bash::{pushd, run};
// Latest stable, feel free to send a PR if this lags behind.
-const REQUIRED_RUST_VERSION: u32 = 46;
+const REQUIRED_RUST_VERSION: u32 = 43;
pub struct InstallCmd {
pub client: Option<ClientOpt>,
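
The revert above exists because TokenSet::new folds a slice with a `while` loop inside a const fn, and loops in const fn were only stabilized in rustc 1.46 (the "CONST LOOPS ARE HERE" commit it reverts); on the older toolchain the same bitmasks are rebuilt with the chained-union token_set! macro, and xtask's REQUIRED_RUST_VERSION drops back to 43. A self-contained sketch of the two equivalent constructions (the SyntaxKind variants below are a trimmed stand-in for the real enum):

    // Trimmed stand-in for parser::SyntaxKind, just enough to fill a bitset.
    #[derive(Clone, Copy)]
    enum SyntaxKind { Eof = 0, Shebang = 1, Plus = 2 }

    #[derive(Clone, Copy)]
    struct TokenSet(u128);

    const fn mask(kind: SyntaxKind) -> u128 {
        1u128 << (kind as usize)
    }

    impl TokenSet {
        const EMPTY: TokenSet = TokenSet(0);

        // rustc >= 1.46: a `while` loop is allowed in a const fn.
        const fn new(kinds: &[SyntaxKind]) -> TokenSet {
            let mut res = 0u128;
            let mut i = 0;
            while i < kinds.len() {
                res |= mask(kinds[i]);
                i += 1;
            }
            TokenSet(res)
        }

        // Loop-free building blocks that also work on older compilers.
        const fn singleton(kind: SyntaxKind) -> TokenSet {
            TokenSet(mask(kind))
        }

        const fn union(self, other: TokenSet) -> TokenSet {
            TokenSet(self.0 | other.0)
        }

        const fn contains(&self, kind: SyntaxKind) -> bool {
            self.0 & mask(kind) != 0
        }
    }

    // The same set built both ways; token_set![...] expands to the second form.
    const WITH_LOOP: TokenSet = TokenSet::new(&[SyntaxKind::Eof, SyntaxKind::Shebang]);
    const WITHOUT_LOOP: TokenSet = TokenSet::EMPTY
        .union(TokenSet::singleton(SyntaxKind::Eof))
        .union(TokenSet::singleton(SyntaxKind::Shebang));

    fn main() {
        assert_eq!(WITH_LOOP.0, WITHOUT_LOOP.0);
        assert!(WITH_LOOP.contains(SyntaxKind::Shebang));
        assert!(!WITHOUT_LOOP.contains(SyntaxKind::Plus));
    }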

pkgs/development/tools/rust/rust-analyzer/no-match-unsizing-in-const-fn.patch (deleted)
@@ -1,30 +0,0 @@
diff --git a/crates/assists/src/handlers/convert_integer_literal.rs b/crates/assists/src/handlers/convert_integer_literal.rs
index ea35e833a..4df80a3c0 100644
--- a/crates/assists/src/handlers/convert_integer_literal.rs
+++ b/crates/assists/src/handlers/convert_integer_literal.rs
@@ -105,7 +105,7 @@ impl IntegerLiteralBase {
}
}
- const fn base(&self) -> u32 {
+ fn base(&self) -> u32 {
match self {
Self::Binary => 2,
Self::Octal => 8,
@@ -114,14 +114,14 @@ impl IntegerLiteralBase {
}
}
- const fn prefix_len(&self) -> usize {
+ fn prefix_len(&self) -> usize {
match self {
Self::Decimal => 0,
_ => 2,
}
}
- const fn bases() -> &'static [IntegerLiteralBase] {
+ fn bases() -> &'static [IntegerLiteralBase] {
&[
IntegerLiteralBase::Binary,
IntegerLiteralBase::Octal,
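
This patch only drops `const` from three helpers because their bodies rely on features that became legal inside const fn with rustc 1.46: `match` expressions, and coercing an array reference to a slice (the &'static [IntegerLiteralBase] return type of bases()). A minimal sketch under that assumption; the Base enum below is a hypothetical stand-in for IntegerLiteralBase, not the real type:

    enum Base { Binary, Octal, Decimal, Hexadecimal }

    impl Base {
        // Needs rustc >= 1.46: `match` inside a const fn.
        const fn radix(self) -> u32 {
            match self {
                Base::Binary => 2,
                Base::Octal => 8,
                Base::Decimal => 10,
                Base::Hexadecimal => 16,
            }
        }

        // Needs rustc >= 1.46: coercing &[Base; 4] to &'static [Base] in a const fn.
        const fn all() -> &'static [Base] {
            &[Base::Binary, Base::Octal, Base::Decimal, Base::Hexadecimal]
        }
    }

    fn main() {
        assert_eq!(Base::Hexadecimal.radix(), 16);
        assert_eq!(Base::all().len(), 4);
        // On the older rustc the same code compiles once `const` is removed,
        // which is exactly what the patch does.
    }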

pkgs/development/tools/rust/rust-analyzer/no-option-zip.patch (deleted)
@@ -1,52 +0,0 @@
diff --git a/crates/assists/src/handlers/merge_imports.rs b/crates/assists/src/handlers/merge_imports.rs
index fe33cee53..2184a4154 100644
--- a/crates/assists/src/handlers/merge_imports.rs
+++ b/crates/assists/src/handlers/merge_imports.rs
@@ -32,7 +32,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
let (merged, to_delete) =
next_prev().filter_map(|dir| neighbor(&use_item, dir)).find_map(|use_item2| {
- try_merge_imports(&use_item, &use_item2, MergeBehaviour::Full).zip(Some(use_item2))
+ Some((try_merge_imports(&use_item, &use_item2, MergeBehaviour::Full)?, use_item2))
})?;
rewriter.replace_ast(&use_item, &merged);
@@ -44,7 +44,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
} else {
let (merged, to_delete) =
next_prev().filter_map(|dir| neighbor(&tree, dir)).find_map(|use_tree| {
- try_merge_trees(&tree, &use_tree, MergeBehaviour::Full).zip(Some(use_tree))
+ Some((try_merge_trees(&tree, &use_tree, MergeBehaviour::Full)?, use_tree))
})?;
rewriter.replace_ast(&tree, &merged);
diff --git a/crates/assists/src/utils/insert_use.rs b/crates/assists/src/utils/insert_use.rs
index f6025c99a..f9bd31cf3 100644
--- a/crates/assists/src/utils/insert_use.rs
+++ b/crates/assists/src/utils/insert_use.rs
@@ -274,7 +274,7 @@ fn common_prefix(lhs: &ast::Path, rhs: &ast::Path) -> Option<(ast::Path, ast::Pa
}
res = Some((lhs_curr.clone(), rhs_curr.clone()));
- match lhs_curr.parent_path().zip(rhs_curr.parent_path()) {
+ match zip(lhs_curr.parent_path(), rhs_curr.parent_path()) {
Some((lhs, rhs)) => {
lhs_curr = lhs;
rhs_curr = rhs;
@@ -452,7 +452,7 @@ fn find_insert_position(
let path_node_iter = scope
.as_syntax_node()
.children()
- .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
+ .filter_map(|node| Some((ast::Use::cast(node.clone())?, node)))
.flat_map(|(use_, node)| {
let tree = use_.use_tree()?;
let path = tree.path()?;
@@ -989,3 +989,7 @@ use foo::bar::baz::Qux;",
assert_eq!(result.map(|u| u.to_string()), None);
}
}
+
+fn zip<T, U>(x: Option<T>, y: Option<U>) -> Option<(T, U)> {
+ Some((x?, y?))
+}
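
Option::zip was stabilized in Rust 1.46, so this patch rewrites each `a.zip(b)` pairing as `Some((a?, b?))` inside the closures and appends the small free `zip` helper above to insert_use.rs. A minimal sketch showing that the two spellings agree (plain std, no rust-analyzer types involved):

    // Pre-1.46 replacement for Option::zip, as added by the patch.
    fn zip<T, U>(x: Option<T>, y: Option<U>) -> Option<(T, U)> {
        Some((x?, y?))
    }

    fn main() {
        let a = Some(1);
        let b = Some("one");

        // rustc >= 1.46: the std method.
        assert_eq!(a.zip(b), Some((1, "one")));

        // Older rustc: the hand-rolled equivalent used by the patched code.
        assert_eq!(zip(a, b), Some((1, "one")));
        assert_eq!(zip(a, None::<&str>), None);
    }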