lsp_core/feature/semantic.rs
use bevy_ecs::{
    prelude::*,
    schedule::{IntoSystemConfigs, ScheduleLabel},
};
use derive_more::{AsMut, AsRef, Deref, DerefMut};
use lsp_types::{SemanticToken, SemanticTokenType};

use crate::prelude::*;
/// Maps every known `SemanticTokenType` to its index in the legend the server
/// advertises to the client; `semantic_tokens_system` uses that index as the
/// wire value for `token_type`.
#[derive(Resource, AsRef, Deref, AsMut, DerefMut, Debug, Default)]
pub struct SemanticTokensDict(pub std::collections::HashMap<SemanticTokenType, usize>);

/// Entities carrying this component want their semantic tokens (re)computed;
/// the result is written into the wrapped `Vec`.
#[derive(Component, AsRef, Deref, AsMut, DerefMut, Debug)]
pub struct HighlightRequest(pub Vec<SemanticToken>);

#[derive(ScheduleLabel, Clone, Eq, PartialEq, Debug, Hash)]
pub struct Label;

/// Registers the semantic-token schedule: first derive a type per token, then
/// fold those types into delta-encoded LSP `SemanticToken`s.
pub fn setup_world(world: &mut World) {
    let mut semantic_tokens = bevy_ecs::schedule::Schedule::new(Label);
    semantic_tokens.add_systems((
        basic_semantic_tokens,
        semantic_tokens_system.after(basic_semantic_tokens),
    ));
    world.add_schedule(semantic_tokens);
}
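
// A minimal usage sketch, not part of the original module: the server is
// assumed to spawn a document entity that already carries the `Tokens` and
// `RopeC` components from `crate::prelude` (the `tokens` and `rope` values
// below are hypothetical stand-ins for however those get built):
//
//     let mut world = World::new();
//     world.init_resource::<SemanticTokensDict>();
//     setup_world(&mut world);
//     let doc = world.spawn((tokens, rope, HighlightRequest(Vec::new()))).id();
//     world.run_schedule(Label);
//     let highlights = &world.get::<HighlightRequest>(doc).unwrap().0;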

/// A run of identically-typed bytes, kept as absolute offsets until the final
/// delta encoding; `ty` is an index into the server's token-type legend.
struct TokenHelper {
    start: usize,
    length: usize,
    ty: usize,
}

pub type TokenTypesComponent = Wrapped<Vec<Spanned<SemanticTokenType>>>;
/// First pass: expand every parsed `Token` into spanned `SemanticTokenType`s
/// and attach them to the entity for the second pass to consume.
pub fn basic_semantic_tokens(
    mut query: Query<(Entity, &Tokens), With<HighlightRequest>>,
    mut commands: Commands,
) {
    for (e, tokens) in &mut query {
        let types: TokenTypesComponent = Wrapped(
            tokens
                .iter()
                .flat_map(|token| {
                    Token::span_tokens(token)
                        .into_iter()
                        .map(|(x, y)| spanned(x, y))
                })
                .collect(),
        );
        commands.entity(e).insert(types);
    }
}

/// Second pass: paint a per-byte buffer of token types, run-length encode it,
/// and emit the delta-encoded `SemanticToken`s the protocol expects.
pub fn semantic_tokens_system(
    mut query: Query<(&RopeC, &TokenTypesComponent, &mut HighlightRequest)>,
    res: Res<SemanticTokensDict>,
) {
    for (rope, types, mut req) in &mut query {
        let rope = &rope.0;
        // One slot per byte of the rope; the spans below are byte-indexed.
        let mut ts: Vec<Option<SemanticTokenType>> = vec![None; rope.len_bytes()];
        types.iter().for_each(|Spanned(ty, r)| {
            r.clone().for_each(|j| {
                if j < ts.len() {
                    ts[j] = Some(ty.clone())
                } else {
                    tracing::error!(
                        "Semantic token type {} (index={}) falls outside the rope (chars: {}, bytes: {})",
                        ty.as_str(),
                        j,
                        rope.len_chars(),
                        rope.len_bytes()
                    );
                }
            });
        });

        // Run-length encode the byte buffer into (start, length, type) runs,
        // dropping untyped gaps.
        let mut last = None;
        let mut start = 0;
        let mut out_tokens = Vec::new();
        for (i, ty) in ts.into_iter().enumerate() {
            if last != ty {
                if let Some(t) = last {
                    out_tokens.push(TokenHelper {
                        start,
                        length: i - start,
                        ty: res.get(&t).cloned().unwrap_or(0),
                    });
                }

                last = ty;
                start = i;
            }
        }

        // Flush the trailing run; `start` is a byte offset, so the remainder
        // is measured in bytes as well.
        if let Some(t) = last {
            out_tokens.push(TokenHelper {
                start,
                length: rope.len_bytes() - start,
                ty: res.get(&t).cloned().unwrap_or(0),
            });
        }

        // Convert to LSP's relative encoding: `delta_line` is relative to the
        // previous token, and `delta_start` only when both share a line.
        let mut pre_line = 0;
        let mut pre_start = 0;
        req.0 = out_tokens
            .into_iter()
            .flat_map(|token| {
                let line = rope.try_byte_to_line(token.start).ok()? as u32;
                let first = rope.try_line_to_char(line as usize).ok()? as u32;
                let start = rope.try_byte_to_char(token.start).ok()? as u32 - first;
                let delta_line = line - pre_line;
                let delta_start = if delta_line == 0 {
                    start - pre_start
                } else {
                    start
                };
                let ret = Some(SemanticToken {
                    delta_line,
                    delta_start,
                    length: token.length as u32,
                    token_type: token.ty as u32,
                    token_modifiers_bitset: 0,
                });
                pre_line = line;
                pre_start = start;
                ret
            })
            .collect();
    }
}
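
// A self-contained sketch, not part of the original module, illustrating the
// delta encoding `semantic_tokens_system` produces: `delta_line` is relative
// to the previous token, and `delta_start` is relative to the previous token
// only when both sit on the same line. It uses `lsp_types::SemanticToken`
// directly, so none of the ECS plumbing above is needed.
#[cfg(test)]
mod tests {
    use lsp_types::SemanticToken;

    #[test]
    fn delta_encoding_round_trip() {
        // Absolute tokens (line, column, length): (0, 0, 3), (0, 4, 5) and
        // (2, 1, 2), written in the delta form the protocol transmits.
        let tokens = [
            SemanticToken { delta_line: 0, delta_start: 0, length: 3, token_type: 0, token_modifiers_bitset: 0 },
            SemanticToken { delta_line: 0, delta_start: 4, length: 5, token_type: 1, token_modifiers_bitset: 0 },
            SemanticToken { delta_line: 2, delta_start: 1, length: 2, token_type: 0, token_modifiers_bitset: 0 },
        ];
        // Decode the way a client would, recovering absolute positions.
        let (mut line, mut col) = (0u32, 0u32);
        let mut absolute = Vec::new();
        for t in &tokens {
            line += t.delta_line;
            col = if t.delta_line == 0 { col + t.delta_start } else { t.delta_start };
            absolute.push((line, col, t.length));
        }
        assert_eq!(absolute, vec![(0, 0, 3), (0, 4, 5), (2, 1, 2)]);
    }
}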