// lsp_core/lib.rs
#![doc(
html_logo_url = "https://ajuvercr.github.io/semantic-web-lsp/assets/icons/favicon.png",
html_favicon_url = "https://ajuvercr.github.io/semantic-web-lsp/assets/icons/favicon.ico"
)]
//! Core and common implementation for the semantic web language server.
//!
//! Provides the backbone for the [semantic web lsp binary](../lsp_bin/index.html) and [semantic web
//! lsp wasm](../lsp_web/index.html).
//!
//! With the language server protocol, each different request is handled by an ECS schedule,
//! combining different systems together.
//! A system can generate new data and attach it to an entity, a document, or use this data to
//! respond to requests.
//!
//! Language specific implementations that handle things like tokenizing and parsing are
//! implemented in separate crates. The binary currently supports [Turtle](../lang_turtle/index.html), [JSON-LD](../lang_jsonld/index.html) and [SPARQL](../lang_sparql/index.html).
//! The goal is that each language at least generates [`Tokens`], [`Triples`] and
//! [`Prefixes`].
//! These components are then used to derive properties for autocompletion but also derive
//! [`TokenComponent`] and [`TripleComponent`] enabling completion.
//!
//! The different schedules can be found at [`prelude::feature`].
//!
//! ## Example add completion for all subjects that start with `a`
//! ```
//! use bevy_ecs::prelude::*;
//! use lsp_core::prelude::*;
//! # use sophia_api::dataset::Dataset;
//! # use sophia_api::prelude::Quad;
//!
//! // Define the extra data struct
//! #[derive(Component)]
//! struct MyCompletions {
//! subjects: Vec<String>,
//! }
//!
//! // Define the generating system
//! // Don't forget to add it to the ecs later
//! fn generate_my_completion(
//! // Only derive the completions when the document is parsed fully, aka is not Dirty
//! query: Query<(Entity, &Triples), (Changed<Triples>, Without<Dirty>)>,
//! mut commands: Commands,
//! ) {
//! for (e, triples) in &query {
//! let mut subjects = Vec::new();
//! for q in triples.quads().flatten() {
//! if q.s().as_str().starts_with('a') {
//! subjects.push(q.s().as_str().to_string());
//! }
//! }
//! commands.entity(e).insert(MyCompletions { subjects });
//! }
//! }
//!
//! // Define a system that adds these completions to the completion request
//! fn complete_my_completion(
//! mut query: Query<(
//! &TokenComponent, &TripleComponent, &MyCompletions, &mut CompletionRequest
//! )>,
//! ) {
//! for (token, this_triple, completions, mut request) in &mut query {
//! if this_triple.target == TripleTarget::Subject {
//! for my_completion in &completions.subjects {
//! request.push(
//! SimpleCompletion::new(
//! lsp_types::CompletionItemKind::FIELD,
//! my_completion.clone(),
//! lsp_types::TextEdit {
//! range: token.range.clone(),
//! new_text: my_completion.clone(),
//! }
//! )
//! )
//! }
//! }
//! }
//! }
//! ```
//! Note that [`Prefixes`] can help expand and shorten iri's in a document.
//!
//!
use bevy_ecs::{prelude::*, schedule::ScheduleLabel};
use prelude::SemanticTokensDict;
use systems::{init_onology_extractor, OntologyExtractor};
use crate::prelude::*;
/// Main language tower_lsp server implementation.
///
/// [`Backend`](struct@backend::Backend) implements [`LanguageServer`](tower_lsp::LanguageServer).
/// For each incoming request, a schedule is run on the main [`World`].
pub mod backend;
/// Handle platform specific implementations for fetching and spawning tasks.
pub mod client;
/// Common utils
///
/// Includes range transformations between [`std::ops::Range`] and [`lsp_types::Range`].
/// And commonly used [`Spanned`].
pub mod util;
/// Defines all common [`Component`]s and [`Resource`]s
///
/// In this [`World`], [Entity]s are documents and [`Components`](`Component`) are derived from these documents.
/// Different [`System`]s derive new [`Components`](`Component`) from existing [`Components`](`Component`), that are added to
/// the [`Entity`].
/// For example, if [`Triples`] are defined, [systems::derive_classes] will
/// derive [`DefinedClass`](struct@systems::DefinedClass) from them and add them to the [`Entity`].
pub mod components;
/// Hosts all common features of the semantic language server.
pub mod feature;
/// Defines common language traits
pub mod lang;
pub mod prelude;
pub mod systems;
/// Initializes a [`World`], including [`Resources`](`Resource`) and [`Schedules`].
/// All systems defined in [`crate`] are added to the [`World`].
pub fn setup_schedule_labels<C: Client + Resource>(world: &mut World) {
    // Shared resources read/written by the feature schedules below.
    world.init_resource::<SemanticTokensDict>();
    world.init_resource::<TypeHierarchy<'static>>();
    world.insert_resource(OntologyExtractor::new());
    // Register one schedule per LSP feature. Only `parse` is parameterized by
    // the client type `C`; the rest are client-agnostic.
    parse::setup_schedule::<C>(world);
    hover::setup_schedule(world);
    completion::setup_schedule(world);
    rename::setup_schedules(world);
    diagnostics::setup_schedule(world);
    save::setup_schedule(world);
    format::setup_schedule(world);
    inlay::setup_schedule(world);
    semantic::setup_world(world);
    // Empty schedule for deferred/async work; systems are expected to be added
    // elsewhere (see the `Tasks` label docs).
    world.add_schedule(Schedule::new(Tasks));
    // Startup schedule: currently only primes the ontology extractor.
    // NOTE(review): `init_onology_extractor` looks like a typo of "ontology",
    // but the name is defined in `systems` and must be used as spelled.
    let mut schedule = Schedule::new(Startup);
    schedule.add_systems(init_onology_extractor);
    world.add_schedule(schedule);
}
/// Event triggers when a document is opened
///
/// Example
/// ```rust
/// # use lsp_core::components::DynLang;
/// # use lsp_core::CreateEvent;
/// # use lsp_core::lang::LangHelper;
/// # use bevy_ecs::prelude::{Commands, Trigger, World, Component};
///
/// #[derive(Component)]
/// pub struct TurtleLang;
///
/// #[derive(Debug)]
/// pub struct TurtleHelper;
/// impl LangHelper for TurtleHelper {
/// fn keyword(&self) -> &[&'static str] {
/// &[
/// "@prefix",
/// "@base",
/// "a",
/// ]
/// }
/// }
///
/// let mut world = World::new();
/// // This example tells the ECS system that the document is Turtle,
/// // adding Turtle specific components
/// world.observe(|trigger: Trigger<CreateEvent>, mut commands: Commands| {
/// match &trigger.event().language_id {
/// Some(x) if x == "turtle" => {
/// commands
/// .entity(trigger.entity())
/// .insert((TurtleLang, DynLang(Box::new(TurtleHelper))));
/// return;
/// }
/// _ => {}
/// }
/// if trigger.event().url.as_str().ends_with(".ttl") {
/// commands
/// .entity(trigger.entity())
/// .insert((TurtleLang, DynLang(Box::new(TurtleHelper))));
/// return;
/// }
/// });
/// ```
///
#[derive(Event)]
pub struct CreateEvent {
    /// URL of the opened document; observers may inspect it (e.g. the file
    /// extension) to decide which language components to attach.
    pub url: lsp_types::Url,
    /// Language id reported by the client (e.g. `"turtle"`), if it sent one.
    pub language_id: Option<String>,
}
/// [`ScheduleLabel`] for the Tasks schedule.
///
/// This schedule is used for async tasks: things that should be done at some
/// point, but not necessarily during the current request.
///
/// For example, [`systems::handle_tasks`] spawns command queues sent with
/// [`CommandSender`].
#[derive(ScheduleLabel, Clone, Eq, PartialEq, Debug, Hash)]
pub struct Tasks;
/// [`ScheduleLabel`] for the Startup schedule.
///
/// [`setup_schedule_labels`] registers this schedule with
/// [`systems::init_onology_extractor`] added to it, so it is intended to run
/// once during server initialization.
#[derive(ScheduleLabel, Clone, Eq, PartialEq, Debug, Hash)]
pub struct Startup;