diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..ee27ae4db --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{md, markdown, eex}] +trim_trailing_whitespace = false diff --git a/.formatter.exs b/.formatter.exs new file mode 100644 index 000000000..90a08535c --- /dev/null +++ b/.formatter.exs @@ -0,0 +1,5 @@ +# Used by "mix format" +[ + inputs: ["mix.exs", "config/*.exs"], + subdirectories: ["apps/*"] +] diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..2de045d57 --- /dev/null +++ b/.gitignore @@ -0,0 +1,23 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/.iex.exs b/.iex.exs new file mode 100644 index 000000000..dfe5b925a --- /dev/null +++ b/.iex.exs @@ -0,0 +1,5 @@ +# Node.start(:"remsh@127.0.0.1") +# Node.set_cookie(:lexical) +# Node.connect(:"manager@127.0.0.1") + +project = Lexical.Project.new("file://#{File.cwd!()}/../ex_ls/") diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 000000000..73ef9baa4 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,4 @@ +elixir 1.14.3-otp-25 +erlang 25.2.1 +nodejs 12.16.3 +yarn 1.22.4 diff --git a/README.md b/README.md new file mode 100644 index 000000000..f9f1a3205 --- /dev/null +++ b/README.md @@ -0,0 +1,4 @@ +# Lexical + +**TODO: Add description** + diff --git a/apps/common/.formatter.exs b/apps/common/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/common/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/common/.gitignore b/apps/common/.gitignore new file mode 100644 index 000000000..d3a0cc386 --- /dev/null +++ b/apps/common/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +common-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/common/README.md b/apps/common/README.md new file mode 100644 index 000000000..e022e293b --- /dev/null +++ b/apps/common/README.md @@ -0,0 +1,21 @@ +# Common + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `common` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:common, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/common/lib/code_unit.ex b/apps/common/lib/code_unit.ex new file mode 100644 index 000000000..ed54af8d9 --- /dev/null +++ b/apps/common/lib/code_unit.ex @@ -0,0 +1,180 @@ +defmodule Lexical.CodeUnit do + @moduledoc """ + Code unit and offset conversions + + The LSP protocol speaks in positions, which defines where something happens in a document. + Positions have a start and an end, which are defined as code unit _offsets_ from the beginning + of a line. this module helps to convert between utf8, which most of the world speaks + natively, and utf16, which has been forced upon us by microsoft. + + Converting between offsets and code units is 0(n), and allocations only happen if a + multi-byte character is detected, at which point, only that character is allocated. + This exploits the fact that most source code consists of ascii characters, with at best, + sporadic multi-byte characters in it. Thus, the vast majority of documents will not require + any allocations at all. + """ + @type utf8_code_unit :: non_neg_integer() + @type utf16_code_unit :: non_neg_integer() + @type utf8_offset :: non_neg_integer() + @type utf16_offset :: non_neg_integer() + + @type error :: {:error, :misaligned} | {:error, :out_of_bounds} + + # public + + @doc """ + Converts a utf8 character offset into a utf16 character offset. 
This implementation + clamps the maximum size of an offset so that any initial character position can be + passed in and the offset returned will reflect the end of the line. + """ + @spec utf16_offset(String.t(), utf8_offset()) :: utf16_offset() + def utf16_offset(binary, character_position) do + do_utf16_offset(binary, character_position, 0) + end + + @doc """ + Converts a utf16 character offset into a utf8 character offset. This implementation + clamps the maximum size of an offset so that any initial character position can be + passed in and the offset returned will reflect the end of the line. + """ + @spec utf8_offset(String.t(), utf16_offset()) :: utf8_offset() + def utf8_offset(binary, character_position) do + do_utf8_offset(binary, character_position, 0) + end + + @spec to_utf8(String.t(), utf16_code_unit()) :: {:ok, utf8_code_unit()} | error + def to_utf8(binary, utf16_unit) do + do_to_utf8(binary, utf16_unit, 0) + end + + @spec to_utf16(String.t(), utf8_code_unit()) :: {:ok, utf16_code_unit()} | error + def to_utf16(binary, utf16_unit) do + do_to_utf16(binary, utf16_unit, 0) + end + + def count(:utf16, binary) do + do_count_utf16(binary, 0) + end + + # Private + + # UTF-16 + + def do_count_utf16(<<>>, count) do + count + end + + def do_count_utf16(<>, count) when c < 128 do + do_count_utf16(rest, count + 1) + end + + def do_count_utf16(<>, count) do + increment = + <> + |> byte_size() + |> div(2) + + do_count_utf16(rest, count + increment) + end + + defp do_utf16_offset(_, 0, offset) do + offset + end + + defp do_utf16_offset(<<>>, _, offset) do + # this clause pegs the offset at the end of the string + # no matter the character index + offset + end + + defp do_utf16_offset(<>, remaining, offset) when c < 128 do + do_utf16_offset(rest, remaining - 1, offset + 1) + end + + defp do_utf16_offset(<>, remaining, offset) do + s = <> + increment = utf16_size(s) + do_utf16_offset(rest, remaining - 1, offset + increment) + end + + defp do_to_utf16(_, 0, 
utf16_unit) do + {:ok, utf16_unit} + end + + defp do_to_utf16(_, utf8_unit, _) when utf8_unit < 0 do + {:error, :misaligned} + end + + defp do_to_utf16(<<>>, _remaining, _utf16_unit) do + {:error, :out_of_bounds} + end + + defp do_to_utf16(<>, utf8_unit, utf16_unit) when c < 128 do + do_to_utf16(rest, utf8_unit - 1, utf16_unit + 1) + end + + defp do_to_utf16(<>, utf8_unit, utf16_unit) do + utf8_string = <> + increment = utf16_size(utf8_string) + decrement = byte_size(utf8_string) + + do_to_utf16(rest, utf8_unit - decrement, utf16_unit + increment) + end + + defp utf16_size(binary) when is_binary(binary) do + binary + |> :unicode.characters_to_binary(:utf8, :utf16) + |> byte_size() + |> div(2) + end + + # UTF-8 + + defp do_utf8_offset(_, 0, offset) do + offset + end + + defp do_utf8_offset(<<>>, _, offset) do + # this clause pegs the offset at the end of the string + # no matter the character index + offset + end + + defp do_utf8_offset(<>, remaining, offset) when c < 128 do + do_utf8_offset(rest, remaining - 1, offset + 1) + end + + defp do_utf8_offset(<>, remaining, offset) do + s = <> + increment = utf8_size(s) + decrement = utf16_size(s) + do_utf8_offset(rest, remaining - decrement, offset + increment) + end + + defp do_to_utf8(_, 0, utf8_unit) do + {:ok, utf8_unit} + end + + defp do_to_utf8(_, utf_16_units, _) when utf_16_units < 0 do + {:error, :misaligned} + end + + defp do_to_utf8(<<>>, _remaining, _utf8_unit) do + {:error, :out_of_bounds} + end + + defp do_to_utf8(<>, utf16_unit, utf8_unit) when c < 128 do + do_to_utf8(rest, utf16_unit - 1, utf8_unit + 1) + end + + defp do_to_utf8(<>, utf16_unit, utf8_unit) do + utf8_code_units = byte_size(<>) + utf16_code_units = utf16_size(<>) + + do_to_utf8(rest, utf16_unit - utf16_code_units, utf8_unit + utf8_code_units) + end + + defp utf8_size(binary) when is_binary(binary) do + byte_size(binary) + end +end diff --git a/apps/common/lib/common.ex b/apps/common/lib/common.ex new file mode 100644 index 
000000000..8cf576b90 --- /dev/null +++ b/apps/common/lib/common.ex @@ -0,0 +1,18 @@ +defmodule Common do + @moduledoc """ + Documentation for `Common`. + """ + + @doc """ + Hello world. + + ## Examples + + iex> Common.hello() + :world + + """ + def hello do + :world + end +end diff --git a/apps/common/lib/lexical.ex b/apps/common/lib/lexical.ex new file mode 100644 index 000000000..dede88ff4 --- /dev/null +++ b/apps/common/lib/lexical.ex @@ -0,0 +1,3 @@ +defmodule Lexical do + @type uri :: String.t() +end diff --git a/apps/common/lib/process_cache.ex b/apps/common/lib/process_cache.ex new file mode 100644 index 000000000..6fa5210f0 --- /dev/null +++ b/apps/common/lib/process_cache.ex @@ -0,0 +1,82 @@ +defmodule Lexical.ProcessCache do + @moduledoc """ + A simple cache with a timeout that lives in the process dictionary + """ + + defmodule Entry do + defstruct [:value, :expiry] + + def new(value, timeout_ms) do + expiry_ts = now_ts() + timeout_ms + %__MODULE__{value: value, expiry: expiry_ts} + end + + def valid?(%__MODULE__{} = entry) do + now_ts() < entry.expiry + end + + defp now_ts do + System.os_time(:millisecond) + end + end + + @type key :: term() + @type fetch_result :: {:ok, term()} | :error + + @doc """ + Retrieves a value from the cache + If the value is not found, the default is returned + """ + @spec get(key()) :: term() | nil + @spec get(key(), term()) :: term() | nil + def get(key, default \\ nil) do + case fetch(key) do + {:ok, val} -> val + :error -> default + end + end + + @doc """ + Retrieves a value from the cache + If the value is not found, the default is returned + """ + @spec fetch(key()) :: fetch_result() + def fetch(key) do + case Process.get(key, :unset) do + %Entry{} = entry -> + if Entry.valid?(entry) do + {:ok, entry.value} + else + Process.delete(key) + :error + end + + :unset -> + :error + end + end + + @doc """ + Retrieves and optionally sets a value in the cache. + + Trans looks up a value in the cache under key. 
If that value isn't + found, the compute_fn is then executed, and its return value is set + in the cache. The cached value will live in the cache for `timeout` + milliseconds + """ + def trans(key, timeout_ms \\ 5000, compute_fn) do + case fetch(key) do + :error -> + set(key, timeout_ms, compute_fn) + + {:ok, result} -> + result + end + end + + defp set(key, timeout_ms, compute_fn) do + value = compute_fn.() + Process.put(key, Entry.new(value, timeout_ms)) + value + end +end diff --git a/apps/common/lib/project.ex b/apps/common/lib/project.ex new file mode 100644 index 000000000..40675bce6 --- /dev/null +++ b/apps/common/lib/project.ex @@ -0,0 +1,299 @@ +defmodule Lexical.Project do + @moduledoc """ + The representation of the current state of an elixir project. + + This struct contains all the information required to build a project and interrogate its configuration, + as well as business logic for how to change its attributes. + """ + alias Lexical.SourceFile + alias Lexical.LanguageServer + + defstruct root_uri: nil, + mix_exs_uri: nil, + mix_project?: false, + mix_env: nil, + mix_target: nil, + env_variables: %{} + + @type message :: String.t() + @type restart_notification :: {:restart, Logger.level(), String.t()} + @type t :: %__MODULE__{ + root_uri: Lexical.uri() | nil, + mix_exs_uri: Lexical.uri() | nil + # mix_env: atom(), + # mix_target: atom(), + # env_variables: %{String.t() => String.t()} + } + @type error_with_message :: {:error, message} + # Public + @spec new(Lexical.uri()) :: t + def new(root_uri) do + %__MODULE__{} + |> maybe_set_root_uri(root_uri) + |> maybe_set_mix_exs_uri() + end + + def name(%__MODULE__{} = project) do + project + |> root_path() + |> Path.split() + |> List.last() + end + + @spec root_path(t) :: Path.t() | nil + def root_path(%__MODULE__{root_uri: nil}) do + nil + end + + def root_path(%__MODULE__{} = project) do + SourceFile.Path.from_uri(project.root_uri) + end + + @spec project_path(t) :: Path.t() | nil + def 
project_path(%__MODULE__{root_uri: nil}) do + nil + end + + def project_path(%__MODULE__{} = project) do + SourceFile.Path.from_uri(project.root_uri) + end + + @spec mix_exs_path(t) :: Path.t() | nil + def mix_exs_path(%__MODULE__{mix_exs_uri: nil}) do + nil + end + + def mix_exs_path(%__MODULE__{mix_exs_uri: mix_exs_uri}) do + SourceFile.Path.from_uri(mix_exs_uri) + end + + @spec change_mix_env(t, String.t() | nil) :: + {:ok, t} | error_with_message() | restart_notification() + def change_mix_env(%__MODULE__{} = project, mix_env) do + set_mix_env(project, mix_env) + end + + @spec change_mix_target(t, String.t() | nil) :: + {:ok, t} | error_with_message() | restart_notification() + def change_mix_target(%__MODULE__{} = project, mix_target) do + set_mix_target(project, mix_target) + end + + @spec change_project_directory(t, String.t() | nil) :: + {:ok, t} | error_with_message() | restart_notification() + def change_project_directory(%__MODULE__{} = project, project_directory) do + set_working_uri(project, project_directory) + end + + @spec change_environment_variables(t, map() | nil) :: + {:ok, t} | error_with_message() | restart_notification() + def change_environment_variables(%__MODULE__{} = project, environment_variables) do + set_env_vars(project, environment_variables) + end + + # private + + defp maybe_set_root_uri(%__MODULE__{} = project, nil), + do: %__MODULE__{project | root_uri: nil} + + defp maybe_set_root_uri(%__MODULE__{} = project, "file://" <> _ = root_uri) do + root_path = + root_uri + |> SourceFile.Path.absolute_from_uri() + |> Path.expand() + + if File.exists?(root_path) do + expanded_uri = SourceFile.Path.to_uri(root_path) + %__MODULE__{project | root_uri: expanded_uri} + else + project + end + end + + defp maybe_set_mix_exs_uri(%__MODULE__{} = project) do + possible_mix_exs_path = + project + |> root_path() + |> find_mix_exs_path() + + if File.exists?(possible_mix_exs_path) do + %__MODULE__{ + project + | mix_exs_uri: 
SourceFile.Path.to_uri(possible_mix_exs_path), + mix_project?: true + } + else + project + end + end + + # Project Path + defp set_working_uri(%__MODULE__{root_uri: root_uri} = old_project, project_directory) + when is_binary(root_uri) and project_directory != "" do + # root_path = SourceFile.Path.absolute_from_uri(root_uri) + + # normalized_project_dir = + # if is_binary(project_directory) and project_directory != "" do + # root_path + # |> Path.join(project_directory) + # |> Path.expand(root_path) + # |> Path.absname() + # else + # root_path + # end + + # cond do + # not File.dir?(normalized_project_dir) -> + # {:error, "Project directory #{normalized_project_dir} does not exist"} + + # not subdirectory?(root_path, normalized_project_dir) -> + # message = + # "Project directory '#{normalized_project_dir}' is not a subdirectory of '#{root_path}'" + + # {:error, message} + + # is_nil(old_project.working_uri) and subdirectory?(root_path, normalized_project_dir) -> + # :ok = File.cd(normalized_project_dir) + + # mix_exs_path = find_mix_exs_path(normalized_project_dir) + # mix_project? = mix_exs_exists?(mix_exs_path) + + # mix_exs_uri = + # if mix_project? do + # SourceFile.Path.to_uri(mix_exs_path) + # else + # nil + # end + + # working_uri = SourceFile.Path.to_uri(normalized_project_dir) + + # new_project = %__MODULE__{ + # old_project + # | working_uri: working_uri, + # mix_project?: mix_project?, + # mix_exs_uri: mix_exs_uri + # } + + # {:ok, new_project} + + # project_path(old_project) != normalized_project_dir -> + # {:restart, :warning, "Project directory change detected. 
Lexical will restart"} + + # true -> + # {:ok, old_project} + # end + end + + defp set_working_uri(%__MODULE__{} = old_project, _) do + {:ok, old_project} + end + + # Mix env + + defp set_mix_env(%__MODULE__{mix_env: old_env} = old_project, new_env) + when is_binary(new_env) and new_env != "" do + case {old_env, String.to_existing_atom(new_env)} do + {nil, nil} -> + do_set_mix_env(:test) + {:ok, %__MODULE__{old_project | mix_env: :test}} + + {nil, new_env} -> + do_set_mix_env(new_env) + {:ok, %__MODULE__{old_project | mix_env: new_env}} + + {same, same} -> + {:ok, old_project} + + _ -> + {:restart, :warning, "Mix env change detected. Lexical will restart."} + end + end + + defp set_mix_env(%__MODULE__{mix_env: nil} = project, _) do + do_set_mix_env(:test) + + {:ok, %__MODULE__{project | mix_env: :test}} + end + + defp set_mix_env(%__MODULE__{} = project, _) do + {:ok, project} + end + + # Mix target + defp set_mix_target(%__MODULE__{} = old_project, new_target) + when is_binary(new_target) and new_target != "" do + case {old_project.mix_target, String.to_atom(new_target)} do + {nil, new_target} -> + do_set_mix_target(new_target) + {:ok, %__MODULE__{old_project | mix_target: new_target}} + + {same, same} -> + {:ok, old_project} + + _ -> + {:restart, :warning, "Mix target change detected. 
Lexical will restart"} + end + end + + defp set_mix_target(%__MODULE__{} = old_project, _) do + {:ok, old_project} + end + + defp do_set_mix_env(new_env) do + if Code.ensure_loaded?(Mix) do + # Mix.env(new_env) + end + end + + defp do_set_mix_target(new_mix_target) do + if Code.ensure_loaded?(Mix) do + # Mix.target(new_mix_target) + end + end + + # Environment variables + + def set_env_vars(%__MODULE__{} = old_project, %{} = env_vars) do + case {old_project.env_variables, env_vars} do + {nil, vars} when map_size(vars) == 0 -> + {:ok, %__MODULE__{old_project | env_variables: vars}} + + {nil, new_vars} -> + System.put_env(new_vars) + {:ok, %__MODULE__{old_project | env_variables: new_vars}} + + {same, same} -> + {:ok, old_project} + + _ -> + {:restart, :warning, "Environment variables have changed. Lexical needs to restart"} + end + end + + def set_env_vars(%__MODULE__{} = old_project, _) do + {:ok, old_project} + end + + defp subdirectory?(parent, possible_child) do + parent_path = Path.expand(parent) + child_path = Path.expand(possible_child, parent) + + String.starts_with?(child_path, parent_path) + end + + defp find_mix_exs_path(project_directory) do + case System.get_env("MIX_EXS") do + nil -> + Path.join(project_directory, "mix.exs") + + mix_exs -> + mix_exs + end + end + + defp mix_exs_exists?(nil), do: false + + defp mix_exs_exists?(mix_exs_path) do + File.exists?(mix_exs_path) + end +end diff --git a/apps/common/lib/source_file.ex b/apps/common/lib/source_file.ex new file mode 100644 index 000000000..d9546f9d2 --- /dev/null +++ b/apps/common/lib/source_file.ex @@ -0,0 +1,239 @@ +defmodule Lexical.SourceFile do + alias Lexical.SourceFile.Conversions + alias Lexical.SourceFile.Document + alias Lexical.SourceFile.Position + alias Lexical.SourceFile.Range + import Lexical.SourceFile.Line + + defstruct [:uri, :path, :version, dirty?: false, document: nil] + + @type t :: %__MODULE__{ + uri: String.t(), + version: pos_integer(), + dirty?: boolean, + document: 
Document.t(), + path: String.t() + } + + @type version :: pos_integer() + @type change_application_error :: {:error, {:invalid_range, map()}} + # public + @spec new(URI.t(), String.t(), pos_integer()) :: t + def new(uri, text, version) do + uri = Conversions.ensure_uri(uri) + + %__MODULE__{ + uri: uri, + version: version, + document: Document.new(text), + path: __MODULE__.Path.from_uri(uri) + } + end + + @spec size(t) :: non_neg_integer() + def size(%__MODULE__{} = source) do + Document.size(source.document) + end + + @spec mark_dirty(t) :: t + def mark_dirty(%__MODULE__{} = source) do + %__MODULE__{source | dirty?: true} + end + + @spec mark_clean(t) :: t + def mark_clean(%__MODULE__{} = source) do + %__MODULE__{source | dirty?: false} + end + + @spec fetch_text_at(t, version()) :: {:ok, String.t()} | :error + def fetch_text_at(%__MODULE{} = source, line_number) do + case fetch_line_at(source, line_number) do + {:ok, line(text: text)} -> {:ok, text} + _ -> :error + end + end + + @spec fetch_line_at(t, version()) :: {:ok, Line.t()} | :error + def fetch_line_at(%__MODULE__{} = source, line_number) do + case Document.fetch_line(source.document, line_number) do + {:ok, line} -> {:ok, line} + _ -> :error + end + end + + @spec apply_content_changes(t, pos_integer(), [map | ContentChangeEvent.t()]) :: + {:ok, t} | change_application_error() + def apply_content_changes(%__MODULE__{version: current_version}, new_version, _) + when new_version <= current_version do + {:error, :invalid_version} + end + + def apply_content_changes(%__MODULE__{} = source, _, []) do + {:ok, source} + end + + def apply_content_changes(%__MODULE__{} = source, version, changes) when is_list(changes) do + result = + Enum.reduce_while(changes, source, fn change, source -> + case apply_change(source, change) do + {:ok, new_source} -> + {:cont, new_source} + + error -> + {:halt, error} + end + end) + + case result do + %__MODULE__{} = source -> + source = mark_dirty(%__MODULE__{source | version: 
version}) + + {:ok, source} + + error -> + error + end + end + + def to_string(%__MODULE__{} = source) do + source + |> to_iodata() + |> IO.iodata_to_binary() + end + + # private + + defp line_count(%__MODULE__{} = source) do + Document.size(source.document) + end + + defp apply_change( + %__MODULE__{} = source, + %Range{start: %Position{} = start_pos, end: %Position{} = end_pos}, + new_text + ) do + start_line = start_pos.line + + new_lines_iodata = + cond do + start_line > line_count(source) -> + append_to_end(source, new_text) + + start_line <= 0 -> + prepend_to_beginning(source, new_text) + + true -> + apply_valid_edits(source, new_text, start_pos, end_pos) + end + + new_document = + new_lines_iodata + |> IO.iodata_to_binary() + |> Document.new() + + {:ok, %__MODULE__{source | document: new_document}} + end + + defp apply_change(%__MODULE__{} = source, %{range: %Range{}, text: _} = change) do + apply_change(source, change.range, change.text) + end + + defp apply_change(%__MODULE__{} = source, %{text: _text} = change) do + new_state = + source.uri + |> new(change.text, source.version) + |> increment_version() + + {:ok, new_state} + end + + defp apply_change(%__MODULE__{}, %{"range" => invalid_range}) do + {:error, {:invalid_range, invalid_range}} + end + + defp apply_change( + %__MODULE__{} = source, + %{"text" => new_text} + ) do + {:ok, %__MODULE__{source | document: Document.new(new_text)}} + end + + defp append_to_end(%__MODULE__{} = source, edit_text) do + [to_iodata(source), edit_text] + end + + defp prepend_to_beginning(%__MODULE__{} = source, edit_text) do + [edit_text, to_iodata(source)] + end + + defp apply_valid_edits(%__MODULE{} = source, edit_text, start_pos, end_pos) do + Document.reduce(source.document, [], fn line() = line, acc -> + case edit_action(line, edit_text, start_pos, end_pos) do + :drop -> + acc + + {:append, io_data} -> + [acc, io_data] + end + end) + end + + defp edit_action(line() = line, edit_text, %Position{} = start_pos, 
%Position{} = end_pos) do + %Position{line: start_line, character: start_char} = start_pos + %Position{line: end_line, character: end_char} = end_pos + + line(line_number: line_number, text: text, ending: ending) = line + + cond do + line_number < start_line -> + {:append, [text, ending]} + + line_number > end_line -> + {:append, [text, ending]} + + line_number == start_line && line_number == end_line -> + prefix_text = utf8_prefix(line, start_char) + suffix_text = utf8_suffix(line, end_char) + + {:append, [prefix_text, edit_text, suffix_text, ending]} + + line_number == start_line -> + prefix_text = utf8_prefix(line, start_char) + {:append, [prefix_text, edit_text]} + + line_number == end_line -> + suffix_text = utf8_suffix(line, end_char) + {:append, [suffix_text, ending]} + + true -> + :drop + end + end + + defp utf8_prefix(line(text: text), start_code_unit) do + length = max(0, start_code_unit) + binary_part(text, 0, length) + end + + defp utf8_suffix(line(text: text), start_code_unit) do + byte_count = byte_size(text) + start_index = min(start_code_unit, byte_count) + length = byte_count - start_index + + binary_part(text, start_index, length) + end + + defp to_iodata(%__MODULE__{} = source) do + Document.to_iodata(source.document) + end + + defp increment_version(%__MODULE__{} = source) do + version = + case source.version do + v when is_integer(v) -> v + 1 + _ -> 1 + end + + %__MODULE__{source | version: version} + end +end diff --git a/apps/common/lib/source_file/conversions.ex b/apps/common/lib/source_file/conversions.ex new file mode 100644 index 000000000..e37e91697 --- /dev/null +++ b/apps/common/lib/source_file/conversions.ex @@ -0,0 +1,20 @@ +defmodule Lexical.SourceFile.Conversions do + @moduledoc """ + Functions to convert between language server representations and elixir-native representations. 
+ + The LSP protocol defines positions in terms of their utf-16 representation (thanks, windows), + so when a document change comes in, we need to recalculate the positions of the change if + the line contains non-ascii characters. If it's a pure ascii line, then the positions + are the same in both utf-8 and utf-16, since they reference characters and not bytes. + """ + + def ensure_uri("file://" <> _ = uri), do: uri + + def ensure_uri(path), + do: Lexical.SourceFile.Path.to_uri(path) + + def ensure_path("file://" <> _ = uri), + do: Lexical.SourceFile.Path.from_uri(uri) + + def ensure_path(path), do: path +end diff --git a/apps/common/lib/source_file/document.ex b/apps/common/lib/source_file/document.ex new file mode 100644 index 000000000..799f29175 --- /dev/null +++ b/apps/common/lib/source_file/document.ex @@ -0,0 +1,104 @@ +defmodule Lexical.SourceFile.Document do + alias Lexical.SourceFile.LineParser + alias Lexical.SourceFile.Line + + import Line + defstruct lines: nil, starting_index: 1 + + @type t :: %__MODULE__{} + + def new(text, starting_index \\ 1) do + lines = + text + |> LineParser.parse(starting_index) + |> List.to_tuple() + + %__MODULE__{lines: lines, starting_index: starting_index} + end + + def to_iodata(%__MODULE__{} = document) do + reduce(document, [], fn line(text: text, ending: ending), acc -> + [acc | [text | ending]] + end) + end + + def to_string(%__MODULE__{} = document) do + document + |> to_iodata() + |> IO.iodata_to_binary() + end + + def size(%__MODULE__{} = document) do + tuple_size(document.lines) + end + + def fetch_line(%__MODULE__{lines: lines, starting_index: starting_index}, index) + when index - starting_index >= tuple_size(lines) do + :error + end + + def fetch_line(%__MODULE__{} = document, index) when is_integer(index) do + case elem(document.lines, index - document.starting_index) do + line() = line -> {:ok, line} + _ -> :error + end + end + + def reduce(%__MODULE__{} = document, initial, reducer_fn) do + size = 
size(document) + + if size == 0 do + initial + else + Enum.reduce(0..(size - 1), initial, fn index, acc -> + document.lines + |> elem(index) + |> reducer_fn.(acc) + end) + end + end +end + +defimpl Enumerable, for: Lexical.SourceFile.Document do + alias Lexical.SourceFile.Document + + def count(%Document{} = document) do + {:ok, Document.size(document)} + end + + def member?(%Document{}, _) do + {:error, Document} + end + + def reduce(%Document{} = document, acc, fun) do + tuple_reduce({0, tuple_size(document.lines), document.lines}, acc, fun) + end + + def slice(%Document{} = document) do + {:ok, Document.size(document), fn start, len -> do_slice(document, start, len) end} + end + + defp do_slice(%Document{} = document, start, 1) do + [elem(document.lines, start)] + end + + defp do_slice(%Document{} = document, start, length) do + Enum.map(start..(start + length - 1), &elem(document.lines, &1)) + end + + defp tuple_reduce(_, {:halt, acc}, _fun) do + {:halted, acc} + end + + defp tuple_reduce(current_state, {:suspend, acc}, fun) do + {:suspended, acc, &tuple_reduce(current_state, &1, fun)} + end + + defp tuple_reduce({same, same, _}, {:cont, acc}, _) do + {:done, acc} + end + + defp tuple_reduce({index, size, tuple}, {:cont, acc}, fun) do + tuple_reduce({index + 1, size, tuple}, fun.(elem(tuple, index), acc), fun) + end +end diff --git a/apps/common/lib/source_file/line.ex b/apps/common/lib/source_file/line.ex new file mode 100644 index 000000000..4b8c0b904 --- /dev/null +++ b/apps/common/lib/source_file/line.ex @@ -0,0 +1,5 @@ +defmodule Lexical.SourceFile.Line do + import Record + + defrecord :line, text: nil, ending: nil, line_number: 0, ascii?: true +end diff --git a/apps/common/lib/source_file/line_parser.ex b/apps/common/lib/source_file/line_parser.ex new file mode 100644 index 000000000..009c610bd --- /dev/null +++ b/apps/common/lib/source_file/line_parser.ex @@ -0,0 +1,102 @@ +defmodule Lexical.SourceFile.LineParser do + @moduledoc """ + A parser that 
parses a binary into `Line` records.any() + + The approach taken by the parser is to first go through the binary to find out where + the lines break, what their endings are and if the line is ascii. As we go through the + binary, we store this information, and when we're done, go back and split up the binary + using binary_slice. This performs 3x faster than iterating through the binary and collecting + IOlists that represent each line. + + I determines if a line is ascii (and what it really means is utf8 ascii) by checking to see if + each byte is greater than 0 and less than 128. UTF-16 files won't be marked as ascii, which + allows us to skip a lot of byte conversions later in the process. + """ + import Lexical.SourceFile.Line + + # it's important that "\r\n" comes before \r here, otherwise the generated pattern + # matches won't match. + @endings ["\r\n", "\r", "\n"] + @max_ascii_character 127 + + def parse(text, starting_index) do + text + |> traverse(starting_index) + |> Enum.reduce([], fn index, acc -> [extract_line(text, index) | acc] end) + end + + defp extract_line(text, {line_number, start, stop, is_ascii?, ending}) do + line_text = binary_part(text, start, stop) + line(line_number: line_number, text: line_text, ascii?: is_ascii?, ending: ending) + end + + defp traverse(text, starting_index) do + traverse(text, 0, starting_index, 0, true, []) + end + + for ending <- @endings, + ending_length = byte_size(ending) do + defp traverse( + <>, + current_index, + line_number, + line_start_index, + is_ascii?, + acc + ) do + line_length = current_index - line_start_index + line_index = {line_number, line_start_index, line_length, is_ascii?, unquote(ending)} + [line_index | acc] + end + + defp traverse( + <>, + current_index, + line_number, + line_start_index, + is_ascii?, + acc + ) do + line_length = current_index - line_start_index + + acc = [{line_number, line_start_index, line_length, is_ascii?, unquote(ending)} | acc] + next_index = current_index + 
unquote(ending_length) + traverse(rest, next_index, line_number + 1, next_index, is_ascii?, acc) + end + end + + defp traverse( + <>, + current_index, + line_number, + line_start_index, + is_ascii?, + acc + ) do + # Note, this heuristic assumes the NUL character won't occur in elixir source files. + # if this isn't true, then we need a better heuristic for detecting utf16 text. + is_still_ascii? = is_ascii? and c <= @max_ascii_character and c > 0 + + traverse( + rest, + current_index + 1, + line_number, + line_start_index, + is_still_ascii?, + acc + ) + end + + defp traverse(<<>>, same_index, _line_number, same_index, _is_ascii, acc) do + # this is a line at the end of the document with no content + # I'm choosing not to represent it as a line to simplify things + # and to make the line count what we expect + acc + end + + defp traverse(<<>>, current_index, line_number, line_start_index, is_ascii?, acc) do + # file doesn't end with a newline + line_length = current_index - line_start_index + [{line_number, line_start_index, line_length, is_ascii?, ""} | acc] + end +end diff --git a/apps/common/lib/source_file/path.ex b/apps/common/lib/source_file/path.ex new file mode 100644 index 000000000..ac91a501f --- /dev/null +++ b/apps/common/lib/source_file/path.ex @@ -0,0 +1,115 @@ +defmodule Lexical.SourceFile.Path do + @file_scheme "file" + + @doc """ + Returns path from URI in a way that handles windows file:///c%3A/... 
URLs correctly + """ + def from_uri(%URI{scheme: @file_scheme, path: nil}) do + # treat no path as root path + convert_separators_to_native("/") + end + + def from_uri(%URI{scheme: @file_scheme, path: path, authority: authority}) + when path != "" and authority not in ["", nil] do + # UNC path + convert_separators_to_native("//#{URI.decode(authority)}#{URI.decode(path)}") + end + + def from_uri(%URI{scheme: @file_scheme, path: path}) do + decoded_path = URI.decode(path) + + if windows?() and String.match?(decoded_path, ~r/^\/[a-zA-Z]:/) do + # Windows drive letter path + # drop leading `/` and downcase drive letter + <<"/", letter::binary-size(1), path_rest::binary>> = decoded_path + "#{String.downcase(letter)}#{path_rest}" + else + decoded_path + end + |> convert_separators_to_native() + end + + def from_uri(%URI{scheme: scheme}) do + raise ArgumentError, message: "unexpected URI scheme #{inspect(scheme)}" + end + + def from_uri(uri) do + uri |> URI.parse() |> from_uri() + end + + def absolute_from_uri(uri) do + uri |> from_uri |> Path.absname() + end + + def to_uri(path) do + path = + path + |> Path.expand() + |> convert_separators_to_universal() + + {authority, path} = + case path do + "//" <> rest -> + # UNC path - extract authority + case String.split(rest, "/", parts: 2) do + [_] -> + # no path part, use root path + {rest, "/"} + + [authority, ""] -> + # empty path part, use root path + {authority, "/"} + + [authority, p] -> + {authority, "/" <> p} + end + + "/" <> _rest -> + {"", path} + + other -> + # treat as relative to root path + {"", "/" <> other} + end + + %URI{ + scheme: @file_scheme, + authority: authority |> URI.encode(), + # file system paths allow reserved URI characters that need to be escaped + # the exact rules are complicated but for simplicity we escape all reserved except `/` + # that's what https://github.com/microsoft/vscode-uri does + path: path |> URI.encode(&(&1 == ?/ or URI.char_unreserved?(&1))) + } + |> URI.to_string() + end + + 
defp convert_separators_to_native(path) do + if windows?() do + # convert path separators from URI to Windows + String.replace(path, ~r/\//, "\\") + else + path + end + end + + defp convert_separators_to_universal(path) do + if windows?() do + # convert path separators from Windows to URI + String.replace(path, ~r/\\/, "/") + else + path + end + end + + defp windows? do + case os_type() do + {:win32, _} -> true + _ -> false + end + end + + # this is here to be mocked in tests + defp os_type do + :os.type() + end +end diff --git a/apps/common/lib/source_file/position.ex b/apps/common/lib/source_file/position.ex new file mode 100644 index 000000000..1f04099d5 --- /dev/null +++ b/apps/common/lib/source_file/position.ex @@ -0,0 +1,7 @@ +defmodule Lexical.SourceFile.Position do + defstruct [:line, :character] + + def new(line, character) when is_number(line) and is_number(character) do + %__MODULE__{line: line, character: character} + end +end diff --git a/apps/common/lib/source_file/range.ex b/apps/common/lib/source_file/range.ex new file mode 100644 index 000000000..afa646eeb --- /dev/null +++ b/apps/common/lib/source_file/range.ex @@ -0,0 +1,9 @@ +defmodule Lexical.SourceFile.Range do + alias Lexical.SourceFile.Position + + defstruct start: nil, end: nil + + def new(%Position{} = start_pos, %Position{} = end_pos) do + %__MODULE__{start: start_pos, end: end_pos} + end +end diff --git a/apps/common/lib/source_file/store.ex b/apps/common/lib/source_file/store.ex new file mode 100644 index 000000000..317f25a4e --- /dev/null +++ b/apps/common/lib/source_file/store.ex @@ -0,0 +1,295 @@ +defmodule Lexical.SourceFile.Store do + defmodule State do + alias Lexical.SourceFile + alias Lexical.SourceFile.Conversions + alias Lexical.SourceFile.Store + require Logger + + defstruct source_files: %{}, temp_files: %{}, temporary_open_refs: %{} + @type t :: %__MODULE__{} + def new do + %__MODULE__{} + end + + @spec fetch(t, Store.uri()) :: {:ok, t()} | {:error, :not_open} + def 
fetch(%__MODULE__{} = store, uri) do + with :error <- Map.fetch(store.source_files, uri), + :error <- Map.fetch(store.temp_files, uri) do + {:error, :not_open} + end + end + + @spec save(t, Store.uri()) :: {:ok, t()} | {:error, :not_open} + def save(%__MODULE__{} = store, uri) do + case Map.fetch(store.source_files, uri) do + {:ok, source_file} -> + source_file = SourceFile.mark_clean(source_file) + store = %__MODULE__{store | source_files: Map.put(store.source_files, uri, source_file)} + {:ok, store} + + :error -> + {:error, :not_open} + end + end + + @spec open(t, Store.uri(), String.t(), pos_integer()) :: {:ok, t} | {:error, :already_open} + def open(%__MODULE__{} = store, uri, text, version) do + case Map.fetch(store.source_files, uri) do + {:ok, _} -> + {:error, :already_open} + + :error -> + source_file = SourceFile.new(uri, text, version) + store = %__MODULE__{store | source_files: Map.put(store.source_files, uri, source_file)} + {:ok, store} + end + end + + def open?(%__MODULE__{} = store, uri) do + Map.has_key?(store.source_files, uri) or Map.has_key?(store.temp_files, uri) + end + + def close(%__MODULE__{} = store, uri) do + case Map.pop(store.source_files, uri) do + {nil, _store} -> + {:error, :not_open} + + {_, source_files} -> + store = %__MODULE__{store | source_files: source_files} + {:ok, store} + end + end + + def get_and_update(%__MODULE__{} = store, uri, updater_fn) do + with {:ok, source_file} <- fetch(store, uri), + {:ok, updated_source} <- updater_fn.(source_file) do + new_store = %__MODULE__{ + store + | source_files: Map.put(store.source_files, uri, updated_source) + } + + {:ok, updated_source, new_store} + else + error -> + normalize_error(error) + end + end + + def update(%__MODULE__{} = store, uri, updater_fn) do + with {:ok, _, new_store} <- get_and_update(store, uri, updater_fn) do + {:ok, new_store} + end + end + + def open_temporarily(%__MODULE__{} = store, path_or_uri, timeout) do + uri = Conversions.ensure_uri(path_or_uri) + path = 
Conversions.ensure_path(path_or_uri) + + with {:ok, contents} <- File.read(path) do + source_file = SourceFile.new(uri, contents, 0) + ref = schedule_unload(uri, timeout) + + new_refs = + store + |> maybe_cancel_old_ref(uri) + |> Map.put(uri, ref) + + temp_files = Map.put(store.temp_files, uri, source_file) + + new_store = %__MODULE__{store | temp_files: temp_files, temporary_open_refs: new_refs} + + {:ok, source_file, new_store} + end + end + + def extend_timeout(%__MODULE__{} = store, uri, timeout) do + case store.temporary_open_refs do + %{^uri => ref} -> + Process.cancel_timer(ref) + new_ref = schedule_unload(uri, timeout) + new_open_refs = Map.put(store.temporary_open_refs, uri, new_ref) + %__MODULE__{store | temporary_open_refs: new_open_refs} + + _ -> + store + end + end + + def unload(%__MODULE__{} = store, uri) do + new_refs = Map.delete(store.temporary_open_refs, uri) + temp_files = Map.delete(store.temp_files, uri) + + %__MODULE__{ + store + | temp_files: temp_files, + temporary_open_refs: new_refs + } + end + + defp maybe_cancel_old_ref(%__MODULE__{} = store, uri) do + {_, new_refs} = + Map.get_and_update(store.temporary_open_refs, uri, fn + nil -> + :pop + + old_ref when is_reference(old_ref) -> + Process.cancel_timer(old_ref) + :pop + end) + + new_refs + end + + defp schedule_unload(uri, timeout) do + Process.send_after(self(), {:unload, uri}, timeout) + end + + defp normalize_error(:error), do: {:error, :not_open} + defp normalize_error(e), do: e + end + + alias Lexical.ProcessCache + alias Lexical.SourceFile + alias Lexical.SourceFile.Conversions + + @type t :: %State{} + + @type uri :: String.t() + @type updater :: (SourceFile.t() -> {:ok, SourceFile.t()} | {:error, any()}) + + use GenServer + + @spec fetch(uri()) :: {:ok, SourceFile.t()} | :error + def fetch(uri) do + GenServer.call(__MODULE__, {:fetch, uri}) + end + + @spec save(uri()) :: :ok | {:error, :not_open} + def save(uri) do + GenServer.call(__MODULE__, {:save, uri}) + end + + @spec 
open?(uri()) :: boolean() + def open?(uri) do + GenServer.call(__MODULE__, {:open?, uri}) + end + + @spec open(uri(), String.t(), pos_integer()) :: :ok | {:error, :already_open} + def open(uri, text, version) do + GenServer.call(__MODULE__, {:open, uri, text, version}) + end + + def open_temporary(uri, timeout \\ 5000) do + ProcessCache.trans(uri, 50, fn -> + GenServer.call(__MODULE__, {:open_temporarily, uri, timeout}) + end) + end + + @spec close(uri()) :: :ok | {:error, :not_open} + def close(uri) do + GenServer.call(__MODULE__, {:close, uri}) + end + + @spec get_and_update(uri(), updater()) :: {SourceFile.t(), State.t()} + def get_and_update(uri, update_fn) do + GenServer.call(__MODULE__, {:get_and_update, uri, update_fn}) + end + + @spec update(uri(), updater()) :: :ok | {:error, any()} + def update(uri, update_fn) do + GenServer.call(__MODULE__, {:update, uri, update_fn}) + end + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def init(_) do + {:ok, State.new()} + end + + def handle_call({:fetch, uri}, _, %State{} = state) do + {reply, new_state} = + case State.fetch(state, uri) do + {:ok, _} = success -> {success, state} + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:save, uri}, _from, %State{} = state) do + {reply, new_state} = + case State.save(state, uri) do + {:ok, _} = success -> success + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:open, uri, text, version}, _from, %State{} = state) do + {reply, new_state} = + case State.open(state, uri, text, version) do + {:ok, _} = success -> success + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:open_temporarily, uri, timeout_ms}, _, %State{} = state) do + {reply, new_state} = + with {:error, :not_open} <- State.fetch(state, uri), + {:ok, source_file, new_state} <- State.open_temporarily(state, uri, timeout_ms) do + {{:ok, source_file}, 
new_state} + else + {:ok, source_file} -> + new_state = State.extend_timeout(state, uri, timeout_ms) + {{:ok, source_file}, new_state} + + error -> + {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:open?, uri}, _from, %State{} = state) do + {:reply, State.open?(state, uri), state} + end + + def handle_call({:close, uri}, _from, %State{} = state) do + {reply, new_state} = + case State.close(state, uri) do + {:ok, _} = success -> success + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:get_and_update, uri, update_fn}, _from, %State{} = state) do + {reply, new_state} = + case State.get_and_update(state, uri, update_fn) do + {:ok, updated_source, new_state} -> {{:ok, updated_source}, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:update, uri, updater_fn}, _, %State{} = state) do + {reply, new_state} = + case State.update(state, uri, updater_fn) do + {:ok, _} = success -> success + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_info({:unload, uri}, %State{} = state) do + {:noreply, State.unload(state, uri)} + end +end diff --git a/apps/common/mix.exs b/apps/common/mix.exs new file mode 100644 index 000000000..38be673b2 --- /dev/null +++ b/apps/common/mix.exs @@ -0,0 +1,33 @@ +defmodule Common.MixProject do + use Mix.Project + + def project do + [ + app: :common, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, + # {:sibling_app_in_umbrella, in_umbrella: true} + ] + end +end diff --git a/apps/common/test/common_test.exs b/apps/common/test/common_test.exs new file mode 100644 index 000000000..baaef911b --- /dev/null +++ b/apps/common/test/common_test.exs @@ -0,0 +1,8 @@ +defmodule CommonTest do + use ExUnit.Case + doctest Common + + test "greets the world" do + assert Common.hello() == :world + end +end diff --git a/apps/common/test/test_helper.exs b/apps/common/test/test_helper.exs new file mode 100644 index 000000000..869559e70 --- /dev/null +++ b/apps/common/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start() diff --git a/apps/protocol/.formatter.exs b/apps/protocol/.formatter.exs new file mode 100644 index 000000000..11099c76f --- /dev/null +++ b/apps/protocol/.formatter.exs @@ -0,0 +1,15 @@ +# Used by "mix format" +proto_dsl = [ + defalias: 1, + defenum: 1, + defnotification: 2, + defnotification: 3, + defrequest: 3, + defresponse: 1, + deftype: 1 +] + +[ + locals_without_parens: proto_dsl, + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/protocol/.gitignore b/apps/protocol/.gitignore new file mode 100644 index 000000000..d40a35eb6 --- /dev/null +++ b/apps/protocol/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). 
+protocol-*.tar + +# Temporary files, for example, from tests. +/tmp/ diff --git a/apps/protocol/README.md b/apps/protocol/README.md new file mode 100644 index 000000000..b6af92161 --- /dev/null +++ b/apps/protocol/README.md @@ -0,0 +1,21 @@ +# Lexical.Protocol + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `protocol` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:protocol, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/protocol/lib/lexical/protocol.ex b/apps/protocol/lib/lexical/protocol.ex new file mode 100644 index 000000000..26d43d380 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol.ex @@ -0,0 +1,18 @@ +defmodule Lexical.Protocol do + @moduledoc """ + Documentation for `Lexical.Protocol`. + """ + + @doc """ + Hello world. + + ## Examples + + iex> Lexical.Protocol.hello() + :world + + """ + def hello do + :world + end +end diff --git a/apps/protocol/lib/lexical/protocol/conversions.ex b/apps/protocol/lib/lexical/protocol/conversions.ex new file mode 100644 index 000000000..aa62d99e7 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/conversions.ex @@ -0,0 +1,134 @@ +defmodule Lexical.Protocol.Conversions do + @moduledoc """ + Functions to convert between language server representations and elixir-native representations. + + The LSP protocol defines positions in terms of their utf-16 representation (thanks, windows), + so when a document change comes in, we need to recalculate the positions of the change if + the line contains non-ascii characters. If it's a pure ascii line, then the positions + are the same in both utf-8 and utf-16, since they reference characters and not bytes. 
+ """ + alias Lexical.CodeUnit + alias Lexical.SourceFile + alias Lexical.SourceFile.Line + alias Lexical.SourceFile.Document + alias Lexical.SourceFile.Range, as: ElixirRange + alias Lexical.SourceFile.Position, as: ElixirPosition + alias Lexical.Protocol.Types.Position, as: LSPosition + alias Lexical.Protocol.Types.Range, as: LSRange + + import Line + + @elixir_ls_index_base 1 + + def to_elixir( + %LSRange{} = ls_range, + %SourceFile{} = source + ) do + with {:ok, start_pos} <- to_elixir(ls_range.start, source.document), + {:ok, end_pos} <- to_elixir(ls_range.end, source.document) do + {:ok, %ElixirRange{start: start_pos, end: end_pos}} + end + end + + def to_elixir(%LSPosition{} = position, %SourceFile{} = source_file) do + to_elixir(position, source_file.document) + end + + def to_elixir(%ElixirPosition{} = position, _) do + position + end + + def to_elixir(%LSPosition{} = position, %Document{} = document) do + document_size = Document.size(document) + # we need to handle out of bounds line numbers, because it's possible to build a document + # by starting with an empty document and appending to the beginning of it, with a start range of + # {0, 0} and and end range of {1, 0} (replace the first line) + document_line_number = min(position.line, document_size) + elixir_line_number = document_line_number + @elixir_ls_index_base + ls_character = position.character + + cond do + document_line_number == document_size and ls_character == 0 -> + # allow a line one more than the document size, as long as the character is 0. 
+ # that means we're operating on the last line of the document + + {:ok, ElixirPosition.new(elixir_line_number, ls_character)} + + position.line >= document_size -> + # they've specified something outside of the document clamp it down so they can append at the + # end + {:ok, ElixirPosition.new(elixir_line_number, 0)} + + true -> + with {:ok, line} <- Document.fetch_line(document, elixir_line_number), + {:ok, elixir_character} <- extract_elixir_character(position, line) do + {:ok, ElixirPosition.new(elixir_line_number, elixir_character)} + end + end + end + + def to_elixir(%{range: %{start: start_pos, end: end_pos}}, _source_file) do + # this is actually an elixir sense range... note that it's a bare map with + # column keys rather than character keys. + %{line: start_line, column: start_col} = start_pos + %{line: end_line, column: end_col} = end_pos + + range = %ElixirRange{ + start: ElixirPosition.new(start_line, start_col - 1), + end: ElixirPosition.new(end_line, end_col - 1) + } + + {:ok, range} + end + + def to_lsp(%ElixirRange{} = ex_range, %SourceFile{} = source) do + with {:ok, start_pos} <- to_lsp(ex_range.start, source.document), + {:ok, end_pos} <- to_lsp(ex_range.end, source.document) do + {:ok, %LSRange{start: start_pos, end: end_pos}} + end + end + + def to_lsp(%ElixirPosition{} = position, %SourceFile{} = source_file) do + to_lsp(position, source_file.document) + end + + def to_lsp(%ElixirPosition{} = position, %Document{} = document) do + with {:ok, line} <- Document.fetch_line(document, position.line), + {:ok, lsp_character} <- extract_lsp_character(position, line) do + ls_pos = + LSPosition.new(character: lsp_character, line: position.line - @elixir_ls_index_base) + + {:ok, ls_pos} + end + end + + def to_lsp(%LSPosition{} = position, _) do + {:ok, position} + end + + # Private + + defp extract_lsp_character(%ElixirPosition{} = position, line(ascii?: true, text: text)) do + character = min(position.character, byte_size(text)) + {:ok, character} + 
end + + defp extract_lsp_character(%ElixirPosition{} = position, line(text: utf8_text)) do + with {:ok, code_unit} <- CodeUnit.to_utf16(utf8_text, position.character) do + character = min(code_unit, CodeUnit.count(:utf16, utf8_text)) + {:ok, character} + end + end + + defp extract_elixir_character(%LSPosition{} = position, line(ascii?: true, text: text)) do + character = min(position.character, byte_size(text)) + {:ok, character} + end + + defp extract_elixir_character(%LSPosition{} = position, line(text: utf8_text)) do + with {:ok, code_unit} <- CodeUnit.to_utf8(utf8_text, position.character) do + character = min(code_unit, byte_size(utf8_text)) + {:ok, character} + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/id.ex b/apps/protocol/lib/lexical/protocol/id.ex new file mode 100644 index 000000000..4e3a28d35 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/id.ex @@ -0,0 +1,7 @@ +defmodule Lexical.Protocol.Id do + def next do + [:monotonic, :positive] + |> System.unique_integer() + |> to_string() + end +end diff --git a/apps/protocol/lib/lexical/protocol/json_rpc.ex b/apps/protocol/lib/lexical/protocol/json_rpc.ex new file mode 100644 index 000000000..369090ef3 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/json_rpc.ex @@ -0,0 +1,23 @@ +defmodule Lexical.Protocol.JsonRpc do + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Protocol.Notifications + + def decode(message_string) do + with {:ok, json_map} <- Jason.decode(message_string) do + do_decode(json_map) + end + end + + def encode(message) do + Jason.encode(message) + end + + defp do_decode(%{"method" => method, "id" => id} = request) do + Requests.decode(method, request) + end + + defp do_decode(%{"method" => method} = notification) do + Notifications.decode(method, notification) + end +end diff --git a/apps/protocol/lib/lexical/protocol/notifications.ex b/apps/protocol/lib/lexical/protocol/notifications.ex new file mode 100644 index 
000000000..f05ad5ad7 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/notifications.ex @@ -0,0 +1,90 @@ +defmodule Lexical.Protocol.Notifications do + alias Lexical.Protocol.Proto + alias Lexical.Protocol.Types + + defmodule Initialized do + use Proto + defnotification("initialized", :shared) + end + + defmodule Cancel do + use Proto + + defnotification("$/cancelRequest", :shared, id: integer()) + end + + defmodule DidOpen do + use Proto + + defnotification("textDocument/didOpen", :shared, text_document: Types.TextDocument.Item) + end + + defmodule DidClose do + use Proto + + defnotification("textDocument/didClose", :shared, text_document: Types.TextDocument.Identifier) + end + + defmodule DidChange do + use Proto + + defnotification("textDocument/didChange", :shared, + text_document: Types.TextDocument.Versioned.Identifier, + content_changes: + list_of( + one_of([ + Types.TextDocument.ContentChangeEvent.TextDocumentContentChangeEvent, + Types.TextDocument.ContentChangeEvent.TextDocumentContentChangeEvent1 + ]) + ) + ) + end + + defmodule DidChangeConfiguration do + use Proto + + defnotification("workspace/didChangeConfiguration", :shared, settings: map_of(any())) + end + + defmodule DidChangeWatchedFiles do + use Proto + + defnotification("workspace/didChangeWatchedFiles", :shared, changes: list_of(Types.FileEvent)) + end + + defmodule DidSave do + use Proto + + defnotification("textDocument/didSave", :shared, text_document: Types.TextDocument.Identifier) + end + + defmodule PublishDiagnostics do + use Proto + + defnotification("textDocument/publishDiagnostics", :shared, + uri: string(), + version: optional(integer()), + diagnostics: list_of(Types.Diagnostic) + ) + end + + defmodule LogMessage do + use Proto + + defnotification("window/logMessage", :shared, + message: string(), + type: Types.Message.Type + ) + end + + defmodule ShowMessage do + use Proto + + defnotification("window/showMessage", :shared, + message: string(), + type: Types.Message.Type + ) + 
end + + use Proto, decoders: :notifications +end diff --git a/apps/protocol/lib/lexical/protocol/proto.ex b/apps/protocol/lib/lexical/protocol/proto.ex new file mode 100644 index 000000000..684585c31 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto.ex @@ -0,0 +1,48 @@ +defmodule Lexical.Protocol.Proto do + alias Lexical.Protocol.Proto.Decoders + + defmacro __using__([]) do + quote location: :keep do + alias Lexical.Protocol.Proto + alias Lexical.Protocol.Proto.LspTypes + + import Lexical.Protocol.Proto.TypeFunctions + import Proto.Alias, only: [defalias: 1] + import Proto.Enum, only: [defenum: 1] + import Proto.Notification, only: [defnotification: 2, defnotification: 3] + import Proto.Request, only: [defrequest: 3] + import Proto.Response, only: [defresponse: 1] + import Proto.Type, only: [deftype: 1] + end + end + + defmacro __using__(opts) when is_list(opts) do + function_name = + case Keyword.get(opts, :decoders) do + :notifications -> + :for_notifications + + :requests -> + :for_requests + + _ -> + invalid_decoder!(__CALLER__) + end + + quote do + @before_compile {Decoders, unquote(function_name)} + end + end + + defmacro __using__(_) do + invalid_decoder!(__CALLER__) + end + + defp invalid_decoder!(caller) do + raise CompileError.exception( + description: "Invalid decoder type. 
Must be either :notifications or :requests", + file: caller.file, + line: caller.line + ) + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/alias.ex b/apps/protocol/lib/lexical/protocol/proto/alias.ex new file mode 100644 index 000000000..78015ddde --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/alias.ex @@ -0,0 +1,22 @@ +defmodule Lexical.Protocol.Proto.Alias do + alias Lexical.Protocol.Proto.CompileMetadata + + defmacro defalias(alias_definition) do + caller_module = __CALLER__.module + CompileMetadata.add_type_alias_module(caller_module) + + quote location: :keep do + def definition do + unquote(alias_definition) + end + + def __meta__(:type) do + :type_alias + end + + def __meta__(:param_names) do + [] + end + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/compile_metadata.ex b/apps/protocol/lib/lexical/protocol/proto/compile_metadata.ex new file mode 100644 index 000000000..e5ca2b7a5 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/compile_metadata.ex @@ -0,0 +1,62 @@ +defmodule Lexical.Protocol.Proto.CompileMetadata do + @moduledoc """ + Compile-time storage of protocol metadata + """ + + @notification_modules_key {__MODULE__, :notification_modules} + @type_modules_key {__MODULE__, :type_modules} + @type_alias_modules_key {__MODULE__, :type_alias_modules} + @request_modules_key {__MODULE__, :request_modules} + @response_modules_key {__MODULE__, :response_modules} + + def notification_modules do + :persistent_term.get(@notification_modules_key, []) + end + + def request_modules do + :persistent_term.get(@request_modules_key, []) + end + + def response_modules do + :persistent_term.get(@response_modules_key, []) + end + + def type_alias_modules do + :persistent_term.get(@type_alias_modules_key) + end + + def type_modules do + :persistent_term.get(@type_modules_key) + end + + def add_notification_module(module) do + add_module(@notification_modules_key, module) + end + + def add_request_module(module) 
do + add_module(@request_modules_key, module) + end + + def add_response_module(module) do + add_module(@response_modules_key, module) + end + + def add_type_module(module) do + add_module(@type_modules_key, module) + end + + def add_type_alias_module(module) do + add_module(@type_alias_modules_key, module) + end + + defp update(key, initial_value, update_fn) do + case :persistent_term.get(key, :not_found) do + :not_found -> :persistent_term.put(key, initial_value) + found -> :persistent_term.put(key, update_fn.(found)) + end + end + + defp add_module(key, module) do + update(key, [module], fn old_list -> [module | old_list] end) + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/convert.ex b/apps/protocol/lib/lexical/protocol/proto/convert.ex new file mode 100644 index 000000000..9a51cfa74 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/convert.ex @@ -0,0 +1,95 @@ +defmodule Lexical.Protocol.Proto.Convert do + alias Lexical.Protocol.Types + alias Lexical.Protocol.Conversions + alias Lexical.SourceFile + + def to_elixir(%{lsp: lsp_request} = request) do + with {:ok, elixir_request, source_file} <- convert(lsp_request) do + updated_request = + case Map.merge(request, Map.from_struct(elixir_request)) do + %_{source_file: _} = updated -> Map.put(updated, :source_file, source_file) + updated -> updated + end + + {:ok, updated_request} + end + end + + def to_elixir(%_request_module{lsp: lsp_request} = request) do + converted = Map.merge(request, Map.from_struct(lsp_request)) + {:ok, converted} + end + + def to_elixir(request) do + request = Map.merge(request, Map.from_struct(request.lsp)) + + {:ok, request} + end + + defp fetch_source_file(%{text_document: %{uri: uri}}) do + SourceFile.Store.fetch(uri) + end + + defp fetch_source_file(%{source_file: %SourceFile{} = source_file}) do + {:ok, source_file} + end + + defp fetch_source_file(_) do + :error + end + + defp convert(%_{text_document: _} = request) do + with {:ok, source_file} <- 
defmodule Lexical.Protocol.Proto.Decoders do
  @moduledoc """
  Generates `decode/2`, matcher macros, and `__meta__/1` accessors for every
  notification and request module registered in `CompileMetadata`.

  `for_notifications/1` / `for_requests/1` are invoked once, after all proto
  modules are compiled, so the module lists are fixed at expansion time.
  """

  alias Lexical.Protocol.Proto.CompileMetadata

  defmacro for_notifications(_) do
    notification_modules = CompileMetadata.notification_modules()
    notification_matchers = Enum.map(notification_modules, &build_notification_matcher_macro/1)
    notification_decoders = Enum.map(notification_modules, &build_notifications_decoder/1)
    access_map = build_access_map(notification_modules)

    quote do
      alias Lexical.Protocol.Proto.Convert

      # Matches any JSON-RPC notification, regardless of method.
      defmacro notification(method) do
        quote do
          %{"method" => unquote(method), "jsonrpc" => "2.0"}
        end
      end

      defmacro notification(method, params) do
        quote do
          %{"method" => unquote(method), "params" => unquote(params), "jsonrpc" => "2.0"}
        end
      end

      unquote(build_typespec(:notification, notification_modules))

      unquote_splicing(notification_matchers)

      @spec decode(String.t(), map()) :: {:ok, notification} | {:error, any}
      unquote_splicing(notification_decoders)

      def decode(method, _) do
        {:error, {:unknown_notification, method}}
      end

      def __meta__(:events) do
        unquote(notification_modules)
      end

      def __meta__(:notifications) do
        unquote(notification_modules)
      end

      def __meta__(:access) do
        %{unquote_splicing(access_map)}
      end

      def to_elixir(%{lsp: _} = request_or_notification) do
        Convert.to_elixir(request_or_notification)
      end
    end
  end

  defmacro for_requests(_) do
    request_modules = CompileMetadata.request_modules()
    request_matchers = Enum.map(request_modules, &build_request_matcher_macro/1)
    request_decoders = Enum.map(request_modules, &build_request_decoder/1)
    access_map = build_access_map(request_modules)

    quote do
      alias Lexical.Protocol.Proto.Convert

      def __meta__(:requests) do
        unquote(request_modules)
      end

      def __meta__(:access) do
        %{unquote_splicing(access_map)}
      end

      defmacro request(id, method) do
        quote do
          %{"method" => unquote(method), "id" => unquote(id), "jsonrpc" => "2.0"}
        end
      end

      defmacro request(id, method, params) do
        quote do
          # "jsonrpc" key added for consistency with request/2 and the
          # per-module matcher macros, which all pin the protocol version.
          %{
            "method" => unquote(method),
            "id" => unquote(id),
            "params" => unquote(params),
            "jsonrpc" => "2.0"
          }
        end
      end

      unquote(build_typespec(:request, request_modules))

      unquote_splicing(request_matchers)

      @spec decode(String.t(), map()) :: {:ok, request} | {:error, any}
      unquote_splicing(request_decoders)

      def decode(method, _) do
        {:error, {:unknown_request, method}}
      end

      def to_elixir(%{lsp: _} = request_or_notification) do
        Convert.to_elixir(request_or_notification)
      end
    end
  end

  # Builds `{method_name, access_kind}` pairs for the __meta__(:access) map.
  # (Renamed from the original `build_acces_map` typo.)
  defp build_access_map(modules) do
    Enum.map(modules, fn module ->
      quote(do: {unquote(module.method()), unquote(module.__meta__(:access))})
    end)
  end

  # Emits e.g. `defmacro did_open()` matching that notification's wire shape.
  defp build_notification_matcher_macro(notification_module) do
    macro_name = module_to_macro_name(notification_module)
    method_name = notification_module.__meta__(:method_name)

    quote do
      defmacro unquote(macro_name)() do
        method_name = unquote(method_name)

        quote do
          %{"method" => unquote(method_name), "jsonrpc" => "2.0"}
        end
      end
    end
  end

  defp build_notifications_decoder(notification_module) do
    method_name = notification_module.__meta__(:method_name)

    quote do
      def decode(unquote(method_name), request) do
        unquote(notification_module).parse(request)
      end
    end
  end

  # Emits e.g. `defmacro initialize(id)` matching that request's wire shape.
  defp build_request_matcher_macro(request_module) do
    macro_name = module_to_macro_name(request_module)
    method_name = request_module.__meta__(:method_name)

    quote do
      defmacro unquote(macro_name)(id) do
        method_name = unquote(method_name)

        quote do
          %{"method" => unquote(method_name), "id" => unquote(id), "jsonrpc" => "2.0"}
        end
      end
    end
  end

  defp build_request_decoder(request_module) do
    method_name = request_module.__meta__(:method_name)

    quote do
      def decode(unquote(method_name), request) do
        unquote(request_module).parse(request)
      end
    end
  end

  # Folds the module list into a union typespec: Mod1.t() | Mod2.t() | ...
  def build_typespec(type_name, modules) do
    spec_name = {type_name, [], nil}

    spec =
      Enum.reduce(modules, nil, fn
        module, nil ->
          quote do
            unquote(module).t()
          end

        module, spec ->
          quote do
            unquote(module).t() | unquote(spec)
          end
      end)

    quote do
      @type unquote(spec_name) :: unquote(spec)
    end
  end

  # "Lexical.Protocol.Notifications.DidOpen" -> :did_open
  defp module_to_macro_name(module) do
    module
    |> Module.split()
    |> List.last()
    |> Macro.underscore()
    |> String.to_atom()
  end
end
defmodule Lexical.Protocol.Proto.Enum do
  @moduledoc """
  `defenum` generates a bidirectional mapping between atom constants and their
  wire values: `parse/1` (wire -> atom), `encode/1` (atom -> wire), plus a
  macro per constant that expands to its raw wire value.
  """

  defmacro defenum(opts) do
    parse_functions =
      for {name, value} <- opts do
        quote location: :keep do
          def parse(unquote(value)) do
            {:ok, unquote(name)}
          end
        end
      end

    enum_macros =
      for {name, value} <- opts do
        quote location: :keep do
          defmacro unquote(name)() do
            unquote(value)
          end
        end
      end

    encoders =
      for {name, value} <- opts do
        quote location: :keep do
          def encode(unquote(name)) do
            {:ok, unquote(value)}
          end
        end
      end

    quote location: :keep do
      # Consistency fix: was `unquote(parse_functions)`; splice the clause
      # list like the other two groups below.
      unquote_splicing(parse_functions)

      def parse(unknown) do
        {:error, {:invalid_constant, unknown}}
      end

      unquote_splicing(encoders)

      def encode(val) do
        {:error, {:invalid_value, __MODULE__, val}}
      end

      unquote_splicing(enum_macros)

      def __meta__(:types) do
        {:constant, __MODULE__}
      end

      def __meta__(:type) do
        :enum
      end
    end
  end
end
defmodule Lexical.Protocol.Proto.Field do
  @moduledoc """
  Converts individual proto field values between their decoded-JSON wire form
  (`extract/3`) and their Elixir form (`encode/2`), driven by the declared
  field type tuples produced by `Proto.TypeFunctions`.

  Both directions return `{:ok, value}` or `{:error, reason}`.
  """

  alias Lexical.Protocol.Proto.Text

  # ---- extract/3: wire value -> elixir value -------------------------------

  def extract(:any, _, value) do
    {:ok, value}
  end

  # Literal: the incoming value must equal the declared constant
  # (enforced by repeating `same_value` in the pattern).
  def extract({:literal, same_value}, _name, same_value) do
    {:ok, same_value}
  end

  def extract({:optional, _}, _name, nil) do
    {:ok, nil}
  end

  def extract({:optional, type}, name, orig_val) do
    extract(type, name, orig_val)
  end

  # Tries each candidate type in order; first success wins.
  def extract({:one_of, type_list}, name, value) do
    result =
      Enum.reduce_while(type_list, nil, fn type, _acc ->
        case extract(type, name, value) do
          {:ok, _} = success -> {:halt, success}
          error -> {:cont, error}
        end
      end)

    case result do
      {:ok, _} = success -> success
      _error -> {:error, {:incorrect_type, type_list, value}}
    end
  end

  def extract({:list, list_type}, name, orig_value) when is_list(orig_value) do
    result =
      Enum.reduce_while(orig_value, [], fn orig, acc ->
        case extract(list_type, name, orig) do
          {:ok, value} -> {:cont, [value | acc]}
          error -> {:halt, error}
        end
      end)

    case result do
      value_list when is_list(value_list) -> {:ok, Enum.reverse(value_list)}
      error -> error
    end
  end

  def extract(:integer, _name, orig_value) when is_integer(orig_value) do
    {:ok, orig_value}
  end

  def extract(:float, _name, orig_value) when is_float(orig_value) do
    {:ok, orig_value}
  end

  def extract(:string, _name, orig_value) when is_binary(orig_value) do
    {:ok, orig_value}
  end

  def extract(:boolean, _name, orig_value) when is_boolean(orig_value) do
    {:ok, orig_value}
  end

  def extract({:type_alias, alias_module}, name, orig_value) do
    extract(alias_module.definition(), name, orig_value)
  end

  # A bare module atom delegates to that proto module's own parse/1.
  def extract(module, _name, orig_value)
      when is_atom(module) and module not in [:integer, :string, :boolean, :float] do
    module.parse(orig_value)
  end

  def extract({:map, type, _opts}, field_name, field_value) when is_map(field_value) do
    result =
      Enum.reduce_while(field_value, [], fn {k, v}, acc ->
        case extract(type, field_name, v) do
          {:ok, value} -> {:cont, [{k, value} | acc]}
          error -> {:halt, error}
        end
      end)

    case result do
      values when is_list(values) -> {:ok, Map.new(values)}
      error -> error
    end
  end

  # Tuples arrive as JSON arrays; zip positions with their declared types.
  def extract({:tuple, tuple_types}, field_name, field_value) when is_list(field_value) do
    result =
      field_value
      |> Enum.zip(tuple_types)
      |> Enum.reduce_while([], fn {value, type}, acc ->
        case extract(type, field_name, value) do
          {:ok, value} -> {:cont, [value | acc]}
          error -> {:halt, error}
        end
      end)

    case result do
      value when is_list(value) ->
        value_as_tuple =
          value
          |> Enum.reverse()
          |> List.to_tuple()

        {:ok, value_as_tuple}

      error ->
        error
    end
  end

  # Named params: looked up in the wire map by their camelCase key.
  def extract({:params, param_defs}, _field_name, field_value) when is_map(field_value) do
    result =
      Enum.reduce_while(param_defs, [], fn {param_name, param_type}, acc ->
        value = Map.get(field_value, Text.camelize(param_name))

        case extract(param_type, param_name, value) do
          {:ok, value} -> {:cont, [{param_name, value} | acc]}
          error -> {:halt, error}
        end
      end)

    case result do
      values when is_list(values) -> {:ok, Map.new(values)}
      error -> error
    end
  end

  def extract(_type, name, orig_value) do
    {:error, {:invalid_value, name, orig_value}}
  end

  # ---- encode/2: elixir value -> wire value --------------------------------

  def encode(:any, field_value) do
    {:ok, field_value}
  end

  def encode({:literal, value}, _) do
    {:ok, value}
  end

  # Absent optional fields encode to a sentinel that the JSON encoder drops.
  def encode({:optional, _}, nil) do
    {:ok, :"$__drop__"}
  end

  def encode({:optional, field_type}, field_value) do
    encode(field_type, field_value)
  end

  def encode({:one_of, types}, field_value) do
    # First type that encodes successfully wins; otherwise the last error is
    # returned. (The original wrapped this in a dead `is_list` case — the
    # accumulator can never be a list — so the result is returned directly.)
    Enum.reduce_while(types, nil, fn type, _ ->
      case encode(type, field_value) do
        {:ok, _} = success -> {:halt, success}
        error -> {:cont, error}
      end
    end)
  end

  def encode({:list, list_type}, field_value) when is_list(field_value) do
    encoded =
      Enum.reduce_while(field_value, [], fn element, acc ->
        case encode(list_type, element) do
          {:ok, encoded} -> {:cont, [encoded | acc]}
          error -> {:halt, error}
        end
      end)

    case encoded do
      encoded_list when is_list(encoded_list) ->
        {:ok, Enum.reverse(encoded_list)}

      error ->
        error
    end
  end

  def encode(:integer, field_value) when is_integer(field_value) do
    {:ok, field_value}
  end

  # Lenient: accepts an integer given as a string, e.g. "42".
  def encode(:integer, string_value) when is_binary(string_value) do
    case Integer.parse(string_value) do
      {int_value, ""} -> {:ok, int_value}
      _ -> {:error, {:invalid_integer, string_value}}
    end
  end

  def encode(:float, float_value) when is_float(float_value) do
    {:ok, float_value}
  end

  # NOTE: the original had a second, unreachable `encode(:float, …)` clause
  # with the same guard that returned an untagged value; removed.

  def encode(:string, field_value) when is_binary(field_value) do
    {:ok, field_value}
  end

  def encode(:boolean, field_value) when is_boolean(field_value) do
    {:ok, field_value}
  end

  def encode({:map, value_type, _}, field_value) when is_map(field_value) do
    map_fields =
      Enum.reduce_while(field_value, [], fn {key, value}, acc ->
        case encode(value_type, value) do
          {:ok, encoded_value} -> {:cont, [{key, encoded_value} | acc]}
          error -> {:halt, error}
        end
      end)

    case map_fields do
      fields when is_list(fields) -> {:ok, Map.new(fields)}
      error -> error
    end
  end

  def encode({:tuple, types}, field_value) when is_tuple(field_value) do
    encoded =
      field_value
      |> Tuple.to_list()
      |> Enum.zip(types)
      |> Enum.reduce_while([], fn {value, type}, acc ->
        case encode(type, value) do
          {:ok, encoded} -> {:cont, [encoded | acc]}
          error -> {:halt, error}
        end
      end)

    case encoded do
      encoded_list when is_list(encoded_list) -> {:ok, Enum.reverse(encoded_list)}
      error -> error
    end
  end

  def encode({:params, param_defs}, field_value) when is_map(field_value) do
    param_fields =
      Enum.reduce_while(param_defs, [], fn {param_name, param_type}, acc ->
        unencoded = Map.get(field_value, param_name)

        case encode(param_type, unencoded) do
          {:ok, encoded_value} -> {:cont, [{param_name, encoded_value} | acc]}
          error -> {:halt, error}
        end
      end)

    case param_fields do
      fields when is_list(fields) -> {:ok, Map.new(fields)}
      error -> error
    end
  end

  def encode({:constant, constant_module}, field_value) do
    {:ok, constant_module.encode(field_value)}
  end

  def encode({:type_alias, alias_module}, field_value) do
    encode(alias_module.definition(), field_value)
  end

  def encode(module, field_value) when is_atom(module) do
    # NOTE(review): this also catches primitive type atoms with mismatched
    # values (e.g. `encode(:integer, 1.5)`) and passes them through untouched
    # — presumably intentional leniency; confirm.
    if function_exported?(module, :encode, 1) do
      module.encode(field_value)
    else
      {:ok, field_value}
    end
  end

  # Fixed: previously returned bare `nil`, breaking the `{:ok, _}` contract
  # that every caller pattern-matches on.
  def encode(_, nil) do
    {:ok, nil}
  end

  def encode(type, value) do
    {:error, {:invalid_type, type, value}}
  end
end
+ {:error, {:invalid_type, type, value}} + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/lsp_types.ex b/apps/protocol/lib/lexical/protocol/proto/lsp_types.ex new file mode 100644 index 000000000..2234bb106 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/lsp_types.ex @@ -0,0 +1,41 @@ +defmodule Lexical.Protocol.Proto.LspTypes do + alias Lexical.Protocol.Proto + use Proto + + defmodule ErrorCodes do + use Proto + + defenum parse_error: -32700, + invalid_request: -32600, + method_not_found: -32601, + invalid_params: -32602, + internal_error: -32603, + server_not_initialized: -32002, + unknown_error_code: -32001, + request_failed: -32803, + server_cancelled: -32802, + content_modified: -32801, + request_cancelled: -32800 + end + + defmodule ResponseError do + use Proto + deftype code: ErrorCodes, message: string(), data: optional(any()) + end + + defmodule ClientInfo do + use Proto + deftype name: string(), version: optional(string()) + end + + defmodule TraceValue do + use Proto + defenum off: "off", messages: "messages", verbose: "verbose" + end + + defmodule Registration do + use Proto + + deftype id: string(), method: string(), register_options: optional(any()) + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/access.ex b/apps/protocol/lib/lexical/protocol/proto/macros/access.ex new file mode 100644 index 000000000..ca9e5169f --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/access.ex @@ -0,0 +1,34 @@ +defmodule Lexical.Protocol.Proto.Macros.Access do + def build do + quote location: :keep do + def fetch(proto, key) when is_map_key(proto, key) do + {:ok, Map.get(proto, key)} + end + + def fetch(_, _) do + :error + end + + def get_and_update(proto, key, function) when is_map_key(proto, key) do + old_value = Map.get(proto, key) + + case function.(old_value) do + {current_value, updated_value} -> {current_value, Map.put(proto, key, updated_value)} + :pop -> {old_value, Map.put(proto, key, nil)} + 
end + end + + def get_and_update(proto, key, function) do + {{:error, {:nonexistent_key, key}}, proto} + end + + def pop(proto, key) when is_map_key(proto, key) do + {Map.get(proto, key), proto} + end + + def pop(proto, _key) do + {nil, proto} + end + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/inspect.ex b/apps/protocol/lib/lexical/protocol/proto/macros/inspect.ex new file mode 100644 index 000000000..168728235 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/inspect.ex @@ -0,0 +1,37 @@ +defmodule Lexical.Protocol.Proto.Macros.Inspect do + def build(dest_module) do + trimmed_name = trim_module_name(dest_module) + + quote location: :keep do + defimpl Inspect, for: unquote(dest_module) do + import Inspect.Algebra + + def inspect(proto_type, opts) do + proto_map = Map.from_struct(proto_type) + + concat(["%#{unquote(trimmed_name)}", to_doc(proto_map, opts), ""]) + end + end + end + end + + def trim_module_name(long_name) do + {sub_modules, _} = + long_name + |> Module.split() + |> Enum.reduce({[], false}, fn + "Protocol", _ -> + {["Protocol"], true} + + _ignored_module, {_, false} = state -> + state + + submodule, {mod_list, true} -> + {[submodule | mod_list], true} + end) + + sub_modules + |> Enum.reverse() + |> Enum.join(".") + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/json.ex b/apps/protocol/lib/lexical/protocol/proto/macros/json.ex new file mode 100644 index 000000000..06bf8fa97 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/json.ex @@ -0,0 +1,46 @@ +defmodule Lexical.Protocol.Proto.Macros.Json do + alias Lexical.Protocol.Proto.Field + + def build(dest_module) do + quote location: :keep do + defimpl Jason.Encoder, for: unquote(dest_module) do + def encode(%struct_module{} = value, opts) do + encoded_pairs = + for {field_name, field_type} <- unquote(dest_module).__meta__(:types), + field_value = get_field_value(value, field_name), + {:ok, encoded_value} = 
defmodule Lexical.Protocol.Proto.Macros.Json do
  @moduledoc """
  Builds a `Jason.Encoder` implementation for a proto struct: fields are
  encoded via `Field.encode/2`, `:"$__drop__"` sentinels are omitted, spread
  (`..`) fields are flattened into the object, and keys are camelCased.
  """

  alias Lexical.Protocol.Proto.Field

  def build(dest_module) do
    quote location: :keep do
      defimpl Jason.Encoder, for: unquote(dest_module) do
        def encode(%struct_module{} = value, opts) do
          encoded_pairs =
            for {field_name, field_type} <- unquote(dest_module).__meta__(:types),
                field_value = get_field_value(value, field_name),
                # a failed match here raises, surfacing encode bugs loudly
                {:ok, encoded_value} = Field.encode(field_type, field_value),
                encoded_value != :"$__drop__" do
              {field_name, encoded_value}
            end

          encoded_pairs
          |> Enum.flat_map(fn
            # flatten the spread into the current map
            {:.., value} when is_map(value) -> Enum.to_list(value)
            {k, v} -> [{camelize(k), v}]
          end)
          |> Jason.Encode.keyword(opts)
        end

        # The spread field reads through its declared alias.
        defp get_field_value(%struct_module{} = struct, :..) do
          get_field_value(struct, struct_module.__meta__(:spread_alias))
        end

        defp get_field_value(struct, field_name) do
          Map.get(struct, field_name)
        end

        def camelize(field_name) do
          field_name
          |> to_string()
          |> Macro.camelize()
          |> downcase_first()
        end

        # Reconstructed: the binary pattern was garbled in transit; `c` is the
        # first byte of the camelized name, `rest` the remainder.
        defp downcase_first(<<c::binary-size(1), rest::binary>>) do
          String.downcase(c) <> rest
        end
      end
    end
  end
end

defmodule Lexical.Protocol.Proto.Macros.Match do
  @moduledoc """
  Builds a struct-literal matcher macro named after the destination module.
  """

  def build(field_types, dest_module) do
    macro_name =
      dest_module
      |> Macro.underscore()
      |> String.replace("/", "_")
      |> String.to_atom()

    quote location: :keep do
      defmacro unquote(macro_name)(opts \\ []) do
        cond do
          Keyword.keyword?(opts) ->
            %unquote(dest_module){unquote_splicing(field_types)}
        end
      end
    end

    # NOTE(review): build/2 intentionally(?) returns nil, so the macro above
    # is never injected by `deftype` — confirm before "fixing" this.
    nil
  end
end
do + Parse.build(types) + end + + quote do + unquote(Access.build()) + unquote(Struct.build(types)) + unquote(Typespec.build()) + unquote(parse_fn) + unquote(Meta.build(types)) + + def method do + unquote(method) + end + + def __meta__(:method_name) do + unquote(method) + end + + def __meta__(:type) do + unquote(meta_type) + end + + def __meta__(:param_names) do + unquote(param_names) + end + + def __meta__(:access) do + unquote(access) + end + end + end + + def generate_elixir_types(caller_module, message_types) do + message_types + |> Enum.reduce(message_types, fn + {:text_document, _}, types -> + Keyword.put(types, :source_file, quote(do: SourceFile)) + + {:position, _}, types -> + Keyword.put(types, :position, quote(do: SourceFile.Position)) + + {:range, _}, types -> + Keyword.put(types, :range, quote(do: SourceFile.Range)) + + _, types -> + types + end) + |> Keyword.put(:lsp, quote(do: unquote(caller_module).LSP)) + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/meta.ex b/apps/protocol/lib/lexical/protocol/proto/macros/meta.ex new file mode 100644 index 000000000..a0d79dbc6 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/meta.ex @@ -0,0 +1,37 @@ +defmodule Lexical.Protocol.Proto.Macros.Meta do + def build(opts) do + field_types = + for {field_name, field_type} <- opts do + field_meta(field_name, field_type) + end + + quote location: :keep do + unquote_splicing(field_types) + + def __meta__(:types) do + %{unquote_splicing(opts)} + end + end + end + + defp field_meta(:.., {:map_of, ctx, [key_type, [as: key_alias]]}) do + # a spread operator, generate meta for both the spread name and the aliased name + + quote do + def __meta__(:spread_alias) do + unquote(key_alias) + end + + unquote(field_meta(:.., {:map_of, ctx, [key_type]})) + unquote(field_meta(key_alias, {:map_of, ctx, [key_type]})) + end + end + + defp field_meta(field_name, field_type) do + quote location: :keep do + def __meta__(:type, unquote(field_name)) do + 
unquote(field_type) + end + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/parse.ex b/apps/protocol/lib/lexical/protocol/proto/macros/parse.ex new file mode 100644 index 000000000..383fb600a --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/parse.ex @@ -0,0 +1,140 @@ +defmodule Lexical.Protocol.Proto.Macros.Parse do + alias Lexical.Protocol.Proto.Field + alias Lexical.Protocol.Proto.Text + + def build(opts) do + {optional_opts, required_opts} = + Enum.split_with(opts, fn + {_key, {:optional, _, _}} -> true + {:.., _} -> true + _ -> false + end) + + {splat_opt, optional_opts} = Keyword.pop(optional_opts, :..) + + required_keys = Keyword.keys(required_opts) + + map_parameter_var = + if Enum.empty?(optional_opts) && is_nil(splat_opt) do + Macro.var(:_, nil) + else + Macro.var(:json_rpc_message, nil) + end + + struct_keys = Keyword.keys(opts) + + map_vars = Map.new(struct_keys, fn k -> {k, Macro.var(k, nil)} end) + + map_keys = Enum.map(required_keys, &Text.camelize/1) + + map_pairs = + map_vars + |> Map.take(required_keys) + |> Enum.map(fn {k, v} -> {Text.camelize(k), v} end) + + map_extractors = map_extractor(map_pairs) + + required_extractors = + for {field_name, field_type} <- required_opts do + quote location: :keep do + {unquote(field_name), + Field.extract( + unquote(field_type), + unquote(field_name), + unquote(Map.get(map_vars, field_name)) + )} + end + end + + optional_extractors = + for {field_name, field_type} <- optional_opts do + quote location: :keep do + {unquote(field_name), + Field.extract( + unquote(field_type), + unquote(field_name), + Map.get(unquote(map_parameter_var), unquote(Text.camelize(field_name))) + )} + end + end + + splat_extractors = + if splat_opt do + known_keys = opts |> Keyword.keys() |> Enum.map(&Text.camelize/1) + + quoted_extractor = + quote location: :keep do + {(fn -> + {:map, _, field_name} = unquote(splat_opt) + field_name + end).(), + Field.extract( + unquote(splat_opt), + :.., + 
unquote(map_parameter_var) + |> Enum.reject(fn {k, _} -> k in unquote(known_keys) end) + |> Map.new() + )} + end + + [quoted_extractor] + else + [] + end + + all_extractors = required_extractors ++ optional_extractors ++ splat_extractors + error_parse = maybe_build_error_parse(required_extractors, map_keys) + + quote location: :keep do + def parse(unquote(map_extractors) = unquote(map_parameter_var)) do + result = + unquote(all_extractors) + |> Enum.reduce_while([], fn + {field, {:ok, result}}, acc -> + {:cont, [{field, result} | acc]} + + {field, {:error, _} = err}, acc -> + {:halt, err} + end) + + case result do + {:error, _} = err -> err + keyword when is_list(keyword) -> {:ok, struct(__MODULE__, keyword)} + end + end + + unquote(error_parse) + + def parse(not_map) do + {:error, {:invalid_map, not_map}} + end + end + end + + defp maybe_build_error_parse([], _) do + end + + defp maybe_build_error_parse(_, map_keys) do + # this is only built if there are required fields + quote do + def parse(%{} = unmatched) do + missing_keys = + Enum.reduce(unquote(map_keys), [], fn key, acc -> + if Map.has_key?(unmatched, key) do + acc + else + [key | acc] + end + end) + + {:error, {:missing_keys, missing_keys, __MODULE__}} + end + end + end + + defp map_extractor(map_pairs) do + quote location: :keep do + %{unquote_splicing(map_pairs)} + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/struct.ex b/apps/protocol/lib/lexical/protocol/proto/macros/struct.ex new file mode 100644 index 000000000..e6a54b351 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/struct.ex @@ -0,0 +1,48 @@ +defmodule Lexical.Protocol.Proto.Macros.Struct do + def build(opts) do + keys = Keyword.keys(opts) + required_keys = required_keys(opts) + + keys = + if :.. in keys do + {splat_def, rest} = Keyword.pop(opts, :..) 
+ + quote location: :keep do + [ + (fn -> + {_, _, field_name} = unquote(splat_def) + field_name + end).() + | unquote(rest) + ] + end + else + keys + end + + quote location: :keep do + @enforce_keys unquote(required_keys) + defstruct unquote(keys) + + def new(opts \\ []) do + struct!(__MODULE__, opts) + end + + defoverridable new: 0, new: 1 + end + end + + defp required_keys(opts) do + Enum.filter(opts, fn + # ignore the splat, it's always optional + {:.., _} -> false + # an optional signifier tuple + {_, {:optional, _}} -> false + # ast for an optional signifier tuple + {_, {:optional, _, _}} -> false + # everything else is required + _ -> true + end) + |> Keyword.keys() + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/macros/typespec.ex b/apps/protocol/lib/lexical/protocol/proto/macros/typespec.ex new file mode 100644 index 000000000..8068860cf --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/macros/typespec.ex @@ -0,0 +1,7 @@ +defmodule Lexical.Protocol.Proto.Macros.Typespec do + def build(_opts \\ []) do + quote do + @type t :: %__MODULE__{} + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/notification.ex b/apps/protocol/lib/lexical/protocol/proto/notification.ex new file mode 100644 index 000000000..0fd5c4c4b --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/notification.ex @@ -0,0 +1,88 @@ +defmodule Lexical.Protocol.Proto.Notification do + alias Lexical.Protocol.Proto.CompileMetadata + alias Lexical.Protocol.Proto.Macros.Message + + defmacro defnotification(method, access, types \\ []) do + CompileMetadata.add_notification_module(__CALLER__.module) + + jsonrpc_types = [ + jsonrpc: quote(do: literal("2.0")), + method: quote(do: literal(unquote(method))) + ] + + param_names = Keyword.keys(types) + lsp_types = Keyword.merge(jsonrpc_types, types) + elixir_types = Message.generate_elixir_types(__CALLER__.module, lsp_types) + lsp_module_name = Module.concat(__CALLER__.module, LSP) + + quote location: 
defmodule Lexical.Protocol.Proto.Notification do
  @moduledoc """
  `defnotification` defines a notification as two structs: a nested `LSP`
  struct mirroring the wire format, and an outer elixir-side struct that is
  filled in by `to_elixir/1`.
  """

  alias Lexical.Protocol.Proto.CompileMetadata
  alias Lexical.Protocol.Proto.Macros.Message

  defmacro defnotification(method, access, types \\ []) do
    CompileMetadata.add_notification_module(__CALLER__.module)

    jsonrpc_types = [
      jsonrpc: quote(do: literal("2.0")),
      method: quote(do: literal(unquote(method)))
    ]

    param_names = Keyword.keys(types)
    lsp_types = Keyword.merge(jsonrpc_types, types)
    elixir_types = Message.generate_elixir_types(__CALLER__.module, lsp_types)
    lsp_module_name = Module.concat(__CALLER__.module, LSP)

    quote location: :keep do
      defmodule LSP do
        unquote(Message.build({:notification, :lsp}, method, access, lsp_types, param_names))

        def new(opts \\ []) do
          opts
          |> Keyword.merge(method: unquote(method), jsonrpc: "2.0")
          |> super()
        end
      end

      alias Lexical.Protocol.Proto.Convert

      unquote(
        Message.build({:notification, :elixir}, method, access, elixir_types, param_names,
          include_parse?: false
        )
      )

      unquote(build_parse(method))

      def new(opts \\ []) do
        opts = Keyword.merge(opts, method: unquote(method), jsonrpc: "2.0")

        # use struct here because initially, the non-lsp struct doesn't have
        # to be filled out. Calling to_elixir fills it out.
        struct(__MODULE__, lsp: LSP.new(opts), method: unquote(method), jsonrpc: "2.0")
      end

      def to_elixir(%__MODULE__{} = request) do
        Convert.to_elixir(request)
      end

      # The outer struct serializes as its wire-format LSP struct.
      defimpl Jason.Encoder, for: unquote(__CALLER__.module) do
        def encode(notification, opts) do
          Jason.Encoder.encode(notification.lsp, opts)
        end
      end

      defimpl Jason.Encoder, for: unquote(lsp_module_name) do
        def encode(notification, opts) do
          %{
            jsonrpc: "2.0",
            method: unquote(method),
            params: Map.take(notification, unquote(param_names))
          }
          |> Jason.Encode.map(opts)
        end
      end
    end
  end

  defp build_parse(method) do
    quote do
      def parse(%{"method" => unquote(method), "jsonrpc" => "2.0"} = request) do
        params = Map.get(request, "params", %{})
        # Typo fixed: was `flattened_notificaiton`.
        flattened_notification = Map.merge(request, params)

        case LSP.parse(flattened_notification) do
          {:ok, raw_lsp} ->
            struct_opts = [method: unquote(method), jsonrpc: "2.0", lsp: raw_lsp]
            notification = struct(__MODULE__, struct_opts)
            {:ok, notification}

          error ->
            error
        end
      end
    end
  end
end
Lexical.Protocol.Proto.Request do + alias Lexical.Protocol.Proto.CompileMetadata + alias Lexical.Protocol.Proto.Macros.Message + alias Lexical.Protocol.Proto.TypeFunctions + + import TypeFunctions, only: [optional: 1, literal: 1] + + defmacro defrequest(method, access, types) do + CompileMetadata.add_request_module(__CALLER__.module) + # id is optional so we can resuse the parse function. If it's required, + # it will go in the pattern match for the params, which won't work. + + jsonrpc_types = [ + id: quote(do: optional(one_of([string(), integer()]))), + jsonrpc: quote(do: literal("2.0")), + method: quote(do: literal(unquote(method))) + ] + + lsp_types = Keyword.merge(jsonrpc_types, types) + elixir_types = Message.generate_elixir_types(__CALLER__.module, lsp_types) + param_names = Keyword.keys(types) + lsp_module_name = Module.concat(__CALLER__.module, LSP) + + quote location: :keep do + defmodule LSP do + unquote(Message.build({:request, :lsp}, method, access, lsp_types, param_names)) + + def new(opts \\ []) do + opts + |> Keyword.merge(method: unquote(method), jsonrpc: "2.0") + |> super() + end + end + + alias Lexical.Protocol.Proto.Convert + alias Lexical.Protocol.Types + + unquote( + Message.build({:request, :elixir}, method, access, elixir_types, param_names, + include_parse?: false + ) + ) + + unquote(build_parse(method)) + + def new(opts \\ []) do + opts = Keyword.merge(opts, method: unquote(method), jsonrpc: "2.0") + + raw = LSP.new(opts) + # use struct here because initially, the non-lsp struct doesn't have + # to be filled out. Calling to_elixir fills it out. 
+ struct(__MODULE__, lsp: raw, id: raw.id, method: unquote(method), jsonrpc: "2.0") + end + + def to_elixir(%__MODULE__{} = request) do + Convert.to_elixir(request) + end + + defimpl Jason.Encoder, for: unquote(__CALLER__.module) do + def encode(request, opts) do + Jason.Encoder.encode(request.lsp, opts) + end + end + + defimpl Jason.Encoder, for: unquote(lsp_module_name) do + def encode(request, opts) do + %{ + id: request.id, + jsonrpc: "2.0", + method: unquote(method), + params: Map.take(request, unquote(param_names)) + } + |> Jason.Encode.map(opts) + end + end + end + end + + defp build_parse(method) do + quote do + def parse(%{"method" => unquote(method), "id" => id, "jsonrpc" => "2.0"} = request) do + params = Map.get(request, "params", %{}) + flattened_request = Map.merge(request, params) + + case LSP.parse(flattened_request) do + {:ok, raw_lsp} -> + struct_opts = [id: id, method: unquote(method), jsonrpc: "2.0", lsp: raw_lsp] + request = struct(__MODULE__, struct_opts) + {:ok, request} + + error -> + error + end + end + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/response.ex b/apps/protocol/lib/lexical/protocol/proto/response.ex new file mode 100644 index 000000000..735b22fe6 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/response.ex @@ -0,0 +1,76 @@ +defmodule Lexical.Protocol.Proto.Response do + alias Lexical.Protocol.Proto.CompileMetadata + + alias Lexical.Protocol.Proto.Macros.{ + Access, + Struct, + Typespec, + Meta + } + + defmacro defresponse(response_type) do + CompileMetadata.add_response_module(__CALLER__.module) + + jsonrpc_types = [ + id: quote(do: optional(one_of([integer(), string()]))), + error: quote(do: optional(LspTypes.ResponseError)), + result: quote(do: optional(unquote(response_type))) + ] + + quote location: :keep do + alias Lexical.Protocol.Proto.LspTypes + unquote(Access.build()) + unquote(Struct.build(jsonrpc_types)) + unquote(Typespec.build()) + unquote(Meta.build(jsonrpc_types)) + + def 
defmodule Lexical.Protocol.Proto.Response do
  @moduledoc """
  `defresponse` defines a JSON-RPC response struct with `id`, `result`, and
  `error` fields; the Jason encoder emits `result` XOR `error` depending on
  whether an error is set.
  """

  alias Lexical.Protocol.Proto.CompileMetadata

  alias Lexical.Protocol.Proto.Macros.{
    Access,
    Struct,
    Typespec,
    Meta
  }

  defmacro defresponse(response_type) do
    CompileMetadata.add_response_module(__CALLER__.module)

    jsonrpc_types = [
      id: quote(do: optional(one_of([integer(), string()]))),
      error: quote(do: optional(LspTypes.ResponseError)),
      result: quote(do: optional(unquote(response_type)))
    ]

    quote location: :keep do
      alias Lexical.Protocol.Proto.LspTypes
      unquote(Access.build())
      unquote(Struct.build(jsonrpc_types))
      unquote(Typespec.build())
      unquote(Meta.build(jsonrpc_types))

      def new(id, result) do
        struct(__MODULE__, result: result, id: id)
      end

      def error(id, error_code) when is_integer(error_code) do
        %__MODULE__{id: id, error: LspTypes.ResponseError.new(code: error_code)}
      end

      def error(id, error_code) when is_atom(error_code) do
        %__MODULE__{id: id, error: LspTypes.ResponseError.new(code: error_code)}
      end

      def error(id, error_code, error_message)
          when is_integer(error_code) and is_binary(error_message) do
        %__MODULE__{
          id: id,
          error: LspTypes.ResponseError.new(code: error_code, message: error_message)
        }
      end

      def error(id, error_code, error_message)
          when is_atom(error_code) and is_binary(error_message) do
        %__MODULE__{
          id: id,
          error: LspTypes.ResponseError.new(code: error_code, message: error_message)
        }
      end

      defimpl Jason.Encoder, for: unquote(__CALLER__.module) do
        # Success responses carry `result`; never both result and error.
        def encode(%_{error: nil} = response, opts) do
          %{
            jsonrpc: "2.0",
            id: response.id,
            result: response.result
          }
          |> Jason.Encode.map(opts)
        end

        def encode(response, opts) do
          %{
            jsonrpc: "2.0",
            id: response.id,
            error: response.error
          }
          |> Jason.Encode.map(opts)
        end
      end
    end
  end
end

defmodule Lexical.Protocol.Proto.Text do
  @moduledoc "Converts snake_case field names to the LSP's camelCase keys."

  def camelize(atom) when is_atom(atom) do
    atom |> Atom.to_string() |> camelize()
  end

  def camelize(string) do
    # Reconstructed: the binary pattern was garbled in transit; split off the
    # first byte of the CamelCase form and lowercase it.
    <<first::binary-size(1), rest::binary>> = Macro.camelize(string)
    String.downcase(first) <> rest
  end
end
Lexical.Protocol.Proto.Macros.{ + Access, + Inspect, + Json, + Match, + Meta, + Parse, + Struct, + Typespec + } + + defmacro deftype(types) do + caller_module = __CALLER__.module + CompileMetadata.add_type_module(caller_module) + + quote location: :keep do + unquote(Json.build(caller_module)) + unquote(Inspect.build(caller_module)) + unquote(Access.build()) + unquote(Struct.build(types)) + unquote(Typespec.build(types)) + unquote(Parse.build(types)) + unquote(Match.build(types, caller_module)) + unquote(Meta.build(types)) + + def __meta__(:type) do + :type + end + + def __meta__(:param_names) do + unquote(Keyword.keys(types)) + end + end + end +end diff --git a/apps/protocol/lib/lexical/protocol/proto/type_functions.ex b/apps/protocol/lib/lexical/protocol/proto/type_functions.ex new file mode 100644 index 000000000..8c99a68c6 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/proto/type_functions.ex @@ -0,0 +1,58 @@ +defmodule Lexical.Protocol.Proto.TypeFunctions do + def integer do + :integer + end + + def float do + :float + end + + def string do + :string + end + + def boolean do + :boolean + end + + def uri do + :string + end + + def type_alias(alias_module) do + {:type_alias, alias_module} + end + + def literal(what) do + {:literal, what} + end + + def list_of(type) do + {:list, type} + end + + def tuple_of(types) when is_list(types) do + {:tuple, types} + end + + def map_of(type, opts \\ []) do + field_name = Keyword.get(opts, :as) + {:map, type, field_name} + end + + def one_of(options) when is_list(options) do + {:one_of, options} + end + + def optional(type) do + {:optional, type} + end + + def params(opts) do + {:params, opts} + end + + def any do + :any + end +end diff --git a/apps/protocol/lib/lexical/protocol/requests.ex b/apps/protocol/lib/lexical/protocol/requests.ex new file mode 100644 index 000000000..e1eb0edfc --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/requests.ex @@ -0,0 +1,57 @@ +defmodule Lexical.Protocol.Requests do + alias 
Lexical.Protocol.LspTypes + alias Lexical.Protocol.Proto + alias Lexical.Protocol.Types + + # Client -> Server request + defmodule Initialize do + use Proto + + defrequest "initialize", :shared, + capabilities: optional(Types.ClientCapabilities), + client_info: optional(LspTypes.ClientInfo), + initialization_options: optional(any()), + locale: optional(string()), + process_id: optional(integer()), + root_path: optional(string()), + root_uri: optional(uri()), + trace: optional(Types.TraceValues), + workspace_folders: optional(list_of(Types.Workspace.Folder)) + end + + defmodule FindReferences do + use Proto + + defrequest "textDocument/references", :exclusive, + position: Types.Position, + text_document: Types.TextDocument.Identifier + end + + defmodule Formatting do + use Proto + + defrequest "textDocument/formatting", :exclusive, + options: Types.Formatting.Options, + text_document: Types.TextDocument.Identifier + end + + defmodule CodeAction do + use Proto + + defrequest "textDocument/codeAction", :exclusive, + context: Types.CodeAction.Context, + range: Types.Range, + text_document: Types.TextDocument.Identifier + end + + # Server -> Client requests + + defmodule RegisterCapability do + use Proto + + defrequest "client/registerCapability", :shared, + registrations: optional(list_of(LspTypes.Registration)) + end + + use Proto, decoders: :requests +end diff --git a/apps/protocol/lib/lexical/protocol/responses.ex b/apps/protocol/lib/lexical/protocol/responses.ex new file mode 100644 index 000000000..7a8d1ae4f --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/responses.ex @@ -0,0 +1,28 @@ +defmodule Lexical.Protocol.Responses do + alias Lexical.Protocol.Proto + alias Lexical.Protocol.Types + + defmodule InitializeResult do + use Proto + + defresponse Types.Initialize.Result + end + + defmodule FindReferences do + use Proto + + defresponse optional(list_of(Types.Location)) + end + + defmodule Formatting do + use Proto + + defresponse 
optional(list_of(Types.TextEdit)) + end + + defmodule CodeAction do + use Proto + + defresponse optional(list_of(Types.CodeAction)) + end +end diff --git a/apps/protocol/lib/lexical/protocol/types.ex b/apps/protocol/lib/lexical/protocol/types.ex new file mode 100644 index 000000000..9c80cc296 --- /dev/null +++ b/apps/protocol/lib/lexical/protocol/types.ex @@ -0,0 +1,528 @@ +# defmodule Lexical.Protocol.Types do +# alias Lexical.Protocol.Proto + +# defmodule Position do +# use Proto + +# deftype line: integer(), character: integer() +# end + +# defmodule Range do +# use Proto + +# deftype start: Position, end: Position +# end + +# defmodule Location do +# use Proto + +# deftype uri: uri(), range: Range +# end + +# defmodule TextDocument do +# use Proto +# deftype uri: uri(), language_id: string(), version: integer(), text: string() +# end + +# defmodule TextDocument.Identifier do +# use Proto + +# deftype uri: uri() +# end + +# defmodule TextDocument.VersionedIdentifier do +# use Proto + +# deftype uri: uri(), version: integer() +# end + +# defmodule TextDocument.OptionalVersionedIdentifier do +# use Proto + +# deftype uri: uri(), version: optional(integer()) +# end + +# defmodule TextDocument.ContentChangeEvent do +# use Proto + +# deftype range: optional(Range), text: string() +# end + +# defmodule TextDocument.Edit do +# use Proto + +# deftype text_document: TextDocument.OptionalVersionedIdentifier, +# edits: list_of(TextEdit) +# end + +# defmodule CodeDescription do +# use Proto + +# deftype href: string() +# end + +# defmodule Severity do +# use Proto +# defenum error: 1, warning: 2, information: 3, hint: 4 +# end + +# defmodule DiagnosticTag do +# use Proto + +# defenum unnecessary: 1, deprecated: 2 +# end + +# defmodule DiagnosticRelatedInformation do +# use Proto + +# deftype location: Location, message: string() +# end + +# defmodule Diagnostic do +# use Proto + +# deftype range: Range, +# severity: optional(Severity), +# code: optional(any()), +# 
code_description: optional(CodeDescription), +# source: optional(string()), +# message: string(), +# tags: optional(list_of(DiagnosticTag)), +# related_information: optional(list_of(DiagnosticRelatedInformation)), +# data: optional(any()) +# end + +# defmodule TextEdit do +# use Proto +# deftype range: Range, new_text: string() +# end + +# defmodule FormattingOptions do +# use Proto + +# deftype tab_size: integer(), +# insert_spaces: boolean(), +# trim_trailing_whitespace: optional(boolean()), +# insert_final_newline: optional(boolean()), +# trim_final_newlines: optional(boolean()), +# ..: map_of(one_of([string(), boolean(), integer()]), as: :opts) +# end + +# defmodule FileChangeType do +# use Proto + +# defenum created: 1, changed: 2, deleted: 3 +# end + +# defmodule FileEvent do +# use Proto + +# deftype uri: uri(), type: FileChangeType +# end + +# defmodule ReferencesContext do +# use Proto + +# deftype include_declaration: boolean() +# end + +# defmodule FileOperationsCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# did_create: optional(boolean()), +# will_create: optional(boolean()), +# did_rename: optional(boolean()), +# will_rename: optional(boolean()), +# did_delete: optional(boolean()), +# will_delete: optional(boolean()) +# end + +# defmodule ResourceOperationKind do +# use Proto + +# defenum create: "create", rename: "rename", delete: "delete" +# end + +# defmodule FailureHandlingKind do +# use Proto + +# defenum abort: "abort", +# trasactional: "transactional", +# text_only_transactional: "textOnlyTransactional", +# unto: "undo" +# end + +# defmodule WorkspaceEdit.ClientCapabilities do +# use Proto + +# deftype document_changes: optional(boolean()), +# resource_operations: optional(list_of(ResourceOperationKind)) +# end + +# defmodule WorkspaceEdit do +# use Proto + +# deftype document_changes: optional(list_of(TextDocument.Edit)), +# changes: optional(map_of(list_of(TextEdit))) +# end + +# defmodule 
DidChangeConfiguration.ClientCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()) +# end + +# defmodule DidChangeWatchedFiles.ClientCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# relative_pattern_support: optional(boolean()) +# end + +# defmodule SymbolKind do +# use Proto + +# defenum file: 1, +# module: 2, +# namespace: 3, +# package: 4, +# class: 5, +# method: 6, +# property: 7, +# field: 8, +# constructor: 9, +# enum: 10, +# interface: 11, +# function: 12, +# variable: 13, +# constant: 14, +# string: 15, +# number: 16, +# boolean: 17, +# array: 18, +# object: 19, +# key: 20, +# null: 21, +# enum_member: 22, +# struct: 23, +# event: 24, +# operator: 25, +# typep_arameter: 26 +# end + +# defmodule CompletionItemKind do +# use Proto + +# defenum text: 1, +# method: 2, +# function: 3, +# constructor: 4, +# field: 5, +# variable: 6, +# class: 7, +# interface: 8, +# module: 9, +# property: 10, +# unit: 11, +# value: 12, +# enum: 13, +# keyword: 14, +# snippet: 15, +# color: 16, +# File: 17, +# reference: 18, +# folder: 19, +# enum_member: 20, +# constant: 21, +# struct: 22, +# event: 23, +# operator: 24, +# type_parameter: 25 +# end + +# defmodule PositionEncodingKind do +# use Proto + +# defenum utf8: "utf-8", +# utf16: "utf-16", +# utf32: "utf-32" +# end + +# defmodule SymbolTag do +# use Proto + +# defenum deprecated: 1 +# end + +# defmodule ResolveProperties do +# use Proto +# deftype properties: list_of(string()) +# end + +# defmodule WorkspaceSymbol.ClientCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# value_set: optional(list_of(SymbolKind)), +# tag_support: optional(list_of(SymbolTag)), +# resolve_support: optional(ResolveProperties) +# end + +# defmodule ExecuteCommand.ClientCapabilities do +# use Proto +# deftype dynamic_registration: optional(boolean()) +# end + +# defmodule SemanticTokensWorkspace.ClientCapabilities do +# use Proto + +# deftype 
refresh_support: optional(boolean()) +# end + +# defmodule CodeLensWorkspace.ClientCapabilities do +# use Proto + +# deftype refresh_support: optional(boolean()) +# end + +# defmodule InlineValueWorkspace.ClientCapabilities do +# use Proto +# deftype refresh_support: optional(boolean()) +# end + +# defmodule InlayHintWorkspace.ClientCapabilities do +# use Proto +# deftype refresh_support: optional(boolean()) +# end + +# defmodule DiagnosticWorkspace.ClientCapabilities do +# use Proto +# deftype refresh_support: optional(boolean()) +# end + +# defmodule WorkspaceCapabilities do +# use Proto + +# deftype apply_edit: optional(boolean()), +# workspace_edit: optional(WorkspaceEdit.ClientCapabilities), +# did_change_configuration: optional(DidChangeConfiguration.ClientCapabilities), +# did_change_watched_files: optional(DidChangeWatchedFiles.ClientCapabilities), +# symbol: optional(WorkspaceSymbol.ClientCapabilities), +# execute_command: optional(ExecuteCommand.ClientCapabilities), +# workspace_folders: optional(boolean()), +# configuration: optional(boolean()), +# semantic_tokens: optional(SemanticTokensWorkspace.ClientCapabilities), +# code_lens: optional(CodeLensWorkspace.ClientCapabilities), +# file_operations: optional(FileOperationsCapabilities), +# inline_value: optional(InlineValueWorkspace.ClientCapabilities), +# inlay_hint: optional(InlayHintWorkspace.ClientCapabilities), +# diagnostic: optional(DiagnosticWorkspace.ClientCapabilities) +# end + +# defmodule TextDocument.SyncKind do +# use Proto +# defenum none: 0, full: 1, incremental: 2 +# end + +# defmodule MarkupKind do +# use Proto +# defenum plain_text: "plaintext", markdown: "markdown" +# end + +# defmodule CompletionItemTag do +# use Proto + +# defenum deprecated: 1 +# end + +# defmodule InsertTextMode do +# use Proto + +# defenum as_is: 1, adjust_indentation: 2 +# end + +# defmodule TagSupport do +# use Proto +# deftype value_set: list_of(CompletionItemTag) +# end + +# defmodule ResolveSupport do +# use 
Proto + +# deftype properties: list_of(string()) +# end + +# defmodule TextDocumentSync.ClientCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# will_save: optional(boolean()), +# will_save_wait_until: optional(boolean()), +# did_save: optional(boolean()) +# end + +# defmodule CompletionItem do +# defmodule InsertTextModeSupport do +# use Proto + +# deftype value_set: list_of(InsertTextMode) +# end + +# use Proto + +# deftype snippet_support: optional(boolean()), +# commit_characters_support: optional(boolean()), +# documentation_format: optional(list_of(MarkupKind)), +# deprecated_support: optional(boolean()), +# preselect_support: optional(boolean()), +# tag_support: optional(TagSupport), +# insert_replace_support: optional(boolean()), +# resolve_support: optional(ResolveSupport), +# insert_text_mode_support: optional(InsertTextModeSupport), +# label_detail_support: optional(boolean()) +# end + +# defmodule Completion.ClientCapabilities do +# defmodule CompletionKindValues do +# use Proto + +# deftype value_set: list_of(CompletionKind) +# end + +# defmodule CompletionList do +# use Proto +# deftype item_defaults: optional(list_of(string())) +# end + +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# completion_item: optional(CompletionItem), +# completion_item_kind: optional(CompletionKindValues), +# context_support: optional(boolean()), +# insert_text_mode: optional(InsertTextMode), +# completion_list: optional(CompletionList) +# end + +# defmodule Hover.ClientCapabilities do +# use Proto + +# deftype dynamic_registration: optional(boolean()), +# content_format: optional(list_of(MarkupKind)) +# end + +# defmodule SignatureHelp.ClientCapabilities do +# use Proto + +# defmodule SignatureInformation do +# defmodule ParameterInformation do +# use Proto +# deftype label_offset_support: optional(boolean()) +# end + +# use Proto + +# deftype documentation_format: optional(list_of(MarkupKind)), +# 
parameter_information: optional(ParameterInformation), +# active_parameter_support: optional(boolean()) +# end + +# deftype dynamic_registration: optional(boolean()), +# signature_information: optional(SignatureInformation), +# context_support: optional(boolean()) +# end + +# defmodule TextDocument.Capabilities do +# use Proto + +# deftype syncronization: optional(TextDocumentSync.ClientCapabilities), +# completion: optional(Completion.ClientCapabilities), +# hover: optional(Hover.ClientCapabilities), +# signature_help: optional(SignatureHelp.ClientCapabilities) +# end + +# defmodule GeneralCapabilities do +# use Proto +# end + +# defmodule ClientCapabilities do +# use Proto + +# deftype workspace: WorkspaceCapabilities, +# text_document: TextDocument.Capabilities, +# # window: WindowCapabilities, +# general: optional(GeneralCapabilities) +# end + +# defmodule ServerInfo do +# use Proto + +# deftype name: optional(string()), +# version: optional(string()) +# end + +# defmodule ServerCapabilities do +# use Proto +# deftype position_encoding_kind: optional(PositionEncodingKind) +# end + +# defmodule InitializeParams do +# use Proto +# deftype root_uri: uri(), capabilities: map_of(any()) +# end + +# defmodule WorkspaceFolder do +# use Proto +# deftype uri: uri(), name: string() +# end + +# defmodule Command do +# use Proto + +# deftype title: string(), +# command: string(), +# arguments: optional(list_of(any())) +# end + +# defmodule CodeActionKind do +# use Proto + +# defenum empty: "", +# quick_fix: "quickfix", +# refactor: "refactor", +# refactor_extract: "refactor.extract", +# refactor_inline: "refactor.inline", +# refactor_rewrite: "refactor.rewrite", +# source: "source", +# source_organize_imports: "source.organizeImports", +# source_fix_all: "source.fixAll" +# end + +# defmodule CodeActionTriggerKind do +# use Proto + +# defenum invoked: 1, +# automatic: 2 +# end + +# defmodule CodeActionContext do +# use Proto + +# deftype diagnostics: list_of(Diagnostic), +# 
only: optional(list_of(CodeActionKind)), +# trigger_kind: optional(CodeActionTriggerKind) +# end + +# defmodule CodeAction do +# use Proto + +# deftype title: string(), +# kind: optional(CodeActionKind), +# diagnostics: optional(list_of(Diagnostic)), +# is_preferred: optional(boolean()), +# edit: optional(WorkspaceEdit), +# command: optional(Command), +# data: optional(any()) +# end +# end diff --git a/apps/protocol/mix.exs b/apps/protocol/mix.exs new file mode 100644 index 000000000..98b235086 --- /dev/null +++ b/apps/protocol/mix.exs @@ -0,0 +1,32 @@ +defmodule Lexical.Protocol.MixProject do + use Mix.Project + + def project do + [ + app: :protocol, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + {:jason, "~> 1.4", optional: true}, + {:common, in_umbrella: true} + ] + end +end diff --git a/apps/protocol/test/lexical/protocol_test.exs b/apps/protocol/test/lexical/protocol_test.exs new file mode 100644 index 000000000..ee7e0476e --- /dev/null +++ b/apps/protocol/test/lexical/protocol_test.exs @@ -0,0 +1,8 @@ +defmodule Lexical.ProtocolTest do + use ExUnit.Case + doctest Lexical.Protocol + + test "greets the world" do + assert Lexical.Protocol.hello() == :world + end +end diff --git a/apps/protocol/test/test_helper.exs b/apps/protocol/test/test_helper.exs new file mode 100644 index 000000000..869559e70 --- /dev/null +++ b/apps/protocol/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start() diff --git a/apps/remote_control/.formatter.exs b/apps/remote_control/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/.gitignore b/apps/remote_control/.gitignore new file mode 100644 index 000000000..1e24e82dc --- /dev/null +++ b/apps/remote_control/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +remote_control-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/.iex.exs b/apps/remote_control/.iex.exs new file mode 100644 index 000000000..f11fc43ab --- /dev/null +++ b/apps/remote_control/.iex.exs @@ -0,0 +1,17 @@ +alias Lexical.Project +alias Lexical.RemoteControl + +other_project = + [ + File.cwd!(), + "..", + "..", + "..", + "eakins" + ] + |> Path.join() + |> Path.expand() + +project = Lexical.Project.new("file://#{other_project}") + +RemoteControl.start_link(project, self()) diff --git a/apps/remote_control/.tool-versions b/apps/remote_control/.tool-versions new file mode 100644 index 000000000..e67e68a3b --- /dev/null +++ b/apps/remote_control/.tool-versions @@ -0,0 +1,2 @@ +elixir 1.14.3-otp-25 +erlang 25.2.1 diff --git a/apps/remote_control/README.md b/apps/remote_control/README.md new file mode 100644 index 000000000..8b0c7cbf6 --- /dev/null +++ b/apps/remote_control/README.md @@ -0,0 +1,21 @@ +# Lexical.RemoteControl + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `remote_control` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:remote_control, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/remote_control/lib/lexical/remote_control.ex b/apps/remote_control/lib/lexical/remote_control.ex new file mode 100644 index 000000000..765a93179 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control.ex @@ -0,0 +1,156 @@ +defmodule Lexical.RemoteControl do + @moduledoc """ + The remote control boots another elixir application in a separate VM, injects + the remote control application into it and allows the language server to execute tasks in the + context of the remote VM. 
+ """ + + alias Lexical.Project + + @allow_list [ + # The paths with the slash match in a release + "/elixir-", + "/erlang/", + "/mix-", + "common", + "compiler", + "hex", + "iex", + "kernel", + "logger-", + "mix", + "protocol", + "remote_control", + "sasl", + "syntax-tools" + ] + + @localhost_ip {0, 0, 0, 0} + @localhost_string '127.0.0.1' + + def start_link(%Project{} = project, project_listener) do + entropy = :rand.uniform(65536) + + ensure_started(entropy) + + node_name = String.to_charlist("#{Project.name(project)}") + + erl_args = + erl_args([ + "-loader inet", + "-hosts 127.0.0.1", + "-setcookie #{Node.get_cookie()}", + "-sbwt none", + "-noshell" + ]) + + with {:ok, node} <- :slave.start_link('127.0.0.1', node_name, erl_args), + :ok <- :rpc.call(node, :code, :add_paths, [code_paths()]), + :ok <- :rpc.call(node, __MODULE__, :set_project, [project]), + :ok <- :rpc.call(node, __MODULE__, :set_project_listener_pid, [project_listener]), + :ok <- :rpc.call(node, File, :cd, [Project.root_path(project)]), + {:ok, _} <- :rpc.call(node, Application, :ensure_all_started, [:elixir]), + {:ok, _} <- :rpc.call(node, Application, :ensure_all_started, [:logger]), + {:ok, _} <- :rpc.call(node, Application, :ensure_all_started, [:mix]), + {:ok, _} <- :rpc.call(node, Application, :ensure_all_started, [:remote_control]), + {:ok, _} <- :rpc.call(node, Application, :ensure_all_started, [:runtime_tools]) do + {:ok, node} + end + end + + def with_lock(lock_type, func) do + :global.trans({lock_type, self()}, func) + end + + def notify_listener(message) do + send(project_listener_pid(), message) + end + + def project_node? 
do + !!:persistent_term.get({__MODULE__, :project}, false) + end + + def get_project do + :persistent_term.get({__MODULE__, :project}) + end + + def project_listener_pid do + :persistent_term.get({__MODULE__, :project_listener_pid}) + end + + def set_project_listener_pid(listener_pid) do + :persistent_term.put({__MODULE__, :project_listener_pid}, listener_pid) + end + + def set_project(%Project{} = project) do + :persistent_term.put({__MODULE__, :project}, project) + end + + def stop(%Project{} = project) do + project + |> node_name() + |> :slave.stop() + end + + def call(%Project{} = project, m, f, a \\ []) do + project + |> node_name() + |> :erpc.call(m, f, a) + end + + defp node_name(%Project{} = project) do + :"#{Project.name(project)}@127.0.0.1" + end + + defp ensure_started(entropy) do + # boot server startup + start_boot_server = fn -> + # voodoo flag to generate a "started" atom flag + once("boot_server:started", fn -> + {:ok, _} = :erl_boot_server.start([@localhost_ip, {127, 0, 0, 1}]) + end) + + :ok + end + + # only ever handle the :erl_boot_server on the initial startup + case :net_kernel.start([:"manager-#{entropy}@127.0.0.1"]) do + # handle nodes that have already been started elsewhere + {:error, {{:already_started, _}, _}} -> start_boot_server.() + {:error, {:already_started, _}} -> start_boot_server.() + # handle the node being started + {:ok, _} -> start_boot_server.() + # pass anything else + anything -> anything + end + end + + defp once(flag, func) do + with_lock(flag, fn -> + case :persistent_term.get(flag, :missing) do + :missing -> + :persistent_term.put(flag, :present) + func.() + + _ -> + :ok + end + end) + end + + def code_paths do + for entry <- :code.get_path(), + entry_string = List.to_string(entry), + Enum.any?(@allow_list, &String.contains?(entry_string, &1)) do + entry + end + + # the allow-listed paths from the comprehension above are the return value + end + + defp erl_args(arg_list) do + arg_list + |> Enum.join(" ") + |> String.to_charlist() + end +end diff --git 
a/apps/remote_control/lib/lexical/remote_control/api.ex b/apps/remote_control/lib/lexical/remote_control/api.ex new file mode 100644 index 000000000..4ad153c98 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api.ex @@ -0,0 +1,17 @@ +defmodule Lexical.RemoteControl.Api do + alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Build + + defdelegate schedule_compile(project, force?), to: Build + defdelegate compile_source_file(project, source_file), to: Build + + def list_modules(%Project{} = project) do + RemoteControl.call(project, :code, :all_available) + end + + def formatter_for_file(%Project{} = project, path) do + {formatter, _} = RemoteControl.call(project, Mix.Tasks.Format, :formatter_for_file, [path]) + formatter + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/application.ex b/apps/remote_control/lib/lexical/remote_control/application.ex new file mode 100644 index 000000000..868020786 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/application.ex @@ -0,0 +1,24 @@ +defmodule Lexical.RemoteControl.Application do + # See https://hexdocs.pm/elixir/Application.html + # for more information on OTP Applications + @moduledoc false + + alias Lexical.RemoteControl + + use Application + + @impl true + def start(_type, _args) do + children = + if RemoteControl.project_node?() do + [RemoteControl.Build, RemoteControl.Build.CaptureServer] + else + [] + end + + # See https://hexdocs.pm/elixir/Supervisor.html + # for other strategies and supported options + opts = [strategy: :one_for_one, name: Lexical.RemoteControl.Supervisor] + Supervisor.start_link(children, opts) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build.ex b/apps/remote_control/lib/lexical/remote_control/build.ex new file mode 100644 index 000000000..d422df718 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build.ex @@ -0,0 +1,228 @@ +defmodule Lexical.RemoteControl.Build do + 
alias Lexical.RemoteControl.Build + alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Messages + alias Lexical.SourceFile + + require Logger + use GenServer + + import Messages + import Build.CaptureIO + + # Public interface + + def schedule_compile(%Project{} = project, force? \\ false) do + RemoteControl.call(project, GenServer, :cast, [__MODULE__, {:compile, force?}]) + end + + def compile_source_file(%Project{} = project, %SourceFile{} = source_file) do + RemoteControl.call(project, GenServer, :cast, [__MODULE__, {:compile_file, source_file}]) + end + + def with_lock(func) do + RemoteControl.with_lock(__MODULE__, func) + end + + # GenServer Callbacks + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def init([]) do + project = RemoteControl.get_project() + + with :ok <- set_compiler_options(), + {:ok, _mix_module} <- load_mix_exs(project), + {:ok, deps} <- project_deps(), + :ok <- :code.add_pathsz(deps) do + {:ok, project} + end + end + + def handle_cast({:compile, force?}, %Project{} = project) do + {elapsed_us, result} = :timer.tc(fn -> safe_compile_project(force?) 
end) + elapsed_ms = to_ms(elapsed_us) + + message = + case result do + :ok -> + project_compiled(status: :success, project: project, elapsed_ms: elapsed_ms) + + {:ok, diagnostics} -> + project_compiled( + status: :success, + project: project, + elapsed_ms: elapsed_ms, + diagnostics: List.wrap(diagnostics) + ) + + {:error, diagnostics} -> + project_compiled( + status: :error, + project: project, + elapsed_ms: elapsed_ms, + diagnostics: List.wrap(diagnostics) + ) + end + + RemoteControl.notify_listener(message) + {:noreply, project} + end + + def handle_cast({:compile_file, %SourceFile{} = source_file}, %Project{} = project) do + {elapsed_us, result} = :timer.tc(fn -> safe_compile(source_file) end) + elapsed_ms = to_ms(elapsed_us) + + message = + case result do + :ok -> + file_compiled( + status: :success, + project: project, + source_file: source_file, + elapsed_ms: elapsed_ms + ) + + {:ok, diagnostics} -> + file_compiled( + status: :success, + project: project, + source_file: source_file, + elapsed_ms: elapsed_ms, + diagnostics: List.wrap(diagnostics) + ) + + {:error, diagnostics} -> + file_compiled( + status: :error, + project: project, + source_file: source_file, + elapsed_ms: elapsed_ms, + diagnostics: List.wrap(diagnostics) + ) + end + + RemoteControl.notify_listener(message) + + {:noreply, project} + end + + def handle_info(_, %Project{} = project) do + {:noreply, project} + end + + # Private + defp set_compiler_options do + Code.compiler_options( + parser_options: parser_options(), + tracers: [RemoteControl.CompileTracer], + warnings_as_errors: true + ) + + :ok + end + + defp parser_options do + [columns: true, token_metadata: true] + end + + defp find_mix_exs(%Project{} = project) do + with path when is_binary(path) <- Project.mix_exs_path(project), + true <- File.exists?(path) do + {:ok, path} + else + _ -> + {:error, :no_mix_exs} + end + end + + defp load_mix_exs(%Project{} = project) do + with {:ok, mix_exs_path} <- find_mix_exs(project), + {:ok, 
[project_module], _} <- Kernel.ParallelCompiler.compile([mix_exs_path]) do + {:ok, project_module} + end + end + + def project_deps do + build_root = Path.join(Mix.Project.build_path(), "lib") + + deps_paths = + for dep_dir <- File.ls!(build_root), + ebin_path = Path.join([build_root, dep_dir, "ebin"]), + File.exists?(ebin_path) do + String.to_charlist(ebin_path) + end + + {:ok, deps_paths} + end + + def safe_compile_project(force?) do + opts = ~w(--return-errors --ignore-module-conflicts --warnings-as-errors) + + opts = + if force? do + ["--force" | opts] + else + opts + end + + try do + Mix.Task.clear() + Mix.Task.run("local.hex", ["--force"]) + Mix.Task.run("local.rebar", ["--force"]) + + if force? do + Mix.Task.run("clean") + end + + compile_fun = fn -> + {result, _ignored_io} = + capture_io(fn -> + Mix.Task.run("compile", opts) + end) + + result + end + + case compile_fun.() do + {:error, _} = error -> + error + + {_, []} -> + :ok + + {status, [_ | _] = diagnostics} when status in [:ok, :noop] -> + {:ok, diagnostics} + end + rescue + e -> + {:error, e} + end + end + + defp safe_compile(%SourceFile{} = source_file) do + try do + capture_io(:stderr, fn -> + source_file + |> SourceFile.to_string() + |> Code.compile_string(source_file.path) + end) + rescue + e -> + {:error, [Build.Error.error_to_diagnostic(e)]} + else + {_, ""} -> + :ok + + {_, captured_warnings} -> + diagnostics = Build.Error.message_to_diagnostic(captured_warnings) + {:ok, diagnostics} + end + end + + defp to_ms(microseconds) do + microseconds / 1000 + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build/capture_io.ex b/apps/remote_control/lib/lexical/remote_control/build/capture_io.ex new file mode 100644 index 000000000..4c79632df --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build/capture_io.ex @@ -0,0 +1,105 @@ +defmodule Lexical.RemoteControl.Build.CaptureIO do + # Shamelessly stolen from ExUnit's CaptureIO + alias Lexical.RemoteControl.Build + 
@spec capture_io((() -> any())) :: String.t() + def capture_io(fun) when is_function(fun, 0) do + capture_io(:stdio, [], fun) + end + + @spec capture_io(atom(), (() -> any())) :: String.t() + def capture_io(device, fun) when is_atom(device) and is_function(fun, 0) do + capture_io(device, [], fun) + end + + @spec capture_io(String.t(), (() -> any())) :: String.t() + def capture_io(input, fun) when is_binary(input) and is_function(fun, 0) do + capture_io(:stdio, [input: input], fun) + end + + @spec capture_io(keyword(), (() -> any())) :: String.t() + def capture_io(options, fun) when is_list(options) and is_function(fun, 0) do + capture_io(:stdio, options, fun) + end + + @spec capture_io(atom(), String.t(), (() -> any())) :: String.t() + def capture_io(device, input, fun) + when is_atom(device) and is_binary(input) and is_function(fun, 0) do + capture_io(device, [input: input], fun) + end + + @spec capture_io(atom(), keyword(), (() -> any())) :: String.t() + def capture_io(device, options, fun) + when is_atom(device) and is_list(options) and is_function(fun, 0) do + do_capture_io(map_dev(device), options, fun) + end + + defp map_dev(:stdio), do: :standard_io + defp map_dev(:stderr), do: :standard_error + defp map_dev(other), do: other + + defp do_capture_io(:standard_io, options, fun) do + prompt_config = Keyword.get(options, :capture_prompt, true) + encoding = Keyword.get(options, :encoding, :unicode) + input = Keyword.get(options, :input, "") + + original_gl = Process.group_leader() + {:ok, capture_gl} = StringIO.open(input, capture_prompt: prompt_config, encoding: encoding) + + try do + Process.group_leader(self(), capture_gl) + do_capture_gl(capture_gl, fun) + after + Process.group_leader(self(), original_gl) + end + end + + defp do_capture_io(device, options, fun) do + input = Keyword.get(options, :input, "") + encoding = Keyword.get(options, :encoding, :unicode) + + case Build.CaptureServer.device_capture_on(device, encoding, input) do + {:ok, ref} -> + try do 
+ result = fun.() + output = Build.CaptureServer.device_output(device, ref) + {result, output} + after + Build.CaptureServer.device_capture_off(ref) + end + + {:error, :no_device} -> + raise "could not find IO device registered at #{inspect(device)}" + + {:error, {:changed_encoding, current_encoding}} -> + raise ArgumentError, """ + attempted to change the encoding for a currently captured device #{inspect(device)}. + + Currently set as: #{inspect(current_encoding)} + Given: #{inspect(encoding)} + + If you need to use multiple encodings on a captured device, you cannot \ + run your test asynchronously + """ + + {:error, :input_on_already_captured_device} -> + raise ArgumentError, + "attempted multiple captures on device #{inspect(device)} with input. " <> + "If you need to give an input to a captured device, you cannot run your test asynchronously" + end + end + + defp do_capture_gl(string_io, fun) do + try do + fun.() + catch + kind, reason -> + _ = StringIO.close(string_io) + :erlang.raise(kind, reason, __STACKTRACE__) + else + result -> + {:ok, {_input, output}} = StringIO.close(string_io) + {result, output} + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build/capture_server.ex b/apps/remote_control/lib/lexical/remote_control/build/capture_server.ex new file mode 100644 index 000000000..8818a4d46 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build/capture_server.ex @@ -0,0 +1,197 @@ +defmodule Lexical.RemoteControl.Build.CaptureServer do + @moduledoc false + @compile {:no_warn_undefined, Logger} + @timeout :infinity + @name __MODULE__ + + use GenServer + + def start_link(_opts) do + GenServer.start_link(__MODULE__, :ok, name: @name) + end + + def device_capture_on(name, encoding, input) do + GenServer.call(@name, {:device_capture_on, name, encoding, input}, @timeout) + end + + def device_output(name, ref) do + GenServer.call(@name, {:device_output, name, ref}, @timeout) + end + + def device_capture_off(ref) do + 
defmodule Lexical.RemoteControl.Build.CaptureServer do
  @moduledoc false
  # Allows the module to compile when Logger is absent; Logger is only
  # touched by the log-capture API.
  @compile {:no_warn_undefined, Logger}
  @timeout :infinity
  @name __MODULE__

  use GenServer

  ## Client API

  def start_link(_opts) do
    GenServer.start_link(__MODULE__, :ok, name: @name)
  end

  # Begins capturing the IO device registered under `name`.
  # Returns `{:ok, ref}` where `ref` identifies this particular capture.
  def device_capture_on(name, encoding, input) do
    GenServer.call(@name, {:device_capture_on, name, encoding, input}, @timeout)
  end

  # Returns everything written to the captured device since `ref` attached.
  def device_output(name, ref) do
    GenServer.call(@name, {:device_output, name, ref}, @timeout)
  end

  def device_capture_off(ref) do
    GenServer.call(@name, {:device_capture_off, ref}, @timeout)
  end

  def log_capture_on(pid) do
    GenServer.call(@name, {:log_capture_on, pid}, @timeout)
  end

  def log_capture_off(ref) do
    GenServer.call(@name, {:log_capture_off, ref}, @timeout)
  end

  ## Callbacks

  @impl true
  def init(:ok) do
    initial_state = %{
      devices: %{},
      log_captures: %{},
      log_status: nil
    }

    {:ok, initial_state}
  end

  @impl true
  def handle_call({:device_capture_on, name, encoding, input}, {caller, _tag}, state) do
    capture_device(name, encoding, input, state, caller)
  end

  def handle_call({:device_output, name, ref}, _from, state) do
    device = Map.fetch!(state.devices, name)
    {_input, contents} = StringIO.contents(device.pid)
    # Each ref remembers how much output existed when it attached, so it
    # only ever sees output produced after its own capture began.
    {_owner, offset} = Map.fetch!(device.refs, ref)
    captured = binary_part(contents, offset, byte_size(contents) - offset)
    {:reply, captured, state}
  end

  def handle_call({:device_capture_off, ref}, _from, state) do
    {:reply, :ok, release_device(ref, state)}
  end

  def handle_call({:log_capture_on, pid}, _from, state) do
    ref = Process.monitor(pid)
    log_captures = Map.put(state.log_captures, ref, true)

    # The console backend is detached only when the first capture starts;
    # its removal status is kept so it is only re-added later if removal
    # actually succeeded.
    if map_size(log_captures) == 1 do
      status = Logger.remove_backend(:console)
      {:reply, ref, %{state | log_captures: log_captures, log_status: status}}
    else
      {:reply, ref, %{state | log_captures: log_captures}}
    end
  end

  def handle_call({:log_capture_off, ref}, _from, state) do
    Process.demonitor(ref, [:flush])
    {:reply, :ok, remove_log_capture(ref, state)}
  end

  @impl true
  def handle_info({:DOWN, ref, _, _, _}, state) do
    # A capturing process died: drop whichever kind of capture (log or
    # device) its monitor ref was attached to.
    state = remove_log_capture(ref, state)
    {:noreply, release_device(ref, state)}
  end

  ## Device capture internals

  defp capture_device(name, encoding, input, state, caller) do
    case state.devices do
      %{^name => device} ->
        dead_refs = for {ref, {pid, _offset}} <- device.refs, not Process.alive?(pid), do: ref

        if dead_refs == [] do
          capture_existing_device(name, encoding, input, state, caller)
        else
          # Reap refs whose owners died without demonitoring, then retry.
          state = Enum.reduce(dead_refs, state, &release_device/2)
          capture_device(name, encoding, input, state, caller)
        end

      %{} ->
        capture_new_device(name, encoding, input, state, caller)
    end
  end

  defp capture_existing_device(name, encoding, input, state, caller) do
    case Map.fetch!(state.devices, name) do
      %{input?: input?} when input? or input != "" ->
        # Two concurrent captures cannot both feed input to one device.
        {:reply, {:error, :input_on_already_captured_device}, state}

      %{encoding: ^encoding} = device ->
        {_input, contents} = StringIO.contents(device.pid)
        ref = Process.monitor(caller)
        state = put_in(state.devices[name].refs[ref], {caller, byte_size(contents)})
        {:reply, {:ok, ref}, state}

      %{encoding: other_encoding} ->
        {:reply, {:error, {:changed_encoding, other_encoding}}, state}
    end
  end

  defp capture_new_device(name, encoding, input, state, caller) do
    {:ok, string_io} = StringIO.open(input, encoding: encoding)
    original_pid = Process.whereis(name)

    try do
      # Swap the registered device for our StringIO; unregister raises
      # if `name` is not actually a registered device.
      Process.unregister(name)
      Process.register(string_io, name)
    rescue
      ArgumentError ->
        {:reply, {:error, :no_device}, state}
    else
      _ ->
        ref = Process.monitor(caller)

        device = %{
          original_pid: original_pid,
          pid: string_io,
          refs: %{ref => {caller, 0}},
          encoding: encoding,
          input?: input != ""
        }

        {:reply, {:ok, ref}, put_in(state.devices[name], device)}
    end
  end

  defp release_device(ref, %{devices: devices} = state) do
    Process.demonitor(ref, [:flush])

    case Enum.find(devices, fn {_name, device} -> Map.has_key?(device.refs, ref) end) do
      {name, device} ->
        case Map.delete(device.refs, ref) do
          refs when map_size(refs) == 0 ->
            # Last capture released: restore the real device and clean up.
            revert_device_to_original_pid(name, device.original_pid)
            close_string_io(device.pid)
            %{state | devices: Map.delete(devices, name)}

          refs ->
            put_in(state.devices[name].refs, refs)
        end

      _ ->
        state
    end
  end

  defp revert_device_to_original_pid(name, pid) do
    Process.unregister(name)
  rescue
    ArgumentError -> nil
  after
    Process.register(pid, name)
  end

  defp close_string_io(pid) do
    StringIO.close(pid)
  rescue
    ArgumentError -> nil
  end

  ## Log capture internals

  defp remove_log_capture(ref, %{log_captures: refs} = state) do
    case Map.pop(refs, ref, false) do
      {true, remaining} ->
        maybe_add_console(remaining, state.log_status)
        %{state | log_captures: remaining}

      {false, _refs} ->
        state
    end
  end

  defp maybe_add_console(refs, status) do
    # Re-attach the console backend only once the last log capture ends,
    # and only if we were the ones who successfully removed it.
    if status == :ok and map_size(refs) == 0 do
      Logger.add_backend(:console, flush: true)
    end
  end
end
defmodule Lexical.RemoteControl.Build.Error do
  @moduledoc """
  Converts compiler exceptions and captured warning output into
  `Mix.Task.Compiler.Diagnostic` structs.

  Positions are emitted zero-based (LSP style) via `lsp_position/2`.
  """

  alias Mix.Task.Compiler.Diagnostic

  @doc """
  Converts a compile-time exception into a `Diagnostic`.

  Handles `SyntaxError` and `TokenMissingError` (both carry a line and a
  column) and `CompileError` (line only; the column defaults to 0).
  """
  def error_to_diagnostic(%SyntaxError{} = syntax_error) do
    %Diagnostic{
      message: syntax_error.description,
      position: lsp_position(syntax_error.line, syntax_error.column),
      compiler_name: "elixirc",
      file: syntax_error.file,
      severity: :error
    }
  end

  def error_to_diagnostic(%TokenMissingError{} = token_error) do
    %Diagnostic{
      message: token_error.description,
      position: lsp_position(token_error.line, token_error.column),
      compiler_name: "elixirc",
      file: token_error.file,
      severity: :error
    }
  end

  def error_to_diagnostic(%CompileError{} = compile_error) do
    %Diagnostic{
      message: compile_error.description,
      position: lsp_position(compile_error.line, 0),
      compiler_name: "elixirc",
      file: compile_error.file,
      severity: :error
    }
  end

  @doc """
  Splits captured compiler warning output into a list of `Diagnostic`s.

  The captured text contains one warning per blank-line-separated chunk;
  chunks that cannot be parsed are dropped.
  """
  def message_to_diagnostic(message_string) do
    message_string
    |> String.split("\n\n")
    |> Enum.map(&do_message_to_diagnostic/1)
    |> Enum.reject(&is_nil/1)
  end

  defp do_message_to_diagnostic("") do
    nil
  end

  defp do_message_to_diagnostic(message) do
    # A parseable warning chunk is exactly two lines: the message itself
    # followed by the indented location. Anything else (for example a
    # multi-line warning body) is skipped rather than crashing the build.
    # (Previously this was a hard match that raised on other shapes.)
    case String.split(message, "\n") do
      [message, location] ->
        case parse_location(location) do
          {:ok, {file, line, column, mfa}} ->
            %Diagnostic{
              compiler_name: "elixirc",
              details: mfa,
              message: message,
              file: file,
              position: lsp_position(line, column),
              severity: :warning
            }

          _ ->
            nil
        end

      _ ->
        nil
    end
  end

  # This regex captures file / line based locations (file.ex:3)
  @file_and_line_re ~r/\s+([^:]+):(\d+)/
  # This regex matches the more detailed locations that contain the
  # file, line, and the mfa of the error
  @location_re ~r/\s+([^:]+):(\d+):\s+([^\.]+)\.(\w+)\/(\d+)?/

  @doc """
  Parses a warning location line.

  Returns `{:ok, {file, line, column, mfa_or_nil}}`, where `mfa_or_nil` is
  a `{module, function, arity}` tuple when the location carries one, or
  `:error` when the line matches neither location format.
  """
  def parse_location(location_string) do
    with [] <- Regex.scan(@location_re, location_string),
         [[_, file, line]] <- Regex.scan(@file_and_line_re, location_string) do
      {:ok, {file, String.to_integer(line), 0, nil}}
    else
      [[_, file, line, module, function, arity]] ->
        # Atom creation is bounded here: names come from the compiler's own
        # warning output, not arbitrary user input.
        mfa = {Module.concat([module]), String.to_atom(function), String.to_integer(arity)}
        {:ok, {file, String.to_integer(line), 0, mfa}}

      _no_match ->
        # Previously this branch also ran `IO.inspect/2` — a debug leftover
        # that polluted (and could corrupt) captured compiler output.
        :error
    end
  end

  # Converts 1-based compiler positions to the 0-based positions LSP uses.
  defp lsp_position(line, column) do
    {max(0, line - 1), max(0, column - 1)}
  end
end
defmodule Lexical.RemoteControl.CompileTracer do
  @moduledoc """
  Compiler tracer that reports freshly compiled modules to the
  remote-control listener.
  """

  alias Lexical.RemoteControl

  import RemoteControl.Messages

  @doc """
  Handles compiler trace events.

  When a module finishes compiling (`:on_module`), sends a `module_updated`
  message carrying the module's name plus its exported functions and
  macros. Every other trace event is ignored.
  """
  def trace({:on_module, _bytecode, _ignored}, env) do
    compiled_module = env.module

    message =
      module_updated(
        name: compiled_module,
        functions: compiled_module.__info__(:functions),
        macros: compiled_module.__info__(:macros)
      )

    RemoteControl.notify_listener(message)
    :ok
  end

  def trace(_event, _env) do
    :ok
  end
end
a/apps/remote_control/lib/lexical/remote_control/messages.ex b/apps/remote_control/lib/lexical/remote_control/messages.ex new file mode 100644 index 000000000..7355ca69c --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/messages.ex @@ -0,0 +1,13 @@ +defmodule Lexical.RemoteControl.Messages do + import Record + defrecord :project_compiled, project: nil, status: :successful, diagnostics: [], elapsed_ms: 0 + + defrecord :file_compiled, + project: nil, + source_file: nil, + status: :successful, + diagnostics: [], + elapsed_ms: 0 + + defrecord :module_updated, name: nil, functions: [], macros: [] +end diff --git a/apps/remote_control/lib/lexical/remote_control/project_metadata.ex b/apps/remote_control/lib/lexical/remote_control/project_metadata.ex new file mode 100644 index 000000000..e69de29bb diff --git a/apps/remote_control/mix.exs b/apps/remote_control/mix.exs new file mode 100644 index 000000000..c8bb867d3 --- /dev/null +++ b/apps/remote_control/mix.exs @@ -0,0 +1,31 @@ +defmodule Lexical.RemoteControl.MixProject do + use Mix.Project + + def project do + [ + app: :remote_control, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + def application do + [ + extra_applications: [:logger], + mod: {Lexical.RemoteControl.Application, []} + ] + end + + defp deps do + [ + {:common, in_umbrella: true}, + {:jason, "~> 1.4", optional: true} + ] + end +end diff --git a/apps/remote_control/test/fixtures/compilation_errors/.formatter.exs b/apps/remote_control/test/fixtures/compilation_errors/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_errors/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git 
a/apps/remote_control/test/fixtures/compilation_errors/.gitignore b/apps/remote_control/test/fixtures/compilation_errors/.gitignore new file mode 100644 index 000000000..5b5619e04 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_errors/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +compilation_errors-*.tar + +# Temporary files, for example, from tests. +/tmp/ diff --git a/apps/remote_control/test/fixtures/compilation_errors/README.md b/apps/remote_control/test/fixtures/compilation_errors/README.md new file mode 100644 index 000000000..8866f904b --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_errors/README.md @@ -0,0 +1,21 @@ +# CompilationErrors + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `compilation_errors` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:compilation_errors, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . 
+ diff --git a/apps/remote_control/test/fixtures/compilation_errors/lib/compilation_errors.ex b/apps/remote_control/test/fixtures/compilation_errors/lib/compilation_errors.ex new file mode 100644 index 000000000..3e60e96f0 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_errors/lib/compilation_errors.ex @@ -0,0 +1,5 @@ +defmodule CompilationErrors do + def syntax_error do + %{ + end +end diff --git a/apps/remote_control/test/fixtures/compilation_errors/mix.exs b/apps/remote_control/test/fixtures/compilation_errors/mix.exs new file mode 100644 index 000000000..12ac6bc93 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_errors/mix.exs @@ -0,0 +1,28 @@ +defmodule CompilationErrors.MixProject do + use Mix.Project + + def project do + [ + app: :compilation_errors, + version: "0.1.0", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + ] + end +end diff --git a/apps/remote_control/test/fixtures/compilation_warnings/.formatter.exs b/apps/remote_control/test/fixtures/compilation_warnings/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_warnings/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/test/fixtures/compilation_warnings/.gitignore b/apps/remote_control/test/fixtures/compilation_warnings/.gitignore new file mode 100644 index 000000000..3af3ef704 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_warnings/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +compilation_warnings-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/test/fixtures/compilation_warnings/README.md b/apps/remote_control/test/fixtures/compilation_warnings/README.md new file mode 100644 index 000000000..9be5f4331 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_warnings/README.md @@ -0,0 +1,21 @@ +# CompilationWarnings + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `compilation_warnings` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:compilation_warnings, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/remote_control/test/fixtures/compilation_warnings/lib/unused_variable.ex b/apps/remote_control/test/fixtures/compilation_warnings/lib/unused_variable.ex new file mode 100644 index 000000000..549c63aae --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_warnings/lib/unused_variable.ex @@ -0,0 +1,9 @@ +defmodule UnusedVariable do + def unused(b) do + end + + def unused_local do + a = 3 + 5 + end +end diff --git a/apps/remote_control/test/fixtures/compilation_warnings/mix.exs b/apps/remote_control/test/fixtures/compilation_warnings/mix.exs new file mode 100644 index 000000000..943471a3a --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_warnings/mix.exs @@ -0,0 +1,28 @@ +defmodule CompilationWarnings.MixProject do + use Mix.Project + + def project do + [ + app: :compilation_warnings, + version: "0.1.0", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + ] + end +end diff --git a/apps/remote_control/test/fixtures/project_metadata/.formatter.exs b/apps/remote_control/test/fixtures/project_metadata/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/project_metadata/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/test/fixtures/project_metadata/.gitignore b/apps/remote_control/test/fixtures/project_metadata/.gitignore new file mode 100644 index 000000000..45122b805 --- /dev/null +++ b/apps/remote_control/test/fixtures/project_metadata/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +project_metadata-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/test/fixtures/project_metadata/README.md b/apps/remote_control/test/fixtures/project_metadata/README.md new file mode 100644 index 000000000..9ab9633b2 --- /dev/null +++ b/apps/remote_control/test/fixtures/project_metadata/README.md @@ -0,0 +1,21 @@ +# ProjectMetadata + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `project_metadata` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:project_metadata, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/remote_control/test/fixtures/project_metadata/lib/project_metadata.ex b/apps/remote_control/test/fixtures/project_metadata/lib/project_metadata.ex new file mode 100644 index 000000000..a4733bd9b --- /dev/null +++ b/apps/remote_control/test/fixtures/project_metadata/lib/project_metadata.ex @@ -0,0 +1,12 @@ +defmodule ProjectMetadata do + def zero_arity do + end + + def one_arity(first) do + first + end + + def two_arity(first, second) do + {first, second} + end +end diff --git a/apps/remote_control/test/fixtures/project_metadata/mix.exs b/apps/remote_control/test/fixtures/project_metadata/mix.exs new file mode 100644 index 000000000..414cc3456 --- /dev/null +++ b/apps/remote_control/test/fixtures/project_metadata/mix.exs @@ -0,0 +1,28 @@ +defmodule ProjectMetadata.MixProject do + use Mix.Project + + def project do + [ + app: :project_metadata, + version: "0.1.0", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. 
+ defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + ] + end +end diff --git a/apps/remote_control/test/fixtures/umbrella/.formatter.exs b/apps/remote_control/test/fixtures/umbrella/.formatter.exs new file mode 100644 index 000000000..90a08535c --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/.formatter.exs @@ -0,0 +1,5 @@ +# Used by "mix format" +[ + inputs: ["mix.exs", "config/*.exs"], + subdirectories: ["apps/*"] +] diff --git a/apps/remote_control/test/fixtures/umbrella/.gitignore b/apps/remote_control/test/fixtures/umbrella/.gitignore new file mode 100644 index 000000000..2de045d57 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/.gitignore @@ -0,0 +1,23 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/test/fixtures/umbrella/README.md b/apps/remote_control/test/fixtures/umbrella/README.md new file mode 100644 index 000000000..0d1998ceb --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/README.md @@ -0,0 +1,4 @@ +# Umbrella + +**TODO: Add description** + diff --git a/apps/remote_control/test/fixtures/umbrella/apps/first/.formatter.exs b/apps/remote_control/test/fixtures/umbrella/apps/first/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/first/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/test/fixtures/umbrella/apps/first/.gitignore b/apps/remote_control/test/fixtures/umbrella/apps/first/.gitignore new file mode 100644 index 000000000..e5b271dde --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/first/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +first-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/test/fixtures/umbrella/apps/first/README.md b/apps/remote_control/test/fixtures/umbrella/apps/first/README.md new file mode 100644 index 000000000..b4c7aef50 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/first/README.md @@ -0,0 +1,21 @@ +# Umbrella.First + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `first` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:first, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/remote_control/test/fixtures/umbrella/apps/first/lib/umbrella/first.ex b/apps/remote_control/test/fixtures/umbrella/apps/first/lib/umbrella/first.ex new file mode 100644 index 000000000..2577a66f8 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/first/lib/umbrella/first.ex @@ -0,0 +1,12 @@ +defmodule Umbrella.First do + def arity_0 do + end + + def arity_1(a) do + a + end + + def arity_2(a, b) do + {a, b} + end +end diff --git a/apps/remote_control/test/fixtures/umbrella/apps/first/mix.exs b/apps/remote_control/test/fixtures/umbrella/apps/first/mix.exs new file mode 100644 index 000000000..beafb77b6 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/first/mix.exs @@ -0,0 +1,33 @@ +defmodule Umbrella.First.MixProject do + use Mix.Project + + def project do + [ + app: :first, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. 
+ def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. + defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, + # {:sibling_app_in_umbrella, in_umbrella: true} + ] + end +end diff --git a/apps/remote_control/test/fixtures/umbrella/apps/second/.formatter.exs b/apps/remote_control/test/fixtures/umbrella/apps/second/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/second/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/test/fixtures/umbrella/apps/second/.gitignore b/apps/remote_control/test/fixtures/umbrella/apps/second/.gitignore new file mode 100644 index 000000000..c7a453110 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/second/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +second-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/remote_control/test/fixtures/umbrella/apps/second/README.md b/apps/remote_control/test/fixtures/umbrella/apps/second/README.md new file mode 100644 index 000000000..8a03f0434 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/second/README.md @@ -0,0 +1,21 @@ +# Umbrella.Second + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `second` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:second, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/remote_control/test/fixtures/umbrella/apps/second/lib/umbrella/second.ex b/apps/remote_control/test/fixtures/umbrella/apps/second/lib/umbrella/second.ex new file mode 100644 index 000000000..677e2fa2f --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/second/lib/umbrella/second.ex @@ -0,0 +1,12 @@ +defmodule Umbrella.Second do + def arity_0 do + end + + def arity_1(a) do + a + end + + def arity_2(a, b) do + {a, b} + end +end diff --git a/apps/remote_control/test/fixtures/umbrella/apps/second/mix.exs b/apps/remote_control/test/fixtures/umbrella/apps/second/mix.exs new file mode 100644 index 000000000..3d6587ff4 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/apps/second/mix.exs @@ -0,0 +1,33 @@ +defmodule Umbrella.Second.MixProject do + use Mix.Project + + def project do + [ + app: :second, + version: "0.1.0", + build_path: "../../_build", + config_path: "../../config/config.exs", + deps_path: "../../deps", + lockfile: "../../mix.lock", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. 
+ def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. + defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, + # {:sibling_app_in_umbrella, in_umbrella: true} + ] + end +end diff --git a/apps/remote_control/test/fixtures/umbrella/config/config.exs b/apps/remote_control/test/fixtures/umbrella/config/config.exs new file mode 100644 index 000000000..ab23e800e --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/config/config.exs @@ -0,0 +1,18 @@ +# This file is responsible for configuring your umbrella +# and **all applications** and their dependencies with the +# help of the Config module. +# +# Note that all applications in your umbrella share the +# same configuration and dependencies, which is why they +# all use the same configuration file. If you want different +# configurations or dependencies per app, it is best to +# move said applications out of the umbrella. +import Config + +# Sample configuration: +# +# config :logger, :console, +# level: :info, +# format: "$date $time [$level] $metadata$message\n", +# metadata: [:user_id] +# diff --git a/apps/remote_control/test/fixtures/umbrella/mix.exs b/apps/remote_control/test/fixtures/umbrella/mix.exs new file mode 100644 index 000000000..8732ae808 --- /dev/null +++ b/apps/remote_control/test/fixtures/umbrella/mix.exs @@ -0,0 +1,21 @@ +defmodule Umbrella.MixProject do + use Mix.Project + + def project do + [ + apps_path: "apps", + version: "0.1.0", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Dependencies listed here are available only for this + # project and cannot be accessed from applications inside + # the apps folder. + # + # Run "mix help deps" for examples and options. 
+ defp deps do + [] + end +end diff --git a/apps/remote_control/test/lexical/build_test.exs b/apps/remote_control/test/lexical/build_test.exs new file mode 100644 index 000000000..cd6e4d7b4 --- /dev/null +++ b/apps/remote_control/test/lexical/build_test.exs @@ -0,0 +1,353 @@ +defmodule Lexical.BuildTest do + alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.Messages + alias Lexical.SourceFile + alias Mix.Task.Compiler.Diagnostic + + import Messages + use ExUnit.Case + + def fixtures_dir do + [__ENV__.file, "..", "..", "fixtures"] + |> Path.join() + |> Path.expand() + end + + def compile_source_file(%Project{} = project, filename \\ "file.ex", source_code) do + uri = + project + |> Project.root_path() + |> Path.join(filename) + |> SourceFile.Path.to_uri() + + source = SourceFile.new(uri, source_code, 0) + Build.compile_source_file(project, source) + end + + def with_project(project_name) do + project_name = to_string(project_name) + fixture_dir = Path.join(fixtures_dir(), project_name) + project = Project.new("file://#{fixture_dir}") + + {:ok, _} = RemoteControl.start_link(project, self()) + + assert_receive module_updated(), 5000 + + on_exit(fn -> + :ok = RemoteControl.stop(project) + end) + + {:ok, project} + end + + def with_empty_module(%{project: project}) do + module = ~S[ + defmodule UnderTest do + end + ] + compile_source_file(project, module) + assert_receive file_compiled(), 5000 + :ok + end + + def with_metadata_project(_) do + {:ok, project} = with_project(:project_metadata) + {:ok, project: project} + end + + describe "compiling a project" do + test "sends a message when complete " do + {:ok, project} = with_project(:project_metadata) + Build.schedule_compile(project, true) + + assert_receive project_compiled(status: :success), 5000 + end + + test "receives metadata about the defined modules" do + {:ok, project} = with_project(:project_metadata) + + Build.schedule_compile(project, true) 
+ assert_receive module_updated(name: name, functions: functions), 5000 + assert name == ProjectMetadata + assert {:zero_arity, 0} in functions + assert {:one_arity, 1} in functions + assert {:two_arity, 2} in functions + end + end + + describe "compiling an umbrella project" do + test "it sends a message when compilation is complete" do + {:ok, project} = with_project(:umbrella) + Build.schedule_compile(project, true) + + assert_receive project_compiled(status: :success, diagnostics: []), 5000 + assert_receive module_updated(name: Umbrella.First, functions: functions) + + assert {:arity_0, 0} in functions + assert {:arity_1, 1} in functions + assert {:arity_2, 2} in functions + + assert_receive module_updated(name: Umbrella.Second, functions: functions), 500 + + assert {:arity_0, 0} in functions + assert {:arity_1, 1} in functions + assert {:arity_2, 2} in functions + end + end + + describe "compiling a project that has errors" do + test "it reports the errors" do + {:ok, project} = with_project(:compilation_errors) + Build.schedule_compile(project, true) + + assert_receive project_compiled(status: :error, diagnostics: diagnostics), 5000 + assert [%Diagnostic{}] = diagnostics + end + end + + describe "when compiling a project that has warnings" do + test "it reports them" do + {:ok, project} = with_project(:compilation_warnings) + Build.schedule_compile(project, true) + + assert_receive project_compiled(status: :error, diagnostics: diagnostics), 5000 + assert [%Diagnostic{}, %Diagnostic{}] = diagnostics + end + end + + describe "compiling source files" do + setup [:with_metadata_project, :with_empty_module] + + test "handles syntax errors", %{project: project} do + source = ~S[ + defmodule WithErrors do + def error do + %{,} + end + end + ] + compile_source_file(project, source) + assert_receive file_compiled(status: :error, diagnostics: [diagnostic]) + assert %Diagnostic{} = diagnostic + assert diagnostic.severity == :error + assert diagnostic.message =~ 
~S[syntax error before: ','] + assert diagnostic.position == {3, 14} + end + + test "handles missing token errors", %{project: project} do + source = ~S[%{foo: 3] + compile_source_file(project, source) + + assert_receive file_compiled(status: :error, diagnostics: [diagnostic]) + assert %Diagnostic{} = diagnostic + assert diagnostic.severity == :error + assert diagnostic.message =~ ~S[missing terminator: }] + assert diagnostic.position == {0, 8} + end + + test "handles compile errors", %{project: project} do + source = ~S[doesnt_exist()] + compile_source_file(project, source) + + assert_receive file_compiled(status: :error, diagnostics: [diagnostic]) + assert %Diagnostic{} = diagnostic + assert diagnostic.severity == :error + assert diagnostic.message =~ ~S[undefined function doesnt_exist/0] + assert diagnostic.position == {0, 0} + end + + test "reports unused variables", %{project: project} do + source = ~S[ + defmodule WithWarnings do + def error do + unused = 3 + end + end + ] + compile_source_file(project, source) + + assert_receive file_compiled(status: :success, diagnostics: [%Diagnostic{} = diagnostic]) + + assert diagnostic.severity == :warning + assert diagnostic.position == {3, 0} + assert diagnostic.message =~ ~S[warning: variable "unused" is unused] + assert diagnostic.details == {WithWarnings, :error, 0} + end + + test "reports missing parens", %{project: project} do + source = ~S[ + defmodule WithWarnings do + def error do + calc + end + + defp calc do + 3 + end + end + ] + compile_source_file(project, source) + assert_receive file_compiled(status: :success, diagnostics: [%Diagnostic{} = diagnostic]) + + assert diagnostic.severity == :warning + assert diagnostic.position == {3, 0} + + assert diagnostic.message =~ + ~S[warning: variable "calc" does not exist and is being expanded to "calc()"] + + assert diagnostic.details == {WithWarnings, :error, 0} + end + + test "reports unused defp functions", %{project: project} do + source = ~S[ + defmodule 
UnusedDefp do + defp unused do + end + end + ] + compile_source_file(project, source) + + assert_receive file_compiled(status: :success, diagnostics: [%Diagnostic{} = diagnostic]) + assert diagnostic.severity == :warning + assert diagnostic.position == {2, 0} + assert diagnostic.message =~ ~S[warning: function unused/0 is unused] + assert diagnostic.details == nil + end + + test "handles undefined usages", %{project: project} do + source = ~S[ + defmodule WithUndefinedFunction do + def error do + unknown_fn() + end + end + ] + compile_source_file(project, source) + + assert_receive file_compiled(status: :error, diagnostics: [diagnostic]) + assert diagnostic.severity == :error + assert diagnostic.position == {3, 0} + assert diagnostic.message =~ ~S[undefined function unknown_fn/0] + assert diagnostic.details == nil + end + + test "adding a new module notifies the listener", %{project: project} do + source = ~S[ + defmodule NewModule do + end + ] + + compile_source_file(project, source) + assert_receive module_updated(name: NewModule, functions: []) + end + + test "adding a function notifies the listener", %{project: project} do + source = ~S[ + defmodule UnderTest do + def added_function(a, b) do + a + b + end + end + ] + + compile_source_file(project, source) + assert_receive module_updated(name: UnderTest, functions: [added_function: 2]) + end + + test "removing a function notifies the listener", %{project: project} do + initial = ~S[ + defmodule Remove do + def remove_me do + end + end + ] + + removed = ~S[ + defmodule Remove do + end + ] + + compile_source_file(project, initial) + assert_receive module_updated() + + compile_source_file(project, removed) + assert_receive module_updated(name: Remove, functions: []) + end + + test "changing a function's arity notifies the listener", %{project: project} do + initial = ~S[ + defmodule ArityChange do + def arity(_) do + end + end + ] + compile_source_file(project, initial) + assert_receive module_updated(name: 
ArityChange, functions: [arity: 1]) + + changed = ~S[ + defmodule ArityChange do + def arity(_, _) do + end + end + ] + compile_source_file(project, changed) + assert_receive module_updated(name: ArityChange, functions: [arity: 2]) + end + + test "adding a macro notifies the listener", %{project: project} do + changed = ~S[ + defmodule UnderTest do + defmacro something(a) do + quote do + a + 1 + end + end + end + ] + compile_source_file(project, changed) + assert_receive module_updated(name: UnderTest, macros: [something: 1]) + end + + test "removing a macro notifies the listener", %{project: project} do + initial = ~S[ + defmodule RemoveMacro do + defmacro remove_me do + end + end + ] + + removed = ~S[ + defmodule RemoveMacro do + end + ] + + compile_source_file(project, initial) + assert_receive module_updated() + + compile_source_file(project, removed) + assert_receive module_updated(name: RemoveMacro, macros: []) + end + + test "changing a macro's arity notifies the listener", %{project: project} do + initial = ~S[ + defmodule ArityChange do + defmacro arity(_) do + end + end + ] + compile_source_file(project, initial) + assert_receive module_updated(name: ArityChange, macros: [arity: 1]) + + changed = ~S[ + defmodule ArityChange do + defmacro arity(_, _) do + end + end + ] + compile_source_file(project, changed) + assert_receive module_updated(name: ArityChange, macros: [arity: 2]) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control_test.exs b/apps/remote_control/test/lexical/remote_control_test.exs new file mode 100644 index 000000000..2ab40b88c --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control_test.exs @@ -0,0 +1,3 @@ +defmodule Lexical.RemoteControlTest do + use ExUnit.Case +end diff --git a/apps/remote_control/test/test_helper.exs b/apps/remote_control/test/test_helper.exs new file mode 100644 index 000000000..869559e70 --- /dev/null +++ b/apps/remote_control/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start() 
diff --git a/apps/server/.formatter.exs b/apps/server/.formatter.exs new file mode 100644 index 000000000..b840719c9 --- /dev/null +++ b/apps/server/.formatter.exs @@ -0,0 +1,5 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"], + import_deps: [:patch] +] diff --git a/apps/server/.gitignore b/apps/server/.gitignore new file mode 100644 index 000000000..84a9880dd --- /dev/null +++ b/apps/server/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +server-*.tar + +# Temporary files, for example, from tests. 
+/tmp/ diff --git a/apps/server/.iex.exs b/apps/server/.iex.exs new file mode 100644 index 000000000..033717f20 --- /dev/null +++ b/apps/server/.iex.exs @@ -0,0 +1,15 @@ +alias Lexical.Project +alias Lexical.RemoteControl + +other_project = + [ + File.cwd!(), + "..", + "..", + "..", + "eakins" + ] + |> Path.join() + |> Path.expand() + +project = Lexical.Project.new("file://#{other_project}") diff --git a/apps/server/README.md b/apps/server/README.md new file mode 100644 index 000000000..64f760e03 --- /dev/null +++ b/apps/server/README.md @@ -0,0 +1,21 @@ +# Lexical.Server + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `server` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:server, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . 
+ diff --git a/apps/server/lib/lexical/dialyzer.ex b/apps/server/lib/lexical/dialyzer.ex new file mode 100644 index 000000000..00c8838e3 --- /dev/null +++ b/apps/server/lib/lexical/dialyzer.ex @@ -0,0 +1,5 @@ +defmodule Lexical.Dialyzer do + def check_support do + :ok + end +end diff --git a/apps/server/lib/lexical/provider/code_action/replace_with_underscore.ex b/apps/server/lib/lexical/provider/code_action/replace_with_underscore.ex new file mode 100644 index 000000000..21bbab657 --- /dev/null +++ b/apps/server/lib/lexical/provider/code_action/replace_with_underscore.ex @@ -0,0 +1,72 @@ +defmodule Lexical.Provider.CodeAction.ReplaceWithUnderscore do + @moduledoc """ + A code action that prefixes unused variables with an underscore + """ + alias Lexical.CodeMod + alias Lexical.CodeMod.Ast + alias Lexical.Protocol.Requests.CodeAction + alias Lexical.Protocol.Types.CodeAction, as: CodeActionResult + alias Lexical.Protocol.Types.Diagnostic + alias Lexical.Protocol.Types.Workspace + alias Lexical.SourceFile + + @spec apply(CodeAction.t()) :: [CodeActionReply.t()] + def apply(%CodeAction{} = code_action) do + source_file = code_action.source_file + diagnostics = get_in(code_action, [:context, :diagnostics]) || [] + + Enum.flat_map(diagnostics, fn %Diagnostic{} = diagnostic -> + with {:ok, variable_name, one_based_line} <- extract_variable_and_line(diagnostic), + {:ok, reply} <- build_code_action(source_file, one_based_line, variable_name) do + [reply] + else + _ -> + [] + end + end) + end + + defp build_code_action(%SourceFile{} = source_file, one_based_line, variable_name) do + with {:ok, line_text} <- SourceFile.fetch_text_at(source_file, one_based_line), + {:ok, line_ast} <- Ast.from(line_text), + {:ok, text_edits} <- + CodeMod.ReplaceWithUnderscore.text_edits(line_text, line_ast, variable_name) do + case text_edits do + [] -> + :error + + [_ | _] -> + reply = + CodeActionResult.new( + title: "Rename to _#{variable_name}", + kind: :quick_fix, + edit: 
Workspace.Edit.new(changes: %{source_file.uri => text_edits}) + ) + + {:ok, reply} + end + end + end + + defp extract_variable_and_line(%Diagnostic{} = diagnostic) do + with {:ok, variable_name} <- extract_variable_name(diagnostic.message), + {:ok, line} <- extract_line(diagnostic) do + {:ok, variable_name, line} + end + end + + @variable_re ~r/variable "([^"]+)" is unused/ + defp extract_variable_name(message) do + case Regex.scan(@variable_re, message) do + [[_, variable_name]] -> + {:ok, String.to_atom(variable_name)} + + _ -> + :error + end + end + + defp extract_line(%Diagnostic{} = diagnostic) do + {:ok, diagnostic.range.start.line} + end +end diff --git a/apps/server/lib/lexical/provider/env.ex b/apps/server/lib/lexical/provider/env.ex new file mode 100644 index 000000000..2a7dafc5c --- /dev/null +++ b/apps/server/lib/lexical/provider/env.ex @@ -0,0 +1,33 @@ +defmodule Lexical.Provider.Env do + @moduledoc """ + An environment passed to provider handlers. + This represents the current state of the project, and should include additional + information that provider handles might need to complete their tasks. 
+ """ + + alias Lexical.Project + alias Lexical.Server.Configuration + + defstruct [:root_uri, :root_path, :project_uri, :project_path] + + @type t :: %__MODULE__{} + + def new do + %__MODULE__{} + end + + def from_configuration(%Configuration{} = config) do + %__MODULE__{ + root_uri: config.project.root_uri, + root_path: Project.root_path(config.project), + project_uri: config.project.root_uri, + project_path: Project.project_path(config.project) + } + end + + def project_name(%__MODULE__{} = env) do + env.project_path + |> Path.split() + |> List.last() + end +end diff --git a/apps/server/lib/lexical/provider/handlers/code_action.ex b/apps/server/lib/lexical/provider/handlers/code_action.ex new file mode 100644 index 000000000..0a77c64fa --- /dev/null +++ b/apps/server/lib/lexical/provider/handlers/code_action.ex @@ -0,0 +1,16 @@ +defmodule Lexical.Provider.Handlers.CodeAction do + alias Lexical.Provider.CodeAction.ReplaceWithUnderscore + alias Lexical.Provider.Env + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Provider.CodeAction.ReplaceWithUnderscore + + require Logger + + def handle(%Requests.CodeAction{} = request, %Env{}) do + code_actions = ReplaceWithUnderscore.apply(request) + reply = Responses.CodeAction.new(request.id, code_actions) + + {:reply, reply} + end +end diff --git a/apps/server/lib/lexical/provider/handlers/find_references.ex b/apps/server/lib/lexical/provider/handlers/find_references.ex new file mode 100644 index 000000000..5924402a1 --- /dev/null +++ b/apps/server/lib/lexical/provider/handlers/find_references.ex @@ -0,0 +1,57 @@ +# defmodule Lexical.Provider.Handlers.FindReferences do +# alias Lexical.Build +# alias Lexical.Tracer +# alias Lexical.Protocol.Requests.FindReferences +# alias Lexical.Protocol.Responses +# alias Lexical.Protocol.Types.Location +# alias Lexical.SourceFile +# alias Lexical.SourceFile.Conversions + +# require Logger + +# def handle(%FindReferences{} = request, _) do +# 
source_file = request.source_file +# pos = request.position +# trace = Tracer.get_trace() +# # elixir_ls uses 1 based columns, so add 1 here. +# character = pos.character + 1 + +# Build.with_lock(fn -> +# references = +# source_file +# |> SourceFile.to_string() +# |> ElixirSense.references(pos.line, character, trace) +# |> Enum.reduce([], fn reference, acc -> +# case build_reference(reference, source_file) do +# {:ok, location} -> +# [location | acc] + +# _ -> +# acc +# end +# end) +# |> Enum.reverse() + +# response = Responses.FindReferences.new(request.id, references) +# Logger.info("found #{length(references)} refs") +# {:reply, response} +# end) +# end + +# defp build_reference(%{range: _, uri: _} = elixir_sense_reference, current_source_file) do +# with {:ok, source_file} <- get_source_file(elixir_sense_reference, current_source_file), +# {:ok, elixir_range} <- Conversions.to_elixir(elixir_sense_reference, source_file), +# {:ok, ls_range} <- Conversions.to_lsp(elixir_range, source_file) do +# uri = Conversions.ensure_uri(source_file.uri) +# {:ok, Location.new(uri: uri, range: ls_range)} +# end +# end + +# defp get_source_file(%{uri: nil}, current_source_file) do +# {:ok, current_source_file} +# end + +# defp get_source_file(%{uri: uri}, _) do +# SourceFile.Store.open_temporary(uri) +# end +# end diff --git a/apps/server/lib/lexical/provider/handlers/formatting.ex b/apps/server/lib/lexical/provider/handlers/formatting.ex new file mode 100644 index 000000000..04cc679f1 --- /dev/null +++ b/apps/server/lib/lexical/provider/handlers/formatting.ex @@ -0,0 +1,25 @@ +defmodule Lexical.Provider.Handlers.Formatting do + alias Lexical.Provider.Env + alias Lexical.CodeMod.Format + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Transport + require Logger + + def handle(%Requests.Formatting{} = request, %Env{} = env) do + document = request.source_file + Logger.info("project path #{inspect(env)}") + Logger.info("doc path #{document.uri}") 
+ + with {:ok, text_edits} <- Format.text_edits(document, env.project_uri) do + response = Responses.Formatting.new(request.id, text_edits) + Logger.info("Response #{inspect(response)}") + {:reply, response} + else + {:error, reason} -> + Logger.error("Formatter failed #{inspect(reason)}") + + {:reply, Responses.Formatting.error(request.id, :request_failed, inspect(reason))} + end + end +end diff --git a/apps/server/lib/lexical/provider/queue.ex b/apps/server/lib/lexical/provider/queue.ex new file mode 100644 index 000000000..5b5518fdf --- /dev/null +++ b/apps/server/lib/lexical/provider/queue.ex @@ -0,0 +1,229 @@ +defmodule Lexical.Provider.Queue do + defmodule State do + alias Lexical.Provider.Env + alias Lexical + alias Lexical.Protocol.Requests + alias Lexical.Provider.Handlers + alias Lexical.Provider.Queue + alias Lexical.Transport + require Logger + + defstruct tasks_by_id: %{}, pids_to_ids: %{} + + @type t :: %__MODULE__{} + + @requests_to_handler %{ + Requests.FindReferences => Handlers.FindReferences, + Requests.Formatting => Handlers.Formatting, + Requests.CodeAction => Handlers.CodeAction + } + + def new do + %__MODULE__{} + end + + defp handler_for(%request_module{}) do + case Map.fetch(@requests_to_handler, request_module) do + {:ok, _} = success -> + success + + :error -> + {:error, {:unhandled, request_module}} + end + end + + @spec add(t, Requests.request(), Env.t()) :: {:ok, t} | :error + def add(%__MODULE__{} = state, request, env) do + with {:ok, handler_module} <- handler_for(request), + {:ok, req} <- request.__struct__.to_elixir(request) do + task = %Task{} = as_task(request, fn -> handler_module.handle(req, env) end) + + new_state = %__MODULE__{ + state + | tasks_by_id: Map.put(state.tasks_by_id, request.id, task), + pids_to_ids: Map.put(state.pids_to_ids, task.pid, request.id) + } + + {:ok, new_state} + else + {:error, {:unhandled, _}} -> + Logger.info("unhandled request #{request.method}") + :error + + _ -> + :error + end + end + + @spec 
cancel(t, pos_integer()) :: t + def cancel(%__MODULE__{} = state, request_id) do + with {:ok, %Task{} = task} <- Map.fetch(state.tasks_by_id, request_id), + true <- Process.exit(task.pid, :kill) do + %State{ + state + | tasks_by_id: Map.delete(state.tasks_by_id, request_id), + pids_to_ids: Map.delete(state.pids_to_ids, task.pid) + } + else + _ -> + state + end + end + + def size(%__MODULE__{} = state) do + map_size(state.tasks_by_id) + end + + def task_finished(%__MODULE__{} = state, pid, reason) do + case Map.pop(state.pids_to_ids, pid) do + {nil, _} -> + Logger.warn("Got an exit for pid #{inspect(pid)}, but it wasn't in the queue") + state + + {request_id, new_pids_to_ids} -> + maybe_log_task(reason, request_id) + + %__MODULE__{ + state + | pids_to_ids: new_pids_to_ids, + tasks_by_id: Map.delete(state.tasks_by_id, request_id) + } + end + end + + def running?(%__MODULE__{} = state, request_id) do + Map.has_key?(state.tasks_by_id, request_id) + end + + defp maybe_log_task(:normal, _), + do: :ok + + defp maybe_log_task(reason, %{id: request_id} = _request), + do: maybe_log_task(reason, request_id) + + defp maybe_log_task(reason, request_id), + do: Logger.warn("Request id #{request_id} failed with reason #{inspect(reason)}") + + defp as_task(%{id: _} = request, func) do + handler = fn -> + try do + case func.() do + :noreply -> + {:request_complete, request} + + {:reply, reply} -> + Transport.write(reply) + {:request_complete, request} + + {:reply_and_alert, reply} -> + Transport.write(reply) + Lexical.Server.response_complete(request, reply) + {:request_complete, request} + end + rescue + e -> + exception_string = Exception.format(:error, e, __STACKTRACE__) + Logger.error(exception_string) + + Transport.write(%{ + id: request.id, + error: exception_string + }) + + {:request_complete, request} + end + end + + Queue.Supervisor.run_in_task(handler) + end + end + + alias Lexical.Provider.Env + alias Lexical.Server.Configuration + alias Lexical.Protocol.Requests + + use 
GenServer + + # public interface + @spec add(Requests.request(), Configuration.t() | Env.t()) :: :ok + def add(request, %Configuration{} = config) do + env = Env.from_configuration(config) + add(request, env) + end + + def add(request, %Env{} = env) do + GenServer.call(__MODULE__, {:add, request, env}) + end + + @spec size() :: non_neg_integer() + def size do + GenServer.call(__MODULE__, :size) + end + + def cancel(%{id: request_id}) do + cancel(request_id) + end + + def cancel(request_id) when is_binary(request_id) do + GenServer.call(__MODULE__, {:cancel, request_id}) + end + + def running?(%{id: request_id}) do + running?(request_id) + end + + def running?(request_id) when is_binary(request_id) do + GenServer.call(__MODULE__, {:running?, request_id}) + end + + # genserver callbacks + + def child_spec do + __MODULE__ + end + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def init(_) do + {:ok, State.new()} + end + + def handle_call({:add, request, env}, _from, %State{} = state) do + {reply, new_state} = + case State.add(state, request, env) do + {:ok, new_state} -> {:ok, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:cancel, request_id}, _from, %State{} = state) do + new_state = State.cancel(state, request_id) + {:reply, :ok, new_state} + end + + def handle_call({:running?, request_id}, _from, %State{} = state) do + {:reply, State.running?(state, request_id), state} + end + + def handle_call(:size, _from, %State{} = state) do + {:reply, State.size(state), state} + end + + def handle_info({:DOWN, _ref, :process, pid, reason}, state) do + new_state = State.task_finished(state, pid, reason) + + {:noreply, new_state} + end + + def handle_info({ref, {:request_complete, _response}}, %State{} = state) + when is_reference(ref) do + # This head handles the replies from the tasks, which we don't really care about. 
+ {:noreply, state} + end + + # private +end diff --git a/apps/server/lib/lexical/provider/supervisor.ex b/apps/server/lib/lexical/provider/supervisor.ex new file mode 100644 index 000000000..00b49c289 --- /dev/null +++ b/apps/server/lib/lexical/provider/supervisor.ex @@ -0,0 +1,20 @@ +defmodule Lexical.Provider.Queue.Supervisor do + def name do + __MODULE__ + end + + def child_spec do + {Task.Supervisor, name: name()} + end + + def run_in_task(provider_fn) do + name() + |> Task.Supervisor.async(provider_fn) + |> unlink() + end + + defp unlink(%Task{} = task) do + Process.unlink(task.pid) + task + end +end diff --git a/apps/server/lib/lexical/server.ex b/apps/server/lib/lexical/server.ex new file mode 100644 index 000000000..58280fbb6 --- /dev/null +++ b/apps/server/lib/lexical/server.ex @@ -0,0 +1,110 @@ +defmodule Lexical.Server do + alias Lexical.Provider + alias Lexical.Protocol.Notifications + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Server.State + + import Logger + + use GenServer + + @spec response_complete(Requests.request(), Responses.response()) :: :ok + def response_complete(request, response) do + GenServer.call(__MODULE__, {:response_complete, request, response}) + end + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def protocol_message(message) do + GenServer.cast(__MODULE__, {:protocol_message, message}) + end + + def init(_) do + {:ok, State.new()} + end + + def handle_call({:response_complete, _request, _response}, _from, %State{} = state) do + {:reply, :ok, state} + end + + def handle_cast({:protocol_message, message}, %State{} = state) do + info("received #{message.__struct__} proto message") + + new_state = + with {:ok, new_state} <- handle_message(message, state) do + new_state + else + error -> + error("Could not handle message #{inspect(message.__struct__)} #{inspect(error)}") + state + end + + {:noreply, new_state} + end + + def handle_cast(other, %State{} 
= state) do + info("got other: #{inspect(other)}") + + {:noreply, state} + end + + def handle_info(:default_config, %State{configuration: nil} = state) do + warn( + "Did not receive workspace/didChangeConfiguration notification after 5 seconds. " <> + "Using default settings." + ) + + {:ok, config} = State.default_configuration(state) + {:noreply, %State{state | configuration: config}} + end + + def handle_info(:default_config, %State{} = state) do + {:noreply, state} + end + + def handle_message(%Requests.Initialize{} = initialize, %State{} = state) do + Process.send_after(self(), :default_config, :timer.seconds(5)) + + case State.initialize(state, initialize) do + {:ok, _state} = success -> + success + + error -> + {error, state} + end + end + + def handle_message(%message_module{} = message, %State{} = state) + when message_module in [ + Notifications.DidChange, + Notifications.DidChangeConfiguration, + Notifications.DidOpen, + Notifications.DidClose, + Notifications.DidSave + ] do + case apply_to_state(state, message) do + {:ok, _} = success -> + success + + error -> + error("Failed to handle #{message.__struct__}, #{inspect(error)}") + end + end + + def handle_message(request, %State{} = state) do + Provider.Queue.add(request, state.configuration) + + {:ok, %State{} = state} + end + + defp apply_to_state(%State{} = state, %{} = request_or_notification) do + case State.apply(state, request_or_notification) do + {:ok, new_state} -> {:ok, new_state} + :ok -> {:ok, state} + error -> {error, state} + end + end +end diff --git a/apps/server/lib/lexical/server/application.ex b/apps/server/lib/lexical/server/application.ex new file mode 100644 index 000000000..987bbfd4f --- /dev/null +++ b/apps/server/lib/lexical/server/application.ex @@ -0,0 +1,45 @@ +defmodule Lexical.Server.Application do + # See https://hexdocs.pm/elixir/Application.html + # for more information on OTP Applications + @moduledoc false + + alias Lexical.Provider + alias Lexical.Transport + use 
Application + + @impl true + def start(_type, _args) do + # on_start() + add_jsonrpc_backend() + + children = [ + Lexical.SourceFile.Store, + Lexical.Server, + {DynamicSupervisor, name: Lexical.Server.Project.Supervisor.dynamic_supervisor_name()}, + Provider.Queue.Supervisor.child_spec(), + Provider.Queue.child_spec(), + {Lexical.Server.IOServer, [:standard_io, &Lexical.Server.protocol_message/1]} + ] + + opts = [strategy: :one_for_one, name: Lexical.Server.Supervisor] + Supervisor.start_link(children, opts) + end + + defp on_start do + # Transport.StdIO.setup() + # Transport.StdIO.listen() + end + + defp add_jsonrpc_backend() do + Application.put_env(:logger, :backends, [Lexical.JsonRpc.Backend]) + + Application.put_env(:logger, Lexical.JsonRpc.Backend, + level: :debug, + format: "$message", + metadata: [] + ) + + {:ok, _} = Logger.add_backend(Lexical.JsonRpc.Backend) + :ok = Logger.remove_backend(:console, flush: true) + end +end diff --git a/apps/server/lib/lexical/server/code_mod/ast.ex b/apps/server/lib/lexical/server/code_mod/ast.ex new file mode 100644 index 000000000..69c9554a4 --- /dev/null +++ b/apps/server/lib/lexical/server/code_mod/ast.ex @@ -0,0 +1,19 @@ +defmodule Lexical.CodeMod.Ast do + alias Lexical.SourceFile + + @type t :: any() + + def from(%SourceFile{} = source_file) do + source_file + |> SourceFile.to_string() + |> from() + end + + def from(s) when is_binary(s) do + parse(s) + end + + defp parse(s) when is_binary(s) do + ElixirSense.string_to_quoted(s, 1, 6, token_metadata: true) + end +end diff --git a/apps/server/lib/lexical/server/code_mod/diff.ex b/apps/server/lib/lexical/server/code_mod/diff.ex new file mode 100644 index 000000000..4f7a5bbb6 --- /dev/null +++ b/apps/server/lib/lexical/server/code_mod/diff.ex @@ -0,0 +1,106 @@ +defmodule Lexical.CodeMod.Diff do + alias Lexical.CodeUnit + alias Lexical.Protocol.Types.Position + alias Lexical.Protocol.Types.Range + alias Lexical.Protocol.Types.TextEdit + + @spec diff(String.t(), 
String.t()) :: [TextEdit.t()] + def diff(source, dest) do + source + |> String.myers_difference(dest) + |> to_text_edits() + end + + defp to_text_edits(difference) do + {_, {current_line, prev_lines}} = + Enum.reduce(difference, {{0, 0}, {[], []}}, fn + {diff_type, diff_string}, {position, edits} -> + apply_diff(diff_type, position, diff_string, edits) + end) + + [current_line | prev_lines] + |> Enum.flat_map(fn line_edits -> + line_edits + |> Enum.reduce([], &collapse/2) + |> Enum.reverse() + end) + end + + # This collapses a delete and an an insert that are adjacent to one another + # into a single insert, changing the delete to insert the text from the + # insert rather than "" + # It's a small optimization, but it was in the original + defp collapse( + %TextEdit{ + new_text: "", + range: %Range{ + end: %Position{character: same_character, line: same_line} + } + } = delete_edit, + [ + %TextEdit{ + new_text: insert_text, + range: + %Range{ + start: %Position{character: same_character, line: same_line} + } = _insert_edit + } + | rest + ] + ) + when byte_size(insert_text) > 0 do + collapsed_edit = %TextEdit{delete_edit | new_text: insert_text} + [collapsed_edit | rest] + end + + defp collapse(%TextEdit{} = edit, edits) do + [edit | edits] + end + + defp apply_diff(:eq, position, doc_string, edits) do + advance(doc_string, position, edits) + end + + defp apply_diff(:del, {line, code_unit} = position, change, edits) do + {after_pos, {current_line, prev_lines}} = advance(change, position, edits) + {edit_end_line, edit_end_unit} = after_pos + current_line = [edit("", line, code_unit, edit_end_line, edit_end_unit) | current_line] + {after_pos, {current_line, prev_lines}} + end + + defp apply_diff(:ins, {line, code_unit} = position, change, {current_line, prev_lines}) do + current_line = [edit(change, line, code_unit, line, code_unit) | current_line] + advance(change, position, {current_line, prev_lines}) + end + + defp advance(<<>>, position, edits) do + {position, 
edits} + end + + for ending <- ["\r\n", "\r", "\n"] do + defp advance(<<unquote(ending), rest::binary>>, {line, _unit}, {current_line, prev_lines}) do + edits = {[], [current_line | prev_lines]} + advance(rest, {line + 1, 0}, edits) + end + end + + defp advance(<<c, rest::binary>>, {line, unit}, edits) when c < 128 do + advance(rest, {line, unit + 1}, edits) + end + + defp advance(<<c::utf8, rest::binary>>, {line, unit}, edits) do + increment = CodeUnit.count(:utf16, <<c::utf8>>) + advance(rest, {line, unit + increment}, edits) + end + + defp edit(text, start_line, start_unit, end_line, end_unit) do + TextEdit.new( + new_text: text, + range: + Range.new( + start: Position.new(line: start_line, character: start_unit), + end: Position.new(line: end_line, character: end_unit) + ) + ) + end +end diff --git a/apps/server/lib/lexical/server/code_mod/format.ex b/apps/server/lib/lexical/server/code_mod/format.ex new file mode 100644 index 000000000..b9de07d3a --- /dev/null +++ b/apps/server/lib/lexical/server/code_mod/format.ex @@ -0,0 +1,158 @@ +defmodule Lexical.CodeMod.Format do + alias Lexical.CodeMod.Diff + alias Lexical.SourceFile + alias Lexical.SourceFile.Conversions + alias Lexical.Protocol.Types.TextEdit + + require Logger + @type formatter_function :: (String.t() -> any) | nil + + @spec text_edits(SourceFile.t(), String.t() | nil) :: {:ok, [TextEdit.t()]} | {:error, any} + def text_edits(%SourceFile{} = document, project_path_or_uri) do + with {:ok, unformatted, formatted} <- do_format(document, project_path_or_uri) do + edits = Diff.diff(unformatted, formatted) + {:ok, edits} + end + end + + @spec format(SourceFile.t(), String.t() | nil) :: {:ok, String.t()} | {:error, any} + def format(%SourceFile{} = document, project_path_or_uri) do + with {:ok, _, formatted_code} <- do_format(document, project_path_or_uri) do + {:ok, formatted_code} + end + end + + defp do_format(%SourceFile{} = document, project_path_or_uri) + when is_binary(project_path_or_uri) do + project_path = Conversions.ensure_path(project_path_or_uri) + + with :ok <- 
check_current_directory(document, project_path), + {:ok, formatter, options} <- formatter_for(document.path), + :ok <- + check_inputs_apply(document, project_path, Keyword.get(options, :inputs)) do + document + |> SourceFile.to_string() + |> formatter.() + end + end + + defp do_format(%SourceFile{} = document, _) do + formatter = build_formatter([]) + + document + |> SourceFile.to_string() + |> formatter.() + end + + @spec formatter_for(String.t()) :: {:ok, formatter_function, keyword()} | :error + defp formatter_for(uri_or_path) do + path = Conversions.ensure_path(uri_or_path) + + try do + true = Code.ensure_loaded?(Mix.Tasks.Format) + + if function_exported?(Mix.Tasks.Format, :formatter_for_file, 1) do + {formatter_function, options} = Mix.Tasks.Format.formatter_for_file(path) + + wrapped_formatter_function = wrap_with_try_catch(formatter_function) + + {:ok, wrapped_formatter_function, options} + else + options = Mix.Tasks.Format.formatter_opts_for_file(path) + formatter = build_formatter(options) + {:ok, formatter, Mix.Tasks.Format.formatter_opts_for_file(path)} + end + rescue + e -> + message = Exception.message(e) + + Logger.warn( + "Unable to get formatter options for #{path}: #{inspect(e.__struct__)} #{message}" + ) + + {:error, :no_formatter_available} + end + end + + defp build_formatter(opts) do + fn code -> + formatted_iodata = Code.format_string!(code, opts) + IO.iodata_to_binary([formatted_iodata, ?\n]) + end + |> wrap_with_try_catch() + end + + defp wrap_with_try_catch(formatter_fn) do + fn code -> + try do + {:ok, code, formatter_fn.(code)} + rescue + e -> + {:error, e} + end + end + end + + defp check_current_directory(%SourceFile{} = document, project_path) do + cwd = File.cwd!() + + if subdirectory?(document.path, parent: project_path) or + subdirectory?(document.path, parent: cwd) do + :ok + else + message = + "Cannot format file from current directory " <> + "(Currently in #{Path.relative_to(cwd, project_path)})" + + {:error, message} + end + 
end + + defp check_inputs_apply(%SourceFile{} = document, project_path, inputs) + when is_list(inputs) do + formatter_dir = dominating_formatter_exs_dir(document, project_path) + + inputs_apply? = + Enum.any?(inputs, fn input_glob -> + glob = Path.join(formatter_dir, input_glob) + PathGlob.match?(document.path, glob, match_dot: true) + end) + + if inputs_apply? do + :ok + else + {:error, :input_mismatch} + end + end + + defp check_inputs_apply(_, _, _), do: :ok + + defp subdirectory?(child, parent: parent) do + normalized_parent = Path.absname(parent) + String.starts_with?(child, normalized_parent) + end + + # Finds the directory with the .formatter.exs that's the nearest parent to the + # source file, or the project dir if none was found. + defp dominating_formatter_exs_dir(%SourceFile{} = document, project_path) do + document.path + |> Path.dirname() + |> dominating_formatter_exs_dir(project_path) + end + + defp dominating_formatter_exs_dir(project_dir, project_dir) do + project_dir + end + + defp dominating_formatter_exs_dir(current_dir, project_path) do + formatter_exs_name = Path.join(current_dir, ".formatter.exs") + + if File.exists?(formatter_exs_name) do + current_dir + else + current_dir + |> Path.dirname() + |> dominating_formatter_exs_dir(project_path) + end + end +end diff --git a/apps/server/lib/lexical/server/code_mod/replace_with_underscore.ex b/apps/server/lib/lexical/server/code_mod/replace_with_underscore.ex new file mode 100644 index 000000000..855940200 --- /dev/null +++ b/apps/server/lib/lexical/server/code_mod/replace_with_underscore.ex @@ -0,0 +1,74 @@ +defmodule Lexical.CodeMod.ReplaceWithUnderscore do + alias Lexical.Protocol.Types.TextEdit + alias Lexical.CodeMod.Ast + alias Lexical.CodeMod.Diff + + @spec text_edits(String.t(), Ast.t(), String.t() | atom) :: {:ok, [TextEdit.t()]} | :error + def text_edits(original_text, ast, variable_name) do + variable_name = ensure_atom(variable_name) + + with {:ok, transformed} <- 
apply_transform(original_text, ast, variable_name) do + {:ok, to_text_edits(original_text, transformed)} + end + end + + defp to_text_edits(orig_text, fixed_text) do + orig_text + |> Diff.diff(fixed_text) + |> Enum.filter(&(&1.new_text == "_")) + end + + defp ensure_atom(variable_name) when is_binary(variable_name) do + String.to_atom(variable_name) + end + + defp ensure_atom(variable_name) when is_atom(variable_name) do + variable_name + end + + defp apply_transform(line_text, quoted_ast, unused_variable_name) do + underscored_variable_name = :"_#{unused_variable_name}" + leading_indent = leading_indent(line_text) + + Macro.postwalk(quoted_ast, fn + {^unused_variable_name, meta, context} -> + {underscored_variable_name, meta, context} + + other -> + other + end) + |> Macro.to_string() + # We're dealing with a single error on a single line. + # If the line doesn't compile (like it has a do with no end), ElixirSense + # adds additional lines do documents with errors, so take the first line, as it's + # the properly transformed source + |> fetch_line(0) + |> case do + {:ok, text} -> + {:ok, "#{leading_indent}#{text}"} + + error -> + error + end + end + + @indent_regex ~r/^\s+/ + defp leading_indent(line_text) do + case Regex.scan(@indent_regex, line_text) do + [indent] -> indent + _ -> "" + end + end + + defp fetch_line(message, line_number) do + line = + message + |> String.split(["\r\n", "\r", "\n"]) + |> Enum.at(line_number) + + case line do + nil -> :error + other -> {:ok, other} + end + end +end diff --git a/apps/server/lib/lexical/server/configuration.ex b/apps/server/lib/lexical/server/configuration.ex new file mode 100644 index 000000000..d1e02cfd6 --- /dev/null +++ b/apps/server/lib/lexical/server/configuration.ex @@ -0,0 +1,136 @@ +defmodule Lexical.Server.Configuration do + alias Lexical.Dialyzer + alias Lexical.LanguageServer + alias Lexical.Project + alias Lexical.Protocol.Id + alias Lexical.Protocol.Notifications.DidChangeConfiguration + alias 
Lexical.Protocol.Proto.LspTypes.Registration + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Requests.RegisterCapability + alias Lexical.Protocol.Types.ClientCapabilities + alias Lexical.Server.Configuration.Support + + defstruct project: nil, + support: nil, + additional_watched_extensions: nil, + dialyzer_enabled?: false + + @type t :: %__MODULE__{} + + @spec new(Lexical.uri(), map()) :: t + def new(root_uri, %ClientCapabilities{} = client_capabilities) do + support = Support.new(client_capabilities) + project = Project.new(root_uri) + %__MODULE__{support: support, project: project} + end + + @spec default(t) :: + {:ok, t} + | {:ok, t, Requests.RegisterCapabilities.t()} + | {:restart, Logger.level(), String.t()} + def default(%__MODULE__{} = config) do + apply_config_change(config, default_config()) + end + + @spec on_change(t, DidChangeConfiguration.t()) :: + {:ok, t} + | {:ok, t, Requests.RegisterCapability.t()} + | {:restart, Logger.level(), String.t()} + def on_change(%__MODULE__{} = old_config, :defaults) do + apply_config_change(old_config, default_config()) + end + + def on_change(%__MODULE__{} = old_config, %DidChangeConfiguration{} = change) do + apply_config_change(old_config, change.lsp.settings) + end + + defp default_config do + %{} + end + + defp apply_config_change(%__MODULE__{} = old_config, %{} = settings) do + with {:ok, new_config} <- maybe_set_mix_env(old_config, settings), + {:ok, new_config} <- maybe_set_env_vars(new_config, settings), + {:ok, new_config} <- maybe_set_mix_target(new_config, settings), + {:ok, new_config} <- maybe_set_project_directory(new_config, settings), + {:ok, new_config} <- maybe_enable_dialyzer(new_config, settings) do + maybe_add_watched_extensions(new_config, settings) + end + end + + defp maybe_set_mix_env(%__MODULE__{} = old_config, settings) do + new_env = Map.get(settings, "mixEnv") + + with {:ok, new_project} <- Project.change_mix_env(old_config.project, new_env) do + {:ok, %__MODULE__{old_config | 
project: new_project}} + end + end + + defp maybe_set_env_vars(%__MODULE__{} = old_config, settings) do + env_vars = Map.get(settings, "envVariables") + + with {:ok, new_project} <- Project.set_env_vars(old_config.project, env_vars) do + {:ok, %__MODULE__{old_config | project: new_project}} + end + end + + defp maybe_set_mix_target(%__MODULE__{} = old_config, settings) do + mix_target = Map.get(settings, "mixTarget") + + with {:ok, new_project} <- Project.change_mix_target(old_config.project, mix_target) do + {:ok, %__MODULE__{old_config | project: new_project}} + end + end + + defp maybe_set_project_directory(%__MODULE__{} = old_config, settings) do + project_dir = Map.get(settings, "projectDir") + + with {:ok, new_project} <- Project.change_project_directory(old_config.project, project_dir) do + {:ok, %__MODULE__{old_config | project: new_project}} + end + end + + defp maybe_enable_dialyzer(%__MODULE__{} = old_config, settings) do + enabled? = + case Dialyzer.check_support() do + :ok -> + Map.get(settings, "dialyzerEnabled", true) + + _ -> + false + end + + {:ok, %__MODULE__{old_config | dialyzer_enabled?: enabled?}} + end + + defp maybe_add_watched_extensions(%__MODULE__{} = old_config, %{ + "additionalWatchedExtensions" => [] + }) do + {:ok, old_config} + end + + defp maybe_add_watched_extensions(%__MODULE__{} = old_config, %{ + "additionalWatchedExtensions" => extensions + }) + when is_list(extensions) do + register_id = Id.next() + request_id = Id.next() + + watchers = Enum.map(extensions, fn ext -> %{"globPattern" => "**/*#{ext}"} end) + + registration = + Registration.new( + id: request_id, + method: "workspace/didChangeWatchedFiles", + register_options: %{"watchers" => watchers} + ) + + request = RegisterCapability.new(id: register_id, registrations: [registration]) + + {:ok, old_config, request} + end + + defp maybe_add_watched_extensions(%__MODULE__{} = old_config, _) do + {:ok, old_config} + end +end diff --git 
a/apps/server/lib/lexical/server/configuration/support.ex b/apps/server/lib/lexical/server/configuration/support.ex new file mode 100644 index 000000000..2278573c0 --- /dev/null +++ b/apps/server/lib/lexical/server/configuration/support.ex @@ -0,0 +1,58 @@ +defmodule Lexical.Server.Configuration.Support do + alias Lexical.Protocol.Types.ClientCapabilities + + defstruct code_action_dynamic_registration?: false, + hierarchical_document_symbols?: false, + snippet?: false, + deprecated?: false, + tags?: false, + signature_help?: false + + def new(%ClientCapabilities{} = client_capabilities) do + dynamic_registration? = + client_capabilities + |> get_in([:text_document, :code_action, :dynamic_registration]) + |> bool() + + hierarchical_symbols? = + client_capabilities + |> get_in([:text_document, :document_symbol, :hierarchical_document_symbol_support]) + |> bool() + + snippet? = + client_capabilities + |> get_in([:text_document, :completion, :completion_item, :snippet_support]) + |> bool() + + deprecated? = + client_capabilities + |> get_in([:text_document, :completion, :completion_item, :deprecated_support]) + |> bool() + + tags? = + client_capabilities + |> get_in([:text_document, :completion, :completion_item, :tag_support]) + |> bool() + + signature_help? = + client_capabilities + |> get_in([:text_document, :signature_help]) + |> bool() + + %__MODULE__{ + code_action_dynamic_registration?: dynamic_registration?, + hierarchical_document_symbols?: hierarchical_symbols?, + snippet?: snippet?, + deprecated?: deprecated?, + tags?: tags?, + signature_help?: signature_help? 
+ } + end + + def new(_) do + %__MODULE__{} + end + + defp bool(b) when b in [true, false], do: b + defp bool(_), do: false +end diff --git a/apps/server/lib/lexical/server/io_server.ex b/apps/server/lib/lexical/server/io_server.ex new file mode 100644 index 000000000..c6870c426 --- /dev/null +++ b/apps/server/lib/lexical/server/io_server.ex @@ -0,0 +1,120 @@ +defmodule Lexical.Server.IOServer do + alias Lexical.Protocol.JsonRpc + + @crlf "\r\n" + + def start_link(device, callback) do + pid = :proc_lib.spawn_link(__MODULE__, :init, [{callback, device}]) + {:ok, pid} + end + + def child_spec([device, callback]) do + %{id: __MODULE__, start: {__MODULE__, :start_link, [device, callback]}} + end + + def init({callback, device}) do + :io.setopts([:binary, encoding: :latin1]) + loop([], device, callback) + end + + def write(io_device \\ :stdio, payload) + + def write(io_device, %_{} = payload) do + with {:ok, encoded} <- Jason.encode(payload) do + write(io_device, encoded) + end + end + + def write(io_device, payload) when is_binary(payload) do + case io_device do + device when device in [:stdio, :standard_io] -> + content_length = IO.iodata_length(payload) + + IO.binwrite(io_device, [ + "Content-Length: ", + to_string(content_length), + @crlf, + @crlf, + payload + ]) + + _ -> + IO.binwrite(io_device, to_string(payload)) + end + end + + def log(level, message, opts \\ []) + + def log(level, message, opts) when level in [:error, :warning] do + log_message = format_message(message, opts) + write(:standard_error, log_message) + message + end + + def log(_level, message, opts) do + log_message = format_message(message, opts) + + write(:standard_error, log_message) + message + end + + # private + + defp format_message(message, opts) do + case Keyword.get(opts, :label) do + nil -> inspect(message) <> "\n" + label -> "#{label}: '#{inspect(message, limit: :infinity)}\n" + end + end + + defp loop(buffer, device, callback) do + case IO.read(device, :line) do + "\n" -> + headers = 
parse_headers(buffer) + + with {:ok, content_length} <- + header_value(headers, "content-length", &String.to_integer/1), + {:ok, data} <- read(device, content_length), + {:ok, message} <- JsonRpc.decode(data) do + callback.(message) + end + + loop([], device, callback) + + :eof -> + System.halt() + + line -> + loop([line | buffer], device, callback) + end + end + + defp parse_headers(headers) do + Enum.map(headers, &parse_header/1) + end + + defp header_value(headers, header_name, converter) do + case List.keyfind(headers, header_name, 0) do + nil -> :error + {_, value} -> {:ok, converter.(value)} + end + end + + defp read(device, amount) do + case IO.read(device, amount) do + data when is_binary(data) or is_list(data) -> {:ok, data} + other -> other + end + end + + defp parse_header(line) do + [name, value] = String.split(line, ":") + + header_name = + name + |> String.downcase() + |> String.trim() + + {header_name, String.trim(value)} + end +end diff --git a/apps/server/lib/lexical/server/json_rpc_backend.ex b/apps/server/lib/lexical/server/json_rpc_backend.ex new file mode 100644 index 000000000..0806f18cf --- /dev/null +++ b/apps/server/lib/lexical/server/json_rpc_backend.ex @@ -0,0 +1,172 @@ +defmodule Lexical.JsonRpc.Backend do + @moduledoc ~S""" + A logger backend that logs messages by sending them via LSP ‘window/logMessage’. + + ## Options + + * `:level` - the level to be logged by this backend. + Note that messages are filtered by the general + `:level` configuration for the `:logger` application first. + + * `:format` - the format message used to print logs. + Defaults to: `"$message"`. + It may also be a `{module, function}` tuple that is invoked + with the log level, the message, the current timestamp and + the metadata and must return `t:IO.chardata/0`. See + `Logger.Formatter`. + + * `:metadata` - the metadata to be printed by `$metadata`. + Defaults to an empty list (no metadata). + Setting `:metadata` to `:all` prints all metadata. 
See + the "Metadata" section for more information. + + """ + + @behaviour :gen_event + + defstruct format: nil, + level: nil, + metadata: nil + + @impl true + def init(__MODULE__) do + config = Application.get_env(:logger, __MODULE__) + + {:ok, init(config, %__MODULE__{})} + end + + def init({__MODULE__, opts}) when is_list(opts) do + config = + :logger + |> Application.get_env(__MODULE__) + |> merge_config(opts) + + {:ok, init(config, %__MODULE__{})} + end + + @impl true + def handle_call({:configure, options}, state) do + {:ok, :ok, configure(options, state)} + end + + def handle_call({:set_group_leader, pid}, state) do + Process.group_leader(self(), pid) + {:ok, :ok, state} + end + + @impl true + def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do + %{level: log_level} = state + + {:erl_level, level} = List.keyfind(md, :erl_level, 0, {:erl_level, level}) + + cond do + not meets_level?(level, log_level) -> + {:ok, state} + + true -> + {:ok, log_event(level, msg, ts, md, state)} + end + end + + def handle_event(:flush, state) do + {:ok, state} + end + + def handle_event(_, state) do + {:ok, state} + end + + @impl true + def handle_info(_, state) do + {:ok, state} + end + + @impl true + def code_change(_old_vsn, state, _extra) do + {:ok, state} + end + + @impl true + def terminate(_reason, _state) do + :ok + end + + ## Helpers + + defp meets_level?(_lvl, nil), do: true + + defp meets_level?(lvl, min) do + Logger.compare_levels(lvl, min) != :lt + end + + defp configure(options, state) do + config = merge_config(Application.get_env(:logger, __MODULE__), options) + Application.put_env(:logger, __MODULE__, config) + init(config, state) + end + + defp init(config, state) do + level = Keyword.get(config, :level) + format = Logger.Formatter.compile(Keyword.get(config, :format)) + metadata = Keyword.get(config, :metadata, []) |> configure_metadata() + + %{ + state + | format: format, + metadata: metadata, + level: level + } + end + + defp 
configure_metadata(:all), do: :all + defp configure_metadata(metadata), do: Enum.reverse(metadata) + + defp merge_config(env, options) do + Keyword.merge(env, options, fn + _, _v1, v2 -> v2 + end) + end + + defp log_event(level, msg, ts, md, state) do + output = + level + |> format_event(msg, ts, md, state) + |> IO.chardata_to_string() + + level + |> elixir_log_level_to_lsp() + |> Lexical.Transport.log(output) + + state + end + + defp elixir_log_level_to_lsp(:debug), do: :log + defp elixir_log_level_to_lsp(:info), do: :info + defp elixir_log_level_to_lsp(:notice), do: :info + defp elixir_log_level_to_lsp(:warning), do: :warning + defp elixir_log_level_to_lsp(:warn), do: :warning + defp elixir_log_level_to_lsp(:error), do: :error + defp elixir_log_level_to_lsp(:critical), do: :error + defp elixir_log_level_to_lsp(:alert), do: :error + defp elixir_log_level_to_lsp(:emergency), do: :error + + defp format_event(level, msg, ts, md, state) do + %{format: format, metadata: keys} = state + + Logger.Formatter.format(format, level, msg, ts, take_metadata(md, keys)) + end + + defp take_metadata(metadata, :all) do + metadata + end + + defp take_metadata(metadata, keys) do + Enum.reduce(keys, [], fn key, acc -> + case Keyword.fetch(metadata, key) do + {:ok, val} -> [{key, val} | acc] + :error -> acc + end + end) + end +end diff --git a/apps/server/lib/lexical/server/output_device.ex b/apps/server/lib/lexical/server/output_device.ex new file mode 100644 index 000000000..fd70506f5 --- /dev/null +++ b/apps/server/lib/lexical/server/output_device.ex @@ -0,0 +1,140 @@ +defmodule Lexical.Server.OutputDevice do + @moduledoc """ + Intercepts IO request messages and forwards them to the Output server to be sent as events to + the IDE. Implements Erlang I/O Protocol https://erlang.org/doc/apps/stdlib/io_protocol.html + + In order to send console output to Visual Studio Code, the debug adapter needs to send events + using the usual wire protocol. 
In order to intercept the debugged code's output, we replace the + registered processes `:user` and `:standard_error` and the process's group leader with instances + of this server. When it receives a message containing output, it sends an event via the `Output` + server with the correct category ("stdout" or "stderr"). + """ + + @opts binary: true, encoding: :unicode + + ## Client API + + def start_link(device, output_fn) do + Task.start_link(fn -> loop({device, output_fn}) end) + end + + def child_spec(arguments) do + %{ + id: __MODULE__, + start: {__MODULE__, :start_link, arguments}, + type: :worker, + restart: :permanent, + shutdown: 500 + } + end + + def get_opts, do: @opts + + ## Implementation + + defp loop(state) do + receive do + {:io_request, from, reply_as, request} -> + result = io_request(request, state, reply_as) + send(from, {:io_reply, reply_as, result}) + + loop(state) + end + end + + defp send_to_output(encoding, characters, {_device, output_fn}) do + # convert to unicode binary if necessary + case wrap_characters_to_binary(characters, encoding) do + binary when is_binary(binary) -> + output_fn.(binary) + + _ -> + {:error, :put_chars} + end + end + + defp io_request({:put_chars, encoding, characters}, state, _reply_as) do + send_to_output(encoding, characters, state) + end + + defp io_request({:put_chars, encoding, module, func, args}, state, _reply_as) do + # apply mfa to get binary or list + # return error in other cases + try do + case apply(module, func, args) do + characters when is_list(characters) or is_binary(characters) -> + send_to_output(encoding, characters, state) + + _ -> + {:error, :put_chars} + end + catch + _, _ -> {:error, :put_chars} + end + end + + defp io_request({:requests, list}, state, reply_as) do + # process request sequentially until error or end of data + # return last result + case io_requests(list, {:ok, :ok}, state, reply_as) do + :ok -> :ok + {:error, error} -> {:error, error} + other -> {:ok, other} + end + end + + 
defp io_request(:getopts, _state, _reply_as) do + @opts + end + + defp io_request({:setopts, new_opts}, _state, _reply_as) do + validate_otps(new_opts, {:ok, 0}) + end + + defp io_request(unknown, {device, _output_fn}, reply_as) do + # forward requests to underlying device + send(device, {:io_request, self(), reply_as, unknown}) + + receive do + {:io_reply, ^reply_as, reply} -> reply + end + end + + defp io_requests(_, {:error, error}, _, _), do: {:error, error} + + defp io_requests([request | rest], _, state, reply_as) do + result = io_request(request, state, reply_as) + io_requests(rest, result, state, reply_as) + end + + defp io_requests([], result, _, _), do: result + + defp wrap_characters_to_binary(bin, :unicode) when is_binary(bin), do: bin + + defp wrap_characters_to_binary(chars, from) do + # :unicode.characters_to_binary may throw, return error or incomplete result + try do + case :unicode.characters_to_binary(chars, from, :unicode) do + bin when is_binary(bin) -> + bin + + _ -> + :error + end + catch + _, _ -> :error + end + end + + defp validate_otps([opt | rest], {:ok, acc}) do + validate_otps(rest, opt_valid?(opt, acc)) + end + + defp validate_otps([], {:ok, 2}), do: :ok + defp validate_otps(_, _acc), do: {:error, :enotsup} + + defp opt_valid?(:binary, acc), do: {:ok, acc + 1} + defp opt_valid?({:binary, true}, acc), do: {:ok, acc + 1} + defp opt_valid?({:encoding, :unicode}, acc), do: {:ok, acc + 1} + defp opt_valid?(_opt, _acc), do: :error +end diff --git a/apps/server/lib/lexical/server/project/diagnostics.ex b/apps/server/lib/lexical/server/project/diagnostics.ex new file mode 100644 index 000000000..616c3d16f --- /dev/null +++ b/apps/server/lib/lexical/server/project/diagnostics.ex @@ -0,0 +1,202 @@ +defmodule Lexical.Server.Project.Diagnostics do + defmodule State do + alias Lexical.CodeUnit + alias Lexical.SourceFile + alias Lexical.Project + alias Lexical.Protocol.Types.Diagnostic + alias Lexical.Protocol.Types.Position + alias 
Lexical.Protocol.Types.Range + alias Mix.Task.Compiler + + defstruct [:project, :diagnostics_by_uri] + + def new(%Project{} = project) do + %__MODULE__{project: project, diagnostics_by_uri: %{}} + end + + def get(%__MODULE__{} = state, source_uri) do + Map.get(state.diagnostics_by_uri, source_uri, []) + end + + def clear(%__MODULE__{} = state, source_uri) do + %__MODULE__{state | diagnostics_by_uri: Map.put(state.diagnostics_by_uri, source_uri, [])} + end + + def clear_all(%__MODULE__{} = state) do + cleared = Map.new(state.diagnostics_by_uri, fn {k, _} -> {k, []} end) + %__MODULE__{state | diagnostics_by_uri: cleared} + end + + def add( + %__MODULE__{} = state, + %Compiler.Diagnostic{} = diagnostic, + %SourceFile{} = source_file + ) do + lsp_diagnostic = to_protocol(diagnostic, source_file) + + file_diagnostics = + Map.update( + state.diagnostics_by_uri, + source_file.uri, + [lsp_diagnostic], + &[lsp_diagnostic | &1] + ) + + %{state | diagnostics_by_uri: file_diagnostics} + end + + def add(%__MODULE__{} = state, %Mix.Error{} = error) do + project_uri = state.project.mix_exs_uri + lsp_diagnostic = to_protocol(error, project_uri) + + file_diagnostics = + Map.update( + state.diagnostics_by_uri, + project_uri, + [lsp_diagnostic], + &[lsp_diagnostic | &1] + ) + + %{state | diagnostics_by_uri: file_diagnostics} + end + + def add(%__MODULE__{} = state, %Compiler.Diagnostic{} = diagnostic) do + source_uri = SourceFile.Path.to_uri(diagnostic.file) + + with {:ok, lsp_diagnostic} <- to_protocol(diagnostic, source_uri) do + diagnostics_by_uri = + Map.update(state.diagnostics_by_uri, source_uri, [lsp_diagnostic], fn diagnostics -> + [lsp_diagnostic | diagnostics] + end) + + %__MODULE__{state | diagnostics_by_uri: diagnostics_by_uri} + else + _ -> + state + end + end + + defp to_protocol(%Compiler.Diagnostic{} = diagnostic, %SourceFile{} = source_file) do + %Diagnostic{ + message: diagnostic.message, + range: position_to_range(source_file, diagnostic.position), + severity: 
diagnostic.severity, + source: "Elixir" + } + end + + defp to_protocol(%Compiler.Diagnostic{} = diagnostic, source_uri) + when is_binary(source_uri) do + with {:ok, source_file} <- SourceFile.Store.open_temporary(source_uri) do + {:ok, to_protocol(diagnostic, source_file)} + end + end + + defp to_protocol(%Mix.Error{} = diagnostic, _) do + %Diagnostic{ + message: diagnostic.message, + range: + Range.new( + start: Position.new(line: 0, character: 0), + end: Position.new(line: 1, character: 0) + ), + severity: :error, + source: "Mix" + } + end + + defp position_to_range(_, line) when is_integer(line) do + Range.new( + start: Position.new(line: max(line - 1, 0), character: 0), + end: Position.new(line: line, character: 0) + ) + end + + defp position_to_range(%SourceFile{} = source_file, {line_number, column}) do + with {:ok, line_text} <- SourceFile.fetch_text_at(source_file, line_number), + {:ok, character} <- CodeUnit.to_utf16(line_text, column) do + Range.new( + start: Position.new(line: line_number, character: character), + end: Position.new(line: line_number, character: character) + ) + end + end + end + + alias Lexical.Protocol.Notifications.PublishDiagnostics + alias Lexical.Protocol.Types.Diagnostic + alias Lexical.Project + alias Lexical.Server.Project.Dispatch + alias Lexical.SourceFile + alias Lexical.RemoteControl.Messages + alias Mix.Task.Compiler + + import Messages + use GenServer + + def start_link(%Project{} = project) do + GenServer.start_link(__MODULE__, [project], name: name(project)) + end + + def child_spec(%Project{} = project) do + %{ + id: {__MODULE__, Project.name(project)}, + start: {__MODULE__, :start_link, [project]} + } + end + + # GenServer callbacks + + @impl GenServer + def init([%Project{} = project]) do + Dispatch.register(project, [project_compiled(), file_compiled()]) + state = State.new(project) + {:ok, state} + end + + @impl GenServer + def handle_info(project_compiled(diagnostics: diagnostics), %State{} = state) do + uris = + 
Enum.map(diagnostics, fn + %Mix.Error{} -> + state.project.mix_exs_uri + + %Compiler.Diagnostic{file: file} -> + SourceFile.Path.to_uri(file) + end) + + state = Enum.reduce(uris, state, &State.clear(&2, &1)) + state = Enum.reduce(diagnostics, state, &State.add(&2, &1)) + publish_diagnostics(state) + {:noreply, state} + end + + @impl GenServer + def handle_info( + file_compiled(diagnostics: diagnostics, source_file: %SourceFile{} = source_file), + %State{} = state + ) do + state = State.clear(state, source_file.uri) + state = Enum.reduce(diagnostics, state, &State.add(&2, &1, source_file)) + publish_diagnostics(state) + {:noreply, state} + end + + # Private + + defp publish_diagnostics(%State{} = state) do + Enum.each(state.diagnostics_by_uri, fn {uri, diagnostic_list} -> + notification = + PublishDiagnostics.new( + uri: uri, + diagnostics: diagnostic_list + ) + + Lexical.Transport.write(notification) + end) + end + + defp name(%Project{} = project) do + :"#{Project.name(project)}::diagnostics" + end +end diff --git a/apps/server/lib/lexical/server/project/dispatch.ex b/apps/server/lib/lexical/server/project/dispatch.ex new file mode 100644 index 000000000..27810e005 --- /dev/null +++ b/apps/server/lib/lexical/server/project/dispatch.ex @@ -0,0 +1,148 @@ +defmodule Lexical.Server.Project.Dispatch do + defmodule State do + alias Lexical.Project + + defstruct [:project, :registrations] + + def new(%Project{} = project) do + %__MODULE__{project: project, registrations: %{}} + end + + def add(%__MODULE__{} = state, message_type, pid) do + registrations = + Map.update(state.registrations, message_type, [pid], fn registrations -> + [pid | registrations] + end) + + %__MODULE__{state | registrations: registrations} + end + + def registrations(%__MODULE__{} = state, message_type) do + all = Map.get(state.registrations, :all, []) + specific = Map.get(state.registrations, message_type, []) + all ++ specific + end + + def registered?(%__MODULE__{} = state, pid) do + 
state.registrations + |> Map.values() + |> List.flatten() + |> Enum.member?(pid) + end + + def registered?(%__MODULE__{} = state, message_type, pid) do + c = 32 + pid in registrations(state, message_type) + end + + def remove(%__MODULE__{} = state, message_type, pid) do + registrations = + Map.update(state.registrations, message_type, [], &Enum.reject(&1, fn e -> e == pid end)) + + %__MODULE__{state | registrations: registrations} + end + + def remove_all(%__MODULE__{} = state, pid) do + registrations = + Map.new(state.registrations, fn {message, pids} -> + pids = Enum.reject(pids, &(&1 == pid)) + {message, pids} + end) + + %__MODULE__{state | registrations: registrations} + end + end + + alias Lexical.RemoteControl + alias Lexical.Project + use GenServer + + # public API + + def register(%Project{} = project, message_types) when is_list(message_types) do + project + |> name() + |> GenServer.call({:register, message_types}) + end + + def registered?(%Project{} = project) do + registered?(project, self()) + end + + def registered?(%Project{} = project, pid) when is_pid(pid) do + project + |> name() + |> GenServer.call({:registered?, pid}) + end + + # GenServer callbacks + + def start_link(%Project{} = project) do + GenServer.start_link(__MODULE__, [project], name: name(project)) + end + + def child_spec(%Project{} = project) do + %{ + id: {__MODULE__, Project.name(project)}, + start: {__MODULE__, :start_link, [project]} + } + end + + @impl GenServer + def init([%Project{} = project]) do + {:ok, _} = RemoteControl.start_link(project, self()) + {:ok, State.new(project), {:continue, :trigger_build}} + end + + @impl GenServer + def handle_continue(:trigger_build, %State{} = state) do + RemoteControl.Api.schedule_compile(state.project, false) + {:noreply, state} + end + + @impl GenServer + def handle_call({:register, message_types}, {caller_pid, _ref}, %State{} = state) do + Process.monitor(caller_pid) + + new_state = + Enum.reduce(message_types, state, fn + 
message_type_or_message, %State{} = state -> + message_type = extract_message_type(message_type_or_message) + State.add(state, message_type, caller_pid) + end) + + {:reply, :ok, new_state} + end + + @impl GenServer + def handle_call({:registered?, pid}, _from, %State{} = state) do + registered? = State.registered?(state, pid) + {:reply, registered?, state} + end + + @impl GenServer + def handle_info({:DOWN, _ref, _, pid, _}, %State{} = state) do + new_state = State.remove_all(state, pid) + {:noreply, new_state} + end + + @impl GenServer + def handle_info(message, %State{} = state) do + message_type = extract_message_type(message) + + state + |> State.registrations(message_type) + |> Enum.each(&send(&1, message)) + + {:noreply, state} + end + + # Private api + + defp name(%Project{} = project) do + :"#{Project.name(project)}::dispatch" + end + + defp extract_message_type(message_type) when is_atom(message_type), do: message_type + defp extract_message_type(message_type) when is_tuple(message_type), do: elem(message_type, 0) +end diff --git a/apps/server/lib/lexical/server/project/index.ex b/apps/server/lib/lexical/server/project/index.ex new file mode 100644 index 000000000..062dd448a --- /dev/null +++ b/apps/server/lib/lexical/server/project/index.ex @@ -0,0 +1,12 @@ +defmodule Lexical.Server.Project.Index do + alias Lexical.Project + use GenServer + + def start_link(%Project{} = project) do + GenServer.start_link(__MODULE__, [project], name: :"#{Project.name(project)}::index") + end + + def init([%Project{} = project]) do + {:ok, project} + end +end diff --git a/apps/server/lib/lexical/server/project/supervisor.ex b/apps/server/lib/lexical/server/project/supervisor.ex new file mode 100644 index 000000000..e2f8c0a44 --- /dev/null +++ b/apps/server/lib/lexical/server/project/supervisor.ex @@ -0,0 +1,42 @@ +defmodule Lexical.Server.Project.Supervisor do + alias Lexical.Project + alias Lexical.Server.Project.Diagnostics + alias Lexical.Server.Project.Dispatch + alias 
# ──── file: apps/server/lib/lexical/server/project/supervisor.ex (new) ────

defmodule Lexical.Server.Project.Supervisor do
  # Supervises the per-project processes (dispatch, diagnostics, index).
  # Instances are started under a DynamicSupervisor, one per opened project.
  alias Lexical.Project
  alias Lexical.Server.Project.Diagnostics
  alias Lexical.Server.Project.Dispatch
  alias Lexical.Server.Project.Index
  use Supervisor

  def dynamic_supervisor_name do
    Lexical.Server.ProjectSupervisor
  end

  def start_link(%Project{} = project) do
    Supervisor.start_link(__MODULE__, project, name: supervisor_name(project))
  end

  @impl Supervisor
  def init(%Project{} = project) do
    children = [
      {Dispatch, project},
      {Diagnostics, project},
      {Index, project}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  def start(%Project{} = project) do
    DynamicSupervisor.start_child(dynamic_supervisor_name(), {__MODULE__, project})
  end

  def stop(%Project{} = project) do
    # ROBUSTNESS: Process.whereis/1 returns nil when the project supervisor is
    # not running; the original passed that nil straight to terminate_child/2.
    case project |> supervisor_name() |> Process.whereis() do
      nil -> {:error, :not_running}
      pid -> DynamicSupervisor.terminate_child(dynamic_supervisor_name(), pid)
    end
  end

  defp supervisor_name(%Project{} = project) do
    :"#{Project.name(project)}::supervisor"
  end
end

# ──── file: apps/server/lib/lexical/server/state.ex (new) ────

defmodule Lexical.Server.State do
  @moduledoc """
  Top-level server state: tracks initialization and configuration, and applies
  incoming LSP notifications to the source file store.
  """
  alias Lexical.Protocol.Notifications.{
    DidChange,
    DidChangeConfiguration,
    DidClose,
    DidOpen,
    DidSave
  }

  alias Lexical.Protocol.Requests.Initialize
  alias Lexical.Protocol.Responses
  alias Lexical.Protocol.Types
  alias Lexical.Protocol.Types.CodeAction
  alias Lexical.Protocol.Types.TextDocument
  alias Lexical.Server.Configuration
  alias Lexical.SourceFile
  alias Lexical.Transport

  import Logger

  require CodeAction.Kind

  defstruct configuration: nil, initialized?: false

  @supported_code_actions [
    CodeAction.Kind.quick_fix()
  ]

  def new do
    %__MODULE__{}
  end

  # Handles the LSP `initialize` handshake exactly once: builds the
  # configuration, boots the project supervisor and replies with capabilities.
  def initialize(%__MODULE__{initialized?: false} = state, %Initialize{
        lsp: %Initialize.LSP{} = event
      }) do
    config = Configuration.new(event.root_uri, event.capabilities)
    new_state = %__MODULE__{state | configuration: config, initialized?: true}
    info("Starting project at uri #{config.project.root_uri}")

    Lexical.Server.Project.Supervisor.start(config.project)

    event.id
    |> initialize_result()
    |> Transport.write()

    {:ok, new_state}
  end

  def initialize(%__MODULE__{initialized?: true}, %Initialize{}) do
    {:error, :already_initialized}
  end

  def default_configuration(%__MODULE__{configuration: config} = state) do
    with {:ok, config} <- Configuration.default(config) do
      {:ok, %__MODULE__{state | configuration: config}}
    end
  end

  # Any message before `initialize` is a protocol violation; reject it.
  # NOTE: this clause must stay first so it shadows the clauses below.
  def apply(%__MODULE__{initialized?: false}, request) do
    Logger.error("Received #{request.method} before server was initialized")
    {:error, :not_initialized}
  end

  def apply(%__MODULE__{} = state, %DidChangeConfiguration{} = event) do
    # BUG FIX: the original fell through to an unconditional `{:ok, state}`
    # after this case, discarding both the updated configuration and any error.
    # The case result is now the return value.
    case Configuration.on_change(state.configuration, event) do
      {:ok, config} ->
        {:ok, %__MODULE__{state | configuration: config}}

      {:ok, config, response} ->
        Transport.write(response)
        {:ok, %__MODULE__{state | configuration: config}}

      error ->
        error
    end
  end

  def apply(%__MODULE__{} = state, %DidChange{lsp: event}) do
    uri = event.text_document.uri
    version = event.text_document.version

    case SourceFile.Store.update(
           uri,
           &SourceFile.apply_content_changes(&1, version, event.content_changes)
         ) do
      :ok -> {:ok, state}
      error -> error
    end
  end

  def apply(%__MODULE__{} = state, %DidOpen{lsp: event}) do
    %TextDocument.Item{text: text, uri: uri, version: version} =
      text_document = event.text_document

    case SourceFile.Store.open(uri, text, version) do
      :ok ->
        info("opened #{uri}")
        {:ok, state}

      error ->
        error("Could not open #{text_document.uri} #{inspect(error)}")
        error
    end
  end

  def apply(%__MODULE__{} = state, %DidClose{lsp: event}) do
    uri = event.text_document.uri

    case SourceFile.Store.close(uri) do
      :ok ->
        {:ok, state}

      error ->
        warn("Received textDocument/didClose for a file that wasn't open. URI was #{uri}")
        error
    end
  end

  def apply(%__MODULE__{} = state, %DidSave{lsp: event}) do
    uri = event.text_document.uri

    case SourceFile.Store.save(uri) do
      :ok ->
        {:ok, state}

      error ->
        warn("Save failed for uri #{uri} error was #{inspect(error)}")
        error
    end
  end

  # Unknown messages are logged and ignored rather than crashing the server.
  def apply(%__MODULE__{} = state, msg) do
    Transport.log("Applying unknown #{inspect(msg)}")
    {:ok, state}
  end

  # Builds the InitializeResult advertising this server's capabilities.
  def initialize_result(event_id) do
    sync_options = TextDocument.Sync.Options.new(open_close: true, change: :incremental)

    code_action_options =
      CodeAction.Options.new(code_action_kinds: @supported_code_actions, resolve_provider: false)

    server_capabilities =
      Types.ServerCapabilities.new(
        code_action_provider: true,
        document_formatting_provider: true,
        text_document_sync: sync_options
      )

    result =
      Types.Initialize.Result.new(
        capabilities: server_capabilities,
        server_info:
          Types.Initialize.Result.ServerInfo.new(
            name: "Lexical",
            version: "0.0.1"
          )
      )

    Responses.InitializeResult.new(event_id, result)
  end
end

# ──── file: apps/server/lib/lexical/server/transport.ex (new) ────

defmodule Lexical.Transport do
  # Thin façade over the IO server so callers don't depend on it directly.
  alias Lexical.Server.IOServer

  def log(level \\ :error, message) do
    IOServer.log(level, message)
  end

  def write(message) do
    IOServer.write(message)
  end
end
# ──── file: apps/server/lib/lexical/server/transport/std_io.ex (new) ────

defmodule Lexical.Transport.StdIO do
  # Standard-IO transport: delegates the actual reading/writing to IOServer.
  alias Lexical.Protocol.Notifications
  alias Lexical.Server
  alias Lexical.Server.IOServer

  # NOTE(review): @crlf and the aliases above are unused in the visible code —
  # presumably intended for message framing / later use; confirm before removing.
  @crlf "\r\n"

  defdelegate write(device, payload), to: IOServer
  defdelegate write(payload), to: IOServer
  defdelegate log(level, payload), to: IOServer

  # Convenience wrapper for error-level logging.
  def error(message_text), do: log(:error, message_text)
end

# ──── file: apps/server/mix.exs (new) ────

defmodule Lexical.Server.MixProject do
  use Mix.Project

  def project do
    [
      app: :server,
      version: "0.1.0",
      build_path: "../../_build",
      config_path: "../../config/config.exs",
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.14",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      elixirc_paths: elixirc_paths(Mix.env())
    ]
  end

  def application do
    [
      extra_applications: [:logger, :runtime_tools, :et, :wx, :kernel, :erts],
      mod: {Lexical.Server.Application, []}
    ]
  end

  def aliases do
    [test: "test --no-start"]
  end

  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:common, in_umbrella: true},
      {:elixir_sense, github: "elixir-lsp/elixir_sense"},
      {:jason, "~> 1.4"},
      {:path_glob, "~> 0.2"},
      {:protocol, in_umbrella: true},
      {:remote_control, in_umbrella: true, runtime: false},
      {:sourceror, "~> 0.11"},
      {:patch, "~> 0.12", runtime: false, only: [:dev, :test]}
    ]
  end
end

# ──── file: apps/server/test/fixtures/project/.formatter.exs (new) ────
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

# ──── file: apps/server/test/fixtures/project/.gitignore (new) ────
# The directory
Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +project-*.tar + +# Temporary files, for example, from tests. +/tmp/ diff --git a/apps/server/test/fixtures/project/README.md b/apps/server/test/fixtures/project/README.md new file mode 100644 index 000000000..dcbbf7c59 --- /dev/null +++ b/apps/server/test/fixtures/project/README.md @@ -0,0 +1,21 @@ +# Project + +**TODO: Add description** + +## Installation + +If [available in Hex](https://hex.pm/docs/publish), the package can be installed +by adding `project` to your list of dependencies in `mix.exs`: + +```elixir +def deps do + [ + {:project, "~> 0.1.0"} + ] +end +``` + +Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) +and published on [HexDocs](https://hexdocs.pm). Once published, the docs can +be found at . + diff --git a/apps/server/test/fixtures/project/lib/project.ex b/apps/server/test/fixtures/project/lib/project.ex new file mode 100644 index 000000000..dc6f504ea --- /dev/null +++ b/apps/server/test/fixtures/project/lib/project.ex @@ -0,0 +1,18 @@ +defmodule Project do + @moduledoc """ + Documentation for `Project`. + """ + + @doc """ + Hello world. 
+ + ## Examples + + iex> Project.hello() + :world + + """ + def hello do + :world + end +end diff --git a/apps/server/test/fixtures/project/mix.exs b/apps/server/test/fixtures/project/mix.exs new file mode 100644 index 000000000..172b53550 --- /dev/null +++ b/apps/server/test/fixtures/project/mix.exs @@ -0,0 +1,28 @@ +defmodule Project.MixProject do + use Mix.Project + + def project do + [ + app: :project, + version: "0.1.0", + elixir: "~> 1.14", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + # Run "mix help compile.app" to learn about applications. + def application do + [ + extra_applications: [:logger] + ] + end + + # Run "mix help deps" to learn about dependencies. + defp deps do + [ + # {:dep_from_hexpm, "~> 0.3.0"}, + # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"} + ] + end +end diff --git a/apps/server/test/lexical/project/diagnostics/state_test.exs b/apps/server/test/lexical/project/diagnostics/state_test.exs new file mode 100644 index 000000000..a0f9892b9 --- /dev/null +++ b/apps/server/test/lexical/project/diagnostics/state_test.exs @@ -0,0 +1,127 @@ +defmodule Lexical.Project.Diagnostics.StateTest do + alias Lexical.Project + alias Lexical.Protocol.Types.Diagnostic + alias Lexical.SourceFile + alias Lexical.Server.Project.Diagnostics.State + alias Mix.Task.Compiler + + import Lexical.Test.Fixtures + use Lexical.Test.CodeMod.Case + + setup do + {:ok, _} = start_supervised(Lexical.SourceFile.Store) + project = project() + state = State.new(project) + {:ok, project: project(), state: state} + end + + def existing_file_path do + Path.join([Project.root_path(project()), "lib", "project.ex"]) + end + + def source_file(contents \\ "") do + existing_file_path() + |> SourceFile.Path.to_uri() + |> SourceFile.new(contents, 0) + end + + def compiler_diagnostic(opts \\ []) do + %Compiler.Diagnostic{ + message: Keyword.get(opts, :message, "This file is broken"), + file: Keyword.get(opts, :file, 
existing_file_path()), + position: Keyword.get(opts, :position, 4), + severity: Keyword.get(opts, :severity, :error), + compiler_name: "Elixir" + } + end + + test "it allows you to add a global diagnostic", %{state: state} do + diagnostic = compiler_diagnostic(message: "This code is awful") + state = State.add(state, diagnostic) + + assert [%Diagnostic{}] = State.get(state, SourceFile.Path.to_uri(diagnostic.file)) + end + + test "it allows you to add a source file diagnostic", %{state: state} do + diagnostic = compiler_diagnostic(message: "Does not compile") + source_file = source_file("hello") + state = State.add(state, diagnostic, source_file) + + assert [%Diagnostic{}] = State.get(state, source_file.uri) + end + + describe "conversions" do + test "converts a position that is only a line number", %{state: state} do + diagnostic = compiler_diagnostic(message: "This code is awful") + state = State.add(state, diagnostic) + + assert [%Diagnostic{} = proto_diagnostic] = + State.get(state, SourceFile.Path.to_uri(diagnostic.file)) + + assert proto_diagnostic.message == "This code is awful" + range = proto_diagnostic.range + + # Starting at 0 and going to character 0 on the next line highlights the entire line + assert range.start.line == diagnostic.position - 1 + assert range.start.character == 0 + + assert range.end.line == diagnostic.position + assert range.end.character == 0 + end + + test "converts a position that is a line and a column", %{state: state} do + source = ~q[ + defmodule MyModule do + def foo do + + end + end + ] + source_file = source_file(source) + diagnostic = compiler_diagnostic(message: "Hoo boy, this is a mess", position: {2, 5}) + + state = State.add(state, diagnostic, source_file) + + assert [%Diagnostic{} = proto_diagnostic] = + State.get(state, SourceFile.Path.to_uri(diagnostic.file)) + + assert proto_diagnostic.message == "Hoo boy, this is a mess" + range = proto_diagnostic.range + + # Starting at 0 and going to character 0 on the next line 
highlights the entire line + assert range.start.line == 2 + assert range.start.character == 5 + + assert range.end.line == 2 + assert range.end.character == 5 + end + + test "converts a position that is a line and a column handling emojis", %{state: state} do + source = ~q[ + defmodule MyModule do + def foo do + "🎸hello" + end + end + ] + + source_file = source_file(source) + diagnostic = compiler_diagnostic(message: "Hoo boy, this is a mess", position: {3, 9}) + + state = State.add(state, diagnostic, source_file) + + assert [%Diagnostic{} = proto_diagnostic] = + State.get(state, SourceFile.Path.to_uri(diagnostic.file)) + + assert proto_diagnostic.message == "Hoo boy, this is a mess" + range = proto_diagnostic.range + + # Starting at 0 and going to character 0 on the next line highlights the entire line + assert range.start.line == 3 + assert range.start.character == 7 + + assert range.end.line == 3 + assert range.end.character == 7 + end + end +end diff --git a/apps/server/test/lexical/project/dispatch/state_test.exs b/apps/server/test/lexical/project/dispatch/state_test.exs new file mode 100644 index 000000000..b4ec401e5 --- /dev/null +++ b/apps/server/test/lexical/project/dispatch/state_test.exs @@ -0,0 +1,107 @@ +defmodule Lexical.Project.Dispatch.StateTest do + use ExUnit.Case + + alias Lexical.Project + alias Lexical.Server.Project.Dispatch.State + + setup do + project = %Project{} + state = State.new(project) + {:ok, state: state} + end + + def pid do + spawn(fn -> :ok end) + end + + describe "add/3" do + test "works for a specific type", %{state: state} do + state = State.add(state, :project_compiled, self()) + assert State.registered?(state, :project_compiled, self()) + refute State.registered?(state, :other_message, self()) + end + + test "works for all messages", %{state: state} do + state = State.add(state, :all, self()) + assert State.registered?(state, :all, self()) + assert State.registered?(state, :whatever, self()) + end + end + + describe 
"remove/3" do + test "can be removed", %{state: state} do + state = State.add(state, :project_compiled, self()) + assert State.registered?(state, :project_compiled, self()) + + state = State.remove(state, :project_compiled, self()) + refute State.registered?(state, :project_compiled, self()) + end + end + + describe "remove_all/2" do + test "all registrations can be removed", %{state: state} do + state = + state + |> State.add(:project_compiled, self()) + |> State.add(:other_message, self()) + |> State.add(:yet_another_message, self()) + |> State.remove_all(self()) + + refute State.registered?(state, :project_compiled, self()) + refute State.registered?(state, :other_message, self()) + refute State.registered?(state, :yet_another_message, self()) + end + end + + describe "registered?/2" do + test "returns true if a process is registered to all", %{state: state} do + state = State.add(state, :all, self()) + assert State.registered?(state, self()) + end + + test "returns true if a process is registered to a specific message", %{state: state} do + state = State.add(state, :project_compiled, self()) + assert State.registered?(state, self()) + end + + test "returns false if a process isn't registered", %{state: state} do + refute State.registered?(state, self()) + end + end + + describe "registrations/2" do + test "can see which things are registered for a given message type", %{state: state} do + first = pid() + second = pid() + third = pid() + + state = + state + |> State.add(:project_compiled, first) + |> State.add(:project_compiled, second) + |> State.add(:project_compiled, third) + + pids = State.registrations(state, :project_compiled) + assert first in pids + assert second in pids + assert third in pids + end + + test "includes those pids registered to all", %{state: state} do + first = pid() + second = pid() + third = pid() + + state = + state + |> State.add(:project_compiled, first) + |> State.add(:project_compiled, second) + |> State.add(:all, third) + + pids = 
State.registrations(state, :project_compiled) + assert first in pids + assert second in pids + assert third in pids + end + end +end diff --git a/apps/server/test/lexical/project/dispatch_test.exs b/apps/server/test/lexical/project/dispatch_test.exs new file mode 100644 index 000000000..b41aae69e --- /dev/null +++ b/apps/server/test/lexical/project/dispatch_test.exs @@ -0,0 +1,118 @@ +defmodule Lexical.Project.DispatchTest do + alias Lexical.Project + alias Lexical.Server.Project.Dispatch + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Messages + + import Messages + import Lexical.Test.Fixtures + use ExUnit.Case + use Patch + + setup do + {:ok, project: project()} + end + + def with_dispatch_started(%{project: project}) do + patch(RemoteControl, :start_link, :ok) + patch(RemoteControl.Api, :schedule_compile, :ok) + patch(RemoteControl.Api, :list_modules, []) + {:ok, dispatch} = start_supervised({Dispatch, project}) + + {:ok, dispatch: dispatch} + end + + defmodule Forwarder do + alias Lexical.Server.Project.Dispatch + + def start(%Project{} = project, message_types) do + test = self() + + pid = + spawn_link(fn -> + Dispatch.register(project, message_types) + send(test, :ready) + + loop(test) + end) + + receive do + :ready -> + {:ok, pid} + end + end + + def stop(pid) do + ref = Process.monitor(pid) + Process.unlink(pid) + Process.exit(pid, :kill) + + receive do + {:DOWN, ^ref, _, _, _} -> :ok + end + end + + def loop(parent) do + receive do + msg -> send(parent, {:forwarded, self(), msg}) + end + + loop(parent) + end + end + + describe "a running project" do + setup [:with_dispatch_started] + + test "starts the remote control", %{project: project} do + assert_called RemoteControl.start_link(^project, _) + end + + test "schedules a compile", %{project: project} do + assert_called RemoteControl.Api.schedule_compile(^project, false) + end + + test "allows processes to register for a message", %{project: project, dispatch: dispatch} do + assert :ok = 
Dispatch.register(project, [project_compiled()]) + + project_compiled = project_compiled(status: :successful) + send(dispatch, project_compiled) + assert_receive ^project_compiled + end + + test "allows processes to register for any message", %{project: project, dispatch: dispatch} do + assert :ok = Dispatch.register(project, [:all]) + send(dispatch, project_compiled(status: :successful)) + send(dispatch, module_updated()) + + assert_receive project_compiled() + assert_receive module_updated() + end + + test "cleans up if a process dies", %{project: project} do + Dispatch.register(project, [:all]) + {:ok, forwarder_pid} = Forwarder.start(project, [:all]) + + assert Dispatch.registered?(project, forwarder_pid) + :ok = Forwarder.stop(forwarder_pid) + refute Dispatch.registered?(project, forwarder_pid) + end + + test "handles multiple registrations", %{project: project, dispatch: dispatch} do + {:ok, forwarder_1} = Forwarder.start(project, [project_compiled()]) + {:ok, forwarder_2} = Forwarder.start(project, [module_updated()]) + {:ok, forwarder_3} = Forwarder.start(project, [:all]) + + send(dispatch, module_updated()) + send(dispatch, project_compiled()) + send(dispatch, {:other, :message}) + + assert_receive {:forwarded, ^forwarder_1, project_compiled()} + assert_receive {:forwarded, ^forwarder_2, module_updated()} + + assert_receive {:forwarded, ^forwarder_3, project_compiled()} + assert_receive {:forwarded, ^forwarder_3, module_updated()} + assert_receive {:forwarded, ^forwarder_3, {:other, :message}} + end + end +end diff --git a/apps/server/test/support/code_mod_case.ex b/apps/server/test/support/code_mod_case.ex new file mode 100644 index 000000000..90080f5e9 --- /dev/null +++ b/apps/server/test/support/code_mod_case.ex @@ -0,0 +1,92 @@ +defmodule Lexical.Test.CodeMod.Case do + alias Lexical.Protocol.Types.TextDocument.ContentChangeEvent + alias Lexical.SourceFile + + use ExUnit.CaseTemplate + + using do + quote do + import unquote(__MODULE__), only: [sigil_q: 
2] + + def apply_code_mod(_, _, _) do + {:error, "You must implement apply_code_mod/3"} + end + + defoverridable apply_code_mod: 3 + + def modify(original, options \\ []) do + with {:ok, ast} <- maybe_convert_to_ast(original, options), + {:ok, edits} <- apply_code_mod(original, ast, options) do + {:ok, unquote(__MODULE__).apply_edits(original, edits, options)} + end + end + + defp maybe_convert_to_ast(code, options) do + alias Lexical.CodeMod.Ast + + if Keyword.get(options, :convert_to_ast, true) do + Ast.from(code) + else + {:ok, nil} + end + end + end + end + + def sigil_q(text, opts \\ []) do + ["", first | rest] = text |> String.split("\n") + base_indent = indent(first) + indent_length = String.length(base_indent) + + Enum.map_join([first | rest], "\n", &strip_leading_indent(&1, indent_length)) + |> maybe_trim(opts) + end + + def apply_edits(original, text_edits, opts) do + source_file = SourceFile.new("file:///file.ex", original, 0) + + converted_edits = + Enum.map(text_edits, fn edit -> + ContentChangeEvent.new(text: edit.new_text, range: edit.range) + end) + + {:ok, edited_source_file} = SourceFile.apply_content_changes(source_file, 1, converted_edits) + edited_source = SourceFile.to_string(edited_source_file) + + if Keyword.get(opts, :trim, true) do + String.trim(edited_source) + else + edited_source + end + end + + defp maybe_trim(iodata, [?t]) do + iodata + |> IO.iodata_to_binary() + |> String.trim_trailing() + end + + defp maybe_trim(iodata, _) do + IO.iodata_to_binary(iodata) + end + + @indent_re ~r/^\s*/ + defp indent(first_line) do + case Regex.scan(@indent_re, first_line) do + [[indent]] -> indent + _ -> "" + end + end + + defp strip_leading_indent(s, 0) do + s + end + + defp strip_leading_indent(<<" ", rest::binary>>, count) when count > 0 do + strip_leading_indent(rest, count - 1) + end + + defp strip_leading_indent(s, _) do + s + end +end diff --git a/apps/server/test/support/fixtures.ex b/apps/server/test/support/fixtures.ex new file mode 100644 
index 000000000..75c503a21 --- /dev/null +++ b/apps/server/test/support/fixtures.ex @@ -0,0 +1,10 @@ +defmodule Lexical.Test.Fixtures do + alias Lexical.Project + + def project do + [File.cwd!(), "test", "fixtures", "project"] + |> Path.join() + |> Lexical.SourceFile.Path.to_uri() + |> Project.new() + end +end diff --git a/apps/server/test/test_helper.exs b/apps/server/test/test_helper.exs new file mode 100644 index 000000000..869559e70 --- /dev/null +++ b/apps/server/test/test_helper.exs @@ -0,0 +1 @@ +ExUnit.start() diff --git a/config/config.exs b/config/config.exs new file mode 100644 index 000000000..ab23e800e --- /dev/null +++ b/config/config.exs @@ -0,0 +1,18 @@ +# This file is responsible for configuring your umbrella +# and **all applications** and their dependencies with the +# help of the Config module. +# +# Note that all applications in your umbrella share the +# same configuration and dependencies, which is why they +# all use the same configuration file. If you want different +# configurations or dependencies per app, it is best to +# move said applications out of the umbrella. 
+import Config + +# Sample configuration: +# +# config :logger, :console, +# level: :info, +# format: "$date $time [$level] $metadata$message\n", +# metadata: [:user_id] +# diff --git a/mix.exs b/mix.exs new file mode 100644 index 000000000..693e47df0 --- /dev/null +++ b/mix.exs @@ -0,0 +1,44 @@ +defmodule Lexical.MixProject do + use Mix.Project + + def project do + [ + apps_path: "apps", + version: "0.1.0", + start_permanent: Mix.env() == :prod, + deps: deps(), + releases: releases(), + aliases: aliases() + ] + end + + defp deps do + [] + end + + defp releases do + [ + lexical: [ + applications: [ + server: :permanent, + remote_control: :load, + mix: :load + ], + include_executables_for: [:unix], + include_erts: false, + cookie: "lexical" + ], + remote_control: [ + applications: [remote_control: :permanent], + include_erts: false, + include_executables_for: [] + ] + ] + end + + defp aliases do + [ + test: "test --no-start" + ] + end +end diff --git a/mix.lock b/mix.lock new file mode 100644 index 000000000..f843dee32 --- /dev/null +++ b/mix.lock @@ -0,0 +1,8 @@ +%{ + "elixir_sense": {:git, "https://github.com/elixir-lsp/elixir_sense.git", "6d1a951bfbfc02e93b410acded82c736e1806df2", []}, + "jason": {:hex, :jason, "1.4.0", "e855647bc964a44e2f67df589ccf49105ae039d4179db7f6271dfd3843dc27e6", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "79a3791085b2a0f743ca04cec0f7be26443738779d09302e01318f97bdb82121"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"}, + "patch": {:hex, :patch, "0.12.0", "2da8967d382bade20344a3e89d618bfba563b12d4ac93955468e830777f816b0", [:mix], [], "hexpm", "ffd0e9a7f2ad5054f37af84067ee88b1ad337308a1cb227e181e3967127b0235"}, + "path_glob": {:hex, :path_glob, "0.2.0", "b9e34b5045cac5ecb76ef1aa55281a52bf603bf7009002085de40958064ca312", [:mix], 
[{:nimble_parsec, "~> 1.2.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "be2594cb4553169a1a189f95193d910115f64f15f0d689454bb4e8cfae2e7ebc"}, + "sourceror": {:hex, :sourceror, "0.11.2", "549ce48be666421ac60cfb7f59c8752e0d393baa0b14d06271d3f6a8c1b027ab", [:mix], [], "hexpm", "9ab659118896a36be6eec68ff7b0674cba372fc8e210b1e9dc8cf2b55bb70dfb"}, +}