mirror of https://github.com/helix-editor/helix
Merge branch 'master' into help-command
commit d7fb82f22b
@ -1,29 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: C-bug
assignees: ''

---

<!-- Your issue may already be reported!
Please search on the issue tracker before creating one. -->

### Reproduction steps

<!-- Ideally provide a key sequence and/or asciinema.org recording. -->

### Environment

- Platform: <!-- macOS / Windows / Linux -->
- Terminal emulator:
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->

<details><summary>~/.cache/helix/helix.log</summary>

```
please provide a copy of `~/.cache/helix/helix.log` here if possible, you may need to redact some of the lines
```

</details>
@ -0,0 +1,67 @@
name: Bug Report
description: Create a report to help us improve
labels: C-bug
body:
  - type: markdown
    attributes:
      value: Thank you for filing a bug report! 🐛
  - type: textarea
    id: problem
    attributes:
      label: Summary
      description: >
        Please provide a short summary of the bug, along with any information
        you feel relevant to replicate the bug.
    validations:
      required: true
  - type: textarea
    id: reproduction-steps
    attributes:
      label: Reproduction Steps
      value: |
        <!-- Ideally provide a key sequence and/or asciinema.org recording. -->

        I tried this:

        1. `hx`

        I expected this to happen:

        Instead, this happened:
  - type: textarea
    id: helix-log
    attributes:
      label: Helix log
      description: See `hx -h` for log file path
      value: |
        <details><summary>~/.cache/helix/helix.log</summary>

        ```
        please provide a copy of `~/.cache/helix/helix.log` here if possible, you may need to redact some of the lines
        ```

        </details>
  - type: input
    id: platform
    attributes:
      label: Platform
      placeholder: Linux / macOS / Windows
    validations:
      required: true
  - type: input
    id: terminal-emulator
    attributes:
      label: Terminal Emulator
      placeholder: wezterm 20220101-133340-7edc5b5a
    validations:
      required: true
  - type: input
    id: helix-version
    attributes:
      label: Helix Version
      description: >
        Helix version (`hx -V` if using a release, `git describe` if building
        from master)
      placeholder: "helix 0.6.0 (c0dbd6dc)"
    validations:
      required: true
@ -0,0 +1,26 @@
# Publish the Nix flake outputs to Cachix
name: Cachix
on:
  push:
    branches:
      - master

jobs:
  publish:
    name: Publish Flake
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3

      - name: Install nix
        uses: cachix/install-nix-action@v16

      - name: Authenticate with Cachix
        uses: cachix/cachix-action@v10
        with:
          name: helix
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Build nix flake
        run: nix build
@ -0,0 +1,26 @@
# This languages.toml is used for testing in CI.

[[language]]
name = "rust"
scope = "source.rust"
injection-regex = "rust"
file-types = ["rs"]
comment-token = "//"
roots = ["Cargo.toml", "Cargo.lock"]
indent = { tab-width = 4, unit = " " }

[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }

[[language]]
name = "nix"
scope = "source.nix"
injection-regex = "nix"
file-types = ["nix"]
shebangs = []
roots = []
comment-token = "#"

# A grammar entry is not necessary for this language - it is only used for
# testing TOML merging behavior.
@ -1,231 +0,0 @@
|
||||
[submodule "helix-syntax/languages/tree-sitter-cpp"]
|
||||
path = helix-syntax/languages/tree-sitter-cpp
|
||||
url = https://github.com/tree-sitter/tree-sitter-cpp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-javascript"]
|
||||
path = helix-syntax/languages/tree-sitter-javascript
|
||||
url = https://github.com/tree-sitter/tree-sitter-javascript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-julia"]
|
||||
path = helix-syntax/languages/tree-sitter-julia
|
||||
url = https://github.com/tree-sitter/tree-sitter-julia
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-python"]
|
||||
path = helix-syntax/languages/tree-sitter-python
|
||||
url = https://github.com/tree-sitter/tree-sitter-python
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-typescript"]
|
||||
path = helix-syntax/languages/tree-sitter-typescript
|
||||
url = https://github.com/tree-sitter/tree-sitter-typescript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-agda"]
|
||||
path = helix-syntax/languages/tree-sitter-agda
|
||||
url = https://github.com/tree-sitter/tree-sitter-agda
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-go"]
|
||||
path = helix-syntax/languages/tree-sitter-go
|
||||
url = https://github.com/tree-sitter/tree-sitter-go
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ruby"]
|
||||
path = helix-syntax/languages/tree-sitter-ruby
|
||||
url = https://github.com/tree-sitter/tree-sitter-ruby
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-java"]
|
||||
path = helix-syntax/languages/tree-sitter-java
|
||||
url = https://github.com/tree-sitter/tree-sitter-java
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-php"]
|
||||
path = helix-syntax/languages/tree-sitter-php
|
||||
url = https://github.com/tree-sitter/tree-sitter-php
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-html"]
|
||||
path = helix-syntax/languages/tree-sitter-html
|
||||
url = https://github.com/tree-sitter/tree-sitter-html
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-scala"]
|
||||
path = helix-syntax/languages/tree-sitter-scala
|
||||
url = https://github.com/tree-sitter/tree-sitter-scala
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-bash"]
|
||||
path = helix-syntax/languages/tree-sitter-bash
|
||||
url = https://github.com/tree-sitter/tree-sitter-bash
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-rust"]
|
||||
path = helix-syntax/languages/tree-sitter-rust
|
||||
url = https://github.com/tree-sitter/tree-sitter-rust
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-json"]
|
||||
path = helix-syntax/languages/tree-sitter-json
|
||||
url = https://github.com/tree-sitter/tree-sitter-json
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-css"]
|
||||
path = helix-syntax/languages/tree-sitter-css
|
||||
url = https://github.com/tree-sitter/tree-sitter-css
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c-sharp"]
|
||||
path = helix-syntax/languages/tree-sitter-c-sharp
|
||||
url = https://github.com/tree-sitter/tree-sitter-c-sharp
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-c"]
|
||||
path = helix-syntax/languages/tree-sitter-c
|
||||
url = https://github.com/tree-sitter/tree-sitter-c
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-haskell"]
|
||||
path = helix-syntax/languages/tree-sitter-haskell
|
||||
url = https://github.com/tree-sitter/tree-sitter-haskell
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-swift"]
|
||||
path = helix-syntax/languages/tree-sitter-swift
|
||||
url = https://github.com/tree-sitter/tree-sitter-swift
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-toml"]
|
||||
path = helix-syntax/languages/tree-sitter-toml
|
||||
url = https://github.com/ikatyang/tree-sitter-toml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-elixir"]
|
||||
path = helix-syntax/languages/tree-sitter-elixir
|
||||
url = https://github.com/elixir-lang/tree-sitter-elixir
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-nix"]
|
||||
path = helix-syntax/languages/tree-sitter-nix
|
||||
url = https://github.com/cstrahan/tree-sitter-nix
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-latex"]
|
||||
path = helix-syntax/languages/tree-sitter-latex
|
||||
url = https://github.com/latex-lsp/tree-sitter-latex
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ledger"]
|
||||
path = helix-syntax/languages/tree-sitter-ledger
|
||||
url = https://github.com/cbarrete/tree-sitter-ledger
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-protobuf"]
|
||||
path = helix-syntax/languages/tree-sitter-protobuf
|
||||
url = https://github.com/yusdacra/tree-sitter-protobuf.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-ocaml"]
|
||||
path = helix-syntax/languages/tree-sitter-ocaml
|
||||
url = https://github.com/tree-sitter/tree-sitter-ocaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-lua"]
|
||||
path = helix-syntax/languages/tree-sitter-lua
|
||||
url = https://github.com/nvim-treesitter/tree-sitter-lua
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-yaml"]
|
||||
path = helix-syntax/languages/tree-sitter-yaml
|
||||
url = https://github.com/ikatyang/tree-sitter-yaml
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-zig"]
|
||||
path = helix-syntax/languages/tree-sitter-zig
|
||||
url = https://github.com/maxxnino/tree-sitter-zig
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-svelte"]
|
||||
path = helix-syntax/languages/tree-sitter-svelte
|
||||
url = https://github.com/Himujjal/tree-sitter-svelte
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-vue"]
|
||||
path = helix-syntax/languages/tree-sitter-vue
|
||||
url = https://github.com/ikatyang/tree-sitter-vue
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-tsq"]
|
||||
path = helix-syntax/languages/tree-sitter-tsq
|
||||
url = https://github.com/tree-sitter/tree-sitter-tsq
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-cmake"]
|
||||
path = helix-syntax/languages/tree-sitter-cmake
|
||||
url = https://github.com/uyha/tree-sitter-cmake
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-glsl"]
|
||||
path = helix-syntax/languages/tree-sitter-glsl
|
||||
url = https://github.com/theHamsta/tree-sitter-glsl.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-perl"]
|
||||
path = helix-syntax/languages/tree-sitter-perl
|
||||
url = https://github.com/ganezdragon/tree-sitter-perl
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-comment"]
|
||||
path = helix-syntax/languages/tree-sitter-comment
|
||||
url = https://github.com/stsewd/tree-sitter-comment
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-wgsl"]
|
||||
path = helix-syntax/languages/tree-sitter-wgsl
|
||||
url = https://github.com/szebniok/tree-sitter-wgsl
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-llvm"]
|
||||
path = helix-syntax/languages/tree-sitter-llvm
|
||||
url = https://github.com/benwilliamgraham/tree-sitter-llvm
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-markdown"]
|
||||
path = helix-syntax/languages/tree-sitter-markdown
|
||||
url = https://github.com/MDeiml/tree-sitter-markdown
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-dart"]
|
||||
path = helix-syntax/languages/tree-sitter-dart
|
||||
url = https://github.com/UserNobody14/tree-sitter-dart.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-dockerfile"]
|
||||
path = helix-syntax/languages/tree-sitter-dockerfile
|
||||
url = https://github.com/camdencheek/tree-sitter-dockerfile.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-fish"]
|
||||
path = helix-syntax/languages/tree-sitter-fish
|
||||
url = https://github.com/ram02z/tree-sitter-fish
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-commit"]
|
||||
path = helix-syntax/languages/tree-sitter-git-commit
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-commit.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-llvm-mir"]
|
||||
path = helix-syntax/languages/tree-sitter-llvm-mir
|
||||
url = https://github.com/Flakebi/tree-sitter-llvm-mir.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-diff"]
|
||||
path = helix-syntax/languages/tree-sitter-git-diff
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-diff.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-tablegen"]
|
||||
path = helix-syntax/languages/tree-sitter-tablegen
|
||||
url = https://github.com/Flakebi/tree-sitter-tablegen
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-rebase"]
|
||||
path = helix-syntax/languages/tree-sitter-git-rebase
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-lean"]
|
||||
path = helix-syntax/languages/tree-sitter-lean
|
||||
url = https://github.com/Julian/tree-sitter-lean
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-regex"]
|
||||
path = helix-syntax/languages/tree-sitter-regex
|
||||
url = https://github.com/tree-sitter/tree-sitter-regex.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-make"]
|
||||
path = helix-syntax/languages/tree-sitter-make
|
||||
url = https://github.com/alemuller/tree-sitter-make
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-git-config"]
|
||||
path = helix-syntax/languages/tree-sitter-git-config
|
||||
url = https://github.com/the-mikedavis/tree-sitter-git-config.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-graphql"]
|
||||
path = helix-syntax/languages/tree-sitter-graphql
|
||||
url = https://github.com/bkegley/tree-sitter-graphql
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-elm"]
|
||||
path = helix-syntax/languages/tree-sitter-elm
|
||||
url = https://github.com/elm-tooling/tree-sitter-elm
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-iex"]
|
||||
path = helix-syntax/languages/tree-sitter-iex
|
||||
url = https://github.com/elixir-lang/tree-sitter-iex
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-twig"]
|
||||
path = helix-syntax/languages/tree-sitter-twig
|
||||
url = https://github.com/eirabben/tree-sitter-twig.git
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-rescript"]
|
||||
path = helix-syntax/languages/tree-sitter-rescript
|
||||
url = https://github.com/jaredramirez/tree-sitter-rescript
|
||||
shallow = true
|
||||
[submodule "helix-syntax/languages/tree-sitter-erlang"]
|
||||
path = helix-syntax/languages/tree-sitter-erlang
|
||||
url = https://github.com/the-mikedavis/tree-sitter-erlang
|
@ -1,19 +0,0 @@
- [ ] completion isIncomplete support
- [ ] respect view fullscreen flag
- [ ] Implement marks (superset of Selection/Range)

- [ ] = for auto indent line/selection
- [ ] lsp: signature help

2
- [ ] store some state between restarts: file positions, prompt history
- [ ] highlight matched characters in picker

3
- [ ] diff mode with highlighting?
- [ ] snippet support (tab to jump between marks)
- [ ] gamelisp/wasm scripting

X
- [ ] rendering via skulpin/skia or raw wgpu
@ -0,0 +1,79 @@
# Adding Indent Queries

Helix uses tree-sitter to correctly indent new lines. This requires
a tree-sitter grammar and an `indents.scm` query file placed in
`runtime/queries/{language}/indents.scm`. The indentation for a line
is calculated by traversing the syntax tree from the lowest node at the
beginning of the new line. Each of these nodes contributes to the total
indent when it is captured by the query (in what way depends on the name
of the capture).

Note that it matters where these added indents begin. For example,
multiple indent level increases that start on the same line only increase
the total indent level by 1.

## Scopes

Added indents don't always apply to the whole node. For example, in most
cases when a node should be indented, we actually only want everything
except for its first line to be indented. For this, there are several
scopes (more scopes may be added in the future if required):

- `all`:
  This scope applies to the whole captured node. This is only different from
  `tail` when the captured node is the first node on its line.

- `tail`:
  This scope applies to everything except for the first line of the
  captured node.

Every capture type has a default scope which should do the right thing
in most situations. When a different scope is required, this can be
changed by using a `#set!` declaration anywhere in the pattern:
```scm
(assignment_expression
  right: (_) @indent
  (#set! "scope" "all"))
```

## Capture Types

- `@indent` (default scope `tail`):
  Increase the indent level by 1. Multiple occurrences on the same line
  don't stack. If there is at least one `@indent` and one `@outdent`
  capture on the same line, the indent level isn't changed at all.

- `@outdent` (default scope `all`):
  Decrease the indent level by 1. The same rules as for `@indent` apply.

## Predicates

In some cases, an S-expression cannot express exactly what pattern should be matched.
For that, tree-sitter allows for predicates to appear anywhere within a pattern,
similar to how `#set!` declarations work:
```scm
(some_kind
  (child_kind) @indent
  (#predicate? arg1 arg2 ...)
)
```
The number of arguments depends on the predicate that's used.
Each argument is either a capture (`@name`) or a string (`"some string"`).
The following predicates are supported by tree-sitter:

- `#eq?`/`#not-eq?`:
  The first argument (a capture) must/must not be equal to the second argument
  (a capture or a string).

- `#match?`/`#not-match?`:
  The first argument (a capture) must/must not match the regex given in the
  second argument (a string).

Additionally, we support some custom predicates for indent queries:

- `#not-kind-eq?`:
  The kind of the first argument (a capture) must not be equal to the second
  argument (a string).

- `#same-line?`/`#not-same-line?`:
  The captures given by the 2 arguments must/must not start on the same line.
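As a worked illustration, here is a minimal sketch of what an `indents.scm` could combine these captures and declarations into; the `block` node name is hypothetical and would need to be replaced with real node kinds from the grammar being targeted:
```scm
; Indent everything after the first line of a (hypothetical) block node
; (default scope "tail").
(block) @indent

; Outdent lines that start with a closing brace.
"}" @outdent

; Indent the right-hand side of an assignment, including its first line.
(assignment_expression
  right: (_) @indent
  (#set! "scope" "all"))
```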
@ -0,0 +1,15 @@
# Helix

Docs for bleeding edge master can be found at
[https://docs.helix-editor.com/master](https://docs.helix-editor.com/master).

See the [usage] section for a quick overview of the editor, the [keymap]
section for all available keybindings, and the [configuration] section
for defining custom keybindings, setting themes, etc.

Refer to the [FAQ] for common questions.

[FAQ]: https://github.com/helix-editor/helix/wiki/FAQ
[usage]: ./usage.md
[keymap]: ./keymap.md
[configuration]: ./configuration.md
@ -0,0 +1,106 @@
|
||||
{ stdenv, lib, runCommand, yj }:
|
||||
let
|
||||
# HACK: nix < 2.6 has a bug in the toml parser, so we convert to JSON
|
||||
# before parsing
|
||||
languages-json = runCommand "languages-toml-to-json" { } ''
|
||||
${yj}/bin/yj -t < ${./languages.toml} > $out
|
||||
'';
|
||||
languagesConfig = if lib.versionAtLeast builtins.nixVersion "2.6.0" then
|
||||
builtins.fromTOML (builtins.readFile ./languages.toml)
|
||||
else
|
||||
builtins.fromJSON (builtins.readFile (builtins.toPath languages-json));
|
||||
isGitGrammar = (grammar:
|
||||
builtins.hasAttr "source" grammar && builtins.hasAttr "git" grammar.source
|
||||
&& builtins.hasAttr "rev" grammar.source);
|
||||
isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git;
|
||||
toGitHubFetcher = url: let
|
||||
match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url;
|
||||
in {
|
||||
owner = builtins.elemAt match 0;
|
||||
repo = builtins.elemAt match 1;
|
||||
};
|
||||
gitGrammars = builtins.filter isGitGrammar languagesConfig.grammar;
|
||||
buildGrammar = grammar:
|
||||
let
|
||||
gh = toGitHubFetcher grammar.source.git;
|
||||
sourceGit = builtins.fetchTree {
|
||||
type = "git";
|
||||
url = grammar.source.git;
|
||||
rev = grammar.source.rev;
|
||||
ref = grammar.source.ref or "HEAD";
|
||||
shallow = true;
|
||||
};
|
||||
sourceGitHub = builtins.fetchTree {
|
||||
type = "github";
|
||||
owner = gh.owner;
|
||||
repo = gh.repo;
|
||||
inherit (grammar.source) rev;
|
||||
};
|
||||
source = if isGitHubGrammar grammar then sourceGitHub else sourceGit;
|
||||
in stdenv.mkDerivation rec {
|
||||
# see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix
|
||||
|
||||
pname = "helix-tree-sitter-${grammar.name}";
|
||||
version = grammar.source.rev;
|
||||
|
||||
src = if builtins.hasAttr "subpath" grammar.source then
|
||||
"${source}/${grammar.source.subpath}"
|
||||
else
|
||||
source;
|
||||
|
||||
dontUnpack = true;
|
||||
dontConfigure = true;
|
||||
|
||||
FLAGS = [
|
||||
"-I${src}/src"
|
||||
"-g"
|
||||
"-O3"
|
||||
"-fPIC"
|
||||
"-fno-exceptions"
|
||||
"-Wl,-z,relro,-z,now"
|
||||
];
|
||||
|
||||
NAME = grammar.name;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
if [[ -e "$src/src/scanner.cc" ]]; then
|
||||
$CXX -c "$src/src/scanner.cc" -o scanner.o $FLAGS
|
||||
elif [[ -e "$src/src/scanner.c" ]]; then
|
||||
$CC -c "$src/src/scanner.c" -o scanner.o $FLAGS
|
||||
fi
|
||||
|
||||
$CC -c "$src/src/parser.c" -o parser.o $FLAGS
|
||||
$CXX -shared -o $NAME.so *.o
|
||||
|
||||
ls -al
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
mkdir $out
|
||||
mv $NAME.so $out/
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
# Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped
|
||||
fixupPhase = lib.optionalString stdenv.isLinux ''
|
||||
runHook preFixup
|
||||
$STRIP $out/$NAME.so
|
||||
runHook postFixup
|
||||
'';
|
||||
};
|
||||
builtGrammars = builtins.map (grammar: {
|
||||
inherit (grammar) name;
|
||||
artifact = buildGrammar grammar;
|
||||
}) gitGrammars;
|
||||
grammarLinks = builtins.map (grammar:
|
||||
"ln -s ${grammar.artifact}/${grammar.name}.so $out/${grammar.name}.so")
|
||||
builtGrammars;
|
||||
in runCommand "consolidated-helix-grammars" { } ''
|
||||
mkdir -p $out
|
||||
${builtins.concatStringsSep "\n" grammarLinks}
|
||||
''
|
@ -1,33 +1,10 @@
use crate::merge_toml_values;

/// Default built-in languages.toml.
pub fn default_lang_config() -> toml::Value {
    toml::from_slice(include_bytes!("../../languages.toml"))
        .expect("Could not parse built-in languages.toml to valid toml")
}

/// User configured languages.toml file, merged with the default config.
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
    let def_lang_conf = default_lang_config();
    let data = std::fs::read(crate::config_dir().join("languages.toml"));
    let user_lang_conf = match data {
        Ok(raw) => {
            let value = toml::from_slice(&raw)?;
            merge_toml_values(def_lang_conf, value)
        }
        Err(_) => def_lang_conf,
    };

    Ok(user_lang_conf)
}

/// Syntax configuration loader based on built-in languages.toml.
pub fn default_syntax_loader() -> crate::syntax::Configuration {
    default_lang_config()
    helix_loader::default_lang_config()
        .try_into()
        .expect("Could not serialize built-in language.toml")
        .expect("Could not serialize built-in languages.toml")
}
/// Syntax configuration loader based on user configured languages.toml.
pub fn user_syntax_loader() -> Result<crate::syntax::Configuration, toml::de::Error> {
    user_lang_config()?.try_into()
    helix_loader::user_lang_config()?.try_into()
}
@ -0,0 +1 @@
../../../src/indent.rs
@ -0,0 +1,13 @@
# This languages.toml should contain definitions for all languages for which we have indent tests
[[language]]
name = "rust"
scope = "source.rust"
injection-regex = "rust"
file-types = ["rs"]
comment-token = "//"
roots = ["Cargo.toml", "Cargo.lock"]
indent = { tab-width = 4, unit = " " }

[[grammar]]
name = "rust"
source = { git = "https://github.com/tree-sitter/tree-sitter-rust", rev = "a360da0a29a19c281d08295a35ecd0544d2da211" }
@ -0,0 +1,105 @@
|
||||
use std::{
|
||||
io::{self, stdout, Stdout, Write},
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
mod test {
|
||||
fn hello_world() {
|
||||
1 + 1;
|
||||
|
||||
let does_indentation_work = 1;
|
||||
|
||||
let mut really_long_variable_name_using_up_the_line =
|
||||
really_long_fn_that_should_definitely_go_on_the_next_line();
|
||||
really_long_variable_name_using_up_the_line =
|
||||
really_long_fn_that_should_definitely_go_on_the_next_line();
|
||||
really_long_variable_name_using_up_the_line |=
|
||||
really_long_fn_that_should_definitely_go_on_the_next_line();
|
||||
|
||||
let (
|
||||
a_long_variable_name_in_this_tuple,
|
||||
b_long_variable_name_in_this_tuple,
|
||||
c_long_variable_name_in_this_tuple,
|
||||
d_long_variable_name_in_this_tuple,
|
||||
e_long_variable_name_in_this_tuple,
|
||||
): (usize, usize, usize, usize, usize) =
|
||||
if really_long_fn_that_should_definitely_go_on_the_next_line() {
|
||||
(
|
||||
03294239434,
|
||||
1213412342314,
|
||||
21231234134,
|
||||
834534234549898789,
|
||||
9879234234543853457,
|
||||
)
|
||||
} else {
|
||||
(0, 1, 2, 3, 4)
|
||||
};
|
||||
|
||||
let test_function = function_with_param(this_param,
|
||||
that_param
|
||||
);
|
||||
|
||||
let test_function = function_with_param(
|
||||
this_param,
|
||||
that_param
|
||||
);
|
||||
|
||||
let test_function = function_with_proper_indent(param1,
|
||||
param2,
|
||||
);
|
||||
|
||||
let selection = Selection::new(
|
||||
changes
|
||||
.clone()
|
||||
.map(|(start, end, text): (usize, usize, Option<Tendril>)| {
|
||||
let len = text.map(|text| text.len()).unwrap() - 1; // minus newline
|
||||
let pos = start + len;
|
||||
Range::new(pos, pos)
|
||||
})
|
||||
.collect(),
|
||||
0,
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, D> MyTrait<A, D> for YourType
|
||||
where
|
||||
A: TraitB + TraitC,
|
||||
D: TraitE + TraitF,
|
||||
{
|
||||
|
||||
}
|
||||
#[test]
|
||||
//
|
||||
match test {
|
||||
Some(a) => 1,
|
||||
None => {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
std::panic::set_hook(Box::new(move |info| {
|
||||
hook(info);
|
||||
}));
|
||||
|
||||
{ { {
|
||||
1
|
||||
}}}
|
||||
|
||||
pub fn change<I>(document: &Document, changes: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = Change> + ExactSizeIterator,
|
||||
{
|
||||
[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
];
|
||||
(
|
||||
1,
|
||||
2
|
||||
);
|
||||
true
|
||||
}
|
@ -0,0 +1,68 @@
use helix_core::{
    indent::{treesitter_indent_for_pos, IndentStyle},
    syntax::Loader,
    Syntax,
};
use std::path::PathBuf;

#[test]
fn test_treesitter_indent_rust() {
    test_treesitter_indent("rust.rs", "source.rust");
}
#[test]
fn test_treesitter_indent_rust_2() {
    test_treesitter_indent("indent.rs", "source.rust");
    // TODO Use commands.rs as indentation test.
    // Currently this fails because we can't align the parameters of a closure yet
    // test_treesitter_indent("commands.rs", "source.rust");
}

fn test_treesitter_indent(file_name: &str, lang_scope: &str) {
    let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    test_dir.push("tests/data/indent");

    let mut test_file = test_dir.clone();
    test_file.push(file_name);
    let test_file = std::fs::File::open(test_file).unwrap();
    let doc = ropey::Rope::from_reader(test_file).unwrap();

    let mut config_file = test_dir;
    config_file.push("languages.toml");
    let config = std::fs::read(config_file).unwrap();
    let config = toml::from_slice(&config).unwrap();
    let loader = Loader::new(config);

    // set runtime path so we can find the queries
    let mut runtime = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    runtime.push("../runtime");
    std::env::set_var("HELIX_RUNTIME", runtime.to_str().unwrap());

    let language_config = loader.language_config_for_scope(lang_scope).unwrap();
    let highlight_config = language_config.highlight_config(&[]).unwrap();
    let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
    let indent_query = language_config.indent_query().unwrap();
    let text = doc.slice(..);

    for i in 0..doc.len_lines() {
        let line = text.line(i);
        if let Some(pos) = helix_core::find_first_non_whitespace_char(line) {
            let suggested_indent = treesitter_indent_for_pos(
                indent_query,
                &syntax,
                &IndentStyle::Spaces(4),
                text,
                i,
                text.line_to_char(i) + pos,
                false,
            )
            .unwrap();
            assert!(
                line.get_slice(..pos).map_or(false, |s| s == suggested_indent),
                "Wrong indentation on line {}:\n\"{}\" (original line)\n\"{}\" (suggested indentation)\n",
                i + 1,
                line.slice(..line.len_chars() - 1),
                suggested_indent,
            );
        }
    }
}
@ -1,21 +1,23 @@
[package]
name = "helix-syntax"
name = "helix-loader"
version = "0.6.0"
description = "A post-modern text editor."
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
description = "Tree-sitter grammars support"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
include = ["src/**/*", "languages/**/*", "build.rs", "!**/docs/**/*", "!**/test/**/*", "!**/examples/**/*", "!**/build/**/*"]

[dependencies]
anyhow = "1"
serde = { version = "1.0", features = ["derive"] }
toml = "0.5"
etcetera = "0.3"
tree-sitter = "0.20"
libloading = "0.7"
anyhow = "1"
once_cell = "1.9"

[build-dependencies]
# cloning/compiling tree-sitter grammars
cc = { version = "1" }
threadpool = { version = "1.0" }
anyhow = "1"
@ -0,0 +1,6 @@
fn main() {
    println!(
        "cargo:rustc-env=BUILD_TARGET={}",
        std::env::var("TARGET").unwrap()
    );
}
@ -0,0 +1,391 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use libloading::{Library, Symbol};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::mpsc::channel,
|
||||
};
|
||||
use tree_sitter::Language;
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Configuration {
|
||||
#[serde(rename = "use-grammars")]
|
||||
pub grammar_selection: Option<GrammarSelection>,
|
||||
pub grammar: Vec<GrammarConfiguration>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSelection {
|
||||
Only { only: HashSet<String> },
|
||||
Except { except: HashSet<String> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct GrammarConfiguration {
|
||||
#[serde(rename = "name")]
|
||||
pub grammar_id: String,
|
||||
pub source: GrammarSource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase", untagged)]
|
||||
pub enum GrammarSource {
|
||||
Local {
|
||||
path: String,
|
||||
},
|
||||
Git {
|
||||
#[serde(rename = "git")]
|
||||
remote: String,
|
||||
#[serde(rename = "rev")]
|
||||
revision: String,
|
||||
subpath: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
const BUILD_TARGET: &str = env!("BUILD_TARGET");
|
||||
const REMOTE_NAME: &str = "origin";
|
||||
|
||||
pub fn get_language(name: &str) -> Result<Language> {
|
||||
let name = name.to_ascii_lowercase();
|
||||
let mut library_path = crate::runtime_dir().join("grammars").join(&name);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let library = unsafe { Library::new(&library_path) }
|
||||
.with_context(|| format!("Error opening dynamic library {:?}", library_path))?;
|
||||
let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_"));
|
||||
let language = unsafe {
|
||||
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
|
||||
.get(language_fn_name.as_bytes())
|
||||
.with_context(|| format!("Failed to load symbol {}", language_fn_name))?;
|
||||
language_fn()
|
||||
};
|
||||
std::mem::forget(library);
|
||||
Ok(language)
|
||||
}
|
||||
|
||||
pub fn fetch_grammars() -> Result<()> {
|
||||
// We do not need to fetch local grammars.
|
||||
let mut grammars = get_grammar_configs()?;
|
||||
grammars.retain(|grammar| !matches!(grammar.source, GrammarSource::Local { .. }));
|
||||
|
||||
run_parallel(grammars, fetch_grammar, "fetch")
|
||||
}
|
||||
|
||||
pub fn build_grammars() -> Result<()> {
|
||||
run_parallel(get_grammar_configs()?, build_grammar, "build")
|
||||
}
|
||||
|
||||
// Returns the set of grammar configurations the user requests.
|
||||
// Grammars are configured in the default and user `languages.toml` and are
|
||||
// merged. The `grammar_selection` key of the config is then used to filter
|
||||
// down all grammars into a subset of the user's choosing.
|
||||
fn get_grammar_configs() -> Result<Vec<GrammarConfiguration>> {
|
||||
let config: Configuration = crate::user_lang_config()
|
||||
.context("Could not parse languages.toml")?
|
||||
.try_into()?;
|
||||
|
||||
let grammars = match config.grammar_selection {
|
||||
Some(GrammarSelection::Only { only: selections }) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| selections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
Some(GrammarSelection::Except { except: rejections }) => config
|
||||
.grammar
|
||||
.into_iter()
|
||||
.filter(|grammar| !rejections.contains(&grammar.grammar_id))
|
||||
.collect(),
|
||||
None => config.grammar,
|
||||
};
|
||||
|
||||
Ok(grammars)
|
||||
}
|
||||
|
||||
fn run_parallel<F>(grammars: Vec<GrammarConfiguration>, job: F, action: &'static str) -> Result<()>
|
||||
where
|
||||
F: Fn(GrammarConfiguration) -> Result<()> + std::marker::Send + 'static + Copy,
|
||||
{
|
||||
let pool = threadpool::Builder::new().build();
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for grammar in grammars {
|
||||
let tx = tx.clone();
|
||||
|
||||
pool.execute(move || {
|
||||
tx.send(job(grammar)).unwrap();
|
||||
});
|
||||
}
|
||||
|
||||
drop(tx);
|
||||
|
||||
// TODO: print all failures instead of the first one found.
|
||||
rx.iter()
|
||||
.find(|result| result.is_err())
|
||||
.map(|err| err.with_context(|| format!("Failed to {} some grammar(s)", action)))
|
||||
.unwrap_or(Ok(()))
|
||||
}
|
||||
|
||||
fn fetch_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
if let GrammarSource::Git {
|
||||
remote, revision, ..
|
||||
} = grammar.source
|
||||
{
|
||||
let grammar_dir = crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id);
|
||||
|
||||
fs::create_dir_all(&grammar_dir).context(format!(
|
||||
"Could not create grammar directory {:?}",
|
||||
grammar_dir
|
||||
))?;
|
||||
|
||||
// ensure the grammar dir contains a git repository
|
||||
if !grammar_dir.join(".git").is_dir() {
|
||||
git(&grammar_dir, ["init"])?;
|
||||
}
|
||||
|
||||
// ensure the remote matches the configured remote
|
||||
if get_remote_url(&grammar_dir).map_or(true, |s| s != remote) {
|
||||
set_remote(&grammar_dir, &remote)?;
|
||||
}
|
||||
|
||||
// ensure the revision matches the configured revision
|
||||
if get_revision(&grammar_dir).map_or(true, |s| s != revision) {
|
||||
// Fetch the exact revision from the remote.
|
||||
// Supported by server-side git since v2.5.0 (July 2015),
|
||||
// enabled by default on major git hosts.
|
||||
git(
|
||||
&grammar_dir,
|
||||
["fetch", "--depth", "1", REMOTE_NAME, &revision],
|
||||
)?;
|
||||
git(&grammar_dir, ["checkout", &revision])?;
|
||||
|
||||
println!(
|
||||
"Grammar '{}' checked out at '{}'.",
|
||||
grammar.grammar_id, revision
|
||||
);
|
||||
} else {
|
||||
println!("Grammar '{}' is already up to date.", grammar.grammar_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets the remote for a repository to the given URL, creating the remote if
|
||||
// it does not yet exist.
|
||||
fn set_remote(repository_dir: &Path, remote_url: &str) -> Result<String> {
|
||||
git(
|
||||
repository_dir,
|
||||
["remote", "set-url", REMOTE_NAME, remote_url],
|
||||
)
|
||||
.or_else(|_| git(repository_dir, ["remote", "add", REMOTE_NAME, remote_url]))
|
||||
}
|
||||
|
||||
fn get_remote_url(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["remote", "get-url", REMOTE_NAME]).ok()
|
||||
}
|
||||
|
||||
fn get_revision(repository_dir: &Path) -> Option<String> {
|
||||
git(repository_dir, ["rev-parse", "HEAD"]).ok()
|
||||
}
|
||||
|
||||
// A wrapper around 'git' commands which returns stdout on success and a
|
||||
// helpful error message showing the command, stdout, and stderr on error.
|
||||
fn git<I, S>(repository_dir: &Path, args: I) -> Result<String>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<std::ffi::OsStr>,
|
||||
{
|
||||
let output = Command::new("git")
|
||||
.args(args)
|
||||
.current_dir(repository_dir)
|
||||
.output()?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8_lossy(&output.stdout)
|
||||
.trim_end()
|
||||
.to_owned())
|
||||
} else {
|
||||
// TODO: figure out how to display the git command using `args`
|
||||
Err(anyhow!(
|
||||
"Git command failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn build_grammar(grammar: GrammarConfiguration) -> Result<()> {
|
||||
let grammar_dir = if let GrammarSource::Local { path } = &grammar.source {
|
||||
PathBuf::from(&path)
|
||||
} else {
|
||||
crate::runtime_dir()
|
||||
.join("grammars/sources")
|
||||
.join(&grammar.grammar_id)
|
||||
};
|
||||
|
||||
let grammar_dir_entries = grammar_dir.read_dir().with_context(|| {
|
||||
format!(
|
||||
"Failed to read directory {:?}. Did you use 'hx --grammar fetch'?",
|
||||
grammar_dir
|
||||
)
|
||||
})?;
|
||||
|
||||
if grammar_dir_entries.count() == 0 {
|
||||
return Err(anyhow!(
|
||||
"Directory {:?} is empty. Did you use 'hx --grammar fetch'?",
|
||||
grammar_dir
|
||||
));
|
||||
};
|
||||
|
||||
let path = match &grammar.source {
|
||||
GrammarSource::Git {
|
||||
subpath: Some(subpath),
|
||||
..
|
||||
} => grammar_dir.join(subpath),
|
||||
_ => grammar_dir,
|
||||
}
|
||||
.join("src");
|
||||
|
||||
build_tree_sitter_library(&path, grammar)
|
||||
}
|
||||
|
||||
fn build_tree_sitter_library(src_path: &Path, grammar: GrammarConfiguration) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = crate::runtime_dir().join("grammars");
|
||||
let mut library_path = parser_lib_path.join(&grammar.grammar_id);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.context("Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
println!("Grammar '{}' is already built.", grammar.grammar_id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
println!("Building grammar '{}'", grammar.grammar_id);
|
||||
|
||||
let mut config = cc::Build::new();
|
||||
config
|
||||
.cpp(true)
|
||||
.opt_level(3)
|
||||
.cargo_metadata(false)
|
||||
.host(BUILD_TARGET)
|
||||
.target(BUILD_TARGET);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O3");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command.output().context("Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
/// Gives the contents of a file from a language's `runtime/queries/<lang>`
|
||||
/// directory
|
||||
pub fn load_runtime_file(language: &str, filename: &str) -> Result<String, std::io::Error> {
|
||||
let path = crate::RUNTIME_DIR
|
||||
.join("queries")
|
||||
.join(language)
|
||||
.join(filename);
|
||||
std::fs::read_to_string(&path)
|
||||
}
|
@ -0,0 +1,161 @@
|
||||
pub mod grammar;
|
||||
|
||||
use etcetera::base_strategy::{choose_base_strategy, BaseStrategy};
|
||||
|
||||
pub static RUNTIME_DIR: once_cell::sync::Lazy<std::path::PathBuf> =
|
||||
once_cell::sync::Lazy::new(runtime_dir);
|
||||
|
||||
pub fn runtime_dir() -> std::path::PathBuf {
|
||||
if let Ok(dir) = std::env::var("HELIX_RUNTIME") {
|
||||
return dir.into();
|
||||
}
|
||||
|
||||
const RT_DIR: &str = "runtime";
|
||||
let conf_dir = config_dir().join(RT_DIR);
|
||||
if conf_dir.exists() {
|
||||
return conf_dir;
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("CARGO_MANIFEST_DIR") {
|
||||
// this is the directory of the crate being run by cargo, we need the workspace path so we take the parent
|
||||
return std::path::PathBuf::from(dir).parent().unwrap().join(RT_DIR);
|
||||
}
|
||||
|
||||
// fallback to location of the executable being run
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf().join(RT_DIR)))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn config_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.config_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> std::path::PathBuf {
|
||||
// TODO: allow env var override
|
||||
let strategy = choose_base_strategy().expect("Unable to find the config directory!");
|
||||
let mut path = strategy.cache_dir();
|
||||
path.push("helix");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn config_file() -> std::path::PathBuf {
|
||||
config_dir().join("config.toml")
|
||||
}
|
||||
|
||||
pub fn lang_config_file() -> std::path::PathBuf {
|
||||
config_dir().join("languages.toml")
|
||||
}
|
||||
|
||||
pub fn log_file() -> std::path::PathBuf {
|
||||
cache_dir().join("helix.log")
|
||||
}
|
||||
|
||||
/// Default built-in languages.toml.
|
||||
pub fn default_lang_config() -> toml::Value {
|
||||
toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Could not parse bultin-in languages.toml to valid toml")
|
||||
}
|
||||
|
||||
/// User configured languages.toml file, merged with the default config.
|
||||
pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
|
||||
let def_lang_conf = default_lang_config();
|
||||
let data = std::fs::read(crate::config_dir().join("languages.toml"));
|
||||
let user_lang_conf = match data {
|
||||
Ok(raw) => {
|
||||
let value = toml::from_slice(&raw)?;
|
||||
merge_toml_values(def_lang_conf, value)
|
||||
}
|
||||
Err(_) => def_lang_conf,
|
||||
};
|
||||
|
||||
Ok(user_lang_conf)
|
||||
}
|
||||
|
||||
// right overrides left
|
||||
pub fn merge_toml_values(left: toml::Value, right: toml::Value) -> toml::Value {
|
||||
use toml::Value;
|
||||
|
||||
fn get_name(v: &Value) -> Option<&str> {
|
||||
v.get("name").and_then(Value::as_str)
|
||||
}
|
||||
|
||||
match (left, right) {
|
||||
(Value::Array(mut left_items), Value::Array(right_items)) => {
|
||||
left_items.reserve(right_items.len());
|
||||
for rvalue in right_items {
|
||||
let lvalue = get_name(&rvalue)
|
||||
.and_then(|rname| left_items.iter().position(|v| get_name(v) == Some(rname)))
|
||||
.map(|lpos| left_items.remove(lpos));
|
||||
let mvalue = match lvalue {
|
||||
Some(lvalue) => merge_toml_values(lvalue, rvalue),
|
||||
None => rvalue,
|
||||
};
|
||||
left_items.push(mvalue);
|
||||
}
|
||||
Value::Array(left_items)
|
||||
}
|
||||
(Value::Table(mut left_map), Value::Table(right_map)) => {
|
||||
for (rname, rvalue) in right_map {
|
||||
match left_map.remove(&rname) {
|
||||
Some(lvalue) => {
|
||||
let merged_value = merge_toml_values(lvalue, rvalue);
|
||||
left_map.insert(rname, merged_value);
|
||||
}
|
||||
None => {
|
||||
left_map.insert(rname, rvalue);
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Table(left_map)
|
||||
}
|
||||
// Catch everything else we didn't handle, and use the right value
|
||||
(_, value) => value,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod merge_toml_tests {
|
||||
use super::merge_toml_values;
|
||||
|
||||
#[test]
|
||||
fn language_tomls() {
|
||||
use toml::Value;
|
||||
|
||||
const USER: &str = "
|
||||
[[language]]
|
||||
name = \"nix\"
|
||||
test = \"bbb\"
|
||||
indent = { tab-width = 4, unit = \" \", test = \"aaa\" }
|
||||
";
|
||||
|
||||
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
|
||||
.expect("Couldn't parse built-in languages config");
|
||||
let user: Value = toml::from_str(USER).unwrap();
|
||||
|
||||
let merged = merge_toml_values(base, user);
|
||||
let languages = merged.get("language").unwrap().as_array().unwrap();
|
||||
let nix = languages
|
||||
.iter()
|
||||
.find(|v| v.get("name").unwrap().as_str().unwrap() == "nix")
|
||||
.unwrap();
|
||||
let nix_indent = nix.get("indent").unwrap();
|
||||
|
||||
// We changed tab-width and unit in indent, so check that they have the new values
|
||||
assert_eq!(
|
||||
nix_indent.get("tab-width").unwrap().as_integer().unwrap(),
|
||||
4
|
||||
);
|
||||
assert_eq!(nix_indent.get("unit").unwrap().as_str().unwrap(), " ");
|
||||
// We added new keys, so check them
|
||||
assert_eq!(nix.get("test").unwrap().as_str().unwrap(), "bbb");
|
||||
assert_eq!(nix_indent.get("test").unwrap().as_str().unwrap(), "aaa");
|
||||
// We didn't change comment-token, so it should be the same
|
||||
assert_eq!(nix.get("comment-token").unwrap().as_str().unwrap(), "#");
|
||||
}
|
||||
}
|
@ -1,13 +0,0 @@
helix-syntax
============

Syntax highlighting for helix; the (shallow) submodules reside here.

Differences from nvim-treesitter
--------------------------------

The syntax definitions are commonly ported from
<https://github.com/nvim-treesitter/nvim-treesitter>.

Note that we do not support the custom `#any-of` predicate which is
supported by neovim, so one needs to change it to `#match` with regex.
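For illustration, a hedged sketch of that conversion (the node name, capture name, and keyword list below are hypothetical, not taken from an actual query in this repository):
```scm
; nvim-treesitter form, using the unsupported #any-of? predicate:
((identifier) @keyword
  (#any-of? @keyword "let" "const"))

; equivalent form here, using #match? with a regex instead:
((identifier) @keyword
  (#match? @keyword "^(let|const)$"))
```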
@ -1,206 +0,0 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use std::fs;
|
||||
use std::time::SystemTime;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
fn collect_tree_sitter_dirs(ignore: &[String]) -> Result<Vec<String>> {
|
||||
let mut dirs = Vec::new();
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("languages");
|
||||
|
||||
for entry in fs::read_dir(path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if !entry.file_type()?.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let dir = path.file_name().unwrap().to_str().unwrap().to_string();
|
||||
|
||||
// filter ignores
|
||||
if ignore.contains(&dir) {
|
||||
continue;
|
||||
}
|
||||
dirs.push(dir)
|
||||
}
|
||||
|
||||
Ok(dirs)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
const DYLIB_EXTENSION: &str = "so";
|
||||
|
||||
#[cfg(windows)]
|
||||
const DYLIB_EXTENSION: &str = "dll";
|
||||
|
||||
fn build_library(src_path: &Path, language: &str) -> Result<()> {
|
||||
let header_path = src_path;
|
||||
// let grammar_path = src_path.join("grammar.json");
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let mut scanner_path = src_path.join("scanner.c");
|
||||
|
||||
let scanner_path = if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
scanner_path.set_extension("cc");
|
||||
if scanner_path.exists() {
|
||||
Some(scanner_path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
let parser_lib_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../runtime/grammars");
|
||||
let mut library_path = parser_lib_path.join(language);
|
||||
library_path.set_extension(DYLIB_EXTENSION);
|
||||
|
||||
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
|
||||
.with_context(|| "Failed to compare source and binary timestamps")?;
|
||||
|
||||
if !recompile {
|
||||
return Ok(());
|
||||
}
|
||||
let mut config = cc::Build::new();
|
||||
config.cpp(true).opt_level(2).cargo_metadata(false);
|
||||
let compiler = config.get_compiler();
|
||||
let mut command = Command::new(compiler.path());
|
||||
command.current_dir(src_path);
|
||||
for (key, value) in compiler.env() {
|
||||
command.env(key, value);
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
command
|
||||
.args(&["/nologo", "/LD", "/I"])
|
||||
.arg(header_path)
|
||||
.arg("/Od")
|
||||
.arg("/utf-8");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
|
||||
command
|
||||
.arg(parser_path)
|
||||
.arg("/link")
|
||||
.arg(format!("/out:{}", library_path.to_str().unwrap()));
|
||||
} else {
|
||||
command
|
||||
.arg("-shared")
|
||||
.arg("-fPIC")
|
||||
.arg("-fno-exceptions")
|
||||
.arg("-g")
|
||||
.arg("-I")
|
||||
.arg(header_path)
|
||||
.arg("-o")
|
||||
.arg(&library_path)
|
||||
.arg("-O2");
|
||||
if let Some(scanner_path) = scanner_path.as_ref() {
|
||||
if scanner_path.extension() == Some("c".as_ref()) {
|
||||
command.arg("-xc").arg("-std=c99").arg(scanner_path);
|
||||
} else {
|
||||
command.arg(scanner_path);
|
||||
}
|
||||
}
|
||||
command.arg("-xc").arg(parser_path);
|
||||
if cfg!(all(unix, not(target_os = "macos"))) {
|
||||
command.arg("-Wl,-z,relro,-z,now");
|
||||
}
|
||||
}
|
||||
|
||||
let output = command
|
||||
.output()
|
||||
.with_context(|| "Failed to execute C compiler")?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
fn needs_recompile(
|
||||
lib_path: &Path,
|
||||
parser_c_path: &Path,
|
||||
scanner_path: &Option<PathBuf>,
|
||||
) -> Result<bool> {
|
||||
if !lib_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
let lib_mtime = mtime(lib_path)?;
|
||||
if mtime(parser_c_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(scanner_path) = scanner_path {
|
||||
if mtime(scanner_path)? > lib_mtime {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn mtime(path: &Path) -> Result<SystemTime> {
|
||||
Ok(fs::metadata(path)?.modified()?)
|
||||
}
|
||||
|
||||
fn build_dir(dir: &str, language: &str) {
|
||||
println!("Build language {}", language);
|
||||
if PathBuf::from("languages")
|
||||
.join(dir)
|
||||
.read_dir()
|
||||
.unwrap()
|
||||
.next()
|
||||
.is_none()
|
||||
{
|
||||
eprintln!(
|
||||
"The directory {} is empty, you probably need to use 'git submodule update --init --recursive'?",
|
||||
dir
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("languages")
|
||||
.join(dir)
|
||||
.join("src");
|
||||
|
||||
build_library(&path, language).unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let ignore = vec![
|
||||
"tree-sitter-typescript".to_string(),
|
||||
"tree-sitter-ocaml".to_string(),
|
||||
];
|
||||
let dirs = collect_tree_sitter_dirs(&ignore).unwrap();
|
||||
|
||||
let mut n_jobs = 0;
|
||||
let pool = threadpool::Builder::new().build(); // by going through the builder, it'll use num_cpus
|
||||
let (tx, rx) = channel();
|
||||
|
||||
for dir in dirs {
|
||||
let tx = tx.clone();
|
||||
n_jobs += 1;
|
||||
|
||||
pool.execute(move || {
|
||||
let language = &dir.strip_prefix("tree-sitter-").unwrap();
|
||||
build_dir(&dir, language);
|
||||
|
||||
// report progress
|
||||
tx.send(1).unwrap();
|
||||
});
|
||||
}
|
||||
pool.join();
|
||||
// drop(tx);
|
||||
assert_eq!(rx.try_iter().sum::<usize>(), n_jobs);
|
||||
|
||||
build_dir("tree-sitter-typescript/tsx", "tsx");
|
||||
build_dir("tree-sitter-typescript/typescript", "typescript");
|
||||
build_dir("tree-sitter-ocaml/ocaml", "ocaml");
|
||||
build_dir("tree-sitter-ocaml/interface", "ocaml-interface")
|
||||
}
|
@ -1 +0,0 @@
|
||||
Subproject commit ca69cdf485e9ce2b2ef0991a720aa88d87d30231
|
@ -1 +0,0 @@
|
||||
Subproject commit a8eb5cb57c66f74c63ab950de081207cccf52017
|
@ -1 +0,0 @@
|
||||
Subproject commit f05e279aedde06a25801c3f2b2cc8ac17fac52ae
|
@ -1 +0,0 @@
|
||||
Subproject commit 53a65a908167d6556e1fcdb67f1ee62aac101dda
|
@ -1 +0,0 @@
|
||||
Subproject commit f6616f1e417ee8b62daf251aa1daa5d73781c596
|
@ -1 +0,0 @@
|
||||
Subproject commit 5dd3c62f1bbe378b220fe16b317b85247898639e
|
@ -1 +0,0 @@
|
||||
Subproject commit e8dcc9d2b404c542fd236ea5f7208f90be8a6e89
|
@ -1 +0,0 @@
|
||||
Subproject commit 94e10230939e702b4fa3fa2cb5c3bc7173b95d07
|
@ -1 +0,0 @@
|
||||
Subproject commit 6a25376685d1d47968c2cef06d4db8d84a70025e
|
@ -1 +0,0 @@
|
||||
Subproject commit 7af32bc04a66ab196f5b9f92ac471f29372ae2ce
|
@ -1 +0,0 @@
|
||||
Subproject commit f5d7bda543da788bd507b05bd722627dde66c9ec
|
@ -1 +0,0 @@
|
||||
Subproject commit bd50ccf66b42c55252ac8efc1086af4ac6bab8cd
|
@ -1 +0,0 @@
|
||||
Subproject commit 86985bde399c5f40b00bc75f7ab70a6c69a5f9c3
|
@ -1 +0,0 @@
|
||||
Subproject commit 04e54ab6585dfd4fee6ddfe5849af56f101b6d4f
|
@ -1 +0,0 @@
|
||||
Subproject commit 066e395e1107df17183cf3ae4230f1a1406cc972
|
@ -1 +0,0 @@
|
||||
Subproject commit 0e4f0baf90b57e5aeb62dcdbf03062c6315d43ea
|
@ -1 +0,0 @@
|
||||
Subproject commit c12e6ecb54485f764250556ffd7ccb18f8e2942b
|
@ -1 +0,0 @@
|
||||
Subproject commit 332dc528f27044bc4427024dbb33e6941fc131f2
|
@ -1 +0,0 @@
|
||||
Subproject commit 88408ffc5e27abcffced7010fc77396ae3636d7e
|
@ -1 +0,0 @@
|
||||
Subproject commit 0fa917a7022d1cd2e9b779a6a8fc5dc7fad69c75
|
@ -1 +0,0 @@
|
||||
Subproject commit 5e66e961eee421786bdda8495ed1db045e06b5fe
|
@ -1 +0,0 @@
|
||||
Subproject commit b6ec26f181dd059eedd506fa5fbeae1b8e5556c8
|
@ -1 +0,0 @@
|
||||
Subproject commit d93af487cc75120c89257195e6be46c999c6ba18
|
@ -1 +0,0 @@
|
||||
Subproject commit 3ec55082cf0be015d03148be8edfdfa8c56e77f9
|
@ -1 +0,0 @@
|
||||
Subproject commit bd6186c24d5eb13b4623efac9d944dcc095c0dad
|
@ -1 +0,0 @@
|
||||
Subproject commit 4a95461c4761c624f2263725aca79eeaefd36cad
|
@ -1 +0,0 @@
|
||||
Subproject commit 65bceef69c3b0f24c0b19ce67d79f57c96e90fcb
|
@ -1 +0,0 @@
|
||||
Subproject commit 12ea597262125fc22fd2e91aa953ac69b19c26ca
|
@ -1 +0,0 @@
|
||||
Subproject commit 7f720661de5316c0f8fee956526d4002fa1086d8
|
@ -1 +0,0 @@
|
||||
Subproject commit d98426109258b266e1e92358c5f11716d2e8f638
|
@ -1 +0,0 @@
|
||||
Subproject commit 0cdeb0e51411a3ba5493662952c3039de08939ca
|
@ -1 +0,0 @@
|
||||
Subproject commit 3b213925b9c4f42c1acfe2e10bfbb438d9c6834d
|
@ -1 +0,0 @@
|
||||
Subproject commit 06fabca19454b2dc00c1b211a7cb7ad0bc2585f1
|
@ -1 +0,0 @@
|
||||
Subproject commit 6f5d40190ec8a0aa8c8410699353d820f4f7d7a6
|
@ -1 +0,0 @@
|
||||
Subproject commit a4b9187417d6be349ee5fd4b6e77b4172c6827dd
|
@ -1 +0,0 @@
|
||||
Subproject commit ad8c32917a16dfbb387d1da567bf0c3fb6fffde2
|
@ -1 +0,0 @@
|
||||
Subproject commit 50f38ceab667f9d482640edfee803d74f4edeba5
|
@ -1 +0,0 @@
|
||||
Subproject commit 23d419ba45789c5a47d31448061557716b02750a
|
@ -1 +0,0 @@
|
||||
Subproject commit 0ac2c6da562c7a2c26ed7e8691d4a590f7e8b90a
|
@ -1 +0,0 @@
|
||||
Subproject commit 57f855461aeeca73bd4218754fb26b5ac143f98f
|