refactor: unvendor flow-syntax
parent 21bd1e58a8
commit a227eb925c
13 changed files with 4 additions and 1995 deletions
@@ -5,7 +5,10 @@
     .fingerprint = 0x52c0d670590aa80f,

     .dependencies = .{
-        .syntax = .{ .path = "src/syntax" },
+        .syntax = .{
+            .url = "git+https://github.com/neurocyte/flow-syntax?ref=zig-0.14#410d19e633f237cd1602175450bd7d3bb03a1898",
+            .hash = "flow_syntax-0.1.0-X8jOoT4OAQDibKKzYlJls3u5KczVh__cWYN7vTqCE1o3",
+        },
         .flags = .{
             .url = "https://github.com/n0s4/flags/archive/372501d1576b5723829bcba98e41361132c7b618.tar.gz",
             .hash = "flags-0.8.0-AAAAAJV0AACuGBBnpUnHqZzAhoGTp4ibFROBQQQZGRqx",

src/syntax/.gitignore vendored
@@ -1 +0,0 @@
/.zig-cache/
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2024 CJ van den Berg

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,2 +0,0 @@
# flow-syntax
Syntax highlighting module used by [flow](https://github.com/neurocyte/flow), [zat](https://github.com/neurocyte/zat) and [zine](https://github.com/kristoff-it/zine)
@@ -1,154 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const use_tree_sitter = b.option(bool, "use_tree_sitter", "Enable tree-sitter (default: yes)") orelse true;
    const options = b.addOptions();
    options.addOption(bool, "use_tree_sitter", use_tree_sitter);
    const options_mod = options.createModule();

    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const tree_sitter_dep = b.dependency("tree_sitter", .{
        .target = target,
        .optimize = optimize,
    });

    const tree_sitter_host_dep = b.dependency("tree_sitter", .{
        .target = b.graph.host,
        .optimize = optimize,
    });

    const cbor_dep = b.dependency("cbor", .{
        .target = target,
        .optimize = optimize,
    });

    const ts_bin_query_gen = b.addExecutable(.{
        .name = "ts_bin_query_gen",
        .target = b.graph.host,
        .root_source_file = b.path("src/ts_bin_query_gen.zig"),
    });
    ts_bin_query_gen.linkLibC();
    ts_bin_query_gen.root_module.addImport("cbor", cbor_dep.module("cbor"));
    ts_bin_query_gen.root_module.addImport("treez", tree_sitter_host_dep.module("treez"));
    ts_bin_query_gen.root_module.addImport("build_options", options_mod);

    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "queries/cmake/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-agda/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-astro/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-bash/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-c-sharp/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-c/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-cpp/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-css/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-diff/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-dockerfile/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-elixir/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-git-rebase/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-gitcommit/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-gleam/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-go/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-fish/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-haskell/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-hare/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-html/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-hurl/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-java/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-javascript/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-jsdoc/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-json/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-julia/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-kdl/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-lua/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-mail/queries/mail/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-make/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-markdown/tree-sitter-markdown/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-markdown/tree-sitter-markdown-inline/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nasm/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nim/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ninja/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nix/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nu/queries/nu/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ocaml/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-odin/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-openscad/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-org/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-php/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-powershell/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-proto/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-python/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-purescript/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-regex/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-rpmspec/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ruby/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-rust/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ssh-config/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-scala/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-scheme/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-superhtml/tree-sitter-superhtml/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-sql/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-swift/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-toml/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-typescript/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-typst/queries/typst/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-uxntal/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-vim/queries/vim/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-xml/queries/dtd/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-xml/queries/xml/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-yaml/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-zig/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ziggy/tree-sitter-ziggy/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-ziggy/tree-sitter-ziggy-schema/queries/highlights.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "nvim-treesitter/queries/verilog/highlights.scm");

    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "queries/cmake/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-astro/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-cpp/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-elixir/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-gitcommit/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-hare/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-html/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-hurl/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-javascript/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-kdl/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-lua/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-markdown/tree-sitter-markdown-inline/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-markdown/tree-sitter-markdown/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nasm/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nix/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-nu/queries/nu/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-odin/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-openscad/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-php/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-purescript/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-purescript/vim_queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-rust/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-superhtml/tree-sitter-superhtml/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-swift/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-typst/queries/typst/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-uxntal/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-vim/queries/vim/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "tree-sitter-zig/queries/injections.scm");
    ts_queryfile(b, tree_sitter_dep, ts_bin_query_gen, "nvim-treesitter/queries/verilog/injections.scm");

    const syntax_mod = b.addModule("syntax", .{
        .root_source_file = b.path("src/syntax.zig"),
        .imports = &.{
            .{ .name = "build_options", .module = options_mod },
            .{ .name = "cbor", .module = cbor_dep.module("cbor") },
            .{ .name = "treez", .module = tree_sitter_dep.module("treez") },
        },
    });

    if (use_tree_sitter) {
        const ts_bin_query_gen_step = b.addRunArtifact(ts_bin_query_gen);
        const output = ts_bin_query_gen_step.addOutputFileArg("bin_queries.cbor");
        syntax_mod.addAnonymousImport("syntax_bin_queries", .{ .root_source_file = output });
    }
}

fn ts_queryfile(b: *std.Build, dep: *std.Build.Dependency, bin_gen: *std.Build.Step.Compile, comptime sub_path: []const u8) void {
    const module = b.createModule(.{ .root_source_file = dep.path(sub_path) });
    bin_gen.root_module.addImport(sub_path, module);
}
@@ -1,22 +0,0 @@
.{
    .name = .flow_syntax,
    .version = "0.1.0",
    .fingerprint = 0x3ba2584ea1cec85f,
    .minimum_zig_version = "0.14.1",

    .dependencies = .{
        .tree_sitter = .{
            .url = "https://github.com/neurocyte/tree-sitter/releases/download/master-1c3ad59bd98ee430b166054030dac4c46d641e39/source.tar.gz",
            .hash = "N-V-__8AANMzUiemOR2eNnrtlMmAGHFqij6VYtDUiaFfn6Dw",
        },
        .cbor = .{
            .url = "https://github.com/neurocyte/cbor/archive/1fccb83c70cd84e1dff57cc53f7db8fb99909a94.tar.gz",
            .hash = "cbor-1.0.0-RcQE_HvqAACcrLH7t3IDZOshgY2xqJA_UX330MvwSepb",
        },
    },
    .paths = .{
        "src",
        "build.zig",
        "build.zig.zon",
    },
}
@@ -1,195 +0,0 @@
const std = @import("std");
const build_options = @import("build_options");

const treez = if (build_options.use_tree_sitter)
    @import("treez")
else
    @import("treez_dummy.zig");

const Self = @This();

pub const tss = @import("ts_serializer.zig");
pub const FileType = @import("file_type.zig");
const Query = treez.Query;

allocator: std.mem.Allocator,
mutex: ?std.Thread.Mutex,
highlights: std.StringHashMapUnmanaged(*CacheEntry) = .{},
injections: std.StringHashMapUnmanaged(*CacheEntry) = .{},
errors: std.StringHashMapUnmanaged(*CacheEntry) = .{},
ref_count: usize = 1,

const CacheEntry = struct {
    mutex: ?std.Thread.Mutex,
    query: ?*Query,
    query_arena: ?*std.heap.ArenaAllocator,
    query_type: QueryType,
    file_type_name: []const u8,
    lang_fn: FileType.LangFn,

    fn destroy(self: *@This(), allocator: std.mem.Allocator) void {
        if (self.query_arena) |a| {
            a.deinit();
            allocator.destroy(a);
        } else if (self.query) |q|
            q.destroy();
        self.query_arena = null;
        self.query = null;
    }
};

pub const QueryType = enum {
    highlights,
    errors,
    injections,
};

const QueryParseError = error{
    InvalidSyntax,
    InvalidNodeType,
    InvalidField,
    InvalidCapture,
    InvalidStructure,
    InvalidLanguage,
};

const CacheError = error{
    NotFound,
    OutOfMemory,
};

pub const Error = CacheError || QueryParseError || QuerySerializeError;

pub fn create(allocator: std.mem.Allocator, opts: struct { lock: bool = false }) !*Self {
    const self = try allocator.create(Self);
    errdefer allocator.destroy(self);
    self.* = .{
        .allocator = allocator,
        .mutex = if (opts.lock) .{} else null,
    };
    return self;
}

pub fn deinit(self: *Self) void {
    self.release_ref_unlocked_and_maybe_destroy();
}

fn add_ref_locked(self: *Self) void {
    std.debug.assert(self.ref_count > 0);
    self.ref_count += 1;
}

fn release_ref_unlocked_and_maybe_destroy(self: *Self) void {
    {
        if (self.mutex) |*mtx| mtx.lock();
        defer if (self.mutex) |*mtx| mtx.unlock();
        self.ref_count -= 1;
        if (self.ref_count > 0) return;
    }

    release_cache_entry_hash_map(self.allocator, &self.highlights);
    release_cache_entry_hash_map(self.allocator, &self.errors);
    release_cache_entry_hash_map(self.allocator, &self.injections);
    self.allocator.destroy(self);
}

fn release_cache_entry_hash_map(allocator: std.mem.Allocator, hash_map: *std.StringHashMapUnmanaged(*CacheEntry)) void {
    var iter = hash_map.iterator();
    while (iter.next()) |p| {
        allocator.free(p.key_ptr.*);
        p.value_ptr.*.destroy(allocator);
        allocator.destroy(p.value_ptr.*);
    }
    hash_map.deinit(allocator);
}

fn get_cache_entry(self: *Self, file_type: FileType, comptime query_type: QueryType) CacheError!*CacheEntry {
    if (self.mutex) |*mtx| mtx.lock();
    defer if (self.mutex) |*mtx| mtx.unlock();

    const hash = switch (query_type) {
        .highlights => &self.highlights,
        .errors => &self.errors,
        .injections => &self.injections,
    };

    return if (hash.get(file_type.name)) |entry| entry else blk: {
        const entry_ = try hash.getOrPut(self.allocator, try self.allocator.dupe(u8, file_type.name));

        const q = try self.allocator.create(CacheEntry);
        q.* = .{
            .query = null,
            .query_arena = null,
            .mutex = if (self.mutex) |_| .{} else null,
            .lang_fn = file_type.lang_fn,
            .file_type_name = file_type.name,
            .query_type = query_type,
        };
        entry_.value_ptr.* = q;

        break :blk q;
    };
}

fn get_cached_query(self: *Self, entry: *CacheEntry) Error!?*Query {
    if (entry.mutex) |*mtx| mtx.lock();
    defer if (entry.mutex) |*mtx| mtx.unlock();

    return if (entry.query) |query| query else blk: {
        const lang = entry.lang_fn() orelse std.debug.panic("tree-sitter parser function failed for language: {s}", .{entry.file_type_name});
        const queries = FileType.queries.get(entry.file_type_name) orelse return null;
        const query_bin = switch (entry.query_type) {
            .highlights => queries.highlights_bin,
            .errors => queries.errors_bin,
            .injections => queries.injections_bin orelse return null,
        };
        const query, const arena = try deserialize_query(query_bin, lang, self.allocator);
        entry.query = query;
        entry.query_arena = arena;
        break :blk entry.query.?;
    };
}

fn pre_load_internal(self: *Self, file_type: *const FileType, comptime query_type: QueryType) Error!void {
    _ = try self.get_cached_query(try self.get_cache_entry(file_type, query_type));
}

pub fn pre_load(self: *Self, lang_name: []const u8) Error!void {
    const file_type = FileType.get_by_name(lang_name) orelse return;
    _ = try self.pre_load_internal(file_type, .highlights);
    _ = try self.pre_load_internal(file_type, .errors);
    _ = try self.pre_load_internal(file_type, .injections);
}

fn ReturnType(comptime query_type: QueryType) type {
    return switch (query_type) {
        .highlights => *Query,
        .errors => *Query,
        .injections => ?*Query,
    };
}

pub fn get(self: *Self, file_type: FileType, comptime query_type: QueryType) Error!ReturnType(query_type) {
    const query = try self.get_cached_query(try self.get_cache_entry(file_type, query_type));
    self.add_ref_locked();
    return switch (@typeInfo(ReturnType(query_type))) {
        .optional => |_| query,
        else => query.?,
    };
}

pub fn release(self: *Self, query: *Query, comptime query_type: QueryType) void {
    _ = query;
    _ = query_type;
    self.release_ref_unlocked_and_maybe_destroy();
}

pub const QuerySerializeError = (tss.SerializeError || tss.DeserializeError);

fn deserialize_query(query_bin: []const u8, language: ?*const treez.Language, allocator: std.mem.Allocator) QuerySerializeError!struct { *Query, *std.heap.ArenaAllocator } {
    var ts_query_out, const arena = try tss.fromCbor(query_bin, allocator);
    ts_query_out.language = @intFromPtr(language);

    const query_out: *Query = @alignCast(@ptrCast(ts_query_out));
    return .{ query_out, arena };
}
@@ -1,207 +0,0 @@
const std = @import("std");
const cbor = @import("cbor");
const build_options = @import("build_options");

const treez = if (build_options.use_tree_sitter)
    @import("treez")
else
    @import("treez_dummy.zig");

pub const FileType = @This();

color: u24,
icon: []const u8,
name: []const u8,
description: []const u8,
lang_fn: LangFn,
extensions: []const []const u8,
first_line_matches: ?FirstLineMatch = null,
comment: []const u8,
formatter: ?[]const []const u8,
language_server: ?[]const []const u8,

pub fn get_by_name_static(name: []const u8) ?FileType {
    return FileType.static_file_types.get(name);
}

pub fn get_all() []const FileType {
    return FileType.static_file_types.values();
}

pub fn guess_static(file_path: ?[]const u8, content: []const u8) ?FileType {
    if (guess_first_line_static(content)) |ft| return ft;
    for (static_file_types.values()) |file_type|
        if (file_path) |fp| if (match_file_type(file_type.extensions, fp))
            return file_type;
    return null;
}

fn guess_first_line_static(content: []const u8) ?FileType {
    const first_line = if (std.mem.indexOf(u8, content, "\n")) |pos| content[0..pos] else content;
    for (static_file_types.values()) |file_type|
        if (file_type.first_line_matches) |match|
            if (match_first_line(match.prefix, match.content, first_line))
                return file_type;
    return null;
}

pub fn match_first_line(match_prefix: ?[]const u8, match_content: ?[]const u8, first_line: []const u8) bool {
    if (match_prefix == null and match_content == null) return false;
    if (match_prefix) |prefix|
        if (prefix.len > first_line.len or !std.mem.eql(u8, first_line[0..prefix.len], prefix))
            return false;
    if (match_content) |content|
        if (std.mem.indexOf(u8, first_line, content)) |_| {} else return false;
    return true;
}

pub fn match_file_type(extensions: []const []const u8, file_path: []const u8) bool {
    const basename = std.fs.path.basename(file_path);
    const extension = std.fs.path.extension(file_path);
    return for (extensions) |ext| {
        if (ext.len == basename.len and std.mem.eql(u8, ext, basename))
            return true;
        if (extension.len > 0 and ext.len == extension.len - 1 and std.mem.eql(u8, ext, extension[1..]))
            return true;
    } else false;
}

pub fn Parser(comptime lang: []const u8) LangFn {
    return get_parser(lang);
}

fn get_parser(comptime lang: []const u8) LangFn {
    if (build_options.use_tree_sitter) {
        const language_name = ft_func_name(lang);
        return @extern(?LangFn, .{ .name = "tree_sitter_" ++ language_name }) orelse @compileError(std.fmt.comptimePrint("Cannot find extern tree_sitter_{s}", .{language_name}));
    } else {
        return treez.Language.LangFn;
    }
}

fn ft_func_name(comptime lang: []const u8) []const u8 {
    var transform: [lang.len]u8 = undefined;
    for (lang, 0..) |c, i|
        transform[i] = if (c == '-') '_' else c;
    const func_name = transform;
    return &func_name;
}

pub const LangFn = *const fn () callconv(.C) ?*const treez.Language;

pub const FirstLineMatch = struct {
    prefix: ?[]const u8 = null,
    content: ?[]const u8 = null,
};

const static_file_type_list = load_file_types(@import("file_types.zig"));
const static_file_types = std.StaticStringMap(FileType).initComptime(static_file_type_list);

fn vec(comptime args: anytype) []const []const u8 {
    var cmd: []const []const u8 = &[_][]const u8{};
    inline for (args) |arg| {
        cmd = cmd ++ [_][]const u8{arg};
    }
    return cmd;
}

const ListEntry = struct { []const u8, FileType };

fn load_file_types(comptime Namespace: type) []const ListEntry {
    comptime switch (@typeInfo(Namespace)) {
        .@"struct" => |info| {
            var count = 0;
            for (info.decls) |_| {
                // @compileLog(decl.name, @TypeOf(@field(Namespace, decl.name)));
                count += 1;
            }
            var construct_types: [count]ListEntry = undefined;
            var i = 0;
            for (info.decls) |decl| {
                const lang = decl.name;
                const args = @field(Namespace, lang);
                construct_types[i] = .{ lang, .{
                    .color = if (@hasField(@TypeOf(args), "color")) args.color else 0xffffff,
                    .icon = if (@hasField(@TypeOf(args), "icon")) args.icon else "",
                    .name = lang,
                    .description = args.description,
                    .lang_fn = if (@hasField(@TypeOf(args), "parser")) args.parser else get_parser(lang),
                    .extensions = vec(args.extensions),
                    .comment = args.comment,
                    .first_line_matches = if (@hasField(@TypeOf(args), "first_line_matches")) args.first_line_matches else null,
                    .formatter = if (@hasField(@TypeOf(args), "formatter")) vec(args.formatter) else null,
                    .language_server = if (@hasField(@TypeOf(args), "language_server")) vec(args.language_server) else null,
                } };
                i += 1;
            }
            const types = construct_types;
            return &types;
        },
        else => @compileError("expected tuple or struct type"),
    };
}

pub const FileTypeQueries = struct {
    highlights_bin: []const u8,
    errors_bin: []const u8,
    injections_bin: ?[]const u8,
};

pub const queries = std.StaticStringMap(FileTypeQueries).initComptime(load_queries());

fn load_queries() []const struct { []const u8, FileTypeQueries } {
    if (!build_options.use_tree_sitter) return &.{};
    @setEvalBranchQuota(32000);
    const queries_cb = @embedFile("syntax_bin_queries");
    var iter: []const u8 = queries_cb;
    var len = cbor.decodeMapHeader(&iter) catch |e| {
        @compileLog("cbor.decodeMapHeader", e);
        @compileError("invalid syntax_bin_queries");
    };
    var construct_types: [len]struct { []const u8, FileTypeQueries } = undefined;
    var i = 0;
    while (len > 0) : (len -= 1) {
        var lang: []const u8 = undefined;
        if (!try cbor.matchString(&iter, &lang))
            @compileError("invalid language name field");
        construct_types[i] = .{ lang, .{
            .highlights_bin = blk: {
                var iter_: []const u8 = iter;
                break :blk get_query_value_bin(&iter_, "highlights") orelse @compileError("missing highlights for " ++ lang);
            },
            .errors_bin = blk: {
                var iter_: []const u8 = iter;
                break :blk get_query_value_bin(&iter_, "errors") orelse @compileError("missing errors query for " ++ lang);
            },
            .injections_bin = blk: {
                var iter_: []const u8 = iter;
                break :blk get_query_value_bin(&iter_, "injections");
            },
        } };
        try cbor.skipValue(&iter);
        i += 1;
    }
    const types = construct_types;
    return &types;
}

fn get_query_value_bin(iter: *[]const u8, comptime query: []const u8) ?[]const u8 {
    var len = cbor.decodeMapHeader(iter) catch |e| {
        @compileLog("cbor.decodeMapHeader", e);
        @compileError("invalid query map in syntax_bin_queries");
    };
    while (len > 0) : (len -= 1) {
        var query_name: []const u8 = undefined;
        if (!try cbor.matchString(iter, &query_name))
            @compileError("invalid query name field");
        if (std.mem.eql(u8, query_name, query)) {
            var query_value: []const u8 = undefined;
            if (try cbor.matchValue(iter, cbor.extract(&query_value)))
                return query_value;
            @compileError("invalid query value field");
        } else {
            try cbor.skipValue(iter);
        }
    }
    return null;
}
@@ -1,609 +0,0 @@
const file_type = @import("file_type.zig");
const FirstLineMatch = file_type.FirstLineMatch;

pub const agda = .{
    .description = "Agda",
    .extensions = .{"agda"},
    .comment = "--",
};

pub const astro = .{
    .description = "Astro",
    .icon = "",
    .extensions = .{"astro"},
    .comment = "//",
    .language_server = .{ "astro-ls", "--stdio" },
};

pub const bash = .{
    .description = "Bash",
    .color = 0x3e474a,
    .icon = "",
    .extensions = .{ "sh", "bash", ".profile" },
    .comment = "#",
    .first_line_matches = FirstLineMatch{ .prefix = "#!", .content = "sh" },
    .formatter = .{ "shfmt", "--indent", "4" },
    .language_server = .{ "bash-language-server", "start" },
};

pub const c = .{
    .description = "C",
    .icon = "",
    .extensions = .{"c"},
    .comment = "//",
    .formatter = .{"clang-format"},
    .language_server = .{"clangd"},
};

pub const @"c-sharp" = .{
    .description = "C#",
    .color = 0x68217a,
    .icon = "",
    .extensions = .{"cs"},
    .comment = "//",
    .language_server = .{ "OmniSharp", "-lsp" },
    .formatter = .{ "csharpier", "format" },
};

pub const conf = .{
    .description = "Config",
    .color = 0x000000,
    .icon = "",
    .extensions = .{ "conf", "log", "config", ".gitconfig", "gui_config" },
    .highlights = fish.highlights,
    .comment = "#",
    .parser = fish.parser,
};

pub const cmake = .{
    .description = "CMake",
    .color = 0x004078,
    .icon = "",
    .extensions = .{ "CMakeLists.txt", "cmake", "cmake.in" },
    .comment = "#",
    .highlights = "queries/cmake/highlights.scm",
    .injections = "queries/cmake/injections.scm",
    .formatter = .{"cmake-format"},
    .language_server = .{"cmake-language-server"},
};

pub const cpp = .{
    .description = "C++",
    .color = 0x9c033a,
    .icon = "",
    .extensions = .{ "cc", "cpp", "cxx", "hpp", "hxx", "h", "ipp", "ixx" },
    .comment = "//",
    .highlights_list = .{
        "tree-sitter-c/queries/highlights.scm",
        "tree-sitter-cpp/queries/highlights.scm",
    },
    .injections = "tree-sitter-cpp/queries/injections.scm",
    .formatter = .{"clang-format"},
    .language_server = .{"clangd"},
};

pub const css = .{
    .description = "CSS",
    .color = 0x3d8fc6,
    .icon = "",
    .extensions = .{"css"},
    .comment = "//",
    .language_server = .{ "vscode-css-language-server", "--stdio" },
};

pub const diff = .{
    .description = "Diff",
    .extensions = .{ "diff", "patch", "rej" },
    .comment = "#",
};

pub const dockerfile = .{
    .description = "Docker",
    .color = 0x019bc6,
    .icon = "",
    .extensions = .{ "Dockerfile", "dockerfile", "docker", "Containerfile", "container" },
    .comment = "#",
};

pub const dtd = .{
    .description = "DTD",
    .icon = "",
    .extensions = .{"dtd"},
    .comment = "<!--",
    .highlights = "tree-sitter-xml/queries/dtd/highlights.scm",
};

pub const elixir = .{
    .description = "Elixir",
    .color = 0x4e2a8e,
    .icon = "",
    .extensions = .{ "ex", "exs" },
    .comment = "#",
    .injections = "tree-sitter-elixir/queries/injections.scm",
    .formatter = .{ "mix", "format", "-" },
    .language_server = .{"elixir-ls"},
};

pub const fish = .{
    .description = "Fish",
    .extensions = .{"fish"},
    .comment = "#",
    .parser = @import("file_type.zig").Parser("fish"),
    .highlights = "tree-sitter-fish/queries/highlights.scm",
};

pub const @"git-rebase" = .{
    .description = "Git (rebase)",
    .color = 0xf34f29,
    .icon = "",
    .extensions = .{"git-rebase-todo"},
    .comment = "#",
};

pub const gitcommit = .{
    .description = "Git (commit)",
    .color = 0xf34f29,
    .icon = "",
    .extensions = .{"COMMIT_EDITMSG"},
    .comment = "#",
    .injections = "tree-sitter-gitcommit/queries/injections.scm",
};

pub const gleam = .{
    .description = "Gleam",
    .color = 0xffaff3,
    .icon = "",
    .extensions = .{"gleam"},
    .comment = "//",
    .language_server = .{ "gleam", "lsp" },
    .formatter = .{ "gleam", "format", "--stdin" },
};

pub const go = .{
    .description = "Go",
    .color = 0x00acd7,
    .icon = "",
    .extensions = .{"go"},
    .comment = "//",
    .language_server = .{"gopls"},
    .formatter = .{"gofmt"},
};

pub const hare = .{
    .description = "Hare",
    .extensions = .{"ha"},
    .comment = "//",
};

pub const haskell = .{
    .description = "Haskell",
    .color = 0x5E5185,
    .icon = "",
    .extensions = .{"hs"},
    .comment = "--",
    .language_server = .{ "haskell-language-server-wrapper", "lsp" },
};

pub const html = .{
    .description = "HTML",
    .color = 0xe54d26,
    .icon = "",
    .extensions = .{"html"},
    .comment = "<!--",
    .injections = "tree-sitter-html/queries/injections.scm",
    .language_server = .{ "superhtml", "lsp" }, // https://github.com/kristoff-it/super-html.git
    .formatter = .{ "superhtml", "fmt", "--stdin" },
};

pub const superhtml = .{
    .description = "SuperHTML",
    .color = 0xe54d26,
    .icon = "",
    .extensions = .{"shtml"},
    .comment = "<!--",
    .highlights = "tree-sitter-superhtml/tree-sitter-superhtml/queries/highlights.scm",
    .injections = "tree-sitter-superhtml/tree-sitter-superhtml/queries/injections.scm",
    .language_server = .{ "superhtml", "lsp" },
    .formatter = .{ "superhtml", "fmt", "--stdin-super" },
};

pub const hurl = .{
    .description = "Hurl",
    .color = 0xff0087,
    .icon = "",
    .extensions = .{"hurl"},
    .comment = "#",
    .injections = "tree-sitter-hurl/queries/injections.scm",
};

pub const java = .{
    .description = "Java",
    .color = 0xEA2D2E,
    .icon = "",
    .extensions = .{"java"},
    .comment = "//",
};

pub const javascript = .{
    .description = "JavaScript",
    .color = 0xf0db4f,
    .icon = "",
    .extensions = .{"js"},
    .comment = "//",
    .injections = "tree-sitter-javascript/queries/injections.scm",
    .language_server = .{ "typescript-language-server", "--stdio" },
    .formatter = .{ "prettier", "--parser", "typescript" },
};

pub const json = .{
    .description = "JSON",
    .extensions = .{"json"},
    .comment = "//",
    .language_server = .{ "vscode-json-language-server", "--stdio" },
    .formatter = .{ "prettier", "--parser", "json" },
};

pub const julia = .{
    .description = "Julia",
    .color = 0x4D64AE,
    .icon = "",
    .extensions = .{"jl"},
    .comment = "#",
    .language_server = .{ "julia", "-e", "using LanguageServer; runserver()" },
    .formatter = .{ "julia", "-e", "using JuliaFormatter; print(format_text(read(stdin, String)))" },
};

pub const kdl = .{
    .description = "KDL",
    .color = 0x000000,
    .icon = "",
    .extensions = .{"kdl"},
    .comment = "//",
};

pub const lua = .{
    .description = "Lua",
    .color = 0x02027d,
    .icon = "",
    .extensions = .{"lua"},
    .comment = "--",
    .injections = "tree-sitter-lua/queries/injections.scm",
    .first_line_matches = FirstLineMatch{ .prefix = "--", .content = "lua" },
    .language_server = .{"lua-lsp"},
};

pub const mail = .{
    .description = "E-Mail",
    .icon = "",
    .extensions = .{ "eml", "mbox" },
    .comment = ">",
    .highlights = "tree-sitter-mail/queries/mail/highlights.scm",
    .first_line_matches = FirstLineMatch{ .prefix = "From" },
};

pub const make = .{
    .description = "Make",
    .extensions = .{ "makefile", "Makefile", "MAKEFILE", "GNUmakefile", "mk", "mak", "dsp" },
    .comment = "#",
};

pub const markdown = .{
    .description = "Markdown",
    .color = 0x000000,
    .icon = "",
    .extensions = .{"md"},
    .comment = "<!--",
    .highlights = "tree-sitter-markdown/tree-sitter-markdown/queries/highlights.scm",
    .injections = "tree-sitter-markdown/tree-sitter-markdown/queries/injections.scm",
    .language_server = .{ "marksman", "server" },
    .formatter = .{ "prettier", "--parser", "markdown" },
};

pub const @"markdown-inline" = .{
    .description = "Markdown (inline)",
    .color = 0x000000,
    .icon = "",
    .extensions = .{},
    .comment = "<!--",
    .highlights = "tree-sitter-markdown/tree-sitter-markdown-inline/queries/highlights.scm",
    .injections = "tree-sitter-markdown/tree-sitter-markdown-inline/queries/injections.scm",
};

pub const nasm = .{
    .description = "Assembly Language (nasm)",
    .extensions = .{ "asm", "nasm" },
    .comment = "#",
    .injections = "tree-sitter-nasm/queries/injections.scm",
};

pub const nim = .{
    .description = "Nim",
    .color = 0xffe953,
    .icon = "",
    .extensions = .{"nim"},
    .comment = "#",
    .language_server = .{"nimlangserver"},
};

pub const nimble = .{
    .description = "Nimble (nim)",
    .color = 0xffe953,
    .icon = "",
    .extensions = .{"nimble"},
    .highlights = toml.highlights,
    .comment = "#",
    .parser = toml.parser,
};

pub const ninja = .{
    .description = "Ninja",
    .extensions = .{"ninja"},
    .comment = "#",
};

pub const nix = .{
    .description = "Nix",
    .color = 0x5277C3,
    .icon = "",
    .extensions = .{"nix"},
    .comment = "#",
    .injections = "tree-sitter-nix/queries/injections.scm",
    .language_server = .{"nixd"},
    .formatter = .{"alejandra"},
};

pub const nu = .{
    .description = "Nushell",
    .color = 0x3AA675,
    .icon = ">",
    .extensions = .{ "nu", "nushell" },
    .comment = "#",
    .language_server = .{ "nu", "--lsp" },
    .highlights = "tree-sitter-nu/queries/nu/highlights.scm",
    .injections = "tree-sitter-nu/queries/nu/injections.scm",
};

pub const ocaml = .{
    .description = "OCaml",
    .color = 0xF18803,
    .icon = "",
    .extensions = .{ "ml", "mli" },
    .comment = "(*",
    .formatter = .{ "ocamlformat", "--profile=ocamlformat", "-" },
    .language_server = .{ "ocamllsp", "--fallback-read-dot-merlin" },
};

pub const odin = .{
    .description = "Odin",
    .extensions = .{"odin"},
    .comment = "//",
    .parser = @import("file_type.zig").Parser("odin"),
    .injections = "tree-sitter-odin/queries/injections.scm",
    .language_server = .{"ols"},
    .formatter = .{ "odinfmt", "-stdin" },
};

pub const openscad = .{
    .description = "OpenSCAD",
    .color = 0x000000,
    .icon = "",
    .extensions = .{"scad"},
    .comment = "//",
    .injections = "tree-sitter-openscad/queries/injections.scm",
    .language_server = .{"openscad-lsp"},
};

pub const org = .{
    .description = "Org Mode",
    .icon = "",
    .extensions = .{"org"},
    .comment = "#",
};

pub const php = .{
    .description = "PHP",
    .color = 0x6181b6,
    .icon = "",
    .extensions = .{"php"},
    .comment = "//",
    .injections = "tree-sitter-php/queries/injections.scm",
    .language_server = .{ "intelephense", "--stdio" },
};

pub const powershell = .{
    .description = "PowerShell",
    .color = 0x0873c5,
    .icon = "",
    .extensions = .{"ps1"},
    .comment = "#",
};

pub const proto = .{
    .description = "protobuf (proto)",
    .extensions = .{"proto"},
    .comment = "//",
};

pub const purescript = .{
    .description = "PureScript",
    .color = 0x14161a,
    .icon = "",
    .extensions = .{"purs"},
    .comment = "--",
    .injections = "tree-sitter-purescript/queries/injections.scm",
};

pub const python = .{
    .description = "Python",
    .color = 0xffd845,
    .icon = "",
    .extensions = .{ "py", "pyi" },
    .comment = "#",
    .first_line_matches = FirstLineMatch{ .prefix = "#!", .content = "python" },
    .language_server = .{"pylsp"},
};

pub const regex = .{
    .description = "Regular expression",
    .extensions = .{},
    .comment = "#",
};

pub const rpmspec = .{
    .description = "RPM spec",
    .color = 0xff0000,
    .icon = "",
    .extensions = .{"spec"},
    .comment = "#",
};

pub const ruby = .{
    .description = "Ruby",
    .color = 0xd91404,
    .icon = "",
    .extensions = .{"rb"},
    .comment = "#",
    .language_server = .{"ruby-lsp"},
};

pub const rust = .{
    .description = "Rust",
    .color = 0x000000,
    .icon = "",
    .extensions = .{"rs"},
    .comment = "//",
    .injections = "tree-sitter-rust/queries/injections.scm",
    .language_server = .{"rust-analyzer"},
    .formatter = .{"rustfmt"},
};

pub const scheme = .{
    .description = "Scheme",
    .extensions = .{ "scm", "ss", "el" },
    .comment = ";",
};

pub const sql = .{
    .description = "SQL",
    .icon = "",
    .extensions = .{"sql"},
    .comment = "--",
};

pub const @"ssh-config" = .{
    .description = "SSH config",
    .extensions = .{".ssh/config"},
    .comment = "#",
};

pub const swift = .{
    .description = "Swift",
    .color = 0xf05138,
    .icon = "",
    .extensions = .{ "swift", "swiftinterface" },
    .comment = "//",
    .language_server = .{"sourcekit-lsp"},
    .formatter = .{"swift-format"},
};

pub const verilog = .{
    .description = "SystemVerilog",
    .extensions = .{ "sv", "svh" },
    .comment = "//",
    .highlights = "nvim-treesitter/queries/verilog/highlights.scm",
    .injections = "nvim-treesitter/queries/verilog/injections.scm",
    .language_server = .{"verible-verilog-ls"},
    .formatter = .{ "verible-verilog-format", "-" },
};

pub const toml = .{
    .description = "TOML",
    .extensions = .{ "toml", "ini" },
    .comment = "#",
    .highlights = "tree-sitter-toml/queries/highlights.scm",
    .parser = @import("file_type.zig").Parser("toml"),
};

pub const typescript = .{
    .description = "TypeScript",
    .color = 0x007acc,
    .icon = "",
    .extensions = .{ "ts", "tsx" },
    .comment = "//",
    .language_server = .{ "typescript-language-server", "--stdio" },
    .formatter = .{ "prettier", "--parser", "typescript" },
};

pub const typst = .{
    .description = "Typst",
    .color = 0x23b6bc,
    .icon = "t",
    .extensions = .{ "typst", "typ" },
    .comment = "//",
    .language_server = .{"tinymist"},
    .highlights = "tree-sitter-typst/queries/typst/highlights.scm",
    .injections = "tree-sitter-typst/queries/typst/injections.scm",
};

pub const uxntal = .{
    .description = "Uxntal",
    .extensions = .{"tal"},
    .comment = "(",
};

pub const vim = .{
    .description = "Vimscript",
    .color = 0x007f00,
    .icon = "",
    .extensions = .{"vim"},
    .comment = "\"",
    .highlights = "tree-sitter-vim/queries/vim/highlights.scm",
    .injections = "tree-sitter-vim/queries/vim/injections.scm",
};

pub const xml = .{
    .description = "XML",
    .icon = "",
    .extensions = .{"xml"},
    .comment = "<!--",
    .highlights = "tree-sitter-xml/queries/xml/highlights.scm",
    .first_line_matches = FirstLineMatch{ .prefix = "<?xml " },
    .formatter = .{ "xmllint", "--format", "-" },
};

pub const yaml = .{
    .description = "YAML",
    .color = 0x000000,
    .icon = "",
    .extensions = .{ "yaml", "yml" },
    .comment = "#",
};

pub const zig = .{
    .description = "Zig",
    .color = 0xf7a41d,
    .icon = "",
    .extensions = .{ "zig", "zon" },
    .comment = "//",
    .formatter = .{ "zig", "fmt", "--stdin" },
    .language_server = .{"zls"},
    .injections = "tree-sitter-zig/queries/injections.scm",
};

pub const ziggy = .{
    .description = "Ziggy",
    .color = 0xf7a41d,
    .icon = "",
    .extensions = .{ "ziggy", "zgy" },
    .comment = "//",
    .highlights = "tree-sitter-ziggy/tree-sitter-ziggy/queries/highlights.scm",
};

pub const @"ziggy-schema" = .{
    .description = "Ziggy (schema)",
    .color = 0xf7a41d,
    .icon = "",
    .extensions = .{ "ziggy-schema", "zyg-schema" },
    .comment = "//",
    .highlights = "tree-sitter-ziggy/tree-sitter-ziggy-schema/queries/highlights.scm",
};
@@ -1,213 +0,0 @@
const std = @import("std");
const build_options = @import("build_options");

const treez = if (build_options.use_tree_sitter)
    @import("treez")
else
    @import("treez_dummy.zig");

const Self = @This();

pub const Edit = treez.InputEdit;
pub const FileType = @import("file_type.zig");
pub const QueryCache = @import("QueryCache.zig");
pub const Range = treez.Range;
pub const Point = treez.Point;
const Input = treez.Input;
const Language = treez.Language;
const Parser = treez.Parser;
const Query = treez.Query;
pub const Node = treez.Node;

allocator: std.mem.Allocator,
lang: *const Language,
parser: *Parser,
query: *Query,
errors_query: *Query,
injections: ?*Query,
tree: ?*treez.Tree = null,

pub fn create(file_type: FileType, allocator: std.mem.Allocator, query_cache: *QueryCache) !*Self {
    const query = try query_cache.get(file_type, .highlights);
    errdefer query_cache.release(query, .highlights);
    const errors_query = try query_cache.get(file_type, .errors);
    errdefer query_cache.release(errors_query, .highlights);
    const injections = try query_cache.get(file_type, .injections);
    errdefer if (injections) |injections_| query_cache.release(injections_, .injections);
    const self = try allocator.create(Self);
    errdefer allocator.destroy(self);
    self.* = .{
        .allocator = allocator,
        .lang = file_type.lang_fn() orelse std.debug.panic("tree-sitter parser function failed for language: {s}", .{file_type.name}),
        .parser = try Parser.create(),
        .query = query,
        .errors_query = errors_query,
        .injections = injections,
    };
    try self.parser.setLanguage(self.lang);
    return self;
}

pub fn static_create_file_type(allocator: std.mem.Allocator, lang_name: []const u8, query_cache: *QueryCache) !*Self {
    const file_type = FileType.get_by_name_static(lang_name) orelse return error.NotFound;
    return create(file_type, allocator, query_cache);
}

pub fn static_create_guess_file_type_static(allocator: std.mem.Allocator, content: []const u8, file_path: ?[]const u8, query_cache: *QueryCache) !*Self {
    const file_type = FileType.guess_static(file_path, content) orelse return error.NotFound;
    return create(file_type, allocator, query_cache);
}

pub fn destroy(self: *Self, query_cache: *QueryCache) void {
    if (self.tree) |tree| tree.destroy();
    query_cache.release(self.query, .highlights);
    query_cache.release(self.errors_query, .highlights);
    if (self.injections) |injections| query_cache.release(injections, .injections);
    self.parser.destroy();
    self.allocator.destroy(self);
}

pub fn reset(self: *Self) void {
    if (self.tree) |tree| {
        tree.destroy();
        self.tree = null;
    }
}

pub fn refresh_full(self: *Self, content: []const u8) !void {
    self.reset();
    self.tree = try self.parser.parseString(null, content);
}

pub fn edit(self: *Self, ed: Edit) void {
    if (self.tree) |tree| tree.edit(&ed);
}

pub fn refresh_from_buffer(self: *Self, buffer: anytype, metrics: anytype) !void {
    const old_tree = self.tree;
    defer if (old_tree) |tree| tree.destroy();

    const State = struct {
        buffer: @TypeOf(buffer),
        metrics: @TypeOf(metrics),
        syntax: *Self,
        result_buf: [1024]u8 = undefined,
    };
    var state: State = .{
        .buffer = buffer,
        .metrics = metrics,
        .syntax = self,
    };

    const input: Input = .{
        .payload = &state,
        .read = struct {
            fn read(payload: ?*anyopaque, _: u32, position: treez.Point, bytes_read: *u32) callconv(.C) [*:0]const u8 {
                const ctx: *State = @ptrCast(@alignCast(payload orelse return ""));
                const result = ctx.buffer.get_from_pos(.{ .row = position.row, .col = position.column }, &ctx.result_buf, ctx.metrics);
                bytes_read.* = @intCast(result.len);
                return @ptrCast(result.ptr);
            }
        }.read,
        .encoding = .utf_8,
    };
    self.tree = try self.parser.parse(old_tree, input);
}

pub fn refresh_from_string(self: *Self, content: [:0]const u8) !void {
    const old_tree = self.tree;
    defer if (old_tree) |tree| tree.destroy();

    const State = struct {
        content: @TypeOf(content),
    };
    var state: State = .{
        .content = content,
    };

    const input: Input = .{
        .payload = &state,
        .read = struct {
            fn read(payload: ?*anyopaque, _: u32, position: treez.Point, bytes_read: *u32) callconv(.C) [*:0]const u8 {
                bytes_read.* = 0;
                const ctx: *State = @ptrCast(@alignCast(payload orelse return ""));
                const pos = (find_line_begin(ctx.content, position.row) orelse return "") + position.column;
                if (pos >= ctx.content.len) return "";
                bytes_read.* = @intCast(ctx.content.len - pos);
                return ctx.content[pos..].ptr;
            }
        }.read,
        .encoding = .utf_8,
    };
    self.tree = try self.parser.parse(old_tree, input);
}

fn find_line_begin(s: []const u8, line: usize) ?usize {
    var idx: usize = 0;
    var at_line: usize = 0;
    while (idx < s.len) {
        if (at_line == line)
            return idx;
        if (s[idx] == '\n')
            at_line += 1;
        idx += 1;
    }
    return null;
}

fn CallBack(comptime T: type) type {
    return fn (ctx: T, sel: Range, scope: []const u8, id: u32, capture_idx: usize, node: *const Node) error{Stop}!void;
}

pub fn render(self: *const Self, ctx: anytype, comptime cb: CallBack(@TypeOf(ctx)), range: ?Range) !void {
    const cursor = try Query.Cursor.create();
    defer cursor.destroy();
    const tree = self.tree orelse return;
    cursor.execute(self.query, tree.getRootNode());
    if (range) |r| cursor.setPointRange(r.start_point, r.end_point);
    while (cursor.nextMatch()) |match| {
        var idx: usize = 0;
        for (match.captures()) |capture| {
            try cb(ctx, capture.node.getRange(), self.query.getCaptureNameForId(capture.id), capture.id, idx, &capture.node);
            idx += 1;
        }
    }
}

pub fn highlights_at_point(self: *const Self, ctx: anytype, comptime cb: CallBack(@TypeOf(ctx)), point: Point) void {
    const cursor = Query.Cursor.create() catch return;
    defer cursor.destroy();
    const tree = self.tree orelse return;
    cursor.execute(self.query, tree.getRootNode());
    cursor.setPointRange(.{ .row = point.row, .column = 0 }, .{ .row = point.row + 1, .column = 0 });
    while (cursor.nextMatch()) |match| {
        for (match.captures()) |capture| {
            const range = capture.node.getRange();
            const start = range.start_point;
            const end = range.end_point;
            const scope = self.query.getCaptureNameForId(capture.id);
            if (start.row == point.row and start.column <= point.column and point.column < end.column)
                cb(ctx, range, scope, capture.id, 0, &capture.node) catch return;
            break;
        }
    }
    return;
}

pub fn node_at_point_range(self: *const Self, range: Range) error{Stop}!treez.Node {
    const tree = self.tree orelse return error.Stop;
    const root_node = tree.getRootNode();
    return treez.Node.externs.ts_node_descendant_for_point_range(root_node, range.start_point, range.end_point);
}

pub fn count_error_nodes(self: *const Self) usize {
    const cursor = Query.Cursor.create() catch return std.math.maxInt(usize);
    defer cursor.destroy();
    const tree = self.tree orelse return 0;
    cursor.execute(self.errors_query, tree.getRootNode());
    var error_count: usize = 0;
    while (cursor.nextMatch()) |match| for (match.captures()) |_| {
        error_count += 1;
    };
    return error_count;
}
@ -1,133 +0,0 @@
|
|||
pub const InputEdit = extern struct {
    start_byte: u32,
    old_end_byte: u32,
    new_end_byte: u32,
    start_point: Point,
    old_end_point: Point,
    new_end_point: Point,
};

pub const Range = extern struct {
    start_point: Point = .{},
    end_point: Point = .{},
    start_byte: u32 = 0,
    end_byte: u32 = 0,
};

pub const Point = extern struct {
    row: u32 = 0,
    column: u32 = 0,
};

pub const InputEncoding = enum(c_uint) {
    utf_8,
    utf_16,
};

pub const Input = extern struct {
    payload: ?*anyopaque,
    read: ?*const fn (payload: ?*anyopaque, byte_index: u32, position: Point, bytes_read: *u32) callconv(.C) [*:0]const u8,
    encoding: InputEncoding,
};

pub const Language = struct {
    var dummy: @This() = .{};
    pub fn LangFn() callconv(.C) ?*const Language {
        return &dummy;
    }
};

pub const Parser = struct {
    var dummy: @This() = .{};
    pub fn create() !*@This() {
        return &dummy;
    }
    pub fn parse(_: *Parser, _: ?*Tree, _: Input) !*Tree {
        return &Tree.dummy;
    }
    pub fn parseString(_: *@This(), _: ?[]const u8, _: []const u8) !?*Tree {
        return null;
    }
    pub fn destroy(_: *@This()) void {}
    pub fn setLanguage(_: *Parser, _: *const Language) !void {}
};

pub const Query = struct {
    var dummy: @This() = .{};
    pub fn create(_: *const Language, _: []const u8) !*Query {
        return &dummy;
    }
    pub const Cursor = struct {
        var dummy_: @This() = .{};
        pub fn create() !*@This() {
            return &dummy_;
        }
        pub fn execute(_: *@This(), _: *Query, _: *Node) void {}
        pub fn setPointRange(_: *@This(), _: Point, _: Point) void {}
        pub fn nextMatch(_: *@This()) ?*Match {
            return null;
        }
        pub fn destroy(_: *@This()) void {}

        pub const Match = struct {
            pub fn captures(_: *@This()) []Capture {
                return &[_]Capture{};
            }
        };
        pub const Capture = struct {
            id: u32,
            node: Node,
        };
    };
    pub fn getCaptureNameForId(_: *@This(), _: u32) []const u8 {
        return "";
    }
    pub fn destroy(_: *@This()) void {}
};

pub const Tree = struct {
    var dummy: @This() = .{};
    pub fn getRootNode(_: *@This()) *Node {
        return &Node.dummy;
    }
    pub fn destroy(_: *@This()) void {}
    pub fn edit(_: *Tree, _: *const InputEdit) void {}
};

pub const Node = struct {
    var dummy: @This() = .{};
    pub fn getRange(_: *const @This()) Range {
        return .{};
    }
    pub fn asSExpressionString(_: *const @This()) []const u8 {
        return "";
    }
    pub fn freeSExpressionString(_: []const u8) void {}
    pub fn getParent(_: *const @This()) Node {
        return dummy;
    }
    pub fn getChild(_: *const @This(), _: usize) Node {
        return dummy;
    }
    pub fn getChildCount(_: *const @This()) usize {
        return 0;
    }
    pub fn getNamedChild(_: *const @This(), _: usize) Node {
        return dummy;
    }
    pub fn getNamedChildCount(_: *const @This()) usize {
        return 0;
    }
    pub fn isNull(_: *const @This()) bool {
        return true;
    }
    pub const externs = struct {
        pub fn ts_node_next_sibling(_: Node) Node {
            return Node.dummy;
        }
        pub fn ts_node_prev_sibling(_: Node) Node {
            return Node.dummy;
        }
        pub fn ts_node_next_named_sibling(_: Node) Node {
            return Node.dummy;
        }
        pub fn ts_node_prev_named_sibling(_: Node) Node {
            return Node.dummy;
        }
        pub fn ts_node_descendant_for_point_range(_: *const Node, _: Point, _: Point) Node {
            return Node.dummy;
        }
    };
};
@@ -1,140 +0,0 @@
const std = @import("std");
const cbor = @import("cbor");
const treez = @import("treez");

pub const tss = @import("ts_serializer.zig");

const verbose = false;

pub fn main() anyerror!void {
    const allocator = std.heap.c_allocator;
    const args = try std.process.argsAlloc(allocator);

    var opt_output_file_path: ?[]const u8 = null;

    var i: usize = 1;
    while (i < args.len) : (i += 1) {
        const arg = args[i];
        if (opt_output_file_path != null) fatal("duplicated {s} argument", .{arg});
        opt_output_file_path = args[i];
    }

    const output_file_path = opt_output_file_path orelse fatal("missing output file", .{});
    var output_file = std.fs.cwd().createFile(output_file_path, .{}) catch |err| {
        fatal("unable to open '{s}': {s}", .{ output_file_path, @errorName(err) });
    };
    defer output_file.close();

    var output = std.ArrayList(u8).init(allocator);
    defer output.deinit();
    const writer = output.writer();

    try cbor.writeMapHeader(writer, file_types.len);

    for (file_types) |file_type| {
        const lang = file_type.lang_fn() orelse std.debug.panic("tree-sitter parser function failed for language: {s}", .{file_type.name});

        try cbor.writeValue(writer, file_type.name);
        try cbor.writeMapHeader(writer, if (file_type.injections) |_| 3 else 2);

        const highlights_in = try treez.Query.create(lang, file_type.highlights);
        const ts_highlights_in: *tss.TSQuery = @alignCast(@ptrCast(highlights_in));

        const highlights_cb = try tss.toCbor(ts_highlights_in, allocator);
        defer allocator.free(highlights_cb);

        try cbor.writeValue(writer, "highlights");
        try cbor.writeValue(writer, highlights_cb);
        if (verbose)
            std.log.info("file_type {s} highlights {d} bytes", .{ file_type.name, highlights_cb.len });

        const errors_in = try treez.Query.create(lang, "(ERROR) @error");
        const ts_errors_in: *tss.TSQuery = @alignCast(@ptrCast(errors_in));

        const errors_cb = try tss.toCbor(ts_errors_in, allocator);
        defer allocator.free(errors_cb);

        try cbor.writeValue(writer, "errors");
        try cbor.writeValue(writer, errors_cb);
        if (verbose)
            std.log.info("file_type {s} errors {d} bytes", .{ file_type.name, errors_cb.len });

        if (file_type.injections) |injections| {
            const injections_in = try treez.Query.create(lang, injections);
            const ts_injections_in: *tss.TSQuery = @alignCast(@ptrCast(injections_in));

            const injections_cb = try tss.toCbor(ts_injections_in, allocator);
            defer allocator.free(injections_cb);

            try cbor.writeValue(writer, "injections");
            try cbor.writeValue(writer, injections_cb);
            if (verbose)
                std.log.info("file_type {s} injections {d} bytes", .{ file_type.name, injections_cb.len });
        }
    }

    try output_file.writeAll(output.items);
    if (verbose)
        std.log.info("file_types total {d} bytes", .{output.items.len});
}
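
// Shape of the generated CBOR, inferred from the writes above (a sketch, not a
// formal schema):
//   { "<file_type name>": { "highlights": <bytes>, "errors": <bytes>, "injections": <bytes>? }, ... }
// where each <bytes> value is itself a CBOR-serialized TSQuery produced by tss.toCbor.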

fn fatal(comptime format: []const u8, args: anytype) noreturn {
    std.debug.print(format, args);
    std.process.exit(1);
}

pub const file_types = load_file_types(@import("file_types.zig"));

const FileType = struct {
    name: []const u8,
    lang_fn: LangFn,
    highlights: [:0]const u8,
    injections: ?[:0]const u8,
};
const LangFn = *const fn () callconv(.C) ?*const treez.Language;

fn load_file_types(comptime Namespace: type) []const FileType {
    comptime switch (@typeInfo(Namespace)) {
        .@"struct" => |info| {
            var count = 0;
            for (info.decls) |_| count += 1;
            var construct_types: [count]FileType = undefined;
            var i = 0;
            for (info.decls) |decl| {
                const lang = decl.name;
                const args = @field(Namespace, lang);
                construct_types[i] = .{
                    .name = lang,
                    .lang_fn = if (@hasField(@TypeOf(args), "parser")) args.parser else get_parser(lang),
                    .highlights = if (@hasField(@TypeOf(args), "highlights"))
                        @embedFile(args.highlights)
                    else if (@hasField(@TypeOf(args), "highlights_list"))
                        @embedFile(args.highlights_list[0]) ++ "\n" ++ @embedFile(args.highlights_list[1])
                    else
                        @embedFile("tree-sitter-" ++ lang ++ "/queries/highlights.scm"),
                    .injections = if (@hasField(@TypeOf(args), "injections"))
                        @embedFile(args.injections)
                    else
                        null,
                };
                i += 1;
            }
            const types = construct_types;
            return &types;
        },
        else => @compileError("expected tuple or struct type"),
    };
}

fn get_parser(comptime lang: []const u8) LangFn {
    const language_name = ft_func_name(lang);
    return @extern(?LangFn, .{ .name = "tree_sitter_" ++ language_name }) orelse @compileError(std.fmt.comptimePrint("Cannot find extern tree_sitter_{s}", .{language_name}));
}

fn ft_func_name(comptime lang: []const u8) []const u8 {
    var transform: [lang.len]u8 = undefined;
    for (lang, 0..) |c, i|
        transform[i] = if (c == '-') '_' else c;
    const func_name = transform;
    return &func_name;
}
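
// Illustrative sketch (not from this commit): load_file_types expects
// file_types.zig to expose one declaration per language, with optional fields
// matching the @hasField checks above. The entries below are assumptions.
//
//     pub const zig = .{};                      // falls back to get_parser("zig")
//                                               // and the bundled highlights.scm
//     pub const make = .{
//         .parser = some_extern_parser_fn,      // hypothetical override
//         .highlights = "queries/make.scm",     // hypothetical path
//         .injections = "queries/make-inj.scm", // hypothetical path
//     };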
@@ -1,297 +0,0 @@
/// This file *MUST* be kept in sync with tree-sitter/lib/src/query.c
/// It exactly represents the C structures in memory and must produce
/// the exact same results as the C tree-sitter library version used.
///
/// Yes,... it is not a public API! Here be dragons!
///
const std = @import("std");
const cbor = @import("cbor");
const build_options = @import("build_options");
const treez = if (build_options.use_tree_sitter) @import("treez") else @import("treez_dummy.zig");

pub const Slice = extern struct {
    offset: u32,
    length: u32,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extract(&self.offset),
            cbor.extract(&self.length),
        });
    }
};

pub fn Array(T: type) type {
    return extern struct {
        contents: ?*T,
        size: u32,
        capacity: u32,

        pub fn cborEncode(self: *const @This(), writer: anytype) !void {
            if (self.contents) |contents| {
                const arr: []T = @as([*]T, @ptrCast(contents))[0..self.size];
                try cbor.writeValue(writer, arr);
                return;
            }
            try cbor.writeValue(writer, null);
        }

        pub fn cborExtract(self: *@This(), iter: *[]const u8, allocator: std.mem.Allocator) cbor.Error!bool {
            var iter_ = iter.*;
            if (cbor.matchValue(&iter_, cbor.null_) catch false) {
                iter.* = iter_;
                self.contents = null;
                self.size = 0;
                self.capacity = 0;
                return true;
            }

            if (T == u8) {
                var arr: []const u8 = undefined;
                if (try cbor.matchValue(iter, cbor.extract(&arr))) {
                    self.contents = @constCast(@ptrCast(arr.ptr));
                    self.size = @intCast(arr.len);
                    self.capacity = @intCast(arr.len);
                    return true;
                }
                return false;
            }

            var i: usize = 0;
            var n = try cbor.decodeArrayHeader(iter);
            var arr: []T = try allocator.alloc(T, n);
            while (n > 0) : (n -= 1) {
                if (comptime cbor.isExtractableAlloc(T)) {
                    if (!(cbor.matchValue(iter, cbor.extractAlloc(&arr[i], allocator)) catch return false))
                        return false;
                } else {
                    if (!(cbor.matchValue(iter, cbor.extract(&arr[i])) catch return false))
                        return false;
                }
                i += 1;
            }
            self.contents = @constCast(@ptrCast(arr.ptr));
            self.size = @intCast(arr.len);
            self.capacity = @intCast(arr.len);
            return true;
        }
    };
}

pub const SymbolTable = extern struct {
    characters: Array(u8),
    slices: Array(Slice),

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8, allocator: std.mem.Allocator) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extractAlloc(&self.characters, allocator),
            cbor.extractAlloc(&self.slices, allocator),
        });
    }
};

pub const CaptureQuantifiers = Array(u8);

pub const PatternEntry = extern struct {
    step_index: u16,
    pattern_index: u16,
    is_rooted: bool,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extract(&self.step_index),
            cbor.extract(&self.pattern_index),
            cbor.extract(&self.is_rooted),
        });
    }
};

pub const QueryPattern = extern struct {
    steps: Slice,
    predicate_steps: Slice,
    start_byte: u32,
    end_byte: u32,
    is_non_local: bool,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8, allocator: std.mem.Allocator) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extractAlloc(&self.steps, allocator),
            cbor.extractAlloc(&self.predicate_steps, allocator),
            cbor.extract(&self.start_byte),
            cbor.extract(&self.end_byte),
            cbor.extract(&self.is_non_local),
        });
    }
};

pub const StepOffset = extern struct {
    byte_offset: u32,
    step_index: u16,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extract(&self.byte_offset),
            cbor.extract(&self.step_index),
        });
    }
};

pub const MAX_STEP_CAPTURE_COUNT = 3;

pub const TSSymbol = u16;
pub const TSFieldId = u16;

pub const QueryStep = extern struct {
    symbol: TSSymbol,
    supertype_symbol: TSSymbol,
    field: TSFieldId,
    capture_ids: [MAX_STEP_CAPTURE_COUNT]u16,
    depth: u16,
    alternative_index: u16,
    negated_field_list_id: u16,
    // is_named: u1,
    // is_immediate: u1,
    // is_last_child: u1,
    // is_pass_through: u1,
    // is_dead_end: u1,
    // alternative_is_immediate: u1,
    // contains_captures: u1,
    // root_pattern_guaranteed: u1,
    flags8: u8,
    // parent_pattern_guaranteed: u1,
    flags16: u8,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extract(&self.symbol),
            cbor.extract(&self.supertype_symbol),
            cbor.extract(&self.field),
            cbor.extract(&self.capture_ids),
            cbor.extract(&self.depth),
            cbor.extract(&self.alternative_index),
            cbor.extract(&self.negated_field_list_id),
            cbor.extract(&self.flags8),
            cbor.extract(&self.flags16),
        });
    }
};

pub const PredicateStep = extern struct {
    pub const Type = enum(c_uint) {
        done,
        capture,
        string,
    };

    type: Type,
    value_id: u32,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8) cbor.Error!bool {
        return cbor.matchValue(iter, .{
            cbor.extract(&self.type),
            cbor.extract(&self.value_id),
        });
    }
};

pub const TSQuery = extern struct {
    captures: SymbolTable,
    predicate_values: SymbolTable,
    capture_quantifiers: Array(CaptureQuantifiers),
    steps: Array(QueryStep),
    pattern_map: Array(PatternEntry),
    predicate_steps: Array(PredicateStep),
    patterns: Array(QueryPattern),
    step_offsets: Array(StepOffset),
    negated_fields: Array(TSFieldId),
    string_buffer: Array(u8),
    repeat_symbols_with_rootless_patterns: Array(TSSymbol),
    language: usize,
    // language: ?*const treez.Language,
    wildcard_root_pattern_count: u16,

    pub fn cborEncode(self: *const @This(), writer: anytype) !void {
        return cbor.writeArray(writer, self.*);
    }

    pub fn cborExtract(self: *@This(), iter: *[]const u8, allocator: std.mem.Allocator) cbor.Error!bool {
        const result = cbor.matchValue(iter, .{
            cbor.extractAlloc(&self.captures, allocator),
            cbor.extractAlloc(&self.predicate_values, allocator),
            cbor.extractAlloc(&self.capture_quantifiers, allocator),
            cbor.extractAlloc(&self.steps, allocator),
            cbor.extractAlloc(&self.pattern_map, allocator),
            cbor.extractAlloc(&self.predicate_steps, allocator),
            cbor.extractAlloc(&self.patterns, allocator),
            cbor.extractAlloc(&self.step_offsets, allocator),
            cbor.extractAlloc(&self.negated_fields, allocator),
            cbor.extractAlloc(&self.string_buffer, allocator),
            cbor.extractAlloc(&self.repeat_symbols_with_rootless_patterns, allocator),
            cbor.extract(&self.language),
            cbor.extract(&self.wildcard_root_pattern_count),
        });
        self.language = 0;
        return result;
    }
};

pub const SerializeError = error{OutOfMemory};

pub fn toCbor(query: *TSQuery, allocator: std.mem.Allocator) SerializeError![]const u8 {
    var cb: std.ArrayListUnmanaged(u8) = .empty;
    defer cb.deinit(allocator);
    try cbor.writeValue(cb.writer(allocator), query.*);
    return cb.toOwnedSlice(allocator);
}

pub const DeserializeError = error{
    OutOfMemory,
    IntegerTooLarge,
    IntegerTooSmall,
    InvalidType,
    TooShort,
    InvalidFloatType,
    InvalidArrayType,
    InvalidPIntType,
    JsonIncompatibleType,
    InvalidQueryCbor,
    NotAnObject,
    BadArrayAllocExtract,
};

pub fn fromCbor(cb: []const u8, allocator: std.mem.Allocator) DeserializeError!struct { *TSQuery, *std.heap.ArenaAllocator } {
    var arena = try allocator.create(std.heap.ArenaAllocator);
    errdefer allocator.destroy(arena);
    arena.* = std.heap.ArenaAllocator.init(allocator);
    errdefer arena.deinit();
    const query = try arena.allocator().create(TSQuery);
    query.* = undefined;
    var iter: []const u8 = cb;
    if (!try cbor.matchValue(&iter, cbor.extractAlloc(query, arena.allocator())))
        return error.InvalidQueryCbor;
    return .{ query, arena };
}
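
// Round-trip sketch (not part of the removed file): serialize a compiled query
// and rebuild it. The helper name and the pre-existing *TSQuery are assumptions.
fn round_trip(query: *TSQuery, allocator: std.mem.Allocator) !void {
    const bytes = try toCbor(query, allocator);
    defer allocator.free(bytes);

    // fromCbor returns the arena that owns the reconstructed query; the caller
    // is responsible for tearing both down.
    const restored, const arena = try fromCbor(bytes, allocator);
    defer {
        arena.deinit();
        allocator.destroy(arena);
    }
    std.debug.assert(restored.wildcard_root_pattern_count == query.wildcard_root_pattern_count);
}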