Compare commits

No commits in common. "05a14ae95c6dc1cfdeb10ea303aba6d9c01c475c" and "c217db02f2b82231d995dbb0a75c7d0af6b679bd" have entirely different histories.

05a14ae95c ... c217db02f2

5 changed files with 21 additions and 69 deletions
File 1 of 5:

@@ -16,7 +16,6 @@ allocator: std.mem.Allocator,
 mutex: ?std.Thread.Mutex,
 highlights: std.StringHashMapUnmanaged(*CacheEntry) = .{},
 injections: std.StringHashMapUnmanaged(*CacheEntry) = .{},
-errors: std.StringHashMapUnmanaged(*CacheEntry) = .{},
 ref_count: usize = 1,
 
 const CacheEntry = struct {
@@ -39,7 +38,6 @@ const CacheEntry = struct {
 
 pub const QueryType = enum {
     highlights,
-    errors,
     injections,
 };
 
@@ -85,20 +83,21 @@ fn release_ref_unlocked_and_maybe_destroy(self: *Self) void {
         if (self.ref_count > 0) return;
     }
 
-    release_cache_entry_hash_map(self.allocator, &self.highlights);
-    release_cache_entry_hash_map(self.allocator, &self.errors);
-    release_cache_entry_hash_map(self.allocator, &self.injections);
-    self.allocator.destroy(self);
-}
-
-fn release_cache_entry_hash_map(allocator: std.mem.Allocator, hash_map: *std.StringHashMapUnmanaged(*CacheEntry)) void {
-    var iter = hash_map.iterator();
-    while (iter.next()) |p| {
-        allocator.free(p.key_ptr.*);
-        p.value_ptr.*.destroy(allocator);
-        allocator.destroy(p.value_ptr.*);
+    var iter_highlights = self.highlights.iterator();
+    while (iter_highlights.next()) |p| {
+        self.allocator.free(p.key_ptr.*);
+        p.value_ptr.*.destroy(self.allocator);
+        self.allocator.destroy(p.value_ptr.*);
     }
-    hash_map.deinit(allocator);
+    var iter_injections = self.injections.iterator();
+    while (iter_injections.next()) |p| {
+        self.allocator.free(p.key_ptr.*);
+        p.value_ptr.*.destroy(self.allocator);
+        self.allocator.destroy(p.value_ptr.*);
+    }
+    self.highlights.deinit(self.allocator);
+    self.injections.deinit(self.allocator);
+    self.allocator.destroy(self);
 }
 
 fn get_cache_entry(self: *Self, file_type: *const FileType, comptime query_type: QueryType) CacheError!*CacheEntry {
@@ -107,7 +106,6 @@ fn get_cache_entry(self: *Self, file_type: *const FileType, comptime query_type:
 
     const hash = switch (query_type) {
         .highlights => &self.highlights,
-        .errors => &self.errors,
         .injections => &self.injections,
     };
 
@@ -137,7 +135,6 @@ fn get_cached_query(self: *Self, entry: *CacheEntry) Error!?*Query {
         const queries = FileType.queries.get(entry.file_type.name) orelse return null;
         const query_bin = switch (entry.query_type) {
             .highlights => queries.highlights_bin,
-            .errors => queries.errors_bin,
             .injections => queries.injections_bin orelse return null,
         };
         const query, const arena = try deserialize_query(query_bin, lang, self.allocator);
@@ -154,14 +151,12 @@ fn pre_load_internal(self: *Self, file_type: *const FileType, comptime query_typ
 pub fn pre_load(self: *Self, lang_name: []const u8) Error!void {
     const file_type = FileType.get_by_name(lang_name) orelse return;
     _ = try self.pre_load_internal(file_type, .highlights);
-    _ = try self.pre_load_internal(file_type, .errors);
     _ = try self.pre_load_internal(file_type, .injections);
 }
 
 fn ReturnType(comptime query_type: QueryType) type {
     return switch (query_type) {
         .highlights => *Query,
-        .errors => *Query,
         .injections => ?*Query,
     };
 }
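Note: after this change the cache release path inlines the freeing of the two remaining maps. Below is a minimal, self-contained sketch of that cleanup pattern for a std.StringHashMapUnmanaged whose keys are owned slices and whose values are heap-allocated; the names Entry and releaseMap are illustrative only, not part of the codebase.

const std = @import("std");

const Entry = struct { data: u32 };

// Frees every owned key and heap-allocated value, then the map's own storage.
// This mirrors the inlined loops in release_ref_unlocked_and_maybe_destroy above.
fn releaseMap(allocator: std.mem.Allocator, map: *std.StringHashMapUnmanaged(*Entry)) void {
    var iter = map.iterator();
    while (iter.next()) |p| {
        allocator.free(p.key_ptr.*); // key was duped into the allocator at insert time
        allocator.destroy(p.value_ptr.*); // value was created with allocator.create
    }
    map.deinit(allocator);
}

test "release owned keys and values" {
    const allocator = std.testing.allocator;
    var map: std.StringHashMapUnmanaged(*Entry) = .{};
    const key = try allocator.dupe(u8, "zig");
    const value = try allocator.create(Entry);
    value.* = .{ .data = 1 };
    try map.put(allocator, key, value);
    releaseMap(allocator, &map); // testing allocator verifies nothing leaks
}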
File 2 of 5:

@@ -138,7 +138,6 @@ fn load_file_types(comptime Namespace: type) []const FileType {
 
 pub const FileTypeQueries = struct {
     highlights_bin: []const u8,
-    errors_bin: []const u8,
     injections_bin: ?[]const u8,
 };
 
@@ -146,7 +145,7 @@ pub const queries = std.static_string_map.StaticStringMap(FileTypeQueries).initC
 
 fn load_queries() []const struct { []const u8, FileTypeQueries } {
     if (!build_options.use_tree_sitter) return &.{};
-    @setEvalBranchQuota(32000);
+    @setEvalBranchQuota(16000);
     const queries_cb = @embedFile("syntax_bin_queries");
     var iter: []const u8 = queries_cb;
     var len = cbor.decodeMapHeader(&iter) catch |e| {
@@ -164,10 +163,6 @@ fn load_queries() []const struct { []const u8, FileTypeQueries } {
                 var iter_: []const u8 = iter;
                 break :blk get_query_value_bin(&iter_, "highlights") orelse @compileError("missing highlights for " ++ lang);
             },
-            .errors_bin = blk: {
-                var iter_: []const u8 = iter;
-                break :blk get_query_value_bin(&iter_, "errors") orelse @compileError("missing errors query for " ++ lang);
-            },
             .injections_bin = blk: {
                 var iter_: []const u8 = iter;
                 break :blk get_query_value_bin(&iter_, "injections");
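Note: the queries table in this file is a comptime std.static_string_map.StaticStringMap keyed by language name. A self-contained sketch of that lookup pattern with the slimmed-down value shape follows; Queries is a stand-in for FileTypeQueries (now only highlights_bin plus an optional injections_bin), and the byte strings are placeholders.

const std = @import("std");

// Stand-in for FileTypeQueries after this change: no errors_bin field.
const Queries = struct {
    highlights_bin: []const u8,
    injections_bin: ?[]const u8,
};

// Comptime-built lookup table, same shape as the `queries` map above.
const queries = std.static_string_map.StaticStringMap(Queries).initComptime(.{
    .{ "zig", Queries{ .highlights_bin = "<highlights bytes>", .injections_bin = null } },
    .{ "markdown", Queries{ .highlights_bin = "<highlights bytes>", .injections_bin = "<injections bytes>" } },
});

test "lookup by language name" {
    const q = queries.get("markdown") orelse return error.Missing;
    try std.testing.expect(q.injections_bin != null);
    try std.testing.expect(queries.get("zig").?.injections_bin == null);
    try std.testing.expect(queries.get("cobol") == null);
}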
File 3 of 5:

@@ -24,13 +24,11 @@ lang: *const Language,
 file_type: *const FileType,
 parser: *Parser,
 query: *Query,
-errors_query: *Query,
 injections: ?*Query,
 tree: ?*treez.Tree = null,
 
 pub fn create(file_type: *const FileType, allocator: std.mem.Allocator, query_cache: *QueryCache) !*Self {
     const query = try query_cache.get(file_type, .highlights);
-    const errors_query = try query_cache.get(file_type, .errors);
     const injections = try query_cache.get(file_type, .injections);
     const self = try allocator.create(Self);
     self.* = .{
@@ -39,7 +37,6 @@ pub fn create(file_type: *const FileType, allocator: std.mem.Allocator, query_ca
         .file_type = file_type,
         .parser = try Parser.create(),
         .query = query,
-        .errors_query = errors_query,
         .injections = injections,
     };
     errdefer self.destroy(query_cache);
@@ -197,15 +194,3 @@ pub fn node_at_point_range(self: *const Self, range: Range) error{Stop}!treez.No
     const root_node = tree.getRootNode();
     return treez.Node.externs.ts_node_descendant_for_point_range(root_node, range.start_point, range.end_point);
 }
-
-pub fn count_error_nodes(self: *const Self) usize {
-    const cursor = Query.Cursor.create() catch return std.math.maxInt(usize);
-    defer cursor.destroy();
-    const tree = self.tree orelse return 0;
-    cursor.execute(self.errors_query, tree.getRootNode());
-    var error_count: usize = 0;
-    while (cursor.nextMatch()) |match| for (match.captures()) |_| {
-        error_count += 1;
-    };
-    return error_count;
-}
File 4 of 5:

@@ -4,8 +4,6 @@ const treez = @import("treez");
 
 pub const tss = @import("ts_serializer.zig");
 
-const verbose = false;
-
 pub fn main() anyerror!void {
     const allocator = std.heap.c_allocator;
     const args = try std.process.argsAlloc(allocator);
@@ -35,7 +33,7 @@ pub fn main() anyerror!void {
         const lang = file_type.lang_fn() orelse std.debug.panic("tree-sitter parser function failed for language: {s}", .{file_type.name});
 
         try cbor.writeValue(writer, file_type.name);
-        try cbor.writeMapHeader(writer, if (file_type.injections) |_| 3 else 2);
+        try cbor.writeMapHeader(writer, if (file_type.injections) |_| 2 else 1);
 
         const highlights_in = try treez.Query.create(lang, file_type.highlights);
         const ts_highlights_in: *tss.TSQuery = @alignCast(@ptrCast(highlights_in));
@@ -45,19 +43,7 @@ pub fn main() anyerror!void {
 
         try cbor.writeValue(writer, "highlights");
         try cbor.writeValue(writer, highlights_cb);
-        if (verbose)
-            std.log.info("file_type {s} highlights {d} bytes", .{ file_type.name, highlights_cb.len });
-
-        const errors_in = try treez.Query.create(lang, "(ERROR) @error");
-        const ts_errors_in: *tss.TSQuery = @alignCast(@ptrCast(errors_in));
-
-        const errors_cb = try tss.toCbor(ts_errors_in, allocator);
-        defer allocator.free(errors_cb);
-
-        try cbor.writeValue(writer, "errors");
-        try cbor.writeValue(writer, errors_cb);
-        if (verbose)
-            std.log.info("file_type {s} errors {d} bytes", .{ file_type.name, errors_cb.len });
+        // std.log.info("file_type {s} highlights {d} bytes", .{ file_type.name, highlights_cb.len });
 
         if (file_type.injections) |injections| {
             const injections_in = try treez.Query.create(lang, injections);
@@ -68,14 +54,12 @@ pub fn main() anyerror!void {
 
             try cbor.writeValue(writer, "injections");
             try cbor.writeValue(writer, injections_cb);
-            if (verbose)
-                std.log.info("file_type {s} injections {d} bytes", .{ file_type.name, injections_cb.len });
+            // std.log.info("file_type {s} injections {d} bytes", .{ file_type.name, injections_cb.len });
         }
     }
 
     try output_file.writeAll(output.items);
-    if (verbose)
-        std.log.info("file_types total {d} bytes", .{output.items.len});
+    // std.log.info("file_types total {d} bytes", .{output.items.len});
 }
 
 fn fatal(comptime format: []const u8, args: anytype) noreturn {
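Note: the writeMapHeader change above is just the entry count per language. With the "errors" entry gone, each per-language CBOR map carries a mandatory "highlights" entry plus an "injections" entry only when the file type defines one. A small self-contained check of that arithmetic; queryMapEntryCount is a hypothetical helper, the real code computes the count inline.

const std = @import("std");

// Entry count for one language's query map after this change: "highlights" is
// always written, "injections" only when the file type provides an injections query.
fn queryMapEntryCount(injections: ?[]const u8) u64 {
    return if (injections) |_| 2 else 1; // previously: 3 else 2, with "errors" always present
}

test "one fewer entry per language either way" {
    try std.testing.expectEqual(@as(u64, 2), queryMapEntryCount("(code) @injection"));
    try std.testing.expectEqual(@as(u64, 1), queryMapEntryCount(null));
}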
File 5 of 5:

@@ -35,7 +35,6 @@ const scroll_cursor_min_border_distance = 5;
 
 const double_click_time_ms = 350;
 const syntax_full_reparse_time_limit = 0; // ms (0 = always use incremental)
-const syntax_full_reparse_error_threshold = 3; // number of tree-sitter errors that trigger a full reparse
 
 pub const max_matches = if (builtin.mode == std.builtin.OptimizeMode.Debug) 10_000 else 100_000;
 pub const max_match_lines = 15;
@@ -889,7 +888,7 @@ pub const Editor = struct {
         self.style_cache_theme = theme.name;
         const cache: *StyleCache = &self.style_cache.?;
         self.render_screen(theme, cache);
-        return self.scroll_dest != self.view.row or self.syntax_refresh_full;
+        return self.scroll_dest != self.view.row;
     }
 
     const CellType = enum {
@@ -4481,7 +4480,7 @@ pub const Editor = struct {
     fn update_syntax(self: *Self) !void {
         const root = try self.buf_root();
         const eol_mode = try self.buf_eol_mode();
-        if (!self.syntax_refresh_full and self.syntax_last_rendered_root == root)
+        if (self.syntax_last_rendered_root == root)
             return;
         var kind: enum { full, incremental, none } = .none;
         var edit_count: usize = 0;
@@ -4492,7 +4491,6 @@ pub const Editor = struct {
                 defer frame.deinit();
                 syn.reset();
                 self.syntax_last_rendered_root = null;
-                self.syntax_refresh_full = false;
                 return;
             }
             if (!self.syntax_incremental_reparse)
@@ -4546,11 +4544,6 @@ pub const Editor = struct {
                         const frame = tracy.initZone(@src(), .{ .name = "editor refresh syntax" });
                         defer frame.deinit();
                         try syn.refresh_from_string(content);
-                        const error_count = syn.count_error_nodes();
-                        if (error_count >= syntax_full_reparse_error_threshold) {
-                            self.logger.print("incremental syntax update has {d} errors -> full reparse", .{error_count});
-                            self.syntax_refresh_full = true;
-                        }
                     }
                     self.syntax_last_rendered_root = root;
                     kind = .incremental;