last updated: January 04, 2026

9 minute read

Writing Non-Blocking Neovim Plugins

When building a process-intensive Neovim plugin, one of the first things you'll encounter is an unresponsive editor: since Neovim's Lua runtime is single threaded, the UI can't update while something else is running on the main thread.

In this article we'll build two solutions: a batching iterator that processes n items at a time, and a throttled iterator that runs for m nanoseconds at a time.

Boilerplate

First, let's set up some boilerplate to experience an unresponsive editor firsthand:

-- Named booleans so the nvim API calls below are self-documenting.
local enter_win = true
local listed_buf = false
local scratch_buf = true
local strict_indexing = false
-- Results pane: an unlisted scratch buffer in a vertical split, not focused.
local results_bufnr = vim.api.nvim_create_buf(listed_buf, scratch_buf)
local results_winnr = vim.api.nvim_open_win(results_bufnr, not enter_win, {
split = "right",
win = 0,
})
-- Input pane: a one-line scratch buffer split above the results window; this
-- one takes focus so the user can type immediately.
local input_bufnr = vim.api.nvim_create_buf(listed_buf, scratch_buf)
local input_winnr = vim.api.nvim_open_win(input_bufnr, enter_win, {
split = "above",
win = results_winnr,
})
vim.api.nvim_win_set_height(input_winnr, 1)
-- Convenience: closing either window force-closes the other, so the pair
-- always appears and disappears together.
vim.api.nvim_create_autocmd("WinClosed", {
pattern = { tostring(input_winnr), tostring(results_winnr), },
callback = function()
local force_close = true
if vim.api.nvim_win_is_valid(input_winnr) then vim.api.nvim_win_close(input_winnr, force_close) end
if vim.api.nvim_win_is_valid(results_winnr) then vim.api.nvim_win_close(results_winnr, force_close) end
end,
})
--- Placeholder: recompute the results buffer from the latest input line.
--- @param input string
local function populate_results(input)
-- TODO
end
-- Re-run populate_results whenever line 1 of the input buffer changes,
-- whether the edit happened in normal or insert mode.
vim.api.nvim_create_autocmd({ "TextChanged", "TextChangedI", }, {
buffer = input_bufnr,
callback = function()
local input = vim.api.nvim_buf_get_lines(input_bufnr, 0, 1, strict_indexing)
populate_results(input[1] or "")
end,
})

This code creates an input buffer, results buffer, windows for each, and sets up two autocommands. The first is just for convenience: when closing either the input or results window, the other is closed as well. The second is more relevant: when the input buffer changes, a populate_results function will run.

Let's start by creating a large table and looping over it to populate the results buffer:

--- @type number[]
local large_tbl = {}
for i = 1, 100000 do
  large_tbl[i] = i
end
-- ...
--- Synchronously rebuild the results buffer from `input` — blocks the UI
--- for the full duration of the loop.
--- @param input string
local function populate_results(input)
  local results = {}
  for index = 1, #large_tbl do
    results[index] = string.format("%d :: %s", large_tbl[index], input)
  end
  vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
end

You'll notice that the UI freezes while typing - the entire loop needs to complete before Neovim can update the input buffer with the latest keystroke. Looping a hundred thousand times might seem a bit contrived, but I've run into this scenario several times when processing results from commands such as fd and rg. How can we fix this?

Batching part 1: the basics

The first solution we'll look at is batching: break the loop into chunks, process one chunk at a time, and schedule the next chunk after each UI update.

A simple implementation could look like:

--- @class ListBatcherOpts
--- @field on_complete? fun():nil
--- @field batch_size? number

--- Iterate over `list`, calling `on_iteration` once per entry, but only
--- process `batch_size` entries per event-loop tick: each subsequent batch
--- is deferred via `vim.schedule`, so the UI can repaint between batches.
--- `opts.on_complete` fires once every entry has been processed.
--- @generic T
--- @param list T[]
--- @param on_iteration fun(entry: T):nil
--- @param opts? ListBatcherOpts
local function list_batcher(list, on_iteration, opts)
  opts = opts or {}
  local batch_size = opts.batch_size or 100
  local on_complete = opts.on_complete or (function() end)
  -- hoisted: `list` is never mutated while batching, so its length is
  -- loop-invariant and need not be recomputed every batch
  local list_len = #list
  local step
  --- @param start number
  step = function(start)
    for i = start, math.min(list_len, start + batch_size - 1) do
      on_iteration(list[i])
    end
    start = start + batch_size
    if start > list_len then
      on_complete()
    else
      vim.schedule(function() step(start) end)
    end
  end
  step(1)
end

And its invocation:

--- Rebuild the results buffer for `input`, batching the loop so the UI
--- stays responsive while the table is processed.
--- @param input string
local function populate_results(input)
  local results = {}
  local function collect(entry)
    results[#results + 1] = string.format("%d :: %s", entry, input)
  end
  local function flush()
    vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
  end
  list_batcher(large_tbl, collect, { on_complete = flush })
end

I'd prefer to pass all parameters along as opts, but the Lua language server doesn't support passing generics along to classes, i.e. T to ListBatcherOpts<T>. Since list and on_iteration are the only parameters that make use of T, it's simplest to keep those two as named parameters and move everything else into opts.

This works for looping over tables, but how about traversing other iterators? For example, the vim.fs.dir(path) function returns an iterator over the items in its path argument - in its current form, our batcher wouldn't be able to handle it.

Batching part 2: handling generic iterators

A quick crash course on iterators in Lua (based on the official docs):

  • An iterator is a function that returns the next element in a collection on every invocation
  • Iterators are normally returned by calling another function: an iterator factory. ipairs(table) is an example of an iterator factory
  • Calling the iterator factory returns (up to) three values:
    1. The iterator function
    2. The "invariant state"
    3. The initial value of the "control variable"
  • The iterator function is called with the invariant state and the control variable
  • If the first value returned by the iterator function is nil, the iteration should end. Otherwise, the control variable is set to the first value returned by the iterator function, and the iterator is called again with the (static) invariant state and the (updated) control variable

The names are a bit academic, but I prefer to use the nomenclature in the docs rather than make something up myself.

--- @class BatchedIteratorOpts
--- @field on_complete? fun():nil
--- @field batch_size? number

--- Consume any Lua iterator in batches of `batch_size` entries, deferring
--- each subsequent batch via `vim.schedule` so the UI can update between
--- batches. `opts.on_complete` fires once the iterator is exhausted.
--- @generic InvariantState, ControlVar
--- @param iterator_factory fun(): ((fun(invariant_state: InvariantState, control_var: ControlVar):ControlVar), InvariantState, ControlVar)
--- @param on_iteration fun(entry: ControlVar):nil
--- @param opts? BatchedIteratorOpts
local function batched_iterator(iterator_factory, on_iteration, opts)
  opts = opts or {}
  local batch_size = opts.batch_size or 100
  local on_complete = opts.on_complete or (function() end)
  local iter_fn, invariant_state, control_var = iterator_factory()
  local step
  step = function()
    local num_processed = 0
    while num_processed < batch_size do
      -- capture every value the iterator returns; the first one is the
      -- control variable, and nil there signals the end of iteration
      local values = { iter_fn(invariant_state, control_var), }
      control_var = values[1]
      if control_var == nil then
        on_complete()
        return
      end
      on_iteration(unpack(values))
      num_processed = num_processed + 1
    end
    vim.schedule(step)
  end
  step()
end
--- Rebuild the results buffer for `input` using the generic batched
--- iterator instead of the list-only batcher.
--- @param input string
local function populate_results(input)
  local results = {}
  batched_iterator(function()
    return ipairs(large_tbl)
  end, function(entry)
    results[#results + 1] = string.format("%d :: %s", entry, input)
  end, {
    on_complete = function()
      vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
    end,
  })
end

Batching part 3: canceling stale iterations

One missing feature of our implementation is the ability to cancel a stale iteration. Our boilerplate code could make very good use of such a feature: there's no need to process old user inputs since we only want to display the results for the latest user input. This can be achieved by tracking the current instance of populate_results with a current_query_id variable:

--- @class BatchedIteratorOpts
--- @field on_complete? fun():nil
--- @field batch_size? number
---
--- new field (optional like the others — it defaults to "never cancel")
---
--- @field should_cancel? fun():boolean

--- Consume any Lua iterator in batches of `batch_size` entries, deferring
--- each batch via `vim.schedule`. `opts.should_cancel` is polled before
--- every entry, so a stale run can abandon its remaining work early.
--- @generic InvariantState, ControlVar
--- @param iterator_factory fun(): ((fun(invariant_state: InvariantState, control_var: ControlVar):ControlVar), InvariantState, ControlVar)
--- @param on_iteration fun(entry: ControlVar):nil
--- @param opts? BatchedIteratorOpts
local function batched_iterator(iterator_factory, on_iteration, opts)
  opts = opts or {}
  local batch_size = opts.batch_size or 100
  local on_complete = opts.on_complete or (function() end)
  local should_cancel = opts.should_cancel or (function() return false end)
  local iter_fn, invariant_state, control_var = iterator_factory()
  local step
  step = function()
    local num_processed = 0
    while num_processed < batch_size do
      -- new check: bail out entirely once this run has been superseded
      if should_cancel() then return end
      local values = { iter_fn(invariant_state, control_var), }
      control_var = values[1]
      if control_var == nil then
        on_complete()
        return
      end
      on_iteration(unpack(values))
      num_processed = num_processed + 1
    end
    vim.schedule(step)
  end
  step()
end
-- new variable
local current_query_id = 0
--- Rebuild the results buffer for `input`; bumping `current_query_id`
--- makes any batched run started by an earlier keystroke cancel itself.
local populate_results = function(input)
  current_query_id = current_query_id + 1
  local my_query_id = current_query_id
  local results = {}
  local opts = {
    on_complete = function()
      vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
    end,
    -- new argument
    should_cancel = function()
      return my_query_id ~= current_query_id
    end,
  }
  batched_iterator(function()
    return ipairs(large_tbl)
  end, function(entry)
    results[#results + 1] = string.format("%d :: %s", entry, input)
  end, opts)
end

An alternative to batching: throttled iterators

An alternative to batching is to briefly pause execution after every n nanoseconds of work - an approach inspired by mini.pick.

In this solution we'll support generic iterators as before, but use coroutines internally to occasionally pause and resume via vim.schedule.

-- `coroutine.resume` reports failure through its return values rather than
-- raising; re-raise here so coroutine errors aren't silently swallowed.
local function safe_resume(...)
  local did_succeed, resume_err = coroutine.resume(...)
  if not did_succeed then
    error(resume_err)
  end
end
--- @class ThrottledIteratorOpts
--- @field on_complete? fun():nil
--- @field threshold_ns? number
--- @field should_cancel? fun():boolean
--- Drive a generic iterator inside a coroutine, yielding back to Neovim's
--- event loop whenever more than `threshold_ns` nanoseconds have elapsed
--- since the last yield, so the UI can repaint mid-iteration.
--- @generic InvariantState, ControlVar
--- @param iterator_factory fun(): ((fun(invariant_state: InvariantState, control_var: ControlVar):ControlVar), InvariantState, ControlVar)
--- @param on_iteration fun(entry: ControlVar):nil
--- @param opts? ThrottledIteratorOpts
local function throttled_iterator(iterator_factory, on_iteration, opts)
opts = opts or {}
-- default time budget: 10ms (10 * 1,000,000 ns) of work per event-loop tick
local threshold_ns = opts.threshold_ns or (10 * 1000000)
local on_complete = opts.on_complete or (function() end)
local should_cancel = opts.should_cancel or (function() return false end)
-- Build a closure that yields the running coroutine once the time budget is
-- spent; a scheduled `safe_resume` picks it back up on the next tick.
local function create_throttle()
local last_yield = vim.loop.hrtime()
return function()
local now = vim.loop.hrtime()
if (now - last_yield) >= threshold_ns then
last_yield = now
local thread = coroutine.running()
-- scheduling ensures that `yield` is called before `resume`
vim.schedule(function() safe_resume(thread) end)
coroutine.yield()
end
end
end
-- The whole traversal runs inside one coroutine so `maybe_pause` can
-- suspend it at any point without losing iterator state.
local function process()
local maybe_pause = create_throttle()
local iter_fn, invariant_state, control_var = iterator_factory()
while true do
if should_cancel() then return end
maybe_pause()
-- capture all iterator return values; the first is the control
-- variable, and nil there means iteration is over
local values = { iter_fn(invariant_state, control_var), }
control_var = values[1]
if control_var == nil then
on_complete()
return
end
on_iteration(unpack(values))
end
end
safe_resume(coroutine.create(process))
end
local current_query_id = 0
--- Rebuild the results buffer for `input` with the throttled iterator;
--- bumping `current_query_id` makes any older in-flight run cancel itself.
local populate_results = function(input)
  current_query_id = current_query_id + 1
  local my_query_id = current_query_id
  local results = {}
  local function collect(entry)
    results[#results + 1] = string.format("%d :: %s", entry, input)
  end
  throttled_iterator(function()
    return ipairs(large_tbl)
  end, collect, {
    on_complete = function()
      vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
    end,
    should_cancel = function()
      return my_query_id ~= current_query_id
    end,
  })
end

The main advantage of throttled_iterator over batched_iterator is that the former is hardware-independent: batched_iterator yields after every batch of n items, regardless of how long those batches take to process on the machine in question, whereas a time budget of 10 milliseconds is always 10 milliseconds, no matter the hardware.

Bonus: A class-based builder pattern

I'm personally a fan of the builder pattern, so I prefer to use class-based functions like the following:

--- A builder-style wrapper around the throttled-iterator pattern: configure
--- a yield threshold, a cancellation check, and a completion callback, then
--- consume the iterator with `:each`.
--- @class Throttle
--- @field _iterator_factory fun(): ((fun(invariant_state: any, control_var: any):any), any, any)
--- @field _should_cancel fun():boolean
--- @field _threshold_ns number
--- @field _on_complete fun():nil
local Throttle = {}
Throttle.__index = Throttle

--- Create a Throttle over `iterator_factory` with sensible defaults: a 10ms
--- yield threshold, no cancellation, and a no-op completion callback.
--- @generic InvariantState, ControlVar
--- @param iterator_factory fun(): ((fun(invariant_state: InvariantState, control_var: ControlVar):ControlVar), InvariantState, ControlVar)
--- @return Throttle
function Throttle:new(iterator_factory)
  -- 10ms expressed in nanoseconds, matching vim.loop.hrtime's unit
  local ten_ms_in_ns = 10 * 1000000
  local this = {
    _iterator_factory = iterator_factory,
    _threshold_ns = ten_ms_in_ns,
    _should_cancel = function() return false end,
    _on_complete = function() end,
  }
  setmetatable(this, Throttle)
  return this
end

--- Set how long (in nanoseconds) `:each` may run before yielding to the UI.
--- @param threshold_ns number
--- @return Throttle self, for chaining
function Throttle:threshold_ns(threshold_ns)
  self._threshold_ns = threshold_ns
  return self
end

--- Set a predicate polled before every entry; returning true aborts the run.
--- @param should_cancel fun():boolean
--- @return Throttle self, for chaining
function Throttle:should_cancel(should_cancel)
  self._should_cancel = should_cancel
  return self
end

--- Set the callback invoked once the iterator is exhausted.
--- @param on_complete fun():nil
--- @return Throttle self, for chaining
function Throttle:on_complete(on_complete)
  self._on_complete = on_complete
  return self
end

--- Drive the iterator to completion inside a coroutine, calling
--- `on_iteration` per entry and yielding control back to Neovim whenever
--- the configured threshold elapses.
--- @generic T
--- @param on_iteration fun(entry:T):nil
--- @return nil
function Throttle:each(on_iteration)
  -- Build a closure that suspends the running coroutine once the time
  -- budget is spent; a scheduled `safe_resume` picks it back up next tick.
  local function create_throttle()
    local last_yield = vim.loop.hrtime()
    return function()
      local now = vim.loop.hrtime()
      if (now - last_yield) >= self._threshold_ns then
        last_yield = now
        local thread = coroutine.running()
        -- scheduling ensures that `yield` is called before `resume`
        vim.schedule(function() safe_resume(thread) end)
        coroutine.yield()
      end
    end
  end
  local function process()
    local maybe_pause = create_throttle()
    local iter_fn, invariant_state, control_var = self._iterator_factory()
    while true do
      if self._should_cancel() then return end
      maybe_pause()
      -- capture all iterator return values; a nil control variable (the
      -- first value) means iteration is over
      local values = { iter_fn(invariant_state, control_var), }
      control_var = values[1]
      if control_var == nil then
        self._on_complete()
        return
      end
      on_iteration(unpack(values))
    end
  end
  safe_resume(coroutine.create(process))
end
local current_query_id = 0
--- Rebuild the results buffer for `input` via the Throttle builder; stale
--- runs cancel themselves once `current_query_id` moves past theirs.
local populate_results = function(input)
  current_query_id = current_query_id + 1
  local this_query_id = current_query_id
  local results = {}
  Throttle
    :new(function() return ipairs(large_tbl) end)
    -- 10ms of work per event-loop tick; a bare `10` here would mean
    -- 10 *nanoseconds*, forcing a yield on nearly every iteration
    :threshold_ns(10 * 1000000)
    :should_cancel(function()
      return this_query_id ~= current_query_id
    end)
    :on_complete(function()
      vim.api.nvim_buf_set_lines(results_bufnr, 0, -1, strict_indexing, results)
    end)
    :each(function(entry)
      table.insert(results, ("%d :: %s"):format(entry, input))
    end)
end

I'll leave the Batch class as an exercise to the reader - thanks for reading.

you might also like:

Lesson Two: Address Contemporary Issues

September 14, 2022

Comics are more interesting when they relate to real-world events

comics
reading
rant
© elan medoff