apps/remote_control/lib/lexical/remote_control/search/indexer.ex (49 additions, 44 deletions)
@@ -101,57 +101,62 @@ defmodule Lexical.RemoteControl.Search.Indexer do

     total_bytes = paths_to_sizes |> Enum.map(&elem(&1, 1)) |> Enum.sum()

-    {on_update_progess, on_complete} = Progress.begin_percent("Indexing source code", total_bytes)
+    if total_bytes > 0 do
+      {on_update_progess, on_complete} =
+        Progress.begin_percent("Indexing source code", total_bytes)

-    initial_state = {0, []}
+      initial_state = {0, []}

-    chunk_fn = fn {path, file_size}, {block_size, paths} ->
-      new_block_size = file_size + block_size
-      new_paths = [path | paths]
+      chunk_fn = fn {path, file_size}, {block_size, paths} ->
+        new_block_size = file_size + block_size
+        new_paths = [path | paths]

-      if new_block_size >= @bytes_per_block do
-        {:cont, new_paths, initial_state}
-      else
-        {:cont, {new_block_size, new_paths}}
-      end
-    end
+        if new_block_size >= @bytes_per_block do
+          {:cont, new_paths, initial_state}
+        else
+          {:cont, {new_block_size, new_paths}}
+        end
+      end

-    after_fn = fn
-      {_, []} ->
-        {:cont, []}
+      after_fn = fn
+        {_, []} ->
+          {:cont, []}

-      {_, paths} ->
-        {:cont, paths, []}
-    end
+        {_, paths} ->
+          {:cont, paths, []}
+      end

-    paths_to_sizes
-    |> Stream.chunk_while(initial_state, chunk_fn, after_fn)
-    |> Task.async_stream(
-      fn chunk ->
-        block_bytes = chunk |> Enum.map(&Map.get(path_to_size_map, &1)) |> Enum.sum()
-        result = Enum.map(chunk, processor)
-        on_update_progess.(block_bytes, "Indexing")
-        result
-      end,
-      timeout: timeout
-    )
-    |> Stream.flat_map(fn
-      {:ok, entry_chunks} -> entry_chunks
-      _ -> []
-    end)
-    # The next bit is the only way i could figure out how to
-    # call complete once the stream was realized
-    |> Stream.transform(
-      fn -> nil end,
-      fn chunk_items, acc ->
-        # By the chunk items list directly, each transformation
-        # will flatten the resulting steam
-        {chunk_items, acc}
-      end,
-      fn _acc ->
-        on_complete.()
-      end
-    )
+      paths_to_sizes
+      |> Stream.chunk_while(initial_state, chunk_fn, after_fn)
+      |> Task.async_stream(
+        fn chunk ->
+          block_bytes = chunk |> Enum.map(&Map.get(path_to_size_map, &1)) |> Enum.sum()
+          result = Enum.map(chunk, processor)
+          on_update_progess.(block_bytes, "Indexing")
+          result
+        end,
+        timeout: timeout
+      )
+      |> Stream.flat_map(fn
+        {:ok, entry_chunks} -> entry_chunks
+        _ -> []
+      end)
+      # The next bit is the only way I could figure out how to
+      # call complete once the stream was realized
+      |> Stream.transform(
+        fn -> nil end,
+        fn chunk_items, acc ->
+          # By returning the chunk items list directly, each transformation
+          # will flatten the resulting stream
+          {chunk_items, acc}
+        end,
+        fn _acc ->
+          on_complete.()
+        end
+      )
+    else
+      []
+    end
   end

   defp path_to_sizes(paths) do
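The Stream.transform/4 call at the end of the pipeline is the part the inline comment refers to: its final function runs only once the stream has been fully realized, which makes it a usable hook for the on_complete progress callback, and returning each chunk's list from the reducer flattens the chunks back into a stream of individual entries. Here is a minimal, self-contained sketch of that pattern; the report_progress and report_done callbacks are hypothetical stand-ins for the Progress functions, not the real API:

```elixir
# Sketch only: the Stream.transform/4 completion-callback pattern used above,
# with made-up callbacks in place of the Progress module.
report_progress = fn count -> IO.puts("processed #{count} items") end
report_done = fn -> IO.puts("all done") end

1..10
|> Stream.chunk_every(3)
|> Stream.map(fn chunk ->
  report_progress.(length(chunk))
  chunk
end)
|> Stream.transform(
  # start function: builds the (unused) accumulator
  fn -> nil end,
  # reducer: returning the chunk list flattens it into the output stream
  fn chunk, acc -> {chunk, acc} end,
  # after function: runs once, after the stream has been fully consumed
  fn _acc -> report_done.() end
)
|> Enum.to_list()
#=> prints four "processed ..." lines, then "all done", and returns [1, 2, ..., 10]
```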
Changes to Lexical.RemoteControl.Search.IndexerTest:

@@ -1,5 +1,6 @@
 defmodule Lexical.RemoteControl.Search.IndexerTest do
   alias Lexical.Project
+  alias Lexical.RemoteControl.Dispatch
   alias Lexical.RemoteControl.Search.Indexer
   alias Lexical.RemoteControl.Search.Indexer.Entry

@@ -24,7 +25,7 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do

   setup do
     project = project()
-    start_supervised(Lexical.RemoteControl.Dispatch)
+    start_supervised(Dispatch)
     {:ok, project: project}
   end

@@ -46,12 +47,8 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do

   @ephemeral_file_name "ephemeral.ex"

-  def with_an_ephemeral_file(%{project: project}) do
+  def with_an_ephemeral_file(%{project: project}, file_contents) do
     file_path = Path.join([Project.root_path(project), "lib", @ephemeral_file_name])
-    file_contents = ~s[
-      defmodule Ephemeral do
-      end
-    ]
     File.write!(file_path, file_contents)

     on_exit(fn ->
@@ -61,6 +58,14 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do
     {:ok, file_path: file_path}
   end

+  def with_a_file_with_a_module(context) do
+    file_contents = ~s[
+      defmodule Ephemeral do
+      end
+    ]
+    with_an_ephemeral_file(context, file_contents)
+  end
+
   def with_an_existing_index(%{project: project}) do
     {:ok, entry_stream} = Indexer.create_index(project)
     entries = Enum.to_list(entry_stream)
@@ -69,7 +74,7 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do
   end

   describe "update_index/2 encounters a new file" do
-    setup [:with_an_existing_index, :with_an_ephemeral_file]
+    setup [:with_an_existing_index, :with_a_file_with_a_module]

     test "the ephemeral file is not previously present in the index", %{entries: entries} do
       refute Enum.any?(entries, fn entry -> Path.basename(entry.path) == @ephemeral_file_name end)
@@ -83,8 +88,31 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do
     end
   end

+  def with_an_ephemeral_empty_file(context) do
+    with_an_ephemeral_file(context, "")
+  end
+
+  describe "update_index/2 encounters a zero-length file" do
+    setup [:with_an_existing_index, :with_an_ephemeral_empty_file]
+
+    test "and does nothing", %{project: project} do
+      {:ok, entry_stream, []} = Indexer.update_index(project, FakeBackend)
+      assert [] = Enum.to_list(entry_stream)
+    end
+
+    test "there is no progress", %{project: project} do
+      # this ensures we don't emit progress with a total byte size of 0, which will
+      # cause an ArithmeticError
+
+      Dispatch.register_listener(self(), :all)
+      {:ok, entry_stream, []} = Indexer.update_index(project, FakeBackend)
+      assert [] = Enum.to_list(entry_stream)
+      refute_receive _
+    end
+  end
+
   describe "update_index/2" do
-    setup [:with_an_ephemeral_file, :with_an_existing_index]
+    setup [:with_a_file_with_a_module, :with_an_existing_index]

     test "sees the ephemeral file", %{entries: entries} do
       assert Enum.any?(entries, fn entry -> Path.basename(entry.path) == @ephemeral_file_name end)
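For context on why the `if total_bytes > 0` guard in indexer.ex and these zero-length-file tests exist: Progress.begin_percent is not shown in this diff, but any percent-style progress computation that divides by the reported total will raise once that total is 0, since division by zero raises ArithmeticError in Elixir. A hedged sketch of the failure mode; percent/2 here is an assumed shape, not the real Progress code:

```elixir
# Assumed shape of a percent calculation; not the actual Progress implementation.
defmodule PercentSketch do
  def percent(done_bytes, total_bytes) do
    # Division by zero raises ArithmeticError, which is what the
    # `if total_bytes > 0` guard in the indexer now prevents.
    trunc(done_bytes / total_bytes * 100)
  end
end

PercentSketch.percent(512, 2048)
#=> 25

PercentSketch.percent(0, 0)
#=> ** (ArithmeticError) bad argument in arithmetic expression
```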