mirror of https://github.com/livebook-dev/livebook.git

Overlay doctest results (#1936)
commit 265a1dec6e (parent 532c86057b)
9 changed files with 304 additions and 155 deletions
@@ -23,3 +23,20 @@ to be consistent with the editor.
   --ansi-color-light-cyan: #56b6c2;
   --ansi-color-light-white: white;
 }
+
+/* The same as above but brightened by 10% */
+[data-editor-theme="default"] {
+  --ansi-color-red: #dd1f53;
+  --ansi-color-green: #5ab756;
+  --ansi-color-yellow: #d9930b;
+  --ansi-color-blue: #4d8cfb;
+  --ansi-color-magenta: #b02fbb;
+  --ansi-color-cyan: #05a4d0;
+  --ansi-color-light-black: #676e7b;
+  --ansi-color-light-red: #f35c57;
+  --ansi-color-light-green: #42dcab;
+  --ansi-color-light-yellow: #fdea9a;
+  --ansi-color-light-blue: #77c0fc;
+  --ansi-color-light-magenta: #d181e5;
+  --ansi-color-light-cyan: #64ccda;
+}
@@ -160,35 +160,45 @@ Also some spacing adjustments.
 }

 /* To style circles for doctest results */
-.line-circle-red {
-  background-color: red;
-  box-sizing: border-box;
-  border-radius: 100%;
-  border-style: solid;
-  border-width: 2px;
-  max-width: 15px;
-  height: 15px !important;
-  margin: 3px;
-}
-
-.line-circle-green {
-  background-color: green;
-  box-sizing: border-box;
-  border-radius: 100%;
-  border-style: solid;
-  border-width: 2px;
-  max-width: 15px;
-  height: 15px !important;
-  margin: 3px;
-}
-
+.line-circle-red,
+.line-circle-green,
 .line-circle-grey {
-  background-color: grey;
-  box-sizing: border-box;
-  border-radius: 100%;
-  border-style: solid;
-  border-width: 2px;
-  max-width: 15px;
-  height: 15px !important;
-  margin: 3px;
+  height: 100%;
+  position: relative;
 }
+
+.line-circle-red::after,
+.line-circle-green::after,
+.line-circle-grey::after {
+  box-sizing: border-box;
+  border-radius: 2px;
+  content: "";
+  display: block;
+  height: 12px;
+  width: 12px;
+  margin-left: 6px;
+  position: absolute;
+  top: 50%;
+  transform: translateY(-50%);
+}
+
+.line-circle-red::after {
+  background-color: rgb(233 117 121);
+}
+
+.line-circle-green::after {
+  background-color: rgb(74 222 128);
+}
+
+.line-circle-grey::after {
+  background-color: rgb(97 117 138);
+}
+
+.doctest-failure-overlay {
+  @apply font-editor;
+  white-space: pre;
+  background-color: rgba(0, 0, 0, 0.05);
+  padding-left: calc(68px + 6ch);
+  position: absolute;
+  width: 100%;
+}
@@ -244,22 +244,21 @@ const Cell = {
     );

     this.handleEvent(`start_evaluation:${this.props.cellId}`, () => {
-      liveEditor.clearDoctestDecorations();
+      liveEditor.clearDoctests();
     });

     this.handleEvent(
       `doctest_result:${this.props.cellId}`,
-      ({ state, line }) => {
-        console.log({ state, line });
+      ({ state, column, line, end_line, contents }) => {
         switch (state) {
           case "evaluating":
-            liveEditor.addEvaluatingDoctestDecoration(line);
+            liveEditor.addEvaluatingDoctest(line);
             break;
           case "success":
-            liveEditor.addSuccessDoctestDecoration(line);
+            liveEditor.addSuccessDoctest(line);
             break;
           case "failed":
-            liveEditor.addFailedDoctestDecoration(line);
+            liveEditor.addFailedDoctest(column, line, end_line, contents);
             break;
         }
       }
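
For orientation only: the fields destructured above mirror the doctest_result maps built on the Elixir side later in this diff. An illustrative Elixir sketch of the three payload shapes (values are made up; on the wire the atoms become strings and, for failures, contents arrives as a list of HTML lines, as the SessionLive hunk below shows):

    # Illustrative payloads as built on the Elixir side of this commit.
    evaluating = %{line: 4, state: :evaluating}
    success = %{line: 17, state: :success}

    failed = %{
      line: 4,
      end_line: 5,
      column: 6,
      state: :failed,
      # Still a single ANSI-escaped binary here; SessionLive converts it to
      # HTML lines before pushing the event to the client.
      contents: "\e[31mexpected exception ArgumentError but got RuntimeError\e[0m"
    }

    [evaluating, success, failed]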
@@ -6,7 +6,7 @@ import MonacoEditorAdapter from "./live_editor/monaco_editor_adapter";
 import HookServerAdapter from "./live_editor/hook_server_adapter";
 import RemoteUser from "./live_editor/remote_user";
 import { replacedSuffixLength } from "../../lib/text_utils";
-import { settingsStore } from "../../lib/settings";
+import { settingsStore, EDITOR_FONT_SIZE } from "../../lib/settings";

 /**
  * Mounts cell source editor with real-time collaboration mechanism.
@@ -35,6 +35,7 @@ class LiveEditor {
     this._onBlur = [];
     this._onCursorSelectionChange = [];
     this._remoteUserByClientId = {};
+
     /* For doctest decorations we store the params to create the
      * decorations and also the result of creating the decorations.
      * The params are IModelDeltaDecoration from https://microsoft.github.io/monaco-editor/typedoc/interfaces/editor.IModelDeltaDecoration.html
@@ -45,6 +46,9 @@ class LiveEditor {
       decorationCollection: null,
     };

+    this._doctestZones = [];
+    this._doctestOverlays = [];
+
     const serverAdapter = new HookServerAdapter(hook, cellId, tag);
     this.editorClient = new EditorClient(serverAdapter, revision);

@@ -238,6 +242,8 @@ class LiveEditor {
   _mountEditor() {
     const settings = settingsStore.get();

+    this.settings = settings;
+
     this.editor = monaco.editor.create(this.container, {
       language: this.language,
       value: this.source,
@@ -579,9 +585,15 @@ class LiveEditor {
     });
   }

-  clearDoctestDecorations() {
+  clearDoctests() {
     this._doctestDecorations.decorationCollection.clear();
     this._doctestDecorations.deltaDecorations = {};
+    this._doctestOverlays.forEach((overlay) =>
+      this.editor.removeOverlayWidget(overlay)
+    );
+    this.editor.changeViewZones((changeAccessor) => {
+      this._doctestZones.forEach((zone) => changeAccessor.removeZone(zone));
+    });
   }

   _createDoctestDecoration(lineNumber, className) {
@@ -601,15 +613,52 @@ class LiveEditor {
     this._doctestDecorations.decorationCollection.set(decos);
   }

-  addSuccessDoctestDecoration(line) {
+  _addDoctestOverlay(column, line, endLine, contents) {
+    let overlayDom = document.createElement("div");
+    overlayDom.innerHTML = contents.join("\n");
+    overlayDom.classList.add("doctest-failure-overlay");
+    overlayDom.style.fontSize = `${this.settings.editor_font_size}px`;
+    overlayDom.style.paddingLeft =
+      this.settings.editor_font_size === EDITOR_FONT_SIZE.large
+        ? `calc(74px + ${column}ch)`
+        : `calc(68px + ${column}ch)`;
+
+    // https://microsoft.github.io/monaco-editor/api/interfaces/monaco.editor.ioverlaywidget.html
+    let overlayWidget = {
+      getId: () => `doctest-overlay-${line}`,
+      getDomNode: () => overlayDom,
+      getPosition: () => null,
+    };
+    this.editor.addOverlayWidget(overlayWidget);
+    this._doctestOverlays.push(overlayWidget);
+
+    this.editor.changeViewZones((changeAccessor) => {
+      this._doctestZones.push(
+        changeAccessor.addZone({
+          afterLineNumber: endLine,
+          heightInLines: contents.length,
+          domNode: document.createElement("div"),
+          onDomNodeTop: (top) => {
+            overlayDom.style.top = top + "px";
+          },
+          onComputedHeight: (height) => {
+            overlayDom.style.height = height + "px";
+          },
+        })
+      );
+    });
+  }
+
+  addSuccessDoctest(line) {
     this._addDoctestDecoration(line, "line-circle-green");
   }

-  addFailedDoctestDecoration(line) {
+  addFailedDoctest(column, line, endLine, contents) {
     this._addDoctestDecoration(line, "line-circle-red");
+    this._addDoctestOverlay(column, line, endLine, contents);
   }

-  addEvaluatingDoctestDecoration(line) {
+  addEvaluatingDoctest(line) {
     this._addDoctestDecoration(line, "line-circle-grey");
   }
 }
@@ -462,8 +462,12 @@ const Session = {
    * (e.g. if the user starts selecting some text within the editor)
    */
   handleDocumentMouseDown(event) {
-    // If the click is outside the notebook element, keep the focus as is
-    if (!event.target.closest(`[data-el-notebook]`)) {
+    if (
+      // If the click is outside the notebook element, keep the focus as is
+      !event.target.closest(`[data-el-notebook]`) ||
+      // If the click is inside the custom doctest editor widget, keep the focus as is
+      event.target.closest(`.doctest-failure-overlay`)
+    ) {
       if (this.insertMode) {
         this.setInsertMode(false);
       }
@@ -458,7 +458,7 @@ defmodule Livebook.Runtime.Evaluator do
       if ebin_path() do
         new_context.env.context_modules
         |> Enum.filter(&Livebook.Intellisense.Docs.any_docs?/1)
-        |> Livebook.Runtime.Evaluator.Doctests.run()
+        |> Livebook.Runtime.Evaluator.Doctests.run(code)
       end

       state = put_context(state, ref, new_context)
@@ -8,27 +8,25 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
   @doc """
   Runs doctests in the given modules.
   """
-  @spec run(list(module())) :: :ok
-  def run(modules)
+  @spec run(list(module()), String.t()) :: :ok
+  def run(modules, code)

-  def run([]), do: :ok
+  def run([], _code), do: :ok

-  def run(modules) do
+  def run(modules, code) do
     case define_test_module(modules) do
       {:ok, test_module} ->
         if test_module.tests != [] do
-          tests =
-            test_module.tests
-            |> Enum.sort_by(& &1.tags.doctest_line)
-            |> Enum.map(fn test ->
-              report_doctest_state(:evaluating, test)
-              test = run_test(test)
-              report_doctest_state(:success_or_failed, test)
-              test
-            end)
+          lines = String.split(code, ["\r\n", "\n"])

-          formatted = format_results(tests)
-          put_output({:text, formatted})
+          test_module.tests
+          |> Enum.sort_by(& &1.tags.doctest_line)
+          |> Enum.each(fn test ->
+            report_doctest_evaluating(test)
+            test = run_test(test)
+            report_doctest_result(test, lines)
+            test
+          end)
         end

         delete_test_module(test_module)
@@ -40,24 +38,61 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
     :ok
   end

-  defp report_doctest_state(:evaluating, test) do
+  defp report_doctest_evaluating(test) do
     result = %{
-      doctest_line: test.tags.doctest_line,
+      line: test.tags.doctest_line,
       state: :evaluating
     }

     put_output({:doctest_result, result})
   end

-  defp report_doctest_state(:success_or_failed, test) do
+  defp report_doctest_result(%{state: nil} = test, _lines) do
     result = %{
-      doctest_line: test.tags.doctest_line,
-      state: get_in(test, [Access.key(:state), Access.elem(0)]) || :success
+      line: test.tags.doctest_line,
+      state: :success
     }

     put_output({:doctest_result, result})
   end

+  defp report_doctest_result(%{state: {:failed, failure}} = test, lines) do
+    line = test.tags.doctest_line
+    [prompt_line | _] = lines = Enum.drop(lines, line - 1)
+
+    interval =
+      lines
+      |> Enum.take_while(&(not end_of_doctest?(&1)))
+      |> length()
+      |> Kernel.-(1)
+
+    # TODO: end_line must come from Elixir to be reliable
+    result = %{
+      column: count_columns(prompt_line, 0),
+      line: line,
+      end_line: interval + line,
+      state: :failed,
+      contents: IO.iodata_to_binary(format_failure(failure, test))
+    }
+
+    put_output({:doctest_result, result})
+  end
+
+  defp count_columns(" " <> rest, counter), do: count_columns(rest, counter + 1)
+  defp count_columns("\t" <> rest, counter), do: count_columns(rest, counter + 2)
+  defp count_columns(_, counter), do: counter
+
+  defp end_of_doctest?(line) do
+    case String.trim_leading(line) do
+      "" -> true
+      "```" <> _ -> true
+      "~~~" <> _ -> true
+      "'''" <> _ -> true
+      "\"\"\"" <> _ -> true
+      _ -> false
+    end
+  end
+
   defp define_test_module(modules) do
     id =
       modules
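
A minimal, self-contained Elixir sketch of the coordinate heuristic above: drop the lines before the doctest, measure the leading whitespace of the prompt line (a space counts as 1 column, a tab as 2), and scan forward until a blank line or a fence-like marker ends the snippet. The helper bodies are copied from this hunk; the module name and the sample cell source are made up for illustration:

    defmodule DoctestSpanSketch do
      # Mirrors report_doctest_result/2 above: given the cell source and the
      # 1-based line of a doctest prompt, derive the prompt column and the
      # last line of the doctest snippet.
      def span(code, doctest_line) do
        lines = String.split(code, ["\r\n", "\n"])
        [prompt_line | _] = lines = Enum.drop(lines, doctest_line - 1)

        interval =
          lines
          |> Enum.take_while(&(not end_of_doctest?(&1)))
          |> length()
          |> Kernel.-(1)

        %{column: count_columns(prompt_line, 0), line: doctest_line, end_line: doctest_line + interval}
      end

      defp count_columns(" " <> rest, counter), do: count_columns(rest, counter + 1)
      defp count_columns("\t" <> rest, counter), do: count_columns(rest, counter + 2)
      defp count_columns(_, counter), do: counter

      defp end_of_doctest?(line) do
        case String.trim_leading(line) do
          "" -> true
          "```" <> _ -> true
          "~~~" <> _ -> true
          "'''" <> _ -> true
          "\"\"\"" <> _ -> true
          _ -> false
        end
      end
    end

    # Hypothetical cell source; the doctest prompt sits on line 5, indented by 6 spaces.
    code =
      Enum.join(
        [
          "defmodule Sketch do",
          "  @doc \"\"\"",
          "  Doubles a number.",
          "",
          "      iex> Sketch.double(2)",
          "      4",
          "",
          "  \"\"\"",
          "  def double(x), do: x * 2",
          "end"
        ],
        "\n"
      )

    DoctestSpanSketch.span(code, 5)
    #=> %{column: 6, end_line: 6, line: 5}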
@@ -150,42 +185,6 @@ defmodule Livebook.Runtime.Evaluator.Doctests do

   # Formatting

-  defp format_results(tests) do
-    filed_tests = Enum.reject(tests, &(&1.state == nil))
-
-    test_count = length(tests)
-    failure_count = length(filed_tests)
-
-    doctests_pl = pluralize(test_count, "doctest", "doctests")
-    failures_pl = pluralize(failure_count, "failure", "failures")
-
-    headline =
-      colorize(
-        if(failure_count == 0, do: :green, else: :red),
-        "#{test_count} #{doctests_pl}, #{failure_count} #{failures_pl}"
-      )
-
-    failures =
-      for {test, idx} <- Enum.with_index(filed_tests) do
-        {:failed, failure} = test.state
-
-        name =
-          test.name
-          |> Atom.to_string()
-          |> String.replace(~r/ \(\d+\)$/, "")
-
-        line = test.tags.doctest_line
-
-        [
-          "\n\n",
-          "#{idx + 1}) #{name} (line #{line})\n",
-          format_failure(failure, test)
-        ]
-      end
-
-    IO.iodata_to_binary([headline, failures])
-  end
-
   defp format_failure({:error, %ExUnit.AssertionError{} = reason, _stack}, _test) do
     diff =
       ExUnit.Formatter.format_assertion_diff(
@@ -198,58 +197,32 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
     expected = diff[:right]
     got = diff[:left]

-    {expected_label, got_label, source} =
+    {expected_label, got_label} =
       if reason.doctest == ExUnit.AssertionError.no_value() do
-        {"right", "left", nil}
+        {"right", "left"}
       else
-        {"expected", "got", String.trim(reason.doctest)}
+        {"expected", "got"}
       end

     message_io =
       if_io(reason.message != "Doctest failed", fn ->
-        message =
-          reason.message
-          |> String.replace_prefix("Doctest failed: ", "")
-          |> pad(@pad_size)
-
-        [colorize(:red, message), "\n"]
-      end)
-
-    source_io =
-      if_io(source, fn ->
-        [
-          String.duplicate(" ", @pad_size),
-          format_label("doctest"),
-          "\n",
-          pad(source, @pad_size + 2)
-        ]
+        message = String.replace_prefix(reason.message, "Doctest failed: ", "")
+        colorize(:red, message)
       end)

     expected_io =
       if_io(expected, fn ->
-        [
-          "\n",
-          String.duplicate(" ", @pad_size),
-          format_label(expected_label),
-          "\n",
-          String.duplicate(" ", @pad_size + 2),
-          expected
-        ]
+        [format_label(expected_label), "\n ", expected]
       end)

     got_io =
       if_io(got, fn ->
-        [
-          "\n",
-          String.duplicate(" ", @pad_size),
-          format_label(got_label),
-          "\n",
-          String.duplicate(" ", @pad_size + 2),
-          got
-        ]
+        [format_label(got_label), "\n ", got]
       end)

-    message_io ++ source_io ++ expected_io ++ got_io
+    [message_io, expected_io, got_io]
+    |> Enum.filter(&(&1 != []))
+    |> Enum.intersperse("\n")
   end

   defp format_failure({kind, reason, stacktrace}, test) do
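
The failure text is now assembled by keeping only the sections that apply and separating them with single newlines, instead of padding every line for the text output. A tiny illustrative sketch of that join (section contents made up; in the diff they come from if_io/2, which yields [] when a section does not apply):

    message_io = ["expected exception ArgumentError but got RuntimeError"]
    # No diff sections for this failure, so they collapse to empty lists.
    expected_io = []
    got_io = ["got:", "\n  ", "** (RuntimeError) oops"]

    [message_io, expected_io, got_io]
    |> Enum.filter(&(&1 != []))
    |> Enum.intersperse("\n")
    |> IO.iodata_to_binary()
    #=> "expected exception ArgumentError but got RuntimeError\ngot:\n  ** (RuntimeError) oops"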
@@ -269,12 +242,11 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
     end

     if stacktrace == [] do
-      pad(banner, @pad_size)
+      banner
     else
       [
-        pad(banner, @pad_size),
+        banner,
         "\n",
-        String.duplicate(" ", @pad_size),
         format_label("stacktrace"),
         format_stacktrace(stacktrace, test.module, test.name)
       ]
@@ -286,7 +258,7 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
   defp format_stacktrace(stacktrace, test_case, test) do
     for entry <- stacktrace do
       message = format_stacktrace_entry(entry, test_case, test)
-      "\n" <> pad(message, @pad_size + 2)
+      "\n" <> pad(message, 2)
     end
   end

@@ -330,9 +302,6 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
     |> IO.iodata_to_binary()
   end

-  defp pluralize(1, singular, _plural), do: singular
-  defp pluralize(_, _singular, plural), do: plural
-
   defp put_output(output) do
     gl = Process.group_leader()
     ref = make_ref()
@@ -1777,10 +1777,18 @@ defmodule LivebookWeb.SessionLive do
          _prev_socket,
          {:add_cell_evaluation_output, _client_id, cell_id, {:doctest_result, result}}
        ) do
-    push_event(socket, "doctest_result:#{cell_id}", %{
-      state: result.state,
-      line: result.doctest_line
-    })
+    result =
+      Map.replace_lazy(
+        result,
+        :contents,
+        fn contents ->
+          contents
+          |> LivebookWeb.Helpers.ANSI.ansi_string_to_html_lines()
+          |> Enum.map(&Phoenix.HTML.safe_to_string/1)
+        end
+      )
+
+    push_event(socket, "doctest_result:#{cell_id}", result)
   end

   defp after_operation(
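
Map.replace_lazy/3 only transforms :contents when that key is present, so :evaluating and :success payloads pass through untouched and only failures pay for the ANSI-to-HTML conversion. A hedged sketch of that behaviour, with String.split/2 standing in for LivebookWeb.Helpers.ANSI.ansi_string_to_html_lines/1 (which is not part of this diff):

    convert = fn result ->
      # Only failed results carry :contents; other maps come back unchanged.
      Map.replace_lazy(result, :contents, fn contents -> String.split(contents, "\n") end)
    end

    convert.(%{line: 7, state: :evaluating})
    #=> %{line: 7, state: :evaluating}

    convert.(%{column: 6, contents: "line one\nline two", end_line: 8, line: 7, state: :failed})
    #=> %{column: 6, contents: ["line one", "line two"], end_line: 8, line: 7, state: :failed}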
@@ -416,8 +416,8 @@ defmodule Livebook.Runtime.EvaluatorTest do
           precinct: 99
         }

-        iex> Livebook.Runtime.EvaluatorTest.Doctests.data()
-        %{name: "Jake Peralta", description: "NYPD detective"}
+        iex> Livebook.Runtime.EvaluatorTest.Doctests.data()
+        %{name: "Jake Peralta", description: "NYPD detective"}
     """
     def data() do
       %{
@@ -447,15 +447,108 @@ defmodule Livebook.Runtime.EvaluatorTest do

       Evaluator.evaluate_code(evaluator, code, :code_1, [])

-      assert_receive {:runtime_evaluation_output, :code_1, {:text, doctest_result}}
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 4, state: :evaluating}}}

-      assert doctest_result =~ "8 doctests, 7 failures"
-      assert doctest_result =~ "Doctest did not compile, got: (TokenMissingError)"
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result,
+                       %{
+                         column: 6,
+                         contents:
+                           "\e[31mexpected exception ArgumentError but got RuntimeError with message \"oops\"\e[0m",
+                         end_line: 5,
+                         line: 4,
+                         state: :failed
+                       }}}

-      assert doctest_result =~
-               "expected exception ArgumentError but got RuntimeError with message \"oops\""
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 7, state: :evaluating}}}

-      assert_receive {:runtime_evaluation_response, :code_1, {:ok, _}, metadata()}
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result,
+                       %{
+                         column: 6,
+                         contents:
+                           "\e[31mDoctest did not compile, got: (TokenMissingError) " <> _,
+                         end_line: 8,
+                         line: 7,
+                         state: :failed
+                       }}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 10, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result,
+                       %{
+                         column: 6,
+                         contents: "\e[31mmatch (=) failed" <> _,
+                         end_line: 10,
+                         line: 10,
+                         state: :failed
+                       }}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 12, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {
+                        :doctest_result,
+                        %{
+                          column: 6,
+                          contents: "\e[31mExpected truthy, got false\e[0m",
+                          end_line: 13,
+                          line: 12,
+                          state: :failed
+                        }
+                      }}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 17, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 17, state: :success}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 24, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {
+                        :doctest_result,
+                        %{column: 4, contents: _, end_line: 25, line: 24, state: :failed}
+                      }}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 36, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {
+                        :doctest_result,
+                        %{
+                          column: 6,
+                          contents:
+                            "\e[31m** (Protocol.UndefinedError) protocol Enumerable not implemented for 1 of type Integer. " <>
+                              _,
+                          end_line: 37,
+                          line: 36,
+                          state: :failed
+                        }
+                      }}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {:doctest_result, %{line: 44, state: :evaluating}}}
+
+      assert_receive {:runtime_evaluation_output, :code_1,
+                      {
+                        :doctest_result,
+                        %{
+                          column: 6,
+                          contents: "\e[31m** (EXIT from #PID<" <> _,
+                          end_line: 45,
+                          line: 44,
+                          state: :failed
+                        }
+                      }}
     end
   end