Doctest updates (#1944)

Jonatan Kłosko 2023-05-30 22:03:20 +02:00 committed by GitHub
parent 2d265541f0
commit 3c56b87154
12 changed files with 355 additions and 287 deletions

View file

@@ -1,8 +1,8 @@
 /*
 Variables for HTML-ized ANSI string.
-Many colors are taken from the One Light theme
-to be consistent with the editor.
+Many colors are taken from the One Light and One Dark theme for
+consistency with the editor.
 */
 
 :root {
@@ -24,19 +24,21 @@ to be consistent with the editor.
   --ansi-color-light-white: white;
 }
 
-/* The same as above but brightned by 10% */
-[data-editor-theme="default"] {
-  --ansi-color-red: #dd1f53;
-  --ansi-color-green: #5ab756;
-  --ansi-color-yellow: #d9930b;
-  --ansi-color-blue: #4d8cfb;
-  --ansi-color-magenta: #b02fbb;
-  --ansi-color-cyan: #05a4d0;
-  --ansi-color-light-black: #676e7b;
-  --ansi-color-light-red: #f35c57;
-  --ansi-color-light-green: #42dcab;
-  --ansi-color-light-yellow: #fdea9a;
-  --ansi-color-light-blue: #77c0fc;
-  --ansi-color-light-magenta: #d181e5;
-  --ansi-color-light-cyan: #64ccda;
+body[data-editor-theme="default"] .editor-theme-aware-ansi {
+  --ansi-color-black: black;
+  --ansi-color-red: #be5046;
+  --ansi-color-green: #98c379;
+  --ansi-color-yellow: #e5c07b;
+  --ansi-color-blue: #61afef;
+  --ansi-color-magenta: #c678dd;
+  --ansi-color-cyan: #56b6c2;
+  --ansi-color-white: white;
+  --ansi-color-light-black: #5c6370;
+  --ansi-color-light-red: #e06c75;
+  --ansi-color-light-green: #34d399;
+  --ansi-color-light-yellow: #fde68a;
+  --ansi-color-light-blue: #93c5fd;
+  --ansi-color-light-magenta: #f472b6;
+  --ansi-color-light-cyan: #6be3f2;
+  --ansi-color-light-white: white;
 }

View file

@@ -159,46 +159,50 @@ Also some spacing adjustments.
   transform: none;
 }
 
-/* To style circles for doctest results */
-.line-circle-red,
-.line-circle-green,
-.line-circle-grey {
+/* === Doctest status decoration === */
+
+.doctest-status-decoration-running,
+.doctest-status-decoration-success,
+.doctest-status-decoration-failed {
   height: 100%;
   position: relative;
 }
 
-.line-circle-red::after,
-.line-circle-green::after,
-.line-circle-grey::after {
+.doctest-status-decoration-running::after,
+.doctest-status-decoration-success::after,
+.doctest-status-decoration-failed::after {
   box-sizing: border-box;
   border-radius: 2px;
   content: "";
   display: block;
   height: 12px;
   width: 12px;
-  margin-left: 6px;
   position: absolute;
   top: 50%;
-  transform: translateY(-50%);
+  left: 50%;
+  transform: translate(-50%, -50%);
 }
 
-.line-circle-red::after {
-  background-color: rgb(233 117 121);
+.doctest-status-decoration-running::after {
+  @apply bg-gray-400;
 }
 
-.line-circle-green::after {
-  background-color: rgb(74 222 128);
+.doctest-status-decoration-success::after {
+  @apply bg-green-bright-400;
 }
 
-.line-circle-grey::after {
-  background-color: rgb(97 117 138);
+.doctest-status-decoration-failed::after {
+  @apply bg-red-400;
 }
 
-.doctest-failure-overlay {
+/* === Doctest failure details === */
+
+.doctest-details-widget {
   @apply font-editor;
   white-space: pre;
   background-color: rgba(0, 0, 0, 0.05);
-  padding-left: calc(68px + 6ch);
+  padding-top: 6px;
+  padding-bottom: 6px;
   position: absolute;
   width: 100%;
 }

View file

@@ -248,19 +248,9 @@ const Cell = {
     });
 
     this.handleEvent(
-      `doctest_result:${this.props.cellId}`,
-      ({ state, column, line, end_line, contents }) => {
-        switch (state) {
-          case "evaluating":
-            liveEditor.addEvaluatingDoctest(line);
-            break;
-          case "success":
-            liveEditor.addSuccessDoctest(line);
-            break;
-          case "failed":
-            liveEditor.addFailedDoctest(column, line, end_line, contents);
-            break;
-        }
+      `doctest_report:${this.props.cellId}`,
+      (doctestReport) => {
+        liveEditor.updateDoctest(doctestReport);
       }
     );
   }

View file

@@ -7,6 +7,7 @@ import HookServerAdapter from "./live_editor/hook_server_adapter";
 import RemoteUser from "./live_editor/remote_user";
 import { replacedSuffixLength } from "../../lib/text_utils";
 import { settingsStore, EDITOR_FONT_SIZE } from "../../lib/settings";
+import Doctest from "./live_editor/doctest";
 
 /**
  * Mounts cell source editor with real-time collaboration mechanism.
@@ -35,19 +36,7 @@
     this._onBlur = [];
     this._onCursorSelectionChange = [];
     this._remoteUserByClientId = {};
-
-    /* For doctest decorations we store the params to create the
-     * decorations and also the result of creating the decorations.
-     * The params are IModelDeltaDecoration from https://microsoft.github.io/monaco-editor/typedoc/interfaces/editor.IModelDeltaDecoration.html
-     * and the result is IEditorDecorationsCollection from https://microsoft.github.io/monaco-editor/typedoc/interfaces/editor.IEditorDecorationsCollection.html
-     */
-    this._doctestDecorations = {
-      deltaDecorations: {},
-      decorationCollection: null,
-    };
-    this._doctestZones = [];
-    this._doctestOverlays = [];
+    this._doctestByLine = {};
 
     const serverAdapter = new HookServerAdapter(hook, cellId, tag);
     this.editorClient = new EditorClient(serverAdapter, revision);
@@ -209,6 +198,33 @@
     }
   }
 
+  /**
+   * Either adds or updates doctest indicators.
+   */
+  updateDoctest(doctestReport) {
+    this._ensureMounted();
+
+    if (this._doctestByLine[doctestReport.line]) {
+      this._doctestByLine[doctestReport.line].update(doctestReport);
+    } else {
+      this._doctestByLine[doctestReport.line] = new Doctest(
+        this.editor,
+        doctestReport
+      );
+    }
+  }
+
+  /**
+   * Removes doctest indicators.
+   */
+  clearDoctests() {
+    this._ensureMounted();
+
+    Object.values(this._doctestByLine).forEach((doctest) => doctest.dispose());
+    this._doctestByLine = {};
+  }
+
   /**
    * Sets underline markers for warnings and errors.
    *
@@ -243,8 +259,6 @@
   _mountEditor() {
     const settings = settingsStore.get();
 
-    this.settings = settings;
-
     this.editor = monaco.editor.create(this.container, {
       language: this.language,
       value: this.source,
@@ -286,9 +300,6 @@
         : "off",
     });
 
-    this._doctestDecorations.decorationCollection =
-      this.editor.createDecorationsCollection([]);
-
     this.editor.addAction({
       contextMenuGroupId: "word-wrapping",
       id: "enable-word-wrapping",
@@ -585,83 +596,6 @@
       );
     });
   }
 
-  clearDoctests() {
-    this._doctestDecorations.decorationCollection.clear();
-    this._doctestDecorations.deltaDecorations = {};
-
-    this._doctestOverlays.forEach((overlay) =>
-      this.editor.removeOverlayWidget(overlay)
-    );
-
-    this.editor.changeViewZones((changeAccessor) => {
-      this._doctestZones.forEach((zone) => changeAccessor.removeZone(zone));
-    });
-  }
-
-  _createDoctestDecoration(lineNumber, className) {
-    return {
-      range: new monaco.Range(lineNumber, 1, lineNumber, 1),
-      options: {
-        isWholeLine: true,
-        linesDecorationsClassName: className,
-      },
-    };
-  }
-
-  _addDoctestDecoration(line, className) {
-    const newDecoration = this._createDoctestDecoration(line, className);
-    this._doctestDecorations.deltaDecorations[line] = newDecoration;
-    const decos = Object.values(this._doctestDecorations.deltaDecorations);
-    this._doctestDecorations.decorationCollection.set(decos);
-  }
-
-  _addDoctestOverlay(column, line, endLine, contents) {
-    let overlayDom = document.createElement("div");
-    overlayDom.innerHTML = contents.join("\n");
-    overlayDom.classList.add("doctest-failure-overlay");
-    overlayDom.style.fontSize = `${this.settings.editor_font_size}px`;
-    overlayDom.style.paddingLeft =
-      this.settings.editor_font_size === EDITOR_FONT_SIZE.large
-        ? `calc(74px + ${column}ch)`
-        : `calc(68px + ${column}ch)`;
-
-    // https://microsoft.github.io/monaco-editor/api/interfaces/monaco.editor.ioverlaywidget.html
-    let overlayWidget = {
-      getId: () => `doctest-overlay-${line}`,
-      getDomNode: () => overlayDom,
-      getPosition: () => null,
-    };
-
-    this.editor.addOverlayWidget(overlayWidget);
-    this._doctestOverlays.push(overlayWidget);
-
-    this.editor.changeViewZones((changeAccessor) => {
-      this._doctestZones.push(
-        changeAccessor.addZone({
-          afterLineNumber: endLine,
-          heightInLines: contents.length,
-          domNode: document.createElement("div"),
-          onDomNodeTop: (top) => {
-            overlayDom.style.top = top + "px";
-          },
-          onComputedHeight: (height) => {
-            overlayDom.style.height = height + "px";
-          },
-        })
-      );
-    });
-  }
-
-  addSuccessDoctest(line) {
-    this._addDoctestDecoration(line, "line-circle-green");
-  }
-
-  addFailedDoctest(column, line, endLine, contents) {
-    this._addDoctestDecoration(line, "line-circle-red");
-    this._addDoctestOverlay(column, line, endLine, contents);
-  }
-
-  addEvaluatingDoctest(line) {
-    this._addDoctestDecoration(line, "line-circle-grey");
-  }
 }
 
 function completionItemsToSuggestions(items, settings) {

View file

@@ -0,0 +1,134 @@
+import monaco from "./monaco";
+
+/**
+ * Doctest visual indicators within the editor.
+ *
+ * Consists of a status widget and optional error details.
+ */
+export default class Doctest {
+  constructor(editor, doctestReport) {
+    this._editor = editor;
+    this._statusDecoration = new StatusDecoration(
+      editor,
+      doctestReport.line,
+      doctestReport.status
+    );
+
+    if (doctestReport.status === "failed") {
+      this._detailsWidget = new DetailsWidget(editor, doctestReport);
+    }
+  }
+
+  /**
+   * Updates doctest indicator.
+   */
+  update(doctestReport) {
+    this._statusDecoration.update(doctestReport.status);
+
+    if (doctestReport.status === "failed") {
+      this._detailsWidget && this._detailsWidget.dispose();
+      this._detailsWidget = new DetailsWidget(this._editor, doctestReport);
+    }
+  }
+
+  /**
+   * Performs necessary cleanup actions.
+   */
+  dispose() {
+    this._statusDecoration.dispose();
+    this._detailsWidget && this._detailsWidget.dispose();
+  }
+}
+
+class StatusDecoration {
+  constructor(editor, lineNumber, status) {
+    this._editor = editor;
+    this._lineNumber = lineNumber;
+    this._decorations = [];
+
+    this.update(status);
+  }
+
+  update(status) {
+    const newDecorations = [
+      {
+        range: new monaco.Range(this._lineNumber, 1, this._lineNumber, 1),
+        options: {
+          isWholeLine: true,
+          linesDecorationsClassName: `doctest-status-decoration-${status}`,
+        },
+      },
+    ];
+
+    this._decorations = this._editor.deltaDecorations(
+      this._decorations,
+      newDecorations
+    );
+  }
+
+  dispose() {
+    this._editor.deltaDecorations(this._decorations, []);
+  }
+}
+
+class DetailsWidget {
+  constructor(editor, doctestReport) {
+    this._editor = editor;
+
+    const { line, end_line, details, column } = doctestReport;
+
+    const detailsHtml = details.join("\n");
+    const numberOfLines = details.length;
+
+    const marginWidth = this._editor
+      .getDomNode()
+      .querySelector(".margin-view-overlays").offsetWidth;
+
+    const fontSize = this._editor.getOption(
+      monaco.editor.EditorOption.fontSize
+    );
+
+    const lineHeight = this._editor.getOption(
+      monaco.editor.EditorOption.lineHeight
+    );
+
+    const detailsNode = document.createElement("div");
+    detailsNode.innerHTML = detailsHtml;
+    detailsNode.classList.add(
+      "doctest-details-widget",
+      "editor-theme-aware-ansi"
+    );
+    detailsNode.style.fontSize = `${fontSize}px`;
+    detailsNode.style.paddingLeft = `calc(${marginWidth}px + ${column}ch)`;
+
+    this._overlayWidget = {
+      getId: () => `livebook.doctest.overlay.${line}`,
+      getDomNode: () => detailsNode,
+      getPosition: () => null,
+    };
+
+    this._editor.addOverlayWidget(this._overlayWidget);
+
+    this._editor.changeViewZones((changeAccessor) => {
+      this._viewZone = changeAccessor.addZone({
+        afterLineNumber: end_line,
+        // Placeholder for all lines and additional padding
+        heightInPx: numberOfLines * lineHeight + 12,
+        domNode: document.createElement("div"),
+        onDomNodeTop: (top) => {
+          detailsNode.style.top = `${top}px`;
+        },
+        onComputedHeight: (height) => {
+          detailsNode.style.height = `${height}px`;
+        },
+      });
+    });
+  }
+
+  dispose() {
+    this._editor.removeOverlayWidget(this._overlayWidget);
+
+    this._editor.changeViewZones((changeAccessor) => {
+      changeAccessor.removeZone(this._viewZone);
+    });
+  }
+}

View file

@@ -92,6 +92,24 @@ defprotocol Livebook.Runtime do
           identifiers_defined: %{(identifier :: term()) => version :: term()}
         }
 
+  @typedoc """
+  Includes information about a running or finished doctest.
+
+  Failed doctests have additional details formatted as a string.
+  """
+  @type doctest_report ::
+          %{
+            status: :running | :success,
+            line: pos_integer()
+          }
+          | %{
+              status: :failed,
+              column: pos_integer(),
+              line: pos_integer(),
+              end_line: pos_integer(),
+              details: String.t()
+            }
+
   @typedoc """
   Recognised intellisense request.
   """
@@ -403,6 +421,13 @@
   to notify the owner.
 
+  ### Doctests
+
+  If the cell includes doctests, the runtime can evaluate them and
+  send reports as a message:
+
+    * `{:runtime_doctest_report, evaluation_ref, doctest_report}`
+
   ## Options
 
     * `:file` - the file considered as the source during evaluation.

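For context, the protocol change above only specifies the message shape; what the owner does with it is up to the owner process. The sketch below shows one way an owner could pattern-match the new `{:runtime_doctest_report, evaluation_ref, doctest_report}` message. The module name and the logging are hypothetical; only the message tuple and the report variants come from the `doctest_report/0` type above.

# Hypothetical owner-side handler; a minimal sketch, not Livebook's code.
defmodule MyRuntimeOwner do
  use GenServer

  @impl true
  def init(state), do: {:ok, state}

  @impl true
  def handle_info({:runtime_doctest_report, evaluation_ref, report}, state) do
    case report do
      %{status: :running, line: line} ->
        IO.puts("[#{inspect(evaluation_ref)}] doctest at line #{line} is running")

      %{status: :success, line: line} ->
        IO.puts("[#{inspect(evaluation_ref)}] doctest at line #{line} passed")

      %{status: :failed, line: line, end_line: end_line, column: column, details: details} ->
        IO.puts("""
        [#{inspect(evaluation_ref)}] doctest at lines #{line}-#{end_line} (column #{column}) failed:
        #{details}
        """)
    end

    {:noreply, state}
  end
end
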
View file

@@ -22,7 +22,7 @@ defmodule Livebook.Runtime.Evaluator.Doctests do
       test_module.tests
       |> Enum.sort_by(& &1.tags.doctest_line)
       |> Enum.each(fn test ->
-        report_doctest_evaluating(test)
+        report_doctest_running(test)
         test = run_test(test)
         report_doctest_result(test, lines)
         test
@@ -38,22 +38,18 @@
     :ok
   end
 
-  defp report_doctest_evaluating(test) do
-    result = %{
+  defp report_doctest_running(test) do
+    send_doctest_report(%{
       line: test.tags.doctest_line,
-      state: :evaluating
-    }
-
-    put_output({:doctest_result, result})
+      status: :running
+    })
   end
 
   defp report_doctest_result(%{state: nil} = test, _lines) do
-    result = %{
+    send_doctest_report(%{
       line: test.tags.doctest_line,
-      state: :success
-    }
-
-    put_output({:doctest_result, result})
+      status: :success
+    })
   end
 
   defp report_doctest_result(%{state: {:failed, failure}} = test, lines) do
@@ -92,15 +88,13 @@
         end_line
       end
 
-    result = %{
+    send_doctest_report(%{
       column: count_columns(prompt_line, 0),
       line: doctest_line,
      end_line: end_line,
-      state: :failed,
-      contents: IO.iodata_to_binary(format_failure(failure, test))
-    }
-
-    put_output({:doctest_result, result})
+      status: :failed,
+      details: IO.iodata_to_binary(format_failure(failure, test))
+    })
   end
 
   defp count_columns(" " <> rest, counter), do: count_columns(rest, counter + 1)
@@ -338,10 +332,18 @@
   end
 
   defp put_output(output) do
+    send_livebook_message({:livebook_put_output, output})
+  end
+
+  defp send_doctest_report(doctest_report) do
+    send_livebook_message({:livebook_doctest_report, doctest_report})
+  end
+
+  defp send_livebook_message(message) do
     gl = Process.group_leader()
     ref = make_ref()
 
-    send(gl, {:io_request, self(), ref, {:livebook_put_output, output}})
+    send(gl, {:io_request, self(), ref, message})
 
     receive do
       {:io_reply, ^ref, reply} -> {:ok, reply}

View file

@@ -240,6 +240,11 @@ defmodule Livebook.Runtime.Evaluator.IOProxy do
     {:ok, state}
   end
 
+  defp io_request({:livebook_doctest_report, doctest_report}, state) do
+    send(state.send_to, {:runtime_doctest_report, state.ref, doctest_report})
+    {:ok, state}
+  end
+
   defp io_request({:livebook_get_input_value, input_id}, state) do
     input_cache =
       Map.put_new_lazy(state.input_cache, input_id, fn ->

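Taken together with the evaluator change above, the report travels over the Erlang I/O protocol: the doctest runner sends a custom `:io_request` to its group leader (the IOProxy), which forwards it to the runtime owner as a `:runtime_doctest_report` message. The condensed sketch below restates that relay in one place; the module and function names are made up, only the tuples mirror the two diffs.

# Condensed, hypothetical sketch of the relay shown in the two diffs above.
defmodule DoctestReportRelay do
  # Evaluator side: wrap the report in an io_request to the group leader
  # (the IOProxy) and wait for the acknowledgement.
  def emit(doctest_report) do
    gl = Process.group_leader()
    ref = make_ref()

    send(gl, {:io_request, self(), ref, {:livebook_doctest_report, doctest_report}})

    receive do
      {:io_reply, ^ref, reply} -> {:ok, reply}
    end
  end

  # Proxy side: translate the custom io_request into the runtime message
  # that the owner process receives.
  def forward({:livebook_doctest_report, doctest_report}, state) do
    send(state.send_to, {:runtime_doctest_report, state.ref, doctest_report})
    {:ok, state}
  end
end
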
View file

@@ -1324,6 +1324,11 @@ defmodule Livebook.Session do
     {:noreply, state}
   end
 
+  def handle_info({:runtime_doctest_report, cell_id, doctest_report}, state) do
+    operation = {:add_cell_doctest_report, @client_id, cell_id, doctest_report}
+    {:noreply, handle_operation(state, operation)}
+  end
+
   def handle_info({:runtime_evaluation_output_to_clients, cell_id, output}, state) do
     operation = {:add_cell_evaluation_output, @client_id, cell_id, output}
     broadcast_operation(state.session_id, operation)

View file

@@ -181,6 +181,7 @@ defmodule Livebook.Session.Data do
           | {:move_cell, client_id(), Cell.id(), offset :: integer()}
           | {:move_section, client_id(), Section.id(), offset :: integer()}
           | {:queue_cells_evaluation, client_id(), list(Cell.id())}
+          | {:add_cell_doctest_report, client_id(), Cell.id(), Runtime.doctest_report()}
           | {:add_cell_evaluation_output, client_id(), Cell.id(), term()}
           | {:add_cell_evaluation_response, client_id(), Cell.id(), term(), metadata :: map()}
           | {:bind_input, client_id(), code_cell_id :: Cell.id(), input_id()}
@@ -546,10 +547,7 @@
     end
   end
 
-  def apply_operation(
-        data,
-        {:add_cell_evaluation_output, _client_id, id, {:doctest_result, _result}}
-      ) do
+  def apply_operation(data, {:add_cell_doctest_report, _client_id, id, _doctest_report}) do
     with {:ok, _cell, _} <- Notebook.fetch_cell_and_section(data.notebook, id) do
       data
       |> with_actions()

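As a quick illustration of the new operation tuple (the variable values below are made up, and this assumes `apply_operation/2` keeps its usual `{:ok, data, actions} | :error` return shape), the session applies a doctest report like any other operation:

# Hypothetical values; shown only to illustrate the new operation.
operation =
  {:add_cell_doctest_report, client_id, cell_id, %{status: :running, line: 4}}

case Livebook.Session.Data.apply_operation(data, operation) do
  {:ok, data, _actions} -> data
  :error -> data
end
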
View file

@@ -1772,25 +1772,6 @@
     end
   end
 
-  defp after_operation(
-         socket,
-         _prev_socket,
-         {:add_cell_evaluation_output, _client_id, cell_id, {:doctest_result, result}}
-       ) do
-    result =
-      Map.replace_lazy(
-        result,
-        :contents,
-        fn contents ->
-          contents
-          |> LivebookWeb.Helpers.ANSI.ansi_string_to_html_lines()
-          |> Enum.map(&Phoenix.HTML.safe_to_string/1)
-        end
-      )
-
-    push_event(socket, "doctest_result:#{cell_id}", result)
-  end
-
   defp after_operation(
          socket,
         _prev_socket,
@@ -1809,6 +1790,21 @@
     |> push_event("evaluation_finished:#{cell_id}", %{code_markers: metadata.code_markers})
   end
 
+  defp after_operation(
+         socket,
+         _prev_socket,
+         {:add_cell_doctest_report, _client_id, cell_id, doctest_report}
+       ) do
+    doctest_report =
+      Map.replace_lazy(doctest_report, :details, fn details ->
+        details
+        |> LivebookWeb.Helpers.ANSI.ansi_string_to_html_lines()
+        |> Enum.map(&Phoenix.HTML.safe_to_string/1)
+      end)
+
+    push_event(socket, "doctest_report:#{cell_id}", doctest_report)
+  end
+
   defp after_operation(
          socket,
         _prev_socket,
@@ -2264,9 +2260,6 @@
         data_view
 
-      {:add_cell_evaluation_output, _client_id, _cell_id, {:doctest_result, _result}} ->
-        data_view
-
       {:add_cell_evaluation_output, _client_id, cell_id, {:stdout, text}} ->
         # Lookup in previous data to see if the output is already there
         case Notebook.fetch_cell_and_section(prev_data.notebook, cell_id) do
@@ -2278,6 +2271,9 @@
             data_to_view(data)
         end
 
+      {:doctest_report, _client_id, _cell_id, _doctest_report} ->
+        data_view
+
       _ ->
         data_to_view(data)
     end

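To make the pushed payload concrete: before the event is sent, the `:details` ANSI string is expanded into a list of per-line HTML strings (the JS `DetailsWidget` above joins them back with newlines and injects them into the widget). A standalone sketch with made-up values, mirroring the transformation in the diff:

# Made-up report; only the keys and the transformation come from the diff above.
report = %{status: :failed, line: 4, end_line: 5, column: 6, details: "\e[31moops\e[0m"}

report =
  Map.replace_lazy(report, :details, fn details ->
    details
    |> LivebookWeb.Helpers.ANSI.ansi_string_to_html_lines()
    |> Enum.map(&Phoenix.HTML.safe_to_string/1)
  end)

# report.details is now a list of HTML strings, one per output line, ready to
# be sent with push_event(socket, "doctest_report:#{cell_id}", report).
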
View file

@@ -437,49 +437,37 @@ defmodule Livebook.Runtime.EvaluatorTest do
       Evaluator.evaluate_code(evaluator, code, :code_1, [])
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 4, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 4, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result,
-                       %{
-                         column: 6,
-                         contents:
-                           "\e[31mexpected exception ArgumentError but got RuntimeError with message \"oops\"\e[0m",
-                         end_line: 5,
-                         line: 4,
-                         state: :failed
-                       }}}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details:
+                          "\e[31mexpected exception ArgumentError but got RuntimeError with message \"oops\"\e[0m",
+                        end_line: 5,
+                        line: 4,
+                        status: :failed
+                      }}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 7, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 7, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {
-                        :doctest_result,
-                        %{
-                          column: 6,
-                          contents: "\e[31mExpected truthy, got false\e[0m",
-                          end_line: 8,
-                          line: 7,
-                          state: :failed
-                        }
-                      }}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details: "\e[31mExpected truthy, got false\e[0m",
+                        end_line: 8,
+                        line: 7,
+                        status: :failed
+                      }}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 12, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 12, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 12, state: :success}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 12, status: :success}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 19, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 19, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {
-                        :doctest_result,
-                        %{column: 4, contents: _, end_line: 20, line: 19, state: :failed}
-                      }}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{column: 4, details: _, end_line: 20, line: 19, status: :failed}}
     end
 
     # TODO: Run this test on Elixir v1.15+
@@ -502,18 +490,16 @@
       Evaluator.evaluate_code(evaluator, code, :code_1, [])
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 4, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 4, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result,
-                       %{
-                         column: 6,
-                         contents: _,
-                         end_line: 7,
-                         line: 4,
-                         state: :failed
-                       }}}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details: _,
+                        end_line: 7,
+                        line: 4,
+                        status: :failed
+                      }}
     end
 
     test "runtime errors", %{evaluator: evaluator} do
@@ -545,49 +531,39 @@
       Evaluator.evaluate_code(evaluator, code, :code_1, [])
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 4, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 4, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result,
-                       %{
-                         column: 6,
-                         contents: "\e[31mmatch (=) failed" <> _,
-                         end_line: 4,
-                         line: 4,
-                         state: :failed
-                       }}}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details: "\e[31mmatch (=) failed" <> _,
+                        end_line: 4,
+                        line: 4,
+                        status: :failed
+                      }}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 9, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 9, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {
-                        :doctest_result,
-                        %{
-                          column: 6,
-                          contents:
-                            "\e[31m** (Protocol.UndefinedError) protocol Enumerable not implemented for 1 of type Integer. " <>
-                              _,
-                          end_line: 10,
-                          line: 9,
-                          state: :failed
-                        }
-                      }}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details:
+                          "\e[31m** (Protocol.UndefinedError) protocol Enumerable not implemented for 1 of type Integer. " <>
+                            _,
+                        end_line: 10,
+                        line: 9,
+                        status: :failed
+                      }}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 17, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 17, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {
-                        :doctest_result,
-                        %{
-                          column: 6,
-                          contents: "\e[31m** (EXIT from #PID<" <> _,
-                          end_line: 18,
-                          line: 17,
-                          state: :failed
-                        }
-                      }}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details: "\e[31m** (EXIT from #PID<" <> _,
+                        end_line: 18,
+                        line: 17,
+                        status: :failed
+                      }}
     end
@@ -606,19 +582,16 @@
       Evaluator.evaluate_code(evaluator, code, :code_1, [])
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result, %{line: 4, state: :evaluating}}}
+      assert_receive {:runtime_doctest_report, :code_1, %{line: 4, status: :running}}
 
-      assert_receive {:runtime_evaluation_output, :code_1,
-                      {:doctest_result,
-                       %{
-                         column: 6,
-                         contents:
-                           "\e[31mDoctest did not compile, got: (TokenMissingError) " <> _,
-                         end_line: 5,
-                         line: 4,
-                         state: :failed
-                       }}}
+      assert_receive {:runtime_doctest_report, :code_1,
+                      %{
+                        column: 6,
+                        details: "\e[31mDoctest did not compile, got: (TokenMissingError) " <> _,
+                        end_line: 5,
+                        line: 4,
+                        status: :failed
+                      }}
     end
   end