// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::sync::Arc;

use crate::args::Flags;
use crate::args::JupyterFlags;
use crate::cdp;
use crate::lsp::ReplCompletionItem;
use crate::ops;
use crate::tools::repl;
use crate::tools::test::create_single_test_event_channel;
use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::TestEventWorkerSender;
use crate::tools::test::TestFailureFormatOptions;
use crate::CliFactory;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::located_script_name;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url::Url;
use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::WorkerExecutionMode;
use deno_terminal::colors;
use jupyter_runtime::jupyter::ConnectionInfo;
use jupyter_runtime::messaging::StreamContent;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::oneshot;

mod install;
pub mod server;
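
/// Entry point for the `deno jupyter` subcommand. Depending on the flags it
/// either reports the kernel install status, installs the kernelspec, or
/// (when launched by a Jupyter frontend with a connection file) runs the
/// kernel itself.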
pub async fn kernel(
  flags: Arc<Flags>,
  jupyter_flags: JupyterFlags,
) -> Result<(), AnyError> {
  log::info!(
    "{} \"deno jupyter\" is unstable and might change in the future.",
    colors::yellow("Warning"),
  );

  if !jupyter_flags.install && !jupyter_flags.kernel {
    install::status()?;
    return Ok(());
  }

  if jupyter_flags.install {
    install::install()?;
    return Ok(());
  }

  let connection_filepath = jupyter_flags.conn_file.unwrap();

  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let main_module =
    resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
      .unwrap();
  // TODO(bartlomieju): should we run with all permissions?
  let permissions =
    PermissionsContainer::allow_all(factory.permission_desc_parser()?.clone());
  let npm_resolver = factory.npm_resolver().await?.clone();
  let resolver = factory.resolver().await?.clone();
  let worker_factory = factory.create_cli_main_worker_factory().await?;
  let (stdio_tx, stdio_rx) = mpsc::unbounded_channel();
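
  // The connection file is written by the Jupyter frontend that spawned this
  // process; it describes the ZeroMQ endpoints and the HMAC key used to sign
  // protocol messages.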
  let conn_file =
    std::fs::read_to_string(&connection_filepath).with_context(|| {
      format!("Couldn't read connection file: {:?}", connection_filepath)
    })?;
  let spec: ConnectionInfo =
    serde_json::from_str(&conn_file).with_context(|| {
      format!(
        "Connection file is not a valid JSON: {:?}",
        connection_filepath
      )
    })?;
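
  // Wire up a test event channel so that `Deno.test()` calls made from a
  // notebook cell are reported back through the kernel's output channel
  // instead of being lost.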
  let (worker, test_event_receiver) = create_single_test_event_channel();
  let TestEventWorkerSender {
    sender: test_event_sender,
    stdout,
    stderr,
  } = worker;

  let mut worker = worker_factory
    .create_custom_worker(
      WorkerExecutionMode::Jupyter,
      main_module.clone(),
      permissions,
      vec![
        ops::jupyter::deno_jupyter::init_ops(stdio_tx.clone()),
        ops::testing::deno_test::init_ops(test_event_sender),
      ],
      // FIXME(nayeemrmn): Test output capturing currently doesn't work.
      Stdio {
        stdin: StdioPipe::inherit(),
        stdout: StdioPipe::file(stdout),
        stderr: StdioPipe::file(stderr),
      },
    )
    .await?;
  worker.setup_repl().await?;
  worker.execute_script_static(
    located_script_name!(),
    "Deno[Deno.internal].enableJupyter();",
  )?;
  let worker = worker.into_main_worker();
  let mut repl_session = repl::ReplSession::initialize(
    cli_options,
    npm_resolver,
    resolver,
    worker,
    main_module,
    test_event_receiver,
  )
  .await?;
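
  // Forwards test reporter output to the Jupyter server as "stream" messages
  // on the iopub channel, so test results show up in the notebook frontend.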
  struct TestWriter(UnboundedSender<StreamContent>);
  impl std::io::Write for TestWriter {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
      self
        .0
        .send(StreamContent::stdout(&String::from_utf8_lossy(buf)))
        .ok();
      Ok(buf.len())
    }
    fn flush(&mut self) -> std::io::Result<()> {
      Ok(())
    }
  }

  let cwd_url =
    Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| {
      generic_error(format!(
        "Unable to construct URL from the path of cwd: {}",
        cli_options.initial_cwd().to_string_lossy(),
      ))
    })?;
  repl_session.set_test_reporter_factory(Box::new(move || {
    Box::new(
      PrettyTestReporter::new(
        false,
        true,
        false,
        true,
        cwd_url.clone(),
        TestFailureFormatOptions::default(),
      )
      .with_writer(Box::new(TestWriter(stdio_tx.clone()))),
    )
  }));
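
  // Two unbounded channels form a request/response pair between the Jupyter
  // server (which runs on its own thread) and the REPL session living on this
  // runtime.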
  let (tx1, rx1) = mpsc::unbounded_channel();
  let (tx2, rx2) = mpsc::unbounded_channel();
  let (startup_data_tx, startup_data_rx) =
    oneshot::channel::<server::StartupData>();

  let mut repl_session_proxy = JupyterReplSession {
    repl_session,
    rx: rx1,
    tx: tx2,
  };
  let repl_session_proxy_channels = JupyterReplProxy { tx: tx1, rx: rx2 };
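
  // The Jupyter server owns the ZeroMQ sockets and runs on a dedicated thread
  // with its own current-thread Tokio runtime; it talks to the REPL session
  // only through the proxy channels created above.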
  let join_handle = std::thread::spawn(move || {
    let fut = server::JupyterServer::start(
      spec,
      stdio_rx,
      repl_session_proxy_channels,
      startup_data_tx,
    )
    .boxed_local();
    deno_runtime::tokio_util::create_and_run_current_thread(fut)
  });

  let Ok(startup_data) = startup_data_rx.await else {
    bail!("Failed to acquire startup data");
  };
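
  // Make the server's startup data (iopub connection, last execution request
  // and stdin proxy) available to the Jupyter ops via the runtime's op state.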
  {
    let op_state_rc =
      repl_session_proxy.repl_session.worker.js_runtime.op_state();
    let mut op_state = op_state_rc.borrow_mut();
    op_state.put(startup_data.iopub_connection.clone());
    op_state.put(startup_data.last_execution_request.clone());
    op_state.put(startup_data.stdin_connection_proxy.clone());
  }

  repl_session_proxy.start().await;
  let server_result = join_handle.join();
  match server_result {
    Ok(result) => {
      result?;
    }
    Err(e) => {
      bail!("Jupyter kernel error: {:?}", e);
    }
  };

  Ok(())
}
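
/// Requests sent from the Jupyter server thread to the REPL session running
/// on the main runtime.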
pub enum JupyterReplRequest {
  LspCompletions {
    line_text: String,
    position: usize,
  },
  JsGetProperties {
    object_id: String,
  },
  JsEvaluate {
    expr: String,
  },
  JsGlobalLexicalScopeNames,
  JsEvaluateLineWithObjectWrapping {
    line: String,
  },
  JsCallFunctionOnArgs {
    function_declaration: String,
    args: Vec<cdp::RemoteObject>,
  },
  JsCallFunctionOn {
    arg0: cdp::CallArgument,
    arg1: cdp::CallArgument,
  },
}
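
/// Responses produced by the REPL session; each variant mirrors the
/// `JupyterReplRequest` variant of the same name.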
pub enum JupyterReplResponse {
  LspCompletions(Vec<ReplCompletionItem>),
  JsGetProperties(Option<cdp::GetPropertiesResponse>),
  JsEvaluate(Option<cdp::EvaluateResponse>),
  JsGlobalLexicalScopeNames(cdp::GlobalLexicalScopeNamesResponse),
  JsEvaluateLineWithObjectWrapping(Result<repl::TsEvaluateResponse, AnyError>),
  JsCallFunctionOnArgs(Result<cdp::CallFunctionOnResponse, AnyError>),
  JsCallFunctionOn(Option<cdp::CallFunctionOnResponse>),
}
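
/// Channel-based handle to the REPL session held by the Jupyter server
/// thread. Every method sends one request and then awaits the matching
/// response, e.g. (hypothetical call site, not part of this module):
///
/// ```ignore
/// let items = proxy.lsp_completions("Deno.".to_string(), 5).await;
/// ```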
pub struct JupyterReplProxy {
  tx: mpsc::UnboundedSender<JupyterReplRequest>,
  rx: mpsc::UnboundedReceiver<JupyterReplResponse>,
}
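
// Because each helper does a strict send-then-recv, requests must be answered
// in order; a mismatched response variant is treated as unreachable.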
impl JupyterReplProxy {
  pub async fn lsp_completions(
    &mut self,
    line_text: String,
    position: usize,
  ) -> Vec<ReplCompletionItem> {
    let _ = self.tx.send(JupyterReplRequest::LspCompletions {
      line_text,
      position,
    });
    let Some(JupyterReplResponse::LspCompletions(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  pub async fn get_properties(
    &mut self,
    object_id: String,
  ) -> Option<cdp::GetPropertiesResponse> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsGetProperties { object_id });
    let Some(JupyterReplResponse::JsGetProperties(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  pub async fn evaluate(
    &mut self,
    expr: String,
  ) -> Option<cdp::EvaluateResponse> {
    let _ = self.tx.send(JupyterReplRequest::JsEvaluate { expr });
    let Some(JupyterReplResponse::JsEvaluate(resp)) = self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  pub async fn global_lexical_scope_names(
    &mut self,
  ) -> cdp::GlobalLexicalScopeNamesResponse {
    let _ = self.tx.send(JupyterReplRequest::JsGlobalLexicalScopeNames);
    let Some(JupyterReplResponse::JsGlobalLexicalScopeNames(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  pub async fn evaluate_line_with_object_wrapping(
    &mut self,
    line: String,
  ) -> Result<repl::TsEvaluateResponse, AnyError> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line });
    let Some(JupyterReplResponse::JsEvaluateLineWithObjectWrapping(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  pub async fn call_function_on_args(
    &mut self,
    function_declaration: String,
    args: Vec<cdp::RemoteObject>,
  ) -> Result<cdp::CallFunctionOnResponse, AnyError> {
    let _ = self.tx.send(JupyterReplRequest::JsCallFunctionOnArgs {
      function_declaration,
      args,
    });
    let Some(JupyterReplResponse::JsCallFunctionOnArgs(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }

  // TODO(bartlomieju): rename to "broadcast_result"?
  pub async fn call_function_on(
    &mut self,
    arg0: cdp::CallArgument,
    arg1: cdp::CallArgument,
  ) -> Option<cdp::CallFunctionOnResponse> {
    let _ = self
      .tx
      .send(JupyterReplRequest::JsCallFunctionOn { arg0, arg1 });
    let Some(JupyterReplResponse::JsCallFunctionOn(resp)) =
      self.rx.recv().await
    else {
      unreachable!()
    };
    resp
  }
}
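
/// Owns the actual `ReplSession` and answers `JupyterReplRequest`s coming
/// from the server thread.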
pub struct JupyterReplSession {
  repl_session: repl::ReplSession,
  rx: mpsc::UnboundedReceiver<JupyterReplRequest>,
  tx: mpsc::UnboundedSender<JupyterReplResponse>,
}
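
// `start` runs the proxy message loop: it blocks on incoming requests,
// dispatches them to the helpers below and sends the response back until
// either side hangs up.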
impl JupyterReplSession {
  pub async fn start(&mut self) {
    loop {
      let Some(msg) = self.rx.recv().await else {
        break;
      };
      let resp = match msg {
        JupyterReplRequest::LspCompletions {
          line_text,
          position,
        } => JupyterReplResponse::LspCompletions(
          self.lsp_completions(&line_text, position).await,
        ),
        JupyterReplRequest::JsGetProperties { object_id } => {
          JupyterReplResponse::JsGetProperties(
            self.get_properties(object_id).await,
          )
        }
        JupyterReplRequest::JsEvaluate { expr } => {
          JupyterReplResponse::JsEvaluate(self.evaluate(expr).await)
        }
        JupyterReplRequest::JsGlobalLexicalScopeNames => {
          JupyterReplResponse::JsGlobalLexicalScopeNames(
            self.global_lexical_scope_names().await,
          )
        }
        JupyterReplRequest::JsEvaluateLineWithObjectWrapping { line } => {
          JupyterReplResponse::JsEvaluateLineWithObjectWrapping(
            self.evaluate_line_with_object_wrapping(&line).await,
          )
        }
        JupyterReplRequest::JsCallFunctionOnArgs {
          function_declaration,
          args,
        } => JupyterReplResponse::JsCallFunctionOnArgs(
          self
            .call_function_on_args(function_declaration, &args)
            .await,
        ),
        JupyterReplRequest::JsCallFunctionOn { arg0, arg1 } => {
          JupyterReplResponse::JsCallFunctionOn(
            self.call_function_on(arg0, arg1).await,
          )
        }
      };

      let Ok(()) = self.tx.send(resp) else {
        break;
      };
    }
  }
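
  // The helpers below are thin wrappers over the underlying REPL session and
  // its CDP (Chrome DevTools Protocol) connection.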
  pub async fn lsp_completions(
    &mut self,
    line_text: &str,
    position: usize,
  ) -> Vec<ReplCompletionItem> {
    self
      .repl_session
      .language_server
      .completions(line_text, position)
      .await
  }

  pub async fn get_properties(
    &mut self,
    object_id: String,
  ) -> Option<cdp::GetPropertiesResponse> {
    let get_properties_response = self
      .repl_session
      .post_message_with_event_loop(
        "Runtime.getProperties",
        Some(cdp::GetPropertiesArgs {
          object_id,
          own_properties: None,
          accessor_properties_only: None,
          generate_preview: None,
          non_indexed_properties_only: Some(true),
        }),
      )
      .await
      .ok()?;
    serde_json::from_value(get_properties_response).ok()
  }

  pub async fn evaluate(
    &mut self,
    expr: String,
  ) -> Option<cdp::EvaluateResponse> {
    let evaluate_response: serde_json::Value = self
      .repl_session
      .post_message_with_event_loop(
        "Runtime.evaluate",
        Some(cdp::EvaluateArgs {
          expression: expr,
          object_group: None,
          include_command_line_api: None,
          silent: None,
          context_id: Some(self.repl_session.context_id),
          return_by_value: None,
          generate_preview: None,
          user_gesture: None,
          await_promise: None,
          throw_on_side_effect: Some(true),
          timeout: Some(200),
          disable_breaks: None,
          repl_mode: None,
          allow_unsafe_eval_blocked_by_csp: None,
          unique_context_id: None,
        }),
      )
      .await
      .ok()?;
    serde_json::from_value(evaluate_response).ok()
  }

  pub async fn global_lexical_scope_names(
    &mut self,
  ) -> cdp::GlobalLexicalScopeNamesResponse {
    let evaluate_response = self
      .repl_session
      .post_message_with_event_loop(
        "Runtime.globalLexicalScopeNames",
        Some(cdp::GlobalLexicalScopeNamesArgs {
          execution_context_id: Some(self.repl_session.context_id),
        }),
      )
      .await
      .unwrap();
    serde_json::from_value(evaluate_response).unwrap()
  }

  pub async fn evaluate_line_with_object_wrapping(
    &mut self,
    line: &str,
  ) -> Result<repl::TsEvaluateResponse, AnyError> {
    self
      .repl_session
      .evaluate_line_with_object_wrapping(line)
      .await
  }

  pub async fn call_function_on_args(
    &mut self,
    function_declaration: String,
    args: &[cdp::RemoteObject],
  ) -> Result<cdp::CallFunctionOnResponse, AnyError> {
    self
      .repl_session
      .call_function_on_args(function_declaration, args)
      .await
  }

  // TODO(bartlomieju): rename to "broadcast_result"?
  pub async fn call_function_on(
    &mut self,
    arg0: cdp::CallArgument,
    arg1: cdp::CallArgument,
  ) -> Option<cdp::CallFunctionOnResponse> {
    let response = self
      .repl_session
      .post_message_with_event_loop(
        "Runtime.callFunctionOn",
        Some(json!({
          "functionDeclaration": r#"async function (execution_count, result) {
            await Deno[Deno.internal].jupyter.broadcastResult(execution_count, result);
          }"#,
          "arguments": [arg0, arg1],
          "executionContextId": self.repl_session.context_id,
          "awaitPromise": true,
        })),
      )
      .await
      .ok()?;
    serde_json::from_value(response).ok()
  }
}