diff --git a/crates/moonbit/Cargo.toml b/crates/moonbit/Cargo.toml index fbf246090..4f8739124 100644 --- a/crates/moonbit/Cargo.toml +++ b/crates/moonbit/Cargo.toml @@ -11,6 +11,7 @@ description = """ MoonBit bindings generator for WIT and the component model, typically used through the `wit-bindgen-cli` crate. """ +readme = "README.md" [lints] workspace = true diff --git a/crates/moonbit/README.md b/crates/moonbit/README.md new file mode 100644 index 000000000..a5603264e --- /dev/null +++ b/crates/moonbit/README.md @@ -0,0 +1,68 @@ +# `wit-bindgen-moonbit` + +MoonBit language bindings generator for WIT and the Component Model. + +## Usage + +Generate bindings via the `moonbit` subcommand: + +```bash +wit-bindgen moonbit [OPTIONS] +``` + +See `wit-bindgen help moonbit` for available options. + +## Local async usage + +For pure MoonBit code (no FFI), you can create local future/stream pairs. + +Future + Promise: + +```mbt +let (f, p) = @async.Future::new[Int]() +@async.spawn(async fn() { p.write(42) }) +let value = f.get() +``` + +Stream + Sink (batched reads/writes): + +```mbt +let (s, sink) = @async.Stream::new[Byte]() +@async.spawn(async fn() { + let chunk : Array[Byte] = [1, 2, 3, 4] + let _ = sink.write(chunk[:]) + sink.close() +}) +let chunk = s.read(4096) +match chunk { + None => () + Some(bytes) => { + let _ = bytes.length() + } +} +``` + +`Stream::read(count)` returns up to `count` elements; `Sink::write` accepts +`ArrayView[T]` so byte streams can batch data efficiently. `Stream::new` +accepts an optional `capacity` (<= 0 means unbounded). 
+ +## Testing + +From the repo root, run the MoonBit codegen tests: + +```bash +cargo run test \ + --languages rust,moonbit \ + --artifacts target/artifacts \ + --rust-wit-bindgen-path ./crates/guest-rust \ + tests/codegen +``` + +And the async runtime tests (requires an async component-model runner): + +```bash +cargo run test --languages rust,moonbit tests/runtime-async \ + --artifacts target/artifacts \ + --rust-wit-bindgen-path ./crates/guest-rust \ + --runner "wasmtime -W component-model-async" +``` diff --git a/crates/moonbit/src/async/async_abi.mbt b/crates/moonbit/src/async/async_abi.mbt new file mode 100644 index 000000000..0e7423f88 --- /dev/null +++ b/crates/moonbit/src/async/async_abi.mbt @@ -0,0 +1,266 @@ +// #region subtask + +///| +priv struct SubTask { + handle : Int + state : SubTaskState +} + +///| +priv enum SubTaskState { + Starting = 0 + Started = 1 + Returned = 2 + Cancelled_before_started = 3 + Cancelled_before_returned = 4 +} + +///| +fn SubTaskState::from(int : Int) -> SubTaskState { + match int { + 0 => Starting + 1 => Started + 2 => Returned + 3 => Cancelled_before_started + 4 => Cancelled_before_returned + _ => panic() + } +} + +///| +fn SubTask::from(code : Int) -> SubTask { + { handle: code >> 4, state: SubTaskState::from(code & 0xf) } +} + +///| +/// None : the subtask is blocked +fn SubTask::cancel(self : SubTask) -> SubTaskState? 
{ + let result = subtask_cancel(self.handle) + if result == -1 { + None + } else { + Some(SubTaskState::from(subtask_cancel(self.handle))) + } +} + +// #endregion + +// #region events + +///| +priv enum EventCode { + None = 0 + SubTask = 1 + StreamRead = 2 + StreamWrite = 3 + FutureRead = 4 + FutureWrite = 5 + TaskCancelled = 6 +} + +///| +fn EventCode::from(int : Int) -> EventCode { + match int { + 0 => EventCode::None + 1 => EventCode::SubTask + 2 => EventCode::StreamRead + 3 => EventCode::StreamWrite + 4 => EventCode::FutureRead + 5 => EventCode::FutureWrite + 6 => EventCode::TaskCancelled + _ => panic() + } +} + +///| +priv enum Events { + None + Subtask(Int, SubTaskState) + StreamRead(Int, StreamResult) + StreamWrite(Int, StreamResult) + FutureRead(Int, FutureReadResult) + FutureWrite(Int, FutureWriteResult) + TaskCancelled +} + +///| +fn Events::new(code : EventCode, i : Int, j : Int) -> Events { + match code { + None => None + SubTask => Subtask(i, SubTaskState::from(j)) + StreamRead => StreamRead(i, StreamResult::from(j)) + StreamWrite => StreamWrite(i, StreamResult::from(j)) + FutureRead => FutureRead(i, FutureReadResult::from(j)) + FutureWrite => FutureWrite(i, FutureWriteResult::from(j)) + TaskCancelled => TaskCancelled + } +} + +// #endregion + +// #region waitable set + +///| +struct WaitableSet(Int) derive(Eq, Show, Hash) + +///| +fn WaitableSet::new() -> WaitableSet { + WaitableSet(waitable_set_new()) +} + +///| +fn WaitableSet::drop(self : Self) -> Unit { + waitable_set_drop(self.0) +} + +// #endregion + +// #region Future + +///| +priv enum FutureReadResult { + Completed = 0 + Cancelled = 2 +} + +///| +fn FutureReadResult::from(int : Int) -> FutureReadResult { + match int { + 0 => Completed + 2 => Cancelled + _ => panic() + } +} + +///| +priv enum FutureWriteResult { + Completed = 0 + Dropped = 1 + Cancelled = 2 +} + +///| +fn FutureWriteResult::from(int : Int) -> FutureWriteResult { + match int { + 0 => Completed + 1 => Dropped + 2 => Cancelled + 
_ => panic() + } +} + +// #endregion + +// #region Stream + +///| +priv struct StreamResult { + progress : Int + copy_result : CopyResult +} + +///| +fn StreamResult::from(int : Int) -> StreamResult { + let progress = int >> 4 + let copy_result = CopyResult::from(int & 0xf) + { progress, copy_result } +} + +///| +priv enum CopyResult { + Completed = 0 + Dropped = 1 + Cancelled = 2 +} + +///| +fn CopyResult::from(int : Int) -> CopyResult { + match int { + 0 => Completed + 1 => Dropped + 2 => Cancelled + _ => panic() + } +} + +// #endregion + +// #region callback code + +///| +/// Code to let the runtime know what to do in the callback +priv enum CallbackCode { + Completed + Yield + Wait(WaitableSet) + Poll(WaitableSet) +} + +///| +fn CallbackCode::encode(self : Self) -> Int { + match self { + Completed => 0 + Yield => 1 + Wait(id) => 2 | (id.0 << 4) + Poll(id) => 3 | (id.0 << 4) + } +} + +///| +fn CallbackCode::_decode(int : Int) -> CallbackCode { + let id = int >> 4 + match int & 0xf { + 0 => Completed + 1 => Yield + 2 => Wait(id) + 3 => Poll(id) + _ => panic() + } +} + +// #endregion + +// #region Component async primitives + +///| +/// Return whether is cancelled. +/// Use for non-callback implementation. 
+fn _yield() -> Bool = "$root" "[cancellable][yield]" + +///| +pub fn backpressure_inc() = "$root" "[backpressure-inc]" + +///| +pub fn backpressure_dec() = "$root" "[backpressure-dec]" + +///| +fn subtask_cancel(id : Int) -> Int = "$root" "[subtask-cancel]" + +///| +fn subtask_drop(id : Int) = "$root" "[subtask-drop]" + +///| +pub fn context_set(task : Int) = "$root" "[context-set-0]" + +///| +pub fn context_get() -> Int = "$root" "[context-get-0]" + +///| +fn tls_set(tls : Int) = "$root" "[context-set-0]" + +///| +fn tls_get() -> Int = "$root" "[context-get-0]" + +///| +pub fn task_cancel() = "[export]$root" "[task-cancel]" + +///| +fn waitable_set_new() -> Int = "$root" "[waitable-set-new]" + +///| +fn waitable_set_drop(set : Int) = "$root" "[waitable-set-drop]" + +///| +fn waitable_join(waitable : Int, set : Int) = "$root" "[waitable-join]" + +// #endregion diff --git a/crates/moonbit/src/async/async_primitive.mbt b/crates/moonbit/src/async/async_primitive.mbt new file mode 100644 index 000000000..b603b1cc9 --- /dev/null +++ b/crates/moonbit/src/async/async_primitive.mbt @@ -0,0 +1,21 @@ +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +///| +async fn[T] async_suspend( + cb : ((T) -> Unit, (Cancelled) -> Unit) -> Unit, +) -> T raise Cancelled = "%async.suspend" + +///| +fn run_async(f : async () -> Unit noraise) = "%async.run" diff --git a/crates/moonbit/src/ffi/async_primitive.mbt b/crates/moonbit/src/async/coroutine.mbt similarity index 50% rename from crates/moonbit/src/ffi/async_primitive.mbt rename to crates/moonbit/src/async/coroutine.mbt index 3a15c1330..b46a32942 100644 --- a/crates/moonbit/src/ffi/async_primitive.mbt +++ b/crates/moonbit/src/async/coroutine.mbt @@ -1,17 +1,23 @@ -///| -async fn[T, E : Error] async_suspend( - cb : ((T) -> Unit, (E) -> Unit) -> Unit, -) -> T raise E = "%async.suspend" - -///| -fn run_async(f : async () -> Unit noraise) = "%async.run" +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. ///| priv enum State { Done Fail(Error) Running - Suspend(ok_cont~ : (Unit) -> Unit, err_cont~ : (Error) -> Unit) + Suspend(ok_cont~ : (Unit) -> Unit, err_cont~ : (Cancelled) -> Unit) } ///| @@ -21,56 +27,37 @@ struct Coroutine { mut shielded : Bool mut cancelled : Bool mut ready : Bool - downstream : Map[Int, Coroutine] + mut spawner : ((async () -> Unit) -> Unit)? 
+ downstream : Set[Coroutine] } ///| -pub impl Eq for Coroutine with equal(c1, c2) { +impl Eq for Coroutine with equal(c1, c2) { c1.coro_id == c2.coro_id } ///| -pub impl Hash for Coroutine with hash_combine(self, hasher) { +impl Hash for Coroutine with hash_combine(self, hasher) { self.coro_id.hash_combine(hasher) } ///| -pub fn Coroutine::wake(self : Coroutine) -> Unit { +fn Coroutine::wake(self : Coroutine) -> Unit { self.ready = true scheduler.run_later.push_back(self) } -///| -pub fn Coroutine::run(self : Coroutine) -> Unit { - self.ready = true - scheduler.run_later.push_front(self) -} - -///| -pub fn Coroutine::is_done(self : Coroutine) -> Bool { - match self.state { - Done => true - Fail(_) => true - Running | Suspend(_) => false - } -} - ///| pub fn is_being_cancelled() -> Bool { - current_coroutine().cancelled -} - -///| -pub fn current_coroutine_done() -> Bool { - guard scheduler.curr_coro is Some(coro) else { return true } - coro.is_done() + let coro = current_coroutine() + coro.cancelled && not(coro.shielded) } ///| pub(all) suberror Cancelled derive(Show) ///| -pub fn Coroutine::cancel(self : Coroutine) -> Unit { +fn Coroutine::cancel(self : Coroutine) -> Unit { self.cancelled = true if not(self.shielded || self.ready) { self.wake() @@ -78,7 +65,7 @@ pub fn Coroutine::cancel(self : Coroutine) -> Unit { } ///| -pub async fn pause() -> Unit { +pub async fn pause() -> Unit raise Cancelled { guard scheduler.curr_coro is Some(coro) if coro.cancelled && not(coro.shielded) { raise Cancelled::Cancelled @@ -92,7 +79,7 @@ pub async fn pause() -> Unit { } ///| -pub async fn suspend() -> Unit { +pub async fn suspend() -> Unit raise Cancelled { guard scheduler.curr_coro is Some(coro) if coro.cancelled && not(coro.shielded) { raise Cancelled::Cancelled @@ -110,23 +97,33 @@ pub async fn suspend() -> Unit { ///| pub fn spawn(f : async () -> Unit) -> Coroutine { scheduler.coro_id += 1 + let inherited_spawner = if scheduler.curr_coro is Some(parent) { + parent.spawner 
+ } else { + None + } let coro = { state: Running, ready: true, - shielded: false, - downstream: {}, + shielded: true, + downstream: Set::new(), coro_id: scheduler.coro_id, cancelled: false, + spawner: inherited_spawner, } fn run(_) { - run_async(fn() { + run_async(() => { coro.shielded = false - try f() catch { - err => coro.state = Fail(err) - } noraise { - _ => coro.state = Done + if coro.cancelled { + coro.state = Fail(Cancelled::Cancelled) + } else { + try f() catch { + err => coro.state = Fail(err) + } noraise { + _ => coro.state = Done + } } - for _, coro in coro.downstream { + for coro in coro.downstream { coro.wake() } coro.downstream.clear() @@ -139,7 +136,7 @@ pub fn spawn(f : async () -> Unit) -> Coroutine { } ///| -pub fn Coroutine::unwrap(self : Coroutine) -> Unit raise { +fn Coroutine::unwrap(self : Coroutine) -> Unit raise { match self.state { Done => () Fail(err) => raise err @@ -148,7 +145,7 @@ pub fn Coroutine::unwrap(self : Coroutine) -> Unit raise { } ///| -pub async fn Coroutine::wait(target : Coroutine) -> Unit { +async fn Coroutine::wait(target : Coroutine) -> Unit { guard scheduler.curr_coro is Some(coro) guard not(physical_equal(coro, target)) match target.state { @@ -156,10 +153,10 @@ pub async fn Coroutine::wait(target : Coroutine) -> Unit { Fail(err) => raise err Running | Suspend(_) => () } - target.downstream[coro.coro_id] = coro + target.downstream.add(coro) try suspend() catch { err => { - target.downstream.remove(coro.coro_id) + target.downstream.remove(coro) raise err } } noraise { @@ -167,6 +164,14 @@ pub async fn Coroutine::wait(target : Coroutine) -> Unit { } } +///| +fn Coroutine::check_error(coro : Coroutine) -> Unit raise { + match coro.state { + Fail(err) => raise err + Done | Running | Suspend(_) => () + } +} + ///| pub async fn protect_from_cancel(f : async () -> Unit) -> Unit { guard scheduler.curr_coro is Some(coro) @@ -184,46 +189,3 @@ pub async fn protect_from_cancel(f : async () -> Unit) -> Unit { } } } - -///| -priv 
struct Scheduler { - mut coro_id : Int - mut curr_coro : Coroutine? - mut blocking : Int - run_later : @deque.Deque[Coroutine] -} - -///| -let scheduler : Scheduler = { - coro_id: 0, - curr_coro: None, - blocking: 0, - run_later: @deque.new(), -} - -///| -pub fn current_coroutine() -> Coroutine { - scheduler.curr_coro.unwrap() -} - -///| -pub fn no_more_work() -> Bool { - scheduler.blocking == 0 && scheduler.run_later.is_empty() -} - -///| -pub fn rschedule() -> Unit { - while scheduler.run_later.pop_front() is Some(coro) { - coro.ready = false - guard coro.state is Suspend(ok_cont~, err_cont~) else { } - coro.state = Running - let last_coro = scheduler.curr_coro - scheduler.curr_coro = Some(coro) - if coro.cancelled && !coro.shielded { - err_cont(Cancelled::Cancelled) - } else { - ok_cont(()) - } - scheduler.curr_coro = last_coro - } -} diff --git a/crates/moonbit/src/async/ev.mbt b/crates/moonbit/src/async/ev.mbt new file mode 100644 index 000000000..0e90b23be --- /dev/null +++ b/crates/moonbit/src/async/ev.mbt @@ -0,0 +1,366 @@ +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +///| +priv struct EventLoop { + subscribes : Map[Int, Subscriber] + tasks : Map[WaitableSet, Coroutine] + finished : Map[WaitableSet, Bool] +} + +///| +priv struct Subscriber { + mut event : Events? 
+ coro : @set.Set[Coroutine] +} + +///| +fn current_waitableset() -> WaitableSet { + WaitableSet(tls_get()) +} + +///| +fn should_complete(waitable_set : WaitableSet) -> Bool { + ev.finished.get(waitable_set) is Some(true) && no_more_work() +} + +///| +fn next_callback(waitable_set : WaitableSet) -> Int { + if should_complete(waitable_set) { + ev.tasks.remove(waitable_set) + ev.finished.remove(waitable_set) + waitable_set.drop() + return CallbackCode::Completed.encode() + } + if has_immediately_ready_task() && ev.subscribes.is_empty() { + return CallbackCode::Yield.encode() + } + CallbackCode::Wait(waitable_set.0).encode() +} + +///| +pub fn with_waitableset(f : async () -> Unit) -> Int { + let waitable_set = WaitableSet::new() + tls_set(waitable_set.0) + let coro = spawn(async fn() -> Unit { + defer ev.finished.set(waitable_set, true) + f() + }) + ev.tasks.set(waitable_set, coro) + ev.finished.set(waitable_set, false) + reschedule() + next_callback(waitable_set) +} + +///| +pub fn cb(event : Int, waitable_id : Int, code : Int) -> Int { + let waitable_set = current_waitableset() + let events = Events::new(EventCode::from(event), waitable_id, code) + match events { + None => { + reschedule() + next_callback(waitable_set) + } + TaskCancelled => { + guard ev.tasks.get(waitable_set) is Some(coro) + coro.cancel() + for { + reschedule() + if !has_immediately_ready_task() { + break + } + } + ev.tasks.remove(waitable_set) + ev.finished.remove(waitable_set) + if ev.subscribes.is_empty() { + waitable_set.drop() + } + task_cancel() + return CallbackCode::Completed.encode() + } + _ => { + let sub = ev.subscribes.get(waitable_id) + guard sub is Some(subscriber) + subscriber.event = Some(events) + subscriber.coro.each(Coroutine::wake) + subscriber.coro.clear() + ev.subscribes.remove(waitable_id) + waitable_join(waitable_id, 0) + reschedule() + next_callback(waitable_set) + } + } +} + +///| +let ev : EventLoop = { subscribes: {}, tasks: {}, finished: {} } + +///| +pub fn 
detach_waitable(waitable_id : Int) -> Unit { + if ev.subscribes.get(waitable_id) is Some(subscriber) { + subscriber.coro.clear() + ev.subscribes.remove(waitable_id) + } + waitable_join(waitable_id, 0) +} + +///| +pub async fn suspend_for_subtask( + val : Int, + cleanup_after_started : () -> Unit, +) -> Unit { + let task = SubTask::from(val) + let mut cleaned = false + + // Helper: ensure cleanup is called once we've moved past Starting state + fn ensure_cleanup(state : SubTaskState) -> Unit { + if not(cleaned) && !(state is Starting) { + cleanup_after_started() + cleaned = true + } + } + + // Immediate completion without a handle. + if task.handle == 0 { + ensure_cleanup(task.state) + match task.state { + Returned => return + Cancelled_before_started => raise SubTaskCancelled(before_started=true) + Cancelled_before_returned => raise SubTaskCancelled(before_started=false) + _ => panic() + } + } + + defer subtask_drop(task.handle) + + // Initial state, return if finished + ensure_cleanup(task.state) + match task.state { + Returned => return + Cancelled_before_started => raise SubTaskCancelled(before_started=true) + Cancelled_before_returned => raise SubTaskCancelled(before_started=false) + _ => () + } + + // Create subscriber to wait for events + // The task has unique subscriber + guard ev.subscribes.get(task.handle) is None + let set = @set.Set::new() + let subscriber = { event: None, coro: set } + ev.subscribes.set(task.handle, subscriber) + for { + set.add(current_coroutine()) + waitable_join(task.handle, current_waitableset().0) + defer subscriber.coro.remove(current_coroutine()) + suspend() catch { + Cancelled::Cancelled => + // Cancel the subtask + return protect_from_cancel(() => { + subscriber.event = task + .cancel() + .map(state => Subtask(task.handle, state)) + while subscriber.event is None { + suspend() + } + guard subscriber.event is Some(Subtask(i, state)) && i == task.handle + ensure_cleanup(state) + match state { + Returned => return + 
Cancelled_before_started => + raise SubTaskCancelled(before_started=true) + Cancelled_before_returned => + raise SubTaskCancelled(before_started=false) + _ => panic() // should not happen + } + }) + } + + // Subsequent state, return if finished + if subscriber.event is Some(Subtask(i, state)) { + guard i == task.handle + ensure_cleanup(state) + match state { + Returned => return + Cancelled_before_started => raise SubTaskCancelled(before_started=true) + Cancelled_before_returned => + raise SubTaskCancelled(before_started=false) + _ => subscriber.event = None + } + } + } +} + +///| +pub async fn suspend_for_future_read(idx : Int, val : Int) -> Unit { + let result = if val == -1 { + let subscriber = if ev.subscribes.get(idx) is Some(subscriber) { + subscriber + } else { + let set = @set.Set::new() + let subscriber = { event: None, coro: set } + ev.subscribes.set(idx, subscriber) + subscriber + } + waitable_join(idx, current_waitableset().0) + subscriber.coro.add(current_coroutine()) + defer subscriber.coro.remove(current_coroutine()) + suspend() + guard subscriber.event is Some(FutureRead(i, result)) && i == idx + result + } else { + let result = FutureReadResult::from(val) + if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.event = Some(FutureRead(idx, result)) + subscriber.coro.each(Coroutine::wake) + subscriber.coro.clear() + ev.subscribes.remove(idx) + waitable_join(idx, 0) + } + result + } + match result { + Completed => return + Cancelled => raise FutureReadError::Cancelled + } +} + +///| +pub async fn suspend_for_future_write(idx : Int, val : Int) -> Bool { + let result = if val == -1 { + let subscriber = if ev.subscribes.get(idx) is Some(subscriber) { + subscriber + } else { + let set = @set.Set::new() + let subscriber = { event: None, coro: set } + ev.subscribes.set(idx, subscriber) + subscriber + } + waitable_join(idx, current_waitableset().0) + subscriber.coro.add(current_coroutine()) + defer subscriber.coro.remove(current_coroutine()) + 
suspend() + guard subscriber.event is Some(FutureWrite(i, result)) && i == idx + result + } else { + let result = FutureWriteResult::from(val) + if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.event = Some(FutureWrite(idx, result)) + subscriber.coro.each(Coroutine::wake) + subscriber.coro.clear() + ev.subscribes.remove(idx) + waitable_join(idx, 0) + } + result + } + match result { + Completed => true + Dropped => false + Cancelled => raise FutureWriteCancelled + } +} + +///| +pub async fn suspend_for_stream_read(idx : Int, val : Int) -> (Int, Bool) { + let { progress, copy_result } = if val == -1 { + // Blocked, wait for event + let subscriber = if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.coro.add(current_coroutine()) + subscriber + } else { + waitable_join(idx, current_waitableset().0) + let set = @set.Set::new() + set.add(current_coroutine()) + let subscriber = { event: None, coro: set } + ev.subscribes.set(idx, subscriber) + subscriber + } + defer subscriber.coro.remove(current_coroutine()) + suspend() + guard subscriber.event is Some(StreamRead(i, result)) && i == idx + result + } else { + let result = StreamResult::from(val) + if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.event = Some(StreamRead(idx, result)) + subscriber.coro.each(Coroutine::wake) + subscriber.coro.clear() + ev.subscribes.remove(idx) + waitable_join(idx, 0) + } + result + } + match copy_result { + Completed => return (progress, false) + Dropped => return (progress, true) + Cancelled => + if progress > 0 { + return (progress, false) + } else { + raise StreamReadCancelled + } + } +} + +///| +pub async fn suspend_for_stream_write(idx : Int, val : Int) -> (Int, Bool) { + let { progress, copy_result } = if val == -1 { + // Blocked, wait for event + let subscriber = if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.coro.add(current_coroutine()) + subscriber + } else { + waitable_join(idx, current_waitableset().0) + let set = @set.Set::new() + 
set.add(current_coroutine()) + let subscriber = { event: None, coro: set } + ev.subscribes.set(idx, subscriber) + subscriber + } + defer subscriber.coro.remove(current_coroutine()) + suspend() + guard subscriber.event is Some(StreamWrite(i, result)) && i == idx + result + } else { + let result = StreamResult::from(val) + if ev.subscribes.get(idx) is Some(subscriber) { + subscriber.event = Some(StreamWrite(idx, result)) + subscriber.coro.each(Coroutine::wake) + subscriber.coro.clear() + ev.subscribes.remove(idx) + waitable_join(idx, 0) + } + result + } + match copy_result { + Completed => return (progress, false) + Dropped => return (progress, true) + // FIXME(WebAssembly/component-model#490): cancellation may be observed in + // cases that should otherwise block. Treat this as no-progress so callers + // can retry. + Cancelled => return (progress, false) + } +} + +///| +pub suberror OpCancelled { + SubTaskCancelled(before_started~ : Bool) + StreamReadCancelled + FutureWriteCancelled +} + +///| +pub(all) suberror FutureReadError { + Cancelled + Dropped +} diff --git a/crates/moonbit/src/async/moon.pkg.json b/crates/moonbit/src/async/moon.pkg.json new file mode 100644 index 000000000..504a690ce --- /dev/null +++ b/crates/moonbit/src/async/moon.pkg.json @@ -0,0 +1,9 @@ +{ + "warn-list": "-44", + "import": [ + { "path": "moonbitlang/core/deque", "alias": "deque" }, + { "path": "moonbitlang/core/ref", "alias": "ref" }, + { "path": "moonbitlang/core/set", "alias": "set" } + ], + "supported-targets": ["wasm"] +} diff --git a/crates/moonbit/src/async/scheduler.mbt b/crates/moonbit/src/async/scheduler.mbt new file mode 100644 index 000000000..01457685b --- /dev/null +++ b/crates/moonbit/src/async/scheduler.mbt @@ -0,0 +1,64 @@ +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +///| +priv struct Scheduler { + mut coro_id : Int + mut curr_coro : Coroutine? + mut blocking : Int + run_later : @deque.Deque[Coroutine] +} + +///| +let scheduler : Scheduler = { + coro_id: 0, + curr_coro: None, + blocking: 0, + run_later: @deque.new(), +} + +///| +pub fn current_coroutine() -> Coroutine { + scheduler.curr_coro.unwrap() +} + +///| +pub fn has_immediately_ready_task() -> Bool { + !scheduler.run_later.is_empty() +} + +///| +pub fn no_more_work() -> Bool { + scheduler.blocking == 0 && scheduler.run_later.is_empty() +} + +///| +pub fn reschedule() -> Unit { + let mut budget = 1024 + while budget > 0 { + budget -= 1 + guard scheduler.run_later.pop_front() is Some(coro) else { break } + coro.ready = false + guard coro.state is Suspend(ok_cont~, err_cont~) else { } + coro.state = Running + let last_coro = scheduler.curr_coro + scheduler.curr_coro = Some(coro) + if coro.cancelled && not(coro.shielded) { + err_cont(Cancelled::Cancelled) + } else { + ok_cont(()) + } + scheduler.curr_coro = last_coro + } +} diff --git a/crates/moonbit/src/async/task.mbt b/crates/moonbit/src/async/task.mbt new file mode 100644 index 000000000..676936293 --- /dev/null +++ b/crates/moonbit/src/async/task.mbt @@ -0,0 +1,50 @@ +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+///|
+/// `Task[X]` represents a running task with result type `X`,
+/// it can be used to wait and retrieve the result value of the task.
+struct Task_[X] {
+  value : Ref[X?]
+  coro : Coroutine
+}
+
+///|
+/// Wait for a task and retrieve its result value.
+/// If the task fails, `wait` will also fail with the same error.
+///
+/// If the current task is cancelled, `wait` returns immediately with an error.
+pub async fn[X] Task_::wait(self : Task_[X]) -> X {
+  self.coro.wait()
+  self.value.val.unwrap()
+}
+
+///|
+/// Try to obtain the result of the task.
+/// If the task already terminated, its result value will be returned.
+/// If the task already failed, `try_wait` will fail immediately.
+/// If the task is still running, `try_wait` returns `None`.
+/// `try_wait` is a synchronous function: it never blocks.
+pub fn[X] Task_::try_wait(self : Task_[X]) -> X? raise {
+  self.coro.check_error()
+  self.value.val
+}
+
+///|
+/// Cancel a task. Subsequent attempt to wait for the task will receive error.
+/// Note that if the task is *not* spawned with `allow_failure=true`,
+/// the whole task group will fail too.
+pub fn[X] Task_::cancel(self : Task_[X]) -> Unit { + self.coro.cancel() +} diff --git a/crates/moonbit/src/async/task_group.mbt b/crates/moonbit/src/async/task_group.mbt new file mode 100644 index 000000000..25d49d3c7 --- /dev/null +++ b/crates/moonbit/src/async/task_group.mbt @@ -0,0 +1,265 @@ +// Copyright 2025 International Digital Economy Academy +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +///| +priv enum TaskGroupState { + Done + Fail(Error) + Running +} + +///| +/// A `TaskGroup` can be used to spawn children tasks that run in parallel. +/// Task groups implements *structured concurrency*: +/// a task group will only return after all its children task terminates. +/// +/// Task groups also handles *error propagation*: +/// by default, if any child task raises error, +/// the whole task group will also raise that error, +/// and all other remaining child tasks will be cancelled. +/// +/// The type parameter `X` in `TaskGroup[X]` is the result type of the group, +/// see `with_task_group` for more detail. +struct TaskGroup[X] { + children : Set[Coroutine] + parent : Coroutine + mut waiting : Int + mut state : TaskGroupState + mut result : X? + group_defer : Array[async () -> Unit] +} + +///| +/// Spawn a background task into the current task group. +/// +/// This will fail if called outside `with_task_group`. 
+pub fn spawn_bg_current(f : async () -> Unit) -> Unit {
+  let coro = current_coroutine()
+  guard coro.spawner is Some(spawn)
+  spawn(f)
+}
+
+///|
+#deprecated("this error is no longer emitted")
+pub suberror AlreadyTerminated derive(Show)
+
+///|
+fn[X] TaskGroup::spawn_coroutine(
+  self : TaskGroup[X],
+  f : async () -> Unit,
+  no_wait~ : Bool,
+  allow_failure~ : Bool,
+) -> Coroutine {
+  guard self.state is Running else {
+    abort("trying to spawn from a terminated task group")
+  }
+  if not(no_wait) {
+    self.waiting += 1
+  }
+  async fn worker() {
+    let coro = current_coroutine()
+    defer {
+      self.children.remove(coro)
+      if not(no_wait) {
+        self.waiting -= 1
+        if self.waiting == 0 && self.state is Running {
+          for child in self.children {
+            child.cancel()
+          }
+          self.state = Done
+        }
+      }
+      if self.children.is_empty() {
+        self.parent.wake()
+      }
+    }
+    guard self.state is Running else { }
+    f() catch {
+      err if allow_failure => raise err
+      err => {
+        if self.state is Running {
+          for child in self.children {
+            child.cancel()
+          }
+          self.state = Fail(err)
+        } else if not(err is Cancelled::Cancelled) {
+          self.state = Fail(err)
+        }
+        raise err
+      }
+    }
+  }
+
+  let coro = spawn(worker)
+  self.children.add(coro)
+  coro
+}
+
+///|
+/// Spawn a child task in a task group, and run it asynchronously in the background.
+///
+/// Unless `no_wait` (`false` by default) is `true`,
+/// the whole task group will only exit after this child task terminates.
+///
+/// Unless `allow_failure` (`false` by default) is `true`,
+/// the whole task group will also fail if the spawned task fails,
+/// other tasks in the group will be cancelled in this case.
+///
+/// If the task group is already cancelled or has been terminated,
+/// `spawn_bg` will fail with error and the child task will not be spawned.
+///
+/// It is undefined whether the child task will start running immediately
+/// before `spawn_bg` returns.
+pub fn[X] TaskGroup::spawn_bg( + self : TaskGroup[X], + f : async () -> Unit, + no_wait? : Bool = false, + allow_failure? : Bool = false, +) -> Unit { + ignore(self.spawn_coroutine(f, no_wait~, allow_failure~)) +} + +///| +/// Spawn a child task in a task group, compute a result asynchronously. +/// A task handle will be returned, the result value of the task can be waited +/// and retrieved using `.wait()`, or cancelled using `.cancel()`. +/// +/// Unless `no_wait` (`false` by default) is `true`, +/// the whole task group will only exit after this child task terminates. +/// +/// Unless `allow_failure` (`false` by default) is `true`, +/// the whole task group will also fail if the spawned task fails, +/// other tasks in the group will be cancelled in this case. +/// +/// If the task group is already cancelled or has been terminated, +/// `spawn` will fail with error and the child task will not be spawned. +/// +/// It is undefined whether the child task will start running immediately +/// before `spawn` returns. +pub fn[G, X] TaskGroup::spawn( + self : TaskGroup[G], + f : async () -> X, + no_wait? : Bool = false, + allow_failure? : Bool = false, +) -> Task_[X] { + let value = @ref.new(Option::None) + let coro = self.spawn_coroutine( + () => value.val = Some(f()), + no_wait~, + allow_failure~, + ) + { value, coro } +} + +///| +/// Attach a defer block, represented as a cleanup function, to a task group. +/// The cleanup function will be invoked when the group terminates. +/// Group scoped defer blocks are executed in FILO order, just like normal `defer`. +/// `with_task_group` will only exit after all group defer blocks terminate. +/// +/// Note that if the whole task group is cancelled, +/// async operations in group defer block will be cancelled immediately too. +/// Users can use `protect_from_cancel` to prevent async tasks from being cancelled. 
+/// It is highly recommended to add a hard timeout to async defer block in this case, +/// to avoid infinite hanging due to blocked operation. +pub fn[X] TaskGroup::add_defer( + self : TaskGroup[X], + block : async () -> Unit, +) -> Unit { + guard self.state is Running else { + abort("trying to attach defer to a terminated task group") + } + self.group_defer.push(block) +} + +///| +/// `with_task_group(f)` creates a new task group and run `f` with the new group. +/// `f` itself will be run in a child task of the new group. +/// `with_task_group` exits after all the whole group terminates, +/// which means all child tasks in the group have terminated, including `f`. +/// +/// If all children task terminate successfully, +/// `with_task_group` will return the result of `f`. +pub async fn[X] with_task_group(f : async (TaskGroup[X]) -> X) -> X { + let tg = { + children: Set::new(), + parent: current_coroutine(), + waiting: 0, + state: Running, + result: None, + group_defer: [], + } + let curr = current_coroutine() + let prev = curr.spawner + curr.spawner = Some(fn(child) { tg.spawn_bg(child) }) + defer { + curr.spawner = prev + } + tg.spawn_bg(() => { + let value = f(tg) + if tg.result is None { + tg.result = Some(value) + } + }) + if not(tg.children.is_empty()) { + suspend() catch { + err => + if tg.state is Running { + tg.state = Fail(err) + for child in tg.children { + child.cancel() + } + } + } + } + if not(tg.children.is_empty()) { + protect_from_cancel(() => suspend()) catch { + _ => () + } + } + tg.children.clear() + while tg.group_defer.pop() is Some(defer_block) { + defer_block() catch { + err => if tg.state is Done { tg.state = Fail(err) } + } + } + match tg.state { + Done => tg.result.unwrap() + Fail(err) => raise err + Running => panic() + } +} + +///| +/// Force a task group to terminate immediately with the given result value. +/// All child tasks in the group, including potentially the current one, +/// will be cancelled. 
+pub fn[X] TaskGroup::return_immediately( + self : TaskGroup[X], + value : X, +) -> Unit raise { + if self.result is None { + self.result = Some(value) + } + if self.state is Running { + self.state = Done + let curr_coro = current_coroutine() + for child in self.children { + if child != curr_coro { + child.cancel() + } + } + } + raise Cancelled::Cancelled +} diff --git a/crates/moonbit/src/async/trait.mbt b/crates/moonbit/src/async/trait.mbt new file mode 100644 index 000000000..ba65f10f8 --- /dev/null +++ b/crates/moonbit/src/async/trait.mbt @@ -0,0 +1,625 @@ +///| +pub(all) struct FutureR[X] { + handle : Int + get : async () -> X + drop : async () -> Unit + take_handle : () -> Int +} + +///| +pub async fn[X] FutureR::get(self : FutureR[X]) -> X { + (self.get)() +} + +///| +pub async fn[X] FutureR::drop(self : FutureR[X]) -> Unit { + (self.drop)() +} + +///| +pub fn[X] FutureR::take_handle(self : FutureR[X]) -> Int { + (self.take_handle)() +} + +///| +pub(all) struct StreamR[X] { + handle : Int + read : async (Int) -> ArrayView[X]? + close : async () -> Unit + take_handle : () -> Int +} + +///| +pub async fn[X] StreamR::read(self : StreamR[X], count : Int) -> ArrayView[X]? 
{ + (self.read)(count) +} + +///| +pub async fn[X] StreamR::close(self : StreamR[X]) -> Unit { + (self.close)() +} + +///| +pub fn[X] StreamR::take_handle(self : StreamR[X]) -> Int { + (self.take_handle)() +} + +///| +pub(all) struct Sink[X] { + write : async (ArrayView[X]) -> Int + close : async () -> Unit +} + +///| +pub async fn[X] Sink::write(self : Sink[X], data : ArrayView[X]) -> Int { + (self.write)(data) +} + +///| +pub async fn[X] Sink::close(self : Sink[X]) -> Unit { + (self.close)() +} + +///| +pub struct Promise[X] { + write : async (X) -> Unit + close : async () -> Unit +} + +///| +pub async fn[X] Promise::write(self : Promise[X], value : X) -> Unit { + (self.write)(value) +} + +///| +pub async fn[X] Promise::close(self : Promise[X]) -> Unit { + (self.close)() +} + +///| +let next_id : Ref[Int] = { val: 0 } + +///| +fn fresh_id() -> Int { + let id = next_id.val + next_id.val = id + 1 + id +} + +///| +///| +priv struct Waiter { + mut coro : Coroutine? +} + +///| +fn wake_one(waiters : @deque.Deque[Waiter]) -> Unit { + while waiters.pop_front() is Some(waiter) { + match waiter.coro { + None => () + Some(coro) => { + coro.wake() + return + } + } + } +} + +///| +fn wake_all(waiters : @deque.Deque[Waiter]) -> Unit { + while waiters.pop_front() is Some(waiter) { + match waiter.coro { + None => () + Some(coro) => coro.wake() + } + } +} + +///| +async fn wait_on(waiters : @deque.Deque[Waiter]) -> Unit { + let waiter = { coro: Some(current_coroutine()) } + waiters.push_back(waiter) + try suspend() catch { + err => { + waiter.coro = None + raise err + } + } +} + +///| +struct LocalFutureState[X] { + mut value : X? 
+ mut closed : Bool + waiters : @deque.Deque[Waiter] +} + +///| +fn[X] local_future_close(state : Ref[LocalFutureState[X]]) -> Unit { + if state.val.closed { + return + } + state.val.closed = true + wake_all(state.val.waiters) +} + +///| +fn[X] local_future_write(state : Ref[LocalFutureState[X]], value : X) -> Unit { + if state.val.closed { + panic() + } + if state.val.value is Some(_) { + panic() + } + state.val.value = Some(value) + wake_all(state.val.waiters) +} + +///| +async fn[X] local_future_get(state : Ref[LocalFutureState[X]]) -> X { + for { + match state.val.value { + Some(value) => return value + None => + if state.val.closed { + raise Cancelled::Cancelled + } else { + wait_on(state.val.waiters) + } + } + } +} + +///| +struct LocalStreamState[X] { + capacity : Int + chunks : @deque.Deque[Array[X]] + mut buffered : Int + mut closed : Bool + mut head : Array[X]? + mut head_pos : Int + readers : @deque.Deque[Waiter] + writers : @deque.Deque[Waiter] +} + +///| +fn[X] local_stream_close(state : Ref[LocalStreamState[X]]) -> Unit { + if state.val.closed { + return + } + state.val.closed = true + wake_all(state.val.readers) + wake_all(state.val.writers) +} + +///| +async fn[X] local_stream_write( + state : Ref[LocalStreamState[X]], + data : ArrayView[X], +) -> Int { + if data.length() == 0 || state.val.closed { + return 0 + } + let total = data.length() + let mut offset = 0 + for { + if offset >= total { + return total + } + if state.val.closed { + return offset + } + let capacity = state.val.capacity + let available = if capacity <= 0 { + total - offset + } else { + capacity - state.val.buffered + } + if available <= 0 { + wait_on(state.val.writers) + continue + } + let take = if available < (total - offset) { + available + } else { + total - offset + } + let chunk : Array[X] = [] + for i = 0; i < take; i = i + 1 { + chunk.push(data[offset + i]) + } + state.val.chunks.push_back(chunk) + state.val.buffered = state.val.buffered + take + offset = offset + take + 
wake_one(state.val.readers) + } + total +} + +///| +async fn[X] local_stream_read( + state : Ref[LocalStreamState[X]], + count : Int, +) -> ArrayView[X]? { + if count <= 0 { + return Some([]) + } + let result : Array[X] = [] + for { + if result.length() >= count { + return Some(result[:]) + } + match state.val.head { + Some(head) => { + let available = head.length() - state.val.head_pos + let needed = count - result.length() + let take = if needed < available { needed } else { available } + for i = 0; i < take; i = i + 1 { + result.push(head[state.val.head_pos + i]) + } + state.val.head_pos = state.val.head_pos + take + state.val.buffered = state.val.buffered - take + if state.val.head_pos >= head.length() { + state.val.head = None + state.val.head_pos = 0 + } + wake_one(state.val.writers) + continue + } + None => () + } + if state.val.chunks.pop_front() is Some(chunk) { + state.val.head = Some(chunk) + state.val.head_pos = 0 + continue + } + if result.length() > 0 { + return Some(result[:]) + } + if state.val.closed { + return None + } + wait_on(state.val.readers) + } +} + +///| +pub struct Future[X] { + id : Int + state : Ref[LocalFutureState[X]] +} + +///| +pub impl[X] Eq for Future[X] with equal(self, other) -> Bool { + self.id == other.id +} + +///| +pub impl[X] Show for Future[X] with output(self, logger) { + logger.write_string("Future(") + logger.write_string(self.id.to_string()) + logger.write_string(")") +} + +///| +pub fn[X] Future::new() -> (Future[X], Promise[X]) { + let state : Ref[LocalFutureState[X]] = { + val: { + value: None, + closed: false, + waiters: @deque.new(), + }, + } + let future = Future::{ id: fresh_id(), state } + let promise = Promise::{ + write: (value : X) => { local_future_write(state, value) }, + close: () => { local_future_close(state) }, + } + (future, promise) +} + +///| +pub fn[X] Future::ready(value : X) -> Future[X] { + let state : Ref[LocalFutureState[X]] = { + val: { + value: Some(value), + closed: false, + waiters: 
@deque.new(), + }, + } + Future::{ id: fresh_id(), state } +} + +///| +pub async fn[X] Future::get(self : Future[X]) -> X { + local_future_get(self.state) +} + +///| +pub fn[X] Future::drop(self : Future[X]) -> Unit { + local_future_close(self.state) +} + +///| +pub fn[X] Future::to_cm(self : Future[X]) -> CMFuture[X] { + CMFuture::from_local(self) +} + +///| +pub(all) struct CMFutureOutInner[X] { + mut producer : (async () -> X)? +} + +///| +pub(all) struct CMFutureOut[X] { + id : Int + inner : Ref[CMFutureOutInner[X]] +} + +///| +pub fn[X] CMFutureOut::new(producer : async () -> X) -> CMFutureOut[X] { + { id: fresh_id(), inner: { val: { producer: Some(producer) } } } +} + +///| +pub fn[X] CMFutureOut::take_producer(self : CMFutureOut[X]) -> (async () -> X) { + guard self.inner.val.producer is Some(p) + self.inner.val.producer = None + p +} + +///| +pub(all) enum CMFuture[X] { + Incoming(FutureR[X]) + Outgoing(CMFutureOut[X]) +} + +///| +pub impl[X] Eq for CMFuture[X] with equal(self, other) -> Bool { + match self { + Incoming(f) => + match other { + Incoming(g) => f.handle == g.handle + Outgoing(_) => false + } + Outgoing(f) => + match other { + Incoming(_) => false + Outgoing(g) => f.id == g.id + } + } +} + +///| +pub impl[X] Show for CMFuture[X] with output(self, logger) { + match self { + Incoming(f) => { + logger.write_string("CMFuture::Incoming(") + logger.write_string(f.handle.to_string()) + logger.write_string(")") + } + Outgoing(f) => { + logger.write_string("CMFuture::Outgoing(") + logger.write_string(f.id.to_string()) + logger.write_string(")") + } + } +} + +///| +pub fn[X] CMFuture::new() -> (CMFuture[X], Promise[X]) { + let (future, promise) = Future::new() + (CMFuture::from_local(future), promise) +} + +///| +pub fn[X] CMFuture::ready(value : X) -> CMFuture[X] { + CMFuture::Outgoing(CMFutureOut::new(() => value)) +} + +///| +pub fn[X] CMFuture::from_local(future : Future[X]) -> CMFuture[X] { + CMFuture::Outgoing(CMFutureOut::new(() => future.get())) 
+} + +///| +pub fn[X] CMFuture::from(producer : async () -> X) -> CMFuture[X] { + CMFuture::Outgoing(CMFutureOut::new(producer)) +} + +///| +pub async fn[X] CMFuture::get(self : CMFuture[X]) -> X { + match self { + Incoming(f) => f.get() + Outgoing(f) => (f.take_producer())() + } +} + +///| +pub async fn[X] CMFuture::drop(self : CMFuture[X]) -> Unit { + match self { + Incoming(f) => f.drop() + Outgoing(_) => () + } +} + +///| +pub struct Stream[X] { + id : Int + state : Ref[LocalStreamState[X]] +} + +///| +pub impl[X] Eq for Stream[X] with equal(self, other) -> Bool { + self.id == other.id +} + +///| +pub impl[X] Show for Stream[X] with output(self, logger) { + logger.write_string("Stream(") + logger.write_string(self.id.to_string()) + logger.write_string(")") +} + +///| +pub fn[X] Stream::new(capacity? : Int = 0) -> (Stream[X], Sink[X]) { + let cap = if capacity < 0 { 0 } else { capacity } + let state : Ref[LocalStreamState[X]] = { + val: { + capacity: cap, + chunks: @deque.new(), + buffered: 0, + closed: false, + head: None, + head_pos: 0, + readers: @deque.new(), + writers: @deque.new(), + }, + } + let stream = Stream::{ id: fresh_id(), state } + let sink = Sink::{ + write: (data : ArrayView[X]) => { local_stream_write(state, data) }, + close: () => { local_stream_close(state) }, + } + (stream, sink) +} + +///| +pub async fn[X] Stream::read(self : Stream[X], count : Int) -> ArrayView[X]? { + local_stream_read(self.state, count) +} + +///| +pub fn[X] Stream::close(self : Stream[X]) -> Unit { + local_stream_close(self.state) +} + +///| +pub fn[X] Stream::to_cm(self : Stream[X]) -> CMStream[X] { + CMStream::from_local(self) +} + +///| +pub(all) struct CMStreamOutInner[X] { + mut producer : (async (Sink[X]) -> Unit)? 
+} + +///| +pub(all) struct CMStreamOut[X] { + id : Int + inner : Ref[CMStreamOutInner[X]] +} + +///| +pub fn[X] CMStreamOut::new(producer : async (Sink[X]) -> Unit) -> CMStreamOut[X] { + { id: fresh_id(), inner: { val: { producer: Some(producer) } } } +} + +///| +pub fn[X] CMStreamOut::take_producer(self : CMStreamOut[X]) -> (async (Sink[X]) -> Unit) { + guard self.inner.val.producer is Some(p) + self.inner.val.producer = None + p +} + +///| +pub(all) enum CMStream[X] { + Incoming(StreamR[X]) + Outgoing(CMStreamOut[X]) +} + +///| +pub impl[X] Eq for CMStream[X] with equal(self, other) -> Bool { + match self { + Incoming(s) => + match other { + Incoming(t) => s.handle == t.handle + Outgoing(_) => false + } + Outgoing(s) => + match other { + Incoming(_) => false + Outgoing(t) => s.id == t.id + } + } +} + +///| +pub impl[X] Show for CMStream[X] with output(self, logger) { + match self { + Incoming(s) => { + logger.write_string("CMStream::Incoming(") + logger.write_string(s.handle.to_string()) + logger.write_string(")") + } + Outgoing(s) => { + logger.write_string("CMStream::Outgoing(") + logger.write_string(s.id.to_string()) + logger.write_string(")") + } + } +} + +///| +pub fn[X] CMStream::from(producer : async (Sink[X]) -> Unit) -> CMStream[X] { + CMStream::Outgoing(CMStreamOut::new(producer)) +} + +///| +async fn[X] cm_stream_write_all(sink : Sink[X], data : ArrayView[X]) -> Bool { + if data.length() == 0 { + return true + } + let pending : Array[X] = [] + for i = 0; i < data.length(); i = i + 1 { + pending.push(data[i]) + } + let mut offset = 0 + for { + if offset >= pending.length() { + return true + } + let written = sink.write(pending[offset:]) + if written <= 0 { + return false + } + offset = offset + written + } +} + +///| +pub fn[X] CMStream::from_local(stream : Stream[X], chunk_size? 
: Int = 64) -> CMStream[X] { + let read_size = if chunk_size <= 0 { 64 } else { chunk_size } + CMStream::Outgoing(CMStreamOut::new(async fn(sink : Sink[X]) { + for { + match stream.read(read_size) { + Some(data) => { + if not(cm_stream_write_all(sink, data)) { + stream.close() + return + } + } + None => { + sink.close() + return + } + } + } + })) +} + +///| +pub async fn[X] CMStream::read(self : CMStream[X], count : Int) -> ArrayView[X]? { + match self { + Incoming(s) => s.read(count) + Outgoing(_) => panic() + } +} + +///| +pub async fn[X] CMStream::close(self : CMStream[X]) -> Unit { + match self { + Incoming(s) => s.close() + Outgoing(_) => () + } +} diff --git a/crates/moonbit/src/async_support.rs b/crates/moonbit/src/async_support.rs index 7975c6990..bbec007ae 100644 --- a/crates/moonbit/src/async_support.rs +++ b/crates/moonbit/src/async_support.rs @@ -3,55 +3,74 @@ use std::{ fmt::Write, }; -use heck::{ToSnakeCase, ToUpperCamelCase}; +use heck::ToUpperCamelCase; use wit_bindgen_core::{ - Files, Source, - abi::{self, WasmSignature}, - uwriteln, - wit_parser::{Function, Param, Resolve, Type, TypeDefKind, TypeId}, + Direction, Files, Source, + abi::{self, WasmSignature, deallocate_lists_in_types, lift_from_memory}, + dealias, uwriteln, + wit_parser::{ + Function, LiftLowerAbi, ManglingAndAbi, Param, Type, TypeDefKind, TypeId, WasmImport, + }, }; -use crate::{ - FFI, FFI_DIR, indent, - pkg::{MoonbitSignature, ToMoonBitIdent}, -}; +use crate::pkg::ToMoonBitIdent; +use crate::{FunctionBindgen, ffi, indent}; -use super::{FunctionBindgen, InterfaceGenerator, PayloadFor}; +use super::InterfaceGenerator; -const ASYNC_PRIMITIVE: &str = include_str!("./ffi/async_primitive.mbt"); -const ASYNC_FUTURE: &str = include_str!("./ffi/future.mbt"); -const ASYNC_WASM_PRIMITIVE: &str = include_str!("./ffi/wasm_primitive.mbt"); -const ASYNC_WAITABLE_SET: &str = include_str!("./ffi/waitable_task.mbt"); -const ASYNC_SUBTASK: &str = include_str!("./ffi/subtask.mbt"); +// NEW Async 
Impl +const ASYNC_ABI: &str = include_str!("./async/async_abi.mbt"); +const ASYNC_CORO: &str = include_str!("./async/coroutine.mbt"); +const ASYNC_EV: &str = include_str!("./async/ev.mbt"); +const ASYNC_SCHEDULER: &str = include_str!("./async/scheduler.mbt"); +const ASYNC_TASK: &str = include_str!("./async/task.mbt"); +const ASYNC_TASK_GROUP: &str = include_str!("./async/task_group.mbt"); +const ASYNC_TRAIT: &str = include_str!("./async/trait.mbt"); +const ASYNC_PKG_JSON: &str = include_str!("./async/moon.pkg.json"); +const ASYNC_PRIM: &str = include_str!("./async/async_primitive.mbt"); struct Segment<'a> { name: &'a str, src: &'a str, } -const ASYNC_UTILS: [&Segment; 5] = [ +const ASYNC_IMPL: [&Segment; 8] = [ &Segment { - name: "async_primitive", - src: ASYNC_PRIMITIVE, + name: "async_abi", + src: ASYNC_ABI, }, &Segment { - name: "async_future", - src: ASYNC_FUTURE, + name: "async_coro", + src: ASYNC_CORO, }, &Segment { - name: "async_wasm_primitive", - src: ASYNC_WASM_PRIMITIVE, + name: "async_ev", + src: ASYNC_EV, }, &Segment { - name: "async_waitable_set", - src: ASYNC_WAITABLE_SET, + name: "async_scheduler", + src: ASYNC_SCHEDULER, }, &Segment { - name: "async_subtask", - src: ASYNC_SUBTASK, + name: "async_task", + src: ASYNC_TASK, + }, + &Segment { + name: "async_task_group", + src: ASYNC_TASK_GROUP, + }, + &Segment { + name: "async_trait", + src: ASYNC_TRAIT, + }, + &Segment { + name: "async_primitive", + src: ASYNC_PRIM, }, ]; +pub(crate) const ASYNC_DIR: &str = "async"; + #[derive(Default)] pub(crate) struct AsyncSupport { is_async: bool, @@ -63,456 +82,740 @@ impl AsyncSupport { self.is_async = true; } - pub(crate) fn register_future_or_stream(&mut self, module: &str, ty: TypeId) -> bool { - self.futures - .entry(module.to_string()) - .or_default() - .insert(ty) - } - - pub(crate) fn emit_utils(&self, files: &mut Files, version: &str) { + pub(crate) fn emit_utils(&self, files: &mut Files) { if !self.is_async && self.futures.is_empty() { return; } - let 
mut body = Source::default(); - wit_bindgen_core::generated_preamble(&mut body, version); - body.push_str(FFI); - files.push(&format!("{FFI_DIR}/top.mbt"), indent(&body).as_bytes()); - ASYNC_UTILS.iter().for_each(|s| { + ASYNC_IMPL.iter().for_each(|s| { files.push( - &format!("{FFI_DIR}/{}.mbt", s.name), + &format!("{ASYNC_DIR}/{}.mbt", s.name), indent(s.src).as_bytes(), ); }); files.push( - &format!("{FFI_DIR}/moon.pkg.json"), - "{ \"warn-list\": \"-44\", \"supported-targets\": [\"wasm\"] }".as_bytes(), + &format!("{ASYNC_DIR}/moon.pkg.json"), + indent(ASYNC_PKG_JSON).as_bytes(), ); } } +pub(crate) struct AsyncBindingEntry { + pub lift_name: String, + pub lift_src: String, + pub lift_builtins: HashSet<&'static str>, + pub lower_name: String, + pub lower_src: String, + pub lower_builtins: HashSet<&'static str>, +} + +pub(crate) struct AsyncBinding(pub HashMap); + /// Async-specific helpers used by `InterfaceGenerator` to keep the main /// visitor implementation focused on shared lowering/lifting logic. impl<'a> InterfaceGenerator<'a> { - /// Builds the MoonBit body for async imports, wiring wasm subtasks into the - /// runtime and lowering/lifting payloads as needed. - pub(super) fn generate_async_import_function( + pub(crate) fn generate_async_import( &mut self, func: &Function, - mbt_sig: MoonbitSignature, - sig: &WasmSignature, + ffi_import_name: &str, + wasm_sig: &WasmSignature, ) -> String { - let mut body = String::default(); - let mut lower_params = Vec::new(); - let mut lower_results = Vec::new(); - - if sig.indirect_params { - match &func.params[..] { - [] => {} - [_] => { - lower_params.push("_lower_ptr".into()); - } - multiple_params => { - let params = multiple_params.iter().map(|Param { ty, .. 
}| ty); - let offsets = self.r#gen.sizes.field_offsets(params.clone()); - let elem_info = self.r#gen.sizes.params(params); - body.push_str(&format!( - r#" - let _lower_ptr : Int = {ffi}malloc({}) - "#, - elem_info.size.size_wasm32(), - ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR) - )); - - for ((offset, ty), name) in offsets.iter().zip( - multiple_params - .iter() - .map(|Param { name, .. }| name.to_moonbit_ident()), - ) { - let result = self.lower_to_memory( - &format!("_lower_ptr + {}", offset.size_wasm32()), - &name, - ty, - self.name, - ); - body.push_str(&result); - } - - lower_params.push("_lower_ptr".into()); - } + let async_pkg = self + .world_gen + .pkg_resolver + .qualify_package(self.name, ASYNC_DIR); + let param_names = func + .params + .iter() + .map(|Param { name, .. }| name.to_moonbit_ident()) + .collect::>(); + let param_types = func + .params + .iter() + .map(|Param { ty, .. }| *ty) + .collect::>(); + let mut bindgen = FunctionBindgen::new( + self, + param_names.into_boxed_slice(), + Direction::Import, + true, + false, + ); + let mut lowered_params = Vec::new(); + + let params_ptr = if wasm_sig.indirect_params { + let params_info = bindgen + .interface_gen + .world_gen + .sizes + .record(param_types.iter()); + let params_ptr = bindgen.locals.tmp("params_ptr"); + bindgen.use_ffi(ffi::MALLOC); + uwriteln!( + bindgen.src, + "let {params_ptr} = mbt_ffi_malloc({});", + params_info.size.size_wasm32() + ); + let offsets = bindgen + .interface_gen + .world_gen + .sizes + .field_offsets(param_types.iter()); + for (i, (offset, ty)) in offsets.into_iter().enumerate() { + let param_ptr = bindgen.locals.tmp("param_ptr"); + let arg = bindgen.params[i].clone(); + uwriteln!( + bindgen.src, + "let {param_ptr} = {params_ptr} + {};", + offset.size_wasm32() + ); + abi::lower_to_memory( + bindgen.interface_gen.resolve, + &mut bindgen, + param_ptr, + arg, + ty, + ); } + lowered_params.push(params_ptr.clone()); + Some(params_ptr) } else { - let mut f 
= FunctionBindgen::new(self, "INVALID", self.name, Box::new([])); - for (name, ty) in mbt_sig.params.iter() { - lower_params.extend(abi::lower_flat(f.r#gen.resolve, &mut f, name.clone(), ty)); + for (i, ty) in param_types.iter().enumerate() { + let arg = bindgen.params[i].clone(); + lowered_params.extend(abi::lower_flat( + bindgen.interface_gen.resolve, + &mut bindgen, + arg, + ty, + )); } - lower_results.push(f.src.clone()); - } - - let func_name = func.name.to_upper_camel_case(); + None + }; + let cleaned = bindgen.locals.tmp("cleaned"); + uwriteln!(bindgen.src, "let {cleaned} : Ref[Bool] = {{ val: false }}"); + + let results_ptr = if func.result.is_some() { + let result_info = bindgen.interface_gen.world_gen.sizes.params(&func.result); + let results_ptr = bindgen.locals.tmp("results_ptr"); + bindgen.use_ffi(ffi::MALLOC); + bindgen.use_ffi(ffi::FREE); + uwriteln!( + bindgen.src, + "let {results_ptr} = mbt_ffi_malloc({});\n\ +defer mbt_ffi_free({results_ptr})", + result_info.size.size_wasm32() + ); + Some(results_ptr) + } else { + None + }; - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); + let mut call_args = lowered_params.clone(); + if let Some(results_ptr) = &results_ptr { + call_args.push(results_ptr.clone()); + } + let subtask = bindgen.locals.tmp("subtask"); + uwriteln!( + bindgen.src, + "let {subtask} = {ffi_import_name}({});", + call_args.join(", ") + ); - let call_import = |params: &Vec| { - format!( - r#" - let _subtask_code = wasmImport{func_name}({}) - let _subtask_status = {ffi}SubtaskStatus::decode(_subtask_code) - let _subtask = @ffi.Subtask::from_handle(_subtask_status.handle(), code=_subtask_code) - - let task = @ffi.current_task() - task.add_waitable(_subtask, @ffi.current_coroutine()) - defer task.remove_waitable(_subtask) - - for {{ - if _subtask.done() || _subtask_status is Returned(_) {{ - break - }} else {{ - @ffi.suspend() - }} - }} - - "#, - params.join(", ") - ) + let cleanup_params = 
bindgen.locals.tmp("cleanup_params"); + uwriteln!( + bindgen.src, + "fn {cleanup_params}() -> Unit {{\n if {cleaned}.val {{ return }}\n {cleaned}.val = true" + ); + let dealloc_operands = if wasm_sig.indirect_params { + vec![params_ptr.clone().unwrap()] + } else { + lowered_params.clone() }; - match &func.result { - Some(ty) => { - lower_params.push("_result_ptr".into()); - let call_import = call_import(&lower_params); - let (lift, lift_result) = &self.lift_from_memory("_result_ptr", ty, self.name); - body.push_str(&format!( - r#" - {} - {} - {call_import} - {lift} - {lift_result} - "#, - lower_results.join("\n"), - &self.malloc_memory("_result_ptr", "1", ty) - )); - } - None => { - let call_import = call_import(&lower_params); - body.push_str(&call_import); - } + deallocate_lists_in_types( + bindgen.interface_gen.resolve, + ¶m_types, + &dealloc_operands, + wasm_sig.indirect_params, + &mut bindgen, + ); + if let Some(params_ptr) = ¶ms_ptr { + bindgen.use_ffi(ffi::FREE); + uwriteln!(bindgen.src, " mbt_ffi_free({params_ptr})"); } + uwriteln!( + bindgen.src, + "}}\nfn cleanup_after_started() -> Unit {{ {cleanup_params}() }}\n\ +defer {cleanup_params}()\n{async_pkg}suspend_for_subtask({subtask}, cleanup_after_started)", + ); - body.to_string() + if let Some(result) = func.result { + let lifted = lift_from_memory( + bindgen.interface_gen.resolve, + &mut bindgen, + results_ptr.clone().unwrap(), + &result, + ); + uwriteln!(bindgen.src, "return {lifted}"); + } + + let builtins = bindgen.take_local_ffi_imports(); + let src = bindgen.src; + self.ffi_imports.extend(builtins); + src } - /// Ensures async futures and streams referenced by `func` have their helper - /// import tables generated for the given module prefix. 
- pub(super) fn generation_futures_and_streams_import( - &mut self, - prefix: &str, - func: &Function, - module: &str, - ) { - let module = format!("{prefix}{module}"); - for (index, ty) in func - .find_futures_and_streams(self.resolve) - .into_iter() - .enumerate() - { - let func_name = &func.name; - - match &self.resolve.types[ty].kind { - TypeDefKind::Future(payload_type) => { - self.r#generate_async_future_or_stream_import( - PayloadFor::Future, - &module, - index, - func_name, - ty, - payload_type.as_ref(), - ); + /// Generate the async bindings for this function. + /// + /// Note that these bindings may be referenced while generating other async + /// bindings (e.g. `future }>`), so this method + /// populates `self.bindings` incrementally. + pub(crate) fn generate_async_binding(&mut self, func: &Function) { + self.bindings.0.clear(); + let futures_and_streams = func.find_futures_and_streams(self.resolve); + let (module, func_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmImport::Func { + interface: self.interface, + func, + }, + ); + for (idx, type_) in futures_and_streams.iter().enumerate() { + let ty = dealias(self.resolve, *type_); + match self.resolve.types[ty].kind { + TypeDefKind::Future(_) => { + let binding = self.generate_future_binding(ty, idx, &module, &func_name); + self.bindings.0.insert(ty, binding); } - TypeDefKind::Stream(payload_type) => { - self.r#generate_async_future_or_stream_import( - PayloadFor::Stream, - &module, - index, - func_name, - ty, - payload_type.as_ref(), - ); + TypeDefKind::Stream(_) => { + let binding = self.generate_stream_binding(ty, idx, &module, &func_name); + self.bindings.0.insert(ty, binding); } - _ => unreachable!(), + _ => unreachable!("Expected future and stream"), } } } - fn generate_async_future_or_stream_import( + pub(crate) fn generate_future_binding( &mut self, - payload_for: PayloadFor, - module: &str, + ty: TypeId, index: usize, + module: &str, func_name: &str, - 
ty: TypeId, - result_type: Option<&Type>, - ) { - if !self - .r#gen - .async_support - .register_future_or_stream(module, ty) - { - return; - } - let result = match result_type { - Some(ty) => self.r#gen.pkg_resolver.type_name(self.name, ty), - None => "Unit".into(), + ) -> AsyncBindingEntry { + let mut lift = Source::default(); + let mut lower = Source::default(); + + let camel_name = func_name.to_upper_camel_case(); + let lifted_func_name = format!("wasmLift{camel_name}{index}"); + let lowered_func_name = format!("wasmLower{camel_name}{index}"); + let async_qualifier = self + .world_gen + .pkg_resolver + .qualify_package(self.name, ASYNC_DIR); + let module = if self.direction == Direction::Export && !module.starts_with("[export]") { + format!("[export]{module}") + } else { + module.to_string() }; + let lifted = self + .world_gen + .pkg_resolver + .type_name(self.name, &Type::Id(ty)); - let type_name = self.r#gen.pkg_resolver.type_name(self.name, &Type::Id(ty)); - let name = result.to_upper_camel_case(); - let kind = match payload_for { - PayloadFor::Future => "future", - PayloadFor::Stream => "stream", - }; - let table_name = format!("{}_{}_table", type_name.to_snake_case(), kind); - let camel_kind = kind.to_upper_camel_case(); - let payload_len_arg = match payload_for { - PayloadFor::Future => "", - PayloadFor::Stream => " ,length : Int", - }; + // write intrinsics + uwriteln!( + lift, + r#" +fn wasmLift{camel_name}{index}Read(handle : Int, ptr : Int) -> Int = "{module}" "[async-lower][future-read-{index}]{func_name}" +fn wasmLift{camel_name}{index}CancelRead(_ : Int) -> Int = "{module}" "[future-cancel-read-{index}]{func_name}" +fn wasmLift{camel_name}{index}DropReadable(_ : Int) = "{module}" "[future-drop-readable-{index}]{func_name}" + "#, + ); + uwriteln!( + lower, + r#" +fn wasmLower{camel_name}{index}New() -> UInt64 = "{module}" "[future-new-{index}]{func_name}" +fn wasmLower{camel_name}{index}Write(handle : Int, ptr : Int) -> Int = "{module}" 
"[future-write-{index}]{func_name}" +fn _wasmLower{camel_name}{index}CancelWrite(_ : Int) -> Int = "{module}" "[future-cancel-write-{index}]{func_name}" +fn wasmLower{camel_name}{index}DropWritable(_ : Int) = "{module}" "[future-drop-writable-{index}]{func_name}" + "# + ); - let payload_lift_func = match payload_for { - PayloadFor::Future => "", - PayloadFor::Stream => "List", + // generate function + let size = if let TypeDefKind::Future(Some(inner_ty)) = self.resolve.types[ty].kind { + self.world_gen.sizes.size(&inner_ty).size_wasm32() + } else { + 0 }; - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); - - let mut dealloc_list; - let malloc; - let lift; - let lower; - let lift_result; - let lift_list: String; - let lower_list: String; - if let Some(result_type) = result_type { - (lift, lift_result) = self.lift_from_memory("ptr", result_type, module); - lower = self.lower_to_memory("ptr", "value", result_type, module); - dealloc_list = self.deallocate_lists( - std::slice::from_ref(result_type), - &[String::from("ptr")], - true, - module, - ); - lift_list = self.list_lift_from_memory( - "ptr", - "length", - &format!("wasm{name}{kind}Lift"), - result_type, - ); - lower_list = - self.list_lower_to_memory(&format!("wasm{name}{kind}Lower"), "value", result_type); - - malloc = self.malloc_memory("ptr", "length", result_type); + uwriteln!( + lift, + r#" +fn wasmLift{camel_name}{index}(future_handle : Int) -> {lifted} {{ + let mut result = None + let mut dropped = false + let mut reading = 0 + async fn drop() {{ + if !dropped && reading > 0 {{ + let cancel = wasmLift{camel_name}{index}CancelRead(future_handle) + if cancel == -1 {{ + {async_qualifier}detach_waitable(future_handle) + }} else {{ + {async_qualifier}suspend_for_future_read( + future_handle, + cancel + ) catch {{ + {async_qualifier}FutureReadError::Cancelled => () + _ => panic() + }} + }} + }} + if !dropped {{ + dropped = true + wasmLift{camel_name}{index}DropReadable(future_handle) + }} 
+ }} + {async_qualifier}CMFuture::Incoming({async_qualifier}FutureR::{{ + handle: future_handle, + get: () => {{ + if result is Some(r) {{ + return r + }} + if dropped {{ + raise {async_qualifier}FutureReadError::Dropped + }} + let ptr = mbt_ffi_malloc({size}) + defer mbt_ffi_free(ptr) + {{ + let mut read_cancelled = false + reading += 1 + defer {{ + if !read_cancelled {{ + reading -= 1 + }} + }} + {async_qualifier}suspend_for_future_read( + future_handle, + wasmLift{camel_name}{index}Read(future_handle, ptr), + ) catch {{ + err => {{ + if err is {async_qualifier}Cancelled::Cancelled {{ + read_cancelled = true + }} + drop() catch {{ + _ => () + }} + raise err + }} + }} + }} + result = {{ + "# + ); + let (operand, lift_builtins) = + if let TypeDefKind::Future(Some(ty)) = self.resolve.types[ty].kind { + // TODO : solve ownership + let resolve = self.resolve.clone(); + let mut bindgen = + FunctionBindgen::new(self, Box::new([]), Direction::Import, true, false); + bindgen.use_ffi(ffi::MALLOC); + bindgen.use_ffi(ffi::FREE); + let operand = lift_from_memory(&resolve, &mut bindgen, "ptr".to_string(), &ty); + uwriteln!(lift, "{}", bindgen.src); + (operand, bindgen.take_local_ffi_imports()) + } else { + let mut builtins = HashSet::new(); + builtins.insert(ffi::MALLOC); + builtins.insert(ffi::FREE); + ("()".into(), builtins) + }; - if dealloc_list.is_empty() { - dealloc_list = "let _ = ptr".to_string(); - } - } else { - lift = "let _ = ptr".to_string(); - lower = "let _ = (ptr, value)".to_string(); - dealloc_list = "let _ = ptr".to_string(); - malloc = "let ptr = 0;".into(); - lift_result = "".into(); - lift_list = "FixedArray::make(length, Unit::default())".into(); - lower_list = "0".into(); - } + // lift from memory if it were actual data + uwriteln!( + lift, + r#" + Some({operand}) + }} + drop() + result.unwrap() + }}, + drop, + take_handle: fn () {{ + if dropped || reading > 0 {{ + panic() + }} + dropped = true + future_handle + }} + }}) +}} +"# + ); - let (mut 
lift_func, mut lower_func) = if result_type - .is_some_and(|ty| self.is_list_canonical(self.resolve, ty)) - && matches!(payload_for, PayloadFor::Stream) - { - ("".into(), "".into()) + // Generate the lower function body + let inner_type = if let TypeDefKind::Future(Some(inner_ty)) = self.resolve.types[ty].kind { + Some(inner_ty) } else { - ( - format!( - r#" - fn wasm{name}{kind}Lift(ptr: Int) -> {result} {{ - {lift} - {lift_result} - }} - "# - ), - format!( - r#" - fn wasm{name}{kind}Lower(value: {result}, ptr: Int) -> Unit {{ - {lower} - }} - "# - ), - ) + None }; - if matches!(payload_for, PayloadFor::Stream) { - lift_func.push_str(&format!( - r#" - fn wasm{name}{kind}ListLift(ptr: Int, length: Int) -> FixedArray[{result}] {{ - {lift_list} - }} - "# - )); + uwriteln!( + lower, + r#" +fn wasmLower{camel_name}{index}(future : {lifted}) -> Int {{ + match future {{ + {async_qualifier}CMFuture::Incoming(f) => f.take_handle() + {async_qualifier}CMFuture::Outgoing(f) => {{ + let handles = wasmLower{camel_name}{index}New() + let readable = (handles & 0xFFFFFFFF).to_int() + let writable = (handles >> 32).to_int() + let producer = f.take_producer() + {async_qualifier}backpressure_inc() + {async_qualifier}spawn_bg_current(async fn() {{ + defer {async_qualifier}backpressure_dec() + defer wasmLower{camel_name}{index}DropWritable(writable)"# + ); - lower_func.push_str(&format!( + let lower_builtins = if let Some(inner_ty) = inner_type { + let resolve = self.resolve.clone(); + let mut bindgen = + FunctionBindgen::new(self, Box::new([]), Direction::Export, true, false); + bindgen.use_ffi(ffi::MALLOC); + bindgen.use_ffi(ffi::FREE); + uwriteln!( + lower, + r#" + let value = producer() + let ret_area = mbt_ffi_malloc({size}) + defer mbt_ffi_free(ret_area)"# + ); + abi::lower_to_memory( + &resolve, + &mut bindgen, + "ret_area".to_string(), + "value".to_string(), + &inner_ty, + ); + uwriteln!(lower, "{}", bindgen.src); + uwriteln!( + lower, r#" - fn wasm{name}{kind}ListLower(value: 
FixedArray[{result}]) -> Int {{ - {lower_list} - }} - "# - )); + let _ = {async_qualifier}suspend_for_future_write(writable, wasmLower{camel_name}{index}Write(writable, ret_area)) catch {{ _ => false }}"# + ); + bindgen.take_local_ffi_imports() + } else { + // Unit type - no value to write, just complete the future + uwriteln!( + lower, + r#" + let _ = producer() + let _ = {async_qualifier}suspend_for_future_write(writable, wasmLower{camel_name}{index}Write(writable, 0)) catch {{ _ => false }}"# + ); + HashSet::new() }; uwriteln!( - self.ffi, + lower, r#" -fn wasmImport{name}{kind}New() -> UInt64 = "{module}" "[{kind}-new-{index}]{func_name}" -fn wasmImport{name}{kind}Read(handle : Int, buffer_ptr : Int{payload_len_arg}) -> Int = "{module}" "[async-lower][{kind}-read-{index}]{func_name}" -fn wasmImport{name}{kind}Write(handle : Int, buffer_ptr : Int{payload_len_arg}) -> Int = "{module}" "[async-lower][{kind}-write-{index}]{func_name}" -fn wasmImport{name}{kind}CancelRead(handle : Int) -> Int = "{module}" "[{kind}-cancel-read-{index}]{func_name}" -fn wasmImport{name}{kind}CancelWrite(handle : Int) -> Int = "{module}" "[{kind}-cancel-write-{index}]{func_name}" -fn wasmImport{name}{kind}DropReadable(handle : Int) = "{module}" "[{kind}-drop-readable-{index}]{func_name}" -fn wasmImport{name}{kind}DropWritable(handle : Int) = "{module}" "[{kind}-drop-writable-{index}]{func_name}" -fn wasm{name}{kind}Deallocate(ptr: Int) -> Unit {{ - {dealloc_list} -}} -fn wasm{name}{kind}Malloc(length: Int) -> Int {{ - {malloc} - ptr -}} - -fn {table_name}() -> {ffi}{camel_kind}VTable[{result}] {{ - {ffi}{camel_kind}VTable::new( - wasmImport{name}{kind}New, - wasmImport{name}{kind}Read, - wasmImport{name}{kind}Write, - wasmImport{name}{kind}CancelRead, - wasmImport{name}{kind}CancelWrite, - wasmImport{name}{kind}DropReadable, - wasmImport{name}{kind}DropWritable, - wasm{name}{kind}Malloc, - wasm{name}{kind}Deallocate, - wasm{name}{kind}{payload_lift_func}Lift, - 
wasm{name}{kind}{payload_lift_func}Lower, - ) -}} -{lift_func} -{lower_func} -"# + }}) + readable + }} + }} +}}"# ); + let lower_src = lower.to_string(); + AsyncBindingEntry { + lift_name: lifted_func_name, + lift_src: lift.to_string(), + lift_builtins, + lower_name: lowered_func_name, + lower_src, + lower_builtins, + } } - fn deallocate_lists( + pub(crate) fn generate_stream_binding( &mut self, - types: &[Type], - operands: &[String], - indirect: bool, + ty: TypeId, + index: usize, module: &str, - ) -> String { - let mut f = FunctionBindgen::new(self, "INVALID", module, Box::new([])); - abi::deallocate_lists_in_types(f.r#gen.resolve, types, operands, indirect, &mut f); - f.src - } + func_name: &str, + ) -> AsyncBindingEntry { + let mut lift = Source::default(); + let mut lower = Source::default(); + + let camel_name = func_name.to_upper_camel_case(); + let lifted_func_name = format!("wasmLift{camel_name}{index}"); + let lowered_func_name = format!("wasmLower{camel_name}{index}"); + let async_qualifier = self + .world_gen + .pkg_resolver + .qualify_package(self.name, ASYNC_DIR); + let module = if self.direction == Direction::Export && !module.starts_with("[export]") { + format!("[export]{module}") + } else { + module.to_string() + }; + let lifted = self + .world_gen + .pkg_resolver + .type_name(self.name, &Type::Id(ty)); - fn lift_from_memory(&mut self, address: &str, ty: &Type, module: &str) -> (String, String) { - let mut f = FunctionBindgen::new(self, "INVALID", module, Box::new([])); + // write intrinsics + uwriteln!( + lift, + r#" +fn wasmLift{camel_name}{index}Read(handle : Int, ptr : Int, len : Int) -> Int = "{module}" "[async-lower][stream-read-{index}]{func_name}" +fn wasmLift{camel_name}{index}CancelRead(_ : Int) -> Int = "{module}" "[stream-cancel-read-{index}]{func_name}" +fn wasmLift{camel_name}{index}DropReadable(_ : Int) = "{module}" "[stream-drop-readable-{index}]{func_name}" + "#, + ); + uwriteln!( + lower, + r#" +fn 
wasmLower{camel_name}{index}New() -> UInt64 = "{module}" "[stream-new-{index}]{func_name}" +fn wasmLower{camel_name}{index}Write(handle : Int, ptr : Int, len : Int) -> Int = "{module}" "[stream-write-{index}]{func_name}" +fn _wasmLower{camel_name}{index}CancelWrite(_ : Int) -> Int = "{module}" "[stream-cancel-write-{index}]{func_name}" +fn wasmLower{camel_name}{index}DropWritable(_ : Int) = "{module}" "[stream-drop-writable-{index}]{func_name}" + "# + ); - let result = abi::lift_from_memory(f.r#gen.resolve, &mut f, address.into(), ty); - (f.src, result) - } + // Get element type and size + let inner_type = if let TypeDefKind::Stream(Some(inner_ty)) = self.resolve.types[ty].kind { + Some(inner_ty) + } else { + None + }; + let elem_size = inner_type + .map(|t| self.world_gen.sizes.size(&t).size_wasm32()) + .unwrap_or(0); - fn lower_to_memory(&mut self, address: &str, value: &str, ty: &Type, module: &str) -> String { - let mut f = FunctionBindgen::new(self, "INVALID", module, Box::new([])); - abi::lower_to_memory(f.r#gen.resolve, &mut f, address.into(), value.into(), ty); - f.src - } + // Generate lift function (StreamR from handle) + uwriteln!( + lift, + r#" +fn wasmLift{camel_name}{index}(stream_handle : Int) -> {lifted} {{ + let mut closed = false + let mut reading = 0 + async fn close() {{ + if !closed && reading > 0 {{ + let _ = {async_qualifier}suspend_for_stream_read( + stream_handle, + wasmLift{camel_name}{index}CancelRead(stream_handle) + ) catch {{ _ => (0, false) }} + }} + if !closed {{ + closed = true + wasmLift{camel_name}{index}DropReadable(stream_handle) + }} + }} + {async_qualifier}CMStream::Incoming({async_qualifier}StreamR::{{ + handle: stream_handle, + read: (count : Int) => {{ + if closed {{ + return None + }}"# + ); - fn malloc_memory(&mut self, address: &str, length: &str, ty: &Type) -> String { - let size = self.r#gen.sizes.size(ty).size_wasm32(); - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); - format!("let {address} = 
{ffi}malloc({size} * {length});") - } + let lift_builtins = if let Some(inner_ty) = inner_type { + let resolve = self.resolve.clone(); + let mut lift_bindgen = + FunctionBindgen::new(self, Box::new([]), Direction::Import, true, false); + lift_bindgen.use_ffi(ffi::MALLOC); + lift_bindgen.use_ffi(ffi::FREE); - fn is_list_canonical(&self, _resolve: &Resolve, element: &Type) -> bool { - matches!( - element, - Type::U8 | Type::U32 | Type::U64 | Type::S32 | Type::S64 | Type::F32 | Type::F64 + uwriteln!( + lift, + r#" + let ptr = mbt_ffi_malloc(count * {elem_size}) + reading += 1 + let (progress, end) = {{ + defer {{ reading -= 1 }} + {async_qualifier}suspend_for_stream_read( + stream_handle, + wasmLift{camel_name}{index}Read(stream_handle, ptr, count), ) - } + }} + if progress == 0 {{ + mbt_ffi_free(ptr) + if end {{ close(); return None }} + return Some([]) + }} + let items = []"# + ); - fn list_lift_from_memory( - &mut self, - address: &str, - length: &str, - lift_func: &str, - ty: &Type, - ) -> String { - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); - if self.is_list_canonical(self.resolve, ty) { - if ty == &Type::U8 { - return format!("{ffi}ptr2bytes({address}, {length})"); - } - let ty = match ty { - Type::U32 => "uint", - Type::U64 => "uint64", - Type::S32 => "int", - Type::S64 => "int64", - Type::F32 => "float", - Type::F64 => "double", - _ => unreachable!(), - }; + // Generate code to lift each element from memory + uwriteln!(lift, " for i = 0; i < progress; i = i + 1 {{"); + uwriteln!(lift, " let elem_ptr = ptr + i * {elem_size}"); + let operand = lift_from_memory( + &resolve, + &mut lift_bindgen, + "elem_ptr".to_string(), + &inner_ty, + ); + uwriteln!(lift, "{}", lift_bindgen.src); + uwriteln!(lift, " items.push({operand})"); + uwriteln!(lift, " }}"); - return format!("{ffi}ptr2{ty}_array({address}, {length})"); - } - let size = self.r#gen.sizes.size(ty).size_wasm32(); - format!( - r#" - FixedArray::makei( - {length}, - (index) => {{ - 
let ptr = ({address}) + (index * {size}); - {lift_func}(ptr) - }} - ) - "# + uwriteln!( + lift, + r#" + mbt_ffi_free(ptr) + if end {{ close() }} + Some(items[:])"# + ); + lift_bindgen.take_local_ffi_imports() + } else { + // Unit type stream + uwriteln!( + lift, + r#" + reading += 1 + let (progress, end) = {{ + defer {{ reading -= 1 }} + {async_qualifier}suspend_for_stream_read( + stream_handle, + wasmLift{camel_name}{index}Read(stream_handle, 0, count), ) - } + }} + if progress == 0 && end {{ close(); return None }} + let result = FixedArray::make(progress, ()) + if end {{ close() }} + Some(result[:])"# + ); + HashSet::new() + }; - fn list_lower_to_memory(&mut self, lower_func: &str, value: &str, ty: &Type) -> String { - // Align the address, moonbit only supports wasm32 for now - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); - if self.is_list_canonical(self.resolve, ty) { - if ty == &Type::U8 { - return format!("{ffi}bytes2ptr({value})"); - } + uwriteln!( + lift, + r#" + }}, + close, + take_handle: fn () {{ + if closed || reading > 0 {{ + panic() + }} + closed = true + stream_handle + }} + }}) +}}"# + ); - let ty = match ty { - Type::U32 => "uint", - Type::U64 => "uint64", - Type::S32 => "int", - Type::S64 => "int64", - Type::F32 => "float", - Type::F64 => "double", - _ => unreachable!(), - }; - return format!("{ffi}{ty}_array2ptr({value})"); - } - let size = self.r#gen.sizes.size(ty).size_wasm32(); - format!( + // Generate lower function (Stream to handle) + uwriteln!( + lower, r#" - let address = {ffi}malloc(({value}).length() * {size}); - for index = 0; index < ({value}).length(); index = index + 1 {{ - let ptr = (address) + (index * {size}); - let value = {value}[index]; - {lower_func}(value, ptr); +fn wasmLower{camel_name}{index}(stream : {lifted}) -> Int {{ + match stream {{ + {async_qualifier}CMStream::Incoming(s) => s.take_handle() + {async_qualifier}CMStream::Outgoing(s) => {{ + let handles = wasmLower{camel_name}{index}New() + 
let readable = (handles & 0xFFFFFFFF).to_int() + let writable = (handles >> 32).to_int() + let producer = s.take_producer() + {async_qualifier}backpressure_inc() + let _ = {async_qualifier}spawn_bg_current(async fn() {{ + defer {async_qualifier}backpressure_dec() + let mut closed = false + defer {{ + if !closed {{ + wasmLower{camel_name}{index}DropWritable(writable) + }} + }} + let sink = {async_qualifier}Sink::{{ + write: async fn (data : ArrayView[_]) {{ + if closed || data.length() == 0 {{ + return 0 + }}"# + ); + + let lower_builtins = if let Some(inner_ty) = inner_type { + let resolve = self.resolve.clone(); + let elem_type = self.world_gen.pkg_resolver.type_name(self.name, &inner_ty); + let mut lower_bindgen = + FunctionBindgen::new(self, Box::new([]), Direction::Export, true, false); + lower_bindgen.use_ffi(ffi::MALLOC); + lower_bindgen.use_ffi(ffi::FREE); + + uwriteln!( + lower, + r#" + let ptr = mbt_ffi_malloc(data.length() * {elem_size}) + defer mbt_ffi_free(ptr) + for i = 0; i < data.length(); i = i + 1 {{ + let elem_ptr = ptr + i * {elem_size} + let elem : {elem_type} = data[i]"# + ); + + abi::lower_to_memory( + &resolve, + &mut lower_bindgen, + "elem_ptr".to_string(), + "elem".to_string(), + &inner_ty, + ); + uwriteln!(lower, "{}", lower_bindgen.src); + uwriteln!(lower, " }}"); + + uwriteln!( + lower, + r#" + let (progress, dropped) = {async_qualifier}suspend_for_stream_write( + writable, + wasmLower{camel_name}{index}Write(writable, ptr, data.length()), + ) catch {{ _ => (0, true) }} + if dropped {{ + closed = true + wasmLower{camel_name}{index}DropWritable(writable) }} - address - "# - ) + progress"# + ); + lower_bindgen.take_local_ffi_imports() + } else { + // Unit type stream + uwriteln!( + lower, + r#" + let (progress, dropped) = {async_qualifier}suspend_for_stream_write( + writable, + wasmLower{camel_name}{index}Write(writable, 0, data.length()), + ) catch {{ _ => (0, true) }} + if dropped {{ + closed = true + 
wasmLower{camel_name}{index}DropWritable(writable) + }} + progress"# + ); + HashSet::new() + }; + + uwriteln!( + lower, + r#" + }}, + close: () => {{ + if !closed {{ + closed = true + wasmLower{camel_name}{index}DropWritable(writable) + }} + }} + }} + producer(sink) + sink.close() + }}) + readable + }} + }} +}}"# + ); + + AsyncBindingEntry { + lift_name: lifted_func_name, + lift_src: lift.to_string(), + lift_builtins, + lower_name: lowered_func_name, + lower_src: lower.to_string(), + lower_builtins, + } } } diff --git a/crates/moonbit/src/ffi/ffi.mbt b/crates/moonbit/src/ffi/ffi.mbt deleted file mode 100644 index 86ad5fd4e..000000000 --- a/crates/moonbit/src/ffi/ffi.mbt +++ /dev/null @@ -1,213 +0,0 @@ -///| -pub extern "wasm" fn extend16(value : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.extend16_s) - -///| -pub extern "wasm" fn extend8(value : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.extend8_s) - -///| -pub extern "wasm" fn store8(offset : Int, value : Int) = - #|(func (param i32) (param i32) local.get 0 local.get 1 i32.store8) - -///| -pub extern "wasm" fn load8_u(offset : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.load8_u) - -///| -pub extern "wasm" fn load8(offset : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.load8_s) - -///| -pub extern "wasm" fn store16(offset : Int, value : Int) = - #|(func (param i32) (param i32) local.get 0 local.get 1 i32.store16) - -///| -pub extern "wasm" fn load16(offset : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.load16_s) - -///| -pub extern "wasm" fn load16_u(offset : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.load16_u) - -///| -pub extern "wasm" fn store32(offset : Int, value : Int) = - #|(func (param i32) (param i32) local.get 0 local.get 1 i32.store) - -///| -pub extern "wasm" fn load32(offset : Int) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.load) - -///| -pub extern "wasm" fn 
store64(offset : Int, value : Int64) = - #|(func (param i32) (param i64) local.get 0 local.get 1 i64.store) - -///| -pub extern "wasm" fn load64(offset : Int) -> Int64 = - #|(func (param i32) (result i64) local.get 0 i64.load) - -///| -pub extern "wasm" fn storef32(offset : Int, value : Float) = - #|(func (param i32) (param f32) local.get 0 local.get 1 f32.store) - -///| -pub extern "wasm" fn loadf32(offset : Int) -> Float = - #|(func (param i32) (result f32) local.get 0 f32.load) - -///| -pub extern "wasm" fn storef64(offset : Int, value : Double) = - #|(func (param i32) (param f64) local.get 0 local.get 1 f64.store) - -///| -pub extern "wasm" fn loadf64(offset : Int) -> Double = - #|(func (param i32) (result f64) local.get 0 f64.load) - -///| -pub extern "wasm" fn f32_to_i32(value : Float) -> Int = - #|(func (param f32) (result i32) local.get 0 f32.convert_i32_s) - -///| -pub extern "wasm" fn f32_to_i64(value : Float) -> Int64 = - #|(func (param f32) (result i64) local.get 0 f32.convert_i64_s) - -// set pseudo header; allocate extra bytes for string - -///| -pub extern "wasm" fn malloc(size : Int) -> Int = - #|(func (param i32) (result i32) (local i32) - #| local.get 0 i32.const 4 i32.add call $moonbit.gc.malloc - #| local.tee 1 i32.const 0 call $moonbit.init_array8 - #| local.get 1 i32.const 8 i32.add) - -///| -pub extern "wasm" fn free(position : Int) = - #|(func (param i32) local.get 0 i32.const 8 i32.sub call $moonbit.decref) - -///| -extern "wasm" fn copy(dest : Int, src : Int, len : Int) = - #|(func (param i32) (param i32) (param i32) local.get 0 local.get 1 local.get 2 memory.copy) - -///| -#owned(str) -pub extern "wasm" fn str2ptr(str : String) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -pub extern "wasm" fn ptr2str(ptr : Int, len : Int) -> String = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array16 - #| local.get 
2) - -///| -#owned(bytes) -pub extern "wasm" fn bytes2ptr(bytes : FixedArray[Byte]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -pub extern "wasm" fn ptr2bytes(ptr : Int, len : Int) -> FixedArray[Byte] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array8 - #| local.get 2) - -///| -#owned(array) -pub extern "wasm" fn uint_array2ptr(array : FixedArray[UInt]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -#owned(array) -pub extern "wasm" fn uint64_array2ptr(array : FixedArray[UInt64]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -#owned(array) -pub extern "wasm" fn int_array2ptr(array : FixedArray[Int]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -#owned(array) -pub extern "wasm" fn int64_array2ptr(array : FixedArray[Int64]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -#owned(array) -pub extern "wasm" fn float_array2ptr(array : FixedArray[Float]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -#owned(array) -pub extern "wasm" fn double_array2ptr(array : FixedArray[Double]) -> Int = - #|(func (param i32) (result i32) local.get 0 i32.const 8 i32.add) - -///| -pub extern "wasm" fn ptr2uint_array(ptr : Int, len : Int) -> FixedArray[UInt] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array32 - #| local.get 2) - -///| -pub extern "wasm" fn ptr2int_array(ptr : Int, len : Int) -> FixedArray[Int] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array32 - #| local.get 2) - -///| -pub extern "wasm" fn ptr2float_array(ptr : Int, len : Int) -> 
FixedArray[Float] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array32 - #| local.get 2) - -///| -pub extern "wasm" fn ptr2uint64_array( - ptr : Int, - len : Int, -) -> FixedArray[UInt64] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array64 - #| local.get 2) - -///| -pub extern "wasm" fn ptr2int64_array(ptr : Int, len : Int) -> FixedArray[Int64] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array64 - #| local.get 2) - -///| -pub extern "wasm" fn ptr2double_array( - ptr : Int, - len : Int, -) -> FixedArray[Double] = - #|(func (param i32) (param i32) (result i32) (local i32) - #| local.get 0 i32.const 8 i32.sub local.tee 2 - #| local.get 1 call $moonbit.init_array64 - #| local.get 2) - -///| -pub fn cabi_realloc( - src_offset : Int, - src_size : Int, - _dst_alignment : Int, - dst_size : Int, -) -> Int { - // malloc - if src_offset == 0 && src_size == 0 { - return malloc(dst_size) - } - // free - if dst_size == 0 { - free(src_offset) - return 0 - } - // realloc - let dst = malloc(dst_size) - copy(dst, src_offset, if src_size < dst_size { src_size } else { dst_size }) - free(src_offset) - dst -} diff --git a/crates/moonbit/src/ffi/future.mbt b/crates/moonbit/src/ffi/future.mbt deleted file mode 100644 index b8b0c19a3..000000000 --- a/crates/moonbit/src/ffi/future.mbt +++ /dev/null @@ -1,540 +0,0 @@ -///| -pub struct FutureVTable[T] { - new : () -> UInt64 - read : (Int, Int) -> Int - write : (Int, Int) -> Int - cancel_read : (Int) -> Int - cancel_write : (Int) -> Int - drop_readable : (Int) -> Unit - drop_writable : (Int) -> Unit - malloc : (Int) -> Int - free : (Int) -> Unit - lift : (Int) -> T - lower : (T, Int) -> Unit -} - -///| -pub fn[T] FutureVTable::new( - new : () 
-> UInt64, - read : (Int, Int) -> Int, - write : (Int, Int) -> Int, - cancel_read : (Int) -> Int, - cancel_write : (Int) -> Int, - drop_readable : (Int) -> Unit, - drop_writable : (Int) -> Unit, - malloc : (Int) -> Int, - free : (Int) -> Unit, - lift : (Int) -> T, - lower : (T, Int) -> Unit, -) -> FutureVTable[T] { - { - new, - read, - write, - cancel_read, - cancel_write, - drop_readable, - drop_writable, - malloc, - free, - lift, - lower, - } -} - -///| -pub fn[T] new_future( - vtable : FutureVTable[T], -) -> (FutureReader[T], FutureWriter[T]) { - let handle = (vtable.new)() - let left_handle = handle.to_int() - let right_handle = (handle >> 32).to_int() - ( - FutureReader::new(left_handle, vtable), - FutureWriter::new(right_handle, vtable), - ) -} - -///| -pub struct FutureReader[T] { - handle : Int - vtable : FutureVTable[T] - mut code : Int? - mut dropped : Bool - memory_refs : Array[Int] -} - -///| -pub fn[T] FutureReader::new( - handle : Int, - vtable : FutureVTable[T], -) -> FutureReader[T] { - { handle, vtable, code: None, memory_refs: [], dropped: false } -} - -///| -pub impl[T] Waitable for FutureReader[T] with update(self, code~ : Int) -> Unit { - self.code = Some(code) -} - -///| -pub impl[T] Eq for FutureReader[T] with equal(self, other) -> Bool { - self.handle == other.handle -} - -///| -pub impl[T] Waitable for FutureReader[T] with handle(self) -> Int { - self.handle -} - -///| -pub impl[T] Waitable for FutureReader[T] with cancel(self) -> Unit { - if self.code is Some(code) && WaitableStatus::decode(code) is Cancelled(_) { - return - } - self.code = Some((self.vtable.cancel_read)(self.handle)) -} - -///| -pub impl[T] Waitable for FutureReader[T] with drop(self) -> Bool { - _async_debug("stream-reader-drop(\{self.handle})") - if self.dropped { - return false - } - (self.vtable.drop_readable)(self.handle) - self.dropped = true - for ptr in self.memory_refs { - self.free(ptr) - } - true -} - -///| -pub impl[T] Waitable for FutureReader[T] with 
done(self) -> Bool { - match self.code { - Some(c) => - match WaitableStatus::decode(c) { - Completed(_) | Dropped(_) | Cancelled(_) => true - Blocking => false - } - None => false - } -} - -///| -pub fn[T] FutureReader::malloc(self : FutureReader[T]) -> Int { - let ptr = (self.vtable.malloc)(1) - ptr -} - -///| -pub fn[T] FutureReader::free(self : FutureReader[T], ptr : Int) -> Unit { - (self.vtable.free)(ptr) -} - -///| -pub fn[T] FutureReader::lift(self : FutureReader[T], ptr : Int) -> T { - let res = (self.vtable.lift)(ptr) - res -} - -///| -pub fn[T] FutureReader::lower_read(self : FutureReader[T], ptr : Int) -> Int { - (self.vtable.read)(self.handle, ptr) -} - -///| -pub async fn[T] FutureReader::read(self : FutureReader[T]) -> T { - let buf_ptr = self.malloc() - self.memory_refs.push(buf_ptr) - self.code = Some(self.lower_read(buf_ptr)) - _async_debug("future-read(\{self.handle}) -> \{self.code.unwrap()}") - // register this waitable to the current task - let task = current_task() - task.add_waitable(self, current_coroutine()) - defer task.remove_waitable(self) - - // wait until ready - for { - let status = WaitableStatus::decode(self.code.unwrap()) - match status { - Cancelled(_) | Dropped(_) => raise Cancelled::Cancelled - Completed(_) => break - Blocking => suspend() - } - } - // when receive event, continue this coroutine - let value = self.lift(buf_ptr) - return value -} - -///| -pub struct FutureWriter[T] { - handle : Int - vtable : FutureVTable[T] - mut code : Int? 
- mut dropped : Bool - memory_refs : Array[Int] -} - -///| -pub fn[T] FutureWriter::new( - handle : Int, - vtable : FutureVTable[T], -) -> FutureWriter[T] { - { handle, vtable, code: None, memory_refs: [], dropped: false } -} - -///| -pub impl[T] Waitable for FutureWriter[T] with update(self, code~ : Int) -> Unit { - self.code = Some(code) -} - -///| -pub impl[T] Eq for FutureWriter[T] with equal(self, other) -> Bool { - self.handle == other.handle -} - -///| -pub impl[T] Waitable for FutureWriter[T] with handle(self) -> Int { - self.handle -} - -///| -pub impl[T] Waitable for FutureWriter[T] with cancel(self) -> Unit { - if self.code is Some(code) && WaitableStatus::decode(code) is Cancelled(_) { - return - } - self.code = Some((self.vtable.cancel_write)(self.handle)) -} - -///| -pub impl[T] Waitable for FutureWriter[T] with drop(self) -> Bool { - _async_debug("stream-writer-drop(\{self.handle})") - if self.dropped { - return false - } - (self.vtable.drop_writable)(self.handle) - self.dropped = true - for ptr in self.memory_refs { - self.free(ptr) - } - true -} - -///| -pub impl[T] Waitable for FutureWriter[T] with done(self) -> Bool { - match self.code { - Some(c) => - match WaitableStatus::decode(c) { - Completed(_) | Dropped(_) | Cancelled(_) => true - Blocking => false - } - None => false - } -} - -///| -pub fn[T] FutureWriter::malloc(self : FutureWriter[T]) -> Int { - (self.vtable.malloc)(1) -} - -///| -pub fn[T] FutureWriter::free(self : FutureWriter[T], ptr : Int) -> Unit { - (self.vtable.free)(ptr) -} - -///| -pub fn[T] FutureWriter::lower( - self : FutureWriter[T], - value : T, - ptr : Int, -) -> Unit { - (self.vtable.lower)(value, ptr) -} - -///| -pub fn[T] FutureWriter::lower_write(self : FutureWriter[T], ptr : Int) -> Int { - (self.vtable.write)(self.handle, ptr) -} - -///| -pub async fn[T] FutureWriter::write(self : FutureWriter[T], value : T) -> Unit { - // register this waitable to the current task - let task = current_task() - 
task.add_waitable(self, current_coroutine()) - defer task.remove_waitable(self) - let buf_ptr = self.malloc() - self.memory_refs.push(buf_ptr) - self.lower(value, buf_ptr) - self.code = Some(self.lower_write(buf_ptr)) - defer self.free(buf_ptr) - - // wait until ready - for { - let status = WaitableStatus::decode(self.code.unwrap()) - match status { - Cancelled(_) | Dropped(_) => raise Cancelled::Cancelled - Completed(_) => break - Blocking => suspend() - } - } - // when receive event, continue this coroutine - return -} - -///| -pub suberror StreamCancelled (Int, Cancelled) derive(Show) - -///| -pub struct StreamVTable[T] { - new : () -> UInt64 - read : (Int, Int, Int) -> Int - write : (Int, Int, Int) -> Int - cancel_read : (Int) -> Int - cancel_write : (Int) -> Int - drop_readable : (Int) -> Unit - drop_writable : (Int) -> Unit - malloc : (Int) -> Int - free : (Int) -> Unit - lift : (Int, Int) -> FixedArray[T] - lower : (FixedArray[T]) -> Int -} - -///| -pub fn[T] StreamVTable::new( - new : () -> UInt64, - read : (Int, Int, Int) -> Int, - write : (Int, Int, Int) -> Int, - cancel_read : (Int) -> Int, - cancel_write : (Int) -> Int, - drop_readable : (Int) -> Unit, - drop_writable : (Int) -> Unit, - malloc : (Int) -> Int, - free : (Int) -> Unit, - lift : (Int, Int) -> FixedArray[T], - lower : (FixedArray[T]) -> Int, -) -> StreamVTable[T] { - { - new, - read, - write, - cancel_read, - cancel_write, - drop_readable, - drop_writable, - malloc, - free, - lift, - lower, - } -} - -///| -pub fn[T] new_stream( - vtable : StreamVTable[T], -) -> (StreamReader[T], StreamWriter[T]) { - let handle = (vtable.new)() - let left_handle = handle.to_int() - let right_handle = (handle >> 32).to_int() - ( - StreamReader::new(left_handle, vtable), - StreamWriter::new(right_handle, vtable), - ) -} - -///| -pub struct StreamReader[T] { - handle : Int - vtable : StreamVTable[T] - mut code : Int? 
- mut dropped : Bool - memory_refs : Array[Int] -} - -///| -pub impl[T] Waitable for StreamReader[T] with update(self, code~ : Int) -> Unit { - self.code = Some(code) -} - -///| -pub impl[T] Eq for StreamReader[T] with equal(self, other) -> Bool { - self.handle == other.handle -} - -///| -pub impl[T] Waitable for StreamReader[T] with handle(self) -> Int { - self.handle -} - -///| -pub impl[T] Waitable for StreamReader[T] with cancel(self) -> Unit { - if self.code is Some(code) && WaitableStatus::decode(code) is Cancelled(_) { - return - } - self.code = Some((self.vtable.cancel_read)(self.handle)) -} - -///| -pub impl[T] Waitable for StreamReader[T] with drop(self) -> Bool { - _async_debug("stream-reader-drop(\{self.handle})") - if self.dropped { - return false - } - (self.vtable.drop_readable)(self.handle) - self.dropped = true - for ptr in self.memory_refs { - (self.vtable.free)(ptr) - } - true -} - -///| -pub impl[T] Waitable for StreamReader[T] with done(self) -> Bool { - match self.code { - Some(c) => - match WaitableStatus::decode(c) { - Completed(_) | Dropped(_) | Cancelled(_) => true - Blocking => false - } - None => false - } -} - -///| -pub fn[T] StreamReader::new( - handle : Int, - vtable : StreamVTable[T], -) -> StreamReader[T] { - { handle, vtable, code: None, memory_refs: [], dropped: false } -} - -///| -pub async fn[T] StreamReader::read( - self : StreamReader[T], - buffer : FixedArray[T], - offset? : Int = 0, - length : Int, -) -> Int { - // register this waitable to the current task - let task = current_task() - task.add_waitable(self, current_coroutine()) - defer task.remove_waitable(self) - let buf_ptr = (self.vtable.malloc)(length) - self.code = Some((self.vtable.read)(self.handle, buf_ptr, length)) - _async_debug("stream-read(\{self.handle}) -> \{self.code.unwrap()}") - for { - let status = WaitableStatus::decode(self.code.unwrap()) - match status { - Completed(n) => { - let read_result = (self.vtable.lift)(buf_ptr, n) - for i in 0.. 
{ - let read_result = (self.vtable.lift)(buf_ptr, n) - for i in 0.. suspend() - } - } -} - -///| -pub struct StreamWriter[T] { - handle : Int - vtable : StreamVTable[T] - mut code : Int? - mut dropped : Bool - memory_refs : Array[Int] -} - -///| -pub impl[T] Waitable for StreamWriter[T] with update(self, code~ : Int) -> Unit { - self.code = Some(code) -} - -///| -pub impl[T] Eq for StreamWriter[T] with equal(self, other) -> Bool { - self.handle == other.handle -} - -///| -pub impl[T] Waitable for StreamWriter[T] with handle(self) -> Int { - self.handle -} - -///| -pub impl[T] Waitable for StreamWriter[T] with cancel(self) -> Unit { - if self.code is Some(code) && WaitableStatus::decode(code) is Cancelled(_) { - return - } - self.code = Some((self.vtable.cancel_write)(self.handle)) -} - -///| -pub impl[T] Waitable for StreamWriter[T] with drop(self) -> Bool { - _async_debug("stream-writer-drop(\{self.handle})") - let task = current_task() - let coro = task.children.get(self.handle) - if coro is Some((_, coro)) { - coro.cancel() - coro.wake() - } - if self.dropped { - return false - } - (self.vtable.drop_writable)(self.handle) - self.dropped = true - for ptr in self.memory_refs { - (self.vtable.free)(ptr) - } - true -} - -///| -pub impl[T] Waitable for StreamWriter[T] with done(self) -> Bool { - match self.code { - Some(c) => - match WaitableStatus::decode(c) { - Completed(_) | Dropped(_) | Cancelled(_) => true - Blocking => false - } - None => false - } -} - -///| -pub fn[T] StreamWriter::new( - handle : Int, - vtable : StreamVTable[T], -) -> StreamWriter[T] { - { handle, vtable, code: None, memory_refs: [], dropped: false } -} - -///| -pub async fn[T] StreamWriter::write( - self : StreamWriter[T], - buffer : FixedArray[T], -) -> Int { - // register this waitable to the current task - let task = current_task() - task.add_waitable(self, current_coroutine()) - defer task.remove_waitable(self) - let write_buf = (self.vtable.lower)(buffer) - self.code = 
Some((self.vtable.write)(self.handle, write_buf, buffer.length())) - for { - let status = WaitableStatus::decode(self.code.unwrap()) - match status { - Completed(n) => return n - Cancelled(n) | Dropped(n) => - raise StreamCancelled::StreamCancelled((n, Cancelled::Cancelled)) - Blocking => suspend() - } - } -} diff --git a/crates/moonbit/src/ffi/subtask.mbt b/crates/moonbit/src/ffi/subtask.mbt deleted file mode 100644 index a53471df0..000000000 --- a/crates/moonbit/src/ffi/subtask.mbt +++ /dev/null @@ -1,57 +0,0 @@ -///| -pub struct Subtask { - handle : Int - mut code : Int? - mut dropped : Bool -} - -///| -pub fn Subtask::from_handle(handle : Int, code? : Int) -> Subtask { - { handle, code, dropped: false } -} - -///| -pub impl Waitable for Subtask with update(self, code~ : Int) -> Unit { - self.code = Some(code) -} - -///| -pub impl Eq for Subtask with equal(self, other) -> Bool { - self.handle == other.handle -} - -///| -pub impl Waitable for Subtask with handle(self) -> Int { - self.handle -} - -///| -pub impl Waitable for Subtask with cancel(self) -> Unit { - if self.code is Some(code) && CallbackCode::decode(code) is Cancel(_) { - return - } - self.code = Some(subtask_cancel(self.handle)) -} - -///| -pub impl Waitable for Subtask with drop(self) -> Bool { - _async_debug("subtask-drop(\{self.handle})") - if self.done() || self.dropped { - return false - } - subtask_drop(self.handle) - self.dropped = true - true -} - -///| -pub impl Waitable for Subtask with done(self) -> Bool { - guard self.code is Some(code) else { return false } - match SubtaskStatus::decode(code) { - StartCancelled(_) => true - Returned(_) => true - Started(_) => false - Starting(_) => false - ReturnCancelled(_) => true - } -} diff --git a/crates/moonbit/src/ffi/waitable_task.mbt b/crates/moonbit/src/ffi/waitable_task.mbt deleted file mode 100644 index 2a1014f00..000000000 --- a/crates/moonbit/src/ffi/waitable_task.mbt +++ /dev/null @@ -1,311 +0,0 @@ -///| -priv enum TaskStatus { - 
Fail(Error) - Running - Done -} - -///| -/// A `Task` represents a waitable task context that can manage waitables and child coroutines. -/// -struct Task { - id : Int - children : Map[Int, (&Waitable, Coroutine)] - task_defer : Array[() -> Unit raise] - resources : @deque.Deque[Int] - mut task : Coroutine? - mut waiting : Int - mut status : TaskStatus -} - -///| -pub let task_map : Map[Int, Task] = {} - -///| -pub fn Task::new() -> Task { - let waitable_set = waitable_set_new() - _async_debug("waitable-set-new(\{waitable_set})") - context_set(waitable_set) - { - id: waitable_set, - children: {}, - resources: @deque.Deque::new(), - task_defer: [], - status: Running, - waiting: 0, - task: None, - } -} - -///| -pub fn Task::from_raw(raw : Int) -> Task { - guard raw != 0 - context_set(raw) - _async_debug("context-set(\{raw})") - { - id: raw, - children: {}, - resources: @deque.Deque::new(), - task_defer: [], - status: Running, - waiting: 0, - task: None, - } -} - -///| -/// Check if the task is failed and return the error -pub fn Task::is_fail(self : Self) -> Error? 
{ - match self.status { - Fail(err) => Some(err) - _ => None - } -} - -///| -/// Check if all waitables are done -pub fn Task::no_wait(self : Self) -> Bool { - self.waiting == 0 -} - -///| -/// Check if the task is done or failed -pub fn Task::is_done(self : Self) -> Bool { - match self.status { - Done => true - Fail(_) => true - Running => false - } -} - -///| -pub fn Task::handle(self : Self) -> Int { - self.id -} - -///| -pub fn Task::blocking_wait(self : Self) -> (Int, Int, Int) { - let result : FixedArray[Int] = FixedArray::make(2, 0) - let result_ptr = int_array2ptr(result) - let event0 = waitable_set_wait(self.id, result_ptr) - _async_debug("waitable_set_wait(\{event0}, \{result[0]}, \{result[1]})") - (event0, result[0], result[1]) -} - -///| -pub fn Task::blocking_poll(self : Self) -> (Int, Int, Int) { - let result : FixedArray[Int] = FixedArray::make(2, 0) - let result_ptr = int_array2ptr(result) - let event0 = waitable_set_poll(self.id, result_ptr) - _async_debug("waitable-set-poll(\{event0}, \{result[0]}, \{result[1]})") - (event0, result[0], result[1]) -} - -///| -/// Add a waitable to the waitable set and increase the waiting count -pub fn[T : Waitable] Task::add_waitable( - self : Self, - waitable : T, - coro : Coroutine, -) -> Unit { - waitable_join(waitable.handle(), self.id) - self.children[waitable.handle()] = (waitable, coro) - self.resources.push_back(waitable.handle()) - _async_debug("waitable-set-join(\{waitable.handle()}, \{self.id})") - self.waiting += 1 -} - -///| -/// When a waitable is done will be removed from the waitable set -/// then waitable will be try to drop -pub fn[T : Waitable] Task::remove_waitable(self : Self, state : T) -> Unit { - _async_debug("waitable-set-join(\{state.handle()}, 0)") - waitable_join(state.handle(), 0) - self.waiting -= 1 -} - -///| -pub fn[T : Waitable] Task::drop_waitable(self : Self, state : T) -> Unit { - let _ = state.drop() - if self.resources.search(state.handle()) is Some(idx) { - let _ = 
self.resources.remove(idx) - - } - self.children.remove(state.handle()) -} - -///| -/// Cancel a waitable, remove it from the waitable set and force drop it -pub fn[T : Waitable] Task::cancel_waitable(self : Self, state : T) -> Unit { - waitable_join(state.handle(), 0) - _async_debug("waitable-set-join(\{state.handle()}, 0)") - self.waiting -= 1 - state.cancel() - let _ = state.drop() - self.children.remove(state.handle()) -} - -///| -/// set current task context to 0 and let runner drop the waitable set -pub fn Task::drop(self : Self) -> Unit { - context_set(0) - defer waitable_set_drop(self.id) - _async_debug("context-set(0)") -} - -///| -/// Spawns a coroutine to execute an async function and without waits for its completion -/// while managing the waitable state. -pub fn Task::spawn(_self : Self, f : async () -> Unit) -> Unit { - let _ = spawn(f) - // start the coroutine - rschedule() -} - -///| -/// This function spawns a coroutine to run the async function and waits for its completion -pub async fn Task::wait(_ : Self, f : async () -> Unit) -> Unit { - let coro = spawn(f) - // start the coroutine - rschedule() - Coroutine::wait(coro) -} - -///| -pub fn Task::add_defer(self : Self, f : () -> Unit raise) -> Unit { - self.task_defer.push(f) -} - -///| -pub fn callback(event : Int, waitable_id : Int, code : Int) -> Int { - let event = Event::decode(event) - _async_debug("callback(\{event}, \{waitable_id}, \{code})") - let task = match current_waitable_set() { - Some(task) => task - None => current_task() - } - // Handle the event for the current waitable task - match event { - FutureRead | FutureWrite | StreamRead | StreamWrite | Subtask => { - let (state, coro) = task.children[waitable_id] - state.update(code~) - // schedule next coroutine - coro.wake() - rschedule() - if task.no_wait() && task.task is Some(parent) { - // run the parent coroutine when all waitables are done - // parent coroutine may execute return/cancel - parent.wake() - rschedule() - return 
CallbackCode::Exit.encode() - } - return CallbackCode::Wait(task.id).encode() - } - TaskCancel => { - if task.task is Some(parent) { - parent.wake() - } - task.children - .values() - .each(child => { - let (state, coro) = child - task.cancel_waitable(state) - coro.cancel() - }) - rschedule() - return CallbackCode::Exit.encode() - } - None => { - rschedule() - return CallbackCode::Exit.encode() - } - } -} - -///| -pub fn Task::with_waitable_set( - self : Self, - f : async (Self) -> Unit, - is_drop? : Bool = false, -) -> Coroutine noraise { - let parent = spawn(async fn() -> Unit noraise { - self.status = Running - defer { - while self.resources.pop_front() is Some(handle) { - let state = self.children.get(handle) - if state is Some((state, _)) { - let _ = state.drop() - self.children.remove(handle) - } - } - if self.status is Running { - self.status = Done - } - task_map.remove(self.id) - - // this defer block recycles waitable task resources - while self.task_defer.pop() is Some(defer_block) { - defer_block() catch { - err => if self.status is Done { self.status = Fail(err) } - } - } - - // runner will drop the waitable set - // export async function needs to keep the waitable set - if is_drop { - self.drop() - } - } - f(self) catch { - err => if self.status is Running { self.status = Fail(err) } - } - if !self.no_wait() { - _async_debug("task-wait-loop(\{self.id})") - suspend() catch { - err => if self.status is Running { self.status = Fail(err) } - } - } - }) - self.task = Some(parent) - // start the parent coroutine - parent.run() - rschedule() - parent -} - -///| -fn current_waitable_set() -> Task? 
{ - let ctx = context_get() - _async_debug("context-get(\{ctx})") - if ctx == 0 { - None - } else { - match task_map.get(ctx) { - Some(task) => Some(task) - None => { - let ctx = Task::from_raw(ctx) - task_map[ctx.id] = ctx - Some(ctx) - } - } - } -} - -///| -pub fn current_task() -> Task { - let ctx = context_get() - if ctx == 0 { - let ctx = Task::new() - task_map[ctx.id] = ctx - ctx - } else { - match task_map.get(ctx) { - Some(task) => task - None => { - let ctx = Task::from_raw(ctx) - task_map[ctx.id] = ctx - ctx - } - } - } -} diff --git a/crates/moonbit/src/ffi/wasm_primitive.mbt b/crates/moonbit/src/ffi/wasm_primitive.mbt deleted file mode 100644 index dd0badaac..000000000 --- a/crates/moonbit/src/ffi/wasm_primitive.mbt +++ /dev/null @@ -1,185 +0,0 @@ -///| -pub(open) trait Waitable { - update(Self, code~ : Int) -> Unit - cancel(Self) -> Unit - - // when the waitable is dropped, this function is called to free resources - drop(Self) -> Bool - done(Self) -> Bool - handle(Self) -> Int -} - -///| -pub(all) enum SubtaskStatus { - Starting(Int) - Started(Int) - Returned(Int) - StartCancelled(Int) - ReturnCancelled(Int) -} derive(Eq, Show) - -///| -pub fn SubtaskStatus::decode(int : Int) -> SubtaskStatus { - let handle = int >> 4 - match int & 0xf { - 0 => Starting(handle) - 1 => Started(handle) - 2 => Returned(handle) - 3 => StartCancelled(handle) - 4 => ReturnCancelled(handle) - _ => panic() - } -} - -///| -pub fn SubtaskStatus::handle(self : Self) -> Int { - match self { - Starting(handle) => handle - Started(handle) => handle - Returned(handle) => handle - StartCancelled(handle) => handle - ReturnCancelled(handle) => handle - } -} - -///| -pub(all) enum Event { - None - Subtask - StreamRead - StreamWrite - FutureRead - FutureWrite - TaskCancel -} derive(Eq, Show) - -///| -pub fn Event::decode(int : Int) -> Event { - match int { - 0 => None - 1 => Subtask - 2 => StreamRead - 3 => StreamWrite - 4 => FutureRead - 5 => FutureWrite - 6 => TaskCancel - _ => panic() 
- } -} - -///| -pub fn Event::encode(self : Self) -> Int { - match self { - None => 0 - Subtask => 1 - StreamRead => 2 - StreamWrite => 3 - FutureRead => 4 - FutureWrite => 5 - TaskCancel => 6 - } -} - -///| -pub(all) enum WaitableStatus { - Completed(Int) - Dropped(Int) - Cancelled(Int) - Blocking -} derive(Eq, Show) - -///| -let waitable_status_block : Int = 0xffff_ffff - -///| -pub fn WaitableStatus::decode(int : Int) -> WaitableStatus { - if int == waitable_status_block { - return Blocking - } - let amt = int >> 4 - match int & 0xf { - 0 => Completed(amt) - 1 => Dropped(amt) - 2 => Cancelled(amt) - _ => panic() - } -} - -///| -pub fn WaitableStatus::count(int : Int) -> Int { - int >> 4 -} - -///| -pub(all) enum CallbackCode { - Exit - Yield - Wait(Int) - Cancel(Int) -} derive(Eq, Show) - -///| -pub fn CallbackCode::encode(self : Self) -> Int { - match self { - Exit => 0 - Yield => 1 - Wait(id) => 2 | (id << 4) - Cancel(id) => 3 | (id << 4) - } -} - -///| -pub fn CallbackCode::decode(int : Int) -> CallbackCode { - let id = int >> 4 - match int & 0xf { - 0 => Exit - 1 => Yield - 2 => Wait(id) - 3 => Cancel(id) - _ => panic() - } -} - -///| -/// This function is empty, If you want to print debug info, you can hook it in your environment. 
-pub fn _async_debug(_msg : String) -> Unit { - -} - -// Component async primitives - -///| -pub fn yield_blocking() -> Bool = "$root" "[yield]" - -///| -pub fn backpressure_set() -> Int = "$root" "[backpressure-set]" - -///| -pub fn subtask_cancel(id : Int) -> Int = "$root" "[subtask-cancel]" - -///| -pub fn subtask_drop(id : Int) = "$root" "[subtask-drop]" - -///| -pub fn context_set(task : Int) = "$root" "[context-set-0]" - -///| -pub fn context_get() -> Int = "$root" "[context-get-0]" - -///| -pub fn task_cancel() = "[export]$root" "[task-cancel]" - -///| -pub fn waitable_set_new() -> Int = "$root" "[waitable-set-new]" - -///| -pub fn waitable_set_drop(set : Int) = "$root" "[waitable-set-drop]" - -///| -pub fn waitable_join(waitable : Int, set : Int) = "$root" "[waitable-join]" - -///| -pub fn waitable_set_wait(set : Int, result_ptr : Int) -> Int = "$root" "[waitable-set-wait]" - -///| -pub fn waitable_set_poll(set : Int, result_ptr : Int) -> Int = "$root" "[waitable-set-poll]" diff --git a/crates/moonbit/src/lib.rs b/crates/moonbit/src/lib.rs index 7f8edbc6c..9a538bb03 100644 --- a/crates/moonbit/src/lib.rs +++ b/crates/moonbit/src/lib.rs @@ -1,6 +1,6 @@ use anyhow::Result; use core::panic; -use heck::{ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; +use heck::{ToShoutySnakeCase, ToUpperCamelCase}; use std::{ collections::{HashMap, HashSet}, fmt::Write, @@ -11,15 +11,16 @@ use wit_bindgen_core::{ AsyncFilterSet, Direction, Files, InterfaceGenerator as CoreInterfaceGenerator, Ns, Source, WorldGenerator, abi::{self, AbiVariant, Bindgen, Bitcast, Instruction, LiftLower, WasmType}, - uwrite, uwriteln, + dealias, uwrite, uwriteln, wit_parser::{ - Alignment, ArchitectureSize, Docs, Enum, Flags, FlagsRepr, Function, Int, InterfaceId, - Param, Record, Resolve, Result_, SizeAlign, Tuple, Type, TypeId, Variant, WorldId, - WorldKey, + Alignment, ArchitectureSize, Docs, Enum, Flags, FlagsRepr, Function, Handle, Int, + InterfaceId, LiftLowerAbi, 
Mangling, ManglingAndAbi, Param, Record, Resolve, + ResourceIntrinsic, Result_, SizeAlign, Tuple, Type, TypeDefKind, TypeId, Variant, + WasmExport, WasmExportKind, WasmImport, WorldId, WorldKey, }, }; -use crate::async_support::AsyncSupport; +use crate::async_support::{ASYNC_DIR, AsyncBinding, AsyncSupport}; use crate::pkg::{Imports, MoonbitSignature, PkgResolver, ToMoonBitIdent, ToMoonBitTypeIdent}; mod async_support; @@ -33,10 +34,13 @@ mod pkg; // Organization: // - one package per interface (export and import are treated as different interfaces) // - ffi utils are under `./ffi`, and the project entrance (package as link target) is under `./gen` + +// We use Legacy mangling for MoonBit (no specific reason, just because we haven't switched yet) +// We use AsyncCallback ABI for async functions + // TODO: Export will share the type signatures with the import by using a newtype alias -pub(crate) const FFI_DIR: &str = "ffi"; -pub(crate) const FFI: &str = include_str!("./ffi/ffi.mbt"); +const VERSION: &str = env!("CARGO_PKG_VERSION"); #[derive(Default, Debug, Clone)] #[cfg_attr(feature = "clap", derive(clap::Parser))] @@ -106,30 +110,22 @@ impl InterfaceFragment { } } -enum PayloadFor { - Future, - Stream, -} - #[derive(Default)] pub struct MoonBit { opts: Opts, - name: String, - needs_cleanup: bool, - import_interface_fragments: HashMap, - export_interface_fragments: HashMap, + project_name: String, import_world_fragment: InterfaceFragment, - export_world_fragment: InterfaceFragment, sizes: SizeAlign, + // Collision may happen when a package is imported with multiple versions. 
+ // see multiverison interface_ns: Ns, // dependencies between packages pkg_resolver: PkgResolver, - export: HashMap, + // Wasm export name -> (exported function name, func) + export: HashMap, + export_ns: Ns, - // return area allocation - return_area_size: ArchitectureSize, - return_area_align: Alignment, async_support: AsyncSupport, } @@ -139,29 +135,119 @@ impl MoonBit { &'a mut self, resolve: &'a Resolve, name: &'a str, - module: &'a str, direction: Direction, + interface: Option<&'a WorldKey>, ) -> InterfaceGenerator<'a> { let derive_opts = self.opts.derive.clone(); InterfaceGenerator { src: String::new(), stub: String::new(), ffi: String::new(), - r#gen: self, + world_gen: self, resolve, name, - module, direction, ffi_imports: HashSet::new(), derive_opts, + interface, + bindings: AsyncBinding(HashMap::new()), + async_bindings_emitted: HashSet::new(), } } + + fn write_moon_pkg(&self, moon_pkg: &mut Source, imports: Option<&Imports>, link: bool) { + // Disable warning for invalid inline wasm + moon_pkg.push_str("{\n\"warn-list\": \"-44\""); + // Dependencies + if let Some(imports) = imports { + moon_pkg.push_str(",\n\"import\": [\n"); + moon_pkg.indent(1); + let mut deps = imports + .packages + .iter() + .map(|(k, v)| { + format!( + "{{ \"path\" : \"{}/{}\", \"alias\" : \"{}\" }}", + self.project_name, + k.replace(".", "/"), + v + ) + }) + .collect::>(); + deps.sort(); + uwrite!(moon_pkg, "{}", deps.join(",\n")); + moon_pkg.deindent(1); + moon_pkg.push_str("\n]"); + } + // Link target + if link { + let memory_name = self.pkg_resolver.resolve.wasm_export_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmExport::Memory, + ); + moon_pkg.push_str(",\n\"link\": {\n\"wasm\": {\n"); + moon_pkg.push_str(&format!("\"export-memory-name\": \"{memory_name}\",\n")); + moon_pkg.push_str("\"heap-start-address\": 16,\n"); + moon_pkg.push_str("\"exports\": [\n"); + moon_pkg.indent(1); + let mut exports = self + .export + .iter() + .map(|(export_name, (func_name, _))| 
format!("\"{func_name}:{export_name}\"")) + .collect::>(); + exports.push(format!( + "\"mbt_ffi_cabi_realloc:{}\"", + self.pkg_resolver.resolve.wasm_export_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmExport::Realloc, + ), + )); + exports.sort(); + uwrite!(moon_pkg, "{}", exports.join(",\n")); + moon_pkg.deindent(1); + moon_pkg.push_str("\n]\n}\n}\n"); + } + moon_pkg.push_str("\n}\n"); + } } +/// World generator implementation for MoonBit. +/// +/// This implementation connects the generic `wit-bindgen` world generation +/// workflow with MoonBit-specific codegen details. It consumes the parsed +/// WIT `Resolve` structure and emits MoonBit source (`*.mbt`) and package +/// metadata files into the provided `Files` collection. +/// +/// Responsibilities and behavior: +/// - `preprocess`: Initialize generator-wide state (package resolver, +/// project name, and size/align information) for the current world. +/// - `import_interface` / `export_interface`: Generate per-interface +/// sources, FFI glue, README documentation and `moon.pkg.json` metadata. +/// - `import_funcs` / `export_funcs` / `import_types`: Collect and accumulate +/// world-level functions and types (the `$root` module) into fragments +/// that are later written out by `finish_imports` or `finish`. +/// - `finish_imports` / `finish`: Emit aggregated import artifacts and the +/// final project entrypoints such as the combined FFI module and package +/// descriptor files. +/// +/// Implementation notes: +/// - Namespacing and collision avoidance are handled using `PkgResolver` and +/// an internal `Ns` to make import/export package names stable even when +/// multiple package versions are present. +/// - Inline FFI helpers and builtins are collected and written once into the +/// final export FFI module. Async helpers are emitted when required. 
impl WorldGenerator for MoonBit { fn preprocess(&mut self, resolve: &Resolve, world: WorldId) { self.pkg_resolver.resolve = resolve.clone(); - self.name = PkgResolver::world_name(resolve, world); + self.project_name = self + .opts + .project_name + .clone() + .or(resolve.worlds[world].package.map(|id| { + let package = &resolve.packages[id].name; + format!("{}/{}", package.namespace, package.name) + })) + .unwrap_or("generated".into()); self.sizes.fill(resolve); } @@ -178,26 +264,50 @@ impl WorldGenerator for MoonBit { .import_interface_names .insert(id, name.clone()); - if let Some(content) = &resolve.interfaces[id].docs.contents { - if !content.is_empty() { - files.push( - &format!("{}/README.md", name.replace(".", "/")), - content.as_bytes(), - ); - } - } - - let module = &resolve.name_world_key(key); - let mut r#gen = self.interface(resolve, &name, module, Direction::Import); + let mut r#gen = self.interface(resolve, &name, Direction::Import, Some(key)); r#gen.types(id); for (_, func) in resolve.interfaces[id].functions.iter() { - r#gen.import(Some(key), func); + r#gen.import(func); } - let result = r#gen.finish(); - self.import_interface_fragments - .insert(name.to_owned(), result); + let fragment = r#gen.finish(); + // Write files + { + let directory = name.replace('.', "/"); + + // README + if let Some(content) = &resolve.interfaces[id].docs.contents + && !content.is_empty() + { + files.push(&format!("{directory}/README.md"), content.as_bytes()); + } + + assert!(fragment.stub.is_empty()); + // Source + let mut src = Source::default(); + wit_bindgen_core::generated_preamble(&mut src, VERSION); + uwriteln!(src, "{}", fragment.src); + files.push(&format!("{directory}/top.mbt"), indent(&src).as_bytes()); + + // FFI + let mut ffi = Source::default(); + wit_bindgen_core::generated_preamble(&mut ffi, VERSION); + uwriteln!(ffi, "{}", fragment.ffi); + for builtin in fragment.builtins { + uwriteln!(ffi, "{builtin}"); + } + files.push(&format!("{directory}/ffi.mbt"), 
indent(&ffi).as_bytes()); + + // moon.pkg.json + let mut moon_pkg = Source::default(); + self.write_moon_pkg( + &mut moon_pkg, + self.pkg_resolver.package_import.get(&name), + false, + ); + files.push(&format!("{directory}/moon.pkg.json"), moon_pkg.as_bytes()); + } Ok(()) } @@ -210,16 +320,74 @@ impl WorldGenerator for MoonBit { _files: &mut Files, ) { let name = PkgResolver::world_name(resolve, world); - let mut r#gen = self.interface(resolve, &name, "$root", Direction::Import); + let mut r#gen = self.interface(resolve, &name, Direction::Import, None); for (_, func) in funcs { - r#gen.import(None, func); // None is "$root" + r#gen.import(func); + } + + let result = r#gen.finish(); + self.import_world_fragment.concat(result); + } + + fn import_types( + &mut self, + resolve: &Resolve, + world: WorldId, + types: &[(&str, TypeId)], + _files: &mut Files, + ) { + let name = PkgResolver::world_name(resolve, world); + let mut r#gen = self.interface(resolve, &name, Direction::Import, None); + + for (ty_name, ty) in types { + r#gen.define_type(ty_name, *ty); } let result = r#gen.finish(); self.import_world_fragment.concat(result); } + fn finish_imports(&mut self, resolve: &Resolve, world: WorldId, files: &mut Files) { + let name = PkgResolver::world_name(resolve, world); + let directory = name.replace('.', "/"); + + assert!(self.import_world_fragment.stub.is_empty()); + + // README + if let Some(content) = &resolve.worlds[world].docs.contents + && !content.is_empty() + { + files.push(&format!("{directory}/README.md"), content.as_bytes()); + } + // Source + let mut src = Source::default(); + wit_bindgen_core::generated_preamble(&mut src, VERSION); + uwriteln!(src, "{}", self.import_world_fragment.src); + files.push(&format!("{directory}/import.mbt"), indent(&src).as_bytes()); + // FFI + let mut ffi = Source::default(); + let mut builtins: HashSet<&'static str> = HashSet::new(); + wit_bindgen_core::generated_preamble(&mut ffi, VERSION); + uwriteln!(ffi, "{}", 
self.import_world_fragment.ffi); + builtins.extend(self.import_world_fragment.builtins.iter()); + for builtin in builtins { + uwriteln!(ffi, "{builtin}"); + } + files.push( + &format!("{directory}/ffi_import.mbt"), + indent(&ffi).as_bytes(), + ); + // moon.pkg.json + let mut moon_pkg = Source::default(); + self.write_moon_pkg( + &mut moon_pkg, + self.pkg_resolver.package_import.get(&name), + false, + ); + files.push(&format!("{directory}/moon.pkg.json"), moon_pkg.as_bytes()); + } + fn export_interface( &mut self, resolve: &Resolve, @@ -237,26 +405,61 @@ impl WorldGenerator for MoonBit { .export_interface_names .insert(id, name.clone()); - if let Some(content) = &resolve.interfaces[id].docs.contents { - if !content.is_empty() { + let mut r#gen = self.interface(resolve, &name, Direction::Export, Some(key)); + r#gen.types(id); + + for (_, func) in resolve.interfaces[id].functions.iter() { + r#gen.export(func); + } + + let fragment = r#gen.finish(); + + // Write files + { + let directory = name.replace('.', "/"); + + // README + if let Some(content) = &resolve.interfaces[id].docs.contents + && !content.is_empty() + { files.push( &format!("{}/README.md", name.replace(".", "/")), content.as_bytes(), ); } - } + // Source + let mut src = Source::default(); + wit_bindgen_core::generated_preamble(&mut src, VERSION); + uwriteln!(src, "{}", fragment.src); + files.push(&format!("{directory}/top.mbt"), indent(&src).as_bytes()); - let module = &resolve.name_world_key(key); - let mut r#gen = self.interface(resolve, &name, module, Direction::Export); - r#gen.types(id); + if !self.opts.ignore_stub { + // Stub + let mut stub = Source::default(); + generated_preamble(&mut stub, VERSION); + uwriteln!(stub, "{}", fragment.stub); + files.push(&format!("{directory}/stub.mbt"), indent(&stub).as_bytes()); - for (_, func) in resolve.interfaces[id].functions.iter() { - r#gen.export(Some(key), func); - } + // moon.pkg.json + let mut moon_pkg = Source::default(); + self.write_moon_pkg( + &mut 
moon_pkg, + self.pkg_resolver.package_import.get(&name), + false, + ); + files.push(&format!("{directory}/moon.pkg.json"), moon_pkg.as_bytes()); + } - let result = r#gen.finish(); - self.export_interface_fragments - .insert(name.to_owned(), result); + // FFI + let mut ffi = Source::default(); + wit_bindgen_core::generated_preamble(&mut ffi, VERSION); + + uwriteln!(&mut ffi, "{}", fragment.ffi); + for builtin in fragment.builtins.iter() { + uwriteln!(ffi, "{builtin}"); + } + files.push(&format!("{directory}/ffi.mbt",), indent(&ffi).as_bytes()); + } Ok(()) } @@ -266,290 +469,114 @@ impl WorldGenerator for MoonBit { resolve: &Resolve, world: WorldId, funcs: &[(&str, &Function)], - _files: &mut Files, + files: &mut Files, ) -> Result<()> { let name = format!( "{}.{}", self.opts.r#gen_dir, PkgResolver::world_name(resolve, world) ); - let mut r#gen = self.interface(resolve, &name, "$root", Direction::Export); + let mut r#gen = self.interface(resolve, &name, Direction::Export, None); for (_, func) in funcs { - r#gen.export(None, func); + r#gen.export(func); } - let result = r#gen.finish(); - self.export_world_fragment.concat(result); - Ok(()) - } - - fn import_types( - &mut self, - resolve: &Resolve, - world: WorldId, - types: &[(&str, TypeId)], - _files: &mut Files, - ) { - let name = PkgResolver::world_name(resolve, world); - let mut r#gen = self.interface(resolve, &name, "$root", Direction::Import); - - for (ty_name, ty) in types { - r#gen.define_type(ty_name, *ty); - } - - let result = r#gen.finish(); - self.import_world_fragment.concat(result); - } - - fn finish(&mut self, resolve: &Resolve, id: WorldId, files: &mut Files) -> Result<()> { - let project_name = self - .opts - .project_name - .clone() - .or(resolve.worlds[id].package.map(|id| { - let package = &resolve.packages[id].name; - format!("{}/{}", package.namespace, package.name) - })) - .unwrap_or("generated".into()); - let name = PkgResolver::world_name(resolve, id); - - if let Some(content) = 
&resolve.worlds[id].docs.contents { - if !content.is_empty() { - files.push( - &format!("{}/README.md", name.replace(".", "/")), - content.as_bytes(), - ); - } - } + let fragment = r#gen.finish(); - let version = env!("CARGO_PKG_VERSION"); - - let generate_pkg_definition = |name: &String, files: &mut Files| { + // Write files + { let directory = name.replace('.', "/"); - let imports: Option<&Imports> = self.pkg_resolver.package_import.get(name); - if let Some(imports) = imports { - let mut deps = imports - .packages - .iter() - .map(|(k, v)| { - format!( - "{{ \"path\" : \"{project_name}/{}\", \"alias\" : \"{}\" }}", - k.replace(".", "/"), - v - ) - }) - .collect::>(); - deps.sort(); - - files.push( - &format!("{directory}/moon.pkg.json"), - format!( - "{{ \"import\": [{}], \"warn-list\": \"-44\" }}", - deps.join(", ") - ) - .as_bytes(), - ); - } else { - files.push( - &format!("{directory}/moon.pkg.json"), - "{ \"warn-list\": \"-44\" }".to_string().as_bytes(), - ); - } - }; - - // Import world fragments - let mut src = Source::default(); - let mut ffi = Source::default(); - let mut builtins: HashSet<&'static str> = HashSet::new(); - wit_bindgen_core::generated_preamble(&mut src, version); - wit_bindgen_core::generated_preamble(&mut ffi, version); - uwriteln!(src, "{}", self.import_world_fragment.src); - uwriteln!(ffi, "{}", self.import_world_fragment.ffi); - builtins.extend(self.import_world_fragment.builtins.iter()); - assert!(self.import_world_fragment.stub.is_empty()); - for b in builtins.iter() { - uwriteln!(ffi, "{}", b); - } - - let directory = name.replace('.', "/"); - files.push(&format!("{directory}/import.mbt"), indent(&src).as_bytes()); - files.push( - &format!("{directory}/ffi_import.mbt"), - indent(&ffi).as_bytes(), - ); - generate_pkg_definition(&name, files); - - // Export world fragments - let mut src = Source::default(); - let mut stub = Source::default(); - wit_bindgen_core::generated_preamble(&mut src, version); - generated_preamble(&mut stub, 
version); - uwriteln!(src, "{}", self.export_world_fragment.src); - uwriteln!(stub, "{}", self.export_world_fragment.stub); - - files.push(&format!("{directory}/top.mbt"), indent(&src).as_bytes()); - if !self.opts.ignore_stub { - files.push( - &format!("{}/{directory}/stub.mbt", self.opts.r#gen_dir), - indent(&stub).as_bytes(), - ); - generate_pkg_definition(&format!("{}.{}", self.opts.r#gen_dir, name), files); - } - - let mut builtins: HashSet<&'static str> = HashSet::new(); - builtins.insert(ffi::MALLOC); - builtins.insert(ffi::FREE); - let mut generate_ffi = - |directory: String, fragment: &InterfaceFragment, files: &mut Files| { - // For cabi_realloc - - let mut body = Source::default(); - wit_bindgen_core::generated_preamble(&mut body, version); - - uwriteln!(&mut body, "{}", fragment.ffi); - builtins.extend(fragment.builtins.iter()); - - files.push( - &format!( - "{}/{}_export.mbt", - self.opts.r#gen_dir, - directory.to_snake_case() - ), - indent(&body).as_bytes(), - ); - }; - - generate_ffi(directory, &self.export_world_fragment, files); - - // Import interface fragments - for (name, fragment) in &self.import_interface_fragments { + // Source let mut src = Source::default(); - let mut ffi = Source::default(); - wit_bindgen_core::generated_preamble(&mut src, version); - wit_bindgen_core::generated_preamble(&mut ffi, version); - let mut builtins: HashSet<&'static str> = HashSet::new(); + wit_bindgen_core::generated_preamble(&mut src, VERSION); uwriteln!(src, "{}", fragment.src); - uwriteln!(ffi, "{}", fragment.ffi); - builtins.extend(fragment.builtins.iter()); - assert!(fragment.stub.is_empty()); - for builtin in builtins { - uwriteln!(ffi, "{}", builtin); - } - - let directory = name.replace('.', "/"); files.push(&format!("{directory}/top.mbt"), indent(&src).as_bytes()); - files.push(&format!("{directory}/ffi.mbt"), indent(&ffi).as_bytes()); - generate_pkg_definition(name, files); - } - // Export interface fragments - for (name, fragment) in 
&self.export_interface_fragments { - let mut src = Source::default(); - let mut stub = Source::default(); - wit_bindgen_core::generated_preamble(&mut src, version); - generated_preamble(&mut stub, version); - uwriteln!(src, "{}", fragment.src); - uwriteln!(stub, "{}", fragment.stub); - - let directory = name.replace('.', "/"); - files.push(&format!("{directory}/top.mbt"), indent(&src).as_bytes()); if !self.opts.ignore_stub { + // Stub + let mut stub = Source::default(); + generated_preamble(&mut stub, VERSION); + uwriteln!(stub, "{}", fragment.stub); files.push(&format!("{directory}/stub.mbt"), indent(&stub).as_bytes()); - generate_pkg_definition(name, files); + // moon.pkg.json + let mut moon_pkg = Source::default(); + self.write_moon_pkg( + &mut moon_pkg, + self.pkg_resolver.package_import.get(&name), + false, + ); + files.push(&format!("{directory}/moon.pkg.json"), moon_pkg.as_bytes()); } - generate_ffi(directory, fragment, files); + + // FFI + let mut export = Source::default(); + wit_bindgen_core::generated_preamble(&mut export, VERSION); + uwriteln!(&mut export, "{}", fragment.ffi); + for builtin in fragment.builtins.iter() { + uwriteln!(export, "{builtin}"); + } + files.push(&format!("{directory}/ffi.mbt",), indent(&export).as_bytes()); } - // Export FFI Utils - // Export Async utils + Ok(()) + } + fn finish(&mut self, _resolve: &Resolve, _id: WorldId, files: &mut Files) -> Result<()> { // If async is used, export async utils - self.async_support.emit_utils(files, version); + self.async_support.emit_utils(files); // Export project files if !self.opts.ignore_stub && !self.opts.ignore_module_file { let mut body = Source::default(); uwriteln!( &mut body, - "{{ \"name\": \"{project_name}\", \"preferred-target\": \"wasm\" }}" + "{{ \"name\": \"{}\", \"preferred-target\": \"wasm\" }}", + self.project_name ); files.push("moon.mod.json", body.as_bytes()); } // Export project entry point let mut body = Source::default(); - wit_bindgen_core::generated_preamble(&mut 
body, version); - uwriteln!(&mut body, "{}", ffi::CABI_REALLOC); - - if !self.return_area_size.is_empty() { - uwriteln!( - &mut body, - " - let return_area : Int = mbt_ffi_malloc({}) - ", - self.return_area_size.size_wasm32(), - ); - } - for builtin in builtins { + wit_bindgen_core::generated_preamble(&mut body, VERSION); + // CABI Realloc + for builtin in [ffi::CABI_REALLOC, ffi::MALLOC, ffi::FREE] { uwriteln!(&mut body, "{}", builtin); } + // Import all exported interfaces + for (_, (_, impl_)) in self.export.iter() { + uwriteln!(&mut body, "{impl_}"); + } + files.push( &format!("{}/ffi.mbt", self.opts.r#gen_dir), indent(&body).as_bytes(), ); - self.export - .insert("mbt_ffi_cabi_realloc".into(), "cabi_realloc".into()); - - let mut body = Source::default(); - let mut exports = self - .export - .iter() - .map(|(k, v)| format!("\"{k}:{v}\"")) - .collect::>(); - exports.sort(); - - uwrite!( - &mut body, - r#" - {{ - "link": {{ - "wasm": {{ - "exports": [{}], - "export-memory-name": "memory", - "heap-start-address": 16 - }} - }} - "#, - exports.join(", ") - ); + let mut moon_pkg = Source::default(); + let mut filtered_imports = Imports::default(); if let Some(imports) = self.pkg_resolver.package_import.get(&self.opts.r#gen_dir) { - let mut deps = imports - .packages - .iter() - .map(|(k, v)| { - format!( - "{{ \"path\" : \"{project_name}/{}\", \"alias\" : \"{}\" }}", - k.replace(".", "/"), - v - ) - }) - .collect::>(); - deps.sort(); - - uwrite!(&mut body, " ,\"import\": [{}]", deps.join(", ")); + for (path, alias) in imports.packages.iter() { + // The root `gen` package doesn't reference async helpers directly; + // interface subpackages import async on their own. 
+ if alias == "async" || path.ends_with("/async") { + continue; + } + filtered_imports + .packages + .insert(path.to_string(), alias.to_string()); + } } - uwrite!( - &mut body, - " - , \"warn-list\": \"-44\" - }} - ", - ); + let imports = if filtered_imports.packages.is_empty() { + None + } else { + Some(&filtered_imports) + }; + self.write_moon_pkg(&mut moon_pkg, imports, true); files.push( - &format!("{}/moon.pkg.json", self.opts.r#gen_dir,), - indent(&body).as_bytes(), + &format!("{}/moon.pkg.json", self.opts.r#gen_dir), + indent(&moon_pkg).as_bytes(), ); Ok(()) @@ -563,15 +590,22 @@ struct InterfaceGenerator<'a> { // Collect of FFI imports used in this interface ffi_imports: HashSet<&'static str>, - r#gen: &'a mut MoonBit, + world_gen: &'a mut MoonBit, resolve: &'a Resolve, // The current interface getting generated name: &'a str, - module: &'a str, direction: Direction, + interface: Option<&'a WorldKey>, // Options for deriving traits derive_opts: DeriveOpts, + + // Generated lift and lower + bindings: AsyncBinding, + + // Avoid re-emitting the same async helper functions multiple times in the + // same `ffi.mbt`. 
+ async_bindings_emitted: HashSet, } impl InterfaceGenerator<'_> { @@ -584,106 +618,121 @@ impl InterfaceGenerator<'_> { } } - fn import(&mut self, module: Option<&WorldKey>, func: &Function) { + fn import(&mut self, func: &Function) { + // Determine if the function is async let async_ = self - .r#gen + .world_gen .opts .async_ - .is_async(self.resolve, module, func, false); + .is_async(self.resolve, self.interface, func, false) + || !func.find_futures_and_streams(self.resolve).is_empty(); if async_ { - self.r#gen.async_support.mark_async(); + self.world_gen.async_support.mark_async(); + self.generate_async_binding(func); } - let interface_name = match module { - Some(key) => &self.resolve.name_world_key(key), - None => "$root", - }; + let ffi_import_name = format!("wasmImport{}", func.name.to_upper_camel_case()); + + // Generate the core wasm abi + let wasm_sig = self.resolve.wasm_signature( + if async_ { + AbiVariant::GuestImportAsync + } else { + AbiVariant::GuestImport + }, + func, + ); + { + let result_type = match &wasm_sig.results[..] 
{ + [] => "".into(), + [result] => format!("-> {}", wasm_type(*result)), + _ => unimplemented!("multi-value results are not supported yet"), + }; + + let params = wasm_sig + .params + .iter() + .enumerate() + .map(|(i, param)| format!("p{i} : {}", wasm_type(*param))) + .collect::>() + .join(", "); + + let (import_module, import_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(if async_ { + LiftLowerAbi::AsyncCallback + } else { + LiftLowerAbi::Sync + }), + WasmImport::Func { + interface: self.interface, + func, + }, + ); + + uwriteln!( + self.ffi, + r#" + fn {ffi_import_name}({params}) {result_type} = "{import_module}" "{import_name}" + "# + ); + } + + if async_ { + let src = self.generate_async_import(func, &ffi_import_name, &wasm_sig); + let mbt_sig = self.world_gen.pkg_resolver.mbt_sig(self.name, func, false); + let sig = self.sig_string_with_direction(&mbt_sig, async_, Direction::Import); + + print_docs(&mut self.src, &func.docs); + uwrite!( + self.src, + r#" + {sig} {{ + {src} + }} + "# + ); + return; + } + + // Generate the MoonBit wrapper let mut bindgen = FunctionBindgen::new( self, - &func.name, - self.name, func.params .iter() .map(|Param { name, .. 
}| name.to_moonbit_ident()) .collect(), + Direction::Import, + false, // sync import + true, ); - let (variant, async_prefix) = if async_ { - (AbiVariant::GuestImportAsync, "[async-lower]") - } else { - (AbiVariant::GuestImport, "") - }; - abi::call( - bindgen.r#gen.resolve, + bindgen.interface_gen.resolve, AbiVariant::GuestImport, LiftLower::LowerArgsLiftResults, func, &mut bindgen, false, ); - - let mut src = bindgen.src.clone(); - + let src = bindgen.src.clone(); let cleanup_list = if bindgen.needs_cleanup_list { - self.r#gen.needs_cleanup = true; - - " - let cleanup_list : Array[Int] = [] - " - .into() + "let cleanup_list : Array[Int] = []" } else { - String::new() - }; - - let name = &func.name; - - let wasm_sig = self.resolve.wasm_signature(variant, func); - - let result_type = match &wasm_sig.results[..] { - [] => "".into(), - [result] => format!("-> {}", wasm_type(*result)), - _ => unreachable!(), + "" }; + let builtins = bindgen.take_local_ffi_imports(); + drop(bindgen); + self.ffi_imports.extend(builtins); - let camel_name = func.name.to_upper_camel_case(); - - let params = wasm_sig - .params - .iter() - .enumerate() - .map(|(i, param)| { - let ty = wasm_type(*param); - format!("p{i} : {ty}") - }) - .collect::>() - .join(", "); - - let mbt_sig = self.r#gen.pkg_resolver.mbt_sig(self.name, func, false); - let sig = self.sig_string(&mbt_sig, async_); - - let module = match module { - Some(key) => self.resolve.name_world_key(key), - None => "$root".into(), - }; - - self.r#generation_futures_and_streams_import("", func, interface_name); - - uwriteln!( - self.ffi, - r#"fn wasmImport{camel_name}({params}) {result_type} = "{module}" "{async_prefix}{name}""# - ); + let mbt_sig = self.world_gen.pkg_resolver.mbt_sig(self.name, func, false); + let sig = self.sig_string_with_direction(&mbt_sig, async_, Direction::Import); print_docs(&mut self.src, &func.docs); - if async_ { - src = self.r#generate_async_import_function(func, mbt_sig, &wasm_sig); - } - uwrite!( 
self.src, r#" - {sig} {{ + {sig} {{ {cleanup_list} {src} }} @@ -691,16 +740,54 @@ impl InterfaceGenerator<'_> { ); } - fn export(&mut self, interface: Option<&WorldKey>, func: &Function) { + fn export(&mut self, func: &Function) { + // Determine if is async let async_ = self - .r#gen + .world_gen .opts .async_ - .is_async(self.resolve, interface, func, false); + .is_async(self.resolve, self.interface, func, false) + || !func.find_futures_and_streams(self.resolve).is_empty(); if async_ { - self.r#gen.async_support.mark_async(); + self.world_gen.async_support.mark_async(); + self.generate_async_binding(func); + } + + // Generate stub for user + { + let mbt_sig = self.world_gen.pkg_resolver.mbt_sig(self.name, func, false); + let func_sig = self.sig_string_with_direction(&mbt_sig, async_, Direction::Export); + let mut ignored_params = mbt_sig + .params + .iter() + .map(|(name, _)| format!("ignore({name})")) + .collect::>(); + if async_ { + ignored_params.push("ignore(task_group)".to_string()); + } + let ignored_params = if ignored_params.is_empty() { + String::new() + } else { + format!("{}\n", ignored_params.join("\n")) + }; + let async_marker = if async_ { + "@async.protect_from_cancel(() => ())\n" + } else { + "" + }; + + print_docs(&mut self.stub, &func.docs); + uwrite!( + self.stub, + r#" + {func_sig} {{ + {ignored_params}{async_marker}abort("not implemented") + }} + "# + ); } + // Generate the caller function let variant = if async_ { AbiVariant::GuestExportAsync } else { @@ -708,47 +795,32 @@ impl InterfaceGenerator<'_> { }; let sig = self.resolve.wasm_signature(variant, func); - let mbt_sig = self.r#gen.pkg_resolver.mbt_sig(self.name, func, false); - - let func_sig = self.sig_string(&mbt_sig, async_); - let export_dir = self.r#gen.opts.r#gen_dir.clone(); - - let mut toplevel_generator = self.r#gen.interface( - self.resolve, - export_dir.as_str(), - self.module, - Direction::Export, - ); let mut bindgen = FunctionBindgen::new( - &mut toplevel_generator, - 
&func.name, - self.name, + self, (0..sig.params.len()).map(|i| format!("p{i}")).collect(), + Direction::Export, + async_, + true, ); abi::call( - bindgen.r#gen.resolve, + bindgen.interface_gen.resolve, variant, LiftLower::LiftArgsLowerResults, func, &mut bindgen, async_, ); - - // TODO: adapt async cleanup - assert!(!bindgen.needs_cleanup_list); - - // Async functions deferred task return - let deferred_task_return = bindgen.deferred_task_return.clone(); - + // Handle cleanup for both sync and async exports + let cleanup_list = if bindgen.needs_cleanup_list { + "let cleanup_list : Array[Int] = []" + } else { + "" + }; + let builtins = bindgen.take_local_ffi_imports(); let src = bindgen.src; - assert!(toplevel_generator.src.is_empty()); - assert!(toplevel_generator.ffi.is_empty()); - - // Transfer ffi_imports from toplevel_generator to self - self.ffi_imports - .extend(toplevel_generator.ffi_imports.iter()); + self.ffi_imports.extend(builtins); let result_type = match &sig.results[..] { [] => "Unit", @@ -758,7 +830,10 @@ impl InterfaceGenerator<'_> { let camel_name = func.name.to_upper_camel_case(); - let func_name = self.r#gen.export_ns.tmp(&format!("wasmExport{camel_name}")); + let func_name = self + .world_gen + .export_ns + .tmp(&format!("wasmExport{camel_name}")); let params = sig .params @@ -771,98 +846,144 @@ impl InterfaceGenerator<'_> { .collect::>() .join(", "); - // Async export prefix for FFI - let async_export_prefix = if async_ { "[async-lift]" } else { "" }; - // Async functions return type - let interface_name = match interface { - Some(key) => Some(self.resolve.name_world_key(key)), - None => None, - }; - - let export_name = func.legacy_core_export_name(interface_name.as_deref()); - let module_name = interface_name.as_deref().unwrap_or("$root"); - self.r#generation_futures_and_streams_import("[export]", func, module_name); + if async_ { + let async_pkg = self + .world_gen + .pkg_resolver + .qualify_package(self.name, ASYNC_DIR); + uwrite!( + 
self.ffi, + r#" + #doc(hidden) + pub fn {func_name}({params}) -> {result_type} {{ + {async_pkg}with_waitableset(async fn() {{ + // Intentionally run export body in a task-group child task. + // MoonBit's structured concurrency model uses the task group + // as an umbrella for async work started by the export. + {async_pkg}with_task_group(async fn(task_group) {{ + {cleanup_list} + {src} + }}) + }}) + }} + "#, + ); + } else { + uwrite!( + self.ffi, + r#" + #doc(hidden) + pub fn {func_name}({params}) -> {result_type} {{ + {cleanup_list} + {src} + }} + "#, + ); + } + let export_name = self.resolve.wasm_export_name( + ManglingAndAbi::Legacy(if async_ { + LiftLowerAbi::AsyncCallback + } else { + LiftLowerAbi::Sync + }), + WasmExport::Func { + interface: self.interface, + func, + kind: WasmExportKind::Normal, + }, + ); - uwrite!( - self.ffi, + let export = format!( r#" + #doc(hidden) pub fn {func_name}({params}) -> {result_type} {{ - {src} + {}{func_name}({}) }} "#, + self.world_gen + .pkg_resolver + .qualify_package(self.world_gen.opts.gen_dir.as_str(), self.name), + (0..sig.params.len()) + .map(|i| format!("p{i}")) + .collect::>() + .join(", "), ); - self.r#gen + self.world_gen .export - .insert(func_name, format!("{async_export_prefix}{export_name}")); + .insert(export_name, (func_name, export)); + // If async, we also need a callback function and a task_return intrinsic if async_ { - let snake = self.r#gen.name.to_lower_camel_case(); - let export_func_name = self - .r#gen - .export_ns - .tmp(&format!("wasmExport{snake}Async{camel_name}")); - let DeferredTaskReturn::Emitted { - body: task_return_body, - params: task_return_params, - return_param, - } = deferred_task_return - else { - unreachable!() - }; - let func_name = func.name.clone(); - let import_module = self.resolve.name_world_key(interface.unwrap()); - self.r#gen.export.insert( - export_func_name.clone(), - format!("[callback]{async_export_prefix}{export_name}"), + let export_name = 
self.resolve.wasm_export_name( + ManglingAndAbi::Legacy(LiftLowerAbi::AsyncCallback), + WasmExport::Func { + interface: self.interface, + func, + kind: WasmExportKind::Callback, + }, ); - let task_return_param_tys = task_return_params + let export_dir = self.world_gen.opts.r#gen_dir.clone(); + + let (task_return_module, task_return_name, signature) = + func.task_return_import(self.resolve, self.interface, Mangling::Legacy); + + let params = signature + .params .iter() .enumerate() - .map(|(idx, (ty, _expr))| format!("p{}: {}", idx, wasm_type(*ty))) - .collect::>() - .join(", "); - let task_return_param_exprs = task_return_params - .iter() - .map(|(_ty, expr)| expr.as_str()) + .map(|(i, param)| { + let ty = wasm_type(*param); + format!("p{i} : {ty}") + }) .collect::>() .join(", "); - let return_ty = match &func.result { - Some(result) => self - .r#gen - .pkg_resolver - .type_name(self.name, result) - .to_string(), - None => "Unit".into(), - }; - let return_expr = match return_ty.as_str() { - "Unit" => "".into(), - _ => format!("{return_param}: {return_ty}",), - }; - let snake_func_name = func.name.to_moonbit_ident().to_string(); - let ffi = self.r#gen.pkg_resolver.qualify_package(self.name, FFI_DIR); + let async_pkg = self + .world_gen + .pkg_resolver + .qualify_package(&export_dir, ASYNC_DIR); uwriteln!( - self.src, + self.ffi, r#" - fn {export_func_name}TaskReturn({task_return_param_tys}) = "[export]{import_module}" "[task-return]{func_name}" - - pub fn {snake_func_name}_task_return({return_expr}) -> Unit {{ - {task_return_body} - {export_func_name}TaskReturn({task_return_param_exprs}) - }} - "# + fn wasmExportTaskReturn{}({params}) = "{task_return_module}" "{task_return_name}" + "#, + func.name.to_upper_camel_case() ); + let func_name = self + .world_gen + .export_ns + .tmp(&format!("wasmExport{}CB", func.name.to_upper_camel_case())); + uwriteln!( self.ffi, r#" - pub fn {export_func_name}(event_raw: Int, waitable: Int, code: Int) -> Int {{ - 
{ffi}callback(event_raw, waitable, code) + #doc(hidden) + pub fn {func_name}(event_raw: Int, waitable: Int, code: Int) -> Int {{ + {async_pkg}cb(event_raw, waitable, code) }} - "# + "#, ); - } else if abi::guest_export_needs_post_return(self.resolve, func) { + let export = format!( + r#" + #doc(hidden) + pub fn {func_name}(event_raw: Int, waitable: Int, code: Int) -> Int {{ + {}{func_name}(event_raw, waitable, code) + }} + "#, + self.world_gen + .pkg_resolver + .qualify_package(&self.world_gen.opts.gen_dir, self.name), + ); + + self.world_gen + .export + .insert(export_name, (func_name.clone(), export)); + } + + // If post return is needed, generate it + if !async_ && abi::guest_export_needs_post_return(self.resolve, func) { let params = sig .results .iter() @@ -876,63 +997,103 @@ impl InterfaceGenerator<'_> { let mut bindgen = FunctionBindgen::new( self, - "INVALID", - self.name, (0..sig.results.len()).map(|i| format!("p{i}")).collect(), + Direction::Export, + false, // post-return is not async + true, ); - abi::post_return(bindgen.r#gen.resolve, func, &mut bindgen); - + abi::post_return(bindgen.interface_gen.resolve, func, &mut bindgen); + let builtins = bindgen.take_local_ffi_imports(); let src = bindgen.src; + self.ffi_imports.extend(builtins); let func_name = self - .r#gen + .world_gen .export_ns .tmp(&format!("wasmExport{camel_name}PostReturn")); uwrite!( self.ffi, r#" + #doc(hidden) pub fn {func_name}({params}) -> Unit {{ {src} }} "# ); - self.r#gen + let export_name = self.resolve.wasm_export_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmExport::Func { + interface: self.interface, + func, + kind: WasmExportKind::PostReturn, + }, + ); + let export = format!( + r#" + #doc(hidden) + pub fn {func_name}({params}) -> Unit {{ + {}{func_name}({}) + }} + "#, + self.world_gen + .pkg_resolver + .qualify_package(self.world_gen.opts.gen_dir.as_str(), self.name), + (0..sig.results.len()) + .map(|i| format!("p{i}")) + .collect::>() + .join(", "), + ); + 
self.world_gen .export - .insert(func_name, format!("cabi_post_{export_name}")); + .insert(export_name, (func_name, export)); } - - print_docs(&mut self.stub, &func.docs); - uwrite!( - self.stub, - r#" - {func_sig} {{ - ... - }} - "# - ); } - fn sig_string(&mut self, sig: &MoonbitSignature, async_: bool) -> String { - let params = sig + fn sig_string_with_direction( + &mut self, + sig: &MoonbitSignature, + async_: bool, + direction: Direction, + ) -> String { + // Compute result type first (needed for taskgroup parameter type) + let result_type = match &sig.result_type { + None => "Unit".into(), + Some(ty) => match direction { + Direction::Export => self + .world_gen + .pkg_resolver + .type_name_for_lowering(self.name, ty), + Direction::Import => self.world_gen.pkg_resolver.type_name(self.name, ty), + }, + }; + + let mut params = sig .params .iter() .map(|(name, ty)| { - let ty = self.r#gen.pkg_resolver.type_name(self.name, ty); + let ty = self.world_gen.pkg_resolver.type_name(self.name, ty); format!("{name} : {ty}") }) .collect::>(); + // For async exports, add a task-group parameter. + // + // This is intentionally `TaskGroup[Unit]` even when the function result + // type is not `Unit`: this task group models the umbrella lifetime for + // export-side structured concurrency and cancellation, rather than the + // direct return payload type of the exported function. 
+ if async_ && matches!(direction, Direction::Export) { + params.push("task_group : @async.TaskGroup[Unit]".to_string()); + } + let params = params.join(", "); - let (async_prefix, async_suffix) = if async_ { ("async ", "") } else { ("", "") }; - let result_type = match &sig.result_type { - None => "Unit".into(), - Some(ty) => self.r#gen.pkg_resolver.type_name(self.name, ty), - }; format!( - "pub {async_prefix}fn {}({params}) -> {}{async_suffix}", - sig.name, result_type + "pub {}fn {}({params}) -> {}", + if async_ { "async " } else { "" }, + sig.name, + result_type ) } } @@ -954,7 +1115,7 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { format!( "{} : {}", field.name.to_moonbit_ident(), - self.r#gen.pkg_resolver.type_name(self.name, &field.ty), + self.world_gen.pkg_resolver.type_name(self.name, &field.ty), ) }) .collect::>() @@ -979,9 +1140,8 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { ); } - fn type_resource(&mut self, _id: TypeId, name: &str, docs: &Docs) { + fn type_resource(&mut self, id: TypeId, name: &str, docs: &Docs) { print_docs(&mut self.src, docs); - let type_name = name; let name = name.to_moonbit_type_ident(); let mut deriviation: Vec<_> = Vec::new(); @@ -1005,9 +1165,15 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { deriviation.join(", "), ); - let module = self.module; - if self.direction == Direction::Import { + let (drop_module, drop_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmImport::ResourceIntrinsic { + resource: id, + interface: self.interface, + intrinsic: ResourceIntrinsic::ImportedDrop, + }, + ); uwrite!( &mut self.src, r#" @@ -1022,10 +1188,34 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { uwrite!( &mut self.ffi, r#" - fn wasmImportResourceDrop{name}(resource : Int) = "{module}" "[resource-drop]{type_name}" + fn wasmImportResourceDrop{name}(resource : 
Int) = "{drop_module}" "{drop_name}" "#, ) } else { + let (drop_module, drop_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmImport::ResourceIntrinsic { + resource: id, + interface: self.interface, + intrinsic: ResourceIntrinsic::ExportedDrop, + }, + ); + let (new_module, new_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmImport::ResourceIntrinsic { + resource: id, + interface: self.interface, + intrinsic: ResourceIntrinsic::ExportedNew, + }, + ); + let (rep_module, rep_name) = self.resolve.wasm_import_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmImport::ResourceIntrinsic { + resource: id, + interface: self.interface, + intrinsic: ResourceIntrinsic::ExportedRep, + }, + ); uwrite!( &mut self.src, r#" @@ -1033,21 +1223,21 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { pub fn {name}::new(rep : Int) -> {name} {{ {name}::{name}(wasmExportResourceNew{name}(rep)) }} - fn wasmExportResourceNew{name}(rep : Int) -> Int = "[export]{module}" "[resource-new]{type_name}" + fn wasmExportResourceNew{name}(rep : Int) -> Int = "{new_module}" "{new_name}" /// Drops a resource handle. pub fn {name}::drop(self : Self) -> Unit {{ let {name}(resource) = self wasmExportResourceDrop{name}(resource) }} - fn wasmExportResourceDrop{name}(resource : Int) = "[export]{module}" "[resource-drop]{type_name}" + fn wasmExportResourceDrop{name}(resource : Int) = "{drop_module}" "{drop_name}" /// Gets the `Int` representation of the resource pointed to the given handle. 
pub fn {name}::rep(self : Self) -> Int {{ let {name}(resource) = self wasmExportResourceRep{name}(resource) }} - fn wasmExportResourceRep{name}(resource : Int) -> Int = "[export]{module}" "[resource-rep]{type_name}" + fn wasmExportResourceRep{name}(resource : Int) -> Int = "{rep_module}" "{rep_name}" "#, ); @@ -1056,35 +1246,48 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { r#" /// Destructor of the resource. pub fn {name}::dtor(_self : {name}) -> Unit {{ - ... + abort("not implemented") }} "# ); - let func_name = self.r#gen.export_ns.tmp(&format!("wasmExport{name}Dtor")); - - let export_dir = self.r#gen.opts.r#gen_dir.clone(); - - let r#gen = - self.r#gen - .interface(self.resolve, export_dir.as_str(), "", Direction::Export); + let func_name = self + .world_gen + .export_ns + .tmp(&format!("wasmExport{name}Dtor")); uwrite!( self.ffi, r#" + #doc(hidden) pub fn {func_name}(handle : Int) -> Unit {{ - {}{name}::dtor(handle) + {name}::dtor(handle) }} "#, - r#gen - .r#gen - .pkg_resolver - .qualify_package(r#gen.name, self.name) ); - self.r#gen + let export_name = self.resolve.wasm_export_name( + ManglingAndAbi::Legacy(LiftLowerAbi::Sync), + WasmExport::ResourceDtor { + interface: self.interface.unwrap(), + resource: id, + }, + ); + + let export = format!( + r#" + #doc(hidden) + pub fn {func_name}(handle : Int) -> Unit {{ + {}{func_name}(handle) + }} + "#, + self.world_gen + .pkg_resolver + .qualify_package(self.world_gen.opts.gen_dir.as_str(), self.name), + ); + self.world_gen .export - .insert(func_name, format!("{module}#[dtor]{type_name}")); + .insert(export_name, (func_name, export)); } } @@ -1194,7 +1397,7 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { .map(|case| { let name = case.name.to_upper_camel_case(); if let Some(ty) = case.ty { - let ty = self.r#gen.pkg_resolver.type_name(self.name, &ty); + let ty = self.world_gen.pkg_resolver.type_name(self.name, &ty); format!("{name}({ty})") } else { 
name.to_string() @@ -1331,15 +1534,15 @@ impl<'a> wit_bindgen_core::InterfaceGenerator<'a> for InterfaceGenerator<'a> { } fn type_future(&mut self, _id: TypeId, _name: &str, _ty: &Option, _docs: &Docs) { - unimplemented!() // Not needed + // Not needed. They will become `CMFuture[T]` in MoonBit. } fn type_stream(&mut self, _id: TypeId, _name: &str, _ty: &Option, _docs: &Docs) { - unimplemented!() // Not needed + // Not needed. They will become `CMStream[T]` in MoonBit. } fn type_builtin(&mut self, _id: TypeId, _name: &str, _ty: &Type, _docs: &Docs) { - unimplemented!(); + // Not needed. } } @@ -1357,24 +1560,8 @@ struct BlockStorage { cleanup: Vec, } -#[derive(Clone, Debug)] -enum DeferredTaskReturn { - None, - Generating { - prev_src: String, - return_param: String, - }, - Emitted { - params: Vec<(WasmType, String)>, - body: String, - return_param: String, - }, -} - struct FunctionBindgen<'a, 'b> { - r#gen: &'b mut InterfaceGenerator<'a>, - func_name: &'b str, - func_interface: &'b str, + interface_gen: &'b mut InterfaceGenerator<'a>, params: Box<[String]>, src: String, locals: Ns, @@ -1383,24 +1570,26 @@ struct FunctionBindgen<'a, 'b> { payloads: Vec, cleanup: Vec, needs_cleanup_list: bool, - deferred_task_return: DeferredTaskReturn, + defer_cleanup: bool, + direction: Direction, + async_: bool, + local_ffi_imports: HashSet<&'static str>, } impl<'a, 'b> FunctionBindgen<'a, 'b> { fn new( r#gen: &'b mut InterfaceGenerator<'a>, - func_name: &'b str, - func_interface: &'b str, params: Box<[String]>, + direction: Direction, + async_: bool, + defer_cleanup: bool, ) -> FunctionBindgen<'a, 'b> { let mut locals = Ns::default(); params.iter().for_each(|str| { locals.tmp(str); }); Self { - r#gen, - func_name, - func_interface, + interface_gen: r#gen, params, src: String::new(), locals, @@ -1409,10 +1598,17 @@ impl<'a, 'b> FunctionBindgen<'a, 'b> { payloads: Vec::new(), cleanup: Vec::new(), needs_cleanup_list: false, - deferred_task_return: DeferredTaskReturn::None, + 
defer_cleanup, + direction, + async_, + local_ffi_imports: HashSet::new(), } } + fn take_local_ffi_imports(&mut self) -> HashSet<&'static str> { + std::mem::take(&mut self.local_ffi_imports) + } + fn lower_variant( &mut self, cases: &[(&str, Option)], @@ -1453,8 +1649,8 @@ impl<'a, 'b> FunctionBindgen<'a, 'b> { .join(", "); let payload = if self - .r#gen - .r#gen + .interface_gen + .world_gen .pkg_resolver .non_empty_type(ty.as_ref()) .is_some() @@ -1520,11 +1716,7 @@ impl<'a, 'b> FunctionBindgen<'a, 'b> { .collect::>(); // Hacky way to get the type name without type parameter - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, ty); + let ty = self.resolve_constructor(ty); let lifted = self.locals.tmp("lifted"); let cases = cases @@ -1533,8 +1725,8 @@ impl<'a, 'b> FunctionBindgen<'a, 'b> { .enumerate() .map(|(i, ((case_name, case_ty), Block { body, results, .. }))| { let payload = if self - .r#gen - .r#gen + .interface_gen + .world_gen .pkg_resolver .non_empty_type(case_ty.as_ref()) .is_some() @@ -1580,7 +1772,33 @@ impl<'a, 'b> FunctionBindgen<'a, 'b> { "# ); - results.push(lifted); + results.push(lifted); + } + + // Utilities + fn resolve_constructor(&mut self, ty: &Type) -> String { + self.interface_gen + .world_gen + .pkg_resolver + .type_constructor(self.interface_gen.name, ty) + } + + fn resolve_type_name(&mut self, ty: &Type) -> String { + self.interface_gen + .world_gen + .pkg_resolver + .type_name(self.interface_gen.name, ty) + } + + fn resolve_type_name_for_lowering(&mut self, ty: &Type) -> String { + self.interface_gen + .world_gen + .pkg_resolver + .type_name_for_lowering(self.interface_gen.name, ty) + } + + fn use_ffi(&mut self, str: &'static str) { + self.local_ffi_imports.insert(str); } } @@ -1638,13 +1856,13 @@ impl Bindgen for FunctionBindgen<'_, '_> { Instruction::U8FromI32 => results.push(format!("({}).to_byte()", operands[0])), Instruction::I32FromS8 => { - self.r#gen.ffi_imports.insert(ffi::EXTEND8); + 
self.use_ffi(ffi::EXTEND8); results.push(format!("mbt_ffi_extend8({})", operands[0])) } Instruction::S8FromI32 => results.push(format!("({} - 0x100)", operands[0])), Instruction::S16FromI32 => results.push(format!("({} - 0x10000)", operands[0])), Instruction::I32FromS16 => { - self.r#gen.ffi_imports.insert(ffi::EXTEND16); + self.use_ffi(ffi::EXTEND16); results.push(format!("mbt_ffi_extend16({})", operands[0])) } Instruction::U16FromI32 => results.push(format!( @@ -1674,11 +1892,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Int::U8 => { let op = &operands[0]; let flag = self.locals.tmp("flag"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, &Type::Id(*ty)); + let ty = self.resolve_constructor(&Type::Id(*ty)); uwriteln!( self.src, r#" @@ -1690,11 +1904,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Int::U16 | Int::U32 => { let op = &operands[0]; let flag = self.locals.tmp("flag"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, &Type::Id(*ty)); + let ty = self.resolve_constructor(&Type::Id(*ty)); uwriteln!( self.src, r#" @@ -1706,11 +1916,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Int::U64 => { let op = &operands[0]; let flag = self.locals.tmp("flag"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, &Type::Id(*ty)); + let ty = self.resolve_constructor(&Type::Id(*ty)); uwriteln!( self.src, r#" @@ -1726,27 +1932,21 @@ impl Bindgen for FunctionBindgen<'_, '_> { Int::U8 => { results.push(format!( "{}({}.to_byte())", - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)), + self.resolve_type_name(&Type::Id(*ty)), operands[0] )); } Int::U16 | Int::U32 => { results.push(format!( "{}({}.reinterpret_as_uint())", - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)), + self.resolve_type_name(&Type::Id(*ty)), operands[0] )); } Int::U64 => { results.push(format!( 
"{}(({}).reinterpret_as_uint().to_uint64() | (({}).reinterpret_as_uint().to_uint64() << 32))", - self.r#gen.r#gen.pkg_resolver.type_name(self.r#gen.name, &Type::Id(*ty)), + self.resolve_type_name(&Type::Id(*ty)), operands[0], operands[1] )); @@ -1756,11 +1956,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Instruction::HandleLower { ty, .. } => { let op = &operands[0]; let handle = self.locals.tmp("handle"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, &Type::Id(*ty)); + let ty = self.resolve_constructor(&Type::Id(*ty)); uwrite!( self.src, r#" @@ -1771,12 +1967,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::HandleLift { ty, .. } => { let op = &operands[0]; - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_constructor(self.r#gen.name, &Type::Id(*ty)); - + let ty = self.resolve_constructor(&Type::Id(*ty)); results.push(format!( "{}::{}({})", ty, @@ -1805,10 +1996,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { results.push(format!( "{}::{{{ops}}}", - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)) + self.resolve_type_name(&Type::Id(*ty)) )); } @@ -1928,29 +2116,15 @@ impl Bindgen for FunctionBindgen<'_, '_> { ); } - Instruction::OptionLift { payload, ty } => { + Instruction::OptionLift { ty, .. 
} => { let some = self.blocks.pop().unwrap(); let _none = self.blocks.pop().unwrap(); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)); + let ty = self.resolve_type_name(&Type::Id(*ty)); let lifted = self.locals.tmp("lifted"); let op = &operands[0]; - let payload = if self - .r#gen - .r#gen - .pkg_resolver - .non_empty_type(Some(*payload)) - .is_some() - { - some.results.into_iter().next().unwrap() - } else { - "None".into() - }; + let assignment = some.results.first().unwrap(); let some = some.body; @@ -1961,7 +2135,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { 0 => Option::None 1 => {{ {some} - Option::Some({payload}) + Option::Some({assignment}) }} _ => panic() }} @@ -1995,10 +2169,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Instruction::EnumLift { ty, .. } => results.push(format!( "{}::from({})", - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)), + self.resolve_type_name(&Type::Id(*ty)), operands[0] )), @@ -2006,7 +2177,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Type::U8 => { let op = &operands[0]; let ptr = self.locals.tmp("ptr"); - self.r#gen.ffi_imports.insert(ffi::BYTES2PTR); + self.use_ffi(ffi::BYTES2PTR); uwriteln!( self.src, " @@ -2024,27 +2195,27 @@ impl Bindgen for FunctionBindgen<'_, '_> { let ptr = self.locals.tmp("ptr"); let ty = match element { Type::U32 => { - self.r#gen.ffi_imports.insert(ffi::UINT_ARRAY2PTR); + self.use_ffi(ffi::UINT_ARRAY2PTR); "uint" } Type::U64 => { - self.r#gen.ffi_imports.insert(ffi::UINT64_ARRAY2PTR); + self.use_ffi(ffi::UINT64_ARRAY2PTR); "uint64" } Type::S32 => { - self.r#gen.ffi_imports.insert(ffi::INT_ARRAY2PTR); + self.use_ffi(ffi::INT_ARRAY2PTR); "int" } Type::S64 => { - self.r#gen.ffi_imports.insert(ffi::INT64_ARRAY2PTR); + self.use_ffi(ffi::INT64_ARRAY2PTR); "int64" } Type::F32 => { - self.r#gen.ffi_imports.insert(ffi::FLOAT_ARRAY2PTR); + self.use_ffi(ffi::FLOAT_ARRAY2PTR); "float" } Type::F64 => { - 
self.r#gen.ffi_imports.insert(ffi::DOUBLE_ARRAY2PTR); + self.use_ffi(ffi::DOUBLE_ARRAY2PTR); "double" } _ => unreachable!(), @@ -2070,7 +2241,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { let result = self.locals.tmp("result"); let address = &operands[0]; let length = &operands[1]; - self.r#gen.ffi_imports.insert(ffi::PTR2BYTES); + self.use_ffi(ffi::PTR2BYTES); uwrite!( self.src, " @@ -2083,27 +2254,27 @@ impl Bindgen for FunctionBindgen<'_, '_> { Type::U32 | Type::U64 | Type::S32 | Type::S64 | Type::F32 | Type::F64 => { let ty = match element { Type::U32 => { - self.r#gen.ffi_imports.insert(ffi::PTR2UINT_ARRAY); + self.use_ffi(ffi::PTR2UINT_ARRAY); "uint" } Type::U64 => { - self.r#gen.ffi_imports.insert(ffi::PTR2UINT64_ARRAY); + self.use_ffi(ffi::PTR2UINT64_ARRAY); "uint64" } Type::S32 => { - self.r#gen.ffi_imports.insert(ffi::PTR2INT_ARRAY); + self.use_ffi(ffi::PTR2INT_ARRAY); "int" } Type::S64 => { - self.r#gen.ffi_imports.insert(ffi::PTR2INT64_ARRAY); + self.use_ffi(ffi::PTR2INT64_ARRAY); "int64" } Type::F32 => { - self.r#gen.ffi_imports.insert(ffi::PTR2FLOAT_ARRAY); + self.use_ffi(ffi::PTR2FLOAT_ARRAY); "float" } Type::F64 => { - self.r#gen.ffi_imports.insert(ffi::PTR2DOUBLE_ARRAY); + self.use_ffi(ffi::PTR2DOUBLE_ARRAY); "double" } _ => unreachable!(), @@ -2129,7 +2300,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { let op = &operands[0]; let ptr = self.locals.tmp("ptr"); - self.r#gen.ffi_imports.insert(ffi::STR2PTR); + self.use_ffi(ffi::STR2PTR); uwrite!( self.src, " @@ -2149,7 +2320,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { let address = &operands[0]; let length = &operands[1]; - self.r#gen.ffi_imports.insert(ffi::PTR2STR); + self.use_ffi(ffi::PTR2STR); uwrite!( self.src, " @@ -2168,17 +2339,23 @@ impl Bindgen for FunctionBindgen<'_, '_> { assert!(block_results.is_empty()); let op = &operands[0]; - let size = self.r#gen.r#gen.sizes.size(element).size_wasm32(); - let _align = self.r#gen.r#gen.sizes.align(element).align_wasm32(); + let size = self + 
.interface_gen + .world_gen + .sizes + .size(element) + .size_wasm32(); + let _align = self + .interface_gen + .world_gen + .sizes + .align(element) + .align_wasm32(); let address = self.locals.tmp("address"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, element); + let ty = self.resolve_type_name(element); let index = self.locals.tmp("index"); - self.r#gen.ffi_imports.insert(ffi::MALLOC); + self.use_ffi(ffi::MALLOC); uwrite!( self.src, " @@ -2207,12 +2384,13 @@ impl Bindgen for FunctionBindgen<'_, '_> { let address = &operands[0]; let length = &operands[1]; let array = self.locals.tmp("array"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, element); - let size = self.r#gen.r#gen.sizes.size(element).size_wasm32(); + let ty = self.resolve_type_name(element); + let size = self + .interface_gen + .world_gen + .sizes + .size(element) + .size_wasm32(); // let align = self.r#gen.r#gen.sizes.align(element); let index = self.locals.tmp("index"); @@ -2221,7 +2399,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { _ => todo!("result count == {}", results.len()), }; - self.r#gen.ffi_imports.insert(ffi::FREE); + self.use_ffi(ffi::FREE); uwrite!( self.src, " @@ -2238,12 +2416,128 @@ impl Bindgen for FunctionBindgen<'_, '_> { results.push(array); } + Instruction::FixedLengthListLift { + element, + size, + id: _, + } => { + let mut lifted = Vec::with_capacity(*size as usize); + for operand in operands.drain(0..(*size as usize)) { + lifted.push(operand); + } + let ty = self.resolve_type_name(element); + if lifted.is_empty() { + results.push(format!("([] : FixedArray[{ty}])")); + } else { + results.push(format!("([{}] : FixedArray[{ty}])", lifted.join(", "))); + } + } + + Instruction::FixedLengthListLower { + element: _, + size, + id: _, + } => { + let op = &operands[0]; + for i in 0..(*size as usize) { + results.push(format!("({op})[{i}]")); + } + } + + Instruction::FixedLengthListLowerToMemory { + element, + size, 
+ id: _, + } => { + let Block { + body, + results: block_results, + } = self.blocks.pop().unwrap(); + assert!(block_results.is_empty()); + + let op = &operands[0]; + let target = &operands[1]; + let ty = self.resolve_type_name(element); + let elem_size = self + .interface_gen + .world_gen + .sizes + .size(element) + .size_wasm32(); + + for i in 0..(*size as usize) { + uwrite!( + self.src, + " + {{ + let iter_elem : {ty} = ({op})[{i}] + let iter_base = ({target}) + ({i} * {elem_size}) + {body} + }} + ", + ); + } + } + + Instruction::FixedLengthListLiftFromMemory { + element, + size, + id: _, + } => { + let Block { + body, + results: block_results, + } = self.blocks.pop().unwrap(); + let address = &operands[0]; + let ty = self.resolve_type_name(element); + let elem_size = self + .interface_gen + .world_gen + .sizes + .size(element) + .size_wasm32(); + + let element_result = match &block_results[..] { + [result] => result, + _ => todo!("result count == {}", block_results.len()), + }; + + let mut lifted = Vec::with_capacity(*size as usize); + for i in 0..(*size as usize) { + let value = self.locals.tmp("fixed_elem"); + uwrite!( + self.src, + " + let {value} : {ty} = {{ + let iter_base = ({address}) + ({i} * {elem_size}) + {body} + {element_result} + }} + ", + ); + lifted.push(value); + } + + if lifted.is_empty() { + results.push(format!("([] : FixedArray[{ty}])")); + } else { + results.push(format!("([{}] : FixedArray[{ty}])", lifted.join(", "))); + } + } + Instruction::IterElem { .. } => results.push("iter_elem".into()), Instruction::IterBasePointer => results.push("iter_base".into()), - Instruction::CallWasm { sig, .. } => { + Instruction::AsyncTaskReturn { name, .. } => { + let func_name = name.to_upper_camel_case(); + let operands = operands.join(", "); + uwriteln!(self.src, "wasmExport{func_name}({operands});"); + } + + Instruction::CallWasm { sig, name } => { let assignment = match &sig.results[..] 
{ + [] => String::new(), [result] => { let ty = wasm_type(*result); let result = self.locals.tmp("result"); @@ -2251,106 +2545,36 @@ impl Bindgen for FunctionBindgen<'_, '_> { results.push(result); assignment } - - [] => String::new(), - _ => unreachable!(), }; - let func_name = self.func_name.to_upper_camel_case(); + let func_name = name.to_upper_camel_case(); let operands = operands.join(", "); // TODO: handle this to support async functions uwriteln!(self.src, "{assignment} wasmImport{func_name}({operands});"); } - Instruction::CallInterface { func, async_ } => { - let name = self.r#gen.r#gen.pkg_resolver.func_call( - self.r#gen.name, + Instruction::CallInterface { func, .. } => { + let name = self.interface_gen.world_gen.pkg_resolver.func_call( + self.interface_gen.name, func, - self.func_interface, + self.interface_gen.name, ); let args = operands.join(", "); - if *async_ { - let (async_func_result, task_return_result, task_return_type) = - match func.result { - Some(ty) => { - let res = self.locals.tmp("return_result"); - ( - res.clone(), - res, - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &ty), - ) - } - None => ("_ignore".into(), "".into(), "Unit".into()), - }; - - if func.result.is_some() { - results.push(async_func_result.clone()); - } - let ffi = self - .r#gen - .r#gen - .pkg_resolver - .qualify_package(self.r#gen.name, FFI_DIR); - uwrite!( - self.src, - r#" - let task = {ffi}current_task(); - let _ = task.with_waitable_set(fn(task) {{ - let {async_func_result}: Ref[{task_return_type}?] 
= Ref::new(None) - task.wait(fn() {{ - {async_func_result}.val = Some({name}({args})); - }}) - for {{ - if task.no_wait() && {async_func_result}.val is Some({async_func_result}){{ - {name}_task_return({task_return_result}); - break; - }} else {{ - {ffi}suspend() catch {{ - _ => {{ - {ffi}task_cancel(); - }} - }} - }} - }} - }}) - if task.is_fail() is Some({ffi}Cancelled::Cancelled) {{ - {ffi}task_cancel(); - return {ffi}CallbackCode::Exit.encode() - }} - if task.is_done() {{ - return {ffi}CallbackCode::Exit.encode() - }} - return {ffi}CallbackCode::Wait(task.handle()).encode() - "#, - ); - assert!(matches!( - self.deferred_task_return, - DeferredTaskReturn::None - )); - self.deferred_task_return = DeferredTaskReturn::Generating { - prev_src: mem::take(&mut self.src), - return_param: async_func_result.to_string(), - }; - return; - } - let assignment = match func.result { None => "let _ = ".into(), Some(ty) => { - let ty = format!( - "({})", - self.r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &ty) - ); + // For exports, use lowering type names. + // For imports, use lifting type names. + let ty = match self.direction { + Direction::Export => { + format!("({})", self.resolve_type_name_for_lowering(&ty)) + } + Direction::Import => format!("({})", self.resolve_type_name(&ty)), + }; let result = self.locals.tmp("result"); if func.result.is_some() { results.push(result.clone()); @@ -2360,12 +2584,27 @@ impl Bindgen for FunctionBindgen<'_, '_> { } }; - uwrite!( - self.src, - " - {assignment}{name}({args}); - ", - ); + // For async exports, pass the task_group that was already created + if self.async_ && matches!(self.direction, Direction::Export) { + let args_with_tg = if args.is_empty() { + "task_group".to_string() + } else { + format!("{args}, task_group") + }; + uwrite!( + self.src, + " + {assignment}{name}({args_with_tg}); + ", + ); + } else { + uwrite!( + self.src, + " + {assignment}{name}({args}); + ", + ); + } } Instruction::Return { amt, .. 
} => { @@ -2384,16 +2623,21 @@ impl Bindgen for FunctionBindgen<'_, '_> { } else { Vec::new() }; - + if !self.cleanup.is_empty() || self.needs_cleanup_list { + self.use_ffi(ffi::FREE); + } for clean in &self.cleanup { let address = &clean.address; - self.r#gen.ffi_imports.insert(ffi::FREE); uwriteln!(self.src, "mbt_ffi_free({address})",); } if self.needs_cleanup_list { - self.r#gen.ffi_imports.insert(ffi::FREE); - uwriteln!(self.src, "cleanup_list.each(mbt_ffi_free)",); + uwrite!( + self.src, + " + cleanup_list.each(mbt_ffi_free) + ", + ); } match *amt { @@ -2409,7 +2653,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Instruction::I32Load { offset } | Instruction::PointerLoad { offset } | Instruction::LengthLoad { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOAD32); + self.use_ffi(ffi::LOAD32); results.push(format!( "mbt_ffi_load32(({}) + {offset})", operands[0], @@ -2418,7 +2662,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Load8U { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOAD8_U); + self.use_ffi(ffi::LOAD8_U); results.push(format!( "mbt_ffi_load8_u(({}) + {offset})", operands[0], @@ -2427,7 +2671,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Load8S { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOAD8); + self.use_ffi(ffi::LOAD8); results.push(format!( "mbt_ffi_load8(({}) + {offset})", operands[0], @@ -2436,7 +2680,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Load16U { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOAD16_U); + self.use_ffi(ffi::LOAD16_U); results.push(format!( "mbt_ffi_load16_u(({}) + {offset})", operands[0], @@ -2445,7 +2689,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Load16S { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOAD16); + self.use_ffi(ffi::LOAD16); results.push(format!( "mbt_ffi_load16(({}) + {offset})", operands[0], @@ -2454,7 +2698,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I64Load { offset } => { 
- self.r#gen.ffi_imports.insert(ffi::LOAD64); + self.use_ffi(ffi::LOAD64); results.push(format!( "mbt_ffi_load64(({}) + {offset})", operands[0], @@ -2463,7 +2707,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::F32Load { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOADF32); + self.use_ffi(ffi::LOADF32); results.push(format!( "mbt_ffi_loadf32(({}) + {offset})", operands[0], @@ -2472,7 +2716,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::F64Load { offset } => { - self.r#gen.ffi_imports.insert(ffi::LOADF64); + self.use_ffi(ffi::LOADF64); results.push(format!( "mbt_ffi_loadf64(({}) + {offset})", operands[0], @@ -2483,7 +2727,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { Instruction::I32Store { offset } | Instruction::PointerStore { offset } | Instruction::LengthStore { offset } => { - self.r#gen.ffi_imports.insert(ffi::STORE32); + self.use_ffi(ffi::STORE32); uwriteln!( self.src, "mbt_ffi_store32(({}) + {offset}, {})", @@ -2494,7 +2738,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Store8 { offset } => { - self.r#gen.ffi_imports.insert(ffi::STORE8); + self.use_ffi(ffi::STORE8); uwriteln!( self.src, "mbt_ffi_store8(({}) + {offset}, {})", @@ -2505,7 +2749,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I32Store16 { offset } => { - self.r#gen.ffi_imports.insert(ffi::STORE16); + self.use_ffi(ffi::STORE16); uwriteln!( self.src, "mbt_ffi_store16(({}) + {offset}, {})", @@ -2516,7 +2760,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::I64Store { offset } => { - self.r#gen.ffi_imports.insert(ffi::STORE64); + self.use_ffi(ffi::STORE64); uwriteln!( self.src, "mbt_ffi_store64(({}) + {offset}, {})", @@ -2527,7 +2771,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::F32Store { offset } => { - self.r#gen.ffi_imports.insert(ffi::STOREF32); + self.use_ffi(ffi::STOREF32); uwriteln!( self.src, "mbt_ffi_storef32(({}) + {offset}, {})", @@ -2538,7 +2782,7 @@ impl Bindgen for 
FunctionBindgen<'_, '_> { } Instruction::F64Store { offset } => { - self.r#gen.ffi_imports.insert(ffi::STOREF64); + self.use_ffi(ffi::STOREF64); uwriteln!( self.src, "mbt_ffi_storef64(({}) + {offset}, {})", @@ -2549,17 +2793,17 @@ impl Bindgen for FunctionBindgen<'_, '_> { } // TODO: see what we can do with align Instruction::Malloc { size, .. } => { - self.r#gen.ffi_imports.insert(ffi::MALLOC); + self.use_ffi(ffi::MALLOC); uwriteln!(self.src, "mbt_ffi_malloc({})", size.size_wasm32()) } Instruction::GuestDeallocate { .. } => { - self.r#gen.ffi_imports.insert(ffi::FREE); + self.use_ffi(ffi::FREE); uwriteln!(self.src, "mbt_ffi_free({})", operands[0]) } Instruction::GuestDeallocateString => { - self.r#gen.ffi_imports.insert(ffi::FREE); + self.use_ffi(ffi::FREE); uwriteln!(self.src, "mbt_ffi_free({})", operands[0]) } @@ -2603,7 +2847,12 @@ impl Bindgen for FunctionBindgen<'_, '_> { let address = &operands[0]; let length = &operands[1]; - let size = self.r#gen.r#gen.sizes.size(element).size_wasm32(); + let size = self + .interface_gen + .world_gen + .sizes + .size(element) + .size_wasm32(); // let align = self.r#gen.r#gen.sizes.align(element); if !body.trim().is_empty() { @@ -2620,7 +2869,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { ); } - self.r#gen.ffi_imports.insert(ffi::FREE); + self.use_ffi(ffi::FREE); uwriteln!(self.src, "mbt_ffi_free({address})",); } @@ -2629,205 +2878,114 @@ impl Bindgen for FunctionBindgen<'_, '_> { } Instruction::FutureLift { ty, .. 
} => { - let result = self.locals.tmp("result"); + let ty = dealias(self.interface_gen.resolve, *ty); + let binding = self.interface_gen.bindings.0.get(&ty).unwrap(); let op = &operands[0]; - // let qualifier = self.r#gen.qualify_package(self.func_interface); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)); - let ffi = self - .r#gen - .r#gen - .pkg_resolver - .qualify_package(self.r#gen.name, FFI_DIR); - - let snake_name = format!("static_{}_future_table", ty.to_snake_case(),); - - uwriteln!( - self.src, - r#"let {result} = {ffi}FutureReader::new({op}, {snake_name});"#, - ); - - results.push(result); + results.push(format!("{}({op})", binding.lift_name)); + if self + .interface_gen + .async_bindings_emitted + .insert(binding.lift_name.clone()) + { + self.interface_gen + .ffi_imports + .extend(binding.lift_builtins.iter().copied()); + uwriteln!(self.interface_gen.ffi, "{}", binding.lift_src); + } } - Instruction::FutureLower { .. } => { + Instruction::FutureLower { ty, .. } => { + let ty = dealias(self.interface_gen.resolve, *ty); + let binding = self.interface_gen.bindings.0.get(&ty).unwrap(); let op = &operands[0]; - results.push(format!("{op}.handle")); - } - - Instruction::AsyncTaskReturn { params, .. 
} => { - let (body, return_param) = match &mut self.deferred_task_return { - DeferredTaskReturn::Generating { - prev_src, - return_param, - } => { - mem::swap(&mut self.src, prev_src); - (mem::take(prev_src), return_param.clone()) - } - _ => unreachable!(), - }; - assert_eq!(params.len(), operands.len()); - self.deferred_task_return = DeferredTaskReturn::Emitted { - body, - params: params - .iter() - .zip(operands) - .map(|(a, b)| (*a, b.clone())) - .collect(), - return_param, - }; + results.push(format!("{}({op})", binding.lower_name)); + if self + .interface_gen + .async_bindings_emitted + .insert(binding.lower_name.clone()) + { + self.interface_gen + .ffi_imports + .extend(binding.lower_builtins.iter().copied()); + uwriteln!(self.interface_gen.ffi, "{}", binding.lower_src); + } } - Instruction::StreamLower { .. } => { + Instruction::StreamLower { ty, .. } => { + let ty = dealias(self.interface_gen.resolve, *ty); + let binding = self.interface_gen.bindings.0.get(&ty).unwrap(); let op = &operands[0]; - results.push(format!("{op}.handle")); + results.push(format!("{}({op})", binding.lower_name)); + if self + .interface_gen + .async_bindings_emitted + .insert(binding.lower_name.clone()) + { + self.interface_gen + .ffi_imports + .extend(binding.lower_builtins.iter().copied()); + uwriteln!(self.interface_gen.ffi, "{}", binding.lower_src); + } } Instruction::StreamLift { ty, .. 
} => { - let result = self.locals.tmp("result"); + let ty = dealias(self.interface_gen.resolve, *ty); + let binding = self.interface_gen.bindings.0.get(&ty).unwrap(); let op = &operands[0]; - let qualifier = self - .r#gen - .r#gen - .pkg_resolver - .qualify_package(self.r#gen.name, self.func_interface); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, &Type::Id(*ty)); - let ffi = self - .r#gen - .r#gen - .pkg_resolver - .qualify_package(self.r#gen.name, FFI_DIR); - let snake_name = format!( - "static_{}_stream_table", - ty.replace(&qualifier, "").to_snake_case(), - ); - - uwriteln!( - self.src, - r#"let {result} = {ffi}StreamReader::new({op}, {snake_name});"#, - ); - - results.push(result); - } - Instruction::ErrorContextLower { .. } - | Instruction::ErrorContextLift { .. } - | Instruction::DropHandle { .. } => todo!(), - Instruction::FixedLengthListLift { - element: _, - size, - id: _, - } => { - let array = self.locals.tmp("array"); - let mut elements = String::new(); - for a in operands.drain(0..(*size as usize)) { - elements.push_str(&a); - elements.push_str(", "); - } - uwriteln!(self.src, "let {array} : FixedArray[_] = [{elements}]"); - results.push(array); - } - Instruction::FixedLengthListLower { - element: _, - size, - id: _, - } => { - for i in 0..(*size as usize) { - results.push(format!("({})[{i}]", operands[0])); + results.push(format!("{}({op})", binding.lift_name)); + if self + .interface_gen + .async_bindings_emitted + .insert(binding.lift_name.clone()) + { + self.interface_gen + .ffi_imports + .extend(binding.lift_builtins.iter().copied()); + uwriteln!(self.interface_gen.ffi, "{}", binding.lift_src); } } - Instruction::FixedLengthListLowerToMemory { - element, - size: _, - id: _, - } => { - let Block { - body, - results: block_results, - } = self.blocks.pop().unwrap(); - assert!(block_results.is_empty()); - - let vec = operands[0].clone(); - let target = operands[1].clone(); - let size = 
self.r#gen.r#gen.sizes.size(element).size_wasm32(); - let index = self.locals.tmp("index"); - - uwrite!( - self.src, - " - for {index} = 0; {index} < ({vec}).length(); {index} = {index} + 1 {{ - let iter_elem = ({vec})[{index}] - let iter_base = ({target}) + ({index} * {size}) - {body} - }} - ", - ); - } - Instruction::FixedLengthListLiftFromMemory { - element, - size: fll_size, - id: _, - } => { - let Block { - body, - results: block_results, - } = self.blocks.pop().unwrap(); - let address = &operands[0]; - let array = self.locals.tmp("array"); - let ty = self - .r#gen - .r#gen - .pkg_resolver - .type_name(self.r#gen.name, element); - let elem_size = self.r#gen.r#gen.sizes.size(element).size_wasm32(); - let index = self.locals.tmp("index"); - - let result = match &block_results[..] { - [result] => result, - _ => todo!("result count == {}", block_results.len()), - }; - uwrite!( - self.src, - " - let {array} : Array[{ty}] = [] - for {index} = 0; {index} < {fll_size}; {index} = {index} + 1 {{ - let iter_base = ({address}) + ({index} * {elem_size}) - {body} - {array}.push({result}) - }} - ", - ); + Instruction::ErrorContextLower { .. } | Instruction::ErrorContextLift { .. 
} => todo!(), - results.push(format!("FixedArray::from_array({array}[:])")); + Instruction::DropHandle { ty } => { + let op = &operands[0]; + match ty { + Type::Id(id) => match &self.interface_gen.resolve.types[*id].kind { + TypeDefKind::Handle(Handle::Own(_)) => { + let constructor = self + .interface_gen + .world_gen + .pkg_resolver + .type_constructor(self.interface_gen.name, ty); + uwriteln!(self.src, "let _ = {constructor}::drop({op});"); + } + TypeDefKind::Future(_) | TypeDefKind::Stream(_) => { + uwriteln!(self.src, "let _ = {op};"); + } + _ => unreachable!(), + }, + _ => unreachable!(), + } } } } - fn return_pointer(&mut self, size: ArchitectureSize, align: Alignment) -> String { - if self.r#gen.direction == Direction::Import { - self.r#gen.ffi_imports.insert(ffi::MALLOC); - let address = self.locals.tmp("return_area"); - uwriteln!( - self.src, - "let {address} = mbt_ffi_malloc({})", - size.size_wasm32(), - ); + fn return_pointer(&mut self, size: ArchitectureSize, _align: Alignment) -> String { + self.use_ffi(ffi::MALLOC); + let address = self.locals.tmp("return_area"); + uwriteln!( + self.src, + "let {address} = mbt_ffi_malloc({})", + size.size_wasm32(), + ); + // If the interface is an import, we need to track this for cleanup + // Otherwise, the caller is responsible for cleaning up in post_return + if self.interface_gen.direction == Direction::Import { self.cleanup.push(Cleanup { address: address.clone(), }); - address - } else { - self.r#gen.r#gen.return_area_size = self.r#gen.r#gen.return_area_size.max(size); - self.r#gen.r#gen.return_area_align = self.r#gen.r#gen.return_area_align.max(align); - "return_area".into() } + address } fn push_block(&mut self) { @@ -2841,11 +2999,18 @@ impl Bindgen for FunctionBindgen<'_, '_> { let BlockStorage { body, cleanup } = self.block_storage.pop().unwrap(); if !self.cleanup.is_empty() { - self.needs_cleanup_list = true; - - for cleanup in &self.cleanup { - let address = &cleanup.address; - uwriteln!(self.src, 
"cleanup_list.push({address})",); + if self.defer_cleanup { + self.needs_cleanup_list = true; + for cleanup in &self.cleanup { + let address = &cleanup.address; + uwriteln!(self.src, "cleanup_list.push({address})",); + } + } else { + self.use_ffi(ffi::FREE); + for cleanup in &self.cleanup { + let address = &cleanup.address; + uwriteln!(self.src, "mbt_ffi_free({address})",); + } } } @@ -2858,7 +3023,7 @@ impl Bindgen for FunctionBindgen<'_, '_> { } fn sizes(&self) -> &SizeAlign { - &self.r#gen.r#gen.sizes + &self.interface_gen.world_gen.sizes } fn is_list_canonical(&self, _resolve: &Resolve, element: &Type) -> bool { diff --git a/crates/moonbit/src/pkg.rs b/crates/moonbit/src/pkg.rs index e7c1a1057..96ef9222d 100644 --- a/crates/moonbit/src/pkg.rs +++ b/crates/moonbit/src/pkg.rs @@ -9,7 +9,7 @@ use wit_bindgen_core::{ }, }; -pub(crate) const FFI_DIR: &str = "ffi"; +use crate::async_support::ASYNC_DIR; #[derive(Default)] pub(crate) struct Imports { @@ -203,7 +203,7 @@ impl PkgResolver { } _ => format!("Array[{}]", self.type_name(this, &ty)), }, - TypeDefKind::FixedLengthList(ty, _size) => { + TypeDefKind::FixedLengthList(ty, _) => { format!("FixedArray[{}]", self.type_name(this, &ty)) } TypeDefKind::Tuple(tuple) => { @@ -249,9 +249,9 @@ impl PkgResolver { } TypeDefKind::Future(ty) => { - let qualifier = self.qualify_package(this, FFI_DIR); + let qualifier = self.qualify_package(this, ASYNC_DIR); format!( - "{}FutureReader[{}]", + "{}CMFuture[{}]", qualifier, ty.as_ref() .map(|t| self.type_name(this, t)) @@ -260,9 +260,9 @@ impl PkgResolver { } TypeDefKind::Stream(ty) => { - let qualifier = self.qualify_package(this, FFI_DIR); + let qualifier = self.qualify_package(this, ASYNC_DIR); format!( - "{}StreamReader[{}]", + "{}CMStream[{}]", qualifier, ty.as_ref() .map(|t| self.type_name(this, t)) @@ -286,6 +286,11 @@ impl PkgResolver { } } + /// Generate type name for export result types (lowering context). 
+ pub(crate) fn type_name_for_lowering(&mut self, this: &str, ty: &Type) -> String { + self.type_name(this, ty) + } + pub(crate) fn non_empty_type<'a>(&self, ty: Option<&'a Type>) -> Option<&'a Type> { if let Some(ty) = ty { let id = match ty { diff --git a/crates/test/src/moonbit.rs b/crates/test/src/moonbit.rs index e22d9489d..211941b3d 100644 --- a/crates/test/src/moonbit.rs +++ b/crates/test/src/moonbit.rs @@ -1,5 +1,5 @@ use crate::{LanguageMethods, Runner}; -use anyhow::bail; +use anyhow::{Context, bail}; use serde::Deserialize; use std::process::Command; @@ -60,24 +60,45 @@ impl LanguageMethods for MoonBit { } // Compile the MoonBit bindings to a wasm file - let manifest = compile.bindings_dir.join("moon.mod.json"); let mut cmd = Command::new("moon"); cmd.arg("build") + .arg("--target") + .arg("wasm") + .arg("--release") .arg("--no-strip") // for debugging - .arg("--manifest-path") - .arg(&manifest); + .current_dir(&compile.bindings_dir); runner.run_command(&mut cmd)?; - // Build the component - let artifact = compile - .bindings_dir - .join("target/wasm/release/build/gen/gen.wasm"); + // Build the component. MoonBit toolchains may use either `_build` or + // `target` output roots depending on version/configuration. 
+ let artifact_candidates = [ + compile + .bindings_dir + .join("_build/wasm/release/build/gen/gen.wasm"), + compile + .bindings_dir + .join("target/wasm/release/build/gen/gen.wasm"), + compile + .bindings_dir + .join("_build/wasm/debug/build/gen/gen.wasm"), + compile + .bindings_dir + .join("target/wasm/debug/build/gen/gen.wasm"), + ]; + let artifact = artifact_candidates + .iter() + .find(|path| path.exists()) + .cloned() + .with_context(|| { + format!("failed to locate MoonBit output wasm, looked in: {artifact_candidates:?}",) + })?; // Embed WIT files let manifest_dir = compile.component.path.parent().unwrap(); + let embedded = artifact.with_extension("embedded.wasm"); let mut cmd = Command::new("wasm-tools"); cmd.arg("component") .arg("embed") .args(["--encoding", "utf16"]) - .args(["-o", artifact.to_str().unwrap()]) + .args(["-o", embedded.to_str().unwrap()]) .args(["-w", &compile.component.bindgen.world]) .arg(manifest_dir) .arg(&artifact); @@ -87,35 +108,30 @@ impl LanguageMethods for MoonBit { cmd.arg("component") .arg("new") .args(["-o", compile.output.to_str().unwrap()]) - .arg(&artifact); + .arg(&embedded); runner.run_command(&mut cmd)?; Ok(()) } fn should_fail_verify( &self, - name: &str, + _name: &str, config: &crate::config::WitConfig, _args: &[String], ) -> bool { - // async-resource-func actually works, but most other async tests - // fail during codegen or verification - config.async_ && name != "async-resource-func.wit" + config.error_context } fn verify(&self, runner: &Runner, verify: &crate::Verify) -> anyhow::Result<()> { - let manifest = verify.bindings_dir.join("moon.mod.json"); let mut cmd = Command::new("moon"); cmd.arg("check") .arg("--warn-list") - .arg("-28") - // .arg("--deny-warn") - .arg("--manifest-path") - .arg(&manifest); + .arg("-28") // avoid warning noise in generated bindings + .current_dir(&verify.bindings_dir); runner.run_command(&mut cmd)?; let mut cmd = Command::new("moon"); - 
cmd.arg("build").arg("--manifest-path").arg(&manifest); + cmd.arg("build").current_dir(&verify.bindings_dir); runner.run_command(&mut cmd)?; Ok(()) diff --git a/tests/codegen/futures.wit b/tests/codegen/futures.wit index e312bc2e4..252d0d702 100644 --- a/tests/codegen/futures.wit +++ b/tests/codegen/futures.wit @@ -54,8 +54,13 @@ interface futures { b: string, c: future, } + record nested-future-record { + inner: future>, + } record-future: func(x: future) -> future; record-future-reverse: func(x: future) -> future; + nested-future: func(x: future>) -> future>; + nested-future-record-roundtrip: func(x: nested-future-record) -> nested-future-record; variant some-variant { a(string), diff --git a/tests/runtime-async/async/cancel-import/test.mbt b/tests/runtime-async/async/cancel-import/test.mbt new file mode 100644 index 000000000..1ff19a590 --- /dev/null +++ b/tests/runtime-async/async/cancel-import/test.mbt @@ -0,0 +1,16 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +pub async fn pending_import(x : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + x.get() +} + +///| +pub fn backpressure_set(x : Bool) -> Unit { + if x { + @async.backpressure_inc() + } else { + @async.backpressure_dec() + } +} diff --git a/tests/runtime-async/async/future-cancel-read/runner.mbt b/tests/runtime-async/async/future-cancel-read/runner.mbt new file mode 100644 index 000000000..45c4cb356 --- /dev/null +++ b/tests/runtime-async/async/future-cancel-read/runner.mbt @@ -0,0 +1,10 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + @i.cancel_before_read(@async.CMFuture::ready(1U)) + @i.start_read_then_cancel(@async.CMFuture::ready(4U), @async.CMFuture::ready(())) +} diff --git 
a/tests/runtime-async/async/future-cancel-read/test.mbt b/tests/runtime-async/async/future-cancel-read/test.mbt index bdb83de7b..dc14e796a 100644 --- a/tests/runtime-async/async/future-cancel-read/test.mbt +++ b/tests/runtime-async/async/future-cancel-read/test.mbt @@ -2,37 +2,31 @@ //@ path = 'gen/interface/my/test_/i/stub.mbt' ///| -pub async fn cancel_before_read( - x : @ffi.FutureReader[UInt], -) -> Unit noraise { - let task = @ffi.current_task() - let _ = x.drop() +pub async fn cancel_before_read(x : @async.CMFuture[UInt], task_group : @async.TaskGroup[Unit]) -> Unit { + x.drop() } ///| -pub async fn cancel_after_read( - x : @ffi.FutureReader[UInt], -) -> Unit noraise { - let task = @ffi.current_task() - task.spawn( - fn() { - let _ = x.read() catch { _ => raise @ffi.Cancelled::Cancelled } - } - ) - task.cancel_waitable(x) +pub async fn cancel_after_read(x : @async.CMFuture[UInt], task_group : @async.TaskGroup[Unit]) -> Unit { + task_group.spawn_bg(async fn() { x.drop() }) + let _ = x.get() catch { + @async.FutureReadError::Cancelled => return + _ => panic() + } + panic() } ///| pub async fn start_read_then_cancel( - data : @ffi.FutureReader[UInt], - signal : @ffi.FutureReader[Unit], -) -> Unit noraise { - let task = @ffi.current_task() - task.spawn( - fn() { let _ = data.read() catch { _ => raise @ffi.Cancelled::Cancelled } }, - ) - task.spawn( - fn() { signal.read() catch { _ => raise @ffi.Cancelled::Cancelled } }, - ) + data : @async.CMFuture[UInt], + signal : @async.CMFuture[Unit], + task_group : @async.TaskGroup[Unit], +) -> Unit { + task_group.spawn_bg(async fn() { + guard 4U == (try! 
data.get()) + }) + task_group.spawn_bg(async fn() { + signal.get() + data.drop() + }) } - diff --git a/tests/runtime-async/async/future-cancel-write-then-read/test.mbt b/tests/runtime-async/async/future-cancel-write-then-read/test.mbt new file mode 100644 index 000000000..535f3c39d --- /dev/null +++ b/tests/runtime-async/async/future-cancel-write-then-read/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/theTest/stub.mbt' + +///| +pub async fn f(param : @async.CMFuture[Byte], task_group : @async.TaskGroup[Unit]) -> Unit { + assert_eq(param.get(), (0).to_byte()) +} diff --git a/tests/runtime-async/async/future-cancel-write/runner.mbt b/tests/runtime-async/async/future-cancel-write/runner.mbt new file mode 100644 index 000000000..dc43140c8 --- /dev/null +++ b/tests/runtime-async/async/future-cancel-write/runner.mbt @@ -0,0 +1,10 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + @i.take_then_drop(@async.CMFuture::ready("hello")) + @i.read_and_drop(@async.CMFuture::ready("hello2")) +} diff --git a/tests/runtime-async/async/future-cancel-write/test.mbt b/tests/runtime-async/async/future-cancel-write/test.mbt index 2b1f21e8d..c6626202c 100644 --- a/tests/runtime-async/async/future-cancel-write/test.mbt +++ b/tests/runtime-async/async/future-cancel-write/test.mbt @@ -2,16 +2,11 @@ //@ path = 'gen/interface/my/test_/i/stub.mbt' ///| -pub fn take_then_drop(x : @ffi.FutureReader[String]) -> Unit { - let _ = x.drop() +pub async fn take_then_drop(x : @async.CMFuture[String], task_group : @async.TaskGroup[Unit]) -> Unit { + x.drop() } ///| -pub async fn read_and_drop( - x : @ffi.FutureReader[String], -) -> Unit noraise { - let task = @ffi.current_task() - let _ = task.spawn(fn() { - let _ = x.read() catch { _ => raise 
@ffi.Cancelled::Cancelled } - }) +pub async fn read_and_drop(x : @async.CMFuture[String], task_group : @async.TaskGroup[Unit]) -> Unit { + let _ = x.get() } diff --git a/tests/runtime-async/async/future-close-after-coming-back/runner.mbt b/tests/runtime-async/async/future-close-after-coming-back/runner.mbt new file mode 100644 index 000000000..1a9982a25 --- /dev/null +++ b/tests/runtime-async/async/future-close-after-coming-back/runner.mbt @@ -0,0 +1,10 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["a/b/interface/a/b/theTest", "a/b/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + let returned = @theTest.f(@async.CMFuture::ready(())) + returned.drop() +} diff --git a/tests/runtime-async/async/future-close-after-coming-back/test.mbt b/tests/runtime-async/async/future-close-after-coming-back/test.mbt index 9f6250656..115e7e4cd 100644 --- a/tests/runtime-async/async/future-close-after-coming-back/test.mbt +++ b/tests/runtime-async/async/future-close-after-coming-back/test.mbt @@ -1,7 +1,6 @@ //@ [lang] //@ path = 'gen/interface/a/b/theTest/stub.mbt' -pub fn f(_param : @ffi.FutureReader[Unit]) -> @ffi.FutureReader[Unit] { - _param +pub async fn f(param : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[Unit] { + @async.CMFuture::from(async fn() { param.get() }) } - diff --git a/tests/runtime-async/async/future-close-then-receive-read/test.mbt b/tests/runtime-async/async/future-close-then-receive-read/test.mbt new file mode 100644 index 000000000..9cc71b837 --- /dev/null +++ b/tests/runtime-async/async/future-close-then-receive-read/test.mbt @@ -0,0 +1,17 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/theTest/stub.mbt' + +///| +let slot : Ref[@async.CMFuture[Unit]?] 
= { val: None } + +///| +pub async fn set(param : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + slot.val = Some(param) +} + +///| +pub async fn get(task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[Unit] { + guard slot.val is Some(value) + slot.val = None + value +} diff --git a/tests/runtime-async/async/future-closes-with-error/test.mbt b/tests/runtime-async/async/future-closes-with-error/test.mbt new file mode 100644 index 000000000..4baa6547e --- /dev/null +++ b/tests/runtime-async/async/future-closes-with-error/test.mbt @@ -0,0 +1,10 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/theTest/stub.mbt' + +///| +pub async fn f(param : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + let _ = param.get() catch { + @async.FutureReadError::Cancelled => () + _ => panic() + } +} diff --git a/tests/runtime-async/async/future-write-then-read-comes-back/test.mbt b/tests/runtime-async/async/future-write-then-read-comes-back/test.mbt new file mode 100644 index 000000000..2ce9ee4ca --- /dev/null +++ b/tests/runtime-async/async/future-write-then-read-comes-back/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/theTest/stub.mbt' + +///| +pub async fn f(param : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[Unit] { + param +} diff --git a/tests/runtime-async/async/future-write-then-read-remote/test.mbt b/tests/runtime-async/async/future-write-then-read-remote/test.mbt new file mode 100644 index 000000000..53e57f21b --- /dev/null +++ b/tests/runtime-async/async/future-write-then-read-remote/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/theTest/stub.mbt' + +///| +pub async fn f(param : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + param.get() +} diff --git a/tests/runtime-async/async/incomplete-writes/leaf.mbt b/tests/runtime-async/async/incomplete-writes/leaf.mbt new file mode 100644 index 000000000..47c8d3cd2 --- /dev/null +++ 
b/tests/runtime-async/async/incomplete-writes/leaf.mbt @@ -0,0 +1,26 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/leafInterface/stub.mbt' + +///| +let leaf_values : Map[Int, String] = {} + +///| +let next_leaf_rep : Ref[Int] = { val: 1 } + +///| Destructor of the resource. +pub fn LeafThing::dtor(self : LeafThing) -> Unit { + leaf_values.remove(self.rep()) +} + +///| +pub fn LeafThing::leaf_thing(s : String) -> LeafThing { + let rep = next_leaf_rep.val + next_leaf_rep.val += 1 + leaf_values[rep] = s + LeafThing::new(rep) +} + +///| +pub fn LeafThing::get(self : LeafThing) -> String { + leaf_values[self.rep()] +} diff --git a/tests/runtime-async/async/incomplete-writes/test.mbt b/tests/runtime-async/async/incomplete-writes/test.mbt new file mode 100644 index 000000000..01bed3bf2 --- /dev/null +++ b/tests/runtime-async/async/incomplete-writes/test.mbt @@ -0,0 +1,116 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/testInterface/stub.mbt' + +///| +let test_values : Map[Int, String] = {} + +///| +let next_test_rep : Ref[Int] = { val: 1 } + +///| Destructor of the resource. 
+pub fn TestThing::dtor(self : TestThing) -> Unit { + test_values.remove(self.rep()) +} + +///| +pub fn TestThing::test_thing(s : String) -> TestThing { + let rep = next_test_rep.val + next_test_rep.val += 1 + test_values[rep] = s + TestThing::new(rep) +} + +///| +pub fn TestThing::get(self : TestThing) -> String { + test_values[self.rep()] +} + +///| +pub async fn short_reads_test( + s : @async.CMStream[TestThing], + task_group : @async.TaskGroup[Unit], +) -> @async.CMStream[TestThing] { + @async.CMStream::from(async fn(sink : @async.Sink[TestThing]) { + let things : Array[TestThing] = [] + for { + match s.read(1) { + Some(chunk) => { + for i = 0; i < chunk.length(); i = i + 1 { + things.push(chunk[i]) + } + } + None => break + } + } + s.close() + if things.length() > 0 { + let _ = sink.write(things[:]) + } + sink.close() + }) +} + +///| +pub async fn short_reads_leaf( + s : @async.CMStream[@leafInterface.LeafThing], + task_group : @async.TaskGroup[Unit], +) -> @async.CMStream[@leafInterface.LeafThing] { + @async.CMStream::from(async fn(sink : @async.Sink[@leafInterface.LeafThing]) { + let things : Array[@leafInterface.LeafThing] = [] + for { + match s.read(1) { + Some(chunk) => { + for i = 0; i < chunk.length(); i = i + 1 { + things.push(chunk[i]) + } + } + None => break + } + } + s.close() + if things.length() > 0 { + let _ = sink.write(things[:]) + } + sink.close() + }) +} + +///| +pub async fn dropped_reader_test( + f1 : @async.CMFuture[TestThing], + f2 : @async.CMFuture[TestThing], + task_group : @async.TaskGroup[Unit], +) -> (@async.CMFuture[TestThing], @async.CMFuture[TestThing]) { + let (out1, writer1) : (@async.CMFuture[TestThing], @async.Promise[TestThing]) = @async.CMFuture::new() + let (out2, writer2) : (@async.CMFuture[TestThing], @async.Promise[TestThing]) = @async.CMFuture::new() + task_group.spawn_bg(async fn() { + f1.drop() + let thing = f2.get() + writer1.close() + writer2.write(thing) + }) + (out1, out2) +} + +///| +pub async fn 
dropped_reader_leaf( + f1 : @async.CMFuture[@leafInterface.LeafThing], + f2 : @async.CMFuture[@leafInterface.LeafThing], + task_group : @async.TaskGroup[Unit], +) -> (@async.CMFuture[@leafInterface.LeafThing], @async.CMFuture[@leafInterface.LeafThing]) { + let (out1, writer1) : ( + @async.CMFuture[@leafInterface.LeafThing], + @async.Promise[@leafInterface.LeafThing], + ) = @async.CMFuture::new() + let (out2, writer2) : ( + @async.CMFuture[@leafInterface.LeafThing], + @async.Promise[@leafInterface.LeafThing], + ) = @async.CMFuture::new() + task_group.spawn_bg(async fn() { + f1.drop() + let thing = f2.get() + writer1.close() + writer2.write(thing) + }) + (out1, out2) +} diff --git a/tests/runtime-async/async/moonbit-future-write/runner.mbt b/tests/runtime-async/async/moonbit-future-write/runner.mbt new file mode 100644 index 000000000..42dfb497e --- /dev/null +++ b/tests/runtime-async/async/moonbit-future-write/runner.mbt @@ -0,0 +1,21 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + let rx = @i.create_future_with_value(42U) + assert_eq(rx.get(), 42U) + + let done = @i.create_unit_future() + done.get() + + let nested = @i.create_nested_future(7U) + let inner = nested.get() + assert_eq(inner.get(), 7U) + + let nested_record = @i.create_nested_future_record(9U) + let nested_inner = nested_record.nested.get() + assert_eq(nested_inner.get(), 9U) +} diff --git a/tests/runtime-async/async/moonbit-future-write/runner.rs b/tests/runtime-async/async/moonbit-future-write/runner.rs new file mode 100644 index 000000000..8048a7644 --- /dev/null +++ b/tests/runtime-async/async/moonbit-future-write/runner.rs @@ -0,0 +1,34 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' + +include!(env!("BINDINGS")); + +use crate::my::test::i::*; + +struct 
Component; + +export!(Component); + +impl Guest for Component { + async fn run() { + // Test creating a future with a value + let rx = create_future_with_value(42).await; + let value = rx.await; + assert_eq!(value, 42); + + // Test creating a unit future + let rx = create_unit_future().await; + rx.await; + + // Test future> + let outer = create_nested_future(7).await; + let inner = outer.await; + let nested_value = inner.await; + assert_eq!(nested_value, 7); + + // Test record containing future> + let record = create_nested_future_record(9).await; + let record_inner = record.nested.await; + let record_value = record_inner.await; + assert_eq!(record_value, 9); + } +} diff --git a/tests/runtime-async/async/moonbit-future-write/test.mbt b/tests/runtime-async/async/moonbit-future-write/test.mbt new file mode 100644 index 000000000..ac2326267 --- /dev/null +++ b/tests/runtime-async/async/moonbit-future-write/test.mbt @@ -0,0 +1,22 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +pub async fn create_future_with_value(value : UInt, task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[UInt] { + @async.CMFuture::ready(value) +} + +///| +pub async fn create_unit_future(task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[Unit] { + @async.CMFuture::ready(()) +} + +///| +pub async fn create_nested_future(value : UInt, task_group : @async.TaskGroup[Unit]) -> @async.CMFuture[@async.CMFuture[UInt]] { + @async.CMFuture::ready(@async.CMFuture::ready(value)) +} + +///| +pub async fn create_nested_future_record(value : UInt, task_group : @async.TaskGroup[Unit]) -> NestedFutureRecord { + { nested: @async.CMFuture::ready(@async.CMFuture::ready(value)) } +} diff --git a/tests/runtime-async/async/moonbit-future-write/test.rs b/tests/runtime-async/async/moonbit-future-write/test.rs new file mode 100644 index 000000000..2217da3af --- /dev/null +++ b/tests/runtime-async/async/moonbit-future-write/test.rs @@ -0,0 +1,45 @@ +use wit_bindgen::FutureReader; + 
+include!(env!("BINDINGS")); + +struct Component; + +export!(Component); + +impl crate::exports::my::test::i::Guest for Component { + async fn create_future_with_value(value: u32) -> FutureReader { + let (tx, rx) = wit_future::new(|| unreachable!()); + wit_bindgen::spawn(async move { + tx.write(value).await.unwrap(); + }); + rx + } + + async fn create_unit_future() -> FutureReader<()> { + let (tx, rx) = wit_future::new(|| unreachable!()); + wit_bindgen::spawn(async move { + tx.write(()).await.unwrap(); + }); + rx + } + + async fn create_nested_future(value: u32) -> FutureReader> { + let (inner_tx, inner_rx) = wit_future::new(|| unreachable!()); + let (outer_tx, outer_rx) = wit_future::new(|| unreachable!()); + wit_bindgen::spawn(async move { + outer_tx.write(inner_rx).await.unwrap(); + inner_tx.write(value).await.unwrap(); + }); + outer_rx + } + + async fn create_nested_future_record(value: u32) -> crate::exports::my::test::i::NestedFutureRecord { + let (inner_tx, inner_rx) = wit_future::new(|| unreachable!()); + let (outer_tx, outer_rx) = wit_future::new(|| unreachable!()); + wit_bindgen::spawn(async move { + outer_tx.write(inner_rx).await.unwrap(); + inner_tx.write(value).await.unwrap(); + }); + crate::exports::my::test::i::NestedFutureRecord { nested: outer_rx } + } +} diff --git a/tests/runtime-async/async/moonbit-future-write/test.wit b/tests/runtime-async/async/moonbit-future-write/test.wit new file mode 100644 index 000000000..4e6510936 --- /dev/null +++ b/tests/runtime-async/async/moonbit-future-write/test.wit @@ -0,0 +1,26 @@ +package my:test; + +interface i { + // MoonBit creates a future, writes a value, and returns it + create-future-with-value: async func(value: u32) -> future; + // MoonBit creates a future without a payload + create-unit-future: async func() -> future; + // MoonBit returns nested future handles + create-nested-future: async func(value: u32) -> future>; + + record nested-future-record { + nested: future>, + } + // MoonBit returns a 
record containing nested futures + create-nested-future-record: async func(value: u32) -> nested-future-record; +} + +world test { + export i; +} + +world runner { + import i; + + export run: async func(); +} diff --git a/tests/runtime-async/async/moonbit-stream-write/runner.mbt b/tests/runtime-async/async/moonbit-stream-write/runner.mbt new file mode 100644 index 000000000..136d26737 --- /dev/null +++ b/tests/runtime-async/async/moonbit-stream-write/runner.mbt @@ -0,0 +1,26 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + let stream = @i.create_stream_with_values(3U) + let mut total = 0U + let mut count = 0U + while stream.read(16) is Some(chunk) { + for value in chunk { + total += value + count += 1U + } + } + assert_eq(count, 3U) + assert_eq(total, 3U) + + let unit_stream = @i.create_unit_stream(5U) + let mut unit_count = 0U + while unit_stream.read(16) is Some(chunk) { + unit_count += chunk.length().reinterpret_as_uint() + } + assert_eq(unit_count, 5U) +} diff --git a/tests/runtime-async/async/moonbit-stream-write/runner.rs b/tests/runtime-async/async/moonbit-stream-write/runner.rs new file mode 100644 index 000000000..c6232bea1 --- /dev/null +++ b/tests/runtime-async/async/moonbit-stream-write/runner.rs @@ -0,0 +1,52 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' + +include!(env!("BINDINGS")); + +use crate::my::test::i::*; +use wit_bindgen::StreamResult; + +struct Component; + +export!(Component); + +impl Guest for Component { + async fn run() { + // Test creating a stream with u32 values + let mut rx = create_stream_with_values(3).await; + let mut total = 0u32; + let mut count = 0u32; + loop { + let buf = Vec::::with_capacity(10); + let (result, values) = rx.read(buf).await; + match result { + StreamResult::Complete(n) if 
n > 0 => { + // Only process the first n items that were actually read + for v in values.iter().take(n) { + total += *v; + count += 1; + } + } + // Complete(0) means end of stream, or Dropped/Cancelled + _ => break, + } + } + assert_eq!(count, 3); + assert_eq!(total, 0 + 1 + 2); // 0, 1, 2 + + // Test creating a unit stream + let mut rx = create_unit_stream(5).await; + let mut count = 0u32; + loop { + let buf = Vec::<()>::with_capacity(10); + let (result, _values) = rx.read(buf).await; + match result { + StreamResult::Complete(n) if n > 0 => { + count += n as u32; + } + // Complete(0) means end of stream, or Dropped/Cancelled + _ => break, + } + } + assert_eq!(count, 5); + } +} diff --git a/tests/runtime-async/async/moonbit-stream-write/test.mbt b/tests/runtime-async/async/moonbit-stream-write/test.mbt new file mode 100644 index 000000000..b151beaef --- /dev/null +++ b/tests/runtime-async/async/moonbit-stream-write/test.mbt @@ -0,0 +1,24 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +pub async fn create_stream_with_values(count : UInt, task_group : @async.TaskGroup[Unit]) -> @async.CMStream[UInt] { + @async.CMStream::from(async fn(sink : @async.Sink[UInt]) { + for i = 0; i < count.reinterpret_as_int(); i = i + 1 { + let arr : Array[UInt] = [i.reinterpret_as_uint()] + let _ = sink.write(arr[:]) + } + sink.close() + }) +} + +///| +pub async fn create_unit_stream(count : UInt, task_group : @async.TaskGroup[Unit]) -> @async.CMStream[Unit] { + @async.CMStream::from(async fn(sink : @async.Sink[Unit]) { + for i = 0; i < count.reinterpret_as_int(); i = i + 1 { + let arr : Array[Unit] = [()] + let _ = sink.write(arr[:]) + } + sink.close() + }) +} diff --git a/tests/runtime-async/async/moonbit-stream-write/test.rs b/tests/runtime-async/async/moonbit-stream-write/test.rs new file mode 100644 index 000000000..726db5a22 --- /dev/null +++ b/tests/runtime-async/async/moonbit-stream-write/test.rs @@ -0,0 +1,35 @@ +use wit_bindgen::{StreamReader, 
StreamResult}; + +include!(env!("BINDINGS")); + +struct Component; + +export!(Component); + +impl crate::exports::my::test::i::Guest for Component { + async fn create_stream_with_values(count: u32) -> StreamReader { + let (mut tx, rx) = wit_stream::new(); + wit_bindgen::spawn(async move { + for i in 0..count { + let (result, _rest) = tx.write(vec![i]).await; + if !matches!(result, StreamResult::Complete(1)) { + break; + } + } + }); + rx + } + + async fn create_unit_stream(count: u32) -> StreamReader<()> { + let (mut tx, rx) = wit_stream::new(); + wit_bindgen::spawn(async move { + for _ in 0..count { + let (result, _rest) = tx.write(vec![()]).await; + if !matches!(result, StreamResult::Complete(1)) { + break; + } + } + }); + rx + } +} diff --git a/tests/runtime-async/async/moonbit-stream-write/test.wit b/tests/runtime-async/async/moonbit-stream-write/test.wit new file mode 100644 index 000000000..10c15ec10 --- /dev/null +++ b/tests/runtime-async/async/moonbit-stream-write/test.wit @@ -0,0 +1,18 @@ +package my:test; + +interface i { + // MoonBit creates a stream, writes values, and returns it + create-stream-with-values: async func(count: u32) -> stream; + // MoonBit creates a unit stream + create-unit-stream: async func(count: u32) -> stream; +} + +world test { + export i; +} + +world runner { + import i; + + export run: async func(); +} diff --git a/tests/runtime-async/async/pending-import/test.mbt b/tests/runtime-async/async/pending-import/test.mbt new file mode 100644 index 000000000..b69058ede --- /dev/null +++ b/tests/runtime-async/async/pending-import/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +pub async fn pending_import(x : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + x.get() +} diff --git a/tests/runtime-async/async/ping-pong/test.mbt b/tests/runtime-async/async/ping-pong/test.mbt new file mode 100644 index 000000000..e2d801bf4 --- /dev/null +++ 
b/tests/runtime-async/async/ping-pong/test.mbt @@ -0,0 +1,17 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +pub async fn ping( + x : @async.CMFuture[String], + y : String, + task_group : @async.TaskGroup[Unit], +) -> @async.CMFuture[String] { + let message = x.get() + y + @async.CMFuture::from(async fn() { message }) +} + +///| +pub async fn pong(x : @async.CMFuture[String], task_group : @async.TaskGroup[Unit]) -> String { + x.get() +} diff --git a/tests/runtime-async/async/rust-cross-task-wakeup/test.mbt b/tests/runtime-async/async/rust-cross-task-wakeup/test.mbt new file mode 100644 index 000000000..b44148684 --- /dev/null +++ b/tests/runtime-async/async/rust-cross-task-wakeup/test.mbt @@ -0,0 +1,21 @@ +//@ [lang] +//@ path = 'gen/interface/my/test_/i/stub.mbt' + +///| +let resolved : Ref[Bool] = { val: false } + +///| +pub async fn pending_import(task_group : @async.TaskGroup[Unit]) -> Unit { + resolved.val = false + for { + if resolved.val { + return + } + @async.pause() + } +} + +///| +pub fn resolve_pending_import() -> Unit { + resolved.val = true +} diff --git a/tests/runtime-async/async/rust-lowered-send/test.mbt b/tests/runtime-async/async/rust-lowered-send/test.mbt new file mode 100644 index 000000000..1a5a6f467 --- /dev/null +++ b/tests/runtime-async/async/rust-lowered-send/test.mbt @@ -0,0 +1,8 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/i/stub.mbt' + +///| +pub async fn one_argument(x : String, task_group : @async.TaskGroup[Unit]) -> Unit { + let copied = x + "" + assert_eq(copied, "hello") +} diff --git a/tests/runtime-async/async/simple-call-import/runner.mbt b/tests/runtime-async/async/simple-call-import/runner.mbt new file mode 100644 index 000000000..24f7c9960 --- /dev/null +++ b/tests/runtime-async/async/simple-call-import/runner.mbt @@ -0,0 +1,9 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": 
["a/b/interface/a/b/i", "a/b/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + @i.f() +} diff --git a/tests/runtime-async/async/simple-call-import/test.mbt b/tests/runtime-async/async/simple-call-import/test.mbt new file mode 100644 index 000000000..04029a350 --- /dev/null +++ b/tests/runtime-async/async/simple-call-import/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/i/stub.mbt' + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + () +} diff --git a/tests/runtime-async/async/simple-future/runner.mbt b/tests/runtime-async/async/simple-future/runner.mbt new file mode 100644 index 000000000..d1247c61b --- /dev/null +++ b/tests/runtime-async/async/simple-future/runner.mbt @@ -0,0 +1,10 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + @i.read_future(@async.CMFuture::ready(())) + @i.drop_future(@async.CMFuture::ready(())) +} diff --git a/tests/runtime-async/async/simple-future/test.mbt b/tests/runtime-async/async/simple-future/test.mbt index f4a0e49f0..640bd04d4 100644 --- a/tests/runtime-async/async/simple-future/test.mbt +++ b/tests/runtime-async/async/simple-future/test.mbt @@ -2,20 +2,11 @@ //@ path = 'gen/interface/my/test_/i/stub.mbt' ///| -pub async fn read_future( - x : @ffi.FutureReader[Unit], -) -> Unit noraise { - let task = @ffi.current_task() - task.spawn(fn() { - let _ = x.read() catch { _ => raise @ffi.Cancelled::Cancelled } - - }) +pub async fn read_future(x : @async.CMFuture[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + x.get() } ///| -pub async fn drop_future( - x : @ffi.FutureReader[Unit], -) -> Unit noraise { - let task = @ffi.current_task() - let _ = x.drop() +pub async fn drop_future(x : @async.CMFuture[Unit], task_group : 
@async.TaskGroup[Unit]) -> Unit { + x.drop() } diff --git a/tests/runtime-async/async/simple-import-params-results/runner.mbt b/tests/runtime-async/async/simple-import-params-results/runner.mbt new file mode 100644 index 000000000..9e5c46683 --- /dev/null +++ b/tests/runtime-async/async/simple-import-params-results/runner.mbt @@ -0,0 +1,13 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["a/b/interface/a/b/i", "a/b/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + @i.one_argument(1) + assert_eq(@i.one_result(), 2) + assert_eq(@i.one_argument_and_result(3), 4) + @i.two_arguments(5, 6) + assert_eq(@i.two_arguments_and_result(7, 8), 9) +} diff --git a/tests/runtime-async/async/simple-import-params-results/test.mbt b/tests/runtime-async/async/simple-import-params-results/test.mbt index ddaaac29a..31020ecc2 100644 --- a/tests/runtime-async/async/simple-import-params-results/test.mbt +++ b/tests/runtime-async/async/simple-import-params-results/test.mbt @@ -2,29 +2,29 @@ //@ path = 'gen/interface/a/b/i/stub.mbt' ///| -pub async fn one_argument(x : UInt) -> Unit { +pub async fn one_argument(x : UInt, task_group : @async.TaskGroup[Unit]) -> Unit { assert_eq(x, 1) } ///| -pub async fn one_result() -> UInt noraise { +pub async fn one_result(task_group : @async.TaskGroup[Unit]) -> UInt { 2 } ///| -pub async fn one_argument_and_result(x : UInt) -> UInt { +pub async fn one_argument_and_result(x : UInt, task_group : @async.TaskGroup[Unit]) -> UInt { assert_eq(x, 3) 4 } ///| -pub async fn two_arguments(x : UInt, y : UInt) -> Unit { +pub async fn two_arguments(x : UInt, y : UInt, task_group : @async.TaskGroup[Unit]) -> Unit { assert_eq(x, 5) assert_eq(y, 6) } ///| -pub async fn two_arguments_and_result(x : UInt, y : UInt) -> UInt { +pub async fn two_arguments_and_result(x : UInt, y : UInt, task_group : @async.TaskGroup[Unit]) -> UInt { 
assert_eq(x, 7) assert_eq(y, 8) 9 diff --git a/tests/runtime-async/async/simple-pending-import/test.mbt b/tests/runtime-async/async/simple-pending-import/test.mbt new file mode 100644 index 000000000..e4380f28a --- /dev/null +++ b/tests/runtime-async/async/simple-pending-import/test.mbt @@ -0,0 +1,9 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/i/stub.mbt' + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + for i = 0; i < 10; i = i + 1 { + @async.pause() + } +} diff --git a/tests/runtime-async/async/simple-stream-payload/runner.mbt b/tests/runtime-async/async/simple-stream-payload/runner.mbt new file mode 100644 index 000000000..629640d5c --- /dev/null +++ b/tests/runtime-async/async/simple-stream-payload/runner.mbt @@ -0,0 +1,20 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + let stream = @async.CMStream::from(async fn(sink : @async.Sink[Byte]) { + let a : Array[Byte] = [(0).to_byte()] + let b : Array[Byte] = [(1).to_byte(), (2).to_byte()] + let c : Array[Byte] = [(3).to_byte()] + let d : Array[Byte] = [(4).to_byte()] + assert_eq(sink.write(a[:]), 1) + assert_eq(sink.write(b[:]), 2) + assert_eq(sink.write(c[:]), 1) + assert_eq(sink.write(d[:]), 1) + sink.close() + }) + @i.read_stream(stream) +} diff --git a/tests/runtime-async/async/simple-stream-payload/test.mbt b/tests/runtime-async/async/simple-stream-payload/test.mbt index 31eb79be8..8e7be83ae 100644 --- a/tests/runtime-async/async/simple-stream-payload/test.mbt +++ b/tests/runtime-async/async/simple-stream-payload/test.mbt @@ -1,20 +1,23 @@ -//@ [lang] +//@ [lang] //@ path = 'gen/interface/my/test_/i/stub.mbt' -pub async fn read_stream(x : @ffi.StreamReader[Byte]) -> Unit raise { - let task = @ffi.current_task() - let buffer = FixedArray::make(10, 
Byte::default()); +pub async fn read_stream(x : @async.CMStream[Byte], task_group : @async.TaskGroup[Unit]) -> Unit { + guard x.read(1) is Some(a) + guard a.length() == 1 + guard a[0] == (0).to_byte() - task.wait(fn(){ - let _ = x.read(buffer, 1) catch { _ => raise @ffi.Cancelled::Cancelled } - }) - task.wait(fn(){ - let _ = x.read(buffer, 2, offset=1) catch { _ => raise @ffi.Cancelled::Cancelled } - }) - task.wait(fn(){ - let _ = x.read(buffer, 1, offset=3) catch { _ => raise @ffi.Cancelled::Cancelled } - }) - task.wait(fn(){ - let _ = x.read(buffer, 1, offset=4) catch { _ => raise @ffi.Cancelled::Cancelled } - }) + guard x.read(2) is Some(b) + guard b.length() == 2 + guard b[0] == (1).to_byte() + guard b[1] == (2).to_byte() + + guard x.read(1) is Some(c) + guard c.length() == 1 + guard c[0] == (3).to_byte() + + guard x.read(1) is Some(d) + guard d.length() == 1 + guard d[0] == (4).to_byte() + + x.close() } diff --git a/tests/runtime-async/async/simple-stream/runner.mbt b/tests/runtime-async/async/simple-stream/runner.mbt new file mode 100644 index 000000000..b0642bd09 --- /dev/null +++ b/tests/runtime-async/async/simple-stream/runner.mbt @@ -0,0 +1,16 @@ +//@ wasmtime-flags = '-Wcomponent-model-async' +//@ [lang] +//@ path = 'gen/world/runner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["my/test/interface/my/test_/i", "my/test/async"] }""" + +///| +pub async fn run(task_group : @async.TaskGroup[Unit]) -> Unit { + let stream = @async.CMStream::from(async fn(sink : @async.Sink[Unit]) { + let first : Array[Unit] = [()] + let second : Array[Unit] = [(), ()] + assert_eq(sink.write(first[:]), 1) + assert_eq(sink.write(second[:]), 2) + sink.close() + }) + @i.read_stream(stream) +} diff --git a/tests/runtime-async/async/simple-stream/test.mbt b/tests/runtime-async/async/simple-stream/test.mbt index c227a69c4..1830c4aa1 100644 --- a/tests/runtime-async/async/simple-stream/test.mbt +++ b/tests/runtime-async/async/simple-stream/test.mbt @@ -1,15 +1,10 @@ 
//@ [lang] //@ path = 'gen/interface/my/test_/i/stub.mbt' -pub async fn read_stream(x : @ffi.StreamReader[Unit]) -> Unit noraise { - let task = @ffi.current_task() - let buffer = FixedArray::make(10, Unit::default()); - - task.spawn(fn(){ - let _ = x.read(buffer, 1) catch { _ => raise @ffi.Cancelled::Cancelled } - }) - task.spawn(fn(){ - let _ = x.read(buffer, 2) catch { _ => raise @ffi.Cancelled::Cancelled } - }) +pub async fn read_stream(x : @async.CMStream[Unit], task_group : @async.TaskGroup[Unit]) -> Unit { + guard x.read(1) is Some(a) + guard a.length() == 1 + guard x.read(2) is Some(b) + guard b.length() == 2 + x.close() } - diff --git a/tests/runtime-async/async/simple-yield/test.mbt b/tests/runtime-async/async/simple-yield/test.mbt new file mode 100644 index 000000000..385e6d5d8 --- /dev/null +++ b/tests/runtime-async/async/simple-yield/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/i/stub.mbt' + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + @async.pause() +} diff --git a/tests/runtime-async/async/threading-builtins/test.mbt b/tests/runtime-async/async/threading-builtins/test.mbt new file mode 100644 index 000000000..385e6d5d8 --- /dev/null +++ b/tests/runtime-async/async/threading-builtins/test.mbt @@ -0,0 +1,7 @@ +//@ [lang] +//@ path = 'gen/interface/a/b/i/stub.mbt' + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + @async.pause() +} diff --git a/tests/runtime-async/async/yield-loop-receives-events/leaf.mbt b/tests/runtime-async/async/yield-loop-receives-events/leaf.mbt new file mode 100644 index 000000000..5bdfa6e28 --- /dev/null +++ b/tests/runtime-async/async/yield-loop-receives-events/leaf.mbt @@ -0,0 +1,8 @@ +//@ [lang] +//@ path = 'gen/interface/test_/common/iMiddle/stub.mbt' + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + @async.pause() + @async.pause() +} diff --git a/tests/runtime-async/async/yield-loop-receives-events/middle.mbt 
b/tests/runtime-async/async/yield-loop-receives-events/middle.mbt new file mode 100644 index 000000000..4a0a6cb67 --- /dev/null +++ b/tests/runtime-async/async/yield-loop-receives-events/middle.mbt @@ -0,0 +1,22 @@ +//@ [lang] +//@ path = 'gen/interface/test_/common/iRunner/stub.mbt' +//@ pkg_config = """{ "warn-list": "-44", "import": ["test/common/interface/test_/common/iMiddle", "test/common/async"] }""" + +///| +let hit : Ref[Bool] = { val: false } + +///| +pub async fn f(task_group : @async.TaskGroup[Unit]) -> Unit { + hit.val = false + task_group.spawn_bg(async fn() { + @iMiddle.f() + hit.val = true + }) + + for { + if hit.val { + return + } + @async.pause() + } +} diff --git a/tests/runtime/resources/resources.mbt b/tests/runtime/resources/resources.mbt index 001535071..247fd8690 100644 --- a/tests/runtime/resources/resources.mbt +++ b/tests/runtime/resources/resources.mbt @@ -1,6 +1,6 @@ //@ [lang] //@ path = 'gen/interface/exports/stub.mbt' -//@ pkg_config = """{ "import": ["test/resources/interface/imports"] }""" +//@ pkg_config = """{ "warn-list": "-44", "import": ["test/resources/interface/imports"] }""" ///| let x : Map[Int, Int] = {}