From 2b00cbe8b749cfcc9a0339fe5f09717f01361858 Mon Sep 17 00:00:00 2001
From: Christopher Serr
Date: Sun, 7 May 2023 13:51:32 +0200
Subject: [PATCH] Add support for asynchronous code

This allows you to define an asynchronous `main` function instead of the
poll based `update` function, which makes it much easier to keep state
between individual ticks of the runtime.

Unfortunately the most efficient implementation isn't possible yet on
stable Rust, as we are blocked by the following two features:

- [type_alias_impl_trait](https://github.com/rust-lang/rust/issues/63063)
- [const_async_blocks](https://github.com/rust-lang/rust/issues/85368)

For now we have to use a less efficient workaround: the `main` function is
called at runtime and the future it returns is stored in newly allocated
WebAssembly pages.

Here is a full example of what an auto splitter could look like using the
`async_main` macro:

Usage on stable Rust:
```rust
async_main!(stable);
```

Usage on nightly Rust:
```rust
async_main!(nightly);
```

The asynchronous main function itself:
```rust
async fn main() {
    // TODO: Set up some general state and settings.
    loop {
        let process = Process::wait_attach("explorer.exe").await;
        process.until_closes(async {
            // TODO: Load some initial information from the process.
            loop {
                // TODO: Do something on every tick.
                next_tick().await;
            }
        }).await;
    }
}
```
---
 .github/workflows/build.yml |  10 +-
 README.md                   |  95 +++++++++-
 src/future.rs               | 351 ++++++++++++++++++++++++++++++++++++
 src/lib.rs                  |  99 +++++++++-
 4 files changed, 545 insertions(+), 10 deletions(-)
 create mode 100644 src/future.rs

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 1d38404..c38f6d6 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -36,10 +36,18 @@ jobs:
         if: matrix.install_target != ''
         run: rustup target add ${{ matrix.target }}
 
-      - name: Build
+      - name: Build (No Default Features)
+        run: |
+          cargo build --no-default-features --target ${{ matrix.target }}
+
+      - name: Build (Default Features)
         run: |
           cargo build --target ${{ matrix.target }}
 
+      - name: Build (All Features)
+        run: |
+          cargo build --all-features --target ${{ matrix.target }}
+
   clippy:
     name: Check clippy lints
     runs-on: ubuntu-latest

diff --git a/README.md b/README.md
index fd7803b..1db64b7 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,34 @@
 # LiveSplit asr
 
-Helper crate to write auto splitters for LiveSplit One's auto splitting runtime.
-## Example
+Helper crate to write auto splitters for LiveSplit One's auto splitting
+runtime.
+
+There are two ways of defining an auto splitter.
+
+## Defining an `update` function
+
+You can define an `update` function that will be called every frame. This is
+the simplest way to define an auto splitter. The function must have the
+following signature:
+```rust
+#[no_mangle]
+pub extern "C" fn update() {}
+```
+
+The advantage of this approach is that you have full control over what
+happens on every tick of the runtime. However, it's much harder to keep
+state around, because you have to return out of the function on every tick
+and therefore need to store all state in global variables.
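+
+As a rough sketch (not part of the crate, it only illustrates the pattern),
+such state could be kept in a `static`:
+
+```rust
+static mut TICKS: u32 = 0;
+
+#[no_mangle]
+pub extern "C" fn update() {
+    // All state has to live in globals, because the function returns to the
+    // runtime at the end of every tick.
+    unsafe { TICKS += 1 };
+}
+```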
+
+### Example
 
 ```rust
 #[no_mangle]
 pub extern "C" fn update() {
-    if let Some(process) = Process::attach("Notepad.exe") {
+    if let Some(process) = Process::attach("explorer.exe") {
         asr::print_message("Hello World!");
-        if let Ok(address) = process.get_module_address("Notepad.exe") {
+        if let Ok(address) = process.get_module_address("explorer.exe") {
             if let Ok(value) = process.read::<u8>(address) {
                 if value > 0 {
                     asr::timer::start();
@@ -20,6 +39,74 @@ pub extern "C" fn update() {
 }
 ```
 
+## Defining an asynchronous `main` function
+
+You can use the `async_main` macro to define an asynchronous `main`
+function.
+
+Similar to using an `update` function, it is important to constantly yield
+back to the runtime to communicate that the auto splitter is still alive.
+All asynchronous code that you await automatically yields back to the
+runtime. However, if you want to write synchronous code, such as the main
+loop handling of a process on every tick, you can use the
+`next_tick` function to yield back to the runtime and
+continue on the next tick.
+
+The main low-level abstraction is the `retry` function, which wraps any code
+that you want to retry until it succeeds, yielding back to the runtime between
+each try.
+
+So if you wanted to attach to a Process, you could for example write:
+
+```rust
+let process = retry(|| Process::attach("MyGame.exe")).await;
+```
+
+This will try to attach to the process every tick until it succeeds. This
+specific example is exactly how the `Process::wait_attach` method is
+implemented. So if you wanted to attach to any of multiple processes, you could
+for example write:
+
+```rust
+let process = retry(|| {
+    ["a.exe", "b.exe"].into_iter().find_map(Process::attach)
+}).await;
+```
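+
+Anything that returns an `Option` can be retried in the same way. As a rough
+sketch (the module name is made up, and this is similar to what the
+`Process::wait_module_range` helper does for you), you could wait for a module
+to be loaded by turning the `Result` into an `Option`:
+
+```rust
+let module = retry(|| process.get_module_address("game.dll").ok()).await;
+```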
+
+### Example
+
+Here is a full example of what an auto splitter could look like using the
+`async_main` macro:
+
+Usage on stable Rust:
+```rust
+async_main!(stable);
+```
+
+Usage on nightly Rust:
+```rust
+#![feature(type_alias_impl_trait, const_async_blocks)]
+
+async_main!(nightly);
+```
+
+The asynchronous main function itself:
+```rust
+async fn main() {
+    // TODO: Set up some general state and settings.
+    loop {
+        let process = Process::wait_attach("explorer.exe").await;
+        process.until_closes(async {
+            // TODO: Load some initial information from the process.
+            loop {
+                // TODO: Do something on every tick.
+                next_tick().await;
+            }
+        }).await;
+    }
+}
+```
+
 ## License
 
 Licensed under either of

diff --git a/src/future.rs b/src/future.rs
new file mode 100644
index 0000000..885785d
--- /dev/null
+++ b/src/future.rs
@@ -0,0 +1,351 @@
+//! Futures support for writing auto splitters with asynchronous code.
+//!
+//! If you want to write an auto splitter that uses asynchronous code, you can
+//! use the [`async_main`] macro to define an asynchronous `main` function
+//! instead of defining an `update` function as the entrypoint for your auto
+//! splitter.
+//!
+//! Similar to using an `update` function, it is important to constantly yield
+//! back to the runtime to communicate that the auto splitter is still alive.
+//! All asynchronous code that you await automatically yields back to the
+//! runtime. However, if you want to write synchronous code, such as the main
+//! loop handling of a process on every tick, you can use the [`next_tick`]
+//! function to yield back to the runtime and continue on the next tick.
+//!
+//! The main low-level abstraction is the [`retry`] function, which wraps any
+//! code that you want to retry until it succeeds, yielding back to the runtime
+//! between each try.
+//!
+//! So if you wanted to attach to a Process, you could for example write:
+//!
+//! ```no_run
+//! let process = retry(|| Process::attach("MyGame.exe")).await;
+//! ```
+//!
+//! This will try to attach to the process every tick until it succeeds. This
+//! specific example is exactly how the [`Process::wait_attach`] method is
+//! implemented. So if you wanted to attach to any of multiple processes, you
+//! could for example write:
+//!
+//! ```no_run
+//! let process = retry(|| {
+//!     ["a.exe", "b.exe"].into_iter().find_map(Process::attach)
+//! }).await;
+//! ```
+//!
+//! # Example
+//!
+//! Here is a full example of what an auto splitter could look like using the
+//! [`async_main`] macro:
+//!
+//! Usage on stable Rust:
+//! ```no_run
+//! async_main!(stable);
+//! ```
+//!
+//! Usage on nightly Rust:
+//! ```no_run
+//! #![feature(type_alias_impl_trait, const_async_blocks)]
+//!
+//! async_main!(nightly);
+//! ```
+//!
+//! The asynchronous main function itself:
+//! ```no_run
+//! async fn main() {
+//!     // TODO: Set up some general state and settings.
+//!     loop {
+//!         let process = Process::wait_attach("explorer.exe").await;
+//!         process.until_closes(async {
+//!             // TODO: Load some initial information from the process.
+//!             loop {
+//!                 // TODO: Do something on every tick.
+//!                 next_tick().await;
+//!             }
+//!         }).await;
+//!     }
+//! }
+//! ```
+
+use core::{
+    future::Future,
+    mem,
+    pin::Pin,
+    task::{Context, Poll},
+};
+
+#[cfg(feature = "signature")]
+use crate::signature::Signature;
+use crate::{Address, Process};
+
+/// A future that yields back to the runtime and continues on the next tick. It's
+/// important to yield back to the runtime to communicate that the auto splitter
+/// is still alive.
+pub struct NextTick(bool);
+
+impl Future for NextTick {
+    type Output = ();
+
+    fn poll(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
+        if !mem::replace(&mut self.0, true) {
+            Poll::Pending
+        } else {
+            Poll::Ready(())
+        }
+    }
+}
+
+/// A future that retries the given function until it returns `Some`, yielding
+/// back to the runtime between each call.
+pub struct Retry<F> {
+    f: F,
+}
+
+impl<T, F: FnMut() -> Option<T> + Unpin> Future for Retry<F> {
+    type Output = T;
+
+    fn poll(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
+        match (self.f)() {
+            Some(t) => Poll::Ready(t),
+            None => Poll::Pending,
+        }
+    }
+}
+
+/// Yields back to the runtime and continues on the next tick. It's important to
+/// yield back to the runtime to communicate that the auto splitter is still
+/// alive.
+///
+/// # Example
+///
+/// ```no_run
+/// loop {
+///     // TODO: Do something on every tick.
+///     next_tick().await;
+/// }
+/// ```
+#[must_use = "You need to await this future."]
+pub const fn next_tick() -> NextTick {
+    NextTick(false)
+}
+
+/// Retries the given function until it returns `Some`, yielding back to the
+/// runtime between each call.
+///
+/// # Example
+///
+/// If you wanted to attach to a Process, you could for example write:
+///
+/// ```no_run
+/// let process = retry(|| Process::attach("MyGame.exe")).await;
+/// ```
+///
+/// This will try to attach to the process every tick until it succeeds. This
+/// specific example is exactly how the [`Process::wait_attach`] method is
+/// implemented. So if you wanted to attach to any of multiple processes, you
+/// could for example write:
+///
+/// ```no_run
+/// let process = retry(|| {
+///     ["a.exe", "b.exe"].into_iter().find_map(Process::attach)
+/// }).await;
+/// ```
+#[must_use = "You need to await this future."]
+pub const fn retry<T, F: FnMut() -> Option<T>>(f: F) -> Retry<F> {
+    Retry { f }
+}
+
+impl Process {
+    /// Asynchronously awaits attaching to a process with the given name,
+    /// yielding back to the runtime between each try.
+    pub async fn wait_attach(name: &str) -> Process {
+        retry(|| Process::attach(name)).await
+    }
+
+    /// Executes a future until the process closes.
+    pub const fn until_closes<F>(&self, future: F) -> UntilProcessCloses<'_, F> {
+        UntilProcessCloses {
+            process: self,
+            future,
+        }
+    }
+
+    /// Asynchronously awaits the address and size of a module in the process,
+    /// yielding back to the runtime between each try.
+    pub async fn wait_module_range(&self, name: &str) -> (Address, u64) {
+        retry(|| {
+            let address = self.get_module_address(name).ok()?;
+            let size = self.get_module_size(name).ok()?;
+            Some((address, size))
+        })
+        .await
+    }
+}
+
+#[cfg(feature = "signature")]
+impl<const N: usize> Signature<N> {
+    /// Asynchronously awaits scanning a process for the signature until it is
+    /// found. This will scan the address range of the process given. Once the
+    /// signature is found, the address of the start of the signature is
+    /// returned.
+    pub async fn wait_scan_process_range(
+        &self,
+        process: &Process,
+        addr: Address,
+        len: u64,
+    ) -> Address {
+        retry(|| self.scan_process_range(process, addr, len)).await
+    }
+}
+
+/// A future that executes another future until the process closes.
+pub struct UntilProcessCloses<'a, F> {
+    process: &'a Process,
+    future: F,
+}
+
+impl<F: Future<Output = ()>> Future for UntilProcessCloses<'_, F> {
+    type Output = ();
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+        if !self.process.is_open() {
+            return Poll::Ready(());
+        }
+        // SAFETY: We are simply projecting the Pin.
+        unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().future).poll(cx) }
+    }
+}
+
+/// Defines that the auto splitter is using an asynchronous `main` function
+/// instead of the normal poll based `update` function. It is important to
+/// frequently yield back to the runtime to communicate that the auto splitter
+/// is still alive. If the function ends, the auto splitter will stop executing
+/// code.
+///
+/// There are two versions of the macro depending on whether you use `stable` or
+/// `nightly` Rust. This needs to be passed to the macro as an argument. The
+/// `stable` variant currently allocates WebAssembly pages to store the future.
+/// This is still compatible with `no_std`, even without `alloc`. If you use the
+/// `nightly` version of this macro, the future is stored in a global variable
+/// at compile time, removing the work needed at runtime to store the future. If
+/// you do so, you need to enable the `type_alias_impl_trait` and
+/// `const_async_blocks` features.
+///
+/// # Example
+///
+/// Usage on stable Rust:
+/// ```no_run
+/// async_main!(stable);
+/// ```
+///
+/// Usage on nightly Rust:
+/// ```no_run
+/// #![feature(type_alias_impl_trait, const_async_blocks)]
+///
+/// async_main!(nightly);
+/// ```
+///
+/// Example of an asynchronous `main` function:
+/// ```no_run
+/// async fn main() {
+///     // TODO: Set up some general state and settings.
+///     loop {
+///         let process = Process::wait_attach("explorer.exe").await;
+///         process.until_closes(async {
+///             // TODO: Load some initial information from the process.
+///             loop {
+///                 // TODO: Do something on every tick.
+///                 next_tick().await;
+///             }
+///         }).await;
+///     }
+/// }
+/// ```
+#[macro_export]
+macro_rules! async_main {
+    (nightly) => {
+        #[no_mangle]
+        pub extern "C" fn update() {
+            use core::{
+                future::Future,
+                pin::Pin,
+                ptr,
+                task::{Context, RawWaker, RawWakerVTable, Waker},
+            };
+
+            type MainFuture = impl Future<Output = ()>;
+            const fn main_type() -> MainFuture {
+                async {
+                    main().await;
+                }
+            }
+            static mut STATE: MainFuture = main_type();
+
+            static VTABLE: RawWakerVTable = RawWakerVTable::new(
+                |_| RawWaker::new(ptr::null(), &VTABLE),
+                |_| {},
+                |_| {},
+                |_| {},
+            );
+            let raw_waker = RawWaker::new(ptr::null(), &VTABLE);
+            let waker = unsafe { Waker::from_raw(raw_waker) };
+            let mut cx = Context::from_waker(&waker);
+            let _ = unsafe { Pin::new_unchecked(&mut STATE).poll(&mut cx) };
+        }
+    };
+    (stable) => {
+        #[no_mangle]
+        pub extern "C" fn update() {
+            use core::{
+                future::Future,
+                mem::{self, ManuallyDrop},
+                pin::Pin,
+                ptr,
+                task::{Context, RawWaker, RawWakerVTable, Waker},
+            };
+
+            static mut STATE: Option<Pin<&'static mut dyn Future<Output = ()>>> = None;
+
+            static VTABLE: RawWakerVTable = RawWakerVTable::new(
+                |_| RawWaker::new(ptr::null(), &VTABLE),
+                |_| {},
+                |_| {},
+                |_| {},
+            );
+            let raw_waker = RawWaker::new(ptr::null(), &VTABLE);
+            let waker = unsafe { Waker::from_raw(raw_waker) };
+            let mut cx = Context::from_waker(&waker);
+            let _ = unsafe {
+                Pin::new_unchecked(STATE.get_or_insert_with(|| {
+                    fn allocate<F: Future<Output = ()> + 'static>(
+                        f: ManuallyDrop<F>,
+                    ) -> Pin<&'static mut dyn Future<Output = ()>> {
+                        unsafe {
+                            let size = mem::size_of::<F>();
+                            const PAGE_SIZE: usize = 64 << 10;
+                            assert!(mem::align_of::<F>() <= PAGE_SIZE);
+                            // TODO: div_ceil
+                            let pages = (size + (PAGE_SIZE - 1)) / PAGE_SIZE;
+
+                            #[cfg(target_arch = "wasm32")]
+                            let old_page_count = core::arch::wasm32::memory_grow(0, pages);
+                            #[cfg(target_arch = "wasm64")]
+                            let old_page_count = core::arch::wasm64::memory_grow(0, pages);
+
+                            let address = old_page_count * PAGE_SIZE;
+                            let ptr = address as *mut ManuallyDrop<F>;
+                            ptr::write(ptr, f);
+                            let ptr = ptr.cast::<F>();
+                            let future: &'static mut F = &mut *ptr;
+                            let future: &'static mut dyn Future<Output = ()> = future;
+                            Pin::static_mut(future)
+                        }
+                    }
+
+                    allocate(ManuallyDrop::new(main()))
+                }))
+                .poll(&mut cx)
+            };
+        }
+    };
+}

diff --git a/src/lib.rs b/src/lib.rs
index 68290fd..4ce2dcf 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -10,17 +10,35 @@
     rust_2018_idioms
 )]
 
-//! Helper crate to write auto splitters for LiveSplit One's auto splitting runtime.
+//! Helper crate to write auto splitters for LiveSplit One's auto splitting
+//! runtime.
 //!
-//! # Example
+//! There are two ways of defining an auto splitter.
+//!
+//! # Defining an `update` function
+//!
+//! You can define an `update` function that will be called every frame. This is
+//! the simplest way to define an auto splitter. The function must have the
+//! following signature:
+//! ```no_run
+//! #[no_mangle]
+//! pub extern "C" fn update() {}
+//! ```
+//!
+//! The advantage of this approach is that you have full control over what
+//! happens on every tick of the runtime. However, it's much harder to keep
+//! state around, because you have to return out of the function on every tick
+//! and therefore need to store all state in global variables.
+//!
+//! ## Example
 //!
 //! ```no_run
 //! # use asr::Process;
 //! #[no_mangle]
 //! pub extern "C" fn update() {
-//!     if let Some(process) = Process::attach("Notepad.exe") {
+//!     if let Some(process) = Process::attach("explorer.exe") {
 //!         asr::print_message("Hello World!");
-//!         if let Ok(address) = process.get_module_address("Notepad.exe") {
+//!         if let Ok(address) = process.get_module_address("explorer.exe") {
 //!             if let Ok(value) = process.read::<u8>(address) {
 //!                 if value > 0 {
 //!                     asr::timer::start();
@@ -30,9 +48,80 @@
 //!     }
 //! }
 //! ```
+//!
+//! # Defining an asynchronous `main` function
+//!
+//! You can use the [`async_main`] macro to define an asynchronous `main`
+//! function.
+//!
+//! Similar to using an `update` function, it is important to constantly yield
+//! back to the runtime to communicate that the auto splitter is still alive.
+//! All asynchronous code that you await automatically yields back to the
+//! runtime. However, if you want to write synchronous code, such as the main
+//! loop handling of a process on every tick, you can use the
+//! [`next_tick`](future::next_tick) function to yield back to the runtime and
+//! continue on the next tick.
+//!
+//! The main low-level abstraction is the [`retry`](future::retry) function,
+//! which wraps any code that you want to retry until it succeeds, yielding back
+//! to the runtime between each try.
+//!
+//! So if you wanted to attach to a Process, you could for example write:
+//!
+//! ```no_run
+//! let process = retry(|| Process::attach("MyGame.exe")).await;
+//! ```
+//!
+//! This will try to attach to the process every tick until it succeeds. This
+//! specific example is exactly how the [`Process::wait_attach`] method is
+//! implemented. So if you wanted to attach to any of multiple processes, you
+//! could for example write:
+//!
+//! ```no_run
+//! let process = retry(|| {
+//!     ["a.exe", "b.exe"].into_iter().find_map(Process::attach)
+//! }).await;
+//! ```
+//!
+//! ## Example
+//!
+//! Here is a full example of what an auto splitter could look like using the
+//! [`async_main`] macro:
+//!
+//! Usage on stable Rust:
+//! ```no_run
+//! async_main!(stable);
+//! ```
+//!
+//! Usage on nightly Rust:
+//! ```no_run
+//! #![feature(type_alias_impl_trait, const_async_blocks)]
+//!
+//! async_main!(nightly);
+//! ```
+//!
+//! The asynchronous main function itself:
+//! ```no_run
+//! async fn main() {
+//!     // TODO: Set up some general state and settings.
+//!     loop {
+//!         let process = Process::wait_attach("explorer.exe").await;
+//!         process.until_closes(async {
+//!             // TODO: Load some initial information from the process.
+//!             loop {
+//!                 // TODO: Do something on every tick.
+//!                 next_tick().await;
+//!             }
+//!         }).await;
+//!     }
+//! }
+//! ```
 
-pub mod primitives;
 mod runtime;
+
+#[macro_use]
+pub mod future;
+pub mod primitives;
 #[cfg(feature = "signature")]
 pub mod signature;
 pub mod string;
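
For illustration, here is a rough sketch (the module name is made up) of how
the new asynchronous helpers compose: wait for the process, wait for one of
its modules, and then keep yielding on every tick until the process closes.

```rust
async fn main() {
    loop {
        let process = Process::wait_attach("explorer.exe").await;
        process.until_closes(async {
            // Wait until the module is loaded, then remember where it is.
            let (base, size) = process.wait_module_range("explorer.exe").await;
            loop {
                // TODO: Read memory relative to `base` and `size` here.
                next_tick().await;
            }
        }).await;
    }
}
```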