553 changes: 272 additions & 281 deletions Cargo.lock

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions Cargo.toml
@@ -1,8 +1,15 @@
+[workspace]
+members = [
+    ".",
+    "bindings/python",
+]
+default-members = ["."]
+
 [package]
 name = "keep-talkin"
 version = "0.1.0"
-edition = "2021"
-authors = ["Jason Wiemels <[email protected]>"]
+edition = "2024"
+authors = ["Jason Wiemels <[email protected]>"]
 autobenches = false
 autotests = false

@@ -19,7 +26,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 base64 = "0.22"
 thiserror = "1.0"
-pyo3 = { version = "0.25", features = ["extension-module"], optional = true }
+pyo3 = { version = "0.26", features = ["extension-module"], optional = true }
 
 [dev-dependencies]
 criterion = "0.6"
12 changes: 12 additions & 0 deletions bindings/python/Cargo.toml
@@ -0,0 +1,12 @@
[package]
name = "keep-talkin-python"
version = "0.1.0"
edition = "2024"

[lib]
name = "keep_talkin_py"
crate-type = ["cdylib"]

[dependencies]
keep-talkin = { path = "../../" }
pyo3 = { version = "0.26", features = ["extension-module"] }
16 changes: 16 additions & 0 deletions bindings/python/pyproject.toml
@@ -0,0 +1,16 @@
[build-system]
requires = ["maturin>=1.5,<2.0"]
build-backend = "maturin"

[project]
name = "keep_talkin"
version = "0.1.0"
requires-python = ">=3.9"
description = "Minimal Python bindings for keep-talkin"
authors = [{ name = "Jason Wiemels", email = "[email protected]" }]
license = { text = "Apache-2.0" }
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Rust",
    "Operating System :: OS Independent",
]
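
Since the wheel is built with maturin, a hypothetical smoke test of the packaging could look like the following (assumes `maturin develop` has been run from bindings/python and that the compiled module imports under the `#[pymodule]` name `keep_talkin_py`; neither is confirmed by this diff):

# Hypothetical smoke test; module name taken from the #[pymodule]
# function in bindings/python/src/lib.rs below.
import keep_talkin_py

assert hasattr(keep_talkin_py, "Tokenizer")
assert hasattr(keep_talkin_py, "Token")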
149 changes: 149 additions & 0 deletions bindings/python/src/lib.rs
@@ -0,0 +1,149 @@
use keep_talkin::{Error, Rank};
use pyo3::exceptions::{PyIOError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyType};

// Python-visible exception types, mapped from the crate's error cases.
pyo3::create_exception!(keep_talkin, InitError, PyIOError);
pyo3::create_exception!(keep_talkin, EncodeError, PyValueError);
pyo3::create_exception!(keep_talkin, DecodeError, PyValueError);

/// Immutable (bytes, rank) pair exposed to Python.
#[pyclass(eq, hash, frozen)]
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Token {
    #[pyo3(get)]
    pub bytes: Vec<u8>,
    #[pyo3(get)]
    pub rank: Rank,
}

#[pymethods]
impl Token {
    #[new]
    fn new(bytes: Vec<u8>, rank: Rank) -> Self {
        Self { bytes, rank }
    }

    fn __repr__(&self) -> String {
        format!("Token(bytes={:?}, rank={})", self.bytes, self.rank)
    }

    fn __str__(&self) -> String {
        format!(
            "Token('{}', {})",
            String::from_utf8_lossy(&self.bytes),
            self.rank
        )
    }
}

impl From<keep_talkin::Token> for Token {
    fn from(token: keep_talkin::Token) -> Self {
        Self {
            bytes: token.bytes,
            rank: token.rank,
        }
    }
}

/// Thin wrapper around the Rust tokenizer; constructors are classmethods.
#[pyclass]
pub struct Tokenizer(keep_talkin::Tokenizer);

#[pymethods]
impl Tokenizer {
    #[classmethod]
    fn from_tokenizer_json(_cls: &Bound<'_, PyType>, path: &str) -> PyResult<Self> {
        let tokenizer = keep_talkin::Tokenizer::from_tokenizer_json(path)
            .map_err(|e| InitError::new_err(e.to_string()))?;

        Ok(Self(tokenizer))
    }

    #[classmethod]
    fn from_model_and_config(
        _cls: &Bound<'_, PyType>,
        model_path: &str,
        config_path: &str,
        regex_pattern: &str,
    ) -> PyResult<Self> {
        let tokenizer =
            keep_talkin::Tokenizer::from_model_and_config(model_path, config_path, [regex_pattern])
                .map_err(|e| InitError::new_err(e.to_string()))?;

        Ok(Self(tokenizer))
    }

    #[classmethod]
    fn from_tekken(_cls: &Bound<'_, PyType>, path: &str) -> PyResult<Self> {
        let tokenizer = keep_talkin::Tokenizer::from_tekken(path)
            .map_err(|e| InitError::new_err(e.to_string()))?;

        Ok(Self(tokenizer))
    }

    fn encode(&self, py: Python, data: &[u8]) -> PyResult<Vec<Rank>> {
        let Tokenizer(inner) = self;
        // Detach from the interpreter so other Python threads can run
        // while the Rust encoder does CPU-bound work.
        py.detach(|| {
            inner
                .encode(data)
                .map_err(|e| EncodeError::new_err(e.to_string()))
        })
    }

    fn decode(&self, py: Python, tokens: Vec<Rank>) -> PyResult<Py<PyAny>> {
        let Tokenizer(inner) = self;
        let bytes = py
            .detach(|| {
                let decoded = inner.decode(&tokens)?;
                // Flatten per-token byte slices into one contiguous buffer.
                Ok::<_, Error>(decoded.into_iter().flatten().copied().collect::<Vec<_>>())
            })
            .map_err(|e| DecodeError::new_err(e.to_string()))?;

        Ok(PyBytes::new(py, &bytes).into())
    }

    fn encode_batch(&self, py: Python, data: Vec<Vec<u8>>) -> PyResult<Vec<Vec<Rank>>> {
        let Tokenizer(inner) = self;
        py.detach(|| {
            inner
                .encode_batch(data)
                .map_err(|e| EncodeError::new_err(e.to_string()))
        })
    }

    fn decode_batch(&self, py: Python, tokens: Vec<Vec<Rank>>) -> PyResult<Vec<Py<PyAny>>> {
        let Tokenizer(inner) = self;
        let batch_bytes = py
            .detach(|| {
                let decoded = inner.decode_batch(tokens)?;

                // One flattened byte buffer per input sequence.
                Ok::<_, Error>(
                    decoded
                        .into_iter()
                        .map(|token_bytes| {
                            token_bytes
                                .into_iter()
                                .flatten()
                                .copied()
                                .collect::<Vec<_>>()
                        })
                        .collect::<Vec<_>>(),
                )
            })
            .map_err(|e| DecodeError::new_err(e.to_string()))?;

        Ok(batch_bytes
            .into_iter()
            .map(|bytes| PyBytes::new(py, &bytes).into())
            .collect())
    }
}

#[pymodule]
fn keep_talkin_py(py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_class::<Tokenizer>()?;
    m.add_class::<Token>()?;
    m.add("InitError", py.get_type::<InitError>())?;
    m.add("EncodeError", py.get_type::<EncodeError>())?;
    m.add("DecodeError", py.get_type::<DecodeError>())?;
    Ok(())
}
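
Putting the new API together, a minimal usage sketch (the tokenizer.json path is illustrative, and the exact round-trip assumes a lossless byte-level tokenizer; neither is asserted by this PR):

import keep_talkin_py

# InitError is raised if the file cannot be read or parsed.
tok = keep_talkin_py.Tokenizer.from_tokenizer_json("tokenizer.json")

ids = tok.encode(b"hello world")   # -> list of integer ranks
raw = tok.decode(ids)              # -> bytes (per-token bytes concatenated)
assert raw == b"hello world"       # assumes the vocabulary round-trips losslessly

# Batch variants mirror the single-item calls.
batch_ids = tok.encode_batch([b"foo", b"bar"])
batch_raw = tok.decode_batch(batch_ids)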
139 changes: 0 additions & 139 deletions src/bindings.rs

This file was deleted.
