Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 2 additions & 16 deletions lib/ruby_llm/active_record/acts_as_legacy.rb
Original file line number Diff line number Diff line change
Expand Up @@ -152,26 +152,12 @@ def with_schema(...)
end

def on_new_message(&block)
to_llm

existing_callback = @chat.instance_variable_get(:@on)[:new_message]

@chat.on_new_message do
existing_callback&.call
block&.call
end
to_llm.on_new_message(&block)
self
end

def on_end_message(&block)
to_llm

existing_callback = @chat.instance_variable_get(:@on)[:end_message]

@chat.on_end_message do |msg|
existing_callback&.call(msg)
block&.call(msg)
end
to_llm.on_end_message(&block)
self
end

Expand Down
18 changes: 2 additions & 16 deletions lib/ruby_llm/active_record/chat_methods.rb
Original file line number Diff line number Diff line change
Expand Up @@ -140,26 +140,12 @@ def with_schema(...)
end

def on_new_message(&block)
to_llm

existing_callback = @chat.instance_variable_get(:@on)[:new_message]

@chat.on_new_message do
existing_callback&.call
block&.call
end
to_llm.on_new_message(&block)
self
end

def on_end_message(&block)
to_llm

existing_callback = @chat.instance_variable_get(:@on)[:end_message]

@chat.on_end_message do |msg|
existing_callback&.call(msg)
block&.call(msg)
end
to_llm.on_end_message(&block)
self
end

Expand Down
35 changes: 29 additions & 6 deletions lib/ruby_llm/chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,34 @@ class Chat

attr_reader :model, :messages, :tools, :params, :headers, :schema

# Hash-like container that stores arrays of callables and fans out calls to all of them
class CallbackFanout
  def initialize
    # Each key lazily maps to its own (initially empty) list of callbacks.
    @callbacks = Hash.new { |hash, key| hash[key] = [] }
  end

  # Fetches the callbacks registered under +key+.
  #
  # @return [Proc, nil] a single callable that invokes every registered
  #   callback in registration order, or nil when none are registered
  def [](key)
    handlers = @callbacks[key].compact
    return nil if handlers.empty?

    # Forward positional args, keyword args, and an optional block to each handler.
    ->(*args, **kwargs, &blk) { handlers.each { |handler| handler.call(*args, **kwargs, &blk) } }
  end

  # Registers +callable+ under +key+, appending rather than overwriting so
  # multiple listeners can coexist. Nil assignments are silently ignored.
  #
  # @raise [ArgumentError] when +callable+ does not respond to #call
  def []=(key, callable)
    return if callable.nil?

    raise ArgumentError, 'The callback must be callable' unless callable.respond_to?(:call)

    @callbacks[key] << callable
  end
end

def initialize(model: nil, provider: nil, assume_model_exists: false, context: nil)
if assume_model_exists && !provider
raise ArgumentError, 'Provider must be specified if assume_model_exists is true'
Expand All @@ -22,12 +50,7 @@ def initialize(model: nil, provider: nil, assume_model_exists: false, context: n
@params = {}
@headers = {}
@schema = nil
@on = {
new_message: nil,
end_message: nil,
tool_call: nil,
tool_result: nil
}
@on = CallbackFanout.new
end

def ask(message = nil, with: nil, &)
Expand Down
4 changes: 4 additions & 0 deletions lib/ruby_llm/models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,10 @@ def refresh!(remote_only: false)
self.class.refresh!(remote_only: remote_only)
end

# Instance-level convenience wrapper that delegates model resolution to the
# class-level resolver, forwarding every option unchanged.
#
# @param model_id [String] identifier of the model to resolve
# @param provider [Object, nil] optional provider to resolve against
# @param assume_exists [Boolean] when true, skip existence validation
# @param config [Object, nil] optional configuration passed through
# @return whatever the class-level resolve returns
def resolve(model_id, provider: nil, assume_exists: false, config: nil)
  self.class.resolve(
    model_id,
    provider: provider,
    assume_exists: assume_exists,
    config: config
  )
end

private

def find_with_provider(model_id, provider)
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 14 additions & 0 deletions spec/ruby_llm/active_record/acts_as_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -529,6 +529,20 @@ def uploaded_file(path, type)
expect(chat.messages.count).to eq(2) # Persistence still works
end

it 'allows chaining callbacks on to_llm without losing persistence' do
  chat = Chat.create!(model: model)
  llm_chat = chat.to_llm

  callback_fired = false
  # Attach the callback straight onto the underlying RubyLLM chat object
  llm_chat.on_new_message { callback_fired = true }

  chat.ask('Hello')

  expect(callback_fired).to be true
  expect(chat.messages.count).to eq(2) # Persistence still works
end

it 'calls on_tool_call and on_tool_result callbacks' do
tool_call_received = nil
tool_result_received = nil
Expand Down
40 changes: 40 additions & 0 deletions spec/ruby_llm/models_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,46 @@
end
end

describe '#resolve' do
  it 'delegates to the class method when called on instance' do
    info, provider_obj = RubyLLM.models.resolve('gpt-4o', provider: 'openai')

    expect(info).to be_a(RubyLLM::Model::Info)
    expect(info.id).to eq('gpt-4o')
    expect(info.provider).to eq('openai')
    expect(provider_obj).to be_a(RubyLLM::Provider)
  end

  it 'resolves model without provider' do
    info, provider_obj = RubyLLM.models.resolve('gpt-4o')

    expect(info).to be_a(RubyLLM::Model::Info)
    expect(info.id).to eq('gpt-4o')
    expect(provider_obj).to be_a(RubyLLM::Provider)
  end

  it 'resolves with assume_exists option' do
    info, provider_obj = RubyLLM.models.resolve(
      'custom-model',
      provider: 'openai',
      assume_exists: true
    )

    expect(info).to be_a(RubyLLM::Model::Info)
    expect(info.id).to eq('custom-model')
    expect(info.provider).to eq('openai')
    expect(provider_obj).to be_a(RubyLLM::Provider)
  end
end

describe '#save_to_json' do
it 'saves models to the models.json file' do
temp_file = Tempfile.new(['models', '.json'])
Expand Down