51 changes: 47 additions & 4 deletions lib/ruby_llm/active_record/acts_as_legacy.rb
@@ -198,6 +198,19 @@ def ask(message, with: nil, &)

alias say ask

def prompt(message, with: nil, &)
llm_chat = to_llm
messages_before_count = llm_chat.messages.count

with_persistence_disabled(llm_chat) do
add_prompt_message(llm_chat, message, with)
response = llm_chat.complete(&)
prompt_messages = llm_chat.messages[messages_before_count..].dup.freeze
response.define_singleton_method(:prompt_messages) { prompt_messages }
response
end
end

def complete(...)
to_llm.complete(...)
rescue RubyLLM::Error => e
@@ -208,6 +221,22 @@ def complete(...)

private

def add_prompt_message(llm_chat, message, with)
if message.is_a?(::ActiveRecord::Base) && message.respond_to?(:to_llm)
llm_chat.add_message(message.to_llm)
else
content = prepare_prompt_content(message, with)
llm_chat.add_message role: :user, content: content
end
end

def prepare_prompt_content(message, with)
return message if message.is_a?(RubyLLM::Content) || message.is_a?(RubyLLM::Content::Raw)
return RubyLLM::Content.new(message, with) if with

message
end

def cleanup_failed_messages
RubyLLM.logger.warn "RubyLLM: API call failed, destroying message: #{@message.id}"
@message.destroy
@@ -243,6 +272,20 @@ def setup_persistence_callbacks
@chat
end

def with_persistence_disabled(llm_chat)
on_hash = llm_chat.instance_variable_get(:@on)
original_new_message = on_hash[:new_message]
original_end_message = on_hash[:end_message]

on_hash[:new_message] = nil
on_hash[:end_message] = nil

yield
ensure
on_hash[:new_message] = original_new_message if on_hash
on_hash[:end_message] = original_end_message if on_hash
end

def persist_new_message
@message = messages.create!(role: :assistant, content: '')
end
@@ -336,10 +379,10 @@ module MessageLegacyMethods
attr_reader :chat_class, :tool_call_class, :chat_foreign_key, :tool_call_foreign_key
end

def to_llm
def to_llm(include_attachments: true)
RubyLLM::Message.new(
role: role.to_sym,
content: extract_content,
content: extract_content(include_attachments: include_attachments),
tool_calls: extract_tool_calls,
tool_call_id: extract_tool_call_id,
input_tokens: input_tokens,
@@ -367,8 +410,8 @@ def extract_tool_call_id
parent_tool_call&.tool_call_id
end

def extract_content
return content unless respond_to?(:attachments) && attachments.attached?
def extract_content(include_attachments: true)
return content unless include_attachments && respond_to?(:attachments) && attachments.attached?

RubyLLM::Content.new(content).tap do |content_obj|
@_tempfiles = []
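A hedged sketch of how the new `prompt` method behaves on an `acts_as_chat` model. The `Chat` class and the `model_id` value below are placeholders; the point illustrated is that persistence callbacks are suspended for the call, so nothing is written to the messages table and the transient exchange is exposed via `prompt_messages` on the response.

```ruby
# Illustrative only: Chat is assumed to be an ActiveRecord model using acts_as_chat,
# and the model_id value is a placeholder.
chat = Chat.create!(model_id: "gpt-4.1-nano")

response = chat.prompt("Give me a one-line haiku about Ruby.")

response.content         # assistant reply, never written to the messages table
response.prompt_messages # frozen array of the transient RubyLLM::Message objects
chat.messages.count      # unchanged — persistence callbacks were disabled for the call
```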
43 changes: 43 additions & 0 deletions lib/ruby_llm/active_record/chat_methods.rb
@@ -194,6 +194,19 @@ def ask(message, with: nil, &)

alias say ask

def prompt(message, with: nil, &)
llm_chat = to_llm
messages_before_count = llm_chat.messages.count

with_persistence_disabled(llm_chat) do
add_prompt_message(llm_chat, message, with)
response = llm_chat.complete(&)
prompt_messages = llm_chat.messages[messages_before_count..].dup.freeze
response.define_singleton_method(:prompt_messages) { prompt_messages }
response
end
end

def complete(...)
to_llm.complete(...)
rescue RubyLLM::Error => e
@@ -204,6 +217,22 @@ def complete(...)

private

def add_prompt_message(llm_chat, message, with)
if message.is_a?(::ActiveRecord::Base) && message.respond_to?(:to_llm)
llm_chat.add_message(message.to_llm)
else
content = prepare_prompt_content(message, with)
llm_chat.add_message role: :user, content: content
end
end

def prepare_prompt_content(message, with)
return message if message.is_a?(RubyLLM::Content) || message.is_a?(RubyLLM::Content::Raw)
return RubyLLM::Content.new(message, with) if with

message
end

def cleanup_failed_messages
RubyLLM.logger.warn "RubyLLM: API call failed, destroying message: #{@message.id}"
@message.destroy
@@ -239,6 +268,20 @@ def setup_persistence_callbacks
@chat
end

def with_persistence_disabled(llm_chat)
on_hash = llm_chat.instance_variable_get(:@on)
original_new_message = on_hash[:new_message]
original_end_message = on_hash[:end_message]

on_hash[:new_message] = nil
on_hash[:end_message] = nil

yield
ensure
on_hash[:new_message] = original_new_message if on_hash
on_hash[:end_message] = original_end_message if on_hash
end

def persist_new_message
@message = messages_association.create!(role: :assistant, content: '')
end
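A sketch of the two input paths handled by `add_prompt_message`: a plain string, optionally wrapped in `RubyLLM::Content` by `prepare_prompt_content` when `with:` attachments are given, and an ActiveRecord message record that already responds to `to_llm`. The file path and the earlier-message lookup are assumptions for illustration.

```ruby
# Sketch; the attachment path and the existence of an earlier user message are assumed.
chat = Chat.create!(model_id: "gpt-4.1-nano")

# String + with: → prepare_prompt_content wraps it in RubyLLM::Content
chat.prompt("What is in this image?", with: "spec/fixtures/ruby.png")

# A persisted message responding to #to_llm is replayed as-is, still without persisting anything new
previous = chat.messages.find_by(role: "user")
chat.prompt(previous) if previous
```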
8 changes: 4 additions & 4 deletions lib/ruby_llm/active_record/message_methods.rb
@@ -10,13 +10,13 @@ module MessageMethods
attr_reader :chat_class, :tool_call_class, :chat_foreign_key, :tool_call_foreign_key
end

def to_llm
def to_llm(include_attachments: true)
cached = has_attribute?(:cached_tokens) ? self[:cached_tokens] : nil
cache_creation = has_attribute?(:cache_creation_tokens) ? self[:cache_creation_tokens] : nil

RubyLLM::Message.new(
role: role.to_sym,
content: extract_content,
content: extract_content(include_attachments: include_attachments),
tool_calls: extract_tool_calls,
tool_call_id: extract_tool_call_id,
input_tokens: input_tokens,
@@ -46,12 +46,12 @@ def extract_tool_call_id
parent_tool_call&.tool_call_id
end

def extract_content
def extract_content(include_attachments: true)
return RubyLLM::Content::Raw.new(content_raw) if has_attribute?(:content_raw) && content_raw.present?

content_value = self[:content]

return content_value unless respond_to?(:attachments) && attachments.attached?
return content_value unless include_attachments && respond_to?(:attachments) && attachments.attached?

RubyLLM::Content.new(content_value).tap do |content_obj|
@_tempfiles = []
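The new `include_attachments:` flag lets callers build a lightweight `RubyLLM::Message` without downloading Active Storage attachments into tempfiles. A minimal sketch, assuming `message` is an `acts_as_message` record that may have attachments:

```ruby
# Assumes message is an acts_as_message record that may have Active Storage attachments.
full  = message.to_llm                              # wraps content in RubyLLM::Content and attaches files
light = message.to_llm(include_attachments: false)  # plain stored content only; no tempfiles created
```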
1 change: 1 addition & 0 deletions lib/ruby_llm/chat.rb
@@ -36,6 +36,7 @@ def ask(message = nil, with: nil, &)
end

alias say ask
alias prompt ask

def with_instructions(instructions, replace: false)
@messages = @messages.reject { |msg| msg.role == :system } if replace
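On the plain (non-ActiveRecord) `RubyLLM::Chat`, `prompt` is simply an alias for `ask`, so the same method name works whether or not persistence is in play. The model name below is a placeholder:

```ruby
chat = RubyLLM.chat(model: "gpt-4.1-nano")  # model name is a placeholder
chat.prompt("Hello!")                       # identical to chat.ask("Hello!")
```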
