Class: Raif::Conversation

Overview

Schema Information

Table name: raif_conversations

id                         :bigint           not null, primary key
available_model_tools      :jsonb            not null
available_user_tools       :jsonb            not null
conversation_entries_count :integer          default(0), not null
creator_type               :string           not null
generating_entry_response  :boolean          default(FALSE), not null
llm_messages_max_length    :integer
llm_model_key              :string           not null
requested_language_key     :string
response_format            :integer          default("text"), not null
source_type                :string
system_prompt              :text
type                       :string           not null
created_at                 :datetime         not null
updated_at                 :datetime         not null
creator_id                 :bigint           not null
source_id                  :bigint

Indexes

index_raif_conversations_on_created_at  (created_at)
index_raif_conversations_on_creator     (creator_type,creator_id)
index_raif_conversations_on_source      (source_type,source_id)

Constant Summary

Constants included from Raif::Concerns::LlmResponseParsing

Raif::Concerns::LlmResponseParsing::ASCII_CONTROL_CHARS

Class Method Summary collapse

Instance Method Summary collapse

Methods included from Raif::Concerns::LlmResponseParsing

#parse_html_response, #parse_json_response, #parsed_response

Methods included from Raif::Concerns::HasAvailableModelTools

#available_model_tools_map

Methods included from Raif::Concerns::HasRequestedLanguage

#requested_language_name, #system_prompt_language_preference

Methods included from Raif::Concerns::HasLlm

#default_llm_model_key, #llm

Methods included from Raif::Concerns::HasPromptTemplates

#build_prompt

Methods inherited from ApplicationRecord

table_name_prefix, where_json_not_blank

Class Method Details

.before_prompt_model_for_entry_response(&block) ⇒ Object



44
45
46
47
# File 'app/models/raif/conversation.rb', line 44

# Registers a callback to be instance_exec'd on the conversation right before
# it prompts the model for an entry response. Calling without a block still
# initializes the registry ivar so subclasses are detectable via
# instance_variable_defined?.
def before_prompt_model_for_entry_response(&block)
  @before_prompt_model_for_entry_response_blocks ||= []
  return unless block

  @before_prompt_model_for_entry_response_blocks.push(block)
end

.before_prompt_model_for_entry_response_blocksObject



49
50
51
52
53
54
55
56
57
58
59
60
# File 'app/models/raif/conversation.rb', line 49

# Aggregates the before-prompt callback blocks registered on this class and
# every ancestor. The ancestor chain is walked from the root down so callbacks
# registered on parent classes are returned (and therefore executed) before
# those registered on subclasses.
def before_prompt_model_for_entry_response_blocks
  collected = []

  ancestors.reverse_each do |ancestor|
    next unless ancestor.instance_variable_defined?(:@before_prompt_model_for_entry_response_blocks)

    collected += ancestor.instance_variable_get(:@before_prompt_model_for_entry_response_blocks)
  end

  collected
end

Instance Method Details

#available_user_tool_classesObject



215
216
217
# File 'app/models/raif/conversation.rb', line 215

# Resolves the persisted tool class names (strings) into their constants.
def available_user_tool_classes
  available_user_tools.map { |tool_class_name| tool_class_name.constantize }
end

#build_system_promptObject



73
74
75
76
77
78
# File 'app/models/raif/conversation.rb', line 73

# Assembles the system prompt from the configured intro and the user's
# requested-language preference, trimming surrounding whitespace. A blank
# language preference leaves just the intro.
def build_system_prompt
  sections = [system_prompt_intro, system_prompt_language_preference]
  sections.join("\n").strip
end

#initial_chat_messageObject

i18n-tasks-use t('raif.conversation.initial_chat_message')



86
87
88
# File 'app/models/raif/conversation.rb', line 86

# Looks up the localized initial chat message for this conversation type,
# e.g. Raif::Conversation => t("raif.conversation.initial_chat_message").
def initial_chat_message
  i18n_scope = self.class.name.underscore.tr("/", ".")
  I18n.t("#{i18n_scope}.initial_chat_message")
end

#initial_chat_message_partial_pathObject



90
91
92
# File 'app/models/raif/conversation.rb', line 90

# View partial rendered for the conversation's initial chat message.
# Subclasses may override to point at a custom partial.
def initial_chat_message_partial_path
  "raif/conversations/initial_chat_message"
end

#llm_messagesObject



176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
# File 'app/models/raif/conversation.rb', line 176

# Builds the full chat transcript for this conversation in the shape the LLM
# client expects: an ordered array of message hashes, oldest first.
#
# For each entry, the user's message (when present) is appended first; then,
# only for completed entries, either the assistant's tool-call/tool-result
# message pairs or a plain assistant response message.
#
# @return [Array<Hash>] serialized user/assistant/tool messages
def llm_messages
  messages = []

  # Apply max length limit to entries if configured (nil means no limit)
  included_entries = entries.oldest_first.includes(:raif_model_tool_invocations)
  included_entries = included_entries.last(llm_messages_max_length) if llm_messages_max_length.present?

  included_entries.each do |entry|
    unless entry.user_message.blank?
      messages << Raif::Messages::UserMessage.new(content: entry.user_message).to_h
    end

    # Entries that are not completed contribute only their user message —
    # never a partial or failed assistant response.
    next unless entry.completed?

    # to_a so we can shift the first invocation off without mutating the
    # association itself.
    tool_invocations = entry.raif_model_tool_invocations.to_a

    if tool_invocations.any?
      # First tool call includes the assistant's message (if any).
      # For the result payload we send the model-facing observation when the tool
      # opts into observations, while keeping the raw invocation.result persisted
      # for admin/UI rendering.
      first_invocation = tool_invocations.shift
      messages << first_invocation.as_tool_call_message(assistant_message: entry.model_response_message.presence)
      messages << first_invocation.as_tool_call_result_message(result: tool_result_for_llm(first_invocation))

      # Remaining tool calls (if multiple)
      tool_invocations.each do |tool_invocation|
        messages << tool_invocation.as_tool_call_message
        messages << tool_invocation.as_tool_call_result_message(result: tool_result_for_llm(tool_invocation))
      end
    elsif entry.model_response_message.present?
      # No tool calls, just a regular assistant response
      messages << Raif::Messages::AssistantMessage.new(content: entry.model_response_message).to_h
    end
  end

  messages
end

#on_entry_finalized(entry:) ⇒ Object

Called exactly once per Raif::ConversationEntry immediately after the entry has been successfully finalized (model response saved, all developer- managed tool calls validated and invoked, entry transitioned to completed!). This is the correct place for per-entry side effects such as creating dependent records, enqueuing follow-up work, or broadcasting UI updates tied to the final response — anything you do NOT want re-executing for attempts that were discarded by the retry loop, or for intermediate streaming callbacks.

No-op by default. Subclasses may override.

Parameters:



172
173
174
# File 'app/models/raif/conversation.rb', line 172

# Hook invoked after an entry has been fully finalized (response persisted,
# tool calls validated and invoked, entry completed). Intentionally a no-op;
# subclasses override it to add once-per-entry side effects.
def on_entry_finalized(entry:)
  # Intentionally empty.
end

#process_model_response_message(message:, entry:) ⇒ Object



148
149
150
151
152
153
154
155
156
157
158
# File 'app/models/raif/conversation.rb', line 148

# Default implementation: returns the message untouched. Subclasses override
# for type-specific transformation of the model's response message.
#
# IMPORTANT: invoked on every streaming chunk and on every retry attempt made
# by Raif::ConversationEntry#process_entry!, so overrides must stay free of
# persistent side effects (DB writes, broadcasts, external calls) — put those
# in #on_entry_finalized, which runs exactly once per entry after validation
# and tool invocation have succeeded.
def process_model_response_message(message:, entry:)
  message
end

#prompt_model_for_entry_response(entry:, extra_messages: [], &block) ⇒ Object

Parameters:

  • entry (Raif::ConversationEntry)
  • extra_messages (Array<Hash>) (defaults to: [])

    Additional in-memory messages appended to this attempt’s LLM request only. These are captured on the resulting ModelCompletion.messages for admin/debugging, but they are NOT persisted as ConversationEntry history — the user never sees them in chat UI. Used by the conversation entry retry loop to feed synthetic corrective feedback to the model after an invalid tool call.



101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
# File 'app/models/raif/conversation.rb', line 101

# Sends the conversation history to the LLM to generate a response for
# +entry+. Runs the registered before-prompt callbacks, rebuilds and persists
# the system prompt, and toggles +generating_entry_response+ around the call.
#
# @param entry [Raif::ConversationEntry] the entry awaiting a model response
# @param extra_messages [Array<Hash>] request-only messages appended to this
#   attempt; they are not persisted as conversation entry history
# @param block [Proc] optional streaming block forwarded to the LLM client
# @return [Object, nil] the model completion from llm.chat, or nil when a
#   StandardError was raised (the entry is then marked failed)
def prompt_model_for_entry_response(entry:, extra_messages: [], &block)
  self.class.before_prompt_model_for_entry_response_blocks.each do |callback_block|
    # Callbacks are instance_exec'd so they can read/write conversation state.
    instance_exec(entry, &callback_block)
  end

  # Persist the refreshed system prompt and flag the response as in-flight
  # before hitting the LLM.
  self.system_prompt = build_system_prompt
  self.generating_entry_response = true
  save!

  messages = llm_messages
  messages.concat(extra_messages) if extra_messages.present?

  model_completion = llm.chat(
    messages: messages,
    source: entry,
    response_format: response_format.to_sym,
    system_prompt: system_prompt,
    available_model_tools: available_model_tools,
    anthropic_prompt_caching_enabled: self.class.anthropic_prompt_caching_enabled,
    bedrock_prompt_caching_enabled: self.class.bedrock_prompt_caching_enabled,
    &block
  )

  self.generating_entry_response = false
  save!

  model_completion
rescue StandardError => e
  # Clear the in-flight flag on failure too, so callers/UI don't hang.
  self.generating_entry_response = false
  save!

  Rails.logger.error("Error processing conversation entry ##{entry.id}. #{e.message}")
  Rails.logger.error(e.backtrace.join("\n"))

  entry.failed!

  # Report to Airbrake only when the host app has it installed.
  if defined?(Airbrake)
    notice = Airbrake.build_notice(e)
    notice[:context][:component] = "raif_conversation"
    notice[:context][:action] = "prompt_model_for_entry_response"

    Airbrake.notify(notice)
  end

  nil
end

#system_prompt_introObject



80
81
82
83
# File 'app/models/raif/conversation.rb', line 80

# Returns the configured system prompt intro. The config value may be either
# a static string or a callable that receives this conversation instance.
def system_prompt_intro
  configured_intro = Raif.config.conversation_system_prompt_intro
  if configured_intro.respond_to?(:call)
    configured_intro.call(self)
  else
    configured_intro
  end
end