# (Rendered-docs line-number gutter removed; this method spans lines 6-35 of the original file.)
# File 'lib/generators/llm_meta_client/scaffold/templates/app/controllers/prompts_controller.rb', line 6
# GET /prompts/:id
#
# Looks up a prompt execution by its public execution_id (the :id route
# param is a UUID, not a database id), finds the earliest message attached
# to that execution to locate the owning chat, and renders the chat edit
# view anchored at that message.
#
# On ANY StandardError — including ActiveRecord::RecordNotFound raised by
# find_by! when the UUID is unknown — the error is logged server-side and
# the user is redirected to root with a generic alert, so no details about
# the execution_id are exposed in the response.
def show
  @prompt_execution = PromptNavigator::PromptExecution.find_by!(execution_id: params[:id])
  # The earliest message for this execution anchors us to the owning chat.
  # NOTE(review): if an execution can exist with no messages, @message is nil
  # and the next line raises NoMethodError, falling into the rescue below.
  @message = Message.where(prompt_navigator_prompt_execution: @prompt_execution).order(:created_at).first
  @chat = @message.chat
  @messages = @chat.ordered_messages
  initialize_chat current_user&.chats
  initialize_history @chat.ordered_by_descending_prompt_executions
  # Forward an identity token only when a user session exists; jwt_token
  # stays nil for anonymous visitors.
  jwt_token = current_user.id_token if user_signed_in?
  @llm_families = LlmMetaClient::ServerResource.available_llm_families(jwt_token)
  @target_message_id = @message.id
  set_active_message_uuid(@prompt_execution.execution_id)
  @branch_from_uuid = @prompt_execution.execution_id
  render "chats/edit"
rescue StandardError => e
  # Fixed: log message previously said "#show_by_uuid" (copy-paste from a
  # sibling action), which misdirected log searches for this action.
  Rails.logger.error "Error in PromptsController#show: #{e.class} - #{e.message}\n#{e.backtrace&.join("\n")}"
  redirect_to root_path, alert: "Message not found."
end