Class: Legion::TTY::Screens::Chat
- Defined in:
- lib/legion/tty/screens/chat.rb
Overview
rubocop:disable Metrics/ClassLength
Constant Summary collapse
- SLASH_COMMANDS =
%w[/help /quit /clear /model /session /cost /export /tools /dashboard /hotkeys /save /load /sessions].freeze
Instance Attribute Summary collapse
-
#message_stream ⇒ Object
readonly
Returns the value of attribute message_stream.
-
#status_bar ⇒ Object
readonly
Returns the value of attribute status_bar.
Attributes inherited from Base
Instance Method Summary collapse
- #activate ⇒ Object
- #handle_input(key) ⇒ Object
- #handle_slash_command(input) ⇒ Object
- #handle_user_message(input) ⇒ Object
-
#initialize(app, output: $stdout, input_bar: nil) ⇒ Chat
constructor
A new instance of Chat.
- #render(width, height) ⇒ Object
- #run ⇒ Object
- #running? ⇒ Boolean
- #send_to_llm(message) ⇒ Object
Methods inherited from Base
Constructor Details
#initialize(app, output: $stdout, input_bar: nil) ⇒ Chat
Returns a new instance of Chat.
20 21 22 23 24 25 26 27 28 29 30 31 |
# File 'lib/legion/tty/screens/chat.rb', line 20

# Builds the chat screen and wires up its collaborating components.
#
# @param app [Object] host application; queried for +llm_chat+ when it responds to it
# @param output [IO] stream used for rendering (defaults to $stdout)
# @param input_bar [Object, nil] injectable input bar; a default is built when nil
def initialize(app, output: $stdout, input_bar: nil)
  super(app)
  @output = output
  @message_stream = Components::MessageStream.new
  @status_bar = Components::StatusBar.new
  @running = false
  # NOTE(review): the fallback expression was stripped in the extracted source;
  # presumably a default input bar component — confirm against the repository.
  @input_bar = input_bar || Components::InputBar.new
  @llm_chat = app.respond_to?(:llm_chat) ? app.llm_chat : nil
  @token_tracker = Components::TokenTracker.new(provider: detect_provider)
  @session_store = SessionStore.new
  @session_name = 'default'
end
Instance Attribute Details
#message_stream ⇒ Object (readonly)
Returns the value of attribute message_stream.
18 19 20 |
# File 'lib/legion/tty/screens/chat.rb', line 18

# Read-only accessor for the message history component.
#
# @return [Object] value of +@message_stream+
def message_stream
  @message_stream
end
#status_bar ⇒ Object (readonly)
Returns the value of attribute status_bar.
18 19 20 |
# File 'lib/legion/tty/screens/chat.rb', line 18

# Read-only accessor for the status bar component.
#
# @return [Object] value of +@status_bar+
def status_bar
  @status_bar
end
Instance Method Details
#activate ⇒ Object
33 34 35 36 37 38 39 40 41 42 |
# File 'lib/legion/tty/screens/chat.rb', line 33

# Prepares the screen for use: marks it running, seeds the status bar from
# configuration, installs the system prompt, and posts a welcome message.
def activate
  @running = true
  cfg = safe_config
  @status_bar.update(model: cfg[:provider], session: 'default')
  setup_system_prompt
  # NOTE(review): the method name was stripped in the extracted source;
  # `add_message` matches the role:/content: signature — confirm against repo.
  @message_stream.add_message(
    role: :system,
    content: "Welcome#{", #{cfg[:name]}" if cfg[:name]}. Type /help for commands."
  )
end
#handle_input(key) ⇒ Object
107 108 109 110 111 112 113 114 115 116 117 118 |
# File 'lib/legion/tty/screens/chat.rb', line 107

# Routes a single keypress. Arrow keys scroll the message stream; anything
# else is passed through to the caller.
#
# @param key [Symbol] decoded key identifier
# @return [Symbol] :handled when consumed, :pass otherwise
def handle_input(key)
  case key
  when :up
    @message_stream.scroll_up
    :handled
  when :down
    @message_stream.scroll_down
    :handled
  else
    :pass
  end
end
#handle_slash_command(input) ⇒ Object
66 67 68 69 70 71 72 73 |
# File 'lib/legion/tty/screens/chat.rb', line 66

# Attempts to interpret +input+ as a slash command.
#
# @param input [String] raw user input
# @return [Object, nil] the dispatch result, or nil when input is not a
#   recognised slash command (so the caller can treat it as a chat message)
def handle_slash_command(input)
  return nil unless input.start_with?('/')

  cmd = input.split.first
  return nil unless SLASH_COMMANDS.include?(cmd)

  dispatch_slash(cmd, input)
end
#handle_user_message(input) ⇒ Object
75 76 77 78 79 80 |
# File 'lib/legion/tty/screens/chat.rb', line 75

# Records the user's message, opens an empty assistant message to stream
# into, sends the text to the LLM, and redraws the screen.
#
# @param input [String] the user's chat message
# NOTE(review): method name and the stream calls were stripped in the
# extracted source; reconstructed from the summary heading — confirm.
def handle_user_message(input)
  @message_stream.add_message(role: :user, content: input)
  @message_stream.add_message(role: :assistant, content: '')
  send_to_llm(input)
  render_screen
end
#render(width, height) ⇒ Object
99 100 101 102 103 104 105 |
# File 'lib/legion/tty/screens/chat.rb', line 99

# Renders the screen: the message stream on top, then a divider, then the
# status bar on the bottom row.
#
# @param width [Integer] terminal width in columns
# @param height [Integer] terminal height in rows
# @return [Array<String>] lines ready to print, top to bottom
def render(width, height)
  # NOTE(review): the local's name was stripped in the extracted source;
  # behavior is unaffected by the choice of name.
  status_line = @status_bar.render(width: width)
  divider = Theme.c(:muted, '-' * width)
  stream_height = [height - 2, 1].max
  stream_lines = @message_stream.render(width: width, height: stream_height)
  stream_lines + [divider, status_line]
end
#run ⇒ Object
48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 |
# File 'lib/legion/tty/screens/chat.rb', line 48

# Main interaction loop: render, read input, dispatch slash commands, and
# forward anything else to the LLM. Exits on EOF (nil input) or /quit.
def run
  activate
  while @running
    render_screen
    input = read_input
    break if input.nil?

    result = handle_slash_command(input)
    if result == :quit
      auto_save_session
      @running = false
      break
    elsif result.nil?
      # Not a slash command: treat non-blank input as a chat message.
      # NOTE(review): call name was stripped in the extracted source;
      # reconstructed from the #handle_user_message heading — confirm.
      handle_user_message(input) unless input.strip.empty?
    end
  end
end
#running? ⇒ Boolean
44 45 46 |
# File 'lib/legion/tty/screens/chat.rb', line 44

# Whether the chat loop is currently active.
#
# @return [Boolean] truthy while #run is looping
def running?
  @running
end
#send_to_llm(message) ⇒ Object
82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 |
# File 'lib/legion/tty/screens/chat.rb', line 82

# Sends +message+ to the configured LLM, streaming chunks into the message
# stream as they arrive. Falls back to a notice when no LLM is configured,
# and surfaces any StandardError inline rather than crashing the UI.
#
# @param message [String] the user's message to send
def send_to_llm(message)
  unless @llm_chat
    @message_stream.append_streaming(
      'LLM not configured. Use /help for commands.'
    )
    return
  end

  response = @llm_chat.ask(message) do |chunk|
    # Some chunks carry no content (e.g. control frames); skip those.
    @message_stream.append_streaming(chunk.content) if chunk.content
    render_screen
  end
  track_response_tokens(response)
rescue StandardError => e
  @message_stream.append_streaming("\n[Error: #{e.message}]")
end