Class: Ace::LLM::Providers::CLI::PiClient

Inherits:
Organisms::BaseClient
  • Object
show all
Includes:
CliArgsSupport
Defined in:
lib/ace/llm/providers/cli/pi_client.rb

Overview

Client for interacting with the Pi CLI. Provides access to multiple AI providers through Pi’s unified platform, with support for skill command rewriting.

Constant Summary collapse

API_BASE_URL =
"https://pi.dev"
DEFAULT_GENERATION_CONFIG =
{}.freeze
DEFAULT_MODEL =
"zai/glm-4.7"

Class Method Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(model: nil, **options) ⇒ PiClient

Returns a new instance of PiClient.



32
33
34
35
36
37
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 32

# Builds a new Pi CLI client.
#
# @param model [String, nil] model identifier; falls back to DEFAULT_MODEL
# @param options [Hash] remaining options (e.g. :generation_config) kept for later calls
def initialize(model: nil, **options)
  @options = options
  # NOTE(review): a fresh mutable hash is assigned here rather than the frozen
  # DEFAULT_GENERATION_CONFIG constant — presumably so callers may mutate it; confirm.
  @generation_config = options[:generation_config] || {}
  @model = model || DEFAULT_MODEL
  @skill_name_reader = Molecules::SkillNameReader.new
end

Class Method Details

.provider_nameObject



26
27
28
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 26

# Identifier under which this client is registered.
#
# @return [String] the provider key
def self.provider_name
  "pi"
end

Instance Method Details

#build_interactive_invocation(messages, **options) ⇒ Object



84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 84

# Assembles everything needed to launch Pi in interactive mode, without executing it.
#
# @param messages [Array<Hash>] conversation messages
# @param options [Hash] invocation options (:working_dir, :subprocess_env, ...)
# @return [Hash] :command, :env, :working_dir and the final :prompt
def build_interactive_invocation(messages, **options)
  validate_pi_availability!

  base_prompt = format_messages_as_prompt(messages)
  combined_prompt, system_prompt = build_full_prompt(base_prompt, options)

  env = options[:subprocess_env]
  dir = Atoms::ExecutionContext.resolve_working_dir(
    working_dir: options[:working_dir],
    subprocess_env: env
  )
  # Skill commands are rewritten relative to the resolved working directory.
  combined_prompt = rewrite_skill_commands(combined_prompt, working_dir: dir)

  command = build_pi_interactive_command(combined_prompt, options, system_prompt: system_prompt)

  {
    command: command,
    env: env,
    working_dir: dir,
    prompt: combined_prompt
  }
end

#generate(messages, **options) ⇒ Hash

Generate a response from the LLM

Parameters:

  • messages (Array<Hash>)

    Conversation messages

  • options (Hash)

    Generation options

Returns:

  • (Hash)

    Response with text and metadata



47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 47

# Generate a response from the LLM via the Pi CLI.
#
# @param messages [Array<Hash>] conversation messages
# @param options [Hash] generation options
# @return [Hash] response with text and metadata
def generate(messages, **options)
  validate_pi_availability!

  base_prompt = format_messages_as_prompt(messages)
  combined_prompt, system_prompt = build_full_prompt(base_prompt, options)

  env = options[:subprocess_env]
  dir = Atoms::ExecutionContext.resolve_working_dir(
    working_dir: options[:working_dir],
    subprocess_env: env
  )
  # Skill commands are rewritten relative to the resolved working directory.
  combined_prompt = rewrite_skill_commands(combined_prompt, working_dir: dir)

  command = build_pi_command(combined_prompt, options, system_prompt: system_prompt)
  stdout, stderr, status = execute_pi_command(command, working_dir: dir, options: options)

  parse_pi_response(stdout, stderr, status, combined_prompt, options)
rescue => e
  # Any StandardError raised during CLI execution is translated by handle_pi_error.
  handle_pi_error(e)
end

#interactive_supported?Boolean

Returns:

  • (Boolean)


80
81
82
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 80

# Whether this client can build interactive invocations.
#
# @return [Boolean] always true for the Pi CLI
def interactive_supported?
  true
end

#list_modelsObject

List available Pi models



68
69
70
71
72
73
74
75
76
77
78
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 68

# List available Pi models.
#
# @return [Array<Hash>] one entry per model with :id, :name, :description and :context_size
def list_models
  catalog = [
    ["zai/glm-4.7", "GLM 4.7", "ZAI default model", 128_000],
    ["anthropic/claude-opus-4-6", "Claude Opus 4.6", "Anthropic flagship", 200_000],
    ["anthropic/claude-sonnet-4-5", "Claude Sonnet 4.5", "Anthropic balanced", 200_000],
    ["anthropic/claude-haiku-4-5", "Claude Haiku 4.5", "Anthropic fast", 200_000],
    ["google-gemini-cli/gemini-2.5-pro", "Gemini 2.5 Pro", "Google advanced", 1_000_000],
    ["google-gemini-cli/gemini-2.5-flash", "Gemini 2.5 Flash", "Google fast", 1_000_000],
    ["openai-codex/gpt-5.2", "GPT 5.2", "OpenAI model", 128_000]
  ]
  catalog.map do |id, name, description, context_size|
    {id: id, name: name, description: description, context_size: context_size}
  end
end

#needs_credentials?Boolean

Returns:

  • (Boolean)


39
40
41
# File 'lib/ace/llm/providers/cli/pi_client.rb', line 39

# The Pi CLI manages its own authentication, so this client collects none.
#
# @return [Boolean] always false
def needs_credentials?
  false
end