# provisioning/core/nulib/lib_provisioning/ai/lib.nu

# AI Integration Library for Provisioning System
# Provides AI capabilities for infrastructure automation
use std
use ../utils/settings.nu load_settings
# Per-provider API defaults: endpoint, model, and how the API key is sent.
# "generic" targets any OpenAI-compatible server (e.g. a local Ollama).
export const AI_PROVIDERS = {
  openai: {
    default_endpoint: "https://api.openai.com/v1"
    default_model: "gpt-4"
    auth_header: "Authorization"   # key sent as "Authorization: Bearer <key>"
    auth_prefix: "Bearer "
  }
  claude: {
    default_endpoint: "https://api.anthropic.com/v1"
    default_model: "claude-3-5-sonnet-20241022"
    auth_header: "x-api-key"       # Anthropic uses a bare key, no prefix
    auth_prefix: ""
  }
  generic: {
    default_endpoint: "http://localhost:11434/v1"
    default_model: "llama2"
    auth_header: "Authorization"
    auth_prefix: "Bearer "
  }
}
# Get AI configuration from settings.
# Returns a record with every expected key: defaults are merged UNDER the
# user's [ai] section, so a partially specified configuration no longer
# crashes consumers that read e.g. $config.max_tokens directly.
export def get_ai_config [] {
  let defaults = {
    enabled: false
    provider: "openai"
    max_tokens: 2048
    temperature: 0.3
    timeout: 30
    enable_template_ai: true
    enable_query_ai: true
    enable_webhook_ai: false
  }
  let settings = (load_settings)
  if "ai" not-in $settings.data {
    return $defaults
  }
  # User-provided values override defaults; missing keys keep their default.
  $defaults | merge $settings.data.ai
}
# Check if AI is enabled AND an API key is available for the configured
# provider. Previously any of the three env keys enabled AI even when it did
# not match the active provider (e.g. claude + OPENAI_API_KEY), which made
# build_headers send an empty credential.
export def is_ai_enabled [] {
  let config = (get_ai_config)
  if not $config.enabled {
    return false
  }
  # Same provider -> env-var mapping used by build_headers.
  let api_key = match $config.provider {
    "openai" => $env.OPENAI_API_KEY?
    "claude" => $env.ANTHROPIC_API_KEY?
    _ => $env.LLM_API_KEY?
  }
  $api_key != null
}
# Get provider-specific configuration.
# Unknown providers fall back to the OpenAI-compatible "generic" entry,
# matching the `_ =>` arms used elsewhere in this module; previously
# `get $provider` raised an error for any name outside AI_PROVIDERS.
export def get_provider_config [provider: string] {
  if $provider in ($AI_PROVIDERS | columns) {
    $AI_PROVIDERS | get $provider
  } else {
    $AI_PROVIDERS.generic
  }
}
# Build API request headers for the configured provider.
# The API key is read from the environment (never from settings):
#   openai -> OPENAI_API_KEY, claude -> ANTHROPIC_API_KEY, else LLM_API_KEY.
export def build_headers [config: record] {
  let provider_config = (get_provider_config $config.provider)
  let api_key = match $config.provider {
    "openai" => $env.OPENAI_API_KEY?
    "claude" => $env.ANTHROPIC_API_KEY?
    _ => $env.LLM_API_KEY?
  }
  let auth_value = $provider_config.auth_prefix + ($api_key | default "")
  let headers = {
    "Content-Type": "application/json"
    ($provider_config.auth_header): $auth_value
  }
  # Anthropic's Messages API rejects requests that lack an explicit
  # anthropic-version header; other providers need no extra header.
  if $config.provider == "claude" {
    $headers | merge {"anthropic-version": "2023-06-01"}
  } else {
    $headers
  }
}
# Build the full API endpoint URL: the configured api_endpoint (or the
# provider's default base URL) followed by the request path.
export def build_endpoint [config: record, path: string] {
  let defaults = (get_provider_config $config.provider)
  let base = ($config.api_endpoint? | default $defaults.default_endpoint)
  $"($base)($path)"
}
# Make AI API request
# POSTs `payload` as JSON to the provider endpoint at `path`, using headers
# (auth + content type) derived from `config`. Returns the parsed response.
export def ai_request [
config: record
path: string
payload: record
] {
let headers = (build_headers $config)
let url = (build_endpoint $config $path)
# NOTE(review): recent Nushell expects a duration for --max-time; here
# ($config.timeout * 1000) produces a plain int, which suggests the code
# targets a Nushell version taking milliseconds — confirm against the
# pinned Nushell release before changing.
http post $url --headers $headers --max-time ($config.timeout * 1000) $payload
}
# Generate a chat completion via the configured provider.
# Returns the completion text, or a human-readable error string when AI is
# disabled or the response has an unexpected shape.
# Fix: Anthropic's /messages endpoint rejects a "system" role inside
# `messages`; the system prompt must be sent as a top-level `system` field.
export def ai_complete [
  prompt: string
  --system_prompt: string = ""
  --max_tokens: int
  --temperature: float
] {
  let config = (get_ai_config)
  if not (is_ai_enabled) {
    return "AI is not enabled or configured. Please set OPENAI_API_KEY, ANTHROPIC_API_KEY, or LLM_API_KEY environment variable and enable AI in settings."
  }
  # Flag values override settings; model falls back to the provider default.
  let model = ($config.model? | default (get_provider_config $config.provider).default_model)
  let tokens = ($max_tokens | default $config.max_tokens)
  let temp = ($temperature | default $config.temperature)
  if $config.provider == "claude" {
    mut payload = {
      model: $model
      messages: [{role: "user", content: $prompt}]
      max_tokens: $tokens
      temperature: $temp
    }
    if not ($system_prompt | is-empty) {
      # Top-level `system` field, per the Anthropic Messages API.
      $payload = ($payload | merge {system: $system_prompt})
    }
    let response = (ai_request $config "/messages" $payload)
    if "content" in $response and ($response.content | length) > 0 {
      $response.content.0.text
    } else {
      "Invalid response from Claude API"
    }
  } else {
    # OpenAI-compatible chat format: system prompt is a leading message.
    let messages = if ($system_prompt | is-empty) {
      [{role: "user", content: $prompt}]
    } else {
      [
        {role: "system", content: $system_prompt}
        {role: "user", content: $prompt}
      ]
    }
    let payload = {
      model: $model
      messages: $messages
      max_tokens: $tokens
      temperature: $temp
    }
    let response = (ai_request $config "/chat/completions" $payload)
    if "choices" in $response and ($response.choices | length) > 0 {
      $response.choices.0.message.content
    } else {
      "Invalid response from OpenAI-compatible API"
    }
  }
}
# Generate an infrastructure template (KCL) from a natural-language
# description. Returns the generated KCL text, or a notice when the
# template-AI feature is disabled in settings.
export def ai_generate_template [
  description: string
  template_type: string = "server"
] {
  # Check the feature flag before doing any work.
  if not (get_ai_config).enable_template_ai {
    return "AI template generation is disabled"
  }
  let sys = $"You are an infrastructure automation expert. Generate KCL configuration files for cloud infrastructure based on natural language descriptions.
Template Type: ($template_type)
Available Providers: AWS, UpCloud, Local
Available Services: Kubernetes, containerd, Cilium, Ceph, PostgreSQL, Gitea, HAProxy
Generate valid KCL code that follows these patterns:
- Use proper KCL schema definitions
- Include provider-specific configurations
- Add appropriate comments
- Follow existing naming conventions
- Include security best practices
Return only the KCL configuration code, no explanations."
  ai_complete $description --system_prompt $sys
}
# Answer a natural-language infrastructure query. An optional context record
# is serialized to JSON and prepended to the query. Returns the model's
# response, or a notice when query-AI is disabled in settings.
export def ai_process_query [
  query: string
  context: record = {}
] {
  # Check the feature flag before doing any work.
  if not (get_ai_config).enable_query_ai {
    return "AI query processing is disabled"
  }
  let sys = $"You are a cloud infrastructure assistant. Help users query and understand their infrastructure state.
Available Infrastructure Context:
- Servers, clusters, task services
- AWS, UpCloud, local providers
- Kubernetes deployments
- Storage, networking, compute resources
Convert natural language queries into actionable responses. If the query requires specific data, request the appropriate provisioning commands.
Be concise and practical. Focus on infrastructure operations and management."
  let full_query = if ($context | is-empty) {
    $query
  } else {
    $"Context: ($context | to json)\n\nQuery: ($query)"
  }
  ai_complete $full_query --system_prompt $sys
}
# Handle an inbound webhook/chat message. The user id and channel are
# embedded in the system prompt so replies can reference them. Returns the
# model's reply, or a notice when webhook-AI is disabled in settings.
export def ai_process_webhook [
  message: string
  user_id: string = "unknown"
  channel: string = "webhook"
] {
  # Check the feature flag before doing any work.
  if not (get_ai_config).enable_webhook_ai {
    return "AI webhook processing is disabled"
  }
  let sys = $"You are a cloud infrastructure assistant integrated via webhook/chat.
Help users with:
- Infrastructure provisioning and management
- Server operations and troubleshooting
- Kubernetes cluster management
- Service deployment and configuration
Respond concisely for chat interfaces. Provide actionable commands when possible.
Use the provisioning CLI format: ./core/nulib/provisioning <command>
Current user: ($user_id)
Channel: ($channel)"
  ai_complete $message --system_prompt $sys
}
# Validate the AI configuration; returns {valid: bool, issues: list<string>}.
# Fix: API keys live in environment variables (see build_headers), not in a
# settings `api_key` field, so the old `$config.api_key?` check always
# reported "API key not configured" even with a valid key exported.
export def validate_ai_config [] {
  let config = (get_ai_config)
  mut issues = []
  if $config.enabled {
    # Same provider -> env-var mapping used by build_headers.
    let api_key = match $config.provider {
      "openai" => $env.OPENAI_API_KEY?
      "claude" => $env.ANTHROPIC_API_KEY?
      _ => $env.LLM_API_KEY?
    }
    if ($api_key == null) {
      $issues = ($issues | append "API key not configured")
    }
    if $config.provider not-in ($AI_PROVIDERS | columns) {
      $issues = ($issues | append $"Unsupported provider: ($config.provider)")
    }
    if $config.max_tokens < 1 {
      $issues = ($issues | append "max_tokens must be positive")
    }
    if $config.temperature < 0.0 or $config.temperature > 1.0 {
      $issues = ($issues | append "temperature must be between 0.0 and 1.0")
    }
  }
  {
    valid: ($issues | is-empty)
    issues: $issues
  }
}
# Test AI connectivity with a tiny completion request.
# Returns {success, message} and, on success, the model's response.
# Fix: an HTTP/network failure previously propagated an error out of this
# command; it is now caught and reported as success: false.
export def test_ai_connection [] {
  if not (is_ai_enabled) {
    return {
      success: false
      message: "AI is not enabled or configured"
    }
  }
  try {
    let response = (ai_complete "Test connection - respond with 'OK'" --max_tokens 10)
    {
      success: true
      message: "AI connection test completed"
      response: $response
    }
  } catch {|err|
    {
      success: false
      message: $"AI connection test failed: ($err.msg)"
    }
  }
}