# Testing Environment Configuration Template
# Copy this file to config.test.toml for testing-optimized settings
#
# This template provides settings optimized for testing scenarios:
# - Mock providers and safe defaults
# - Enhanced validation and checking
# - Test data isolation
# - CI/CD friendly configurations
# - Comprehensive testing utilities

# =============================================================================
# TESTING CORE CONFIGURATION
# =============================================================================

[core]
version = "1.0.0"
name = "provisioning-system-test"

# =============================================================================
# TESTING PATHS
# =============================================================================
# Isolated paths for testing environment

[paths]
# Testing base path - isolated from production
# Common testing locations:
# base = "/tmp/provisioning-test"        # Temporary testing
# base = "/opt/provisioning-test"        # System testing
# base = "/home/ci/provisioning-test"    # CI/CD testing
# base = "/workspace/provisioning-test"  # Container testing
base = "/tmp/provisioning-test"

# Testing-specific path overrides for isolation
kloud = "{{paths.base}}/test-infra"
providers = "{{paths.base}}/test-providers"
taskservs = "{{paths.base}}/test-taskservs"
clusters = "{{paths.base}}/test-clusters"
resources = "{{paths.base}}/test-resources"
templates = "{{paths.base}}/test-templates"
tools = "{{paths.base}}/test-tools"
core = "{{paths.base}}/test-core"

[paths.files]
# Testing configuration files
settings = "{{paths.base}}/kcl/test-settings.k"
keys = "{{paths.base}}/test-keys.yaml"
requirements = "{{paths.base}}/test-requirements.yaml"
notify_icon = "{{paths.base}}/resources/test-icon.png"

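# Example: the values above use {{paths.base}} interpolation; assuming plain string
# substitution, with base = "/tmp/provisioning-test" an entry such as
#   kloud = "{{paths.base}}/test-infra"
# resolves to
#   /tmp/provisioning-test/test-infra
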
# =============================================================================
# TESTING DEBUG CONFIGURATION
# =============================================================================
# Balanced debugging for testing visibility

[debug]
# Enable debugging for test visibility
enabled = true

# Disable metadata to reduce test noise
metadata = false

# Enable check mode by default for safe testing
check = true

# Disable remote debugging for test isolation
remote = false

# Use info level for balanced test logging
log_level = "info"

# Allow terminal features for interactive testing
no_terminal = false

# =============================================================================
# TESTING OUTPUT CONFIGURATION
# =============================================================================

[output]
# Use cat for simple output in CI/CD environments
file_viewer = "cat"

# JSON format for programmatic test validation
format = "json"

# =============================================================================
# TESTING SOPS CONFIGURATION
# =============================================================================
# Simplified SOPS for testing scenarios

[sops]
# Enable SOPS for testing encryption workflows
use_sops = true

# Testing SOPS configuration
config_path = "{{paths.base}}/.sops-test.yaml"

# Test-specific key search paths
key_search_paths = [
    "{{paths.base}}/keys/test-age.txt",
    "./test-keys/age.txt",
    "/tmp/test-keys/age.txt",
    "~/.config/sops/age/test-keys.txt"
]

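# Example: bootstrapping a throwaway key for the SOPS settings above (illustrative
# commands only, assuming the standard age and sops CLIs; the key path matches the
# first key_search_paths entry, and the public-key placeholder is yours to fill in):
#   mkdir -p /tmp/provisioning-test/keys
#   age-keygen -o /tmp/provisioning-test/keys/test-age.txt
#   # copy the printed "public key:" value into an age creation rule in .sops-test.yaml
#   sops --encrypt --age <test-age-public-key> secrets.yaml > secrets.enc.yaml
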
# =============================================================================
# TESTING RUNTIME CONFIGURATION
# =============================================================================

[taskservs]
# Testing runtime directory with cleanup
run_path = "{{paths.base}}/run/test-taskservs"

[clusters]
# Testing cluster runtime with isolation
run_path = "{{paths.base}}/run/test-clusters"

[generation]
# Testing generation directory with unique naming
dir_path = "{{paths.base}}/generated/test"
defs_file = "test-defs.toml"

# =============================================================================
# TESTING PROVIDER CONFIGURATION
# =============================================================================
# Mock and safe provider configurations for testing

[providers]
# Default to local provider for safe testing
default = "local"

# AWS Testing Configuration (mock/safe)
[providers.aws]
# Use localstack or testing endpoints
api_url = "http://localhost:4566"
auth = ""
interface = "CLI"

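# Example: the mock endpoint above assumes a LocalStack container running locally
# (illustrative commands; 4566 is LocalStack's default edge port):
#   docker run --rm -d -p 4566:4566 --name test-localstack localstack/localstack
#   aws --endpoint-url http://localhost:4566 s3 mb s3://test-bucket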

# UpCloud Testing Configuration (safe)
[providers.upcloud]
# Standard API but with testing credentials
api_url = "https://api.upcloud.com/1.3"
auth = ""
interface = "CLI"

# Local Provider for Testing
[providers.local]
# Local testing configuration
api_url = ""
auth = ""
interface = "CLI"

# =============================================================================
# TESTING ENVIRONMENT CONFIGURATIONS
# =============================================================================

# Testing environment defaults
[environments.test]
debug.enabled = true
debug.log_level = "info"
debug.check = true
debug.metadata = false
debug.remote = false
providers.default = "local"
output.format = "json"
output.file_viewer = "cat"

# CI/CD testing environment
[environments.ci]
debug.enabled = false
debug.log_level = "warn"
debug.check = true
providers.default = "local"
output.format = "json"
output.file_viewer = "cat"

# Integration testing environment
[environments.integration]
debug.enabled = true
debug.log_level = "debug"
debug.check = false
providers.default = "aws"
output.format = "yaml"

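# Example: per the usage examples at the end of this file, the active environment
# table above is selected via the PROVISIONING_ENV variable, e.g.:
#   export PROVISIONING_ENV=ci
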
# =============================================================================
# TESTING PERFORMANCE CONFIGURATION
# =============================================================================

# Performance settings optimized for testing
[performance]
# Reduced parallelism for predictable test execution
parallel_operations = 1
# Shorter timeouts for faster test feedback
timeout_seconds = 60
# Disable caching for test isolation
cache_enabled = false
# Testing cache directory (if needed)
cache_dir = "{{paths.base}}/cache/test"
# Short cache retention for testing
cache_retention_hours = 1

# =============================================================================
# TESTING SECURITY CONFIGURATION
# =============================================================================

# Security settings for testing environment
[security]
# Disable confirmation for automated testing
require_confirmation = false
# Allow sensitive data logging for test debugging
log_sensitive_data = true
# Enable strict validation for test coverage
strict_validation = true
# Disable automatic backups for testing
auto_backup = false
backup_dir = "{{paths.base}}/backups/test"
# Short backup retention for testing
backup_retention_days = 1
# Disable backup encryption for testing simplicity
backup_encryption = false
# Enable audit logging for test verification
audit_enabled = true
audit_log_path = "{{paths.base}}/logs/test-audit.log"

# =============================================================================
# TESTING MONITORING CONFIGURATION
# =============================================================================

# Testing monitoring configuration
[monitoring]
# Enable monitoring for test validation
enabled = true
# Local testing metrics endpoint
endpoint = "http://localhost:9090/metrics"
# Frequent monitoring for testing
interval = "10s"
# Health check for testing
health_check_enabled = true
health_check_port = 8081
# Local log aggregation for testing
log_endpoint = "http://localhost:3001"
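
# Example: quick smoke checks against the local endpoints above (illustrative only;
# assumes the metrics and health services are already running, and the /health path
# is an assumption - adjust it to whatever your health check actually serves):
#   curl -sf http://localhost:9090/metrics | head -n 5
#   curl -sf http://localhost:8081/health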

# Testing alerting (disabled for noise reduction)
[alerting]
# Disable production alerting in testing
enabled = false
email_enabled = false
slack_enabled = false
pagerduty_enabled = false

# =============================================================================
# TESTING DATA MANAGEMENT
# =============================================================================

# Testing data configuration
[test_data]
# Enable test data generation
enabled = true
# Test data templates
template_dir = "{{paths.base}}/test-data/templates"
# Test data output
output_dir = "{{paths.base}}/test-data/generated"
# Test data cleanup
auto_cleanup = true
cleanup_after_hours = 2

# Testing fixtures
[fixtures]
# Enable test fixtures
enabled = true
# Fixture definitions
fixture_dir = "{{paths.base}}/fixtures"
# Common test scenarios
scenarios = [
    "basic-server",
    "multi-server",
    "cluster-setup",
    "failure-recovery"
]

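# Example: a fixture layout that would match the scenarios listed above, with
# fixture_dir expanded against paths.base (hypothetical layout for illustration;
# the actual structure depends on how the fixture loader resolves scenario names):
#   /tmp/provisioning-test/fixtures/basic-server/
#   /tmp/provisioning-test/fixtures/multi-server/
#   /tmp/provisioning-test/fixtures/cluster-setup/
#   /tmp/provisioning-test/fixtures/failure-recovery/
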
# =============================================================================
# TESTING VALIDATION CONFIGURATION
# =============================================================================

# Enhanced validation for testing
[validation]
# Enable comprehensive validation
enabled = true
# Validation rules for testing
rules = [
    "syntax-check",
    "type-validation",
    "security-scan",
    "performance-check",
    "integration-test"
]
# Validation reporting
report_enabled = true
report_format = "json"
report_dir = "{{paths.base}}/validation-reports"

# Testing assertions
[assertions]
# Enable test assertions
enabled = true
# Assertion timeout
timeout_seconds = 30
# Retry configuration
max_retries = 3
retry_delay_seconds = 5

# =============================================================================
# TESTING CI/CD INTEGRATION
# =============================================================================

# CI/CD specific configuration
[cicd]
# Enable CI/CD mode
enabled = true
# CI/CD provider detection
auto_detect = true
# Supported providers
providers = ["github", "gitlab", "jenkins", "azure-devops"]
# Pipeline configuration
pipeline_timeout = 1800
parallel_jobs = 2
# Artifact management
artifacts_enabled = true
artifacts_dir = "{{paths.base}}/artifacts"

# Testing in containers
[containers]
# Container runtime for testing
runtime = "docker"
# Testing registry
registry = "localhost:5000"
# Testing namespace
namespace = "test-provisioning"
# Container cleanup
auto_cleanup = true
cleanup_timeout = 300

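# Example: the registry setting above assumes a local registry listening on port 5000
# (illustrative commands using the standard Docker registry image; the repository
# prefix below simply reuses the namespace value and is an assumption):
#   docker run -d -p 5000:5000 --name test-registry registry:2
#   docker tag provisioning:test localhost:5000/test-provisioning/provisioning:test
#   docker push localhost:5000/test-provisioning/provisioning:test
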
# =============================================================================
# TESTING MOCK CONFIGURATIONS
# =============================================================================

# Mock services for testing
[mocks]
# Enable mock services
enabled = true
# Mock service definitions
services = [
    "aws-localstack",
    "mock-upcloud",
    "test-registry",
    "mock-storage"
]
# Mock data directory
data_dir = "{{paths.base}}/mock-data"

# Simulation settings
[simulation]
# Enable simulation mode
enabled = true
# Simulation scenarios
scenarios_dir = "{{paths.base}}/simulations"
# Simulation results
results_dir = "{{paths.base}}/simulation-results"
# Simulation timeout
timeout_minutes = 30

# =============================================================================
# TESTING UTILITIES CONFIGURATION
# =============================================================================

# Test utilities
[test_utilities]
# Enable test utilities
enabled = true
# Test runner configuration
runner = "nushell"
# Test discovery
auto_discover = true
test_pattern = "*test*.nu"
# Test execution
parallel_execution = false
fail_fast = true
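
# Example: files matched by the discovery pattern above can be previewed with a
# standard shell glob (illustrative; discovery itself is handled by the test runner):
#   find . -type f -name '*test*.nu'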

# Code coverage
[coverage]
# Enable code coverage
enabled = true
# Coverage output
output_dir = "{{paths.base}}/coverage"
# Coverage format
format = "json"
# Coverage thresholds
minimum_coverage = 80

# =============================================================================
# TESTING CLEANUP CONFIGURATION
# =============================================================================

# Automatic cleanup for testing
[cleanup]
# Enable automatic cleanup
enabled = true
# Cleanup triggers
cleanup_on_exit = true
cleanup_on_failure = true
# Cleanup scope
clean_generated_files = true
clean_runtime_data = true
clean_cache = true
clean_logs = false  # Keep logs for debugging
# Cleanup schedule
schedule = "0 2 * * *"  # Daily cleanup at 2 AM

# Resource cleanup
[resource_cleanup]
# Enable resource cleanup
enabled = true
# Resource types to clean
resource_types = [
    "servers",
    "storage",
    "networks",
    "security-groups"
]
# Cleanup age threshold
max_age_hours = 24
# Protection tags
protected_tags = ["permanent", "do-not-delete"]

# =============================================================================
# TESTING ENVIRONMENT EXAMPLES
# =============================================================================
#
# Common Testing Scenarios:
# ------------------------
#
# 1. Unit Testing:
#    export PROVISIONING_ENV=test
#    ./core/nulib/provisioning validate config
#    ./core/nulib/provisioning test unit
#
# 2. Integration Testing:
#    export PROVISIONING_ENV=integration
#    ./core/nulib/provisioning server create --check
#    ./core/nulib/provisioning test integration
#
# 3. End-to-End Testing:
#    ./core/nulib/provisioning test e2e --scenario basic-server
#
# 4. Performance Testing:
#    ./core/nulib/provisioning test performance --load 100
#
# 5. Security Testing:
#    ./core/nulib/provisioning test security --scan all
#
# CI/CD Pipeline Example:
# ----------------------
#
# test-stage:
#   script:
#     - export PROVISIONING_ENV=ci
#     - ./core/nulib/provisioning validate config --strict
#     - ./core/nulib/provisioning test unit
#     - ./core/nulib/provisioning test integration --check
#     - ./core/nulib/provisioning test security
#   artifacts:
#     reports:
#       junit: test-results.xml
#     paths:
#       - coverage/
#       - validation-reports/
#
# Testing with Docker:
# -------------------
#
#   docker run --rm \
#     -v $(pwd):/workspace \
#     -e PROVISIONING_ENV=test \
#     provisioning:test \
#     ./core/nulib/provisioning test all
#
# =============================================================================
# TESTING TROUBLESHOOTING
# =============================================================================
#
# Common Testing Issues:
# ---------------------
#
# 1. Test Data Isolation:
#    - Verify paths.base points to the test directory
#    - Check test data cleanup settings
#    - Ensure proper test fixtures
#
# 2. Mock Service Issues:
#    - Verify mock services are running
#    - Check mock service configurations
#    - Validate mock data setup
#
# 3. CI/CD Integration:
#    - Check environment variable setup
#    - Verify artifact collection
#    - Validate pipeline timeout settings
#
# 4. Performance Test Issues:
#    - Check timeout configurations
#    - Verify resource limits
#    - Monitor test environment capacity
#
# 5. Security Test Failures:
#    - Review security validation rules
#    - Check compliance requirements
#    - Verify encryption settings
#
# Testing Best Practices:
# ----------------------
#
# 1. Test Isolation:
#    - Use separate test directories
#    - Clean up after each test
#    - Avoid shared state between tests
#
# 2. Test Data Management:
#    - Use fixtures for consistent data
#    - Generate test data dynamically
#    - Clean up test data regularly
#
# 3. Mock Usage:
#    - Mock external dependencies
#    - Use realistic mock data
#    - Test both success and failure scenarios
#
# 4. CI/CD Integration:
#    - Run tests in parallel when possible
#    - Collect comprehensive artifacts
#    - Set appropriate timeouts
#
# 5. Security Testing:
#    - Include security scans in pipeline
#    - Test encryption/decryption workflows
#    - Validate access controls