From 1b740252a0b4594b445b528366c891e2c90fdfe6 Mon Sep 17 00:00:00 2001
From: vyas-git
Date: Sat, 20 Dec 2025 00:14:09 +0530
Subject: [PATCH] working example as per README

---
 story-writer-chat-v2/.env.example     | 26 +++++++++++++-------------
 story-writer-chat-v2/config/config.go | 33 +++++++++++++++++++--------------
 story-writer-chat-v2/go.mod           |  5 ++---
 story-writer-chat-v2/go.sum           |  4 ++++
 story-writer-chat-v2/main.go          |  2 +-
 5 files changed, 39 insertions(+), 31 deletions(-)

diff --git a/story-writer-chat-v2/.env.example b/story-writer-chat-v2/.env.example
index cb71516..0a22343 100644
--- a/story-writer-chat-v2/.env.example
+++ b/story-writer-chat-v2/.env.example
@@ -1,20 +1,20 @@
-# OpenRouter API Configuration
-# Get your API key from: https://openrouter.ai/keys
-OPENROUTER_API_KEY=your-api-key-here
-
-# Server Configuration
-PORT=8080
-
 # LLM Provider Configuration
+# Choose one of the following configurations:
+
+# Option 1: OpenRouter (default)
 LLM_PROVIDER=openrouter
 LLM_MODEL=openai/gpt-4o-mini
+LLM_API_KEY=your-openrouter-key
 
-# Alternative Models (uncomment to use)
-# LLM_MODEL=anthropic/claude-3.5-sonnet
-# LLM_MODEL=google/gemini-pro-1.5
-# LLM_MODEL=meta-llama/llama-3.1-70b-instruct
+# Option 2: HuggingFace
+# LLM_PROVIDER=huggingface
+# LLM_MODEL=Qwen/Qwen2.5-72B-Instruct
+# LLM_API_KEY=your-huggingface-api-key
 
-# For OpenAI Direct (if not using OpenRouter)
+# Option 3: OpenAI
 # LLM_PROVIDER=openai
-# OPENAI_API_KEY=your-openai-key-here
 # LLM_MODEL=gpt-4o-mini
+# LLM_API_KEY=your-openai-key
+
+# Server Configuration (optional, defaults to 8080)
+# PORT=8080
diff --git a/story-writer-chat-v2/config/config.go b/story-writer-chat-v2/config/config.go
index 5ee837b..a4ba21e 100644
--- a/story-writer-chat-v2/config/config.go
+++ b/story-writer-chat-v2/config/config.go
@@ -7,6 +7,7 @@ import (
 	"os"
 	"time"
 
+	"github.com/joho/godotenv"
 	"github.com/kunalkushwaha/agenticgokit/core/vnext"
 )
 
@@ -20,26 +21,30 @@ type Config struct {
 
 // LoadConfig loads configuration from environment variables
 func LoadConfig() (*Config, error) {
-	apiKey := os.Getenv("HUGGINGFACE_API_KEY")
-	if apiKey == "" {
-		return nil, fmt.Errorf("HUGGINGFACE_API_KEY environment variable not set\nPlease set it with: $env:HUGGINGFACE_API_KEY=\"your-key\"")
-	}
+	// Load .env file if it exists (ignore error if file doesn't exist)
+	_ = godotenv.Load()
 
-	port := os.Getenv("PORT")
-	if port == "" {
-		port = "8080"
+	// Check for API key from LLM_API_KEY environment variable
+	apiKey := os.Getenv("LLM_API_KEY")
+	if apiKey == "" {
+		return nil, fmt.Errorf("LLM_API_KEY environment variable not set\nPlease set LLM_API_KEY in .env")
 	}
 
+	// Require LLM_PROVIDER to be explicitly set
 	provider := os.Getenv("LLM_PROVIDER")
 	if provider == "" {
-		provider = "huggingface"
+		return nil, fmt.Errorf("LLM_PROVIDER environment variable not set\nPlease set it with: $env:LLM_PROVIDER=\"your-provider\" (e.g., \"openrouter\", \"huggingface\")")
 	}
 
+	// Require LLM_MODEL to be explicitly set
 	model := os.Getenv("LLM_MODEL")
 	if model == "" {
-		// Qwen 2.5 72B is excellent at following instructions and structured formats
-		// Alternative: meta-llama/Llama-3.1-70b-Instruct, mistralai/Mistral-Large-Instruct-2411
-		model = "Qwen/Qwen2.5-72B-Instruct"
+		return nil, fmt.Errorf("LLM_MODEL environment variable not set\nPlease set it with: $env:LLM_MODEL=\"your-model\" (e.g., \"openai/gpt-4o-mini\", \"Qwen/Qwen2.5-72B-Instruct\")")
+	}
+
+	port := os.Getenv("PORT")
+	if port == "" {
+		port = "8080"
 	}
 
 	return &Config{
@@ -51,15 +56,15 @@ func LoadConfig() (*Config, error) {
 }
 
 // ValidateAPIConnection verifies the API key works by making a test request
-func ValidateAPIConnection(apiKey string) error {
+func ValidateAPIConnection(apiKey string, provider string, model string) error {
 	log.Println("🔍 Validating API connection...")
 
 	testAgent, err := vnext.QuickChatAgentWithConfig("ValidationTest", &vnext.Config{
 		Name:    "validation_test",
 		Timeout: 15 * time.Second,
 		LLM: vnext.LLMConfig{
-			Provider: "huggingface",
-			Model:    "Qwen/Qwen2.5-72B-Instruct",
+			Provider: provider,
+			Model:    model,
 			APIKey:   apiKey,
 		},
 	})
diff --git a/story-writer-chat-v2/go.mod b/story-writer-chat-v2/go.mod
index 276b42a..4de8639 100644
--- a/story-writer-chat-v2/go.mod
+++ b/story-writer-chat-v2/go.mod
@@ -2,11 +2,10 @@ module github.com/kunalkushwaha/agenticgokit/examples/vnext/story-writer-chat-v2
 
 go 1.24.1
 
-replace github.com/kunalkushwaha/agenticgokit => ../../..
-
 require (
 	github.com/gorilla/websocket v1.5.3
-	github.com/kunalkushwaha/agenticgokit v0.0.0-00010101000000-000000000000
+	github.com/joho/godotenv v1.5.1
+	github.com/kunalkushwaha/agenticgokit v0.4.7
 )
 
 require (
diff --git a/story-writer-chat-v2/go.sum b/story-writer-chat-v2/go.sum
index 24d84c5..a1c9526 100644
--- a/story-writer-chat-v2/go.sum
+++ b/story-writer-chat-v2/go.sum
@@ -31,6 +31,10 @@ github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
 github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
 github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
 github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ=
+github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
+github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
+github.com/kunalkushwaha/agenticgokit v0.4.7 h1:S4oVZwytYHZvHFNmb880WUzNgUiViA7oG5NxFvnHv9Q=
+github.com/kunalkushwaha/agenticgokit v0.4.7/go.mod h1:ycHPDvRI8HiRLNck2DazSlIVnl1z40KomAg7wKrmUdc=
 github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
 github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
 github.com/pgvector/pgvector-go v0.3.0 h1:Ij+Yt78R//uYqs3Zk35evZFvr+G0blW0OUN+Q2D1RWc=
diff --git a/story-writer-chat-v2/main.go b/story-writer-chat-v2/main.go
index 1f22078..200b906 100644
--- a/story-writer-chat-v2/main.go
+++ b/story-writer-chat-v2/main.go
@@ -21,7 +21,7 @@ func main() {
 	}
 
 	// Validate API connection
-	if err := config.ValidateAPIConnection(cfg.APIKey); err != nil {
+	if err := config.ValidateAPIConnection(cfg.APIKey, cfg.Provider, cfg.Model); err != nil {
 		log.Fatalf("❌ API validation failed: %v\nCheck your API key and network connection", err)
 	}
 	log.Println("✅ API connection validated")
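
Usage sketch (not part of the commit): the snippet below illustrates how the reworked configuration flow behaves after this patch. LoadConfig fails fast when LLM_API_KEY, LLM_PROVIDER, or LLM_MODEL is missing instead of silently defaulting to the old HuggingFace/Qwen settings, and ValidateAPIConnection now receives the provider and model from the loaded config. The import path is derived from the module path in go.mod; the environment values are placeholders, and the first call assumes no .env file is present in the working directory.

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/kunalkushwaha/agenticgokit/examples/vnext/story-writer-chat-v2/config"
)

func main() {
	// With nothing configured, LoadConfig returns an error rather than
	// falling back to hard-coded HuggingFace defaults.
	if _, err := config.LoadConfig(); err != nil {
		fmt.Println("expected error:", err)
	}

	// Normally these come from .env (picked up by godotenv.Load inside
	// LoadConfig); they are set here only to keep the sketch self-contained.
	os.Setenv("LLM_PROVIDER", "openrouter")
	os.Setenv("LLM_MODEL", "openai/gpt-4o-mini")
	os.Setenv("LLM_API_KEY", "your-openrouter-key")

	cfg, err := config.LoadConfig()
	if err != nil {
		log.Fatal(err)
	}

	// The validation probe now uses the configured provider and model,
	// mirroring the updated call in main.go above.
	if err := config.ValidateAPIConnection(cfg.APIKey, cfg.Provider, cfg.Model); err != nil {
		log.Fatalf("API validation failed: %v", err)
	}
}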