The unofficial Go client library for the Groq Cloud API. This SDK provides idiomatic, high-performance Go access to Groq's LPU™ Inference Engine, enabling ultra-fast AI applications.
- Chat Completions: Standard request/response with support for all Groq models.
- Streaming: Real-time token streaming with full usage statistics support.
- JSON Mode: Enforce structured JSON outputs for reliable parsing.
- Tool Calling: Native support for function calling (Agentic workflows).
- Audio: Speech-to-Text (Whisper) and Text-to-Speech (TTS).
- Configurable: Custom HTTP clients, timeouts, and base URLs.
go get github.com/algolyzer/groq-go
Initialize the client with your API key. You can also configure the base URL or HTTP client if needed.
import "github.com/algolyzer/groq-go"
func main() {
// Basic initialization
client := groq.NewClient(os.Getenv("GROQ_API_KEY"))
// Advanced initialization (optional)
// client := groq.NewClient(
// os.Getenv("GROQ_API_KEY"),
// groq.WithBaseURL("[https://api.groq.com/openai/v1](https://api.groq.com/openai/v1)"),
// groq.WithHTTPClient(&http.Client{Timeout: 60 * time.Second}),
// )
}Generate a simple text response.
resp, err := client.CreateChatCompletion(context.Background(), groq.ChatCompletionRequest{
Model: "llama-3.3-70b-versatile",
Messages: []groq.ChatMessage{
{Role: groq.RoleUser, Content: "Explain quantum computing in 2 sentences."},
},
})
if err != nil {
log.Fatal(err)
}
fmt.Println(resp.Choices[0].Message.Content)Stream tokens as they are generated. Includes support for Usage Statistics at the end of the stream.
stream, err := client.CreateChatCompletionStream(context.Background(), groq.ChatCompletionRequest{
Model: "llama-3.3-70b-versatile",
Messages: []groq.ChatMessage{
{Role: groq.RoleUser, Content: "Write a haiku about code."},
},
// Request token usage stats (optional)
StreamOptions: &groq.StreamOptions{IncludeUsage: true},
})
if err != nil {
log.Fatal(err)
}
defer stream.Close()
for {
chunk, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
log.Fatal(err)
}
// Print content delta
if len(chunk.Choices) > 0 {
fmt.Print(chunk.Choices[0].Delta.Content)
}
// Check for final usage stats
if chunk.Usage != nil {
fmt.Printf("\n\n[Total Tokens: %d]\n", chunk.Usage.TotalTokens)
}
}Force the model to output valid JSON.
resp, err := client.CreateChatCompletion(context.Background(), groq.ChatCompletionRequest{
	Model: "llama-3.1-8b-instant",
	Messages: []groq.ChatMessage{
		{Role: groq.RoleSystem, Content: "You are a database api."},
		{Role: groq.RoleUser, Content: "Return a user object for John Doe."},
	},
	// Enable JSON mode
	Format: &groq.ResponseFormat{Type: "json_object"},
})
// Always check the error before touching resp — indexing resp.Choices
// after a failed call would panic.
if err != nil {
	log.Fatal(err)
}
fmt.Println(resp.Choices[0].Message.Content)
// Output: { "name": "John Doe", "id": 12345, "role": "user" }
Define tools that the model can request to call.
// 1. Define the tool (JSON Schema describes the function's parameters)
tools := []groq.Tool{
	{
		Type: "function",
		Function: groq.ToolFunction{
			Name:        "get_weather",
			Description: "Get the weather for a location",
			Parameters: map[string]interface{}{
				"type": "object",
				"properties": map[string]interface{}{
					"location": map[string]interface{}{"type": "string"},
				},
				"required": []string{"location"},
			},
		},
	},
}
// 2. Send request
resp, err := client.CreateChatCompletion(context.Background(), groq.ChatCompletionRequest{
	Model:      "llama-3.3-70b-versatile",
	Messages:   []groq.ChatMessage{{Role: groq.RoleUser, Content: "Weather in NY?"}},
	Tools:      tools,
	ToolChoice: "auto",
})
// Handle the error explicitly — discarding it and then reading
// resp.Choices[0] would panic on any API failure.
if err != nil {
	log.Fatal(err)
}
// 3. Check if model wants to call a tool
msg := resp.Choices[0].Message
if len(msg.ToolCalls) > 0 {
	fmt.Printf("Tool to call: %s\n", msg.ToolCalls[0].Function.Name)
	fmt.Printf("Arguments: %s\n", msg.ToolCalls[0].Function.Arguments)
}
Transcribe audio files using Groq's Whisper models.
resp, err := client.CreateTranscription(context.Background(), groq.AudioTranscriptionRequest{
FilePath: "meeting.m4a",
Model: "whisper-large-v3",
Language: "en",
})
if err != nil {
log.Fatal(err)
}
fmt.Println(resp.Text)Generate audio from text.
audioBytes, err := client.CreateSpeech(context.Background(), groq.CreateSpeechRequest{
	Model: "playai-tts", // or other supported TTS models
	Input: "The quick brown fox jumps over the lazy dog.",
	Voice: "autumn",
})
if err != nil {
	log.Fatal(err)
}
// Check the write error so a failed save is not silently ignored.
if err := os.WriteFile("output.mp3", audioBytes, 0644); err != nil {
	log.Fatal(err)
}
To run the examples, set your API key and run them from the repository root:
export GROQ_API_KEY="your_api_key_here"
go run examples/chat/main.go
go run examples/stream/main.go
Distributed under the MIT License. See LICENSE for more information.