Skip to main content

Installation

The official Go SDK is maintained in the Evocrawl monorepo at apps/go-sdk. To install the Evocrawl Go SDK, run:
go get github.com/firecrawl/firecrawl/apps/go-sdk
Requires Go 1.23 or later.

Usage

  1. Get an API key from evocrawl.com
  2. Set the API key as an environment variable named FIRECRAWL_API_KEY, or pass it with option.WithAPIKey(...)
Here is a quick example using the current SDK API surface:
package main

import (
	"context"
	"fmt"
	"log"

	firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
	"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)

// main demonstrates the SDK quickstart flow: construct a client, scrape a
// single page, then run a small crawl. Any failure exits via log.Fatal.
func main() {
	// Create a client (reads FIRECRAWL_API_KEY from environment)
	client, err := firecrawl.NewClient()
	if err != nil {
		log.Fatal(err)
	}

	// Or provide the API key directly.
	// NOTE: this replaces the client created above — both constructions are
	// shown for illustration; real code would use exactly one of them.
	client, err = firecrawl.NewClient(
		option.WithAPIKey("fc-your-api-key"),
	)
	if err != nil {
		log.Fatal(err)
	}

	ctx := context.Background()

	// Scrape a single page, requesting markdown output only.
	doc, err := client.Scrape(ctx, "https://evocrawl.com", &firecrawl.ScrapeOptions{
		Formats: []string{"markdown"},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(doc.Markdown)

	// Crawl a website, capped at 5 pages. Limit is an optional field
	// (*int), hence the firecrawl.Int pointer helper.
	job, err := client.Crawl(ctx, "https://evocrawl.com", &firecrawl.CrawlOptions{
		Limit: firecrawl.Int(5),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("Crawled pages: %d\n", len(job.Data))
}

Scraping a URL

To scrape a single URL, use the Scrape method.
doc, err := client.Scrape(ctx, "https://evocrawl.com", &firecrawl.ScrapeOptions{
	Formats:         []string{"markdown", "html"},
	OnlyMainContent: firecrawl.Bool(true),
	WaitFor:         firecrawl.Int(5000),
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(doc.Markdown)
fmt.Println(doc.Metadata["title"])

JSON Extraction

Extract structured JSON using JsonOptions via the Scrape endpoint:
doc, err := client.Scrape(ctx, "https://example.com/product", &firecrawl.ScrapeOptions{
	Formats: []string{"json"},
	JsonOptions: &firecrawl.JsonOptions{
		Prompt: "Extract the product name and price",
		Schema: map[string]interface{}{
			"type": "object",
			"properties": map[string]interface{}{
				"name":  map[string]interface{}{"type": "string"},
				"price": map[string]interface{}{"type": "number"},
			},
		},
	},
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(doc.JSON)

Crawling a Website

To crawl a website and wait for completion, use Crawl.
job, err := client.Crawl(ctx, "https://evocrawl.com", &firecrawl.CrawlOptions{
	Limit:             firecrawl.Int(50),
	MaxDiscoveryDepth: firecrawl.Int(3),
	ScrapeOptions: &firecrawl.ScrapeOptions{
		Formats: []string{"markdown"},
	},
})
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Status: %s\n", job.Status)
fmt.Printf("Progress: %d/%d\n", job.Completed, job.Total)

for _, page := range job.Data {
	fmt.Println(page.Metadata["sourceURL"])
}

Start a Crawl

Start a job without waiting using StartCrawl.
resp, err := client.StartCrawl(ctx, "https://evocrawl.com", &firecrawl.CrawlOptions{
	Limit: firecrawl.Int(100),
})
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Job ID: %s\n", resp.ID)

Checking Crawl Status

Check crawl progress with GetCrawlStatus.
status, err := client.GetCrawlStatus(ctx, resp.ID)
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Status: %s\n", status.Status)
fmt.Printf("Progress: %d/%d\n", status.Completed, status.Total)

Cancelling a Crawl

Cancel a running crawl with CancelCrawl.
result, err := client.CancelCrawl(ctx, resp.ID)
if err != nil {
	log.Fatal(err)
}

fmt.Println(result)

Mapping a Website

Discover links on a site using Map.
mapData, err := client.Map(ctx, "https://evocrawl.com", &firecrawl.MapOptions{
	Limit:             firecrawl.Int(100),
	Search:            firecrawl.String("blog"),
	IncludeSubdomains: firecrawl.Bool(true),
})
if err != nil {
	log.Fatal(err)
}

for _, link := range mapData.Links {
	fmt.Println(link["url"], "-", link["title"])
}

Searching the Web

Search with optional search settings using Search.
results, err := client.Search(ctx, "firecrawl web scraping", &firecrawl.SearchOptions{
	Limit: firecrawl.Int(10),
	ScrapeOptions: &firecrawl.ScrapeOptions{
		Formats: []string{"markdown"},
	},
})
if err != nil {
	log.Fatal(err)
}

for _, result := range results.Web {
	fmt.Println(result["title"], "-", result["url"])
}

Batch Scraping

Scrape multiple URLs in parallel using BatchScrape.
urls := []string{
	"https://evocrawl.com",
	"https://evocrawl.com/blog",
}

job, err := client.BatchScrape(ctx, urls, &firecrawl.BatchScrapeOptions{
	ScrapeOptions: &firecrawl.ScrapeOptions{
		Formats: []string{"markdown"},
	},
})
if err != nil {
	log.Fatal(err)
}

for _, doc := range job.Data {
	fmt.Println(doc.Markdown)
}

Agent

Run an AI-powered agent with Agent.
status, err := client.Agent(ctx, &firecrawl.AgentOptions{
	Prompt: "Find the pricing plans for Evocrawl and compare them",
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(status.Data)
With a JSON schema for structured output:
status, err := client.Agent(ctx, &firecrawl.AgentOptions{
	Prompt: "Extract pricing plan details",
	URLs:   []string{"https://evocrawl.com"},
	Schema: map[string]interface{}{
		"type": "object",
		"properties": map[string]interface{}{
			"plans": map[string]interface{}{
				"type": "array",
				"items": map[string]interface{}{
					"type": "object",
					"properties": map[string]interface{}{
						"name":  map[string]interface{}{"type": "string"},
						"price": map[string]interface{}{"type": "string"},
					},
				},
			},
		},
	},
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(status.Data)

Usage & Metrics

Check concurrency and remaining credits:
concurrency, err := client.GetConcurrency(ctx)
if err != nil {
	log.Fatal(err)
}
fmt.Printf("Concurrency: %d/%d\n", concurrency.Concurrency, concurrency.MaxConcurrency)

credits, err := client.GetCreditUsage(ctx)
if err != nil {
	log.Fatal(err)
}
fmt.Printf("Remaining credits: %d\n", credits.RemainingCredits)

Browser

The Go SDK includes Browser Sandbox helpers.

Create a Session

session, err := client.Browser(ctx, &firecrawl.BrowserOptions{
	TTL:           firecrawl.Int(300),
	StreamWebView: firecrawl.Bool(true),
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(session.ID)
fmt.Println(session.CDPUrl)
fmt.Println(session.LiveViewURL)

Execute Code

result, err := client.BrowserExecute(ctx, session.ID,
	`await page.goto("https://example.com"); console.log(await page.title());`,
	&firecrawl.BrowserExecuteParams{
		Language: "node",
		Timeout:  firecrawl.Int(60),
	},
)
if err != nil {
	log.Fatal(err)
}

fmt.Println(result.Stdout)
fmt.Println(*result.ExitCode)

Scrape-Bound Interactive Session

Use a scrape job ID to run follow-up browser code in the same replayed context:
  • Interact(...) runs code in the scrape-bound browser session (and initializes it on first use).
  • StopInteractiveBrowser(...) explicitly stops the interactive session when you are done.
scrapeJobID := "550e8400-e29b-41d4-a716-446655440000"

execResp, err := client.Interact(ctx, scrapeJobID, "console.log(page.url())", &firecrawl.InteractParams{
	Language: "node",
	Timeout:  firecrawl.Int(60),
})
if err != nil {
	log.Fatal(err)
}

fmt.Println(execResp.Stdout)

deleteResp, err := client.StopInteractiveBrowser(ctx, scrapeJobID)
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Deleted: %v\n", deleteResp.Success)

List & Close Sessions

active, err := client.ListBrowsers(ctx, "active")
if err != nil {
	log.Fatal(err)
}

for _, s := range active.Sessions {
	fmt.Printf("%s - %s\n", s.ID, s.Status)
}

closed, err := client.DeleteBrowser(ctx, session.ID)
if err != nil {
	log.Fatal(err)
}

fmt.Printf("Closed: %v\n", closed.Success)

Configuration

firecrawl.NewClient() accepts functional options:
| Option | Type | Default | Description |
|---|---|---|---|
| option.WithAPIKey | string | FIRECRAWL_API_KEY env var | Your Evocrawl API key |
| option.WithAPIURL | string | https://api.evocrawl.com (or FIRECRAWL_API_URL) | API base URL |
| option.WithTimeout | time.Duration | 5 * time.Minute | HTTP client timeout |
| option.WithMaxRetries | int | 3 | Automatic retries for transient failures |
| option.WithBackoffFactor | float64 | 0.5 | Exponential backoff factor in seconds |
| option.WithHTTPClient | *http.Client | Built from timeout | Pre-configured HTTP client instance |
| option.WithHeader | string, string | — | Add an extra header to all requests |
import (
	"net/http"
	"time"

	firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
	"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)

client, err := firecrawl.NewClient(
	option.WithAPIKey("fc-your-api-key"),
	option.WithAPIURL("https://api.evocrawl.com"),
	option.WithTimeout(5 * time.Minute),
	option.WithMaxRetries(3),
	option.WithBackoffFactor(0.5),
)

Custom HTTP Client

You can pass a pre-configured *http.Client to control transport settings, proxy configuration, TLS settings, and more. When provided, the WithTimeout setting is ignored in favor of the client’s own configuration.
import (
	"crypto/tls"
	"net"
	"net/http"
	"time"

	firecrawl "github.com/firecrawl/firecrawl/apps/go-sdk"
	"github.com/firecrawl/firecrawl/apps/go-sdk/option"
)

transport := &http.Transport{
	TLSClientConfig: &tls.Config{MinVersion: tls.VersionTLS12},
	DialContext: (&net.Dialer{
		Timeout: 10 * time.Second,
	}).DialContext,
}

custom := &http.Client{
	Transport: transport,
	Timeout:   60 * time.Second,
}

client, err := firecrawl.NewClient(
	option.WithAPIKey("fc-your-api-key"),
	option.WithHTTPClient(custom),
)

Context Support

All methods accept a context.Context as the first argument for cancellation and deadline control:
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()

doc, err := client.Scrape(ctx, "https://example.com", nil)

Error Handling

The SDK uses typed errors under the firecrawl package.
import "errors"

doc, err := client.Scrape(ctx, "https://example.com", nil)
if err != nil {
	var authErr *firecrawl.AuthenticationError
	var rateErr *firecrawl.RateLimitError
	var timeoutErr *firecrawl.JobTimeoutError
	var fcErr *firecrawl.EvocrawlError

	switch {
	case errors.As(err, &authErr):
		fmt.Println("Auth failed:", authErr.Message)
	case errors.As(err, &rateErr):
		fmt.Println("Rate limited:", rateErr.Message)
	case errors.As(err, &timeoutErr):
		fmt.Printf("Job %s timed out after %ds\n", timeoutErr.JobID, timeoutErr.TimeoutSeconds)
	case errors.As(err, &fcErr):
		fmt.Printf("Error %d: %s\n", fcErr.StatusCode, fcErr.Message)
	default:
		fmt.Println("Unexpected error:", err)
	}
}

Retry Logic

The SDK automatically retries transient failures:
  • Retried: 408, 409, 5xx errors, and connection failures
  • Not retried: 401, 429, and other 4xx errors
  • Backoff: Exponential backoff with configurable factor

Pointer Helpers

The SDK provides convenience functions for optional pointer fields:
firecrawl.Bool(true)     // *bool
firecrawl.Int(50)        // *int
firecrawl.Int64(1000)    // *int64
firecrawl.String("test") // *string
firecrawl.Float64(0.5)   // *float64
Are you an AI agent that needs an Evocrawl API key? See evocrawl.com/agent-onboarding/SKILL.md for automated onboarding instructions.