Here's a straightforward example using Go's standard library. We define a struct to parse the JSON response, which gives you type safety and autocomplete in your editor. The hybridGraph field contains the most reliable data since it combines explicit OpenGraph tags with content inferred from the page HTML.
package main
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)
type OpenGraphResponse struct {
HybridGraph struct {
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
URL string `json:"url"`
SiteName string `json:"site_name"`
} `json:"hybridGraph"`
}
func main() {
appID := "YOUR_APP_ID" // Get yours at dashboard.opengraph.io
targetURL := "https://github.com"
// Build the API URL
apiURL := fmt.Sprintf(
"https://opengraph.io/api/1.1/site/%s?app_id=%s",
url.QueryEscape(targetURL),
appID,
)
resp, err := http.Get(apiURL)
if err != nil {
fmt.Printf("Request failed: %v\n", err)
return
}
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result OpenGraphResponse
if err := json.Unmarshal(body, &result); err != nil {
fmt.Printf("Parse error: %v\n", err)
return
}
fmt.Printf("Title: %s\n", result.HybridGraph.Title)
fmt.Printf("Description: %s\n", result.HybridGraph.Description)
fmt.Printf("Image: %s\n", result.HybridGraph.Image)
}Go's goroutines make it easy to fetch multiple URLs at once. This example uses a WaitGroup and channels to process a batch of URLs concurrently. Useful when you're building a link preview service or need to enrich a bunch of URLs quickly.
package main
import (
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"sync"
)
type OpenGraphResponse struct {
HybridGraph struct {
Title string `json:"title"`
} `json:"hybridGraph"`
}
func fetchOG(targetURL, appID string, wg *sync.WaitGroup, results chan<- string) {
defer wg.Done()
apiURL := fmt.Sprintf(
"https://opengraph.io/api/1.1/site/%s?app_id=%s",
url.QueryEscape(targetURL),
appID,
)
resp, err := http.Get(apiURL)
if err != nil {
results <- fmt.Sprintf("%s: error - %v", targetURL, err)
return
}
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result OpenGraphResponse
json.Unmarshal(body, &result)
results <- fmt.Sprintf("%s: %s", targetURL, result.HybridGraph.Title)
}
func main() {
appID := "YOUR_APP_ID"
urls := []string{
"https://github.com",
"https://twitter.com",
"https://linkedin.com",
}
var wg sync.WaitGroup
results := make(chan string, len(urls))
for _, u := range urls {
wg.Add(1)
go fetchOG(u, appID, &wg, results)
}
wg.Wait()
close(results)
for r := range results {
fmt.Println(r)
}
}You could spin up a headless browser and scrape pages yourself, but that means dealing with proxy rotation, JavaScript rendering, rate limiting, and a bunch of edge cases. The API handles all of that. You make a simple HTTP request and get back clean, structured JSON. Your Go service stays lean and your deployment stays simple.
Make sure you have Go installed (go.dev). Both examples declare `package main`, so save each one to its own `main.go` in a separate directory, then run it:
go run main.go