go-extractor/sites/google/google.go
Steve Dudenhoeffer cb2ed10cfd
refactor: restructure API, deduplicate code, expand test coverage
- Extract shared DeferClose helper, removing 14 duplicate copies
- Rename PlayWright-prefixed types to cleaner names (BrowserOptions,
  BrowserSelection, NewBrowser, etc.)
- Rename fields: ServerAddress, RequireServer (was DontLaunchOnConnectFailure)
- Extract shared initBrowser/mergeOptions into browser_init.go,
  deduplicating ~120 lines between NewBrowser and NewInteractiveBrowser
- Remove unused locator field from document struct
- Add tests for all previously untested packages (archive, aislegopher,
  wegmans, useragents, powerball) and expand existing test suites
- Add MIGRATION.md documenting all breaking API changes

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 13:59:47 -05:00
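
The shared DeferClose helper called out in the commit message appears below as defer extractor.DeferClose(doc). Its definition is not part of this file; as a rough, hypothetical sketch of the usual shape of such a helper, assuming it takes an io.Closer and logs any Close error (the actual go-extractor implementation may handle errors differently):

package extractor

import (
	"io"
	"log/slog"
)

// DeferClose closes c and logs any error instead of dropping it, so call
// sites can write `defer DeferClose(doc)` rather than repeating the same
// error-handling boilerplate. (Hypothetical sketch, not the real source.)
func DeferClose(c io.Closer) {
	if c == nil {
		return
	}
	if err := c.Close(); err != nil {
		slog.Error("deferred close failed", "error", err)
	}
}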

package google

import (
	"context"
	"fmt"
	"net/url"

	"gitea.stevedudenhoeffer.com/steve/go-extractor"
)
// Config controls how a search is performed.
type Config struct {
	// BaseURL is the base URL for the search engine; if empty, "google.com" is used.
	BaseURL string

	// Language is the language to use for the search engine; if empty, "en" is used.
	Language string

	// Country is the country to use for the search engine; if empty, "us" is used.
	Country string
}

// DefaultConfig is the configuration used by the package-level Search.
var DefaultConfig = Config{
	BaseURL:  "google.com",
	Language: "en",
	Country:  "us",
}
// validate returns a copy of c with any empty fields set to their defaults.
func (c Config) validate() Config {
	if c.BaseURL == "" {
		c.BaseURL = "google.com"
	}
	if c.Language == "" {
		c.Language = "en"
	}
	if c.Country == "" {
		c.Country = "us"
	}
	return c
}

// Result is a single organic search result.
type Result struct {
	URL         string
	Title       string
	Description string
}
// Search opens a Google results page for query in the given browser and
// scrapes the organic results into a slice of Result.
func (c Config) Search(ctx context.Context, b extractor.Browser, query string) ([]Result, error) {
	c = c.validate()

	u, err := url.Parse(fmt.Sprintf("https://%s/search", c.BaseURL))
	if err != nil {
		return nil, fmt.Errorf("invalid url: %w", err)
	}

	vals := u.Query()
	vals.Set("q", query)

	if c.Language != "" {
		vals.Set("hl", c.Language)
	}

	if c.Country != "" {
		// Google's "cr" parameter expects values of the form "countryXX";
		// only a handful of countries are mapped here.
		country := ""
		switch c.Country {
		case "us":
			country = "countryUS"
		case "uk":
			country = "countryUK"
		case "au":
			country = "countryAU"
		case "ca":
			country = "countryCA"
		}

		if country != "" {
			vals.Set("cr", country)
		}
	}

	u.RawQuery = vals.Encode()

	doc, err := b.Open(ctx, u.String(), extractor.OpenPageOptions{})
	if err != nil {
		return nil, fmt.Errorf("failed to open url: %w", err)
	}
	defer extractor.DeferClose(doc)
	var res []Result

	// Each organic result lives in a div with class "g". These selectors are
	// tied to Google's current result markup and may break when it changes.
	err = doc.ForEach("div.g", func(s extractor.Node) error {
		var u string
		var title string
		var desc string

		// get the first link in the div
		link := s.Select("a")
		if len(link) == 0 {
			return nil
		}

		u, err := link[0].Attr("href")
		if err != nil {
			return fmt.Errorf("failed to get link: %w", err)
		}

		titles := s.Select("div > div > div a > h3")
		if len(titles) != 0 {
			title, _ = titles[0].Text()
		}

		descs := s.Select("div:nth-child(1) > div:nth-child(2) > div:nth-child(1) > span:not([class])")
		if len(descs) != 0 {
			desc, _ = descs[0].Text()
		}

		res = append(res, Result{
			URL:         u,
			Title:       title,
			Description: desc,
		})

		return nil
	})

	return res, err
}
// Search performs a search for query using DefaultConfig.
func Search(ctx context.Context, b extractor.Browser, query string) ([]Result, error) {
	return DefaultConfig.Search(ctx, b, query)
}