Files
go-extractor/sites/google/google.go
Steve Dudenhoeffer e7b7e78796
Some checks failed
CI / vet (push) Failing after 15s
CI / build (push) Failing after 30s
CI / test (push) Failing after 36s
fix: bug fixes, test coverage, and CI workflow
- Fix Nodes.First() panic on empty slice (return nil)
- Fix ticker leak in archive.go (create once, defer Stop)
- Fix cookie path matching for empty and root paths
- Fix lost query params in google.go (u.Query().Set was discarded)
- Fix type assertion panic in useragents.go
- Fix dropped date parse error in powerball.go
- Remove unreachable dead code in megamillions.go and powerball.go
- Simplify document.go WaitForNetworkIdle, remove unused root field
- Remove debug fmt.Println calls across codebase
- Replace panic(err) with stderr+exit in all cmd/ programs
- Fix duckduckgo cmd: remove useless defer, return error on bad safesearch
- Fix archive cmd: ToConfig returns error instead of panicking
- Add 39+ unit tests across 6 new test files
- Add Gitea Actions CI workflow (build, test, vet in parallel)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 11:14:19 -05:00

150 lines
2.5 KiB
Go

package google
import (
"context"
"fmt"
"io"
"net/url"
"gitea.stevedudenhoeffer.com/steve/go-extractor"
)
// Config holds the settings for a Google search query. Zero-value
// fields are replaced with the documented defaults by validate before
// a search runs (see DefaultConfig).
type Config struct {
	// BaseURL is the base URL for the search engine, if empty "google.com" is used
	BaseURL string
	// Language is the language to use for the search engine, if empty "en" is used
	Language string
	// Country is the country to use for the search engine, if empty "us" is used
	Country string
}
// DefaultConfig is the configuration used by the package-level Search
// function: google.com, English, United States.
var DefaultConfig = Config{
	BaseURL:  "google.com",
	Language: "en",
	Country:  "us",
}
// validate returns a copy of c in which any empty field has been
// replaced by its default value (matching DefaultConfig). The receiver
// is a value, so the caller's Config is never mutated.
func (c Config) validate() Config {
	// fill writes def into *field only when the field is still empty.
	fill := func(field *string, def string) {
		if *field == "" {
			*field = def
		}
	}
	fill(&c.BaseURL, "google.com")
	fill(&c.Language, "en")
	fill(&c.Country, "us")
	return c
}
// Result is one organic search result parsed from the results page.
type Result struct {
	// URL is the href of the first link found in the result block.
	URL string
	// Title is the heading text of the result, if one was found.
	Title string
	// Description is the snippet text of the result, if one was found.
	Description string
}
func deferClose(cl io.Closer) {
if cl != nil {
_ = cl.Close()
}
}
// Search performs a Google search for query using browser b and returns
// the organic results parsed from the page. Empty Config fields fall
// back to their defaults via validate. The error wraps any failure to
// build the URL, open the page, or read a result link's href.
//
// Fix: the original shadowed the outer `u *url.URL` with a `var u string`
// inside the ForEach closure (and then re-bound it with `u, err :=`),
// which is confusing and error-prone; locals are renamed so no shadowing
// occurs.
func (c Config) Search(ctx context.Context, b extractor.Browser, query string) ([]Result, error) {
	c = c.validate()

	searchURL, err := url.Parse(fmt.Sprintf("https://%s/search", c.BaseURL))
	if err != nil {
		return nil, fmt.Errorf("invalid url: %w", err)
	}

	vals := searchURL.Query()
	vals.Set("q", query)
	// After validate these fields are never empty; the guards are kept
	// as cheap defensive checks in case validate's defaults change.
	if c.Language != "" {
		vals.Set("hl", c.Language)
	}
	if c.Country != "" {
		// Google's "cr" parameter takes countryXX codes; only a known
		// subset is mapped, and unmapped countries are simply omitted.
		var country string
		switch c.Country {
		case "us":
			country = "countryUS"
		case "uk":
			country = "countryUK"
		case "au":
			country = "countryAU"
		case "ca":
			country = "countryCA"
		}
		if country != "" {
			vals.Set("cr", country)
		}
	}
	searchURL.RawQuery = vals.Encode()

	doc, err := b.Open(ctx, searchURL.String(), extractor.OpenPageOptions{})
	if err != nil {
		return nil, fmt.Errorf("failed to open url: %w", err)
	}
	defer deferClose(doc)

	var res []Result
	err = doc.ForEach("div.g", func(s extractor.Node) error {
		// Each organic result contains at least one anchor; skip
		// containers without links (layout wrappers, ad blocks).
		links := s.Select("a")
		if len(links) == 0 {
			return nil
		}
		href, err := links[0].Attr("href")
		if err != nil {
			return fmt.Errorf("failed to get link: %w", err)
		}

		// Title and description are best-effort: missing nodes leave
		// the fields empty rather than failing the whole search.
		var title string
		if titles := s.Select("div > div > div a > h3"); len(titles) != 0 {
			title, _ = titles[0].Text()
		}

		var desc string
		if descs := s.Select("div:nth-child(1) > div:nth-child(2) > div:nth-child(1) > span:not([class])"); len(descs) != 0 {
			desc, _ = descs[0].Text()
		}

		res = append(res, Result{
			URL:         href,
			Title:       title,
			Description: desc,
		})
		return nil
	})
	return res, err
}
// Search runs a Google search for query using the DefaultConfig
// settings (google.com, English, United States).
func Search(ctx context.Context, b extractor.Browser, query string) ([]Result, error) {
	cfg := DefaultConfig
	return cfg.Search(ctx, b, query)
}