Add OpenSearch and SearchPage functionality for DuckDuckGo
Introduced the `OpenSearch` method and the `SearchPage` interface to streamline search operations and allow additional results to be loaded dynamically. Updated dependencies and modified the DuckDuckGo CLI to use them.
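
At a glance, the new API is used roughly like this. This is a minimal sketch based on the diff below; the `printTwoPages` helper is hypothetical, and the `extractor.Browser` and `duckduckgo.Config` are assumed to already exist (in the CLI they come from flags):

```go
package example

import (
	"context"
	"fmt"
	"time"

	"gitea.stevedudenhoeffer.com/steve/go-extractor"
	"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
)

// printTwoPages mirrors the CLI change below: open a search, print the first
// batch of results, ask the page to load more, wait briefly, and print again.
func printTwoPages(ctx context.Context, b extractor.Browser, c duckduckgo.Config, query string) error {
	search, err := c.OpenSearch(ctx, b, query)
	if err != nil {
		return fmt.Errorf("failed to open search: %w", err)
	}
	defer search.Close()

	fmt.Println("Results:", search.GetResults())

	if err := search.LoadMore(); err != nil {
		return fmt.Errorf("failed to load more: %w", err)
	}

	// Crude wait for the additional results to render, as the CLI does.
	time.Sleep(2 * time.Second)

	fmt.Println("Results:", search.GetResults())
	return nil
}
```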
@@ -3,11 +3,11 @@ package main
 import (
 	"context"
 	"fmt"
-	"github.com/urfave/cli/v3"
 	"io"
 	"os"
 	"strings"
 
+	"github.com/urfave/cli/v3"
+	"time"
 
 	"gitea.stevedudenhoeffer.com/steve/go-extractor/cmd/browser/pkg/browser"
 	"gitea.stevedudenhoeffer.com/steve/go-extractor/sites/duckduckgo"
@@ -58,6 +58,7 @@ func deferClose(cl io.Closer) {
 func main() {
 	var flags []cli.Flag
 
 	flags = append(flags, browser.Flags...)
+	flags = append(flags, Flags...)
 
 	cli := &cli.Command{
@@ -81,13 +82,24 @@ func main() {
 				return fmt.Errorf("failed to create browser: %w", err)
 			}
 
-			res, err := c.Search(ctx, b, query)
+			search, err := c.OpenSearch(ctx, b, query)
 			if err != nil {
-				return fmt.Errorf("failed to search: %w", err)
+				return fmt.Errorf("failed to open search: %w", err)
 			}
 
-			fmt.Println(res)
+			defer deferClose(search)
+
+			res := search.GetResults()
+			fmt.Println("Results:", res)
+
+			err = search.LoadMore()
+			if err != nil {
+				return fmt.Errorf("failed to load more: %w", err)
+			}
+
+			time.Sleep(2 * time.Second)
+			res = search.GetResults()
+			fmt.Println("Results:", res)
 
 			return nil
 		},
@@ -77,6 +77,21 @@ func deferClose(cl io.Closer) {
 	}
 }
 
+func (c Config) OpenSearch(ctx context.Context, b extractor.Browser, query string) (SearchPage, error) {
+	u := c.ToSearchURL(query)
+
+	slog.Info("searching", "url", u, "query", query, "config", c, "browser", b)
+	doc, err := b.Open(ctx, u.String(), extractor.OpenPageOptions{})
+	if err != nil {
+		if doc != nil {
+			_ = doc.Close()
+		}
+
+		return nil, fmt.Errorf("failed to open url: %w", err)
+	}
+
+	return searchPage{doc}, nil
+}
+
 func (c Config) Search(ctx context.Context, b extractor.Browser, query string) ([]Result, error) {
 	u := c.ToSearchURL(query)
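
The existing `Config.Search` helper (context lines above) is left untouched by this commit. For illustration only, a one-shot search could be layered on top of the new `OpenSearch` roughly as follows; this `searchOnce` helper is hypothetical and is not how the package implements `Search` (imports as in the sketch near the top):

```go
// searchOnce sketches the relationship between OpenSearch and a one-shot
// search: open the results page, read the first batch of results, close it.
func searchOnce(ctx context.Context, c duckduckgo.Config, b extractor.Browser, query string) ([]duckduckgo.Result, error) {
	page, err := c.OpenSearch(ctx, b, query)
	if err != nil {
		return nil, err
	}
	defer page.Close()

	return page.GetResults(), nil
}
```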
sites/duckduckgo/page.go (new file, 68 lines)
@@ -0,0 +1,68 @@
+package duckduckgo
+
+import (
+	"fmt"
+	"gitea.stevedudenhoeffer.com/steve/go-extractor"
+	"io"
+	"log/slog"
+)
+
+type SearchPage interface {
+	io.Closer
+	GetResults() []Result
+	LoadMore() error
+}
+
+type searchPage struct {
+	doc extractor.Document
+}
+
+func (s searchPage) GetResults() []Result {
+	var res []Result
+	var err error
+
+	err = s.doc.ForEach(`article[id^="r1-"]`, func(n extractor.Node) error {
+		var r Result
+
+		links := n.Select(`a[href][target="_self"]`)
+
+		if len(links) == 0 {
+			return nil
+		}
+
+		r.URL, err = links[0].Attr(`href`)
+
+		if err != nil {
+			return fmt.Errorf("failed to get link: %w", err)
+		}
+
+		titles := n.Select("h2")
+
+		if len(titles) != 0 {
+			r.Title, _ = titles[0].Text()
+		}
+
+		descriptions := n.Select("span > span")
+
+		if len(descriptions) != 0 {
+			r.Description, _ = descriptions[0].Text()
+		}
+
+		res = append(res, r)
+
+		return nil
+	})
+
+	return res
+}
+
+func (s searchPage) LoadMore() error {
+	return s.doc.ForEach(`button#more-results`, func(n extractor.Node) error {
+		slog.Info("clicking load more", "node", n)
+		return n.Click()
+	})
+}
+
+func (s searchPage) Close() error {
+	return s.doc.Close()
+}
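
For reference, the `Result` type populated by `GetResults` is defined elsewhere in the package and is not part of this diff; judging from the assignments above, it carries at least these string fields (a sketch of its shape, not the actual definition):

```go
// Hypothetical shape of duckduckgo.Result, inferred from GetResults above;
// the real definition lives elsewhere in the package and may differ.
type Result struct {
	URL         string // href of the first result link in the article
	Title       string // text of the result's h2, if present
	Description string // text of the first "span > span", if present
}
```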