M cmd/gemplex/index.go => cmd/gemplex/index.go +16 -16
@@ 1,6 1,7 @@
package main
import (
+ "context"
"fmt"
"log"
"os"
@@ 8,9 9,9 @@ import (
"sync"
"time"
- "github.com/blevesearch/bleve/v2"
"git.sr.ht/~elektito/gemplex/pkg/gsearch"
"git.sr.ht/~elektito/gemplex/pkg/utils"
+ "github.com/blevesearch/bleve/v2"
)
// used to make sure loadInitialIndex, which is called by both search and index
@@ 29,26 30,19 @@ var curIdx bleve.Index
func index(done chan bool, wg *sync.WaitGroup) {
defer wg.Done()
- loadIndexOnce.Do(func() { loadInitialIndex(done) })
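+ // a cancellable context replaces the old done-channel plumbing; cancelling it
+ // interrupts both the initial index load and any in-progress reindex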
+ ctx, cancelFunc := context.WithCancel(context.Background())
+ loadIndexOnce.Do(func() { loadInitialIndex(ctx) })
loopDone := make(chan bool)
- indexDbDone := make(chan bool)
- indexing := false
go func() {
<-done
-
- if indexing {
- indexDbDone <- true
- }
-
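+ // cancelling the context makes any in-flight indexDb call return early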
+ cancelFunc()
loopDone <- true
}()
loop:
for {
- indexing = true
- indexDb(indexDbDone)
- indexing = false
+ indexDb(ctx)
select {
case <-time.After(1 * time.Hour):
@@ 60,7 54,7 @@ loop:
log.Println("[index] Done.")
}
-func loadInitialIndex(done chan bool) {
+func loadInitialIndex(ctx context.Context) {
pingFile := path.Join(Config.Index.Path, "ping.idx")
pongFile := path.Join(Config.Index.Path, "pong.idx")
@@ 139,14 133,17 @@ func loadInitialIndex(done chan bool) {
curIdx, err = gsearch.NewIndex(pingFile, "ping")
utils.PanicOnErr(err)
- err = gsearch.IndexDb(curIdx, Config, done)
+ err = gsearch.IndexDb(ctx, curIdx, Config)
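+ // a cancelled context means we are shutting down; abandon the partially built index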
+ if ctx.Err() == context.Canceled {
+ return
+ }
utils.PanicOnErr(err)
idx.Add(curIdx)
}
}
-func indexDb(done chan bool) {
+func indexDb(ctx context.Context) {
pingFile := path.Join(Config.Index.Path, "ping.idx")
pongFile := path.Join(Config.Index.Path, "pong.idx")
@@ 168,7 165,10 @@ func indexDb(done chan bool) {
newIdx, err := gsearch.NewIndex(newIdxFile, newIdxName)
utils.PanicOnErr(err)
- err = gsearch.IndexDb(newIdx, Config, done)
+ err = gsearch.IndexDb(ctx, newIdx, Config)
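+ // on shutdown, skip the swap and keep serving the current index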
+ if ctx.Err() == context.Canceled {
+ return
+ }
utils.PanicOnErr(err)
idx.Swap([]bleve.Index{newIdx}, []bleve.Index{curIdx})
M cmd/gemplex/search.go => cmd/gemplex/search.go +33 -3
@@ 2,6 2,7 @@ package main
import (
"bufio"
+ "context"
"encoding/json"
"fmt"
"log"
@@ 21,7 22,8 @@ type TypedRequest struct {
func search(done chan bool, wg *sync.WaitGroup) {
defer wg.Done()
- loadIndexOnce.Do(func() { loadInitialIndex(done) })
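+ // as in index(), a cancellable context lets shutdown interrupt the initial index load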
+ ctx, cancelFunc := context.WithCancel(context.Background())
+ loadIndexOnce.Do(func() { loadInitialIndex(ctx) })
cleanupUnixSocket()
listener, err := net.Listen("unix", Config.Search.UnixSocketPath)
@@ 30,6 32,7 @@ func search(done chan bool, wg *sync.WaitGroup) {
closing := false
go func() {
<-done
+ cancelFunc()
closing = true
listener.Close()
}()
@@ 83,6 86,8 @@ func handleConn(conn net.Conn) {
resp = handleRandImgRequest(reqLine)
case "getimg":
resp = handleGetImgRequest(reqLine)
+ case "searchimg":
+ resp = handleSearchImgRequest(reqLine)
default:
resp = errorResponse("unknown request type")
return
@@ 93,7 98,7 @@ func handleConn(conn net.Conn) {
}
func handleSearchRequest(reqLine []byte) []byte {
- var req gsearch.SearchRequest
+ var req gsearch.PageSearchRequest
req.Page = 1
err := json.Unmarshal(reqLine, &req)
if err != nil {
@@ 104,7 109,7 @@ func handleSearchRequest(reqLine []byte) []byte {
return errorResponse("no query")
}
- resp, err := gsearch.Search(req, idx)
+ resp, err := gsearch.SearchPages(req, idx)
if err != nil {
return errorResponse(err.Error())
}
@@ 176,6 181,31 @@ func handleGetImgRequest(reqLine []byte) []byte {
return jsonResp
}
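+// handleSearchImgRequest handles "searchimg" requests: it searches image alt text
+// via gsearch.SearchImages and returns the results as JSON.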
+func handleSearchImgRequest(reqLine []byte) []byte {
+ var req gsearch.ImageSearchRequest
+ req.Page = 1
+ err := json.Unmarshal(reqLine, &req)
+ if err != nil {
+ return errorResponse("bad request")
+ }
+
+ if req.Query == "" {
+ return errorResponse("no query")
+ }
+
+ resp, err := gsearch.SearchImages(req, idx)
+ if err != nil {
+ return errorResponse(err.Error())
+ }
+
+ jsonResp, err := json.Marshal(resp)
+ if err != nil {
+ return errorResponse(fmt.Sprintf("Error marshalling results: %s", err))
+ }
+
+ return jsonResp
+}
+
func errorResponse(msg string) (resp []byte) {
type errorJson struct {
Err string `json:"err"`
M cmd/gpcgi/main.go => cmd/gpcgi/main.go +184 -5
@@ 92,6 92,8 @@ func cgi(r io.Reader, w io.Writer, params Params) {
handleRandomImage(u, r, w, params)
case strings.HasPrefix(u.Path, "/image/perm/"):
handleImagePermalink(u, r, w, params)
+ case strings.HasPrefix(u.Path, "/image/search"):
+ handleImageSearch(u, r, w, params)
default:
geminiHeader(w, 51, "Not found")
}
@@ 256,7 258,7 @@ func handleSearch(u *url.URL, r io.Reader, w io.Writer, params Params) {
return
}
- var resp gsearch.SearchResponse
+ var resp gsearch.PageSearchResponse
err = json.NewDecoder(conn).Decode(&resp)
if err != nil {
log.Println("Internal error:", err)
@@ 271,16 273,16 @@ func handleSearch(u *url.URL, r io.Reader, w io.Writer, params Params) {
}
geminiHeader(w, 20, "text/gemini")
- w.Write(renderResults(resp, req))
+ w.Write(renderSearchResults(resp, req))
}
-func renderResults(resp gsearch.SearchResponse, req gsearch.SearchRequest) []byte {
+func renderSearchResults(resp gsearch.PageSearchResponse, req gsearch.PageSearchRequest) []byte {
type Page struct {
Query string
QueryEscaped string
Duration time.Duration
Title string
- Results []gsearch.SearchResult
+ Results []gsearch.PageSearchResult
TotalResults uint64
Verbose bool
Page int
@@ 374,6 376,147 @@ Found {{ .TotalResults }} result(s) in {{ .Duration }}.
return w.Bytes()
}
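+// handleImageSearch serves /image/search: it prompts for a query, forwards the
+// request to the search daemon over its unix socket, and renders the results as gemtext.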
+func handleImageSearch(u *url.URL, r io.Reader, w io.Writer, params Params) {
+ if u.RawQuery == "" {
+ geminiHeader(w, 10, "Image search query")
+ return
+ }
+
+ req, err := parseImageSearchRequest(u)
+ if err == ErrPageNotFound {
+ geminiHeader(w, 51, "Not Found")
+ return
+ } else if err == ErrBadUrl {
+ geminiHeader(w, 59, "Bad URL")
+ return
+ } else if err != nil {
+ log.Println("Internal error:", err)
+ cgiErr(w, "Internal error")
+ return
+ }
+
+ conn, err := net.Dial("unix", params.SearchDaemonSocket)
+ if err != nil {
+ log.Println("Cannot connect to search backend:", err)
+ cgiErr(w, "Cannot connect to search backend")
+ return
+ }
+
+ err = json.NewEncoder(conn).Encode(req)
+ if err != nil {
+ log.Println("Error encoding search request:", err)
+ cgiErr(w, "Internal error")
+ return
+ }
+
+ var resp gsearch.ImageSearchResponse
+ err = json.NewDecoder(conn).Decode(&resp)
+ if err != nil {
+ log.Println("Internal error:", err)
+ cgiErr(w, "Internal error")
+ return
+ }
+
+ if resp.Err != "" {
+ log.Println("Error from search daemon:", resp.Err)
+ cgiErr(w, "Internal error")
+ return
+ }
+
+ geminiHeader(w, 20, "text/gemini")
+ w.Write(renderImageSearchResults(resp, req))
+}
+
+func renderImageSearchResults(resp gsearch.ImageSearchResponse, req gsearch.ImageSearchRequest) []byte {
+ type Page struct {
+ Query string
+ QueryEscaped string
+ Duration time.Duration
+ Title string
+ Results []gsearch.ImageSearchResult
+ TotalResults uint64
+ Verbose bool
+ Page int
+ PageCount uint64
+ BaseUrl string
+ }
+
+ t := `
+{{- define "SingleResult" }}
+=> {{ permalink .ImageHash }} {{ .AltText }}
+* Fetched: {{ .FetchTime }} - Source: {{ urlhost .SourceUrl }}
+XXX {{ .AltText }}
+{{ .Image }}
+XXX
+{{ end }}
+
+{{- define "Results" }}
+ {{- range . }}
+ {{ template "SingleResult" . -}}
+ {{ end}}
+{{ end }}
+
+{{- define "Page" -}}
+# {{ .Title }}
+
+=> {{ .BaseUrl }}/image/search search
+
+Searching for: {{ .Query }}
+Found {{ .TotalResults }} result(s) in {{ .Duration }}.
+
+{{- template "Results" .Results }}
+{{- if gt .Page 1 }}
+=> {{ .BaseUrl }}/image/search/{{ dec .Page }}?{{ .QueryEscaped }} Prev Page ({{ dec .Page }} of {{ .PageCount }} pages)
+{{- end }}
+{{- if lt .Page .PageCount }}
+=> {{ .BaseUrl }}/image/search/{{ inc .Page }}?{{ .QueryEscaped }} Next Page ({{ inc .Page }} of {{ .PageCount }} pages)
+{{ end }}
+=> / Home
+{{ end -}}
+
+{{- template "Page" . }}
+`
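+ // backticks cannot appear inside the raw string above, so "XXX" stands in for the preformatted-block fence and is replaced here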
+ t = strings.Replace(t, "XXX", "```", 2)
+
+ funcMap := template.FuncMap{
+ "inc": func(n int) int { return n + 1 },
+ "dec": func(n int) int { return n - 1 },
+ "human": func(n uint64) string { return humanize.Bytes(n) },
+ "urlhost": func(ustr string) string {
+ u, err := url.Parse(ustr)
+ if err != nil {
+ return "unknown"
+ }
+ return u.Host
+ },
+ "permalink": func(ih string) string { return "/image/perm/" + ih },
+ }
+
+ baseUrl := ""
+ npages := resp.TotalResults / gsearch.PageSize
+ if resp.TotalResults%gsearch.PageSize != 0 {
+ npages += 1
+ }
+
+ tmpl := template.Must(template.New("root").Funcs(funcMap).Parse(t))
+ data := Page{
+ Query: req.Query,
+ QueryEscaped: url.QueryEscape(req.Query),
+ Duration: resp.Duration.Round(time.Millisecond / 10),
+ Title: "Gemplex Gemini Image Search",
+ Results: resp.Results,
+ TotalResults: resp.TotalResults,
+ Page: req.Page,
+ PageCount: npages,
+ BaseUrl: baseUrl,
+ }
+ var w bytes.Buffer
+ err := tmpl.Execute(&w, data)
+ utils.PanicOnErr(err)
+
+ return w.Bytes()
+}
+
func geminiHeader(w io.Writer, statusCode int, meta string) {
msg := fmt.Sprintf("%d %s\r\n", statusCode, meta)
w.Write([]byte(msg))
@@ 384,7 527,7 @@ func cgiErr(w io.Writer, msg string) {
geminiHeader(w, 42, msg)
}
-func parseSearchRequest(u *url.URL) (req gsearch.SearchRequest, err error) {
+func parseSearchRequest(u *url.URL) (req gsearch.PageSearchRequest, err error) {
// url format: [/v]/search[/page]
re := regexp.MustCompile(`(?P<verbose>/v)?/search(?:/(?P<page>\d+))?`)
m := re.FindStringSubmatch(u.Path)
@@ 423,3 566,39 @@ func parseSearchRequest(u *url.URL) (req gsearch.SearchRequest, err error) {
return
}
+
+func parseImageSearchRequest(u *url.URL) (req gsearch.ImageSearchRequest, err error) {
+ // url format: /image/search[/page]
+ re := regexp.MustCompile(`/search(?:/(?P<page>\d+))?`)
+ m := re.FindStringSubmatch(u.Path)
+ if m == nil {
+ err = ErrPageNotFound
+ return
+ }
+
+ // default value
+ req.Type = "searchimg"
+ req.Page = 1
+
+ for i, name := range re.SubexpNames() {
+ switch name {
+ case "page":
+ pageStr := m[i]
+ if pageStr != "" {
+ req.Page, err = strconv.Atoi(pageStr)
+ if err != nil {
+ err = ErrBadUrl
+ return
+ }
+ }
+ }
+ }
+
+ req.Query, err = url.QueryUnescape(u.RawQuery)
+ if err != nil {
+ err = ErrBadUrl
+ return
+ }
+
+ return
+}
M cmd/gpctl/main.go => cmd/gpctl/main.go +1 -1
@@ 309,7 309,7 @@ func handleIndexCommand(cfg *config.Config, args []string) {
index, err := gsearch.NewIndex(indexDir, indexName)
utils.PanicOnErr(err)
- err = gsearch.IndexDb(index, cfg, nil)
+ err = gsearch.IndexDb(context.Background(), index, cfg)
utils.PanicOnErr(err)
}
M pkg/gsearch/gsearch.go => pkg/gsearch/gsearch.go +204 -24
@@ 1,6 1,7 @@
package gsearch
import (
+ "context"
"database/sql"
"fmt"
"log"
@@ 22,7 23,7 @@ import (
const PageSize = 15
-type Doc struct {
+type PageDoc struct {
Title string
Content string
Lang string
@@ 34,14 35,21 @@ type Doc struct {
ContentSize uint64
}
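+// ImageDoc is the document indexed for each image that has alt text.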
+type ImageDoc struct {
+ AltText string
+ Image string
+ SourceUrl string
+ FetchTime time.Time
+}
+
type RankedSort struct {
desc bool
pageRankBytes []byte
hostRankBytes []byte
}
-type SearchRequest struct {
- // for search requests, this should be "search"
+type PageSearchRequest struct {
+ // this should be set to "search"
Type string `json:"t"`
Query string `json:"q"`
@@ 50,7 58,16 @@ type SearchRequest struct {
Verbose bool `json:"-"`
}
-type SearchResult struct {
+type ImageSearchRequest struct {
+ // this should be set to "searchimg"
+ Type string `json:"t"`
+
+ Query string `json:"q"`
+ Page int `json:"page,omitempty"`
+ HighlightStyle string `json:"-"`
+}
+
+type PageSearchResult struct {
Url string `json:"url"`
Title string `json:"title"`
Snippet string `json:"snippet"`
@@ 64,10 81,28 @@ type SearchResult struct {
Hostname string `json:"-"`
}
-type SearchResponse struct {
- TotalResults uint64 `json:"n"`
- Results []SearchResult `json:"results"`
- Duration time.Duration `json:"duration"`
+type ImageSearchResult struct {
+ ImageHash string `json:"image_id"`
+ Image string `json:"image"`
+ AltText string `json:"alt"`
+ SourceUrl string `json:"url"`
+ FetchTime time.Time `json:"fetch_time"`
+ Relevance float64 `json:"score"`
+}
+
+type PageSearchResponse struct {
+ TotalResults uint64 `json:"n"`
+ Results []PageSearchResult `json:"results"`
+ Duration time.Duration `json:"duration"`
+
+ // used by the search daemon and cgi
+ Err string `json:"err,omitempty"`
+}
+
+type ImageSearchResponse struct {
+ TotalResults uint64 `json:"n"`
+ Results []ImageSearchResult `json:"results"`
+ Duration time.Duration `json:"duration"`
// used by the search daemon and cgi
Err string `json:"err,omitempty"`
@@ 141,53 176,72 @@ func (so *RankedSort) Copy() search.SearchSort {
func NewIndex(path string, name string) (idx bleve.Index, err error) {
idxMapping := bleve.NewIndexMapping()
- docMapping := bleve.NewDocumentMapping()
+ pageMapping := bleve.NewDocumentMapping()
titleFieldMapping := bleve.NewTextFieldMapping()
- docMapping.AddFieldMappingsAt("Title", titleFieldMapping)
+ pageMapping.AddFieldMappingsAt("Title", titleFieldMapping)
contentFieldMapping := bleve.NewTextFieldMapping()
- docMapping.AddFieldMappingsAt("Content", contentFieldMapping)
+ pageMapping.AddFieldMappingsAt("Content", contentFieldMapping)
langFieldMapping := bleve.NewKeywordFieldMapping()
langFieldMapping.IncludeInAll = false
langFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("Lang", langFieldMapping)
+ pageMapping.AddFieldMappingsAt("Lang", langFieldMapping)
linksFieldMapping := bleve.NewTextFieldMapping()
- docMapping.AddFieldMappingsAt("Links", linksFieldMapping)
+ pageMapping.AddFieldMappingsAt("Links", linksFieldMapping)
pageRankFieldMapping := bleve.NewNumericFieldMapping()
pageRankFieldMapping.Index = false
pageRankFieldMapping.IncludeInAll = false
pageRankFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("PageRank", pageRankFieldMapping)
+ pageMapping.AddFieldMappingsAt("PageRank", pageRankFieldMapping)
hostRankFieldMapping := bleve.NewNumericFieldMapping()
hostRankFieldMapping.Index = false
pageRankFieldMapping.IncludeInAll = false
pageRankFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("HostRank", hostRankFieldMapping)
+ pageMapping.AddFieldMappingsAt("HostRank", hostRankFieldMapping)
kindFieldMapping := bleve.NewTextFieldMapping()
kindFieldMapping.Index = true
kindFieldMapping.IncludeInAll = false
kindFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("Kind", kindFieldMapping)
+ pageMapping.AddFieldMappingsAt("Kind", kindFieldMapping)
contentTypeFieldMapping := bleve.NewKeywordFieldMapping()
contentTypeFieldMapping.Index = true
contentTypeFieldMapping.IncludeInAll = false
contentTypeFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("ContentType", contentTypeFieldMapping)
+ pageMapping.AddFieldMappingsAt("ContentType", contentTypeFieldMapping)
contentSizeFieldMapping := bleve.NewNumericFieldMapping()
contentSizeFieldMapping.Index = true
contentSizeFieldMapping.IncludeInAll = false
contentSizeFieldMapping.IncludeTermVectors = false
- docMapping.AddFieldMappingsAt("ContentSize", contentSizeFieldMapping)
+ pageMapping.AddFieldMappingsAt("ContentSize", contentSizeFieldMapping)
+
+ idxMapping.AddDocumentMapping("Page", pageMapping)
+
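+ // image documents: alt text is the searchable field, while the image data itself is stored but not indexed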
+ imgMapping := bleve.NewDocumentMapping()
+
+ altFieldMapping := bleve.NewTextFieldMapping()
+ imgMapping.AddFieldMappingsAt("AltText", altFieldMapping)
- idxMapping.AddDocumentMapping("Page", docMapping)
+ imageFieldMapping := bleve.NewTextFieldMapping()
+ imageFieldMapping.Store = true
+ imageFieldMapping.Index = false
+ imageFieldMapping.IncludeInAll = false
+ imageFieldMapping.IncludeTermVectors = false
+ imgMapping.AddFieldMappingsAt("Image", imageFieldMapping)
+
+ fetchTimeFieldMapping := bleve.NewDateTimeFieldMapping()
+ fetchTimeFieldMapping.IncludeInAll = false
+ fetchTimeFieldMapping.DateFormat = "dateTimeOptional"
+ imgMapping.AddFieldMappingsAt("FetchTime", fetchTimeFieldMapping)
+
+ idxMapping.AddDocumentMapping("Image", imgMapping)
idx, err = bleve.New(path, idxMapping)
if err != nil {
@@ 208,7 262,25 @@ func OpenIndex(path string, name string) (idx bleve.Index, err error) {
return
}
-func IndexDb(index bleve.Index, cfg *config.Config, done chan bool) (err error) {
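+// IndexDb populates the index from the database, first pages and then images; a
+// cancelled context aborts the build and is reported through the returned error.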
+func IndexDb(ctx context.Context, index bleve.Index, cfg *config.Config) (err error) {
+ err = IndexPages(ctx, index, cfg)
+ if err != nil {
+ return
+ }
+ if ctx.Err() == context.Canceled {
+ err = ctx.Err()
+ return
+ }
+
+ err = IndexImages(ctx, index, cfg)
+ if err != nil {
+ return
+ }
+ if ctx.Err() == context.Canceled {
+ err = ctx.Err()
+ return
+ }
+
+ return
+}
+
+func IndexPages(ctx context.Context, index bleve.Index, cfg *config.Config) (err error) {
+ log.Println("Indexing pages...")
+
db, err := sql.Open("postgres", cfg.GetDbConnStr())
if err != nil {
return
@@ 238,7 310,7 @@ where u.rank is not null and h.rank is not null
batch := index.NewBatch()
loop:
for rows.Next() {
- var doc Doc
+ var doc PageDoc
var links pq.StringArray
var urlStr string
var lang sql.NullString
@@ 282,7 354,7 @@ loop:
}
select {
- case <-done:
+ case <-ctx.Done():
break loop
default:
}
@@ 301,7 373,64 @@ loop:
return
}
-func Search(req SearchRequest, idx bleve.Index) (resp SearchResponse, err error) {
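+// IndexImages indexes every image row that has non-empty alt text; only the alt
+// text is analyzed, and the image data is stored so it can be returned with search hits.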
+func IndexImages(ctx context.Context, index bleve.Index, cfg *config.Config) (err error) {
+ log.Println("Indexing images...")
+
+ db, err := sql.Open("postgres", cfg.GetDbConnStr())
+ if err != nil {
+ return
+ }
+ defer db.Close()
+
+ q := `select url, image_hash, alt, image, fetch_time from images where alt != ''`
+ rows, err := db.Query(q)
+ if err != nil {
+ return
+ }
+ defer rows.Close()
+
+ n := 1
+ batch := index.NewBatch()
+loop:
+ for rows.Next() {
+ var doc ImageDoc
+ var imageHash string
+ err = rows.Scan(&doc.SourceUrl, &imageHash, &doc.AltText, &doc.Image, &doc.FetchTime)
+ if err != nil {
+ return
+ }
+
+ batch.Index(imageHash, doc)
+ if batch.Size() >= cfg.Index.BatchSize {
+ err = index.Batch(batch)
+ if err != nil {
+ return
+ }
+ batch.Reset()
+ log.Printf("Indexing progress: %d pages indexed so far.\n", n)
+ }
+
+ select {
+ case <-ctx.Done():
+ break loop
+ default:
+ }
+
+ n++
+ }
+
+ if batch.Size() > 0 {
+ err = index.Batch(batch)
+ if err != nil {
+ return
+ }
+ }
+
+ log.Printf("Finished indexing: %d images indexed.\n", n)
+ return
+}
+
+func SearchPages(req PageSearchRequest, idx bleve.Index) (resp PageSearchResponse, err error) {
// sanity check, in case someone sends a zero-based page index
if req.Page < 1 {
err = fmt.Errorf("Invalid page number (needs to be greater than or equal to 1)")
@@ 369,7 498,7 @@ func Search(req SearchRequest, idx bleve.Index) (resp SearchResponse, err error)
// cruicially, formatted lines are not rendered in clients that do that.
snippet = " " + strings.Replace(snippet, "\n", " ", -1)
- result := SearchResult{
+ result := PageSearchResult{
Url: r.ID,
Title: r.Fields["Title"].(string),
Snippet: snippet,
@@ 385,4 514,55 @@ func Search(req SearchRequest, idx bleve.Index) (resp SearchResponse, err error)
return
}
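+// SearchImages runs a match query against image alt text and returns the stored
+// image data, source URL, and fetch time with each hit.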
+func SearchImages(req ImageSearchRequest, idx bleve.Index) (resp ImageSearchResponse, err error) {
+ // sanity check, in case someone sends a zero-based page index
+ if req.Page < 1 {
+ err = fmt.Errorf("Invalid page number (needs to be greater than or equal to 1)")
+ return
+ }
+
+ q := bleve.NewMatchQuery(req.Query)
+ q.SetField("AltText")
+
+ highlightStyle := req.HighlightStyle
+ if highlightStyle == "" {
+ highlightStyle = "gem"
+ }
+
+ s := bleve.NewSearchRequest(q)
+ s.Highlight = bleve.NewHighlightWithStyle(highlightStyle)
+ s.Fields = []string{"AltText", "Image", "FetchTime", "SourceUrl"}
+
+ s.Size = PageSize
+ s.From = (req.Page - 1) * s.Size
+
+ results, err := idx.Search(s)
+ if err != nil {
+ return
+ }
+
+ resp.TotalResults = results.Total
+ resp.Duration = results.Took
+
+ for _, r := range results.Hits {
+ fetchTimeStr := r.Fields["FetchTime"].(string)
+ fetchTime, err := time.Parse(time.RFC3339, fetchTimeStr)
+ if err != nil {
+ log.Println("WARNING: Could not parse datetime value stored in index.")
+ }
+
+ result := ImageSearchResult{
+ ImageHash: r.ID,
+ SourceUrl: r.Fields["SourceUrl"].(string),
+ Image: r.Fields["Image"].(string),
+ FetchTime: fetchTime,
+ AltText: r.Fields["AltText"].(string),
+ Relevance: r.Score,
+ }
+ resp.Results = append(resp.Results, result)
+ }
+
+ return
+}
+
var _ search.SearchSort = (*RankedSort)(nil)