~skuzzymiglet/feef

ref: 502ad139bb816c2ea660c087326865a3d6f7a733
feef/urls.go -rw-r--r-- 1.9 KiB
502ad139 skuzzymiglet: Don't add newline to -f templates if they're empty (1 year, 5 months ago)
package main

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"net/url"
	"regexp"
	"strings"

	"github.com/gobwas/glob"
	"github.com/lithammer/fuzzysearch/fuzzy"
)

func parseURLs(r io.Reader) (urls []string) {
	// Q: should we validate the URLs here?
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		line := scanner.Text()
		// TODO: comments at ends of lines
		if strings.TrimSpace(line) != "" && !strings.HasPrefix(line, "#") {
			urls = append(urls, line)
		}
	}
	return
}
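
// A hedged usage sketch for parseURLs (not part of the original file; the
// URLs are placeholders). Blank lines and full-line "#" comments are dropped,
// and remaining lines are kept verbatim:
//
//	input := "# feeds\nhttps://example.com/feed.xml\n\nhttps://example.org/atom.xml\n"
//	urls := parseURLs(strings.NewReader(input))
//	// urls == []string{"https://example.com/feed.xml", "https://example.org/atom.xml"}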

// matchURLSpec parses a single URL specification and matches it against urls.
// Prefixes:
// ~: fuzzy-matched against the URLs
// /: regular expression
// ?: glob
// Without a prefix, the spec is parsed as a URL and returned as-is.
func matchURLSpec(spec string, urls []string) ([]string, error) {
	if len(spec) == 0 {
		return []string{}, errors.New("empty URL spec")
	}
	switch {
	case strings.HasPrefix(spec, "~"):
		if len(urls) == 0 {
			return []string{}, fmt.Errorf("fuzzy URL'%s' provided but no URLs to match against", spec)
		}
		return fuzzy.Find(strings.TrimPrefix(spec, "~"), urls), nil
	case strings.HasPrefix(spec, "/"):
		if len(urls) == 0 {
			return []string{}, fmt.Errorf("URL regex '%s' provided but no URLs to match against", spec)
		}
		re, err := regexp.Compile(strings.TrimPrefix(spec, "/"))
		if err != nil {
			return []string{}, err
		}
		var matches []string
		for _, u := range urls {
			if re.MatchString(u) {
				matches = append(matches, u)
			}
		}
		return matches, nil
	case strings.HasPrefix(spec, "?"):
		if len(urls) == 0 {
			return []string{}, fmt.Errorf("URL glob'%s' provided but no URLs to match against", spec)
		}
		g, err := glob.Compile(strings.TrimPrefix(spec, "?"))
		if err != nil {
			return []string{}, err
		}
		var matches []string
		for _, u := range urls {
			if g.Match(u) {
				matches = append(matches, u)
			}
		}
		return matches, nil
	}
	// no prefix: validate the spec as a URL and return it unchanged
	_, err := url.Parse(spec)
	if err != nil {
		return []string{}, err
	}
	return []string{spec}, nil
}
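
// exampleMatchURLSpec is an illustrative sketch added alongside this file, not
// part of the original source. It exercises the prefix forms documented above
// using placeholder URLs and only the packages already imported here.
func exampleMatchURLSpec() {
	urls := []string{
		"https://example.com/feed.xml",
		"https://example.org/atom.xml",
	}

	// "~feed" would fuzzy-match and "?*atom*" would glob; here a regexp spec
	// ("/" prefix) selects URLs ending in atom.xml.
	matches, err := matchURLSpec(`/atom\.xml$`, urls)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(matches) // [https://example.org/atom.xml]

	// With no recognised prefix the spec is only parsed as a URL and
	// returned as-is, whether or not it appears in urls.
	exact, _ := matchURLSpec("https://example.net/rss", urls)
	fmt.Println(exact) // [https://example.net/rss]
}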