Added function that returns parsed document
1 file changed, 16 insertions(+), 0 deletions(-) M scraper.go
M scraper.go => scraper.go +16 -0
@@ 159,3 159,19 @@ func TableRows(url string, rows chan<- *html.Node, tableMatcher *NodeMatcher) er return nil } // New returns an HTML document rooted at the HTML node func New(url string) (*html.Node, error) { resp, err := nethttp.Get(url) defer resp.Body.Close() if err != nil { return nil, err } page, err := ioutil.ReadAll(resp.Body) if err != nil { return nil, err } return html.Parse(bytes.NewBuffer(page)) }