~tsileo/blobstash

3bd2ac0cc88eecf523c1907be7619aa29eef2302 — Thomas Sileo 3 months ago d6248c4
server: remove git server
60 files changed, 6 insertions(+), 174879 deletions(-)

M go.mod
M go.sum
M pkg/apps/apps.go
D pkg/gitserver/gitserver.go
D pkg/gitserver/lua/lua.go
M pkg/server/server.go
D vendor/github.com/src-d/go-oniguruma/.travis.yml
D vendor/github.com/src-d/go-oniguruma/LICENSE
D vendor/github.com/src-d/go-oniguruma/README.md
D vendor/github.com/src-d/go-oniguruma/chelper.c
D vendor/github.com/src-d/go-oniguruma/chelper.h
D vendor/github.com/src-d/go-oniguruma/constants.go
D vendor/github.com/src-d/go-oniguruma/go.mod
D vendor/github.com/src-d/go-oniguruma/quotemeta.go
D vendor/github.com/src-d/go-oniguruma/regex.go
D vendor/github.com/toqueteos/trie/LICENSE.txt
D vendor/github.com/toqueteos/trie/README.md
D vendor/github.com/toqueteos/trie/go.mod
D vendor/github.com/toqueteos/trie/trie.go
D vendor/github.com/xeonx/timeago/.travis.yml
D vendor/github.com/xeonx/timeago/LICENSE
D vendor/github.com/xeonx/timeago/README.md
D vendor/github.com/xeonx/timeago/timeago.go
D vendor/gopkg.in/src-d/enry.v1/.gitignore
D vendor/gopkg.in/src-d/enry.v1/.travis.yml
D vendor/gopkg.in/src-d/enry.v1/CONTRIBUTING.md
D vendor/gopkg.in/src-d/enry.v1/DCO
D vendor/gopkg.in/src-d/enry.v1/LICENSE
D vendor/gopkg.in/src-d/enry.v1/MAINTAINERS
D vendor/gopkg.in/src-d/enry.v1/Makefile
D vendor/gopkg.in/src-d/enry.v1/README.md
D vendor/gopkg.in/src-d/enry.v1/classifier.go
D vendor/gopkg.in/src-d/enry.v1/common.go
D vendor/gopkg.in/src-d/enry.v1/data/alias.go
D vendor/gopkg.in/src-d/enry.v1/data/commit.go
D vendor/gopkg.in/src-d/enry.v1/data/content.go
D vendor/gopkg.in/src-d/enry.v1/data/doc.go
D vendor/gopkg.in/src-d/enry.v1/data/documentation.go
D vendor/gopkg.in/src-d/enry.v1/data/extension.go
D vendor/gopkg.in/src-d/enry.v1/data/filename.go
D vendor/gopkg.in/src-d/enry.v1/data/frequencies.go
D vendor/gopkg.in/src-d/enry.v1/data/heuristics.go
D vendor/gopkg.in/src-d/enry.v1/data/interpreter.go
D vendor/gopkg.in/src-d/enry.v1/data/mimeType.go
D vendor/gopkg.in/src-d/enry.v1/data/rule/rule.go
D vendor/gopkg.in/src-d/enry.v1/data/type.go
D vendor/gopkg.in/src-d/enry.v1/data/vendor.go
D vendor/gopkg.in/src-d/enry.v1/enry.go
D vendor/gopkg.in/src-d/enry.v1/internal/tokenizer/tokenize.go
D vendor/gopkg.in/src-d/enry.v1/regex/oniguruma.go
D vendor/gopkg.in/src-d/enry.v1/regex/standard.go
D vendor/gopkg.in/src-d/enry.v1/utils.go
D vendor/gopkg.in/toqueteos/substring.v1/.gitignore
D vendor/gopkg.in/toqueteos/substring.v1/.travis.yml
D vendor/gopkg.in/toqueteos/substring.v1/LICENSE
D vendor/gopkg.in/toqueteos/substring.v1/README.md
D vendor/gopkg.in/toqueteos/substring.v1/bytes.go
D vendor/gopkg.in/toqueteos/substring.v1/lib.go
D vendor/gopkg.in/toqueteos/substring.v1/string.go
M vendor/modules.txt
M go.mod => go.mod +0 -5
@@ 6,7 6,6 @@ require (
	a4.io/gluarequire2 v0.0.0-20200222094423-7528d5a10bc1
	a4.io/go/indieauth v1.0.0
	a4.io/ssse v0.0.0-20181202155639-1949828a8689
	bazil.org/fuse v0.0.0-20200415052832-70bd89b671a2 // indirect
	github.com/alecthomas/chroma v0.7.2 // indirect
	github.com/aws/aws-sdk-go v1.30.9
	github.com/blevesearch/segment v0.9.0


@@ 15,7 14,6 @@ require (
	github.com/dustin/go-humanize v1.0.0
	github.com/e3b0c442/warp v0.6.1
	github.com/evanphx/json-patch v4.5.0+incompatible
	github.com/fxamacker/cbor v1.5.1 // indirect
	github.com/golang/protobuf v1.4.0 // indirect
	github.com/golang/snappy v0.0.1
	github.com/gorilla/context v1.1.1


@@ 24,7 22,6 @@ require (
	github.com/gorilla/sessions v1.2.0
	github.com/hashicorp/golang-lru v0.5.4
	github.com/inconshreveable/log15 v0.0.0-20200109203555-b30bc20e4fd1
	github.com/mitchellh/go-ps v1.0.0 // indirect
	github.com/mvdan/xurls v1.1.0 // indirect
	github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
	github.com/reiver/go-porterstemmer v1.0.1


@@ 35,7 32,6 @@ require (
	github.com/syndtr/goleveldb v1.0.1-0.20190923125748-758128399b1d
	github.com/unrolled/secure v1.0.7
	github.com/vmihailenco/msgpack v4.0.4+incompatible
	github.com/xeonx/timeago v1.0.0-rc4
	github.com/yuin/goldmark v1.1.30
	github.com/yuin/gopher-lua v0.0.0-20191220021717-ab39c6098bdb
	github.com/zpatrick/rbac v0.0.0-20180829190353-d2c4f050cf28


@@ 43,7 39,6 @@ require (
	golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e // indirect
	golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
	gopkg.in/inconshreveable/log15.v2 v2.0.0-20200109203555-b30bc20e4fd1
	gopkg.in/src-d/enry.v1 v1.7.3
	gopkg.in/src-d/go-git.v4 v4.13.1
	gopkg.in/yaml.v2 v2.2.8
	willnorris.com/go/microformats v1.0.0

M go.sum => go.sum +2 -22
@@ 40,8 40,6 @@ bazil.org/fuse v0.0.0-20180421153158-65cc252bf669 h1:FNCRpXiquG1aoyqcIWVFmpTSKVc
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
bazil.org/fuse v0.0.0-20200117225306-7b5117fecadc h1:utDghgcjE8u+EBjHOgYT+dJPcnDF05KqWMBcjuJy510=
bazil.org/fuse v0.0.0-20200117225306-7b5117fecadc/go.mod h1:FbcW6z/2VytnFDhZfumh8Ss8zxHE6qpMP5sHTRe0EaM=
bazil.org/fuse v0.0.0-20200415052832-70bd89b671a2 h1:BSFd7txKmf/El2sF/nCZvaNNm8/6FRF/tZtoTXWmv3Q=
bazil.org/fuse v0.0.0-20200415052832-70bd89b671a2/go.mod h1:FbcW6z/2VytnFDhZfumh8Ss8zxHE6qpMP5sHTRe0EaM=
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=


@@ 81,8 79,6 @@ github.com/aws/aws-sdk-go v1.29.7 h1:mwe1Bls/BsB3hB3I9CtUIWSpe1u3wdPcwdvtD9lkzsU
github.com/aws/aws-sdk-go v1.29.7/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
github.com/aws/aws-sdk-go v1.29.22 h1:3WmsCj3C30l6/4f50mPkDZoTPWSvaRCjcVJOWdCJoIE=
github.com/aws/aws-sdk-go v1.29.22/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
github.com/aws/aws-sdk-go v1.30.7 h1:IaXfqtioP6p9SFAnNfsqdNczbR5UNbYqvcZUSsCAdTY=
github.com/aws/aws-sdk-go v1.30.7/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/aws/aws-sdk-go v1.30.9 h1:DntpBUKkchINPDbhEzDRin1eEn1TG9TZFlzWPf0i8to=
github.com/aws/aws-sdk-go v1.30.9/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
github.com/blevesearch/segment v0.0.0-20160915185041-762005e7a34f h1:kqbi9lqXLLs+zfWlgo1PIiRQ86n33K1JKotjj4rSYOg=


@@ 134,8 130,6 @@ github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/e3b0c442/warp v0.6.0 h1:2GSXT7T/YN7WgYQbjMCHKHo+tDLFJIPopM7Kd8egOQI=
github.com/e3b0c442/warp v0.6.0/go.mod h1:F7cBsflowLSoMt9xwSqoh9f7YwLJEGwkT8In9wp/Rmo=
github.com/e3b0c442/warp v0.6.1 h1:B0cFj7u7y371b/oILTXR3LSnu9R9edDORwWRA9QieNg=
github.com/e3b0c442/warp v0.6.1/go.mod h1:pI39WXOdQwVZVP3TYgv6SN995Txh5RyDXebPibH1Rg8=
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712 h1:aaQcKT9WumO6JEJcRyTqFVq4XUZiUcKR2/GI31TOcz8=


@@ 154,10 148,6 @@ github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjr
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fxamacker/cbor v1.5.0 h1:idAiyeNSq/jeG9FPbCLVZLFJjsxP+g40a3UrXFapumw=
github.com/fxamacker/cbor v1.5.0/go.mod h1:UjdWSysJckWsChYy9I5zMbkGvK4xXDR+LmDb8kPGYgA=
github.com/fxamacker/cbor v1.5.1 h1:XjQWBgdmQyqimslUh5r4tUGmoqzHmBFQOImkWGi2awg=
github.com/fxamacker/cbor v1.5.1/go.mod h1:3aPGItF174ni7dDzd6JZ206H8cmr4GDNBGpPa971zsU=
github.com/fxamacker/cbor/v2 v2.2.0 h1:6eXqdDDe588rSYAi1HfZKbx6YYQO4mxQ9eC6xYpU/JQ=
github.com/fxamacker/cbor/v2 v2.2.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo=
github.com/gliderlabs/ssh v0.1.1 h1:j3L6gSLQalDETeEg/Jg0mGY0/y/N6zI2xX1978P0Uqw=


@@ 179,8 169,6 @@ github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4 h1:87PNWwrRvUSnqS4dlcBU/ftvOIBep4sYuBLlh6rX2wk=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.5 h1:F768QJ1E9tib+q5Sc8MkdJi1RxLTbRcTf8LJV56aRls=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=


@@ 202,6 190,7 @@ github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=


@@ 388,7 377,6 @@ github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2 h1:GnOzE5fEFN3b2zDhJJABEofdb51uMRNb8eqIVtdducs=
github.com/syndtr/goleveldb v0.0.0-20181128100959-b001fa50d6b2/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0=
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
github.com/syndtr/goleveldb v1.0.1-0.20190923125748-758128399b1d h1:gZZadD8H+fF+n9CmNhYL1Y0dJB+kLOmKd7FbPJLeGHs=
github.com/syndtr/goleveldb v1.0.1-0.20190923125748-758128399b1d/go.mod h1:9OrXJhf154huy1nPWmuSrkgjPUtUNhA+Zmy+6AESzuA=
github.com/toqueteos/trie v0.0.0-20150530104557-56fed4a05683 h1:ej8ns+4aeQO+mm9VIzwnJElkqR0Vs6kTfIcvgyJFoMY=


@@ 415,7 403,6 @@ github.com/vmihailenco/msgpack v4.0.1+incompatible h1:RMF1enSPeKTlXrXdOcqjFUElyw
github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI=
github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/x448/float16 v0.8.3/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xanzy/ssh-agent v0.2.0 h1:Adglfbi5p9Z0BmK2oKU9nTG+zKfniSfnaMYB+ULd+Ro=


@@ 437,10 424,6 @@ github.com/yuin/goldmark v1.1.23 h1:eTodJ8hwEUvwXhb9qxQNuL/q1d+xMQClrXR4mdvV7gs=
github.com/yuin/goldmark v1.1.23/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.25 h1:isv+Q6HQAmmL2Ofcmg8QauBmDPlUUnSoNhEcC940Rds=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.28 h1:3Ksz4BbKZVlaGbkXzHxoazZzASQKsfUuOZPr5CNxnC4=
github.com/yuin/goldmark v1.1.28/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.29 h1:NgInEI7XcFG1LV4mebFHBvGfgQs5Na9wrEkP6cBaAfc=
github.com/yuin/goldmark v1.1.29/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI=
github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark-highlighting v0.0.0-20191202084645-78f32c8dd6d5 h1:QbH7ca1qtgZHrzvcVAEoiJIwBqrXxMOfHYfwZIniIK0=


@@ 474,8 457,6 @@ golang.org/x/crypto v0.0.0-20200221170553-0f24fbd83dfb h1:Bg7BRk6M/6/zfhJrglNmi/
golang.org/x/crypto v0.0.0-20200221170553-0f24fbd83dfb/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4 h1:QmwruyY+bKbDDL0BaglrbZABEali68eoMFhTZpCjYVA=
golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200406173513-056763e48d71 h1:DOmugCavvUtnUD114C1Wh+UgTgQZ4pMLzXxi1pSt+/Y=
golang.org/x/crypto v0.0.0-20200406173513-056763e48d71/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=


@@ 545,8 526,6 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200409092240-59c9f1ba88fa h1:mQTN3ECqfsViCNBgq+A40vdwhkGykrrQlYe3mPj6BoU=
golang.org/x/sys v0.0.0-20200409092240-59c9f1ba88fa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=


@@ 555,6 534,7 @@ golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.3.0 h1:FBSsiFRMz3LBeXIomRnVzrQwSDj4ibvcRexLG0LZGQk=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=

M pkg/apps/apps.go => pkg/apps/apps.go +3 -8
@@ 16,7 16,7 @@ import (
	humanize "github.com/dustin/go-humanize"
	"github.com/gorilla/mux"
	log "github.com/inconshreveable/log15"
	"github.com/yuin/gopher-lua"
	lua "github.com/yuin/gopher-lua"
	git "gopkg.in/src-d/go-git.v4"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"


@@ 30,8 30,6 @@ import (
	"a4.io/blobstash/pkg/extra"
	"a4.io/blobstash/pkg/filetree"
	filetreeLua "a4.io/blobstash/pkg/filetree/lua"
	"a4.io/blobstash/pkg/gitserver"
	gitserverLua "a4.io/blobstash/pkg/gitserver/lua"
	"a4.io/blobstash/pkg/httputil"
	"a4.io/blobstash/pkg/hub"
	kvLua "a4.io/blobstash/pkg/kvstore/lua"


@@ 40,7 38,7 @@ import (
	"a4.io/blobstash/pkg/webauthn"
	"a4.io/gluapp"
	"a4.io/go/indieauth"
	"github.com/hashicorp/golang-lru"
	lru "github.com/hashicorp/golang-lru"
	"github.com/robfig/cron"
)



@@ 51,7 49,6 @@ type Apps struct {
	apps            map[string]*App
	config          *config.Config
	sess            *session.Session
	gs              *gitserver.GitServer
	ft              *filetree.FileTree
	bs              *blobstore.BlobStore
	docstore        *docstore.DocStore


@@ 296,7 293,6 @@ func (apps *Apps) newApp(appConf *config.AppConfig, conf *config.Config) (*App, 
				filetreeLua.Setup(L, apps.ft, apps.bs, apps.kvs)
				docstoreLua.Setup(L, apps.docstore)
				kvLua.Setup(L, apps.kvs, context.TODO())
				gitserverLua.Setup(L, apps.gs)
				// setup "apps"
				setup(L, apps)
				extra.Setup(L)


@@ 393,7 389,7 @@ func (app *App) serve(ctx context.Context, p string, w http.ResponseWriter, req 
}

// New initializes the Apps manager
func New(logger log.Logger, conf *config.Config, sess *session.Session, wa *webauthn.WebAuthn, bs *blobstore.BlobStore, kvs store.KvStore, ft *filetree.FileTree, ds *docstore.DocStore, gs *gitserver.GitServer, chub *hub.Hub, hostWhitelister func(...string)) (*Apps, error) {
func New(logger log.Logger, conf *config.Config, sess *session.Session, wa *webauthn.WebAuthn, bs *blobstore.BlobStore, kvs store.KvStore, ft *filetree.FileTree, ds *docstore.DocStore, chub *hub.Hub, hostWhitelister func(...string)) (*Apps, error) {
	if conf.SecretKey == "" {
		return nil, fmt.Errorf("missing secret_key in config")
	}


@@ 403,7 399,6 @@ func New(logger log.Logger, conf *config.Config, sess *session.Session, wa *weba
		apps:            map[string]*App{},
		ft:              ft,
		log:             logger,
		gs:              gs,
		bs:              bs,
		config:          conf,
		wa:              wa,

D pkg/gitserver/gitserver.go => pkg/gitserver/gitserver.go +0 -1244
@@ 1,1244 0,0 @@
package gitserver // import "a4.io/blobstash/pkg/gitserver"

import (
	"archive/tar"
	"bytes"
	"compress/gzip"
	"context"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"path/filepath"
	"strings"
	"time"

	"github.com/gorilla/mux"
	log "github.com/inconshreveable/log15"
	"github.com/restic/chunker"
	"github.com/vmihailenco/msgpack"
	enry "gopkg.in/src-d/enry.v1"
	git "gopkg.in/src-d/go-git.v4"
	gconfig "gopkg.in/src-d/go-git.v4/config"
	"gopkg.in/src-d/go-git.v4/plumbing"
	gindex "gopkg.in/src-d/go-git.v4/plumbing/format/index"
	"gopkg.in/src-d/go-git.v4/plumbing/object"
	"gopkg.in/src-d/go-git.v4/plumbing/protocol/packp"
	"gopkg.in/src-d/go-git.v4/plumbing/storer"
	"gopkg.in/src-d/go-git.v4/plumbing/transport"
	"gopkg.in/src-d/go-git.v4/plumbing/transport/server"
	gstorage "gopkg.in/src-d/go-git.v4/storage"

	"a4.io/blobstash/pkg/auth"
	"a4.io/blobstash/pkg/blob"
	"a4.io/blobstash/pkg/config"
	"a4.io/blobstash/pkg/filetree/writer"
	"a4.io/blobstash/pkg/hashutil"
	"a4.io/blobstash/pkg/httputil"
	"a4.io/blobstash/pkg/hub"
	"a4.io/blobstash/pkg/perms"
	"a4.io/blobstash/pkg/stash/store"
	"a4.io/blobstash/pkg/vkv"
)

var remoteMaster = "refs/remotes/origin/master"

type GitServer struct {
	kvStore   store.KvStore
	blobStore store.BlobStore

	conf *config.Config

	hub *hub.Hub

	log log.Logger
}

// New initializes the `DocStoreExt`
func New(logger log.Logger, conf *config.Config, kvStore store.KvStore, blobStore store.BlobStore, chub *hub.Hub) (*GitServer, error) {
	logger.Debug("init")
	return &GitServer{
		conf:      conf,
		kvStore:   kvStore,
		blobStore: blobStore,
		hub:       chub,
		log:       logger,
	}, nil
}

// Close closes all the open DB files.
func (gs *GitServer) Close() error {
	return nil
}

// RegisterRoute registers all the HTTP handlers for the extension
func (gs *GitServer) Register(r *mux.Router, root *mux.Router, basicAuth func(http.Handler) http.Handler) {
	r.Handle("/", basicAuth(http.HandlerFunc(gs.rootHandler)))
	r.Handle("/{ns}", basicAuth(http.HandlerFunc(gs.nsHandler)))
	r.Handle("/{ns}/{repo}/config", basicAuth(http.HandlerFunc(gs.gitRepoConfigHandler)))
	r.Handle("/{ns}/{repo}/_backup", basicAuth(http.HandlerFunc(gs.gitCloneOrPullHandler)))
	r.Handle("/{ns}/{repo}/_tgz", basicAuth(http.HandlerFunc(gs.gitRepoTgzHandler)))
	r.Handle("/{ns}/{repo}", basicAuth(http.HandlerFunc(gs.gitRepoHandler)))
	root.Handle("/git/{ns}/{repo}.git/info/refs", basicAuth(http.HandlerFunc(gs.gitInfoRefsHandler)))
	root.Handle("/git/{ns}/{repo}.git/{service}", basicAuth(http.HandlerFunc(gs.gitServiceHandler)))
}

type storage struct {
	ns, name  string
	kvStore   store.KvStore
	blobStore store.BlobStore
	cloneMode bool
	tMode     bool
	chunker   *chunker.Chunker
	buf       []byte
}

func newStorage(ns, name string, blobStore store.BlobStore, kvStore store.KvStore) *storage {
	return &storage{
		ns:        ns,
		name:      name,
		kvStore:   kvStore,
		blobStore: blobStore,
		chunker:   chunker.New(bytes.NewReader(nil), writer.Pol),
		buf:       make([]byte, 8*1024*1024),
	}
}

func (s *storage) Load(ep *transport.Endpoint) (storer.Storer, error) {
	fmt.Printf("ep=%+v\n", ep)
	return s, nil
}

func rewriteKey(key string) string {
	return strings.Replace(key, "~", "/", -1)
}

func (s *storage) key(prefix, key string) string {
	// `/` is an illegal character for a key in the kvstore, and `~` is an illegal character for git branches
	key = strings.Replace(key, "/", "~", -1)
	return fmt.Sprintf("_git:%s:%s!%s!%s", s.ns, s.name, prefix, key)
}

func (s *storage) Module(n string) (gstorage.Storer, error) {
	return nil, nil
}

func (s *storage) SetShallow(hashes []plumbing.Hash) error {
	panic("should never happen")
}

func (s *storage) Shallow() ([]plumbing.Hash, error) {
	return []plumbing.Hash{}, nil
}

func (s *storage) SetIndex(idx *gindex.Index) error {
	panic("should never happen")
}

func (s *storage) Index() (*gindex.Index, error) {
	panic("should never happen")
}

func (s *storage) Config() (*gconfig.Config, error) {
	conf := gconfig.NewConfig()
	kv, err := s.kvStore.Get(context.TODO(), s.key("c", "conf"), -1)
	if err != nil {
		if err == vkv.ErrNotFound {
			return conf, nil
		}
		return nil, err
	}
	if kv != nil {
		if err := conf.Unmarshal(kv.Data); err != nil {
			return nil, err
		}
	}

	return conf, nil
}

func (s *storage) SetConfig(c *gconfig.Config) error {
	encoded, err := c.Marshal()
	if err != nil {
		return err
	}
	if _, err := s.kvStore.Put(context.TODO(), s.key("c", "conf"), "", encoded, -1); err != nil {
		return err
	}
	return nil
}

// SetReference implements the storer.ReferenceStorer interface
func (s *storage) SetReference(ref *plumbing.Reference) error {
	parts := ref.Strings()
	if _, err := s.kvStore.Put(context.TODO(), s.key("r", ref.Name().String()), "", []byte(parts[1]), -1); err != nil {
		return err
	}
	// If we're updating the remote master (during a fetch)
	if ref.Name().String() == remoteMaster {
		// Also update the local master/HEAD
		if _, err := s.kvStore.Put(context.TODO(), s.key("r", plumbing.Master.String()), "", []byte(parts[1]), -1); err != nil {
			return err
		}
	}
	return nil
}

// CheckAndSetReference implements the storer.ReferenceStorer interface
func (s *storage) CheckAndSetReference(new, old *plumbing.Reference) error {
	return s.SetReference(new)
}

func (s *storage) RemoveReference(n plumbing.ReferenceName) error {
	if _, err := s.kvStore.Put(context.TODO(), s.key("r", n.String()), "", nil, -1); err != nil {
		return err
	}
	return nil
}

func (s *storage) Reference(name plumbing.ReferenceName) (*plumbing.Reference, error) {
	if !s.tMode && name == plumbing.HEAD {
		return plumbing.NewSymbolicReference(
			plumbing.HEAD,
			plumbing.Master,
		), nil
	}
	kv, err := s.kvStore.Get(context.TODO(), s.key("r", name.String()), -1)
	if err != nil {
		if err == vkv.ErrNotFound {
			return nil, plumbing.ErrReferenceNotFound
		}
		return nil, err
	}
	if kv == nil && kv.Data == nil || len(kv.Data) == 0 {
		// Check if the reference has been removed
		return nil, plumbing.ErrReferenceNotFound

	}
	ref := plumbing.NewReferenceFromStrings(name.String(), string(kv.Data))
	return ref, nil
}

func (s *storage) IterReferences() (storer.ReferenceIter, error) {
	refs := []*plumbing.Reference{}

	rawRefs, _, err := s.kvStore.Keys(context.TODO(), s.key("r", ""), s.key("r", "\xff"), -1)
	if err != nil {
		return nil, err
	}
	for _, kv := range rawRefs {
		refs = append(refs, plumbing.NewReferenceFromStrings(rewriteKey(strings.Replace(kv.Key, s.key("r", ""), "", 1)), string(kv.Data)))
	}

	return storer.NewReferenceSliceIter(refs), nil
}

func (s *storage) CountLooseRefs() (int, error) {
	rawRefs, _, err := s.kvStore.Keys(context.TODO(), s.key("r", ""), s.key("r", "\xff"), -1)
	if err != nil {
		return 0, err
	}
	return len(rawRefs), nil
}

func (s *storage) PackRefs() error {
	return fmt.Errorf("should not happen")
}

// storer.EncodedObjectStorer interface
func (s *storage) NewEncodedObject() plumbing.EncodedObject {
	return &plumbing.MemoryObject{}
}

func (s *storage) SetEncodedObject(obj plumbing.EncodedObject) (plumbing.Hash, error) {
	key := s.key("o", obj.Hash().String())

	reader, err := obj.Reader()
	if err != nil {
		return plumbing.ZeroHash, err
	}

	obj.Size()

	content, err := ioutil.ReadAll(reader)
	if err != nil {
		return plumbing.ZeroHash, err
	}

	// Chunk the file the same way the filetree API does to share the "dedup"'d data
	if obj.Type() == plumbing.BlobObject {
		// reuse this buffer
		refs := [][32]byte{}
		if obj.Size() > 512*1024 {
			s.chunker.Reset(bytes.NewReader(content), writer.Pol)
			chunkSplitter := s.chunker
			for {
				chunk, err := chunkSplitter.Next(s.buf)
				if err == io.EOF {
					break
				}
				chunkHash := hashutil.ComputeRaw(chunk.Data)
				if _, err := s.blobStore.Put(context.TODO(), &blob.Blob{Hash: fmt.Sprintf("%x", chunkHash), Data: chunk.Data}); err != nil {
					return plumbing.ZeroHash, err
				}
				refs = append(refs, chunkHash)
			}
		} else {
			chunkHash := hashutil.ComputeRaw(content)
			if _, err := s.blobStore.Put(context.TODO(), &blob.Blob{Hash: fmt.Sprintf("%x", chunkHash), Data: content}); err != nil {
				return plumbing.ZeroHash, err
			}
			refs = append(refs, chunkHash)
		}
		content, err = msgpack.Marshal(&refs)
		if err != nil {
			return plumbing.ZeroHash, err
		}
	}

	if _, err := s.kvStore.Put(context.TODO(), key, "", append([]byte{byte(obj.Type())}, content...), -1); err != nil {
		return plumbing.ZeroHash, err

	}

	return obj.Hash(), nil
}

func (s *storage) objFromKv(kv *vkv.KeyValue) (plumbing.EncodedObject, error) {
	obj := &plumbing.MemoryObject{}
	objType := plumbing.ObjectType(kv.Data[0])
	obj.SetType(objType)

	if objType == plumbing.BlobObject {
		refs := [][32]byte{}
		if err := msgpack.Unmarshal(kv.Data[1:], &refs); err != nil {
			return nil, err
		}
		for _, rref := range refs {
			blob, err := s.blobStore.Get(context.TODO(), fmt.Sprintf("%x", rref))
			if err != nil {
				return nil, err
			}
			if _, err := obj.Write(blob); err != nil {
				return nil, err
			}
		}

	} else {
		if _, err := obj.Write(kv.Data[1:]); err != nil {
			return nil, err
		}
	}

	return obj, nil
}

func (s *storage) EncodedObject(t plumbing.ObjectType, h plumbing.Hash) (plumbing.EncodedObject, error) {
	key := s.key("o", h.String())

	kv, err := s.kvStore.Get(context.TODO(), key, -1)
	if err != nil {
		if err == vkv.ErrNotFound {
			return nil, plumbing.ErrObjectNotFound
		}
		return nil, err
	}
	return s.objFromKv(kv)
}

func (s *storage) EncodedObjectSize(h plumbing.Hash) (size int64, err error) {
	key := s.key("o", h.String())

	kv, err := s.kvStore.Get(context.TODO(), key, -1)
	if err != nil {
		if err == vkv.ErrNotFound {
			return 0, plumbing.ErrObjectNotFound
		}
		return 0, err
	}
	obj, err := s.objFromKv(kv)
	if err != nil {
		return 0, err
	}

	return obj.Size(), nil
}

func (s *storage) IterEncodedObjects(t plumbing.ObjectType) (storer.EncodedObjectIter, error) {
	res := []plumbing.EncodedObject{}
	kvs, _, err := s.kvStore.Keys(context.TODO(), s.key("o", ""), s.key("o", "\xff"), -1)
	if err != nil {
		return nil, err
	}

	for _, kv := range kvs {
		if plumbing.ObjectType(kv.Data[0]) != t {
			continue
		}
		obj, err := s.objFromKv(kv)
		if err != nil {
			return nil, err
		}
		res = append(res, obj)
	}

	return storer.NewEncodedObjectSliceIter(res), nil
}

func (s *storage) HasEncodedObject(h plumbing.Hash) error {
	key := s.key("o", h.String())
	switch _, err := s.kvStore.Get(context.TODO(), key, -1); err {
	case nil:
		return nil
	case vkv.ErrNotFound:
		return plumbing.ErrObjectNotFound
	default:
		return err
	}
}

func (gs *GitServer) getEndpoint(path string) (*transport.Endpoint, error) {
	var u string
	if gs.conf.AutoTLS {
		u = fmt.Sprintf("https://%s%s", gs.conf.Domains[0], path)
	} else {
		p, err := url.Parse(fmt.Sprintf("http://%s", gs.conf.Listen))
		if err != nil {
			return nil, err
		}
		hostname := p.Hostname()
		if hostname == "" {
			hostname = "localhost"
		}
		u = fmt.Sprintf("http://%s:%s%s", hostname, p.Port(), path)
	}
	ep, err := transport.NewEndpoint(u)
	if err != nil {
		return nil, err
	}
	return ep, nil
}

type LogBuilder struct {
	commits []*object.Commit
	max     int
}

func (b *LogBuilder) process(c *object.Commit) error {
	b.commits = append(b.commits, c)
	if b.max > 0 && len(b.commits) == b.max {
		return nil
	}
	parents := c.Parents()
	defer parents.Close()
	return parents.ForEach(b.process)
}

func buildCommitLogs(s *storage, h plumbing.Hash, max int) []*object.Commit {
	commit, err := object.GetCommit(s, h)
	//obj, err := storage.EncodedObject(plumbing.CommitObject, ref.Hash())
	if err != nil {
		panic(err)
	}
	lb := &LogBuilder{[]*object.Commit{commit}, max}
	parents := commit.Parents()
	defer parents.Close()
	if err := parents.ForEach(lb.process); err != nil {
		panic(err)
	}
	return lb.commits
}

func (gs *GitServer) rootHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}

	if !auth.Can(
		w,
		r,
		perms.Action(perms.List, perms.GitNs),
		perms.ResourceWithID(perms.GitServer, perms.GitNs, "*"),
	) {
		auth.Forbidden(w)
		return
	}

	limit := 50

	namespaces, err := gs.Namespaces()
	if err != nil {
		panic(err)
	}

	httputil.MarshalAndWrite(r, w, map[string]interface{}{
		"data": namespaces,
		"pagination": map[string]interface{}{
			"cursor":   "",
			"has_more": len(namespaces) == limit,
			"count":    len(namespaces),
			"per_page": limit,
		},
	})
}

func (gs *GitServer) Namespaces() ([]string, error) {
	namespaces := []string{}

	// We cannot afford to index the repository (will waste space to keep a separate
	// kv collection) and having a temp index is complicated
	prefix := "_git:"
	for {
		keys, _, err := gs.kvStore.Keys(context.TODO(), prefix, "\xff", 1)
		if err != nil {
			return nil, err
		}
		if len(keys) == 0 || !strings.HasPrefix(keys[0].Key, "_git:") {
			break
		}
		dat := strings.Split(strings.Split(keys[0].Key, "!")[0], ":")
		namespaces = append(namespaces, dat[1])
		prefix = vkv.NextKey(fmt.Sprintf("_git:%s", dat[1]))
	}

	return namespaces, nil
}

func (gs *GitServer) Repositories(ns string) ([]string, error) {
	repos := []string{}
	// We cannot afford to index the repository (will waste space to keep a separate
	// kv collection) and having a temp index is complicated
	basePrefix := fmt.Sprintf("_git:%s:", ns)
	prefix := fmt.Sprintf("_git:%s:", ns)
	for {
		keys, _, err := gs.kvStore.Keys(context.TODO(), prefix, "\xff", 1)
		if err != nil {
			return nil, err
		}
		if len(keys) == 0 || !strings.HasPrefix(keys[0].Key, basePrefix) {
			break
		}
		repo := strings.Split(keys[0].Key, "!")[0]
		repo = repo[len(basePrefix):]
		repos = append(repos, repo)
		prefix = vkv.NextKey(fmt.Sprintf("_git:%s:%s", ns, repo))
	}
	return repos, nil
}

func (gs *GitServer) nsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)

	limit := 50

	ns := vars["ns"]

	if !auth.Can(
		w,
		r,
		perms.Action(perms.List, perms.GitNs),
		perms.ResourceWithID(perms.GitServer, perms.GitNs, ns),
	) {
		auth.Forbidden(w)
		return
	}

	repos, err := gs.Repositories(ns)
	if err != nil {
		panic(err)
	}

	httputil.MarshalAndWrite(r, w, map[string]interface{}{
		"data": repos,
		"pagination": map[string]interface{}{
			"cursor":   "",
			"has_more": len(repos) == limit,
			"count":    len(repos),
			"per_page": limit,
		},
	})
}

// GitRepoRefs groups branch and tag summaries for the refs endpoint.
type GitRepoRefs struct {
	Branches []*RefSummary `json:"branches"`
	Tags     []*RefSummary `json:"tags"`
}

// GitRepoSummary is the payload returned by the repository overview endpoint
// (see RepoSummary).
type GitRepoSummary struct {
	Ns        string             `json:"ns"`
	Name      string             `json:"name"`
	Commits   []*GitServerCommit `json:"commits"`
	Readme    string             `json:"readme"`
	Languages map[string]int     `json:"languages"` // language name -> percentage
	Branches  []*RefSummary      `json:"branches"`
	Tags      []*RefSummary      `json:"tags"`
}

// RefSummary pairs a ref's short name with its head commit.
type RefSummary struct {
	Ref    string           `json:"name"` // .Ref.Name().Short()
	Commit *GitServerCommit `json:"commit"`
}

// RepoGetFile loads a single file blob from the given repository. The
// returned file's Name is set to the blob hash (no path is known at this
// level) and the mode is a fixed 0644.
func (gs *GitServer) RepoGetFile(ns, repo string, hash plumbing.Hash) (*object.File, error) {
	store := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	blob, err := object.GetBlob(store, hash)
	if err != nil {
		return nil, err
	}
	return object.NewFile(hash.String(), 0644, blob), nil
}

// RepoGetTree fetches the tree object with the given hex-encoded hash.
func (gs *GitServer) RepoGetTree(ns, repo, hash string) (*object.Tree, error) {
	store := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	return object.GetTree(store, plumbing.NewHash(hash))
}

// RepoTree returns the root tree of the commit pointed to by the master
// branch (plumbing.ErrReferenceNotFound when the repo is empty).
func (gs *GitServer) RepoTree(ns, repo string) (*object.Tree, error) {
	store := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	head, err := store.Reference(plumbing.Master)
	if err != nil {
		return nil, err
	}
	headCommit, err := object.GetCommit(store, head.Hash())
	if err != nil {
		return nil, err
	}
	return headCommit.Tree()
}

// RepoLog returns the commit log of the master branch, converted to the
// JSON-friendly representation.
func (gs *GitServer) RepoLog(ns, repo string) ([]*GitServerCommit, error) {
	store := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	head, err := store.Reference(plumbing.Master)
	if err != nil {
		return nil, err
	}
	out := []*GitServerCommit{}
	for _, raw := range buildCommitLogs(store, head.Hash(), 0) {
		out = append(out, fromCommit(raw))
	}
	return out, nil
}

// RepoCommit loads a single commit by hash and converts it to the
// JSON-friendly GitServerCommit representation.
//
// Fix: the original panicked on lookup failure even though the signature
// already returns an error; propagate it instead so callers (HTTP handlers,
// Lua bindings) can handle a missing commit — consistent with the other
// Repo* methods.
func (gs *GitServer) RepoCommit(ns, repo string, hash plumbing.Hash) (*GitServerCommit, error) {
	storage := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	commit, err := object.GetCommit(storage, hash)
	if err != nil {
		return nil, err
	}
	return fromCommit(commit), nil
}

// RepoRefs walks every reference of the repository and summarizes branches
// and tags (short ref name plus the converted head commit).
func (gs *GitServer) RepoRefs(ns, repo string) (*GitRepoRefs, error) {
	store := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	iter, err := store.IterReferences()
	if err != nil {
		return nil, err
	}

	out := &GitRepoRefs{}
	walk := func(ref *plumbing.Reference) error {
		if ref.Name().IsTag() {
			c, err := object.GetCommit(store, ref.Hash())
			if err != nil {
				return fmt.Errorf("failed to fetch tag: %+v", err)
			}
			out.Tags = append(out.Tags, &RefSummary{Ref: ref.Name().Short(), Commit: fromCommit(c)})
		}
		if ref.Name().IsBranch() {
			c, err := object.GetCommit(store, ref.Hash())
			if err != nil {
				return err
			}
			out.Branches = append(out.Branches, &RefSummary{Ref: ref.Name().Short(), Commit: fromCommit(c)})
		}
		return nil
	}
	if err := iter.ForEach(walk); err != nil {
		return nil, err
	}
	return out, nil
}

// GitServerCommit is the JSON-friendly view of a git commit: hashes are hex
// strings and dates are RFC 3339 (see fromCommit).
type GitServerCommit struct {
	Hash      string                 `json:"hash"`
	Tree      string                 `json:"tree"`
	Author    *GitServerCommitAuthor `json:"author"`
	// NOTE(review): the tag is misspelled ("comitter") but kept as-is —
	// renaming it would break existing API consumers.
	Committer *GitServerCommitAuthor `json:"comitter"`
	Message   string                 `json:"message"`
	Parents   []string               `json:"parents"`
	Raw       *object.Commit         `json:"-"` // original commit, never serialized
}

// GitServerCommitAuthor identifies an author/committer; Date is RFC 3339.
type GitServerCommitAuthor struct {
	Name  string `json:"name"`
	Email string `json:"email"`
	Date  string `json:"date"`
}

// gitServerBranch mirrors one branch entry of a repo's git config.
type gitServerBranch struct {
	Name   string `json:"name"`
	Remote string `json:"remote"`
	Merge  string `json:"merge"`
}

// gitServerRemote mirrors one remote entry of a repo's git config.
type gitServerRemote struct {
	Name  string   `json:"name"`
	URLs  []string `json:"urls"`
	Fetch []string `json:"fetch"`
}

// gitServerConfig is the JSON-serializable subset of a repo's git config.
type gitServerConfig struct {
	Branches map[string]*gitServerBranch `json:"branches"`
	Remotes  map[string]*gitServerRemote `json:"remotes"`
}

// Config converts the stored git config into the JSON-serializable
// gitServerConfig (branches and remotes only).
func (gs *GitServer) Config(storage *storage) (*gitServerConfig, error) {
	conf, err := storage.Config()
	if err != nil {
		return nil, err
	}

	out := &gitServerConfig{
		Branches: map[string]*gitServerBranch{},
		Remotes:  map[string]*gitServerRemote{},
	}
	// TODO(tsileo): submodule support?

	for name, branch := range conf.Branches {
		out.Branches[name] = &gitServerBranch{
			Name:   name,
			Remote: branch.Remote,
			Merge:  branch.Merge.String(),
		}
	}
	for name, remote := range conf.Remotes {
		// Refspecs are flattened to their string form
		fetch := []string{}
		for _, spec := range remote.Fetch {
			fetch = append(fetch, spec.String())
		}
		out.Remotes[name] = &gitServerRemote{
			Name:  name,
			URLs:  remote.URLs,
			Fetch: fetch,
		}
	}

	return out, nil
}

// README returns the contents of the first README-like file found at the
// root of the commit's tree, or "" if none exists.
//
// Fix: removed the leftover debug fmt.Printf that spammed stdout for every
// tree entry on each call.
func (gs *GitServer) README(commit *object.Commit) (string, error) {
	tree, err := commit.Tree()
	if err != nil {
		return "", err
	}
	for _, treeEntry := range tree.Entries {
		if strings.HasSuffix(treeEntry.Name, "README.md") ||
			strings.HasSuffix(treeEntry.Name, "README.rst") ||
			strings.HasSuffix(treeEntry.Name, "README.txt") ||
			strings.HasSuffix(treeEntry.Name, "README") {
			f, err := tree.File(treeEntry.Name)
			if err != nil {
				return "", err
			}
			// Contents returns ("", err) on failure, matching our contract
			return f.Contents()
		}
	}
	return "", nil
}

// fromCommit converts a go-git commit into the JSON-friendly GitServerCommit,
// flattening hashes to hex strings and timestamps to RFC 3339.
func fromCommit(rawCommit *object.Commit) *GitServerCommit {
	parents := []string{}
	for _, parent := range rawCommit.ParentHashes {
		parents = append(parents, parent.String())
	}

	author := &GitServerCommitAuthor{
		Name:  rawCommit.Author.Name,
		Email: rawCommit.Author.Email,
		Date:  rawCommit.Author.When.Format(time.RFC3339),
	}
	committer := &GitServerCommitAuthor{
		Name:  rawCommit.Committer.Name,
		Email: rawCommit.Committer.Email,
		Date:  rawCommit.Committer.When.Format(time.RFC3339),
	}

	return &GitServerCommit{
		Raw:       rawCommit,
		Tree:      rawCommit.TreeHash.String(),
		Hash:      rawCommit.Hash.String(),
		Message:   rawCommit.Message,
		Parents:   parents,
		Author:    author,
		Committer: committer,
	}
}

// RepoSummary builds the repository overview: the master commit log, README
// contents, language breakdown (truncated percentages) and branch/tag
// summaries.
//
// Fixes over the original:
//   - guard the percentage computation when no file maps to a detected
//     language (the original divided by zero and stored int(NaN));
//   - the degenerate error switch (identical branches) is a plain check —
//     plumbing.ErrReferenceNotFound still flows through unchanged so HTTP
//     handlers can turn it into a 404;
//   - the file iterator is closed via defer so it is released on every path.
func (gs *GitServer) RepoSummary(ns, repo string) (*GitRepoSummary, error) {
	summary := &GitRepoSummary{Ns: ns, Name: repo}

	storage := newStorage(ns, repo, gs.blobStore, gs.kvStore)
	ref, err := storage.Reference(plumbing.Master)
	if err != nil {
		return nil, err
	}

	commit, err := object.GetCommit(storage, ref.Hash())
	if err != nil {
		return nil, err
	}

	// Fetch the README
	readme, err := gs.README(commit)
	if err != nil {
		return nil, err
	}

	tree, err := commit.Tree()
	if err != nil {
		return nil, err
	}

	// Compute the language breakdown
	fiter := tree.Files()
	defer fiter.Close()
	out := map[string]int{}
	total := 0
	if err := fiter.ForEach(func(o *object.File) error {
		// Skip any "vendor" dir
		for _, pathPart := range strings.Split(o.Name, "/") {
			// enry expects `path/`
			tpart := pathPart + "/"
			if enry.IsVendor(tpart) {
				return nil
			}
		}
		lang, safe := enry.GetLanguageByExtension(o.Name)
		if !safe {
			return nil
		}
		total++
		out[lang]++
		return nil
	}); err != nil {
		return nil, err
	}

	// Convert per-language counts to truncated percentages; skip when no
	// language was detected to avoid dividing by zero
	if total > 0 {
		for l, cnt := range out {
			out[l] = int(float64(cnt*100) / float64(total))
		}
	}

	// Fetch the commits
	commits := []*GitServerCommit{}
	for _, rawCommit := range buildCommitLogs(storage, ref.Hash(), 0) {
		commits = append(commits, fromCommit(rawCommit))
	}

	refs, err := storage.IterReferences()
	if err != nil {
		return nil, err
	}
	if err := refs.ForEach(func(ref *plumbing.Reference) error {
		if ref.Name().IsTag() {
			commit, err := object.GetCommit(storage, ref.Hash())
			if err != nil {
				return fmt.Errorf("failed to fetch tag: %+v", err)
			}
			summary.Tags = append(summary.Tags, &RefSummary{Ref: ref.Name().Short(), Commit: fromCommit(commit)})
		}
		if ref.Name().IsBranch() {
			commit, err := object.GetCommit(storage, ref.Hash())
			if err != nil {
				return err
			}
			summary.Branches = append(summary.Branches, &RefSummary{Ref: ref.Name().Short(), Commit: fromCommit(commit)})
		}
		return nil
	}); err != nil {
		return nil, err
	}
	summary.Commits = commits
	summary.Readme = readme
	summary.Languages = out

	return summary, nil
}

// gitRepoTgzHandler streams the repository's master tree as a .tar.gz
// archive (GET only); every entry is rooted under the repo name.
//
// NOTE(review): no Content-Type/Content-Disposition headers are set, and
// errors from the final tar/gzip Close calls are ignored — a truncated
// archive would be delivered silently. Confirm whether that is acceptable.
func (gs *GitServer) gitRepoTgzHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)

	ns := vars["ns"]

	// RBAC: read access on the repo resource
	if !auth.Can(
		w,
		r,
		perms.Action(perms.Read, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", ns, vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	// An empty repository (no master ref yet) yields a 404
	tree, err := gs.RepoTree(vars["ns"], vars["repo"])
	switch err {
	case nil:
	case plumbing.ErrReferenceNotFound:
		w.WriteHeader(http.StatusNotFound)
		return
	default:
		panic(err)
	}

	gzipWriter := gzip.NewWriter(w)
	tarWriter := tar.NewWriter(gzipWriter)

	// Iter the whole tree
	fiter := tree.Files()
	if err := fiter.ForEach(func(o *object.File) error {
		// Write the tar header
		hdr := &tar.Header{
			Name: filepath.Join(vars["repo"], o.Name),
			Mode: int64(o.Mode),
			Size: o.Size,
		}
		if err := tarWriter.WriteHeader(hdr); err != nil {
			return err
		}

		r, err := o.Reader()
		if err != nil {
			return err
		}
		// defer fires at the end of this closure, i.e. once per file
		defer r.Close()

		if _, err := io.Copy(tarWriter, r); err != nil {
			return err
		}
		return nil
	}); err != nil {
		panic(err)
	}
	fiter.Close()

	// "seal" the tarfile
	tarWriter.Close()
	gzipWriter.Close()
}

// gitRepoConfigHandler returns the repository's git config (branches and
// remotes) as JSON (GET only).
func (gs *GitServer) gitRepoConfigHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)

	ns := vars["ns"]

	// RBAC: read access on the repo resource
	if !auth.Can(
		w,
		r,
		perms.Action(perms.Read, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", ns, vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	storage := newStorage(ns, vars["repo"], gs.blobStore, gs.kvStore)
	// Missing reference -> 404; any other error is a 500 via panic
	conf, err := gs.Config(storage)
	switch err {
	case nil:
	case plumbing.ErrReferenceNotFound:
		w.WriteHeader(http.StatusNotFound)
		return
	default:
		panic(err)
	}
	httputil.MarshalAndWrite(r, w, map[string]interface{}{
		"data": map[string]interface{}{
			"ns":         ns,
			"repository": vars["repo"],
			"config":     conf,
		},
	})

}

// gitRepoHandler renders the repository summary as JSON (GET only).
func (gs *GitServer) gitRepoHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)
	ns := vars["ns"]

	// RBAC: read access on the repo resource
	if !auth.Can(
		w,
		r,
		perms.Action(perms.Read, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", ns, vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	summary, err := gs.RepoSummary(ns, vars["repo"])
	switch err {
	case nil:
	case plumbing.ErrReferenceNotFound:
		// Empty repository (no master branch yet) -> 404
		w.WriteHeader(http.StatusNotFound)
		return
	default:
		panic(err)
	}

	httputil.MarshalAndWrite(r, w, map[string]interface{}{
		"data": summary,
	})
}

// gitInfoRefsHandler implements the discovery half of the Git smart HTTP
// protocol (GET /info/refs?service=...): it advertises the repository
// references for a subsequent fetch (git-upload-pack) or push
// (git-receive-pack).
//
// Fixes over the original: an unknown or missing `service` is now rejected
// with a 400 instead of leaving `refs` nil and panicking on refs.Encode, and
// the leftover debug Printf is removed.
func (gs *GitServer) gitInfoRefsHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "GET" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)

	service := r.URL.Query().Get("service")

	// Compute the permission for the RBAC (default to Write)
	perm := perms.Write
	if service == "git-upload-pack" {
		// If it's a `git clone`, set the permission to Read
		perm = perms.Read
	}

	// Check the perms
	if !auth.Can(
		w,
		r,
		perms.Action(perm, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", vars["ns"], vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	var refs *packp.AdvRefs

	// Here, repositories are created on the fly, we don't need to check if it actually exists before
	storage := newStorage(vars["ns"], vars["repo"], gs.blobStore, gs.kvStore)
	git := server.NewServer(storage)
	t, err := gs.getEndpoint(r.URL.Path)
	if err != nil {
		panic(err)
	}

	switch service {
	case "git-upload-pack":
		sess, err := git.NewUploadPackSession(t, nil)
		if err != nil {
			panic(err)
		}
		refs, err = sess.AdvertisedReferences()
		if err != nil {
			panic(err)
		}
	case "git-receive-pack":
		sess, err := git.NewReceivePackSession(t, nil)
		if err != nil {
			panic(err)
		}
		refs, err = sess.AdvertisedReferences()
		if err != nil {
			panic(err)
		}
	default:
		// Any other value would leave refs nil and crash below
		w.WriteHeader(http.StatusBadRequest)
		return
	}

	w.Header().Set("Content-Type", fmt.Sprintf("application/x-%s-advertisement", service))
	w.Header().Set("Cache-Control", "no-cache")

	// pkt-line header: 4-digit hex length (which must count the 4 length
	// digits themselves — here the trailing "0000" flush-pkt stands in for
	// them), then the payload, then the flush-pkt
	data := fmt.Sprintf("# service=%s\n0000", service)
	w.Write([]byte(fmt.Sprintf("%04x%s", len(data), data)))
	if err := refs.Encode(w); err != nil {
		panic(err)
	}
}

// cloneReq is the JSON payload of the clone/pull endpoint.
type cloneReq struct {
	URL string `json:"url"` // remote repository URL to mirror
}

// gitCloneOrPullHandler mirrors a remote repository into blobstash (POST
// only, body: {"url": "..."}).
//
// A full clone is attempted first; if the repo already exists locally it is
// opened and fetched instead. Progress is surfaced through response headers
// (httputil.HeaderLog) and the status code encodes the outcome:
// 201 fresh clone, 205 fetched new objects, 204 already up to date.
func (gs *GitServer) gitCloneOrPullHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "POST" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)

	// Check the perms
	if !auth.Can(
		w,
		r,
		perms.Action(perms.Write, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", vars["ns"], vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	// Parse the payload (the remote URL)
	creq := &cloneReq{}
	if err := httputil.Unmarshal(r, creq); err != nil {
		panic(err)
	}

	// Initialize the git backend
	storage := newStorage(vars["ns"], vars["repo"], gs.blobStore, gs.kvStore)
	storage.tMode = true

	httputil.HeaderLog(w, "git clone")

	// Try to clone the repo
	_, err := git.Clone(storage, nil, &git.CloneOptions{
		URL: creq.URL,
	})
	switch err {
	case nil:
		httputil.HeaderLog(w, "clone succeeded")
		w.WriteHeader(http.StatusCreated)
	case git.ErrRepositoryAlreadyExists:
		httputil.HeaderLog(w, "git fetch")

		// If the repo already exists, "open it"
		repo, err := git.Open(storage, nil)
		if err != nil {
			panic(err)
		}

		// Try to fetch the latest change
		switch err := repo.Fetch(&git.FetchOptions{}); err {
		case nil:
			httputil.HeaderLog(w, "fetch succeeded")
			w.WriteHeader(http.StatusResetContent)
		case git.NoErrAlreadyUpToDate:
			httputil.HeaderLog(w, err.Error())
			w.WriteHeader(http.StatusNoContent)
		default:
			panic(err)
		}

	default:
		panic(err)
	}
}

// gitServiceHandler implements the RPC half of the Git smart HTTP protocol
// (POST /{service}): git-receive-pack handles a push, git-upload-pack
// handles a clone/fetch. Anything else is a 400.
func (gs *GitServer) gitServiceHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != "POST" {
		w.WriteHeader(http.StatusMethodNotAllowed)
		return
	}
	vars := mux.Vars(r)
	service := vars["service"]

	// Compute the permission for the RBAC (default to Write)
	perm := perms.Write
	if service == "git-upload-pack" {
		// If it's a `git clone`, set the permission to Read
		perm = perms.Read
	}

	// Check the perms
	if !auth.Can(
		w,
		r,
		perms.Action(perm, perms.GitRepo),
		perms.ResourceWithID(perms.GitServer, perms.GitRepo, fmt.Sprintf("%s/%s", vars["ns"], vars["repo"])),
	) {
		auth.Forbidden(w)
		return
	}

	w.Header().Set("Content-Type", fmt.Sprintf("application/x-%s-result", service))

	// Repositories are created on the fly by the storage layer
	storage := newStorage(vars["ns"], vars["repo"], gs.blobStore, gs.kvStore)
	git := server.NewServer(storage)
	t, err := gs.getEndpoint(r.URL.Path)
	if err != nil {
		panic(err)
	}

	switch service {
	case "git-receive-pack":
		// Handle push: decode the client's ref-update request from the
		// body, apply it, and stream the report-status back
		req := packp.NewReferenceUpdateRequest()
		sess, err := git.NewReceivePackSession(t, nil)
		if err != nil {
			panic(err)
		}

		if err := req.Decode(r.Body); err != nil {
			panic(err)
		}

		status, err := sess.ReceivePack(r.Context(), req)
		if err != nil {
			panic(err)
		}

		if err := status.Encode(w); err != nil {
			panic(err)
		}
	case "git-upload-pack":
		// Handle clone: decode the client's wants/haves and stream the
		// resulting packfile back
		req := packp.NewUploadPackRequest()
		sess, err := git.NewUploadPackSession(t, nil)
		if err != nil {
			panic(err)
		}

		if err := req.Decode(r.Body); err != nil {
			panic(err)
		}

		resp, err := sess.UploadPack(r.Context(), req)
		if err != nil {
			panic(err)
		}

		if err := resp.Encode(w); err != nil {
			panic(err)
		}
	default:
		w.WriteHeader(http.StatusBadRequest)
		return
	}

}

D pkg/gitserver/lua/lua.go => pkg/gitserver/lua/lua.go +0 -351
@@ 1,351 0,0 @@
package lua // import "a4.io/blobstash/pkg/gitserver/lua"

import (
	"os"
	"time"

	"github.com/xeonx/timeago"
	"github.com/yuin/gopher-lua"
	"gopkg.in/src-d/go-git.v4/plumbing"
	"gopkg.in/src-d/go-git.v4/plumbing/object"

	"a4.io/blobstash/pkg/apps/luautil"
	"a4.io/blobstash/pkg/gitserver"
)

// setupGitServer returns the loader for the "gitserver" Lua module, which
// exposes repo(ns, name), namespaces() and repositories(ns).
func setupGitServer(gs *gitserver.GitServer) func(*lua.LState) int {
	return func(L *lua.LState) int {
		// register functions to the table
		mod := L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
			// repo(ns, name) -> userdata wrapping the repo handle
			"repo": func(L *lua.LState) int {
				ns := L.ToString(1)
				name := L.ToString(2)
				ud := L.NewUserData()
				ud.Value = &repo{gs, ns, name}
				L.SetMetatable(ud, L.GetTypeMetatable("repo"))
				L.Push(ud)
				return 1
			},
			// namespaces() -> list of namespace strings
			"namespaces": func(L *lua.LState) int {
				namespaces, err := gs.Namespaces()
				if err != nil {
					panic(err)
				}
				L.Push(luautil.InterfaceToLValue(L, namespaces))
				return 1
			},
			// repositories(ns) -> list of repo names in the namespace
			"repositories": func(L *lua.LState) int {
				ns := L.ToString(1)
				repos, err := gs.Repositories(ns)
				if err != nil {
					panic(err)
				}
				L.Push(luautil.InterfaceToLValue(L, repos))
				return 1
			},
		})
		// returns the module
		L.Push(mod)
		return 1
	}
}

// Setup registers the "repo" userdata type with its method table and
// preloads the "gitserver" module into the given Lua state.
func Setup(L *lua.LState, gs *gitserver.GitServer) {
	mtCol := L.NewTypeMetatable("repo")
	L.SetField(mtCol, "__index", L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
		"name":       repoName,
		"refs":       repoRefs,
		"summary":    repoSummary,
		"tree":       repoTree,
		"log":        repoLog,
		"get_commit": repoGetCommit,
		"get_tree":   repoGetTree,
		"get_file":   repoGetFile,
	}))
	L.PreloadModule("gitserver", setupGitServer(gs))
}

// repo is the Lua userdata payload binding a (namespace, name) pair to the
// underlying GitServer.
type repo struct {
	gs       *gitserver.GitServer
	ns, name string
}

// checkRepo extracts the *repo from the first Lua argument, raising a Lua
// argument error (and returning nil) when the userdata is not a repo.
func checkRepo(L *lua.LState) *repo {
	ud := L.CheckUserData(1)
	r, ok := ud.Value.(*repo)
	if !ok {
		L.ArgError(1, "repo expected")
		return nil
	}
	return r
}

// repoName pushes the repository name as a Lua string.
func repoName(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	L.Push(lua.LString(r.name))
	return 1
}

// convertFile converts a git file object into a Lua table with keys "hash",
// "contents" and "is_binary". Binary files get empty contents.
//
// NOTE(review): the "hash" key is actually filled from file.Name — in this
// codebase RepoGetFile builds files whose Name IS the blob hash, but confirm
// before reusing this helper for files carrying real path names.
func convertFile(L *lua.LState, file *object.File) *lua.LTable {
	tbl := L.CreateTable(0, 3)
	var contents string
	var err error
	isBinary := lua.LTrue
	gisBinary, err := file.IsBinary()
	if err != nil {
		panic(err)
	}
	if !gisBinary {
		// Only text files expose their contents to Lua
		isBinary = lua.LFalse
		contents, err = file.Contents()
		if err != nil {
			panic(err)
		}

	}
	tbl.RawSetH(lua.LString("hash"), lua.LString(file.Name))
	tbl.RawSetH(lua.LString("contents"), lua.LString(contents))
	tbl.RawSetH(lua.LString("is_binary"), isBinary)
	return tbl
}

func mustParse(d string) time.Time {
	t, err := time.Parse(time.RFC3339, d)
	if err != nil {
		panic(err)
	}
	return t
}

// convertCommit converts a GitServerCommit into a Lua table for templates:
// hash/message, author/committer identity with human-friendly "time ago"
// strings, the tree hash and (optionally) the first parent. When withPatch
// is true and the commit has a parent, the diff against that first parent is
// added as "patch", plus aggregate "stats" and per-file "file_stats".
func convertCommit(L *lua.LState, commit *gitserver.GitServerCommit, withPatch bool) *lua.LTable {
	cntWithPatch := 0
	if withPatch {
		cntWithPatch = 3
	}
	tbl := L.CreateTable(0, 12+cntWithPatch)
	tbl.RawSetH(lua.LString("hash"), lua.LString(commit.Hash))
	tbl.RawSetH(lua.LString("message"), lua.LString(commit.Message))
	// Author
	tbl.RawSetH(lua.LString("author_time_ago"), lua.LString(timeago.English.Format(mustParse(commit.Author.Date))))
	tbl.RawSetH(lua.LString("author_time"), lua.LString(commit.Author.Date))
	tbl.RawSetH(lua.LString("author_name"), lua.LString(commit.Author.Name))
	tbl.RawSetH(lua.LString("author_email"), lua.LString(commit.Author.Email))
	// Comitter (key names keep the historical "comitter" spelling)
	tbl.RawSetH(lua.LString("comitter_time_ago"), lua.LString(timeago.English.Format(mustParse(commit.Committer.Date))))
	tbl.RawSetH(lua.LString("comitter_time"), lua.LString(commit.Committer.Date))
	tbl.RawSetH(lua.LString("comitter_name"), lua.LString(commit.Committer.Name))
	tbl.RawSetH(lua.LString("comitter_email"), lua.LString(commit.Committer.Email))

	tbl.RawSetH(lua.LString("tree_hash"), lua.LString(commit.Tree))
	rcommit := commit.Raw
	if len(rcommit.ParentHashes) > 0 {
		// Only the first parent is surfaced
		tbl.RawSetH(lua.LString("parent_hash"), lua.LString(rcommit.ParentHashes[0].String()))
	}
	if withPatch && len(rcommit.ParentHashes) > 0 {
		// Diff first parent -> this commit
		ci := rcommit.Parents()
		defer ci.Close()
		parentCommit, err := ci.Next()
		if err != nil {
			panic(err)
		}
		parentTree, err := parentCommit.Tree()
		if err != nil {
			panic(err)
		}
		tree, err := rcommit.Tree()
		if err != nil {
			panic(err)
		}
		changes, err := parentTree.Diff(tree)
		if err != nil {
			panic(err)
		}
		patch, err := changes.Patch()
		if err != nil {
			panic(err)
		}
		// Aggregate totals while building the per-file stats list
		var filesChanged, additions, deletions int
		stats := patch.Stats()
		lfilestats := L.CreateTable(len(stats), 0)
		for _, fstat := range stats {
			filesChanged++
			additions += fstat.Addition
			deletions += fstat.Deletion
			lfilestats.Append(newFileStat(L, fstat.Name, fstat.Addition, fstat.Deletion))
		}
		lstats := L.CreateTable(0, 3)
		lstats.RawSetH(lua.LString("files_changed"), lua.LNumber(filesChanged))
		lstats.RawSetH(lua.LString("additions"), lua.LNumber(additions))
		lstats.RawSetH(lua.LString("deletions"), lua.LNumber(deletions))

		tbl.RawSetH(lua.LString("stats"), lstats)
		tbl.RawSetH(lua.LString("file_stats"), lfilestats)
		tbl.RawSetH(lua.LString("patch"), lua.LString(patch.String()))
	}

	return tbl
}

// newFileStat builds a Lua table {name, additions, deletions} describing one
// file's diff stats.
func newFileStat(L *lua.LState, name string, additions, deletions int) *lua.LTable {
	out := L.CreateTable(0, 3)
	out.RawSetH(lua.LString("name"), lua.LString(name))
	out.RawSetH(lua.LString("additions"), lua.LNumber(additions))
	out.RawSetH(lua.LString("deletions"), lua.LNumber(deletions))
	return out
}

// convertRefSummary converts a RefSummary into a Lua table for templates.
//
// NOTE(review): the commit_author_name/commit_author_email keys are filled
// from the Committer, not the Author — looks deliberate for display purposes
// but confirm. commit_short_hash assumes the full 40-char hex hash.
func convertRefSummary(L *lua.LState, refSummary *gitserver.RefSummary) *lua.LTable {
	tbl := L.CreateTable(0, 7)
	tbl.RawSetH(lua.LString("commit_time_ago"), lua.LString(timeago.English.Format(mustParse(refSummary.Commit.Committer.Date))))
	tbl.RawSetH(lua.LString("commit_short_hash"), lua.LString(refSummary.Commit.Hash[:8]))
	tbl.RawSetH(lua.LString("commit_hash"), lua.LString(refSummary.Commit.Hash))
	tbl.RawSetH(lua.LString("commit_message"), lua.LString(refSummary.Commit.Message))
	tbl.RawSetH(lua.LString("commit_author_name"), lua.LString(refSummary.Commit.Committer.Name))
	tbl.RawSetH(lua.LString("commit_author_email"), lua.LString(refSummary.Commit.Committer.Email))
	tbl.RawSetH(lua.LString("ref_short_name"), lua.LString(refSummary.Ref))
	return tbl
}

// convertTreeEntry converts a git tree entry into a Lua table with its name,
// textual mode, is_file flag and hex hash.
func convertTreeEntry(L *lua.LState, treeEntry *object.TreeEntry) *lua.LTable {
	out := L.CreateTable(0, 4)
	out.RawSetH(lua.LString("name"), lua.LString(treeEntry.Name))
	isFile := lua.LFalse
	mode := os.FileMode(treeEntry.Mode)
	if treeEntry.Mode.IsFile() {
		isFile = lua.LTrue
	} else {
		// Force the directory bit so mode.String() renders with a "d"
		mode |= os.ModeDir
	}
	out.RawSetH(lua.LString("mode"), lua.LString(mode.String()))
	out.RawSetH(lua.LString("is_file"), isFile)
	out.RawSetH(lua.LString("hash"), lua.LString(treeEntry.Hash.String()))
	return out
}

// repoRefs pushes a table {branches = [...], tags = [...]} of ref summaries.
func repoRefs(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	refs, err := r.gs.RepoRefs(r.ns, r.name)
	if err != nil {
		panic(err)
	}
	branches := L.CreateTable(len(refs.Branches), 0)
	for _, ref := range refs.Branches {
		branches.Append(convertRefSummary(L, ref))
	}
	tags := L.CreateTable(len(refs.Tags), 0)
	for _, ref := range refs.Tags {
		tags.Append(convertRefSummary(L, ref))
	}
	out := L.CreateTable(0, 2)
	out.RawSetH(lua.LString("branches"), branches)
	out.RawSetH(lua.LString("tags"), tags)
	L.Push(out)
	return 1
}

// repoSummary pushes {readme = ..., commits = [...]} with at most the three
// most recent commits.
func repoSummary(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	summary, err := r.gs.RepoSummary(r.ns, r.name)
	if err != nil {
		panic(err)
	}
	// Only surface the three most recent commits
	if len(summary.Commits) > 3 {
		summary.Commits = summary.Commits[0:3]
	}
	commits := L.CreateTable(len(summary.Commits), 0)
	for _, c := range summary.Commits {
		commits.Append(convertCommit(L, c, false))
	}
	out := L.CreateTable(0, 2)
	out.RawSetH(lua.LString("readme"), lua.LString(summary.Readme))
	out.RawSetH(lua.LString("commits"), commits)
	L.Push(out)
	return 1
}

// repoLog pushes the full commit log of master as an array of commit tables.
func repoLog(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	commits, err := r.gs.RepoLog(r.ns, r.name)
	if err != nil {
		panic(err)
	}
	out := L.CreateTable(len(commits), 0)
	for _, c := range commits {
		out.Append(convertCommit(L, c, false))
	}
	L.Push(out)
	return 1
}

// repoTree pushes the entries of the master root tree as an array of tables.
func repoTree(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	tree, err := r.gs.RepoTree(r.ns, r.name)
	if err != nil {
		panic(err)
	}
	out := L.CreateTable(len(tree.Entries), 0)
	for i := range tree.Entries {
		out.Append(convertTreeEntry(L, &tree.Entries[i]))
	}
	L.Push(out)
	return 1
}
// repoGetTree looks up a tree by hex hash (arg 2) and pushes its entries.
func repoGetTree(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	tree, err := r.gs.RepoGetTree(r.ns, r.name, L.ToString(2))
	if err != nil {
		panic(err)
	}
	out := L.CreateTable(len(tree.Entries), 0)
	for i := range tree.Entries {
		out.Append(convertTreeEntry(L, &tree.Entries[i]))
	}
	L.Push(out)
	return 1
}

// repoGetCommit looks up a commit by hex hash (arg 2) and pushes it with
// full patch information.
func repoGetCommit(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	commit, err := r.gs.RepoCommit(r.ns, r.name, plumbing.NewHash(L.ToString(2)))
	if err != nil {
		panic(err)
	}
	L.Push(convertCommit(L, commit, true))
	return 1
}
// repoGetFile looks up a file blob by hex hash (arg 2) and pushes its
// converted table form.
func repoGetFile(L *lua.LState) int {
	r := checkRepo(L)
	if r == nil {
		return 0
	}
	file, err := r.gs.RepoGetFile(r.ns, r.name, plumbing.NewHash(L.ToString(2)))
	if err != nil {
		panic(err)
	}
	L.Push(convertFile(L, file))
	return 1
}

M pkg/server/server.go => pkg/server/server.go +1 -8
@@ 24,7 24,6 @@ import (
	docstoreLua "a4.io/blobstash/pkg/docstore/lua"
	"a4.io/blobstash/pkg/expvarserver"
	"a4.io/blobstash/pkg/filetree"
	"a4.io/blobstash/pkg/gitserver"
	"a4.io/blobstash/pkg/httputil"
	"a4.io/blobstash/pkg/hub"
	"a4.io/blobstash/pkg/js"


@@ 198,12 197,6 @@ func New(conf *config.Config) (*Server, error) {
	}
	docstore.Register(s.router.PathPrefix("/api/docstore").Subrouter(), basicAuth)

	git, err := gitserver.New(logger.New("app", "gitserver"), conf, kvstore, blobstore, hub)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize git server app: %v", err)
	}
	git.Register(s.router.PathPrefix("/api/git").Subrouter(), s.router, basicAuth)

	// Load the Lua config
	if _, err := os.Stat("blobstash.lua"); err == nil {
		if err := func() error {


@@ 228,7 221,7 @@ func New(conf *config.Config) (*Server, error) {
		return nil, err
	}

	apps, err := apps.New(logger.New("app", "apps"), conf, sess, wa, rootBlobstore, kvstore, filetree, docstore, git, hub, s.whitelistHosts)
	apps, err := apps.New(logger.New("app", "apps"), conf, sess, wa, rootBlobstore, kvstore, filetree, docstore, hub, s.whitelistHosts)
	if err != nil {
		return nil, fmt.Errorf("failed to initialize filetree app: %v", err)
	}

D vendor/github.com/src-d/go-oniguruma/.travis.yml => vendor/github.com/src-d/go-oniguruma/.travis.yml +0 -20
@@ 1,20 0,0 @@
dist: trusty
language: go
go:
  - '1.11.x'
  - '1.12.x'

env:
  global:
    - LD_LIBRARY_PATH="/usr/local/lib":${LD_LIBRARY_PATH}
    - GO111MODULE=on
    - ONIGURUMA_VERSION='6.9.1'

before_install: # install oniguruma manually as trusty has only ancient 5.x
  - sudo apt-get install -y dpkg # dpkg >= 1.17.5ubuntu5.8 fixes https://bugs.launchpad.net/ubuntu/+source/dpkg/+bug/1730627
  - wget "http://archive.ubuntu.com/ubuntu/pool/universe/libo/libonig/libonig5_${ONIGURUMA_VERSION}-1_amd64.deb"
  - sudo dpkg -i "libonig5_${ONIGURUMA_VERSION}-1_amd64.deb"
  - wget "http://archive.ubuntu.com/ubuntu/pool/universe/libo/libonig/libonig-dev_${ONIGURUMA_VERSION}-1_amd64.deb"
  - sudo dpkg -i "libonig-dev_${ONIGURUMA_VERSION}-1_amd64.deb"
script:
  - go test -v --cover -race

D vendor/github.com/src-d/go-oniguruma/LICENSE => vendor/github.com/src-d/go-oniguruma/LICENSE +0 -19
@@ 1,19 0,0 @@
Copyright (C) 2011 by Zhigang Chen

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

D vendor/github.com/src-d/go-oniguruma/README.md => vendor/github.com/src-d/go-oniguruma/README.md +0 -20
@@ 1,20 0,0 @@
## go-oniguruma
<a href="https://travis-ci.org/src-d/go-oniguruma"><img alt="Build Status" src="https://travis-ci.org/src-d/go-oniguruma.svg?branch=master" /></a>

This repository is a fork of [moovweb/rubex](https://github.com/moovweb/rubex/tree/go1) - a simple regular expression library (based on [oniguruma](https://github.com/kkos/oniguruma)) that supports Ruby's regex syntax.

The _rubex_ was originally created by Zhigang Chen (zhigang.chen@moovweb.com or zhigangc@gmail.com). It implements all the public functions of Go's Regexp package, except LiteralPrefix.

According to the benchmark tests in regexp, the library is 40% to 10X faster than Regexp on all but one test. Unlike Go's regexp, this library supports named capture groups and also allows `"\\1"` and `"\\k<name>"` in replacement strings.
The library calls the _oniguruma_ regex library for regex pattern searching. All replacement code is done in Go.

### Install all (_oniguruma_ and _rubex_):
```sh
# linux (debian/ubuntu/...)
sudo apt-get install libonig-dev

# osx (homebrew)
brew install oniguruma

go install -i .
```

D vendor/github.com/src-d/go-oniguruma/chelper.c => vendor/github.com/src-d/go-oniguruma/chelper.c +0 -184
@@ 1,184 0,0 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifdef BENCHMARK_CHELP
#include <sys/time.h>
#endif
#include "chelper.h"

/*
 * NewOnigRegex compiles `pattern` (of `pattern_length` bytes) into an
 * oniguruma regex with the given option flags and encoding.
 *
 * Returns ONIG_NORMAL on success. On failure a NUL-terminated message is
 * written into *error_buffer. *error_info and *error_buffer are always
 * malloc'd here and *region comes from onig_region_new(), so the caller
 * owns all three regardless of the return value.
 */
int NewOnigRegex( char *pattern, int pattern_length, int option,
                  OnigRegex *regex, OnigRegion **region, OnigEncoding *encoding, OnigErrorInfo **error_info, char **error_buffer) {
    int ret = ONIG_NORMAL;
    int error_msg_len = 0;

    OnigUChar *pattern_start = (OnigUChar *) pattern;
    OnigUChar *pattern_end = (OnigUChar *) (pattern + pattern_length);

    *error_info = (OnigErrorInfo *) malloc(sizeof(OnigErrorInfo));
    memset(*error_info, 0, sizeof(OnigErrorInfo));

    onig_initialize_encoding(*encoding);

    *error_buffer = (char*) malloc(ONIG_MAX_ERROR_MESSAGE_LEN * sizeof(char));

    memset(*error_buffer, 0, ONIG_MAX_ERROR_MESSAGE_LEN * sizeof(char));

    *region = onig_region_new();

    ret = onig_new(regex, pattern_start, pattern_end, (OnigOptionType)(option), *encoding, OnigDefaultSyntax, *error_info);

    if (ret != ONIG_NORMAL) {
        /* Translate the error code into a bounded, NUL-terminated message */
        error_msg_len = onig_error_code_to_str((unsigned char*)(*error_buffer), ret, *error_info);
        if (error_msg_len >= ONIG_MAX_ERROR_MESSAGE_LEN) {
            error_msg_len = ONIG_MAX_ERROR_MESSAGE_LEN - 1;
        }
        (*error_buffer)[error_msg_len] = '\0';
    }
    return ret;
}

/*
 * SearchOnigRegex runs onig_search over [str, str+str_length) starting at
 * byte `offset`. Returns the match position (>= 0) or a negative oniguruma
 * error/mismatch code.
 *
 * On success, if `captures` is non-NULL, the byte ranges of every capture
 * group are copied pairwise (begin, end) into it and their count into
 * *numCaptures. On failure, if `error_buffer` is non-NULL, a NUL-terminated
 * message is written into it.
 */
int SearchOnigRegex( void *str, int str_length, int offset, int option,
                  OnigRegex regex, OnigRegion *region, OnigErrorInfo *error_info, char *error_buffer, int *captures, int *numCaptures) {
    int ret = ONIG_MISMATCH;
    int error_msg_len = 0;
#ifdef BENCHMARK_CHELP
    struct timeval tim1, tim2;
    long t;
#endif

    OnigUChar *str_start = (OnigUChar *) str;
    OnigUChar *str_end = (OnigUChar *) (str_start + str_length);
    OnigUChar *search_start = (OnigUChar *)(str_start + offset);
    OnigUChar *search_end = str_end;

#ifdef BENCHMARK_CHELP
    gettimeofday(&tim1, NULL);
#endif

    ret = onig_search(regex, str_start, str_end, search_start, search_end, region, option);
    if (ret < 0 && error_buffer != NULL) {
        /* Bounded, NUL-terminated error message for the Go side */
        error_msg_len = onig_error_code_to_str((unsigned char*)(error_buffer), ret, error_info);
        if (error_msg_len >= ONIG_MAX_ERROR_MESSAGE_LEN) {
            error_msg_len = ONIG_MAX_ERROR_MESSAGE_LEN - 1;
        }
        error_buffer[error_msg_len] = '\0';
    }
    else if (captures != NULL) {
        /* Flatten region begin/end pairs into the caller's array */
        int i;
        int count = 0;
        for (i = 0; i < region->num_regs; i++) {
            captures[2*count] = region->beg[i];
            captures[2*count+1] = region->end[i];
            count ++;
        }
        *numCaptures = count;
    }

#ifdef BENCHMARK_CHELP
    gettimeofday(&tim2, NULL);
    t = (tim2.tv_sec - tim1.tv_sec) * 1000000 + tim2.tv_usec - tim1.tv_usec;
    printf("%ld microseconds elapsed\n", t);
#endif
    return ret;
}

int MatchOnigRegex(void *str, int str_length, int offset, int option,
                  OnigRegex regex, OnigRegion *region) {
    int ret = ONIG_MISMATCH;
    int error_msg_len = 0;
#ifdef BENCHMARK_CHELP
    struct timeval tim1, tim2;
    long t;
#endif

    OnigUChar *str_start = (OnigUChar *) str;
    OnigUChar *str_end = (OnigUChar *) (str_start + str_length);
    OnigUChar *search_start = (OnigUChar *)(str_start + offset);

#ifdef BENCHMARK_CHELP
    gettimeofday(&tim1, NULL);
#endif
    ret = onig_match(regex, str_start, str_end, search_start, region, option);
#ifdef BENCHMARK_CHELP
    gettimeofday(&tim2, NULL);
    t = (tim2.tv_sec - tim1.tv_sec) * 1000000 + tim2.tv_usec - tim1.tv_usec;
    printf("%ld microseconds elapsed\n", t);
#endif
    return ret;
}

int LookupOnigCaptureByName(char *name, int name_length,
                  OnigRegex regex, OnigRegion *region) {
    int ret = ONIGERR_UNDEFINED_NAME_REFERENCE;
#ifdef BENCHMARK_CHELP
    struct timeval tim1, tim2;
    long t;
#endif
    OnigUChar *name_start = (OnigUChar *) name;
    OnigUChar *name_end = (OnigUChar *) (name_start + name_length);
#ifdef BENCHMARK_CHELP
    gettimeofday(&tim1, NULL);
#endif
    ret = onig_name_to_backref_number(regex, name_start, name_end, region);
#ifdef BENCHMARK_CHELP
    gettimeofday(&tim2, NULL);
    t = (tim2.tv_sec - tim1.tv_sec) * 1000000 + tim2.tv_usec - tim1.tv_usec;
    printf("%ld microseconds elapsed\n", t);
#endif
    return ret;
}

typedef struct {
    char *nameBuffer;
    int bufferOffset;
    int bufferSize;
    int *numbers;
    int numIndex;
} group_info_t;

int name_callback(const UChar* name, const UChar* name_end,
          int ngroup_num, int* group_nums,
          regex_t* reg, void* arg)
{
    int nameLen, offset, newOffset;
    group_info_t *groupInfo;

    groupInfo = (group_info_t*) arg;
    offset = groupInfo->bufferOffset;
    nameLen = name_end - name;
    newOffset = offset + nameLen;

    //if there are already names, add a ";"
    if (offset > 0) {
        newOffset += 1;
    }

    if (newOffset <= groupInfo->bufferSize) {
        if (offset > 0) {
            groupInfo->nameBuffer[offset] = ';';
            offset += 1;
        }
        memcpy(&groupInfo->nameBuffer[offset], name, nameLen);
    }
    groupInfo->bufferOffset = newOffset;
    if (ngroup_num > 0) {
        groupInfo->numbers[groupInfo->numIndex] = group_nums[ngroup_num-1];
    } else {
        groupInfo->numbers[groupInfo->numIndex] = -1;
    }
    groupInfo->numIndex += 1;
    return 0;  /* 0: continue */
}

int GetCaptureNames(OnigRegex reg, void *buffer, int bufferSize, int* groupNumbers) {
    int ret;
    group_info_t groupInfo;
    groupInfo.nameBuffer = (char*)buffer;
    groupInfo.bufferOffset = 0;
    groupInfo.bufferSize = bufferSize;
    groupInfo.numbers = groupNumbers;
    groupInfo.numIndex = 0;
    onig_foreach_name(reg, name_callback, (void* )&groupInfo);
    return groupInfo.bufferOffset;
}


D vendor/github.com/src-d/go-oniguruma/chelper.h => vendor/github.com/src-d/go-oniguruma/chelper.h +0 -14
@@ 1,14 0,0 @@
#include <oniguruma.h>

extern int NewOnigRegex( char *pattern, int pattern_length, int option,
                                  OnigRegex *regex, OnigRegion **region, OnigEncoding *encoding, OnigErrorInfo **error_info, char **error_buffer);

extern int SearchOnigRegex( void *str, int str_length, int offset, int option,
                                  OnigRegex regex, OnigRegion *region, OnigErrorInfo *error_info, char *error_buffer, int *captures, int *numCaptures);

extern int MatchOnigRegex( void *str, int str_length, int offset, int option,
                  OnigRegex regex, OnigRegion *region);

extern int LookupOnigCaptureByName(char *name, int name_length, OnigRegex regex, OnigRegion *region);

extern int GetCaptureNames(OnigRegex regex, void *buffer, int bufferSize, int* groupNumbers);

D vendor/github.com/src-d/go-oniguruma/constants.go => vendor/github.com/src-d/go-oniguruma/constants.go +0 -27
@@ 1,27 0,0 @@
package rubex

const (
	ONIG_OPTION_DEFAULT = ONIG_OPTION_NONE
	/* options */
	ONIG_OPTION_NONE               = 0
	ONIG_OPTION_IGNORECASE         = 1
	ONIG_OPTION_EXTEND             = (ONIG_OPTION_IGNORECASE << 1)
	ONIG_OPTION_MULTILINE          = (ONIG_OPTION_EXTEND << 1)
	ONIG_OPTION_SINGLELINE         = (ONIG_OPTION_MULTILINE << 1)
	ONIG_OPTION_FIND_LONGEST       = (ONIG_OPTION_SINGLELINE << 1)
	ONIG_OPTION_FIND_NOT_EMPTY     = (ONIG_OPTION_FIND_LONGEST << 1)
	ONIG_OPTION_NEGATE_SINGLELINE  = (ONIG_OPTION_FIND_NOT_EMPTY << 1)
	ONIG_OPTION_DONT_CAPTURE_GROUP = (ONIG_OPTION_NEGATE_SINGLELINE << 1)
	ONIG_OPTION_CAPTURE_GROUP      = (ONIG_OPTION_DONT_CAPTURE_GROUP << 1)
	/* options (search time) */
	ONIG_OPTION_NOTBOL       = (ONIG_OPTION_CAPTURE_GROUP << 1)
	ONIG_OPTION_NOTEOL       = (ONIG_OPTION_NOTBOL << 1)
	ONIG_OPTION_POSIX_REGION = (ONIG_OPTION_NOTEOL << 1)
	ONIG_OPTION_MAXBIT       = ONIG_OPTION_POSIX_REGION /* limit */

	ONIG_NORMAL   = 0
	ONIG_MISMATCH = -1

	ONIG_MISMATCH_STR                = "mismatch"
	ONIGERR_UNDEFINED_NAME_REFERENCE = -217
)

D vendor/github.com/src-d/go-oniguruma/go.mod => vendor/github.com/src-d/go-oniguruma/go.mod +0 -1
@@ 1,1 0,0 @@
module github.com/src-d/go-oniguruma

D vendor/github.com/src-d/go-oniguruma/quotemeta.go => vendor/github.com/src-d/go-oniguruma/quotemeta.go +0 -36
@@ 1,36 0,0 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package regexp implements a simple regular expression library.

// QuoteMeta func is copied here to avoid linking the entire Regexp library.

package rubex

func special(c int) bool {
	for _, r := range `\.+*?()|[]^$` {
		if c == int(r) {
			return true
		}
	}
	return false
}

// QuoteMeta returns a string that quotes all regular expression metacharacters
// inside the argument text; the returned string is a regular expression matching
// the literal text.  For example, QuoteMeta(`[foo]`) returns `\[foo\]`.
func QuoteMeta(s string) string {
	b := make([]byte, 2*len(s))

	// A byte loop is correct because all metacharacters are ASCII.
	j := 0
	for i := 0; i < len(s); i++ {
		if special(int(s[i])) {
			b[j] = '\\'
			j++
		}
		b[j] = s[i]
		j++
	}
	return string(b[0:j])
}

D vendor/github.com/src-d/go-oniguruma/regex.go => vendor/github.com/src-d/go-oniguruma/regex.go +0 -668
@@ 1,668 0,0 @@
package rubex

/*
#cgo CFLAGS: -I/usr/local/include
#cgo LDFLAGS: -L/usr/local/lib -lonig
#include <stdlib.h>
#include <oniguruma.h>
#include "chelper.h"
*/
import "C"

import (
	"bytes"
	"errors"
	"fmt"
	"io"
	"log"
	"runtime"
	"strconv"
	"sync"
	"unicode/utf8"
	"unsafe"
)

type strRange []int

const numMatchStartSize = 4
const numReadBufferStartSize = 256

var mutex sync.Mutex

type MatchData struct {
	count   int
	indexes [][]int32
}

type NamedGroupInfo map[string]int

type Regexp struct {
	pattern        string
	regex          C.OnigRegex
	region         *C.OnigRegion
	encoding       C.OnigEncoding
	errorInfo      *C.OnigErrorInfo
	errorBuf       *C.char
	matchData      *MatchData
	namedGroupInfo NamedGroupInfo
}

// NewRegexp creates and initializes a new Regexp with the given pattern and option.
func NewRegexp(pattern string, option int) (re *Regexp, err error) {
	return initRegexp(&Regexp{pattern: pattern, encoding: C.ONIG_ENCODING_UTF8}, option)
}

// NewRegexpASCII is equivalent to NewRegexp, but with the encoding restricted to ASCII.
func NewRegexpASCII(pattern string, option int) (re *Regexp, err error) {
	return initRegexp(&Regexp{pattern: pattern, encoding: C.ONIG_ENCODING_ASCII}, option)
}

func initRegexp(re *Regexp, option int) (*Regexp, error) {
	var err error
	patternCharPtr := C.CString(re.pattern)
	defer C.free(unsafe.Pointer(patternCharPtr))
	mutex.Lock()
	defer mutex.Unlock()
	errorCode := C.NewOnigRegex(patternCharPtr, C.int(len(re.pattern)), C.int(option), &re.regex, &re.region, &re.encoding, &re.errorInfo, &re.errorBuf)
	if errorCode != C.ONIG_NORMAL {
		err = errors.New(C.GoString(re.errorBuf))
	} else {
		err = nil
		numCapturesInPattern := int(C.onig_number_of_captures(re.regex)) + 1
		re.matchData = &MatchData{}
		re.matchData.indexes = make([][]int32, numMatchStartSize)
		for i := 0; i < numMatchStartSize; i++ {
			re.matchData.indexes[i] = make([]int32, numCapturesInPattern*2)
		}
		re.namedGroupInfo = re.getNamedGroupInfo()
		runtime.SetFinalizer(re, (*Regexp).Free)
	}
	return re, err
}

func Compile(str string) (*Regexp, error) {
	return NewRegexp(str, ONIG_OPTION_DEFAULT)
}

func MustCompile(str string) *Regexp {
	regexp, error := NewRegexp(str, ONIG_OPTION_DEFAULT)
	if error != nil {
		panic("regexp: compiling " + str + ": " + error.Error())
	}
	return regexp
}

func CompileWithOption(str string, option int) (*Regexp, error) {
	return NewRegexp(str, option)
}

func MustCompileWithOption(str string, option int) *Regexp {
	regexp, error := NewRegexp(str, option)
	if error != nil {
		panic("regexp: compiling " + str + ": " + error.Error())
	}
	return regexp
}

// MustCompileASCII is equivalent to MustCompile, but with the encoding restricted to ASCII.
func MustCompileASCII(str string) *Regexp {
	regexp, error := NewRegexpASCII(str, ONIG_OPTION_DEFAULT)
	if error != nil {
		panic("regexp: compiling " + str + ": " + error.Error())
	}
	return regexp
}

func (re *Regexp) Free() {
	mutex.Lock()
	if re.regex != nil {
		C.onig_free(re.regex)
		re.regex = nil
	}
	if re.region != nil {
		C.onig_region_free(re.region, 1)
		re.region = nil
	}
	mutex.Unlock()
	if re.errorInfo != nil {
		C.free(unsafe.Pointer(re.errorInfo))
		re.errorInfo = nil
	}
	if re.errorBuf != nil {
		C.free(unsafe.Pointer(re.errorBuf))
		re.errorBuf = nil
	}
}

func (re *Regexp) getNamedGroupInfo() (namedGroupInfo NamedGroupInfo) {
	numNamedGroups := int(C.onig_number_of_names(re.regex))
	//when any named capture exisits, there is no numbered capture even if there are unnamed captures
	if numNamedGroups > 0 {
		namedGroupInfo = make(map[string]int)
		//try to get the names
		bufferSize := len(re.pattern) * 2
		nameBuffer := make([]byte, bufferSize)
		groupNumbers := make([]int32, numNamedGroups)
		bufferPtr := unsafe.Pointer(&nameBuffer[0])
		numbersPtr := unsafe.Pointer(&groupNumbers[0])
		length := int(C.GetCaptureNames(re.regex, bufferPtr, (C.int)(bufferSize), (*C.int)(numbersPtr)))
		if length > 0 {
			namesAsBytes := bytes.Split(nameBuffer[:length], ([]byte)(";"))
			if len(namesAsBytes) != numNamedGroups {
				log.Fatalf("the number of named groups (%d) does not match the number names found (%d)\n", numNamedGroups, len(namesAsBytes))
			}
			for i, nameAsBytes := range namesAsBytes {
				name := string(nameAsBytes)
				namedGroupInfo[name] = int(groupNumbers[i])
			}
		} else {
			log.Fatalf("could not get the capture group names from %q", re.String())
		}
	}
	return
}

func (re *Regexp) groupNameToId(name string) (id int) {
	if re.namedGroupInfo == nil {
		id = ONIGERR_UNDEFINED_NAME_REFERENCE
	} else {
		id = re.namedGroupInfo[name]
	}
	return
}

func (re *Regexp) processMatch(numCaptures int) (match []int32) {
	if numCaptures <= 0 {
		panic("cannot have 0 captures when processing a match")
	}
	matchData := re.matchData
	return matchData.indexes[matchData.count][:numCaptures*2]
}

func (re *Regexp) ClearMatchData() {
	matchData := re.matchData
	matchData.count = 0
}

func (re *Regexp) find(b []byte, n int, offset int) (match []int) {
	if n == 0 {
		b = []byte{0}
	}
	ptr := unsafe.Pointer(&b[0])
	matchData := re.matchData
	capturesPtr := unsafe.Pointer(&(matchData.indexes[matchData.count][0]))
	numCaptures := int32(0)
	numCapturesPtr := unsafe.Pointer(&numCaptures)
	pos := int(C.SearchOnigRegex((ptr), C.int(n), C.int(offset), C.int(ONIG_OPTION_DEFAULT), re.regex, re.region, re.errorInfo, (*C.char)(nil), (*C.int)(capturesPtr), (*C.int)(numCapturesPtr)))
	if pos >= 0 {
		if numCaptures <= 0 {
			panic("cannot have 0 captures when processing a match")
		}
		match2 := matchData.indexes[matchData.count][:numCaptures*2]
		match = make([]int, len(match2))
		for i := range match2 {
			match[i] = int(match2[i])
		}
		numCapturesInPattern := int32(C.onig_number_of_captures(re.regex)) + 1
		if numCapturesInPattern != numCaptures {
			log.Fatalf("expected %d captures but got %d\n", numCapturesInPattern, numCaptures)
		}
	}
	return
}

func getCapture(b []byte, beg int, end int) []byte {
	if beg < 0 || end < 0 {
		return nil
	}
	return b[beg:end]
}

func (re *Regexp) match(b []byte, n int, offset int) bool {
	re.ClearMatchData()
	if n == 0 {
		b = []byte{0}
	}
	ptr := unsafe.Pointer(&b[0])
	pos := int(C.SearchOnigRegex((ptr), C.int(n), C.int(offset), C.int(ONIG_OPTION_DEFAULT), re.regex, re.region, re.errorInfo, (*C.char)(nil), (*C.int)(nil), (*C.int)(nil)))
	return pos >= 0
}

func (re *Regexp) findAll(b []byte, n int) (matches [][]int) {
	re.ClearMatchData()

	if n < 0 {
		n = len(b)
	}
	matchData := re.matchData
	offset := 0
	for offset <= n {
		if matchData.count >= len(matchData.indexes) {
			length := len(matchData.indexes[0])
			matchData.indexes = append(matchData.indexes, make([]int32, length))
		}
		if match := re.find(b, n, offset); len(match) > 0 {
			matchData.count += 1
			//move offset to the ending index of the current match and prepare to find the next non-overlapping match
			offset = match[1]
			//if match[0] == match[1], it means the current match does not advance the search. we need to exit the loop to avoid getting stuck here.
			if match[0] == match[1] {
				if offset < n && offset >= 0 {
					//there are more bytes, so move offset by a word
					_, width := utf8.DecodeRune(b[offset:])
					offset += width
				} else {
					//search is over, exit loop
					break
				}
			}
		} else {
			break
		}
	}
	matches2 := matchData.indexes[:matchData.count]
	matches = make([][]int, len(matches2))
	for i, v := range matches2 {
		matches[i] = make([]int, len(v))
		for j, v2 := range v {
			matches[i][j] = int(v2)
		}
	}
	return
}

func (re *Regexp) FindIndex(b []byte) []int {
	re.ClearMatchData()
	match := re.find(b, len(b), 0)
	if len(match) == 0 {
		return nil
	}
	return match[:2]
}

func (re *Regexp) Find(b []byte) []byte {
	loc := re.FindIndex(b)
	if loc == nil {
		return nil
	}
	return getCapture(b, loc[0], loc[1])
}

func (re *Regexp) FindString(s string) string {
	b := []byte(s)
	mb := re.Find(b)
	if mb == nil {
		return ""
	}
	return string(mb)
}

func (re *Regexp) FindStringIndex(s string) []int {
	b := []byte(s)
	return re.FindIndex(b)
}

func (re *Regexp) FindAllIndex(b []byte, n int) [][]int {
	matches := re.findAll(b, n)
	if len(matches) == 0 {
		return nil
	}
	return matches
}

func (re *Regexp) FindAll(b []byte, n int) [][]byte {
	matches := re.FindAllIndex(b, n)
	if matches == nil {
		return nil
	}
	matchBytes := make([][]byte, 0, len(matches))
	for _, match := range matches {
		matchBytes = append(matchBytes, getCapture(b, match[0], match[1]))
	}
	return matchBytes
}

func (re *Regexp) FindAllString(s string, n int) []string {
	b := []byte(s)
	matches := re.FindAllIndex(b, n)
	if matches == nil {
		return nil
	}
	matchStrings := make([]string, 0, len(matches))
	for _, match := range matches {
		m := getCapture(b, match[0], match[1])
		if m == nil {
			matchStrings = append(matchStrings, "")
		} else {
			matchStrings = append(matchStrings, string(m))
		}
	}
	return matchStrings

}

func (re *Regexp) FindAllStringIndex(s string, n int) [][]int {
	b := []byte(s)
	return re.FindAllIndex(b, n)
}

func (re *Regexp) findSubmatchIndex(b []byte) (match []int) {
	re.ClearMatchData()
	match = re.find(b, len(b), 0)
	return
}

func (re *Regexp) FindSubmatchIndex(b []byte) []int {
	match := re.findSubmatchIndex(b)
	if len(match) == 0 {
		return nil
	}
	return match
}

func (re *Regexp) FindSubmatch(b []byte) [][]byte {
	match := re.findSubmatchIndex(b)
	if match == nil {
		return nil
	}
	length := len(match) / 2
	if length == 0 {
		return nil
	}
	results := make([][]byte, 0, length)
	for i := 0; i < length; i++ {
		results = append(results, getCapture(b, match[2*i], match[2*i+1]))
	}
	return results
}

func (re *Regexp) FindStringSubmatch(s string) []string {
	b := []byte(s)
	match := re.findSubmatchIndex(b)
	if match == nil {
		return nil
	}
	length := len(match) / 2
	if length == 0 {
		return nil
	}

	results := make([]string, 0, length)
	for i := 0; i < length; i++ {
		cap := getCapture(b, match[2*i], match[2*i+1])
		if cap == nil {
			results = append(results, "")
		} else {
			results = append(results, string(cap))
		}
	}
	return results
}

func (re *Regexp) FindStringSubmatchIndex(s string) []int {
	b := []byte(s)
	return re.FindSubmatchIndex(b)
}

func (re *Regexp) FindAllSubmatchIndex(b []byte, n int) [][]int {
	matches := re.findAll(b, n)
	if len(matches) == 0 {
		return nil
	}
	return matches
}

func (re *Regexp) FindAllSubmatch(b []byte, n int) [][][]byte {
	matches := re.findAll(b, n)
	if len(matches) == 0 {
		return nil
	}
	allCapturedBytes := make([][][]byte, 0, len(matches))
	for _, match := range matches {
		length := len(match) / 2
		capturedBytes := make([][]byte, 0, length)
		for i := 0; i < length; i++ {
			capturedBytes = append(capturedBytes, getCapture(b, match[2*i], match[2*i+1]))
		}
		allCapturedBytes = append(allCapturedBytes, capturedBytes)
	}

	return allCapturedBytes
}

func (re *Regexp) FindAllStringSubmatch(s string, n int) [][]string {
	b := []byte(s)
	matches := re.findAll(b, n)
	if len(matches) == 0 {
		return nil
	}
	allCapturedStrings := make([][]string, 0, len(matches))
	for _, match := range matches {
		length := len(match) / 2
		capturedStrings := make([]string, 0, length)
		for i := 0; i < length; i++ {
			cap := getCapture(b, match[2*i], match[2*i+1])
			if cap == nil {
				capturedStrings = append(capturedStrings, "")
			} else {
				capturedStrings = append(capturedStrings, string(cap))
			}
		}
		allCapturedStrings = append(allCapturedStrings, capturedStrings)
	}
	return allCapturedStrings
}

func (re *Regexp) FindAllStringSubmatchIndex(s string, n int) [][]int {
	b := []byte(s)
	return re.FindAllSubmatchIndex(b, n)
}

func (re *Regexp) Match(b []byte) bool {
	return re.match(b, len(b), 0)
}

func (re *Regexp) MatchString(s string) bool {
	b := []byte(s)
	return re.Match(b)
}

func (re *Regexp) NumSubexp() int {
	return (int)(C.onig_number_of_captures(re.regex))
}

func (re *Regexp) getNamedCapture(name []byte, capturedBytes [][]byte) []byte {
	nameStr := string(name)
	capNum := re.groupNameToId(nameStr)
	if capNum < 0 || capNum >= len(capturedBytes) {
		panic(fmt.Sprintf("capture group name (%q) has error\n", nameStr))
	}
	return capturedBytes[capNum]
}

func (re *Regexp) getNumberedCapture(num int, capturedBytes [][]byte) []byte {
	//when named capture groups exist, numbered capture groups returns ""
	if re.namedGroupInfo == nil && num <= (len(capturedBytes)-1) && num >= 0 {
		return capturedBytes[num]
	}
	return ([]byte)("")
}

func fillCapturedValues(repl []byte, _ []byte, capturedBytes map[string][]byte) []byte {
	replLen := len(repl)
	newRepl := make([]byte, 0, replLen*3)
	inEscapeMode := false
	inGroupNameMode := false
	groupName := make([]byte, 0, replLen)
	for index := 0; index < replLen; index += 1 {
		ch := repl[index]
		if inGroupNameMode && ch == byte('<') {
		} else if inGroupNameMode && ch == byte('>') {
			inGroupNameMode = false
			groupNameStr := string(groupName)
			capBytes := capturedBytes[groupNameStr]
			newRepl = append(newRepl, capBytes...)
			groupName = groupName[:0] //reset the name
		} else if inGroupNameMode {
			groupName = append(groupName, ch)
		} else if inEscapeMode && ch <= byte('9') && byte('1') <= ch {
			capNumStr := string(ch)
			capBytes := capturedBytes[capNumStr]
			newRepl = append(newRepl, capBytes...)
		} else if inEscapeMode && ch == byte('k') && (index+1) < replLen && repl[index+1] == byte('<') {
			inGroupNameMode = true
			inEscapeMode = false
			index += 1 //bypass the next char '<'
		} else if inEscapeMode {
			newRepl = append(newRepl, '\\')
			newRepl = append(newRepl, ch)
		} else if ch != '\\' {
			newRepl = append(newRepl, ch)
		}
		if ch == byte('\\') || inEscapeMode {
			inEscapeMode = !inEscapeMode
		}
	}
	return newRepl
}

func (re *Regexp) replaceAll(src, repl []byte, replFunc func([]byte, []byte, map[string][]byte) []byte) []byte {
	srcLen := len(src)
	matches := re.findAll(src, srcLen)
	if len(matches) == 0 {
		return src
	}
	dest := make([]byte, 0, srcLen)
	for i, match := range matches {
		length := len(match) / 2
		capturedBytes := make(map[string][]byte)
		if re.namedGroupInfo == nil {
			for j := 0; j < length; j++ {
				capturedBytes[strconv.Itoa(j)] = getCapture(src, match[2*j], match[2*j+1])
			}
		} else {
			for name, j := range re.namedGroupInfo {
				capturedBytes[name] = getCapture(src, match[2*j], match[2*j+1])
			}
		}
		matchBytes := getCapture(src, match[0], match[1])
		newRepl := replFunc(repl, matchBytes, capturedBytes)
		prevEnd := 0
		if i > 0 {
			prevMatch := matches[i-1][:2]
			prevEnd = prevMatch[1]
		}
		if match[0] > prevEnd && prevEnd >= 0 && match[0] <= srcLen {
			dest = append(dest, src[prevEnd:match[0]]...)
		}
		dest = append(dest, newRepl...)
	}
	lastEnd := matches[len(matches)-1][1]
	if lastEnd < srcLen && lastEnd >= 0 {
		dest = append(dest, src[lastEnd:]...)
	}
	return dest
}

func (re *Regexp) ReplaceAll(src, repl []byte) []byte {
	return re.replaceAll(src, repl, fillCapturedValues)
}

func (re *Regexp) ReplaceAllFunc(src []byte, repl func([]byte) []byte) []byte {
	return re.replaceAll(src, []byte(""), func(_ []byte, matchBytes []byte, _ map[string][]byte) []byte {
		return repl(matchBytes)
	})
}

func (re *Regexp) ReplaceAllString(src, repl string) string {
	return string(re.ReplaceAll([]byte(src), []byte(repl)))
}

func (re *Regexp) ReplaceAllStringFunc(src string, repl func(string) string) string {
	srcB := []byte(src)
	destB := re.replaceAll(srcB, []byte(""), func(_ []byte, matchBytes []byte, _ map[string][]byte) []byte {
		return []byte(repl(string(matchBytes)))
	})
	return string(destB)
}

func (re *Regexp) String() string {
	return re.pattern
}

func grow_buffer(b []byte, offset int, n int) []byte {
	if offset+n > cap(b) {
		buf := make([]byte, 2*cap(b)+n)
		copy(buf, b[:offset])
		return buf
	}
	return b
}

func fromReader(r io.RuneReader) []byte {
	b := make([]byte, numReadBufferStartSize)
	offset := 0
	var err error = nil
	for err == nil {
		rune, runeWidth, err := r.ReadRune()
		if err == nil {
			b = grow_buffer(b, offset, runeWidth)
			writeWidth := utf8.EncodeRune(b[offset:], rune)
			if runeWidth != writeWidth {
				panic("reading rune width not equal to the written rune width")
			}
			offset += writeWidth
		} else {
			break
		}
	}
	return b[:offset]
}

func (re *Regexp) FindReaderIndex(r io.RuneReader) []int {
	b := fromReader(r)
	return re.FindIndex(b)
}

func (re *Regexp) FindReaderSubmatchIndex(r io.RuneReader) []int {
	b := fromReader(r)
	return re.FindSubmatchIndex(b)
}

func (re *Regexp) MatchReader(r io.RuneReader) bool {
	b := fromReader(r)
	return re.Match(b)
}

func (re *Regexp) LiteralPrefix() (prefix string, complete bool) {
	//no easy way to implement this
	return "", false
}

func MatchString(pattern string, s string) (matched bool, error error) {
	re, err := Compile(pattern)
	if err != nil {
		return false, err
	}
	return re.MatchString(s), nil
}

func (re *Regexp) Gsub(src, repl string) string {
	srcBytes := ([]byte)(src)
	replBytes := ([]byte)(repl)
	replaced := re.replaceAll(srcBytes, replBytes, fillCapturedValues)
	return string(replaced)
}

func (re *Regexp) GsubFunc(src string, replFunc func(string, map[string]string) string) string {
	srcBytes := ([]byte)(src)
	replaced := re.replaceAll(srcBytes, nil, func(_ []byte, matchBytes []byte, capturedBytes map[string][]byte) []byte {
		capturedStrings := make(map[string]string)
		for name, capBytes := range capturedBytes {
			capturedStrings[name] = string(capBytes)
		}
		matchString := string(matchBytes)
		return ([]byte)(replFunc(matchString, capturedStrings))
	})
	return string(replaced)
}

D vendor/github.com/toqueteos/trie/LICENSE.txt => vendor/github.com/toqueteos/trie/LICENSE.txt +0 -22
@@ 1,22 0,0 @@
Copyright (c) 2013 Caleb Spare

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

D vendor/github.com/toqueteos/trie/README.md => vendor/github.com/toqueteos/trie/README.md +0 -7
@@ 1,7 0,0 @@
# Trie

[![GoDoc](http://godoc.org/github.com/toqueteos/trie?status.png)](http://godoc.org/github.com/toqueteos/trie)

This is a fork of https://github.com/cespare/go-trie that adds the `PrefixIndex` method.

It's required for https://github.com/toqueteos/substring.

D vendor/github.com/toqueteos/trie/go.mod => vendor/github.com/toqueteos/trie/go.mod +0 -1
@@ 1,1 0,0 @@
module github.com/toqueteos/trie

D vendor/github.com/toqueteos/trie/trie.go => vendor/github.com/toqueteos/trie/trie.go +0 -102
@@ 1,102 0,0 @@
// Package trie is an implementation of a trie (prefix tree) data structure over byte slices. It provides a
// small and simple API for usage as a set as well as a 'Node' API for walking the trie.
package trie

// A Trie is a a prefix tree.
type Trie struct {
	root *Node
}

// New construct a new, empty Trie ready for use.
func New() *Trie {
	return &Trie{
		root: &Node{},
	}
}

// Insert puts b into the Trie. It returns true if the element was not previously in t.
func (t *Trie) Insert(b []byte) bool {
	n := t.root
	for _, c := range b {
		next, ok := n.Walk(c)
		if !ok {
			next = &Node{}
			n.branches[c] = next
			n.hasChildren = true
		}
		n = next
	}
	if n.terminal {
		return false
	}
	n.terminal = true
	return true
}

// Contains checks t for membership of b.
func (t *Trie) Contains(b []byte) bool {
	n := t.root
	for _, c := range b {
		next, ok := n.Walk(c)
		if !ok {
			return false
		}
		n = next
	}
	return n.terminal
}

// PrefixIndex walks through `b` until a prefix is found (terminal node) or it is exhausted.
func (t *Trie) PrefixIndex(b []byte) int {
	var idx int
	n := t.root
	for _, c := range b {
		next, ok := n.Walk(c)
		if !ok {
			return -1
		}
		if next.terminal {
			return idx
		}
		n = next
		idx++
	}
	if !n.terminal {
		idx = -1
	}
	return idx
}

// Root returns the root node of a Trie. A valid Trie (i.e., constructed with New), always has a non-nil root
// node.
func (t *Trie) Root() *Node {
	return t.root
}

// A Node represents a logical vertex in the trie structure.
type Node struct {
	branches    [256]*Node
	terminal    bool
	hasChildren bool
}

// Walk returns the node reached along edge c, if one exists. The ok value indicates whether such a node
// exist.
func (n *Node) Walk(c byte) (next *Node, ok bool) {
	next = n.branches[int(c)]
	return next, (next != nil)
}

// Terminal indicates whether n is terminal in the trie (that is, whether the path from the root to n
// represents an element in the set). For instance, if the root node is terminal, then []byte{} is in the
// trie.
func (n *Node) Terminal() bool {
	return n.terminal
}

// Leaf indicates whether n is a leaf node in the trie (that is, whether it has children). A leaf node must be
// terminal (else it would not exist). Logically, if n is a leaf node then the []byte represented by the path
// from the root to n is not a proper prefix of any element of the trie.
func (n *Node) Leaf() bool {
	return !n.hasChildren
}

D vendor/github.com/xeonx/timeago/.travis.yml => vendor/github.com/xeonx/timeago/.travis.yml +0 -1
@@ 1,1 0,0 @@
language: go
\ No newline at end of file

D vendor/github.com/xeonx/timeago/LICENSE => vendor/github.com/xeonx/timeago/LICENSE +0 -20
@@ 1,20 0,0 @@
The MIT License (MIT)

Copyright (c) 2013 Simon HEGE

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

D vendor/github.com/xeonx/timeago/README.md => vendor/github.com/xeonx/timeago/README.md +0 -31
@@ 1,31 0,0 @@
# timeago - A time formatting package

## Install

	go get github.com/xeonx/timeago

## Docs

<http://godoc.org/github.com/xeonx/timeago>

## Use

	package main

	import (
		"time"
		"github.com/xeonx/timeago"
	)
		
	func main() {
		t := time.Now().Add(42 * time.Second)
		
		s := timeago.English.Format(t)
		//s will contains "less than a minute ago"
		
		//...
	}
	
## Tests

`go test` is used for testing.
\ No newline at end of file

D vendor/github.com/xeonx/timeago/timeago.go => vendor/github.com/xeonx/timeago/timeago.go +0 -308
@@ 1,308 0,0 @@
// Copyright 2013 Simon HEGE. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

//timeago allows the formatting of time in terms of fuzzy timestamps.
//For example:
//	one minute ago
//	3 years ago
//	in 2 minutes
package timeago

import (
	"fmt"
	"strings"
	"time"
)

const (
	Day   time.Duration = time.Hour * 24
	Month time.Duration = Day * 30
	Year  time.Duration = Day * 365
)

type FormatPeriod struct {
	D    time.Duration
	One  string
	Many string
}

// Config allows the customization of timeago.
// You may configure string items (language, plurals, ...) and the
// maximum allowed duration value for fuzzy formatting.
type Config struct {
	PastPrefix   string // wrapped around the fuzzy text for past instants
	PastSuffix   string
	FuturePrefix string // wrapped around the fuzzy text for future instants
	FutureSuffix string

	// Periods lists the supported units; the lookup in getTimeText assumes
	// they are sorted by increasing D (all predefined configs are).
	Periods []FormatPeriod

	// Zero is the text used for durations smaller than the first period.
	Zero string
	Max  time.Duration //Maximum duration for using the special formatting.
	// DefaultLayout is the time.Format layout used by FormatReference when
	// the delta reaches Max. It is the desired representation of the date
	// 2nd of January 2006.
	DefaultLayout string
}

// English is the predefined English configuration.
var English = Config{
	PastPrefix:   "",
	PastSuffix:   " ago",
	FuturePrefix: "in ",
	FutureSuffix: "",

	Zero: "about a second",

	Max:           73 * time.Hour,
	DefaultLayout: "2006-01-02",

	Periods: []FormatPeriod{
		{time.Second, "about a second", "%d seconds"},
		{time.Minute, "about a minute", "%d minutes"},
		{time.Hour, "about an hour", "%d hours"},
		{Day, "one day", "%d days"},
		{Month, "one month", "%d months"},
		{Year, "one year", "%d years"},
	},
}

// Portuguese is the predefined Portuguese configuration.
var Portuguese = Config{
	PastPrefix:   "há ",
	PastSuffix:   "",
	FuturePrefix: "daqui a ",
	FutureSuffix: "",

	Zero: "menos de um segundo",

	Max:           73 * time.Hour,
	DefaultLayout: "02-01-2006",

	Periods: []FormatPeriod{
		{time.Second, "um segundo", "%d segundos"},
		{time.Minute, "um minuto", "%d minutos"},
		{time.Hour, "uma hora", "%d horas"},
		{Day, "um dia", "%d dias"},
		{Month, "um mês", "%d meses"},
		{Year, "um ano", "%d anos"},
	},
}

// Chinese is the predefined Chinese configuration.
var Chinese = Config{
	PastPrefix:   "",
	PastSuffix:   "前",
	FuturePrefix: "于 ",
	FutureSuffix: "",

	Zero: "1 秒",

	Max:           73 * time.Hour,
	DefaultLayout: "2006-01-02",

	Periods: []FormatPeriod{
		{time.Second, "1 秒", "%d 秒"},
		{time.Minute, "1 分钟", "%d 分钟"},
		{time.Hour, "1 小时", "%d 小时"},
		{Day, "1 天", "%d 天"},
		{Month, "1 月", "%d 月"},
		{Year, "1 年", "%d 年"},
	},
}

// French is the predefined French configuration.
var French = Config{
	PastPrefix:   "il y a ",
	PastSuffix:   "",
	FuturePrefix: "dans ",
	FutureSuffix: "",

	Zero: "environ une seconde",

	Max:           73 * time.Hour,
	DefaultLayout: "02/01/2006",

	Periods: []FormatPeriod{
		{time.Second, "environ une seconde", "moins d'une minute"},
		{time.Minute, "environ une minute", "%d minutes"},
		{time.Hour, "environ une heure", "%d heures"},
		{Day, "un jour", "%d jours"},
		{Month, "un mois", "%d mois"},
		{Year, "un an", "%d ans"},
	},
}

// German is the predefined German configuration.
var German = Config{
	PastPrefix:   "vor ",
	PastSuffix:   "",
	FuturePrefix: "in ",
	FutureSuffix: "",

	Zero: "einer Sekunde",

	Max:           73 * time.Hour,
	DefaultLayout: "02.01.2006",

	Periods: []FormatPeriod{
		{time.Second, "einer Sekunde", "%d Sekunden"},
		{time.Minute, "einer Minute", "%d Minuten"},
		{time.Hour, "einer Stunde", "%d Stunden"},
		{Day, "einem Tag", "%d Tagen"},
		{Month, "einem Monat", "%d Monaten"},
		{Year, "einem Jahr", "%d Jahren"},
	},
}

// Turkish is the predefined Turkish configuration.
var Turkish = Config{
	PastPrefix:   "",
	PastSuffix:   " önce",
	FuturePrefix: "",
	FutureSuffix: " içinde",

	Zero: "yaklaşık bir saniye",

	Max:           73 * time.Hour,
	DefaultLayout: "02/01/2006",

	Periods: []FormatPeriod{
		{time.Second, "yaklaşık bir saniye", "%d saniye"},
		{time.Minute, "yaklaşık bir dakika", "%d dakika"},
		{time.Hour, "yaklaşık bir saat", "%d saat"},
		{Day, "bir gün", "%d gün"},
		{Month, "bir ay", "%d ay"},
		{Year, "bir yıl", "%d yıl"},
	},
}

// Korean is the predefined Korean configuration.
var Korean = Config{
	PastPrefix:   "",
	PastSuffix:   " 전",
	FuturePrefix: "",
	FutureSuffix: " 후",

	Zero: "방금",

	Max:           10 * 365 * 24 * time.Hour,
	DefaultLayout: "2006-01-02",

	Periods: []FormatPeriod{
		{time.Second, "약 1초", "%d초"},
		{time.Minute, "약 1분", "%d분"},
		{time.Hour, "약 한시간", "%d시간"},
		{Day, "하루", "%d일"},
		{Month, "1개월", "%d개월"},
		{Year, "1년", "%d년"},
	},
}


// Format returns a textual representation of the time value t, compared to
// time.Now() and rendered as a fuzzy timestamp (e.g. "4 days ago") according
// to the layout defined in the Config.
func (cfg Config) Format(t time.Time) string {
	now := time.Now()
	return cfg.FormatReference(t, now)
}

// FormatReference is the same as Format, but the reference instant is
// supplied by the caller instead of time.Now().
func (cfg Config) FormatReference(t time.Time, reference time.Time) string {
	delta := reference.Sub(t)

	// Outside the [-Max, Max) window, fall back to an absolute date.
	abs := delta
	if abs < 0 {
		abs = -abs
	}
	if abs >= cfg.Max {
		return t.Format(cfg.DefaultLayout)
	}

	return cfg.FormatRelativeDuration(delta)
}

// FormatRelativeDuration is the same as Format, but for time.Duration.
// A non-negative d is treated as a past delta, a negative one as a future
// delta. Config.Max is not used in this function, as there is no other
// alternative.
func (cfg Config) FormatRelativeDuration(d time.Duration) string {
	prefix, suffix := cfg.PastPrefix, cfg.PastSuffix
	if d < 0 {
		prefix, suffix = cfg.FuturePrefix, cfg.FutureSuffix
		d = -d
	}

	text, _ := cfg.getTimeText(d, true)
	return prefix + text + suffix
}

//Round the duration d in terms of step.
func round(d time.Duration, step time.Duration, roundCloser bool) time.Duration {

	if roundCloser {
		return time.Duration(float64(d)/float64(step) + 0.5)
	}

	return time.Duration(float64(d) / float64(step))
}

// nbParamInFormat counts the formatting verbs a format string expects,
// treating the escaped "%%" as a literal percent sign.
func nbParamInFormat(f string) int {
	percents := strings.Count(f, "%")
	escaped := strings.Count(f, "%%")
	return percents - 2*escaped
}

//getTimeText converts a duration to a text, based on the current config.
//It returns the formatted text and the leftover d - count*unit (which may be
//negative when the count was rounded up). Callers pass a non-negative d:
//FormatRelativeDuration takes the absolute value first.
func (cfg Config) getTimeText(d time.Duration, roundCloser bool) (string, time.Duration) {
	// Below the smallest configured period there is nothing to count;
	// use the dedicated Zero text.
	if len(cfg.Periods) == 0 || d < cfg.Periods[0].D {
		return cfg.Zero, 0
	}

	for i, p := range cfg.Periods {

		// Upper bound of this period: the next period's unit, or p.D
		// itself on the last entry. Assumes Periods is sorted by
		// increasing D — true for all predefined configs.
		next := p.D
		if i+1 < len(cfg.Periods) {
			next = cfg.Periods[i+1].D
		}

		if i+1 == len(cfg.Periods) || d < next {

			// Number of p.D units contained in d.
			r := round(d, p.D, roundCloser)

			// If rounding bumped r up to exactly one whole next
			// period, defer to the next (larger) period instead.
			if next != p.D && r == round(next, p.D, roundCloser) {
				continue
			}

			if r == 0 {
				return "", d
			}

			// Pick the singular or plural layout.
			layout := p.Many
			if r == 1 {
				layout = p.One
			}

			// Layouts without a %-verb are returned verbatim
			// (e.g. French "moins d'une minute").
			if nbParamInFormat(layout) == 0 {
				return layout, d - r*p.D
			}

			return fmt.Sprintf(layout, r), d - r*p.D
		}
	}

	// Unreachable when Periods is non-empty; kept as a safe fallback.
	return d.String(), 0
}

// NoMax creates a new config without a maximum: the fuzzy formatting is used
// for every duration and the DefaultLayout fallback can never trigger.
func NoMax(cfg Config) Config {
	// math.MaxInt64 is the largest representable time.Duration, so no
	// delta can ever reach Max. (Replaces the raw 9223372036854775807
	// literal with the named constant of the same value.)
	return WithMax(cfg, time.Duration(math.MaxInt64), time.RFC3339)
}

// WithMax creates a new config whose special formatting is limited to
// durations smaller than max. Larger values are rendered by the standard
// time package using defaultLayout.
func WithMax(cfg Config, max time.Duration, defaultLayout string) Config {
	// cfg is already a value copy, so it can be mutated freely.
	cfg.Max = max
	cfg.DefaultLayout = defaultLayout
	return cfg
}

D vendor/gopkg.in/src-d/enry.v1/.gitignore => vendor/gopkg.in/src-d/enry.v1/.gitignore +0 -10
@@ 1,10 0,0 @@
.linguist
benchmarks/output
.ci
Makefile.main
.shared
.idea
.docsrv-resources
build/
vendor/
java/lib/

D vendor/gopkg.in/src-d/enry.v1/.travis.yml => vendor/gopkg.in/src-d/enry.v1/.travis.yml +0 -154
@@ 1,154 0,0 @@
dist: trusty

language: go
go_import_path: gopkg.in/src-d/enry.v1

go:
  - '1.11.6' # specific versions until https://github.com/golang/go/issues/31293
  - '1.12.1'
env:
  global:
    - GO_VERSION_FOR_JVM='1.11.1'
    - CGO_ENABLED=0
  matrix:
    - ONIGURUMA=0
    - ONIGURUMA=1
matrix:
  fast_finish: true

addons:
  apt:
    packages:
      - libonig-dev

stages:
  - name: test
  - name: release
    if: tag IS present
  - name: publish
    if: tag IS present

stage: test
install:
  - >
    if [[ "${ONIGURUMA}" -gt 0 ]]; then
      export tags="${tags} oniguruma";
      export CGO_ENABLED=1;
    fi;
  - go get -v -t -tags "${tags}" ./...
script:
  - make test-coverage
after_success:
  - bash <(curl -s https://codecov.io/bash)

jobs:
  include:
    - name: 'java unit-tests'
      stage: test
      language: scala
      jdk: oraclejdk8
      before_install:
        - export CGO_ENABLED=1
        # mimics exact behavior of 'go_import_path' for non-go build image
        - export GOPATH=${TRAVIS_HOME}/gopath
        - mkdir -p ${GOPATH}/src/gopkg.in/src-d/enry.v1
        - tar -Pczf ${TRAVIS_TMPDIR}/src_archive.tar.gz -C ${TRAVIS_BUILD_DIR} . && tar -Pxzf ${TRAVIS_TMPDIR}/src_archive.tar.gz -C ${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
        - export TRAVIS_BUILD_DIR=${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
        - cd ${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
      install:
        - eval "$(curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | GIMME_GO_VERSION=$GO_VERSION_FOR_JVM bash)"
        - go version
        - echo $PWD; echo $GOPATH
        - go get -v ./...
      before_script:
        - cd java
        - make
      script:
        - make test

    - name: 'linux packages'
      stage: release
      install:
        - go version
        - go get -v -t ./...
      script: make packages
      deploy:
        provider: releases
        api_key:
          secure: $GITHUB_TOKEN
        file_glob: true
        file: build/*.tar.gz
        skip_cleanup: true
        on:
          tags: true

    - name: 'linux shared lib'
      stage: release
      install:
        - go version
        - go get -v -t ./...
      script: make linux-shared
      deploy:
        provider: releases
        api_key:
          secure: $GITHUB_TOKEN
        file:
          - ./.shared/linux-x86-64/libenry.so
        skip_cleanup: true
        on:
          tags: true

    - name: 'macOS shared lib'
      stage: release
      sudo: true
      env:
        - OSXCROSS_PACKAGE="osxcross_3034f7149716d815bc473d0a7b35d17e4cf175aa.tar.gz"
        - OSXCROSS_URL="https://github.com/bblfsh/client-scala/releases/download/v1.5.2/${OSXCROSS_PACKAGE}"
        - PATH="/$HOME/osxcross/bin:$PATH"
      install:
        - go version
        - go get -v -t ./...
        - sudo apt-get update
        - sudo apt-get install -y --no-install-recommends clang g++ gcc gcc-multilib libc6-dev libc6-dev-i386 mingw-w64 patch xz-utils
        - cd ${HOME}
        - curl -sfSL ${OSXCROSS_URL} | tar -C ${HOME} -xzf -
        - cd $GOPATH/src/gopkg.in/src-d/enry.v1
      script: make darwin-shared
      deploy:
        provider: releases
        api_key:
          secure: $GITHUB_TOKEN
        file: ./.shared/darwin/libenry.dylib
        skip_cleanup: true
        on:
          tags: true

    - name: 'java: publish to maven'
      stage: publish
      language: scala
      jdk: oraclejdk8
      before_install:
        - export CGO_ENABLED=1
        # mimics exact behavior of 'go_import_path' for non-go build image
        - export GOPATH=${TRAVIS_HOME}/gopath
        - mkdir -p ${GOPATH}/src/gopkg.in/src-d/enry.v1
        - tar -Pczf ${TRAVIS_TMPDIR}/src_archive.tar.gz -C ${TRAVIS_BUILD_DIR} . && tar -Pxzf ${TRAVIS_TMPDIR}/src_archive.tar.gz -C ${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
        - export TRAVIS_BUILD_DIR=${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
        - cd ${TRAVIS_HOME}/gopath/src/gopkg.in/src-d/enry.v1
      install:
        - eval "$(curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | GIMME_GO_VERSION=$GO_VERSION_FOR_JVM bash)"
        - go version
        - go get -v -t ./...
      before_script:
        - cd java
        - make
        - curl -o ./shared/linux-x86-64/libenry.so -sfL "https://github.com/$TRAVIS_REPO_SLUG/releases/download/$TRAVIS_TAG/libenry.so" || travis_terminate 1
        - mkdir -p ./shared/darwin
        - curl -o ./shared/darwin/libenry.dylib -sfL "https://github.com/$TRAVIS_REPO_SLUG/releases/download/$TRAVIS_TAG/libenry.dylib" || travis_terminate 1
        - openssl aes-256-cbc -K $encrypted_a0e1c69dbbc7_key -iv $encrypted_a0e1c69dbbc7_iv -in key.asc.enc -out key.asc -d
        - gpg --no-default-keyring --primary-keyring ./project/.gnupg/pubring.gpg --secret-keyring ./project/.gnupg/secring.gpg --keyring ./project/.gnupg/pubring.gpg --fingerprint --import key.asc
      script:
        - make test # ensure the shared objects are functional
        - ./sbt publishLocal
        - ./sbt publishSigned
        - ./sbt sonatypeRelease

D vendor/gopkg.in/src-d/enry.v1/CONTRIBUTING.md => vendor/gopkg.in/src-d/enry.v1/CONTRIBUTING.md +0 -61
@@ 1,61 0,0 @@
# source{d} Contributing Guidelines

source{d} projects accept contributions via GitHub pull requests.
This document outlines some of the
conventions on development workflow, commit message formatting, contact points,
and other resources to make it easier to get your contribution accepted.

## Certificate of Origin

By contributing to this project, you agree to the [Developer Certificate of
Origin (DCO)](DCO). This document was created by the Linux Kernel community and is a
simple statement that you, as a contributor, have the legal right to make the
contribution.

In order to show your agreement with the DCO you should include at the end of the commit message,
the following line: `Signed-off-by: John Doe <john.doe@example.com>`, using your real name.

This can be done easily using the [`-s`](https://github.com/git/git/blob/b2c150d3aa82f6583b9aadfecc5f8fa1c74aca09/Documentation/git-commit.txt#L154-L161) flag on the `git commit`.

If you find that you have pushed a few commits without `Signed-off-by`, you can still add it afterwards. We wrote a manual which can help: [fix-DCO.md](https://github.com/src-d/guide/blob/master/developer-community/fix-DCO.md).

## Support Channels

The official support channels, for both users and contributors, are:

- GitHub issues: each repository has its own list of issues.
- Slack: join the [source{d} Slack](https://join.slack.com/t/sourced-community/shared_invite/enQtMjc4Njk5MzEyNzM2LTFjNzY4NjEwZGEwMzRiNTM4MzRlMzQ4MmIzZjkwZmZlM2NjODUxZmJjNDI1OTcxNDAyMmZlNmFjODZlNTg0YWM) community.

Before opening a new issue or submitting a new pull request, it's helpful to
search the project - it's likely that another user has already reported the
issue you're facing, or it's a known issue that we're already aware of.


## How to Contribute

Pull Requests (PRs) are the main and exclusive way to contribute code to source{d} projects.
In order for a PR to be accepted it needs to pass this list of requirements:

- The contribution must be correctly explained with natural language and providing a minimum working example that reproduces it.
- All PRs must be written idiomatically:
    - for Go: formatted according to [gofmt](https://golang.org/cmd/gofmt/), and without any warnings from [go lint](https://github.com/golang/lint) nor [go vet](https://golang.org/cmd/vet/)
    - for other languages, similar constraints apply.
- They should in general include tests, and those shall pass.
    - If the PR is a bug fix, it has to include a new unit test that fails before the patch is merged.
    - If the PR is a new feature, it has to come with a suite of unit tests, that tests the new functionality.
    - In any case, all the PRs have to pass the personal evaluation of at least one of the [maintainers](MAINTAINERS) of the project.


### Format of the commit message

Every commit message should describe what was changed, under which context and, if applicable, the GitHub issue it relates to:

```
plumbing: packp, Skip argument validations for unknown capabilities. Fixes #623
```

The format can be described more formally as follows:

```
<package>: <subpackage>, <what changed>. [Fixes #<issue-number>]
```

D vendor/gopkg.in/src-d/enry.v1/DCO => vendor/gopkg.in/src-d/enry.v1/DCO +0 -25
@@ 1,25 0,0 @@
    Developer's Certificate of Origin 1.1

    By making a contribution to this project, I certify that:

    (a) The contribution was created in whole or in part by me and I
        have the right to submit it under the open source license
        indicated in the file; or

    (b) The contribution is based upon previous work that, to the best
        of my knowledge, is covered under an appropriate open source
        license and I have the right under that license to submit that
        work with modifications, whether created in whole or in part
        by me, under the same open source license (unless I am
        permitted to submit under a different license), as indicated
        in the file; or

    (c) The contribution was provided directly to me by some other
        person who certified (a), (b) or (c) and I have not modified
        it.

    (d) I understand and agree that this project and the contribution
	are public and that a record of the contribution (including all
	personal information I submit with it, including my sign-off) is
	maintained indefinitely and may be redistributed consistent with
	this project or the open source license(s) involved.

D vendor/gopkg.in/src-d/enry.v1/LICENSE => vendor/gopkg.in/src-d/enry.v1/LICENSE +0 -201
@@ 1,201 0,0 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2017 Sourced Technologies, S.L.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

D vendor/gopkg.in/src-d/enry.v1/MAINTAINERS => vendor/gopkg.in/src-d/enry.v1/MAINTAINERS +0 -1
@@ 1,1 0,0 @@
Alexander Bezzubov <alex@sourced.tech> (@bzz)

D vendor/gopkg.in/src-d/enry.v1/Makefile => vendor/gopkg.in/src-d/enry.v1/Makefile +0 -82
@@ 1,82 0,0 @@
# Package configuration
PROJECT = enry
COMMANDS = cmd/enry

# Including ci Makefile
CI_REPOSITORY ?= https://github.com/src-d/ci.git
CI_BRANCH ?= v1
CI_PATH ?= .ci
MAKEFILE := $(CI_PATH)/Makefile.main
$(MAKEFILE):
	git clone --quiet --depth 1 -b $(CI_BRANCH) $(CI_REPOSITORY) $(CI_PATH);
-include $(MAKEFILE)

# Docsrv: configure the languages whose api-doc can be auto generated
LANGUAGES = go
# Docs: do not edit this
DOCS_REPOSITORY := https://github.com/src-d/docs
SHARED_PATH ?= $(shell pwd)/.docsrv-resources
DOCS_PATH ?= $(SHARED_PATH)/.docs
$(DOCS_PATH)/Makefile.inc:
	git clone --quiet --depth 1 $(DOCS_REPOSITORY) $(DOCS_PATH);
-include $(DOCS_PATH)/Makefile.inc

LINGUIST_PATH = .linguist

# shared objects
RESOURCES_DIR=./.shared
LINUX_DIR=$(RESOURCES_DIR)/linux-x86-64
LINUX_SHARED_LIB=$(LINUX_DIR)/libenry.so
DARWIN_DIR=$(RESOURCES_DIR)/darwin
DARWIN_SHARED_LIB=$(DARWIN_DIR)/libenry.dylib
HEADER_FILE=libenry.h
NATIVE_LIB=./shared/enry.go

$(LINGUIST_PATH):
	git clone https://github.com/github/linguist.git $@

clean-linguist:
	rm -rf $(LINGUIST_PATH)

clean-shared:
	rm -rf $(RESOURCES_DIR)

clean: clean-linguist clean-shared

code-generate: $(LINGUIST_PATH)
	mkdir -p data && \
	go run internal/code-generator/main.go
	ENRY_TEST_REPO="$${PWD}/.linguist" go test  -v \
		-run Test_GeneratorTestSuite \
		./internal/code-generator/generator \
		-testify.m TestUpdateGeneratorTestSuiteGold \
		-update_gold

benchmarks: $(LINGUIST_PATH)
	go test -run=NONE -bench=. && \
	benchmarks/linguist-total.rb

benchmarks-samples: $(LINGUIST_PATH)
	go test -run=NONE -bench=. -benchtime=5us && \
	benchmarks/linguist-samples.rb

benchmarks-slow: $(LINGUIST_PATH)
	mkdir -p benchmarks/output && \
	go test -run=NONE -bench=. -slow -benchtime=100ms -timeout=100h > benchmarks/output/enry_samples.bench && \
	benchmarks/linguist-samples.rb 5 > benchmarks/output/linguist_samples.bench

linux-shared: $(LINUX_SHARED_LIB)

darwin-shared: $(DARWIN_SHARED_LIB)

$(DARWIN_SHARED_LIB):
	mkdir -p $(DARWIN_DIR) && \
	CC="o64-clang" CXX="o64-clang++" CGO_ENABLED=1 GOOS=darwin go build -buildmode=c-shared -o $(DARWIN_SHARED_LIB) $(NATIVE_LIB) && \
	mv $(DARWIN_DIR)/$(HEADER_FILE) $(RESOURCES_DIR)/$(HEADER_FILE)

$(LINUX_SHARED_LIB):
	mkdir -p $(LINUX_DIR) && \
	CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -buildmode=c-shared -o $(LINUX_SHARED_LIB) $(NATIVE_LIB) && \
	mv $(LINUX_DIR)/$(HEADER_FILE) $(RESOURCES_DIR)/$(HEADER_FILE)

.PHONY: benchmarks benchmarks-samples benchmarks-slow

D vendor/gopkg.in/src-d/enry.v1/README.md => vendor/gopkg.in/src-d/enry.v1/README.md +0 -271
@@ 1,271 0,0 @@
# enry [![GoDoc](https://godoc.org/gopkg.in/src-d/enry.v1?status.svg)](https://godoc.org/gopkg.in/src-d/enry.v1) [![Build Status](https://travis-ci.com/src-d/enry.svg?branch=master)](https://travis-ci.com/src-d/enry) [![codecov](https://codecov.io/gh/src-d/enry/branch/master/graph/badge.svg)](https://codecov.io/gh/src-d/enry)

File programming language detector and toolbox to ignore binary or vendored files. *enry* started as a port to _Go_ of the original [linguist](https://github.com/github/linguist) _Ruby_ library, and offers roughly *2x better performance*.


Installation
------------

The recommended way to install enry is

```
go get gopkg.in/src-d/enry.v1/cmd/enry
```

To build enry's CLI you must run

    make build

this will generate a binary in the project's root directory called `enry`. You can then move this binary to anywhere in your `PATH`.

This project is now part of [source{d} Engine](https://sourced.tech/engine),
which provides the simplest way to get started with a single command.
Visit [sourced.tech/engine](https://sourced.tech/engine) for more information.

### Faster regexp engine (optional)

[Oniguruma](https://github.com/kkos/oniguruma) is CRuby's regular expression engine.
It is very fast and performs better than the one built into Go runtime. *enry* supports swapping
between those two engines thanks to [rubex](https://github.com/moovweb/rubex) project.
The typical overall speedup from using Oniguruma is 1.5-2x. However, it requires CGo and the external shared library.
On macOS with brew, it is

```
brew install oniguruma
```

On Ubuntu, it is

```
sudo apt install libonig-dev
```

To build enry with Oniguruma regexps use the `oniguruma` build tag

```
go get -v -t --tags oniguruma ./...
```

and then rebuild the project.

Examples
------------

```go
lang, safe := enry.GetLanguageByExtension("foo.go")
fmt.Println(lang, safe)
// result: Go true

lang, safe := enry.GetLanguageByContent("foo.m", []byte("<matlab-code>"))
fmt.Println(lang, safe)
// result: Matlab true

lang, safe := enry.GetLanguageByContent("bar.m", []byte("<objective-c-code>"))
fmt.Println(lang, safe)
// result: Objective-C true

// all strategies together
lang := enry.GetLanguage("foo.cpp", []byte("<cpp-code>"))
// result: C++
```

Note that the returned boolean value `safe` is set either to `true`, if there is only one possible language detected, or to `false` otherwise.

To get a list of possible languages for a given file, you can use the plural version of the detecting functions.

```go
langs := enry.GetLanguages("foo.h",  []byte("<cpp-code>"))
// result: []string{"C", "C++", "Objective-C"}

langs := enry.GetLanguagesByExtension("foo.asc", []byte("<content>"), nil)
// result: []string{"AGS Script", "AsciiDoc", "Public Key"}

langs := enry.GetLanguagesByFilename("Gemfile", []byte("<content>"), []string{})
// result: []string{"Ruby"}
```


CLI
------------

You can use enry as a command,

```bash
$ enry --help
  enry v1.5.0 build: 10-02-2017_14_01_07 commit: 95ef0a6cf3, based on linguist commit: 37979b2
  enry, A simple (and faster) implementation of github/linguist
  usage: enry <path>
         enry [-json] [-breakdown] <path>
         enry [-json] [-breakdown]
         enry [-version]
```

and it'll return an output similar to *linguist*'s output,

```bash
$ enry
55.56%    Shell
22.22%    Ruby
11.11%    Gnuplot
11.11%    Go
```

but not only the output; its flags are also the same as *linguist*'s ones,

```bash
$ enry --breakdown
55.56%    Shell
22.22%    Ruby
11.11%    Gnuplot
11.11%    Go

Gnuplot
plot-histogram.gp

Ruby
linguist-samples.rb
linguist-total.rb

Shell
parse.sh
plot-histogram.sh
run-benchmark.sh
run-slow-benchmark.sh
run.sh

Go
parser/main.go
```

even the JSON flag,

```bash
$ enry --json
{"Gnuplot":["plot-histogram.gp"],"Go":["parser/main.go"],"Ruby":["linguist-samples.rb","linguist-total.rb"],"Shell":["parse.sh","plot-histogram.sh","run-benchmark.sh","run-slow-benchmark.sh","run.sh"]}
```

Note that even if enry's CLI is compatible with linguist's, its main point is that **_enry doesn't need a git repository to work!_**

Java bindings
------------

Generated Java bindings using a C-shared library and JNI are located under [`java`](https://github.com/src-d/enry/blob/master/java)

Development
------------

*enry* re-uses parts of original [linguist](https://github.com/github/linguist) to generate internal data structures. In order to update to the latest upstream and generate all the necessary code you must run:

    git clone https://github.com/github/linguist.git .linguist
    # update commit in generator_test.go (to re-generate .gold fixtures)
    # https://github.com/src-d/enry/blob/13d3d66d37a87f23a013246a1b0678c9ee3d524b/internal/code-generator/generator/generator_test.go#L18
    go generate

We update enry when changes are done in linguist's master branch on the following files:

* [languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml)
* [heuristics.yml](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.yml)
* [vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml)
* [documentation.yml](https://github.com/github/linguist/blob/master/lib/linguist/documentation.yml)

Currently we don't have any procedure established to automatically detect changes in the linguist project and regenerate the code.
So we update the generated code as needed, without any specific criteria.

If you want to update *enry* because of changes in linguist, you can run the *go
generate* command and do a pull request that only contains the changes in
generated files (those files in the subdirectory [data](https://github.com/src-d/enry/blob/master/data)).

To run the tests,

    make test


Divergences from linguist
------------

`enry` [CLI tool](#cli) does *not* require a full Git repository to be present in the filesystem in order to report languages.

Using [linguist/samples](https://github.com/github/linguist/tree/master/samples)
as a set for the tests, the following issues were found:

* [Heuristics for ".es" extension](https://github.com/github/linguist/blob/e761f9b013e5b61161481fcb898b59721ee40e3d/lib/linguist/heuristics.yml#L103) in JavaScript could not be parsed, due to unsupported backreference in RE2 regexp engine

* As of [Linguist v5.3.2](https://github.com/github/linguist/releases/tag/v5.3.2) it is using [flex-based scanner in C for tokenization](https://github.com/github/linguist/pull/3846). Enry still uses [extract_token](https://github.com/github/linguist/pull/3846/files#diff-d5179df0b71620e3fac4535cd1368d15L60) regex-based algorithm. See [#193](https://github.com/src-d/enry/issues/193).

* Bayesian classifier can't distinguish "SQL" from "PLpgSQL". See [#194](https://github.com/src-d/enry/issues/194).

* Detection of [generated files](https://github.com/github/linguist/blob/bf95666fc15e49d556f2def4d0a85338423c25f3/lib/linguist/generated.rb#L53) is not supported yet.
 (Thus they are not excluded from CLI output). See [#213](https://github.com/src-d/enry/issues/213).

* XML detection strategy is not implemented. See [#192](https://github.com/src-d/enry/issues/192).

* Overriding languages and types though `.gitattributes` is not yet supported. See [#18](https://github.com/src-d/enry/issues/18).

* `enry` CLI output does NOT exclude `.gitignore`ed files and git submodules, as linguist does

In all the cases above that have an issue number - we plan to update enry to match Linguist behaviour.


Benchmarks
------------

Enry's language detection has been compared with Linguist's one. In order to do that, Linguist's project directory [*linguist/samples*](https://github.com/github/linguist/tree/master/samples) was used as a set of files to run benchmarks against.

We got these results:

![histogram](benchmarks/histogram/distribution.png)

The histogram represents the number of files for which spent time in language
detection was in the range of the time interval indicated in the x axis.

So you can see that most of the files were detected quicker in enry.

We found some few cases where enry turns slower than linguist. This is due to
Golang's regexp engine being slower than Ruby's, which uses the [oniguruma](https://github.com/kkos/oniguruma) library, written in C.

You can find scripts and additional information (like software and hardware used
and benchmarks' results per sample file) in [*benchmarks*](https://github.com/src-d/enry/blob/master/benchmarks) directory.