[Vendor] Update Batch 2020-11 (#13746)
* github.com/alecthomas/chroma v0.8.1 -> v0.8.2
  Changelog: https://github.com/alecthomas/chroma/releases/tag/v0.8.2
* github.com/blevesearch/bleve v1.0.12 -> v1.0.13
  Changelog: https://github.com/blevesearch/bleve/releases/tag/v1.0.13
* github.com/editorconfig/editorconfig-core-go v2.3.8 -> v2.3.9
  Changelog: https://github.com/editorconfig/editorconfig-core-go/releases/tag/v2.3.9
* github.com/klauspost/compress v1.11.2 -> v1.11.3
  Changelog: https://github.com/klauspost/compress/releases/tag/v1.11.3
* github.com/minio/minio-go v7.0.5 -> v7.0.6
  Changelog: https://github.com/minio/minio-go/releases/tag/v7.0.6

Co-authored-by: Lauris BH <lauris@nix.lv>
parent e8a6c425ec
commit c4deb97ed1

go.mod (10 changed lines)

@@ -19,10 +19,10 @@ require (
 gitea.com/macaron/toolbox v0.0.0-20190822013122-05ff0fc766b7
 github.com/PuerkitoBio/goquery v1.5.1
 github.com/RoaringBitmap/roaring v0.5.5 // indirect
-github.com/alecthomas/chroma v0.8.1
+github.com/alecthomas/chroma v0.8.2
 github.com/andybalholm/brotli v1.0.1 // indirect
 github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
 github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d // indirect
 github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 // indirect
@@ -31,7 +31,7 @@ require (
 github.com/dgrijalva/jwt-go v3.2.0+incompatible
 github.com/dlclark/regexp2 v1.4.0 // indirect
 github.com/dustin/go-humanize v1.0.0
-github.com/editorconfig/editorconfig-core-go/v2 v2.3.8
+github.com/editorconfig/editorconfig-core-go/v2 v2.3.9
 github.com/emirpasic/gods v1.12.0
 github.com/ethantkoenig/rupture v0.0.0-20181029165146-c3b3b810dc77
 github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 // indirect
@@ -64,7 +64,7 @@ require (
 github.com/jmhodges/levigo v1.0.0 // indirect
 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
 github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4
-github.com/klauspost/compress v1.11.2
+github.com/klauspost/compress v1.11.3
 github.com/klauspost/pgzip v1.2.5 // indirect
 github.com/lafriks/xormstore v1.3.2
 github.com/lib/pq v1.8.1-0.20200908161135-083382b7e6fc
@@ -78,7 +78,7 @@ require (
 github.com/mgechev/revive v1.0.3-0.20200921231451-246eac737dc7
 github.com/mholt/archiver/v3 v3.5.0
 github.com/microcosm-cc/bluemonday v1.0.4
-github.com/minio/minio-go/v7 v7.0.5
+github.com/minio/minio-go/v7 v7.0.6
 github.com/mitchellh/go-homedir v1.1.0
 github.com/msteinert/pam v0.0.0-20200810204841-913b8f8cdf8b
 github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646

go.sum (45 changed lines)

@@ -111,8 +111,8 @@ github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/g
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
 github.com/alecthomas/chroma v0.7.2-0.20200305040604-4f3623dce67a/go.mod h1:fv5SzZPFJbwp2NXJWpFIX7DZS4HgV1K4ew4Pc2OZD9s=
-github.com/alecthomas/chroma v0.8.1 h1:ym20sbvyC6RXz45u4qDglcgr8E313oPROshcuCHqiEE=
+github.com/alecthomas/chroma v0.8.2 h1:x3zkuE2lUk/RIekyAJ3XRqSCP4zwWDfcw/YJCuCAACg=
-github.com/alecthomas/chroma v0.8.1/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
+github.com/alecthomas/chroma v0.8.2/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
 github.com/alecthomas/kong v0.1.17-0.20190424132513-439c674f7ae0/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=
@@ -162,8 +162,8 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
 github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
-github.com/blevesearch/bleve v1.0.12 h1:2qJUSBpU/h1z8x3ERRB5WwpmEpJwoivPqmDpHzv4tuk=
+github.com/blevesearch/bleve v1.0.13 h1:NtqdA+2UL715y2/9Epg9Ie9uspNcilGMYNM+tT+HfAo=
-github.com/blevesearch/bleve v1.0.12/go.mod h1:G0ErXWdIrUSYZLPoMpS9Z3saTnTsk4ebhPsVv/+0nxk=
+github.com/blevesearch/bleve v1.0.13/go.mod h1:3y+16vR4Cwtis/bOGCt7r+CHKB2/ewizEqKBUycXomA=
 github.com/blevesearch/blevex v0.0.0-20190916190636-152f0fe5c040 h1:SjYVcfJVZoCfBlg+fkaq2eoZHTf5HaJfaTeTkOtyfHQ=
 github.com/blevesearch/blevex v0.0.0-20190916190636-152f0fe5c040/go.mod h1:WH+MU2F4T0VmSdaPX+Wu5GYoZBrYWdOZWSjzvYcDmqQ=
 github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
@@ -174,16 +174,16 @@ github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt
 github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
 github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
 github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
-github.com/blevesearch/zap/v11 v11.0.12 h1:ZA+80yajko2tXr1kmbSoVRMCo0mFZAVJmoijjYsZuwc=
+github.com/blevesearch/zap/v11 v11.0.13 h1:NDvmjAyeEQsBbPElubVPqrBtSDOftXYwxkHeZfflU4A=
-github.com/blevesearch/zap/v11 v11.0.12/go.mod h1:JLfFhc8DWP01zMG/6VwEY2eAnlJsTN1vDE4S0rC5Y78=
+github.com/blevesearch/zap/v11 v11.0.13/go.mod h1:qKkNigeXbxZwym02wsxoQpbme1DgAwTvRlT/beIGfTM=
-github.com/blevesearch/zap/v12 v12.0.12 h1:9eWaL9/2hcjy1VR3lrl/b+kWh5G7w/BkNYI07mWActw=
+github.com/blevesearch/zap/v12 v12.0.13 h1:05Ebdmv2tRTUytypG4DlOIHLLw995DtVV0Zl3YwwDew=
-github.com/blevesearch/zap/v12 v12.0.12/go.mod h1:1HrB4hhPfI8u8x4SPYbluhb8xhflpPvvj8EcWImNnJY=
+github.com/blevesearch/zap/v12 v12.0.13/go.mod h1:0RTeU1uiLqsPoybUn6G/Zgy6ntyFySL3uWg89NgX3WU=
-github.com/blevesearch/zap/v13 v13.0.4 h1:eoRvJmLeIQUs1mAF+fAFALg1dPHOI1e1KFuXL0I7us4=
+github.com/blevesearch/zap/v13 v13.0.5 h1:+Gcwl95uei3MgBlJAddBFRv9gl+FMNcXpMa7BX3byJw=
-github.com/blevesearch/zap/v13 v13.0.4/go.mod h1:YdB7UuG7TBWu/1dz9e2SaLp1RKfFfdJx+ulIK5HR1bA=
+github.com/blevesearch/zap/v13 v13.0.5/go.mod h1:HTfWECmzBN7BbdBxdEigpUsD6MOPFOO84tZ0z/g3CnE=
-github.com/blevesearch/zap/v14 v14.0.3 h1:ccEv296u6DEUHFF9U4W2E/6/WkbuDrS9/1VJM34SCzA=
+github.com/blevesearch/zap/v14 v14.0.4 h1:BnWWkdgmPhK50J9dkBlQrWB4UDa22OMPIUzn1oXcXfY=
-github.com/blevesearch/zap/v14 v14.0.3/go.mod h1:oObAhcDHw7p1ahiTCqhRkdxdl7UA8qpvX10pSgrTMHc=
+github.com/blevesearch/zap/v14 v14.0.4/go.mod h1:sTwuFoe1n/+VtaHNAjY3W5GzHZ5UxFkw1MZ82P/WKpA=
-github.com/blevesearch/zap/v15 v15.0.1 h1:jEism63eY+qdcvwXH0K8MiKhv5tb10T1k7SNx6fauCM=
+github.com/blevesearch/zap/v15 v15.0.2 h1:7wV4ksnKzBibLaWBolzbxngxdVAUmF7HJ+gMOqkzsdQ=
-github.com/blevesearch/zap/v15 v15.0.1/go.mod h1:ho0frqAex2ktT9cYFAxQpoQXsxb/KEfdjpx4s49rf/M=
+github.com/blevesearch/zap/v15 v15.0.2/go.mod h1:nfycXPgfbio8l+ZSkXUkhSNjTpp57jZ0/MKa6TigWvM=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
 github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668 h1:U/lr3Dgy4WK+hNk4tyD+nuGjpVLPEHuJSFXMw11/HPA=
@@ -195,6 +195,7 @@ github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cheggaaa/pb v1.0.29/go.mod h1:W40334L7FMC5JKWldsTWbdGjLo0RxUKK73K+TuPxX30=
 github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
 github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
@@ -278,8 +279,8 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn
 github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
 github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
 github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
-github.com/editorconfig/editorconfig-core-go/v2 v2.3.8 h1:nq6QPrFjoI1QP9trhj+bsXoS8MSjhTgQXgTavA5zPbg=
+github.com/editorconfig/editorconfig-core-go/v2 v2.3.9 h1:4vZN3UCLAUbT408wDutTKGZwOlgGMpV3vhahYufNbV8=
-github.com/editorconfig/editorconfig-core-go/v2 v2.3.8/go.mod h1:z7TIMh40583cev3v8ei7V1RRPKeHQbttoa4Vm5/5u7g=
+github.com/editorconfig/editorconfig-core-go/v2 v2.3.9/go.mod h1:yoHDFR3nO8O5ssvhITSRsf0owQqIs0c9+nBTtarunPo=
 github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
 github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
 github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
@@ -517,6 +518,8 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-github/v32 v32.1.0 h1:GWkQOdXqviCPx7Q7Fj+KyPoGm4SwHRh8rheoPhd27II=
 github.com/google/go-github/v32 v32.1.0/go.mod h1:rIEpZD9CTDQwDK9GDrtMTycQNA4JU3qBsCizh3q2WCI=
 github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
@@ -701,8 +704,8 @@ github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0
 github.com/klauspost/compress v1.9.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
 github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
 github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
-github.com/klauspost/compress v1.11.2 h1:MiK62aErc3gIiVEtyzKfeOHgW7atJb5g/KNX5m3c2nQ=
+github.com/klauspost/compress v1.11.3 h1:dB4Bn0tN3wdCzQxnS8r06kV74qN/TAfaIS0bVE8h3jc=
-github.com/klauspost/compress v1.11.2/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
+github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
 github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
 github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
 github.com/klauspost/cpuid v1.3.1 h1:5JNjFYYQrZeKRJ0734q51WCEEn2huer72Dc7K+R/b6s=
@@ -778,6 +781,7 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA
 github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
 github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
+github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
 github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
 github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
@@ -800,8 +804,8 @@ github.com/microcosm-cc/bluemonday v1.0.4/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvza
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/minio/md5-simd v1.1.0 h1:QPfiOqlZH+Cj9teu0t9b1nTBfPbyTl16Of5MeuShdK4=
 github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
-github.com/minio/minio-go/v7 v7.0.5 h1:I2NIJ2ojwJqD/YByemC1M59e1b4FW9kS7NlOar7HPV4=
+github.com/minio/minio-go/v7 v7.0.6 h1:9czXaG0LEZ9s74smSqy0rm034MxngQoP6HTTuSc5GEs=
-github.com/minio/minio-go/v7 v7.0.5/go.mod h1:TA0CQCjJZHM5SJj9IjqR0NmpmQJ6bCbXifAJ3mUU6Hw=
+github.com/minio/minio-go/v7 v7.0.6/go.mod h1:HcIuq+11d/3MfavIPZiswSzfQ1VJ2Lwxp/XLtW46IWQ=
 github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
 github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM=
 github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
@@ -994,6 +998,7 @@ github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx
 github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
+github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
 github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
 github.com/smartystreets/assertions v1.0.1 h1:voD4ITNjPL5jjBfgR/r8fPIIBrliWrWHeiJApdr3r4w=

@@ -6,6 +6,8 @@ release:
 brews:
 -
 install: bin.install "chroma"
+env:
+- CGO_ENABLED=0
 builds:
 - goos:
 - linux

@@ -1,12 +0,0 @@
-sudo: false
-language: go
-go:
-- "1.13.x"
-script:
-- go test -v ./...
-- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
-- ./bin/golangci-lint run
-- git clean -fdx .
-after_success:
-curl -sL https://git.io/goreleaser | bash && goreleaser

@@ -1,4 +1,4 @@
-# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![Build Status](https://travis-ci.org/alecthomas/chroma.svg)](https://travis-ci.org/alecthomas/chroma) [![Gitter chat](https://badges.gitter.im/alecthomas.svg)](https://gitter.im/alecthomas/Lobby)
+# Chroma — A general purpose syntax highlighter in pure Go [![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CircleCI](https://img.shields.io/circleci/project/github/alecthomas/chroma.svg)](https://circleci.com/gh/alecthomas/chroma) [![Go Report Card](https://goreportcard.com/badge/github.com/alecthomas/chroma)](https://goreportcard.com/report/github.com/alecthomas/chroma) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://gophers.slack.com/messages/CN9DS8YF3)
 
 > **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly.
 
@@ -36,29 +36,30 @@ translators for Pygments lexers and styles.
 Prefix | Language
 :----: | --------
 A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk
-B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck
+B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, BlitzBasic, BNF, Brainfuck
-C | C, C#, C++, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
+C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython
 D | D, Dart, Diff, Django/Jinja, Docker, DTD
 E | EBNF, Elixir, Elm, EmacsLisp, Erlang
 F | Factor, Fish, Forth, Fortran, FSharp
-G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
+G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groovy
-H | Handlebars, Haskell, Haxe, HCL, Hexdump, HTML, HTTP, Hy
+H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy
-I | Idris, INI, Io
+I | Idris, Igor, INI, Io
 J | J, Java, JavaScript, JSON, Julia, Jungle
 K | Kotlin
 L | Lighttpd configuration file, LLVM, Lua
 M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
 N | NASM, Newspeak, Nginx configuration file, Nim, Nix
 O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
-P | PacmanConf, Perl, PHP, Pig, PkgConfig, PL/pgSQL, plaintext, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
+P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python, Python 3
 Q | QBasic
-R | R, Racket, Ragel, react, reg, reStructuredText, Rexx, Ruby, Rust
+R | R, Racket, Ragel, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust
-S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, SML, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, SYSTEMD, systemverilog
+S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Swift, SYSTEMD, systemverilog
 T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
 V | VB.net, verilog, VHDL, VimL, vue
 W | WDTE
 X | XML, Xorg
-Y | YAML
+Y | YAML, YANG
+Z | Zig
 
 
 _I will attempt to keep this section up to date, but an authoritative list can be
@@ -183,7 +184,7 @@ following constructor options:
 - `ClassPrefix(prefix)` - prefix each generated CSS class.
 - `TabWidth(width)` - Set the rendered tab width, in characters.
 - `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
-- `LinkableLineNumbers()` - Make the line numbers linkable.
+- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
 - `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
 - `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
 
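
The constructor options listed above, together with the lineID/lineTitleWithLinkIfNeeded changes in the formatter hunks that follow, cover chroma's line-number handling. A minimal sketch of wiring the HTML formatter up by hand; the option names come from the README, but the exact argument shapes (the bool and the "L" id prefix passed to LinkableLineNumbers, the bool to WithLineNumbers) are assumptions that may differ between chroma versions:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	source := "package main\n\nfunc main() {}\n"

	// Pick a lexer and style; both fall back to sensible defaults if the name is unknown.
	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	style := styles.Get("monokai")

	// Options from the README above; after this update LinkableLineNumbers gives each
	// line number an id such as "L42" and also wraps it in a link to itself.
	formatter := html.New(
		html.WithLineNumbers(true),
		html.LinkableLineNumbers(true, "L"),
		html.TabWidth(4),
	)

	it, err := lexer.Tokenise(nil, source)
	if err != nil {
		log.Fatal(err)
	}
	if err := formatter.Format(os.Stdout, style, it); err != nil {
		log.Fatal(err)
	}
}
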
@@ -211,7 +211,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
 }
 
-fmt.Fprintf(w, "<span%s%s>%*d\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), lineDigits, line)
+fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
 
 if highlight {
 fmt.Fprintf(w, "</span>")
@@ -237,7 +237,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
 }
 
 if f.lineNumbers && !wrapInTable {
-fmt.Fprintf(w, "<span%s%s>%*d</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), lineDigits, line)
+fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line))
 }
 
 for _, token := range tokens {
@@ -272,7 +272,19 @@ func (f *Formatter) lineIDAttribute(line int) string {
 if !f.linkableLineNumbers {
 return ""
 }
-return fmt.Sprintf(" id=\"%s%d\"", f.lineNumbersIDPrefix, line)
+return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
+}
+
+func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string {
+title := fmt.Sprintf("%*d", lineDigits, line)
+if !f.linkableLineNumbers {
+return title
+}
+return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title)
+}
+
+func (f *Formatter) lineID(line int) string {
+return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
 }
 
 func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {

@@ -14,7 +14,7 @@ var Bash = internal.Register(MustNewLexer(
 &Config{
 Name: "Bash",
 Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"},
-Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
+Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
 MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
 },
 Rules{

@@ -60,13 +60,13 @@ var Go = internal.Register(MustNewLexer(
 
 var goTemplateRules = Rules{
 "root": {
+{`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil},
 {`{{[-]?`, CommentPreproc, Push("template")},
 {`[^{]+`, Other, nil},
 {`{`, Other, nil},
 },
 "template": {
 {`[-]?}}`, CommentPreproc, Pop(1)},
-{`/\*.*?\*/`, Comment, nil},
 {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline
 {`\(`, Operator, Push("subexpression")},
 {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
@@ -80,19 +80,19 @@ var goTemplateRules = Rules{
 {`\s+`, Whitespace, nil},
 {`\(`, Operator, Push("subexpression")},
 {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil},
-{`\||:=`, Operator, nil},
+{`\||:?=|,`, Operator, nil},
 {`[$]?[^\W\d]\w*`, NameOther, nil},
 {`[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil},
 {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
-{`\d+i`, LiteralNumber, nil},
+{`-?\d+i`, LiteralNumber, nil},
-{`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
+{`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
 {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
-{`\d+[Ee][-+]\d+i`, LiteralNumber, nil},
+{`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil},
-{`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
+{`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
-{`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
+{`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
-{`0[0-7]+`, LiteralNumberOct, nil},
+{`-?0[0-7]+`, LiteralNumberOct, nil},
-{`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
+{`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
-{`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
+{`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil},
 {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
 {"`[^`]*`", LiteralString, nil},
 },

@@ -49,6 +49,7 @@ var JavascriptRules = Rules{
 {"`", LiteralStringBacktick, Pop(1)},
 {`\\\\`, LiteralStringBacktick, nil},
 {"\\\\`", LiteralStringBacktick, nil},
+{"\\\\[^`\\\\]", LiteralStringBacktick, nil},
 {`\$\{`, LiteralStringInterpol, Push("interp-inside")},
 {`\$`, LiteralStringBacktick, nil},
 {"[^`\\\\$]+", LiteralStringBacktick, nil},

@@ -0,0 +1,55 @@
+package p
+
+import (
+. "github.com/alecthomas/chroma" // nolint
+"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Promql lexer.
+var Promql = internal.Register(MustNewLexer(
+&Config{
+Name: "PromQL",
+Aliases: []string{"promql"},
+Filenames: []string{"*.promql"},
+MimeTypes: []string{},
+},
+Rules{
+"root": {
+{`\n`, TextWhitespace, nil},
+{`\s+`, TextWhitespace, nil},
+{`,`, Punctuation, nil},
+{Words(``, `\b`, `bool`, `by`, `group_left`, `group_right`, `ignoring`, `offset`, `on`, `without`), Keyword, nil},
+{Words(``, `\b`, `sum`, `min`, `max`, `avg`, `group`, `stddev`, `stdvar`, `count`, `count_values`, `bottomk`, `topk`, `quantile`), Keyword, nil},
+{Words(``, `\b`, `abs`, `absent`, `absent_over_time`, `avg_over_time`, `ceil`, `changes`, `clamp_max`, `clamp_min`, `count_over_time`, `day_of_month`, `day_of_week`, `days_in_month`, `delta`, `deriv`, `exp`, `floor`, `histogram_quantile`, `holt_winters`, `hour`, `idelta`, `increase`, `irate`, `label_join`, `label_replace`, `ln`, `log10`, `log2`, `max_over_time`, `min_over_time`, `minute`, `month`, `predict_linear`, `quantile_over_time`, `rate`, `resets`, `round`, `scalar`, `sort`, `sort_desc`, `sqrt`, `stddev_over_time`, `stdvar_over_time`, `sum_over_time`, `time`, `timestamp`, `vector`, `year`), KeywordReserved, nil},
+{`[1-9][0-9]*[smhdwy]`, LiteralString, nil},
+{`-?[0-9]+\.[0-9]+`, LiteralNumberFloat, nil},
+{`-?[0-9]+`, LiteralNumberInteger, nil},
+{`#.*?$`, CommentSingle, nil},
+{`(\+|\-|\*|\/|\%|\^)`, Operator, nil},
+{`==|!=|>=|<=|<|>`, Operator, nil},
+{`and|or|unless`, OperatorWord, nil},
+{`[_a-zA-Z][a-zA-Z0-9_]+`, NameVariable, nil},
+{`(["\'])(.*?)(["\'])`, ByGroups(Punctuation, LiteralString, Punctuation), nil},
+{`\(`, Operator, Push("function")},
+{`\)`, Operator, nil},
+{`\{`, Punctuation, Push("labels")},
+{`\[`, Punctuation, Push("range")},
+},
+"labels": {
+{`\}`, Punctuation, Pop(1)},
+{`\n`, TextWhitespace, nil},
+{`\s+`, TextWhitespace, nil},
+{`,`, Punctuation, nil},
+{`([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|~!)(\s*?)(")(.*?)(")`, ByGroups(NameLabel, TextWhitespace, Operator, TextWhitespace, Punctuation, LiteralString, Punctuation), nil},
+},
+"range": {
+{`\]`, Punctuation, Pop(1)},
+{`[1-9][0-9]*[smhdwy]`, LiteralString, nil},
+},
+"function": {
+{`\)`, Operator, Pop(1)},
+{`\(`, Operator, Push()},
+Default(Pop(1)),
+},
+},
+))
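
The new PromQL lexer above registers itself under the name "PromQL", the alias "promql" and the *.promql filename glob, so it can be selected like any other chroma lexer. A minimal sketch using chroma's quick helper; the "terminal256" formatter and "monokai" style are just commonly bundled names used for illustration, not something this update adds:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	// An arbitrary PromQL expression; the metric and label names here are made up.
	query := `sum(rate(http_requests_total{status!="500"}[5m])) by (instance)`

	// quick.Highlight resolves the lexer by its registered name or alias ("promql").
	if err := quick.Highlight(os.Stdout, query, "promql", "terminal256", "monokai"); err != nil {
		log.Fatal(err)
	}
}
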
@@ -0,0 +1,54 @@
+package lexers
+
+import (
+. "github.com/alecthomas/chroma" // nolint
+"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Qml lexer.
+var Qml = internal.Register(MustNewLexer(
+&Config{
+Name: "QML",
+Aliases: []string{"qml", "qbs"},
+Filenames: []string{"*.qml", "*.qbs"},
+MimeTypes: []string{"application/x-qml", "application/x-qt.qbs+qml"},
+DotAll: true,
+},
+Rules{
+"commentsandwhitespace": {
+{`\s+`, Text, nil},
+{`<!--`, Comment, nil},
+{`//.*?\n`, CommentSingle, nil},
+{`/\*.*?\*/`, CommentMultiline, nil},
+},
+"slashstartsregex": {
+Include("commentsandwhitespace"),
+{`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)},
+{`(?=/)`, Text, Push("#pop", "badregex")},
+Default(Pop(1)),
+},
+"badregex": {
+{`\n`, Text, Pop(1)},
+},
+"root": {
+{`^(?=\s|/|<!--)`, Text, Push("slashstartsregex")},
+Include("commentsandwhitespace"),
+{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
+{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
+{`[})\].]`, Punctuation, nil},
+{`\bid\s*:\s*[A-Za-z][\w.]*`, KeywordDeclaration, Push("slashstartsregex")},
+{`\b[A-Za-z][\w.]*\s*:`, Keyword, Push("slashstartsregex")},
+{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
+{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
+{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
+{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
+{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
+{`[$a-zA-Z_]\w*`, NameOther, nil},
+{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
+{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
+{`[0-9]+`, LiteralNumberInteger, nil},
+{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
+{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
+},
+},
+))

@@ -28,18 +28,18 @@ var Rust = internal.Register(MustNewLexer(
 {`/\*\*(\n|[^/*])`, LiteralStringDoc, Push("doccomment")},
 {`/\*!`, LiteralStringDoc, Push("doccomment")},
 {`/\*`, CommentMultiline, Push("comment")},
-{`r#*"(?:\\.|[^\\\r\n;])*"#*`, LiteralString, nil},
+{`r#*"(?:\\.|[^\\;])*"#*`, LiteralString, nil},
-{`"(?:\\.|[^\\\r\n"])*"`, LiteralString, nil},
+{`"(?:\\.|[^\\"])*"`, LiteralString, nil},
 {`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil},
-{Words(``, `\b`, `as`, `box`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil},
+{Words(``, `\b`, `as`, `async`, `await`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil},
-{Words(``, `\b`, `abstract`, `alignof`, `become`, `do`, `final`, `macro`, `offsetof`, `override`, `priv`, `proc`, `pure`, `sizeof`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil},
+{Words(``, `\b`, `abstract`, `become`, `box`, `do`, `final`, `macro`, `override`, `priv`, `try`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil},
 {`(true|false)\b`, KeywordConstant, nil},
 {`mod\b`, Keyword, Push("modname")},
 {`let\b`, KeywordDeclaration, nil},
 {`fn\b`, Keyword, Push("funcname")},
 {`(struct|enum|type|union)\b`, Keyword, Push("typename")},
 {`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil},
-{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil},
+{Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil},
 {`self\b`, NameBuiltinPseudo, nil},
 {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil},
 {`::\b`, Text, nil},

File diff suppressed because one or more lines are too long

@@ -30,10 +30,10 @@ var Zig = internal.Register(MustNewLexer(
 {`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
 {`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
 {`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
-{`0b[01]+`, LiteralNumberBin, nil},
+{`0b(?:_?[01])+`, LiteralNumberBin, nil},
-{`0o[0-7]+`, LiteralNumberOct, nil},
+{`0o(?:_?[0-7])+`, LiteralNumberOct, nil},
-{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
+{`0x(?:_?[0-9a-fA-F])+`, LiteralNumberHex, nil},
-{`[0-9]+`, LiteralNumberInteger, nil},
+{`(?:_?[0-9])+`, LiteralNumberInteger, nil},
 {`@[a-zA-Z_]\w*`, NameBuiltin, nil},
 {`[a-zA-Z_]\w*`, Name, nil},
 {`\'\\\'\'`, LiteralStringEscape, nil},

@@ -0,0 +1,81 @@
+package styles
+
+import (
+"github.com/alecthomas/chroma"
+)
+
+// Base16Snazzy style
+var Base16Snazzy = Register(chroma.MustNewStyle("base16-snazzy", chroma.StyleEntries{
+chroma.Comment: "#78787e",
+chroma.CommentHashbang: "#78787e",
+chroma.CommentMultiline: "#78787e",
+chroma.CommentPreproc: "#78787e",
+chroma.CommentSingle: "#78787e",
+chroma.CommentSpecial: "#78787e",
+chroma.Generic: "#e2e4e5",
+chroma.GenericDeleted: "#ff5c57",
+chroma.GenericEmph: "#e2e4e5 underline",
+chroma.GenericError: "#ff5c57",
+chroma.GenericHeading: "#e2e4e5 bold",
+chroma.GenericInserted: "#e2e4e5 bold",
+chroma.GenericOutput: "#43454f",
+chroma.GenericPrompt: "#e2e4e5",
+chroma.GenericStrong: "#e2e4e5 italic",
+chroma.GenericSubheading: "#e2e4e5 bold",
+chroma.GenericTraceback: "#e2e4e5",
+chroma.GenericUnderline: "underline",
+chroma.Error: "#ff5c57",
+chroma.Keyword: "#ff6ac1",
+chroma.KeywordConstant: "#ff6ac1",
+chroma.KeywordDeclaration: "#ff5c57",
+chroma.KeywordNamespace: "#ff6ac1",
+chroma.KeywordPseudo: "#ff6ac1",
+chroma.KeywordReserved: "#ff6ac1",
+chroma.KeywordType: "#9aedfe",
+chroma.Literal: "#e2e4e5",
+chroma.LiteralDate: "#e2e4e5",
+chroma.Name: "#e2e4e5",
+chroma.NameAttribute: "#57c7ff",
+chroma.NameBuiltin: "#ff5c57",
+chroma.NameBuiltinPseudo: "#e2e4e5",
+chroma.NameClass: "#f3f99d",
+chroma.NameConstant: "#ff9f43",
+chroma.NameDecorator: "#ff9f43",
+chroma.NameEntity: "#e2e4e5",
+chroma.NameException: "#e2e4e5",
+chroma.NameFunction: "#57c7ff",
+chroma.NameLabel: "#ff5c57",
+chroma.NameNamespace: "#e2e4e5",
+chroma.NameOther: "#e2e4e5",
+chroma.NameTag: "#ff6ac1",
+chroma.NameVariable: "#ff5c57",
+chroma.NameVariableClass: "#ff5c57",
+chroma.NameVariableGlobal: "#ff5c57",
+chroma.NameVariableInstance: "#ff5c57",
+chroma.LiteralNumber: "#ff9f43",
+chroma.LiteralNumberBin: "#ff9f43",
+chroma.LiteralNumberFloat: "#ff9f43",
+chroma.LiteralNumberHex: "#ff9f43",
+chroma.LiteralNumberInteger: "#ff9f43",
+chroma.LiteralNumberIntegerLong: "#ff9f43",
+chroma.LiteralNumberOct: "#ff9f43",
+chroma.Operator: "#ff6ac1",
+chroma.OperatorWord: "#ff6ac1",
+chroma.Other: "#e2e4e5",
+chroma.Punctuation: "#e2e4e5",
+chroma.LiteralString: "#5af78e",
+chroma.LiteralStringBacktick: "#5af78e",
+chroma.LiteralStringChar: "#5af78e",
+chroma.LiteralStringDoc: "#5af78e",
+chroma.LiteralStringDouble: "#5af78e",
+chroma.LiteralStringEscape: "#5af78e",
+chroma.LiteralStringHeredoc: "#5af78e",
+chroma.LiteralStringInterpol: "#5af78e",
+chroma.LiteralStringOther: "#5af78e",
+chroma.LiteralStringRegex: "#5af78e",
+chroma.LiteralStringSingle: "#5af78e",
+chroma.LiteralStringSymbol: "#5af78e",
+chroma.Text: "#e2e4e5",
+chroma.TextWhitespace: "#e2e4e5",
+chroma.Background: " bg:#282a36",
+}))
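
The base16-snazzy style above is registered under the name "base16-snazzy" and becomes selectable wherever a chroma style name is accepted. A minimal sketch that emits a CSS stylesheet for it via the class-based HTML formatter; WriteCSS is chroma's documented way to do this, but treat the bool argument to WithClasses as an assumption about this chroma version:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	// styles.Get falls back to a default style if the name is unknown, so this also
	// behaves sensibly against chroma versions that predate base16-snazzy.
	style := styles.Get("base16-snazzy")

	// With classes enabled the formatter emits class names instead of inline styles,
	// and WriteCSS produces the matching stylesheet for the chosen style.
	formatter := html.New(html.WithClasses(true))
	if err := formatter.WriteCSS(os.Stdout, style); err != nil {
		log.Fatal(err)
	}
}
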
@@ -1,8 +1,12 @@
 #!/usr/bin/env python3
+import re
 from collections import defaultdict
 from subprocess import check_output
 
-lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode('utf-8').splitlines()
+README_FILE = "README.md"
+
+
+lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode("utf-8").splitlines()
 lines = [line.strip() for line in lines if line.startswith(" ") and not line.startswith(" ")]
 lines = sorted(lines, key=lambda l: l.lower())
 
@@ -11,5 +15,18 @@ table = defaultdict(list)
 for line in lines:
 table[line[0].upper()].append(line)
 
+rows = []
 for key, value in table.items():
-print("{} | {}".format(key, ', '.join(value)))
+rows.append("{} | {}".format(key, ", ".join(value)))
+tbody = "\n".join(rows)
+
+with open(README_FILE, "r") as f:
+content = f.read()
+
+with open(README_FILE, "w") as f:
+marker = re.compile(r"(?P<start>:----: \\| --------\n).*?(?P<end>\n\n)", re.DOTALL)
+replacement = r"\g<start>%s\g<end>" % tbody
+updated_content = marker.sub(replacement, content)
+f.write(updated_content)
+
+print(tbody)

@@ -8,11 +8,11 @@ require (
 github.com/blevesearch/go-porterstemmer v1.0.3
 github.com/blevesearch/segment v0.9.0
 github.com/blevesearch/snowballstem v0.9.0
-github.com/blevesearch/zap/v11 v11.0.12
+github.com/blevesearch/zap/v11 v11.0.13
-github.com/blevesearch/zap/v12 v12.0.12
+github.com/blevesearch/zap/v12 v12.0.13
-github.com/blevesearch/zap/v13 v13.0.4
+github.com/blevesearch/zap/v13 v13.0.5
-github.com/blevesearch/zap/v14 v14.0.3
+github.com/blevesearch/zap/v14 v14.0.4
-github.com/blevesearch/zap/v15 v15.0.1
+github.com/blevesearch/zap/v15 v15.0.2
 github.com/couchbase/moss v0.1.0
 github.com/couchbase/vellum v1.0.2
 github.com/golang/protobuf v1.3.2

@@ -106,14 +106,18 @@ func (dm *DocumentMapping) fieldDescribedByPath(path string) *FieldMapping {
 return subDocMapping.fieldDescribedByPath(encodePath(pathElements[1:]))
 }
 }
-} else {
+}
-// just 1 path elememnt
+
+// either the path just had one element
+// or it had multiple, but no match for the first element at this level
+// look for match with full path
+
 // first look for property name with empty field
 for propName, subDocMapping := range dm.Properties {
-if propName == pathElements[0] {
+if propName == path {
 // found property name match, now look at its fields
 for _, field := range subDocMapping.Fields {
-if field.Name == "" || field.Name == pathElements[0] {
+if field.Name == "" || field.Name == path {
 // match
 return field
 }
@@ -122,16 +126,15 @@ func (dm *DocumentMapping) fieldDescribedByPath(path string) *FieldMapping {
 }
 // next, walk the properties again, looking for field overriding the name
 for propName, subDocMapping := range dm.Properties {
-if propName != pathElements[0] {
+if propName != path {
 // property name isn't a match, but field name could override it
 for _, field := range subDocMapping.Fields {
-if field.Name == pathElements[0] {
+if field.Name == path {
 return field
 }
 }
 }
 }
-}
 
 return nil
 }

@@ -50,7 +50,7 @@ type IndexMappingImpl struct {
 DefaultField string `json:"default_field"`
 StoreDynamic bool `json:"store_dynamic"`
 IndexDynamic bool `json:"index_dynamic"`
-DocValuesDynamic bool `json:"docvalues_dynamic,omitempty"`
+DocValuesDynamic bool `json:"docvalues_dynamic"`
 CustomAnalysis *customAnalysis `json:"analysis,omitempty"`
 cache *registry.Cache
 }

@@ -4,7 +4,7 @@ go 1.12
 
 require (
 github.com/RoaringBitmap/roaring v0.4.23
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/blevesearch/mmap-go v1.0.2
 github.com/couchbase/vellum v1.0.2
 github.com/golang/snappy v0.0.1

@@ -4,7 +4,7 @@ go 1.12
 
 require (
 github.com/RoaringBitmap/roaring v0.4.23
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/blevesearch/mmap-go v1.0.2
 github.com/couchbase/vellum v1.0.2
 github.com/golang/snappy v0.0.1

@@ -4,7 +4,7 @@ go 1.12
 
 require (
 github.com/RoaringBitmap/roaring v0.4.23
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/blevesearch/mmap-go v1.0.2
 github.com/couchbase/vellum v1.0.2
 github.com/golang/snappy v0.0.1

@@ -4,7 +4,7 @@ go 1.12
 
 require (
 github.com/RoaringBitmap/roaring v0.4.23
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/blevesearch/mmap-go v1.0.2
 github.com/couchbase/vellum v1.0.2
 github.com/golang/snappy v0.0.1

@@ -4,7 +4,7 @@ go 1.12
 
 require (
 github.com/RoaringBitmap/roaring v0.4.23
-github.com/blevesearch/bleve v1.0.12
+github.com/blevesearch/bleve v1.0.13
 github.com/blevesearch/mmap-go v1.0.2
 github.com/couchbase/vellum v1.0.2
 github.com/golang/snappy v0.0.1
@@ -105,6 +105,10 @@ func (d *chunkedIntDecoder) readUvarint() (uint64, error) {
 	return d.r.ReadUvarint()
 }

+func (d *chunkedIntDecoder) readBytes(start, end int) []byte {
+	return d.curChunkBytes[start:end]
+}
+
 func (d *chunkedIntDecoder) SkipUvarint() {
 	d.r.SkipUvarint()
 }
@@ -116,3 +120,7 @@ func (d *chunkedIntDecoder) SkipBytes(count int) {
 func (d *chunkedIntDecoder) Len() int {
 	return d.r.Len()
 }
+
+func (d *chunkedIntDecoder) remainingLen() int {
+	return len(d.curChunkBytes) - d.r.Len()
+}
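The two helpers added above let the decoder report how far it has read into the current chunk and hand back the raw bytes between two offsets; the merge code later in this batch uses that to forward already-encoded postings verbatim. A minimal sketch of the same pattern over a plain byte slice — the type here is illustrative, not zap's:

```go
package main

import (
	"bytes"
	"fmt"
)

type sliceDecoder struct {
	buf []byte
	r   *bytes.Reader
}

func newSliceDecoder(b []byte) *sliceDecoder {
	return &sliceDecoder{buf: b, r: bytes.NewReader(b)}
}

// remainingLen mirrors chunkedIntDecoder.remainingLen: the offset consumed so far.
func (d *sliceDecoder) remainingLen() int { return len(d.buf) - d.r.Len() }

// readBytes mirrors chunkedIntDecoder.readBytes: raw bytes between two offsets.
func (d *sliceDecoder) readBytes(start, end int) []byte { return d.buf[start:end] }

func main() {
	d := newSliceDecoder([]byte{0x01, 0x02, 0x03, 0x04})
	start := d.remainingLen()
	d.r.ReadByte()
	d.r.ReadByte()
	end := d.remainingLen()
	fmt.Printf("consumed %d bytes: % x\n", end-start, d.readBytes(start, end))
}
```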
@@ -341,11 +341,16 @@ func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap,

 			postItr = postings.iterator(true, true, true, postItr)

-			// can no longer optimize by copying, since chunk factor could have changed
+			if fieldsSame {
+				// can optimize by copying freq/norm/loc bytes directly
+				lastDocNum, lastFreq, lastNorm, err = mergeTermFreqNormLocsByCopying(
+					term, postItr, newDocNums[itrI], newRoaring,
+					tfEncoder, locEncoder)
+			} else {
 				lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs(
 					fieldsMap, term, postItr, newDocNums[itrI], newRoaring,
 					tfEncoder, locEncoder, bufLoc)
+			}
 			if err != nil {
 				return nil, 0, err
 			}
@@ -473,6 +478,42 @@ func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap,
 	return rv, fieldDvLocsOffset, nil
 }

+func mergeTermFreqNormLocsByCopying(term []byte, postItr *PostingsIterator,
+	newDocNums []uint64, newRoaring *roaring.Bitmap,
+	tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder) (
+	lastDocNum uint64, lastFreq uint64, lastNorm uint64, err error) {
+	nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err :=
+		postItr.nextBytes()
+	for err == nil && len(nextFreqNormBytes) > 0 {
+		hitNewDocNum := newDocNums[nextDocNum]
+		if hitNewDocNum == docDropped {
+			return 0, 0, 0, fmt.Errorf("see hit with dropped doc num")
+		}
+
+		newRoaring.Add(uint32(hitNewDocNum))
+		err = tfEncoder.AddBytes(hitNewDocNum, nextFreqNormBytes)
+		if err != nil {
+			return 0, 0, 0, err
+		}
+
+		if len(nextLocBytes) > 0 {
+			err = locEncoder.AddBytes(hitNewDocNum, nextLocBytes)
+			if err != nil {
+				return 0, 0, 0, err
+			}
+		}
+
+		lastDocNum = hitNewDocNum
+		lastFreq = nextFreq
+		lastNorm = nextNorm
+
+		nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err =
+			postItr.nextBytes()
+	}
+
+	return lastDocNum, lastFreq, lastNorm, err
+}
+
 func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator,
 	newDocNums []uint64, newRoaring *roaring.Bitmap,
 	tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder, bufLoc []uint64) (
@@ -588,6 +588,58 @@ func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool,
 	return uint64(n), true, nil
 }

+var freqHasLocs1Hit = encodeFreqHasLocs(1, false)
+
+// nextBytes returns the docNum and the encoded freq & loc bytes for
+// the next posting
+func (i *PostingsIterator) nextBytes() (
+	docNumOut uint64, freq uint64, normBits uint64,
+	bytesFreqNorm []byte, bytesLoc []byte, err error) {
+	docNum, exists, err := i.nextDocNumAtOrAfter(0)
+	if err != nil || !exists {
+		return 0, 0, 0, nil, nil, err
+	}
+
+	if i.normBits1Hit != 0 {
+		if i.buf == nil {
+			i.buf = make([]byte, binary.MaxVarintLen64*2)
+		}
+		n := binary.PutUvarint(i.buf, freqHasLocs1Hit)
+		n += binary.PutUvarint(i.buf[n:], i.normBits1Hit)
+		return docNum, uint64(1), i.normBits1Hit, i.buf[:n], nil, nil
+	}
+
+	startFreqNorm := i.freqNormReader.remainingLen()
+
+	var hasLocs bool
+
+	freq, normBits, hasLocs, err = i.readFreqNormHasLocs()
+	if err != nil {
+		return 0, 0, 0, nil, nil, err
+	}
+
+	endFreqNorm := i.freqNormReader.remainingLen()
+	bytesFreqNorm = i.freqNormReader.readBytes(startFreqNorm, endFreqNorm)
+
+	if hasLocs {
+		startLoc := i.locReader.remainingLen()
+
+		numLocsBytes, err := i.locReader.readUvarint()
+		if err != nil {
+			return 0, 0, 0, nil, nil,
+				fmt.Errorf("error reading location nextBytes numLocs: %v", err)
+		}
+
+		// skip over all the location bytes
+		i.locReader.SkipBytes(int(numLocsBytes))
+
+		endLoc := i.locReader.remainingLen()
+		bytesLoc = i.locReader.readBytes(startLoc, endLoc)
+	}
+
+	return docNum, freq, normBits, bytesFreqNorm, bytesLoc, nil
+}
+
 // optimization when the postings list is "clean" (e.g., no updates &
 // no deletions) where the all bitmap is the same as the actual bitmap
 func (i *PostingsIterator) nextDocNumAtOrAfterClean(
@@ -194,7 +194,7 @@ func (s *Segment) loadConfig() error {
 	verOffset := crcOffset - 4
 	s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4])
 	if s.version != Version {
-		return fmt.Errorf("unsupported version %d", s.version)
+		return fmt.Errorf("unsupported version %d != %d", s.version, Version)
 	}

 	chunkOffset := verOffset - 4
@@ -1,5 +1,12 @@
 # Change log

+## v2.3.9 - 2020-11-28
+
+- Fix path separator on Windows
+  ([#83](https://github.com/editorconfig/editorconfig-core-go/pull/89));
+- Upgrade go-cmp v0.5.4
+  ([#91](https://github.com/editorconfig/editorconfig-core-go/pull/91)).
+
 ## v2.3.8 - 2020-10-17

 - Feat more tests
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"io"
 	"os"
+	"runtime"
 	"strings"

 	"gopkg.in/ini.v1"
@@ -122,7 +123,11 @@ func (e *Editorconfig) GetDefinitionForFilename(name string) (*Definition, error
 		}

 		if !strings.HasPrefix(name, "/") {
+			if runtime.GOOS != "windows" {
 				name = "/" + name
+			} else {
+				name = "\\" + name
+			}
 		}

 		ok, err := e.FnmatchCase(selector, name)
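The change above makes the leading-separator normalization OS-aware, so that on Windows the filename lines up with patterns translated with a `\`-based separator (see the fnmatch change that follows). A small hedged sketch of the same normalization as a standalone helper:

```go
package main

import (
	"fmt"
	"runtime"
	"strings"
)

// ensureLeadingSeparator mirrors the normalization done in
// GetDefinitionForFilename: prefix the name with the platform's
// separator when it is not already rooted.
func ensureLeadingSeparator(name string) string {
	if strings.HasPrefix(name, "/") {
		return name
	}
	if runtime.GOOS != "windows" {
		return "/" + name
	}
	return "\\" + name
}

func main() {
	fmt.Println(ensureLeadingSeparator("main.go")) // "/main.go" (or "\main.go" on Windows)
}
```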
@@ -3,6 +3,7 @@ package editorconfig
 import (
 	"fmt"
 	"regexp"
+	"runtime"
 	"strconv"
 	"strings"
 )
@@ -41,6 +42,11 @@ func translate(pattern string) string { // nolint: gocyclo

 	matchesBraces := len(findLeftBrackets.FindAllString(pattern, -1)) == len(findRightBrackets.FindAllString(pattern, -1))

+	pathSeparator := "/"
+	if runtime.GOOS == "windows" {
+		pathSeparator = regexp.QuoteMeta("\\")
+	}
+
 	for index < length {
 		r := pat[index]
 		index++
@@ -52,19 +58,19 @@ func translate(pattern string) string { // nolint: gocyclo
 				result.WriteString(".*")
 				index++
 			} else {
-				result.WriteString("[^/]*")
+				result.WriteString(fmt.Sprintf("[^%s]*", pathSeparator))
 			}
 		case '/':
 			p := index
 			if p+2 < length && pat[p] == '*' && pat[p+1] == '*' && pat[p+2] == '/' {
-				result.WriteString("(?:/|/.*/)")
+				result.WriteString(fmt.Sprintf("(?:%s|%s.*%s)", pathSeparator, pathSeparator, pathSeparator))

 				index += 3
 			} else {
 				result.WriteRune(r)
 			}
 		case '?':
-			result.WriteString("[^/]")
+			result.WriteString(fmt.Sprintf("[^%s]", pathSeparator))
 		case '[':
 			if inBrackets {
 				result.WriteString("\\[")
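With `pathSeparator` factored out, the character classes that `translate` emits for `*`, `?` and `**/` exclude (or match) the platform's separator rather than a hard-coded `/`. A sketch of just that fragment-building step, assuming the same `QuoteMeta` treatment as in the diff:

```go
package main

import (
	"fmt"
	"regexp"
	"runtime"
)

// separatorFragments shows the regexp pieces built per platform.
func separatorFragments() (star, question, doubleStarSlash string) {
	pathSeparator := "/"
	if runtime.GOOS == "windows" {
		pathSeparator = regexp.QuoteMeta("\\")
	}
	star = fmt.Sprintf("[^%s]*", pathSeparator)
	question = fmt.Sprintf("[^%s]", pathSeparator)
	doubleStarSlash = fmt.Sprintf("(?:%s|%s.*%s)", pathSeparator, pathSeparator, pathSeparator)
	return star, question, doubleStarSlash
}

func main() {
	s, q, d := separatorFragments()
	fmt.Println(s, q, d) // e.g. [^/]* [^/] (?:/|/.*/) on non-Windows
}
```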
@@ -3,7 +3,7 @@ module github.com/editorconfig/editorconfig-core-go/v2
 go 1.13

 require (
-	github.com/google/go-cmp v0.5.2
+	github.com/google/go-cmp v0.5.4
 	github.com/smartystreets/goconvey v1.6.4 // indirect
 	golang.org/x/mod v0.3.0
 	gopkg.in/ini.v1 v1.62.0
@@ -1,5 +1,5 @@
-github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
+github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M=
-github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
@@ -42,16 +42,6 @@ func (f *decompressor) $FUNCNAME$() {
 		stateDict
 	)
 	fr := f.r.($TYPE$)
-	moreBits := func() error {
-		c, err := fr.ReadByte()
-		if err != nil {
-			return noEOF(err)
-		}
-		f.roffset++
-		f.b |= uint32(c) << f.nb
-		f.nb += 8
-		return nil
-	}

 	switch f.stepState {
 	case stateInit:
@@ -112,9 +102,7 @@ readLiteral:
 			}
 		}

-		var n uint // number of bits extra
 		var length int
-		var err error
 		switch {
 		case v < 256:
 			f.dict.writeByte(byte(v))
@@ -131,25 +119,26 @@ readLiteral:
 		// otherwise, reference to older data
 		case v < 265:
 			length = v - (257 - 3)
-			n = 0
-		case v < 269:
-			length = v*2 - (265*2 - 11)
-			n = 1
-		case v < 273:
-			length = v*4 - (269*4 - 19)
-			n = 2
-		case v < 277:
-			length = v*8 - (273*8 - 35)
-			n = 3
-		case v < 281:
-			length = v*16 - (277*16 - 67)
-			n = 4
-		case v < 285:
-			length = v*32 - (281*32 - 131)
-			n = 5
 		case v < maxNumLit:
-			length = 258
-			n = 0
+			val := decCodeToLen[(v - 257)]
+			length = int(val.length) + 3
+			n := uint(val.extra)
+			for f.nb < n {
+				c, err := fr.ReadByte()
+				if err != nil {
+					if debugDecode {
+						fmt.Println("morebits n>0:", err)
+					}
+					f.err = err
+					return
+				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
+			}
+			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
+			f.b >>= n & regSizeMaskUint32
+			f.nb -= n
 		default:
 			if debugDecode {
 				fmt.Println(v, ">= maxNumLit")
@@ -157,45 +146,70 @@ readLiteral:
 			f.err = CorruptInputError(f.roffset)
 			return
 		}
-		if n > 0 {
-			for f.nb < n {
-				if err = moreBits(); err != nil {
-					if debugDecode {
-						fmt.Println("morebits n>0:", err)
-					}
-					f.err = err
-					return
-				}
-			}
-			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
-			f.b >>= n & regSizeMaskUint32
-			f.nb -= n
-		}

 		var dist uint32
 		if f.hd == nil {
 			for f.nb < 5 {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<5:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			dist = uint32(bits.Reverse8(uint8(f.b & 0x1F << 3)))
 			f.b >>= 5
 			f.nb -= 5
 		} else {
-			sym, err := f.huffSym(f.hd)
-			if err != nil {
-				if debugDecode {
-					fmt.Println("huffsym:", err)
-				}
-				f.err = err
-				return
-			}
-			dist = uint32(sym)
+			// Since a huffmanDecoder can be empty or be composed of a degenerate tree
+			// with single element, huffSym must error on these two edge cases. In both
+			// cases, the chunks slice will be 0 for the invalid sequence, leading it
+			// satisfy the n == 0 check below.
+			n := uint(f.hd.maxRead)
+			// Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+			// but is smart enough to keep local variables in registers, so use nb and b,
+			// inline call to moreBits and reassign b,nb back to f on return.
+			nb, b := f.nb, f.b
+			for {
+				for nb < n {
+					c, err := fr.ReadByte()
+					if err != nil {
+						f.b = b
+						f.nb = nb
+						f.err = noEOF(err)
+						return
+					}
+					f.roffset++
+					b |= uint32(c) << (nb & regSizeMaskUint32)
+					nb += 8
+				}
+				chunk := f.hd.chunks[b&(huffmanNumChunks-1)]
+				n = uint(chunk & huffmanCountMask)
+				if n > huffmanChunkBits {
+					chunk = f.hd.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&f.hd.linkMask]
+					n = uint(chunk & huffmanCountMask)
+				}
+				if n <= nb {
+					if n == 0 {
+						f.b = b
+						f.nb = nb
+						if debugDecode {
+							fmt.Println("huffsym: n==0")
+						}
+						f.err = CorruptInputError(f.roffset)
+						return
+					}
+					f.b = b >> (n & regSizeMaskUint32)
+					f.nb = nb - n
+					dist = uint32(chunk >> huffmanValueShift)
+					break
+				}
+			}
 		}

 		switch {
@@ -206,13 +220,17 @@ readLiteral:
 			// have 1 bit in bottom of dist, need nb more.
 			extra := (dist & 1) << (nb & regSizeMaskUint32)
 			for f.nb < nb {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<nb:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			extra |= f.b & uint32(1<<(nb&regSizeMaskUint32)-1)
 			f.b >>= nb & regSizeMaskUint32
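The shape of this rewrite is to drop the `moreBits` closure and the `f.moreBits()` method calls and instead refill the bit buffer inline from the concrete reader `fr`, so the hot loop can keep the bit accumulator in registers. A stripped-down sketch of that refill loop over a `*bytes.Reader` — illustrative only, not the library's exact state machine:

```go
package main

import (
	"bytes"
	"fmt"
)

// refill pulls whole bytes into a bit buffer until at least n bits are
// available, the same shape as the inlined loops in the diff above.
func refill(fr *bytes.Reader, b uint32, nb uint, n uint) (uint32, uint, error) {
	for nb < n {
		c, err := fr.ReadByte()
		if err != nil {
			return b, nb, err
		}
		b |= uint32(c) << nb
		nb += 8
	}
	return b, nb, nil
}

func main() {
	fr := bytes.NewReader([]byte{0xAB, 0xCD})
	b, nb, err := refill(fr, 0, 0, 13)
	fmt.Printf("b=%#x nb=%d err=%v\n", b, nb, err) // b=0xcdab nb=16 err=<nil>
}
```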
@@ -29,6 +29,13 @@ const (
 	debugDecode = false
 )

+// Value of length - 3 and extra bits.
+type lengthExtra struct {
+	length, extra uint8
+}
+
+var decCodeToLen = [32]lengthExtra{{length: 0x0, extra: 0x0}, {length: 0x1, extra: 0x0}, {length: 0x2, extra: 0x0}, {length: 0x3, extra: 0x0}, {length: 0x4, extra: 0x0}, {length: 0x5, extra: 0x0}, {length: 0x6, extra: 0x0}, {length: 0x7, extra: 0x0}, {length: 0x8, extra: 0x1}, {length: 0xa, extra: 0x1}, {length: 0xc, extra: 0x1}, {length: 0xe, extra: 0x1}, {length: 0x10, extra: 0x2}, {length: 0x14, extra: 0x2}, {length: 0x18, extra: 0x2}, {length: 0x1c, extra: 0x2}, {length: 0x20, extra: 0x3}, {length: 0x28, extra: 0x3}, {length: 0x30, extra: 0x3}, {length: 0x38, extra: 0x3}, {length: 0x40, extra: 0x4}, {length: 0x50, extra: 0x4}, {length: 0x60, extra: 0x4}, {length: 0x70, extra: 0x4}, {length: 0x80, extra: 0x5}, {length: 0xa0, extra: 0x5}, {length: 0xc0, extra: 0x5}, {length: 0xe0, extra: 0x5}, {length: 0xff, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}, {length: 0x0, extra: 0x0}}
+
 // Initialize the fixedHuffmanDecoder only once upon first use.
 var fixedOnce sync.Once
 var fixedHuffmanDecoder huffmanDecoder
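The table above folds the old per-range `case` arithmetic into a lookup: for a length code `v`, the base length is `decCodeToLen[v-257].length + 3` and `extra` is the number of extra bits to read. For example, code 270 maps to entry 13 = `{length: 0x14, extra: 0x2}`, i.e. lengths 23–26 depending on the two extra bits, which matches the DEFLATE length table. A tiny check using a couple of entries copied from the diff:

```go
package main

import "fmt"

type lengthExtra struct{ length, extra uint8 }

// A few entries copied from the table in the diff (index = code - 257).
var decCodeToLen = map[int]lengthExtra{
	13: {length: 0x14, extra: 0x2}, // code 270
	20: {length: 0x40, extra: 0x4}, // code 277
}

func main() {
	for _, code := range []int{270, 277} {
		val := decCodeToLen[code-257]
		base := int(val.length) + 3
		fmt.Printf("code %d: lengths %d-%d (%d extra bits)\n",
			code, base, base+(1<<val.extra)-1, val.extra)
	}
}
```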
@@ -20,16 +20,6 @@ func (f *decompressor) huffmanBytesBuffer() {
 		stateDict
 	)
 	fr := f.r.(*bytes.Buffer)
-	moreBits := func() error {
-		c, err := fr.ReadByte()
-		if err != nil {
-			return noEOF(err)
-		}
-		f.roffset++
-		f.b |= uint32(c) << f.nb
-		f.nb += 8
-		return nil
-	}

 	switch f.stepState {
 	case stateInit:
@@ -90,9 +80,7 @@ readLiteral:
 			}
 		}

-		var n uint // number of bits extra
 		var length int
-		var err error
 		switch {
 		case v < 256:
 			f.dict.writeByte(byte(v))
@@ -109,25 +97,26 @@ readLiteral:
 		// otherwise, reference to older data
 		case v < 265:
 			length = v - (257 - 3)
-			n = 0
-		case v < 269:
-			length = v*2 - (265*2 - 11)
-			n = 1
-		case v < 273:
-			length = v*4 - (269*4 - 19)
-			n = 2
-		case v < 277:
-			length = v*8 - (273*8 - 35)
-			n = 3
-		case v < 281:
-			length = v*16 - (277*16 - 67)
-			n = 4
-		case v < 285:
-			length = v*32 - (281*32 - 131)
-			n = 5
 		case v < maxNumLit:
-			length = 258
-			n = 0
+			val := decCodeToLen[(v - 257)]
+			length = int(val.length) + 3
+			n := uint(val.extra)
+			for f.nb < n {
+				c, err := fr.ReadByte()
+				if err != nil {
+					if debugDecode {
+						fmt.Println("morebits n>0:", err)
+					}
+					f.err = err
+					return
+				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
+			}
+			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
+			f.b >>= n & regSizeMaskUint32
+			f.nb -= n
 		default:
 			if debugDecode {
 				fmt.Println(v, ">= maxNumLit")
@@ -135,45 +124,70 @@ readLiteral:
 			f.err = CorruptInputError(f.roffset)
 			return
 		}
-		if n > 0 {
-			for f.nb < n {
-				if err = moreBits(); err != nil {
-					if debugDecode {
-						fmt.Println("morebits n>0:", err)
-					}
-					f.err = err
-					return
-				}
-			}
-			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
-			f.b >>= n & regSizeMaskUint32
-			f.nb -= n
-		}

 		var dist uint32
 		if f.hd == nil {
 			for f.nb < 5 {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<5:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			dist = uint32(bits.Reverse8(uint8(f.b & 0x1F << 3)))
 			f.b >>= 5
 			f.nb -= 5
 		} else {
-			sym, err := f.huffSym(f.hd)
-			if err != nil {
-				if debugDecode {
-					fmt.Println("huffsym:", err)
-				}
-				f.err = err
-				return
-			}
-			dist = uint32(sym)
+			// Since a huffmanDecoder can be empty or be composed of a degenerate tree
+			// with single element, huffSym must error on these two edge cases. In both
+			// cases, the chunks slice will be 0 for the invalid sequence, leading it
+			// satisfy the n == 0 check below.
+			n := uint(f.hd.maxRead)
+			// Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+			// but is smart enough to keep local variables in registers, so use nb and b,
+			// inline call to moreBits and reassign b,nb back to f on return.
+			nb, b := f.nb, f.b
+			for {
+				for nb < n {
+					c, err := fr.ReadByte()
+					if err != nil {
+						f.b = b
+						f.nb = nb
+						f.err = noEOF(err)
+						return
+					}
+					f.roffset++
+					b |= uint32(c) << (nb & regSizeMaskUint32)
+					nb += 8
+				}
+				chunk := f.hd.chunks[b&(huffmanNumChunks-1)]
+				n = uint(chunk & huffmanCountMask)
+				if n > huffmanChunkBits {
+					chunk = f.hd.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&f.hd.linkMask]
+					n = uint(chunk & huffmanCountMask)
+				}
+				if n <= nb {
+					if n == 0 {
+						f.b = b
+						f.nb = nb
+						if debugDecode {
+							fmt.Println("huffsym: n==0")
+						}
+						f.err = CorruptInputError(f.roffset)
+						return
+					}
+					f.b = b >> (n & regSizeMaskUint32)
+					f.nb = nb - n
+					dist = uint32(chunk >> huffmanValueShift)
+					break
+				}
+			}
 		}

 		switch {
@@ -184,13 +198,17 @@ readLiteral:
 			// have 1 bit in bottom of dist, need nb more.
 			extra := (dist & 1) << (nb & regSizeMaskUint32)
 			for f.nb < nb {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<nb:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			extra |= f.b & uint32(1<<(nb&regSizeMaskUint32)-1)
 			f.b >>= nb & regSizeMaskUint32
@@ -246,16 +264,6 @@ func (f *decompressor) huffmanBytesReader() {
 		stateDict
 	)
 	fr := f.r.(*bytes.Reader)
-	moreBits := func() error {
-		c, err := fr.ReadByte()
-		if err != nil {
-			return noEOF(err)
-		}
-		f.roffset++
-		f.b |= uint32(c) << f.nb
-		f.nb += 8
-		return nil
-	}

 	switch f.stepState {
 	case stateInit:
@@ -316,9 +324,7 @@ readLiteral:
 			}
 		}

-		var n uint // number of bits extra
 		var length int
-		var err error
 		switch {
 		case v < 256:
 			f.dict.writeByte(byte(v))
@@ -335,25 +341,26 @@ readLiteral:
 		// otherwise, reference to older data
 		case v < 265:
 			length = v - (257 - 3)
-			n = 0
-		case v < 269:
-			length = v*2 - (265*2 - 11)
-			n = 1
-		case v < 273:
-			length = v*4 - (269*4 - 19)
-			n = 2
-		case v < 277:
-			length = v*8 - (273*8 - 35)
-			n = 3
-		case v < 281:
-			length = v*16 - (277*16 - 67)
-			n = 4
-		case v < 285:
-			length = v*32 - (281*32 - 131)
-			n = 5
 		case v < maxNumLit:
-			length = 258
-			n = 0
+			val := decCodeToLen[(v - 257)]
+			length = int(val.length) + 3
+			n := uint(val.extra)
+			for f.nb < n {
+				c, err := fr.ReadByte()
+				if err != nil {
+					if debugDecode {
+						fmt.Println("morebits n>0:", err)
+					}
+					f.err = err
+					return
+				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
+			}
+			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
+			f.b >>= n & regSizeMaskUint32
+			f.nb -= n
 		default:
 			if debugDecode {
 				fmt.Println(v, ">= maxNumLit")
@@ -361,45 +368,70 @@ readLiteral:
 			f.err = CorruptInputError(f.roffset)
 			return
 		}
-		if n > 0 {
-			for f.nb < n {
-				if err = moreBits(); err != nil {
-					if debugDecode {
-						fmt.Println("morebits n>0:", err)
-					}
-					f.err = err
-					return
-				}
-			}
-			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
-			f.b >>= n & regSizeMaskUint32
-			f.nb -= n
-		}

 		var dist uint32
 		if f.hd == nil {
 			for f.nb < 5 {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<5:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			dist = uint32(bits.Reverse8(uint8(f.b & 0x1F << 3)))
 			f.b >>= 5
 			f.nb -= 5
 		} else {
-			sym, err := f.huffSym(f.hd)
-			if err != nil {
-				if debugDecode {
-					fmt.Println("huffsym:", err)
-				}
-				f.err = err
-				return
-			}
-			dist = uint32(sym)
+			// Since a huffmanDecoder can be empty or be composed of a degenerate tree
+			// with single element, huffSym must error on these two edge cases. In both
+			// cases, the chunks slice will be 0 for the invalid sequence, leading it
+			// satisfy the n == 0 check below.
+			n := uint(f.hd.maxRead)
+			// Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+			// but is smart enough to keep local variables in registers, so use nb and b,
+			// inline call to moreBits and reassign b,nb back to f on return.
+			nb, b := f.nb, f.b
+			for {
+				for nb < n {
+					c, err := fr.ReadByte()
+					if err != nil {
+						f.b = b
+						f.nb = nb
+						f.err = noEOF(err)
+						return
+					}
+					f.roffset++
+					b |= uint32(c) << (nb & regSizeMaskUint32)
+					nb += 8
+				}
+				chunk := f.hd.chunks[b&(huffmanNumChunks-1)]
+				n = uint(chunk & huffmanCountMask)
+				if n > huffmanChunkBits {
+					chunk = f.hd.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&f.hd.linkMask]
+					n = uint(chunk & huffmanCountMask)
+				}
+				if n <= nb {
+					if n == 0 {
+						f.b = b
+						f.nb = nb
+						if debugDecode {
+							fmt.Println("huffsym: n==0")
+						}
+						f.err = CorruptInputError(f.roffset)
+						return
+					}
+					f.b = b >> (n & regSizeMaskUint32)
+					f.nb = nb - n
+					dist = uint32(chunk >> huffmanValueShift)
+					break
+				}
+			}
 		}

 		switch {
@@ -410,13 +442,17 @@ readLiteral:
 			// have 1 bit in bottom of dist, need nb more.
 			extra := (dist & 1) << (nb & regSizeMaskUint32)
 			for f.nb < nb {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<nb:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			extra |= f.b & uint32(1<<(nb&regSizeMaskUint32)-1)
 			f.b >>= nb & regSizeMaskUint32
@@ -472,16 +508,6 @@ func (f *decompressor) huffmanBufioReader() {
 		stateDict
 	)
 	fr := f.r.(*bufio.Reader)
-	moreBits := func() error {
-		c, err := fr.ReadByte()
-		if err != nil {
-			return noEOF(err)
-		}
-		f.roffset++
-		f.b |= uint32(c) << f.nb
-		f.nb += 8
-		return nil
-	}

 	switch f.stepState {
 	case stateInit:
@@ -542,9 +568,7 @@ readLiteral:
 			}
 		}

-		var n uint // number of bits extra
 		var length int
-		var err error
 		switch {
 		case v < 256:
 			f.dict.writeByte(byte(v))
@@ -561,25 +585,26 @@ readLiteral:
 		// otherwise, reference to older data
 		case v < 265:
 			length = v - (257 - 3)
-			n = 0
-		case v < 269:
-			length = v*2 - (265*2 - 11)
-			n = 1
-		case v < 273:
-			length = v*4 - (269*4 - 19)
-			n = 2
-		case v < 277:
-			length = v*8 - (273*8 - 35)
-			n = 3
-		case v < 281:
-			length = v*16 - (277*16 - 67)
-			n = 4
-		case v < 285:
-			length = v*32 - (281*32 - 131)
-			n = 5
 		case v < maxNumLit:
-			length = 258
-			n = 0
+			val := decCodeToLen[(v - 257)]
+			length = int(val.length) + 3
+			n := uint(val.extra)
+			for f.nb < n {
+				c, err := fr.ReadByte()
+				if err != nil {
+					if debugDecode {
+						fmt.Println("morebits n>0:", err)
+					}
+					f.err = err
+					return
+				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
+			}
+			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
+			f.b >>= n & regSizeMaskUint32
+			f.nb -= n
 		default:
 			if debugDecode {
 				fmt.Println(v, ">= maxNumLit")
@@ -587,45 +612,70 @@ readLiteral:
 			f.err = CorruptInputError(f.roffset)
 			return
 		}
-		if n > 0 {
-			for f.nb < n {
-				if err = moreBits(); err != nil {
-					if debugDecode {
-						fmt.Println("morebits n>0:", err)
-					}
-					f.err = err
-					return
-				}
-			}
-			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
-			f.b >>= n & regSizeMaskUint32
-			f.nb -= n
-		}

 		var dist uint32
 		if f.hd == nil {
 			for f.nb < 5 {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<5:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			dist = uint32(bits.Reverse8(uint8(f.b & 0x1F << 3)))
 			f.b >>= 5
 			f.nb -= 5
 		} else {
-			sym, err := f.huffSym(f.hd)
-			if err != nil {
-				if debugDecode {
-					fmt.Println("huffsym:", err)
-				}
-				f.err = err
-				return
-			}
-			dist = uint32(sym)
+			// Since a huffmanDecoder can be empty or be composed of a degenerate tree
+			// with single element, huffSym must error on these two edge cases. In both
+			// cases, the chunks slice will be 0 for the invalid sequence, leading it
+			// satisfy the n == 0 check below.
+			n := uint(f.hd.maxRead)
+			// Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+			// but is smart enough to keep local variables in registers, so use nb and b,
+			// inline call to moreBits and reassign b,nb back to f on return.
+			nb, b := f.nb, f.b
+			for {
+				for nb < n {
+					c, err := fr.ReadByte()
+					if err != nil {
+						f.b = b
+						f.nb = nb
+						f.err = noEOF(err)
+						return
+					}
+					f.roffset++
+					b |= uint32(c) << (nb & regSizeMaskUint32)
+					nb += 8
+				}
+				chunk := f.hd.chunks[b&(huffmanNumChunks-1)]
+				n = uint(chunk & huffmanCountMask)
+				if n > huffmanChunkBits {
+					chunk = f.hd.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&f.hd.linkMask]
+					n = uint(chunk & huffmanCountMask)
+				}
+				if n <= nb {
+					if n == 0 {
+						f.b = b
+						f.nb = nb
+						if debugDecode {
+							fmt.Println("huffsym: n==0")
+						}
+						f.err = CorruptInputError(f.roffset)
+						return
+					}
+					f.b = b >> (n & regSizeMaskUint32)
+					f.nb = nb - n
+					dist = uint32(chunk >> huffmanValueShift)
+					break
+				}
+			}
 		}

 		switch {
@@ -636,13 +686,17 @@ readLiteral:
 			// have 1 bit in bottom of dist, need nb more.
 			extra := (dist & 1) << (nb & regSizeMaskUint32)
 			for f.nb < nb {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<nb:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			extra |= f.b & uint32(1<<(nb&regSizeMaskUint32)-1)
 			f.b >>= nb & regSizeMaskUint32
@@ -698,16 +752,6 @@ func (f *decompressor) huffmanStringsReader() {
 		stateDict
 	)
 	fr := f.r.(*strings.Reader)
-	moreBits := func() error {
-		c, err := fr.ReadByte()
-		if err != nil {
-			return noEOF(err)
-		}
-		f.roffset++
-		f.b |= uint32(c) << f.nb
-		f.nb += 8
-		return nil
-	}

 	switch f.stepState {
 	case stateInit:
@@ -768,9 +812,7 @@ readLiteral:
 			}
 		}

-		var n uint // number of bits extra
 		var length int
-		var err error
 		switch {
 		case v < 256:
 			f.dict.writeByte(byte(v))
@@ -787,25 +829,26 @@ readLiteral:
 		// otherwise, reference to older data
 		case v < 265:
 			length = v - (257 - 3)
-			n = 0
-		case v < 269:
-			length = v*2 - (265*2 - 11)
-			n = 1
-		case v < 273:
-			length = v*4 - (269*4 - 19)
-			n = 2
-		case v < 277:
-			length = v*8 - (273*8 - 35)
-			n = 3
-		case v < 281:
-			length = v*16 - (277*16 - 67)
-			n = 4
-		case v < 285:
-			length = v*32 - (281*32 - 131)
-			n = 5
 		case v < maxNumLit:
-			length = 258
-			n = 0
+			val := decCodeToLen[(v - 257)]
+			length = int(val.length) + 3
+			n := uint(val.extra)
+			for f.nb < n {
+				c, err := fr.ReadByte()
+				if err != nil {
+					if debugDecode {
+						fmt.Println("morebits n>0:", err)
+					}
+					f.err = err
+					return
+				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
+			}
+			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
+			f.b >>= n & regSizeMaskUint32
+			f.nb -= n
 		default:
 			if debugDecode {
 				fmt.Println(v, ">= maxNumLit")
@@ -813,45 +856,70 @@ readLiteral:
 			f.err = CorruptInputError(f.roffset)
 			return
 		}
-		if n > 0 {
-			for f.nb < n {
-				if err = moreBits(); err != nil {
-					if debugDecode {
-						fmt.Println("morebits n>0:", err)
-					}
-					f.err = err
-					return
-				}
-			}
-			length += int(f.b & uint32(1<<(n&regSizeMaskUint32)-1))
-			f.b >>= n & regSizeMaskUint32
-			f.nb -= n
-		}

 		var dist uint32
 		if f.hd == nil {
 			for f.nb < 5 {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<5:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			dist = uint32(bits.Reverse8(uint8(f.b & 0x1F << 3)))
 			f.b >>= 5
 			f.nb -= 5
 		} else {
-			sym, err := f.huffSym(f.hd)
-			if err != nil {
-				if debugDecode {
-					fmt.Println("huffsym:", err)
-				}
-				f.err = err
-				return
-			}
-			dist = uint32(sym)
+			// Since a huffmanDecoder can be empty or be composed of a degenerate tree
+			// with single element, huffSym must error on these two edge cases. In both
+			// cases, the chunks slice will be 0 for the invalid sequence, leading it
+			// satisfy the n == 0 check below.
+			n := uint(f.hd.maxRead)
+			// Optimization. Compiler isn't smart enough to keep f.b,f.nb in registers,
+			// but is smart enough to keep local variables in registers, so use nb and b,
+			// inline call to moreBits and reassign b,nb back to f on return.
+			nb, b := f.nb, f.b
+			for {
+				for nb < n {
+					c, err := fr.ReadByte()
+					if err != nil {
+						f.b = b
+						f.nb = nb
+						f.err = noEOF(err)
+						return
+					}
+					f.roffset++
+					b |= uint32(c) << (nb & regSizeMaskUint32)
+					nb += 8
+				}
+				chunk := f.hd.chunks[b&(huffmanNumChunks-1)]
+				n = uint(chunk & huffmanCountMask)
+				if n > huffmanChunkBits {
+					chunk = f.hd.links[chunk>>huffmanValueShift][(b>>huffmanChunkBits)&f.hd.linkMask]
+					n = uint(chunk & huffmanCountMask)
+				}
+				if n <= nb {
+					if n == 0 {
+						f.b = b
+						f.nb = nb
+						if debugDecode {
+							fmt.Println("huffsym: n==0")
+						}
+						f.err = CorruptInputError(f.roffset)
+						return
+					}
+					f.b = b >> (n & regSizeMaskUint32)
+					f.nb = nb - n
+					dist = uint32(chunk >> huffmanValueShift)
+					break
+				}
+			}
 		}

 		switch {
@@ -862,13 +930,17 @@ readLiteral:
 			// have 1 bit in bottom of dist, need nb more.
 			extra := (dist & 1) << (nb & regSizeMaskUint32)
 			for f.nb < nb {
-				if err = f.moreBits(); err != nil {
+				c, err := fr.ReadByte()
+				if err != nil {
 					if debugDecode {
 						fmt.Println("morebits f.nb<nb:", err)
 					}
 					f.err = err
 					return
 				}
+				f.roffset++
+				f.b |= uint32(c) << f.nb
+				f.nb += 8
 			}
 			extra |= f.b & uint32(1<<(nb&regSizeMaskUint32)-1)
 			f.b >>= nb & regSizeMaskUint32
@@ -54,11 +54,11 @@ To create a writer with default options, do like this:
 ```Go
 // Compress input to output.
 func Compress(in io.Reader, out io.Writer) error {
-	w, err := NewWriter(output)
+	enc, err := zstd.NewWriter(out)
 	if err != nil {
 		return err
 	}
-	_, err := io.Copy(w, input)
+	_, err = io.Copy(enc, in)
 	if err != nil {
 		enc.Close()
 		return err
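For reference, the corrected snippet continues by closing the encoder, which flushes the final frame. The complete pattern, as a hedged sketch against the public `zstd` API (not a verbatim quote of the README's remainder):

```go
package example

import (
	"io"

	"github.com/klauspost/compress/zstd"
)

// Compress streams in to out as a zstd frame and flushes it on Close.
func Compress(in io.Reader, out io.Writer) error {
	enc, err := zstd.NewWriter(out)
	if err != nil {
		return err
	}
	if _, err = io.Copy(enc, in); err != nil {
		enc.Close()
		return err
	}
	return enc.Close()
}
```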
@@ -323,19 +323,23 @@ func (d *Decoder) DecodeAll(input, dst []byte) ([]byte, error) {
 		}
 		if frame.FrameContentSize > 0 && frame.FrameContentSize < 1<<30 {
 			// Never preallocate moe than 1 GB up front.
-			if uint64(cap(dst)) < frame.FrameContentSize {
+			if cap(dst)-len(dst) < int(frame.FrameContentSize) {
 				dst2 := make([]byte, len(dst), len(dst)+int(frame.FrameContentSize))
 				copy(dst2, dst)
 				dst = dst2
 			}
 		}
 		if cap(dst) == 0 {
-			// Allocate window size * 2 by default if nothing is provided and we didn't get frame content size.
-			size := frame.WindowSize * 2
+			// Allocate len(input) * 2 by default if nothing is provided
+			// and we didn't get frame content size.
+			size := len(input) * 2
 			// Cap to 1 MB.
 			if size > 1<<20 {
 				size = 1 << 20
 			}
+			if uint64(size) > d.o.maxDecodedSize {
+				size = int(d.o.maxDecodedSize)
+			}
 			dst = make([]byte, 0, size)
 		}

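The allocation tweaks above only matter when the caller lets `DecodeAll` grow the destination; callers that decode many blobs typically pass a reusable buffer so the sizing logic is skipped entirely. A small hedged usage sketch (function names are mine, the `zstd` calls are the public API):

```go
package example

import "github.com/klauspost/compress/zstd"

// newDecoder builds a decoder suitable for DecodeAll-only use (no reader attached).
func newDecoder() (*zstd.Decoder, error) {
	return zstd.NewReader(nil)
}

// decodeReusing appends the decompressed payload to buf[:0], reusing its
// capacity across calls; DecodeAll only allocates when buf is too small.
func decodeReusing(dec *zstd.Decoder, compressed, buf []byte) ([]byte, error) {
	return dec.DecodeAll(compressed, buf[:0])
}
```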
@@ -1,4 +1,5 @@
 GOPATH := $(shell go env GOPATH)
+TMPDIR := $(shell mktemp -d)

 all: checks

@@ -20,7 +21,7 @@ test:
 	@GO111MODULE=on SERVER_ENDPOINT=localhost:9000 ACCESS_KEY=minio SECRET_KEY=minio123 ENABLE_HTTPS=1 MINT_MODE=full go test -race -v ./...

 examples:
-	@mkdir -p /tmp/examples && for i in $(echo examples/s3/*); do go build -o /tmp/examples/$(basename ${i:0:-3}) ${i}; done
+	@$(foreach v,$(wildcard examples/s3/*), go build -o ${TMPDIR}/$(basename $(v)) $(v) || exit 1;)

 functional-test:
 	@GO111MODULE=on SERVER_ENDPOINT=localhost:9000 ACCESS_KEY=minio SECRET_KEY=minio123 ENABLE_HTTPS=1 MINT_MODE=full go run functional_tests.go
@@ -19,7 +19,6 @@ MinIO client requires the following four parameters specified to connect to an A
 | endpoint | URL to object storage service. |
 | _minio.Options_ | All the options such as credentials, custom transport etc. |

-
 ```go
 package main

@@ -116,6 +115,7 @@ func main() {

 ### Run FileUploader
 ```sh
+export GO111MODULE=on
 go run file-uploader.go
 2016/08/13 17:03:28 Successfully created mymusic
 2016/08/13 17:03:40 Successfully uploaded golden-oldies.zip of size 16253413
@@ -135,7 +135,6 @@ The full API Reference is available here.
 * [`BucketExists`](https://docs.min.io/docs/golang-client-api-reference#BucketExists)
 * [`RemoveBucket`](https://docs.min.io/docs/golang-client-api-reference#RemoveBucket)
 * [`ListObjects`](https://docs.min.io/docs/golang-client-api-reference#ListObjects)
-* [`ListObjectsV2`](https://docs.min.io/docs/golang-client-api-reference#ListObjectsV2)
 * [`ListIncompleteUploads`](https://docs.min.io/docs/golang-client-api-reference#ListIncompleteUploads)

 ### API Reference : Bucket policy Operations
@@ -173,7 +172,6 @@ The full API Reference is available here.

 ### API Reference : Client custom settings
 * [`SetAppInfo`](http://docs.min.io/docs/golang-client-api-reference#SetAppInfo)
-* [`SetCustomTransport`](http://docs.min.io/docs/golang-client-api-reference#SetCustomTransport)
 * [`TraceOn`](http://docs.min.io/docs/golang-client-api-reference#TraceOn)
 * [`TraceOff`](http://docs.min.io/docs/golang-client-api-reference#TraceOff)
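`ListObjectsV2` disappears from the API lists because minio-go v7 folds V1/V2 listing into a single `ListObjects` call that takes a context and an options struct. A hedged sketch of the v7-style call (the bucket and prefix values are placeholders, and the helper name is mine):

```go
package example

import (
	"context"
	"fmt"

	"github.com/minio/minio-go/v7"
)

// listAll prints every object under prefix using the consolidated v7 API.
func listAll(ctx context.Context, client *minio.Client, bucket, prefix string) error {
	opts := minio.ListObjectsOptions{Prefix: prefix, Recursive: true}
	for obj := range client.ListObjects(ctx, bucket, opts) {
		if obj.Err != nil {
			return obj.Err
		}
		fmt.Println(obj.Key, obj.Size)
	}
	return nil
}
```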
@ -38,8 +38,10 @@ MinIO client需要以下4个参数来连接与Amazon S3兼容的对象存储。
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/minio/minio-go/v7"
|
|
||||||
"log"
|
"log"
|
||||||
|
|
||||||
|
"github.com/minio/minio-go/v7"
|
||||||
|
"github.com/minio/minio-go/v7/pkg/credentials"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
@ -49,7 +51,10 @@ func main() {
|
||||||
useSSL := true
|
useSSL := true
|
||||||
|
|
||||||
// 初使化 minio client对象。
|
// 初使化 minio client对象。
|
||||||
minioClient, err := minio.New(endpoint, accessKeyID, secretAccessKey, useSSL)
|
minioClient, err := minio.New(endpoint, &minio.Options{
|
||||||
|
Creds: credentials.NewStaticV4(accessKeyID, secretAccessKey, ""),
|
||||||
|
Secure: useSSL,
|
||||||
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln(err)
|
log.Fatalln(err)
|
||||||
}
|
}
|
||||||
|
@ -68,18 +73,25 @@ func main() {
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/minio/minio-go/v7"
|
"context"
|
||||||
"log"
|
"log"
|
||||||
|
|
||||||
|
"github.com/minio/minio-go/v7"
|
||||||
|
"github.com/minio/minio-go/v7/pkg/credentials"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
ctx := context.Background()
|
||||||
endpoint := "play.min.io"
|
endpoint := "play.min.io"
|
||||||
accessKeyID := "Q3AM3UQ867SPQQA43P2F"
|
accessKeyID := "Q3AM3UQ867SPQQA43P2F"
|
||||||
secretAccessKey := "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
|
secretAccessKey := "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
|
||||||
useSSL := true
|
useSSL := true
|
||||||
|
|
||||||
// 初使化 minio client对象。
|
// 初使化 minio client对象。
|
||||||
minioClient, err := minio.New(endpoint, accessKeyID, secretAccessKey, useSSL)
|
minioClient, err := minio.New(endpoint, &minio.Options{
|
||||||
|
Creds: credentials.NewStaticV4(accessKeyID, secretAccessKey, ""),
|
||||||
|
Secure: useSSL,
|
||||||
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln(err)
|
log.Fatalln(err)
|
||||||
}
|
}
|
||||||
|
@ -88,17 +100,18 @@ func main() {
|
||||||
bucketName := "mymusic"
|
bucketName := "mymusic"
|
||||||
location := "us-east-1"
|
location := "us-east-1"
|
||||||
|
|
||||||
err = minioClient.MakeBucket(bucketName, location)
|
err = minioClient.MakeBucket(ctx, bucketName, minio.MakeBucketOptions{Region: location})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// 检查存储桶是否已经存在。
|
// 检查存储桶是否已经存在。
|
||||||
exists, err := minioClient.BucketExists(bucketName)
|
exists, errBucketExists := minioClient.BucketExists(ctx, bucketName)
|
||||||
if err == nil && exists {
|
if errBucketExists == nil && exists {
|
||||||
log.Printf("We already own %s\n", bucketName)
|
log.Printf("We already own %s\n", bucketName)
|
||||||
} else {
|
} else {
|
||||||
log.Fatalln(err)
|
log.Fatalln(err)
|
||||||
}
|
}
|
||||||
}
|
} else {
|
||||||
log.Printf("Successfully created %s\n", bucketName)
|
log.Printf("Successfully created %s\n", bucketName)
|
||||||
|
}
|
||||||
|
|
||||||
// 上传一个zip文件。
|
// 上传一个zip文件。
|
||||||
objectName := "golden-oldies.zip"
|
objectName := "golden-oldies.zip"
|
||||||
|
@ -106,7 +119,7 @@ func main() {
|
||||||
contentType := "application/zip"
|
contentType := "application/zip"
|
||||||
|
|
||||||
// 使用FPutObject上传一个zip文件。
|
// 使用FPutObject上传一个zip文件。
|
||||||
n, err := minioClient.FPutObject(bucketName, objectName, filePath, minio.PutObjectOptions{ContentType:contentType})
|
n, err := minioClient.FPutObject(ctx, bucketName, objectName, filePath, minio.PutObjectOptions{ContentType: contentType})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalln(err)
|
log.Fatalln(err)
|
||||||
}
|
}
|
||||||
@@ -135,7 +148,6 @@ mc ls play/mymusic/
 * [`BucketExists`](https://docs.min.io/docs/golang-client-api-reference#BucketExists)
 * [`RemoveBucket`](https://docs.min.io/docs/golang-client-api-reference#RemoveBucket)
 * [`ListObjects`](https://docs.min.io/docs/golang-client-api-reference#ListObjects)
-* [`ListObjectsV2`](https://docs.min.io/docs/golang-client-api-reference#ListObjectsV2)
 * [`ListIncompleteUploads`](https://docs.min.io/docs/golang-client-api-reference#ListIncompleteUploads)

 ### API文档 : 存储桶策略
@@ -146,7 +158,8 @@ mc ls play/mymusic/
 * [`SetBucketNotification`](https://docs.min.io/docs/golang-client-api-reference#SetBucketNotification)
 * [`GetBucketNotification`](https://docs.min.io/docs/golang-client-api-reference#GetBucketNotification)
 * [`RemoveAllBucketNotification`](https://docs.min.io/docs/golang-client-api-reference#RemoveAllBucketNotification)
-* [`ListenBucketNotification`](https://docs.min.io/docs/golang-client-api-reference#ListenBucketNotification) (MinIO Extension)
+* [`ListenBucketNotification`](https://docs.min.io/docs/golang-client-api-reference#ListenBucketNotification) (MinIO 扩展)
+* [`ListenNotification`](https://docs.min.io/docs/golang-client-api-reference#ListenNotification) (MinIO 扩展)

 ### API文档 : 操作文件对象
 * [`FPutObject`](https://docs.min.io/docs/golang-client-api-reference#FPutObject)
@@ -161,10 +174,7 @@ mc ls play/mymusic/
 * [`RemoveObject`](https://docs.min.io/docs/golang-client-api-reference#RemoveObject)
 * [`RemoveObjects`](https://docs.min.io/docs/golang-client-api-reference#RemoveObjects)
 * [`RemoveIncompleteUpload`](https://docs.min.io/docs/golang-client-api-reference#RemoveIncompleteUpload)
+* [`SelectObjectContent`](https://docs.min.io/docs/golang-client-api-reference#SelectObjectContent)
-### API文档: 操作加密对象
-* [`GetEncryptedObject`](https://docs.min.io/docs/golang-client-api-reference#GetEncryptedObject)
-* [`PutEncryptedObject`](https://docs.min.io/docs/golang-client-api-reference#PutEncryptedObject)

 ### API文档 : Presigned操作
 * [`PresignedGetObject`](https://docs.min.io/docs/golang-client-api-reference#PresignedGetObject)
@@ -174,7 +184,6 @@ mc ls play/mymusic/

 ### API文档 : 客户端自定义设置
 * [`SetAppInfo`](http://docs.min.io/docs/golang-client-api-reference#SetAppInfo)
-* [`SetCustomTransport`](http://docs.min.io/docs/golang-client-api-reference#SetCustomTransport)
 * [`TraceOn`](http://docs.min.io/docs/golang-client-api-reference#TraceOn)
 * [`TraceOff`](http://docs.min.io/docs/golang-client-api-reference#TraceOff)

@@ -194,11 +203,26 @@ mc ls play/mymusic/
 * [getbucketpolicy.go](https://github.com/minio/minio-go/blob/master/examples/s3/getbucketpolicy.go)
 * [listbucketpolicies.go](https://github.com/minio/minio-go/blob/master/examples/s3/listbucketpolicies.go)

+### 完整示例 : 存储桶生命周期
+* [setbucketlifecycle.go](https://github.com/minio/minio-go/blob/master/examples/s3/setbucketlifecycle.go)
+* [getbucketlifecycle.go](https://github.com/minio/minio-go/blob/master/examples/s3/getbucketlifecycle.go)
+
+### 完整示例 : 存储桶加密
+* [setbucketencryption.go](https://github.com/minio/minio-go/blob/master/examples/s3/setbucketencryption.go)
+* [getbucketencryption.go](https://github.com/minio/minio-go/blob/master/examples/s3/getbucketencryption.go)
+* [deletebucketencryption.go](https://github.com/minio/minio-go/blob/master/examples/s3/deletebucketencryption.go)
+
+### 完整示例 : 存储桶复制
+* [setbucketreplication.go](https://github.com/minio/minio-go/blob/master/examples/s3/setbucketreplication.go)
+* [getbucketreplication.go](https://github.com/minio/minio-go/blob/master/examples/s3/getbucketreplication.go)
+* [removebucketreplication.go](https://github.com/minio/minio-go/blob/master/examples/s3/removebucketreplication.go)
+
 ### 完整示例 : 存储桶通知
 * [setbucketnotification.go](https://github.com/minio/minio-go/blob/master/examples/s3/setbucketnotification.go)
 * [getbucketnotification.go](https://github.com/minio/minio-go/blob/master/examples/s3/getbucketnotification.go)
 * [removeallbucketnotification.go](https://github.com/minio/minio-go/blob/master/examples/s3/removeallbucketnotification.go)
 * [listenbucketnotification.go](https://github.com/minio/minio-go/blob/master/examples/minio/listenbucketnotification.go) (MinIO扩展)
+* [listennotification.go](https://github.com/minio/minio-go/blob/master/examples/minio/listen-notification.go) (MinIO 扩展)

 ### 完整示例 : 操作文件对象
 * [fputobject.go](https://github.com/minio/minio-go/blob/master/examples/s3/fputobject.go)

@@ -83,6 +83,23 @@ type BucketVersioningConfiguration struct {
     MFADelete string `xml:"MfaDelete,omitempty"`
 }

+// Various supported states
+const (
+    Enabled = "Enabled"
+    // Disabled State = "Disabled" only used by MFA Delete not supported yet.
+    Suspended = "Suspended"
+)
+
+// Enabled returns true if bucket versioning is enabled
+func (b BucketVersioningConfiguration) Enabled() bool {
+    return b.Status == Enabled
+}
+
+// Suspended returns true if bucket versioning is suspended
+func (b BucketVersioningConfiguration) Suspended() bool {
+    return b.Status == Suspended
+}
+
 // GetBucketVersioning gets the versioning configuration on
 // an existing bucket with a context to control cancellations and timeouts.
 func (c Client) GetBucketVersioning(ctx context.Context, bucketName string) (BucketVersioningConfiguration, error) {
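As a quick illustration of the versioning helpers added above (a sketch only; the client value, context, and bucket name are assumed to come from a setup like the quickstart earlier in this changeset):

    cfg, err := minioClient.GetBucketVersioning(ctx, "mymusic")
    if err != nil {
        log.Fatalln(err)
    }
    switch {
    case cfg.Enabled():
        log.Println("versioning is enabled")
    case cfg.Suspended():
        log.Println("versioning is suspended")
    default:
        log.Println("versioning has never been configured")
    }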
@@ -28,6 +28,7 @@ import (
     "strings"
     "time"

+    "github.com/google/uuid"
     "github.com/minio/minio-go/v7/pkg/encrypt"
     "github.com/minio/minio-go/v7/pkg/s3utils"
 )
@@ -201,7 +202,7 @@ func (opts CopySrcOptions) validate() (err error) {

 // Low level implementation of CopyObject API, supports only upto 5GiB worth of copy.
 func (c Client) copyObjectDo(ctx context.Context, srcBucket, srcObject, destBucket, destObject string,
-    metadata map[string]string) (ObjectInfo, error) {
+    metadata map[string]string, dstOpts PutObjectOptions) (ObjectInfo, error) {

     // Build headers.
     headers := make(http.Header)
@@ -210,16 +211,38 @@ func (c Client) copyObjectDo(ctx context.Context, srcBucket, srcObject, destBuck
     for k, v := range metadata {
         headers.Set(k, v)
     }
+    if !dstOpts.Internal.ReplicationStatus.Empty() {
+        headers.Set(amzBucketReplicationStatus, string(dstOpts.Internal.ReplicationStatus))
+    }
+    if !dstOpts.Internal.SourceMTime.IsZero() {
+        headers.Set(minIOBucketSourceMTime, dstOpts.Internal.SourceMTime.Format(time.RFC3339))
+    }
+    if dstOpts.Internal.SourceETag != "" {
+        headers.Set(minIOBucketSourceETag, dstOpts.Internal.SourceETag)
+    }
+    if len(dstOpts.UserTags) != 0 {
+        headers.Set(amzTaggingHeader, s3utils.TagEncode(dstOpts.UserTags))
+    }
+
+    reqMetadata := requestMetadata{
+        bucketName: destBucket,
+        objectName: destObject,
+        customHeader: headers,
+    }
+    if dstOpts.Internal.SourceVersionID != "" {
+        if _, err := uuid.Parse(dstOpts.Internal.SourceVersionID); err != nil {
+            return ObjectInfo{}, errInvalidArgument(err.Error())
+        }
+        urlValues := make(url.Values)
+        urlValues.Set("versionId", dstOpts.Internal.SourceVersionID)
+        reqMetadata.queryValues = urlValues
+    }
+
     // Set the source header
     headers.Set("x-amz-copy-source", s3utils.EncodePath(srcBucket+"/"+srcObject))

     // Send upload-part-copy request
-    resp, err := c.executeMethod(ctx, http.MethodPut, requestMetadata{
-        bucketName: destBucket,
-        objectName: destObject,
-        customHeader: headers,
-    })
+    resp, err := c.executeMethod(ctx, http.MethodPut, reqMetadata)
     defer closeResponse(resp)
     if err != nil {
         return ObjectInfo{}, err

@@ -25,12 +25,19 @@ import (
     "github.com/minio/minio-go/v7/pkg/encrypt"
 )

+//AdvancedGetOptions for internal use by MinIO server - not intended for client use.
+type AdvancedGetOptions struct {
+    ReplicationDeleteMarker bool
+}
+
 // GetObjectOptions are used to specify additional headers or options
 // during GET requests.
 type GetObjectOptions struct {
     headers map[string]string
     ServerSideEncryption encrypt.ServerSide
     VersionID string
+    // To be not used by external applications
+    Internal AdvancedGetOptions
 }

 // StatObjectOptions are used to specify additional headers or options

@@ -149,7 +149,7 @@ func (c Client) putObjectMultipartStreamFromReadAt(ctx context.Context, bucketNa

     var partsBuf = make([][]byte, opts.getNumThreads())
     for i := range partsBuf {
-        partsBuf[i] = make([]byte, partSize)
+        partsBuf[i] = make([]byte, 0, partSize)
     }

     // Receive each part number from the channel allowing three parallel uploads.
@@ -451,15 +451,12 @@ func (c Client) putObjectDo(ctx context.Context, bucketName, objectName string,
         contentMD5Base64: md5Base64,
         contentSHA256Hex: sha256Hex,
     }
-    if opts.ReplicationVersionID != "" {
-        if _, err := uuid.Parse(opts.ReplicationVersionID); err != nil {
+    if opts.Internal.SourceVersionID != "" {
+        if _, err := uuid.Parse(opts.Internal.SourceVersionID); err != nil {
             return UploadInfo{}, errInvalidArgument(err.Error())
         }
         urlValues := make(url.Values)
-        urlValues.Set("versionId", opts.ReplicationVersionID)
-        if opts.ReplicationETag != "" {
-            urlValues.Set("etag", opts.ReplicationETag)
-        }
+        urlValues.Set("versionId", opts.Internal.SourceVersionID)
         reqMetadata.queryValues = urlValues
     }

@@ -52,6 +52,15 @@ func (r ReplicationStatus) Empty() bool {
     return r == ""
 }

+// AdvancedPutOptions for internal use - to be utilized by replication, ILM transition
+// implementation on MinIO server
+type AdvancedPutOptions struct {
+    SourceVersionID string
+    SourceETag string
+    ReplicationStatus ReplicationStatus
+    SourceMTime time.Time
+}
+
 // PutObjectOptions represents options specified by user for PutObject call
 type PutObjectOptions struct {
     UserMetadata map[string]string
@@ -72,10 +81,7 @@ type PutObjectOptions struct {
     LegalHold LegalHoldStatus
     SendContentMd5 bool
     DisableMultipart bool
-    ReplicationVersionID string
-    ReplicationETag string
-    ReplicationStatus ReplicationStatus
-    ReplicationMTime time.Time
+    Internal AdvancedPutOptions
 }

 // getNumThreads - gets the number of threads to be used in the multipart
@@ -137,14 +143,14 @@ func (opts PutObjectOptions) Header() (header http.Header) {
         header.Set(amzWebsiteRedirectLocation, opts.WebsiteRedirectLocation)
     }

-    if !opts.ReplicationStatus.Empty() {
-        header.Set(amzBucketReplicationStatus, string(opts.ReplicationStatus))
+    if !opts.Internal.ReplicationStatus.Empty() {
+        header.Set(amzBucketReplicationStatus, string(opts.Internal.ReplicationStatus))
     }
-    if !opts.ReplicationMTime.IsZero() {
-        header.Set(minIOBucketReplicationSourceMTime, opts.ReplicationMTime.Format(time.RFC3339))
+    if !opts.Internal.SourceMTime.IsZero() {
+        header.Set(minIOBucketSourceMTime, opts.Internal.SourceMTime.Format(time.RFC3339))
     }
-    if opts.ReplicationETag != "" {
-        header.Set(minIOBucketReplicationETag, opts.ReplicationETag)
+    if opts.Internal.SourceETag != "" {
+        header.Set(minIOBucketSourceETag, opts.Internal.SourceETag)
     }
     if len(opts.UserTags) != 0 {
         header.Set(amzTaggingHeader, s3utils.TagEncode(opts.UserTags))

@@ -24,6 +24,7 @@ import (
     "io"
     "net/http"
     "net/url"
+    "time"

     "github.com/minio/minio-go/v7/pkg/s3utils"
 )
@@ -58,10 +59,18 @@ func (c Client) RemoveBucket(ctx context.Context, bucketName string) error {
     return nil
 }

+// AdvancedRemoveOptions intended for internal use by replication
+type AdvancedRemoveOptions struct {
+    ReplicationDeleteMarker bool
+    ReplicationStatus ReplicationStatus
+    ReplicationMTime time.Time
+}
+
 // RemoveObjectOptions represents options specified by user for RemoveObject call
 type RemoveObjectOptions struct {
     GovernanceBypass bool
     VersionID string
+    Internal AdvancedRemoveOptions
 }

 // RemoveObject removes an object from a bucket.
@@ -74,6 +83,11 @@ func (c Client) RemoveObject(ctx context.Context, bucketName, objectName string,
         return err
     }

+    return c.removeObject(ctx, bucketName, objectName, opts)
+}
+
+func (c Client) removeObject(ctx context.Context, bucketName, objectName string, opts RemoveObjectOptions) error {

     // Get resources properly escaped and lined up before
     // using them in http request.
     urlValues := make(url.Values)
@@ -89,6 +103,15 @@ func (c Client) RemoveObject(ctx context.Context, bucketName, objectName string,
         // Set the bypass goverenance retention header
         headers.Set(amzBypassGovernance, "true")
     }
+    if opts.Internal.ReplicationDeleteMarker {
+        headers.Set(minIOBucketReplicationDeleteMarker, "true")
+    }
+    if !opts.Internal.ReplicationMTime.IsZero() {
+        headers.Set(minIOBucketSourceMTime, opts.Internal.ReplicationMTime.Format(time.RFC3339))
+    }
+    if !opts.Internal.ReplicationStatus.Empty() {
+        headers.Set(amzBucketReplicationStatus, string(opts.Internal.ReplicationStatus))
+    }
     // Execute DELETE on objectName.
     resp, err := c.executeMethod(ctx, http.MethodDelete, requestMetadata{
         bucketName: bucketName,
@@ -147,8 +170,14 @@ func processRemoveMultiObjectsResponse(body io.Reader, objects []ObjectInfo, err

     // Fill deletion that returned an error.
     for _, obj := range rmResult.UnDeletedObjects {
+        // Version does not exist is not an error ignore and continue.
+        switch obj.Code {
+        case "InvalidArgument", "NoSuchVersion":
+            continue
+        }
         errorCh <- RemoveObjectError{
             ObjectName: obj.Key,
+            VersionID: obj.VersionID,
             Err: ErrorResponse{
                 Code: obj.Code,
                 Message: obj.Message,
@@ -189,6 +218,26 @@ func (c Client) RemoveObjects(ctx context.Context, bucketName string, objectsCh
     return errorCh
 }

+// Return true if the character is within the allowed characters in an XML 1.0 document
+// The list of allowed characters can be found here: https://www.w3.org/TR/xml/#charsets
+func validXMLChar(r rune) (ok bool) {
+    return r == 0x09 ||
+        r == 0x0A ||
+        r == 0x0D ||
+        r >= 0x20 && r <= 0xD7FF ||
+        r >= 0xE000 && r <= 0xFFFD ||
+        r >= 0x10000 && r <= 0x10FFFF
+}
+
+func hasInvalidXMLChar(str string) bool {
+    for _, s := range str {
+        if !validXMLChar(s) {
+            return true
+        }
+    }
+    return false
+}
+
 // Generate and call MultiDelete S3 requests based on entries received from objectsCh
 func (c Client) removeObjects(ctx context.Context, bucketName string, objectsCh <-chan ObjectInfo, errorCh chan<- RemoveObjectError, opts RemoveObjectsOptions) {
     maxEntries := 1000
@@ -209,6 +258,27 @@ func (c Client) removeObjects(ctx context.Context, bucketName string, objectsCh

     // Try to gather 1000 entries
     for object := range objectsCh {
+        if hasInvalidXMLChar(object.Key) {
+            // Use single DELETE so the object name will be in the request URL instead of the multi-delete XML document.
+            err := c.removeObject(ctx, bucketName, object.Key, RemoveObjectOptions{
+                VersionID: object.VersionID,
+                GovernanceBypass: opts.GovernanceBypass,
+            })
+            if err != nil {
+                // Version does not exist is not an error ignore and continue.
+                switch ToErrorResponse(err).Code {
+                case "InvalidArgument", "NoSuchVersion":
+                    continue
+                }
+                errorCh <- RemoveObjectError{
+                    ObjectName: object.Key,
+                    VersionID: object.VersionID,
+                    Err: err,
+                }
+            }
+            continue
+        }
+
         batch = append(batch, object)
         if count++; count >= maxEntries {
             break
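The XML guard added above exists because S3 multi-delete requests embed object keys in an XML body, and XML 1.0 cannot represent certain control characters; such keys are now deleted one at a time instead. A standalone sketch of the same check (the sample keys are illustrative):

    package main

    import "fmt"

    // validXMLChar mirrors the vendored helper: it reports whether a rune is
    // allowed by the XML 1.0 character set (https://www.w3.org/TR/xml/#charsets).
    func validXMLChar(r rune) bool {
        return r == 0x09 || r == 0x0A || r == 0x0D ||
            (r >= 0x20 && r <= 0xD7FF) ||
            (r >= 0xE000 && r <= 0xFFFD) ||
            (r >= 0x10000 && r <= 0x10FFFF)
    }

    func hasInvalidXMLChar(s string) bool {
        for _, r := range s {
            if !validXMLChar(r) {
                return true
            }
        }
        return false
    }

    func main() {
        for _, key := range []string{"golden-oldies.zip", "bad\x01name"} {
            fmt.Printf("%q needs the single-DELETE fallback: %v\n", key, hasInvalidXMLChar(key))
        }
    }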
@@ -89,6 +89,8 @@ type Version struct {
 }

 // ListVersionsResult is an element in the list object versions response
+// and has a special Unmarshaler because we need to preserver the order
+// of <Version> and <DeleteMarker> in ListVersionsResult.Versions slice
 type ListVersionsResult struct {
     Versions []Version

@@ -125,8 +127,7 @@ func (l *ListVersionsResult) UnmarshalXML(d *xml.Decoder, start xml.StartElement
         switch tagName {
         case "Name", "Prefix",
             "Delimiter", "EncodingType",
-            "KeyMarker", "VersionIdMarker",
-            "NextKeyMarker", "NextVersionIdMarker":
+            "KeyMarker", "NextKeyMarker":
             var s string
             if err = d.DecodeElement(&s, &se); err != nil {
                 return err
@@ -135,6 +136,20 @@ func (l *ListVersionsResult) UnmarshalXML(d *xml.Decoder, start xml.StartElement
             if v.IsValid() {
                 v.SetString(s)
             }
+        case "VersionIdMarker":
+            // VersionIdMarker is a special case because of 'Id' instead of 'ID' in field name
+            var s string
+            if err = d.DecodeElement(&s, &se); err != nil {
+                return err
+            }
+            l.VersionIDMarker = s
+        case "NextVersionIdMarker":
+            // NextVersionIdMarker is a special case because of 'Id' instead of 'ID' in field name
+            var s string
+            if err = d.DecodeElement(&s, &se); err != nil {
+                return err
+            }
+            l.NextVersionIDMarker = s
         case "IsTruncated": // bool
             var b bool
             if err = d.DecodeElement(&b, &se); err != nil {
@@ -328,6 +343,7 @@ type nonDeletedObject struct {
     Key string
     Code string
     Message string
+    VersionID string `xml:"VersionId"`
 }

 // deletedMultiObjects container for MultiObjects Delete XML request

@@ -78,27 +78,48 @@ func (c Client) statObject(ctx context.Context, bucketName, objectName string, o
     if err := s3utils.CheckValidObjectName(objectName); err != nil {
         return ObjectInfo{}, err
     }
+    headers := opts.Header()
+    if opts.Internal.ReplicationDeleteMarker {
+        headers.Set(minIOBucketReplicationDeleteMarker, "true")
+    }
+
     urlValues := make(url.Values)
     if opts.VersionID != "" {
         urlValues.Set("versionId", opts.VersionID)
     }

     // Execute HEAD on objectName.
     resp, err := c.executeMethod(ctx, http.MethodHead, requestMetadata{
         bucketName: bucketName,
         objectName: objectName,
         queryValues: urlValues,
         contentSHA256Hex: emptySHA256Hex,
-        customHeader: opts.Header(),
+        customHeader: headers,
     })
     defer closeResponse(resp)
     if err != nil {
         return ObjectInfo{}, err
     }
+    deleteMarker := resp.Header.Get(amzDeleteMarker) == "true"
+
     if resp != nil {
         if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusPartialContent {
-            return ObjectInfo{}, httpRespToErrorResponse(resp, bucketName, objectName)
+            if resp.StatusCode == http.StatusBadRequest && opts.VersionID != "" && deleteMarker {
+                errResp := ErrorResponse{
+                    StatusCode: resp.StatusCode,
+                    Code: "MethodNotAllowed",
+                    Message: "The specified method is not allowed against this resource.",
+                    BucketName: bucketName,
+                    Key: objectName,
+                }
+                return ObjectInfo{
+                    VersionID: resp.Header.Get(amzVersionID),
+                    IsDeleteMarker: deleteMarker,
+                }, errResp
+            }
+            return ObjectInfo{
+                VersionID: resp.Header.Get(amzVersionID),
+                IsDeleteMarker: deleteMarker,
+            }, httpRespToErrorResponse(resp, bucketName, objectName)
         }
     }

@@ -108,7 +108,7 @@ type Options struct {
 // Global constants.
 const (
     libraryName = "minio-go"
-    libraryVersion = "v7.0.5"
+    libraryVersion = "v7.0.6"
 )

 // User Agent should always following the below style.

@@ -97,7 +97,7 @@ func (c Client) getBucketLocation(ctx context.Context, bucketName string) (strin
 }

 // Initialize a new request.
-    req, err := c.getBucketLocationRequest(bucketName)
+    req, err := c.getBucketLocationRequest(ctx, bucketName)
     if err != nil {
         return "", err
     }
@@ -169,7 +169,7 @@ func processBucketLocationResponse(resp *http.Response, bucketName string) (buck
 }

 // getBucketLocationRequest - Wrapper creates a new getBucketLocation request.
-func (c Client) getBucketLocationRequest(bucketName string) (*http.Request, error) {
+func (c Client) getBucketLocationRequest(ctx context.Context, bucketName string) (*http.Request, error) {
     // Set location query.
     urlValues := make(url.Values)
     urlValues.Set("location", "")
@@ -198,7 +198,7 @@ func (c Client) getBucketLocationRequest(bucketName string) (*http.Request, erro
     }

     // Get a new HTTP request for the method.
-    req, err := http.NewRequest(http.MethodGet, urlStr, nil)
+    req, err := http.NewRequestWithContext(ctx, http.MethodGet, urlStr, nil)
     if err != nil {
         return nil, err
     }
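The bucket-location change above switches request construction to the caller's context, which is the standard way to make an outgoing HTTP call cancellable in Go. A self-contained sketch of that pattern (the URL and timeout are placeholders, not part of the vendored code):

    package main

    import (
        "context"
        "fmt"
        "net/http"
        "time"
    )

    func fetchWithContext(ctx context.Context, urlStr string) (*http.Response, error) {
        // Building the request with the context means cancellation and
        // deadlines propagate into the HTTP round trip.
        req, err := http.NewRequestWithContext(ctx, http.MethodGet, urlStr, nil)
        if err != nil {
            return nil, err
        }
        return http.DefaultClient.Do(req)
    }

    func main() {
        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()

        resp, err := fetchWithContext(ctx, "https://play.min.io")
        if err != nil {
            fmt.Println("request failed:", err)
            return
        }
        defer resp.Body.Close()
        fmt.Println("status:", resp.Status)
    }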
@@ -70,6 +70,7 @@ const (
     amzTaggingCount = "X-Amz-Tagging-Count"
     amzExpiration = "X-Amz-Expiration"
     amzReplicationStatus = "X-Amz-Replication-Status"
+    amzDeleteMarker = "X-Amz-Delete-Marker"

     // Object legal hold header
     amzLegalHoldHeader = "X-Amz-Object-Lock-Legal-Hold"
@@ -81,7 +82,9 @@ const (

     // Replication status
     amzBucketReplicationStatus = "X-Amz-Replication-Status"
-    // Minio specific Replication extension
-    minIOBucketReplicationSourceMTime = "X-Minio-Source-Mtime"
-    minIOBucketReplicationETag = "X-Minio-Source-Etag"
+    // Minio specific Replication/lifecycle transition extension
+    minIOBucketSourceMTime = "X-Minio-Source-Mtime"
+    minIOBucketSourceETag = "X-Minio-Source-Etag"
+    minIOBucketReplicationDeleteMarker = "X-Minio-Source-DeleteMarker"
 )

@@ -56,8 +56,8 @@ func (c Core) ListObjectsV2(bucketName, objectPrefix, continuationToken string,
 }

 // CopyObject - copies an object from source object to destination object on server side.
-func (c Core) CopyObject(ctx context.Context, sourceBucket, sourceObject, destBucket, destObject string, metadata map[string]string) (ObjectInfo, error) {
-    return c.copyObjectDo(ctx, sourceBucket, sourceObject, destBucket, destObject, metadata)
+func (c Core) CopyObject(ctx context.Context, sourceBucket, sourceObject, destBucket, destObject string, metadata map[string]string, dstOpts PutObjectOptions) (ObjectInfo, error) {
+    return c.copyObjectDo(ctx, sourceBucket, sourceObject, destBucket, destObject, metadata, dstOpts)
 }

 // CopyObjectPart - creates a part in a multipart upload by copying (a
@@ -71,7 +71,8 @@ func (c Core) CopyObjectPart(ctx context.Context, srcBucket, srcObject, destBuck

 // PutObject - Upload object. Uploads using single PUT call.
 func (c Core) PutObject(ctx context.Context, bucket, object string, data io.Reader, size int64, md5Base64, sha256Hex string, opts PutObjectOptions) (UploadInfo, error) {
-    return c.putObjectDo(ctx, bucket, object, data, md5Base64, sha256Hex, size, opts)
+    hookReader := newHook(data, opts.Progress)
+    return c.putObjectDo(ctx, bucket, object, hookReader, md5Base64, sha256Hex, size, opts)
 }

 // NewMultipartUpload - Initiates new multipart upload and returns the new uploadID.

@@ -3,6 +3,8 @@ module github.com/minio/minio-go/v7
 go 1.12

 require (
+    github.com/cheggaaa/pb v1.0.29 // indirect
+    github.com/dustin/go-humanize v1.0.0 // indirect
     github.com/google/uuid v1.1.1
     github.com/json-iterator/go v1.1.10
     github.com/klauspost/cpuid v1.3.1 // indirect
@@ -13,6 +15,7 @@ require (
     github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
     github.com/modern-go/reflect2 v1.0.1 // indirect
     github.com/rs/xid v1.2.1
+    github.com/sirupsen/logrus v1.7.0 // indirect
     github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a // indirect
     github.com/stretchr/testify v1.4.0 // indirect
     golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899

@@ -1,6 +1,12 @@
+github.com/cheggaaa/pb v1.0.29 h1:FckUN5ngEk2LpvuG0fw1GEFx6LtyY2pWI/Z2QgCnEYo=
+github.com/cheggaaa/pb v1.0.29/go.mod h1:W40334L7FMC5JKWldsTWbdGjLo0RxUKK73K+TuPxX30=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
+github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
+github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
+github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
 github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -19,6 +25,13 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
+github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
+github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
+github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
+github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/minio/md5-simd v1.1.0 h1:QPfiOqlZH+Cj9teu0t9b1nTBfPbyTl16Of5MeuShdK4=
 github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
 github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
@@ -37,11 +50,14 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
 github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
+github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM=
+github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a h1:pa8hGb/2YqsZKovtsgrwcDH1RZhVbTKCjLp47XpqCDs=
 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
@@ -55,8 +71,10 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
 golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU=
 golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo=
 golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=

@@ -144,7 +144,7 @@ func closeResponse(resp *http.Response) {
 func getAssumeRoleCredentials(clnt *http.Client, endpoint string, opts STSAssumeRoleOptions) (AssumeRoleResponse, error) {
     v := url.Values{}
     v.Set("Action", "AssumeRole")
-    v.Set("Version", "2011-06-15")
+    v.Set("Version", STSVersion)
     if opts.RoleARN != "" {
         v.Set("RoleArn", opts.RoleARN)
     }

@@ -22,6 +22,9 @@ import (
     "time"
 )

+// STSVersion sts version string
+const STSVersion = "2011-06-15"
+
 // A Value is the AWS credentials value for individual credential fields.
 type Value struct {
     // AWS Access key ID

@@ -48,7 +48,7 @@ type IAM struct {
     Client *http.Client

     // Custom endpoint to fetch IAM role credentials.
-    endpoint string
+    Endpoint string
 }

 // IAM Roles for Amazon EC2
@@ -62,13 +62,12 @@ const (

 // NewIAM returns a pointer to a new Credentials object wrapping the IAM.
 func NewIAM(endpoint string) *Credentials {
-    p := &IAM{
+    return New(&IAM{
         Client: &http.Client{
             Transport: http.DefaultTransport,
         },
-        endpoint: endpoint,
-    }
-    return New(p)
+        Endpoint: endpoint,
+    })
 }

 // Retrieve retrieves credentials from the EC2 service.
@@ -78,7 +77,7 @@ func (m *IAM) Retrieve() (Value, error) {
     var roleCreds ec2RoleCredRespBody
     var err error

-    endpoint := m.endpoint
+    endpoint := m.Endpoint
     switch {
     case len(os.Getenv("AWS_WEB_IDENTITY_TOKEN_FILE")) > 0:
         if len(endpoint) == 0 {
@@ -91,10 +90,8 @@ func (m *IAM) Retrieve() (Value, error) {

         creds := &STSWebIdentity{
             Client: m.Client,
-            stsEndpoint: endpoint,
-            roleARN: os.Getenv("AWS_ROLE_ARN"),
-            roleSessionName: os.Getenv("AWS_ROLE_SESSION_NAME"),
-            getWebIDTokenExpiry: func() (*WebIdentityToken, error) {
+            STSEndpoint: endpoint,
+            GetWebIDTokenExpiry: func() (*WebIdentityToken, error) {
                 token, err := ioutil.ReadFile(os.Getenv("AWS_WEB_IDENTITY_TOKEN_FILE"))
                 if err != nil {
                     return nil, err
@@ -102,6 +99,8 @@ func (m *IAM) Retrieve() (Value, error) {

                 return &WebIdentityToken{Token: string(token)}, nil
             },
+            roleARN: os.Getenv("AWS_ROLE_ARN"),
+            roleSessionName: os.Getenv("AWS_ROLE_SESSION_NAME"),
         }

         stsWebIdentityCreds, err := creds.Retrieve()
@@ -121,7 +120,6 @@ func (m *IAM) Retrieve() (Value, error) {
     case len(os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI")) > 0:
         if len(endpoint) == 0 {
             endpoint = os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI")
-
             var ok bool
             if ok, err = isLoopback(endpoint); !ok {
                 if err == nil {

@@ -73,7 +73,7 @@ type STSClientGrants struct {
     Client *http.Client

     // MinIO endpoint to fetch STS credentials.
-    stsEndpoint string
+    STSEndpoint string

     // getClientGrantsTokenExpiry function to retrieve tokens
     // from IDP This function should return two values one is
@@ -81,7 +81,7 @@ type STSClientGrants struct {
     // and second return value is the expiry associated with
     // this token. This is a customer provided function and
     // is mandatory.
-    getClientGrantsTokenExpiry func() (*ClientGrantsToken, error)
+    GetClientGrantsTokenExpiry func() (*ClientGrantsToken, error)
 }

 // NewSTSClientGrants returns a pointer to a new
@@ -97,8 +97,8 @@ func NewSTSClientGrants(stsEndpoint string, getClientGrantsTokenExpiry func() (*
         Client: &http.Client{
             Transport: http.DefaultTransport,
         },
-        stsEndpoint: stsEndpoint,
-        getClientGrantsTokenExpiry: getClientGrantsTokenExpiry,
+        STSEndpoint: stsEndpoint,
+        GetClientGrantsTokenExpiry: getClientGrantsTokenExpiry,
     }), nil
 }

@@ -114,7 +114,7 @@ func getClientGrantsCredentials(clnt *http.Client, endpoint string,
     v.Set("Action", "AssumeRoleWithClientGrants")
     v.Set("Token", accessToken.Token)
     v.Set("DurationSeconds", fmt.Sprintf("%d", accessToken.Expiry))
-    v.Set("Version", "2011-06-15")
+    v.Set("Version", STSVersion)

     u, err := url.Parse(endpoint)
     if err != nil {
@@ -145,7 +145,7 @@ func getClientGrantsCredentials(clnt *http.Client, endpoint string,
 // Retrieve retrieves credentials from the MinIO service.
 // Error will be returned if the request fails.
 func (m *STSClientGrants) Retrieve() (Value, error) {
-    a, err := getClientGrantsCredentials(m.Client, m.stsEndpoint, m.getClientGrantsTokenExpiry)
+    a, err := getClientGrantsCredentials(m.Client, m.STSEndpoint, m.GetClientGrantsTokenExpiry)
     if err != nil {
         return Value{}, err
     }

@@ -52,36 +52,41 @@ type LDAPIdentityResult struct {
 type LDAPIdentity struct {
     Expiry

-    stsEndpoint string
+    // Required http Client to use when connecting to MinIO STS service.
+    Client *http.Client
+
-    ldapUsername, ldapPassword string
+    // Exported STS endpoint to fetch STS credentials.
+    STSEndpoint string
+
+    // LDAP username/password used to fetch LDAP STS credentials.
+    LDAPUsername, LDAPPassword string
 }

 // NewLDAPIdentity returns new credentials object that uses LDAP
 // Identity.
 func NewLDAPIdentity(stsEndpoint, ldapUsername, ldapPassword string) (*Credentials, error) {
     return New(&LDAPIdentity{
-        stsEndpoint: stsEndpoint,
-        ldapUsername: ldapUsername,
-        ldapPassword: ldapPassword,
+        Client: &http.Client{Transport: http.DefaultTransport},
+        STSEndpoint: stsEndpoint,
+        LDAPUsername: ldapUsername,
+        LDAPPassword: ldapPassword,
     }), nil
 }

 // Retrieve gets the credential by calling the MinIO STS API for
 // LDAP on the configured stsEndpoint.
 func (k *LDAPIdentity) Retrieve() (value Value, err error) {
-    u, kerr := url.Parse(k.stsEndpoint)
+    u, kerr := url.Parse(k.STSEndpoint)
     if kerr != nil {
         err = kerr
         return
     }

-    clnt := &http.Client{Transport: http.DefaultTransport}
     v := url.Values{}
     v.Set("Action", "AssumeRoleWithLDAPIdentity")
-    v.Set("Version", "2011-06-15")
-    v.Set("LDAPUsername", k.ldapUsername)
-    v.Set("LDAPPassword", k.ldapPassword)
+    v.Set("Version", STSVersion)
+    v.Set("LDAPUsername", k.LDAPUsername)
+    v.Set("LDAPPassword", k.LDAPPassword)

     u.RawQuery = v.Encode()

@@ -91,7 +96,7 @@ func (k *LDAPIdentity) Retrieve() (value Value, err error) {
         return
     }

-    resp, kerr := clnt.Do(req)
+    resp, kerr := k.Client.Do(req)
     if kerr != nil {
         err = kerr
         return
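To show how the now-exported LDAP credential fields are meant to be consumed (a sketch only; the endpoint, user, and password are placeholders, while NewLDAPIdentity and the v7 Options struct come from the vendored code in this changeset):

    creds, err := credentials.NewLDAPIdentity("https://minio.example.com", "ldap-user", "ldap-secret")
    if err != nil {
        log.Fatalln(err)
    }
    client, err := minio.New("minio.example.com", &minio.Options{
        Creds:  creds,
        Secure: true,
    })
    if err != nil {
        log.Fatalln(err)
    }
    _ = client // use the client as in the quickstart above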
@ -66,16 +66,16 @@ type STSWebIdentity struct {
|
||||||
// Required http Client to use when connecting to MinIO STS service.
|
// Required http Client to use when connecting to MinIO STS service.
|
||||||
Client *http.Client
|
Client *http.Client
|
||||||
|
|
||||||
// MinIO endpoint to fetch STS credentials.
|
// Exported STS endpoint to fetch STS credentials.
|
||||||
stsEndpoint string
|
STSEndpoint string
|
||||||
|
|
||||||
// getWebIDTokenExpiry function which returns ID tokens
|
// Exported GetWebIDTokenExpiry function which returns ID
|
||||||
// from IDP. This function should return two values one
|
// tokens from IDP. This function should return two values
|
||||||
// is ID token which is a self contained ID token (JWT)
|
// one is ID token which is a self contained ID token (JWT)
|
||||||
// and second return value is the expiry associated with
|
// and second return value is the expiry associated with
|
||||||
// this token.
|
// this token.
|
||||||
// This is a customer provided function and is mandatory.
|
// This is a customer provided function and is mandatory.
|
||||||
getWebIDTokenExpiry func() (*WebIdentityToken, error)
|
GetWebIDTokenExpiry func() (*WebIdentityToken, error)
|
||||||
|
|
||||||
// roleARN is the Amazon Resource Name (ARN) of the role that the caller is
|
// roleARN is the Amazon Resource Name (ARN) of the role that the caller is
|
||||||
// assuming.
|
// assuming.
|
||||||
|
@ -98,8 +98,8 @@ func NewSTSWebIdentity(stsEndpoint string, getWebIDTokenExpiry func() (*WebIdent
|
||||||
Client: &http.Client{
|
Client: &http.Client{
|
||||||
Transport: http.DefaultTransport,
|
Transport: http.DefaultTransport,
|
||||||
},
|
},
|
||||||
stsEndpoint: stsEndpoint,
|
STSEndpoint: stsEndpoint,
|
||||||
getWebIDTokenExpiry: getWebIDTokenExpiry,
|
GetWebIDTokenExpiry: getWebIDTokenExpiry,
|
||||||
}), nil
|
}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -124,7 +124,7 @@ func getWebIdentityCredentials(clnt *http.Client, endpoint, roleARN, roleSession
|
||||||
if idToken.Expiry > 0 {
|
if idToken.Expiry > 0 {
|
||||||
v.Set("DurationSeconds", fmt.Sprintf("%d", idToken.Expiry))
|
v.Set("DurationSeconds", fmt.Sprintf("%d", idToken.Expiry))
|
||||||
}
|
}
|
||||||
v.Set("Version", "2011-06-15")
|
v.Set("Version", STSVersion)
|
||||||
|
|
||||||
u, err := url.Parse(endpoint)
|
u, err := url.Parse(endpoint)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@@ -159,7 +159,7 @@ func getWebIdentityCredentials(clnt *http.Client, endpoint, roleARN, roleSession
 // Retrieve retrieves credentials from the MinIO service.
 // Error will be returned if the request fails.
 func (m *STSWebIdentity) Retrieve() (Value, error) {
-	a, err := getWebIdentityCredentials(m.Client, m.stsEndpoint, m.roleARN, m.roleSessionName, m.getWebIDTokenExpiry)
+	a, err := getWebIdentityCredentials(m.Client, m.STSEndpoint, m.roleARN, m.roleSessionName, m.GetWebIDTokenExpiry)
 	if err != nil {
 		return Value{}, err
 	}
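Because STSEndpoint and GetWebIDTokenExpiry are now exported, a web-identity provider can be populated directly instead of only through NewSTSWebIdentity. A minimal sketch, with a placeholder endpoint and a stub token source:

package main

import (
	"log"
	"net/http"

	"github.com/minio/minio-go/v7/pkg/credentials"
)

func main() {
	sts := &credentials.STSWebIdentity{
		Client:      &http.Client{Transport: http.DefaultTransport},
		STSEndpoint: "https://minio.example.com:9000", // placeholder endpoint
		GetWebIDTokenExpiry: func() (*credentials.WebIdentityToken, error) {
			// Placeholder: fetch a real OIDC ID token from your IDP here.
			return &credentials.WebIdentityToken{Token: "<jwt-from-idp>"}, nil
		},
	}

	creds := credentials.New(sts)
	v, err := creds.Get()
	if err != nil {
		log.Fatalln(err)
	}
	log.Println("access key:", v.AccessKeyID)
}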
@@ -146,7 +146,7 @@ type Filter struct {
 	XMLName xml.Name `xml:"Filter" json:"-"`
 	And     And      `xml:"And,omitempty" json:"And,omitempty"`
 	Prefix  string   `xml:"Prefix,omitempty" json:"Prefix,omitempty"`
-	Tag     Tag      `xml:"Tag,omitempty" json:"-"`
+	Tag     Tag      `xml:"Tag,omitempty" json:"Tag,omitempty"`
 }

 // MarshalXML - produces the xml representation of the Filter struct
@@ -43,6 +43,8 @@ const (
 	ObjectRemovedDelete               = "s3:ObjectRemoved:Delete"
 	ObjectRemovedDeleteMarkerCreated  = "s3:ObjectRemoved:DeleteMarkerCreated"
 	ObjectReducedRedundancyLostObject = "s3:ReducedRedundancyLostObject"
+	BucketCreatedAll                  = "s3:BucketCreated:*"
+	BucketRemovedAll                  = "s3:BucketRemoved:*"
 )

 // FilterRule - child of S3Key, a tag in the notification xml which
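The two added constants cover MinIO's bucket-level events. A minimal sketch of subscribing to them, assuming a reachable MinIO deployment and that the v7 ListenNotification API is otherwise unchanged by this update (endpoint and credentials are placeholders):

package main

import (
	"context"
	"log"

	"github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"
	"github.com/minio/minio-go/v7/pkg/notification"
)

func main() {
	// Placeholder endpoint and credentials.
	client, err := minio.New("minio.example.com:9000", &minio.Options{
		Creds:  credentials.NewStaticV4("ACCESS-KEY", "SECRET-KEY", ""),
		Secure: true,
	})
	if err != nil {
		log.Fatalln(err)
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Listen for bucket create/remove events using the new constants.
	for info := range client.ListenNotification(ctx, "", "", []string{
		notification.BucketCreatedAll,
		notification.BucketRemovedAll,
	}) {
		if info.Err != nil {
			log.Fatalln(info.Err)
		}
		log.Printf("%+v", info.Records)
	}
}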
@@ -57,10 +57,12 @@ type Options struct {
 	DestBucket             string
 	IsTagSet               bool
 	IsSCSet                bool
+	ReplicateDeletes       string // replicate versioned deletes
+	ReplicateDeleteMarkers string // replicate soft deletes
 }

 // Tags returns a slice of tags for a rule
-func (opts Options) Tags() []Tag {
+func (opts Options) Tags() ([]Tag, error) {
 	var tagList []Tag
 	tagTokens := strings.Split(opts.TagString, "&")
 	for _, tok := range tagTokens {
@@ -68,12 +70,15 @@ func (opts Options) Tags() []Tag {
 			break
 		}
 		kv := strings.SplitN(tok, "=", 2)
+		if len(kv) != 2 {
+			return []Tag{}, fmt.Errorf("Tags should be entered as comma separated k=v pairs")
+		}
 		tagList = append(tagList, Tag{
 			Key:   kv[0],
 			Value: kv[1],
 		})
 	}
-	return tagList
+	return tagList, nil
 }

 // Config - replication configuration specified in
@@ -110,9 +115,12 @@ func (c *Config) AddRule(opts Options) error {
 		return fmt.Errorf("Rule state should be either [enable|disable]")
 	}

-	tags := opts.Tags()
+	tags, err := opts.Tags()
+	if err != nil {
+		return err
+	}
 	andVal := And{
-		Tags: opts.Tags(),
+		Tags: tags,
 	}
 	filter := Filter{Prefix: opts.Prefix}
 	// only a single tag is set.
@@ -152,6 +160,30 @@ func (c *Config) AddRule(opts Options) error {
 			return fmt.Errorf("destination bucket needs to be in Arn format")
 		}
 	}
+	dmStatus := Disabled
+	if opts.ReplicateDeleteMarkers != "" {
+		switch opts.ReplicateDeleteMarkers {
+		case "enable":
+			dmStatus = Enabled
+		case "disable":
+			dmStatus = Disabled
+		default:
+			return fmt.Errorf("ReplicateDeleteMarkers should be either enable|disable")
+		}
+	}
+
+	vDeleteStatus := Disabled
+	if opts.ReplicateDeletes != "" {
+		switch opts.ReplicateDeletes {
+		case "enable":
+			vDeleteStatus = Enabled
+		case "disable":
+			vDeleteStatus = Disabled
+		default:
+			return fmt.Errorf("ReplicateDeletes should be either enable|disable")
+		}
+	}
+
 	newRule := Rule{
 		ID:       opts.ID,
 		Priority: priority,
@@ -161,7 +193,8 @@ func (c *Config) AddRule(opts Options) error {
 			Bucket:       destBucket,
 			StorageClass: opts.StorageClass,
 		},
-		DeleteMarkerReplication: DeleteMarkerReplication{Status: Disabled},
+		DeleteMarkerReplication: DeleteMarkerReplication{Status: dmStatus},
+		DeleteReplication:       DeleteReplication{Status: vDeleteStatus},
 	}

 	// validate rule after overlaying priority for pre-existing rule being disabled.
@@ -211,8 +244,12 @@ func (c *Config) EditRule(opts Options) error {
 	if len(newRule.Filter.And.Tags) != 0 {
 		tags = newRule.Filter.And.Tags
 	}
+	var err error
 	if opts.IsTagSet {
-		tags = opts.Tags()
+		tags, err = opts.Tags()
+		if err != nil {
+			return err
+		}
 	}
 	andVal := And{
 		Tags: tags,
@@ -244,6 +281,30 @@ func (c *Config) EditRule(opts Options) error {
 			return fmt.Errorf("Rule state should be either [enable|disable]")
 		}
 	}
+	// set DeleteMarkerReplication rule status for edit option
+	if opts.ReplicateDeleteMarkers != "" {
+		switch opts.ReplicateDeleteMarkers {
+		case "enable":
+			newRule.DeleteMarkerReplication.Status = Enabled
+		case "disable":
+			newRule.DeleteMarkerReplication.Status = Disabled
+		default:
+			return fmt.Errorf("ReplicateDeleteMarkers state should be either [enable|disable]")
+		}
+	}
+
+	// set DeleteReplication rule status for edit option. This is a MinIO specific
+	// option to replicate versioned deletes
+	if opts.ReplicateDeletes != "" {
+		switch opts.ReplicateDeletes {
+		case "enable":
+			newRule.DeleteReplication.Status = Enabled
+		case "disable":
+			newRule.DeleteReplication.Status = Disabled
+		default:
+			return fmt.Errorf("ReplicateDeletes state should be either [enable|disable]")
+		}
+	}

 	if opts.IsSCSet {
 		newRule.Destination.StorageClass = opts.StorageClass
@@ -314,6 +375,7 @@ type Rule struct {
 	Status                  Status                  `xml:"Status"`
 	Priority                int                     `xml:"Priority"`
 	DeleteMarkerReplication DeleteMarkerReplication `xml:"DeleteMarkerReplication"`
+	DeleteReplication       DeleteReplication       `xml:"DeleteReplication"`
 	Destination             Destination             `xml:"Destination"`
 	Filter                  Filter                  `xml:"Filter" json:"Filter"`
 }
@@ -470,7 +532,7 @@ type Destination struct {
 type And struct {
 	XMLName xml.Name `xml:"And,omitempty" json:"-"`
 	Prefix  string   `xml:"Prefix,omitempty" json:"Prefix,omitempty"`
-	Tags    []Tag    `xml:"Tags,omitempty" json:"Tags,omitempty"`
+	Tags    []Tag    `xml:"Tag,omitempty" json:"Tag,omitempty"`
 }

 // isEmpty returns true if Tags field is null
@@ -496,3 +558,14 @@ type DeleteMarkerReplication struct {
 func (d DeleteMarkerReplication) IsEmpty() bool {
 	return len(d.Status) == 0
 }
+
+// DeleteReplication - whether versioned deletes are replicated - this
+// is a MinIO specific extension
+type DeleteReplication struct {
+	Status Status `xml:"Status" json:"Status"` // should be set to "Disabled" by default
+}
+
+// IsEmpty returns true if DeleteReplication is not set
+func (d DeleteReplication) IsEmpty() bool {
+	return len(d.Status) == 0
+}
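Taken together, the replication changes surface two new user-facing knobs (ReplicateDeleteMarkers, ReplicateDeletes) and make Tags() report malformed tag strings. A minimal sketch exercising both; the tag string and bucket ARN are placeholders, and other fields a real rule needs (rule status, priority) are left out for brevity:

package main

import (
	"log"

	"github.com/minio/minio-go/v7/pkg/replication"
)

func main() {
	opts := replication.Options{
		ID:                     "rule-1",
		Prefix:                 "docs/",
		TagString:              "env=prod&team=storage", // "k=v" pairs joined with '&'
		IsTagSet:               true,
		DestBucket:             "arn:aws:s3:::target-bucket", // placeholder ARN
		ReplicateDeleteMarkers: "enable",                     // new: replicate soft deletes
		ReplicateDeletes:       "enable",                     // new: replicate versioned deletes (MinIO extension)
	}

	// Tags now returns an error for malformed pairs instead of silently
	// producing a partial slice.
	tags, err := opts.Tags()
	if err != nil {
		log.Fatalln(err)
	}
	log.Printf("parsed %d tag(s)", len(tags))

	// AddRule validates the two new fields and copies them into the rule's
	// DeleteMarkerReplication / DeleteReplication status. Rule status and
	// priority are omitted here, so a real call would set them before
	// expecting this to succeed.
	var cfg replication.Config
	if err := cfg.AddRule(opts); err != nil {
		log.Println("AddRule:", err)
	}
}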
@@ -286,15 +286,15 @@ func EncodePath(pathName string) string {
 	if reservedObjectNames.MatchString(pathName) {
 		return pathName
 	}
-	var encodedPathname string
+	var encodedPathname strings.Builder
 	for _, s := range pathName {
 		if 'A' <= s && s <= 'Z' || 'a' <= s && s <= 'z' || '0' <= s && s <= '9' { // §2.3 Unreserved characters (mark)
-			encodedPathname = encodedPathname + string(s)
+			encodedPathname.WriteRune(s)
 			continue
 		}
 		switch s {
 		case '-', '_', '.', '~', '/': // §2.3 Unreserved characters (mark)
-			encodedPathname = encodedPathname + string(s)
+			encodedPathname.WriteRune(s)
 			continue
 		default:
 			len := utf8.RuneLen(s)
@@ -306,11 +306,11 @@ func EncodePath(pathName string) string {
 			utf8.EncodeRune(u, s)
 			for _, r := range u {
 				hex := hex.EncodeToString([]byte{r})
-				encodedPathname = encodedPathname + "%" + strings.ToUpper(hex)
+				encodedPathname.WriteString("%" + strings.ToUpper(hex))
 			}
 		}
 	}
-	return encodedPathname
+	return encodedPathname.String()
 }

 // We support '.' with bucket names but we fallback to using path
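No behavioural change here: EncodePath still produces the same percent-encoding, it just accumulates the result in a strings.Builder instead of repeated string concatenation. A quick usage sketch (the path value is an arbitrary example):

package main

import (
	"fmt"

	"github.com/minio/minio-go/v7/pkg/s3utils"
)

func main() {
	// Unreserved characters pass through; everything else is %XX-encoded,
	// exactly as before the strings.Builder rewrite.
	fmt.Println(s3utils.EncodePath("photos/2020/день 1/Ünïcode name.jpg"))
}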
@@ -33,12 +33,12 @@ const MaxJitter = 1.0
 const NoJitter = 0.0

 // DefaultRetryUnit - default unit multiplicative per retry.
-// defaults to 1 second.
-const DefaultRetryUnit = time.Second
+// defaults to 200 * time.Millisecond
+var DefaultRetryUnit = 200 * time.Millisecond

 // DefaultRetryCap - Each retry attempt never waits no longer than
 // this maximum time duration.
-const DefaultRetryCap = time.Second * 30
+var DefaultRetryCap = time.Second

 // newRetryTimer creates a timer with exponentially increasing
 // delays until the maximum retry attempts are reached.
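Note the semantic change: the retry pacing defaults both shrink (unit 1s to 200ms, cap 30s to 1s) and turn into package-level variables. A minimal sketch of restoring the previous values, assuming these still live in the root minio package as before:

package main

import (
	"time"

	"github.com/minio/minio-go/v7"
)

func main() {
	// Variables as of this change, so an application that prefers the
	// pre-update pacing can put it back before creating clients.
	minio.DefaultRetryUnit = time.Second
	minio.DefaultRetryCap = 30 * time.Second
}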
@@ -297,6 +297,8 @@ func ToObjectInfo(bucketName string, objectName string, h http.Header) (ObjectIn
 	// extract lifecycle expiry date and rule ID
 	expTime, ruleID := amzExpirationToExpiryDateRuleID(h.Get(amzExpiration))

+	deleteMarker := h.Get(amzDeleteMarker) == "true"
+
 	// Save object metadata info.
 	return ObjectInfo{
 		ETag: etag,
@@ -306,6 +308,7 @@ func ToObjectInfo(bucketName string, objectName string, h http.Header) (ObjectIn
 		ContentType:       contentType,
 		Expires:           expiry,
 		VersionID:         h.Get(amzVersionID),
+		IsDeleteMarker:    deleteMarker,
 		ReplicationStatus: h.Get(amzReplicationStatus),
 		Expiration:        expTime,
 		ExpirationRuleID:  ruleID,
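With the x-amz-delete-marker header now parsed, ObjectInfo values built from HEAD/GET responses carry IsDeleteMarker as well. A minimal sketch of reading it after a StatObject call (endpoint, credentials, bucket and object names are placeholders):

package main

import (
	"context"
	"log"

	"github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"
)

func main() {
	client, err := minio.New("minio.example.com:9000", &minio.Options{
		Creds:  credentials.NewStaticV4("ACCESS-KEY", "SECRET-KEY", ""),
		Secure: true,
	})
	if err != nil {
		log.Fatalln(err)
	}

	info, err := client.StatObject(context.Background(), "my-bucket", "my-object", minio.StatObjectOptions{})
	if err != nil {
		log.Fatalln(err)
	}

	// Populated from the x-amz-delete-marker response header when the server sends it.
	log.Println("version:", info.VersionID, "delete marker:", info.IsDeleteMarker)
}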
@@ -69,7 +69,7 @@ github.com/PuerkitoBio/urlesc
 # github.com/RoaringBitmap/roaring v0.5.5
 ## explicit
 github.com/RoaringBitmap/roaring
-# github.com/alecthomas/chroma v0.8.1
+# github.com/alecthomas/chroma v0.8.2
 ## explicit
 github.com/alecthomas/chroma
 github.com/alecthomas/chroma/formatters/html
@@ -116,7 +116,7 @@ github.com/asaskevich/govalidator
 github.com/aymerick/douceur/css
 # github.com/beorn7/perks v1.0.1
 github.com/beorn7/perks/quantile
-# github.com/blevesearch/bleve v1.0.12
+# github.com/blevesearch/bleve v1.0.13
 ## explicit
 github.com/blevesearch/bleve
 github.com/blevesearch/bleve/analysis
@@ -166,15 +166,15 @@ github.com/blevesearch/segment
 # github.com/blevesearch/snowballstem v0.9.0
 github.com/blevesearch/snowballstem
 github.com/blevesearch/snowballstem/english
-# github.com/blevesearch/zap/v11 v11.0.12
+# github.com/blevesearch/zap/v11 v11.0.13
 github.com/blevesearch/zap/v11
-# github.com/blevesearch/zap/v12 v12.0.12
+# github.com/blevesearch/zap/v12 v12.0.13
 github.com/blevesearch/zap/v12
-# github.com/blevesearch/zap/v13 v13.0.4
+# github.com/blevesearch/zap/v13 v13.0.5
 github.com/blevesearch/zap/v13
-# github.com/blevesearch/zap/v14 v14.0.3
+# github.com/blevesearch/zap/v14 v14.0.4
 github.com/blevesearch/zap/v14
-# github.com/blevesearch/zap/v15 v15.0.1
+# github.com/blevesearch/zap/v15 v15.0.2
 github.com/blevesearch/zap/v15
 # github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc
 github.com/boombuler/barcode
@@ -235,7 +235,7 @@ github.com/dsnet/compress/internal/prefix
 # github.com/dustin/go-humanize v1.0.0
 ## explicit
 github.com/dustin/go-humanize
-# github.com/editorconfig/editorconfig-core-go/v2 v2.3.8
+# github.com/editorconfig/editorconfig-core-go/v2 v2.3.9
 ## explicit
 github.com/editorconfig/editorconfig-core-go/v2
 # github.com/emirpasic/gods v1.12.0
@@ -511,7 +511,7 @@ github.com/keybase/go-crypto/openpgp/errors
 github.com/keybase/go-crypto/openpgp/packet
 github.com/keybase/go-crypto/openpgp/s2k
 github.com/keybase/go-crypto/rsa
-# github.com/klauspost/compress v1.11.2
+# github.com/klauspost/compress v1.11.3
 ## explicit
 github.com/klauspost/compress/flate
 github.com/klauspost/compress/fse
@@ -595,7 +595,7 @@ github.com/mholt/archiver/v3
 github.com/microcosm-cc/bluemonday
 # github.com/minio/md5-simd v1.1.0
 github.com/minio/md5-simd
-# github.com/minio/minio-go/v7 v7.0.5
+# github.com/minio/minio-go/v7 v7.0.6
 ## explicit
 github.com/minio/minio-go/v7
 github.com/minio/minio-go/v7/pkg/credentials