Update Vendors (#337)
* update & migrate gitea sdk (Fix Delete Tag Issue) * upgraded github.com/AlecAivazis/survey v2.2.7 => v2.2.8 * upgraded github.com/adrg/xdg v0.2.3 => v0.3.1 * upgraded github.com/araddon/dateparse * upgraded github.com/olekukonko/tablewriter v0.0.4 => v0.0.5 * upgraded gopkg.in/yaml.v2 v2.3.0 => v2.4.0 Reviewed-on: https://gitea.com/gitea/tea/pulls/337 Reviewed-by: Norwin <noerw@noreply.gitea.io> Reviewed-by: khmarbaise <khmarbaise@noreply.gitea.io> Co-authored-by: 6543 <6543@obermui.de> Co-committed-by: 6543 <6543@obermui.de>
This commit is contained in:
parent
15c4edba1a
commit
0d98cbd657
|
@ -64,7 +64,7 @@ func runReleaseDelete(cmd *cli.Context) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if ctx.Bool("delete-tag") {
|
if ctx.Bool("delete-tag") {
|
||||||
_, err = client.DeleteReleaseTag(ctx.Owner, ctx.Repo, tag)
|
_, err = client.DeleteTag(ctx.Owner, ctx.Repo, tag)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
35
go.mod
35
go.mod
|
@ -4,36 +4,33 @@ go 1.13
|
||||||
|
|
||||||
require (
|
require (
|
||||||
code.gitea.io/gitea-vet v0.2.1
|
code.gitea.io/gitea-vet v0.2.1
|
||||||
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f
|
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5
|
||||||
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b
|
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b
|
||||||
github.com/AlecAivazis/survey/v2 v2.2.7
|
github.com/AlecAivazis/survey/v2 v2.2.8
|
||||||
github.com/Microsoft/go-winio v0.4.15 // indirect
|
github.com/Microsoft/go-winio v0.4.16 // indirect
|
||||||
github.com/adrg/xdg v0.2.3
|
github.com/adrg/xdg v0.3.1
|
||||||
github.com/alecthomas/chroma v0.8.1 // indirect
|
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e
|
||||||
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4
|
|
||||||
github.com/charmbracelet/glamour v0.2.0
|
github.com/charmbracelet/glamour v0.2.0
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
|
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
|
||||||
github.com/dlclark/regexp2 v1.4.0 // indirect
|
|
||||||
github.com/go-git/go-git/v5 v5.2.0
|
github.com/go-git/go-git/v5 v5.2.0
|
||||||
github.com/imdario/mergo v0.3.11 // indirect
|
github.com/imdario/mergo v0.3.11 // indirect
|
||||||
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect
|
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||||
github.com/mattn/go-colorable v0.1.8 // indirect
|
github.com/mattn/go-colorable v0.1.8 // indirect
|
||||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
|
||||||
github.com/microcosm-cc/bluemonday v1.0.4 // indirect
|
|
||||||
github.com/muesli/reflow v0.2.0 // indirect
|
|
||||||
github.com/muesli/termenv v0.7.4
|
github.com/muesli/termenv v0.7.4
|
||||||
github.com/olekukonko/tablewriter v0.0.4
|
github.com/olekukonko/tablewriter v0.0.5
|
||||||
|
github.com/rivo/uniseg v0.2.0 // indirect
|
||||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||||
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
|
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
|
||||||
github.com/stretchr/testify v1.6.1
|
github.com/stretchr/testify v1.7.0
|
||||||
github.com/urfave/cli/v2 v2.3.0
|
github.com/urfave/cli/v2 v2.3.0
|
||||||
github.com/xanzy/ssh-agent v0.3.0 // indirect
|
github.com/xanzy/ssh-agent v0.3.0 // indirect
|
||||||
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620
|
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83
|
||||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 // indirect
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 // indirect
|
||||||
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0 // indirect
|
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3 // indirect
|
||||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf // indirect
|
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d // indirect
|
||||||
golang.org/x/text v0.3.4 // indirect
|
golang.org/x/text v0.3.5 // indirect
|
||||||
golang.org/x/tools v0.0.0-20201105220310-78b158585360 // indirect
|
golang.org/x/tools v0.1.0 // indirect
|
||||||
gopkg.in/yaml.v2 v2.3.0
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect
|
|
||||||
)
|
)
|
||||||
|
|
112
go.sum
112
go.sum
|
@ -1,28 +1,25 @@
|
||||||
code.gitea.io/gitea-vet v0.2.1 h1:b30by7+3SkmiftK0RjuXqFvZg2q4p68uoPGuxhzBN0s=
|
code.gitea.io/gitea-vet v0.2.1 h1:b30by7+3SkmiftK0RjuXqFvZg2q4p68uoPGuxhzBN0s=
|
||||||
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
|
code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
|
||||||
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f h1:v+cKQhO5BFcUVZN73CaKPgM3yudiT8U1DYetMS6l1tE=
|
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5 h1:va0KddYHN8bH6MCUaWf5e4p+il55blUw5J0ha5vTMaQ=
|
||||||
code.gitea.io/sdk/gitea v0.13.1-0.20201217101417-97e61e5a8a5f/go.mod h1:89WiyOX1KEcvjP66sRHdu0RafojGo60bT9UqW17VbWs=
|
code.gitea.io/sdk/gitea v0.13.1-0.20210304201955-ff82113459b5/go.mod h1:89WiyOX1KEcvjP66sRHdu0RafojGo60bT9UqW17VbWs=
|
||||||
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b h1:CLYsMGcGLohESQDMth+RgJ4cB3CCHToxnj0zBbvB3sE=
|
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b h1:CLYsMGcGLohESQDMth+RgJ4cB3CCHToxnj0zBbvB3sE=
|
||||||
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b/go.mod h1:Fc8iyPm4NINRWujeIk2bTfcbGc4ZYY29/oMAAGcr4qI=
|
gitea.com/noerw/unidiff-comments v0.0.0-20201219085024-64aec5658f2b/go.mod h1:Fc8iyPm4NINRWujeIk2bTfcbGc4ZYY29/oMAAGcr4qI=
|
||||||
github.com/AlecAivazis/survey/v2 v2.2.7 h1:5NbxkF4RSKmpywYdcRgUmos1o+roJY8duCLZXbVjoig=
|
github.com/AlecAivazis/survey/v2 v2.2.8 h1:TgxCwybKdBckmC+/P9/5h49rw/nAHe/itZL0dgHs+Q0=
|
||||||
github.com/AlecAivazis/survey/v2 v2.2.7/go.mod h1:9DYvHgXtiXm6nCn+jXnOXLKbH+Yo9u8fAS/SduGdoPk=
|
github.com/AlecAivazis/survey/v2 v2.2.8/go.mod h1:9DYvHgXtiXm6nCn+jXnOXLKbH+Yo9u8fAS/SduGdoPk=
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
github.com/Microsoft/go-winio v0.4.14 h1:+hMXMk01us9KgxGb7ftKQt2Xpf5hH/yky+TDA+qxleU=
|
|
||||||
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
|
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
|
||||||
github.com/Microsoft/go-winio v0.4.15 h1:qkLXKzb1QoVatRyd/YlXZ/Kg0m5K3SPuoD82jjSOaBc=
|
github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=
|
||||||
github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
|
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
|
||||||
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8 h1:xzYJEypr/85nBpB11F9br+3HUrpgb+fcm5iADzXXYEw=
|
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8 h1:xzYJEypr/85nBpB11F9br+3HUrpgb+fcm5iADzXXYEw=
|
||||||
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc=
|
github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc=
|
||||||
github.com/adrg/xdg v0.2.3 h1:GxXngdYxNDkoUvZXjNJGwqZxWXi43MKbOOlA/00qZi4=
|
github.com/adrg/xdg v0.3.1 h1:uIyL9BYfXaFgDyVRKE8wjtm6ETQULweQqTofphEFJYY=
|
||||||
github.com/adrg/xdg v0.2.3/go.mod h1:7I2hH/IT30IsupOpKZ5ue7/qNi3CoKzD6tL3HwpaRMQ=
|
github.com/adrg/xdg v0.3.1/go.mod h1:7I2hH/IT30IsupOpKZ5ue7/qNi3CoKzD6tL3HwpaRMQ=
|
||||||
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
|
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
|
||||||
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
|
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
|
||||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
|
||||||
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
|
||||||
github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI=
|
github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI=
|
||||||
github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
||||||
github.com/alecthomas/chroma v0.8.1 h1:ym20sbvyC6RXz45u4qDglcgr8E313oPROshcuCHqiEE=
|
|
||||||
github.com/alecthomas/chroma v0.8.1/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
|
|
||||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
|
||||||
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
|
||||||
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
github.com/alecthomas/kong v0.2.4/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
|
||||||
|
@ -30,17 +27,12 @@ github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkx
|
||||||
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
|
||||||
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
|
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
|
||||||
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
|
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
|
||||||
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4 h1:OkS1BqB3CzLtGRznRyvriSY8jeaVk2CrDn2ZiRQgMUI=
|
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e h1:OjdSMCht0ZVX7IH0nTdf00xEustvbtUGRgMh3gbdmOg=
|
||||||
github.com/araddon/dateparse v0.0.0-20201001162425-8aadafed4dc4/go.mod h1:hMAUZFIkk4B1FouGxqlogyMyU6BwY/UiVmmbbzz9Up8=
|
github.com/araddon/dateparse v0.0.0-20210207001429-0eec95c9db7e/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
|
||||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
|
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
|
||||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
|
||||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
|
||||||
github.com/charmbracelet/glamour v0.2.0 h1:mTgaiNiumpqTZp3qVM6DH9UB0NlbY17wejoMf1kM8Pg=
|
github.com/charmbracelet/glamour v0.2.0 h1:mTgaiNiumpqTZp3qVM6DH9UB0NlbY17wejoMf1kM8Pg=
|
||||||
github.com/charmbracelet/glamour v0.2.0/go.mod h1:UA27Kwj3QHialP74iU6C+Gpc8Y7IOAKupeKMLLBURWM=
|
github.com/charmbracelet/glamour v0.2.0/go.mod h1:UA27Kwj3QHialP74iU6C+Gpc8Y7IOAKupeKMLLBURWM=
|
||||||
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
|
|
||||||
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
|
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY=
|
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
|
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||||
|
@ -52,8 +44,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
||||||
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
|
||||||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
|
||||||
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
|
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
|
||||||
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
|
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
|
||||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
|
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
|
||||||
|
@ -70,15 +60,11 @@ github.com/go-git/go-git/v5 v5.2.0 h1:YPBLG/3UK1we1ohRkncLjaXWLW+HKp5QNM/jTli2Jg
|
||||||
github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs=
|
github.com/go-git/go-git/v5 v5.2.0/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs=
|
||||||
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
|
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
|
||||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||||
github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f h1:5CjVwnuUcp5adK4gmY6i72gpVFVnZDP2h5TmPScB6u4=
|
|
||||||
github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4=
|
github.com/google/goterm v0.0.0-20190703233501-fc88cf888a3f/go.mod h1:nOFQdrUlIlx6M6ODdSpBj1NVA+VgLC6kmw60mkw34H4=
|
||||||
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
|
|
||||||
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
|
|
||||||
github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI=
|
github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI=
|
||||||
github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
|
||||||
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ=
|
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174 h1:WlZsjVhE8Af9IcZDGgJGQpNflI3+MJSBhsgT5PCtzBQ=
|
||||||
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A=
|
github.com/hinshun/vt10x v0.0.0-20180616224451-1954e6464174/go.mod h1:DqJ97dSdRW1W22yXSB90986pcOyQ7r45iio1KN2ez1A=
|
||||||
github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=
|
|
||||||
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
|
github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
|
||||||
github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
|
github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
|
||||||
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||||
|
@ -87,63 +73,56 @@ github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i
|
||||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
|
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
|
||||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||||
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY=
|
|
||||||
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
||||||
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=
|
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=
|
||||||
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
|
||||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
github.com/kr/pty v1.1.4 h1:5Myjjh3JY/NaAi4IsUbHADytDyl1VE1Y9PXDlL+P/VQ=
|
github.com/kr/pty v1.1.4 h1:5Myjjh3JY/NaAi4IsUbHADytDyl1VE1Y9PXDlL+P/VQ=
|
||||||
github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
|
||||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||||
github.com/lucasb-eyer/go-colorful v1.0.3 h1:QIbQXiugsb+q10B+MI+7DI1oQLdmnep86tWFlaaUAac=
|
|
||||||
github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||||
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||||
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
|
|
||||||
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||||
github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
|
github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
|
||||||
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||||
github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
|
|
||||||
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
|
||||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
|
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
|
||||||
|
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||||
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
|
||||||
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
|
||||||
github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s=
|
github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s=
|
||||||
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
|
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
|
||||||
github.com/microcosm-cc/bluemonday v1.0.4 h1:p0L+CTpo/PLFdkoPcJemLXG+fpMD7pYOoDEq1axMbGg=
|
|
||||||
github.com/microcosm-cc/bluemonday v1.0.4/go.mod h1:8iwZnFn2CDDNZ0r6UXhF4xawGvzaqzCRa1n3/lO3W2w=
|
|
||||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||||
github.com/muesli/reflow v0.1.0 h1:oQdpLfO56lr5pgLvqD0TcjW85rDjSYSBVdiG1Ch1ddM=
|
github.com/muesli/reflow v0.1.0 h1:oQdpLfO56lr5pgLvqD0TcjW85rDjSYSBVdiG1Ch1ddM=
|
||||||
github.com/muesli/reflow v0.1.0/go.mod h1:I9bWAt7QTg/que/qmUCJBGlj7wEq8OAFBjPNjc6xK4I=
|
github.com/muesli/reflow v0.1.0/go.mod h1:I9bWAt7QTg/que/qmUCJBGlj7wEq8OAFBjPNjc6xK4I=
|
||||||
github.com/muesli/reflow v0.2.0 h1:2o0UBJPHHH4fa2GCXU4Rg4DwOtWPMekCeyc5EWbAQp0=
|
|
||||||
github.com/muesli/reflow v0.2.0/go.mod h1:qT22vjVmM9MIUeLgsVYe/Ye7eZlbv9dZjL3dVhUqLX8=
|
|
||||||
github.com/muesli/termenv v0.6.0 h1:zxvzTBmo4ZcxhNGGWeMz+Tttm51eF5bmPjfy4MCRYlk=
|
|
||||||
github.com/muesli/termenv v0.6.0/go.mod h1:SohX91w6swWA4AYU+QmPx+aSgXhWO0juiyID9UZmbpA=
|
github.com/muesli/termenv v0.6.0/go.mod h1:SohX91w6swWA4AYU+QmPx+aSgXhWO0juiyID9UZmbpA=
|
||||||
github.com/muesli/termenv v0.7.4 h1:/pBqvU5CpkY53tU0vVn+xgs2ZTX63aH5nY+SSps5Xa8=
|
github.com/muesli/termenv v0.7.4 h1:/pBqvU5CpkY53tU0vVn+xgs2ZTX63aH5nY+SSps5Xa8=
|
||||||
github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc=
|
github.com/muesli/termenv v0.7.4/go.mod h1:pZ7qY9l3F7e5xsAOS0zCew2tME+p7bWeBkotCEcIIcc=
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
|
|
||||||
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
|
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
|
||||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||||
|
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
|
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||||
|
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
@ -153,30 +132,25 @@ github.com/seletskiy/tplutil v0.0.0-20200921103632-f880f6245597/go.mod h1:F8CBHS
|
||||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||||
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
|
|
||||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||||
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
|
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
|
||||||
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
|
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
|
||||||
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
|
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
|
|
||||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
|
||||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
|
||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
|
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
|
||||||
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
|
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
|
||||||
github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70=
|
|
||||||
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
|
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
|
||||||
github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
|
github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
|
||||||
github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
|
github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
|
||||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.2.0 h1:WOOcyaJPlzb8fZ8TloxFe8QZkhOOJx87leDa9MIT9dc=
|
|
||||||
github.com/yuin/goldmark v1.2.0/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.2.0/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=
|
github.com/yuin/goldmark v1.2.1 h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=
|
||||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
|
@ -184,12 +158,10 @@ golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnf
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
|
|
||||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 h1:3wPMTskHO3+O6jqTEXyFcsnuxMQOqYSaHsDxcbUXpqA=
|
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 h1:/ZScEX8SfEmUGRHs0gxpqteO5nfNW6axyZbBdw9A12g=
|
||||||
golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||||
golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=
|
|
||||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4=
|
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4=
|
||||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
|
@ -197,12 +169,10 @@ golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73r
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a h1:GuSPYbZzB5/dcLNCwLQLsg3obCJtX9IJhpXkvY7kzk0=
|
|
||||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI=
|
|
||||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102 h1:42cLlJJdEh+ySyeUUbEQ5bsTiq8voBeTuweGVkY6Puw=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
|
||||||
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
@ -217,35 +187,29 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
|
|
||||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
|
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0 h1:n+DPcgTwkgWzIFpLmoimYR2K2b0Ga5+Os4kayIN0vGo=
|
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201221093633-bc327ba9c2f0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3 h1:RdE7htvBru4I4VZQofQjCZk5W9+aLNlSF5n0zgVwm8s=
|
||||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
|
golang.org/x/sys v0.0.0-20210305034016-7844c3c200c3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf h1:MZ2shdL+ZM/XzY3ZGOnh4Nlpnxz5GSOhOmtHo3iPU6M=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20201210144234-2321bbc49cbf/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=
|
||||||
|
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||||
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
|
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc=
|
golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
|
||||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224 h1:azwY/v0y0K4mFHVsg5+UrTgchqALYWpqVo6vL5OmkmI=
|
|
||||||
golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
|
golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
|
||||||
golang.org/x/tools v0.0.0-20201105220310-78b158585360 h1:/9CzsU8hOpnSUCtem1vfWNgsVeCTgkMdx+VE5YIYxnU=
|
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
|
||||||
golang.org/x/tools v0.0.0-20201105220310-78b158585360/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
@ -255,13 +219,11 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
|
||||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
||||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
|
||||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
|
|
|
@ -65,12 +65,12 @@ func (c *Client) AdminCreateUser(opt CreateUserOption) (*User, *Response, error)
|
||||||
type EditUserOption struct {
|
type EditUserOption struct {
|
||||||
SourceID int64 `json:"source_id"`
|
SourceID int64 `json:"source_id"`
|
||||||
LoginName string `json:"login_name"`
|
LoginName string `json:"login_name"`
|
||||||
FullName string `json:"full_name"`
|
Email *string `json:"email"`
|
||||||
Email string `json:"email"`
|
FullName *string `json:"full_name"`
|
||||||
Password string `json:"password"`
|
Password string `json:"password"`
|
||||||
MustChangePassword *bool `json:"must_change_password"`
|
MustChangePassword *bool `json:"must_change_password"`
|
||||||
Website string `json:"website"`
|
Website *string `json:"website"`
|
||||||
Location string `json:"location"`
|
Location *string `json:"location"`
|
||||||
Active *bool `json:"active"`
|
Active *bool `json:"active"`
|
||||||
Admin *bool `json:"admin"`
|
Admin *bool `json:"admin"`
|
||||||
AllowGitHook *bool `json:"allow_git_hook"`
|
AllowGitHook *bool `json:"allow_git_hook"`
|
||||||
|
|
|
@ -26,7 +26,7 @@ func Version() string {
|
||||||
return "0.14.0"
|
return "0.14.0"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Client represents a Gitea API client.
|
// Client represents a thread-safe Gitea API client.
|
||||||
type Client struct {
|
type Client struct {
|
||||||
url string
|
url string
|
||||||
accessToken string
|
accessToken string
|
||||||
|
@ -37,6 +37,7 @@ type Client struct {
|
||||||
debug bool
|
debug bool
|
||||||
client *http.Client
|
client *http.Client
|
||||||
ctx context.Context
|
ctx context.Context
|
||||||
|
mutex sync.RWMutex
|
||||||
serverVersion *version.Version
|
serverVersion *version.Version
|
||||||
getVersionOnce sync.Once
|
getVersionOnce sync.Once
|
||||||
}
|
}
|
||||||
|
@ -47,6 +48,7 @@ type Response struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewClient initializes and returns a API client.
|
// NewClient initializes and returns a API client.
|
||||||
|
// Usage of all gitea.Client methods is concurrency-safe.
|
||||||
func NewClient(url string, options ...func(*Client)) (*Client, error) {
|
func NewClient(url string, options ...func(*Client)) (*Client, error) {
|
||||||
client := &Client{
|
client := &Client{
|
||||||
url: strings.TrimSuffix(url, "/"),
|
url: strings.TrimSuffix(url, "/"),
|
||||||
|
@ -72,14 +74,23 @@ func NewClientWithHTTP(url string, httpClient *http.Client) *Client {
|
||||||
// SetHTTPClient is an option for NewClient to set custom http client
|
// SetHTTPClient is an option for NewClient to set custom http client
|
||||||
func SetHTTPClient(httpClient *http.Client) func(client *Client) {
|
func SetHTTPClient(httpClient *http.Client) func(client *Client) {
|
||||||
return func(client *Client) {
|
return func(client *Client) {
|
||||||
client.client = httpClient
|
client.SetHTTPClient(httpClient)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetHTTPClient replaces default http.Client with user given one.
|
||||||
|
func (c *Client) SetHTTPClient(client *http.Client) {
|
||||||
|
c.mutex.Lock()
|
||||||
|
c.client = client
|
||||||
|
c.mutex.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
// SetToken is an option for NewClient to set token
|
// SetToken is an option for NewClient to set token
|
||||||
func SetToken(token string) func(client *Client) {
|
func SetToken(token string) func(client *Client) {
|
||||||
return func(client *Client) {
|
return func(client *Client) {
|
||||||
|
client.mutex.Lock()
|
||||||
client.accessToken = token
|
client.accessToken = token
|
||||||
|
client.mutex.Unlock()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -92,7 +103,9 @@ func SetBasicAuth(username, password string) func(client *Client) {
|
||||||
|
|
||||||
// SetBasicAuth sets username and password
|
// SetBasicAuth sets username and password
|
||||||
func (c *Client) SetBasicAuth(username, password string) {
|
func (c *Client) SetBasicAuth(username, password string) {
|
||||||
|
c.mutex.Lock()
|
||||||
c.username, c.password = username, password
|
c.username, c.password = username, password
|
||||||
|
c.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetOTP is an option for NewClient to set OTP for 2FA
|
// SetOTP is an option for NewClient to set OTP for 2FA
|
||||||
|
@ -104,7 +117,9 @@ func SetOTP(otp string) func(client *Client) {
|
||||||
|
|
||||||
// SetOTP sets OTP for 2FA
|
// SetOTP sets OTP for 2FA
|
||||||
func (c *Client) SetOTP(otp string) {
|
func (c *Client) SetOTP(otp string) {
|
||||||
|
c.mutex.Lock()
|
||||||
c.otp = otp
|
c.otp = otp
|
||||||
|
c.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetContext is an option for NewClient to set context
|
// SetContext is an option for NewClient to set context
|
||||||
|
@ -116,12 +131,9 @@ func SetContext(ctx context.Context) func(client *Client) {
|
||||||
|
|
||||||
// SetContext set context witch is used for http requests
|
// SetContext set context witch is used for http requests
|
||||||
func (c *Client) SetContext(ctx context.Context) {
|
func (c *Client) SetContext(ctx context.Context) {
|
||||||
|
c.mutex.Lock()
|
||||||
c.ctx = ctx
|
c.ctx = ctx
|
||||||
}
|
c.mutex.Unlock()
|
||||||
|
|
||||||
// SetHTTPClient replaces default http.Client with user given one.
|
|
||||||
func (c *Client) SetHTTPClient(client *http.Client) {
|
|
||||||
c.client = client
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetSudo is an option for NewClient to set sudo header
|
// SetSudo is an option for NewClient to set sudo header
|
||||||
|
@ -133,43 +145,57 @@ func SetSudo(sudo string) func(client *Client) {
|
||||||
|
|
||||||
// SetSudo sets username to impersonate.
|
// SetSudo sets username to impersonate.
|
||||||
func (c *Client) SetSudo(sudo string) {
|
func (c *Client) SetSudo(sudo string) {
|
||||||
|
c.mutex.Lock()
|
||||||
c.sudo = sudo
|
c.sudo = sudo
|
||||||
|
c.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetDebugMode is an option for NewClient to enable debug mode
|
// SetDebugMode is an option for NewClient to enable debug mode
|
||||||
func SetDebugMode() func(client *Client) {
|
func SetDebugMode() func(client *Client) {
|
||||||
return func(client *Client) {
|
return func(client *Client) {
|
||||||
|
client.mutex.Lock()
|
||||||
client.debug = true
|
client.debug = true
|
||||||
|
client.mutex.Unlock()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) getWebResponse(method, path string, body io.Reader) ([]byte, *Response, error) {
|
func (c *Client) getWebResponse(method, path string, body io.Reader) ([]byte, *Response, error) {
|
||||||
if c.debug {
|
c.mutex.RLock()
|
||||||
|
debug := c.debug
|
||||||
|
if debug {
|
||||||
fmt.Printf("%s: %s\nBody: %v\n", method, c.url+path, body)
|
fmt.Printf("%s: %s\nBody: %v\n", method, c.url+path, body)
|
||||||
}
|
}
|
||||||
req, err := http.NewRequestWithContext(c.ctx, method, c.url+path, body)
|
req, err := http.NewRequestWithContext(c.ctx, method, c.url+path, body)
|
||||||
|
|
||||||
|
client := c.client // client ref can change from this point on so safe it
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
resp, err := c.client.Do(req)
|
|
||||||
|
resp, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
data, err := ioutil.ReadAll(resp.Body)
|
data, err := ioutil.ReadAll(resp.Body)
|
||||||
if c.debug {
|
if debug {
|
||||||
fmt.Printf("Response: %v\n\n", resp)
|
fmt.Printf("Response: %v\n\n", resp)
|
||||||
}
|
}
|
||||||
return data, &Response{resp}, nil
|
return data, &Response{resp}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) doRequest(method, path string, header http.Header, body io.Reader) (*Response, error) {
|
func (c *Client) doRequest(method, path string, header http.Header, body io.Reader) (*Response, error) {
|
||||||
if c.debug {
|
c.mutex.RLock()
|
||||||
|
debug := c.debug
|
||||||
|
if debug {
|
||||||
fmt.Printf("%s: %s\nHeader: %v\nBody: %s\n", method, c.url+"/api/v1"+path, header, body)
|
fmt.Printf("%s: %s\nHeader: %v\nBody: %s\n", method, c.url+"/api/v1"+path, header, body)
|
||||||
}
|
}
|
||||||
req, err := http.NewRequestWithContext(c.ctx, method, c.url+"/api/v1"+path, body)
|
req, err := http.NewRequestWithContext(c.ctx, method, c.url+"/api/v1"+path, body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
c.mutex.RUnlock()
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if len(c.accessToken) != 0 {
|
if len(c.accessToken) != 0 {
|
||||||
|
@ -184,20 +210,66 @@ func (c *Client) doRequest(method, path string, header http.Header, body io.Read
|
||||||
if len(c.sudo) != 0 {
|
if len(c.sudo) != 0 {
|
||||||
req.Header.Set("Sudo", c.sudo)
|
req.Header.Set("Sudo", c.sudo)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
client := c.client // client ref can change from this point on so safe it
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
|
||||||
for k, v := range header {
|
for k, v := range header {
|
||||||
req.Header[k] = v
|
req.Header[k] = v
|
||||||
}
|
}
|
||||||
|
|
||||||
resp, err := c.client.Do(req)
|
resp, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if c.debug {
|
if debug {
|
||||||
fmt.Printf("Response: %v\n\n", resp)
|
fmt.Printf("Response: %v\n\n", resp)
|
||||||
}
|
}
|
||||||
return &Response{resp}, nil
|
return &Response{resp}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Converts a response for a HTTP status code indicating an error condition
|
||||||
|
// (non-2XX) to a well-known error value and response body. For non-problematic
|
||||||
|
// (2XX) status codes nil will be returned. Note that on a non-2XX response, the
|
||||||
|
// response body stream will have been read and, hence, is closed on return.
|
||||||
|
func statusCodeToErr(resp *Response) (body []byte, err error) {
|
||||||
|
// no error
|
||||||
|
if resp.StatusCode/100 == 2 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// error: body will be read for details
|
||||||
|
//
|
||||||
|
defer resp.Body.Close()
|
||||||
|
data, err := ioutil.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("body read on HTTP error %d: %v", resp.StatusCode, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch resp.StatusCode {
|
||||||
|
case 403:
|
||||||
|
return data, errors.New("403 Forbidden")
|
||||||
|
case 404:
|
||||||
|
return data, errors.New("404 Not Found")
|
||||||
|
case 409:
|
||||||
|
return data, errors.New("409 Conflict")
|
||||||
|
case 422:
|
||||||
|
return data, fmt.Errorf("422 Unprocessable Entity: %s", string(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
path := resp.Request.URL.Path
|
||||||
|
method := resp.Request.Method
|
||||||
|
header := resp.Request.Header
|
||||||
|
errMap := make(map[string]interface{})
|
||||||
|
if err = json.Unmarshal(data, &errMap); err != nil {
|
||||||
|
// when the JSON can't be parsed, data was probably empty or a
|
||||||
|
// plain string, so we try to return a helpful error anyway
|
||||||
|
return data, fmt.Errorf("Unknown API Error: %d\nRequest: '%s' with '%s' method '%s' header and '%s' body", resp.StatusCode, path, method, header, string(data))
|
||||||
|
}
|
||||||
|
return data, errors.New(errMap["message"].(string))
|
||||||
|
}
|
||||||
|
|
||||||
func (c *Client) getResponse(method, path string, header http.Header, body io.Reader) ([]byte, *Response, error) {
|
func (c *Client) getResponse(method, path string, header http.Header, body io.Reader) ([]byte, *Response, error) {
|
||||||
resp, err := c.doRequest(method, path, header, body)
|
resp, err := c.doRequest(method, path, header, body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -205,32 +277,18 @@ func (c *Client) getResponse(method, path string, header http.Header, body io.Re
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
data, err := ioutil.ReadAll(resp.Body)
|
// check for errors
|
||||||
|
data, err := statusCodeToErr(resp)
|
||||||
|
if err != nil {
|
||||||
|
return data, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// success (2XX), read body
|
||||||
|
data, err = ioutil.ReadAll(resp.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, resp, err
|
return nil, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
switch resp.StatusCode {
|
|
||||||
case 403:
|
|
||||||
return data, resp, errors.New("403 Forbidden")
|
|
||||||
case 404:
|
|
||||||
return data, resp, errors.New("404 Not Found")
|
|
||||||
case 409:
|
|
||||||
return data, resp, errors.New("409 Conflict")
|
|
||||||
case 422:
|
|
||||||
return data, resp, fmt.Errorf("422 Unprocessable Entity: %s", string(data))
|
|
||||||
}
|
|
||||||
|
|
||||||
if resp.StatusCode/100 != 2 {
|
|
||||||
errMap := make(map[string]interface{})
|
|
||||||
if err = json.Unmarshal(data, &errMap); err != nil {
|
|
||||||
// when the JSON can't be parsed, data was probably empty or a plain string,
|
|
||||||
// so we try to return a helpful error anyway
|
|
||||||
return data, resp, fmt.Errorf("Unknown API Error: %d\nRequest: '%s' with '%s' method '%s' header and '%s' body", resp.StatusCode, path, method, header, string(data))
|
|
||||||
}
|
|
||||||
return data, resp, errors.New(errMap["message"].(string))
|
|
||||||
}
|
|
||||||
|
|
||||||
return data, resp, nil
|
return data, resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -223,6 +223,7 @@ type EditIssueOption struct {
|
||||||
Milestone *int64 `json:"milestone"`
|
Milestone *int64 `json:"milestone"`
|
||||||
State *StateType `json:"state"`
|
State *StateType `json:"state"`
|
||||||
Deadline *time.Time `json:"due_date"`
|
Deadline *time.Time `json:"due_date"`
|
||||||
|
RemoveDeadline *bool `json:"unset_due_date"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate the EditIssueOption struct
|
// Validate the EditIssueOption struct
|
||||||
|
@ -252,6 +253,8 @@ func (c *Client) EditIssue(owner, repo string, index int64, opt EditIssueOption)
|
||||||
|
|
||||||
func (c *Client) issueBackwardsCompatibility(issue *Issue) {
|
func (c *Client) issueBackwardsCompatibility(issue *Issue) {
|
||||||
if c.checkServerVersionGreaterThanOrEqual(version1_12_0) != nil {
|
if c.checkServerVersionGreaterThanOrEqual(version1_12_0) != nil {
|
||||||
|
c.mutex.RLock()
|
||||||
issue.HTMLURL = fmt.Sprintf("%s/%s/issues/%d", c.url, issue.Repository.FullName, issue.Index)
|
issue.HTMLURL = fmt.Sprintf("%s/%s/issues/%d", c.url, issue.Repository.FullName, issue.Index)
|
||||||
|
c.mutex.RUnlock()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,7 +12,12 @@ import (
|
||||||
// StopWatch represents a running stopwatch of an issue / pr
|
// StopWatch represents a running stopwatch of an issue / pr
|
||||||
type StopWatch struct {
|
type StopWatch struct {
|
||||||
Created time.Time `json:"created"`
|
Created time.Time `json:"created"`
|
||||||
|
Seconds int64 `json:"seconds"`
|
||||||
|
Duration string `json:"duration"`
|
||||||
IssueIndex int64 `json:"issue_index"`
|
IssueIndex int64 `json:"issue_index"`
|
||||||
|
IssueTitle string `json:"issue_title"`
|
||||||
|
RepoOwnerName string `json:"repo_owner_name"`
|
||||||
|
RepoName string `json:"repo_name"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetMyStopwatches list all stopwatches
|
// GetMyStopwatches list all stopwatches
|
||||||
|
|
|
@ -35,6 +35,7 @@ const (
|
||||||
type PullReview struct {
|
type PullReview struct {
|
||||||
ID int64 `json:"id"`
|
ID int64 `json:"id"`
|
||||||
Reviewer *User `json:"user"`
|
Reviewer *User `json:"user"`
|
||||||
|
ReviewerTeam *Team `json:"team"`
|
||||||
State ReviewStateType `json:"state"`
|
State ReviewStateType `json:"state"`
|
||||||
Body string `json:"body"`
|
Body string `json:"body"`
|
||||||
CommitID string `json:"commit_id"`
|
CommitID string `json:"commit_id"`
|
||||||
|
@ -42,6 +43,7 @@ type PullReview struct {
|
||||||
Stale bool `json:"stale"`
|
Stale bool `json:"stale"`
|
||||||
// Official indicates if the review counts towards the required approval limit, if PR base is a protected branch
|
// Official indicates if the review counts towards the required approval limit, if PR base is a protected branch
|
||||||
Official bool `json:"official"`
|
Official bool `json:"official"`
|
||||||
|
Dismissed bool `json:"dismissed"`
|
||||||
CodeCommentsCount int `json:"comments_count"`
|
CodeCommentsCount int `json:"comments_count"`
|
||||||
Submitted time.Time `json:"submitted_at"`
|
Submitted time.Time `json:"submitted_at"`
|
||||||
|
|
||||||
|
@ -95,6 +97,17 @@ type SubmitPullReviewOptions struct {
|
||||||
Body string `json:"body"`
|
Body string `json:"body"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DismissPullReviewOptions are options to dismiss a pull review
|
||||||
|
type DismissPullReviewOptions struct {
|
||||||
|
Message string `json:"message"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// PullReviewRequestOptions are options to add or remove pull review requests
|
||||||
|
type PullReviewRequestOptions struct {
|
||||||
|
Reviewers []string `json:"reviewers"`
|
||||||
|
TeamReviewers []string `json:"team_reviewers"`
|
||||||
|
}
|
||||||
|
|
||||||
// ListPullReviewsOptions options for listing PullReviews
|
// ListPullReviewsOptions options for listing PullReviews
|
||||||
type ListPullReviewsOptions struct {
|
type ListPullReviewsOptions struct {
|
||||||
ListOptions
|
ListOptions
|
||||||
|
@ -219,3 +232,63 @@ func (c *Client) SubmitPullReview(owner, repo string, index, id int64, opt Submi
|
||||||
jsonHeader, bytes.NewReader(body), r)
|
jsonHeader, bytes.NewReader(body), r)
|
||||||
return r, resp, err
|
return r, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// CreateReviewRequests create review requests to an pull request
|
||||||
|
func (c *Client) CreateReviewRequests(owner, repo string, index int64, opt PullReviewRequestOptions) (*Response, error) {
|
||||||
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(&opt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, resp, err := c.getResponse("POST",
|
||||||
|
fmt.Sprintf("/repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, index),
|
||||||
|
jsonHeader, bytes.NewReader(body))
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteReviewRequests delete review requests to an pull request
|
||||||
|
func (c *Client) DeleteReviewRequests(owner, repo string, index int64, opt PullReviewRequestOptions) (*Response, error) {
|
||||||
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(&opt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, resp, err := c.getResponse("DELETE",
|
||||||
|
fmt.Sprintf("/repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, index),
|
||||||
|
jsonHeader, bytes.NewReader(body))
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DismissPullReview dismiss a review for a pull request
|
||||||
|
func (c *Client) DismissPullReview(owner, repo string, index, id int64, opt DismissPullReviewOptions) (*Response, error) {
|
||||||
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(&opt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, resp, err := c.getResponse("POST",
|
||||||
|
fmt.Sprintf("/repos/%s/%s/pulls/%d/reviews/%d/dismissals", owner, repo, index, id),
|
||||||
|
jsonHeader, bytes.NewReader(body))
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnDismissPullReview cancel to dismiss a review for a pull request
|
||||||
|
func (c *Client) UnDismissPullReview(owner, repo string, index, id int64) (*Response, error) {
|
||||||
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, resp, err := c.getResponse("POST",
|
||||||
|
fmt.Sprintf("/repos/%s/%s/pulls/%d/reviews/%d/undismissals", owner, repo, index, id),
|
||||||
|
jsonHeader, nil)
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
|
@ -21,6 +21,7 @@ type Release struct {
|
||||||
Title string `json:"name"`
|
Title string `json:"name"`
|
||||||
Note string `json:"body"`
|
Note string `json:"body"`
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
|
HTMLURL string `json:"html_url"`
|
||||||
TarURL string `json:"tarball_url"`
|
TarURL string `json:"tarball_url"`
|
||||||
ZipURL string `json:"zipball_url"`
|
ZipURL string `json:"zipball_url"`
|
||||||
IsDraft bool `json:"draft"`
|
IsDraft bool `json:"draft"`
|
||||||
|
@ -132,8 +133,8 @@ func (c *Client) DeleteRelease(user, repo string, id int64) (*Response, error) {
|
||||||
return resp, err
|
return resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteReleaseTag deletes a tag from a repository, if no release refers to it.
|
// DeleteReleaseByTag deletes a release frm a repository by tag
|
||||||
func (c *Client) DeleteReleaseTag(user, repo string, tag string) (*Response, error) {
|
func (c *Client) DeleteReleaseByTag(user, repo string, tag string) (*Response, error) {
|
||||||
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,6 +9,7 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
"net/url"
|
"net/url"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
@ -21,6 +22,32 @@ type Permission struct {
|
||||||
Pull bool `json:"pull"`
|
Pull bool `json:"pull"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// InternalTracker represents settings for internal tracker
|
||||||
|
type InternalTracker struct {
|
||||||
|
// Enable time tracking (Built-in issue tracker)
|
||||||
|
EnableTimeTracker bool `json:"enable_time_tracker"`
|
||||||
|
// Let only contributors track time (Built-in issue tracker)
|
||||||
|
AllowOnlyContributorsToTrackTime bool `json:"allow_only_contributors_to_track_time"`
|
||||||
|
// Enable dependencies for issues and pull requests (Built-in issue tracker)
|
||||||
|
EnableIssueDependencies bool `json:"enable_issue_dependencies"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExternalTracker represents settings for external tracker
|
||||||
|
type ExternalTracker struct {
|
||||||
|
// URL of external issue tracker.
|
||||||
|
ExternalTrackerURL string `json:"external_tracker_url"`
|
||||||
|
// External Issue Tracker URL Format. Use the placeholders {user}, {repo} and {index} for the username, repository name and issue index.
|
||||||
|
ExternalTrackerFormat string `json:"external_tracker_format"`
|
||||||
|
// External Issue Tracker Number Format, either `numeric` or `alphanumeric`
|
||||||
|
ExternalTrackerStyle string `json:"external_tracker_style"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExternalWiki represents setting for external wiki
|
||||||
|
type ExternalWiki struct {
|
||||||
|
// URL of external wiki.
|
||||||
|
ExternalWikiURL string `json:"external_wiki_url"`
|
||||||
|
}
|
||||||
|
|
||||||
// Repository represents a repository
|
// Repository represents a repository
|
||||||
type Repository struct {
|
type Repository struct {
|
||||||
ID int64 `json:"id"`
|
ID int64 `json:"id"`
|
||||||
|
@ -31,6 +58,7 @@ type Repository struct {
|
||||||
Empty bool `json:"empty"`
|
Empty bool `json:"empty"`
|
||||||
Private bool `json:"private"`
|
Private bool `json:"private"`
|
||||||
Fork bool `json:"fork"`
|
Fork bool `json:"fork"`
|
||||||
|
Template bool `json:"template"`
|
||||||
Parent *Repository `json:"parent"`
|
Parent *Repository `json:"parent"`
|
||||||
Mirror bool `json:"mirror"`
|
Mirror bool `json:"mirror"`
|
||||||
Size int `json:"size"`
|
Size int `json:"size"`
|
||||||
|
@ -43,20 +71,28 @@ type Repository struct {
|
||||||
Forks int `json:"forks_count"`
|
Forks int `json:"forks_count"`
|
||||||
Watchers int `json:"watchers_count"`
|
Watchers int `json:"watchers_count"`
|
||||||
OpenIssues int `json:"open_issues_count"`
|
OpenIssues int `json:"open_issues_count"`
|
||||||
|
OpenPulls int `json:"open_pr_counter"`
|
||||||
|
Releases int `json:"release_counter"`
|
||||||
DefaultBranch string `json:"default_branch"`
|
DefaultBranch string `json:"default_branch"`
|
||||||
Archived bool `json:"archived"`
|
Archived bool `json:"archived"`
|
||||||
Created time.Time `json:"created_at"`
|
Created time.Time `json:"created_at"`
|
||||||
Updated time.Time `json:"updated_at"`
|
Updated time.Time `json:"updated_at"`
|
||||||
Permissions *Permission `json:"permissions,omitempty"`
|
Permissions *Permission `json:"permissions,omitempty"`
|
||||||
HasIssues bool `json:"has_issues"`
|
HasIssues bool `json:"has_issues"`
|
||||||
|
InternalTracker *InternalTracker `json:"internal_tracker,omitempty"`
|
||||||
|
ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
|
||||||
HasWiki bool `json:"has_wiki"`
|
HasWiki bool `json:"has_wiki"`
|
||||||
|
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
|
||||||
HasPullRequests bool `json:"has_pull_requests"`
|
HasPullRequests bool `json:"has_pull_requests"`
|
||||||
|
HasProjects bool `json:"has_projects"`
|
||||||
IgnoreWhitespaceConflicts bool `json:"ignore_whitespace_conflicts"`
|
IgnoreWhitespaceConflicts bool `json:"ignore_whitespace_conflicts"`
|
||||||
AllowMerge bool `json:"allow_merge_commits"`
|
AllowMerge bool `json:"allow_merge_commits"`
|
||||||
AllowRebase bool `json:"allow_rebase"`
|
AllowRebase bool `json:"allow_rebase"`
|
||||||
AllowRebaseMerge bool `json:"allow_rebase_explicit"`
|
AllowRebaseMerge bool `json:"allow_rebase_explicit"`
|
||||||
AllowSquash bool `json:"allow_squash_merge"`
|
AllowSquash bool `json:"allow_squash_merge"`
|
||||||
AvatarURL string `json:"avatar_url"`
|
AvatarURL string `json:"avatar_url"`
|
||||||
|
Internal bool `json:"internal"`
|
||||||
|
MirrorInterval string `json:"mirror_interval"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// RepoType represent repo type
|
// RepoType represent repo type
|
||||||
|
@ -346,14 +382,24 @@ type EditRepoOption struct {
|
||||||
// Note: you will get a 422 error if the organization restricts changing repository visibility to organization
|
// Note: you will get a 422 error if the organization restricts changing repository visibility to organization
|
||||||
// owners and a non-owner tries to change the value of private.
|
// owners and a non-owner tries to change the value of private.
|
||||||
Private *bool `json:"private,omitempty"`
|
Private *bool `json:"private,omitempty"`
|
||||||
|
// either `true` to make this repository a template or `false` to make it a normal repository
|
||||||
|
Template *bool `json:"template,omitempty"`
|
||||||
// either `true` to enable issues for this repository or `false` to disable them.
|
// either `true` to enable issues for this repository or `false` to disable them.
|
||||||
HasIssues *bool `json:"has_issues,omitempty"`
|
HasIssues *bool `json:"has_issues,omitempty"`
|
||||||
|
// set this structure to configure internal issue tracker (requires has_issues)
|
||||||
|
InternalTracker *InternalTracker `json:"internal_tracker,omitempty"`
|
||||||
|
// set this structure to use external issue tracker (requires has_issues)
|
||||||
|
ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
|
||||||
// either `true` to enable the wiki for this repository or `false` to disable it.
|
// either `true` to enable the wiki for this repository or `false` to disable it.
|
||||||
HasWiki *bool `json:"has_wiki,omitempty"`
|
HasWiki *bool `json:"has_wiki,omitempty"`
|
||||||
|
// set this structure to use external wiki instead of internal (requires has_wiki)
|
||||||
|
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
|
||||||
// sets the default branch for this repository.
|
// sets the default branch for this repository.
|
||||||
DefaultBranch *string `json:"default_branch,omitempty"`
|
DefaultBranch *string `json:"default_branch,omitempty"`
|
||||||
// either `true` to allow pull requests, or `false` to prevent pull request.
|
// either `true` to allow pull requests, or `false` to prevent pull request.
|
||||||
HasPullRequests *bool `json:"has_pull_requests,omitempty"`
|
HasPullRequests *bool `json:"has_pull_requests,omitempty"`
|
||||||
|
// either `true` to enable project unit, or `false` to disable them.
|
||||||
|
HasProjects *bool `json:"has_projects,omitempty"`
|
||||||
// either `true` to ignore whitespace for conflicts, or `false` to not ignore whitespace. `has_pull_requests` must be `true`.
|
// either `true` to ignore whitespace for conflicts, or `false` to not ignore whitespace. `has_pull_requests` must be `true`.
|
||||||
IgnoreWhitespaceConflicts *bool `json:"ignore_whitespace_conflicts,omitempty"`
|
IgnoreWhitespaceConflicts *bool `json:"ignore_whitespace_conflicts,omitempty"`
|
||||||
// either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. `has_pull_requests` must be `true`.
|
// either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. `has_pull_requests` must be `true`.
|
||||||
|
@ -366,6 +412,8 @@ type EditRepoOption struct {
|
||||||
AllowSquash *bool `json:"allow_squash_merge,omitempty"`
|
AllowSquash *bool `json:"allow_squash_merge,omitempty"`
|
||||||
// set to `true` to archive this repository.
|
// set to `true` to archive this repository.
|
||||||
Archived *bool `json:"archived,omitempty"`
|
Archived *bool `json:"archived,omitempty"`
|
||||||
|
// set to a string like `8h30m0s` to set the mirror interval time
|
||||||
|
MirrorInterval *string `json:"mirror_interval,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// EditRepo edit the properties of a repository
|
// EditRepo edit the properties of a repository
|
||||||
|
@ -420,3 +468,20 @@ const (
|
||||||
func (c *Client) GetArchive(owner, repo, ref string, ext ArchiveType) ([]byte, *Response, error) {
|
func (c *Client) GetArchive(owner, repo, ref string, ext ArchiveType) ([]byte, *Response, error) {
|
||||||
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/archive/%s%s", owner, repo, url.PathEscape(ref), ext), nil, nil)
|
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/archive/%s%s", owner, repo, url.PathEscape(ref), ext), nil, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetArchiveReader gets a `git archive` for a particular tree-ish git reference
|
||||||
|
// such as a branch name (`master`), a commit hash (`70b7c74b33`), a tag
|
||||||
|
// (`v1.2.1`). The archive is returned as a byte stream in a ReadCloser. It is
|
||||||
|
// the responsibility of the client to close the reader.
|
||||||
|
func (c *Client) GetArchiveReader(owner, repo, ref string, ext ArchiveType) (io.ReadCloser, *Response, error) {
|
||||||
|
resp, err := c.doRequest("GET", fmt.Sprintf("/repos/%s/%s/archive/%s%s", owner, repo, url.PathEscape(ref), ext), nil, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := statusCodeToErr(resp); err != nil {
|
||||||
|
return nil, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return resp.Body, resp, nil
|
||||||
|
}
|
||||||
|
|
|
@ -30,6 +30,7 @@ type BranchProtection struct {
|
||||||
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
||||||
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
||||||
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
|
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
|
||||||
|
BlockOnOfficialReviewRequests bool `json:"block_on_official_review_requests"`
|
||||||
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
|
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
|
||||||
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
|
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
|
||||||
RequireSignedCommits bool `json:"require_signed_commits"`
|
RequireSignedCommits bool `json:"require_signed_commits"`
|
||||||
|
@ -56,6 +57,7 @@ type CreateBranchProtectionOption struct {
|
||||||
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
||||||
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
||||||
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
|
BlockOnRejectedReviews bool `json:"block_on_rejected_reviews"`
|
||||||
|
BlockOnOfficialReviewRequests bool `json:"block_on_official_review_requests"`
|
||||||
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
|
BlockOnOutdatedBranch bool `json:"block_on_outdated_branch"`
|
||||||
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
|
DismissStaleApprovals bool `json:"dismiss_stale_approvals"`
|
||||||
RequireSignedCommits bool `json:"require_signed_commits"`
|
RequireSignedCommits bool `json:"require_signed_commits"`
|
||||||
|
@ -79,6 +81,7 @@ type EditBranchProtectionOption struct {
|
||||||
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
ApprovalsWhitelistUsernames []string `json:"approvals_whitelist_username"`
|
||||||
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
ApprovalsWhitelistTeams []string `json:"approvals_whitelist_teams"`
|
||||||
BlockOnRejectedReviews *bool `json:"block_on_rejected_reviews"`
|
BlockOnRejectedReviews *bool `json:"block_on_rejected_reviews"`
|
||||||
|
BlockOnOfficialReviewRequests *bool `json:"block_on_official_review_requests"`
|
||||||
BlockOnOutdatedBranch *bool `json:"block_on_outdated_branch"`
|
BlockOnOutdatedBranch *bool `json:"block_on_outdated_branch"`
|
||||||
DismissStaleApprovals *bool `json:"dismiss_stale_approvals"`
|
DismissStaleApprovals *bool `json:"dismiss_stale_approvals"`
|
||||||
RequireSignedCommits *bool `json:"require_signed_commits"`
|
RequireSignedCommits *bool `json:"require_signed_commits"`
|
||||||
|
|
|
@ -47,6 +47,7 @@ type Commit struct {
|
||||||
Author *User `json:"author"`
|
Author *User `json:"author"`
|
||||||
Committer *User `json:"committer"`
|
Committer *User `json:"committer"`
|
||||||
Parents []*CommitMeta `json:"parents"`
|
Parents []*CommitMeta `json:"parents"`
|
||||||
|
Files []*CommitAffectedFiles `json:"files"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// CommitDateOptions store dates for GIT_AUTHOR_DATE and GIT_COMMITTER_DATE
|
// CommitDateOptions store dates for GIT_AUTHOR_DATE and GIT_COMMITTER_DATE
|
||||||
|
@ -55,6 +56,11 @@ type CommitDateOptions struct {
|
||||||
Committer time.Time `json:"committer"`
|
Committer time.Time `json:"committer"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// CommitAffectedFiles store information about files affected by the commit
|
||||||
|
type CommitAffectedFiles struct {
|
||||||
|
Filename string `json:"filename"`
|
||||||
|
}
|
||||||
|
|
||||||
// GetSingleCommit returns a single commit
|
// GetSingleCommit returns a single commit
|
||||||
func (c *Client) GetSingleCommit(user, repo, commitID string) (*Commit, *Response, error) {
|
func (c *Client) GetSingleCommit(user, repo, commitID string) (*Commit, *Response, error) {
|
||||||
commit := new(Commit)
|
commit := new(Commit)
|
||||||
|
|
|
@ -9,6 +9,8 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
// FileOptions options for all file APIs
|
// FileOptions options for all file APIs
|
||||||
|
@ -23,6 +25,8 @@ type FileOptions struct {
|
||||||
Author Identity `json:"author"`
|
Author Identity `json:"author"`
|
||||||
Committer Identity `json:"committer"`
|
Committer Identity `json:"committer"`
|
||||||
Dates CommitDateOptions `json:"dates"`
|
Dates CommitDateOptions `json:"dates"`
|
||||||
|
// Add a Signed-off-by trailer by the committer at the end of the commit log message.
|
||||||
|
Signoff bool `json:"signoff"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// CreateFileOptions options for creating files
|
// CreateFileOptions options for creating files
|
||||||
|
@ -112,22 +116,63 @@ type FileDeleteResponse struct {
|
||||||
Verification *PayloadCommitVerification `json:"verification"`
|
Verification *PayloadCommitVerification `json:"verification"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetFile downloads a file of repository, ref can be branch/tag/commit.
|
// pathEscapeSegments escapes segments of a path while not escaping forward slash
|
||||||
// e.g.: ref -> master, tree -> macaron.go(no leading slash)
|
func pathEscapeSegments(path string) string {
|
||||||
func (c *Client) GetFile(user, repo, ref, tree string) ([]byte, *Response, error) {
|
slice := strings.Split(path, "/")
|
||||||
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s/%s", user, repo, ref, tree), nil, nil)
|
for index := range slice {
|
||||||
|
slice[index] = url.PathEscape(slice[index])
|
||||||
|
}
|
||||||
|
escapedPath := strings.Join(slice, "/")
|
||||||
|
return escapedPath
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetContents get the metadata and contents (if a file) of an entry in a repository, or a list of entries if a dir
|
// GetFile downloads a file of repository, ref can be branch/tag/commit.
|
||||||
|
// e.g.: ref -> master, filepath -> README.md (no leading slash)
|
||||||
|
func (c *Client) GetFile(owner, repo, ref, filepath string) ([]byte, *Response, error) {
|
||||||
|
filepath = pathEscapeSegments(filepath)
|
||||||
|
if c.checkServerVersionGreaterThanOrEqual(version1_14_0) != nil {
|
||||||
|
ref = pathEscapeSegments(ref)
|
||||||
|
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s/%s", owner, repo, ref, filepath), nil, nil)
|
||||||
|
}
|
||||||
|
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/raw/%s?ref=%s", owner, repo, filepath, url.QueryEscape(ref)), nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetContents get the metadata and contents of a file in a repository
|
||||||
// ref is optional
|
// ref is optional
|
||||||
func (c *Client) GetContents(owner, repo, ref, filepath string) (*ContentsResponse, *Response, error) {
|
func (c *Client) GetContents(owner, repo, ref, filepath string) (*ContentsResponse, *Response, error) {
|
||||||
|
data, resp, err := c.getDirOrFileContents(owner, repo, ref, filepath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, resp, err
|
||||||
|
}
|
||||||
cr := new(ContentsResponse)
|
cr := new(ContentsResponse)
|
||||||
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/contents/%s?ref=%s", owner, repo, filepath, ref), jsonHeader, nil, cr)
|
if json.Unmarshal(data, &cr) != nil {
|
||||||
|
return nil, resp, fmt.Errorf("expect file, got directory")
|
||||||
|
}
|
||||||
return cr, resp, err
|
return cr, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ListContents gets a list of entries in a dir
|
||||||
|
// ref is optional
|
||||||
|
func (c *Client) ListContents(owner, repo, ref, filepath string) ([]*ContentsResponse, *Response, error) {
|
||||||
|
data, resp, err := c.getDirOrFileContents(owner, repo, ref, filepath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, resp, err
|
||||||
|
}
|
||||||
|
crl := make([]*ContentsResponse, 0)
|
||||||
|
if json.Unmarshal(data, &crl) != nil {
|
||||||
|
return nil, resp, fmt.Errorf("expect directory, got file")
|
||||||
|
}
|
||||||
|
return crl, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Client) getDirOrFileContents(owner, repo, ref, filepath string) ([]byte, *Response, error) {
|
||||||
|
filepath = pathEscapeSegments(strings.TrimPrefix(filepath, "/"))
|
||||||
|
return c.getResponse("GET", fmt.Sprintf("/repos/%s/%s/contents/%s?ref=%s", owner, repo, filepath, url.QueryEscape(ref)), jsonHeader, nil)
|
||||||
|
}
|
||||||
|
|
||||||
// CreateFile create a file in a repository
|
// CreateFile create a file in a repository
|
||||||
func (c *Client) CreateFile(owner, repo, filepath string, opt CreateFileOptions) (*FileResponse, *Response, error) {
|
func (c *Client) CreateFile(owner, repo, filepath string, opt CreateFileOptions) (*FileResponse, *Response, error) {
|
||||||
|
filepath = pathEscapeSegments(filepath)
|
||||||
var err error
|
var err error
|
||||||
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
|
@ -144,6 +189,7 @@ func (c *Client) CreateFile(owner, repo, filepath string, opt CreateFileOptions)
|
||||||
|
|
||||||
// UpdateFile update a file in a repository
|
// UpdateFile update a file in a repository
|
||||||
func (c *Client) UpdateFile(owner, repo, filepath string, opt UpdateFileOptions) (*FileResponse, *Response, error) {
|
func (c *Client) UpdateFile(owner, repo, filepath string, opt UpdateFileOptions) (*FileResponse, *Response, error) {
|
||||||
|
filepath = pathEscapeSegments(filepath)
|
||||||
var err error
|
var err error
|
||||||
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
|
@ -160,6 +206,7 @@ func (c *Client) UpdateFile(owner, repo, filepath string, opt UpdateFileOptions)
|
||||||
|
|
||||||
// DeleteFile delete a file from repository
|
// DeleteFile delete a file from repository
|
||||||
func (c *Client) DeleteFile(owner, repo, filepath string, opt DeleteFileOptions) (*Response, error) {
|
func (c *Client) DeleteFile(owner, repo, filepath string, opt DeleteFileOptions) (*Response, error) {
|
||||||
|
filepath = pathEscapeSegments(filepath)
|
||||||
var err error
|
var err error
|
||||||
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
if opt.BranchName, err = c.setDefaultBranchForOldVersions(owner, repo, opt.BranchName); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
|
@ -22,10 +22,8 @@ const (
|
||||||
GitServiceGitlab GitServiceType = "gitlab"
|
GitServiceGitlab GitServiceType = "gitlab"
|
||||||
// GitServiceGitea represents a gitea service
|
// GitServiceGitea represents a gitea service
|
||||||
GitServiceGitea GitServiceType = "gitea"
|
GitServiceGitea GitServiceType = "gitea"
|
||||||
|
// GitServiceGogs represents a gogs service
|
||||||
// Not supported jet
|
GitServiceGogs GitServiceType = "gogs"
|
||||||
// // GitServiceGogs represents a gogs service
|
|
||||||
// GitServiceGogs GitServiceType = "gogs"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// MigrateRepoOption options for migrating a repository from an external service
|
// MigrateRepoOption options for migrating a repository from an external service
|
||||||
|
@ -48,6 +46,7 @@ type MigrateRepoOption struct {
|
||||||
Issues bool `json:"issues"`
|
Issues bool `json:"issues"`
|
||||||
PullRequests bool `json:"pull_requests"`
|
PullRequests bool `json:"pull_requests"`
|
||||||
Releases bool `json:"releases"`
|
Releases bool `json:"releases"`
|
||||||
|
MirrorInterval string `json:"mirror_interval"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate the MigrateRepoOption struct
|
// Validate the MigrateRepoOption struct
|
||||||
|
@ -67,17 +66,24 @@ func (opt *MigrateRepoOption) Validate(c *Client) error {
|
||||||
switch opt.Service {
|
switch opt.Service {
|
||||||
case GitServiceGithub:
|
case GitServiceGithub:
|
||||||
if len(opt.AuthToken) == 0 {
|
if len(opt.AuthToken) == 0 {
|
||||||
return fmt.Errorf("github require token authentication")
|
return fmt.Errorf("github requires token authentication")
|
||||||
}
|
}
|
||||||
case GitServiceGitlab, GitServiceGitea:
|
case GitServiceGitlab, GitServiceGitea:
|
||||||
if len(opt.AuthToken) == 0 {
|
if len(opt.AuthToken) == 0 {
|
||||||
return fmt.Errorf("%s require token authentication", opt.Service)
|
return fmt.Errorf("%s requires token authentication", opt.Service)
|
||||||
}
|
}
|
||||||
// Gitlab is supported since 1.12.0 but api cant handle it until 1.13.0
|
// Gitlab is supported since 1.12.0 but api cant handle it until 1.13.0
|
||||||
// https://github.com/go-gitea/gitea/pull/12672
|
// https://github.com/go-gitea/gitea/pull/12672
|
||||||
if c.checkServerVersionGreaterThanOrEqual(version1_13_0) != nil {
|
if c.checkServerVersionGreaterThanOrEqual(version1_13_0) != nil {
|
||||||
return fmt.Errorf("migrate from service %s need gitea >= 1.13.0", opt.Service)
|
return fmt.Errorf("migrate from service %s need gitea >= 1.13.0", opt.Service)
|
||||||
}
|
}
|
||||||
|
case GitServiceGogs:
|
||||||
|
if len(opt.AuthToken) == 0 {
|
||||||
|
return fmt.Errorf("gogs requires token authentication")
|
||||||
|
}
|
||||||
|
if c.checkServerVersionGreaterThanOrEqual(version1_14_0) != nil {
|
||||||
|
return fmt.Errorf("migrate from service gogs need gitea >= 1.14.0")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,81 @@
|
||||||
|
// Copyright 2021 The Gitea Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a MIT-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package gitea
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ListStargazersOptions options for listing a repository's stargazers
|
||||||
|
type ListStargazersOptions struct {
|
||||||
|
ListOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListRepoStargazers list a repository's stargazers
|
||||||
|
func (c *Client) ListRepoStargazers(user, repo string, opt ListStargazersOptions) ([]*User, *Response, error) {
|
||||||
|
opt.setDefaults()
|
||||||
|
stargazers := make([]*User, 0, opt.PageSize)
|
||||||
|
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/stargazers?%s", user, repo, opt.getURLQuery().Encode()), nil, nil, &stargazers)
|
||||||
|
return stargazers, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStarredRepos returns the repos that the given user has starred
|
||||||
|
func (c *Client) GetStarredRepos(user string) ([]*Repository, *Response, error) {
|
||||||
|
repos := make([]*Repository, 0, 10)
|
||||||
|
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/starred", user), jsonHeader, nil, &repos)
|
||||||
|
return repos, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMyStarredRepos returns the repos that the authenticated user has starred
|
||||||
|
func (c *Client) GetMyStarredRepos() ([]*Repository, *Response, error) {
|
||||||
|
repos := make([]*Repository, 0, 10)
|
||||||
|
resp, err := c.getParsedResponse("GET", "/user/starred", jsonHeader, nil, &repos)
|
||||||
|
return repos, resp, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsRepoStarring returns whether the authenticated user has starred the repo or not
|
||||||
|
func (c *Client) IsRepoStarring(user, repo string) (bool, *Response, error) {
|
||||||
|
_, resp, err := c.getResponse("GET", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
|
||||||
|
if resp != nil {
|
||||||
|
switch resp.StatusCode {
|
||||||
|
case http.StatusNotFound:
|
||||||
|
return false, resp, nil
|
||||||
|
case http.StatusNoContent:
|
||||||
|
return true, resp, nil
|
||||||
|
default:
|
||||||
|
return false, resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// StarRepo star specified repo as the authenticated user
|
||||||
|
func (c *Client) StarRepo(user, repo string) (*Response, error) {
|
||||||
|
_, resp, err := c.getResponse("PUT", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
|
||||||
|
if resp != nil {
|
||||||
|
switch resp.StatusCode {
|
||||||
|
case http.StatusNoContent:
|
||||||
|
return resp, nil
|
||||||
|
default:
|
||||||
|
return resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnStarRepo remove star to specified repo as the authenticated user
|
||||||
|
func (c *Client) UnStarRepo(user, repo string) (*Response, error) {
|
||||||
|
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/user/starred/%s/%s", user, repo), jsonHeader, nil)
|
||||||
|
if resp != nil {
|
||||||
|
switch resp.StatusCode {
|
||||||
|
case http.StatusNoContent:
|
||||||
|
return resp, nil
|
||||||
|
default:
|
||||||
|
return resp, fmt.Errorf("unexpected status code '%d'", resp.StatusCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
|
@ -29,3 +29,14 @@ func (c *Client) ListRepoTags(user, repo string, opt ListRepoTagsOptions) ([]*Ta
|
||||||
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/tags?%s", user, repo, opt.getURLQuery().Encode()), nil, nil, &tags)
|
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/repos/%s/%s/tags?%s", user, repo, opt.getURLQuery().Encode()), nil, nil, &tags)
|
||||||
return tags, resp, err
|
return tags, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DeleteTag deletes a tag from a repository, if no release refers to it
|
||||||
|
func (c *Client) DeleteTag(user, repo string, tag string) (*Response, error) {
|
||||||
|
if err := c.checkServerVersionGreaterThanOrEqual(version1_14_0); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
_, resp, err := c.getResponse("DELETE",
|
||||||
|
fmt.Sprintf("/repos/%s/%s/tags/%s", user, repo, tag),
|
||||||
|
nil, nil)
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
|
|
|
@ -6,6 +6,7 @@ package gitea
|
||||||
|
|
||||||
// GlobalUISettings represent the global ui settings of a gitea instance witch is exposed by API
|
// GlobalUISettings represent the global ui settings of a gitea instance witch is exposed by API
|
||||||
type GlobalUISettings struct {
|
type GlobalUISettings struct {
|
||||||
|
DefaultTheme string `json:"default_theme"`
|
||||||
AllowedReactions []string `json:"allowed_reactions"`
|
AllowedReactions []string `json:"allowed_reactions"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -13,6 +14,7 @@ type GlobalUISettings struct {
|
||||||
type GlobalRepoSettings struct {
|
type GlobalRepoSettings struct {
|
||||||
MirrorsDisabled bool `json:"mirrors_disabled"`
|
MirrorsDisabled bool `json:"mirrors_disabled"`
|
||||||
HTTPGitDisabled bool `json:"http_git_disabled"`
|
HTTPGitDisabled bool `json:"http_git_disabled"`
|
||||||
|
MigrationsDisabled bool `json:"migrations_disabled"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// GlobalAPISettings contains global api settings exposed by it
|
// GlobalAPISettings contains global api settings exposed by it
|
||||||
|
|
|
@ -27,12 +27,15 @@ type ListAccessTokensOptions struct {
|
||||||
|
|
||||||
// ListAccessTokens lists all the access tokens of user
|
// ListAccessTokens lists all the access tokens of user
|
||||||
func (c *Client) ListAccessTokens(opts ListAccessTokensOptions) ([]*AccessToken, *Response, error) {
|
func (c *Client) ListAccessTokens(opts ListAccessTokensOptions) ([]*AccessToken, *Response, error) {
|
||||||
if len(c.username) == 0 {
|
c.mutex.RLock()
|
||||||
|
username := c.username
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
if len(username) == 0 {
|
||||||
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
||||||
}
|
}
|
||||||
opts.setDefaults()
|
opts.setDefaults()
|
||||||
tokens := make([]*AccessToken, 0, opts.PageSize)
|
tokens := make([]*AccessToken, 0, opts.PageSize)
|
||||||
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/tokens?%s", c.username, opts.getURLQuery().Encode()), jsonHeader, nil, &tokens)
|
resp, err := c.getParsedResponse("GET", fmt.Sprintf("/users/%s/tokens?%s", username, opts.getURLQuery().Encode()), jsonHeader, nil, &tokens)
|
||||||
return tokens, resp, err
|
return tokens, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -43,7 +46,10 @@ type CreateAccessTokenOption struct {
|
||||||
|
|
||||||
// CreateAccessToken create one access token with options
|
// CreateAccessToken create one access token with options
|
||||||
func (c *Client) CreateAccessToken(opt CreateAccessTokenOption) (*AccessToken, *Response, error) {
|
func (c *Client) CreateAccessToken(opt CreateAccessTokenOption) (*AccessToken, *Response, error) {
|
||||||
if len(c.username) == 0 {
|
c.mutex.RLock()
|
||||||
|
username := c.username
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
if len(username) == 0 {
|
||||||
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
return nil, nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
||||||
}
|
}
|
||||||
body, err := json.Marshal(&opt)
|
body, err := json.Marshal(&opt)
|
||||||
|
@ -51,13 +57,16 @@ func (c *Client) CreateAccessToken(opt CreateAccessTokenOption) (*AccessToken, *
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
t := new(AccessToken)
|
t := new(AccessToken)
|
||||||
resp, err := c.getParsedResponse("POST", fmt.Sprintf("/users/%s/tokens", c.username), jsonHeader, bytes.NewReader(body), t)
|
resp, err := c.getParsedResponse("POST", fmt.Sprintf("/users/%s/tokens", username), jsonHeader, bytes.NewReader(body), t)
|
||||||
return t, resp, err
|
return t, resp, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteAccessToken delete token, identified by ID and if not available by name
|
// DeleteAccessToken delete token, identified by ID and if not available by name
|
||||||
func (c *Client) DeleteAccessToken(value interface{}) (*Response, error) {
|
func (c *Client) DeleteAccessToken(value interface{}) (*Response, error) {
|
||||||
if len(c.username) == 0 {
|
c.mutex.RLock()
|
||||||
|
username := c.username
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
if len(username) == 0 {
|
||||||
return nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
return nil, fmt.Errorf("\"username\" not set: only BasicAuth allowed")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -75,6 +84,6 @@ func (c *Client) DeleteAccessToken(value interface{}) (*Response, error) {
|
||||||
return nil, fmt.Errorf("only string and int64 supported")
|
return nil, fmt.Errorf("only string and int64 supported")
|
||||||
}
|
}
|
||||||
|
|
||||||
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/users/%s/tokens/%s", c.username, token), jsonHeader, nil)
|
_, resp, err := c.getResponse("DELETE", fmt.Sprintf("/users/%s/tokens/%s", username, token), jsonHeader, nil)
|
||||||
return resp, err
|
return resp, err
|
||||||
}
|
}
|
||||||
|
|
|
@ -31,7 +31,10 @@ func (c *Client) CheckServerVersionConstraint(constraint string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if !check.Check(c.serverVersion) {
|
if !check.Check(c.serverVersion) {
|
||||||
return fmt.Errorf("gitea server at %s does not satisfy version constraint %s", c.url, constraint)
|
c.mutex.RLock()
|
||||||
|
url := c.url
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
return fmt.Errorf("gitea server at %s does not satisfy version constraint %s", url, constraint)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -51,7 +54,10 @@ func (c *Client) checkServerVersionGreaterThanOrEqual(v *version.Version) error
|
||||||
}
|
}
|
||||||
|
|
||||||
if !c.serverVersion.GreaterThanOrEqual(v) {
|
if !c.serverVersion.GreaterThanOrEqual(v) {
|
||||||
return fmt.Errorf("gitea server at %s is older than %s", c.url, v.Original())
|
c.mutex.RLock()
|
||||||
|
url := c.url
|
||||||
|
c.mutex.RUnlock()
|
||||||
|
return fmt.Errorf("gitea server at %s is older than %s", url, v.Original())
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -105,7 +105,8 @@ func (i *Input) OnChange(key rune, config *PromptConfig) (bool, error) {
|
||||||
}
|
}
|
||||||
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
||||||
if i.answer != "" {
|
if i.answer != "" {
|
||||||
i.answer = i.answer[0 : len(i.answer)-1]
|
runeAnswer := []rune(i.answer)
|
||||||
|
i.answer = string(runeAnswer[0 : len(runeAnswer)-1])
|
||||||
}
|
}
|
||||||
} else if key >= terminal.KeySpace {
|
} else if key >= terminal.KeySpace {
|
||||||
i.answer += string(key)
|
i.answer += string(key)
|
||||||
|
|
|
@ -113,7 +113,8 @@ func (m *MultiSelect) OnChange(key rune, config *PromptConfig) {
|
||||||
m.filter = ""
|
m.filter = ""
|
||||||
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
||||||
if m.filter != "" {
|
if m.filter != "" {
|
||||||
m.filter = m.filter[0 : len(m.filter)-1]
|
runeFilter := []rune(m.filter)
|
||||||
|
m.filter = string(runeFilter[0 : len(runeFilter)-1])
|
||||||
}
|
}
|
||||||
} else if key >= terminal.KeySpace {
|
} else if key >= terminal.KeySpace {
|
||||||
m.filter += string(key)
|
m.filter += string(key)
|
||||||
|
|
|
@ -114,8 +114,9 @@ func (s *Select) OnChange(key rune, config *PromptConfig) bool {
|
||||||
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
} else if key == terminal.KeyDelete || key == terminal.KeyBackspace {
|
||||||
// if there is content in the filter to delete
|
// if there is content in the filter to delete
|
||||||
if s.filter != "" {
|
if s.filter != "" {
|
||||||
|
runeFilter := []rune(s.filter)
|
||||||
// subtract a line from the current filter
|
// subtract a line from the current filter
|
||||||
s.filter = s.filter[0 : len(s.filter)-1]
|
s.filter = string(runeFilter[0 : len(runeFilter)-1])
|
||||||
// we removed the last value in the filter
|
// we removed the last value in the filter
|
||||||
}
|
}
|
||||||
} else if key >= terminal.KeySpace {
|
} else if key >= terminal.KeySpace {
|
||||||
|
|
|
@ -18,18 +18,21 @@ func TransformString(f func(s string) string) Transformer {
|
||||||
return func(ans interface{}) interface{} {
|
return func(ans interface{}) interface{} {
|
||||||
// if the answer value passed in is the zero value of the appropriate type
|
// if the answer value passed in is the zero value of the appropriate type
|
||||||
if isZero(reflect.ValueOf(ans)) {
|
if isZero(reflect.ValueOf(ans)) {
|
||||||
// skip this `Transformer` by returning a nil value.
|
// skip this `Transformer` by returning a zero value of string.
|
||||||
// The original answer will be not affected,
|
// The original answer will be not affected,
|
||||||
// see survey.go#L125.
|
// see survey.go#L125.
|
||||||
return nil
|
// A zero value of string should be returned to be handled by
|
||||||
|
// next Transformer in a composed Tranformer,
|
||||||
|
// see tranform.go#L75
|
||||||
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// "ans" is never nil here, so we don't have to check that
|
// "ans" is never nil here, so we don't have to check that
|
||||||
// see survey.go#L97 for more.
|
// see survey.go#L338 for more.
|
||||||
// Make sure that the the answer's value was a typeof string.
|
// Make sure that the the answer's value was a typeof string.
|
||||||
s, ok := ans.(string)
|
s, ok := ans.(string)
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
return f(s)
|
return f(s)
|
||||||
|
|
|
@ -3,7 +3,7 @@ module github.com/Microsoft/go-winio
|
||||||
go 1.12
|
go 1.12
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/pkg/errors v0.8.1
|
github.com/pkg/errors v0.9.1
|
||||||
github.com/sirupsen/logrus v1.4.1
|
github.com/sirupsen/logrus v1.4.1
|
||||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3
|
||||||
)
|
)
|
||||||
|
|
|
@ -2,8 +2,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k=
|
github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k=
|
||||||
|
@ -12,7 +12,5 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
|
||||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b h1:ag/x1USPSsqHud38I9BAC88qdNLDHHtQ4mlgQIZPPNA=
|
|
||||||
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3 h1:7TYNF4UdlohbFwpNH04CoPMp1cHUZgO1Ebq5r2hIjfo=
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3 h1:7TYNF4UdlohbFwpNH04CoPMp1cHUZgO1Ebq5r2hIjfo=
|
||||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
|
@ -429,10 +429,10 @@ type PipeConfig struct {
|
||||||
// when the pipe is in message mode.
|
// when the pipe is in message mode.
|
||||||
MessageMode bool
|
MessageMode bool
|
||||||
|
|
||||||
// InputBufferSize specifies the size the input buffer, in bytes.
|
// InputBufferSize specifies the size of the input buffer, in bytes.
|
||||||
InputBufferSize int32
|
InputBufferSize int32
|
||||||
|
|
||||||
// OutputBufferSize specifies the size the input buffer, in bytes.
|
// OutputBufferSize specifies the size of the output buffer, in bytes.
|
||||||
OutputBufferSize int32
|
OutputBufferSize int32
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,21 +1,50 @@
|
||||||
xdg
|
<h1 align="center">
|
||||||
===
|
<div>
|
||||||
|
<img src="https://raw.githubusercontent.com/adrg/adrg.github.io/master/assets/projects/xdg/logo.png" height="80px" alt="xdg logo"/>
|
||||||
|
</div>
|
||||||
|
</h1>
|
||||||
|
|
||||||
[![Build Status](https://github.com/adrg/xdg/workflows/CI/badge.svg)](https://github.com/adrg/xdg/actions?query=workflow%3ACI)
|
<h4 align="center">Go implementation of the XDG Base Directory Specification and XDG user directories.</h4>
|
||||||
[![Code coverage](https://codecov.io/gh/adrg/xdg/branch/master/graphs/badge.svg?branch=master)](https://codecov.io/gh/adrg/xdg)
|
|
||||||
[![pkg.go.dev documentation](https://pkg.go.dev/badge/github.com/adrg/xdg)](https://pkg.go.dev/github.com/adrg/xdg)
|
<p align="center">
|
||||||
[![MIT license](https://img.shields.io/badge/license-MIT-red.svg?style=flat-square)](https://opensource.org/licenses/MIT)
|
<a href="https://github.com/adrg/xdg/actions?query=workflow%3ACI">
|
||||||
[![Go report card](https://goreportcard.com/badge/github.com/adrg/xdg)](https://goreportcard.com/report/github.com/adrg/xdg)
|
<img alt="Build status" src="https://github.com/adrg/xdg/workflows/CI/badge.svg">
|
||||||
[![GitHub issues](https://img.shields.io/github/issues/adrg/xdg)](https://github.com/adrg/xdg/issues)
|
</a>
|
||||||
[![Buy me a coffee](https://img.shields.io/static/v1.svg?label=%20&message=Buy%20me%20a%20coffee&color=FF813F&logo=buy%20me%20a%20coffee&logoColor=white)](https://www.buymeacoffee.com/adrg)
|
<a href="https://app.codecov.io/gh/adrg/xdg">
|
||||||
[![GitHub stars](https://img.shields.io/github/stars/adrg/xdg?style=social)](https://github.com/adrg/xdg/stargazers)
|
<img alt="Code coverage" src="https://codecov.io/gh/adrg/xdg/branch/master/graphs/badge.svg?branch=master">
|
||||||
|
</a>
|
||||||
|
<a href="https://pkg.go.dev/github.com/adrg/xdg">
|
||||||
|
<img alt="pkg.go.dev documentation" src="https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white">
|
||||||
|
</a>
|
||||||
|
<a href="https://opensource.org/licenses/MIT" rel="nofollow">
|
||||||
|
<img alt="MIT license" src="https://img.shields.io/github/license/adrg/xdg">
|
||||||
|
</a>
|
||||||
|
<br />
|
||||||
|
<a href="https://goreportcard.com/report/github.com/adrg/xdg">
|
||||||
|
<img alt="Go report card" src="https://goreportcard.com/badge/github.com/adrg/xdg">
|
||||||
|
</a>
|
||||||
|
<a href="https://github.com/avelino/awesome-go#configuration">
|
||||||
|
<img alt="Awesome Go" src="https://awesome.re/mentioned-badge.svg">
|
||||||
|
</a>
|
||||||
|
<a href="https://github.com/adrg/xdg/graphs/contributors">
|
||||||
|
<img alt="GitHub contributors" src="https://img.shields.io/github/contributors/adrg/xdg" />
|
||||||
|
</a>
|
||||||
|
<a href="https://github.com/adrg/xdg/issues">
|
||||||
|
<img alt="GitHub open issues" src="https://img.shields.io/github/issues-raw/adrg/xdg">
|
||||||
|
</a>
|
||||||
|
<a href="https://ko-fi.com/T6T72WATK">
|
||||||
|
<img alt="Buy me a coffee" src="https://img.shields.io/static/v1.svg?label=%20&message=Buy%20me%20a%20coffee&color=579fbf&logo=buy%20me%20a%20coffee&logoColor=white">
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
Provides an implementation of the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
|
Provides an implementation of the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
|
||||||
The specification defines a set of standard paths for storing application files,
|
The specification defines a set of standard paths for storing application files,
|
||||||
including data and configuration files. For portability and flexibility reasons,
|
including data and configuration files. For portability and flexibility reasons,
|
||||||
applications should use the XDG defined locations instead of hardcoding paths.
|
applications should use the XDG defined locations instead of hardcoding paths.
|
||||||
The package also includes the locations of well known [user directories](https://wiki.archlinux.org/index.php/XDG_user_directories).
|
|
||||||
The current implementation supports Windows, Mac OS and most flavors of Unix.
|
The package also includes the locations of well known [user directories](https://wiki.archlinux.org/index.php/XDG_user_directories)
|
||||||
|
and an implementation of the [state directory](https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory) proposal.
|
||||||
|
Windows, macOS and most flavors of Unix are supported.
|
||||||
|
|
||||||
Full documentation can be found at: https://pkg.go.dev/github.com/adrg/xdg.
|
Full documentation can be found at: https://pkg.go.dev/github.com/adrg/xdg.
|
||||||
|
|
||||||
|
@ -29,19 +58,19 @@ present in the environment.
|
||||||
|
|
||||||
#### XDG Base Directory
|
#### XDG Base Directory
|
||||||
|
|
||||||
| | Unix | Mac OS | Windows |
|
| | Unix | macOS | Windows |
|
||||||
| :--- | :--- | :----- | :--- |
|
| :-------------- | :---------------------------------- | :------------------------------------------------------------------------------------ | :-------------------------------------- |
|
||||||
| XDG_DATA_HOME | `~/.local/share` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
|
| XDG_DATA_HOME | `~/.local/share` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
|
||||||
| XDG_DATA_DIRS | `/usr/local/share`<br/>`/usr/share` | `/Library/Application Support` | `%APPDATA%\Roaming`<br/>`%PROGRAMDATA%` |
|
| XDG_DATA_DIRS | `/usr/local/share`<br/>`/usr/share` | `/Library/Application Support` | `%APPDATA%\Roaming`<br/>`%PROGRAMDATA%` |
|
||||||
| XDG_CONFIG_HOME | `~/.config` | `~/Library/Preferences` | `%LOCALAPPDATA%` |
|
| XDG_CONFIG_HOME | `~/.config` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
|
||||||
| XDG_CONFIG_DIRS | `/etc/xdg` | `/Library/Preferences` | `%PROGRAMDATA%` |
|
| XDG_CONFIG_DIRS | `/etc/xdg` | `~/Library/Preferences`<br/>`/Library/Application Support`<br/>`/Library/Preferences` | `%PROGRAMDATA%` |
|
||||||
| XDG_CACHE_HOME | `~/.cache` | `~/Library/Caches` | `%LOCALAPPDATA%\cache` |
|
| XDG_CACHE_HOME | `~/.cache` | `~/Library/Caches` | `%LOCALAPPDATA%\cache` |
|
||||||
| XDG_RUNTIME_DIR | `/run/user/UID` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
|
| XDG_RUNTIME_DIR | `/run/user/UID` | `~/Library/Application Support` | `%LOCALAPPDATA%` |
|
||||||
|
|
||||||
#### XDG user directories
|
#### XDG user directories
|
||||||
|
|
||||||
| | Unix | Mac OS | Windows |
|
| | Unix | macOS | Windows |
|
||||||
| :--- | :--- | :----- | :--- |
|
| :------------------ | :------------ | :------------ | :------------------------ |
|
||||||
| XDG_DESKTOP_DIR | `~/Desktop` | `~/Desktop` | `%USERPROFILE%/Desktop` |
|
| XDG_DESKTOP_DIR | `~/Desktop` | `~/Desktop` | `%USERPROFILE%/Desktop` |
|
||||||
| XDG_DOWNLOAD_DIR | `~/Downloads` | `~/Downloads` | `%USERPROFILE%/Downloads` |
|
| XDG_DOWNLOAD_DIR | `~/Downloads` | `~/Downloads` | `%USERPROFILE%/Downloads` |
|
||||||
| XDG_DOCUMENTS_DIR | `~/Documents` | `~/Documents` | `%USERPROFILE%/Documents` |
|
| XDG_DOCUMENTS_DIR | `~/Documents` | `~/Documents` | `%USERPROFILE%/Documents` |
|
||||||
|
@ -53,43 +82,50 @@ present in the environment.
|
||||||
|
|
||||||
#### Non-standard directories
|
#### Non-standard directories
|
||||||
|
|
||||||
|
State directory
|
||||||
|
|
||||||
|
```
|
||||||
|
Unix
|
||||||
|
• ~/.local/state
|
||||||
|
macOS
|
||||||
|
• ~/Library/Application Support
|
||||||
|
Windows
|
||||||
|
• %LOCALAPPDATA%
|
||||||
|
```
|
||||||
|
|
||||||
Application directories
|
Application directories
|
||||||
|
|
||||||
```
|
```
|
||||||
Unix:
|
Unix
|
||||||
- $XDG_DATA_HOME/applications
|
• $XDG_DATA_HOME/applications
|
||||||
- ~/.local/share/applications
|
• ~/.local/share/applications
|
||||||
- /usr/local/share/applications
|
• /usr/local/share/applications
|
||||||
- /usr/share/applications
|
• /usr/share/applications
|
||||||
- $XDG_DATA_DIRS/applications
|
• $XDG_DATA_DIRS/applications
|
||||||
|
macOS
|
||||||
Mac OS:
|
• /Applications
|
||||||
- /Applications
|
Windows
|
||||||
|
• %APPDATA%\Roaming\Microsoft\Windows\Start Menu\Programs
|
||||||
Windows:
|
|
||||||
- %APPDATA%\Roaming\Microsoft\Windows\Start Menu\Programs
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Font Directories
|
Font directories
|
||||||
|
|
||||||
```
|
```
|
||||||
Unix:
|
Unix
|
||||||
- $XDG_DATA_HOME/fonts
|
• $XDG_DATA_HOME/fonts
|
||||||
- ~/.fonts
|
• ~/.fonts
|
||||||
- ~/.local/share/fonts
|
• ~/.local/share/fonts
|
||||||
- /usr/local/share/fonts
|
• /usr/local/share/fonts
|
||||||
- /usr/share/fonts
|
• /usr/share/fonts
|
||||||
- $XDG_DATA_DIRS/fonts
|
• $XDG_DATA_DIRS/fonts
|
||||||
|
macOS
|
||||||
Mac OS:
|
• ~/Library/Fonts
|
||||||
- ~/Library/Fonts
|
• /Library/Fonts
|
||||||
- /Library/Fonts
|
• /System/Library/Fonts
|
||||||
- /System/Library/Fonts
|
• /Network/Library/Fonts
|
||||||
- /Network/Library/Fonts
|
Windows
|
||||||
|
• %windir%\Fonts
|
||||||
Windows:
|
• %LOCALAPPDATA%\Microsoft\Windows\Fonts
|
||||||
- %windir%\Fonts
|
|
||||||
- %LOCALAPPDATA%\Microsoft\Windows\Fonts
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
@ -115,6 +151,7 @@ func main() {
|
||||||
log.Println("Runtime directory:", xdg.RuntimeDir)
|
log.Println("Runtime directory:", xdg.RuntimeDir)
|
||||||
|
|
||||||
// Non-standard directories.
|
// Non-standard directories.
|
||||||
|
log.Println("Home state directory:", xdg.StateHome)
|
||||||
log.Println("Application directories:", xdg.ApplicationDirs)
|
log.Println("Application directories:", xdg.ApplicationDirs)
|
||||||
log.Println("Font directories:", xdg.FontDirs)
|
log.Println("Font directories:", xdg.FontDirs)
|
||||||
|
|
||||||
|
@ -132,6 +169,7 @@ func main() {
|
||||||
// xdg.DataFile()
|
// xdg.DataFile()
|
||||||
// xdg.CacheFile()
|
// xdg.CacheFile()
|
||||||
// xdg.RuntimeFile()
|
// xdg.RuntimeFile()
|
||||||
|
// xdg.StateFile()
|
||||||
|
|
||||||
// Finding application config files.
|
// Finding application config files.
|
||||||
// SearchConfigFile takes one parameter which must contain the name of
|
// SearchConfigFile takes one parameter which must contain the name of
|
||||||
|
@ -147,6 +185,7 @@ func main() {
|
||||||
// xdg.SearchDataFile()
|
// xdg.SearchDataFile()
|
||||||
// xdg.SearchCacheFile()
|
// xdg.SearchCacheFile()
|
||||||
// xdg.SearchRuntimeFile()
|
// xdg.SearchRuntimeFile()
|
||||||
|
// xdg.SearchStateFile()
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -182,30 +221,26 @@ func main() {
|
||||||
|
|
||||||
Contributions in the form of pull requests, issues or just general feedback,
|
Contributions in the form of pull requests, issues or just general feedback,
|
||||||
are always welcome.
|
are always welcome.
|
||||||
See [CONTRIBUTING.MD](https://github.com/adrg/xdg/blob/master/CONTRIBUTING.md).
|
See [CONTRIBUTING.MD](CONTRIBUTING.md).
|
||||||
|
|
||||||
**Contributors**:
|
**Contributors**:
|
||||||
[adrg](https://github.com/adrg),
|
[adrg](https://github.com/adrg),
|
||||||
[wichert](https://github.com/wichert),
|
[wichert](https://github.com/wichert),
|
||||||
[bouncepaw](https://github.com/bouncepaw),
|
[bouncepaw](https://github.com/bouncepaw),
|
||||||
[gabriel-vasile](https://github.com/gabriel-vasile).
|
[gabriel-vasile](https://github.com/gabriel-vasile),
|
||||||
|
[KalleDK](https://github.com/KalleDK).
|
||||||
|
|
||||||
## References
|
## References
|
||||||
|
|
||||||
For more information see:
|
For more information see:
|
||||||
* [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
|
* [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
|
||||||
* [XDG user directories](https://wiki.archlinux.org/index.php/XDG_user_directories)
|
* [XDG user directories](https://wiki.archlinux.org/index.php/XDG_user_directories)
|
||||||
|
* [XDG state directory proposal](https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory)
|
||||||
## Buy me a coffee
|
* [XDG_STATE_HOME proposal](https://lists.freedesktop.org/archives/xdg/2016-December/013803.html)
|
||||||
|
|
||||||
If you found this project useful and want to support it, consider buying me a coffee.
|
|
||||||
<a href="https://www.buymeacoffee.com/adrg">
|
|
||||||
<img src="https://cdn.buymeacoffee.com/buttons/v2/arial-orange.png" alt="Buy Me A Coffee" height="42px">
|
|
||||||
</a>
|
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Copyright (c) 2014 Adrian-George Bostan.
|
Copyright (c) 2014 Adrian-George Bostan.
|
||||||
|
|
||||||
This project is licensed under the [MIT license](https://opensource.org/licenses/MIT).
|
This project is licensed under the [MIT license](https://opensource.org/licenses/MIT).
|
||||||
See [LICENSE](https://github.com/adrg/xdg/blob/master/LICENSE) for more details.
|
See [LICENSE](LICENSE) for more details.
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
package xdg
|
package xdg
|
||||||
|
|
||||||
import "os"
|
|
||||||
|
|
||||||
// XDG Base Directory environment variables.
|
// XDG Base Directory environment variables.
|
||||||
const (
|
const (
|
||||||
envDataHome = "XDG_DATA_HOME"
|
envDataHome = "XDG_DATA_HOME"
|
||||||
|
@ -10,6 +8,7 @@ const (
|
||||||
envConfigDirs = "XDG_CONFIG_DIRS"
|
envConfigDirs = "XDG_CONFIG_DIRS"
|
||||||
envCacheHome = "XDG_CACHE_HOME"
|
envCacheHome = "XDG_CACHE_HOME"
|
||||||
envRuntimeDir = "XDG_RUNTIME_DIR"
|
envRuntimeDir = "XDG_RUNTIME_DIR"
|
||||||
|
envStateHome = "XDG_STATE_HOME"
|
||||||
)
|
)
|
||||||
|
|
||||||
type baseDirectories struct {
|
type baseDirectories struct {
|
||||||
|
@ -21,6 +20,7 @@ type baseDirectories struct {
|
||||||
runtime string
|
runtime string
|
||||||
|
|
||||||
// Non-standard directories.
|
// Non-standard directories.
|
||||||
|
stateHome string
|
||||||
fonts []string
|
fonts []string
|
||||||
applications []string
|
applications []string
|
||||||
}
|
}
|
||||||
|
@ -38,27 +38,11 @@ func (bd baseDirectories) cacheFile(relPath string) (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (bd baseDirectories) runtimeFile(relPath string) (string, error) {
|
func (bd baseDirectories) runtimeFile(relPath string) (string, error) {
|
||||||
fi, err := os.Lstat(bd.runtime)
|
|
||||||
if err != nil {
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return createPath(relPath, []string{bd.runtime})
|
return createPath(relPath, []string{bd.runtime})
|
||||||
}
|
}
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
if fi.IsDir() {
|
func (bd baseDirectories) stateFile(relPath string) (string, error) {
|
||||||
// The runtime directory must be owned by the user.
|
return createPath(relPath, []string{bd.stateHome})
|
||||||
if err = chown(bd.runtime, os.Getuid(), os.Getgid()); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// For security reasons, the runtime directory cannot be a symlink.
|
|
||||||
if err = os.Remove(bd.runtime); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return createPath(relPath, []string{bd.runtime})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (bd baseDirectories) searchDataFile(relPath string) (string, error) {
|
func (bd baseDirectories) searchDataFile(relPath string) (string, error) {
|
||||||
|
@ -76,3 +60,7 @@ func (bd baseDirectories) searchCacheFile(relPath string) (string, error) {
|
||||||
func (bd baseDirectories) searchRuntimeFile(relPath string) (string, error) {
|
func (bd baseDirectories) searchRuntimeFile(relPath string) (string, error) {
|
||||||
return searchFile(relPath, []string{bd.runtime})
|
return searchFile(relPath, []string{bd.runtime})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (bd baseDirectories) searchStateFile(relPath string) (string, error) {
|
||||||
|
return searchFile(relPath, []string{bd.stateHome})
|
||||||
|
}
|
||||||
|
|
|
@ -5,6 +5,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
|
|
@ -5,15 +5,23 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
func initBaseDirs(home string) {
|
func initBaseDirs(home string) {
|
||||||
|
homeAppSupport := filepath.Join(home, "Library", "Application Support")
|
||||||
|
rootAppSupport := "/Library/Application Support"
|
||||||
|
|
||||||
// Initialize base directories.
|
// Initialize base directories.
|
||||||
baseDirs.dataHome = xdgPath(envDataHome, filepath.Join(home, "Library", "Application Support"))
|
baseDirs.dataHome = xdgPath(envDataHome, homeAppSupport)
|
||||||
baseDirs.data = xdgPaths(envDataDirs, "/Library/Application Support")
|
baseDirs.data = xdgPaths(envDataDirs, rootAppSupport)
|
||||||
baseDirs.configHome = xdgPath(envConfigHome, filepath.Join(home, "Library", "Preferences"))
|
baseDirs.configHome = xdgPath(envConfigHome, homeAppSupport)
|
||||||
baseDirs.config = xdgPaths(envConfigDirs, "/Library/Preferences")
|
baseDirs.config = xdgPaths(envConfigDirs,
|
||||||
|
filepath.Join(home, "Library", "Preferences"),
|
||||||
|
rootAppSupport,
|
||||||
|
"/Library/Preferences",
|
||||||
|
)
|
||||||
baseDirs.cacheHome = xdgPath(envCacheHome, filepath.Join(home, "Library", "Caches"))
|
baseDirs.cacheHome = xdgPath(envCacheHome, filepath.Join(home, "Library", "Caches"))
|
||||||
baseDirs.runtime = xdgPath(envRuntimeDir, filepath.Join(home, "Library", "Application Support"))
|
baseDirs.runtime = xdgPath(envRuntimeDir, homeAppSupport)
|
||||||
|
|
||||||
// Initialize non-standard directories.
|
// Initialize non-standard directories.
|
||||||
|
baseDirs.stateHome = xdgPath(envStateHome, homeAppSupport)
|
||||||
baseDirs.applications = []string{
|
baseDirs.applications = []string{
|
||||||
"/Applications",
|
"/Applications",
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,7 @@ func initBaseDirs(home string) {
|
||||||
baseDirs.runtime = xdgPath(envRuntimeDir, filepath.Join("/run/user", strconv.Itoa(os.Getuid())))
|
baseDirs.runtime = xdgPath(envRuntimeDir, filepath.Join("/run/user", strconv.Itoa(os.Getuid())))
|
||||||
|
|
||||||
// Initialize non-standard directories.
|
// Initialize non-standard directories.
|
||||||
|
baseDirs.stateHome = xdgPath(envStateHome, filepath.Join(home, ".local", "state"))
|
||||||
appDirs := []string{
|
appDirs := []string{
|
||||||
filepath.Join(baseDirs.dataHome, "applications"),
|
filepath.Join(baseDirs.dataHome, "applications"),
|
||||||
filepath.Join(home, ".local/share/applications"),
|
filepath.Join(home, ".local/share/applications"),
|
||||||
|
|
|
@ -43,6 +43,7 @@ func initBaseDirs(home string) {
|
||||||
baseDirs.runtime = xdgPath(envRuntimeDir, localAppDataDir)
|
baseDirs.runtime = xdgPath(envRuntimeDir, localAppDataDir)
|
||||||
|
|
||||||
// Initialize non-standard directories.
|
// Initialize non-standard directories.
|
||||||
|
baseDirs.stateHome = xdgPath(envStateHome, localAppDataDir)
|
||||||
baseDirs.applications = []string{
|
baseDirs.applications = []string{
|
||||||
filepath.Join(roamingAppDataDir, "Microsoft", "Windows", "Start Menu", "Programs"),
|
filepath.Join(roamingAppDataDir, "Microsoft", "Windows", "Start Menu", "Programs"),
|
||||||
}
|
}
|
||||||
|
|
|
@ -33,14 +33,6 @@ func homeDir() string {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func chown(name string, uid, gid int) error {
|
|
||||||
if goOS := runtime.GOOS; goOS == "windows" || goOS == "plan9" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return os.Chown(name, uid, gid)
|
|
||||||
}
|
|
||||||
|
|
||||||
func exists(path string) bool {
|
func exists(path string) bool {
|
||||||
_, err := os.Stat(path)
|
_, err := os.Stat(path)
|
||||||
return err == nil || os.IsExist(err)
|
return err == nil || os.IsExist(err)
|
||||||
|
|
|
@ -12,6 +12,10 @@ flavors of Unix.
|
||||||
|
|
||||||
For more information regarding the XDG user directories see:
|
For more information regarding the XDG user directories see:
|
||||||
https://wiki.archlinux.org/index.php/XDG_user_directories
|
https://wiki.archlinux.org/index.php/XDG_user_directories
|
||||||
|
|
||||||
|
For more information regarding the XDG state directory proposal see:
|
||||||
|
https://wiki.debian.org/XDGBaseDirectorySpecification#Proposal:_STATE_directory
|
||||||
|
https://lists.freedesktop.org/archives/xdg/2016-December/013803.html
|
||||||
*/
|
*/
|
||||||
package xdg
|
package xdg
|
||||||
|
|
||||||
|
@ -21,14 +25,14 @@ var (
|
||||||
|
|
||||||
// DataHome defines the base directory relative to which user-specific
|
// DataHome defines the base directory relative to which user-specific
|
||||||
// data files should be stored. This directory is defined by the
|
// data files should be stored. This directory is defined by the
|
||||||
// environment variable $XDG_DATA_HOME. If this variable is not set,
|
// $XDG_DATA_HOME environment variable. If the variable is not set,
|
||||||
// a default equal to $HOME/.local/share should be used.
|
// a default equal to $HOME/.local/share should be used.
|
||||||
DataHome string
|
DataHome string
|
||||||
|
|
||||||
// DataDirs defines the preference-ordered set of base directories to
|
// DataDirs defines the preference-ordered set of base directories to
|
||||||
// search for data files in addition to the DataHome base directory.
|
// search for data files in addition to the DataHome base directory.
|
||||||
// This set of directories is defined by the environment variable
|
// This set of directories is defined by the $XDG_DATA_DIRS environment
|
||||||
// $XDG_DATA_DIRS. If this variable is not set, the default directories
|
// variable. If the variable is not set, the default directories
|
||||||
// to be used are /usr/local/share and /usr/share, in that order. The
|
// to be used are /usr/local/share and /usr/share, in that order. The
|
||||||
// DataHome directory is considered more important than any of the
|
// DataHome directory is considered more important than any of the
|
||||||
// directories defined by DataDirs. Therefore, user data files should be
|
// directories defined by DataDirs. Therefore, user data files should be
|
||||||
|
@ -37,30 +41,30 @@ var (
|
||||||
|
|
||||||
// ConfigHome defines the base directory relative to which user-specific
|
// ConfigHome defines the base directory relative to which user-specific
|
||||||
// configuration files should be written. This directory is defined by
|
// configuration files should be written. This directory is defined by
|
||||||
// the environment variable $XDG_CONFIG_HOME. If this variable is not
|
// the $XDG_CONFIG_HOME environment variable. If the variable is not
|
||||||
// not set, a default equal to $HOME/.config should be used.
|
// not set, a default equal to $HOME/.config should be used.
|
||||||
ConfigHome string
|
ConfigHome string
|
||||||
|
|
||||||
// ConfigDirs defines the preference-ordered set of base directories to
|
// ConfigDirs defines the preference-ordered set of base directories to
|
||||||
// search for configuration files in addition to the ConfigHome base
|
// search for configuration files in addition to the ConfigHome base
|
||||||
// directory. This set of directories is defined by the environment
|
// directory. This set of directories is defined by the $XDG_CONFIG_DIRS
|
||||||
// variable $XDG_CONFIG_DIRS. If this variable is not set, a default
|
// environment variable. If the variable is not set, a default equal
|
||||||
// equal to /etc/xdg should be used. The ConfigHome directory is
|
// to /etc/xdg should be used. The ConfigHome directory is considered
|
||||||
// considered more important than any of the directories defined by
|
// more important than any of the directories defined by ConfigDirs.
|
||||||
// ConfigDirs. Therefore, user config files should be written
|
// Therefore, user config files should be written relative to the
|
||||||
// relative to the ConfigHome directory, if possible.
|
// ConfigHome directory, if possible.
|
||||||
ConfigDirs []string
|
ConfigDirs []string
|
||||||
|
|
||||||
// CacheHome defines the base directory relative to which user-specific
|
// CacheHome defines the base directory relative to which user-specific
|
||||||
// non-essential (cached) data should be written. This directory is
|
// non-essential (cached) data should be written. This directory is
|
||||||
// defined by the environment variable $XDG_CACHE_HOME. If this variable
|
// defined by the $XDG_CACHE_HOME environment variable. If the variable
|
||||||
// is not set, a default equal to $HOME/.cache should be used.
|
// is not set, a default equal to $HOME/.cache should be used.
|
||||||
CacheHome string
|
CacheHome string
|
||||||
|
|
||||||
// RuntimeDir defines the base directory relative to which user-specific
|
// RuntimeDir defines the base directory relative to which user-specific
|
||||||
// non-essential runtime files and other file objects (such as sockets,
|
// non-essential runtime files and other file objects (such as sockets,
|
||||||
// named pipes, etc.) should be stored. This directory is defined by the
|
// named pipes, etc.) should be stored. This directory is defined by the
|
||||||
// environment variable $XDG_RUNTIME_DIR. If this variable is not set,
|
// $XDG_RUNTIME_DIR environment variable. If the variable is not set,
|
||||||
// applications should fall back to a replacement directory with similar
|
// applications should fall back to a replacement directory with similar
|
||||||
// capabilities. Applications should use this directory for communication
|
// capabilities. Applications should use this directory for communication
|
||||||
// and synchronization purposes and should not place larger files in it,
|
// and synchronization purposes and should not place larger files in it,
|
||||||
|
@ -68,6 +72,12 @@ var (
|
||||||
// swapped out to disk.
|
// swapped out to disk.
|
||||||
RuntimeDir string
|
RuntimeDir string
|
||||||
|
|
||||||
|
// StateHome defines the base directory relative to which user-specific
|
||||||
|
// volatile data files should be stored. This directory is defined by
|
||||||
|
// the non-standard $XDG_STATE_HOME environment variable. If the variable
|
||||||
|
// is not set, a default equal to ~/.local/state should be used.
|
||||||
|
StateHome string
|
||||||
|
|
||||||
// UserDirs defines the locations of well known user directories.
|
// UserDirs defines the locations of well known user directories.
|
||||||
UserDirs UserDirectories
|
UserDirs UserDirectories
|
||||||
|
|
||||||
|
@ -96,6 +106,7 @@ func Reload() {
|
||||||
ConfigDirs = baseDirs.config
|
ConfigDirs = baseDirs.config
|
||||||
CacheHome = baseDirs.cacheHome
|
CacheHome = baseDirs.cacheHome
|
||||||
RuntimeDir = baseDirs.runtime
|
RuntimeDir = baseDirs.runtime
|
||||||
|
StateHome = baseDirs.stateHome
|
||||||
FontDirs = baseDirs.fonts
|
FontDirs = baseDirs.fonts
|
||||||
ApplicationDirs = baseDirs.applications
|
ApplicationDirs = baseDirs.applications
|
||||||
|
|
||||||
|
@ -143,6 +154,18 @@ func RuntimeFile(relPath string) (string, error) {
|
||||||
return baseDirs.runtimeFile(relPath)
|
return baseDirs.runtimeFile(relPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// StateFile returns a suitable location for the specified state file. State
|
||||||
|
// files are usually volatile data files, not suitable to be stored relative
|
||||||
|
// to the $XDG_DATA_HOME directory.
|
||||||
|
// The relPath parameter must contain the name of the state file, and
|
||||||
|
// optionally, a set of parent directories (e.g. appname/app.state).
|
||||||
|
// If the specified directories do not exist, they will be created relative
|
||||||
|
// to the base state directory. On failure, an error containing the
|
||||||
|
// attempted paths is returned.
|
||||||
|
func StateFile(relPath string) (string, error) {
|
||||||
|
return baseDirs.stateFile(relPath)
|
||||||
|
}
|
||||||
|
|
||||||
// SearchDataFile searches for specified file in the data search paths.
|
// SearchDataFile searches for specified file in the data search paths.
|
||||||
// The relPath parameter must contain the name of the data file, and
|
// The relPath parameter must contain the name of the data file, and
|
||||||
// optionally, a set of parent directories (e.g. appname/app.data). If the
|
// optionally, a set of parent directories (e.g. appname/app.data). If the
|
||||||
|
@ -175,6 +198,14 @@ func SearchRuntimeFile(relPath string) (string, error) {
|
||||||
return baseDirs.searchRuntimeFile(relPath)
|
return baseDirs.searchRuntimeFile(relPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SearchStateFile searches for the specified file in the state search path.
|
||||||
|
// The relPath parameter must contain the name of the state file, and
|
||||||
|
// optionally, a set of parent directories (e.g. appname/app.state). If the
|
||||||
|
// file cannot be found, an error specifying the searched path is returned.
|
||||||
|
func SearchStateFile(relPath string) (string, error) {
|
||||||
|
return baseDirs.searchStateFile(relPath)
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
Reload()
|
Reload()
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,11 +20,6 @@ linters:
|
||||||
- wsl
|
- wsl
|
||||||
- gomnd
|
- gomnd
|
||||||
- gocognit
|
- gocognit
|
||||||
- goerr113
|
|
||||||
- nolintlint
|
|
||||||
- testpackage
|
|
||||||
- godot
|
|
||||||
- nestif
|
|
||||||
|
|
||||||
linters-settings:
|
linters-settings:
|
||||||
govet:
|
govet:
|
||||||
|
|
|
@ -4,7 +4,7 @@ go:
|
||||||
- "1.13.x"
|
- "1.13.x"
|
||||||
script:
|
script:
|
||||||
- go test -v ./...
|
- go test -v ./...
|
||||||
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
|
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
|
||||||
- ./bin/golangci-lint run
|
- ./bin/golangci-lint run
|
||||||
- git clean -fdx .
|
- git clean -fdx .
|
||||||
after_success:
|
after_success:
|
||||||
|
|
|
@ -30,14 +30,14 @@ var Awk = internal.Register(MustNewLexer(
|
||||||
"root": {
|
"root": {
|
||||||
{`^(?=\s|/)`, Text, Push("slashstartsregex")},
|
{`^(?=\s|/)`, Text, Push("slashstartsregex")},
|
||||||
Include("commentsandwhitespace"),
|
Include("commentsandwhitespace"),
|
||||||
{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
|
{`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")},
|
||||||
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
|
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
|
||||||
{`[})\].]`, Punctuation, nil},
|
{`[})\].]`, Punctuation, nil},
|
||||||
{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")},
|
{`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")},
|
||||||
{`function\b`, KeywordDeclaration, Push("slashstartsregex")},
|
{`function\b`, KeywordDeclaration, Push("slashstartsregex")},
|
||||||
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil},
|
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil},
|
||||||
{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil},
|
{`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil},
|
||||||
{`[@$a-zA-Z_]\w*`, NameOther, nil},
|
{`[$a-zA-Z_]\w*`, NameOther, nil},
|
||||||
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
|
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
|
||||||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
||||||
{`[0-9]+`, LiteralNumberInteger, nil},
|
{`[0-9]+`, LiteralNumberInteger, nil},
|
||||||
|
|
|
@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
|
||||||
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
|
{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
|
||||||
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
|
{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
|
||||||
{`\A#!.+\n`, CommentPreproc, nil},
|
{`\A#!.+\n`, CommentPreproc, nil},
|
||||||
{`#.*(\S|$)`, CommentSingle, nil},
|
{`#.*\S`, CommentSingle, nil},
|
||||||
{`\\[\w\W]`, LiteralStringEscape, nil},
|
{`\\[\w\W]`, LiteralStringEscape, nil},
|
||||||
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
|
{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
|
||||||
{`[\[\]{}()=]`, Operator, nil},
|
{`[\[\]{}()=]`, Operator, nil},
|
||||||
|
|
|
@ -1,206 +0,0 @@
|
||||||
package c
|
|
||||||
|
|
||||||
import (
|
|
||||||
. "github.com/alecthomas/chroma" // nolint
|
|
||||||
"github.com/alecthomas/chroma/lexers/internal"
|
|
||||||
)
|
|
||||||
|
|
||||||
// caddyfileCommon are the rules common to both of the lexer variants
|
|
||||||
var caddyfileCommon = Rules{
|
|
||||||
"site_block_common": {
|
|
||||||
// Import keyword
|
|
||||||
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
|
|
||||||
// Matcher definition
|
|
||||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
|
||||||
// Matcher token stub for docs
|
|
||||||
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
|
|
||||||
// These cannot have matchers but may have things that look like
|
|
||||||
// matchers in their arguments, so we just parse as a subdirective.
|
|
||||||
{`try_files`, Keyword, Push("subdirective")},
|
|
||||||
// These are special, they can nest more directives
|
|
||||||
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
|
|
||||||
// Any other directive
|
|
||||||
{`[^\s#]+`, Keyword, Push("directive")},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"matcher": {
|
|
||||||
{`\{`, Punctuation, Push("block")},
|
|
||||||
// Not can be one-liner
|
|
||||||
{`not`, Keyword, Push("deep_not_matcher")},
|
|
||||||
// Any other same-line matcher
|
|
||||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
|
||||||
// Terminators
|
|
||||||
{`\n`, Text, Pop(1)},
|
|
||||||
{`\}`, Punctuation, Pop(1)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"block": {
|
|
||||||
{`\}`, Punctuation, Pop(2)},
|
|
||||||
// Not can be one-liner
|
|
||||||
{`not`, Keyword, Push("not_matcher")},
|
|
||||||
// Any other subdirective
|
|
||||||
{`[^\s#]+`, Keyword, Push("subdirective")},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"nested_block": {
|
|
||||||
{`\}`, Punctuation, Pop(2)},
|
|
||||||
// Matcher definition
|
|
||||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
|
||||||
// Something that starts with literally < is probably a docs stub
|
|
||||||
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
|
|
||||||
// Any other directive
|
|
||||||
{`[^\s#]+`, Keyword, Push("nested_directive")},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"not_matcher": {
|
|
||||||
{`\}`, Punctuation, Pop(2)},
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
{`[^\s#]+`, Keyword, Push("arguments")},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
|
||||||
"deep_not_matcher": {
|
|
||||||
{`\}`, Punctuation, Pop(2)},
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
|
||||||
"directive": {
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
Include("matcher_token"),
|
|
||||||
Include("comments_pop_1"),
|
|
||||||
{`\n`, Text, Pop(1)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"nested_directive": {
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("nested_block")},
|
|
||||||
Include("matcher_token"),
|
|
||||||
Include("comments_pop_1"),
|
|
||||||
{`\n`, Text, Pop(1)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"subdirective": {
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
Include("comments_pop_1"),
|
|
||||||
{`\n`, Text, Pop(1)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"arguments": {
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
Include("comments_pop_2"),
|
|
||||||
{`\\\n`, Text, nil}, // Skip escaped newlines
|
|
||||||
{`\n`, Text, Pop(2)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"deep_subdirective": {
|
|
||||||
{`\{(?=\s)`, Punctuation, Push("block")},
|
|
||||||
Include("comments_pop_3"),
|
|
||||||
{`\n`, Text, Pop(3)},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"matcher_token": {
|
|
||||||
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
|
|
||||||
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
|
|
||||||
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
|
|
||||||
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
|
|
||||||
},
|
|
||||||
"comments": {
|
|
||||||
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
|
|
||||||
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
|
|
||||||
},
|
|
||||||
"comments_pop_1": {
|
|
||||||
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
|
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
|
|
||||||
},
|
|
||||||
"comments_pop_2": {
|
|
||||||
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
|
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
|
|
||||||
},
|
|
||||||
"comments_pop_3": {
|
|
||||||
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
|
|
||||||
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
|
|
||||||
},
|
|
||||||
"base": {
|
|
||||||
Include("comments"),
|
|
||||||
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
|
|
||||||
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
|
|
||||||
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
|
|
||||||
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
|
|
||||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
|
|
||||||
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
|
|
||||||
{`\]|\|`, Punctuation, nil},
|
|
||||||
{`[^\s#{}$\]]+`, LiteralString, nil},
|
|
||||||
{`/[^\s#]*`, Name, nil},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Caddyfile lexer.
|
|
||||||
var Caddyfile = internal.Register(MustNewLexer(
|
|
||||||
&Config{
|
|
||||||
Name: "Caddyfile",
|
|
||||||
Aliases: []string{"caddyfile", "caddy"},
|
|
||||||
Filenames: []string{"Caddyfile*"},
|
|
||||||
MimeTypes: []string{},
|
|
||||||
},
|
|
||||||
Rules{
|
|
||||||
"root": {
|
|
||||||
Include("comments"),
|
|
||||||
// Global options block
|
|
||||||
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
|
|
||||||
// Snippets
|
|
||||||
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
|
|
||||||
// Site label
|
|
||||||
{`[^#{(\s,]+`, GenericHeading, Push("label")},
|
|
||||||
// Site label with placeholder
|
|
||||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
|
||||||
"globals": {
|
|
||||||
{`\}`, Punctuation, Pop(1)},
|
|
||||||
{`[^\s#]+`, Keyword, Push("directive")},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"snippet": {
|
|
||||||
{`\}`, Punctuation, Pop(1)},
|
|
||||||
// Matcher definition
|
|
||||||
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
|
|
||||||
// Any directive
|
|
||||||
{`[^\s#]+`, Keyword, Push("directive")},
|
|
||||||
Include("base"),
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
// Allow multiple labels, comma separated, newlines after
|
|
||||||
// a comma means another label is coming
|
|
||||||
{`,\s*\n?`, Text, nil},
|
|
||||||
{` `, Text, nil},
|
|
||||||
// Site label with placeholder
|
|
||||||
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
|
|
||||||
// Site label
|
|
||||||
{`[^#{(\s,]+`, GenericHeading, nil},
|
|
||||||
// Comment after non-block label (hack because comments end in \n)
|
|
||||||
{`#.*\n`, CommentSingle, Push("site_block")},
|
|
||||||
// Note: if \n, we'll never pop out of the site_block, it's valid
|
|
||||||
{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
|
|
||||||
},
|
|
||||||
"site_block": {
|
|
||||||
{`\}`, Punctuation, Pop(2)},
|
|
||||||
Include("site_block_common"),
|
|
||||||
},
|
|
||||||
}.Merge(caddyfileCommon),
|
|
||||||
))
|
|
||||||
|
|
||||||
// Caddyfile directive-only lexer.
|
|
||||||
var CaddyfileDirectives = internal.Register(MustNewLexer(
|
|
||||||
&Config{
|
|
||||||
Name: "Caddyfile Directives",
|
|
||||||
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
|
|
||||||
Filenames: []string{},
|
|
||||||
MimeTypes: []string{},
|
|
||||||
},
|
|
||||||
Rules{
|
|
||||||
// Same as "site_block" in Caddyfile
|
|
||||||
"root": {
|
|
||||||
Include("site_block_common"),
|
|
||||||
},
|
|
||||||
}.Merge(caddyfileCommon),
|
|
||||||
))
|
|
|
@ -1,12 +1,15 @@
|
||||||
package circular
|
package circular
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
. "github.com/alecthomas/chroma" // nolint
|
. "github.com/alecthomas/chroma" // nolint
|
||||||
|
"github.com/alecthomas/chroma/lexers/h"
|
||||||
"github.com/alecthomas/chroma/lexers/internal"
|
"github.com/alecthomas/chroma/lexers/internal"
|
||||||
)
|
)
|
||||||
|
|
||||||
// PHP lexer for pure PHP code (not embedded in HTML).
|
// PHP lexer.
|
||||||
var PHP = internal.Register(MustNewLexer(
|
var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
|
||||||
&Config{
|
&Config{
|
||||||
Name: "PHP",
|
Name: "PHP",
|
||||||
Aliases: []string{"php", "php3", "php4", "php5"},
|
Aliases: []string{"php", "php3", "php4", "php5"},
|
||||||
|
@ -16,10 +19,12 @@ var PHP = internal.Register(MustNewLexer(
|
||||||
CaseInsensitive: true,
|
CaseInsensitive: true,
|
||||||
EnsureNL: true,
|
EnsureNL: true,
|
||||||
},
|
},
|
||||||
phpCommonRules.Rename("php", "root"),
|
Rules{
|
||||||
))
|
"root": {
|
||||||
|
{`<\?(php)?`, CommentPreproc, Push("php")},
|
||||||
var phpCommonRules = Rules{
|
{`[^<]+`, Other, nil},
|
||||||
|
{`<`, Other, nil},
|
||||||
|
},
|
||||||
"php": {
|
"php": {
|
||||||
{`\?>`, CommentPreproc, Pop(1)},
|
{`\?>`, CommentPreproc, Pop(1)},
|
||||||
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
|
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
|
||||||
|
@ -77,4 +82,10 @@ var phpCommonRules = Rules{
|
||||||
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
|
{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
|
||||||
{`[${\\]`, LiteralStringDouble, nil},
|
{`[${\\]`, LiteralStringDouble, nil},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
).SetAnalyser(func(text string) float32 {
|
||||||
|
if strings.Contains(text, "<?php") {
|
||||||
|
return 0.5
|
||||||
}
|
}
|
||||||
|
return 0.0
|
||||||
|
})))
|
||||||
|
|
|
@ -1,34 +0,0 @@
|
||||||
package circular
|
|
||||||
|
|
||||||
import (
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
. "github.com/alecthomas/chroma" // nolint
|
|
||||||
"github.com/alecthomas/chroma/lexers/h"
|
|
||||||
"github.com/alecthomas/chroma/lexers/internal"
|
|
||||||
)
|
|
||||||
|
|
||||||
// PHTML lexer is PHP in HTML.
|
|
||||||
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
|
|
||||||
&Config{
|
|
||||||
Name: "PHTML",
|
|
||||||
Aliases: []string{"phtml"},
|
|
||||||
Filenames: []string{"*.phtml"},
|
|
||||||
MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
|
|
||||||
DotAll: true,
|
|
||||||
CaseInsensitive: true,
|
|
||||||
EnsureNL: true,
|
|
||||||
},
|
|
||||||
Rules{
|
|
||||||
"root": {
|
|
||||||
{`<\?(php)?`, CommentPreproc, Push("php")},
|
|
||||||
{`[^<]+`, Other, nil},
|
|
||||||
{`<`, Other, nil},
|
|
||||||
},
|
|
||||||
}.Merge(phpCommonRules),
|
|
||||||
).SetAnalyser(func(text string) float32 {
|
|
||||||
if strings.Contains(text, "<?php") {
|
|
||||||
return 0.5
|
|
||||||
}
|
|
||||||
return 0.0
|
|
||||||
})))
|
|
|
@ -28,13 +28,6 @@ var Elixir = internal.Register(MustNewLexer(
|
||||||
{`:"`, LiteralStringSymbol, Push("string_double_atom")},
|
{`:"`, LiteralStringSymbol, Push("string_double_atom")},
|
||||||
{`:'`, LiteralStringSymbol, Push("string_single_atom")},
|
{`:'`, LiteralStringSymbol, Push("string_single_atom")},
|
||||||
{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
|
{`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
|
||||||
{`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil},
|
|
||||||
{`(not|and|or|when|in)\b`, OperatorWord, nil},
|
|
||||||
{`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil},
|
|
||||||
{`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil},
|
|
||||||
{`(import|require|use|alias)\b`, KeywordNamespace, nil},
|
|
||||||
{`(nil|true|false)\b`, NameConstant, nil},
|
|
||||||
{`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil},
|
|
||||||
{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil},
|
{`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil},
|
||||||
{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil},
|
{`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil},
|
||||||
{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil},
|
{`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil},
|
||||||
|
|
|
@ -15,7 +15,6 @@ var Go = internal.Register(MustNewLexer(
|
||||||
Aliases: []string{"go", "golang"},
|
Aliases: []string{"go", "golang"},
|
||||||
Filenames: []string{"*.go"},
|
Filenames: []string{"*.go"},
|
||||||
MimeTypes: []string{"text/x-gosrc"},
|
MimeTypes: []string{"text/x-gosrc"},
|
||||||
EnsureNL: true,
|
|
||||||
},
|
},
|
||||||
Rules{
|
Rules{
|
||||||
"root": {
|
"root": {
|
||||||
|
|
|
@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
|
||||||
},
|
},
|
||||||
Rules{
|
Rules{
|
||||||
"root": {
|
"root": {
|
||||||
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
|
{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
|
||||||
{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
|
{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
|
||||||
},
|
},
|
||||||
"headers": {
|
"headers": {
|
||||||
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
|
{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
|
||||||
|
|
|
@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer(
|
||||||
&Config{
|
&Config{
|
||||||
Name: "INI",
|
Name: "INI",
|
||||||
Aliases: []string{"ini", "cfg", "dosini"},
|
Aliases: []string{"ini", "cfg", "dosini"},
|
||||||
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
|
Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"},
|
||||||
MimeTypes: []string{"text/x-ini", "text/inf"},
|
MimeTypes: []string{"text/x-ini", "text/inf"},
|
||||||
},
|
},
|
||||||
Rules{
|
Rules{
|
||||||
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -24,71 +24,32 @@ var Kotlin = internal.Register(MustNewLexer(
|
||||||
{`//[^\n]*\n?`, CommentSingle, nil},
|
{`//[^\n]*\n?`, CommentSingle, nil},
|
||||||
{`/[*].*?[*]/`, CommentMultiline, nil},
|
{`/[*].*?[*]/`, CommentMultiline, nil},
|
||||||
{`\n`, Text, nil},
|
{`\n`, Text, nil},
|
||||||
{`!==|!in|!is|===`, Operator, nil},
|
{`::|!!|\?[:.]`, Operator, nil},
|
||||||
{`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil},
|
{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil},
|
||||||
{`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil},
|
|
||||||
{`[{}]`, Punctuation, nil},
|
{`[{}]`, Punctuation, nil},
|
||||||
{`"""`, LiteralString, Push("rawstring")},
|
{`"""[^"]*"""`, LiteralString, nil},
|
||||||
{`"`, LiteralStringDouble, Push("string")},
|
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
|
||||||
{`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil},
|
|
||||||
{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
|
{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
|
||||||
{`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil},
|
{`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil},
|
||||||
{`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil},
|
{`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil},
|
||||||
{`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")},
|
{`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")},
|
||||||
{`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")},
|
{`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")},
|
||||||
{`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")},
|
{`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")},
|
||||||
{`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")},
|
{`(fun)(\s+)(<[^>]*>\s+)?`, ByGroups(Keyword, Text, Text), Push("function")},
|
||||||
{`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil},
|
{`(abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|false|final|finally|for|fun|get|if|import|in|infix|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|true|try|val|var|vararg|when|where|while)\b`, Keyword, nil},
|
||||||
{`@[` + kotlinIdentifier + `]+`, NameDecorator, nil},
|
{"(@?[" + kotlinIdentifier + "]*`)", Name, nil},
|
||||||
{`[` + kotlinIdentifier + `]+`, Name, nil},
|
|
||||||
},
|
},
|
||||||
"package": {
|
"package": {
|
||||||
{`\S+`, NameNamespace, Pop(1)},
|
{`\S+`, NameNamespace, Pop(1)},
|
||||||
},
|
},
|
||||||
"class": {
|
"class": {
|
||||||
// \x60 is the back tick character (`)
|
{"(@?[" + kotlinIdentifier + "]*`)", NameClass, Pop(1)},
|
||||||
{`\x60[^\x60]+?\x60`, NameClass, Pop(1)},
|
|
||||||
{`[` + kotlinIdentifier + `]+`, NameClass, Pop(1)},
|
|
||||||
},
|
},
|
||||||
"property": {
|
"property": {
|
||||||
{`\x60[^\x60]+?\x60`, NameProperty, Pop(1)},
|
{"(@?[" + kotlinIdentifier + " ]*`)", NameProperty, Pop(1)},
|
||||||
{`[` + kotlinIdentifier + `]+`, NameProperty, Pop(1)},
|
|
||||||
},
|
|
||||||
"generics-specification": {
|
|
||||||
{`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. <T : List<Int> >
|
|
||||||
{`>`, Punctuation, Pop(1)},
|
|
||||||
{`[,:*?]`, Punctuation, nil},
|
|
||||||
{`(in|out|reified)`, Keyword, nil},
|
|
||||||
{`\x60[^\x60]+?\x60`, NameClass, nil},
|
|
||||||
{`[` + kotlinIdentifier + `]+`, NameClass, nil},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
},
|
||||||
"function": {
|
"function": {
|
||||||
{`<`, Punctuation, Push("generics-specification")},
|
{"(@?[" + kotlinIdentifier + " ]*`)", NameFunction, Pop(1)},
|
||||||
{`\x60[^\x60]+?\x60`, NameFunction, Pop(1)},
|
|
||||||
{`[` + kotlinIdentifier + `]+`, NameFunction, Pop(1)},
|
|
||||||
{`\s+`, Text, nil},
|
|
||||||
},
|
|
||||||
"rawstring": {
|
|
||||||
// raw strings don't allow character escaping
|
|
||||||
{`"""`, LiteralString, Pop(1)},
|
|
||||||
{`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil},
|
|
||||||
Include("string-interpol"),
|
|
||||||
// remaining dollar signs are just a string
|
|
||||||
{`\$`, LiteralString, nil},
|
|
||||||
},
|
|
||||||
"string": {
|
|
||||||
{`\\[tbnr'"\\\$]`, LiteralStringEscape, nil},
|
|
||||||
{`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil},
|
|
||||||
{`"`, LiteralStringDouble, Pop(1)},
|
|
||||||
Include("string-interpol"),
|
|
||||||
{`[^\n\\"$]+`, LiteralStringDouble, nil},
|
|
||||||
// remaining dollar signs are just a string
|
|
||||||
{`\$`, LiteralStringDouble, nil},
|
|
||||||
},
|
|
||||||
"string-interpol": {
|
|
||||||
{`\$[` + kotlinIdentifier + `]+`, LiteralStringInterpol, nil},
|
|
||||||
{`\${[^}\n]*}`, LiteralStringInterpol, nil},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
))
|
))
|
||||||
|
|
|
@ -32,7 +32,6 @@ import (
|
||||||
_ "github.com/alecthomas/chroma/lexers/w"
|
_ "github.com/alecthomas/chroma/lexers/w"
|
||||||
_ "github.com/alecthomas/chroma/lexers/x"
|
_ "github.com/alecthomas/chroma/lexers/x"
|
||||||
_ "github.com/alecthomas/chroma/lexers/y"
|
_ "github.com/alecthomas/chroma/lexers/y"
|
||||||
_ "github.com/alecthomas/chroma/lexers/z"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Registry of Lexers.
|
// Registry of Lexers.
|
||||||
|
|
|
@ -1,59 +0,0 @@
|
||||||
package p
|
|
||||||
|
|
||||||
import (
|
|
||||||
. "github.com/alecthomas/chroma" // nolint
|
|
||||||
"github.com/alecthomas/chroma/lexers/internal"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Pony lexer.
|
|
||||||
var Pony = internal.Register(MustNewLexer(
|
|
||||||
&Config{
|
|
||||||
Name: "Pony",
|
|
||||||
Aliases: []string{"pony"},
|
|
||||||
Filenames: []string{"*.pony"},
|
|
||||||
MimeTypes: []string{},
|
|
||||||
},
|
|
||||||
Rules{
|
|
||||||
"root": {
|
|
||||||
{`\n`, Text, nil},
|
|
||||||
{`[^\S\n]+`, Text, nil},
|
|
||||||
{`//.*\n`, CommentSingle, nil},
|
|
||||||
{`/\*`, CommentMultiline, Push("nested_comment")},
|
|
||||||
{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
|
|
||||||
{`"`, LiteralString, Push("string")},
|
|
||||||
{`\'.*\'`, LiteralStringChar, nil},
|
|
||||||
{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
|
|
||||||
{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
|
|
||||||
{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
|
|
||||||
{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
|
|
||||||
{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
|
|
||||||
{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
|
|
||||||
{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
|
|
||||||
{`_?[A-Z]\w*`, NameClass, nil},
|
|
||||||
{`string\(\)`, NameOther, nil},
|
|
||||||
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
|
|
||||||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
|
||||||
{`\d+`, LiteralNumberInteger, nil},
|
|
||||||
{`(true|false)\b`, Keyword, nil},
|
|
||||||
{`_\d*`, Name, nil},
|
|
||||||
{`_?[a-z][\w\'_]*`, Name, nil},
|
|
||||||
},
|
|
||||||
"typename": {
|
|
||||||
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
|
|
||||||
},
|
|
||||||
"methodname": {
|
|
||||||
{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
|
|
||||||
},
|
|
||||||
"nested_comment": {
|
|
||||||
{`[^*/]+`, CommentMultiline, nil},
|
|
||||||
{`/\*`, CommentMultiline, Push()},
|
|
||||||
{`\*/`, CommentMultiline, Pop(1)},
|
|
||||||
{`[*/]`, CommentMultiline, nil},
|
|
||||||
},
|
|
||||||
"string": {
|
|
||||||
{`"`, LiteralString, Pop(1)},
|
|
||||||
{`\\"`, LiteralString, nil},
|
|
||||||
{`[^\\"]+`, LiteralString, nil},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
))
|
|
|
@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer(
|
||||||
{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
|
{`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
|
||||||
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
|
{`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
|
||||||
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
|
{`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
|
||||||
{`[.,=\[\]{}]`, Punctuation, nil},
|
{`[.,=\[\]]`, Punctuation, nil},
|
||||||
{`[^\W\d]\w*`, NameOther, nil},
|
{`[^\W\d]\w*`, NameOther, nil},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
@ -38,14 +38,14 @@ var TypeScript = internal.Register(MustNewLexer(
|
||||||
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
|
{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
|
||||||
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
|
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
|
||||||
{`[})\].]`, Punctuation, nil},
|
{`[})\].]`, Punctuation, nil},
|
||||||
{`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")},
|
{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
|
||||||
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
|
{`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
|
||||||
{`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil},
|
{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
|
||||||
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
|
{`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
|
||||||
{`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
|
{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
|
||||||
{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
|
{`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
|
||||||
{`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil},
|
{`\b(string|bool|number)\b`, KeywordType, nil},
|
||||||
{`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil},
|
{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil},
|
||||||
{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
|
{`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
|
||||||
{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
|
{`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
|
||||||
{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},
|
{`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},
|
||||||
|
|
|
@ -15,36 +15,32 @@ var YAML = internal.Register(MustNewLexer(
|
||||||
Rules{
|
Rules{
|
||||||
"root": {
|
"root": {
|
||||||
Include("whitespace"),
|
Include("whitespace"),
|
||||||
{`^---`, NameNamespace, nil},
|
{`^---`, Text, nil},
|
||||||
{`^\.\.\.`, NameNamespace, nil},
|
|
||||||
{`[\n?]?\s*- `, Text, nil},
|
{`[\n?]?\s*- `, Text, nil},
|
||||||
{`#.*$`, Comment, nil},
|
{`#.*$`, Comment, nil},
|
||||||
{`!![^\s]+`, CommentPreproc, nil},
|
{`!![^\s]+`, CommentPreproc, nil},
|
||||||
{`&[^\s]+`, CommentPreproc, nil},
|
{`&[^\s]+`, CommentPreproc, nil},
|
||||||
{`\*[^\s]+`, CommentPreproc, nil},
|
{`\*[^\s]+`, CommentPreproc, nil},
|
||||||
{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
|
{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
|
||||||
|
{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
|
||||||
Include("key"),
|
Include("key"),
|
||||||
Include("value"),
|
Include("value"),
|
||||||
{`[?:,\[\]]`, Punctuation, nil},
|
{`[?:,\[\]]`, Punctuation, nil},
|
||||||
{`.`, Text, nil},
|
{`.`, Text, nil},
|
||||||
},
|
},
|
||||||
"value": {
|
"value": {
|
||||||
{`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil},
|
{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil},
|
||||||
{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null",
|
|
||||||
"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO",
|
|
||||||
"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil},
|
|
||||||
{`"(?:\\.|[^"])*"`, StringDouble, nil},
|
{`"(?:\\.|[^"])*"`, StringDouble, nil},
|
||||||
{`'(?:\\.|[^'])*'`, StringSingle, nil},
|
{`'(?:\\.|[^'])*'`, StringSingle, nil},
|
||||||
{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
|
{`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
|
||||||
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
|
{`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
|
||||||
{`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil},
|
{`\b[\w]+\b`, Text, nil},
|
||||||
{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil},
|
|
||||||
},
|
},
|
||||||
"key": {
|
"key": {
|
||||||
{`"[^"\n].*": `, NameTag, nil},
|
{`"[^"\n].*": `, Keyword, nil},
|
||||||
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil},
|
{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil},
|
||||||
{`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil},
|
{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil},
|
||||||
{`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil},
|
{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil},
|
||||||
},
|
},
|
||||||
"whitespace": {
|
"whitespace": {
|
||||||
{`\s+`, Whitespace, nil},
|
{`\s+`, Whitespace, nil},
|
||||||
|
|
|
@ -1,54 +0,0 @@
|
||||||
package z
|
|
||||||
|
|
||||||
import (
|
|
||||||
. "github.com/alecthomas/chroma" // nolint
|
|
||||||
"github.com/alecthomas/chroma/lexers/internal"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Zig lexer.
|
|
||||||
var Zig = internal.Register(MustNewLexer(
|
|
||||||
&Config{
|
|
||||||
Name: "Zig",
|
|
||||||
Aliases: []string{"zig"},
|
|
||||||
Filenames: []string{"*.zig"},
|
|
||||||
MimeTypes: []string{"text/zig"},
|
|
||||||
},
|
|
||||||
Rules{
|
|
||||||
"root": {
|
|
||||||
{`\n`, TextWhitespace, nil},
|
|
||||||
{`\s+`, TextWhitespace, nil},
|
|
||||||
{`//.*?\n`, CommentSingle, nil},
|
|
||||||
{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
|
|
||||||
{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
|
|
||||||
{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
|
|
||||||
{Words(``, `\b`, `while`, `for`), Keyword, nil},
|
|
||||||
{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
|
|
||||||
{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
|
|
||||||
{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
|
|
||||||
{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
|
|
||||||
{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
|
|
||||||
{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
|
|
||||||
{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
|
|
||||||
{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
|
|
||||||
{`0b[01]+`, LiteralNumberBin, nil},
|
|
||||||
{`0o[0-7]+`, LiteralNumberOct, nil},
|
|
||||||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
|
|
||||||
{`[0-9]+`, LiteralNumberInteger, nil},
|
|
||||||
{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
|
|
||||||
{`[a-zA-Z_]\w*`, Name, nil},
|
|
||||||
{`\'\\\'\'`, LiteralStringEscape, nil},
|
|
||||||
{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
|
|
||||||
{`\'[^\\\']\'`, LiteralString, nil},
|
|
||||||
{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
|
|
||||||
{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
|
|
||||||
{`c?"`, LiteralString, Push("string")},
|
|
||||||
{`[+%=><|^!?/\-*&~:]`, Operator, nil},
|
|
||||||
{`[{}()\[\],.;]`, Punctuation, nil},
|
|
||||||
},
|
|
||||||
"string": {
|
|
||||||
{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
|
|
||||||
{`[^\\"\n]+`, LiteralString, nil},
|
|
||||||
{`"`, LiteralString, Pop(1)},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
))
|
|
|
@ -6,7 +6,6 @@ import (
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/dlclark/regexp2"
|
"github.com/dlclark/regexp2"
|
||||||
|
@ -161,14 +160,6 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
|
||||||
// Rules maps from state to a sequence of Rules.
|
// Rules maps from state to a sequence of Rules.
|
||||||
type Rules map[string][]Rule
|
type Rules map[string][]Rule
|
||||||
|
|
||||||
// Rename clones rules then a rule.
|
|
||||||
func (r Rules) Rename(old, new string) Rules {
|
|
||||||
r = r.Clone()
|
|
||||||
r[new] = r[old]
|
|
||||||
delete(r, old)
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clone returns a clone of the Rules.
|
// Clone returns a clone of the Rules.
|
||||||
func (r Rules) Clone() Rules {
|
func (r Rules) Clone() Rules {
|
||||||
out := map[string][]Rule{}
|
out := map[string][]Rule{}
|
||||||
|
@ -179,15 +170,6 @@ func (r Rules) Clone() Rules {
|
||||||
return out
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge creates a clone of "r" then merges "rules" into the clone.
|
|
||||||
func (r Rules) Merge(rules Rules) Rules {
|
|
||||||
out := r.Clone()
|
|
||||||
for k, v := range rules.Clone() {
|
|
||||||
out[k] = v
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|
||||||
// MustNewLexer creates a new Lexer or panics.
|
// MustNewLexer creates a new Lexer or panics.
|
||||||
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
|
func MustNewLexer(config *Config, rules Rules) *RegexLexer {
|
||||||
lexer, err := NewLexer(config, rules)
|
lexer, err := NewLexer(config, rules)
|
||||||
|
@ -394,7 +376,6 @@ func (r *RegexLexer) maybeCompile() (err error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
|
return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
|
||||||
}
|
}
|
||||||
rule.Regexp.MatchTimeout = time.Millisecond * 250
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -65,11 +65,11 @@ var examples = []string{
|
||||||
"Mon, 02 Jan 2006 15:04:05 MST",
|
"Mon, 02 Jan 2006 15:04:05 MST",
|
||||||
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
|
"Tue, 11 Jul 2017 16:28:13 +0200 (CEST)",
|
||||||
"Mon, 02 Jan 2006 15:04:05 -0700",
|
"Mon, 02 Jan 2006 15:04:05 -0700",
|
||||||
"Thu, 4 Jan 2018 17:53:36 +0000",
|
|
||||||
"Mon 30 Sep 2018 09:09:09 PM UTC",
|
"Mon 30 Sep 2018 09:09:09 PM UTC",
|
||||||
"Mon Aug 10 15:44:11 UTC+0100 2015",
|
"Mon Aug 10 15:44:11 UTC+0100 2015",
|
||||||
"Thu, 4 Jan 2018 17:53:36 +0000",
|
"Thu, 4 Jan 2018 17:53:36 +0000",
|
||||||
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
|
"Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)",
|
||||||
|
"Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00)",
|
||||||
"September 17, 2012 10:09am",
|
"September 17, 2012 10:09am",
|
||||||
"September 17, 2012 at 10:09am PST-08",
|
"September 17, 2012 at 10:09am PST-08",
|
||||||
"September 17, 2012, 10:10:09",
|
"September 17, 2012, 10:10:09",
|
||||||
|
@ -77,11 +77,15 @@ var examples = []string{
|
||||||
"October 7th, 1970",
|
"October 7th, 1970",
|
||||||
"12 Feb 2006, 19:17",
|
"12 Feb 2006, 19:17",
|
||||||
"12 Feb 2006 19:17",
|
"12 Feb 2006 19:17",
|
||||||
|
"14 May 2019 19:11:40.164",
|
||||||
"7 oct 70",
|
"7 oct 70",
|
||||||
"7 oct 1970",
|
"7 oct 1970",
|
||||||
"03 February 2013",
|
"03 February 2013",
|
||||||
"1 July 2013",
|
"1 July 2013",
|
||||||
"2013-Feb-03",
|
"2013-Feb-03",
|
||||||
|
// dd/Mon/yyy alpha Months
|
||||||
|
"06/Jan/2008:15:04:05 -0700",
|
||||||
|
"06/Jan/2008 15:04:05 -0700",
|
||||||
// mm/dd/yy
|
// mm/dd/yy
|
||||||
"3/31/2014",
|
"3/31/2014",
|
||||||
"03/31/2014",
|
"03/31/2014",
|
||||||
|
@ -123,7 +127,10 @@ var examples = []string{
|
||||||
"2006-01-02T15:04:05+0000",
|
"2006-01-02T15:04:05+0000",
|
||||||
"2009-08-12T22:15:09-07:00",
|
"2009-08-12T22:15:09-07:00",
|
||||||
"2009-08-12T22:15:09",
|
"2009-08-12T22:15:09",
|
||||||
|
"2009-08-12T22:15:09.988",
|
||||||
"2009-08-12T22:15:09Z",
|
"2009-08-12T22:15:09Z",
|
||||||
|
"2017-07-19T03:21:51:897+0100",
|
||||||
|
"2019-05-29T08:41-04", // no seconds, 2 digit TZ offset
|
||||||
// yyyy-mm-dd hh:mm:ss
|
// yyyy-mm-dd hh:mm:ss
|
||||||
"2014-04-26 17:24:37.3186369",
|
"2014-04-26 17:24:37.3186369",
|
||||||
"2012-08-03 18:31:59.257000000",
|
"2012-08-03 18:31:59.257000000",
|
||||||
|
@ -147,6 +154,8 @@ var examples = []string{
|
||||||
"2014-04",
|
"2014-04",
|
||||||
"2014",
|
"2014",
|
||||||
"2014-05-11 08:20:13,787",
|
"2014-05-11 08:20:13,787",
|
||||||
|
// yyyy-mm-dd-07:00
|
||||||
|
"2020-07-20+08:00",
|
||||||
// mm.dd.yy
|
// mm.dd.yy
|
||||||
"3.31.2014",
|
"3.31.2014",
|
||||||
"03.31.2014",
|
"03.31.2014",
|
||||||
|
@ -156,6 +165,9 @@ var examples = []string{
|
||||||
// yyyymmdd and similar
|
// yyyymmdd and similar
|
||||||
"20140601",
|
"20140601",
|
||||||
"20140722105203",
|
"20140722105203",
|
||||||
|
// yymmdd hh:mm:yy mysql log
|
||||||
|
// 080313 05:21:55 mysqld started
|
||||||
|
"171113 14:14:20",
|
||||||
// unix seconds, ms, micro, nano
|
// unix seconds, ms, micro, nano
|
||||||
"1332151919",
|
"1332151919",
|
||||||
"1384216367189",
|
"1384216367189",
|
||||||
|
@ -214,6 +226,7 @@ func main() {
|
||||||
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
|
| Mon Aug 10 15:44:11 UTC+0100 2015 | 2015-08-10 15:44:11 +0000 UTC |
|
||||||
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
|
| Thu, 4 Jan 2018 17:53:36 +0000 | 2018-01-04 17:53:36 +0000 UTC |
|
||||||
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
|
| Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time) | 2015-07-03 18:04:07 +0100 GMT |
|
||||||
|
| Sun, 3 Jan 2021 00:12:23 +0800 (GMT+08:00) | 2021-01-03 00:12:23 +0800 +0800 |
|
||||||
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
|
| September 17, 2012 10:09am | 2012-09-17 10:09:00 +0000 UTC |
|
||||||
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
|
| September 17, 2012 at 10:09am PST-08 | 2012-09-17 10:09:00 -0800 PST |
|
||||||
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
|
| September 17, 2012, 10:10:09 | 2012-09-17 10:10:09 +0000 UTC |
|
||||||
|
@ -221,11 +234,14 @@ func main() {
|
||||||
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
| October 7th, 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||||
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
| 12 Feb 2006, 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||||
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
| 12 Feb 2006 19:17 | 2006-02-12 19:17:00 +0000 UTC |
|
||||||
|
| 14 May 2019 19:11:40.164 | 2019-05-14 19:11:40.164 +0000 UTC |
|
||||||
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
|
| 7 oct 70 | 1970-10-07 00:00:00 +0000 UTC |
|
||||||
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
| 7 oct 1970 | 1970-10-07 00:00:00 +0000 UTC |
|
||||||
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
|
| 03 February 2013 | 2013-02-03 00:00:00 +0000 UTC |
|
||||||
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
|
| 1 July 2013 | 2013-07-01 00:00:00 +0000 UTC |
|
||||||
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
|
| 2013-Feb-03 | 2013-02-03 00:00:00 +0000 UTC |
|
||||||
|
| 06/Jan/2008:15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
|
||||||
|
| 06/Jan/2008 15:04:05 -0700 | 2008-01-06 15:04:05 -0700 -0700 |
|
||||||
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
| 3/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
| 03/31/2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
|
| 08/21/71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||||
|
@ -250,11 +266,22 @@ func main() {
|
||||||
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
| 2014/4/02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||||
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
| 2012/03/19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||||
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
| 2012/03/19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||||
|
| 2014:3:31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
|
| 2014:03:31 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
|
| 2014:4:8 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||||
|
| 2014:04:08 22:05 | 2014-04-08 22:05:00 +0000 UTC |
|
||||||
|
| 2014:04:2 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||||
|
| 2014:4:02 03:00:51 | 2014-04-02 03:00:51 +0000 UTC |
|
||||||
|
| 2012:03:19 10:11:59 | 2012-03-19 10:11:59 +0000 UTC |
|
||||||
|
| 2012:03:19 10:11:59.3186369 | 2012-03-19 10:11:59.3186369 +0000 UTC |
|
||||||
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
|
| 2014年04月08日 | 2014-04-08 00:00:00 +0000 UTC |
|
||||||
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
|
| 2006-01-02T15:04:05+0000 | 2006-01-02 15:04:05 +0000 UTC |
|
||||||
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
|
| 2009-08-12T22:15:09-07:00 | 2009-08-12 22:15:09 -0700 -0700 |
|
||||||
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
|
| 2009-08-12T22:15:09 | 2009-08-12 22:15:09 +0000 UTC |
|
||||||
|
| 2009-08-12T22:15:09.988 | 2009-08-12 22:15:09.988 +0000 UTC |
|
||||||
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
|
| 2009-08-12T22:15:09Z | 2009-08-12 22:15:09 +0000 UTC |
|
||||||
|
| 2017-07-19T03:21:51:897+0100 | 2017-07-19 03:21:51.897 +0100 +0100 |
|
||||||
|
| 2019-05-29T08:41-04 | 2019-05-29 08:41:00 -0400 -0400 |
|
||||||
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
|
| 2014-04-26 17:24:37.3186369 | 2014-04-26 17:24:37.3186369 +0000 UTC |
|
||||||
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
|
| 2012-08-03 18:31:59.257000000 | 2012-08-03 18:31:59.257 +0000 UTC |
|
||||||
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
|
| 2014-04-26 17:24:37.123 | 2014-04-26 17:24:37.123 +0000 UTC |
|
||||||
|
@ -277,6 +304,7 @@ func main() {
|
||||||
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
|
| 2014-04 | 2014-04-01 00:00:00 +0000 UTC |
|
||||||
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
|
| 2014 | 2014-01-01 00:00:00 +0000 UTC |
|
||||||
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
|
| 2014-05-11 08:20:13,787 | 2014-05-11 08:20:13.787 +0000 UTC |
|
||||||
|
| 2020-07-20+08:00 | 2020-07-20 00:00:00 +0800 +0800 |
|
||||||
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
| 3.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
| 03.31.2014 | 2014-03-31 00:00:00 +0000 UTC |
|
||||||
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
|
| 08.21.71 | 1971-08-21 00:00:00 +0000 UTC |
|
||||||
|
@ -284,6 +312,7 @@ func main() {
|
||||||
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
|
| 2014.03.30 | 2014-03-30 00:00:00 +0000 UTC |
|
||||||
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
|
| 20140601 | 2014-06-01 00:00:00 +0000 UTC |
|
||||||
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
|
| 20140722105203 | 2014-07-22 10:52:03 +0000 UTC |
|
||||||
|
| 171113 14:14:20 | 2017-11-13 14:14:20 +0000 UTC |
|
||||||
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
|
| 1332151919 | 2012-03-19 10:11:59 +0000 UTC |
|
||||||
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
|
| 1384216367189 | 2013-11-12 00:32:47.189 +0000 UTC |
|
||||||
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
|
| 1384216367111222 | 2013-11-12 00:32:47.111222 +0000 UTC |
|
||||||
|
|
|
@ -3,7 +3,7 @@ module github.com/araddon/dateparse
|
||||||
go 1.12
|
go 1.12
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/mattn/go-runewidth v0.0.9 // indirect
|
github.com/mattn/go-runewidth v0.0.10 // indirect
|
||||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4
|
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4
|
||||||
github.com/stretchr/testify v1.6.1
|
github.com/stretchr/testify v1.7.0
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,14 +1,17 @@
|
||||||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
|
||||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
|
||||||
|
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4 h1:8qmTC5ByIXO3GP/IzBkxcZ/99VITvnIETDhdFz/om7A=
|
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4 h1:8qmTC5ByIXO3GP/IzBkxcZ/99VITvnIETDhdFz/om7A=
|
||||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
||||||
|
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
|
|
|
@ -55,37 +55,41 @@ type timeState uint8
|
||||||
const (
|
const (
|
||||||
dateStart dateState = iota // 0
|
dateStart dateState = iota // 0
|
||||||
dateDigit
|
dateDigit
|
||||||
|
dateDigitSt
|
||||||
dateYearDash
|
dateYearDash
|
||||||
dateYearDashAlphaDash
|
dateYearDashAlphaDash
|
||||||
dateYearDashDash
|
dateYearDashDash
|
||||||
dateYearDashDashWs // 5
|
dateYearDashDashWs // 5
|
||||||
dateYearDashDashT
|
dateYearDashDashT
|
||||||
|
dateYearDashDashOffset
|
||||||
dateDigitDash
|
dateDigitDash
|
||||||
dateDigitDashAlpha
|
dateDigitDashAlpha
|
||||||
dateDigitDashAlphaDash
|
dateDigitDashAlphaDash // 10
|
||||||
dateDigitDot // 10
|
dateDigitDot
|
||||||
dateDigitDotDot
|
dateDigitDotDot
|
||||||
dateDigitSlash
|
dateDigitSlash
|
||||||
|
dateDigitYearSlash
|
||||||
|
dateDigitSlashAlpha // 15
|
||||||
dateDigitColon
|
dateDigitColon
|
||||||
dateDigitChineseYear
|
dateDigitChineseYear
|
||||||
dateDigitChineseYearWs // 15
|
dateDigitChineseYearWs
|
||||||
dateDigitWs
|
dateDigitWs
|
||||||
dateDigitWsMoYear
|
dateDigitWsMoYear // 20
|
||||||
dateDigitWsMolong
|
dateDigitWsMolong
|
||||||
dateAlpha
|
dateAlpha
|
||||||
dateAlphaWs // 20
|
dateAlphaWs
|
||||||
dateAlphaWsDigit
|
dateAlphaWsDigit
|
||||||
dateAlphaWsDigitMore
|
dateAlphaWsDigitMore // 25
|
||||||
dateAlphaWsDigitMoreWs
|
dateAlphaWsDigitMoreWs
|
||||||
dateAlphaWsDigitMoreWsYear
|
dateAlphaWsDigitMoreWsYear
|
||||||
dateAlphaWsMonth // 25
|
dateAlphaWsMonth
|
||||||
dateAlphaWsDigitYearmaybe
|
dateAlphaWsDigitYearmaybe
|
||||||
dateAlphaWsMonthMore
|
dateAlphaWsMonthMore
|
||||||
dateAlphaWsMonthSuffix
|
dateAlphaWsMonthSuffix
|
||||||
dateAlphaWsMore
|
dateAlphaWsMore
|
||||||
dateAlphaWsAtTime // 30
|
dateAlphaWsAtTime
|
||||||
dateAlphaWsAlpha
|
dateAlphaWsAlpha
|
||||||
dateAlphaWsAlphaYearmaybe
|
dateAlphaWsAlphaYearmaybe // 35
|
||||||
dateAlphaPeriodWsDigit
|
dateAlphaPeriodWsDigit
|
||||||
dateWeekdayComma
|
dateWeekdayComma
|
||||||
dateWeekdayAbbrevComma
|
dateWeekdayAbbrevComma
|
||||||
|
@ -295,17 +299,37 @@ iterRunes:
|
||||||
p.stateDate = dateDigitDash
|
p.stateDate = dateDigitDash
|
||||||
}
|
}
|
||||||
case '/':
|
case '/':
|
||||||
|
// 08/May/2005
|
||||||
// 03/31/2005
|
// 03/31/2005
|
||||||
// 2014/02/24
|
// 2014/02/24
|
||||||
p.stateDate = dateDigitSlash
|
p.stateDate = dateDigitSlash
|
||||||
if i == 4 {
|
if i == 4 {
|
||||||
p.yearlen = i
|
// 2014/02/24 - Year first /
|
||||||
|
p.yearlen = i // since it was start of datestr, i=len
|
||||||
p.moi = i + 1
|
p.moi = i + 1
|
||||||
p.setYear()
|
p.setYear()
|
||||||
|
p.stateDate = dateDigitYearSlash
|
||||||
} else {
|
} else {
|
||||||
|
// Either Ambiguous dd/mm vs mm/dd OR dd/month/yy
|
||||||
|
// 08/May/2005
|
||||||
|
// 03/31/2005
|
||||||
|
// 31/03/2005
|
||||||
|
if i+2 < len(p.datestr) && unicode.IsLetter(rune(datestr[i+1])) {
|
||||||
|
// 08/May/2005
|
||||||
|
p.stateDate = dateDigitSlashAlpha
|
||||||
|
p.moi = i + 1
|
||||||
|
p.daylen = 2
|
||||||
|
p.dayi = 0
|
||||||
|
p.setDay()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Ambiguous dd/mm vs mm/dd the bane of date-parsing
|
||||||
|
// 03/31/2005
|
||||||
|
// 31/03/2005
|
||||||
p.ambiguousMD = true
|
p.ambiguousMD = true
|
||||||
if p.preferMonthFirst {
|
if p.preferMonthFirst {
|
||||||
if p.molen == 0 {
|
if p.molen == 0 {
|
||||||
|
// 03/31/2005
|
||||||
p.molen = i
|
p.molen = i
|
||||||
p.setMonth()
|
p.setMonth()
|
||||||
p.dayi = i + 1
|
p.dayi = i + 1
|
||||||
|
@ -317,6 +341,7 @@ iterRunes:
|
||||||
p.moi = i + 1
|
p.moi = i + 1
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
case ':':
|
case ':':
|
||||||
|
@ -363,9 +388,13 @@ iterRunes:
|
||||||
// 02 Jan 2018 23:59:34
|
// 02 Jan 2018 23:59:34
|
||||||
// 12 Feb 2006, 19:17
|
// 12 Feb 2006, 19:17
|
||||||
// 12 Feb 2006, 19:17:22
|
// 12 Feb 2006, 19:17:22
|
||||||
|
if i == 6 {
|
||||||
|
p.stateDate = dateDigitSt
|
||||||
|
} else {
|
||||||
p.stateDate = dateDigitWs
|
p.stateDate = dateDigitWs
|
||||||
p.dayi = 0
|
p.dayi = 0
|
||||||
p.daylen = i
|
p.daylen = i
|
||||||
|
}
|
||||||
case '年':
|
case '年':
|
||||||
// Chinese Year
|
// Chinese Year
|
||||||
p.stateDate = dateDigitChineseYear
|
p.stateDate = dateDigitChineseYear
|
||||||
|
@ -376,9 +405,15 @@ iterRunes:
|
||||||
}
|
}
|
||||||
p.part1Len = i
|
p.part1Len = i
|
||||||
|
|
||||||
|
case dateDigitSt:
|
||||||
|
p.set(0, "060102")
|
||||||
|
i = i - 1
|
||||||
|
p.stateTime = timeStart
|
||||||
|
break iterRunes
|
||||||
case dateYearDash:
|
case dateYearDash:
|
||||||
// dateYearDashDashT
|
// dateYearDashDashT
|
||||||
// 2006-01-02T15:04:05Z07:00
|
// 2006-01-02T15:04:05Z07:00
|
||||||
|
// 2020-08-17T17:00:00:000+0100
|
||||||
// dateYearDashDashWs
|
// dateYearDashDashWs
|
||||||
// 2013-04-01 22:43:22
|
// 2013-04-01 22:43:22
|
||||||
// dateYearDashAlphaDash
|
// dateYearDashAlphaDash
|
||||||
|
@ -400,7 +435,14 @@ iterRunes:
|
||||||
// 2006-01-02T15:04:05Z07:00
|
// 2006-01-02T15:04:05Z07:00
|
||||||
// dateYearDashDashWs
|
// dateYearDashDashWs
|
||||||
// 2013-04-01 22:43:22
|
// 2013-04-01 22:43:22
|
||||||
|
// dateYearDashDashOffset
|
||||||
|
// 2020-07-20+00:00
|
||||||
switch r {
|
switch r {
|
||||||
|
case '+', '-':
|
||||||
|
p.offseti = i
|
||||||
|
p.daylen = i - p.dayi
|
||||||
|
p.stateDate = dateYearDashDashOffset
|
||||||
|
p.setDay()
|
||||||
case ' ':
|
case ' ':
|
||||||
p.daylen = i - p.dayi
|
p.daylen = i - p.dayi
|
||||||
p.stateDate = dateYearDashDashWs
|
p.stateDate = dateYearDashDashWs
|
||||||
|
@ -414,6 +456,21 @@ iterRunes:
|
||||||
p.setDay()
|
p.setDay()
|
||||||
break iterRunes
|
break iterRunes
|
||||||
}
|
}
|
||||||
|
|
||||||
|
case dateYearDashDashT:
|
||||||
|
// dateYearDashDashT
|
||||||
|
// 2006-01-02T15:04:05Z07:00
|
||||||
|
// 2020-08-17T17:00:00:000+0100
|
||||||
|
|
||||||
|
case dateYearDashDashOffset:
|
||||||
|
// 2020-07-20+00:00
|
||||||
|
switch r {
|
||||||
|
case ':':
|
||||||
|
p.set(p.offseti, "-07:00")
|
||||||
|
// case ' ':
|
||||||
|
// return nil, unknownErr(datestr)
|
||||||
|
}
|
||||||
|
|
||||||
case dateYearDashAlphaDash:
|
case dateYearDashAlphaDash:
|
||||||
// 2013-Feb-03
|
// 2013-Feb-03
|
||||||
switch r {
|
switch r {
|
||||||
|
@ -446,7 +503,7 @@ iterRunes:
|
||||||
case dateDigitDashAlphaDash:
|
case dateDigitDashAlphaDash:
|
||||||
// 13-Feb-03 ambiguous
|
// 13-Feb-03 ambiguous
|
||||||
// 28-Feb-03 ambiguous
|
// 28-Feb-03 ambiguous
|
||||||
// 29-Jun-2016
|
// 29-Jun-2016 dd-month(alpha)-yyyy
|
||||||
switch r {
|
switch r {
|
||||||
case ' ':
|
case ' ':
|
||||||
// we need to find if this was 4 digits, aka year
|
// we need to find if this was 4 digits, aka year
|
||||||
|
@ -476,8 +533,49 @@ iterRunes:
|
||||||
break iterRunes
|
break iterRunes
|
||||||
}
|
}
|
||||||
|
|
||||||
case dateDigitSlash:
|
case dateDigitYearSlash:
|
||||||
// 2014/07/10 06:55:38.156283
|
// 2014/07/10 06:55:38.156283
|
||||||
|
// I honestly don't know if this format ever shows up as yyyy/
|
||||||
|
|
||||||
|
switch r {
|
||||||
|
case ' ', ':':
|
||||||
|
p.stateTime = timeStart
|
||||||
|
if p.daylen == 0 {
|
||||||
|
p.daylen = i - p.dayi
|
||||||
|
p.setDay()
|
||||||
|
}
|
||||||
|
break iterRunes
|
||||||
|
case '/':
|
||||||
|
if p.molen == 0 {
|
||||||
|
p.molen = i - p.moi
|
||||||
|
p.setMonth()
|
||||||
|
p.dayi = i + 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case dateDigitSlashAlpha:
|
||||||
|
// 06/May/2008
|
||||||
|
|
||||||
|
switch r {
|
||||||
|
case '/':
|
||||||
|
// |
|
||||||
|
// 06/May/2008
|
||||||
|
if p.molen == 0 {
|
||||||
|
p.set(p.moi, "Jan")
|
||||||
|
p.yeari = i + 1
|
||||||
|
}
|
||||||
|
// We aren't breaking because we are going to re-use this case
|
||||||
|
// to find where the date starts, and possible time begins
|
||||||
|
case ' ', ':':
|
||||||
|
p.stateTime = timeStart
|
||||||
|
if p.yearlen == 0 {
|
||||||
|
p.yearlen = i - p.yeari
|
||||||
|
p.setYear()
|
||||||
|
}
|
||||||
|
break iterRunes
|
||||||
|
}
|
||||||
|
|
||||||
|
case dateDigitSlash:
|
||||||
// 03/19/2012 10:11:59
|
// 03/19/2012 10:11:59
|
||||||
// 04/2/2014 03:00:37
|
// 04/2/2014 03:00:37
|
||||||
// 3/1/2012 10:11:59
|
// 3/1/2012 10:11:59
|
||||||
|
@ -488,25 +586,9 @@ iterRunes:
|
||||||
// 1/2/06
|
// 1/2/06
|
||||||
|
|
||||||
switch r {
|
switch r {
|
||||||
case ' ':
|
|
||||||
p.stateTime = timeStart
|
|
||||||
if p.yearlen == 0 {
|
|
||||||
p.yearlen = i - p.yeari
|
|
||||||
p.setYear()
|
|
||||||
} else if p.daylen == 0 {
|
|
||||||
p.daylen = i - p.dayi
|
|
||||||
p.setDay()
|
|
||||||
}
|
|
||||||
break iterRunes
|
|
||||||
case '/':
|
case '/':
|
||||||
if p.yearlen > 0 {
|
// This is the 2nd / so now we should know start pts of all of the dd, mm, yy
|
||||||
// 2014/07/10 06:55:38.156283
|
if p.preferMonthFirst {
|
||||||
if p.molen == 0 {
|
|
||||||
p.molen = i - p.moi
|
|
||||||
p.setMonth()
|
|
||||||
p.dayi = i + 1
|
|
||||||
}
|
|
||||||
} else if p.preferMonthFirst {
|
|
||||||
if p.daylen == 0 {
|
if p.daylen == 0 {
|
||||||
p.daylen = i - p.dayi
|
p.daylen = i - p.dayi
|
||||||
p.setDay()
|
p.setDay()
|
||||||
|
@ -519,6 +601,15 @@ iterRunes:
|
||||||
p.yeari = i + 1
|
p.yeari = i + 1
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// Note no break, we are going to pass by and re-enter this dateDigitSlash
|
||||||
|
// and look for ending (space) or not (just date)
|
||||||
|
case ' ':
|
||||||
|
p.stateTime = timeStart
|
||||||
|
if p.yearlen == 0 {
|
||||||
|
p.yearlen = i - p.yeari
|
||||||
|
p.setYear()
|
||||||
|
}
|
||||||
|
break iterRunes
|
||||||
}
|
}
|
||||||
|
|
||||||
case dateDigitColon:
|
case dateDigitColon:
|
||||||
|
@ -718,8 +809,7 @@ iterRunes:
|
||||||
|
|
||||||
case r == ',':
|
case r == ',':
|
||||||
// Mon, 02 Jan 2006
|
// Mon, 02 Jan 2006
|
||||||
// p.moi = 0
|
|
||||||
// p.molen = i
|
|
||||||
if i == 3 {
|
if i == 3 {
|
||||||
p.stateDate = dateWeekdayAbbrevComma
|
p.stateDate = dateWeekdayAbbrevComma
|
||||||
p.set(0, "Mon")
|
p.set(0, "Mon")
|
||||||
|
@ -1039,7 +1129,7 @@ iterRunes:
|
||||||
for ; i < len(datestr); i++ {
|
for ; i < len(datestr); i++ {
|
||||||
r := rune(datestr[i])
|
r := rune(datestr[i])
|
||||||
|
|
||||||
//gou.Debugf("%d %s %d iterTimeRunes %s %s", i, string(r), p.stateTime, p.ds(), p.ts())
|
// gou.Debugf("i=%d r=%s state=%d iterTimeRunes %s %s", i, string(r), p.stateTime, p.ds(), p.ts())
|
||||||
|
|
||||||
switch p.stateTime {
|
switch p.stateTime {
|
||||||
case timeStart:
|
case timeStart:
|
||||||
|
@ -1096,8 +1186,13 @@ iterRunes:
|
||||||
// 22:18+0530
|
// 22:18+0530
|
||||||
p.minlen = i - p.mini
|
p.minlen = i - p.mini
|
||||||
} else {
|
} else {
|
||||||
|
if p.seclen == 0 {
|
||||||
p.seclen = i - p.seci
|
p.seclen = i - p.seci
|
||||||
}
|
}
|
||||||
|
if p.msi > 0 && p.mslen == 0 {
|
||||||
|
p.mslen = i - p.msi
|
||||||
|
}
|
||||||
|
}
|
||||||
p.offseti = i
|
p.offseti = i
|
||||||
case '.':
|
case '.':
|
||||||
p.stateTime = timePeriod
|
p.stateTime = timePeriod
|
||||||
|
@ -1154,6 +1249,19 @@ iterRunes:
|
||||||
} else if p.seci == 0 {
|
} else if p.seci == 0 {
|
||||||
p.seci = i + 1
|
p.seci = i + 1
|
||||||
p.minlen = i - p.mini
|
p.minlen = i - p.mini
|
||||||
|
} else if p.seci > 0 {
|
||||||
|
// 18:31:59:257 ms uses colon, wtf
|
||||||
|
p.seclen = i - p.seci
|
||||||
|
p.set(p.seci, "05")
|
||||||
|
p.msi = i + 1
|
||||||
|
|
||||||
|
// gross, gross, gross. manipulating the datestr is horrible.
|
||||||
|
// https://github.com/araddon/dateparse/issues/117
|
||||||
|
// Could not get the parsing to work using golang time.Parse() without
|
||||||
|
// replacing that colon with period.
|
||||||
|
p.set(i, ".")
|
||||||
|
datestr = datestr[0:i] + "." + datestr[i+1:]
|
||||||
|
p.datestr = datestr
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case timeOffset:
|
case timeOffset:
|
||||||
|
@ -1201,7 +1309,6 @@ iterRunes:
|
||||||
// 17:57:51 MST 2009
|
// 17:57:51 MST 2009
|
||||||
p.tzi = i
|
p.tzi = i
|
||||||
p.stateTime = timeWsAlpha
|
p.stateTime = timeWsAlpha
|
||||||
//break iterTimeRunes
|
|
||||||
} else if unicode.IsDigit(r) {
|
} else if unicode.IsDigit(r) {
|
||||||
// 00:12:00 2008
|
// 00:12:00 2008
|
||||||
p.stateTime = timeWsYear
|
p.stateTime = timeWsYear
|
||||||
|
@ -1231,6 +1338,7 @@ iterRunes:
|
||||||
p.offseti = i
|
p.offseti = i
|
||||||
case ' ':
|
case ' ':
|
||||||
// 17:57:51 MST 2009
|
// 17:57:51 MST 2009
|
||||||
|
// 17:57:51 MST
|
||||||
p.tzlen = i - p.tzi
|
p.tzlen = i - p.tzi
|
||||||
if p.tzlen == 4 {
|
if p.tzlen == 4 {
|
||||||
p.set(p.tzi, " MST")
|
p.set(p.tzi, " MST")
|
||||||
|
@ -1333,7 +1441,7 @@ iterRunes:
|
||||||
p.trimExtra()
|
p.trimExtra()
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
case '+', '-':
|
case '+', '-', '(':
|
||||||
// This really doesn't seem valid, but for some reason when round-tripping a go date
|
// This really doesn't seem valid, but for some reason when round-tripping a go date
|
||||||
// their is an extra +03 printed out. seems like go bug to me, but, parsing anyway.
|
// their is an extra +03 printed out. seems like go bug to me, but, parsing anyway.
|
||||||
// 00:00:00 +0300 +03
|
// 00:00:00 +0300 +03
|
||||||
|
@ -1350,6 +1458,7 @@ iterRunes:
|
||||||
p.setYear()
|
p.setYear()
|
||||||
}
|
}
|
||||||
case unicode.IsLetter(r):
|
case unicode.IsLetter(r):
|
||||||
|
// 15:04:05 -0700 MST
|
||||||
if p.tzi == 0 {
|
if p.tzi == 0 {
|
||||||
p.tzi = i
|
p.tzi = i
|
||||||
}
|
}
|
||||||
|
@ -1535,6 +1644,17 @@ iterRunes:
|
||||||
}
|
}
|
||||||
|
|
||||||
switch p.stateTime {
|
switch p.stateTime {
|
||||||
|
case timeWsAlpha:
|
||||||
|
switch len(p.datestr) - p.tzi {
|
||||||
|
case 3:
|
||||||
|
// 13:31:51.999 +01:00 CET
|
||||||
|
p.set(p.tzi, "MST")
|
||||||
|
case 4:
|
||||||
|
p.set(p.tzi, "MST")
|
||||||
|
p.extra = len(p.datestr) - 1
|
||||||
|
p.trimExtra()
|
||||||
|
}
|
||||||
|
|
||||||
case timeWsAlphaWs:
|
case timeWsAlphaWs:
|
||||||
p.yearlen = i - p.yeari
|
p.yearlen = i - p.yeari
|
||||||
p.setYear()
|
p.setYear()
|
||||||
|
@ -1554,13 +1674,34 @@ iterRunes:
|
||||||
case timePeriod:
|
case timePeriod:
|
||||||
p.mslen = i - p.msi
|
p.mslen = i - p.msi
|
||||||
case timeOffset:
|
case timeOffset:
|
||||||
|
|
||||||
|
switch len(p.datestr) - p.offseti {
|
||||||
|
case 0, 1, 2, 4:
|
||||||
|
return p, fmt.Errorf("TZ offset not recognized %q near %q (must be 2 or 4 digits optional colon)", datestr, string(datestr[p.offseti:]))
|
||||||
|
case 3:
|
||||||
|
// 19:55:00+01
|
||||||
|
p.set(p.offseti, "-07")
|
||||||
|
case 5:
|
||||||
// 19:55:00+0100
|
// 19:55:00+0100
|
||||||
p.set(p.offseti, "-0700")
|
p.set(p.offseti, "-0700")
|
||||||
|
}
|
||||||
|
|
||||||
case timeWsOffset:
|
case timeWsOffset:
|
||||||
p.set(p.offseti, "-0700")
|
p.set(p.offseti, "-0700")
|
||||||
case timeWsOffsetWs:
|
case timeWsOffsetWs:
|
||||||
// 17:57:51 -0700 2009
|
// 17:57:51 -0700 2009
|
||||||
// 00:12:00 +0000 UTC
|
// 00:12:00 +0000 UTC
|
||||||
|
if p.tzi > 0 {
|
||||||
|
switch len(p.datestr) - p.tzi {
|
||||||
|
case 3:
|
||||||
|
// 13:31:51.999 +01:00 CET
|
||||||
|
p.set(p.tzi, "MST")
|
||||||
|
case 4:
|
||||||
|
// 13:31:51.999 +01:00 CEST
|
||||||
|
p.set(p.tzi, "MST ")
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
case timeWsOffsetColon:
|
case timeWsOffsetColon:
|
||||||
// 17:57:51 -07:00
|
// 17:57:51 -07:00
|
||||||
p.set(p.offseti, "-07:00")
|
p.set(p.offseti, "-07:00")
|
||||||
|
@ -1638,6 +1779,9 @@ iterRunes:
|
||||||
p.t = &t
|
p.t = &t
|
||||||
return p, nil
|
return p, nil
|
||||||
}
|
}
|
||||||
|
case dateDigitSt:
|
||||||
|
// 171113 14:14:20
|
||||||
|
return p, nil
|
||||||
|
|
||||||
case dateYearDash:
|
case dateYearDash:
|
||||||
// 2006-01
|
// 2006-01
|
||||||
|
@ -1650,6 +1794,16 @@ iterRunes:
|
||||||
// 2006-01-2
|
// 2006-01-2
|
||||||
return p, nil
|
return p, nil
|
||||||
|
|
||||||
|
case dateYearDashDashOffset:
|
||||||
|
/// 2020-07-20+00:00
|
||||||
|
switch len(p.datestr) - p.offseti {
|
||||||
|
case 5:
|
||||||
|
p.set(p.offseti, "-0700")
|
||||||
|
case 6:
|
||||||
|
p.set(p.offseti, "-07:00")
|
||||||
|
}
|
||||||
|
return p, nil
|
||||||
|
|
||||||
case dateYearDashAlphaDash:
|
case dateYearDashAlphaDash:
|
||||||
// 2013-Feb-03
|
// 2013-Feb-03
|
||||||
// 2013-Feb-3
|
// 2013-Feb-3
|
||||||
|
@ -1757,6 +1911,13 @@ iterRunes:
|
||||||
// 3/1/2014
|
// 3/1/2014
|
||||||
// 10/13/2014
|
// 10/13/2014
|
||||||
// 01/02/2006
|
// 01/02/2006
|
||||||
|
return p, nil
|
||||||
|
|
||||||
|
case dateDigitSlashAlpha:
|
||||||
|
// 03/Jun/2014
|
||||||
|
return p, nil
|
||||||
|
|
||||||
|
case dateDigitYearSlash:
|
||||||
// 2014/10/13
|
// 2014/10/13
|
||||||
return p, nil
|
return p, nil
|
||||||
|
|
||||||
|
@ -2002,10 +2163,12 @@ func (p *parser) parse() (time.Time, error) {
|
||||||
p.format = p.format[p.skip:]
|
p.format = p.format[p.skip:]
|
||||||
p.datestr = p.datestr[p.skip:]
|
p.datestr = p.datestr[p.skip:]
|
||||||
}
|
}
|
||||||
//gou.Debugf("parse %q AS %q", p.datestr, string(p.format))
|
|
||||||
if p.loc == nil {
|
if p.loc == nil {
|
||||||
|
// gou.Debugf("parse layout=%q input=%q \ntx, err := time.Parse(%q, %q)", string(p.format), p.datestr, string(p.format), p.datestr)
|
||||||
return time.Parse(string(p.format), p.datestr)
|
return time.Parse(string(p.format), p.datestr)
|
||||||
}
|
}
|
||||||
|
//gou.Debugf("parse layout=%q input=%q \ntx, err := time.ParseInLocation(%q, %q, %v)", string(p.format), p.datestr, string(p.format), p.datestr, p.loc)
|
||||||
return time.ParseInLocation(string(p.format), p.datestr, p.loc)
|
return time.ParseInLocation(string(p.format), p.datestr, p.loc)
|
||||||
}
|
}
|
||||||
func isDay(alpha string) bool {
|
func isDay(alpha string) bool {
|
||||||
|
|
|
@ -1,60 +0,0 @@
|
||||||
package css
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
// Declaration represents a parsed style property
|
|
||||||
type Declaration struct {
|
|
||||||
Property string
|
|
||||||
Value string
|
|
||||||
Important bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewDeclaration instanciates a new Declaration
|
|
||||||
func NewDeclaration() *Declaration {
|
|
||||||
return &Declaration{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns string representation of the Declaration
|
|
||||||
func (decl *Declaration) String() string {
|
|
||||||
return decl.StringWithImportant(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
// StringWithImportant returns string representation with optional !important part
|
|
||||||
func (decl *Declaration) StringWithImportant(option bool) string {
|
|
||||||
result := fmt.Sprintf("%s: %s", decl.Property, decl.Value)
|
|
||||||
|
|
||||||
if option && decl.Important {
|
|
||||||
result += " !important"
|
|
||||||
}
|
|
||||||
|
|
||||||
result += ";"
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Equal returns true if both Declarations are equals
|
|
||||||
func (decl *Declaration) Equal(other *Declaration) bool {
|
|
||||||
return (decl.Property == other.Property) && (decl.Value == other.Value) && (decl.Important == other.Important)
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
// DeclarationsByProperty
|
|
||||||
//
|
|
||||||
|
|
||||||
// DeclarationsByProperty represents sortable style declarations
|
|
||||||
type DeclarationsByProperty []*Declaration
|
|
||||||
|
|
||||||
// Implements sort.Interface
|
|
||||||
func (declarations DeclarationsByProperty) Len() int {
|
|
||||||
return len(declarations)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Implements sort.Interface
|
|
||||||
func (declarations DeclarationsByProperty) Swap(i, j int) {
|
|
||||||
declarations[i], declarations[j] = declarations[j], declarations[i]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Implements sort.Interface
|
|
||||||
func (declarations DeclarationsByProperty) Less(i, j int) bool {
|
|
||||||
return declarations[i].Property < declarations[j].Property
|
|
||||||
}
|
|
|
@ -1,230 +0,0 @@
|
||||||
package css
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
indentSpace = 2
|
|
||||||
)
|
|
||||||
|
|
||||||
// RuleKind represents a Rule kind
|
|
||||||
type RuleKind int
|
|
||||||
|
|
||||||
// Rule kinds
|
|
||||||
const (
|
|
||||||
QualifiedRule RuleKind = iota
|
|
||||||
AtRule
|
|
||||||
)
|
|
||||||
|
|
||||||
// At Rules than have Rules inside their block instead of Declarations
|
|
||||||
var atRulesWithRulesBlock = []string{
|
|
||||||
"@document", "@font-feature-values", "@keyframes", "@media", "@supports",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rule represents a parsed CSS rule
|
|
||||||
type Rule struct {
|
|
||||||
Kind RuleKind
|
|
||||||
|
|
||||||
// At Rule name (eg: "@media")
|
|
||||||
Name string
|
|
||||||
|
|
||||||
// Raw prelude
|
|
||||||
Prelude string
|
|
||||||
|
|
||||||
// Qualified Rule selectors parsed from prelude
|
|
||||||
Selectors []string
|
|
||||||
|
|
||||||
// Style properties
|
|
||||||
Declarations []*Declaration
|
|
||||||
|
|
||||||
// At Rule embedded rules
|
|
||||||
Rules []*Rule
|
|
||||||
|
|
||||||
// Current rule embedding level
|
|
||||||
EmbedLevel int
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewRule instanciates a new Rule
|
|
||||||
func NewRule(kind RuleKind) *Rule {
|
|
||||||
return &Rule{
|
|
||||||
Kind: kind,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns string representation of rule kind
|
|
||||||
func (kind RuleKind) String() string {
|
|
||||||
switch kind {
|
|
||||||
case QualifiedRule:
|
|
||||||
return "Qualified Rule"
|
|
||||||
case AtRule:
|
|
||||||
return "At Rule"
|
|
||||||
default:
|
|
||||||
return "WAT"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// EmbedsRules returns true if this rule embeds another rules
|
|
||||||
func (rule *Rule) EmbedsRules() bool {
|
|
||||||
if rule.Kind == AtRule {
|
|
||||||
for _, atRuleName := range atRulesWithRulesBlock {
|
|
||||||
if rule.Name == atRuleName {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Equal returns true if both rules are equals
|
|
||||||
func (rule *Rule) Equal(other *Rule) bool {
|
|
||||||
if (rule.Kind != other.Kind) ||
|
|
||||||
(rule.Prelude != other.Prelude) ||
|
|
||||||
(rule.Name != other.Name) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if (len(rule.Selectors) != len(other.Selectors)) ||
|
|
||||||
(len(rule.Declarations) != len(other.Declarations)) ||
|
|
||||||
(len(rule.Rules) != len(other.Rules)) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, sel := range rule.Selectors {
|
|
||||||
if sel != other.Selectors[i] {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, decl := range rule.Declarations {
|
|
||||||
if !decl.Equal(other.Declarations[i]) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, rule := range rule.Rules {
|
|
||||||
if !rule.Equal(other.Rules[i]) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Diff returns a string representation of rules differences
|
|
||||||
func (rule *Rule) Diff(other *Rule) []string {
|
|
||||||
result := []string{}
|
|
||||||
|
|
||||||
if rule.Kind != other.Kind {
|
|
||||||
result = append(result, fmt.Sprintf("Kind: %s | %s", rule.Kind.String(), other.Kind.String()))
|
|
||||||
}
|
|
||||||
|
|
||||||
if rule.Prelude != other.Prelude {
|
|
||||||
result = append(result, fmt.Sprintf("Prelude: \"%s\" | \"%s\"", rule.Prelude, other.Prelude))
|
|
||||||
}
|
|
||||||
|
|
||||||
if rule.Name != other.Name {
|
|
||||||
result = append(result, fmt.Sprintf("Name: \"%s\" | \"%s\"", rule.Name, other.Name))
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(rule.Selectors) != len(other.Selectors) {
|
|
||||||
result = append(result, fmt.Sprintf("Selectors: %v | %v", strings.Join(rule.Selectors, ", "), strings.Join(other.Selectors, ", ")))
|
|
||||||
} else {
|
|
||||||
for i, sel := range rule.Selectors {
|
|
||||||
if sel != other.Selectors[i] {
|
|
||||||
result = append(result, fmt.Sprintf("Selector: \"%s\" | \"%s\"", sel, other.Selectors[i]))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(rule.Declarations) != len(other.Declarations) {
|
|
||||||
result = append(result, fmt.Sprintf("Declarations Nb: %d | %d", len(rule.Declarations), len(other.Declarations)))
|
|
||||||
} else {
|
|
||||||
for i, decl := range rule.Declarations {
|
|
||||||
if !decl.Equal(other.Declarations[i]) {
|
|
||||||
result = append(result, fmt.Sprintf("Declaration: \"%s\" | \"%s\"", decl.String(), other.Declarations[i].String()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(rule.Rules) != len(other.Rules) {
|
|
||||||
result = append(result, fmt.Sprintf("Rules Nb: %d | %d", len(rule.Rules), len(other.Rules)))
|
|
||||||
} else {
|
|
||||||
|
|
||||||
for i, rule := range rule.Rules {
|
|
||||||
if !rule.Equal(other.Rules[i]) {
|
|
||||||
result = append(result, fmt.Sprintf("Rule: \"%s\" | \"%s\"", rule.String(), other.Rules[i].String()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the string representation of a rule
|
|
||||||
func (rule *Rule) String() string {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
if rule.Kind == QualifiedRule {
|
|
||||||
for i, sel := range rule.Selectors {
|
|
||||||
if i != 0 {
|
|
||||||
result += ", "
|
|
||||||
}
|
|
||||||
result += sel
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// AtRule
|
|
||||||
result += fmt.Sprintf("%s", rule.Name)
|
|
||||||
|
|
||||||
if rule.Prelude != "" {
|
|
||||||
if result != "" {
|
|
||||||
result += " "
|
|
||||||
}
|
|
||||||
result += fmt.Sprintf("%s", rule.Prelude)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (len(rule.Declarations) == 0) && (len(rule.Rules) == 0) {
|
|
||||||
result += ";"
|
|
||||||
} else {
|
|
||||||
result += " {\n"
|
|
||||||
|
|
||||||
if rule.EmbedsRules() {
|
|
||||||
for _, subRule := range rule.Rules {
|
|
||||||
result += fmt.Sprintf("%s%s\n", rule.indent(), subRule.String())
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for _, decl := range rule.Declarations {
|
|
||||||
result += fmt.Sprintf("%s%s\n", rule.indent(), decl.String())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result += fmt.Sprintf("%s}", rule.indentEndBlock())
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns identation spaces for declarations and rules
|
|
||||||
func (rule *Rule) indent() string {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
for i := 0; i < ((rule.EmbedLevel + 1) * indentSpace); i++ {
|
|
||||||
result += " "
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns identation spaces for end of block character
|
|
||||||
func (rule *Rule) indentEndBlock() string {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
for i := 0; i < (rule.EmbedLevel * indentSpace); i++ {
|
|
||||||
result += " "
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
|
@ -1,25 +0,0 @@
|
||||||
package css
|
|
||||||
|
|
||||||
// Stylesheet represents a parsed stylesheet
type Stylesheet struct {
	// Rules holds the stylesheet's top-level rules, in source order.
	Rules []*Rule
}
|
|
||||||
|
|
||||||
// NewStylesheet instanciate a new Stylesheet
|
|
||||||
func NewStylesheet() *Stylesheet {
|
|
||||||
return &Stylesheet{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns string representation of the Stylesheet
|
|
||||||
func (sheet *Stylesheet) String() string {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
for _, rule := range sheet.Rules {
|
|
||||||
if result != "" {
|
|
||||||
result += "\n"
|
|
||||||
}
|
|
||||||
result += rule.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
|
@ -1,22 +0,0 @@
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2015 Aymerick JEHANNE
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
|
|
|
@ -1,409 +0,0 @@
|
||||||
package parser
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gorilla/css/scanner"
|
|
||||||
|
|
||||||
"github.com/aymerick/douceur/css"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
importantSuffixRegexp = `(?i)\s*!important\s*$`
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
importantRegexp *regexp.Regexp
|
|
||||||
)
|
|
||||||
|
|
||||||
// Parser represents a CSS parser
type Parser struct {
	scan *scanner.Scanner // Tokenizer

	// Tokens parsed but not consumed yet (lookahead buffer)
	tokens []*scanner.Token

	// Rule embedding level: 0 for top-level rules, incremented for each
	// nested rules block being parsed
	embedLevel int
}
|
|
||||||
|
|
||||||
// init compiles the "!important" suffix matcher once at package load time.
func init() {
	importantRegexp = regexp.MustCompile(importantSuffixRegexp)
}
|
|
||||||
|
|
||||||
// NewParser instanciates a new parser
|
|
||||||
func NewParser(txt string) *Parser {
|
|
||||||
return &Parser{
|
|
||||||
scan: scanner.New(txt),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse parses a whole stylesheet
|
|
||||||
func Parse(text string) (*css.Stylesheet, error) {
|
|
||||||
result, err := NewParser(text).ParseStylesheet()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseDeclarations parses CSS declarations
|
|
||||||
func ParseDeclarations(text string) ([]*css.Declaration, error) {
|
|
||||||
result, err := NewParser(text).ParseDeclarations()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseStylesheet parses a stylesheet
|
|
||||||
func (parser *Parser) ParseStylesheet() (*css.Stylesheet, error) {
|
|
||||||
result := css.NewStylesheet()
|
|
||||||
|
|
||||||
// Parse BOM
|
|
||||||
if _, err := parser.parseBOM(); err != nil {
|
|
||||||
return result, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse list of rules
|
|
||||||
rules, err := parser.ParseRules()
|
|
||||||
if err != nil {
|
|
||||||
return result, err
|
|
||||||
}
|
|
||||||
|
|
||||||
result.Rules = rules
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseRules parses a list of rules
|
|
||||||
func (parser *Parser) ParseRules() ([]*css.Rule, error) {
|
|
||||||
result := []*css.Rule{}
|
|
||||||
|
|
||||||
inBlock := false
|
|
||||||
if parser.tokenChar("{") {
|
|
||||||
// parsing a block of rules
|
|
||||||
inBlock = true
|
|
||||||
parser.embedLevel++
|
|
||||||
|
|
||||||
parser.shiftToken()
|
|
||||||
}
|
|
||||||
|
|
||||||
for parser.tokenParsable() {
|
|
||||||
if parser.tokenIgnorable() {
|
|
||||||
parser.shiftToken()
|
|
||||||
} else if parser.tokenChar("}") {
|
|
||||||
if !inBlock {
|
|
||||||
errMsg := fmt.Sprintf("Unexpected } character: %s", parser.nextToken().String())
|
|
||||||
return result, errors.New(errMsg)
|
|
||||||
}
|
|
||||||
|
|
||||||
parser.shiftToken()
|
|
||||||
parser.embedLevel--
|
|
||||||
|
|
||||||
// finished
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
rule, err := parser.ParseRule()
|
|
||||||
if err != nil {
|
|
||||||
return result, err
|
|
||||||
}
|
|
||||||
|
|
||||||
rule.EmbedLevel = parser.embedLevel
|
|
||||||
result = append(result, rule)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, parser.err()
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseRule parses a rule
|
|
||||||
func (parser *Parser) ParseRule() (*css.Rule, error) {
|
|
||||||
if parser.tokenAtKeyword() {
|
|
||||||
return parser.parseAtRule()
|
|
||||||
}
|
|
||||||
|
|
||||||
return parser.parseQualifiedRule()
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseDeclarations parses a list of declarations
|
|
||||||
func (parser *Parser) ParseDeclarations() ([]*css.Declaration, error) {
|
|
||||||
result := []*css.Declaration{}
|
|
||||||
|
|
||||||
if parser.tokenChar("{") {
|
|
||||||
parser.shiftToken()
|
|
||||||
}
|
|
||||||
|
|
||||||
for parser.tokenParsable() {
|
|
||||||
if parser.tokenIgnorable() {
|
|
||||||
parser.shiftToken()
|
|
||||||
} else if parser.tokenChar("}") {
|
|
||||||
// end of block
|
|
||||||
parser.shiftToken()
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
declaration, err := parser.ParseDeclaration()
|
|
||||||
if err != nil {
|
|
||||||
return result, err
|
|
||||||
}
|
|
||||||
|
|
||||||
result = append(result, declaration)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, parser.err()
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParseDeclaration parses a single declaration ("property: value").
// It consumes tokens up to (and including) a terminating ";", or up to
// (but excluding) a closing "}", which is left for the caller.
func (parser *Parser) ParseDeclaration() (*css.Declaration, error) {
	result := css.NewDeclaration()
	curValue := ""

	for parser.tokenParsable() {
		if parser.tokenChar(":") {
			// everything accumulated before the colon is the property name
			result.Property = strings.TrimSpace(curValue)
			curValue = ""

			parser.shiftToken()
		} else if parser.tokenChar(";") || parser.tokenChar("}") {
			// a terminator before any ":" means there is no property
			if result.Property == "" {
				errMsg := fmt.Sprintf("Unexpected ; character: %s", parser.nextToken().String())
				return result, errors.New(errMsg)
			}

			// a trailing "!important" flag is stripped from the value
			// and recorded on the declaration
			if importantRegexp.MatchString(curValue) {
				result.Important = true
				curValue = importantRegexp.ReplaceAllString(curValue, "")
			}

			result.Value = strings.TrimSpace(curValue)

			// the ";" is consumed here; a "}" is left for the caller
			if parser.tokenChar(";") {
				parser.shiftToken()
			}

			// finished
			break
		} else {
			// accumulate raw token text into the current value
			token := parser.shiftToken()
			curValue += token.Value
		}
	}

	// log.Printf("[parsed] Declaration: %s", result.String())

	return result, parser.err()
}
|
|
||||||
|
|
||||||
// parseAtRule parses an At Rule (eg: "@import" or "@media"): the rule
// name, an optional prelude, then either a terminating ";" or a block
// of nested rules or declarations depending on the rule name.
func (parser *Parser) parseAtRule() (*css.Rule, error) {
	// parse rule name (eg: "@import")
	token := parser.shiftToken()

	result := css.NewRule(css.AtRule)
	result.Name = token.Value

	for parser.tokenParsable() {
		if parser.tokenChar(";") {
			// statement at-rule: no block follows
			parser.shiftToken()

			// finished
			break
		} else if parser.tokenChar("{") {
			if result.EmbedsRules() {
				// parse rules block (eg: "@media")
				rules, err := parser.ParseRules()
				if err != nil {
					return result, err
				}

				result.Rules = rules
			} else {
				// parse declarations block (eg: "@font-face")
				declarations, err := parser.ParseDeclarations()
				if err != nil {
					return result, err
				}

				result.Declarations = declarations
			}

			// finished
			break
		} else {
			// parse prelude (everything between the name and ";"/"{")
			prelude, err := parser.parsePrelude()
			if err != nil {
				return result, err
			}

			result.Prelude = prelude
		}
	}

	// log.Printf("[parsed] Rule: %s", result.String())

	return result, parser.err()
}
|
|
||||||
|
|
||||||
// parseQualifiedRule parses a Qualified Rule: a selector prelude
// followed by a mandatory "{ declarations }" block.
func (parser *Parser) parseQualifiedRule() (*css.Rule, error) {
	result := css.NewRule(css.QualifiedRule)

	for parser.tokenParsable() {
		if parser.tokenChar("{") {
			// a block without any preceding selector is invalid
			if result.Prelude == "" {
				errMsg := fmt.Sprintf("Unexpected { character: %s", parser.nextToken().String())
				return result, errors.New(errMsg)
			}

			// parse declarations block
			declarations, err := parser.ParseDeclarations()
			if err != nil {
				return result, err
			}

			result.Declarations = declarations

			// finished
			break
		} else {
			// parse prelude (the raw selector list text)
			prelude, err := parser.parsePrelude()
			if err != nil {
				return result, err
			}

			result.Prelude = prelude
		}
	}

	// the prelude is a comma-separated selector list; split and trim it
	result.Selectors = strings.Split(result.Prelude, ",")
	for i, sel := range result.Selectors {
		result.Selectors[i] = strings.TrimSpace(sel)
	}

	// log.Printf("[parsed] Rule: %s", result.String())

	return result, parser.err()
}
|
|
||||||
|
|
||||||
// Parse Rule prelude
|
|
||||||
func (parser *Parser) parsePrelude() (string, error) {
|
|
||||||
result := ""
|
|
||||||
|
|
||||||
for parser.tokenParsable() && !parser.tokenEndOfPrelude() {
|
|
||||||
token := parser.shiftToken()
|
|
||||||
result += token.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
result = strings.TrimSpace(result)
|
|
||||||
|
|
||||||
// log.Printf("[parsed] prelude: %s", result)
|
|
||||||
|
|
||||||
return result, parser.err()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse BOM
|
|
||||||
func (parser *Parser) parseBOM() (bool, error) {
|
|
||||||
if parser.nextToken().Type == scanner.TokenBOM {
|
|
||||||
parser.shiftToken()
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return false, parser.err()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns next token without removing it from tokens buffer
|
|
||||||
func (parser *Parser) nextToken() *scanner.Token {
|
|
||||||
if len(parser.tokens) == 0 {
|
|
||||||
// fetch next token
|
|
||||||
nextToken := parser.scan.Next()
|
|
||||||
|
|
||||||
// log.Printf("[token] %s => %v", nextToken.Type.String(), nextToken.Value)
|
|
||||||
|
|
||||||
// queue it
|
|
||||||
parser.tokens = append(parser.tokens, nextToken)
|
|
||||||
}
|
|
||||||
|
|
||||||
return parser.tokens[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns next token and remove it from the tokens buffer
|
|
||||||
func (parser *Parser) shiftToken() *scanner.Token {
|
|
||||||
var result *scanner.Token
|
|
||||||
|
|
||||||
result, parser.tokens = parser.tokens[0], parser.tokens[1:]
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns tokenizer error, or nil if no error
|
|
||||||
func (parser *Parser) err() error {
|
|
||||||
if parser.tokenError() {
|
|
||||||
token := parser.nextToken()
|
|
||||||
return fmt.Errorf("Tokenizer error: %s", token.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is Error
|
|
||||||
func (parser *Parser) tokenError() bool {
|
|
||||||
return parser.nextToken().Type == scanner.TokenError
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is EOF
|
|
||||||
func (parser *Parser) tokenEOF() bool {
|
|
||||||
return parser.nextToken().Type == scanner.TokenEOF
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is a whitespace
|
|
||||||
func (parser *Parser) tokenWS() bool {
|
|
||||||
return parser.nextToken().Type == scanner.TokenS
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is a comment
|
|
||||||
func (parser *Parser) tokenComment() bool {
|
|
||||||
return parser.nextToken().Type == scanner.TokenComment
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is a CDO or a CDC
|
|
||||||
func (parser *Parser) tokenCDOorCDC() bool {
|
|
||||||
switch parser.nextToken().Type {
|
|
||||||
case scanner.TokenCDO, scanner.TokenCDC:
|
|
||||||
return true
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is ignorable
|
|
||||||
func (parser *Parser) tokenIgnorable() bool {
|
|
||||||
return parser.tokenWS() || parser.tokenComment() || parser.tokenCDOorCDC()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is parsable
|
|
||||||
func (parser *Parser) tokenParsable() bool {
|
|
||||||
return !parser.tokenEOF() && !parser.tokenError()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is an At Rule keyword
|
|
||||||
func (parser *Parser) tokenAtKeyword() bool {
|
|
||||||
return parser.nextToken().Type == scanner.TokenAtKeyword
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token is given character
|
|
||||||
func (parser *Parser) tokenChar(value string) bool {
|
|
||||||
token := parser.nextToken()
|
|
||||||
return (token.Type == scanner.TokenChar) && (token.Value == value)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if next token marks the end of a prelude
|
|
||||||
func (parser *Parser) tokenEndOfPrelude() bool {
|
|
||||||
return parser.tokenChar(";") || parser.tokenChar("{")
|
|
||||||
}
|
|
|
@ -1,5 +1,5 @@
|
||||||
language: go
|
language: go
|
||||||
|
|
||||||
go:
|
go:
|
||||||
- 1.9
|
- 1.5
|
||||||
- tip
|
- tip
|
|
@ -43,8 +43,8 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
|
||||||
| Category | regexp | regexp2 |
|
| Category | regexp | regexp2 |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field |
|
| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field |
|
||||||
| Python-style capture groups `(?P<name>re)` | yes | no (yes in RE2 compat mode) |
|
| Python-style capture groups `(P<name>re)` | yes | no |
|
||||||
| .NET-style capture groups `(?<name>re)` or `(?'name're)` | no | yes |
|
| .NET-style capture groups `(<name>re)` or `('name're)` | no | yes |
|
||||||
| comments `(?#comment)` | no | yes |
|
| comments `(?#comment)` | no | yes |
|
||||||
| branch numbering reset `(?\|a\|b)` | no | no |
|
| branch numbering reset `(?\|a\|b)` | no | no |
|
||||||
| possessive match `(?>re)` | no | yes |
|
| possessive match `(?>re)` | no | yes |
|
||||||
|
@ -54,14 +54,13 @@ The __last__ capture is embedded in each group, so `g.String()` will return the
|
||||||
| negative lookbehind `(?<!re)` | no | yes |
|
| negative lookbehind `(?<!re)` | no | yes |
|
||||||
| back reference `\1` | no | yes |
|
| back reference `\1` | no | yes |
|
||||||
| named back reference `\k'name'` | no | yes |
|
| named back reference `\k'name'` | no | yes |
|
||||||
| named ascii character class `[[:foo:]]`| yes | no (yes in RE2 compat mode) |
|
| named ascii character class `[[:foo:]]`| yes | no |
|
||||||
| conditionals `(?(expr)yes\|no)` | no | yes |
|
| conditionals `((expr)yes\|no)` | no | yes |
|
||||||
|
|
||||||
## RE2 compatibility mode
|
## RE2 compatibility mode
|
||||||
The default behavior of `regexp2` is to match the .NET regexp engine, however the `RE2` option is provided to change the parsing to increase compatibility with RE2. Using the `RE2` option when compiling a regexp will not take away any features, but will change the following behaviors:
|
The default behavior of `regexp2` is to match the .NET regexp engine, however the `RE2` option is provided to change the parsing to increase compatibility with RE2. Using the `RE2` option when compiling a regexp will not take away any features, but will change the following behaviors:
|
||||||
* add support for named ascii character classes (e.g. `[[:foo:]]`)
|
* add support for named ascii character classes (e.g. `[[:foo:]]`)
|
||||||
* add support for python-style capture groups (e.g. `(P<name>re)`)
|
* add support for python-style capture groups (e.g. `(P<name>re)`)
|
||||||
* change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24))
|
|
||||||
|
|
||||||
```go
|
```go
|
||||||
re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
|
re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2)
|
||||||
|
|
|
@ -235,14 +235,17 @@ func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) {
|
||||||
ret[i] = r
|
ret[i] = r
|
||||||
i++
|
i++
|
||||||
}
|
}
|
||||||
if startAt == len(s) {
|
|
||||||
runeIdx = i
|
|
||||||
}
|
|
||||||
return ret[:i], runeIdx
|
return ret[:i], runeIdx
|
||||||
}
|
}
|
||||||
|
|
||||||
func getRunes(s string) []rune {
|
func getRunes(s string) []rune {
|
||||||
return []rune(s)
|
ret := make([]rune, len(s))
|
||||||
|
i := 0
|
||||||
|
for _, r := range s {
|
||||||
|
ret[i] = r
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
return ret[:i]
|
||||||
}
|
}
|
||||||
|
|
||||||
// MatchRunes return true if the runes matches the regex
|
// MatchRunes return true if the runes matches the regex
|
||||||
|
|
|
@ -566,22 +566,9 @@ func (r *runner) execute() error {
|
||||||
continue
|
continue
|
||||||
|
|
||||||
case syntax.EndZ:
|
case syntax.EndZ:
|
||||||
rchars := r.rightchars()
|
if r.rightchars() > 1 || r.rightchars() == 1 && r.charAt(r.textPos()) != '\n' {
|
||||||
if rchars > 1 {
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
// RE2 and EcmaScript define $ as "asserts position at the end of the string"
|
|
||||||
// PCRE/.NET adds "or before the line terminator right at the end of the string (if any)"
|
|
||||||
if (r.re.options & (RE2 | ECMAScript)) != 0 {
|
|
||||||
// RE2/Ecmascript mode
|
|
||||||
if rchars > 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
} else if rchars == 1 && r.charAt(r.textPos()) != '\n' {
|
|
||||||
// "regular" mode
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
r.advance(0)
|
r.advance(0)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -951,8 +938,8 @@ func (r *runner) advance(i int) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *runner) goTo(newpos int) {
|
func (r *runner) goTo(newpos int) {
|
||||||
// when branching backward or in place, ensure storage
|
// when branching backward, ensure storage
|
||||||
if newpos <= r.codepos {
|
if newpos < r.codepos {
|
||||||
r.ensureStorage()
|
r.ensureStorage()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1250,10 +1250,10 @@ func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.isCaptureSlot(capnum) {
|
if p.useOptionE() || p.isCaptureSlot(capnum) {
|
||||||
return newRegexNodeM(ntRef, p.options, capnum), nil
|
return newRegexNodeM(ntRef, p.options, capnum), nil
|
||||||
}
|
}
|
||||||
if capnum <= 9 && !p.useOptionE() {
|
if capnum <= 9 {
|
||||||
return nil, p.getErr(ErrUndefinedBackRef, capnum)
|
return nil, p.getErr(ErrUndefinedBackRef, capnum)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1648,7 +1648,7 @@ func (p *parser) scanOptions() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Scans \ code for escape codes that map to single unicode chars.
|
// Scans \ code for escape codes that map to single unicode chars.
|
||||||
func (p *parser) scanCharEscape() (r rune, err error) {
|
func (p *parser) scanCharEscape() (rune, error) {
|
||||||
|
|
||||||
ch := p.moveRightGetChar()
|
ch := p.moveRightGetChar()
|
||||||
|
|
||||||
|
@ -1657,22 +1657,16 @@ func (p *parser) scanCharEscape() (r rune, err error) {
|
||||||
return p.scanOctal(), nil
|
return p.scanOctal(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
pos := p.textpos()
|
|
||||||
|
|
||||||
switch ch {
|
switch ch {
|
||||||
case 'x':
|
case 'x':
|
||||||
// support for \x{HEX} syntax from Perl and PCRE
|
// support for \x{HEX} syntax from Perl and PCRE
|
||||||
if p.charsRight() > 0 && p.rightChar(0) == '{' {
|
if p.charsRight() > 0 && p.rightChar(0) == '{' {
|
||||||
if p.useOptionE() {
|
|
||||||
return ch, nil
|
|
||||||
}
|
|
||||||
p.moveRight(1)
|
p.moveRight(1)
|
||||||
return p.scanHexUntilBrace()
|
return p.scanHexUntilBrace()
|
||||||
} else {
|
|
||||||
r, err = p.scanHex(2)
|
|
||||||
}
|
}
|
||||||
|
return p.scanHex(2)
|
||||||
case 'u':
|
case 'u':
|
||||||
r, err = p.scanHex(4)
|
return p.scanHex(4)
|
||||||
case 'a':
|
case 'a':
|
||||||
return '\u0007', nil
|
return '\u0007', nil
|
||||||
case 'b':
|
case 'b':
|
||||||
|
@ -1690,18 +1684,13 @@ func (p *parser) scanCharEscape() (r rune, err error) {
|
||||||
case 'v':
|
case 'v':
|
||||||
return '\u000B', nil
|
return '\u000B', nil
|
||||||
case 'c':
|
case 'c':
|
||||||
r, err = p.scanControl()
|
return p.scanControl()
|
||||||
default:
|
default:
|
||||||
if !p.useOptionE() && IsWordChar(ch) {
|
if !p.useOptionE() && IsWordChar(ch) {
|
||||||
return 0, p.getErr(ErrUnrecognizedEscape, string(ch))
|
return 0, p.getErr(ErrUnrecognizedEscape, string(ch))
|
||||||
}
|
}
|
||||||
return ch, nil
|
return ch, nil
|
||||||
}
|
}
|
||||||
if err != nil && p.useOptionE() {
|
|
||||||
p.textto(pos)
|
|
||||||
return ch, nil
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Grabs and converts an ascii control character
|
// Grabs and converts an ascii control character
|
||||||
|
@ -1818,12 +1807,12 @@ func (p *parser) scanOctal() rune {
|
||||||
//we know the first char is good because the caller had to check
|
//we know the first char is good because the caller had to check
|
||||||
i := 0
|
i := 0
|
||||||
d := int(p.rightChar(0) - '0')
|
d := int(p.rightChar(0) - '0')
|
||||||
for c > 0 && d <= 7 && d >= 0 {
|
for c > 0 && d <= 7 {
|
||||||
if i >= 0x20 && p.useOptionE() {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
i *= 8
|
i *= 8
|
||||||
i += d
|
i += d
|
||||||
|
if p.useOptionE() && i >= 0x20 {
|
||||||
|
break
|
||||||
|
}
|
||||||
c--
|
c--
|
||||||
|
|
||||||
p.moveRight(1)
|
p.moveRight(1)
|
||||||
|
|
|
@ -1,27 +0,0 @@
|
||||||
Copyright (c) 2013, Gorilla web toolkit
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
Redistributions in binary form must reproduce the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer in the documentation and/or
|
|
||||||
other materials provided with the distribution.
|
|
||||||
|
|
||||||
Neither the name of the {organization} nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
|
||||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
||||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
||||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
|
||||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
@ -1,33 +0,0 @@
|
||||||
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
/*
|
|
||||||
Package gorilla/css/scanner generates tokens for a CSS3 input.
|
|
||||||
|
|
||||||
It follows the CSS3 specification located at:
|
|
||||||
|
|
||||||
http://www.w3.org/TR/css3-syntax/
|
|
||||||
|
|
||||||
To use it, create a new scanner for a given CSS string and call Next() until
|
|
||||||
the token returned has type TokenEOF or TokenError:
|
|
||||||
|
|
||||||
s := scanner.New(myCSS)
|
|
||||||
for {
|
|
||||||
token := s.Next()
|
|
||||||
if token.Type == scanner.TokenEOF || token.Type == scanner.TokenError {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
// Do something with the token...
|
|
||||||
}
|
|
||||||
|
|
||||||
Following the CSS3 specification, an error can only occur when the scanner
|
|
||||||
finds an unclosed quote or unclosed comment. In these cases the text becomes
|
|
||||||
"untokenizable". Everything else is tokenizable and it is up to a parser
|
|
||||||
to make sense of the token stream (or ignore nonsensical token sequences).
|
|
||||||
|
|
||||||
Note: the scanner doesn't perform lexical analysis or, in other words, it
|
|
||||||
doesn't care about the token context. It is intended to be used by a
|
|
||||||
lexer or parser.
|
|
||||||
*/
|
|
||||||
package scanner
|
|
|
@ -1,356 +0,0 @@
|
||||||
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package scanner
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// tokenType identifies the type of lexical tokens.
type tokenType int

// String returns a string representation of the token type.
func (t tokenType) String() string {
	// tokenNames has an entry for every declared tokenType, so the
	// lookup never misses for valid values.
	return tokenNames[t]
}
|
|
||||||
|
|
||||||
// Token represents a token and the corresponding string.
type Token struct {
	Type   tokenType // lexical category of the token
	Value  string    // raw text matched for this token
	Line   int       // line of the token in the input
	Column int       // column of the token in the input
}
|
||||||
|
|
||||||
// String returns a string representation of the token.
|
|
||||||
func (t *Token) String() string {
|
|
||||||
if len(t.Value) > 10 {
|
|
||||||
return fmt.Sprintf("%s (line: %d, column: %d): %.10q...",
|
|
||||||
t.Type, t.Line, t.Column, t.Value)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%s (line: %d, column: %d): %q",
|
|
||||||
t.Type, t.Line, t.Column, t.Value)
|
|
||||||
}
|
|
||||||
|
|
||||||
// All tokens -----------------------------------------------------------------
|
|
||||||
|
|
||||||
// The complete list of tokens in CSS3.
|
|
||||||
const (
|
|
||||||
// Scanner flags.
|
|
||||||
TokenError tokenType = iota
|
|
||||||
TokenEOF
|
|
||||||
// From now on, only tokens from the CSS specification.
|
|
||||||
TokenIdent
|
|
||||||
TokenAtKeyword
|
|
||||||
TokenString
|
|
||||||
TokenHash
|
|
||||||
TokenNumber
|
|
||||||
TokenPercentage
|
|
||||||
TokenDimension
|
|
||||||
TokenURI
|
|
||||||
TokenUnicodeRange
|
|
||||||
TokenCDO
|
|
||||||
TokenCDC
|
|
||||||
TokenS
|
|
||||||
TokenComment
|
|
||||||
TokenFunction
|
|
||||||
TokenIncludes
|
|
||||||
TokenDashMatch
|
|
||||||
TokenPrefixMatch
|
|
||||||
TokenSuffixMatch
|
|
||||||
TokenSubstringMatch
|
|
||||||
TokenChar
|
|
||||||
TokenBOM
|
|
||||||
)
|
|
||||||
|
|
||||||
// tokenNames maps tokenType's to their names. Used for conversion to string.
|
|
||||||
var tokenNames = map[tokenType]string{
|
|
||||||
TokenError: "error",
|
|
||||||
TokenEOF: "EOF",
|
|
||||||
TokenIdent: "IDENT",
|
|
||||||
TokenAtKeyword: "ATKEYWORD",
|
|
||||||
TokenString: "STRING",
|
|
||||||
TokenHash: "HASH",
|
|
||||||
TokenNumber: "NUMBER",
|
|
||||||
TokenPercentage: "PERCENTAGE",
|
|
||||||
TokenDimension: "DIMENSION",
|
|
||||||
TokenURI: "URI",
|
|
||||||
TokenUnicodeRange: "UNICODE-RANGE",
|
|
||||||
TokenCDO: "CDO",
|
|
||||||
TokenCDC: "CDC",
|
|
||||||
TokenS: "S",
|
|
||||||
TokenComment: "COMMENT",
|
|
||||||
TokenFunction: "FUNCTION",
|
|
||||||
TokenIncludes: "INCLUDES",
|
|
||||||
TokenDashMatch: "DASHMATCH",
|
|
||||||
TokenPrefixMatch: "PREFIXMATCH",
|
|
||||||
TokenSuffixMatch: "SUFFIXMATCH",
|
|
||||||
TokenSubstringMatch: "SUBSTRINGMATCH",
|
|
||||||
TokenChar: "CHAR",
|
|
||||||
TokenBOM: "BOM",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Macros and productions -----------------------------------------------------
|
|
||||||
// http://www.w3.org/TR/css3-syntax/#tokenization
|
|
||||||
|
|
||||||
var macroRegexp = regexp.MustCompile(`\{[a-z]+\}`)
|
|
||||||
|
|
||||||
// macros maps macro names to patterns to be expanded.
|
|
||||||
var macros = map[string]string{
|
|
||||||
// must be escaped: `\.+*?()|[]{}^$`
|
|
||||||
"ident": `-?{nmstart}{nmchar}*`,
|
|
||||||
"name": `{nmchar}+`,
|
|
||||||
"nmstart": `[a-zA-Z_]|{nonascii}|{escape}`,
|
|
||||||
"nonascii": "[\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]",
|
|
||||||
"unicode": `\\[0-9a-fA-F]{1,6}{wc}?`,
|
|
||||||
"escape": "{unicode}|\\\\[\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]",
|
|
||||||
"nmchar": `[a-zA-Z0-9_-]|{nonascii}|{escape}`,
|
|
||||||
"num": `[0-9]*\.[0-9]+|[0-9]+`,
|
|
||||||
"string": `"(?:{stringchar}|')*"|'(?:{stringchar}|")*'`,
|
|
||||||
"stringchar": `{urlchar}|[ ]|\\{nl}`,
|
|
||||||
"nl": `[\n\r\f]|\r\n`,
|
|
||||||
"w": `{wc}*`,
|
|
||||||
"wc": `[\t\n\f\r ]`,
|
|
||||||
|
|
||||||
// urlchar should accept [(ascii characters minus those that need escaping)|{nonascii}|{escape}]
|
|
||||||
// ASCII characters range = `[\u0020-\u007e]`
|
|
||||||
// Skip space \u0020 = `[\u0021-\u007e]`
|
|
||||||
// Skip quotation mark \0022 = `[\u0021\u0023-\u007e]`
|
|
||||||
// Skip apostrophe \u0027 = `[\u0021\u0023-\u0026\u0028-\u007e]`
|
|
||||||
// Skip reverse solidus \u005c = `[\u0021\u0023-\u0026\u0028-\u005b\u005d\u007e]`
|
|
||||||
// Finally, the left square bracket (\u005b) and right (\u005d) needs escaping themselves
|
|
||||||
"urlchar": "[\u0021\u0023-\u0026\u0028-\\\u005b\\\u005d-\u007E]|{nonascii}|{escape}",
|
|
||||||
}
|
|
||||||
|
|
||||||
// productions maps the list of tokens to patterns to be expanded.
|
|
||||||
var productions = map[tokenType]string{
|
|
||||||
// Unused regexps (matched using other methods) are commented out.
|
|
||||||
TokenIdent: `{ident}`,
|
|
||||||
TokenAtKeyword: `@{ident}`,
|
|
||||||
TokenString: `{string}`,
|
|
||||||
TokenHash: `#{name}`,
|
|
||||||
TokenNumber: `{num}`,
|
|
||||||
TokenPercentage: `{num}%`,
|
|
||||||
TokenDimension: `{num}{ident}`,
|
|
||||||
TokenURI: `url\({w}(?:{string}|{urlchar}*?){w}\)`,
|
|
||||||
TokenUnicodeRange: `U\+[0-9A-F\?]{1,6}(?:-[0-9A-F]{1,6})?`,
|
|
||||||
//TokenCDO: `<!--`,
|
|
||||||
TokenCDC: `-->`,
|
|
||||||
TokenS: `{wc}+`,
|
|
||||||
TokenComment: `/\*[^\*]*[\*]+(?:[^/][^\*]*[\*]+)*/`,
|
|
||||||
TokenFunction: `{ident}\(`,
|
|
||||||
//TokenIncludes: `~=`,
|
|
||||||
//TokenDashMatch: `\|=`,
|
|
||||||
//TokenPrefixMatch: `\^=`,
|
|
||||||
//TokenSuffixMatch: `\$=`,
|
|
||||||
//TokenSubstringMatch: `\*=`,
|
|
||||||
//TokenChar: `[^"']`,
|
|
||||||
//TokenBOM: "\uFEFF",
|
|
||||||
}
|
|
||||||
|
|
||||||
// matchers maps the list of tokens to compiled regular expressions.
|
|
||||||
//
|
|
||||||
// The map is filled on init() using the macros and productions defined in
|
|
||||||
// the CSS specification.
|
|
||||||
var matchers = map[tokenType]*regexp.Regexp{}
|
|
||||||
|
|
||||||
// matchOrder is the order to test regexps when first-char shortcuts
|
|
||||||
// can't be used.
|
|
||||||
var matchOrder = []tokenType{
|
|
||||||
TokenURI,
|
|
||||||
TokenFunction,
|
|
||||||
TokenUnicodeRange,
|
|
||||||
TokenIdent,
|
|
||||||
TokenDimension,
|
|
||||||
TokenPercentage,
|
|
||||||
TokenNumber,
|
|
||||||
TokenCDC,
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
// replace macros and compile regexps for productions.
|
|
||||||
replaceMacro := func(s string) string {
|
|
||||||
return "(?:" + macros[s[1:len(s)-1]] + ")"
|
|
||||||
}
|
|
||||||
for t, s := range productions {
|
|
||||||
for macroRegexp.MatchString(s) {
|
|
||||||
s = macroRegexp.ReplaceAllStringFunc(s, replaceMacro)
|
|
||||||
}
|
|
||||||
matchers[t] = regexp.MustCompile("^(?:" + s + ")")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Scanner --------------------------------------------------------------------
|
|
||||||
|
|
||||||
// New returns a new CSS scanner for the given input.
|
|
||||||
func New(input string) *Scanner {
|
|
||||||
// Normalize newlines.
|
|
||||||
input = strings.Replace(input, "\r\n", "\n", -1)
|
|
||||||
return &Scanner{
|
|
||||||
input: input,
|
|
||||||
row: 1,
|
|
||||||
col: 1,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Scanner scans an input and emits tokens following the CSS3 specification.
|
|
||||||
type Scanner struct {
|
|
||||||
input string
|
|
||||||
pos int
|
|
||||||
row int
|
|
||||||
col int
|
|
||||||
err *Token
|
|
||||||
}
|
|
||||||
|
|
||||||
// Next returns the next token from the input.
|
|
||||||
//
|
|
||||||
// At the end of the input the token type is TokenEOF.
|
|
||||||
//
|
|
||||||
// If the input can't be tokenized the token type is TokenError. This occurs
|
|
||||||
// in case of unclosed quotation marks or comments.
|
|
||||||
func (s *Scanner) Next() *Token {
|
|
||||||
if s.err != nil {
|
|
||||||
return s.err
|
|
||||||
}
|
|
||||||
if s.pos >= len(s.input) {
|
|
||||||
s.err = &Token{TokenEOF, "", s.row, s.col}
|
|
||||||
return s.err
|
|
||||||
}
|
|
||||||
if s.pos == 0 {
|
|
||||||
// Test BOM only once, at the beginning of the file.
|
|
||||||
if strings.HasPrefix(s.input, "\uFEFF") {
|
|
||||||
return s.emitSimple(TokenBOM, "\uFEFF")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// There's a lot we can guess based on the first byte so we'll take a
|
|
||||||
// shortcut before testing multiple regexps.
|
|
||||||
input := s.input[s.pos:]
|
|
||||||
switch input[0] {
|
|
||||||
case '\t', '\n', '\f', '\r', ' ':
|
|
||||||
// Whitespace.
|
|
||||||
return s.emitToken(TokenS, matchers[TokenS].FindString(input))
|
|
||||||
case '.':
|
|
||||||
// Dot is too common to not have a quick check.
|
|
||||||
// We'll test if this is a Char; if it is followed by a number it is a
|
|
||||||
// dimension/percentage/number, and this will be matched later.
|
|
||||||
if len(input) > 1 && !unicode.IsDigit(rune(input[1])) {
|
|
||||||
return s.emitSimple(TokenChar, ".")
|
|
||||||
}
|
|
||||||
case '#':
|
|
||||||
// Another common one: Hash or Char.
|
|
||||||
if match := matchers[TokenHash].FindString(input); match != "" {
|
|
||||||
return s.emitToken(TokenHash, match)
|
|
||||||
}
|
|
||||||
return s.emitSimple(TokenChar, "#")
|
|
||||||
case '@':
|
|
||||||
// Another common one: AtKeyword or Char.
|
|
||||||
if match := matchers[TokenAtKeyword].FindString(input); match != "" {
|
|
||||||
return s.emitSimple(TokenAtKeyword, match)
|
|
||||||
}
|
|
||||||
return s.emitSimple(TokenChar, "@")
|
|
||||||
case ':', ',', ';', '%', '&', '+', '=', '>', '(', ')', '[', ']', '{', '}':
|
|
||||||
// More common chars.
|
|
||||||
return s.emitSimple(TokenChar, string(input[0]))
|
|
||||||
case '"', '\'':
|
|
||||||
// String or error.
|
|
||||||
match := matchers[TokenString].FindString(input)
|
|
||||||
if match != "" {
|
|
||||||
return s.emitToken(TokenString, match)
|
|
||||||
}
|
|
||||||
|
|
||||||
s.err = &Token{TokenError, "unclosed quotation mark", s.row, s.col}
|
|
||||||
return s.err
|
|
||||||
case '/':
|
|
||||||
// Comment, error or Char.
|
|
||||||
if len(input) > 1 && input[1] == '*' {
|
|
||||||
match := matchers[TokenComment].FindString(input)
|
|
||||||
if match != "" {
|
|
||||||
return s.emitToken(TokenComment, match)
|
|
||||||
} else {
|
|
||||||
s.err = &Token{TokenError, "unclosed comment", s.row, s.col}
|
|
||||||
return s.err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return s.emitSimple(TokenChar, "/")
|
|
||||||
case '~':
|
|
||||||
// Includes or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenIncludes, "~=")
|
|
||||||
case '|':
|
|
||||||
// DashMatch or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenDashMatch, "|=")
|
|
||||||
case '^':
|
|
||||||
// PrefixMatch or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenPrefixMatch, "^=")
|
|
||||||
case '$':
|
|
||||||
// SuffixMatch or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenSuffixMatch, "$=")
|
|
||||||
case '*':
|
|
||||||
// SubstringMatch or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenSubstringMatch, "*=")
|
|
||||||
case '<':
|
|
||||||
// CDO or Char.
|
|
||||||
return s.emitPrefixOrChar(TokenCDO, "<!--")
|
|
||||||
}
|
|
||||||
// Test all regexps, in order.
|
|
||||||
for _, token := range matchOrder {
|
|
||||||
if match := matchers[token].FindString(input); match != "" {
|
|
||||||
return s.emitToken(token, match)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// We already handled unclosed quotation marks and comments,
|
|
||||||
// so this can only be a Char.
|
|
||||||
r, width := utf8.DecodeRuneInString(input)
|
|
||||||
token := &Token{TokenChar, string(r), s.row, s.col}
|
|
||||||
s.col += width
|
|
||||||
s.pos += width
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// updatePosition updates input coordinates based on the consumed text.
|
|
||||||
func (s *Scanner) updatePosition(text string) {
|
|
||||||
width := utf8.RuneCountInString(text)
|
|
||||||
lines := strings.Count(text, "\n")
|
|
||||||
s.row += lines
|
|
||||||
if lines == 0 {
|
|
||||||
s.col += width
|
|
||||||
} else {
|
|
||||||
s.col = utf8.RuneCountInString(text[strings.LastIndex(text, "\n"):])
|
|
||||||
}
|
|
||||||
s.pos += len(text) // while col is a rune index, pos is a byte index
|
|
||||||
}
|
|
||||||
|
|
||||||
// emitToken returns a Token for the string v and updates the scanner position.
|
|
||||||
func (s *Scanner) emitToken(t tokenType, v string) *Token {
|
|
||||||
token := &Token{t, v, s.row, s.col}
|
|
||||||
s.updatePosition(v)
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// emitSimple returns a Token for the string v and updates the scanner
|
|
||||||
// position in a simplified manner.
|
|
||||||
//
|
|
||||||
// The string is known to have only ASCII characters and to not have a newline.
|
|
||||||
func (s *Scanner) emitSimple(t tokenType, v string) *Token {
|
|
||||||
token := &Token{t, v, s.row, s.col}
|
|
||||||
s.col += len(v)
|
|
||||||
s.pos += len(v)
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
// emitPrefixOrChar returns a Token for type t if the current position
|
|
||||||
// matches the given prefix. Otherwise it returns a Char token using the
|
|
||||||
// first character from the prefix.
|
|
||||||
//
|
|
||||||
// The prefix is known to have only ASCII characters and to not have a newline.
|
|
||||||
func (s *Scanner) emitPrefixOrChar(t tokenType, prefix string) *Token {
|
|
||||||
if strings.HasPrefix(s.input[s.pos:], prefix) {
|
|
||||||
return s.emitSimple(t, prefix)
|
|
||||||
}
|
|
||||||
return s.emitSimple(TokenChar, string(prefix[0]))
|
|
||||||
}
|
|
|
@ -1,28 +1,101 @@
|
||||||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
# Created by https://www.toptal.com/developers/gitignore/api/code,go,linux,macos,windows
|
||||||
*.o
|
# Edit at https://www.toptal.com/developers/gitignore?templates=code,go,linux,macos,windows
|
||||||
*.a
|
|
||||||
*.so
|
|
||||||
|
|
||||||
# Folders
|
### Code ###
|
||||||
_obj
|
.vscode/*
|
||||||
_test
|
!.vscode/tasks.json
|
||||||
|
!.vscode/launch.json
|
||||||
# Vim swap files
|
*.code-workspace
|
||||||
.*.sw?
|
|
||||||
|
|
||||||
# Architecture specific extensions/prefixes
|
|
||||||
*.[568vq]
|
|
||||||
[568vq].out
|
|
||||||
|
|
||||||
*.cgo1.go
|
|
||||||
*.cgo2.c
|
|
||||||
_cgo_defun.c
|
|
||||||
_cgo_gotypes.go
|
|
||||||
_cgo_export.*
|
|
||||||
|
|
||||||
_testmain.go
|
|
||||||
|
|
||||||
|
### Go ###
|
||||||
|
# Binaries for programs and plugins
|
||||||
*.exe
|
*.exe
|
||||||
|
*.exe~
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
|
||||||
# Code coverage stuff
|
# Test binary, built with `go test -c`
|
||||||
coverage.out
|
*.test
|
||||||
|
|
||||||
|
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||||
|
*.out
|
||||||
|
|
||||||
|
# Dependency directories (remove the comment below to include it)
|
||||||
|
# vendor/
|
||||||
|
|
||||||
|
### Go Patch ###
|
||||||
|
/vendor/
|
||||||
|
/Godeps/
|
||||||
|
|
||||||
|
### Linux ###
|
||||||
|
*~
|
||||||
|
|
||||||
|
# temporary files which can be created if a process still has a handle open of a deleted file
|
||||||
|
.fuse_hidden*
|
||||||
|
|
||||||
|
# KDE directory preferences
|
||||||
|
.directory
|
||||||
|
|
||||||
|
# Linux trash folder which might appear on any partition or disk
|
||||||
|
.Trash-*
|
||||||
|
|
||||||
|
# .nfs files are created when an open file is removed but is still being accessed
|
||||||
|
.nfs*
|
||||||
|
|
||||||
|
### macOS ###
|
||||||
|
# General
|
||||||
|
.DS_Store
|
||||||
|
.AppleDouble
|
||||||
|
.LSOverride
|
||||||
|
|
||||||
|
# Icon must end with two \r
|
||||||
|
Icon
|
||||||
|
|
||||||
|
|
||||||
|
# Thumbnails
|
||||||
|
._*
|
||||||
|
|
||||||
|
# Files that might appear in the root of a volume
|
||||||
|
.DocumentRevisions-V100
|
||||||
|
.fseventsd
|
||||||
|
.Spotlight-V100
|
||||||
|
.TemporaryItems
|
||||||
|
.Trashes
|
||||||
|
.VolumeIcon.icns
|
||||||
|
.com.apple.timemachine.donotpresent
|
||||||
|
|
||||||
|
# Directories potentially created on remote AFP share
|
||||||
|
.AppleDB
|
||||||
|
.AppleDesktop
|
||||||
|
Network Trash Folder
|
||||||
|
Temporary Items
|
||||||
|
.apdisk
|
||||||
|
|
||||||
|
### Windows ###
|
||||||
|
# Windows thumbnail cache files
|
||||||
|
Thumbs.db
|
||||||
|
Thumbs.db:encryptable
|
||||||
|
ehthumbs.db
|
||||||
|
ehthumbs_vista.db
|
||||||
|
|
||||||
|
# Dump file
|
||||||
|
*.stackdump
|
||||||
|
|
||||||
|
# Folder config file
|
||||||
|
[Dd]esktop.ini
|
||||||
|
|
||||||
|
# Recycle Bin used on file shares
|
||||||
|
$RECYCLE.BIN/
|
||||||
|
|
||||||
|
# Windows Installer files
|
||||||
|
*.cab
|
||||||
|
*.msi
|
||||||
|
*.msix
|
||||||
|
*.msm
|
||||||
|
*.msp
|
||||||
|
|
||||||
|
# Windows shortcuts
|
||||||
|
*.lnk
|
||||||
|
|
||||||
|
# End of https://www.toptal.com/developers/gitignore/api/code,go,linux,macos,windows
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
language: go
|
|
||||||
install:
|
|
||||||
- go get golang.org/x/tools/cmd/cover
|
|
||||||
- go get github.com/mattn/goveralls
|
|
||||||
script:
|
|
||||||
- go test -v -covermode=count -coverprofile=coverage.out
|
|
||||||
- if [[ "$TRAVIS_PULL_REQUEST" = "false" ]]; then $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN; fi
|
|
|
@ -0,0 +1,42 @@
|
||||||
|
# Changelog
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
The format of this file is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
but only releases after v1.0.3 properly adhere to it.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.2.0] - 2021-01-27
|
||||||
|
### Added
|
||||||
|
- HSLuv and HPLuv color spaces (#41, #51)
|
||||||
|
- CIE LCh(uv) color space, called `LuvLCh` in code (#51)
|
||||||
|
- JSON and envconfig serialization support for `HexColor` (#42)
|
||||||
|
- `DistanceLinearRGB` (#53)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- RGB to/from XYZ conversion is more accurate (#51)
|
||||||
|
- A bug in `XYZToLuvWhiteRef` that only applied to very small values was fixed (#51)
|
||||||
|
- `BlendHCL` output is clamped so that it's not invalid (#46)
|
||||||
|
- Properly documented `DistanceCIE76` (#40)
|
||||||
|
- Some small godoc fixes
|
||||||
|
|
||||||
|
|
||||||
|
## [1.0.3] - 2019-11-11
|
||||||
|
- Remove SQLMock dependency
|
||||||
|
|
||||||
|
|
||||||
|
## [1.0.2] - 2019-04-07
|
||||||
|
- Fixes SQLMock dependency
|
||||||
|
|
||||||
|
|
||||||
|
## [1.0.1] - 2019-03-24
|
||||||
|
- Adds support for Go Modules
|
||||||
|
|
||||||
|
|
||||||
|
## [1.0.0] - 2018-05-26
|
||||||
|
- API Breaking change in `MakeColor`: instead of `panic`ing when alpha is zero, it now returns a secondary, boolean return value indicating success. See [the color.Color interface](#the-colorcolor-interface) section and [this FAQ entry](#q-why-would-makecolor-ever-fail) for details.
|
||||||
|
|
||||||
|
|
||||||
|
## [0.9.0] - 2018-05-26
|
||||||
|
- Initial version number after having ignored versioning for a long time :)
|
|
@ -1,9 +1,9 @@
|
||||||
go-colorful
|
go-colorful
|
||||||
===========
|
===========
|
||||||
A library for playing with colors in go (golang).
|
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.org/lucasb-eyer/go-colorful.svg?branch=master)](https://travis-ci.org/lucasb-eyer/go-colorful)
|
[![go reportcard](https://goreportcard.com/badge/github.com/lucasb-eyer/go-colorful)](https://goreportcard.com/report/github.com/lucasb-eyer/go-colorful)
|
||||||
[![Coverage Status](https://coveralls.io/repos/github/lucasb-eyer/go-colorful/badge.svg?branch=master)](https://coveralls.io/github/lucasb-eyer/go-colorful?branch=master)
|
|
||||||
|
A library for playing with colors in Go. Supports Go 1.13 onwards.
|
||||||
|
|
||||||
Why?
|
Why?
|
||||||
====
|
====
|
||||||
|
@ -30,6 +30,9 @@ Go-Colorful stores colors in RGB and provides methods from converting these to v
|
||||||
- **CIE-L\*a\*b\*:** A *perceptually uniform* color space, i.e. distances are meaningful. L\* in [0..1] and a\*, b\* almost in [-1..1].
|
- **CIE-L\*a\*b\*:** A *perceptually uniform* color space, i.e. distances are meaningful. L\* in [0..1] and a\*, b\* almost in [-1..1].
|
||||||
- **CIE-L\*u\*v\*:** Very similar to CIE-L\*a\*b\*, there is [no consensus](http://en.wikipedia.org/wiki/CIELUV#Historical_background) on which one is "better".
|
- **CIE-L\*u\*v\*:** Very similar to CIE-L\*a\*b\*, there is [no consensus](http://en.wikipedia.org/wiki/CIELUV#Historical_background) on which one is "better".
|
||||||
- **CIE-L\*C\*h° (HCL):** This is generally the [most useful](http://vis4.net/blog/posts/avoid-equidistant-hsv-colors/) one; CIE-L\*a\*b\* space in polar coordinates, i.e. a *better* HSV. H° is in [0..360], C\* almost in [-1..1] and L\* as in CIE-L\*a\*b\*.
|
- **CIE-L\*C\*h° (HCL):** This is generally the [most useful](http://vis4.net/blog/posts/avoid-equidistant-hsv-colors/) one; CIE-L\*a\*b\* space in polar coordinates, i.e. a *better* HSV. H° is in [0..360], C\* almost in [-1..1] and L\* as in CIE-L\*a\*b\*.
|
||||||
|
- **CIE LCh(uv):** Called `LuvLCh` in code, this is a cylindrical transformation of the CIE-L\*u\*v\* color space. Like HCL above: H° is in [0..360], C\* almost in [-1..1] and L\* as in CIE-L\*u\*v\*.
|
||||||
|
- **HSLuv:** The better alternative to HSL, see [here](https://www.hsluv.org/) and [here](https://www.kuon.ch/post/2020-03-08-hsluv/). Hue in [0..360], Saturation and Luminance in [0..1].
|
||||||
|
- **HPLuv:** A variant of HSLuv. The color space is smoother, but only pastel colors can be included. Because the valid colors are limited, it's easy to get invalid Saturation values way above 1.0, indicating the color can't be represented in HPLuv beccause it's not pastel.
|
||||||
|
|
||||||
For the colorspaces where it makes sense (XYZ, Lab, Luv, HCl), the
|
For the colorspaces where it makes sense (XYZ, Lab, Luv, HCl), the
|
||||||
[D65](http://en.wikipedia.org/wiki/Illuminant_D65) is used as reference white
|
[D65](http://en.wikipedia.org/wiki/Illuminant_D65) is used as reference white
|
||||||
|
@ -248,14 +251,14 @@ func main() {
|
||||||
//c2, _ := colorful.Hex("#1E3140")
|
//c2, _ := colorful.Hex("#1E3140")
|
||||||
|
|
||||||
for i := 0 ; i < blocks ; i++ {
|
for i := 0 ; i < blocks ; i++ {
|
||||||
draw.Draw(img, image.Rect(i*blockw, 0,(i+1)*blockw, 40), &image.Uniform{c1.BlendHsv(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
|
draw.Draw(img, image.Rect(i*blockw, 0,(i+1)*blockw, 40), &image.Uniform{c1.BlendHsv(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
|
||||||
draw.Draw(img, image.Rect(i*blockw, 40,(i+1)*blockw, 80), &image.Uniform{c1.BlendLuv(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
|
draw.Draw(img, image.Rect(i*blockw, 40,(i+1)*blockw, 80), &image.Uniform{c1.BlendLuv(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
|
||||||
draw.Draw(img, image.Rect(i*blockw, 80,(i+1)*blockw,120), &image.Uniform{c1.BlendRgb(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
|
draw.Draw(img, image.Rect(i*blockw, 80,(i+1)*blockw,120), &image.Uniform{c1.BlendRgb(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
|
||||||
draw.Draw(img, image.Rect(i*blockw,120,(i+1)*blockw,160), &image.Uniform{c1.BlendLab(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
|
draw.Draw(img, image.Rect(i*blockw,120,(i+1)*blockw,160), &image.Uniform{c1.BlendLab(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
|
||||||
draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1))}, image.ZP, draw.Src)
|
draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1))}, image.Point{}, draw.Src)
|
||||||
|
|
||||||
// This can be used to "fix" invalid colors in the gradient.
|
// This can be used to "fix" invalid colors in the gradient.
|
||||||
//draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1)).Clamped()}, image.ZP, draw.Src)
|
//draw.Draw(img, image.Rect(i*blockw,160,(i+1)*blockw,200), &image.Uniform{c1.BlendHcl(c2, float64(i)/float64(blocks-1)).Clamped()}, image.Point{}, draw.Src)
|
||||||
}
|
}
|
||||||
|
|
||||||
toimg, err := os.Create("colorblend.png")
|
toimg, err := os.Create("colorblend.png")
|
||||||
|
@ -468,25 +471,12 @@ section above.
|
||||||
Who?
|
Who?
|
||||||
====
|
====
|
||||||
|
|
||||||
This library has been developed by Lucas Beyer with contributions from
|
This library was developed by Lucas Beyer with contributions from
|
||||||
Bastien Dejean (@baskerville), Phil Kulak (@pkulak) and Christian Muehlhaeuser (@muesli).
|
Bastien Dejean (@baskerville), Phil Kulak (@pkulak) and Christian Muehlhaeuser (@muesli).
|
||||||
|
|
||||||
Release Notes
|
It is now maintained by makeworld (@makeworld-the-better-one).
|
||||||
=============
|
|
||||||
|
|
||||||
### Version 1.0
|
|
||||||
- API Breaking change in `MakeColor`: instead of `panic`ing when alpha is zero, it now returns a secondary, boolean return value indicating success. See [the color.Color interface](https://github.com/lucasb-eyer/go-colorful#the-colorcolor-interface) section and [this FAQ entry](https://github.com/lucasb-eyer/go-colorful#q-why-would-makecolor-ever-fail) for details.
|
|
||||||
|
|
||||||
### Version 0.9
|
## License
|
||||||
- Initial version number after having ignored versioning for a long time :)
|
|
||||||
|
|
||||||
License: MIT
|
|
||||||
============
|
|
||||||
Copyright (c) 2013 Lucas Beyer
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
|
|
||||||
|
This repo is under the MIT license, see [LICENSE](LICENSE) for details.
|
||||||
|
|
|
@ -48,6 +48,11 @@ func (col Color) RGB255() (r, g, b uint8) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Used to simplify HSLuv testing.
|
||||||
|
func (col Color) values() (float64, float64, float64) {
|
||||||
|
return col.R, col.G, col.B
|
||||||
|
}
|
||||||
|
|
||||||
// This is the tolerance used when comparing colors using AlmostEqualRgb.
|
// This is the tolerance used when comparing colors using AlmostEqualRgb.
|
||||||
const Delta = 1.0 / 255.0
|
const Delta = 1.0 / 255.0
|
||||||
|
|
||||||
|
@ -64,6 +69,7 @@ func (c Color) IsValid() bool {
|
||||||
0.0 <= c.B && c.B <= 1.0
|
0.0 <= c.B && c.B <= 1.0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// clamp01 clamps from 0 to 1.
|
||||||
func clamp01(v float64) float64 {
|
func clamp01(v float64) float64 {
|
||||||
return math.Max(0.0, math.Min(v, 1.0))
|
return math.Max(0.0, math.Min(v, 1.0))
|
||||||
}
|
}
|
||||||
|
@ -88,6 +94,15 @@ func (c1 Color) DistanceRgb(c2 Color) float64 {
|
||||||
return math.Sqrt(sq(c1.R-c2.R) + sq(c1.G-c2.G) + sq(c1.B-c2.B))
|
return math.Sqrt(sq(c1.R-c2.R) + sq(c1.G-c2.G) + sq(c1.B-c2.B))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DistanceLinearRGB computes the distance between two colors in linear RGB
|
||||||
|
// space. This is not useful for measuring how humans perceive color, but
|
||||||
|
// might be useful for other things, like dithering.
|
||||||
|
func (c1 Color) DistanceLinearRGB(c2 Color) float64 {
|
||||||
|
r1, g1, b1 := c1.LinearRgb()
|
||||||
|
r2, g2, b2 := c2.LinearRgb()
|
||||||
|
return math.Sqrt(sq(r1-r2) + sq(g1-g2) + sq(b1-b2))
|
||||||
|
}
|
||||||
|
|
||||||
// Check for equality between colors within the tolerance Delta (1/255).
|
// Check for equality between colors within the tolerance Delta (1/255).
|
||||||
func (c1 Color) AlmostEqualRgb(c2 Color) bool {
|
func (c1 Color) AlmostEqualRgb(c2 Color) bool {
|
||||||
return math.Abs(c1.R-c2.R)+
|
return math.Abs(c1.R-c2.R)+
|
||||||
|
@ -422,16 +437,16 @@ func FastLinearRgb(r, g, b float64) Color {
|
||||||
|
|
||||||
// XyzToLinearRgb converts from CIE XYZ-space to Linear RGB space.
|
// XyzToLinearRgb converts from CIE XYZ-space to Linear RGB space.
|
||||||
func XyzToLinearRgb(x, y, z float64) (r, g, b float64) {
|
func XyzToLinearRgb(x, y, z float64) (r, g, b float64) {
|
||||||
r = 3.2404542*x - 1.5371385*y - 0.4985314*z
|
r = 3.2409699419045214*x - 1.5373831775700935*y - 0.49861076029300328*z
|
||||||
g = -0.9692660*x + 1.8760108*y + 0.0415560*z
|
g = -0.96924363628087983*x + 1.8759675015077207*y + 0.041555057407175613*z
|
||||||
b = 0.0556434*x - 0.2040259*y + 1.0572252*z
|
b = 0.055630079696993609*x - 0.20397695888897657*y + 1.0569715142428786*z
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func LinearRgbToXyz(r, g, b float64) (x, y, z float64) {
|
func LinearRgbToXyz(r, g, b float64) (x, y, z float64) {
|
||||||
x = 0.4124564*r + 0.3575761*g + 0.1804375*b
|
x = 0.41239079926595948*r + 0.35758433938387796*g + 0.18048078840183429*b
|
||||||
y = 0.2126729*r + 0.7151522*g + 0.0721750*b
|
y = 0.21263900587151036*r + 0.71516867876775593*g + 0.072192315360733715*b
|
||||||
z = 0.0193339*r + 0.1191920*g + 0.9503041*b
|
z = 0.019330818715591851*r + 0.11919477979462599*g + 0.95053215224966058*b
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -589,7 +604,7 @@ func (c1 Color) DistanceLab(c2 Color) float64 {
|
||||||
return math.Sqrt(sq(l1-l2) + sq(a1-a2) + sq(b1-b2))
|
return math.Sqrt(sq(l1-l2) + sq(a1-a2) + sq(b1-b2))
|
||||||
}
|
}
|
||||||
|
|
||||||
// That's actually the same, but I don't want to break code.
|
// DistanceCIE76 is the same as DistanceLab.
|
||||||
func (c1 Color) DistanceCIE76(c2 Color) float64 {
|
func (c1 Color) DistanceCIE76(c2 Color) float64 {
|
||||||
return c1.DistanceLab(c2)
|
return c1.DistanceLab(c2)
|
||||||
}
|
}
|
||||||
|
@ -739,7 +754,7 @@ func XyzToLuv(x, y, z float64) (l, a, b float64) {
|
||||||
|
|
||||||
func XyzToLuvWhiteRef(x, y, z float64, wref [3]float64) (l, u, v float64) {
|
func XyzToLuvWhiteRef(x, y, z float64, wref [3]float64) (l, u, v float64) {
|
||||||
if y/wref[1] <= 6.0/29.0*6.0/29.0*6.0/29.0 {
|
if y/wref[1] <= 6.0/29.0*6.0/29.0*6.0/29.0 {
|
||||||
l = y / wref[1] * 29.0 / 3.0 * 29.0 / 3.0 * 29.0 / 3.0
|
l = y / wref[1] * (29.0 / 3.0 * 29.0 / 3.0 * 29.0 / 3.0) / 100.0
|
||||||
} else {
|
} else {
|
||||||
l = 1.16*math.Cbrt(y/wref[1]) - 0.16
|
l = 1.16*math.Cbrt(y/wref[1]) - 0.16
|
||||||
}
|
}
|
||||||
|
@ -803,7 +818,7 @@ func (col Color) LuvWhiteRef(wref [3]float64) (l, u, v float64) {
|
||||||
|
|
||||||
// Generates a color by using data given in CIE L*u*v* space using D65 as reference white.
|
// Generates a color by using data given in CIE L*u*v* space using D65 as reference white.
|
||||||
// L* is in [0..1] and both u* and v* are in about [-1..1]
|
// L* is in [0..1] and both u* and v* are in about [-1..1]
|
||||||
// WARNING: many combinations of `l`, `a`, and `b` values do not have corresponding
|
// WARNING: many combinations of `l`, `u`, and `v` values do not have corresponding
|
||||||
// valid RGB values, check the FAQ in the README if you're unsure.
|
// valid RGB values, check the FAQ in the README if you're unsure.
|
||||||
func Luv(l, u, v float64) Color {
|
func Luv(l, u, v float64) Color {
|
||||||
return Xyz(LuvToXyz(l, u, v))
|
return Xyz(LuvToXyz(l, u, v))
|
||||||
|
@ -870,7 +885,7 @@ func (col Color) HclWhiteRef(wref [3]float64) (h, c, l float64) {
|
||||||
|
|
||||||
// Generates a color by using data given in HCL space using D65 as reference white.
|
// Generates a color by using data given in HCL space using D65 as reference white.
|
||||||
// H values are in [0..360], C and L values are in [0..1]
|
// H values are in [0..360], C and L values are in [0..1]
|
||||||
// WARNING: many combinations of `l`, `a`, and `b` values do not have corresponding
|
// WARNING: many combinations of `h`, `c`, and `l` values do not have corresponding
|
||||||
// valid RGB values, check the FAQ in the README if you're unsure.
|
// valid RGB values, check the FAQ in the README if you're unsure.
|
||||||
func Hcl(h, c, l float64) Color {
|
func Hcl(h, c, l float64) Color {
|
||||||
return HclWhiteRef(h, c, l, D65)
|
return HclWhiteRef(h, c, l, D65)
|
||||||
|
@ -899,5 +914,66 @@ func (col1 Color) BlendHcl(col2 Color, t float64) Color {
|
||||||
h2, c2, l2 := col2.Hcl()
|
h2, c2, l2 := col2.Hcl()
|
||||||
|
|
||||||
// We know that h are both in [0..360]
|
// We know that h are both in [0..360]
|
||||||
return Hcl(interp_angle(h1, h2, t), c1+t*(c2-c1), l1+t*(l2-l1))
|
return Hcl(interp_angle(h1, h2, t), c1+t*(c2-c1), l1+t*(l2-l1)).Clamped()
|
||||||
|
}
|
||||||
|
|
||||||
|
// LuvLch
|
||||||
|
|
||||||
|
// Converts the given color to LuvLCh space using D65 as reference white.
|
||||||
|
// h values are in [0..360], C and L values are in [0..1] although C can overshoot 1.0
|
||||||
|
func (col Color) LuvLCh() (l, c, h float64) {
|
||||||
|
return col.LuvLChWhiteRef(D65)
|
||||||
|
}
|
||||||
|
|
||||||
|
func LuvToLuvLCh(L, u, v float64) (l, c, h float64) {
|
||||||
|
// Oops, floating point workaround necessary if u ~= v and both are very small (i.e. almost zero).
|
||||||
|
if math.Abs(v-u) > 1e-4 && math.Abs(u) > 1e-4 {
|
||||||
|
h = math.Mod(57.29577951308232087721*math.Atan2(v, u)+360.0, 360.0) // Rad2Deg
|
||||||
|
} else {
|
||||||
|
h = 0.0
|
||||||
|
}
|
||||||
|
l = L
|
||||||
|
c = math.Sqrt(sq(u) + sq(v))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Converts the given color to LuvLCh space, taking into account
|
||||||
|
// a given reference white. (i.e. the monitor's white)
|
||||||
|
// h values are in [0..360], c and l values are in [0..1]
|
||||||
|
func (col Color) LuvLChWhiteRef(wref [3]float64) (l, c, h float64) {
|
||||||
|
return LuvToLuvLCh(col.LuvWhiteRef(wref))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generates a color by using data given in LuvLCh space using D65 as reference white.
|
||||||
|
// h values are in [0..360], C and L values are in [0..1]
|
||||||
|
// WARNING: many combinations of `l`, `c`, and `h` values do not have corresponding
|
||||||
|
// valid RGB values, check the FAQ in the README if you're unsure.
|
||||||
|
func LuvLCh(l, c, h float64) Color {
|
||||||
|
return LuvLChWhiteRef(l, c, h, D65)
|
||||||
|
}
|
||||||
|
|
||||||
|
func LuvLChToLuv(l, c, h float64) (L, u, v float64) {
|
||||||
|
H := 0.01745329251994329576 * h // Deg2Rad
|
||||||
|
u = c * math.Cos(H)
|
||||||
|
v = c * math.Sin(H)
|
||||||
|
L = l
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generates a color by using data given in LuvLCh space, taking
|
||||||
|
// into account a given reference white. (i.e. the monitor's white)
|
||||||
|
// h values are in [0..360], C and L values are in [0..1]
|
||||||
|
func LuvLChWhiteRef(l, c, h float64, wref [3]float64) Color {
|
||||||
|
L, u, v := LuvLChToLuv(l, c, h)
|
||||||
|
return LuvWhiteRef(L, u, v, wref)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BlendLuvLCh blends two colors in the cylindrical CIELUV color space.
|
||||||
|
// t == 0 results in c1, t == 1 results in c2
|
||||||
|
func (col1 Color) BlendLuvLCh(col2 Color, t float64) Color {
|
||||||
|
l1, c1, h1 := col1.LuvLCh()
|
||||||
|
l2, c2, h2 := col2.LuvLCh()
|
||||||
|
|
||||||
|
// We know that h are both in [0..360]
|
||||||
|
return LuvLCh(l1+t*(l2-l1), c1+t*(c2-c1), interp_angle(h1, h2, t))
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,12 +2,14 @@ package colorful
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"database/sql/driver"
|
"database/sql/driver"
|
||||||
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"reflect"
|
"reflect"
|
||||||
)
|
)
|
||||||
|
|
||||||
// A HexColor is a Color stored as a hex string "#rrggbb". It implements the
|
// A HexColor is a Color stored as a hex string "#rrggbb". It implements the
|
||||||
// database/sql.Scanner and database/sql/driver.Value interfaces.
|
// database/sql.Scanner, database/sql/driver.Value,
|
||||||
|
// encoding/json.Unmarshaler and encoding/json.Marshaler interfaces.
|
||||||
type HexColor Color
|
type HexColor Color
|
||||||
|
|
||||||
type errUnsupportedType struct {
|
type errUnsupportedType struct {
|
||||||
|
@ -35,3 +37,31 @@ func (hc *HexColor) Value() (driver.Value, error) {
|
||||||
func (e errUnsupportedType) Error() string {
|
func (e errUnsupportedType) Error() string {
|
||||||
return fmt.Sprintf("unsupported type: got %v, want a %s", e.got, e.want)
|
return fmt.Sprintf("unsupported type: got %v, want a %s", e.got, e.want)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (hc *HexColor) UnmarshalJSON(data []byte) error {
|
||||||
|
var hexCode string
|
||||||
|
if err := json.Unmarshal(data, &hexCode); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var col, err = Hex(hexCode)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*hc = HexColor(col)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hc HexColor) MarshalJSON() ([]byte, error) {
|
||||||
|
return json.Marshal(Color(hc).Hex())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode - deserialize function for https://github.com/kelseyhightower/envconfig
|
||||||
|
func (hc *HexColor) Decode(hexCode string) error {
|
||||||
|
var col, err = Hex(hexCode)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*hc = HexColor(col)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,207 @@
|
||||||
|
package colorful
|
||||||
|
|
||||||
|
import "math"
|
||||||
|
|
||||||
|
// Source: https://github.com/hsluv/hsluv-go
|
||||||
|
// Under MIT License
|
||||||
|
// Modified so that Saturation and Luminance are in [0..1] instead of [0..100].
|
||||||
|
|
||||||
|
// HSLuv uses a rounded version of the D65. This has no impact on the final RGB
|
||||||
|
// values, but to keep high levels of accuracy for internal operations and when
|
||||||
|
// comparing to the test values, this modified white reference is used internally.
|
||||||
|
//
|
||||||
|
// See this GitHub thread for details on these values:
|
||||||
|
// https://github.com/hsluv/hsluv/issues/79
|
||||||
|
var hSLuvD65 = [3]float64{0.95045592705167, 1.0, 1.089057750759878}
|
||||||
|
|
||||||
|
func LuvLChToHSLuv(l, c, h float64) (float64, float64, float64) {
|
||||||
|
// [-1..1] but the code expects it to be [-100..100]
|
||||||
|
c *= 100.0
|
||||||
|
l *= 100.0
|
||||||
|
|
||||||
|
var s, max float64
|
||||||
|
if l > 99.9999999 || l < 0.00000001 {
|
||||||
|
s = 0.0
|
||||||
|
} else {
|
||||||
|
max = maxChromaForLH(l, h)
|
||||||
|
s = c / max * 100.0
|
||||||
|
}
|
||||||
|
return h, clamp01(s / 100.0), clamp01(l / 100.0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func HSLuvToLuvLCh(h, s, l float64) (float64, float64, float64) {
|
||||||
|
l *= 100.0
|
||||||
|
s *= 100.0
|
||||||
|
|
||||||
|
var c, max float64
|
||||||
|
if l > 99.9999999 || l < 0.00000001 {
|
||||||
|
c = 0.0
|
||||||
|
} else {
|
||||||
|
max = maxChromaForLH(l, h)
|
||||||
|
c = max / 100.0 * s
|
||||||
|
}
|
||||||
|
|
||||||
|
// c is [-100..100], but for LCh it's supposed to be almost [-1..1]
|
||||||
|
return clamp01(l / 100.0), c / 100.0, h
|
||||||
|
}
|
||||||
|
|
||||||
|
func LuvLChToHPLuv(l, c, h float64) (float64, float64, float64) {
|
||||||
|
// [-1..1] but the code expects it to be [-100..100]
|
||||||
|
c *= 100.0
|
||||||
|
l *= 100.0
|
||||||
|
|
||||||
|
var s, max float64
|
||||||
|
if l > 99.9999999 || l < 0.00000001 {
|
||||||
|
s = 0.0
|
||||||
|
} else {
|
||||||
|
max = maxSafeChromaForL(l)
|
||||||
|
s = c / max * 100.0
|
||||||
|
}
|
||||||
|
return h, s / 100.0, l / 100.0
|
||||||
|
}
|
||||||
|
|
||||||
|
func HPLuvToLuvLCh(h, s, l float64) (float64, float64, float64) {
|
||||||
|
// [-1..1] but the code expects it to be [-100..100]
|
||||||
|
l *= 100.0
|
||||||
|
s *= 100.0
|
||||||
|
|
||||||
|
var c, max float64
|
||||||
|
if l > 99.9999999 || l < 0.00000001 {
|
||||||
|
c = 0.0
|
||||||
|
} else {
|
||||||
|
max = maxSafeChromaForL(l)
|
||||||
|
c = max / 100.0 * s
|
||||||
|
}
|
||||||
|
return l / 100.0, c / 100.0, h
|
||||||
|
}
|
||||||
|
|
||||||
|
// HSLuv creates a new Color from values in the HSLuv color space.
|
||||||
|
// Hue in [0..360], a Saturation [0..1], and a Luminance (lightness) in [0..1].
|
||||||
|
//
|
||||||
|
// The returned color values are clamped (using .Clamped), so this will never output
|
||||||
|
// an invalid color.
|
||||||
|
func HSLuv(h, s, l float64) Color {
|
||||||
|
// HSLuv -> LuvLCh -> CIELUV -> CIEXYZ -> Linear RGB -> sRGB
|
||||||
|
l, u, v := LuvLChToLuv(HSLuvToLuvLCh(h, s, l))
|
||||||
|
return LinearRgb(XyzToLinearRgb(LuvToXyzWhiteRef(l, u, v, hSLuvD65))).Clamped()
|
||||||
|
}
|
||||||
|
|
||||||
|
// HPLuv creates a new Color from values in the HPLuv color space.
|
||||||
|
// Hue in [0..360], a Saturation [0..1], and a Luminance (lightness) in [0..1].
|
||||||
|
//
|
||||||
|
// The returned color values are clamped (using .Clamped), so this will never output
|
||||||
|
// an invalid color.
|
||||||
|
func HPLuv(h, s, l float64) Color {
|
||||||
|
// HPLuv -> LuvLCh -> CIELUV -> CIEXYZ -> Linear RGB -> sRGB
|
||||||
|
l, u, v := LuvLChToLuv(HPLuvToLuvLCh(h, s, l))
|
||||||
|
return LinearRgb(XyzToLinearRgb(LuvToXyzWhiteRef(l, u, v, hSLuvD65))).Clamped()
|
||||||
|
}
|
||||||
|
|
||||||
|
// HSLuv returns the Hue, Saturation and Luminance of the color in the HSLuv
|
||||||
|
// color space. Hue in [0..360], a Saturation [0..1], and a Luminance
|
||||||
|
// (lightness) in [0..1].
|
||||||
|
func (col Color) HSLuv() (h, s, l float64) {
|
||||||
|
// sRGB -> Linear RGB -> CIEXYZ -> CIELUV -> LuvLCh -> HSLuv
|
||||||
|
return LuvLChToHSLuv(col.LuvLChWhiteRef(hSLuvD65))
|
||||||
|
}
|
||||||
|
|
||||||
|
// HPLuv returns the Hue, Saturation and Luminance of the color in the HSLuv
|
||||||
|
// color space. Hue in [0..360], a Saturation [0..1], and a Luminance
|
||||||
|
// (lightness) in [0..1].
|
||||||
|
//
|
||||||
|
// Note that HPLuv can only represent pastel colors, and so the Saturation
|
||||||
|
// value could be much larger than 1 for colors it can't represent.
|
||||||
|
func (col Color) HPLuv() (h, s, l float64) {
|
||||||
|
return LuvLChToHPLuv(col.LuvLChWhiteRef(hSLuvD65))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DistanceHSLuv calculates Euclidan distance in the HSLuv colorspace. No idea
|
||||||
|
// how useful this is.
|
||||||
|
//
|
||||||
|
// The Hue value is divided by 100 before the calculation, so that H, S, and L
|
||||||
|
// have the same relative ranges.
|
||||||
|
func (c1 Color) DistanceHSLuv(c2 Color) float64 {
|
||||||
|
h1, s1, l1 := c1.HSLuv()
|
||||||
|
h2, s2, l2 := c2.HSLuv()
|
||||||
|
return math.Sqrt(sq((h1-h2)/100.0) + sq(s1-s2) + sq(l1-l2))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DistanceHPLuv calculates Euclidean distance in the HPLuv colorspace. No idea
|
||||||
|
// how useful this is.
|
||||||
|
//
|
||||||
|
// The Hue value is divided by 100 before the calculation, so that H, S, and L
|
||||||
|
// have the same relative ranges.
|
||||||
|
func (c1 Color) DistanceHPLuv(c2 Color) float64 {
|
||||||
|
h1, s1, l1 := c1.HPLuv()
|
||||||
|
h2, s2, l2 := c2.HPLuv()
|
||||||
|
return math.Sqrt(sq((h1-h2)/100.0) + sq(s1-s2) + sq(l1-l2))
|
||||||
|
}
|
||||||
|
|
||||||
|
var m = [3][3]float64{
|
||||||
|
{3.2409699419045214, -1.5373831775700935, -0.49861076029300328},
|
||||||
|
{-0.96924363628087983, 1.8759675015077207, 0.041555057407175613},
|
||||||
|
{0.055630079696993609, -0.20397695888897657, 1.0569715142428786},
|
||||||
|
}
|
||||||
|
|
||||||
|
const kappa = 903.2962962962963
|
||||||
|
const epsilon = 0.0088564516790356308
|
||||||
|
|
||||||
|
func maxChromaForLH(l, h float64) float64 {
|
||||||
|
hRad := h / 360.0 * math.Pi * 2.0
|
||||||
|
minLength := math.MaxFloat64
|
||||||
|
for _, line := range getBounds(l) {
|
||||||
|
length := lengthOfRayUntilIntersect(hRad, line[0], line[1])
|
||||||
|
if length > 0.0 && length < minLength {
|
||||||
|
minLength = length
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return minLength
|
||||||
|
}
|
||||||
|
|
||||||
|
func getBounds(l float64) [6][2]float64 {
|
||||||
|
var sub2 float64
|
||||||
|
var ret [6][2]float64
|
||||||
|
sub1 := math.Pow(l+16.0, 3.0) / 1560896.0
|
||||||
|
if sub1 > epsilon {
|
||||||
|
sub2 = sub1
|
||||||
|
} else {
|
||||||
|
sub2 = l / kappa
|
||||||
|
}
|
||||||
|
for i := range m {
|
||||||
|
for k := 0; k < 2; k++ {
|
||||||
|
top1 := (284517.0*m[i][0] - 94839.0*m[i][2]) * sub2
|
||||||
|
top2 := (838422.0*m[i][2]+769860.0*m[i][1]+731718.0*m[i][0])*l*sub2 - 769860.0*float64(k)*l
|
||||||
|
bottom := (632260.0*m[i][2]-126452.0*m[i][1])*sub2 + 126452.0*float64(k)
|
||||||
|
ret[i*2+k][0] = top1 / bottom
|
||||||
|
ret[i*2+k][1] = top2 / bottom
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func lengthOfRayUntilIntersect(theta, x, y float64) (length float64) {
|
||||||
|
length = y / (math.Sin(theta) - x*math.Cos(theta))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func maxSafeChromaForL(l float64) float64 {
|
||||||
|
minLength := math.MaxFloat64
|
||||||
|
for _, line := range getBounds(l) {
|
||||||
|
m1 := line[0]
|
||||||
|
b1 := line[1]
|
||||||
|
x := intersectLineLine(m1, b1, -1.0/m1, 0.0)
|
||||||
|
dist := distanceFromPole(x, b1+x*m1)
|
||||||
|
if dist < minLength {
|
||||||
|
minLength = dist
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return minLength
|
||||||
|
}
|
||||||
|
|
||||||
|
func intersectLineLine(x1, y1, x2, y2 float64) float64 {
|
||||||
|
return (y1 - y2) / (x2 - x1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func distanceFromPole(x, y float64) float64 {
|
||||||
|
return math.Sqrt(math.Pow(x, 2.0) + math.Pow(y, 2.0))
|
||||||
|
}
|
|
@ -61,7 +61,7 @@ func SoftPaletteEx(colorsCount int, settings SoftPaletteSettings) ([]Color, erro
|
||||||
|
|
||||||
// That would cause some infinite loops down there...
|
// That would cause some infinite loops down there...
|
||||||
if len(samples) < colorsCount {
|
if len(samples) < colorsCount {
|
||||||
return nil, fmt.Errorf("palettegen: more colors requested (%v) than samples available (%v). Your requested color count may be wrong, you might want to use many samples or your constraint function makes the valid color space too small.", colorsCount, len(samples))
|
return nil, fmt.Errorf("palettegen: more colors requested (%v) than samples available (%v). Your requested color count may be wrong, you might want to use many samples or your constraint function makes the valid color space too small", colorsCount, len(samples))
|
||||||
} else if len(samples) == colorsCount {
|
} else if len(samples) == colorsCount {
|
||||||
return labs2cols(samples), nil // Oops?
|
return labs2cols(samples), nil // Oops?
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
module github.com/mattn/go-runewidth
|
module github.com/mattn/go-runewidth
|
||||||
|
|
||||||
go 1.9
|
go 1.9
|
||||||
|
|
||||||
|
require github.com/rivo/uniseg v0.1.0
|
||||||
|
|
|
@ -0,0 +1,2 @@
|
||||||
|
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
|
||||||
|
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
|
@ -2,6 +2,8 @@ package runewidth
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
|
"github.com/rivo/uniseg"
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:generate go run script/generate.go
|
//go:generate go run script/generate.go
|
||||||
|
@ -10,9 +12,6 @@ var (
|
||||||
// EastAsianWidth will be set true if the current locale is CJK
|
// EastAsianWidth will be set true if the current locale is CJK
|
||||||
EastAsianWidth bool
|
EastAsianWidth bool
|
||||||
|
|
||||||
// ZeroWidthJoiner is flag to set to use UTR#51 ZWJ
|
|
||||||
ZeroWidthJoiner bool
|
|
||||||
|
|
||||||
// DefaultCondition is a condition in current locale
|
// DefaultCondition is a condition in current locale
|
||||||
DefaultCondition = &Condition{}
|
DefaultCondition = &Condition{}
|
||||||
)
|
)
|
||||||
|
@ -30,7 +29,6 @@ func handleEnv() {
|
||||||
}
|
}
|
||||||
// update DefaultCondition
|
// update DefaultCondition
|
||||||
DefaultCondition.EastAsianWidth = EastAsianWidth
|
DefaultCondition.EastAsianWidth = EastAsianWidth
|
||||||
DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type interval struct {
|
type interval struct {
|
||||||
|
@ -86,14 +84,12 @@ var nonprint = table{
|
||||||
// Condition have flag EastAsianWidth whether the current locale is CJK or not.
|
// Condition have flag EastAsianWidth whether the current locale is CJK or not.
|
||||||
type Condition struct {
|
type Condition struct {
|
||||||
EastAsianWidth bool
|
EastAsianWidth bool
|
||||||
ZeroWidthJoiner bool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewCondition return new instance of Condition which is current locale.
|
// NewCondition return new instance of Condition which is current locale.
|
||||||
func NewCondition() *Condition {
|
func NewCondition() *Condition {
|
||||||
return &Condition{
|
return &Condition{
|
||||||
EastAsianWidth: EastAsianWidth,
|
EastAsianWidth: EastAsianWidth,
|
||||||
ZeroWidthJoiner: ZeroWidthJoiner,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -110,38 +106,20 @@ func (c *Condition) RuneWidth(r rune) int {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Condition) stringWidth(s string) (width int) {
|
|
||||||
for _, r := range []rune(s) {
|
|
||||||
width += c.RuneWidth(r)
|
|
||||||
}
|
|
||||||
return width
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
|
|
||||||
r1, r2 := rune(0), rune(0)
|
|
||||||
for _, r := range []rune(s) {
|
|
||||||
if r == 0xFE0E || r == 0xFE0F {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
w := c.RuneWidth(r)
|
|
||||||
if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
|
|
||||||
if width < w {
|
|
||||||
width = w
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
width += w
|
|
||||||
}
|
|
||||||
r1, r2 = r2, r
|
|
||||||
}
|
|
||||||
return width
|
|
||||||
}
|
|
||||||
|
|
||||||
// StringWidth return width as you can see
|
// StringWidth return width as you can see
|
||||||
func (c *Condition) StringWidth(s string) (width int) {
|
func (c *Condition) StringWidth(s string) (width int) {
|
||||||
if c.ZeroWidthJoiner {
|
g := uniseg.NewGraphemes(s)
|
||||||
return c.stringWidthZeroJoiner(s)
|
for g.Next() {
|
||||||
|
var chWidth int
|
||||||
|
for _, r := range g.Runes() {
|
||||||
|
chWidth = c.RuneWidth(r)
|
||||||
|
if chWidth > 0 {
|
||||||
|
break // Our best guess at this point is to use the width of the first non-zero-width rune.
|
||||||
}
|
}
|
||||||
return c.stringWidth(s)
|
}
|
||||||
|
width += chWidth
|
||||||
|
}
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Truncate return string truncated with w cells
|
// Truncate return string truncated with w cells
|
||||||
|
@ -149,19 +127,25 @@ func (c *Condition) Truncate(s string, w int, tail string) string {
|
||||||
if c.StringWidth(s) <= w {
|
if c.StringWidth(s) <= w {
|
||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
r := []rune(s)
|
w -= c.StringWidth(tail)
|
||||||
tw := c.StringWidth(tail)
|
var width int
|
||||||
w -= tw
|
pos := len(s)
|
||||||
width := 0
|
g := uniseg.NewGraphemes(s)
|
||||||
i := 0
|
for g.Next() {
|
||||||
for ; i < len(r); i++ {
|
var chWidth int
|
||||||
cw := c.RuneWidth(r[i])
|
for _, r := range g.Runes() {
|
||||||
if width+cw > w {
|
chWidth = c.RuneWidth(r)
|
||||||
|
if chWidth > 0 {
|
||||||
|
break // See StringWidth() for details.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if width+chWidth > w {
|
||||||
|
pos, _ = g.Positions()
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
width += cw
|
width += chWidth
|
||||||
}
|
}
|
||||||
return string(r[0:i]) + tail
|
return s[:pos] + tail
|
||||||
}
|
}
|
||||||
|
|
||||||
// Wrap return string wrapped with w cells
|
// Wrap return string wrapped with w cells
|
||||||
|
@ -169,7 +153,7 @@ func (c *Condition) Wrap(s string, w int) string {
|
||||||
width := 0
|
width := 0
|
||||||
out := ""
|
out := ""
|
||||||
for _, r := range []rune(s) {
|
for _, r := range []rune(s) {
|
||||||
cw := RuneWidth(r)
|
cw := c.RuneWidth(r)
|
||||||
if r == '\n' {
|
if r == '\n' {
|
||||||
out += string(r)
|
out += string(r)
|
||||||
width = 0
|
width = 0
|
||||||
|
|
|
@ -1,15 +0,0 @@
|
||||||
# Binaries for programs and plugins
|
|
||||||
*.exe
|
|
||||||
*.exe~
|
|
||||||
*.dll
|
|
||||||
*.so
|
|
||||||
*.dylib
|
|
||||||
|
|
||||||
# Test binary, built with `go test -c`
|
|
||||||
*.test
|
|
||||||
|
|
||||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
|
||||||
*.out
|
|
||||||
|
|
||||||
# goland idea folder
|
|
||||||
*.idea
|
|
|
@ -1,5 +1,6 @@
|
||||||
language: go
|
language: go
|
||||||
go:
|
go:
|
||||||
|
- 1.1.x
|
||||||
- 1.2.x
|
- 1.2.x
|
||||||
- 1.3.x
|
- 1.3.x
|
||||||
- 1.4.x
|
- 1.4.x
|
||||||
|
@ -10,7 +11,6 @@ go:
|
||||||
- 1.9.x
|
- 1.9.x
|
||||||
- 1.10.x
|
- 1.10.x
|
||||||
- 1.11.x
|
- 1.11.x
|
||||||
- 1.12.x
|
|
||||||
- tip
|
- tip
|
||||||
matrix:
|
matrix:
|
||||||
allow_failures:
|
allow_failures:
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
1. John Graham-Cumming http://jgc.org/
|
|
||||||
1. Mohammad Gufran https://github.com/Gufran
|
|
||||||
1. Steven Gutzwiller https://github.com/StevenGutzwiller
|
|
||||||
1. Andrew Krasichkov @buglloc https://github.com/buglloc
|
1. Andrew Krasichkov @buglloc https://github.com/buglloc
|
||||||
|
1. John Graham-Cumming http://jgc.org/
|
||||||
1. Mike Samuel mikesamuel@gmail.com
|
1. Mike Samuel mikesamuel@gmail.com
|
||||||
1. Dmitri Shuralyov shurcooL@gmail.com
|
1. Dmitri Shuralyov shurcooL@gmail.com
|
||||||
1. https://github.com/opennota
|
1. https://github.com/opennota
|
||||||
|
1. https://github.com/Gufran
|
|
@ -58,12 +58,10 @@ We expect to be supplied with well-formatted HTML (closing elements for every ap
|
||||||
|
|
||||||
### Supported Go Versions
|
### Supported Go Versions
|
||||||
|
|
||||||
bluemonday is tested against Go 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 1.10, 1.11, 1.12, and tip.
|
bluemonday is tested against Go 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, and tip.
|
||||||
|
|
||||||
We do not support Go 1.0 as we depend on `golang.org/x/net/html` which includes a reference to `io.ErrNoProgress` which did not exist in Go 1.0.
|
We do not support Go 1.0 as we depend on `golang.org/x/net/html` which includes a reference to `io.ErrNoProgress` which did not exist in Go 1.0.
|
||||||
|
|
||||||
We support Go 1.1 but Travis no longer tests against it.
|
|
||||||
|
|
||||||
## Is it production ready?
|
## Is it production ready?
|
||||||
|
|
||||||
*Yes*
|
*Yes*
|
||||||
|
@ -169,26 +167,12 @@ To add elements to a policy either add just the elements:
|
||||||
p.AllowElements("b", "strong")
|
p.AllowElements("b", "strong")
|
||||||
```
|
```
|
||||||
|
|
||||||
Or using a regex:
|
|
||||||
|
|
||||||
_Note: if an element is added by name as shown above, any matching regex will be ignored_
|
|
||||||
|
|
||||||
It is also recommended to ensure multiple patterns don't overlap as order of execution is not guaranteed and can result in some rules being missed.
|
|
||||||
```go
|
|
||||||
p.AllowElementsMatching(regex.MustCompile(`^my-element-`))
|
|
||||||
```
|
|
||||||
|
|
||||||
Or add elements as a virtue of adding an attribute:
|
Or add elements as a virtue of adding an attribute:
|
||||||
```go
|
```go
|
||||||
// Not the recommended pattern, see the recommendation on using .Matching() below
|
// Not the recommended pattern, see the recommendation on using .Matching() below
|
||||||
p.AllowAttrs("nowrap").OnElements("td", "th")
|
p.AllowAttrs("nowrap").OnElements("td", "th")
|
||||||
```
|
```
|
||||||
|
|
||||||
Again, this also supports a regex pattern match alternative:
|
|
||||||
```go
|
|
||||||
p.AllowAttrs("nowrap").OnElementsMatching(regex.MustCompile(`^my-element-`))
|
|
||||||
```
|
|
||||||
|
|
||||||
Attributes can either be added to all elements:
|
Attributes can either be added to all elements:
|
||||||
```go
|
```go
|
||||||
p.AllowAttrs("dir").Matching(regexp.MustCompile("(?i)rtl|ltr")).Globally()
|
p.AllowAttrs("dir").Matching(regexp.MustCompile("(?i)rtl|ltr")).Globally()
|
||||||
|
@ -218,49 +202,6 @@ p := bluemonday.UGCPolicy()
|
||||||
p.AllowElements("fieldset", "select", "option")
|
p.AllowElements("fieldset", "select", "option")
|
||||||
```
|
```
|
||||||
|
|
||||||
### Inline CSS
|
|
||||||
|
|
||||||
Although it's possible to handle inline CSS using `AllowAttrs` with a `Matching` rule, writing a single monolithic regular expression to safely process all inline CSS which you wish to allow is not a trivial task. Instead of attempting to do so, you can whitelist the `style` attribute on whichever element(s) you desire and use style policies to control and sanitize inline styles.
|
|
||||||
|
|
||||||
It is suggested that you use `Matching` (with a suitable regular expression)
|
|
||||||
`MatchingEnum`, or `MatchingHandler` to ensure each style matches your needs,
|
|
||||||
but default handlers are supplied for most widely used styles.
|
|
||||||
|
|
||||||
Similar to attributes, you can allow specific CSS properties to be set inline:
|
|
||||||
```go
|
|
||||||
p.AllowAttrs("style").OnElements("span", "p")
|
|
||||||
// Allow the 'color' property with valid RGB(A) hex values only (on any element allowed a 'style' attribute)
|
|
||||||
p.AllowStyles("color").Matching(regexp.MustCompile("(?i)^#([0-9a-f]{3,4}|[0-9a-f]{6}|[0-9a-f]{8})$")).Globally()
|
|
||||||
```
|
|
||||||
|
|
||||||
Additionally, you can allow a CSS property to be set only to an allowed value:
|
|
||||||
```go
|
|
||||||
p.AllowAttrs("style").OnElements("span", "p")
|
|
||||||
// Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none'
|
|
||||||
// on 'span' elements only
|
|
||||||
p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElements("span")
|
|
||||||
```
|
|
||||||
|
|
||||||
Or you can specify elements based on a regex patterm match:
|
|
||||||
```go
|
|
||||||
p.AllowAttrs("style").OnElementsMatching(regex.MustCompile(`^my-element-`))
|
|
||||||
// Allow the 'text-decoration' property to be set to 'underline', 'line-through' or 'none'
|
|
||||||
// on 'span' elements only
|
|
||||||
p.AllowStyles("text-decoration").MatchingEnum("underline", "line-through", "none").OnElementsMatching(regex.MustCompile(`^my-element-`))
|
|
||||||
```
|
|
||||||
|
|
||||||
If you need more specific checking, you can create a handler that takes in a string and returns a bool to
|
|
||||||
validate the values for a given property. The string parameter has been
|
|
||||||
converted to lowercase and unicode code points have been converted.
|
|
||||||
```go
|
|
||||||
myHandler := func(value string) bool{
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
p.AllowAttrs("style").OnElements("span", "p")
|
|
||||||
// Allow the 'color' property with values validated by the handler (on any element allowed a 'style' attribute)
|
|
||||||
p.AllowStyles("color").MatchingHandler(myHandler).Globally()
|
|
||||||
```
|
|
||||||
|
|
||||||
### Links
|
### Links
|
||||||
|
|
||||||
Links are difficult beasts to sanitise safely and also one of the biggest attack vectors for malicious content.
|
Links are difficult beasts to sanitise safely and also one of the biggest attack vectors for malicious content.
|
||||||
|
@ -295,13 +236,6 @@ Regardless of whether you have enabled parseable URLs, you can force all URLs to
|
||||||
p.RequireNoFollowOnLinks(true)
|
p.RequireNoFollowOnLinks(true)
|
||||||
```
|
```
|
||||||
|
|
||||||
Similarly, you can force all URLs to have "noreferrer" in their rel attribute.
|
|
||||||
```go
|
|
||||||
// This applies to "a" "area" "link" elements that have a "href" attribute
|
|
||||||
p.RequireNoReferrerOnLinks(true)
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
We provide a convenience method that applies all of the above, but you will still need to whitelist the linkable elements for the URL rules to be applied to:
|
We provide a convenience method that applies all of the above, but you will still need to whitelist the linkable elements for the URL rules to be applied to:
|
||||||
```go
|
```go
|
||||||
p.AllowStandardURLs()
|
p.AllowStandardURLs()
|
||||||
|
@ -382,6 +316,7 @@ It is not the job of bluemonday to fix your bad HTML, it is merely the job of bl
|
||||||
|
|
||||||
## TODO
|
## TODO
|
||||||
|
|
||||||
|
* Add support for CSS sanitisation to allow some CSS properties based on a whitelist, possibly using the [Gorilla CSS3 scanner](http://www.gorillatoolkit.org/pkg/css/scanner) - PRs welcome so long as testing covers XSS and demonstrates safety first
|
||||||
* Investigate whether devs want to blacklist elements and attributes. This would allow devs to take an existing policy (such as the `bluemonday.UGCPolicy()` ) that encapsulates 90% of what they're looking for but does more than they need, and to remove the extra things they do not want to make it 100% what they want
|
* Investigate whether devs want to blacklist elements and attributes. This would allow devs to take an existing policy (such as the `bluemonday.UGCPolicy()` ) that encapsulates 90% of what they're looking for but does more than they need, and to remove the extra things they do not want to make it 100% what they want
|
||||||
* Investigate whether devs want a validating HTML mode, in which the HTML elements are not just transformed into a balanced tree (every start tag has a closing tag at the correct depth) but also that elements and character data appear only in their allowed context (i.e. that a `table` element isn't a descendent of a `caption`, that `colgroup`, `thead`, `tbody`, `tfoot` and `tr` are permitted, and that character data is not permitted)
|
* Investigate whether devs want a validating HTML mode, in which the HTML elements are not just transformed into a balanced tree (every start tag has a closing tag at the correct depth) but also that elements and character data appear only in their allowed context (i.e. that a `table` element isn't a descendent of a `caption`, that `colgroup`, `thead`, `tbody`, `tfoot` and `tr` are permitted, and that character data is not permitted)
|
||||||
|
|
||||||
|
|
|
@ -2,9 +2,4 @@ module github.com/microcosm-cc/bluemonday
|
||||||
|
|
||||||
go 1.9
|
go 1.9
|
||||||
|
|
||||||
require (
|
require golang.org/x/net v0.0.0-20181220203305-927f97764cc3
|
||||||
github.com/aymerick/douceur v0.2.0 // indirect
|
|
||||||
github.com/chris-ramon/douceur v0.2.0
|
|
||||||
github.com/gorilla/css v1.0.0 // indirect
|
|
||||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3
|
|
||||||
)
|
|
||||||
|
|
|
@ -1,8 +1,2 @@
|
||||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
|
||||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
|
||||||
github.com/chris-ramon/douceur v0.2.0 h1:IDMEdxlEUUBYBKE4z/mJnFyVXox+MjuEVDJNN27glkU=
|
|
||||||
github.com/chris-ramon/douceur v0.2.0/go.mod h1:wDW5xjJdeoMm1mRt4sD4c/LbF/mWdEpRXQKjTR8nIBE=
|
|
||||||
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
|
|
||||||
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
|
|
||||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
|
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
|
||||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -135,7 +135,7 @@ func (p *Policy) AllowStandardURLs() {
|
||||||
// Most common URL schemes only
|
// Most common URL schemes only
|
||||||
p.AllowURLSchemes("mailto", "http", "https")
|
p.AllowURLSchemes("mailto", "http", "https")
|
||||||
|
|
||||||
// For linking elements we will add rel="nofollow" if it does not already exist
|
// For all anchors we will add rel="nofollow" if it does not already exist
|
||||||
// This applies to "a" "area" "link"
|
// This applies to "a" "area" "link"
|
||||||
p.RequireNoFollowOnLinks(true)
|
p.RequireNoFollowOnLinks(true)
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,8 +29,6 @@
|
||||||
|
|
||||||
package bluemonday
|
package bluemonday
|
||||||
|
|
||||||
//TODO sgutzwiller create map of styles to default handlers
|
|
||||||
//TODO sgutzwiller create handlers for various attributes
|
|
||||||
import (
|
import (
|
||||||
"net/url"
|
"net/url"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
@ -53,22 +51,14 @@ type Policy struct {
|
||||||
// tag is replaced by a space character.
|
// tag is replaced by a space character.
|
||||||
addSpaces bool
|
addSpaces bool
|
||||||
|
|
||||||
// When true, add rel="nofollow" to HTML a, area, and link tags
|
// When true, add rel="nofollow" to HTML anchors
|
||||||
requireNoFollow bool
|
requireNoFollow bool
|
||||||
|
|
||||||
// When true, add rel="nofollow" to HTML a, area, and link tags
|
// When true, add rel="nofollow" to HTML anchors
|
||||||
// Will add for href="http://foo"
|
// Will add for href="http://foo"
|
||||||
// Will skip for href="/foo" or href="foo"
|
// Will skip for href="/foo" or href="foo"
|
||||||
requireNoFollowFullyQualifiedLinks bool
|
requireNoFollowFullyQualifiedLinks bool
|
||||||
|
|
||||||
// When true, add rel="noreferrer" to HTML a, area, and link tags
|
|
||||||
requireNoReferrer bool
|
|
||||||
|
|
||||||
// When true, add rel="noreferrer" to HTML a, area, and link tags
|
|
||||||
// Will add for href="http://foo"
|
|
||||||
// Will skip for href="/foo" or href="foo"
|
|
||||||
requireNoReferrerFullyQualifiedLinks bool
|
|
||||||
|
|
||||||
// When true add target="_blank" to fully qualified links
|
// When true add target="_blank" to fully qualified links
|
||||||
// Will add for href="http://foo"
|
// Will add for href="http://foo"
|
||||||
// Will skip for href="/foo" or href="foo"
|
// Will skip for href="/foo" or href="foo"
|
||||||
|
@ -86,21 +76,9 @@ type Policy struct {
|
||||||
// map[htmlElementName]map[htmlAttributeName]attrPolicy
|
// map[htmlElementName]map[htmlAttributeName]attrPolicy
|
||||||
elsAndAttrs map[string]map[string]attrPolicy
|
elsAndAttrs map[string]map[string]attrPolicy
|
||||||
|
|
||||||
// elsMatchingAndAttrs stores regex based element matches along with attributes
|
|
||||||
elsMatchingAndAttrs map[*regexp.Regexp]map[string]attrPolicy
|
|
||||||
|
|
||||||
// map[htmlAttributeName]attrPolicy
|
// map[htmlAttributeName]attrPolicy
|
||||||
globalAttrs map[string]attrPolicy
|
globalAttrs map[string]attrPolicy
|
||||||
|
|
||||||
// map[htmlElementName]map[cssPropertyName]stylePolicy
|
|
||||||
elsAndStyles map[string]map[string]stylePolicy
|
|
||||||
|
|
||||||
// map[regex]map[cssPropertyName]stylePolicy
|
|
||||||
elsMatchingAndStyles map[*regexp.Regexp]map[string]stylePolicy
|
|
||||||
|
|
||||||
// map[cssPropertyName]stylePolicy
|
|
||||||
globalStyles map[string]stylePolicy
|
|
||||||
|
|
||||||
// If urlPolicy is nil, all URLs with matching schema are allowed.
|
// If urlPolicy is nil, all URLs with matching schema are allowed.
|
||||||
// Otherwise, only the URLs with matching schema and urlPolicy(url)
|
// Otherwise, only the URLs with matching schema and urlPolicy(url)
|
||||||
// returning true are allowed.
|
// returning true are allowed.
|
||||||
|
@ -115,16 +93,6 @@ type Policy struct {
|
||||||
// be maintained in the output HTML.
|
// be maintained in the output HTML.
|
||||||
setOfElementsAllowedWithoutAttrs map[string]struct{}
|
setOfElementsAllowedWithoutAttrs map[string]struct{}
|
||||||
|
|
||||||
// If an element has had all attributes removed as a result of a policy
|
|
||||||
// being applied, then the element would be removed from the output.
|
|
||||||
//
|
|
||||||
// However some elements are valid and have strong layout meaning without
|
|
||||||
// any attributes, i.e. <table>.
|
|
||||||
//
|
|
||||||
// In this case, any element matching a regular expression will be accepted without
|
|
||||||
// attributes added.
|
|
||||||
setOfElementsMatchingAllowedWithoutAttrs []*regexp.Regexp
|
|
||||||
|
|
||||||
setOfElementsToSkipContent map[string]struct{}
|
setOfElementsToSkipContent map[string]struct{}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -135,20 +103,6 @@ type attrPolicy struct {
|
||||||
regexp *regexp.Regexp
|
regexp *regexp.Regexp
|
||||||
}
|
}
|
||||||
|
|
||||||
type stylePolicy struct {
|
|
||||||
// handler to validate
|
|
||||||
handler func(string) bool
|
|
||||||
|
|
||||||
// optional pattern to match, when not nil the regexp needs to match
|
|
||||||
// otherwise the property is removed
|
|
||||||
regexp *regexp.Regexp
|
|
||||||
|
|
||||||
// optional list of allowed property values, for properties which
|
|
||||||
// have a defined list of allowed values; property will be removed
|
|
||||||
// if the value is not allowed
|
|
||||||
enum []string
|
|
||||||
}
|
|
||||||
|
|
||||||
type attrPolicyBuilder struct {
|
type attrPolicyBuilder struct {
|
||||||
p *Policy
|
p *Policy
|
||||||
|
|
||||||
|
@ -157,26 +111,13 @@ type attrPolicyBuilder struct {
|
||||||
allowEmpty bool
|
allowEmpty bool
|
||||||
}
|
}
|
||||||
|
|
||||||
type stylePolicyBuilder struct {
|
|
||||||
p *Policy
|
|
||||||
|
|
||||||
propertyNames []string
|
|
||||||
regexp *regexp.Regexp
|
|
||||||
enum []string
|
|
||||||
handler func(string) bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type urlPolicy func(url *url.URL) (allowUrl bool)
|
type urlPolicy func(url *url.URL) (allowUrl bool)
|
||||||
|
|
||||||
// init initializes the maps if this has not been done already
|
// init initializes the maps if this has not been done already
|
||||||
func (p *Policy) init() {
|
func (p *Policy) init() {
|
||||||
if !p.initialized {
|
if !p.initialized {
|
||||||
p.elsAndAttrs = make(map[string]map[string]attrPolicy)
|
p.elsAndAttrs = make(map[string]map[string]attrPolicy)
|
||||||
p.elsMatchingAndAttrs = make(map[*regexp.Regexp]map[string]attrPolicy)
|
|
||||||
p.globalAttrs = make(map[string]attrPolicy)
|
p.globalAttrs = make(map[string]attrPolicy)
|
||||||
p.elsAndStyles = make(map[string]map[string]stylePolicy)
|
|
||||||
p.elsMatchingAndStyles = make(map[*regexp.Regexp]map[string]stylePolicy)
|
|
||||||
p.globalStyles = make(map[string]stylePolicy)
|
|
||||||
p.allowURLSchemes = make(map[string]urlPolicy)
|
p.allowURLSchemes = make(map[string]urlPolicy)
|
||||||
p.setOfElementsAllowedWithoutAttrs = make(map[string]struct{})
|
p.setOfElementsAllowedWithoutAttrs = make(map[string]struct{})
|
||||||
p.setOfElementsToSkipContent = make(map[string]struct{})
|
p.setOfElementsToSkipContent = make(map[string]struct{})
|
||||||
|
@ -304,30 +245,6 @@ func (abp *attrPolicyBuilder) OnElements(elements ...string) *Policy {
|
||||||
return abp.p
|
return abp.p
|
||||||
}
|
}
|
||||||
|
|
||||||
// OnElementsMatching will bind an attribute policy to all elements matching a given regex
|
|
||||||
// and return the updated policy
|
|
||||||
func (abp *attrPolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy {
|
|
||||||
for _, attr := range abp.attrNames {
|
|
||||||
if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok {
|
|
||||||
abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
|
|
||||||
}
|
|
||||||
ap := attrPolicy{}
|
|
||||||
if abp.regexp != nil {
|
|
||||||
ap.regexp = abp.regexp
|
|
||||||
}
|
|
||||||
abp.p.elsMatchingAndAttrs[regex][attr] = ap
|
|
||||||
}
|
|
||||||
|
|
||||||
if abp.allowEmpty {
|
|
||||||
abp.p.setOfElementsMatchingAllowedWithoutAttrs = append(abp.p.setOfElementsMatchingAllowedWithoutAttrs, regex)
|
|
||||||
if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok {
|
|
||||||
abp.p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return abp.p
|
|
||||||
}
|
|
||||||
|
|
||||||
// Globally will bind an attribute policy to all HTML elements and return the
|
// Globally will bind an attribute policy to all HTML elements and return the
|
||||||
// updated policy
|
// updated policy
|
||||||
func (abp *attrPolicyBuilder) Globally() *Policy {
|
func (abp *attrPolicyBuilder) Globally() *Policy {
|
||||||
|
@ -348,139 +265,6 @@ func (abp *attrPolicyBuilder) Globally() *Policy {
|
||||||
return abp.p
|
return abp.p
|
||||||
}
|
}
|
||||||
|
|
||||||
// AllowStyles takes a range of CSS property names and returns a
|
|
||||||
// style policy builder that allows you to specify the pattern and scope of
|
|
||||||
// the whitelisted property.
|
|
||||||
//
|
|
||||||
// The style policy is only added to the core policy when either Globally()
|
|
||||||
// or OnElements(...) are called.
|
|
||||||
func (p *Policy) AllowStyles(propertyNames ...string) *stylePolicyBuilder {
|
|
||||||
|
|
||||||
p.init()
|
|
||||||
|
|
||||||
abp := stylePolicyBuilder{
|
|
||||||
p: p,
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, propertyName := range propertyNames {
|
|
||||||
abp.propertyNames = append(abp.propertyNames, strings.ToLower(propertyName))
|
|
||||||
}
|
|
||||||
|
|
||||||
return &abp
|
|
||||||
}
|
|
||||||
|
|
||||||
// Matching allows a regular expression to be applied to a nascent style
|
|
||||||
// policy, and returns the style policy. Calling this more than once will
|
|
||||||
// replace the existing regexp.
|
|
||||||
func (spb *stylePolicyBuilder) Matching(regex *regexp.Regexp) *stylePolicyBuilder {
|
|
||||||
|
|
||||||
spb.regexp = regex
|
|
||||||
|
|
||||||
return spb
|
|
||||||
}
|
|
||||||
|
|
||||||
// MatchingEnum allows a list of allowed values to be applied to a nascent style
|
|
||||||
// policy, and returns the style policy. Calling this more than once will
|
|
||||||
// replace the existing list of allowed values.
|
|
||||||
func (spb *stylePolicyBuilder) MatchingEnum(enum ...string) *stylePolicyBuilder {
|
|
||||||
|
|
||||||
spb.enum = enum
|
|
||||||
|
|
||||||
return spb
|
|
||||||
}
|
|
||||||
|
|
||||||
// MatchingHandler allows a handler to be applied to a nascent style
|
|
||||||
// policy, and returns the style policy. Calling this more than once will
|
|
||||||
// replace the existing handler.
|
|
||||||
func (spb *stylePolicyBuilder) MatchingHandler(handler func(string) bool) *stylePolicyBuilder {
|
|
||||||
|
|
||||||
spb.handler = handler
|
|
||||||
|
|
||||||
return spb
|
|
||||||
}
|
|
||||||
|
|
||||||
// OnElements will bind a style policy to a given range of HTML elements
|
|
||||||
// and return the updated policy
|
|
||||||
func (spb *stylePolicyBuilder) OnElements(elements ...string) *Policy {
|
|
||||||
|
|
||||||
for _, element := range elements {
|
|
||||||
element = strings.ToLower(element)
|
|
||||||
|
|
||||||
for _, attr := range spb.propertyNames {
|
|
||||||
|
|
||||||
if _, ok := spb.p.elsAndStyles[element]; !ok {
|
|
||||||
spb.p.elsAndStyles[element] = make(map[string]stylePolicy)
|
|
||||||
}
|
|
||||||
|
|
||||||
sp := stylePolicy{}
|
|
||||||
if spb.handler != nil {
|
|
||||||
sp.handler = spb.handler
|
|
||||||
} else if len(spb.enum) > 0 {
|
|
||||||
sp.enum = spb.enum
|
|
||||||
} else if spb.regexp != nil {
|
|
||||||
sp.regexp = spb.regexp
|
|
||||||
} else {
|
|
||||||
sp.handler = getDefaultHandler(attr)
|
|
||||||
}
|
|
||||||
spb.p.elsAndStyles[element][attr] = sp
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return spb.p
|
|
||||||
}
|
|
||||||
|
|
||||||
// OnElementsMatching will bind a style policy to any HTML elements matching the pattern
|
|
||||||
// and return the updated policy
|
|
||||||
func (spb *stylePolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy {
|
|
||||||
|
|
||||||
for _, attr := range spb.propertyNames {
|
|
||||||
|
|
||||||
if _, ok := spb.p.elsMatchingAndStyles[regex]; !ok {
|
|
||||||
spb.p.elsMatchingAndStyles[regex] = make(map[string]stylePolicy)
|
|
||||||
}
|
|
||||||
|
|
||||||
sp := stylePolicy{}
|
|
||||||
if spb.handler != nil {
|
|
||||||
sp.handler = spb.handler
|
|
||||||
} else if len(spb.enum) > 0 {
|
|
||||||
sp.enum = spb.enum
|
|
||||||
} else if spb.regexp != nil {
|
|
||||||
sp.regexp = spb.regexp
|
|
||||||
} else {
|
|
||||||
sp.handler = getDefaultHandler(attr)
|
|
||||||
}
|
|
||||||
spb.p.elsMatchingAndStyles[regex][attr] = sp
|
|
||||||
}
|
|
||||||
|
|
||||||
return spb.p
|
|
||||||
}
|
|
||||||
|
|
||||||
// Globally will bind a style policy to all HTML elements and return the
|
|
||||||
// updated policy
|
|
||||||
func (spb *stylePolicyBuilder) Globally() *Policy {
|
|
||||||
|
|
||||||
for _, attr := range spb.propertyNames {
|
|
||||||
if _, ok := spb.p.globalStyles[attr]; !ok {
|
|
||||||
spb.p.globalStyles[attr] = stylePolicy{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use only one strategy for validating styles, fallback to default
|
|
||||||
sp := stylePolicy{}
|
|
||||||
if spb.handler != nil {
|
|
||||||
sp.handler = spb.handler
|
|
||||||
} else if len(spb.enum) > 0 {
|
|
||||||
sp.enum = spb.enum
|
|
||||||
} else if spb.regexp != nil {
|
|
||||||
sp.regexp = spb.regexp
|
|
||||||
} else {
|
|
||||||
sp.handler = getDefaultHandler(attr)
|
|
||||||
}
|
|
||||||
spb.p.globalStyles[attr] = sp
|
|
||||||
}
|
|
||||||
|
|
||||||
return spb.p
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllowElements will append HTML elements to the whitelist without applying an
|
// AllowElements will append HTML elements to the whitelist without applying an
|
||||||
// attribute policy to those elements (the elements are permitted
|
// attribute policy to those elements (the elements are permitted
|
||||||
// sans-attributes)
|
// sans-attributes)
|
||||||
|
@ -498,16 +282,8 @@ func (p *Policy) AllowElements(names ...string) *Policy {
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Policy) AllowElementsMatching(regex *regexp.Regexp) *Policy {
|
// RequireNoFollowOnLinks will result in all <a> tags having a rel="nofollow"
|
||||||
p.init()
|
// added to them if one does not already exist
|
||||||
if _, ok := p.elsMatchingAndAttrs[regex]; !ok {
|
|
||||||
p.elsMatchingAndAttrs[regex] = make(map[string]attrPolicy)
|
|
||||||
}
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireNoFollowOnLinks will result in all a, area, link tags having a
|
|
||||||
// rel="nofollow"added to them if one does not already exist
|
|
||||||
//
|
//
|
||||||
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
||||||
func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy {
|
func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy {
|
||||||
|
@ -518,10 +294,9 @@ func (p *Policy) RequireNoFollowOnLinks(require bool) *Policy {
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
// RequireNoFollowOnFullyQualifiedLinks will result in all a, area, and link
|
// RequireNoFollowOnFullyQualifiedLinks will result in all <a> tags that point
|
||||||
// tags that point to a non-local destination (i.e. starts with a protocol and
|
// to a non-local destination (i.e. starts with a protocol and has a host)
|
||||||
// has a host) having a rel="nofollow" added to them if one does not already
|
// having a rel="nofollow" added to them if one does not already exist
|
||||||
// exist
|
|
||||||
//
|
//
|
||||||
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
||||||
func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy {
|
func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy {
|
||||||
|
@ -532,35 +307,9 @@ func (p *Policy) RequireNoFollowOnFullyQualifiedLinks(require bool) *Policy {
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
// RequireNoReferrerOnLinks will result in all a, area, and link tags having a
|
// AddTargetBlankToFullyQualifiedLinks will result in all <a> tags that point
|
||||||
// rel="noreferrrer" added to them if one does not already exist
|
// to a non-local destination (i.e. starts with a protocol and has a host)
|
||||||
//
|
// having a target="_blank" added to them if one does not already exist
|
||||||
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
|
||||||
func (p *Policy) RequireNoReferrerOnLinks(require bool) *Policy {
|
|
||||||
|
|
||||||
p.requireNoReferrer = require
|
|
||||||
p.requireParseableURLs = true
|
|
||||||
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireNoReferrerOnFullyQualifiedLinks will result in all a, area, and link
|
|
||||||
// tags that point to a non-local destination (i.e. starts with a protocol and
|
|
||||||
// has a host) having a rel="noreferrer" added to them if one does not already
|
|
||||||
// exist
|
|
||||||
//
|
|
||||||
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
|
||||||
func (p *Policy) RequireNoReferrerOnFullyQualifiedLinks(require bool) *Policy {
|
|
||||||
|
|
||||||
p.requireNoReferrerFullyQualifiedLinks = require
|
|
||||||
p.requireParseableURLs = true
|
|
||||||
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddTargetBlankToFullyQualifiedLinks will result in all a, area and link tags
|
|
||||||
// that point to a non-local destination (i.e. starts with a protocol and has a
|
|
||||||
// host) having a target="_blank" added to them if one does not already exist
|
|
||||||
//
|
//
|
||||||
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
// Note: This requires p.RequireParseableURLs(true) and will enable it.
|
||||||
func (p *Policy) AddTargetBlankToFullyQualifiedLinks(require bool) *Policy {
|
func (p *Policy) AddTargetBlankToFullyQualifiedLinks(require bool) *Policy {
|
||||||
|
|
|
@ -34,19 +34,15 @@ import (
|
||||||
"io"
|
"io"
|
||||||
"net/url"
|
"net/url"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"golang.org/x/net/html"
|
"golang.org/x/net/html"
|
||||||
|
|
||||||
cssparser "github.com/chris-ramon/douceur/parser"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
dataAttribute = regexp.MustCompile("^data-.+")
|
dataAttribute = regexp.MustCompile("^data-.+")
|
||||||
dataAttributeXMLPrefix = regexp.MustCompile("^xml.+")
|
dataAttributeXMLPrefix = regexp.MustCompile("^xml.+")
|
||||||
dataAttributeInvalidChars = regexp.MustCompile("[A-Z;]+")
|
dataAttributeInvalidChars = regexp.MustCompile("[A-Z;]+")
|
||||||
cssUnicodeChar = regexp.MustCompile(`\\[0-9a-f]{1,6} ?`)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Sanitize takes a string that contains a HTML fragment or document and applies
|
// Sanitize takes a string that contains a HTML fragment or document and applies
|
||||||
|
@ -86,98 +82,6 @@ func (p *Policy) SanitizeReader(r io.Reader) *bytes.Buffer {
|
||||||
return p.sanitize(r)
|
return p.sanitize(r)
|
||||||
}
|
}
|
||||||
|
|
||||||
const escapedURLChars = "'<>\"\r"
|
|
||||||
|
|
||||||
func escapeUrlComponent(val string) string {
|
|
||||||
w := bytes.NewBufferString("")
|
|
||||||
i := strings.IndexAny(val, escapedURLChars)
|
|
||||||
for i != -1 {
|
|
||||||
if _, err := w.WriteString(val[:i]); err != nil {
|
|
||||||
return w.String()
|
|
||||||
}
|
|
||||||
var esc string
|
|
||||||
switch val[i] {
|
|
||||||
case '\'':
|
|
||||||
// "'" is shorter than "'" and apos was not in HTML until HTML5.
|
|
||||||
esc = "'"
|
|
||||||
case '<':
|
|
||||||
esc = "<"
|
|
||||||
case '>':
|
|
||||||
esc = ">"
|
|
||||||
case '"':
|
|
||||||
// """ is shorter than """.
|
|
||||||
esc = """
|
|
||||||
case '\r':
|
|
||||||
esc = " "
|
|
||||||
default:
|
|
||||||
panic("unrecognized escape character")
|
|
||||||
}
|
|
||||||
val = val[i+1:]
|
|
||||||
if _, err := w.WriteString(esc); err != nil {
|
|
||||||
return w.String()
|
|
||||||
}
|
|
||||||
i = strings.IndexAny(val, escapedURLChars)
|
|
||||||
}
|
|
||||||
w.WriteString(val)
|
|
||||||
return w.String()
|
|
||||||
}
|
|
||||||
|
|
||||||
func sanitizedUrl(val string) (string, error) {
|
|
||||||
u, err := url.Parse(val)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
// sanitize the url query params
|
|
||||||
sanitizedQueryValues := make(url.Values, 0)
|
|
||||||
queryValues := u.Query()
|
|
||||||
for k, vals := range queryValues {
|
|
||||||
sk := html.EscapeString(k)
|
|
||||||
for _, v := range vals {
|
|
||||||
sv := v
|
|
||||||
sanitizedQueryValues.Add(sk, sv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
u.RawQuery = sanitizedQueryValues.Encode()
|
|
||||||
// u.String() will also sanitize host/scheme/user/pass
|
|
||||||
return u.String(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Policy) writeLinkableBuf(buff *bytes.Buffer, token *html.Token) {
|
|
||||||
// do not escape multiple query parameters
|
|
||||||
tokenBuff := bytes.NewBufferString("")
|
|
||||||
tokenBuff.WriteString("<")
|
|
||||||
tokenBuff.WriteString(token.Data)
|
|
||||||
for _, attr := range token.Attr {
|
|
||||||
tokenBuff.WriteByte(' ')
|
|
||||||
tokenBuff.WriteString(attr.Key)
|
|
||||||
tokenBuff.WriteString(`="`)
|
|
||||||
switch attr.Key {
|
|
||||||
case "href", "src":
|
|
||||||
u, ok := p.validURL(attr.Val)
|
|
||||||
if !ok {
|
|
||||||
tokenBuff.WriteString(html.EscapeString(attr.Val))
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
u, err := sanitizedUrl(u)
|
|
||||||
if err == nil {
|
|
||||||
tokenBuff.WriteString(u)
|
|
||||||
} else {
|
|
||||||
// fallthrough
|
|
||||||
tokenBuff.WriteString(html.EscapeString(attr.Val))
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
// re-apply
|
|
||||||
tokenBuff.WriteString(html.EscapeString(attr.Val))
|
|
||||||
}
|
|
||||||
tokenBuff.WriteByte('"')
|
|
||||||
}
|
|
||||||
if token.Type == html.SelfClosingTagToken {
|
|
||||||
tokenBuff.WriteString("/")
|
|
||||||
}
|
|
||||||
tokenBuff.WriteString(">")
|
|
||||||
buff.WriteString(tokenBuff.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Performs the actual sanitization process.
|
// Performs the actual sanitization process.
|
||||||
func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
|
|
||||||
|
@ -229,12 +133,10 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
|
|
||||||
case html.StartTagToken:
|
case html.StartTagToken:
|
||||||
|
|
||||||
mostRecentlyStartedToken = strings.ToLower(token.Data)
|
mostRecentlyStartedToken = token.Data
|
||||||
|
|
||||||
aps, ok := p.elsAndAttrs[token.Data]
|
aps, ok := p.elsAndAttrs[token.Data]
|
||||||
if !ok {
|
if !ok {
|
||||||
aa, matched := p.matchRegex(token.Data)
|
|
||||||
if !matched {
|
|
||||||
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
|
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
|
||||||
skipElementContent = true
|
skipElementContent = true
|
||||||
skippingElementsCount++
|
skippingElementsCount++
|
||||||
|
@ -244,8 +146,7 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
aps = aa
|
|
||||||
}
|
|
||||||
if len(token.Attr) != 0 {
|
if len(token.Attr) != 0 {
|
||||||
token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps)
|
token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps)
|
||||||
}
|
}
|
||||||
|
@ -262,17 +163,12 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
if !skipElementContent {
|
if !skipElementContent {
|
||||||
// do not escape multiple query parameters
|
|
||||||
if linkable(token.Data) {
|
|
||||||
p.writeLinkableBuf(&buff, &token)
|
|
||||||
} else {
|
|
||||||
buff.WriteString(token.String())
|
buff.WriteString(token.String())
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
case html.EndTagToken:
|
case html.EndTagToken:
|
||||||
|
|
||||||
if mostRecentlyStartedToken == strings.ToLower(token.Data) {
|
if mostRecentlyStartedToken == token.Data {
|
||||||
mostRecentlyStartedToken = ""
|
mostRecentlyStartedToken = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -286,28 +182,19 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, ok := p.elsAndAttrs[token.Data]; !ok {
|
if _, ok := p.elsAndAttrs[token.Data]; !ok {
|
||||||
match := false
|
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok {
|
||||||
for regex := range p.elsMatchingAndAttrs {
|
|
||||||
if regex.MatchString(token.Data) {
|
|
||||||
skipElementContent = false
|
|
||||||
match = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if _, ok := p.setOfElementsToSkipContent[token.Data]; ok && !match {
|
|
||||||
skippingElementsCount--
|
skippingElementsCount--
|
||||||
if skippingElementsCount == 0 {
|
if skippingElementsCount == 0 {
|
||||||
skipElementContent = false
|
skipElementContent = false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !match {
|
|
||||||
if p.addSpaces {
|
if p.addSpaces {
|
||||||
buff.WriteString(" ")
|
buff.WriteString(" ")
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if !skipElementContent {
|
if !skipElementContent {
|
||||||
buff.WriteString(token.String())
|
buff.WriteString(token.String())
|
||||||
|
@ -317,15 +204,11 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
|
|
||||||
aps, ok := p.elsAndAttrs[token.Data]
|
aps, ok := p.elsAndAttrs[token.Data]
|
||||||
if !ok {
|
if !ok {
|
||||||
aa, matched := p.matchRegex(token.Data)
|
if p.addSpaces {
|
||||||
if !matched {
|
|
||||||
if p.addSpaces && !matched {
|
|
||||||
buff.WriteString(" ")
|
buff.WriteString(" ")
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
aps = aa
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(token.Attr) != 0 {
|
if len(token.Attr) != 0 {
|
||||||
token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps)
|
token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps)
|
||||||
|
@ -334,17 +217,13 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
if len(token.Attr) == 0 && !p.allowNoAttrs(token.Data) {
|
if len(token.Attr) == 0 && !p.allowNoAttrs(token.Data) {
|
||||||
if p.addSpaces {
|
if p.addSpaces {
|
||||||
buff.WriteString(" ")
|
buff.WriteString(" ")
|
||||||
|
}
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
|
||||||
if !skipElementContent {
|
if !skipElementContent {
|
||||||
// do not escape multiple query parameters
|
|
||||||
if linkable(token.Data) {
|
|
||||||
p.writeLinkableBuf(&buff, &token)
|
|
||||||
} else {
|
|
||||||
buff.WriteString(token.String())
|
buff.WriteString(token.String())
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
case html.TextToken:
|
case html.TextToken:
|
||||||
|
|
||||||
|
@ -363,7 +242,6 @@ func (p *Policy) sanitize(r io.Reader) *bytes.Buffer {
|
||||||
buff.WriteString(token.String())
|
buff.WriteString(token.String())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
// A token that didn't exist in the html package when we wrote this
|
// A token that didn't exist in the html package when we wrote this
|
||||||
return &bytes.Buffer{}
|
return &bytes.Buffer{}
|
||||||
|
@ -384,23 +262,6 @@ func (p *Policy) sanitizeAttrs(
|
||||||
return attrs
|
return attrs
|
||||||
}
|
}
|
||||||
|
|
||||||
hasStylePolicies := false
|
|
||||||
sps, elementHasStylePolicies := p.elsAndStyles[elementName]
|
|
||||||
if len(p.globalStyles) > 0 || (elementHasStylePolicies && len(sps) > 0) {
|
|
||||||
hasStylePolicies = true
|
|
||||||
}
|
|
||||||
// no specific element policy found, look for a pattern match
|
|
||||||
if !hasStylePolicies {
|
|
||||||
for k, v := range p.elsMatchingAndStyles {
|
|
||||||
if k.MatchString(elementName) {
|
|
||||||
if len(v) > 0 {
|
|
||||||
hasStylePolicies = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Builds a new attribute slice based on the whether the attribute has been
|
// Builds a new attribute slice based on the whether the attribute has been
|
||||||
// whitelisted explicitly or globally.
|
// whitelisted explicitly or globally.
|
||||||
cleanAttrs := []html.Attribute{}
|
cleanAttrs := []html.Attribute{}
|
||||||
|
@ -412,19 +273,6 @@ func (p *Policy) sanitizeAttrs(
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Is this a "style" attribute, and if so, do we need to sanitize it?
|
|
||||||
if htmlAttr.Key == "style" && hasStylePolicies {
|
|
||||||
htmlAttr = p.sanitizeStyles(htmlAttr, elementName)
|
|
||||||
if htmlAttr.Val == "" {
|
|
||||||
// We've sanitized away any and all styles; don't bother to
|
|
||||||
// output the style attribute (even if it's allowed)
|
|
||||||
continue
|
|
||||||
} else {
|
|
||||||
cleanAttrs = append(cleanAttrs, htmlAttr)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Is there an element specific attribute policy that applies?
|
// Is there an element specific attribute policy that applies?
|
||||||
if ap, ok := aps[htmlAttr.Key]; ok {
|
if ap, ok := aps[htmlAttr.Key]; ok {
|
||||||
if ap.regexp != nil {
|
if ap.regexp != nil {
|
||||||
|
@ -506,8 +354,6 @@ func (p *Policy) sanitizeAttrs(
|
||||||
|
|
||||||
if (p.requireNoFollow ||
|
if (p.requireNoFollow ||
|
||||||
p.requireNoFollowFullyQualifiedLinks ||
|
p.requireNoFollowFullyQualifiedLinks ||
|
||||||
p.requireNoReferrer ||
|
|
||||||
p.requireNoReferrerFullyQualifiedLinks ||
|
|
||||||
p.addTargetBlankToFullyQualifiedLinks) &&
|
p.addTargetBlankToFullyQualifiedLinks) &&
|
||||||
len(cleanAttrs) > 0 {
|
len(cleanAttrs) > 0 {
|
||||||
|
|
||||||
|
@ -535,16 +381,12 @@ func (p *Policy) sanitizeAttrs(
|
||||||
if hrefFound {
|
if hrefFound {
|
||||||
var (
|
var (
|
||||||
noFollowFound bool
|
noFollowFound bool
|
||||||
noReferrerFound bool
|
|
||||||
targetBlankFound bool
|
targetBlankFound bool
|
||||||
)
|
)
|
||||||
|
|
||||||
addNoFollow := (p.requireNoFollow ||
|
addNoFollow := (p.requireNoFollow ||
|
||||||
externalLink && p.requireNoFollowFullyQualifiedLinks)
|
externalLink && p.requireNoFollowFullyQualifiedLinks)
|
||||||
|
|
||||||
addNoReferrer := (p.requireNoReferrer ||
|
|
||||||
externalLink && p.requireNoReferrerFullyQualifiedLinks)
|
|
||||||
|
|
||||||
addTargetBlank := (externalLink &&
|
addTargetBlank := (externalLink &&
|
||||||
p.addTargetBlankToFullyQualifiedLinks)
|
p.addTargetBlankToFullyQualifiedLinks)
|
||||||
|
|
||||||
|
@ -552,18 +394,18 @@ func (p *Policy) sanitizeAttrs(
|
||||||
for _, htmlAttr := range cleanAttrs {
|
for _, htmlAttr := range cleanAttrs {
|
||||||
|
|
||||||
var appended bool
|
var appended bool
|
||||||
if htmlAttr.Key == "rel" && (addNoFollow || addNoReferrer) {
|
if htmlAttr.Key == "rel" && addNoFollow {
|
||||||
|
|
||||||
if addNoFollow && !strings.Contains(htmlAttr.Val, "nofollow") {
|
if strings.Contains(htmlAttr.Val, "nofollow") {
|
||||||
htmlAttr.Val += " nofollow"
|
noFollowFound = true
|
||||||
}
|
|
||||||
if addNoReferrer && !strings.Contains(htmlAttr.Val, "noreferrer") {
|
|
||||||
htmlAttr.Val += " noreferrer"
|
|
||||||
}
|
|
||||||
noFollowFound = addNoFollow
|
|
||||||
noReferrerFound = addNoReferrer
|
|
||||||
tmpAttrs = append(tmpAttrs, htmlAttr)
|
tmpAttrs = append(tmpAttrs, htmlAttr)
|
||||||
appended = true
|
appended = true
|
||||||
|
} else {
|
||||||
|
htmlAttr.Val += " nofollow"
|
||||||
|
noFollowFound = true
|
||||||
|
tmpAttrs = append(tmpAttrs, htmlAttr)
|
||||||
|
appended = true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if elementName == "a" && htmlAttr.Key == "target" {
|
if elementName == "a" && htmlAttr.Key == "target" {
|
||||||
|
@ -582,22 +424,14 @@ func (p *Policy) sanitizeAttrs(
|
||||||
tmpAttrs = append(tmpAttrs, htmlAttr)
|
tmpAttrs = append(tmpAttrs, htmlAttr)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if noFollowFound || noReferrerFound || targetBlankFound {
|
if noFollowFound || targetBlankFound {
|
||||||
cleanAttrs = tmpAttrs
|
cleanAttrs = tmpAttrs
|
||||||
}
|
}
|
||||||
|
|
||||||
if (addNoFollow && !noFollowFound) || (addNoReferrer && !noReferrerFound) {
|
if addNoFollow && !noFollowFound {
|
||||||
rel := html.Attribute{}
|
rel := html.Attribute{}
|
||||||
rel.Key = "rel"
|
rel.Key = "rel"
|
||||||
if addNoFollow {
|
|
||||||
rel.Val = "nofollow"
|
rel.Val = "nofollow"
|
||||||
}
|
|
||||||
if addNoReferrer {
|
|
||||||
if rel.Val != "" {
|
|
||||||
rel.Val += " "
|
|
||||||
}
|
|
||||||
rel.Val += "noreferrer"
|
|
||||||
}
|
|
||||||
cleanAttrs = append(cleanAttrs, rel)
|
cleanAttrs = append(cleanAttrs, rel)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -667,95 +501,8 @@ func (p *Policy) sanitizeAttrs(
|
||||||
return cleanAttrs
|
return cleanAttrs
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Policy) sanitizeStyles(attr html.Attribute, elementName string) html.Attribute {
|
|
||||||
sps := p.elsAndStyles[elementName]
|
|
||||||
if len(sps) == 0 {
|
|
||||||
sps = map[string]stylePolicy{}
|
|
||||||
// check for any matching elements, if we don't already have a policy found
|
|
||||||
// if multiple matches are found they will be overwritten, it's best
|
|
||||||
// to not have overlapping matchers
|
|
||||||
for regex, policies := range p.elsMatchingAndStyles {
|
|
||||||
if regex.MatchString(elementName) {
|
|
||||||
for k, v := range policies {
|
|
||||||
sps[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//Add semi-colon to end to fix parsing issue
|
|
||||||
if len(attr.Val) > 0 && attr.Val[len(attr.Val)-1] != ';' {
|
|
||||||
attr.Val = attr.Val + ";"
|
|
||||||
}
|
|
||||||
decs, err := cssparser.ParseDeclarations(attr.Val)
|
|
||||||
if err != nil {
|
|
||||||
attr.Val = ""
|
|
||||||
return attr
|
|
||||||
}
|
|
||||||
clean := []string{}
|
|
||||||
prefixes := []string{"-webkit-", "-moz-", "-ms-", "-o-", "mso-", "-xv-", "-atsc-", "-wap-", "-khtml-", "prince-", "-ah-", "-hp-", "-ro-", "-rim-", "-tc-"}
|
|
||||||
|
|
||||||
for _, dec := range decs {
|
|
||||||
addedProperty := false
|
|
||||||
tempProperty := strings.ToLower(dec.Property)
|
|
||||||
tempValue := removeUnicode(strings.ToLower(dec.Value))
|
|
||||||
for _, i := range prefixes {
|
|
||||||
tempProperty = strings.TrimPrefix(tempProperty, i)
|
|
||||||
}
|
|
||||||
if sp, ok := sps[tempProperty]; ok {
|
|
||||||
if sp.handler != nil {
|
|
||||||
if sp.handler(tempValue) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
addedProperty = true
|
|
||||||
}
|
|
||||||
} else if len(sp.enum) > 0 {
|
|
||||||
if stringInSlice(tempValue, sp.enum) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
addedProperty = true
|
|
||||||
}
|
|
||||||
} else if sp.regexp != nil {
|
|
||||||
if sp.regexp.MatchString(tempValue) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
addedProperty = true
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if sp, ok := p.globalStyles[tempProperty]; ok && !addedProperty {
|
|
||||||
if sp.handler != nil {
|
|
||||||
if sp.handler(tempValue) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
}
|
|
||||||
} else if len(sp.enum) > 0 {
|
|
||||||
if stringInSlice(tempValue, sp.enum) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
}
|
|
||||||
} else if sp.regexp != nil {
|
|
||||||
if sp.regexp.MatchString(tempValue) {
|
|
||||||
clean = append(clean, dec.Property+": "+dec.Value)
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(clean) > 0 {
|
|
||||||
attr.Val = strings.Join(clean, "; ")
|
|
||||||
} else {
|
|
||||||
attr.Val = ""
|
|
||||||
}
|
|
||||||
return attr
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Policy) allowNoAttrs(elementName string) bool {
|
func (p *Policy) allowNoAttrs(elementName string) bool {
|
||||||
_, ok := p.setOfElementsAllowedWithoutAttrs[elementName]
|
_, ok := p.setOfElementsAllowedWithoutAttrs[elementName]
|
||||||
if !ok {
|
|
||||||
for _, r := range p.setOfElementsMatchingAllowedWithoutAttrs {
|
|
||||||
if r.MatchString(elementName) {
|
|
||||||
ok = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ok
|
return ok
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -814,16 +561,6 @@ func linkable(elementName string) bool {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// stringInSlice returns true if needle exists in haystack
|
|
||||||
func stringInSlice(needle string, haystack []string) bool {
|
|
||||||
for _, straw := range haystack {
|
|
||||||
if strings.ToLower(straw) == strings.ToLower(needle) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func isDataAttribute(val string) bool {
|
func isDataAttribute(val string) bool {
|
||||||
if !dataAttribute.MatchString(val) {
|
if !dataAttribute.MatchString(val) {
|
||||||
return false
|
return false
|
||||||
|
@ -842,48 +579,3 @@ func isDataAttribute(val string) bool {
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func removeUnicode(value string) string {
|
|
||||||
substitutedValue := value
|
|
||||||
currentLoc := cssUnicodeChar.FindStringIndex(substitutedValue)
|
|
||||||
for currentLoc != nil {
|
|
||||||
|
|
||||||
character := substitutedValue[currentLoc[0]+1 : currentLoc[1]]
|
|
||||||
character = strings.TrimSpace(character)
|
|
||||||
if len(character) < 4 {
|
|
||||||
character = strings.Repeat("0", 4-len(character)) + character
|
|
||||||
} else {
|
|
||||||
for len(character) > 4 {
|
|
||||||
if character[0] != '0' {
|
|
||||||
character = ""
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
character = character[1:]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
character = "\\u" + character
|
|
||||||
translatedChar, err := strconv.Unquote(`"` + character + `"`)
|
|
||||||
translatedChar = strings.TrimSpace(translatedChar)
|
|
||||||
if err != nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
substitutedValue = substitutedValue[0:currentLoc[0]] + translatedChar + substitutedValue[currentLoc[1]:]
|
|
||||||
currentLoc = cssUnicodeChar.FindStringIndex(substitutedValue)
|
|
||||||
}
|
|
||||||
return substitutedValue
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *Policy) matchRegex(elementName string) (map[string]attrPolicy, bool) {
|
|
||||||
aps := make(map[string]attrPolicy, 0)
|
|
||||||
matched := false
|
|
||||||
for regex, attrs := range p.elsMatchingAndAttrs {
|
|
||||||
if regex.MatchString(elementName) {
|
|
||||||
matched = true
|
|
||||||
for k, v := range attrs {
|
|
||||||
aps[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return aps, matched
|
|
||||||
}
|
|
||||||
|
|
|
@ -11,16 +11,11 @@ type Buffer struct {
|
||||||
bytes.Buffer
|
bytes.Buffer
|
||||||
}
|
}
|
||||||
|
|
||||||
// PrintableRuneWidth returns the width of all printable runes in the buffer.
|
// PrintableRuneCount returns the amount of printable runes in the buffer.
|
||||||
func (w Buffer) PrintableRuneWidth() int {
|
func (w Buffer) PrintableRuneCount() int {
|
||||||
return PrintableRuneWidth(w.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func PrintableRuneWidth(s string) int {
|
|
||||||
var n int
|
var n int
|
||||||
var ansi bool
|
var ansi bool
|
||||||
|
for _, c := range w.String() {
|
||||||
for _, c := range s {
|
|
||||||
if c == '\x1B' {
|
if c == '\x1B' {
|
||||||
// ANSI escape sequence
|
// ANSI escape sequence
|
||||||
ansi = true
|
ansi = true
|
||||||
|
|
|
@ -66,7 +66,7 @@ func (w *WordWrap) addSpace() {
|
||||||
func (w *WordWrap) addWord() {
|
func (w *WordWrap) addWord() {
|
||||||
if w.word.Len() > 0 {
|
if w.word.Len() > 0 {
|
||||||
w.addSpace()
|
w.addSpace()
|
||||||
w.lineLen += w.word.PrintableRuneWidth()
|
w.lineLen += w.word.PrintableRuneCount()
|
||||||
w.buf.Write(w.word.Bytes())
|
w.buf.Write(w.word.Bytes())
|
||||||
w.word.Reset()
|
w.word.Reset()
|
||||||
}
|
}
|
||||||
|
@ -139,8 +139,8 @@ func (w *WordWrap) Write(b []byte) (int, error) {
|
||||||
|
|
||||||
// add a line break if the current word would exceed the line's
|
// add a line break if the current word would exceed the line's
|
||||||
// character limit
|
// character limit
|
||||||
if w.lineLen+w.space.Len()+w.word.PrintableRuneWidth() > w.Limit &&
|
if w.lineLen+w.space.Len()+w.word.PrintableRuneCount() > w.Limit &&
|
||||||
w.word.PrintableRuneWidth() < w.Limit {
|
w.word.PrintableRuneCount() < w.Limit {
|
||||||
w.addNewLine()
|
w.addNewLine()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
language: go
|
language: go
|
||||||
|
arch:
|
||||||
|
- ppc64le
|
||||||
|
- amd64
|
||||||
go:
|
go:
|
||||||
- 1.1
|
|
||||||
- 1.2
|
|
||||||
- 1.3
|
- 1.3
|
||||||
- 1.4
|
- 1.4
|
||||||
- 1.5
|
- 1.5
|
||||||
|
@ -12,3 +12,11 @@ go:
|
||||||
- 1.9
|
- 1.9
|
||||||
- "1.10"
|
- "1.10"
|
||||||
- tip
|
- tip
|
||||||
|
jobs:
|
||||||
|
exclude :
|
||||||
|
- arch : ppc64le
|
||||||
|
go :
|
||||||
|
- 1.3
|
||||||
|
- arch : ppc64le
|
||||||
|
go :
|
||||||
|
- 1.4
|
||||||
|
|
|
@ -25,7 +25,7 @@ Generate ASCII table on the fly ... Installation is simple as
|
||||||
- Set custom footer support
|
- Set custom footer support
|
||||||
- Optional identical cells merging
|
- Optional identical cells merging
|
||||||
- Set custom caption
|
- Set custom caption
|
||||||
- Optional reflowing of paragrpahs in multi-line cells.
|
- Optional reflowing of paragraphs in multi-line cells.
|
||||||
|
|
||||||
#### Example 1 - Basic
|
#### Example 1 - Basic
|
||||||
```go
|
```go
|
||||||
|
@ -197,6 +197,41 @@ table.Render()
|
||||||
+----------+--------------------------+-------+---------+
|
+----------+--------------------------+-------+---------+
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Example 7 - Identical cells merging (specify the column index to merge)
|
||||||
|
```go
|
||||||
|
data := [][]string{
|
||||||
|
[]string{"1/1/2014", "Domain name", "1234", "$10.98"},
|
||||||
|
[]string{"1/1/2014", "January Hosting", "1234", "$10.98"},
|
||||||
|
[]string{"1/4/2014", "February Hosting", "3456", "$51.00"},
|
||||||
|
[]string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"},
|
||||||
|
}
|
||||||
|
|
||||||
|
table := tablewriter.NewWriter(os.Stdout)
|
||||||
|
table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
|
||||||
|
table.SetFooter([]string{"", "", "Total", "$146.93"})
|
||||||
|
table.SetAutoMergeCellsByColumnIndex([]int{2, 3})
|
||||||
|
table.SetRowLine(true)
|
||||||
|
table.AppendBulk(data)
|
||||||
|
table.Render()
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Output 7
|
||||||
|
```
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
| DATE | DESCRIPTION | CV2 | AMOUNT |
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
| 1/1/2014 | Domain name | 1234 | $10.98 |
|
||||||
|
+----------+--------------------------+ + +
|
||||||
|
| 1/1/2014 | January Hosting | | |
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
| 1/4/2014 | February Hosting | 3456 | $51.00 |
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
| 1/4/2014 | February Extra Bandwidth | 4567 | $30.00 |
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
| TOTAL | $146.93 |
|
||||||
|
+----------+--------------------------+-------+---------+
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
#### Table with color
|
#### Table with color
|
||||||
```go
|
```go
|
||||||
|
@ -233,7 +268,7 @@ table.Render()
|
||||||
#### Table with color Output
|
#### Table with color Output
|
||||||
![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png)
|
![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png)
|
||||||
|
|
||||||
#### Example - 7 Table Cells with Color
|
#### Example - 8 Table Cells with Color
|
||||||
|
|
||||||
Individual Cell Colors from `func Rich` take precedence over Column Colors
|
Individual Cell Colors from `func Rich` take precedence over Column Colors
|
||||||
|
|
||||||
|
@ -289,7 +324,7 @@ table.Render()
|
||||||
##### Table cells with color Output
|
##### Table cells with color Output
|
||||||
![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png)
|
![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png)
|
||||||
|
|
||||||
#### Example 8 - Set table caption
|
#### Example 9 - Set table caption
|
||||||
```go
|
```go
|
||||||
data := [][]string{
|
data := [][]string{
|
||||||
[]string{"A", "The Good", "500"},
|
[]string{"A", "The Good", "500"},
|
||||||
|
@ -310,7 +345,7 @@ table.Render() // Send output
|
||||||
|
|
||||||
Note: Caption text will wrap with total width of rendered table.
|
Note: Caption text will wrap with total width of rendered table.
|
||||||
|
|
||||||
##### Output 7
|
##### Output 9
|
||||||
```
|
```
|
||||||
+------+-----------------------+--------+
|
+------+-----------------------+--------+
|
||||||
| NAME | SIGN | RATING |
|
| NAME | SIGN | RATING |
|
||||||
|
@ -323,7 +358,7 @@ Note: Caption text will wrap with total width of rendered table.
|
||||||
Movie ratings.
|
Movie ratings.
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Example 8 - Set NoWhiteSpace and TablePadding option
|
#### Example 10 - Set NoWhiteSpace and TablePadding option
|
||||||
```go
|
```go
|
||||||
data := [][]string{
|
data := [][]string{
|
||||||
{"node1.example.com", "Ready", "compute", "1.11"},
|
{"node1.example.com", "Ready", "compute", "1.11"},
|
||||||
|
@ -349,7 +384,7 @@ table.AppendBulk(data) // Add Bulk Data
|
||||||
table.Render()
|
table.Render()
|
||||||
```
|
```
|
||||||
|
|
||||||
##### Output 8
|
##### Output 10
|
||||||
```
|
```
|
||||||
NAME STATUS ROLE VERSION
|
NAME STATUS ROLE VERSION
|
||||||
node1.example.com Ready compute 1.11
|
node1.example.com Ready compute 1.11
|
||||||
|
|
|
@ -2,4 +2,4 @@ module github.com/olekukonko/tablewriter
|
||||||
|
|
||||||
go 1.12
|
go 1.12
|
||||||
|
|
||||||
require github.com/mattn/go-runewidth v0.0.7
|
require github.com/mattn/go-runewidth v0.0.9
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue