diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 22fb443f9f..258da124a9 100755 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -397,11 +397,33 @@ jobs: name: session-build-${{ matrix.sys.os }} path: build/ + scan: + name: Scan + needs: + - os_specific + runs-on: ubuntu-latest + steps: + - name: Download All Build Session Artifacts + uses: actions/download-artifact@v2 + with: + path: build/ + + - name: Scan for CVEs + if: runner.os == 'Linux' + uses: aquasecurity/trivy-action@0.20.0 + with: + scan-type: rootfs + scan-ref: build + list-all-pkgs: true + ignore-unfixed: true + format: table + exit-code: 1 + # === Deploy job (runs once with combined artifacts from OS specific job) === deploy: name: Deploy needs: - - os_specific + - scan runs-on: ubuntu-20.04 env: ACTIVESTATE_CI: true @@ -424,7 +446,7 @@ jobs: name: Install Go uses: actions/setup-go@v3 with: - go-version: ${{ matrix.go-version }} + go-version: 1.22.x - # === Install State Tool === name: Install State Tool diff --git a/go.mod b/go.mod index 7e0f041a41..adb9a64189 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/blang/semver v3.5.1+incompatible github.com/creack/pty v1.1.11 github.com/dave/jennifer v0.18.0 - github.com/fatih/color v1.10.0 + github.com/fatih/color v1.16.0 github.com/felixge/fgprof v0.9.0 github.com/fsnotify/fsnotify v1.4.7 github.com/gammazero/workerpool v1.1.1 @@ -28,8 +28,8 @@ require ( github.com/google/go-github/v45 v45.0.0 github.com/google/uuid v1.3.0 github.com/gorilla/websocket v1.5.0 - github.com/hashicorp/go-cleanhttp v0.5.1 - github.com/hashicorp/go-retryablehttp v0.6.7 + github.com/hashicorp/go-cleanhttp v0.5.2 + github.com/hashicorp/go-retryablehttp v0.7.7 github.com/hashicorp/go-version v1.1.0 github.com/hpcloud/tail v1.0.0 github.com/imdario/mergo v0.3.11 @@ -54,7 +54,7 @@ require ( github.com/stretchr/testify v1.9.0 github.com/thoas/go-funk v0.8.0 github.com/vbauerster/mpb/v7 v7.1.5 - github.com/vektah/gqlparser/v2 v2.5.1 + github.com/vektah/gqlparser/v2 v2.5.16 golang.org/x/crypto v0.23.0 // indirect golang.org/x/net v0.25.0 golang.org/x/sys v0.20.0 @@ -142,8 +142,8 @@ require ( github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/matryer/is v1.2.0 // indirect - github.com/mattn/go-colorable v0.1.12 // indirect - github.com/mattn/go-isatty v0.0.18 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-sqlite3 v1.14.7 // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect diff --git a/go.sum b/go.sum index 297a7d1a53..27789bcfcf 100644 --- a/go.sum +++ b/go.sum @@ -147,8 +147,8 @@ github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a/go.mod h1:Ro8st/El github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg= -github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/fatih/structs v1.1.0 
h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/felixge/fgprof v0.9.0 h1:1Unx04fyC3gn3RMH/GuwUF1UdlulLMpJV13jr9SOHvs= @@ -331,7 +331,6 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github/v45 v45.0.0 h1:LU0WBjYidxIVyx7PZeWb+FP4JZJ3Wh3FQgdumnGqiLs= @@ -361,15 +360,16 @@ github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-retryablehttp v0.6.7 h1:8/CAEZt/+F7kR7GevNHulKkUjLht3CPmn7egmhieNKo= -github.com/hashicorp/go-retryablehttp v0.6.7/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= @@ -479,16 +479,17 @@ github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlW github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.11/go.mod 
h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.18 h1:DOKFKCQ7FNG2L1rbrmstDN4QVRdS89Nkh85u68Uwp98= -github.com/mattn/go-isatty v0.0.18/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= @@ -604,8 +605,6 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shibukawa/configdir v0.0.0-20170330084843-e180dbdc8da0 h1:Xuk8ma/ibJ1fOy4Ee11vHhUFHQNpHhrBneOCNHVXS5w= github.com/shibukawa/configdir v0.0.0-20170330084843-e180dbdc8da0/go.mod h1:7AwjWCpdPhkSmNAgUv5C7EJ4AbmjEB3r047r3DXWu3Y= -github.com/shirou/gopsutil/v3 v3.23.8 h1:xnATPiybo6GgdRoC4YoGnxXZFRc3dqQTGi73oLvvBrE= -github.com/shirou/gopsutil/v3 v3.23.8/go.mod h1:7hmCaBn+2ZwaZOr6jmPBZDfawwMGuo1id3C6aM8EDqQ= github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI= github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= @@ -640,8 +639,6 @@ github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5q github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -651,8 +648,6 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 
-github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= @@ -679,8 +674,9 @@ github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+ github.com/vbauerster/mpb/v7 v7.1.5 h1:vtUEUfQHmNeJETyF4AcRCOV6RC4wqFwNORy52UMXPbQ= github.com/vbauerster/mpb/v7 v7.1.5/go.mod h1:4M8+qAoQqV60WDNktBM5k05i1iTrXE7rjKOHEVkVlec= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= -github.com/vektah/gqlparser/v2 v2.5.1 h1:ZGu+bquAY23jsxDRcYpWjttRZrUz07LbiY77gUOHcr4= github.com/vektah/gqlparser/v2 v2.5.1/go.mod h1:mPgqFBu/woKTVYWyNk8cO3kh4S/f4aRFZrvOnp3hmCs= +github.com/vektah/gqlparser/v2 v2.5.16 h1:1gcmLTvs3JLKXckwCwlUagVn/IlV2bwqle0vJ0vy5p8= +github.com/vektah/gqlparser/v2 v2.5.16/go.mod h1:1lz1OeCqgQbQepsGxPVywrjdBHW2T08PUS3pJqepRww= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= @@ -696,8 +692,6 @@ github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7Jul github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= -github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zijiren233/yaml-comment v0.2.1 h1:/ymMfauuR6zPme+c59FvGNmvxmjOS+BRZSU9YEM82g4= @@ -841,7 +835,6 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -860,6 +853,7 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220721230656-c6bc011c0c49/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md index d62e4024aa..be82827cac 100644 --- a/vendor/github.com/fatih/color/README.md +++ b/vendor/github.com/fatih/color/README.md @@ -7,7 +7,6 @@ suits you. ![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) - ## Install ```bash @@ -78,7 +77,7 @@ notice("Don't forget this...") ### Custom fprint functions (FprintFunc) ```go -blue := color.New(FgBlue).FprintfFunc() +blue := color.New(color.FgBlue).FprintfFunc() blue(myWriter, "important notice: %s", stars) // Mix up with multiple attributes @@ -124,17 +123,19 @@ fmt.Println("All text will now be bold magenta.") ``` ### Disable/Enable color - + There might be a case where you want to explicitly disable/enable color output. the `go-isatty` package will automatically disable color output for non-tty output streams -(for example if the output were piped directly to `less`) +(for example if the output were piped directly to `less`). -`Color` has support to disable/enable colors both globally and for single color -definitions. For example suppose you have a CLI app and a `--no-color` bool flag. You -can easily disable the color output with: +The `color` package also disables color output if the [`NO_COLOR`](https://no-color.org) environment +variable is set to a non-empty string. -```go +`Color` has support to disable/enable colors programmatically both globally and +for single color definitions. For example suppose you have a CLI app and a +`-no-color` bool flag. You can easily disable the color output with: +```go var flagNoColor = flag.Bool("no-color", false, "Disable color output") if *flagNoColor { @@ -156,18 +157,20 @@ c.EnableColor() c.Println("This prints again cyan...") ``` +## GitHub Actions + +To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams. + ## Todo * Save/Return previous values * Evaluate fmt.Formatter interface - ## Credits - * [Fatih Arslan](https://github.com/fatih) - * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) +* [Fatih Arslan](https://github.com/fatih) +* Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) ## License The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details - diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go index 91c8e9f062..c4234287dc 100644 --- a/vendor/github.com/fatih/color/color.go +++ b/vendor/github.com/fatih/color/color.go @@ -15,12 +15,14 @@ import ( var ( // NoColor defines if the output is colorized or not. It's dynamically set to // false or true based on the stdout's file descriptor referring to a terminal - // or not. This is a global option and affects all colors. For more control - // over each color block use the methods DisableColor() individually. - NoColor = os.Getenv("TERM") == "dumb" || + // or not. It's also set to true if the NO_COLOR environment variable is + // set (regardless of its value). 
This is a global option and affects all + // colors. For more control over each color block use the methods + // DisableColor() individually. + NoColor = noColorIsSet() || os.Getenv("TERM") == "dumb" || (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) - // Output defines the standard output of the print functions. By default + // Output defines the standard output of the print functions. By default, // os.Stdout is used. Output = colorable.NewColorableStdout() @@ -33,6 +35,11 @@ var ( colorsCacheMu sync.Mutex // protects colorsCache ) +// noColorIsSet returns true if the environment variable NO_COLOR is set to a non-empty string. +func noColorIsSet() bool { + return os.Getenv("NO_COLOR") != "" +} + // Color defines a custom color object which is defined by SGR parameters. type Color struct { params []Attribute @@ -58,6 +65,29 @@ const ( CrossedOut ) +const ( + ResetBold Attribute = iota + 22 + ResetItalic + ResetUnderline + ResetBlinking + _ + ResetReversed + ResetConcealed + ResetCrossedOut +) + +var mapResetAttributes map[Attribute]Attribute = map[Attribute]Attribute{ + Bold: ResetBold, + Faint: ResetBold, + Italic: ResetItalic, + Underline: ResetUnderline, + BlinkSlow: ResetBlinking, + BlinkRapid: ResetBlinking, + ReverseVideo: ResetReversed, + Concealed: ResetConcealed, + CrossedOut: ResetCrossedOut, +} + // Foreground text colors const ( FgBlack Attribute = iota + 30 @@ -108,7 +138,14 @@ const ( // New returns a newly created color object. func New(value ...Attribute) *Color { - c := &Color{params: make([]Attribute, 0)} + c := &Color{ + params: make([]Attribute, 0), + } + + if noColorIsSet() { + c.noColor = boolPtr(true) + } + c.Add(value...) return c } @@ -137,7 +174,7 @@ func (c *Color) Set() *Color { return c } - fmt.Fprintf(Output, c.format()) + fmt.Fprint(Output, c.format()) return c } @@ -149,16 +186,21 @@ func (c *Color) unset() { Unset() } -func (c *Color) setWriter(w io.Writer) *Color { +// SetWriter is used to set the SGR sequence with the given io.Writer. This is +// a low-level function, and users should use the higher-level functions, such +// as color.Fprint, color.Print, etc. +func (c *Color) SetWriter(w io.Writer) *Color { if c.isNoColorSet() { return c } - fmt.Fprintf(w, c.format()) + fmt.Fprint(w, c.format()) return c } -func (c *Color) unsetWriter(w io.Writer) { +// UnsetWriter resets all escape attributes and clears the output with the give +// io.Writer. Usually should be called after SetWriter(). +func (c *Color) UnsetWriter(w io.Writer) { if c.isNoColorSet() { return } @@ -177,20 +219,14 @@ func (c *Color) Add(value ...Attribute) *Color { return c } -func (c *Color) prepend(value Attribute) { - c.params = append(c.params, 0) - copy(c.params[1:], c.params[0:]) - c.params[0] = value -} - // Fprint formats using the default formats for its operands and writes to w. // Spaces are added between operands when neither is a string. // It returns the number of bytes written and any write error encountered. // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) + c.SetWriter(w) + defer c.UnsetWriter(w) return fmt.Fprint(w, a...) } @@ -212,8 +248,8 @@ func (c *Color) Print(a ...interface{}) (n int, err error) { // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. 
func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) + c.SetWriter(w) + defer c.UnsetWriter(w) return fmt.Fprintf(w, format, a...) } @@ -233,10 +269,7 @@ func (c *Color) Printf(format string, a ...interface{}) (n int, err error) { // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) - - return fmt.Fprintln(w, a...) + return fmt.Fprintln(w, c.wrap(fmt.Sprint(a...))) } // Println formats using the default formats for its operands and writes to @@ -245,10 +278,7 @@ func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { // encountered. This is the standard fmt.Print() method wrapped with the given // color. func (c *Color) Println(a ...interface{}) (n int, err error) { - c.Set() - defer c.unset() - - return fmt.Fprintln(Output, a...) + return fmt.Fprintln(Output, c.wrap(fmt.Sprint(a...))) } // Sprint is just like Print, but returns a string instead of printing it. @@ -258,7 +288,7 @@ func (c *Color) Sprint(a ...interface{}) string { // Sprintln is just like Println, but returns a string instead of printing it. func (c *Color) Sprintln(a ...interface{}) string { - return c.wrap(fmt.Sprintln(a...)) + return fmt.Sprintln(c.Sprint(a...)) } // Sprintf is just like Printf, but returns a string instead of printing it. @@ -340,7 +370,7 @@ func (c *Color) SprintfFunc() func(format string, a ...interface{}) string { // string. Windows users should use this in conjunction with color.Output. func (c *Color) SprintlnFunc() func(a ...interface{}) string { return func(a ...interface{}) string { - return c.wrap(fmt.Sprintln(a...)) + return fmt.Sprintln(c.Sprint(a...)) } } @@ -370,7 +400,18 @@ func (c *Color) format() string { } func (c *Color) unformat() string { - return fmt.Sprintf("%s[%dm", escape, Reset) + //return fmt.Sprintf("%s[%dm", escape, Reset) + //for each element in sequence let's use the speficic reset escape, ou the generic one if not found + format := make([]string, len(c.params)) + for i, v := range c.params { + format[i] = strconv.Itoa(int(Reset)) + ra, ok := mapResetAttributes[v] + if ok { + format[i] = strconv.Itoa(int(ra)) + } + } + + return fmt.Sprintf("%s[%sm", escape, strings.Join(format, ";")) } // DisableColor disables the color output. Useful to not change any existing @@ -381,13 +422,13 @@ func (c *Color) DisableColor() { } // EnableColor enables the color output. Use it in conjunction with -// DisableColor(). Otherwise this method has no side effects. +// DisableColor(). Otherwise, this method has no side effects. func (c *Color) EnableColor() { c.noColor = boolPtr(false) } func (c *Color) isNoColorSet() bool { - // check first if we have user setted action + // check first if we have user set action if c.noColor != nil { return *c.noColor } @@ -398,6 +439,12 @@ func (c *Color) isNoColorSet() bool { // Equals returns a boolean value indicating whether two colors are equal. 
func (c *Color) Equals(c2 *Color) bool { + if c == nil && c2 == nil { + return true + } + if c == nil || c2 == nil { + return false + } if len(c.params) != len(c2.params) { return false } diff --git a/vendor/github.com/fatih/color/color_windows.go b/vendor/github.com/fatih/color/color_windows.go new file mode 100644 index 0000000000..be01c558e5 --- /dev/null +++ b/vendor/github.com/fatih/color/color_windows.go @@ -0,0 +1,19 @@ +package color + +import ( + "os" + + "golang.org/x/sys/windows" +) + +func init() { + // Opt-in for ansi color support for current process. + // https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences#output-sequences + var outMode uint32 + out := windows.Handle(os.Stdout.Fd()) + if err := windows.GetConsoleMode(out, &outMode); err != nil { + return + } + outMode |= windows.ENABLE_PROCESSED_OUTPUT | windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING + _ = windows.SetConsoleMode(out, outMode) +} diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go index cf1e96500f..9491ad5413 100644 --- a/vendor/github.com/fatih/color/doc.go +++ b/vendor/github.com/fatih/color/doc.go @@ -5,106 +5,105 @@ that suits you. Use simple and default helper functions with predefined foreground colors: - color.Cyan("Prints text in cyan.") + color.Cyan("Prints text in cyan.") - // a newline will be appended automatically - color.Blue("Prints %s in blue.", "text") + // a newline will be appended automatically + color.Blue("Prints %s in blue.", "text") - // More default foreground colors.. - color.Red("We have red") - color.Yellow("Yellow color too!") - color.Magenta("And many others ..") + // More default foreground colors.. + color.Red("We have red") + color.Yellow("Yellow color too!") + color.Magenta("And many others ..") - // Hi-intensity colors - color.HiGreen("Bright green color.") - color.HiBlack("Bright black means gray..") - color.HiWhite("Shiny white color!") + // Hi-intensity colors + color.HiGreen("Bright green color.") + color.HiBlack("Bright black means gray..") + color.HiWhite("Shiny white color!") -However there are times where custom color mixes are required. Below are some +However, there are times when custom color mixes are required. Below are some examples to create custom color objects and use the print functions of each separate color object. - // Create a new color object - c := color.New(color.FgCyan).Add(color.Underline) - c.Println("Prints cyan text with an underline.") + // Create a new color object + c := color.New(color.FgCyan).Add(color.Underline) + c.Println("Prints cyan text with an underline.") - // Or just add them to New() - d := color.New(color.FgCyan, color.Bold) - d.Printf("This prints bold cyan %s\n", "too!.") + // Or just add them to New() + d := color.New(color.FgCyan, color.Bold) + d.Printf("This prints bold cyan %s\n", "too!.") - // Mix up foreground and background colors, create new mixes! - red := color.New(color.FgRed) + // Mix up foreground and background colors, create new mixes! 
+ red := color.New(color.FgRed) - boldRed := red.Add(color.Bold) - boldRed.Println("This will print text in bold red.") + boldRed := red.Add(color.Bold) + boldRed.Println("This will print text in bold red.") - whiteBackground := red.Add(color.BgWhite) - whiteBackground.Println("Red text with White background.") + whiteBackground := red.Add(color.BgWhite) + whiteBackground.Println("Red text with White background.") - // Use your own io.Writer output - color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + // Use your own io.Writer output + color.New(color.FgBlue).Fprintln(myWriter, "blue color!") - blue := color.New(color.FgBlue) - blue.Fprint(myWriter, "This will print text in blue.") + blue := color.New(color.FgBlue) + blue.Fprint(myWriter, "This will print text in blue.") You can create PrintXxx functions to simplify even more: - // Create a custom print function for convenient - red := color.New(color.FgRed).PrintfFunc() - red("warning") - red("error: %s", err) + // Create a custom print function for convenient + red := color.New(color.FgRed).PrintfFunc() + red("warning") + red("error: %s", err) - // Mix up multiple attributes - notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() - notice("don't forget this...") + // Mix up multiple attributes + notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() + notice("don't forget this...") You can also FprintXxx functions to pass your own io.Writer: - blue := color.New(FgBlue).FprintfFunc() - blue(myWriter, "important notice: %s", stars) - - // Mix up with multiple attributes - success := color.New(color.Bold, color.FgGreen).FprintlnFunc() - success(myWriter, don't forget this...") + blue := color.New(FgBlue).FprintfFunc() + blue(myWriter, "important notice: %s", stars) + // Mix up with multiple attributes + success := color.New(color.Bold, color.FgGreen).FprintlnFunc() + success(myWriter, don't forget this...") Or create SprintXxx functions to mix strings with other non-colorized strings: - yellow := New(FgYellow).SprintFunc() - red := New(FgRed).SprintFunc() + yellow := New(FgYellow).SprintFunc() + red := New(FgRed).SprintFunc() - fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) + fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) - info := New(FgWhite, BgGreen).SprintFunc() - fmt.Printf("this %s rocks!\n", info("package")) + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Printf("this %s rocks!\n", info("package")) Windows support is enabled by default. All Print functions work as intended. -However only for color.SprintXXX functions, user should use fmt.FprintXXX and +However, only for color.SprintXXX functions, user should use fmt.FprintXXX and set the output to color.Output: - fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) + fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) - info := New(FgWhite, BgGreen).SprintFunc() - fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) Using with existing code is possible. Just use the Set() method to set the standard output to the given parameters. That way a rewrite of an existing code is not required. - // Use handy standard colors. - color.Set(color.FgYellow) + // Use handy standard colors. 
+ color.Set(color.FgYellow) - fmt.Println("Existing text will be now in Yellow") - fmt.Printf("This one %s\n", "too") + fmt.Println("Existing text will be now in Yellow") + fmt.Printf("This one %s\n", "too") - color.Unset() // don't forget to unset + color.Unset() // don't forget to unset - // You can mix up parameters - color.Set(color.FgMagenta, color.Bold) - defer color.Unset() // use it in your function + // You can mix up parameters + color.Set(color.FgMagenta, color.Bold) + defer color.Unset() // use it in your function - fmt.Println("All text will be now bold magenta.") + fmt.Println("All text will be now bold magenta.") There might be a case where you want to disable color output (for example to pipe the standard output of your app to somewhere else). `Color` has support to @@ -112,22 +111,24 @@ disable colors both globally and for single color definition. For example suppose you have a CLI app and a `--no-color` bool flag. You can easily disable the color output with: - var flagNoColor = flag.Bool("no-color", false, "Disable color output") + var flagNoColor = flag.Bool("no-color", false, "Disable color output") + + if *flagNoColor { + color.NoColor = true // disables colorized output + } - if *flagNoColor { - color.NoColor = true // disables colorized output - } +You can also disable the color by setting the NO_COLOR environment variable to any value. It also has support for single color definitions (local). You can disable/enable color output on the fly: - c := color.New(color.FgCyan) - c.Println("Prints cyan text") + c := color.New(color.FgCyan) + c.Println("Prints cyan text") - c.DisableColor() - c.Println("This is printed without any color") + c.DisableColor() + c.Println("This is printed without any color") - c.EnableColor() - c.Println("This prints again cyan...") + c.EnableColor() + c.Println("This prints again cyan...") */ package color diff --git a/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go b/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go index 8d306bf513..fe28d15b6f 100644 --- a/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go +++ b/vendor/github.com/hashicorp/go-cleanhttp/cleanhttp.go @@ -32,6 +32,7 @@ func DefaultPooledTransport() *http.Transport { IdleConnTimeout: 90 * time.Second, TLSHandshakeTimeout: 10 * time.Second, ExpectContinueTimeout: 1 * time.Second, + ForceAttemptHTTP2: true, MaxIdleConnsPerHost: runtime.GOMAXPROCS(0) + 1, } return transport diff --git a/vendor/github.com/hashicorp/go-retryablehttp/.go-version b/vendor/github.com/hashicorp/go-retryablehttp/.go-version new file mode 100644 index 0000000000..6fee2fedb0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-retryablehttp/.go-version @@ -0,0 +1 @@ +1.22.2 diff --git a/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md b/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md new file mode 100644 index 0000000000..68a627c6d9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md @@ -0,0 +1,33 @@ +## 0.7.7 (May 30, 2024) + +BUG FIXES: + +- client: avoid potentially leaking URL-embedded basic authentication credentials in logs (#158) + +## 0.7.6 (May 9, 2024) + +ENHANCEMENTS: + +- client: support a `RetryPrepare` function for modifying the request before retrying (#216) +- client: support HTTP-date values for `Retry-After` header value (#138) +- client: avoid reading entire body when the body is a `*bytes.Reader` (#197) + +BUG FIXES: + +- client: fix a broken check for invalid server certificate in go 1.20+ (#210) + +## 0.7.5 (Nov 8, 2023) + +BUG 
FIXES: + +- client: fixes an issue where the request body is not preserved on temporary redirects or re-established HTTP/2 connections (#207) + +## 0.7.4 (Jun 6, 2023) + +BUG FIXES: + +- client: fixing an issue where the Content-Type header wouldn't be sent with an empty payload when using HTTP/2 (#194) + +## 0.7.3 (May 15, 2023) + +Initial release diff --git a/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS new file mode 100644 index 0000000000..d6dd78a2dd --- /dev/null +++ b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS @@ -0,0 +1 @@ +* @hashicorp/go-retryablehttp-maintainers diff --git a/vendor/github.com/hashicorp/go-retryablehttp/LICENSE b/vendor/github.com/hashicorp/go-retryablehttp/LICENSE index e87a115e46..f4f97ee585 100644 --- a/vendor/github.com/hashicorp/go-retryablehttp/LICENSE +++ b/vendor/github.com/hashicorp/go-retryablehttp/LICENSE @@ -1,3 +1,5 @@ +Copyright (c) 2015 HashiCorp, Inc. + Mozilla Public License, version 2.0 1. Definitions diff --git a/vendor/github.com/hashicorp/go-retryablehttp/Makefile b/vendor/github.com/hashicorp/go-retryablehttp/Makefile index da17640e64..5255241961 100644 --- a/vendor/github.com/hashicorp/go-retryablehttp/Makefile +++ b/vendor/github.com/hashicorp/go-retryablehttp/Makefile @@ -2,7 +2,7 @@ default: test test: go vet ./... - go test -race ./... + go test -v -race ./... updatedeps: go get -f -t -u ./... diff --git a/vendor/github.com/hashicorp/go-retryablehttp/README.md b/vendor/github.com/hashicorp/go-retryablehttp/README.md index 30357c7566..145a62f218 100644 --- a/vendor/github.com/hashicorp/go-retryablehttp/README.md +++ b/vendor/github.com/hashicorp/go-retryablehttp/README.md @@ -26,6 +26,7 @@ fails so that the full request can be attempted again. See the details. Version 0.6.0 and before are compatible with Go prior to 1.12. From 0.6.1 onward, Go 1.12+ is required. +From 0.6.7 onward, Go 1.13+ is required. Example Use =========== @@ -58,4 +59,4 @@ standardClient := retryClient.StandardClient() // *http.Client ``` For more usage and examples see the -[godoc](http://godoc.org/github.com/hashicorp/go-retryablehttp). +[pkg.go.dev](https://pkg.go.dev/github.com/hashicorp/go-retryablehttp). diff --git a/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go new file mode 100644 index 0000000000..b2b27e8722 --- /dev/null +++ b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go @@ -0,0 +1,14 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +//go:build !go1.20 +// +build !go1.20 + +package retryablehttp + +import "crypto/x509" + +func isCertError(err error) bool { + _, ok := err.(x509.UnknownAuthorityError) + return ok +} diff --git a/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go new file mode 100644 index 0000000000..a3cd315a28 --- /dev/null +++ b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go @@ -0,0 +1,14 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +//go:build go1.20 +// +build go1.20 + +package retryablehttp + +import "crypto/tls" + +func isCertError(err error) bool { + _, ok := err.(*tls.CertificateVerificationError) + return ok +} diff --git a/vendor/github.com/hashicorp/go-retryablehttp/client.go b/vendor/github.com/hashicorp/go-retryablehttp/client.go index a23f9a93f2..efee53c400 100644 --- a/vendor/github.com/hashicorp/go-retryablehttp/client.go +++ b/vendor/github.com/hashicorp/go-retryablehttp/client.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // Package retryablehttp provides a familiar HTTP client interface with // automatic retries and exponential backoff. It is a thin wrapper over the // standard net/http client library and exposes nearly the same public API. @@ -24,10 +27,8 @@ package retryablehttp import ( "bytes" "context" - "crypto/x509" "fmt" "io" - "io/ioutil" "log" "math" "math/rand" @@ -60,6 +61,10 @@ var ( // limit the size we consume to respReadLimit. respReadLimit = int64(4096) + // timeNow sets the function that returns the current time. + // This defaults to time.Now. Changes to this should only be done in tests. + timeNow = time.Now + // A regular expression to match the error returned by net/http when the // configured number of redirects is exhausted. This error isn't typed // specifically so we resort to matching on the error string. @@ -69,11 +74,33 @@ var ( // scheme specified in the URL is invalid. This error isn't typed // specifically so we resort to matching on the error string. schemeErrorRe = regexp.MustCompile(`unsupported protocol scheme`) + + // A regular expression to match the error returned by net/http when a + // request header or value is invalid. This error isn't typed + // specifically so we resort to matching on the error string. + invalidHeaderErrorRe = regexp.MustCompile(`invalid header`) + + // A regular expression to match the error returned by net/http when the + // TLS certificate is not trusted. This error isn't typed + // specifically so we resort to matching on the error string. + notTrustedErrorRe = regexp.MustCompile(`certificate is not trusted`) ) // ReaderFunc is the type of function that can be given natively to NewRequest type ReaderFunc func() (io.Reader, error) +// ResponseHandlerFunc is a type of function that takes in a Response, and does something with it. +// The ResponseHandlerFunc is called when the HTTP client successfully receives a response and the +// CheckRetry function indicates that a retry of the base request is not necessary. +// If an error is returned from this function, the CheckRetry policy will be used to determine +// whether to retry the whole request (including this handler). +// +// Make sure to check status codes! Even if the request was completed it may have a non-2xx status code. +// +// The response body is not automatically closed. It must be closed either by the ResponseHandlerFunc or +// by the caller out-of-band. Failure to do so will result in a memory leak. +type ResponseHandlerFunc func(*http.Response) error + // LenReader is an interface implemented by many in-memory io.Reader's. Used // for automatically sending the right Content-Length header when possible. type LenReader interface { @@ -86,6 +113,8 @@ type Request struct { // used to rewind the request data in between retries. body ReaderFunc + responseHandler ResponseHandlerFunc + // Embed an HTTP request directly. 
This makes a *Request act exactly // like an *http.Request so that all meta methods are supported. *http.Request @@ -94,8 +123,16 @@ type Request struct { // WithContext returns wrapped Request with a shallow copy of underlying *http.Request // with its context changed to ctx. The provided ctx must be non-nil. func (r *Request) WithContext(ctx context.Context) *Request { - r.Request = r.Request.WithContext(ctx) - return r + return &Request{ + body: r.body, + responseHandler: r.responseHandler, + Request: r.Request.WithContext(ctx), + } +} + +// SetResponseHandler allows setting the response handler. +func (r *Request) SetResponseHandler(fn ResponseHandlerFunc) { + r.responseHandler = fn } // BodyBytes allows accessing the request body. It is an analogue to @@ -130,6 +167,20 @@ func (r *Request) SetBody(rawBody interface{}) error { } r.body = bodyReader r.ContentLength = contentLength + if bodyReader != nil { + r.GetBody = func() (io.ReadCloser, error) { + body, err := bodyReader() + if err != nil { + return nil, err + } + if rc, ok := body.(io.ReadCloser); ok { + return rc, nil + } + return io.NopCloser(body), nil + } + } else { + r.GetBody = func() (io.ReadCloser, error) { return http.NoBody, nil } + } return nil } @@ -204,21 +255,19 @@ func getBodyReaderAndContentLength(rawBody interface{}) (ReaderFunc, int64, erro // deal with it seeking so want it to match here instead of the // io.ReadSeeker case. case *bytes.Reader: - buf, err := ioutil.ReadAll(body) - if err != nil { - return nil, 0, err - } + snapshot := *body bodyReader = func() (io.Reader, error) { - return bytes.NewReader(buf), nil + r := snapshot + return &r, nil } - contentLength = int64(len(buf)) + contentLength = int64(body.Len()) // Compat case case io.ReadSeeker: raw := body bodyReader = func() (io.Reader, error) { _, err := raw.Seek(0, 0) - return ioutil.NopCloser(raw), err + return io.NopCloser(raw), err } if lr, ok := raw.(LenReader); ok { contentLength = int64(lr.Len()) @@ -226,14 +275,21 @@ func getBodyReaderAndContentLength(rawBody interface{}) (ReaderFunc, int64, erro // Read all in so we can reset case io.Reader: - buf, err := ioutil.ReadAll(body) + buf, err := io.ReadAll(body) if err != nil { return nil, 0, err } - bodyReader = func() (io.Reader, error) { - return bytes.NewReader(buf), nil + if len(buf) == 0 { + bodyReader = func() (io.Reader, error) { + return http.NoBody, nil + } + contentLength = 0 + } else { + bodyReader = func() (io.Reader, error) { + return bytes.NewReader(buf), nil + } + contentLength = int64(len(buf)) } - contentLength = int64(len(buf)) // No body provided, nothing to do case nil: @@ -252,23 +308,32 @@ func FromRequest(r *http.Request) (*Request, error) { return nil, err } // Could assert contentLength == r.ContentLength - return &Request{bodyReader, r}, nil + return &Request{body: bodyReader, Request: r}, nil } // NewRequest creates a new wrapped request. func NewRequest(method, url string, rawBody interface{}) (*Request, error) { - bodyReader, contentLength, err := getBodyReaderAndContentLength(rawBody) + return NewRequestWithContext(context.Background(), method, url, rawBody) +} + +// NewRequestWithContext creates a new wrapped request with the provided context. +// +// The context controls the entire lifetime of a request and its response: +// obtaining a connection, sending the request, and reading the response headers and body. 
+func NewRequestWithContext(ctx context.Context, method, url string, rawBody interface{}) (*Request, error) { + httpReq, err := http.NewRequestWithContext(ctx, method, url, nil) if err != nil { return nil, err } - httpReq, err := http.NewRequest(method, url, nil) - if err != nil { + req := &Request{ + Request: httpReq, + } + if err := req.SetBody(rawBody); err != nil { return nil, err } - httpReq.ContentLength = contentLength - return &Request{bodyReader, httpReq}, nil + return req, nil } // Logger interface allows to use other loggers than @@ -333,6 +398,9 @@ type Backoff func(min, max time.Duration, attemptNum int, resp *http.Response) t // attempted. If overriding this, be sure to close the body if needed. type ErrorHandler func(resp *http.Response, err error, numTries int) (*http.Response, error) +// PrepareRetry is called before retry operation. It can be used for example to re-sign the request +type PrepareRetry func(req *http.Request) error + // Client is used to make HTTP requests. It adds additional functionality // like automatic retries to tolerate minor outages. type Client struct { @@ -361,6 +429,9 @@ type Client struct { // ErrorHandler specifies the custom error handler to use, if any ErrorHandler ErrorHandler + // PrepareRetry can prepare the request for retry operation, for example re-sign it + PrepareRetry PrepareRetry + loggerInit sync.Once clientInit sync.Once } @@ -404,44 +475,9 @@ func DefaultRetryPolicy(ctx context.Context, resp *http.Response, err error) (bo return false, ctx.Err() } - if err != nil { - if v, ok := err.(*url.Error); ok { - // Don't retry if the error was due to too many redirects. - if redirectsErrorRe.MatchString(v.Error()) { - return false, nil - } - - // Don't retry if the error was due to an invalid protocol scheme. - if schemeErrorRe.MatchString(v.Error()) { - return false, nil - } - - // Don't retry if the error was due to TLS cert verification failure. - if _, ok := v.Err.(x509.UnknownAuthorityError); ok { - return false, nil - } - } - - // The error is likely recoverable so retry. - return true, nil - } - - // 429 Too Many Requests is recoverable. Sometimes the server puts - // a Retry-After response header to indicate when the server is - // available to start processing request from client. - if resp.StatusCode == http.StatusTooManyRequests { - return true, nil - } - - // Check the response code. We retry on 500-range responses to allow - // the server time to recover, as 500's are typically not permanent - // errors and may relate to outages on the server side. This will catch - // invalid response codes as well, like 0 and 999. - if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != 501) { - return true, nil - } - - return false, nil + // don't propagate other errors + shouldRetry, _ := baseRetryPolicy(resp, err) + return shouldRetry, nil } // ErrorPropagatedRetryPolicy is the same as DefaultRetryPolicy, except it @@ -453,6 +489,10 @@ func ErrorPropagatedRetryPolicy(ctx context.Context, resp *http.Response, err er return false, ctx.Err() } + return baseRetryPolicy(resp, err) +} + +func baseRetryPolicy(resp *http.Response, err error) (bool, error) { if err != nil { if v, ok := err.(*url.Error); ok { // Don't retry if the error was due to too many redirects. @@ -465,8 +505,16 @@ func ErrorPropagatedRetryPolicy(ctx context.Context, resp *http.Response, err er return false, v } + // Don't retry if the error was due to an invalid header. 
+ if invalidHeaderErrorRe.MatchString(v.Error()) { + return false, v + } + // Don't retry if the error was due to TLS cert verification failure. - if _, ok := v.Err.(x509.UnknownAuthorityError); ok { + if notTrustedErrorRe.MatchString(v.Error()) { + return false, v + } + if isCertError(v.Err) { return false, v } } @@ -475,11 +523,18 @@ func ErrorPropagatedRetryPolicy(ctx context.Context, resp *http.Response, err er return true, nil } + // 429 Too Many Requests is recoverable. Sometimes the server puts + // a Retry-After response header to indicate when the server is + // available to start processing request from client. + if resp.StatusCode == http.StatusTooManyRequests { + return true, nil + } + // Check the response code. We retry on 500-range responses to allow // the server time to recover, as 500's are typically not permanent // errors and may relate to outages on the server side. This will catch // invalid response codes as well, like 0 and 999. - if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != 501) { + if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != http.StatusNotImplemented) { return true, fmt.Errorf("unexpected HTTP status %s", resp.Status) } @@ -495,11 +550,9 @@ func ErrorPropagatedRetryPolicy(ctx context.Context, resp *http.Response, err er // seconds the server states it may be ready to process more requests from this client. func DefaultBackoff(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration { if resp != nil { - if resp.StatusCode == http.StatusTooManyRequests { - if s, ok := resp.Header["Retry-After"]; ok { - if sleep, err := strconv.ParseInt(s[0], 10, 64); err == nil { - return time.Second * time.Duration(sleep) - } + if resp.StatusCode == http.StatusTooManyRequests || resp.StatusCode == http.StatusServiceUnavailable { + if sleep, ok := parseRetryAfterHeader(resp.Header["Retry-After"]); ok { + return sleep } } } @@ -512,6 +565,41 @@ func DefaultBackoff(min, max time.Duration, attemptNum int, resp *http.Response) return sleep } +// parseRetryAfterHeader parses the Retry-After header and returns the +// delay duration according to the spec: https://httpwg.org/specs/rfc7231.html#header.retry-after +// The bool returned will be true if the header was successfully parsed. +// Otherwise, the header was either not present, or was not parseable according to the spec. +// +// Retry-After headers come in two flavors: Seconds or HTTP-Date +// +// Examples: +// * Retry-After: Fri, 31 Dec 1999 23:59:59 GMT +// * Retry-After: 120 +func parseRetryAfterHeader(headers []string) (time.Duration, bool) { + if len(headers) == 0 || headers[0] == "" { + return 0, false + } + header := headers[0] + // Retry-After: 120 + if sleep, err := strconv.ParseInt(header, 10, 64); err == nil { + if sleep < 0 { // a negative sleep doesn't make sense + return 0, false + } + return time.Second * time.Duration(sleep), true + } + + // Retry-After: Fri, 31 Dec 1999 23:59:59 GMT + retryTime, err := time.Parse(time.RFC1123, header) + if err != nil { + return 0, false + } + if until := retryTime.Sub(timeNow()); until > 0 { + return until, true + } + // date is in the past + return 0, true +} + // LinearJitterBackoff provides a callback for Client.Backoff which will // perform linear backoff based on the attempt number and with jitter to // prevent a thundering herd. 
@@ -539,13 +627,13 @@ func LinearJitterBackoff(min, max time.Duration, attemptNum int, resp *http.Resp } // Seed rand; doing this every time is fine - rand := rand.New(rand.NewSource(int64(time.Now().Nanosecond()))) + source := rand.New(rand.NewSource(int64(time.Now().Nanosecond()))) // Pick a random number that lies somewhere between the min and max and // multiply by the attemptNum. attemptNum starts at zero so we always // increment here. We first get a random percentage, then apply that to the // difference between min and max, and add to min. - jitter := rand.Float64() * float64(max-min) + jitter := source.Float64() * float64(max-min) jitterMin := int64(jitter) + int64(min) return time.Duration(jitterMin * int64(attemptNum)) } @@ -570,22 +658,21 @@ func (c *Client) Do(req *Request) (*http.Response, error) { if logger != nil { switch v := logger.(type) { case LeveledLogger: - v.Debug("performing request", "method", req.Method, "url", req.URL) + v.Debug("performing request", "method", req.Method, "url", redactURL(req.URL)) case Logger: - v.Printf("[DEBUG] %s %s", req.Method, req.URL) + v.Printf("[DEBUG] %s %s", req.Method, redactURL(req.URL)) } } var resp *http.Response var attempt int var shouldRetry bool - var doErr, checkErr error + var doErr, respErr, checkErr, prepareErr error for i := 0; ; i++ { + doErr, respErr, prepareErr = nil, nil, nil attempt++ - var code int // HTTP response code - // Always rewind the request body when non-nil. if req.body != nil { body, err := req.body() @@ -596,7 +683,7 @@ func (c *Client) Do(req *Request) (*http.Response, error) { if c, ok := body.(io.ReadCloser); ok { req.Body = c } else { - req.Body = ioutil.NopCloser(body) + req.Body = io.NopCloser(body) } } @@ -613,19 +700,24 @@ func (c *Client) Do(req *Request) (*http.Response, error) { // Attempt the request resp, doErr = c.HTTPClient.Do(req.Request) - if resp != nil { - code = resp.StatusCode - } // Check if we should continue with retries. 
shouldRetry, checkErr = c.CheckRetry(req.Context(), resp, doErr) + if !shouldRetry && doErr == nil && req.responseHandler != nil { + respErr = req.responseHandler(resp) + shouldRetry, checkErr = c.CheckRetry(req.Context(), resp, respErr) + } - if doErr != nil { + err := doErr + if respErr != nil { + err = respErr + } + if err != nil { switch v := logger.(type) { case LeveledLogger: - v.Error("request failed", "error", doErr, "method", req.Method, "url", req.URL) + v.Error("request failed", "error", err, "method", req.Method, "url", redactURL(req.URL)) case Logger: - v.Printf("[ERR] %s %s request failed: %v", req.Method, req.URL, doErr) + v.Printf("[ERR] %s %s request failed: %v", req.Method, redactURL(req.URL), err) } } else { // Call this here to maintain the behavior of logging all requests, @@ -660,11 +752,11 @@ func (c *Client) Do(req *Request) (*http.Response, error) { } wait := c.Backoff(c.RetryWaitMin, c.RetryWaitMax, i, resp) - desc := fmt.Sprintf("%s %s", req.Method, req.URL) - if code > 0 { - desc = fmt.Sprintf("%s (status: %d)", desc, code) - } if logger != nil { + desc := fmt.Sprintf("%s %s", req.Method, redactURL(req.URL)) + if resp != nil { + desc = fmt.Sprintf("%s (status: %d)", desc, resp.StatusCode) + } switch v := logger.(type) { case LeveledLogger: v.Debug("retrying request", "request", desc, "timeout", wait, "remaining", remain) @@ -672,29 +764,44 @@ func (c *Client) Do(req *Request) (*http.Response, error) { v.Printf("[DEBUG] %s: retrying in %s (%d left)", desc, wait, remain) } } + timer := time.NewTimer(wait) select { case <-req.Context().Done(): + timer.Stop() c.HTTPClient.CloseIdleConnections() return nil, req.Context().Err() - case <-time.After(wait): + case <-timer.C: } // Make shallow copy of http Request so that we can modify its body // without racing against the closeBody call in persistConn.writeLoop. httpreq := *req.Request req.Request = &httpreq + + if c.PrepareRetry != nil { + if err := c.PrepareRetry(req.Request); err != nil { + prepareErr = err + break + } + } } // this is the closest we have to success criteria - if doErr == nil && checkErr == nil && !shouldRetry { + if doErr == nil && respErr == nil && checkErr == nil && prepareErr == nil && !shouldRetry { return resp, nil } defer c.HTTPClient.CloseIdleConnections() - err := doErr - if checkErr != nil { + var err error + if prepareErr != nil { + err = prepareErr + } else if checkErr != nil { err = checkErr + } else if respErr != nil { + err = respErr + } else { + err = doErr } if c.ErrorHandler != nil { @@ -711,17 +818,17 @@ func (c *Client) Do(req *Request) (*http.Response, error) { // communicate why if err == nil { return nil, fmt.Errorf("%s %s giving up after %d attempt(s)", - req.Method, req.URL, attempt) + req.Method, redactURL(req.URL), attempt) } return nil, fmt.Errorf("%s %s giving up after %d attempt(s): %w", - req.Method, req.URL, attempt, err) + req.Method, redactURL(req.URL), attempt, err) } // Try to read the response body so we can reuse this connection. func (c *Client) drainBody(body io.ReadCloser) { defer body.Close() - _, err := io.Copy(ioutil.Discard, io.LimitReader(body, respReadLimit)) + _, err := io.Copy(io.Discard, io.LimitReader(body, respReadLimit)) if err != nil { if c.logger() != nil { switch v := c.logger().(type) { @@ -796,3 +903,17 @@ func (c *Client) StandardClient() *http.Client { Transport: &RoundTripper{Client: c}, } } + +// Taken from url.URL#Redacted() which was introduced in go 1.15. 
+// We can switch to using it directly if we'll bump the minimum required go version. +func redactURL(u *url.URL) string { + if u == nil { + return "" + } + + ru := *u + if _, has := ru.User.Password(); has { + ru.User = url.UserPassword(ru.User.Username(), "xxxxx") + } + return ru.String() +} diff --git a/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go b/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go index 8f3ee35842..8c407adb3b 100644 --- a/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go +++ b/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go @@ -1,3 +1,6 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + package retryablehttp import ( diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go index d569c0c949..d0ea68f408 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_bsd.go +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -1,6 +1,7 @@ -//go:build (darwin || freebsd || openbsd || netbsd || dragonfly || hurd) && !appengine +//go:build (darwin || freebsd || openbsd || netbsd || dragonfly || hurd) && !appengine && !tinygo // +build darwin freebsd openbsd netbsd dragonfly hurd // +build !appengine +// +build !tinygo package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go index 31503226f6..7402e0618a 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_others.go +++ b/vendor/github.com/mattn/go-isatty/isatty_others.go @@ -1,5 +1,6 @@ -//go:build appengine || js || nacl || wasm -// +build appengine js nacl wasm +//go:build (appengine || js || nacl || tinygo || wasm) && !windows +// +build appengine js nacl tinygo wasm +// +build !windows package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go index 67787657fb..0337d8cf6d 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go +++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go @@ -1,6 +1,7 @@ -//go:build (linux || aix || zos) && !appengine +//go:build (linux || aix || zos) && !appengine && !tinygo // +build linux aix zos // +build !appengine +// +build !tinygo package isatty diff --git a/vendor/github.com/vektah/gqlparser/v2/.go-version b/vendor/github.com/vektah/gqlparser/v2/.go-version new file mode 100644 index 0000000000..66e2ae6c25 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/.go-version @@ -0,0 +1 @@ +1.19.1 diff --git a/vendor/github.com/vektah/gqlparser/v2/.golangci.yaml b/vendor/github.com/vektah/gqlparser/v2/.golangci.yaml new file mode 100644 index 0000000000..97a514b970 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/.golangci.yaml @@ -0,0 +1,54 @@ +run: + tests: true + +linters-settings: + errcheck: + exclude-functions: + - (io.Writer).Write + - io.Copy + - io.WriteString + revive: + enable-all-rules: false + rules: + - name: empty-lines + testifylint: + disable-all: true + enable: + - bool-compare + - compares + - error-is-as + - error-nil + - expected-actual + - nil-compare + +linters: + disable-all: true + enable: + - bodyclose + - dupl + - errcheck + - gocritic + - gofmt + - goimports + - gosimple + - govet + - ineffassign + - misspell + - nakedret + - prealloc + - revive + - staticcheck + - testifylint + - typecheck + - unconvert + - unused + +issues: + exclude-dirs: + - bin + exclude-rules: + # Exclude some linters from running on tests files. 
+    - path: _test\.go
+      linters:
+        - dupl
+        - errcheck
diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/comment.go b/vendor/github.com/vektah/gqlparser/v2/ast/comment.go
new file mode 100644
index 0000000000..8fcfda5813
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/v2/ast/comment.go
@@ -0,0 +1,31 @@
+package ast
+
+import (
+	"strconv"
+	"strings"
+)
+
+type Comment struct {
+	Value    string
+	Position *Position
+}
+
+func (c *Comment) Text() string {
+	return strings.TrimPrefix(c.Value, "#")
+}
+
+type CommentGroup struct {
+	List []*Comment
+}
+
+func (c *CommentGroup) Dump() string {
+	if len(c.List) == 0 {
+		return ""
+	}
+	var builder strings.Builder
+	for _, comment := range c.List {
+		builder.WriteString(comment.Value)
+		builder.WriteString("\n")
+	}
+	return strconv.Quote(builder.String())
+}
diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/decode.go b/vendor/github.com/vektah/gqlparser/v2/ast/decode.go
index d00920554c..c9966b2440 100644
--- a/vendor/github.com/vektah/gqlparser/v2/ast/decode.go
+++ b/vendor/github.com/vektah/gqlparser/v2/ast/decode.go
@@ -11,7 +11,7 @@ func UnmarshalSelectionSet(b []byte) (SelectionSet, error) {
 		return nil, err
 	}
 
-	var result = make([]Selection, 0)
+	result := make([]Selection, 0)
 	for _, item := range tmp {
 		var field Field
 		if err := json.Unmarshal(item, &field); err == nil {
diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/definition.go b/vendor/github.com/vektah/gqlparser/v2/ast/definition.go
index d203908168..3f0aa53d14 100644
--- a/vendor/github.com/vektah/gqlparser/v2/ast/definition.go
+++ b/vendor/github.com/vektah/gqlparser/v2/ast/definition.go
@@ -31,6 +31,10 @@ type Definition struct {
 
 	Position *Position `dump:"-"`
 	BuiltIn  bool      `dump:"-"`
+
+	BeforeDescriptionComment *CommentGroup
+	AfterDescriptionComment  *CommentGroup
+	EndOfDefinitionComment   *CommentGroup
 }
 
 func (d *Definition) IsLeafType() bool {
@@ -66,6 +70,9 @@ type FieldDefinition struct {
 	Type         *Type
 	Directives   DirectiveList
 	Position     *Position `dump:"-"`
+
+	BeforeDescriptionComment *CommentGroup
+	AfterDescriptionComment  *CommentGroup
 }
 
 type ArgumentDefinition struct {
@@ -75,6 +82,9 @@ type ArgumentDefinition struct {
 	Type         *Type
 	Directives   DirectiveList
 	Position     *Position `dump:"-"`
+
+	BeforeDescriptionComment *CommentGroup
+	AfterDescriptionComment  *CommentGroup
 }
 
 type EnumValueDefinition struct {
@@ -82,6 +92,9 @@ type EnumValueDefinition struct {
 	Name        string
 	Directives  DirectiveList
 	Position    *Position `dump:"-"`
+
+	BeforeDescriptionComment *CommentGroup
+	AfterDescriptionComment  *CommentGroup
 }
 
 type DirectiveDefinition struct {
@@ -91,4 +104,7 @@ type DirectiveDefinition struct {
 	Locations    []DirectiveLocation
 	IsRepeatable bool
 	Position     *Position `dump:"-"`
+
+	BeforeDescriptionComment *CommentGroup
+	AfterDescriptionComment  *CommentGroup
 }
diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/document.go b/vendor/github.com/vektah/gqlparser/v2/ast/document.go
index 43bfb54ff5..a3a9e98dc2 100644
--- a/vendor/github.com/vektah/gqlparser/v2/ast/document.go
+++ b/vendor/github.com/vektah/gqlparser/v2/ast/document.go
@@ -4,6 +4,7 @@ type QueryDocument struct {
 	Operations OperationList
 	Fragments  FragmentDefinitionList
 	Position   *Position `dump:"-"`
+	Comment    *CommentGroup
 }
 
 type SchemaDocument struct {
@@ -13,6 +14,7 @@ type SchemaDocument struct {
 	Definitions DefinitionList
 	Extensions  DefinitionList
 	Position    *Position `dump:"-"`
+	Comment     *CommentGroup
 }
 
 func (d *SchemaDocument) Merge(other *SchemaDocument) {
@@ -35,6 +37,8 @@ type Schema struct {
Implements map[string][]*Definition Description string + + Comment *CommentGroup } // AddTypes is the helper to add types definition to the schema @@ -70,10 +74,15 @@ type SchemaDefinition struct { Directives DirectiveList OperationTypes OperationTypeDefinitionList Position *Position `dump:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup + EndOfDefinitionComment *CommentGroup } type OperationTypeDefinition struct { Operation Operation Type string Position *Position `dump:"-"` + Comment *CommentGroup } diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go b/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go index 57ab56c7c6..7bd7dbce7f 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go @@ -9,6 +9,7 @@ type FragmentSpread struct { Definition *FragmentDefinition Position *Position `dump:"-"` + Comment *CommentGroup } type InlineFragment struct { @@ -20,6 +21,7 @@ type InlineFragment struct { ObjectDefinition *Definition Position *Position `dump:"-"` + Comment *CommentGroup } type FragmentDefinition struct { @@ -35,4 +37,5 @@ type FragmentDefinition struct { Definition *Definition Position *Position `dump:"-"` + Comment *CommentGroup } diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/operation.go b/vendor/github.com/vektah/gqlparser/v2/ast/operation.go index 3b37f81bf3..16ee064db9 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/operation.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/operation.go @@ -15,6 +15,7 @@ type OperationDefinition struct { Directives DirectiveList SelectionSet SelectionSet Position *Position `dump:"-"` + Comment *CommentGroup } type VariableDefinition struct { @@ -23,6 +24,7 @@ type VariableDefinition struct { DefaultValue *Value Directives DirectiveList Position *Position `dump:"-"` + Comment *CommentGroup // Requires validation Definition *Definition diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/path.go b/vendor/github.com/vektah/gqlparser/v2/ast/path.go index 9af1684388..f40aa953dd 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/path.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/path.go @@ -14,10 +14,15 @@ type PathElement interface { isPathElement() } -var _ PathElement = PathIndex(0) -var _ PathElement = PathName("") +var ( + _ PathElement = PathIndex(0) + _ PathElement = PathName("") +) func (path Path) String() string { + if path == nil { + return "" + } var str bytes.Buffer for i, v := range path { switch v := v.(type) { @@ -60,8 +65,8 @@ func (path *Path) UnmarshalJSON(b []byte) error { type PathIndex int -func (_ PathIndex) isPathElement() {} +func (PathIndex) isPathElement() {} type PathName string -func (_ PathName) isPathElement() {} +func (PathName) isPathElement() {} diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/selection.go b/vendor/github.com/vektah/gqlparser/v2/ast/selection.go index 159db84471..c927a4d33a 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/selection.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/selection.go @@ -11,9 +11,9 @@ func (*Field) isSelection() {} func (*FragmentSpread) isSelection() {} func (*InlineFragment) isSelection() {} -func (s *Field) GetPosition() *Position { return s.Position } +func (f *Field) GetPosition() *Position { return f.Position } func (s *FragmentSpread) GetPosition() *Position { return s.Position } -func (s *InlineFragment) GetPosition() *Position { return s.Position } +func (f *InlineFragment) GetPosition() *Position { 
return f.Position } type Field struct { Alias string @@ -22,6 +22,7 @@ type Field struct { Directives DirectiveList SelectionSet SelectionSet Position *Position `dump:"-"` + Comment *CommentGroup // Require validation Definition *FieldDefinition @@ -32,6 +33,7 @@ type Argument struct { Name string Value *Value Position *Position `dump:"-"` + Comment *CommentGroup } func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} { diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/type.go b/vendor/github.com/vektah/gqlparser/v2/ast/type.go index 9577fdb48c..5f77bc7ce4 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/type.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/type.go @@ -63,6 +63,6 @@ func (t *Type) IsCompatible(other *Type) bool { return true } -func (v *Type) Dump() string { - return v.String() +func (t *Type) Dump() string { + return t.String() } diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/value.go b/vendor/github.com/vektah/gqlparser/v2/ast/value.go index c25ef15059..10f7b1ccc6 100644 --- a/vendor/github.com/vektah/gqlparser/v2/ast/value.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/value.go @@ -26,6 +26,7 @@ type Value struct { Children ChildValueList Kind ValueKind Position *Position `dump:"-"` + Comment *CommentGroup // Require validation Definition *Definition @@ -37,6 +38,7 @@ type ChildValue struct { Name string Value *Value Position *Position `dump:"-"` + Comment *CommentGroup } func (v *Value) Value(vars map[string]interface{}) (interface{}, error) { diff --git a/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go b/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go index 8145061a22..483a086d6d 100644 --- a/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go +++ b/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go @@ -9,9 +9,9 @@ import ( "github.com/vektah/gqlparser/v2/ast" ) -// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors +// Error is the standard graphql error type described in https://spec.graphql.org/draft/#sec-Errors type Error struct { - err error `json:"-"` + Err error `json:"-"` Message string `json:"message"` Path ast.Path `json:"path,omitempty"` Locations []Location `json:"locations,omitempty"` @@ -64,12 +64,19 @@ func (err *Error) Error() string { return res.String() } -func (err Error) pathString() string { +func (err *Error) pathString() string { return err.Path.String() } -func (err Error) Unwrap() error { - return err.err +func (err *Error) Unwrap() error { + return err.Err +} + +func (err *Error) AsError() error { + if err == nil { + return nil + } + return err } func (errs List) Error() string { @@ -99,14 +106,48 @@ func (errs List) As(target interface{}) bool { return false } +func (errs List) Unwrap() []error { + l := make([]error, len(errs)) + for i, err := range errs { + l[i] = err + } + return l +} + func WrapPath(path ast.Path, err error) *Error { + if err == nil { + return nil + } return &Error{ - err: err, + Err: err, Message: err.Error(), Path: path, } } +func Wrap(err error) *Error { + if err == nil { + return nil + } + return &Error{ + Err: err, + Message: err.Error(), + } +} + +func WrapIfUnwrapped(err error) *Error { + if err == nil { + return nil + } + if gqlErr, ok := err.(*Error); ok { + return gqlErr + } + return &Error{ + Err: err, + Message: err.Error(), + } +} + func Errorf(message string, args ...interface{}) *Error { return &Error{ Message: fmt.Sprintf(message, args...), diff --git 
a/vendor/github.com/vektah/gqlparser/v2/gqlparser.go b/vendor/github.com/vektah/gqlparser/v2/gqlparser.go index e242a896b4..d404fd0b53 100644 --- a/vendor/github.com/vektah/gqlparser/v2/gqlparser.go +++ b/vendor/github.com/vektah/gqlparser/v2/gqlparser.go @@ -5,11 +5,21 @@ import ( "github.com/vektah/gqlparser/v2/gqlerror" "github.com/vektah/gqlparser/v2/parser" "github.com/vektah/gqlparser/v2/validator" + + // Blank import is used to load up the validator rules. _ "github.com/vektah/gqlparser/v2/validator/rules" ) func LoadSchema(str ...*ast.Source) (*ast.Schema, error) { - return validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...) + schema, err := validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...) + gqlErr, ok := err.(*gqlerror.Error) + if ok { + return schema, gqlErr + } + if err != nil { + return schema, gqlerror.Wrap(err) + } + return schema, nil } func MustLoadSchema(str ...*ast.Source) *ast.Schema { @@ -23,11 +33,14 @@ func MustLoadSchema(str ...*ast.Source) *ast.Schema { func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) { query, err := parser.ParseQuery(&ast.Source{Input: str}) if err != nil { - gqlErr := err.(*gqlerror.Error) - return nil, gqlerror.List{gqlErr} + gqlErr, ok := err.(*gqlerror.Error) + if ok { + return nil, gqlerror.List{gqlErr} + } + return nil, gqlerror.List{gqlerror.Wrap(err)} } errs := validator.Validate(schema, query) - if errs != nil { + if len(errs) > 0 { return nil, errs } diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go index 25dcdcb9c4..21dd3cf4b2 100644 --- a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go @@ -55,7 +55,7 @@ func (s *Lexer) makeValueToken(kind Type, value string) (Token, error) { }, nil } -func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { +func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) { column := s.endRunes - s.lineStartRunes + 1 return Token{ Kind: Invalid, @@ -74,8 +74,7 @@ func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { // This skips over whitespace and comments until it finds the next lexable // token, then lexes punctuators immediately or calls the appropriate helper // function for more complicated tokens. 
-func (s *Lexer) ReadToken() (token Token, err error) { - +func (s *Lexer) ReadToken() (Token, error) { s.ws() s.start = s.end s.startRunes = s.endRunes @@ -121,8 +120,7 @@ func (s *Lexer) ReadToken() (token Token, err error) { case '|': return s.makeValueToken(Pipe, "") case '#': - s.readComment() - return s.ReadToken() + return s.readComment() case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': return s.readName() @@ -254,9 +252,8 @@ func (s *Lexer) readNumber() (Token, error) { if float { return s.makeToken(Float) - } else { - return s.makeToken(Int) } + return s.makeToken(Int) } // acceptByte if it matches any of given bytes, returning true if it found anything @@ -319,8 +316,8 @@ func (s *Lexer) readString() (Token, error) { } switch r { default: - var char = rune(r) - var w = 1 + char := rune(r) + w := 1 // skip unicode overhead if we are in the ascii range if r >= 127 { @@ -393,8 +390,8 @@ func (s *Lexer) readString() (Token, error) { case 't': buf.WriteByte('\t') default: - s.end += 1 - s.endRunes += 1 + s.end++ + s.endRunes++ return s.makeError("Invalid character escape sequence: \\%s.", string(escape)) } s.end += 2 @@ -442,11 +439,12 @@ func (s *Lexer) readBlockString() (Token, error) { return s.makeError(`Invalid character within String: "\u%04d".`, r) } - if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` { + switch { + case r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""`: buf.WriteString(`"""`) s.end += 4 s.endRunes += 4 - } else if r == '\r' { + case r == '\r': if s.end+1 < inputLen && s.Input[s.end+1] == '\n' { s.end++ s.endRunes++ @@ -457,9 +455,9 @@ func (s *Lexer) readBlockString() (Token, error) { s.endRunes++ s.line++ s.lineStartRunes = s.endRunes - } else { - var char = rune(r) - var w = 1 + default: + char := rune(r) + w := 1 // skip unicode overhead if we are in the ascii range if r >= 127 { diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml index 5c4d5f0ff5..0899f4ca9b 100644 --- a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml @@ -26,22 +26,38 @@ simple tokens: column: 3 value: 'foo' - - name: skips whitespace - input: "\n\n foo\n\n\n" + - name: records line and column with comments + input: "\n\n\n#foo\n #bar\n foo\n" tokens: + - + kind: COMMENT + start: 3 + end: 7 + line: 4 + column: 0 + value: '#foo' + - + kind: COMMENT + start: 10 + end: 14 + line: 5 + column: 3 + value: '#bar' - kind: NAME - start: 6 - end: 9 + start: 17 + end: 20 + line: 6 + column: 3 value: 'foo' - - name: skips comments - input: "\n #comment\n foo#comment\n" + - name: skips whitespace + input: "\n\n foo\n\n\n" tokens: - kind: NAME - start: 18 - end: 21 + start: 6 + end: 9 value: 'foo' - name: skips commas @@ -78,6 +94,57 @@ simple tokens: end: 1 value: a +lexes comments: + - name: basic + input: '#simple' + tokens: + - + kind: COMMENT + start: 0 + end: 7 + value: '#simple' + + - name: two lines + input: "#first\n#second" + tokens: + - + kind: COMMENT + start: 0 + end: 6 + value: "#first" + - + kind: COMMENT + start: 7 + end: 14 + value: "#second" + + - name: whitespace + input: '# white space ' + tokens: + - + kind: COMMENT + start: 0 + end: 14 + value: '# white space ' + + - 
name: not escaped + input: '#not escaped \n\r\b\t\f' + tokens: + - + kind: COMMENT + start: 0 + end: 23 + value: '#not escaped \n\r\b\t\f' + + - name: slashes + input: '#slashes \\ \/' + tokens: + - + kind: COMMENT + start: 0 + end: 14 + value: '#slashes \\ \/' + lexes strings: - name: basic input: '"simple"' @@ -674,7 +741,6 @@ lex reports useful unknown character error: - name: question mark input: "?" error: - message: 'Cannot parse the unexpected character "?".' message: 'Cannot parse the unexpected character "?".' locations: [{ line: 1, column: 1 }] diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/parser.go b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go index 68eb51edc6..bfcf7ea498 100644 --- a/vendor/github.com/vektah/gqlparser/v2/parser/parser.go +++ b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go @@ -1,6 +1,7 @@ package parser import ( + "fmt" "strconv" "github.com/vektah/gqlparser/v2/ast" @@ -17,6 +18,53 @@ type parser struct { peekError error prev lexer.Token + + comment *ast.CommentGroup + commentConsuming bool + + tokenCount int + maxTokenLimit int +} + +func (p *parser) SetMaxTokenLimit(maxToken int) { + p.maxTokenLimit = maxToken +} + +func (p *parser) consumeComment() (*ast.Comment, bool) { + if p.err != nil { + return nil, false + } + tok := p.peek() + if tok.Kind != lexer.Comment { + return nil, false + } + p.next() + return &ast.Comment{ + Value: tok.Value, + Position: &tok.Pos, + }, true +} + +func (p *parser) consumeCommentGroup() { + if p.err != nil { + return + } + if p.commentConsuming { + return + } + p.commentConsuming = true + + var comments []*ast.Comment + for { + comment, ok := p.consumeComment() + if !ok { + break + } + comments = append(comments, comment) + } + + p.comment = &ast.CommentGroup{List: comments} + p.commentConsuming = false } func (p *parser) peekPos() *ast.Position { @@ -36,6 +84,9 @@ func (p *parser) peek() lexer.Token { if !p.peeked { p.peekToken, p.peekError = p.lexer.ReadToken() p.peeked = true + if p.peekToken.Kind == lexer.Comment { + p.consumeCommentGroup() + } } return p.peekToken @@ -52,33 +103,45 @@ func (p *parser) next() lexer.Token { if p.err != nil { return p.prev } + // Increment the token count before reading the next token + p.tokenCount++ + if p.maxTokenLimit != 0 && p.tokenCount > p.maxTokenLimit { + p.err = fmt.Errorf("exceeded token limit of %d", p.maxTokenLimit) + return p.prev + } if p.peeked { p.peeked = false + p.comment = nil p.prev, p.err = p.peekToken, p.peekError } else { p.prev, p.err = p.lexer.ReadToken() + if p.prev.Kind == lexer.Comment { + p.consumeCommentGroup() + } } return p.prev } -func (p *parser) expectKeyword(value string) lexer.Token { +func (p *parser) expectKeyword(value string) (lexer.Token, *ast.CommentGroup) { tok := p.peek() + comment := p.comment if tok.Kind == lexer.Name && tok.Value == value { - return p.next() + return p.next(), comment } p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String()) - return tok + return tok, comment } -func (p *parser) expect(kind lexer.Type) lexer.Token { +func (p *parser) expect(kind lexer.Type) (lexer.Token, *ast.CommentGroup) { tok := p.peek() + comment := p.comment if tok.Kind == kind { - return p.next() + return p.next(), comment } p.error(tok, "Expected %s, found %s", kind, tok.Kind.String()) - return tok + return tok, comment } func (p *parser) skip(kind lexer.Type) bool { @@ -115,10 +178,10 @@ func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) { p.next() } -func (p *parser) some(start lexer.Type, 
end lexer.Type, cb func()) { +func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) *ast.CommentGroup { hasDef := p.skip(start) if !hasDef { - return + return nil } called := false @@ -129,8 +192,10 @@ func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) { if !called { p.error(p.peek(), "expected at least one definition, found %s", p.peek().Kind.String()) - return + return nil } + comment := p.comment p.next() + return comment } diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/query.go b/vendor/github.com/vektah/gqlparser/v2/parser/query.go index a38d982acb..f653bed5f9 100644 --- a/vendor/github.com/vektah/gqlparser/v2/parser/query.go +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query.go @@ -2,13 +2,22 @@ package parser import ( "github.com/vektah/gqlparser/v2/lexer" - + //nolint:revive . "github.com/vektah/gqlparser/v2/ast" ) func ParseQuery(source *Source) (*QueryDocument, error) { p := parser{ - lexer: lexer.New(source), + lexer: lexer.New(source), + maxTokenLimit: 0, // 0 means unlimited + } + return p.parseQueryDocument(), p.err +} + +func ParseQueryWithTokenLimit(source *Source, maxTokenLimit int) (*QueryDocument, error) { + p := parser{ + lexer: lexer.New(source), + maxTokenLimit: maxTokenLimit, } return p.parseQueryDocument(), p.err } @@ -44,6 +53,7 @@ func (p *parser) parseOperationDefinition() *OperationDefinition { if p.peek().Kind == lexer.BraceL { return &OperationDefinition{ Position: p.peekPos(), + Comment: p.comment, Operation: Query, SelectionSet: p.parseRequiredSelectionSet(), } @@ -51,6 +61,7 @@ func (p *parser) parseOperationDefinition() *OperationDefinition { var od OperationDefinition od.Position = p.peekPos() + od.Comment = p.comment od.Operation = p.parseOperationType() if p.peek().Kind == lexer.Name { @@ -80,7 +91,7 @@ func (p *parser) parseOperationType() Operation { func (p *parser) parseVariableDefinitions() VariableDefinitionList { var defs []*VariableDefinition - p.many(lexer.ParenL, lexer.ParenR, func() { + p.some(lexer.ParenL, lexer.ParenR, func() { defs = append(defs, p.parseVariableDefinition()) }) @@ -90,6 +101,7 @@ func (p *parser) parseVariableDefinitions() VariableDefinitionList { func (p *parser) parseVariableDefinition() *VariableDefinition { var def VariableDefinition def.Position = p.peekPos() + def.Comment = p.comment def.Variable = p.parseVariable() p.expect(lexer.Colon) @@ -116,7 +128,7 @@ func (p *parser) parseOptionalSelectionSet() SelectionSet { selections = append(selections, p.parseSelection()) }) - return SelectionSet(selections) + return selections } func (p *parser) parseRequiredSelectionSet() SelectionSet { @@ -130,7 +142,7 @@ func (p *parser) parseRequiredSelectionSet() SelectionSet { selections = append(selections, p.parseSelection()) }) - return SelectionSet(selections) + return selections } func (p *parser) parseSelection() Selection { @@ -143,6 +155,7 @@ func (p *parser) parseSelection() Selection { func (p *parser) parseField() *Field { var field Field field.Position = p.peekPos() + field.Comment = p.comment field.Alias = p.parseName() if p.skip(lexer.Colon) { @@ -162,7 +175,7 @@ func (p *parser) parseField() *Field { func (p *parser) parseArguments(isConst bool) ArgumentList { var arguments ArgumentList - p.many(lexer.ParenL, lexer.ParenR, func() { + p.some(lexer.ParenL, lexer.ParenR, func() { arguments = append(arguments, p.parseArgument(isConst)) }) @@ -172,6 +185,7 @@ func (p *parser) parseArguments(isConst bool) ArgumentList { func (p *parser) parseArgument(isConst bool) *Argument { 
arg := Argument{} arg.Position = p.peekPos() + arg.Comment = p.comment arg.Name = p.parseName() p.expect(lexer.Colon) @@ -180,11 +194,12 @@ func (p *parser) parseArgument(isConst bool) *Argument { } func (p *parser) parseFragment() Selection { - p.expect(lexer.Spread) + _, comment := p.expect(lexer.Spread) if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" { return &FragmentSpread{ Position: p.peekPos(), + Comment: comment, Name: p.parseFragmentName(), Directives: p.parseDirectives(false), } @@ -192,6 +207,7 @@ func (p *parser) parseFragment() Selection { var def InlineFragment def.Position = p.peekPos() + def.Comment = comment if p.peek().Value == "on" { p.next() // "on" @@ -206,6 +222,7 @@ func (p *parser) parseFragment() Selection { func (p *parser) parseFragmentDefinition() *FragmentDefinition { var def FragmentDefinition def.Position = p.peekPos() + def.Comment = p.comment p.expectKeyword("fragment") def.Name = p.parseFragmentName() @@ -242,7 +259,7 @@ func (p *parser) parseValueLiteral(isConst bool) *Value { p.unexpectedError() return nil } - return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable} + return &Value{Position: &token.Pos, Comment: p.comment, Raw: p.parseVariable(), Kind: Variable} case lexer.Int: kind = IntValue case lexer.Float: @@ -267,32 +284,35 @@ func (p *parser) parseValueLiteral(isConst bool) *Value { p.next() - return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind} + return &Value{Position: &token.Pos, Comment: p.comment, Raw: token.Value, Kind: kind} } func (p *parser) parseList(isConst bool) *Value { var values ChildValueList pos := p.peekPos() + comment := p.comment p.many(lexer.BracketL, lexer.BracketR, func() { values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)}) }) - return &Value{Children: values, Kind: ListValue, Position: pos} + return &Value{Children: values, Kind: ListValue, Position: pos, Comment: comment} } func (p *parser) parseObject(isConst bool) *Value { var fields ChildValueList pos := p.peekPos() + comment := p.comment p.many(lexer.BraceL, lexer.BraceR, func() { fields = append(fields, p.parseObjectField(isConst)) }) - return &Value{Children: fields, Kind: ObjectValue, Position: pos} + return &Value{Children: fields, Kind: ObjectValue, Position: pos, Comment: comment} } func (p *parser) parseObjectField(isConst bool) *ChildValue { field := ChildValue{} field.Position = p.peekPos() + field.Comment = p.comment field.Name = p.parseName() p.expect(lexer.Colon) @@ -342,7 +362,7 @@ func (p *parser) parseTypeReference() *Type { } func (p *parser) parseName() string { - token := p.expect(lexer.Name) + token, _ := p.expect(lexer.Name) return token.Value } diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml index a46a01e718..ec0580f5fa 100644 --- a/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml @@ -436,6 +436,7 @@ large queries: - Alias: "id" Name: "id" + Comment: "# Copyright (c) 2015-present, Facebook, Inc.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n" - Operation: Operation("mutation") Name: "likeStory" diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/schema.go b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go index aec08a9700..44c693754f 100644 --- a/vendor/github.com/vektah/gqlparser/v2/parser/schema.go +++ 
b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go @@ -1,39 +1,73 @@ package parser import ( + //nolint:revive . "github.com/vektah/gqlparser/v2/ast" "github.com/vektah/gqlparser/v2/lexer" ) +func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) { + sd := &SchemaDocument{} + for _, input := range inputs { + inputAst, err := ParseSchema(input) + if err != nil { + return nil, err + } + sd.Merge(inputAst) + } + return sd, nil +} + func ParseSchema(source *Source) (*SchemaDocument, error) { p := parser{ - lexer: lexer.New(source), + lexer: lexer.New(source), + maxTokenLimit: 0, // default value is unlimited } - ast, err := p.parseSchemaDocument(), p.err + sd, err := p.parseSchemaDocument(), p.err if err != nil { return nil, err } - for _, def := range ast.Definitions { + for _, def := range sd.Definitions { def.BuiltIn = source.BuiltIn } - for _, def := range ast.Extensions { + for _, def := range sd.Extensions { def.BuiltIn = source.BuiltIn } - return ast, nil + return sd, nil } -func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) { - ast := &SchemaDocument{} +func ParseSchemasWithLimit(maxTokenLimit int, inputs ...*Source) (*SchemaDocument, error) { + sd := &SchemaDocument{} for _, input := range inputs { - inputAst, err := ParseSchema(input) + inputAst, err := ParseSchemaWithLimit(input, maxTokenLimit) if err != nil { return nil, err } - ast.Merge(inputAst) + sd.Merge(inputAst) } - return ast, nil + return sd, nil +} + +func ParseSchemaWithLimit(source *Source, maxTokenLimit int) (*SchemaDocument, error) { + p := parser{ + lexer: lexer.New(source), + maxTokenLimit: maxTokenLimit, // 0 is unlimited + } + sd, err := p.parseSchemaDocument(), p.err + if err != nil { + return nil, err + } + + for _, def := range sd.Definitions { + def.BuiltIn = source.BuiltIn + } + for _, def := range sd.Extensions { + def.BuiltIn = source.BuiltIn + } + + return sd, nil } func (p *parser) parseSchemaDocument() *SchemaDocument { @@ -44,7 +78,7 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { return nil } - var description string + var description descriptionWithComment if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String { description = p.parseDescription() } @@ -62,7 +96,7 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { case "directive": doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description)) case "extend": - if description != "" { + if description.text != "" { p.unexpectedToken(p.prev) } p.parseTypeSystemExtension(&doc) @@ -72,20 +106,26 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { } } + // treat end of file comments + doc.Comment = p.comment + return &doc } -func (p *parser) parseDescription() string { +func (p *parser) parseDescription() descriptionWithComment { token := p.peek() + var desc descriptionWithComment if token.Kind != lexer.BlockString && token.Kind != lexer.String { - return "" + return desc } - return p.next().Value + desc.comment = p.comment + desc.text = p.next().Value + return desc } -func (p *parser) parseTypeSystemDefinition(description string) *Definition { +func (p *parser) parseTypeSystemDefinition(description descriptionWithComment) *Definition { tok := p.peek() if tok.Kind != lexer.Name { p.unexpectedError() @@ -111,15 +151,17 @@ func (p *parser) parseTypeSystemDefinition(description string) *Definition { } } -func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { - p.expectKeyword("schema") +func (p *parser) parseSchemaDefinition(description 
descriptionWithComment) *SchemaDefinition { + _, comment := p.expectKeyword("schema") - def := SchemaDefinition{Description: description} + def := SchemaDefinition{} def.Position = p.peekPos() - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Directives = p.parseDirectives(true) - p.some(lexer.BraceL, lexer.BraceR, func() { + def.EndOfDefinitionComment = p.some(lexer.BraceL, lexer.BraceR, func() { def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) }) return &def @@ -128,35 +170,40 @@ func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition { var op OperationTypeDefinition op.Position = p.peekPos() + op.Comment = p.comment op.Operation = p.parseOperationType() p.expect(lexer.Colon) op.Type = p.parseName() return &op } -func (p *parser) parseScalarTypeDefinition(description string) *Definition { - p.expectKeyword("scalar") +func (p *parser) parseScalarTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("scalar") var def Definition def.Position = p.peekPos() + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Kind = Scalar - def.Description = description def.Name = p.parseName() def.Directives = p.parseDirectives(true) return &def } -func (p *parser) parseObjectTypeDefinition(description string) *Definition { - p.expectKeyword("type") +func (p *parser) parseObjectTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("type") var def Definition def.Position = p.peekPos() def.Kind = Object - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() return &def } @@ -175,18 +222,26 @@ func (p *parser) parseImplementsInterfaces() []string { return types } -func (p *parser) parseFieldsDefinition() FieldList { +func (p *parser) parseFieldsDefinition() (FieldList, *CommentGroup) { var defs FieldList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { defs = append(defs, p.parseFieldDefinition()) }) - return defs + return defs, comment } func (p *parser) parseFieldDefinition() *FieldDefinition { var def FieldDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment def.Name = p.parseName() def.Arguments = p.parseArgumentDefs() p.expect(lexer.Colon) @@ -207,7 +262,15 @@ func (p *parser) parseArgumentDefs() ArgumentDefinitionList { func (p *parser) parseArgumentDef() *ArgumentDefinition { var def ArgumentDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment 
def.Name = p.parseName() p.expect(lexer.Colon) def.Type = p.parseTypeReference() @@ -221,7 +284,15 @@ func (p *parser) parseArgumentDef() *ArgumentDefinition { func (p *parser) parseInputValueDef() *FieldDefinition { var def FieldDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment def.Name = p.parseName() p.expect(lexer.Colon) def.Type = p.parseTypeReference() @@ -232,27 +303,31 @@ func (p *parser) parseInputValueDef() *FieldDefinition { return &def } -func (p *parser) parseInterfaceTypeDefinition(description string) *Definition { - p.expectKeyword("interface") +func (p *parser) parseInterfaceTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("interface") var def Definition def.Position = p.peekPos() def.Kind = Interface - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() return &def } -func (p *parser) parseUnionTypeDefinition(description string) *Definition { - p.expectKeyword("union") +func (p *parser) parseUnionTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("union") var def Definition def.Position = p.peekPos() def.Kind = Union - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) def.Types = p.parseUnionMemberTypes() @@ -273,87 +348,101 @@ func (p *parser) parseUnionMemberTypes() []string { return types } -func (p *parser) parseEnumTypeDefinition(description string) *Definition { - p.expectKeyword("enum") +func (p *parser) parseEnumTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("enum") var def Definition def.Position = p.peekPos() def.Kind = Enum - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.EnumValues = p.parseEnumValuesDefinition() + def.EnumValues, def.EndOfDefinitionComment = p.parseEnumValuesDefinition() return &def } -func (p *parser) parseEnumValuesDefinition() EnumValueList { +func (p *parser) parseEnumValuesDefinition() (EnumValueList, *CommentGroup) { var values EnumValueList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { values = append(values, p.parseEnumValueDefinition()) }) - return values + return values, comment } func (p *parser) parseEnumValueDefinition() *EnumValueDefinition { - return &EnumValueDefinition{ - Position: p.peekPos(), - Description: p.parseDescription(), - Name: p.parseName(), - Directives: p.parseDirectives(true), + var def EnumValueDefinition + def.Position = p.peekPos() + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text } + + p.peek() // peek to set 
p.comment + def.AfterDescriptionComment = p.comment + + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + + return &def } -func (p *parser) parseInputObjectTypeDefinition(description string) *Definition { - p.expectKeyword("input") +func (p *parser) parseInputObjectTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("input") var def Definition def.Position = p.peekPos() def.Kind = InputObject - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.Fields = p.parseInputFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseInputFieldsDefinition() return &def } -func (p *parser) parseInputFieldsDefinition() FieldList { +func (p *parser) parseInputFieldsDefinition() (FieldList, *CommentGroup) { var values FieldList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { values = append(values, p.parseInputValueDef()) }) - return values + return values, comment } func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) { - p.expectKeyword("extend") + _, comment := p.expectKeyword("extend") switch p.peek().Value { case "schema": - doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension()) + doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension(comment)) case "scalar": - doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension(comment)) case "type": - doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension(comment)) case "interface": - doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension(comment)) case "union": - doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension(comment)) case "enum": - doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension(comment)) case "input": - doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension(comment)) default: p.unexpectedError() } } -func (p *parser) parseSchemaExtension() *SchemaDefinition { +func (p *parser) parseSchemaExtension(comment *CommentGroup) *SchemaDefinition { p.expectKeyword("schema") var def SchemaDefinition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Directives = p.parseDirectives(true) - p.some(lexer.BraceL, lexer.BraceR, func() { + def.EndOfDefinitionComment = p.some(lexer.BraceL, lexer.BraceR, func() { def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) }) if len(def.Directives) == 0 && len(def.OperationTypes) == 0 { @@ -362,11 +451,12 @@ func (p *parser) parseSchemaExtension() *SchemaDefinition { return &def } -func (p *parser) parseScalarTypeExtension() *Definition { +func (p *parser) parseScalarTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("scalar") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Scalar def.Name = p.parseName() def.Directives = p.parseDirectives(true) @@ 
-376,42 +466,45 @@ func (p *parser) parseScalarTypeExtension() *Definition { return &def } -func (p *parser) parseObjectTypeExtension() *Definition { +func (p *parser) parseObjectTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("type") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Object def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseInterfaceTypeExtension() *Definition { +func (p *parser) parseInterfaceTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("interface") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Interface def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() if len(def.Directives) == 0 && len(def.Fields) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseUnionTypeExtension() *Definition { +func (p *parser) parseUnionTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("union") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Union def.Name = p.parseName() def.Directives = p.parseDirectives(true) @@ -423,43 +516,47 @@ func (p *parser) parseUnionTypeExtension() *Definition { return &def } -func (p *parser) parseEnumTypeExtension() *Definition { +func (p *parser) parseEnumTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("enum") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Enum def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.EnumValues = p.parseEnumValuesDefinition() + def.EnumValues, def.EndOfDefinitionComment = p.parseEnumValuesDefinition() if len(def.Directives) == 0 && len(def.EnumValues) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseInputObjectTypeExtension() *Definition { +func (p *parser) parseInputObjectTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("input") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = InputObject def.Name = p.parseName() def.Directives = p.parseDirectives(false) - def.Fields = p.parseInputFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseInputFieldsDefinition() if len(def.Directives) == 0 && len(def.Fields) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition { - p.expectKeyword("directive") +func (p *parser) parseDirectiveDefinition(description descriptionWithComment) *DirectiveDefinition { + _, comment := p.expectKeyword("directive") p.expect(lexer.At) var def DirectiveDefinition def.Position = p.peekPos() - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Arguments = p.parseArgumentDefs() @@ -486,7 +583,7 @@ func (p *parser) parseDirectiveLocations() []DirectiveLocation { } func (p *parser) parseDirectiveLocation() DirectiveLocation { - name := p.expect(lexer.Name) + name, _ := p.expect(lexer.Name) 
switch name.Value { case `QUERY`: @@ -532,3 +629,8 @@ func (p *parser) parseDirectiveLocation() DirectiveLocation { p.unexpectedToken(name) return "" } + +type descriptionWithComment struct { + text string + comment *CommentGroup +} diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml index 8b6a5d0ca3..705514a995 100644 --- a/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml @@ -15,6 +15,67 @@ object types: Name: "world" Type: String + - name: with comments + input: | + # Hello + # Hello another + type Hello { + # World + # World another + world: String + # end of type comments + } + # end of file comments + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + AfterDescriptionComment: "# World\n# World another\n" + AfterDescriptionComment: "# Hello\n# Hello another\n" + EndOfDefinitionComment: "# end of type comments\n" + Comment: "# end of file comments\n" + + - name: with comments and description + input: | + # Hello + # Hello another + "type description" + # Hello after description + # Hello after description another + type Hello { + # World + # World another + "field description" + # World after description + # World after description another + world: String + # end of definition coments + # end of definition comments another + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "type description" + Name: "Hello" + Fields: [FieldDefinition] + - + Description: "field description" + Name: "world" + Type: String + BeforeDescriptionComment: "# World\n# World another\n" + AfterDescriptionComment: "# World after description\n# World after description another\n" + BeforeDescriptionComment: "# Hello\n# Hello another\n" + AfterDescriptionComment: "# Hello after description\n# Hello after description another\n" + EndOfDefinitionComment: "# end of definition coments\n# end of definition comments another\n" + - name: with description input: | "Description" @@ -35,6 +96,7 @@ object types: - name: with block description input: | + # Before description comment """ Description """ @@ -53,6 +115,8 @@ object types: - Name: "world" Type: String + BeforeDescriptionComment: "# Before description comment\n" + AfterDescriptionComment: "# Even with comments between them\n" - name: with field arg input: | type Hello { @@ -146,8 +210,11 @@ object types: type extensions: - name: Object extension input: | + # comment extend type Hello { + # comment world world: String + # end of definition comment } ast: | @@ -159,6 +226,9 @@ type extensions: - Name: "world" Type: String + AfterDescriptionComment: "# comment world\n" + AfterDescriptionComment: "# comment\n" + EndOfDefinitionComment: "# end of definition comment\n" - name: without any fields input: "extend type Hello implements Greeting" @@ -277,6 +347,30 @@ schema definition: Operation: Operation("query") Type: "Query" + - name: with comments and description + input: | + # before description comment + "description" + # after description comment + schema { + # before field comment + query: Query + # after field comment + } + ast: | + + Schema: [SchemaDefinition] + - + Description: "description" + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("query") + Type: "Query" + Comment: "# before field comment\n" + BeforeDescriptionComment: "# before 
description comment\n" + AfterDescriptionComment: "# after description comment\n" + EndOfDefinitionComment: "# after field comment\n" + schema extensions: - name: simple input: | @@ -292,6 +386,26 @@ schema extensions: Operation: Operation("mutation") Type: "Mutation" + - name: with comment and description + input: | + # before extend comment + extend schema { + # before field comment + mutation: Mutation + # after field comment + } + ast: | + + SchemaExtension: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("mutation") + Type: "Mutation" + Comment: "# before field comment\n" + AfterDescriptionComment: "# before extend comment\n" + EndOfDefinitionComment: "# after field comment\n" + - name: directive only input: "extend schema @directive" ast: | diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go index c354ec0dfa..43766f2215 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go @@ -2,6 +2,7 @@ package validator import ( _ "embed" + "github.com/vektah/gqlparser/v2/ast" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql index bdca0096a5..e199da5d9f 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql +++ b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql @@ -27,6 +27,9 @@ directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITIO "The @specifiedBy built-in directive is used within the type system definition language to provide a scalar specification URL for specifying the behavior of custom scalar types." directive @specifiedBy(url: String!) on SCALAR +"The @defer directive may be specified on a fragment spread to imply de-prioritization, that causes the fragment to be omitted in the initial response, and delivered as a subsequent response afterward. A query with @defer directive will cause the request to potentially return multiple responses, where non-deferred data is delivered in the initial response and data deferred delivered in a subsequent response. @include and @skip take precedence over @defer." +directive @defer(if: Boolean = true, label: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT + type __Schema { description: String types: [__Type!]! diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go index aa83c6967e..24d4f3db05 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go @@ -6,6 +6,8 @@ import ( "strings" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) @@ -41,11 +43,12 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) return nil } - var suggestedObjectTypes []string + possibleTypes := walker.Schema.GetPossibleTypes(parent) + suggestedObjectTypes := make([]string, 0, len(possibleTypes)) var suggestedInterfaceTypes []string interfaceUsageCount := map[string]int{} - for _, possibleType := range walker.Schema.GetPossibleTypes(parent) { + for _, possibleType := range possibleTypes { field := possibleType.Fields.ForName(name) if field == nil { continue @@ -64,7 +67,7 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) } } - suggestedTypes := append(suggestedInterfaceTypes, suggestedObjectTypes...) + suggestedTypes := concatSlice(suggestedInterfaceTypes, suggestedObjectTypes) sort.SliceStable(suggestedTypes, func(i, j int) bool { typeA, typeB := suggestedTypes[i], suggestedTypes[j] @@ -78,6 +81,16 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) return suggestedTypes } +// By employing a full slice expression (slice[low:high:max]), +// where max is set to the slice’s length, +// we ensure that appending elements results +// in a slice backed by a distinct array. +// This method prevents the shared array issue +func concatSlice(first []string, second []string) []string { + n := len(first) + return append(first[:n:n], second...) +} + // For the field name provided, determine if there are any similar field names // that may be the result of a typo. func getSuggestedFieldNames(parent *ast.Definition, name string) []string { @@ -85,7 +98,7 @@ func getSuggestedFieldNames(parent *ast.Definition, name string) []string { return nil } - var possibleFieldNames []string + possibleFieldNames := make([]string, 0, len(parent.Fields)) for _, field := range parent.Fields { possibleFieldNames = append(possibleFieldNames, field.Name) } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go index 5215f697e7..81ef861ba5 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go @@ -4,6 +4,8 @@ import ( "fmt" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go index da5a796218..c187dabf30 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go index 18fe41fd78..f7bae811c9 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) @@ -12,7 +14,7 @@ func init() { Line int Column int } - var seen map[mayNotBeUsedDirective]bool = map[mayNotBeUsedDirective]bool{} + seen := map[mayNotBeUsedDirective]bool{} observers.OnDirective(func(walker *Walker, directive *ast.Directive) { if directive.Definition == nil { addError( diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go index b7427d0d0d..3afd9c1c16 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go index 4c60c8d8c2..60bc0d5242 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go @@ -4,6 +4,8 @@ import ( "fmt" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go index 7abfbf62f1..902939d354 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go index d275285425..fe8bb2039a 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go index da73f3499f..a953174f72 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go @@ -5,6 +5,8 @@ import ( "strings" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go index 91df727a25..46c18d120a 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go index dfc896725a..59d9c15c49 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go @@ -2,12 +2,13 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) func init() { AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) { - inFragmentDefinition := false fragmentNameUsed := make(map[string]bool) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go index df2e5f4b7f..d308810943 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go index 38e1efa11b..eaa2035ea5 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go @@ -6,11 +6,12 @@ import ( "reflect" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) func init() { - AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) { /** * Algorithm: @@ -302,10 +303,8 @@ func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFr } func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { - var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { - if fragmentSpreadA.Name == fragmentSpreadB.Name { return } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go index a3f795c97b..244e5f20cb 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go @@ -2,12 +2,13 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) func init() { AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) { - validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) { if parentDef == nil { return diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go index d8ed652092..ab79163b16 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go index 718bc6834a..605ab9e8e2 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go index a9e5bf6335..7d4c684318 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go @@ -5,6 +5,8 @@ import ( "strings" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go index 1d9a50ab22..e977d63876 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go index 52dfb21eb3..47971ee1a1 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go index 8c348aea06..2c44a4372d 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go index 092be671c0..c5fce8ff53 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go index 4d41b60ae3..49ffbe47f4 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go index 6481ef4cd2..c93948c177 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . 
"github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go index 22bea77117..914e428ea0 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go @@ -6,6 +6,8 @@ import ( "strconv" "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) @@ -157,7 +159,7 @@ func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption { return Message(`Float cannot represent non numeric value: %s`, v.String()) case "ID", "ID!": return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String()) - //case "Enum": + // case "Enum": // return Message(`Enum "%s" cannot represent non-enum value: %s`, v.ExpectedType.String(), v.String()) default: if v.Definition.Kind == ast.Enum { diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go index 4ea94e5a82..d16ee02156 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go index eef7435400..e3fd6fbbd6 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go @@ -2,6 +2,8 @@ package validator import ( "github.com/vektah/gqlparser/v2/ast" + + //nolint:revive // Validator rules each use dot imports for convenience. . "github.com/vektah/gqlparser/v2/validator" ) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/schema.go b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go index 976ed8304e..d859028444 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/schema.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go @@ -5,20 +5,21 @@ import ( "strconv" "strings" + //nolint:revive . "github.com/vektah/gqlparser/v2/ast" "github.com/vektah/gqlparser/v2/gqlerror" "github.com/vektah/gqlparser/v2/parser" ) func LoadSchema(inputs ...*Source) (*Schema, error) { - ast, err := parser.ParseSchemas(inputs...) + sd, err := parser.ParseSchemas(inputs...) 
if err != nil { - return nil, err + return nil, gqlerror.WrapIfUnwrapped(err) } - return ValidateSchemaDocument(ast) + return ValidateSchemaDocument(sd) } -func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { +func ValidateSchemaDocument(sd *SchemaDocument) (*Schema, error) { schema := Schema{ Types: map[string]*Definition{}, Directives: map[string]*DirectiveDefinition{}, @@ -26,16 +27,16 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { Implements: map[string][]*Definition{}, } - for i, def := range ast.Definitions { + for i, def := range sd.Definitions { if schema.Types[def.Name] != nil { return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name) } - schema.Types[def.Name] = ast.Definitions[i] + schema.Types[def.Name] = sd.Definitions[i] } - defs := append(DefinitionList{}, ast.Definitions...) + defs := append(DefinitionList{}, sd.Definitions...) - for _, ext := range ast.Extensions { + for _, ext := range sd.Extensions { def := schema.Types[ext.Name] if def == nil { schema.Types[ext.Name] = &Definition{ @@ -79,13 +80,13 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { } } - for i, dir := range ast.Directives { + for i, dir := range sd.Directives { if schema.Directives[dir.Name] != nil { // While the spec says SDL must not (§3.5) explicitly define builtin // scalars, it may (§3.13) define builtin directives. Here we check for // that, and reject doubly-defined directives otherwise. switch dir.Name { - case "include", "skip", "deprecated", "specifiedBy": // the builtins + case "include", "skip", "deprecated", "specifiedBy", "defer": // the builtins // In principle here we might want to validate that the // directives are the same. But they might not be, if the // server has an older spec than we do. (Plus, validating this @@ -98,16 +99,16 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name) } } - schema.Directives[dir.Name] = ast.Directives[i] + schema.Directives[dir.Name] = sd.Directives[i] } - if len(ast.Schema) > 1 { - return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.") + if len(sd.Schema) > 1 { + return nil, gqlerror.ErrorPosf(sd.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.") } - if len(ast.Schema) == 1 { - schema.Description = ast.Schema[0].Description - for _, entrypoint := range ast.Schema[0].OperationTypes { + if len(sd.Schema) == 1 { + schema.Description = sd.Schema[0].Description + for _, entrypoint := range sd.Schema[0].OperationTypes { def := schema.Types[entrypoint.Type] if def == nil { return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type) @@ -123,7 +124,7 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { } } - for _, ext := range ast.SchemaExtension { + for _, ext := range sd.SchemaExtension { for _, entrypoint := range ext.OperationTypes { def := schema.Types[entrypoint.Type] if def == nil { @@ -151,7 +152,7 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { // Inferred root operation type names should be performed only when a `schema` directive is // **not** provided, when it is, `Mutation` and `Subscription` becomes valid types and are not // assigned as a root operation on the schema. 
- if len(ast.Schema) == 0 { + if len(sd.Schema) == 0 { if schema.Query == nil && schema.Types["Query"] != nil { schema.Query = schema.Types["Query"] } @@ -283,6 +284,9 @@ func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error { return gqlerror.ErrorPosf(def.Position, "%s %s: non-enum value %s.", def.Kind, def.Name, value.Name) } } + if err := validateDirectives(schema, value.Directives, LocationEnumValue, nil); err != nil { + return err + } } case InputObject: if len(def.Fields) == 0 { @@ -358,11 +362,12 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo if currentDirective != nil && dir.Name == currentDirective.Name { return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name) } - if schema.Directives[dir.Name] == nil { + dirDefinition := schema.Directives[dir.Name] + if dirDefinition == nil { return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name) } validKind := false - for _, dirLocation := range schema.Directives[dir.Name].Locations { + for _, dirLocation := range dirDefinition.Locations { if dirLocation == location { validKind = true break @@ -371,6 +376,18 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo if !validKind { return gqlerror.ErrorPosf(dir.Position, "Directive %s is not applicable on %s.", dir.Name, location) } + for _, arg := range dir.Arguments { + if dirDefinition.Arguments.ForName(arg.Name) == nil { + return gqlerror.ErrorPosf(arg.Position, "Undefined argument %s for directive %s.", arg.Name, dir.Name) + } + } + for _, schemaArg := range dirDefinition.Arguments { + if schemaArg.Type.NonNull && schemaArg.DefaultValue == nil { + if arg := dir.Arguments.ForName(schemaArg.Name); arg == nil || arg.Value.Kind == NullValue { + return gqlerror.ErrorPosf(dir.Position, "Argument %s for directive %s cannot be null.", schemaArg.Name, dir.Name) + } + } + } dir.Definition = schema.Directives[dir.Name] } return nil @@ -378,7 +395,7 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo func validateImplements(schema *Schema, def *Definition, intfName string) *gqlerror.Error { // see validation rules at the bottom of - // https://facebook.github.io/graphql/October2021/#sec-Objects + // https://spec.graphql.org/October2021/#sec-Objects intf := schema.Types[intfName] if intf == nil { return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml index 7034a4697c..22f125bec4 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml @@ -80,6 +80,15 @@ object types: message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.' locations: [{line: 2, column: 3}] + - name: field argument list must not be empty + input: | + type FooBar { + foo(): ID + } + error: + message: 'expected at least one definition, found )' + locations: [{line: 2, column: 7}] + - name: check reserved names on type field argument input: | type FooBar { @@ -528,7 +537,16 @@ directives: directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT directive @skip(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - - name: must be declared + - name: must be declared (type) + input: | + type User @foo { + name: String + } + error: + message: "Undefined directive foo." + locations: [{line: 1, column: 12}] + + - name: must be declared (field) input: | type User { name: String @foo @@ -537,6 +555,15 @@ directives: message: "Undefined directive foo." locations: [{line: 2, column: 17}] + - name: must be declared (enum) + input: | + enum Unit { + METER @foo + } + error: + message: "Undefined directive foo." + locations: [{line: 2, column: 10}] + - name: cannot be self-referential input: | directive @A(foo: Int! @A) on FIELD_DEFINITION @@ -604,6 +631,32 @@ directives: type P { name: String @testField } interface I { id: ID @testField } + - name: Invalid directive argument not allowed + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo(foobla: 11)} + + error: + message: 'Undefined argument foobla for directive foo.' + locations: [{line: 2, column: 21}] + + - name: non-null argument must be provided + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo } + + error: + message: 'Argument bla for directive foo cannot be null.' + locations: [{line: 2, column: 17}] + + - name: non-null argument must not be null + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo(bla: null) } + + error: + message: 'Argument bla for directive foo cannot be null.' + locations: [{line: 2, column: 17}] entry points: - name: multiple schema entry points diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/validator.go b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go index 34bf93db3d..b4f37ce276 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/validator.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go @@ -1,6 +1,7 @@ package validator import ( + //nolint:revive . 
"github.com/vektah/gqlparser/v2/ast" "github.com/vektah/gqlparser/v2/gqlerror" ) @@ -24,7 +25,15 @@ func AddRule(name string, f ruleFunc) { func Validate(schema *Schema, doc *QueryDocument) gqlerror.List { var errs gqlerror.List - + if schema == nil { + errs = append(errs, gqlerror.Errorf("cannot validate as Schema is nil")) + } + if doc == nil { + errs = append(errs, gqlerror.Errorf("cannot validate as QueryDocument is nil")) + } + if len(errs) > 0 { + return errs + } observers := &Events{} for i := range rules { rule := rules[i] diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/vars.go b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go index 8dbb05bc15..c386b6b946 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/vars.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go @@ -11,7 +11,7 @@ import ( "github.com/vektah/gqlparser/v2/gqlerror" ) -var UnexpectedType = fmt.Errorf("Unexpected Type") +var ErrUnexpectedType = fmt.Errorf("Unexpected Type") // VariableValues coerces and validates variable values func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, error) { @@ -53,8 +53,8 @@ func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables m } else { rv := reflect.ValueOf(val) - jsonNumber, isJsonNumber := val.(json.Number) - if isJsonNumber { + jsonNumber, isJSONNumber := val.(json.Number) + if isJSONNumber { if v.Type.NamedType == "Int" { n, err := jsonNumber.Int64() if err != nil { @@ -67,7 +67,6 @@ func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables m return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %f as %s", f, v.Type.NamedType) } rv = reflect.ValueOf(f) - } } if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface { @@ -222,7 +221,7 @@ func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflec if fieldDef.Type.NonNull && field.IsNil() { return val, gqlerror.ErrorPathf(v.path, "cannot be null") } - //allow null object field and skip it + // allow null object field and skip it if !fieldDef.Type.NonNull && field.IsNil() { continue } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/walk.go b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go index 6ee69e4c2e..d3140746fb 100644 --- a/vendor/github.com/vektah/gqlparser/v2/validator/walk.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go @@ -22,27 +22,35 @@ type Events struct { func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) { o.operationVisitor = append(o.operationVisitor, f) } + func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) { o.field = append(o.field, f) } + func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) { o.fragment = append(o.fragment, f) } + func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) { o.inlineFragment = append(o.inlineFragment, f) } + func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) { o.fragmentSpread = append(o.fragmentSpread, f) } + func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) { o.directive = append(o.directive, f) } + func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) { o.directiveList = append(o.directiveList, f) } + func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) { o.value = append(o.value, f) } + func (o *Events) 
OnVariable(f func(walker *Walker, variable *ast.VariableDefinition)) { o.variable = append(o.variable, f) } @@ -277,7 +285,7 @@ func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) { w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread) if def != nil && !w.validatedFragmentSpreads[def.Name] { - // prevent inifinite recursion + // prevent infinite recursion w.validatedFragmentSpreads[def.Name] = true w.walkSelectionSet(nextParentDef, def.SelectionSet) } diff --git a/vendor/modules.txt b/vendor/modules.txt index 005da6b111..02f2a28442 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -206,8 +206,8 @@ github.com/emirpasic/gods/lists/arraylist github.com/emirpasic/gods/trees github.com/emirpasic/gods/trees/binaryheap github.com/emirpasic/gods/utils -# github.com/fatih/color v1.10.0 -## explicit; go 1.13 +# github.com/fatih/color v1.16.0 +## explicit; go 1.17 github.com/fatih/color # github.com/fatih/structs v1.1.0 ## explicit @@ -362,11 +362,11 @@ github.com/google/uuid # github.com/gorilla/websocket v1.5.0 ## explicit; go 1.12 github.com/gorilla/websocket -# github.com/hashicorp/go-cleanhttp v0.5.1 -## explicit -github.com/hashicorp/go-cleanhttp -# github.com/hashicorp/go-retryablehttp v0.6.7 +# github.com/hashicorp/go-cleanhttp v0.5.2 ## explicit; go 1.13 +github.com/hashicorp/go-cleanhttp +# github.com/hashicorp/go-retryablehttp v0.7.7 +## explicit; go 1.19 github.com/hashicorp/go-retryablehttp # github.com/hashicorp/go-version v1.1.0 ## explicit @@ -459,10 +459,10 @@ github.com/maruel/natural github.com/mash/go-tempfile-suffix # github.com/matryer/is v1.2.0 ## explicit -# github.com/mattn/go-colorable v0.1.12 -## explicit; go 1.13 +# github.com/mattn/go-colorable v0.1.13 +## explicit; go 1.15 github.com/mattn/go-colorable -# github.com/mattn/go-isatty v0.0.18 +# github.com/mattn/go-isatty v0.0.20 ## explicit; go 1.15 github.com/mattn/go-isatty # github.com/mattn/go-localereader v0.0.1 @@ -632,8 +632,8 @@ github.com/vbauerster/mpb/v7 github.com/vbauerster/mpb/v7/cwriter github.com/vbauerster/mpb/v7/decor github.com/vbauerster/mpb/v7/internal -# github.com/vektah/gqlparser/v2 v2.5.1 -## explicit; go 1.16 +# github.com/vektah/gqlparser/v2 v2.5.16 +## explicit; go 1.19 github.com/vektah/gqlparser/v2 github.com/vektah/gqlparser/v2/ast github.com/vektah/gqlparser/v2/gqlerror
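
The concatSlice helper added to fields_on_correct_type.go relies on Go's full slice expression: capping the capacity at the length forces append to allocate a fresh backing array instead of writing into the first slice's spare capacity, which is exactly the aliasing the comment warns about. A minimal standalone sketch of that behavior (slice contents are invented for illustration, not the validator's real data):

package main

import "fmt"

func main() {
	// first has length 2 but capacity 4, like a slice that has been
	// appended to and still has spare room in its backing array.
	first := make([]string, 0, 4)
	first = append(first, "InterfaceA", "InterfaceB")

	// Plain append fits "ObjectA" into first's spare capacity, so the
	// result shares first's backing array.
	shared := append(first, "ObjectA")

	// The full slice expression first[:n:n] caps capacity at the length,
	// which forces this append to allocate and copy into a new array.
	n := len(first)
	detached := append(first[:n:n], "ObjectA")

	shared[0] = "overwritten"

	fmt.Println(first[0])    // "overwritten": still aliases the shared array
	fmt.Println(detached[0]) // "InterfaceA": unaffected, separate array
}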
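
The new argument checks in validateDirectives (undefined arguments, and required non-null arguments that are missing or explicitly null) surface as schema load errors, as the added schema_test.yml cases show. A rough sketch of how a caller would hit one of them through validator.LoadSchema; the schema text here is invented for illustration, and validator.Prelude is assumed to supply the builtin scalars so only the directive usage is at fault:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/v2/ast"
	"github.com/vektah/gqlparser/v2/validator"
)

func main() {
	// @foo declares a required (non-null) argument "bla"; the usage on
	// Query.f omits it, which the new validateDirectives checks reject.
	src := &ast.Source{
		Name: "example.graphql",
		Input: `
directive @foo(bla: Int!) on FIELD_DEFINITION
type Query { f: Int @foo }
`,
	}

	_, err := validator.LoadSchema(validator.Prelude, src)
	if err != nil {
		fmt.Println(err) // e.g. "Argument bla for directive foo cannot be null."
	}
}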
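
validator.Validate now guards against nil inputs up front instead of letting the walker dereference them. A small sketch of the new behavior, assuming only the public Validate signature shown in the hunk above:

package main

import (
	"fmt"

	"github.com/vektah/gqlparser/v2/validator"
)

func main() {
	// A nil schema and/or nil query document no longer reaches the rule
	// walkers; Validate returns explanatory errors immediately.
	errs := validator.Validate(nil, nil)
	for _, err := range errs {
		fmt.Println(err.Message)
	}
	// Expected, per the new guards:
	//   cannot validate as Schema is nil
	//   cannot validate as QueryDocument is nil
}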