From 20b4dcef71a932c5e8d3260f18af2cc7a6898d84 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 13:23:20 +0100 Subject: [PATCH 01/25] Dockerised build for binding regeneration --- .tool-versions | 3 ++- Dockerfile.bindings | 39 +++++++++++++++++++++++++++ cmd/scip/tests/reprolang/package.json | 2 +- dev/proto-generate-in-docker.sh | 10 +++++++ dev/proto-generate.sh | 2 +- 5 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 Dockerfile.bindings create mode 100755 dev/proto-generate-in-docker.sh diff --git a/.tool-versions b/.tool-versions index ed5d11a2..2f437e3f 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,5 +1,6 @@ golang 1.19.10 -nodejs 16.7.0 +nodejs 16.20.2 shellcheck 0.7.1 yarn 1.22.17 rust 1.71.0 +python 3.11.9 diff --git a/Dockerfile.bindings b/Dockerfile.bindings new file mode 100644 index 00000000..9bca4069 --- /dev/null +++ b/Dockerfile.bindings @@ -0,0 +1,39 @@ +FROM ubuntu:jammy + +ENV DEBIAN_FRONTEND noninteractive +RUN apt-get update -q && apt-get install -y git curl xz-utils tar gpg build-essential libssl-dev zlib1g-dev \ + libbz2-dev libreadline-dev libsqlite3-dev \ + libncursesw5-dev tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev \ + libffi8 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5 pkg-config + +RUN adduser --shell /bin/bash --home /asdf --disabled-password asdf +ENV PATH="${PATH}:/asdf/.asdf/shims:/asdf/.asdf/bin" + +USER asdf +RUN git clone --depth 1 https://github.com/asdf-vm/asdf.git $HOME/.asdf && \ + echo '. $HOME/.asdf/asdf.sh' >> $HOME/.bashrc && \ + echo '. $HOME/.asdf/asdf.sh' >> $HOME/.profile + +RUN . ~/.bashrc + +# YOLO +RUN curl --proto '=https' --tlsv1.2 -sSf https://get-ghcup.haskell.org | sh + +ENV PATH="${PATH}:/asdf/.ghcup/bin" + +RUN asdf plugin add nodejs && \ + asdf plugin add golang && \ + asdf plugin add shellcheck && \ + asdf plugin add yarn && \ + asdf plugin add python && \ + asdf plugin add rust + +COPY .tool-versions . + +RUN asdf install + + +WORKDIR /src + +ENTRYPOINT ["./dev/proto-generate.sh"] + diff --git a/cmd/scip/tests/reprolang/package.json b/cmd/scip/tests/reprolang/package.json index 2dcba2cc..f4d941cf 100644 --- a/cmd/scip/tests/reprolang/package.json +++ b/cmd/scip/tests/reprolang/package.json @@ -12,6 +12,6 @@ "nan": "^2.15.0" }, "devDependencies": { - "tree-sitter-cli": "^0.20.4" + "tree-sitter-cli": "^0.20.8" } } diff --git a/dev/proto-generate-in-docker.sh b/dev/proto-generate-in-docker.sh new file mode 100755 index 00000000..f6615803 --- /dev/null +++ b/dev/proto-generate-in-docker.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir + +IMAGE_NAME="sourcegraph/scip-bindings-env" + +docker build . -t $IMAGE_NAME -f Dockerfile.bindings +docker run -v $(pwd):/src $IMAGE_NAME diff --git a/dev/proto-generate.sh b/dev/proto-generate.sh index 1ca6be37..e1694bfb 100755 --- a/dev/proto-generate.sh +++ b/dev/proto-generate.sh @@ -19,7 +19,7 @@ fi echo "--- Haskell ---" command -v cabal > /dev/null 2>&1 || { echo >&2 "Haskell's 'cabal' command is not installed. 
Please install it first via https://www.haskell.org/ghcup/"; exit 1; } -cabal install proto-lens-protoc-0.7.1.1 ghc-source-gen-0.4.3.0 --overwrite-policy=always --ghc-options='-j2 +RTS -A32m' --installdir="$PWD/.bin" +cabal install proto-lens-protoc-0.8.0.1 ghc-source-gen-0.4.5.0 --overwrite-policy=always --ghc-options='-j2 +RTS -A32m' --installdir="$PWD/.bin" # buf requires the generator to be named protoc-gen-* ln -sfv "$PWD/.bin/proto-lens-protoc" "$PWD/.bin/protoc-gen-haskell" PATH="$PWD/.bin:$PATH" From 2ffd6a8e5967acc0352c65bc046ab2f0829ad368 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 13:37:12 +0100 Subject: [PATCH 02/25] Preload haskell dependencies as they're by far the slowest --- Dockerfile.bindings | 5 +- bindings/haskell/src/Proto/Scip.hs | 253 ++++++++--------------------- 2 files changed, 72 insertions(+), 186 deletions(-) diff --git a/Dockerfile.bindings b/Dockerfile.bindings index 9bca4069..e97a1078 100644 --- a/Dockerfile.bindings +++ b/Dockerfile.bindings @@ -30,8 +30,9 @@ RUN asdf plugin add nodejs && \ COPY .tool-versions . -RUN asdf install - +# We run binding regeneration in ephemeral location +# to make sure all dependencies are pre-downloaded +RUN asdf install && cabal install proto-lens-protoc-0.8.0.1 ghc-source-gen-0.4.5.0 WORKDIR /src diff --git a/bindings/haskell/src/Proto/Scip.hs b/bindings/haskell/src/Proto/Scip.hs index fd28ba3d..81fdb913 100644 --- a/bindings/haskell/src/Proto/Scip.hs +++ b/bindings/haskell/src/Proto/Scip.hs @@ -172,26 +172,16 @@ instance Data.ProtoLens.Message Descriptor where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "name" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"name") y x) 18 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "disambiguator" loop (Lens.Family2.set @@ -572,42 +562,27 @@ instance Data.ProtoLens.Message Diagnostic where mutable'tags 18 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "code" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"code") y x) mutable'tags 26 -> do y <- (Data.ProtoLens.Encoding.Bytes.) 
- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "message" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"message") y x) mutable'tags 34 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "source" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"source") y x) @@ -1036,28 +1011,18 @@ instance Data.ProtoLens.Message Document where case tag of 34 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "language" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"language") y x) mutable'occurrences mutable'symbols 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "relative_path" loop (Lens.Family2.set @@ -1086,14 +1051,9 @@ instance Data.ProtoLens.Message Document where loop x mutable'occurrences v 42 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "text" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"text") y x) @@ -2459,14 +2419,9 @@ instance Data.ProtoLens.Message Metadata where (Lens.Family2.set (Data.ProtoLens.Field.field @"toolInfo") y x) 26 -> do y <- (Data.ProtoLens.Encoding.Bytes.) 
- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "project_root" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"projectRoot") y x) @@ -2868,14 +2823,9 @@ instance Data.ProtoLens.Message Occurrence where mutable'overrideDocumentation y 18 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "symbol" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"symbol") y x) @@ -2893,14 +2843,9 @@ instance Data.ProtoLens.Message Occurrence where mutable'overrideDocumentation mutable'range 34 -> do !y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "override_documentation" v <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO (Data.ProtoLens.Encoding.Growing.append @@ -3232,38 +3177,23 @@ instance Data.ProtoLens.Message Package where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "manager" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"manager") y x) 18 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "name" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"name") y x) 26 -> do y <- (Data.ProtoLens.Encoding.Bytes.) 
- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "version" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"version") y x) wire @@ -3638,14 +3568,9 @@ instance Data.ProtoLens.Message Relationship where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "symbol" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"symbol") y x) 16 @@ -3990,14 +3915,9 @@ instance Data.ProtoLens.Message Symbol where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "scheme" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"scheme") y x) @@ -4432,28 +4352,18 @@ instance Data.ProtoLens.Message SymbolInformation where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "symbol" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"symbol") y x) mutable'documentation mutable'relationships 26 -> do !y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "documentation" v <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO (Data.ProtoLens.Encoding.Growing.append @@ -4483,14 +4393,9 @@ instance Data.ProtoLens.Message SymbolInformation where mutable'documentation mutable'relationships 50 -> do y <- (Data.ProtoLens.Encoding.Bytes.) 
- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "display_name" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"displayName") y x) @@ -4507,14 +4412,9 @@ instance Data.ProtoLens.Message SymbolInformation where mutable'documentation mutable'relationships 66 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "enclosing_symbol" loop (Lens.Family2.set @@ -6000,42 +5900,27 @@ instance Data.ProtoLens.Message ToolInfo where case tag of 10 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "name" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"name") y x) mutable'arguments 18 -> do y <- (Data.ProtoLens.Encoding.Bytes.) - (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "version" loop (Lens.Family2.set (Data.ProtoLens.Field.field @"version") y x) mutable'arguments 26 -> do !y <- (Data.ProtoLens.Encoding.Bytes.) 
- (do value <- do len <- Data.ProtoLens.Encoding.Bytes.getVarInt - Data.ProtoLens.Encoding.Bytes.getBytes - (Prelude.fromIntegral len) - Data.ProtoLens.Encoding.Bytes.runEither - (case Data.Text.Encoding.decodeUtf8' value of - (Prelude.Left err) - -> Prelude.Left (Prelude.show err) - (Prelude.Right r) -> Prelude.Right r)) + (do len <- Data.ProtoLens.Encoding.Bytes.getVarInt + Data.ProtoLens.Encoding.Bytes.getText + (Prelude.fromIntegral len)) "arguments" v <- Data.ProtoLens.Encoding.Parser.Unsafe.unsafeLiftIO (Data.ProtoLens.Encoding.Growing.append mutable'arguments y) From feb713784b3a3eef5b27623adddf1b78225dda11 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:10:27 +0100 Subject: [PATCH 03/25] Pin image to a concrete SHA --- .dockerignore | 1 + Dockerfile.bindings | 13 +++++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..cf709889 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +**/node_modules diff --git a/Dockerfile.bindings b/Dockerfile.bindings index e97a1078..477d5c7c 100644 --- a/Dockerfile.bindings +++ b/Dockerfile.bindings @@ -1,6 +1,7 @@ -FROM ubuntu:jammy +FROM ubuntu@sha256:19478ce7fc2ffbce89df29fea5725a8d12e57de52eb9ea570890dc5852aac1ac ENV DEBIAN_FRONTEND noninteractive + RUN apt-get update -q && apt-get install -y git curl xz-utils tar gpg build-essential libssl-dev zlib1g-dev \ libbz2-dev libreadline-dev libsqlite3-dev \ libncursesw5-dev tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev \ @@ -30,11 +31,15 @@ RUN asdf plugin add nodejs && \ COPY .tool-versions . -# We run binding regeneration in ephemeral location -# to make sure all dependencies are pre-downloaded -RUN asdf install && cabal install proto-lens-protoc-0.8.0.1 ghc-source-gen-0.4.5.0 +RUN asdf install && \ + # Pre-fetch Haskell dependencies as they take the longest time by far + # TODO(anton): run the proto-generate.sh script during the build time to + # pre-fecth all dependencies. I attempted to do so but ran into issues + # with Yarn + cabal install proto-lens-protoc-0.8.0.1 ghc-source-gen-0.4.5.0 WORKDIR /src ENTRYPOINT ["./dev/proto-generate.sh"] + From 8130c6c32ee115597efb65a2f381d14d03d80ca4 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:15:58 +0100 Subject: [PATCH 04/25] README --- Development.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/Development.md b/Development.md index 3743a967..8e72c827 100644 --- a/Development.md +++ b/Development.md @@ -27,10 +27,20 @@ ## Code generation 1. Regenerating definitions after changing the schema in [scip.proto](./scip.proto). + + If you have **asdf** setup on your machine, you can use the binding generation script directly: + ``` ./dev/proto-generate.sh ``` + For the Haskell bindings, see `bindings/haskell/README.md`. + + You can also run the same script in a pre-configured Docker environment, + by running `./dev/proto-generate-in-docker.sh` - this command will + build the environment and run ./dev/proto-generate.sh script. The only + dependency you need is Docker. + 2. Regenerating snapshots after making changes to the CLI. 
``` go test ./cmd/scip -update-snapshots From 6785b05d286b7abf371373ddc698f27ac505aa49 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:17:50 +0100 Subject: [PATCH 05/25] Use dockerised proto command --- .github/workflows/protobuf.yml | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/.github/workflows/protobuf.yml b/.github/workflows/protobuf.yml index 93652ba0..c17067ea 100644 --- a/.github/workflows/protobuf.yml +++ b/.github/workflows/protobuf.yml @@ -10,21 +10,12 @@ on: - 'buf**' - '.tool-versions' - 'dev/proto-generate.sh' + - 'dev/proto-generate-in-docker.sh' + - 'Dockerfile.bindings' jobs: protoc-gen-up-to-date: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Haskell - uses: haskell/actions/setup@v2 - with: - ghc-version: '8.10' - cabal-version: 'latest' - - uses: ./.github/actions/asdf - with: - js: true - rust: true - golang: true - - run: ./dev/proto-generate.sh - - run: git diff --exit-code + - run: ./dev/proto-generate-in-docker.sh From 39946f81de6f22af742c6efc04376a20afcce8b0 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:19:49 +0100 Subject: [PATCH 06/25] Run prettier --- Development.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Development.md b/Development.md index 8e72c827..fb0518d8 100644 --- a/Development.md +++ b/Development.md @@ -37,8 +37,8 @@ For the Haskell bindings, see `bindings/haskell/README.md`. You can also run the same script in a pre-configured Docker environment, - by running `./dev/proto-generate-in-docker.sh` - this command will - build the environment and run ./dev/proto-generate.sh script. The only + by running `./dev/proto-generate-in-docker.sh` - this command will + build the environment and run ./dev/proto-generate.sh script. The only dependency you need is Docker. 2. Regenerating snapshots after making changes to the CLI. From c792e114383e707cabcbb7722fd1f07b9f52600e Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:37:40 +0100 Subject: [PATCH 07/25] Include reprolang parser in generation --- Development.md | 6 +-- Dockerfile.bindings | 2 +- cmd/scip/tests/reprolang/src/parser.c | 47 ++++++++++++++++++- ...-docker.sh => build-docker-environment.sh} | 3 +- dev/generate-all-in-docker.sh | 10 ++++ yarn.lock | 8 ++-- 6 files changed, 65 insertions(+), 11 deletions(-) rename dev/{proto-generate-in-docker.sh => build-docker-environment.sh} (61%) create mode 100755 dev/generate-all-in-docker.sh diff --git a/Development.md b/Development.md index fb0518d8..11338fed 100644 --- a/Development.md +++ b/Development.md @@ -37,9 +37,9 @@ For the Haskell bindings, see `bindings/haskell/README.md`. You can also run the same script in a pre-configured Docker environment, - by running `./dev/proto-generate-in-docker.sh` - this command will - build the environment and run ./dev/proto-generate.sh script. The only - dependency you need is Docker. + by running `./dev/generate-all-in-docker.sh` - this command will + build the environment and generate both the proto bindings, and the reprolang + parser generation. The only dependency you need is Docker. 2. Regenerating snapshots after making changes to the CLI. 
``` diff --git a/Dockerfile.bindings b/Dockerfile.bindings index 477d5c7c..5fbc3d4d 100644 --- a/Dockerfile.bindings +++ b/Dockerfile.bindings @@ -40,6 +40,6 @@ RUN asdf install && \ WORKDIR /src -ENTRYPOINT ["./dev/proto-generate.sh"] +CMD ["bash", "-c", "./dev/proto-generate.sh && ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh"] diff --git a/cmd/scip/tests/reprolang/src/parser.c b/cmd/scip/tests/reprolang/src/parser.c index e69f026c..9935f8ff 100644 --- a/cmd/scip/tests/reprolang/src/parser.c +++ b/cmd/scip/tests/reprolang/src/parser.c @@ -5,7 +5,7 @@ #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #endif -#define LANGUAGE_VERSION 13 +#define LANGUAGE_VERSION 14 #define STATE_COUNT 41 #define LARGE_STATE_COUNT 2 #define SYMBOL_COUNT 33 @@ -327,6 +327,50 @@ static const uint16_t ts_non_terminal_alias_map[] = { 0, }; +static const TSStateId ts_primary_state_ids[STATE_COUNT] = { + [0] = 0, + [1] = 1, + [2] = 2, + [3] = 3, + [4] = 4, + [5] = 5, + [6] = 6, + [7] = 7, + [8] = 8, + [9] = 9, + [10] = 10, + [11] = 11, + [12] = 12, + [13] = 13, + [14] = 14, + [15] = 15, + [16] = 16, + [17] = 17, + [18] = 18, + [19] = 19, + [20] = 20, + [21] = 21, + [22] = 22, + [23] = 23, + [24] = 24, + [25] = 25, + [26] = 26, + [27] = 27, + [28] = 28, + [29] = 29, + [30] = 30, + [31] = 31, + [32] = 32, + [33] = 33, + [34] = 34, + [35] = 35, + [36] = 36, + [37] = 37, + [38] = 38, + [39] = 39, + [40] = 40, +}; + static bool ts_lex(TSLexer *lexer, TSStateId state) { START_LEXER(); eof = lexer->eof(lexer); @@ -1616,6 +1660,7 @@ extern const TSLanguage *tree_sitter_reprolang(void) { .lex_fn = ts_lex, .keyword_lex_fn = ts_lex_keywords, .keyword_capture_token = sym_workspace_identifier, + .primary_state_ids = ts_primary_state_ids, }; return &language; } diff --git a/dev/proto-generate-in-docker.sh b/dev/build-docker-environment.sh similarity index 61% rename from dev/proto-generate-in-docker.sh rename to dev/build-docker-environment.sh index f6615803..c85d9d0e 100755 --- a/dev/proto-generate-in-docker.sh +++ b/dev/build-docker-environment.sh @@ -6,5 +6,4 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME="sourcegraph/scip-bindings-env" -docker build . -t $IMAGE_NAME -f Dockerfile.bindings -docker run -v $(pwd):/src $IMAGE_NAME +docker build . -t $IMAGE_NAME -f Dockerfile.bindings && echo $IMAGE_NAME diff --git a/dev/generate-all-in-docker.sh b/dev/generate-all-in-docker.sh new file mode 100755 index 00000000..c0cfb2ed --- /dev/null +++ b/dev/generate-all-in-docker.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +cd "$(dirname "${BASH_SOURCE[0]}")/.." 
# cd to repo root dir + +IMAGE_NAME=$(./dev/build-docker-environment.sh) + +docker run -it -v $(pwd):/src $IMAGE_NAME + diff --git a/yarn.lock b/yarn.lock index 62cbfefd..093aa2a9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -27,10 +27,10 @@ protoc-gen-ts@0.8.6: resolved "https://registry.yarnpkg.com/protoc-gen-ts/-/protoc-gen-ts-0.8.6.tgz#e789a6fc3fbe09bdc119acecc349b9554ec5940e" integrity sha512-66oeorGy4QBvYjQGd/gaeOYyFqKyRmRgTpofmnw8buMG0P7A0jQjoKSvKJz5h5tNUaVkIzvGBUTRVGakrhhwpA== -tree-sitter-cli@^0.20.4: - version "0.20.6" - resolved "https://registry.yarnpkg.com/tree-sitter-cli/-/tree-sitter-cli-0.20.6.tgz#2a7202190d7bd64e112b451f94573dbe40a04f04" - integrity sha512-tjbAeuGSMhco/EnsThjWkQbDIYMDmdkWsTPsa/NJAW7bjaki9P7oM9TkLxfdlnm4LXd1wR5wVSM2/RTLtZbm6A== +tree-sitter-cli@^0.20.8: + version "0.20.8" + resolved "https://registry.yarnpkg.com/tree-sitter-cli/-/tree-sitter-cli-0.20.8.tgz#06a81cea8d6d82f93d67eed7d28b6bc04a4a8916" + integrity sha512-XjTcS3wdTy/2cc/ptMLc/WRyOLECRYcMTrSWyhZnj1oGSOWbHLTklgsgRICU3cPfb0vy+oZCC33M43u6R1HSCA== typescript@^4.9.0: version "4.9.5" From f45a87dc89c817943f26e89bfe45faa43d6bbb66 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:43:46 +0100 Subject: [PATCH 08/25] Move dockerfile to ./dev, update .dockerignore --- .dockerignore | 30 ++++++++++++++++++- Development.md | 15 ++++------ .../Dockerfile.bindings | 0 dev/build-docker-environment.sh | 2 +- 4 files changed, 35 insertions(+), 12 deletions(-) rename Dockerfile.bindings => dev/Dockerfile.bindings (100%) diff --git a/.dockerignore b/.dockerignore index cf709889..8c2bc102 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,29 @@ -**/node_modules +# This file should be kept in sync with .gitignore + +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Editor directories +.idea/ +.vscode/ + +# Test binary, built with `go test -c` +*.test + +# CLI binary +/scip + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +**/node_modules/ +.bin/ +**/target/ + +# Dependency directories (remove the comment below to include it) +# vendor/ +dist-newstyle/ diff --git a/Development.md b/Development.md index 11338fed..61e72096 100644 --- a/Development.md +++ b/Development.md @@ -28,18 +28,13 @@ 1. Regenerating definitions after changing the schema in [scip.proto](./scip.proto). - If you have **asdf** setup on your machine, you can use the binding generation script directly: + `./dev/generate-all-in-docker.sh` - ``` - ./dev/proto-generate.sh - ``` - - For the Haskell bindings, see `bindings/haskell/README.md`. + We provide a script that sets up the correct build environment in Docker + and runs the necessary regeneration steps. - You can also run the same script in a pre-configured Docker environment, - by running `./dev/generate-all-in-docker.sh` - this command will - build the environment and generate both the proto bindings, and the reprolang - parser generation. The only dependency you need is Docker. + Both the proto bindings and reprolang parser are generated. + The only dependency you need is Docker. 2. Regenerating snapshots after making changes to the CLI. 
``` diff --git a/Dockerfile.bindings b/dev/Dockerfile.bindings similarity index 100% rename from Dockerfile.bindings rename to dev/Dockerfile.bindings diff --git a/dev/build-docker-environment.sh b/dev/build-docker-environment.sh index c85d9d0e..58905e1e 100755 --- a/dev/build-docker-environment.sh +++ b/dev/build-docker-environment.sh @@ -6,4 +6,4 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME="sourcegraph/scip-bindings-env" -docker build . -t $IMAGE_NAME -f Dockerfile.bindings && echo $IMAGE_NAME +docker build . -t $IMAGE_NAME -f ./dev/Dockerfile.bindings && echo $IMAGE_NAME From 55f0f8ebfe3085417f26a5e4a31aeaa22a8d1a72 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:47:56 +0100 Subject: [PATCH 09/25] Unify workflows that test generated code --- .../{protobuf.yml => protobuf-reprolang.yml} | 8 +++++--- .github/workflows/reprolang.yml | 18 ------------------ 2 files changed, 5 insertions(+), 21 deletions(-) rename .github/workflows/{protobuf.yml => protobuf-reprolang.yml} (67%) delete mode 100644 .github/workflows/reprolang.yml diff --git a/.github/workflows/protobuf.yml b/.github/workflows/protobuf-reprolang.yml similarity index 67% rename from .github/workflows/protobuf.yml rename to .github/workflows/protobuf-reprolang.yml index c17067ea..cfe5d671 100644 --- a/.github/workflows/protobuf.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -1,4 +1,4 @@ -name: Protobuf +name: Generated code is up to date on: pull_request: @@ -12,10 +12,12 @@ on: - 'dev/proto-generate.sh' - 'dev/proto-generate-in-docker.sh' - 'Dockerfile.bindings' + - 'cmd/scip/tests/reprolang/**' jobs: - protoc-gen-up-to-date: + gen-up-to-date: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - run: ./dev/proto-generate-in-docker.sh + - run: ./dev/generate-all-in-docker.sh + - run: git diff --exit-code diff --git a/.github/workflows/reprolang.yml b/.github/workflows/reprolang.yml deleted file mode 100644 index e75ca9c5..00000000 --- a/.github/workflows/reprolang.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Reprolang - -on: - pull_request: - paths: - - '.github/workflows/**' - - 'reprolang/**' - -jobs: - reprolang-parser-up-to-date: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: ./.github/actions/asdf - with: - js: true - - run: ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh - - run: git diff --exit-code From 9f23ec289cf4159e846667b479b9f2126f853fc4 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Tue, 25 Jun 2024 14:56:48 +0100 Subject: [PATCH 10/25] Not a tty --- dev/generate-all-in-docker.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/generate-all-in-docker.sh b/dev/generate-all-in-docker.sh index c0cfb2ed..9e502fee 100755 --- a/dev/generate-all-in-docker.sh +++ b/dev/generate-all-in-docker.sh @@ -6,5 +6,5 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." 
# cd to repo root dir IMAGE_NAME=$(./dev/build-docker-environment.sh) -docker run -it -v $(pwd):/src $IMAGE_NAME +docker run -v $(pwd):/src $IMAGE_NAME From 49b58d4b421e771a656cd9c5e5585d27e17d05a7 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 10:12:34 +0100 Subject: [PATCH 11/25] Chown/chmod the target folder --- dev/Dockerfile.bindings | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dev/Dockerfile.bindings b/dev/Dockerfile.bindings index 5fbc3d4d..899e0e4d 100644 --- a/dev/Dockerfile.bindings +++ b/dev/Dockerfile.bindings @@ -40,6 +40,10 @@ RUN asdf install && \ WORKDIR /src +# We're doing it to circumvent issues with Yarn not being able to write to +# node_modules under this root (in some circumstances, on CI only) +RUN chown asdf:asdf /src && chmod 0666 /src + CMD ["bash", "-c", "./dev/proto-generate.sh && ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh"] From 611cc99f0aeff4e6399fef6fc82014e5836f946d Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 10:30:02 +0100 Subject: [PATCH 12/25] Publish image on main for remote cache --- .github/workflows/build-env-docker.yml | 66 ++++++++++++++++++++++++++ dev/Dockerfile.bindings | 2 +- dev/build-docker-environment.sh | 2 +- 3 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/build-env-docker.yml diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml new file mode 100644 index 00000000..0a52a102 --- /dev/null +++ b/.github/workflows/build-env-docker.yml @@ -0,0 +1,66 @@ +# If this workflow stops working, first consult the documentation page where it was copied from. +# https://docs.github.com/en/actions/publishing-packages/publishing-docker-images#publishing-images-to-github-packages + + +name: Create and publish a Docker image for bindings build environment + +# Configures this workflow to run every time a change is pushed to the branch called `release`. +on: + push: + branches: ['main'] + +# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. +jobs: + build-and-push-image: + runs-on: ubuntu-latest + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: write + attestations: write + id-token: write + # + steps: + - name: Checkout repository + uses: actions/checkout@v4 + # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. 
+ - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} + + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + - name: Build and push Docker image + id: push + uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)." + * name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + with: + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}} + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: true + diff --git a/dev/Dockerfile.bindings b/dev/Dockerfile.bindings index 899e0e4d..82b6cc06 100644 --- a/dev/Dockerfile.bindings +++ b/dev/Dockerfile.bindings @@ -42,7 +42,7 @@ WORKDIR /src # We're doing it to circumvent issues with Yarn not being able to write to # node_modules under this root (in some circumstances, on CI only) -RUN chown asdf:asdf /src && chmod 0666 /src +RUN chown -R asdf:asdf /src && chmod -R 0666 /src CMD ["bash", "-c", "./dev/proto-generate.sh && ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh"] diff --git a/dev/build-docker-environment.sh b/dev/build-docker-environment.sh index 58905e1e..3aa6d86c 100755 --- a/dev/build-docker-environment.sh +++ b/dev/build-docker-environment.sh @@ -6,4 +6,4 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME="sourcegraph/scip-bindings-env" -docker build . -t $IMAGE_NAME -f ./dev/Dockerfile.bindings && echo $IMAGE_NAME +docker build . -t $IMAGE_NAME -f ./dev/Dockerfile.bindings --cache-from ghcr.io/sourcegraph/scip:latest && echo $IMAGE_NAME From 82f944bf0e40df4b494caaf673e20503b7320337 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 10:33:48 +0100 Subject: [PATCH 13/25] formatting and test publishing --- .github/workflows/build-env-docker.yml | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index 0a52a102..df49ad80 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -1,13 +1,12 @@ # If this workflow stops working, first consult the documentation page where it was copied from. 
# https://docs.github.com/en/actions/publishing-packages/publishing-docker-images#publishing-images-to-github-packages - name: Create and publish a Docker image for bindings build environment # Configures this workflow to run every time a change is pushed to the branch called `release`. on: push: - branches: ['main'] + branches: ['docker-for-bindings-generator'] # SET to main # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. env: @@ -24,18 +23,19 @@ jobs: packages: write attestations: write id-token: write - # steps: - name: Checkout repository uses: actions/checkout@v4 - # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. + + # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. - name: Log in to the Container registry uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. + + # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 @@ -44,9 +44,9 @@ jobs: tags: | type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} - # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. - # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. - # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. + # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. + # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. + # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. 
- name: Build and push Docker image id: push uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 @@ -55,12 +55,11 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - - # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)." - * name: Generate artifact attestation + + # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)." + - name: Generate artifact attestation uses: actions/attest-build-provenance@v1 with: subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}} subject-digest: ${{ steps.push.outputs.digest }} push-to-registry: true - From f52e6ecac4817ddbf640f13f15760c2708387a49 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 10:35:20 +0100 Subject: [PATCH 14/25] WIP --- .github/workflows/build-env-docker.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index df49ad80..904eac3f 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -6,7 +6,7 @@ name: Create and publish a Docker image for bindings build environment # Configures this workflow to run every time a change is pushed to the branch called `release`. on: push: - branches: ['docker-for-bindings-generator'] # SET to main + branches: ['docker-for-bindings-generator'] # TODO: set to main # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. env: @@ -42,7 +42,7 @@ jobs: with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | - type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} + type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'docker-for-bindings-generator') }} # TODO: set to main # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. From ff59c0e885c6fedd85124e3d1c51670183d1dcf2 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 10:36:52 +0100 Subject: [PATCH 15/25] WIP --- .github/workflows/build-env-docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index 904eac3f..28d187f7 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -51,7 +51,7 @@ jobs: id: push uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 with: - context: . 
+ file: dev/Dockerfile.bindings push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} From 45cdad961511c9897ebd83a38a673305b8687ecd Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 11:44:26 +0100 Subject: [PATCH 16/25] Hack around node_modules issue by moving them to a non-mounted location --- dev/Dockerfile.bindings | 6 +----- dev/docker-entrypoint.sh | 16 ++++++++++++++++ dev/generate-all-in-docker.sh | 2 +- package.json | 6 +++--- 4 files changed, 21 insertions(+), 9 deletions(-) create mode 100755 dev/docker-entrypoint.sh diff --git a/dev/Dockerfile.bindings b/dev/Dockerfile.bindings index 82b6cc06..c474c8a0 100644 --- a/dev/Dockerfile.bindings +++ b/dev/Dockerfile.bindings @@ -40,10 +40,6 @@ RUN asdf install && \ WORKDIR /src -# We're doing it to circumvent issues with Yarn not being able to write to -# node_modules under this root (in some circumstances, on CI only) -RUN chown -R asdf:asdf /src && chmod -R 0666 /src - -CMD ["bash", "-c", "./dev/proto-generate.sh && ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh"] +CMD ["./dev/docker-entrypoint.sh"] diff --git a/dev/docker-entrypoint.sh b/dev/docker-entrypoint.sh new file mode 100755 index 00000000..fb1ccb64 --- /dev/null +++ b/dev/docker-entrypoint.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +# We're explicitly overriding the node_modules location used by Yarn because +# of issues when mounting node_modules inside the docker container AND running +# it under non-root user. + +echo "--modules-folder $HOME/my-node-modules" > .yarnrc + +export PATH="$PATH:$HOME/my-node-modules/.bin" +export NODE_PATH="$HOME/my-node-modules" + +./dev/proto-generate.sh + +./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh diff --git a/dev/generate-all-in-docker.sh b/dev/generate-all-in-docker.sh index 9e502fee..dabed1de 100755 --- a/dev/generate-all-in-docker.sh +++ b/dev/generate-all-in-docker.sh @@ -6,5 +6,5 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME=$(./dev/build-docker-environment.sh) -docker run -v $(pwd):/src $IMAGE_NAME +docker run -v $(pwd):/src -v /src/node_modules $IMAGE_NAME diff --git a/package.json b/package.json index 2e04d82e..bc5f656b 100644 --- a/package.json +++ b/package.json @@ -6,9 +6,9 @@ "author": "SCIP authors", "license": "MIT", "scripts": { - "build": "./node_modules/.bin/tsc --build --force bindings/typescript", - "prettier": "./node_modules/.bin/prettier --write --list-different '**/*.{ts,js(on)?,md,yml}'", - "prettier-check": "./node_modules/.bin/prettier --check '**/*.{ts,js(on)?,md,yml}'" + "build": "tsc --build --force bindings/typescript", + "prettier": "prettier --write --list-different '**/*.{ts,js(on)?,md,yml}'", + "prettier-check": "prettier --check '**/*.{ts,js(on)?,md,yml}'" }, "workspaces": { "packages": [ From 70dd331d01ff3c1a40b3d0cb27f31c690142f4ce Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 11:53:53 +0100 Subject: [PATCH 17/25] Can't write to .yarnrc either... --- dev/docker-entrypoint.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dev/docker-entrypoint.sh b/dev/docker-entrypoint.sh index fb1ccb64..bf14be11 100755 --- a/dev/docker-entrypoint.sh +++ b/dev/docker-entrypoint.sh @@ -6,7 +6,10 @@ set -xeuo pipefail # of issues when mounting node_modules inside the docker container AND running # it under non-root user. 
-echo "--modules-folder $HOME/my-node-modules" > .yarnrc +# Symlinking won't work: https://github.com/yarnpkg/yarn/issues/8079#issuecomment-622817604 +# node_modules mounting is a known issue: https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder + +echo "--modules-folder $HOME/my-node-modules" > $HOME/.yarnrc export PATH="$PATH:$HOME/my-node-modules/.bin" export NODE_PATH="$HOME/my-node-modules" From a96164acae22a09aa8c5349f2b65dbf38a56797f Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 12:06:26 +0100 Subject: [PATCH 18/25] pls make it stahp --- .github/workflows/build-env-docker.yml | 4 ++-- .github/workflows/protobuf-reprolang.yml | 7 +++++++ dev/docker-entrypoint.sh | 10 +++++++--- dev/generate-all-in-docker.sh | 2 +- 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index 28d187f7..36dec76b 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -6,7 +6,7 @@ name: Create and publish a Docker image for bindings build environment # Configures this workflow to run every time a change is pushed to the branch called `release`. on: push: - branches: ['docker-for-bindings-generator'] # TODO: set to main + branches: ['main'] # TODO: set to main # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. env: @@ -42,7 +42,7 @@ jobs: with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | - type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'docker-for-bindings-generator') }} # TODO: set to main + type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} # TODO: set to main # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. 
diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml index cfe5d671..0eb426b4 100644 --- a/.github/workflows/protobuf-reprolang.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -19,5 +19,12 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - run: ./dev/generate-all-in-docker.sh - run: git diff --exit-code diff --git a/dev/docker-entrypoint.sh b/dev/docker-entrypoint.sh index bf14be11..452074e1 100755 --- a/dev/docker-entrypoint.sh +++ b/dev/docker-entrypoint.sh @@ -9,10 +9,14 @@ set -xeuo pipefail # Symlinking won't work: https://github.com/yarnpkg/yarn/issues/8079#issuecomment-622817604 # node_modules mounting is a known issue: https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder -echo "--modules-folder $HOME/my-node-modules" > $HOME/.yarnrc +CUSTOM_NODE_MODULES="$HOME/my-node-modules" -export PATH="$PATH:$HOME/my-node-modules/.bin" -export NODE_PATH="$HOME/my-node-modules" +mkdir -p $CUSTOM_NODE_MODULES && chmod 0777 $CUSTOM_NODE_MODULES && chown -R asdf:asdf $CUSTOM_NODE_MODULES + +echo "--modules-folder $CUSTOM_NODE_MODULES" > $HOME/.yarnrc + +export PATH="$PATH:$CUSTOM_NODE_MODULES/.bin" +export NODE_PATH="$CUSTOM_NODE_MODULES" ./dev/proto-generate.sh diff --git a/dev/generate-all-in-docker.sh b/dev/generate-all-in-docker.sh index dabed1de..9e502fee 100755 --- a/dev/generate-all-in-docker.sh +++ b/dev/generate-all-in-docker.sh @@ -6,5 +6,5 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME=$(./dev/build-docker-environment.sh) -docker run -v $(pwd):/src -v /src/node_modules $IMAGE_NAME +docker run -v $(pwd):/src $IMAGE_NAME From 3b25b36f3735136b7570143d4454031d141257c5 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 12:25:20 +0100 Subject: [PATCH 19/25] WIP --- .github/workflows/protobuf-reprolang.yml | 5 ++++- dev/Dockerfile.bindings | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml index 0eb426b4..e3eee83c 100644 --- a/.github/workflows/protobuf-reprolang.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -26,5 +26,8 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - run: ./dev/generate-all-in-docker.sh + - run: docker pull ghcr.io/sourcegraph/scip:latest || echo "no suitable cache" + + - run: chown -R 1001:1001 . 
&& ./dev/generate-all-in-docker.sh + - run: git diff --exit-code diff --git a/dev/Dockerfile.bindings b/dev/Dockerfile.bindings index c474c8a0..c5007db0 100644 --- a/dev/Dockerfile.bindings +++ b/dev/Dockerfile.bindings @@ -7,7 +7,8 @@ RUN apt-get update -q && apt-get install -y git curl xz-utils tar gpg build-esse libncursesw5-dev tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev \ libffi8 libgmp-dev libgmp10 libncurses-dev libncurses5 libtinfo5 pkg-config -RUN adduser --shell /bin/bash --home /asdf --disabled-password asdf +RUN groupadd -g 1001 asdf +RUN adduser -u 1001 --gid 1001 --shell /bin/bash --home /asdf --disabled-password asdf ENV PATH="${PATH}:/asdf/.asdf/shims:/asdf/.asdf/bin" USER asdf From 2e7f8237e6ee5612eb07d89e8ff2c75262af63cd Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 12:30:22 +0100 Subject: [PATCH 20/25] WIP --- .github/workflows/protobuf-reprolang.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml index e3eee83c..23c1d33c 100644 --- a/.github/workflows/protobuf-reprolang.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -28,6 +28,6 @@ jobs: - run: docker pull ghcr.io/sourcegraph/scip:latest || echo "no suitable cache" - - run: chown -R 1001:1001 . && ./dev/generate-all-in-docker.sh + - run: sudo chown -R 1001:1001 . && ./dev/generate-all-in-docker.sh - run: git diff --exit-code From 2e2a60c0d7a209c06e436ae1e27dc6a8034ab8a1 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 12:43:57 +0100 Subject: [PATCH 21/25] Cleanup --- .github/workflows/build-env-docker.yml | 4 ++++ .github/workflows/protobuf-reprolang.yml | 5 ++++- dev/docker-entrypoint.sh | 16 ---------------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index 36dec76b..1f2c929b 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -27,6 +27,9 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. - name: Log in to the Container registry uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 @@ -55,6 +58,7 @@ jobs: push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)." - name: Generate artifact attestation diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml index 23c1d33c..654ad22e 100644 --- a/.github/workflows/protobuf-reprolang.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -28,6 +28,9 @@ jobs: - run: docker pull ghcr.io/sourcegraph/scip:latest || echo "no suitable cache" - - run: sudo chown -R 1001:1001 . 
&& ./dev/generate-all-in-docker.sh + - run: | + # We're changing the owner of the checkout folder to a particular user id, + # matching the user id of `asdf` user we create inside the docker container. + sudo chown -R 1001:1001 . && ./dev/generate-all-in-docker.sh - run: git diff --exit-code diff --git a/dev/docker-entrypoint.sh b/dev/docker-entrypoint.sh index 452074e1..557e5cea 100755 --- a/dev/docker-entrypoint.sh +++ b/dev/docker-entrypoint.sh @@ -2,22 +2,6 @@ set -xeuo pipefail -# We're explicitly overriding the node_modules location used by Yarn because -# of issues when mounting node_modules inside the docker container AND running -# it under non-root user. - -# Symlinking won't work: https://github.com/yarnpkg/yarn/issues/8079#issuecomment-622817604 -# node_modules mounting is a known issue: https://stackoverflow.com/questions/29181032/add-a-volume-to-docker-but-exclude-a-sub-folder - -CUSTOM_NODE_MODULES="$HOME/my-node-modules" - -mkdir -p $CUSTOM_NODE_MODULES && chmod 0777 $CUSTOM_NODE_MODULES && chown -R asdf:asdf $CUSTOM_NODE_MODULES - -echo "--modules-folder $CUSTOM_NODE_MODULES" > $HOME/.yarnrc - -export PATH="$PATH:$CUSTOM_NODE_MODULES/.bin" -export NODE_PATH="$CUSTOM_NODE_MODULES" - ./dev/proto-generate.sh ./cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh From 66c03fa824a938369a8c96067bd09049fd07fc8d Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 14:15:00 +0100 Subject: [PATCH 22/25] Ensure we generate TS code for ABI v13 --- .../reprolang/generate-tree-sitter-parser.sh | 2 +- cmd/scip/tests/reprolang/src/parser.c | 47 +------------------ 2 files changed, 2 insertions(+), 47 deletions(-) diff --git a/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh b/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh index 909a2cf0..d216d952 100755 --- a/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh +++ b/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh @@ -3,5 +3,5 @@ set -eux SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" cd "$SCRIPT_DIR" yarn install -./node_modules/.bin/tree-sitter generate +./node_modules/.bin/tree-sitter generate --abi 13 yarn --cwd ../../../.. 
run prettier
diff --git a/cmd/scip/tests/reprolang/src/parser.c b/cmd/scip/tests/reprolang/src/parser.c
index 9935f8ff..e69f026c 100644
--- a/cmd/scip/tests/reprolang/src/parser.c
+++ b/cmd/scip/tests/reprolang/src/parser.c
@@ -5,7 +5,7 @@
 #pragma GCC diagnostic ignored "-Wmissing-field-initializers"
 #endif
 
-#define LANGUAGE_VERSION 14
+#define LANGUAGE_VERSION 13
 #define STATE_COUNT 41
 #define LARGE_STATE_COUNT 2
 #define SYMBOL_COUNT 33
@@ -327,50 +327,6 @@ static const uint16_t ts_non_terminal_alias_map[] = {
   0,
 };
 
-static const TSStateId ts_primary_state_ids[STATE_COUNT] = {
-  [0] = 0,
-  [1] = 1,
-  [2] = 2,
-  [3] = 3,
-  [4] = 4,
-  [5] = 5,
-  [6] = 6,
-  [7] = 7,
-  [8] = 8,
-  [9] = 9,
-  [10] = 10,
-  [11] = 11,
-  [12] = 12,
-  [13] = 13,
-  [14] = 14,
-  [15] = 15,
-  [16] = 16,
-  [17] = 17,
-  [18] = 18,
-  [19] = 19,
-  [20] = 20,
-  [21] = 21,
-  [22] = 22,
-  [23] = 23,
-  [24] = 24,
-  [25] = 25,
-  [26] = 26,
-  [27] = 27,
-  [28] = 28,
-  [29] = 29,
-  [30] = 30,
-  [31] = 31,
-  [32] = 32,
-  [33] = 33,
-  [34] = 34,
-  [35] = 35,
-  [36] = 36,
-  [37] = 37,
-  [38] = 38,
-  [39] = 39,
-  [40] = 40,
-};
-
 static bool ts_lex(TSLexer *lexer, TSStateId state) {
   START_LEXER();
   eof = lexer->eof(lexer);
@@ -1660,7 +1616,6 @@ extern const TSLanguage *tree_sitter_reprolang(void) {
     .lex_fn = ts_lex,
     .keyword_lex_fn = ts_lex_keywords,
     .keyword_capture_token = sym_workspace_identifier,
-    .primary_state_ids = ts_primary_state_ids,
   };
   return &language;
 }

From 795aeda373630274e15a7abf662613ad234910c7 Mon Sep 17 00:00:00 2001
From: Anton Sviridov
Date: Wed, 26 Jun 2024 14:42:37 +0100
Subject: [PATCH 23/25] Upgrade TS to 0.21.0

This is the minimal version I could find that fits all criteria:

1. Deterministic output: https://github.com/tree-sitter/tree-sitter/issues/2755
2. Published for ARM64
3. Tests for reprolang pass
---
 .github/workflows/protobuf-reprolang.yml     |   3 +-
 cmd/scip/tests/reprolang/package.json        |   2 +-
 cmd/scip/tests/reprolang/src/parser.c        | 358 +++++++-----------
 .../tests/reprolang/src/tree_sitter/parser.h |  16 +-
 yarn.lock                                    |   8 +-
 5 files changed, 160 insertions(+), 227 deletions(-)

diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml
index 654ad22e..8ec0b26e 100644
--- a/.github/workflows/protobuf-reprolang.yml
+++ b/.github/workflows/protobuf-reprolang.yml
@@ -28,7 +28,8 @@ jobs:
 
       - run: docker pull ghcr.io/sourcegraph/scip:latest || echo "no suitable cache"
 
-      - run: |
+      - name: Regenerate protobuf bindings and reprolang parser
+        run: |
          # We're changing the owner of the checkout folder to a particular user id,
          # matching the user id of `asdf` user we create inside the docker container.
          sudo chown -R 1001:1001 . 
&& ./dev/generate-all-in-docker.sh diff --git a/cmd/scip/tests/reprolang/package.json b/cmd/scip/tests/reprolang/package.json index f4d941cf..dcf4cd7f 100644 --- a/cmd/scip/tests/reprolang/package.json +++ b/cmd/scip/tests/reprolang/package.json @@ -12,6 +12,6 @@ "nan": "^2.15.0" }, "devDependencies": { - "tree-sitter-cli": "^0.20.8" + "tree-sitter-cli": "0.21.0" } } diff --git a/cmd/scip/tests/reprolang/src/parser.c b/cmd/scip/tests/reprolang/src/parser.c index e69f026c..221f9c32 100644 --- a/cmd/scip/tests/reprolang/src/parser.c +++ b/cmd/scip/tests/reprolang/src/parser.c @@ -1,4 +1,4 @@ -#include +#include "tree_sitter/parser.h" #if defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push @@ -16,7 +16,7 @@ #define MAX_ALIAS_SEQUENCE_LENGTH 5 #define PRODUCTION_ID_COUNT 9 -enum { +enum ts_symbol_identifiers { sym_workspace_identifier = 1, anon_sym_LF = 2, anon_sym_definition = 3, @@ -258,7 +258,7 @@ static const TSSymbolMetadata ts_symbol_metadata[] = { }, }; -enum { +enum ts_field_identifiers { field_descriptors = 1, field_docstring = 2, field_forward_definition = 3, @@ -336,23 +336,19 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { if (lookahead == '#') ADVANCE(51); if (lookahead == 'd') ADVANCE(57); if (lookahead == 'r') ADVANCE(58); - if (lookahead == '\t' || - lookahead == '\n' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(0) if (lookahead != 0) ADVANCE(82); END_STATE(); case 1: if (lookahead == '\n') ADVANCE(43); - if (lookahead == '\t' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(1) if (lookahead != 0) ADVANCE(82); END_STATE(); case 2: if (lookahead == '\n') ADVANCE(43); - if (lookahead == '\t' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(2) END_STATE(); case 3: @@ -468,9 +464,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { if (lookahead == 't') ADVANCE(21); END_STATE(); case 40: - if (lookahead == '\t' || - lookahead == '\n' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(40) if (lookahead != 0) ADVANCE(82); END_STATE(); @@ -479,9 +473,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { if (lookahead == '#') ADVANCE(50); if (lookahead == 'd') ADVANCE(8); if (lookahead == 'r') ADVANCE(9); - if (lookahead == '\t' || - lookahead == '\n' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(41) END_STATE(); case 42: @@ -497,9 +489,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { case 45: ACCEPT_TOKEN(anon_sym_definition); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 46: @@ -508,9 +498,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { case 47: ACCEPT_TOKEN(anon_sym_reference); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 48: @@ -519,9 +507,7 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { case 49: ACCEPT_TOKEN(anon_sym_relationships); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 50: @@ -532,14 +518,12 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { 
ACCEPT_TOKEN(anon_sym_POUND); if (lookahead == ' ') ADVANCE(7); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r') ADVANCE(82); + (lookahead < '\t' || '\r' < lookahead)) ADVANCE(82); END_STATE(); case 52: ACCEPT_TOKEN(aux_sym_comment_token1); if (lookahead == '\t' || - lookahead == '\r' || + (11 <= lookahead && lookahead <= '\r') || lookahead == ' ') ADVANCE(52); if (lookahead != 0 && lookahead != '\n') ADVANCE(53); @@ -556,72 +540,56 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'a') ADVANCE(80); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 56: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'c') ADVANCE(60); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 57: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'e') ADVANCE(62); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 58: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'e') ADVANCE(63); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 59: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'e') ADVANCE(77); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 60: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'e') ADVANCE(47); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 61: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'e') ADVANCE(70); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 62: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'f') ADVANCE(65); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 63: @@ -629,179 +597,139 @@ static bool ts_lex(TSLexer *lexer, TSStateId state) { if (lookahead == 'f') ADVANCE(59); if (lookahead == 'l') ADVANCE(55); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 64: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'h') ADVANCE(67); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 65: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'i') ADVANCE(73); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 66: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead 
== 'i') ADVANCE(74); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 67: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'i') ADVANCE(76); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 68: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'i') ADVANCE(75); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 69: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'i') ADVANCE(81); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 70: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'n') ADVANCE(56); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 71: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'n') ADVANCE(78); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 72: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'n') ADVANCE(45); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 73: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'n') ADVANCE(69); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 74: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'o') ADVANCE(71); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 75: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'o') ADVANCE(72); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 76: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'p') ADVANCE(79); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 77: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 'r') ADVANCE(61); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 78: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 's') ADVANCE(64); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 79: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 's') ADVANCE(49); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || 
'\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 80: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 't') ADVANCE(66); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 81: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead == 't') ADVANCE(68); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); case 82: ACCEPT_TOKEN(sym_workspace_identifier); if (lookahead != 0 && - lookahead != '\t' && - lookahead != '\n' && - lookahead != '\r' && + (lookahead < '\t' || '\r' < lookahead) && lookahead != ' ') ADVANCE(82); END_STATE(); default: @@ -820,9 +748,7 @@ static bool ts_lex_keywords(TSLexer *lexer, TSStateId state) { if (lookahead == 'i') ADVANCE(4); if (lookahead == 'r') ADVANCE(5); if (lookahead == 't') ADVANCE(6); - if (lookahead == '\t' || - lookahead == '\n' || - lookahead == '\r' || + if (('\t' <= lookahead && lookahead <= '\r') || lookahead == ' ') SKIP(0) END_STATE(); case 1: @@ -1058,14 +984,14 @@ static const TSLexMode ts_lex_modes[STATE_COUNT] = { [26] = {.lex_state = 40}, [27] = {.lex_state = 40}, [28] = {.lex_state = 40}, - [29] = {.lex_state = 41}, - [30] = {.lex_state = 52}, - [31] = {.lex_state = 40}, + [29] = {.lex_state = 40}, + [30] = {.lex_state = 2}, + [31] = {.lex_state = 52}, [32] = {.lex_state = 2}, - [33] = {.lex_state = 2}, - [34] = {.lex_state = 2}, + [33] = {.lex_state = 40}, + [34] = {.lex_state = 41}, [35] = {.lex_state = 2}, - [36] = {.lex_state = 40}, + [36] = {.lex_state = 2}, [37] = {.lex_state = 2}, [38] = {.lex_state = 2}, [39] = {.lex_state = 0}, @@ -1091,11 +1017,11 @@ static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { [1] = { [sym_source_file] = STATE(39), [sym__statement] = STATE(2), - [sym_definition_statement] = STATE(38), - [sym_reference_statement] = STATE(38), - [sym_relationships_statement] = STATE(38), - [sym_comment] = STATE(38), - [sym_docstring] = STATE(32), + [sym_definition_statement] = STATE(30), + [sym_reference_statement] = STATE(30), + [sym_relationships_statement] = STATE(30), + [sym_comment] = STATE(30), + [sym_docstring] = STATE(37), [aux_sym_source_file_repeat1] = STATE(2), [ts_builtin_sym_end] = ACTIONS(3), [anon_sym_definition] = ACTIONS(5), @@ -1120,12 +1046,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_POUNDdocstring_COLON, ACTIONS(15), 1, ts_builtin_sym_end, - STATE(32), 1, + STATE(37), 1, sym_docstring, STATE(3), 2, sym__statement, aux_sym_source_file_repeat1, - STATE(38), 4, + STATE(30), 4, sym_definition_statement, sym_reference_statement, sym_relationships_statement, @@ -1143,12 +1069,12 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_POUND, ACTIONS(31), 1, anon_sym_POUNDdocstring_COLON, - STATE(32), 1, + STATE(37), 1, sym_docstring, STATE(3), 2, sym__statement, aux_sym_source_file_repeat1, - STATE(38), 4, + STATE(30), 4, sym_definition_statement, sym_reference_statement, sym_relationships_statement, @@ -1219,7 +1145,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_references, ACTIONS(60), 1, anon_sym_LF, - STATE(10), 5, + STATE(11), 5, sym__definition_relations, sym_implementation_relation, sym_type_definition_relation, @@ -1234,37 +1160,37 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_references, ACTIONS(62), 1, anon_sym_LF, - STATE(9), 5, + STATE(10), 5, 
sym__definition_relations, sym_implementation_relation, sym_type_definition_relation, sym_references_relation, aux_sym_definition_statement_repeat1, [179] = 5, - ACTIONS(64), 1, - anon_sym_LF, - ACTIONS(66), 1, + ACTIONS(36), 1, anon_sym_implements, - ACTIONS(69), 1, + ACTIONS(38), 1, anon_sym_type_defines, - ACTIONS(72), 1, + ACTIONS(40), 1, anon_sym_references, - STATE(9), 5, + ACTIONS(64), 1, + anon_sym_LF, + STATE(8), 5, sym__definition_relations, sym_implementation_relation, sym_type_definition_relation, sym_references_relation, aux_sym_definition_statement_repeat1, [199] = 5, - ACTIONS(36), 1, + ACTIONS(66), 1, + anon_sym_LF, + ACTIONS(68), 1, anon_sym_implements, - ACTIONS(38), 1, + ACTIONS(71), 1, anon_sym_type_defines, - ACTIONS(40), 1, + ACTIONS(74), 1, anon_sym_references, - ACTIONS(75), 1, - anon_sym_LF, - STATE(9), 5, + STATE(10), 5, sym__definition_relations, sym_implementation_relation, sym_type_definition_relation, @@ -1279,7 +1205,7 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_references, ACTIONS(77), 1, anon_sym_LF, - STATE(8), 5, + STATE(10), 5, sym__definition_relations, sym_implementation_relation, sym_type_definition_relation, @@ -1309,9 +1235,9 @@ static const uint16_t ts_small_parse_table[] = { anon_sym_forward_definition, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, - STATE(37), 1, + STATE(38), 1, sym_identifier, [276] = 2, ACTIONS(93), 1, @@ -1366,97 +1292,97 @@ static const uint16_t ts_small_parse_table[] = { sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, - sym_global_identifier, - STATE(17), 1, + STATE(7), 1, sym_identifier, + STATE(16), 1, + sym_global_identifier, [349] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(11), 1, + STATE(6), 1, sym_identifier, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, [362] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, - STATE(18), 1, + STATE(19), 1, sym_identifier, [375] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, - sym_global_identifier, STATE(16), 1, + sym_global_identifier, + STATE(18), 1, sym_identifier, [388] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(6), 1, - sym_identifier, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, + STATE(17), 1, + sym_identifier, [401] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, - STATE(19), 1, + STATE(32), 1, sym_identifier, [414] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(13), 1, - sym_global_identifier, - STATE(33), 1, + STATE(9), 1, sym_identifier, + STATE(16), 1, + sym_global_identifier, [427] = 4, ACTIONS(87), 1, sym_workspace_identifier, ACTIONS(91), 1, anon_sym_global, - STATE(7), 1, - sym_identifier, - STATE(13), 1, + STATE(16), 1, sym_global_identifier, + STATE(20), 1, + sym_identifier, [440] = 1, ACTIONS(117), 1, - anon_sym_definition, + sym_workspace_identifier, [444] = 1, ACTIONS(119), 1, - aux_sym_comment_token1, + anon_sym_LF, [448] = 1, ACTIONS(121), 1, - sym_workspace_identifier, + aux_sym_comment_token1, [452] = 1, ACTIONS(123), 1, anon_sym_LF, [456] = 1, ACTIONS(125), 1, - anon_sym_LF, + sym_workspace_identifier, [460] = 1, ACTIONS(127), 1, - anon_sym_LF, + anon_sym_definition, [464] = 1, ACTIONS(129), 1, anon_sym_LF, 
[468] = 1, ACTIONS(131), 1, - sym_workspace_identifier, + anon_sym_LF, [472] = 1, ACTIONS(133), 1, anon_sym_LF, @@ -1517,68 +1443,68 @@ static const TSParseActionEntry ts_parse_actions[] = { [0] = {.entry = {.count = 0, .reusable = false}}, [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(), [3] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 0), - [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(28), + [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(21), [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(14), - [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(25), - [11] = {.entry = {.count = 1, .reusable = false}}, SHIFT(30), + [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(22), + [11] = {.entry = {.count = 1, .reusable = false}}, SHIFT(31), [13] = {.entry = {.count = 1, .reusable = true}}, SHIFT(40), [15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source_file, 1), [17] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), - [19] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(28), + [19] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(21), [22] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(14), - [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(25), - [28] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(30), + [25] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(22), + [28] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(31), [31] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_source_file_repeat1, 2), SHIFT_REPEAT(40), [34] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_relationships_statement, 3, .production_id = 4), - [36] = {.entry = {.count = 1, .reusable = false}}, SHIFT(26), + [36] = {.entry = {.count = 1, .reusable = false}}, SHIFT(28), [38] = {.entry = {.count = 1, .reusable = false}}, SHIFT(23), - [40] = {.entry = {.count = 1, .reusable = false}}, SHIFT(21), - [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(24), + [40] = {.entry = {.count = 1, .reusable = false}}, SHIFT(24), + [42] = {.entry = {.count = 1, .reusable = false}}, SHIFT(25), [44] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), - [46] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(26), + [46] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(28), [49] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(23), - [52] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(21), - [55] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(24), + [52] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(24), + [55] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_relationships_statement_repeat1, 2), SHIFT_REPEAT(25), [58] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_relationships_statement, 2, .production_id = 2), [60] 
= {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 2, .production_id = 2), [62] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 5, .production_id = 8), - [64] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_definition_statement_repeat1, 2), - [66] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(26), - [69] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(23), - [72] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(21), - [75] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 3, .production_id = 4), - [77] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 4, .production_id = 7), + [64] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 4, .production_id = 7), + [66] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_definition_statement_repeat1, 2), + [68] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(28), + [71] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(23), + [74] = {.entry = {.count = 2, .reusable = false}}, REDUCE(aux_sym_definition_statement_repeat1, 2), SHIFT_REPEAT(24), + [77] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_definition_statement, 3, .production_id = 4), [79] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym__statement, 2), [81] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym__statement, 2), - [83] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_identifier, 1, .production_id = 3), - [85] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_identifier, 1, .production_id = 3), + [83] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_global_identifier, 3, .production_id = 6), + [85] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_global_identifier, 3, .production_id = 6), [87] = {.entry = {.count = 1, .reusable = false}}, SHIFT(15), - [89] = {.entry = {.count = 1, .reusable = false}}, SHIFT(27), - [91] = {.entry = {.count = 1, .reusable = false}}, SHIFT(31), + [89] = {.entry = {.count = 1, .reusable = false}}, SHIFT(26), + [91] = {.entry = {.count = 1, .reusable = false}}, SHIFT(33), [93] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_identifier, 1, .production_id = 1), [95] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_identifier, 1, .production_id = 1), - [97] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_defined_by_relation, 2, .production_id = 2), - [99] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_defined_by_relation, 2, .production_id = 2), - [101] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_references_relation, 2, .production_id = 2), - [103] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_references_relation, 2, .production_id = 2), - [105] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_definition_relation, 2, .production_id = 2), - [107] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_type_definition_relation, 2, .production_id = 2), - [109] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_implementation_relation, 2, .production_id = 2), - [111] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_implementation_relation, 2, .production_id = 2), - [113] = 
{.entry = {.count = 1, .reusable = true}}, REDUCE(sym_global_identifier, 3, .production_id = 6), - [115] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_global_identifier, 3, .production_id = 6), - [117] = {.entry = {.count = 1, .reusable = true}}, SHIFT(22), - [119] = {.entry = {.count = 1, .reusable = true}}, SHIFT(35), + [97] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_identifier, 1, .production_id = 3), + [99] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_identifier, 1, .production_id = 3), + [101] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_defined_by_relation, 2, .production_id = 2), + [103] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_defined_by_relation, 2, .production_id = 2), + [105] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_references_relation, 2, .production_id = 2), + [107] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_references_relation, 2, .production_id = 2), + [109] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_type_definition_relation, 2, .production_id = 2), + [111] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_type_definition_relation, 2, .production_id = 2), + [113] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_implementation_relation, 2, .production_id = 2), + [115] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_implementation_relation, 2, .production_id = 2), + [117] = {.entry = {.count = 1, .reusable = true}}, SHIFT(13), + [119] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), [121] = {.entry = {.count = 1, .reusable = true}}, SHIFT(36), - [123] = {.entry = {.count = 1, .reusable = true}}, SHIFT(29), - [125] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_reference_statement, 3, .production_id = 5), - [127] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_docstring, 2), - [129] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_comment, 2), - [131] = {.entry = {.count = 1, .reusable = true}}, SHIFT(20), - [133] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_reference_statement, 2, .production_id = 2), - [135] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), + [123] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_reference_statement, 3, .production_id = 5), + [125] = {.entry = {.count = 1, .reusable = true}}, SHIFT(29), + [127] = {.entry = {.count = 1, .reusable = true}}, SHIFT(27), + [129] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_docstring, 2), + [131] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_comment, 2), + [133] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), + [135] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_reference_statement, 2, .production_id = 2), [137] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), - [139] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), + [139] = {.entry = {.count = 1, .reusable = true}}, SHIFT(35), }; #ifdef __cplusplus diff --git a/cmd/scip/tests/reprolang/src/tree_sitter/parser.h b/cmd/scip/tests/reprolang/src/tree_sitter/parser.h index 2b14ac10..17b4fde9 100644 --- a/cmd/scip/tests/reprolang/src/tree_sitter/parser.h +++ b/cmd/scip/tests/reprolang/src/tree_sitter/parser.h @@ -13,9 +13,8 @@ extern "C" { #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 -typedef uint16_t TSStateId; - #ifndef TREE_SITTER_API_H_ +typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; @@ -130,9 +129,16 @@ struct TSLanguage 
{ * Lexer Macros */ +#ifdef _MSC_VER +#define UNUSED __pragma(warning(suppress : 4101)) +#else +#define UNUSED __attribute__((unused)) +#endif + #define START_LEXER() \ bool result = false; \ bool skip = false; \ + UNUSED \ bool eof = false; \ int32_t lookahead; \ goto start; \ @@ -166,7 +172,7 @@ struct TSLanguage { * Parse Table Macros */ -#define SMALL_STATE(id) id - LARGE_STATE_COUNT +#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) #define STATE(id) id @@ -176,7 +182,7 @@ struct TSLanguage { {{ \ .shift = { \ .type = TSParseActionTypeShift, \ - .state = state_value \ + .state = (state_value) \ } \ }} @@ -184,7 +190,7 @@ struct TSLanguage { {{ \ .shift = { \ .type = TSParseActionTypeShift, \ - .state = state_value, \ + .state = (state_value), \ .repetition = true \ } \ }} diff --git a/yarn.lock b/yarn.lock index 093aa2a9..a99e68f3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -27,10 +27,10 @@ protoc-gen-ts@0.8.6: resolved "https://registry.yarnpkg.com/protoc-gen-ts/-/protoc-gen-ts-0.8.6.tgz#e789a6fc3fbe09bdc119acecc349b9554ec5940e" integrity sha512-66oeorGy4QBvYjQGd/gaeOYyFqKyRmRgTpofmnw8buMG0P7A0jQjoKSvKJz5h5tNUaVkIzvGBUTRVGakrhhwpA== -tree-sitter-cli@^0.20.8: - version "0.20.8" - resolved "https://registry.yarnpkg.com/tree-sitter-cli/-/tree-sitter-cli-0.20.8.tgz#06a81cea8d6d82f93d67eed7d28b6bc04a4a8916" - integrity sha512-XjTcS3wdTy/2cc/ptMLc/WRyOLECRYcMTrSWyhZnj1oGSOWbHLTklgsgRICU3cPfb0vy+oZCC33M43u6R1HSCA== +tree-sitter-cli@0.21.0: + version "0.21.0" + resolved "https://registry.yarnpkg.com/tree-sitter-cli/-/tree-sitter-cli-0.21.0.tgz#563f0a387a62c6ca57ce8dc94849e3ad7bbb0abc" + integrity sha512-wA7wT5724fNQW82XDH6zT6ZcYonjrAKLCHHuhLsPcAKULrhp3rNuMvlgBdB5FUBvmjHNhtTZF/qpHenMoRJPBw== typescript@^4.9.0: version "4.9.5" From e114ec5fb3c9784b00ed8f145889d3b66082bb90 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 15:10:31 +0100 Subject: [PATCH 24/25] Rename docker image to clarify what it is --- .github/workflows/build-env-docker.yml | 6 +++--- cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh | 3 +++ dev/build-docker-environment.sh | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-env-docker.yml b/.github/workflows/build-env-docker.yml index 1f2c929b..b32306a5 100644 --- a/.github/workflows/build-env-docker.yml +++ b/.github/workflows/build-env-docker.yml @@ -6,12 +6,12 @@ name: Create and publish a Docker image for bindings build environment # Configures this workflow to run every time a change is pushed to the branch called `release`. on: push: - branches: ['main'] # TODO: set to main + branches: ['main'] # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. env: REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }} + IMAGE_NAME: sourcegraph/scip-bindings-env # There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. jobs: @@ -45,7 +45,7 @@ jobs: with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} tags: | - type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} # TODO: set to main + type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }} # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. 
# It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. diff --git a/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh b/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh index d216d952..b072d8fd 100755 --- a/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh +++ b/cmd/scip/tests/reprolang/generate-tree-sitter-parser.sh @@ -3,5 +3,8 @@ set -eux SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" cd "$SCRIPT_DIR" yarn install + +# See https://github.com/smacker/go-tree-sitter/issues/85#issuecomment-1287988200 ./node_modules/.bin/tree-sitter generate --abi 13 + yarn --cwd ../../../.. run prettier diff --git a/dev/build-docker-environment.sh b/dev/build-docker-environment.sh index 3aa6d86c..f93589cf 100755 --- a/dev/build-docker-environment.sh +++ b/dev/build-docker-environment.sh @@ -6,4 +6,4 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir IMAGE_NAME="sourcegraph/scip-bindings-env" -docker build . -t $IMAGE_NAME -f ./dev/Dockerfile.bindings --cache-from ghcr.io/sourcegraph/scip:latest && echo $IMAGE_NAME +docker build . -t $IMAGE_NAME -f ./dev/Dockerfile.bindings --cache-from ghcr.io/$IMAGE_NAME:latest && echo $IMAGE_NAME From 7fd96ddf26d3e11f7e0057cf5b6182b0c5a8f3c0 Mon Sep 17 00:00:00 2001 From: Anton Sviridov Date: Wed, 26 Jun 2024 19:25:03 +0100 Subject: [PATCH 25/25] Change image name --- .github/workflows/protobuf-reprolang.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protobuf-reprolang.yml b/.github/workflows/protobuf-reprolang.yml index 8ec0b26e..5bfd252e 100644 --- a/.github/workflows/protobuf-reprolang.yml +++ b/.github/workflows/protobuf-reprolang.yml @@ -26,7 +26,7 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - run: docker pull ghcr.io/sourcegraph/scip:latest || echo "no suitable cache" + - run: docker pull ghcr.io/sourcegraph/scip-bindings-env:latest || echo "no suitable cache" - name: Regenerate protobuf bindings and reprolang parser run: |