Skip to content

Commit

Permalink
Merge branch 'develop' into gltrost/transaction_logic_stack_tests
Browse files Browse the repository at this point in the history
  • Loading branch information
joaosreis authored Oct 26, 2023
2 parents 97c30e5 + d5ae586 commit da0a3a4
Show file tree
Hide file tree
Showing 120 changed files with 1,529 additions and 397 deletions.
26 changes: 26 additions & 0 deletions automation/terraform/modules/google-cloud/cloud-postgres/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Google Cloud Postgres Deployment

This terraform configuration is used to deploy an instance of Google Cloud Postgres. Although the default configuration works without creating a conflict, it is recommended to deploy the postgres instance as a module within a larger terraform deployment (which passes it unique var values).

The default configuration uses Google Secret Manager to pull in a password for the default `postgres` user. After deployment, the assigned IP addresses, username, and password will be printed to the terminal as shown below:

```
Outputs:
cloud_postgres_ip = tolist([
{
"ip_address" = "35.35.35.35" <---- example IP
"time_to_retire" = ""
"type" = "PRIMARY"
},
{
"ip_address" = "34.34.34.34" <---- example IP
"time_to_retire" = ""
"type" = "OUTGOING"
},
])
db_password = "PASSWORD_HERE"
db_user = "postgres"
```

The `PRIMARY` IP should be used when connecting to the new instance. By default, no database or schema is defined on the newly deployed db.
36 changes: 36 additions & 0 deletions automation/terraform/modules/google-cloud/cloud-postgres/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Configure the Google Cloud provider
# Project and region default to o1-labs values (see vars.tf); override them
# when this configuration is instantiated as a module.
provider "google" {
  project = var.gcp_project
  region  = var.gcp_region
}

# Random 4-byte suffix appended to the instance name so repeated deployments
# do not collide on Cloud SQL instance names (which are reserved for a time
# even after deletion).
resource "random_id" "instance_id" {
  byte_length = 4
}

# Look up the password for the default database user in Google Secret Manager.
# NOTE: var.db_pass names the secret to read, not the password value itself.
data "google_secret_manager_secret_version" "db_password" {
  provider = google
  secret   = var.db_pass
}

# Create a Google Cloud SQL PostgreSQL instance
resource "google_sql_database_instance" "postgres_instance" {
  name             = "${var.db_name}-${random_id.instance_id.hex}"
  database_version = var.postgres_version
  project          = var.gcp_project
  region           = var.gcp_region
  settings {
    tier = var.db_spec
    # Label the instance so billing/monitoring can attribute it to a service.
    user_labels = {
      service = var.service_label
    }
  }
  # Defaults to false (see vars.tf); set true for long-lived instances to
  # prevent accidental `terraform destroy` of the database.
  deletion_protection = var.deletion_protection
}

# Define the database user
# Password comes from the Secret Manager lookup above.
resource "google_sql_user" "database_user" {
  name     = var.db_user
  instance = google_sql_database_instance.postgres_instance.name
  password = data.google_secret_manager_secret_version.db_password.secret_data
}
13 changes: 13 additions & 0 deletions automation/terraform/modules/google-cloud/cloud-postgres/output.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# IP addresses (PRIMARY and OUTGOING) assigned to the Cloud SQL instance.
# Connect using the entry whose type is "PRIMARY".
output "cloud_postgres_ip" {
  value = google_sql_database_instance.postgres_instance.ip_address
}

# Name of the default database user created on the instance.
output "db_user" {
  value = google_sql_user.database_user.name
}

# Password for the default user, as read from Secret Manager.
# Must be marked sensitive: the provider flags secret_data as sensitive, and
# Terraform (>= 0.15) refuses to export a sensitive value through a
# non-sensitive output. Reveal it with: terraform output db_password
output "db_password" {
  value     = data.google_secret_manager_secret_version.db_password.secret_data
  sensitive = true
}
39 changes: 39 additions & 0 deletions automation/terraform/modules/google-cloud/cloud-postgres/vars.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Input variables for the Cloud SQL Postgres deployment. Defaults target the
# o1-labs GCP project; override them when using this configuration as a module.
# Explicit types make misuse fail at plan time instead of deep in the apply.

variable "gcp_project" {
  type        = string
  description = "GCP project in which to create the Cloud SQL instance."
  default     = "o1labs-192920"
}

variable "gcp_region" {
  type        = string
  description = "GCP region for the Cloud SQL instance."
  default     = "us-east4"
}

variable "gcp_zone" {
  type        = string
  description = "GCP zone (currently unused by main.tf; kept for module callers)."
  default     = "us-east4-b"
}

variable "db_name" {
  type        = string
  description = "Base name for the instance; a random hex suffix is appended."
  default     = "o1db"
}

variable "db_user" {
  type        = string
  description = "Name of the default database user to create."
  default     = "postgres"
}

variable "db_pass" {
  type        = string
  description = "Name of the Google Secret Manager secret holding the user's password (not the password itself)."
  default     = "o1db-pass"
}

variable "deletion_protection" {
  type        = bool
  description = "Whether to protect the instance from `terraform destroy`."
  default     = false
}

variable "postgres_version" {
  type        = string
  description = "Cloud SQL database version identifier."
  default     = "POSTGRES_14"
}

variable "db_spec" {
  type        = string
  description = "Cloud SQL machine tier for the instance."
  default     = "db-g1-small"
}

variable "service_label" {
  type        = string
  description = "Value for the `service` user label applied to the instance."
  default     = "none"
}
Original file line number Diff line number Diff line change
Expand Up @@ -255,6 +255,24 @@ groups:
description: "{{ $value }} blocks have been validated on network {{ $labels.testnet }} in the last hour (according to some node)."
runbook: "https://www.notion.so/minaprotocol/FewBlocksPerHour-47a6356f093242d988b0d9527ce23478"

- alert: StuckInBootstrap
expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "BOOTSTRAP"}[2h]) >= 7200000) > 0
for: ${alert_evaluation_duration}
labels:
testnet: "{{ $labels.testnet }}"
severity: critical
annotations:
summary: "One or more {{ $labels.testnet }} nodes are stuck at bootstrap for more than 2 hours"

- alert: StuckInCatchup
expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "CATCHUP"}[2h]) >= 7200000) > 0
for: ${alert_evaluation_duration}
labels:
testnet: "{{ $labels.testnet }}"
severity: critical
annotations:
summary: "One or more {{ $labels.testnet }} nodes are stuck at catchup for more than 2 hours"


- name: Warnings
rules:
Expand Down
21 changes: 12 additions & 9 deletions buildkite/scripts/check-compatibility.sh
Original file line number Diff line number Diff line change
Expand Up @@ -86,17 +86,20 @@ function boot_and_sync {
REST_SERVER="http://127.0.0.1:$REST_PORT/graphql"

while [ $SYNCED -eq 0 ]; do
SYNC_STATUS=$(docker container exec -it $DAEMON_CONTAINER \
curl -g -X POST -H "Content-Type: application/json" -d '{"query":"query { syncStatus }"}' ${REST_SERVER})
SYNC_STATUS=$(docker container exec -it $DAEMON_CONTAINER \
curl -g -X POST -H "Content-Type: application/json" -d '{"query":"query { syncStatus }"}' ${REST_SERVER})

# "connection refused" until GraphQL server up
GOT_SYNC_STATUS=$(echo ${SYNC_STATUS} | grep "syncStatus")
if [ ! -z $GOT_SYNC_STATUS ]; then
echo "Sync status:" $GOT_SYNC_STATUS
fi
# print logs
docker container logs $DAEMON_CONTAINER --tail 10

# "connection refused" until GraphQL server up
GOT_SYNC_STATUS=$(echo ${SYNC_STATUS} | grep "syncStatus")
if [ ! -z $GOT_SYNC_STATUS ]; then
echo $(date +'%Y-%m-%d %H:%M:%S') ". Sync status:" $GOT_SYNC_STATUS
fi

SYNCED=$(echo ${SYNC_STATUS} | grep -c "SYNCED")
sleep 5
SYNCED=$(echo ${SYNC_STATUS} | grep -c "SYNCED")
sleep 5
done
}

Expand Down
6 changes: 3 additions & 3 deletions buildkite/scripts/rosetta-integration-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ ROSETTA_CLI_CONFIG_FILES=${ROSETTA_CLI_CONFIG_FILES:="config.json mina.ros"}
ROSETTA_CLI_MAIN_CONFIG_FILE=${ROSETTA_CLI_MAIN_CONFIG_FILE:="config.json"}

# Frequency (in seconds) at which payment operations will be sent
TRANSACTION_FREQUENCY=60
TRANSACTION_FREQUENCY=10

# Fetch zkApps
curl -Ls https://github.com/MinaProtocol/rosetta-integration-test-zkapps/tarball/$ROSETTA_INT_TEST_ZKAPPS_VERSION | tar xz -C /tmp
Expand Down Expand Up @@ -122,8 +122,8 @@ cat <<EOF >"$MINA_CONFIG_FILE"
"ledger": {
"name": "${MINA_NETWORK}",
"accounts": [
{ "pk": "${BLOCK_PRODUCER_PK}", "balance": "1000", "delegate": null, "sk": null },
{ "pk": "${SNARK_PRODUCER_PK}", "balance": "2000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null }
{ "pk": "${BLOCK_PRODUCER_PK}", "balance": "1000000", "delegate": null, "sk": null },
{ "pk": "${SNARK_PRODUCER_PK}", "balance": "2000000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null }
]
}
}
Expand Down
10 changes: 2 additions & 8 deletions buildkite/scripts/run-snark-transaction-profiler.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,7 @@ export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y git apt-transport-https ca-certificates tzdata curl python3

case "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" in
rampup|berkeley|release/2.0.0|develop)
TESTNET_NAME="berkeley"
;;
*)
TESTNET_NAME="mainnet"
esac
TESTNET_NAME="berkeley"

git config --global --add safe.directory /workdir

Expand All @@ -30,4 +24,4 @@ MAX_NUM_UPDATES=4
MIN_NUM_UPDATES=2

echo "--- Run Snark Transaction Profiler with parameters: --zkapps --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES}"
python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES}
python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES}
5 changes: 3 additions & 2 deletions buildkite/src/Command/Base.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ let Config =
, soft_fail : Optional B/SoftFail
, skip: Optional B/Skip
, `if` : Optional B/If
, timeout_in_minutes : Optional Natural
, timeout_in_minutes : Optional Integer
}
, default =
{ depends_on = [] : List TaggedKey.Type
Expand All @@ -119,7 +119,7 @@ let Config =
, soft_fail = None B/SoftFail
, skip = None B/Skip
, `if` = None B/If
, timeout_in_minutes = None Natural
, timeout_in_minutes = None Integer
}
}

Expand Down Expand Up @@ -156,6 +156,7 @@ let build : Config.Type -> B/Command.Type = \(c : Config.Type) ->
else Some (B/ArtifactPaths.String (SelectFiles.compile c.artifact_paths)),
key = Some c.key,
label = Some c.label,
timeout_in_minutes = c.timeout_in_minutes,
retry =
Some {
-- we only consider automatic retries
Expand Down
7 changes: 6 additions & 1 deletion buildkite/src/Constants/DebianVersions.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,12 @@ let minimalDirtyWhen = [
S.strictlyStart (S.contains "dockerfiles/stages"),
S.exactly "scripts/rebuild-deb" "sh",
S.exactly "scripts/release-docker" "sh",
S.exactly "buildkite/scripts/build-artifact" "sh"
S.exactly "buildkite/scripts/build-artifact" "sh",
S.exactly "buildkite/scripts/check-compatibility" "sh",
-- Snark profiler dirtyWhen
S.exactly "buildkite/src/Jobs/Test/RunSnarkProfiler" "dhall",
S.exactly "buildkite/scripts/run-snark-transaction-profiler" "sh",
S.exactly "scripts/snark_transaction_profiler" "py"
]

-- The default debian version (Bullseye) is used in all downstream CI jobs
Expand Down
3 changes: 2 additions & 1 deletion buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ in Pipeline.build Pipeline.Config::{
key = "berkeley-compatibilty-test",
target = Size.XLarge,
docker = None Docker.Type,
depends_on = dependsOn
depends_on = dependsOn,
timeout_in_minutes = Some +60
}
]
}
Expand Down
3 changes: 2 additions & 1 deletion buildkite/src/Jobs/Test/DevelopCompatibility.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ in Pipeline.build Pipeline.Config::{
key = "develop-compatibilty-test",
target = Size.XLarge,
docker = None Docker.Type,
depends_on = dependsOn
depends_on = dependsOn,
timeout_in_minutes = Some +60
}
]
}
Expand Down
2 changes: 1 addition & 1 deletion buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,6 @@ Pipeline.build
mode = PipelineMode.Type.Stable
},
steps = [
buildTestCmd "dev" "src/lib/transaction_snark/test/zkapp_fuzzy/zkapp_fuzzy.exe" 3600 150 Size.Small
buildTestCmd "dev" "src/lib/transaction_snark/test/zkapp_fuzzy/zkapp_fuzzy.exe" 4200 150 Size.Small
]
}
2 changes: 1 addition & 1 deletion buildkite/src/Jobs/Test/Libp2pUnitTest.dhall
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ Pipeline.build
key = "libp2p-bs-qc",
target = Size.Large,
docker = None Docker.Type,
timeout_in_minutes = Some 45
timeout_in_minutes = Some +45

}
]
Expand Down
2 changes: 1 addition & 1 deletion docs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

The docs for the Mina Protocol website are published on [docs.minaprotocol.com](https://docs.minaprotocol.com/).

The docs repository is [https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/).
The docs repository is [https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/).
24 changes: 24 additions & 0 deletions scripts/version-linter.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,30 @@

# version-linter.py -- makes sure serializations of versioned types don't change

"""
For the PR branch, PR base branch, and release branch, download the
type shapes file from Google storage. There should be a type shape file
available for every commit in a PR branch.
For each branch, store the type shape information in a Python dictionary, truncating
the shapes at a maximum depth.
For each type, compare the type shapes of each branch. If the shapes don't match, print an
error message. The exact comparison rules are given in RFC 0047 (with some embellishments
mentioned below).
The maximum depth should be set high enough so that all differences are caught
(no false negatives).
There may be some false positives, where a difference is reported for
type t1 due to a change to a type t2 contained in t1. The
difference will always also be reported for t2 directly. The maximum
depth should be set low enough to minimize such false positives.
There are some special rules for the types associated with signed commands and zkApp commands.
See `check_command_types` below.
"""

import subprocess
import os
import io
Expand Down
2 changes: 1 addition & 1 deletion src/app/archive/create_schema.sql
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ CREATE TABLE protocol_versions
, transaction int NOT NULL
, network int NOT NULL
, patch int NOT NULL
, UNIQUE (transaction,network)
, UNIQUE (transaction,network,patch)
);

CREATE TYPE chain_status_type AS ENUM ('canonical', 'orphaned', 'pending');
Expand Down
4 changes: 4 additions & 0 deletions src/app/archive/lib/load_data.ml
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
(* load_data.ml -- load archive db data to "native" OCaml data *)

(* these functions are used by the replayer and `extract_blocks` to load particular pieces
of archive db data
*)

open Core_kernel
open Async
open Mina_base
Expand Down
Loading

0 comments on commit da0a3a4

Please sign in to comment.