
Merge branch 'main' into operator-cli
shaspitz committed Jul 23, 2024
2 parents 3b7e1a8 + 985b84f commit 9acdb63
Showing 159 changed files with 11,922 additions and 5,683 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/artifacts.yml
@@ -8,7 +8,7 @@ permissions:

jobs:
upload_contracts:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30

steps:
@@ -49,7 +49,7 @@ jobs:
--tagging 'TagSet=[{Key=AutoDelete,Value=true}]'
upload_binaries:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30
strategy:
matrix:
25 changes: 21 additions & 4 deletions .github/workflows/ci.yml
@@ -18,7 +18,7 @@ concurrency:
jobs:
commitlint:
name: Check Commit Message
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30

steps:
@@ -55,7 +55,7 @@ jobs:

go-modules:
name: Test and Build Go Modules
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 60

steps:
@@ -127,7 +127,7 @@ jobs:

foundry:
name: Foundry Checks and Reports
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30
defaults:
run:
@@ -160,7 +160,7 @@

contracts:
name: Test and Build Contracts Scripts
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 30
defaults:
run:
@@ -189,6 +189,9 @@ jobs:
- name: Install Hardhat
run: npm install -g hardhat

- name: Install solhint
run: npm install -g solhint

- name: Install Dependencies
run: npm install

@@ -198,6 +201,7 @@
node --version
npm --version
forge --version
solhint --version
- name: Build
run: npm run build --if-present
@@ -216,3 +220,16 @@ jobs:
git diff --name-only --exit-code . || (echo "Generated files not in parity with the source files." && exit 1)
git reset --hard HEAD
working-directory: contracts-abi

- name: Run solhint solidity linter
run: solhint '**/*.sol'
working-directory: contracts

infrastructure:
uses: ./.github/workflows/infrastructure.yml
secrets: inherit
needs:
- commitlint
- go-modules
- foundry
- contracts
.github/workflows/infrastructure.yml
@@ -1,11 +1,7 @@
name: infrastructure

on:
workflow_run:
workflows:
- ci
types:
- completed
workflow_call:
workflow_dispatch:
inputs:
profile:
@@ -33,9 +29,9 @@ on:
type: choice
options:
- lax1
- nyc1
- nyc2
- chi1
- mia2
- mia3
default: 'lax'

permissions:
@@ -48,11 +44,18 @@ concurrency:
jobs:
cluster:
name: Setup and Test Nomad Cluster
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 180
if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }}

steps:
- name: Print System Information
run: |
echo "CPU INFO:"
lscpu
echo
echo "MEMORY INFO:"
free -h
- name: Setup Environment
run: |
RUNNER_START_TIME="$(date +%s)"
@@ -64,6 +67,7 @@ jobs:
CLUSTER_PROFILE_FLAG=$([ "${IS_MANUAL_DEPLOYMENT}" == "true" ] && echo "--profile ${{ github.event.inputs.profile }}" || echo "--profile ci")
CLUSTER_LOGS_FLAG=$([ "${{ github.event.inputs.logs }}" == "false" ] && echo "--no-logs-collection" || echo "")
CLUSTER_DATADOG_KEY_FLAG=$([ "${IS_MANUAL_DEPLOYMENT}" == "true" ] && echo "--datadog-key ${{ secrets.DATADOG_API_KEY }}" || echo "")
CLUSTER_L1_RPC_URL_FLAG="--l1-rpc-url ${{ secrets.L1_RPC_URL }}"
CLUSTER_DEBUG_FLAG=$([ "${{ github.event.inputs.debug }}" == "true" ] && echo "--debug" || echo "")
TARGET_MACHINE_IP=$([ "${IS_MANUAL_DEPLOYMENT}" == "true" ] && echo "$(dig +short ${{ github.event.inputs.target_machine }})" || echo "127.0.0.1")
@@ -73,6 +77,7 @@ jobs:
echo "CLUSTER_PROFILE_FLAG=${CLUSTER_PROFILE_FLAG}" >> ${GITHUB_ENV}
echo "CLUSTER_LOGS_FLAG=${CLUSTER_LOGS_FLAG}" >> ${GITHUB_ENV}
echo "CLUSTER_DATADOG_KEY_FLAG=${CLUSTER_DATADOG_KEY_FLAG}" >> ${GITHUB_ENV}
echo "CLUSTER_L1_RPC_URL_FLAG=${CLUSTER_L1_RPC_URL_FLAG}" >> ${GITHUB_ENV}
echo "CLUSTER_DEBUG_FLAG=${CLUSTER_DEBUG_FLAG}" >> ${GITHUB_ENV}
echo "TARGET_MACHINE_IP=${TARGET_MACHINE_IP}" >> ${GITHUB_ENV}
@@ -104,7 +109,7 @@ jobs:
uses: actions/checkout@v4
with:
submodules: recursive
ref: ${{ github.event.workflow_run.head_branch }}
ref: ${{ github.event.workflow_run.head_branch || github.event.inputs.branch || github.ref }}

- name: Setup Cache
uses: actions/cache@v4
@@ -131,6 +136,8 @@ jobs:
sudo add-apt-repository --yes ppa:ethereum/ethereum
sudo apt-get update
sudo apt-get install --yes goreleaser ethereum
python3 -m venv primevenv
source primevenv/bin/activate
pip install boto3 botocore
pipx inject ansible-core botocore boto3
@@ -143,9 +150,9 @@
ANSIBLE_CONNECTION=""
export ANSIBLE_HOST_KEY_CHECKING=false
mkdir -p ~/.ssh && \
chmod 700 ~/.ssh && \
echo "${{ secrets.INFRASTRUCTURE_DEPLOYMENT_KEY }}" > ~/.ssh/id_ed25519 && \
mkdir -p ~/.ssh
chmod 700 ~/.ssh
echo "${{ secrets.INFRASTRUCTURE_DEPLOYMENT_KEY }}" > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
fi
@@ -187,15 +194,15 @@ jobs:
if: ${{ env.IS_MANUAL_DEPLOYMENT == 'false' }}
run: |
START_TIME="$(date +%s)"
./cluster.sh init ${CLUSTER_ENVIRONMENT_FLAG} ${CLUSTER_PROFILE_FLAG} ${CLUSTER_DEBUG_FLAG}
./cluster.sh init ${CLUSTER_ENVIRONMENT_FLAG} ${CLUSTER_DEBUG_FLAG}
END_TIME="$(date +%s)"
echo "INIT_DURATION=$(date -ud "@$((END_TIME - START_TIME))" +'%H:%M:%S')" >> ${GITHUB_ENV}
working-directory: infrastructure/nomad

- name: Deploy Cluster
run: |
START_TIME="$(date +%s)"
./cluster.sh deploy ${CLUSTER_ENVIRONMENT_FLAG} ${CLUSTER_PROFILE_FLAG} ${CLUSTER_LOGS_FLAG} ${CLUSTER_DATADOG_KEY_FLAG} ${CLUSTER_DEBUG_FLAG}
./cluster.sh deploy ${CLUSTER_ENVIRONMENT_FLAG} ${CLUSTER_PROFILE_FLAG} ${CLUSTER_LOGS_FLAG} ${CLUSTER_DATADOG_KEY_FLAG} ${CLUSTER_L1_RPC_URL_FLAG} ${CLUSTER_DEBUG_FLAG}
END_TIME="$(date +%s)"
echo "DEPLOY_DURATION=$(date -ud "@$((END_TIME - START_TIME))" +'%H:%M:%S')" >> ${GITHUB_ENV}
working-directory: infrastructure/nomad
@@ -255,6 +262,49 @@ jobs:
)
curl -X POST -H 'Content-type: application/json' --data "${PAYLOAD}" "${{ secrets.SLACK_CI_CHANNEL_WEBHOOK_URL }}"
- name: Collect Cluster Logs
if: ${{ env.IS_MANUAL_DEPLOYMENT == 'false' && failure() }}
run: |
NOMAD_SERVER="http://${TARGET_MACHINE_IP}:4646"
journalctl -u nomad > nomad.log
curl -s ${NOMAD_SERVER}/v1/jobs > nomad_jobs.json
ALLOC_IDS=$(curl -s ${NOMAD_SERVER}/v1/allocations | jq -r '.[].ID')
for ALLOC_ID in ${ALLOC_IDS}; do
JOB=$(curl -s ${NOMAD_SERVER}/v1/allocation/${ALLOC_ID} | jq -r '.JobID')
TASKS=$(curl -s ${NOMAD_SERVER}/v1/allocation/${ALLOC_ID} | jq -r '.TaskStates | keys[]')
for TASK in ${TASKS}; do
STDOUT=$(curl -s "${NOMAD_SERVER}/v1/client/fs/logs/${ALLOC_ID}?task=${TASK}&type=stdout")
if [ "$(jq -e .Data <<< "${STDOUT}" 2> /dev/null)" != "null" ]; then
echo ${STDOUT} | jq -r '.Data' | base64 -d > "${ALLOC_ID}_${JOB}_${TASK}_stdout.log"
else
echo "Failed to fetch stdout log for ${ALLOC_ID}_${JOB}_${TASK}:"
echo ${STDOUT}
fi
STDERR=$(curl -s "${NOMAD_SERVER}/v1/client/fs/logs/${ALLOC_ID}?task=${TASK}&type=stderr")
if [ "$(jq -e .Data <<< "${STDERR}" 2> /dev/null)" != "null" ]; then
echo ${STDERR} | jq -r '.Data' | base64 -d > "${ALLOC_ID}_${JOB}_${TASK}_stderr.log"
else
echo "Failed to fetch stderr log for ${ALLOC_ID}_${JOB}_${TASK}:"
echo ${STDERR}
fi
done
done
- name: Upload Debug Artifacts
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: debug-artifacts
path: |
/tmp/dist/
nomad_jobs.json
nomad.log
*_stdout.log
*_stderr.log
- name: Initialize Debug Shell
if: ${{ env.IS_MANUAL_DEPLOYMENT == 'false' && failure() }}
run: |
2 changes: 1 addition & 1 deletion .github/workflows/releaser.yml
@@ -11,7 +11,7 @@ permissions:

jobs:
release:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
timeout-minutes: 60
strategy:
matrix:
4 changes: 4 additions & 0 deletions README.md
@@ -8,6 +8,10 @@ Is P2P software that creates a network of execution providers and bidders.
Bidders can broadcast bids to providers and receive commitments from them.
A high throughput POA EVM chain settles the bids at the end of a block slot.

## Documentation

For detailed documentation, visit the [mev-commit docs](https://docs.primev.xyz/).

## Main Components
- [mev-commit client](p2p)
- [mev-commit-oracle](oracle)
4 changes: 2 additions & 2 deletions bridge/standard/bridge-v1/deploy_contracts.sh
@@ -119,7 +119,7 @@ RELAYER_ADDR="$RELAYER_ADDR" $FORGE_BIN_PATH script \
--broadcast \
--chain-id "$SETTLEMENT_CHAIN_ID" \
-vvvv \
--use 0.8.23 | tee deploy_sg_output.txt
--use 0.8.20 | tee deploy_sg_output.txt

awk -F"JSON_DEPLOY_ARTIFACT: " '/JSON_DEPLOY_ARTIFACT:/ {print $2}' deploy_sg_output.txt | sed '/^$/d' > SettlementGatewayArtifact.json
mv SettlementGatewayArtifact.json "$ARTIFACT_OUT_PATH"
@@ -131,7 +131,7 @@ RELAYER_ADDR="$RELAYER_ADDR" $FORGE_BIN_PATH script \
--broadcast \
--chain-id "$L1_CHAIN_ID" \
-vvvv \
--use 0.8.23 | tee deploy_l1g_output.txt
--use 0.8.20 | tee deploy_l1g_output.txt

awk -F"JSON_DEPLOY_ARTIFACT: " '/JSON_DEPLOY_ARTIFACT:/ {print $2}' deploy_l1g_output.txt | sed '/^$/d' > L1GatewayArtifact.json
mv L1GatewayArtifact.json "$ARTIFACT_OUT_PATH"