More fixes for helm release
ravenac95 committed May 17, 2024
1 parent cfc7898 commit 61dbaf3
Showing 62 changed files with 804 additions and 4 deletions.
68 changes: 68 additions & 0 deletions .github/scripts/publish-docker-containers.sh
@@ -0,0 +1,68 @@
#!/bin/bash
set -euo pipefail

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "${SCRIPT_DIR}/../../"
REPO_DIR=$(pwd)

# For now, if a plugin has both a pyproject.toml and a package.json, the python
# image wins because the python plugins are built and published last.
python_plugins="$(find ./warehouse/cloudquery-* -type f -name 'pyproject.toml' -exec sh -c 'dirname $0' {} \;)"
ts_plugins="$(find ./warehouse/cloudquery-* -type f -name 'package.json' -exec sh -c 'dirname $0' {} \;)"
tag="$(git rev-parse HEAD)"

build_base_image() {
language="$1"
tag="$2"
base_image="ghcr.io/opensource-observer/${language}-base:${tag}"
dockerfile_path="./docker/cloudquery/${language}-base.Dockerfile"
docker build -t "${base_image}" -f "${dockerfile_path}" .
echo $base_image
}

# Build the base images
py_base_image=$(build_base_image py $tag)
ts_base_image=$(build_base_image ts $tag)
prefix="cloudquery-"

for path in $ts_plugins; do
plugin_name=$(basename "$path")
# Remove the cloudquery prefix
plugin_name=${plugin_name#"$prefix"}

plugin_image="ghcr.io/opensource-observer/cloudquery-${plugin_name}:${tag}"

echo "Building ${plugin_name} plugin"
docker build -t "${plugin_image}" \
--build-arg PLUGIN_NAME="${plugin_name}" \
--build-arg BASE_IMAGE="${ts_base_image}" \
-f docker/cloudquery/ts.Dockerfile \
.
echo "Publishing the plugin to ${plugin_image}"
docker push "${plugin_image}"
done

for path in $python_plugins; do
plugin_name=$(basename "$path")
# Remove the cloudquery prefix
plugin_name=${plugin_name#"$prefix"}

plugin_cmd=$(echo "$plugin_name" | sed "s/-/_/g")
plugin_image="ghcr.io/opensource-observer/cloudquery-${plugin_name}:${tag}"

# Skip the example plugin
if [[ $plugin_name = "example-plugin" ]]; then
continue
fi
echo "Building ${plugin_name} plugin"

# Python plugins are built on the python base image
docker build -t "${plugin_image}" \
--build-arg PLUGIN_NAME="${plugin_name}" \
--build-arg PLUGIN_CMD="${plugin_cmd}" \
--build-arg BASE_IMAGE="${py_base_image}" \
-f docker/cloudquery/py.Dockerfile \
.

echo "Publishing the plugin to ${plugin_image}"
docker push "${plugin_image}"
done
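A minimal sketch of running the publish script locally, assuming Docker is installed and you have push access to ghcr.io (the username and token are placeholders):

echo "$GITHUB_TOKEN" | docker login ghcr.io -u <username> --password-stdin
bash .github/scripts/publish-docker-containers.sh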
40 changes: 40 additions & 0 deletions .github/workflows/warehouse-publish-docker-containers.yml
@@ -0,0 +1,40 @@
name: warehouse-publish-cloudquery-plugins
env:
X_GITHUB_GRAPHQL_API: ${{ vars.X_GITHUB_GRAPHQL_API }}
X_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

# This workflow only runs when a commit is pushed to main.
on:
push:
branches:
- main

jobs:
warehouse-publish-docker-containers:
name: warehouse-publish-docker-containers
environment: indexer
runs-on: ubuntu-latest

permissions:
packages: write

steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 1

- name: 'Login to GitHub Container Registry'
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Package and publish cloudquery plugins
run: bash .github/scripts/publish-cloudquery-plugins.sh

- name: Package and publish other docker containers
run: bash .github/scripts/publish-docker-containers.sh

Empty file added a.out
Empty file.
40 changes: 40 additions & 0 deletions boop.txt
@@ -0,0 +1,40 @@
01:07:33 Running with dbt=1.7.9
01:07:33 Registered adapter: bigquery=1.7.6
01:07:33 Found 112 models, 33 sources, 0 exposures, 0 metrics, 463 macros, 0 groups, 0 semantic models
01:07:33
01:07:34 Concurrency: 32 threads (target='production')
01:07:34
01:07:34 Compiled node 'playground__ossd_collections' is:


with __dbt__cte__playground__project_filter as (


SELECT * FROM UNNEST([
"gitcoin",
"opensource-observer",
"uniswap",
"velodrome",
"ethereum-attestation-service",
"zora",
"libp2p",
"rabbit-hole",
"safe-global",
"aave"
]) as project_slug
), filtered_collections as (
select distinct
collections.collection_name as `name`,
collections.sync_time as `sync_time`
from `opensource-observer`.`oso`.`stg_ossd__current_collections` as collections
cross join UNNEST(collections.projects) as project_name
inner join `opensource-observer`.`oso`.`stg_ossd__current_projects` as projects
on projects.project_name = project_name
where project_name IN (select * from __dbt__cte__playground__project_filter)
)

select collections.*
from `opensource-observer`.`oso`.`collections_ossd` as collections
inner join filtered_collections as filtered
on filtered.name = collections.name
and collections._cq_sync_time = filtered.sync_time
9 changes: 9 additions & 0 deletions docker/cloudquery/py-base.Dockerfile
@@ -0,0 +1,9 @@
FROM python:3.12-bookworm

RUN pip install poetry

COPY . /usr/src/app

WORKDIR /usr/src/app

RUN poetry install
7 changes: 7 additions & 0 deletions docker/cloudquery/py.Dockerfile
@@ -0,0 +1,7 @@
ARG BASE_IMAGE=ghcr.io/opensource-observer/cloudquery-py-base:latest

FROM ${BASE_IMAGE}

ARG PLUGIN_NAME
# Exec-form ENTRYPOINT does not expand build args at runtime, so persist the
# plugin name as an environment variable and use the shell form instead.
ENV PLUGIN_NAME=${PLUGIN_NAME}

ENTRYPOINT exec "${PLUGIN_NAME}"
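For reference, a hedged sketch of how the publish script above would invoke this Dockerfile; the plugin name is the example plugin from this repo and the tag is a placeholder commit SHA (the publish script itself skips the example plugin):

docker build -t ghcr.io/opensource-observer/cloudquery-example-plugin:abc1234 \
  --build-arg PLUGIN_NAME=example-plugin \
  --build-arg PLUGIN_CMD=example_plugin \
  --build-arg BASE_IMAGE=ghcr.io/opensource-observer/py-base:abc1234 \
  -f docker/cloudquery/py.Dockerfile .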
9 changes: 9 additions & 0 deletions docker/cloudquery/ts-base.Dockerfile
@@ -0,0 +1,9 @@
FROM node:20 as build

RUN npm install -g pnpm@^9.0.0

COPY . /usr/src/app

WORKDIR /usr/src/app

RUN pnpm install && pnpm build:cloudquery
9 changes: 9 additions & 0 deletions docker/cloudquery/ts.Dockerfile
@@ -0,0 +1,9 @@
ARG BASE_IMAGE=ghcr.io/opensource-observer/cloudquery-ts-base:latest

FROM ${BASE_IMAGE}

ARG PLUGIN_NAME

WORKDIR /usr/src/app/warehouse/cloudquery-${PLUGIN_NAME}

ENTRYPOINT [ "pnpm", "node", "--loader", "ts-node/esm", "src/main.ts" ]
6 changes: 6 additions & 0 deletions docker/images/cloudflare-tunnel/Dockerfile
@@ -0,0 +1,6 @@
FROM ghcr.io/strrl/cloudflare-tunnel-ingress-controller:latest as binary

FROM alpine:3.19
COPY --from=binary /usr/bin/cloudflare-tunnel-ingress-controller /usr/bin/cloudflare-tunnel-ingress-controller
COPY ./cf-tunnel-wrapper.sh /usr/bin/cf-tunnel-wrapper.sh

9 changes: 9 additions & 0 deletions docker/images/cloudflare-tunnel/cf-tunnel-wrapper.sh
@@ -0,0 +1,9 @@
#!/bin/sh
# This wrapper exists to enable the use of kube-secrets-init
cloudflare-tunnel-ingress-controller \
--ingress-class=${INGRESS_CLASS} \
--controller-class=${CONTROLLER_CLASS} \
--cloudflare-api-token=${CLOUDFLARE_API_TOKEN} \
--cloudflare-account-id=${CLOUDFLARE_ACCOUNT_ID} \
--cloudflare-tunnel-name=${CLOUDFLARE_TUNNEL_NAME} \
--namespace=${NAMESPACE}
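A hedged sketch of invoking the wrapper directly with the environment variables it expects (all values are placeholders; in the cluster they would come from the pod spec and kube-secrets-init):

INGRESS_CLASS=<ingress-class> \
CONTROLLER_CLASS=<controller-class> \
CLOUDFLARE_API_TOKEN=<api-token> \
CLOUDFLARE_ACCOUNT_ID=<account-id> \
CLOUDFLARE_TUNNEL_NAME=<tunnel-name> \
NAMESPACE=<namespace> \
/usr/bin/cf-tunnel-wrapper.sh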
37 changes: 37 additions & 0 deletions docker/images/dagster-dask.Dockerfile
@@ -0,0 +1,37 @@
FROM ubuntu:jammy

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
apt-get install -y software-properties-common && \
add-apt-repository ppa:deadsnakes/ppa && \
apt-get update && \
apt-get install -y python3.12
RUN apt-get install -y curl && \
curl -o get-pip.py https://bootstrap.pypa.io/get-pip.py && \
python3.12 get-pip.py
RUN pip3.12 install poetry


# Create stub packages first so the dependency install below can be cached in
# its own layer before the full source tree is copied in.
RUN mkdir -p /usr/bin/app && \
bash -c "mkdir -p /usr/bin/app/warehouse/{bq2cloudsql,oso_dagster,oso_lets_go,common}" && \
touch /usr/bin/app/warehouse/bq2cloudsql/__init__.py && \
touch /usr/bin/app/warehouse/bq2cloudsql/script.py && \
touch /usr/bin/app/warehouse/oso_dagster/__init__.py && \
touch /usr/bin/app/warehouse/oso_lets_go/__init__.py && \
touch /usr/bin/app/warehouse/oso_lets_go/wizard.py && \
touch /usr/bin/app/warehouse/common/__init__.py

WORKDIR /usr/bin/app
COPY pyproject.toml poetry.lock /usr/bin/app/
COPY warehouse/cloudquery-example-plugin /usr/bin/app/warehouse/cloudquery-example-plugin

# Install everything onto the system path
RUN poetry config virtualenvs.create false && \
poetry install

RUN rm -r /usr/bin/app/warehouse

COPY . /usr/bin/app

RUN poetry config virtualenvs.create false && \
poetry install
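A hedged sketch of building this image locally from the repository root (the image name and tag are placeholders):

docker build -f docker/images/dagster-dask.Dockerfile -t oso-dagster-dask:dev .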
2 changes: 1 addition & 1 deletion ops/helm-charts/oso-dagster/Chart.yaml
@@ -3,7 +3,7 @@ name: oso-dagster
description: Extension of the dagster template

type: application
version: 0.1.1
version: 0.1.2
appVersion: "1.16.0"
dependencies:
- name: dagster
4 changes: 1 addition & 3 deletions ops/k8s-apps/production/custom-helm-values.yaml
@@ -8,6 +8,4 @@ spec:
secretPrefix: "gcp:secretmanager:production-dagster"
dagster:
global:
serviceAccountName: production-dagster
global:
serviceAccountName: production-dagster
serviceAccountName: production-dagster
Empty file added oso/dbtdag/__init__.py
Empty file.
78 changes: 78 additions & 0 deletions oso/dbtdag/export.py
@@ -0,0 +1,78 @@
# Setup imports
import click
from dbt.cli.main import cli, global_flags
from dbt.cli import requires, params as p
from dbt.task.list import ListTask
from dbt.graph.queue import GraphQueue


# We need to initialize a click command in order to parse the arguments. This
# seems to be the easiest way to gather everything required to call into
# dbt's internals.
#
# WARNING: This does not appear to be a stable API. Most of these decorators
# are copied from the list() command here:
# https://github.com/dbt-labs/dbt-core/blob/e4fe839e4574187b574473596a471092267a9f2e/core/dbt/cli/main.py
#
@cli.command("export_list_task")
@click.pass_context
@global_flags
@p.exclude
@p.indirect_selection
@p.models
@p.output
@p.output_keys
@p.profile
@p.profiles_dir
@p.project_dir
@p.resource_type
@p.raw_select
@p.selector
@p.state
@p.defer_state
@p.deprecated_state
@p.target
@p.target_path
@p.vars
@requires.postflight
@requires.preflight
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def export_list_task(ctx, **kwargs):
print(ctx.obj["flags"])
task = ListTask(ctx.obj["flags"], ctx.obj["runtime_config"], ctx.obj["manifest"])
return task, True


def call_export_list_task(target, project_dir=None):
args = ["export_list_task", "--target", target]

if project_dir:
args.append("--project-dir")
args.append(project_dir)

ctx = cli.make_context(cli.name, args)

ctx.obj = {"manifest": None, "callbacks": []}
results, success = cli.invoke(ctx)
if not success:
raise Exception("invocation was not successful")
return results


def get_graph_queue_scores(target, project_dir=None):
task = call_export_list_task(target, project_dir)

task.compile_manifest()
graph = task.graph.graph

queue = GraphQueue(graph, task.manifest, ())
return queue._get_scores(graph)
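A hedged usage sketch, assuming the oso package is importable and that GraphQueue._get_scores returns a mapping of node ids to scores; the target and project directory are placeholders:

python3 -c '
from oso.dbtdag.export import get_graph_queue_scores

scores = get_graph_queue_scores("production", project_dir="<path-to-dbt-project>")
for node_id, score in sorted(scores.items(), key=lambda kv: kv[1]):
    print(node_id, score)
'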
18 changes: 18 additions & 0 deletions test_all.sh
@@ -0,0 +1,18 @@
#!/bin/bash
set -euxo pipefail

dagster asset materialize -m oso_dagster.definitions --select base_blocks
dagster asset materialize -m oso_dagster.definitions --select base_transactions
dagster asset materialize -m oso_dagster.definitions --select base_traces
dagster asset materialize -m oso_dagster.definitions --select frax_transactions
dagster asset materialize -m oso_dagster.definitions --select mode_transactions
dagster asset materialize -m oso_dagster.definitions --select pgn_transactions
dagster asset materialize -m oso_dagster.definitions --select frax_blocks
dagster asset materialize -m oso_dagster.definitions --select frax_traces
dagster asset materialize -m oso_dagster.definitions --select mode_blocks
dagster asset materialize -m oso_dagster.definitions --select mode_traces
dagster asset materialize -m oso_dagster.definitions --select pgn_blocks
dagster asset materialize -m oso_dagster.definitions --select pgn_traces
dagster asset materialize -m oso_dagster.definitions --select zora_blocks
dagster asset materialize -m oso_dagster.definitions --select zora_transactions
dagster asset materialize -m oso_dagster.definitions --select zora_traces
3 changes: 3 additions & 0 deletions warehouse/ansible-collection/integration/README.md
@@ -0,0 +1,3 @@
# Ansible Collection - opensource-observer.integration

Tools for managing a direct datastore integration into Open Source Observer.