Skip to content

Commit

Permalink
update to sdk v4.2.1, fix error handling, add connector definition an…
Browse files Browse the repository at this point in the history
…d github workflow
  • Loading branch information
TristenHarr committed Mar 22, 2024
1 parent 66d9782 commit 3150b0d
Show file tree
Hide file tree
Showing 20 changed files with 2,581 additions and 1,255 deletions.
95 changes: 95 additions & 0 deletions .github/workflows/docker.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@

---
# Example modified from https://docs.github.com/en/actions/publishing-packages/publishing-docker-images
name: Create and publish a Docker image

# Run on pull requests and pushes to main, and on version tags (v*).
on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
    tags:
      - "v**"

# Container registry domain and the image name used by the jobs below.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Log in to the Container registry with the account that publishes the
      # packages; published packages are scoped to this account.
      # NOTE(review): a PAT is used here even though the `permissions` block
      # above grants `packages: write` to GITHUB_TOKEN — confirm the PAT is
      # intentional (e.g. for cross-repo publishing).
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.PAT_TOKEN }}
      # Extract tags and labels for the image; `id: meta` lets later steps
      # reference this step's outputs.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # Build the image from the repository's Dockerfile, tagging and labeling
      # it with the "meta" step's output. Push only outside pull requests so PR
      # builds validate the image without publishing it.
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Build connector definition
        run: |
          set -eo pipefail
          export DOCKER_IMAGE=$(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[0]')
          make build
        working-directory: ./connector-definition
      - uses: actions/upload-artifact@v4
        with:
          name: connector-definition.tgz
          path: ./connector-definition/dist/connector-definition.tgz
          compression-level: 0 # Already compressed

  release-connector:
    name: Release connector
    runs-on: ubuntu-latest
    needs: build-and-push-image
    # Only cut a GitHub release for version tags.
    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: connector-definition.tgz
          path: ./connector-definition/dist
      - name: Get version from tag
        id: get-version
        run: |
          echo "tagged_version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
        shell: bash
      # Pull this version's release notes out of CHANGELOG.md.
      - uses: mindsers/changelog-reader-action@v2
        id: changelog-reader
        with:
          version: ${{ steps.get-version.outputs.tagged_version }}
          path: ./CHANGELOG.md
      - uses: softprops/action-gh-release@v1
        with:
          draft: false
          tag_name: v${{ steps.get-version.outputs.tagged_version }}
          body: ${{ steps.changelog-reader.outputs.changes }}
          files: |
            ./connector-definition/dist/connector-definition.tgz
          fail_on_unmatched_files: true
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# DuckDB Connector Changelog
This changelog documents changes between release tags.


## [Unreleased]
Upcoming changes for the next versioned release.

## [0.0.9] - 2024-03-22
* Initial Version Tag
31 changes: 22 additions & 9 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
# Use Node.js 18 as the base image
FROM node:18 AS build-stage
# Use Node.js 20 as the base image for both build and production stages
FROM node:20 AS build-stage

# Set the working directory inside the container
# Set the working directory inside the container for the build stage
WORKDIR /usr/src/app

# Copy package.json and package-lock.json (if you have one) to the working directory
# Copy package.json and package-lock.json (if available) to the working directory
COPY package*.json ./

# Install both production and development dependencies
RUN npm install

# Install TypeScript globally
RUN npm install -g typescript

# Copy the entire project
Expand All @@ -19,8 +20,9 @@ COPY . .
RUN tsc

# Start a new stage for the production environment
FROM node:18 AS production
FROM node:20 AS production

# Set working directory for the production stage
WORKDIR /usr/src/app

# Copy package.json and package-lock.json to the working directory
Expand All @@ -29,10 +31,21 @@ COPY package*.json ./
# Install only production dependencies
RUN npm ci --only=production

# Copy compiled JavaScript from the previous stage
# Copy compiled JavaScript from the build stage
COPY --from=build-stage /usr/src/app/dist ./dist

EXPOSE 8100
# Define the environment variable for configuration directory with a default value, which can be overridden
ENV HASURA_CONFIGURATION_DIRECTORY=/etc/connector

# Define the command to run the app using CMD
CMD ["node", "./dist/src/index.js", "serve", "--configuration=/etc/connector/config.json"]
# Set the default port environment variable and allow it to be overridden
ENV HASURA_CONNECTOR_PORT=8080

# Expose the port specified by the HASURA_CONNECTOR_PORT environment variable
EXPOSE $HASURA_CONNECTOR_PORT

# Copy the entrypoint script into the container and make it executable
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh

# Use the entrypoint script to handle startup and signal trapping
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
## DuckDB (via MotherDuck) Connector

The DuckDB Data Connector allows for connecting to a Motherduck hosted DuckDB database. This uses the [Typescript Data Connector SDK](https://github.com/hasura/ndc-sdk-typescript) and implements the [Data Connector Spec](https://github.com/hasura/ndc-spec).
## TODO: Fix README

In order to use this connector you will need Motherduck setup. This connector currently only supports querying.
The DuckDB Data Connector allows for connecting to a Motherduck hosted DuckDB database, or a local DuckDB database file. This uses the [Typescript Data Connector SDK](https://github.com/hasura/ndc-sdk-typescript) and implements the [Data Connector Spec](https://github.com/hasura/ndc-spec).

This connector currently only supports querying.

## Before you get started
It is recommended that you:
Expand Down
12 changes: 12 additions & 0 deletions connector-definition/connector-metadata.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
---
# Hasura connector packaging definition for the DuckDB connector.
packagingDefinition:
  type: PrebuiltDockerImage
  dockerImage: ghcr.io/hasura/ndc-duckdb:v0.0.9
# Environment variables the connector reads at runtime.
supportedEnvironmentVariables:
  - name: DUCKDB_URL
    description: The url for the DuckDB database
# CLI commands: `update` regenerates the configuration inside the image.
commands:
  update: docker run --rm -e DUCKDB_URL="$DUCKDB_URL" -v "$HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH":/etc/connector ghcr.io/hasura/ndc-duckdb:v0.0.9 update
# Sync local configuration changes into the container and restart it.
dockerComposeWatch:
  - path: ./
    target: /etc/connector
    action: sync+restart
7 changes: 7 additions & 0 deletions connector-definition/docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
# Compose watch configuration: sync local files into the connector container's
# configuration directory and restart the service on change.
services:
  connector:
    develop:
      watch:
        - path: ./
          target: /etc/connector
          action: sync+restart
33 changes: 33 additions & 0 deletions entrypoint.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
#!/bin/bash
# Container entrypoint. Dispatches on the first argument:
#   update  -> regenerate the connector configuration
#   (other) -> serve the connector, relaying SIGTERM/SIGINT to the node process

# Regenerate the connector configuration file.
run_update() {
  node ./dist/generate-config.js
}

# Relay a termination signal to the server process and wait for it to exit.
graceful_shutdown() {
  echo "Shutting down gracefully..."
  kill -SIGTERM "$app_pid"
  wait "$app_pid"
  exit 0
}

# Launch the server in the background so this shell can trap and forward
# SIGTERM/SIGINT, then block until the server exits.
start_application() {
  node ./dist/src/index.js serve &
  app_pid=$!
  trap 'graceful_shutdown' SIGTERM SIGINT
  wait "$app_pid"
}

if [ "$1" = "update" ]; then
  shift # Drop the "update" argument before forwarding the rest
  run_update "$@"
else
  start_application "$@"
fi
10 changes: 3 additions & 7 deletions generate-config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,8 @@ import * as duckdb from 'duckdb';
import * as fs from 'fs';
import { promisify } from "util";
const writeFile = promisify(fs.writeFile);
const DEFAULT_URL = "md:?motherduck_token=ey...";
const DEFAULT_OUTPUT_FILENAME = "configuration.json";
const db = new duckdb.Database(DEFAULT_URL);
const DUCKDB_URL = process.env["DUCKDB_URL"] as string;
const db = new duckdb.Database(DUCKDB_URL);
const con = db.connect();

const determineType = (t: string): string => {
Expand Down Expand Up @@ -100,9 +99,6 @@ async function main() {
}
}
const res: Configuration = {
credentials: {
url: DEFAULT_URL
},
config: {
collection_names: tableNames,
collection_aliases: tableAliases,
Expand All @@ -111,7 +107,7 @@ async function main() {
procedures: []
}
};
await writeFile(DEFAULT_OUTPUT_FILENAME, JSON.stringify(res));
await writeFile(`/etc/connector/config.json`, JSON.stringify(res));
};

main();
Expand Down
2 changes: 0 additions & 2 deletions http_requests/CONFIGURATION_SCHEMA.http

This file was deleted.

2 changes: 0 additions & 2 deletions http_requests/DEFAULT_CONFIGURATION.http

This file was deleted.

8 changes: 0 additions & 8 deletions http_requests/REMOTE_CONFIGURATION.http

This file was deleted.

Loading

0 comments on commit 3150b0d

Please sign in to comment.