Build duckdb v0.10.1 #429
# Note there is a reference to this workflow in test/ducktables-integration/github.test. It queries
# the Workflows endpoint of the GitHub API and uses this Workflow to test the integration. If you
# rename, move, or otherwise refactor this file, that test will break *after* the change is merged to main.
name: Linux
on: [push]
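# The last group component evaluates to 'true' for non-main refs (so repeat pushes to a branch
# cancel any in-progress run) and to the commit sha on main (so each main commit runs to completion).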
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
  cancel-in-progress: true
defaults:
  run:
    shell: bash
jobs:
  linux-build:
    name: Linux Build
    runs-on: ubuntu-latest
    container: ${{ matrix.container }}
    strategy:
      fail-fast: false
      matrix:
        # Add commits/tags to build against other DuckDB versions
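        # The literal string '<submodule_version>' can also be listed here to build against whatever
        # commit the duckdb submodule is pinned to; see the version-export steps below.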
        duckdb_version: ['0.8.0', '0.8.1', '0.10.1'] # <submodule_version>
        python_version: ['3.8', '3.9', '3.10', '3.11']
        # arch: ['linux_amd64', 'linux_arm64', 'linux_amd64_gcc4']
        arch: ['linux_amd64']
        include:
          - arch: 'linux_amd64'
            container: 'ubuntu:20.04'
          # - arch: 'linux_amd64'
          #   container: 'ubuntu:16.04'
          # - arch: 'linux_arm64'
          #   container: 'ubuntu:18.04'
          # - arch: 'linux_amd64_gcc4'
          #   container: 'quay.io/pypa/manylinux2014_x86_64'
    env:
      GEN: ninja
      PYTHON_VERSION: ${{ matrix.python_version }}
    steps:
      - name: Install required ubuntu packages
        if: ${{ matrix.arch == 'linux_amd64' || matrix.arch == 'linux_arm64' }}
        run: |
          apt-get update -y -qq
          apt-get install -y -qq software-properties-common
          add-apt-repository ppa:deadsnakes/ppa
          apt-get update -y -qq
          apt-get install -y -qq ninja-build make gcc-multilib g++-multilib libssl-dev wget openjdk-8-jdk zip maven unixodbc-dev libc6-dev-i386 lib32readline6-dev libssl-dev libcurl4-gnutls-dev libexpat1-dev gettext unzip build-essential checkinstall libffi-dev curl libz-dev openssh-client git clang-format libasan6 ccache docker.io
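          # The checked-out workspace may be owned by a different UID than the container user, so
          # mark every directory as safe to avoid git's "dubious ownership" error in later steps.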
          git config --global --add safe.directory '*'
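      # Python comes from the deadsnakes PPA added in the previous step; get-pip.py then bootstraps
      # pip for the specific interpreter selected by the matrix.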
      - name: Setup Python ${{ matrix.python_version }}
        run: |
          apt-get install -y -qq python${{ matrix.python_version }}-dev python${PYTHON_VERSION}-distutils python${PYTHON_VERSION}-venv
          curl https://bootstrap.pypa.io/get-pip.py | python${PYTHON_VERSION};
          python$PYTHON_VERSION -m pip install --upgrade pip && \
          python$PYTHON_VERSION -m venv --help
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          submodules: 'true'
      # The 'matrix.duckdb_version' variable may contain a flag indicating "use whatever the
      # repo is pinned to". This may or may not point to a particular release. For some of our
      # steps we care about working with a released version, so we distinguish between "an arbitrary
      # revision" and a released version. For clarity:
      #   DUCKDB_REVISION: A git identifier such as a sha or a tag (e.g. 35f5a40, or v0.8.1)
      #   DUCKDB_VERSION: A release number (e.g. 0.8.0).
      - name: Extract + Export DuckDB Pinned Version if in Use
        if: ${{ '<submodule_version>' == matrix.duckdb_version }}
        run: |
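          # Use a tag pointing at HEAD if one exists; otherwise fall back to the short commit sha.
          # DUCKDB_VERSION is left empty because the pinned revision is not necessarily a released version.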
          export DUCKDB_VERSION=`git tag --points-at HEAD`
          export DUCKDB_VERSION=${DUCKDB_VERSION:=`git log -1 --format=%h`}
          echo "DUCKDB_REVISION=${DUCKDB_VERSION}" >> $GITHUB_ENV
          echo "DUCKDB_VERSION=" >> $GITHUB_ENV
      - name: Export DuckDB if Ignoring Pinned Version
        if: ${{ '<submodule_version>' != matrix.duckdb_version }}
        run: |
          echo "DUCKDB_VERSION=${{matrix.duckdb_version}}" >> $GITHUB_ENV
          echo "DUCKDB_REVISION=v${{matrix.duckdb_version}}" >> $GITHUB_ENV
          # Values written to $GITHUB_ENV only take effect in later steps, so echo the matrix value directly.
          echo "Matrix Provided DuckDB Version: ${{ matrix.duckdb_version }}"
      - name: Restore cached build artifacts
        id: cache-restore
        uses: actions/cache/restore@v3
        with:
          path: |
            build/
            ~/.ccache/
          key: duckdb-v${{ matrix.duckdb_version }}-${{ matrix.arch }}
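      # When the matrix asks for a specific release, move the duckdb submodule to that tag before building.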
      - name: Checkout DuckDB to Tagged Version
        if: ${{ matrix.duckdb_version != '<submodule_version>' }}
        run: |
          cd duckdb
          git checkout v${{ matrix.duckdb_version }}
      - if: ${{ matrix.arch == 'linux_amd64_gcc4' }}
        uses: ./.github/actions/centos_7_setup
        with:
          openssl: 0
      - if: ${{ matrix.arch == 'linux_amd64' || matrix.arch == 'linux_arm64' }}
        uses: ./.github/actions/ubuntu_16_setup
        with:
          aarch64_cross_compile: ${{ matrix.arch == 'linux_arm64' && 1 }}
      # Build extension
      - name: Build extension
        env:
          GEN: ninja
          STATIC_LIBCPP: 1
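          # For arm64 builds point CC/CXX at the aarch64 cross toolchain; other arches leave these empty.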
          CC: ${{ matrix.arch == 'linux_arm64' && 'aarch64-linux-gnu-gcc' || '' }}
          CXX: ${{ matrix.arch == 'linux_arm64' && 'aarch64-linux-gnu-g++' || '' }}
        run: |
          make release
          ccache -s
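      # Save under the exact key computed by the restore step so later runs for this DuckDB
      # version/arch combination can reuse the build tree and ccache output.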
      - name: Cache build artifacts
        id: cache-save
        uses: actions/cache/save@v3
        with:
          path: |
            build/
            ~/.ccache/
          key: ${{ steps.cache-restore.outputs.cache-primary-key }}
      - name: Code Format Check
        run: |
          make check-format
      - name: Build / Test Python Libraries
        run: |
          make python-ci
      # Runs the extension + DuckTables functions against the actual APIs they work with
      - name: Python Integration Tests
        if: ${{ '<submodule_version>' != matrix.duckdb_version }}
        run: |
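          # The Google service-account JSON is stored base64-encoded in the secret; decode it to a
          # file so it can be passed via GOOGLE_APPLICATION_CREDENTIALS.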
echo "${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON_B64 }}" | base64 --decode > gcloud-creds.json | |
make AWS_ACCESS_KEY_ID=${{ secrets.S3_DEPLOY_ID }} AWS_SECRET_ACCESS_KEY=${{ secrets.S3_DEPLOY_KEY }} AWS_DEFAULT_REGION=${{ secrets.S3_REGION }} OPENAI_ORG_ID="${{ secrets.OPENAI_ORG_ID }}" OPENAI_API_KEY="${{ secrets.OPENAI_API_KEY }}" GOOGLE_APPLICATION_CREDENTIALS=gcloud-creds.json GITHUB_ACCESS_TOKEN=${{ secrets.GITHUB_TOKEN }} python-test-integration | |
      # Runs the extension against the official DuckDB binary (versus the one built w/this project)
      - name: Extension Integration Tests
        if: ${{ '<submodule_version>' != matrix.duckdb_version }}
        run: |
          make GITHUB_ACCESS_TOKEN=${{ secrets.GITHUB_TOKEN }} DUCKDB_VERSION=${{ env.DUCKDB_VERSION }} extension-integration-tests
      # Can't test the installer against a version of DuckDB that we've never produced binaries for before.
      # todo: conditionally run this if the installer has changed
      # - name: Run Installer Script Tests
      #   if: ${{ '<submodule_version>' != matrix.duckdb_version }}
      #   run: |
      #     make GITHUB_ACCESS_TOKEN=${{ secrets.GITHUB_TOKEN }} PYTHON_VERSION=${{ matrix.python_version }} test-installer
      - uses: actions/upload-artifact@v2
        with:
          name: duckdb-${{ env.DUCKDB_REVISION }}-py${{ matrix.python_version }}-extension-${{ matrix.arch }}
          path: |
            build/release/extension/pytables/pytables.duckdb_extension
      - uses: actions/upload-artifact@v2
        with:
          # This is a source-only package so we are being a bit more descriptive than is really necessary,
          # but I'm not sure what will happen if the matrix build were to upload multiple artifacts w/the
          # same name.
          name: ducktables-py${{ matrix.python_version }}-${{ matrix.arch }}-duckdb-${{ env.DUCKDB_REVISION }}
          # todo: this will fail the next time we upgrade the python package version because it's
          # hardcoded below.
          path: |
            pythonpkgs/ducktables/dist/ducktables-0.1.1-py3-none-any.whl
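      # On main, publish a rolling "latest" prerelease containing the extension binary and the installer script.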
- uses: "marvinpinto/action-automatic-releases@latest" | |
if: github.ref == 'refs/heads/main' | |
with: | |
repo_token: "${{ secrets.GITHUB_TOKEN }}" | |
automatic_release_tag: "latest" | |
prerelease: true | |
title: "Automatic Build" | |
files: | | |
build/release/extension/pytables/pytables.duckdb_extension | |
installer/get-pytables.py | |
      - name: Deploy
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_DEPLOY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_DEPLOY_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.S3_REGION }}
          BUCKET_NAME: ${{ secrets.S3_BUCKET }}
          ARCH: ${{ matrix.arch }}
        run: |
          ./scripts/ci-release-artifacts.sh