# AWS Advanced ODBC Wrapper Integration Tests
# (feat: rw splitting plugin and simple rw splitting plugin (#107), PR #995)
name: Integration Tests
# head_commit is only populated on push events; fall back to the PR title or the
# ref name so workflow_dispatch / pull_request runs do not get a blank run-name.
run-name: AWS Advanced ODBC Wrapper Integration Tests - ${{ github.event.head_commit.message || github.event.pull_request.title || github.ref_name }}

on:
  workflow_dispatch:
  push:
    branches:
      - main
  pull_request:
    branches:
      - '**'
    # Skip doc/image-only changes for PR runs.
    paths-ignore:
      - '**/*.md'
      - '**/*.jpg'
      - '**/*.png'
      - '**/README.*'
      - '**/LICENSE.*'
      - 'docs/**'
      - 'ISSUE_TEMPLATE/**'

env:
  BUILD_CONFIG: Debug
  # Quoted so YAML never re-types version-like scalars.
  WIX_VERSION: '5.0.2'
  # Test configuration
  TEST_DATABASE: 'test_database'
  ENGINE_VERSION: 'latest'
  NUM_INSTANCES: '5'
  # Limitless
  TEST_LIMITLESS_DATABASE: 'postgres_limitless'
  ENGINE_LIMITLESS_VERSION: 'latest'
  TEST_DSN_ANSI: 'AWS-ODBC-ANSI'
  TEST_DSN_UNICODE: 'AWS-ODBC-UNICODE'
  DRIVER_NAME_ANSI: 'AWS Advanced ODBC Wrapper Ansi'
  DRIVER_NAME_UNICODE: 'AWS Advanced ODBC Wrapper Unicode'

# One in-flight run per ref; newer pushes cancel older runs.
concurrency:
  group: integration-test-${{ github.ref }}
  cancel-in-progress: true

permissions:
  id-token: write  # required for OIDC role assumption by aws-actions/configure-aws-credentials
  contents: read
jobs:
  # Build / Get Underlying Drivers
  build-win-psqlodbc:
    name: Windows - Build psqlODBC
    runs-on: windows-latest
    outputs:
      # Latest upstream release tag; downstream jobs reuse it as part of the cache key.
      psqlodbc-version: ${{ steps.psqlodbc-version.outputs.version }}
    steps:
      - name: Get psqlODBC latest release version
        id: psqlodbc-version
        # Default shell on windows-latest is pwsh, hence $env:GITHUB_OUTPUT.
        run: |
          $VERSION=$(curl -s https://api.github.com/repos/postgresql-interfaces/psqlodbc/releases | python -c "import sys,json; print(json.load(sys.stdin)[0]['tag_name'])")
          echo "version=$VERSION" >> $env:GITHUB_OUTPUT
          echo "psqlODBC version: $VERSION"
      - name: Retrieve psqlODBC Cache
        id: cache-psqlodbc
        uses: actions/cache@v5
        with:
          path: psqlodbc
          key: ${{ runner.os }}-psqlodbc-driver-${{ steps.psqlodbc-version.outputs.version }}
      - name: Download psqlODBC
        # cache-hit is '' on a miss, so compare against the string 'true'.
        if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
        run: |
          mkdir psqlodbc
          cd psqlodbc
          $DOWNLOAD_URL=$(gh api repos/postgresql-interfaces/psqlodbc/releases --jq '.[0].assets.[] | select(.name=="psqlodbc_x64.msi") | .browser_download_url')
          curl.exe -L ${DOWNLOAD_URL} --output psqlodbc_x64.msi
        env:
          GH_TOKEN: ${{ github.token }}
  build-macos-psqlodbc:
    name: MacOS - Build psqlODBC
    # CodeBuild-hosted macOS runner; the label embeds run id/attempt for uniqueness.
    runs-on:
      - codebuild-odbc-wrapper-macos-pg-${{ github.run_id }}-${{ github.run_attempt }}
    outputs:
      psqlodbc-version: ${{ steps.psqlodbc-version.outputs.version }}
    steps:
      - name: Get psqlODBC latest release version
        id: psqlodbc-version
        run: |
          VERSION=$(curl -s https://api.github.com/repos/postgresql-interfaces/psqlodbc/releases | python3 -c "import sys,json; print(json.load(sys.stdin)[0]['tag_name'])")
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "psqlODBC version: $VERSION"
      - name: Retrieve psqlODBC Cache
        id: cache-psqlodbc
        uses: actions/cache@v5
        with:
          # Only the built artifacts (.libs) are cached, not the whole checkout.
          path: psqlodbc/.libs/
          key: ${{ runner.os }}-psqlodbc-driver-${{ steps.psqlodbc-version.outputs.version }}
      - name: Checkout psqlODBC
        if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
        uses: actions/checkout@v6
        with:
          repository: postgresql-interfaces/psqlodbc
          path: psqlodbc
      - name: Build psqlodbc
        if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
        working-directory: psqlodbc
        run: |
          ./bootstrap
          ./configure
          make
build-linux-psqlodbc:
name: Linux Ubuntu - Build psqlODBC
runs-on: ubuntu-latest
outputs:
psqlodbc-version: ${{ steps.psqlodbc-version.outputs.version }}
steps:
- name: Get psqlODBC latest release version
id: psqlodbc-version
run: |
VERSION=$(curl -s https://api.github.com/repos/postgresql-interfaces/psqlodbc/releases | python3 -c "import sys,json; print(json.load(sys.stdin)[0]['tag_name'])")
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "psqlODBC version: $VERSION"
- name: Retrieve psqlODBC Cache
id: cache-psqlodbc
uses: actions/cache@v5
with:
path: psqlodbc/.libs/
key: ${{ runner.os }}-psqlodbc-driver-${{ steps.psqlodbc-version.outputs.version }}
- name: Checkout psqlODBC
if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
uses: actions/checkout@v6
with:
repository: postgresql-interfaces/psqlodbc
path: psqlodbc
- name: Install Dependencies
if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
run: |
sudo apt update
sudo apt-get install autoconf automake libtool postgresql libpq-dev
- name: Download & Build unixODBC
if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
run: |
curl -L https://www.unixodbc.org/unixODBC-2.3.12.tar.gz -o unixODBC.tar
tar xf unixODBC.tar
cd unixODBC-2.3.12
./configure && make
sudo make install
- name: Build psqlodbc
if: ${{steps.cache-psqlodbc.outputs.cache-hit != 'true'}}
working-directory: psqlodbc
run: |
./bootstrap
./configure
make
  build-win-mysql:
    name: Windows - Build MySQL Connector ODBC
    runs-on: windows-latest
    steps:
      - name: Retrieve mysql-connector Cache
        id: cache-mysql-connector
        uses: actions/cache@v5
        with:
          path: mysql-connector
          # Connector version is pinned (9.5.0); bump the key together with the URL below.
          key: ${{ runner.os }}-mysql-driver-9.5.0
      - name: Download mysql-connector
        if: ${{steps.cache-mysql-connector.outputs.cache-hit != 'true'}}
        run: |
          mkdir mysql-connector
          cd mysql-connector
          curl.exe -L "https://dev.mysql.com/get/Downloads/Connector-ODBC/9.5/mysql-connector-odbc-9.5.0-winx64.msi" --output mysql-connector_x64.msi
  build-macos-mysql:
    name: MacOS - Build MySQL Connector ODBC
    # CodeBuild-hosted macOS runner label (unique per run/attempt).
    runs-on:
      - codebuild-odbc-wrapper-macos-mysql-${{ github.run_id }}-${{ github.run_attempt }}
    steps:
      - name: Retrieve mysql-connector Cache
        id: cache-mysql-connector
        uses: actions/cache@v5
        with:
          path: mysql-connector/
          key: ${{ runner.os }}-mysql-driver-9.5.0
      - name: Download mysql-connector
        if: ${{steps.cache-mysql-connector.outputs.cache-hit != 'true'}}
        run: |
          mkdir mysql-connector
          curl -L "https://dev.mysql.com/get/Downloads/Connector-ODBC/9.5/mysql-connector-odbc-9.5.0-macos15-arm64.tar.gz" --output mysql-connector_arm64.tar.gz
          # --strip-components 1 drops the versioned top-level directory from the tarball.
          tar xvf mysql-connector_arm64.tar.gz --strip-components 1 -C mysql-connector
  build-linux-mysql:
    name: Linux Ubuntu - Build MySQL Connector ODBC
    runs-on: ubuntu-latest
    steps:
      - name: Retrieve mysql-connector Cache
        id: cache-mysql-connector
        uses: actions/cache@v5
        with:
          path: mysql-connector/
          key: ${{ runner.os }}-mysql-driver-9.5.0
      - name: Download mysql-connector
        if: ${{steps.cache-mysql-connector.outputs.cache-hit != 'true'}}
        run: |
          mkdir mysql-connector
          cd mysql-connector
          # .deb is installed later by the linux-integration-tests job.
          curl \
            -L "https://dev.mysql.com/get/Downloads/Connector-ODBC/9.5/mysql-connector-odbc_9.5.0-1ubuntu24.04_amd64.deb" \
            --output mysql-connector_x64.deb
# Integration - General
windows-integration-tests:
name: Windows - Integration Tests
needs: [build-win-psqlodbc, build-win-mysql]
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
rds_engine: [aurora-postgresql, aurora-mysql]
env:
CUSTOM_ENDPOINT_ID: ODBC-Win-${{ matrix.rds_engine }}-custom-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
steps:
- name: Setup Cluster ID
run: |
echo "AURORA_CLUSTER_ID=ODBC-Win-${{matrix.rds_engine}}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" | Out-File -FilePath $env:GITHUB_ENV -Append
- name: Checkout aws-advanced-odbc-wrapper
uses: actions/checkout@v6
- name: Retrieve AWS SDK for C++ Cache
id: cache-aws-sdk
uses: actions/cache@v5
with:
path: aws_sdk/install
key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
- name: Build AWS SDK for C++
if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
run: |
./scripts/compile_aws_sdk_win.ps1 ${{env.BUILD_CONFIG}}
- name: Setup Dotnet for WiX
uses: actions/setup-dotnet@v5
- name: Install WiX
shell: cmd
run: |
dotnet tool install --global wix --version ${{env.WIX_VERSION}}
wix extension add --global WixToolset.UI.wixext/${{env.WIX_VERSION}}
- name: Run build installer script
shell: pwsh
run: |
./installer/build_installer.ps1 ${{env.BUILD_CONFIG}}
- name: Install driver
shell: pwsh
working-directory: installer
run: Start-Process msiexec "/lp! .\test.log /i aws-advanced-odbc-wrapper.msi /quiet /norestart" -Wait;
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v6
with:
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
role-session-name: odbc-wrapper-win-inte
- name: Setup Python virtual environment
shell: bash
run: |
python -m venv .venv
source .venv/Scripts/activate
pip install -r scripts/requirements.txt
- name: Create Aurora Resources
id: AuroraClusterSetup
shell: bash
run: |
source .venv/Scripts/activate
python scripts/db_resources.py create \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--engine ${{ matrix.rds_engine }} \
--engine-version ${{ env.ENGINE_VERSION }} \
--database ${{ env.TEST_DATABASE }} \
--username ${{ secrets.TEST_USERNAME }} \
--password ${{ secrets.TEST_PASSWORD }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--iam-user ${{ secrets.TEST_IAM_USER }} \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
if [ "${{ matrix.rds_engine }}" == "aurora-postgresql" ]; then
echo "cache_path=psqlodbc" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-win-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
elif [ "${{ matrix.rds_engine }}" == "aurora-mysql" ]; then
echo "cache_path=mysql-connector" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
echo "driver_path_ansi=C:\\Program Files\\MySQL\\MySQL Connector ODBC 9.5\\myodbc9a.dll" >> $GITHUB_OUTPUT
echo "driver_path_unicode=C:\\Program Files\\MySQL\\MySQL Connector ODBC 9.5\\myodbc9w.dll" >> $GITHUB_OUTPUT
echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
fi
- name: Retrieve Base Driver
uses: actions/cache@v5
with:
path: ${{steps.base_driver_info.outputs.cache_path}}
key: ${{steps.base_driver_info.outputs.cache_key}}
fail-on-cache-miss: true
- name: Log base driver location
shell: pwsh
run: |
Write-Host "Base driver path: $((Resolve-Path ${{steps.base_driver_info.outputs.cache_path}}).Path)"
Get-ChildItem ${{steps.base_driver_info.outputs.cache_path}}
- name: Install PostgreSQL ODBC Driver
if: (matrix.rds_engine == 'aurora-postgresql')
id: pg_driver_paths
shell: pwsh
working-directory: ${{steps.base_driver_info.outputs.cache_path}}
run: |
Start-Process msiexec "/i psqlodbc_x64.msi /quiet /norestart" -Wait;
$ansi = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc30a.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
$unicode = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc35w.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
if (-not $ansi -or -not $unicode) {
Write-Host "psqlODBC DLLs not found under C:\Program Files\psqlODBC"
Get-ChildItem "C:\Program Files\psqlODBC" -Recurse | Format-Table FullName
exit 1
}
Write-Host "Ansi driver: $($ansi.FullName)"
Write-Host "Unicode driver: $($unicode.FullName)"
echo "driver_path_ansi=$($ansi.FullName)" >> $env:GITHUB_OUTPUT
echo "driver_path_unicode=$($unicode.FullName)" >> $env:GITHUB_OUTPUT
- name: Setup PostgreSQL
if: (matrix.rds_engine == 'aurora-postgresql')
shell: bash
run: |
echo "$PGBIN" >> $GITHUB_PATH
- name: Install MySQL
if: (matrix.rds_engine == 'aurora-mysql')
shell: pwsh
working-directory: ${{steps.base_driver_info.outputs.cache_path}}
run: |
Start-Process msiexec "/i mysql-connector_x64.msi /quiet /norestart" -Wait;
- name: Resolve Driver Paths
id: resolved_drivers
shell: pwsh
run: |
if ("${{ matrix.rds_engine }}" -eq "aurora-postgresql") {
echo "driver_path_ansi=${{ steps.pg_driver_paths.outputs.driver_path_ansi }}" >> $env:GITHUB_OUTPUT
echo "driver_path_unicode=${{ steps.pg_driver_paths.outputs.driver_path_unicode }}" >> $env:GITHUB_OUTPUT
} else {
echo "driver_path_ansi=${{ steps.base_driver_info.outputs.driver_path_ansi }}" >> $env:GITHUB_OUTPUT
echo "driver_path_unicode=${{ steps.base_driver_info.outputs.driver_path_unicode }}" >> $env:GITHUB_OUTPUT
}
- name: Install Ansi DSNs
shell: pwsh
run: |
Add-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} `
-DriverName "${{ env.DRIVER_NAME_ANSI }}" `
-DsnType User `
-SetPropertyValue `
@("RDS_AUTH_TYPE=database", `
"Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
"Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
"SSLMode=prefer", `
"BASE_DRIVER=${{steps.resolved_drivers.outputs.driver_path_ansi}}")
- name: Install Unicode DSNs
shell: pwsh
run: |
Add-OdbcDsn -Name ${{ env.TEST_DSN_UNICODE }} `
-DriverName "${{ env.DRIVER_NAME_UNICODE }}" `
-DsnType User `
-SetPropertyValue `
@("RDS_AUTH_TYPE=database", `
"Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
"Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
"SSLMode=prefer", `
"BASE_DRIVER=${{steps.resolved_drivers.outputs.driver_path_unicode}}")
- name: Verify DSN Configuration
shell: pwsh
run: |
Write-Host "Registered DSNs:"
Get-OdbcDsn | Format-Table Name, DriverName, DsnType, Platform
Write-Host "Ansi DSN properties:"
Get-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} | Select-Object -ExpandProperty PropertyValue
- name: Setup GDB
run: |
choco install mingw
echo "set auto-load safe-path /" > "$HOME/.gdbinit"
- name: Build and Run Ansi Integration Tests
shell: pwsh
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_ansi -D CMAKE_BUILD_TYPE="${{env.BUILD_CONFIG}}" `
-D BUILD_UNICODE=OFF -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF
cmake --build build_ansi --config ${{env.BUILD_CONFIG}}
echo "Ansi Test Built"
gdb -q -x ./.gdbinit .\build_ansi\${{env.BUILD_CONFIG}}\integration-test.exe
env:
TEST_DSN: ${{ env.TEST_DSN_ANSI }}
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: ${{steps.base_driver_info.outputs.dialect_default_port}}
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Build and Run Unicode Integration Tests
shell: pwsh
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_unicode -D CMAKE_BUILD_TYPE="${{env.BUILD_CONFIG}}" `
-D BUILD_UNICODE=ON -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF
cmake --build build_unicode --config ${{env.BUILD_CONFIG}}
echo "Unicode Test Built"
gdb -q -x ./.gdbinit .\build_unicode\${{env.BUILD_CONFIG}}\integration-test.exe
env:
TEST_DSN: ${{ env.TEST_DSN_UNICODE }}
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: ${{steps.base_driver_info.outputs.dialect_default_port}}
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Destroy Aurora Resources
if: always()
shell: bash
run: |
source .venv/Scripts/activate
python scripts/db_resources.py destroy \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }}
# Make sure IP is always deleted
- name: Get Github Action IP
if: always()
id: ip
uses: haythem/public-ip@v1.3
- name: Remove Github Action IP
if: always()
run: |
aws ec2 revoke-security-group-ingress `
--group-name default `
--protocol tcp `
--port 0-65535 `
--cidr ${{ steps.ip.outputs.ipv4 }}/32 `
*> $null;
- name: Get log location
if: always()
id: log_location
shell: pwsh
run: |
echo "TEMP=$env:TEMP" >> $env:GITHUB_OUTPUT
- name: Archive log results
if: always()
uses: actions/upload-artifact@v6
with:
name: 'windows-${{matrix.rds_engine}}-integration-test-logs'
path: ${{ steps.log_location.outputs.TEMP }}/aws-odbc-wrapper/
retention-days: 7
macos-integration-tests:
name: MacOS - Integration Tests
needs: [build-macos-psqlodbc, build-macos-mysql]
runs-on:
- ${{ matrix.env.project }}-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.env.fleet }}
buildspec-override:true
strategy:
fail-fast: false
matrix:
env: [{rds_engine: aurora-postgresql, project: codebuild-odbc-wrapper-macos-pg, fleet: fleet-macos-fleet-3}, {rds_engine: aurora-mysql, project: codebuild-odbc-wrapper-macos-mysql, fleet: fleet-macos-fleet-2}]
env:
CUSTOM_ENDPOINT_ID: ODBC-MacOS-${{ matrix.env.rds_engine }}-custom-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
steps:
- name: Setup Cluster ID
run: |
echo "AURORA_CLUSTER_ID=ODBC-MacOS-${{matrix.env.rds_engine}}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" >> $GITHUB_ENV
- name: Checkout aws-advanced-odbc-wrapper
uses: actions/checkout@v6
- name: Retrieve AWS SDK for C++ Cache
id: cache-aws-sdk
uses: actions/cache@v5
with:
path: aws_sdk/install
key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
- name: Build AWS SDK for C++
if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
run: |
export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
- name: Build aws-advanced-odbc-wrapper
run: |
cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
cmake --build build
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v6
with:
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
role-session-name: odbc-wrapper-macos-inte
- name: Setup Python virtual environment
shell: bash
run: |
python3 -m venv .venv
source .venv/bin/activate
pip install -r scripts/requirements.txt
- name: Create Aurora Resources
id: AuroraClusterSetup
shell: bash
run: |
source .venv/bin/activate
EXTRA_ARGS=""
if [ "${{ matrix.env.rds_engine }}" == "aurora-mysql" ]; then
EXTRA_ARGS="--extra-mysql-user ${{ secrets.TEST_USERNAME }}_mysql --extra-mysql-password ${{ secrets.TEST_PASSWORD }}"
fi
python scripts/db_resources.py create \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--engine ${{ matrix.env.rds_engine }} \
--engine-version ${{ env.ENGINE_VERSION }} \
--database ${{ env.TEST_DATABASE }} \
--username ${{ secrets.TEST_USERNAME }} \
--password ${{ secrets.TEST_PASSWORD }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--iam-user ${{ secrets.TEST_IAM_USER }} \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }} \
$EXTRA_ARGS
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
if [ "${{ matrix.env.rds_engine }}" == "aurora-postgresql" ]; then
echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-macos-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
elif [ "${{ matrix.env.rds_engine }}" == "aurora-mysql" ]; then
echo "cache_path=mysql-connector/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/mysql-connector/lib/libmyodbc9a.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/mysql-connector/lib/libmyodbc9w.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
fi
- name: Retrieve Base Driver
uses: actions/cache@v5
with:
path: ${{steps.base_driver_info.outputs.cache_path}}
key: ${{steps.base_driver_info.outputs.cache_key}}
fail-on-cache-miss: true
- name: Log base driver location
run: |
echo "Base driver path: $(cd ${{steps.base_driver_info.outputs.cache_path}} && pwd)"
ls ${{steps.base_driver_info.outputs.cache_path}}
echo "--- ODBC DSN config ---"
odbcinst -j 2>/dev/null || true
cat $ODBCINST 2>/dev/null || cat /etc/odbcinst.ini 2>/dev/null || true
- name: Build and Run Ansi Integration Tests
shell: bash
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_ansi \
-D BUILD_UNICODE=OFF -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF \
-D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
-D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.dylib" \
-D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
-D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
-D TEST_DATABASE="${{ env.TEST_DATABASE }}"
cmake --build build_ansi
echo "Ansi Test Built"
./build_ansi/integration-test
env:
TEST_DSN: "inte-wrapper-dsn"
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Build and Run Unicode Integration Tests
shell: bash
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_unicode \
-D BUILD_UNICODE=ON -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF \
-D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
-D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-w.dylib" \
-D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_unicode}}" \
-D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
-D TEST_DATABASE="${{ env.TEST_DATABASE }}"
cmake --build build_unicode
echo "Unicode Test Built"
./build_unicode/integration-test
env:
TEST_DSN: "inte-wrapper-dsn"
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Get log location
if: always()
id: log_location
run: echo "TEMP=${TMPDIR:-/tmp}" >> $GITHUB_OUTPUT
- name: Destroy Aurora Resources
if: always()
shell: bash
run: |
source .venv/bin/activate
python scripts/db_resources.py destroy \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Archive log results
if: always()
uses: actions/upload-artifact@v6
with:
name: 'mac-${{matrix.env.rds_engine}}-integration-test-logs'
path: ${{steps.log_location.outputs.TEMP}}/aws-odbc-wrapper/
retention-days: 7
linux-integration-tests:
name: Linux Ubuntu - Integration Tests
needs: [build-linux-psqlodbc, build-linux-mysql]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
rds_engine: [aurora-postgresql, aurora-mysql]
env:
CUSTOM_ENDPOINT_ID: ODBC-Linux-${{ matrix.rds_engine }}-custom-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
steps:
- name: Setup Cluster ID
run: |
echo "AURORA_CLUSTER_ID=ODBC-Linux-${{ matrix.rds_engine }}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" >> $GITHUB_ENV
- name: Checkout aws-advanced-odbc-wrapper
uses: actions/checkout@v6
- name: Install Build Dependencies
run: |
sudo apt update
sudo apt-get install cmake libcurl4-openssl-dev libssl-dev odbcinst unixodbc-dev uuid-dev zlib1g-dev gdb
- name: Retrieve AWS SDK for C++ Cache
id: cache-aws-sdk
uses: actions/cache@v5
with:
path: aws_sdk/install
key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
- name: Build AWS SDK for C++
if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
run: |
./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
- name: Build aws-advanced-odbc-wrapper
run: |
cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
cmake --build build
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v6
with:
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
role-session-name: odbc-wrapper-linux-inte
- name: Setup Python virtual environment
shell: bash
run: |
python3 -m venv .venv
source .venv/bin/activate
pip install -r scripts/requirements.txt
- name: Create Aurora Resources
id: AuroraClusterSetup
shell: bash
run: |
source .venv/bin/activate
python scripts/db_resources.py create \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--engine ${{ matrix.rds_engine }} \
--engine-version ${{ env.ENGINE_VERSION }} \
--database ${{ env.TEST_DATABASE }} \
--username ${{ secrets.TEST_USERNAME }} \
--password ${{ secrets.TEST_PASSWORD }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--iam-user ${{ secrets.TEST_IAM_USER }} \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
if [ "${{ matrix.rds_engine }}" == "aurora-postgresql" ]; then
echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-linux-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
elif [ "${{ matrix.rds_engine }}" == "aurora-mysql" ]; then
echo "cache_path=mysql-connector/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
echo "driver_path_ansi='/usr/lib/x86_64-linux-gnu/odbc/libmyodbc9a.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='/usr/lib/x86_64-linux-gnu/odbc/libmyodbc9w.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
fi
- name: Retrieve Base Driver
uses: actions/cache@v5
with:
path: ${{steps.base_driver_info.outputs.cache_path}}
key: ${{steps.base_driver_info.outputs.cache_key}}
fail-on-cache-miss: true
- name: Log base driver location
run: |
echo "Base driver path: $(cd ${{steps.base_driver_info.outputs.cache_path}} && pwd)"
ls ${{steps.base_driver_info.outputs.cache_path}}
echo "--- ODBC DSN config ---"
odbcinst -j 2>/dev/null || true
cat $ODBCINST 2>/dev/null || cat /etc/odbcinst.ini 2>/dev/null || true
- name: Install MySQL
if: (matrix.rds_engine == 'aurora-mysql')
shell: bash
run: |
sudo apt install ./mysql-connector/mysql-connector_x64.deb -y
- name: Setup GDB
run: |
echo "set auto-load safe-path $PWD" > ~/.gdbinit
- name: Build and Run Ansi Integration Tests
shell: bash
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_ansi \
-D BUILD_UNICODE=OFF -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF \
-D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
-D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.so" \
-D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
-D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
-D TEST_DATABASE="${{ env.TEST_DATABASE }}"
cmake --build build_ansi
echo "Ansi Test Built"
ulimit -c unlimited
gdb -q -x ./.gdbinit ./build_ansi/integration-test
env:
TEST_DSN: "inte-wrapper-dsn"
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Build and Run Unicode Integration Tests
shell: bash
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_unicode \
-D BUILD_UNICODE=ON -D BUILD_FAILOVER=ON -D BUILD_LIMITLESS=OFF \
-D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
-D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-w.so" \
-D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_unicode}}" \
-D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
-D TEST_DATABASE="${{ env.TEST_DATABASE }}"
cmake --build build_unicode
echo "Unicode Test Built"
ulimit -c unlimited
gdb -q -x ./.gdbinit ./build_unicode/integration-test
env:
TEST_DSN: "inte-wrapper-dsn"
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
TEST_CUSTOM_ENDPOINT_ID: ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Destroy Aurora Resources
if: always()
shell: bash
run: |
source .venv/bin/activate
python scripts/db_resources.py destroy \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances ${{ env.NUM_INSTANCES }} \
--secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
--custom-endpoint-id ${{ env.CUSTOM_ENDPOINT_ID }}
- name: Archive log results
if: always()
uses: actions/upload-artifact@v6
with:
name: 'linux-${{matrix.rds_engine}}-integration-test-logs'
path: /tmp/aws-odbc-wrapper/
retention-days: 7
# Limitless
windows-limitless-integration-tests:
name: Windows - Limitless Integration Tests
needs: [build-win-psqlodbc]
runs-on: windows-latest
env:
LIMITLESS_CLUSTER_ID: ODBC-Limitless-Win-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
LIMITLESS_SHARD_ID: ODBC-Limitless-Win-shard-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
steps:
- name: Setup Cluster ID
run: |
echo "AURORA_CLUSTER_ID=${{ env.LIMITLESS_CLUSTER_ID }}" | Out-File -FilePath $env:GITHUB_ENV -Append
- name: Checkout aws-advanced-odbc-wrapper
uses: actions/checkout@v6
- name: Retrieve AWS SDK for C++ Cache
id: cache-aws-sdk
uses: actions/cache@v5
with:
path: aws_sdk/install
key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
- name: Build AWS SDK for C++
if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
run: |
./scripts/compile_aws_sdk_win.ps1 ${{env.BUILD_CONFIG}}
- name: Setup Dotnet for WiX
uses: actions/setup-dotnet@v5
- name: Install WiX
shell: cmd
run: |
dotnet tool install --global wix --version ${{env.WIX_VERSION}}
wix extension add --global WixToolset.UI.wixext/${{env.WIX_VERSION}}
- name: Run build installer script
shell: pwsh
run: |
./installer/build_installer.ps1 ${{env.BUILD_CONFIG}}
- name: Install driver
shell: pwsh
working-directory: installer
run: Start-Process msiexec "/lp! .\test.log /i aws-advanced-odbc-wrapper.msi /quiet /norestart" -Wait;
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v6
with:
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
role-session-name: odbc-wrapper-win-limitless
- name: Setup Python virtual environment
shell: bash
run: |
python -m venv .venv
source .venv/Scripts/activate
pip install -r scripts/requirements.txt
- name: Create Aurora Limitless Resources
id: AuroraClusterSetup
shell: bash
run: |
source .venv/Scripts/activate
python scripts/db_resources.py create \
--cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
--engine aurora-postgresql \
--engine-version ${{ env.ENGINE_LIMITLESS_VERSION }} \
--database ${{ env.TEST_LIMITLESS_DATABASE }} \
--username ${{ secrets.TEST_USERNAME }} \
--password ${{ secrets.TEST_PASSWORD }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances 1 \
--iam-user ${{ secrets.TEST_IAM_USER }} \
--limitless \
--shard-id ${{ env.LIMITLESS_SHARD_ID }} \
--monitoring-role-arn ${{ secrets.AWS_RDS_MONITORING_ROLE_ARN }}
      # Publish psqlODBC cache coordinates and dialect constants as step
      # outputs so every later step references a single source of truth.
      - name: Setup Base Driver Variables
        id: base_driver_info
        shell: bash
        run: |
          echo "cache_path=psqlodbc" >> $GITHUB_OUTPUT
          echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-win-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
          echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
          echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
      # Restore the psqlODBC installer produced by build-win-psqlodbc; the
      # tests cannot run without it, so a cache miss fails the job outright.
      - name: Retrieve Base Driver
        uses: actions/cache@v5
        with:
          path: ${{steps.base_driver_info.outputs.cache_path}}
          key: ${{steps.base_driver_info.outputs.cache_key}}
          fail-on-cache-miss: true
      # Diagnostic only: show where the cached installer landed.
      - name: Log base driver location
        shell: pwsh
        run: |
          Write-Host "Base driver path: $((Resolve-Path ${{steps.base_driver_info.outputs.cache_path}}).Path)"
          Get-ChildItem ${{steps.base_driver_info.outputs.cache_path}}
      # Silently install psqlODBC, then locate the Ansi (podbc30a.dll) and
      # Unicode (podbc35w.dll) DLLs and export their full paths as outputs
      # for the DSN registration steps below. Fails with a directory listing
      # if either DLL is missing.
      - name: Install PostgreSQL ODBC Driver
        id: pg_driver_paths
        shell: pwsh
        working-directory: ${{steps.base_driver_info.outputs.cache_path}}
        run: |
          Start-Process msiexec "/i psqlodbc_x64.msi /quiet /norestart" -Wait;
          $ansi = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc30a.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
          $unicode = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc35w.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
          if (-not $ansi -or -not $unicode) {
            Write-Host "psqlODBC DLLs not found under C:\Program Files\psqlODBC"
            Get-ChildItem "C:\Program Files\psqlODBC" -Recurse | Format-Table FullName
            exit 1
          }
          Write-Host "Ansi driver: $($ansi.FullName)"
          Write-Host "Unicode driver: $($unicode.FullName)"
          echo "driver_path_ansi=$($ansi.FullName)" >> $env:GITHUB_OUTPUT
          echo "driver_path_unicode=$($unicode.FullName)" >> $env:GITHUB_OUTPUT
      # Put the runner's preinstalled PostgreSQL client tools ($PGBIN) on PATH.
      - name: Setup PostgreSQL
        shell: bash
        run: |
          echo "$PGBIN" >> $GITHUB_PATH
      # Register a user DSN that points the wrapper driver at the Aurora
      # endpoint and the installed psqlODBC Ansi DLL. AURORA_CLUSTER_ENDPOINT
      # is presumably written to GITHUB_ENV by the cluster-setup step — verify.
      - name: Install Ansi DSNs
        shell: pwsh
        run: |
          Add-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} `
            -DriverName "${{ env.DRIVER_NAME_ANSI }}" `
            -DsnType User `
            -SetPropertyValue `
            @("RDS_AUTH_TYPE=database", `
            "Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
            "Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
            "SSLMode=prefer", `
            "BASE_DRIVER=${{steps.pg_driver_paths.outputs.driver_path_ansi}}")
      # Same registration for the Unicode variant of the wrapper/base driver.
      - name: Install Unicode DSNs
        shell: pwsh
        run: |
          Add-OdbcDsn -Name ${{ env.TEST_DSN_UNICODE }} `
            -DriverName "${{ env.DRIVER_NAME_UNICODE }}" `
            -DsnType User `
            -SetPropertyValue `
            @("RDS_AUTH_TYPE=database", `
            "Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
            "Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
            "SSLMode=prefer", `
            "BASE_DRIVER=${{steps.pg_driver_paths.outputs.driver_path_unicode}}")
      # Diagnostic only: dump registered DSNs and the Ansi DSN's properties.
      - name: Verify DSN Configuration
        shell: pwsh
        run: |
          Write-Host "Registered DSNs:"
          Get-OdbcDsn | Format-Table Name, DriverName, DsnType, Platform
          Write-Host "Ansi DSN properties:"
          Get-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} | Select-Object -ExpandProperty PropertyValue
- name: Setup GDB
run: |
choco install mingw
echo "set auto-load safe-path /" > "$HOME/.gdbinit"
      # Configure and build only the limitless test suite (Ansi), then run the
      # binary under gdb for backtraces on crash. Skipped if cluster creation
      # failed. NOTE(review): assumes ./.gdbinit makes gdb run and exit
      # non-interactively — confirm its contents.
      - name: Build and Run Limitless Ansi Integration Tests
        shell: pwsh
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_ansi -D CMAKE_BUILD_TYPE="${{env.BUILD_CONFIG}}" `
            -D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON
          cmake --build build_limitless_ansi --config ${{env.BUILD_CONFIG}}
          echo "Limitless Ansi Test Built"
          gdb -q -x ./.gdbinit .\build_limitless_ansi\${{env.BUILD_CONFIG}}\integration-test.exe
        # Connection parameters consumed by the test binary. AURORA_CLUSTER_*
        # values are presumably exported to GITHUB_ENV during cluster setup.
        env:
          TEST_DSN: ${{ env.TEST_DSN_ANSI }}
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: ${{steps.base_driver_info.outputs.dialect_default_port}}
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
      # Same build/run as above, but with the Unicode driver variant.
      - name: Build and Run Limitless Unicode Integration Tests
        shell: pwsh
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_unicode -D CMAKE_BUILD_TYPE="${{env.BUILD_CONFIG}}" `
            -D BUILD_UNICODE=ON -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON
          cmake --build build_limitless_unicode --config ${{env.BUILD_CONFIG}}
          echo "Limitless Unicode Test Built"
          gdb -q -x ./.gdbinit .\build_limitless_unicode\${{env.BUILD_CONFIG}}\integration-test.exe
        env:
          TEST_DSN: ${{ env.TEST_DSN_UNICODE }}
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: ${{steps.base_driver_info.outputs.dialect_default_port}}
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
      # Tear down the Aurora limitless resources unconditionally so failed
      # runs do not leak billable clusters.
      - name: Destroy Aurora Resources
        if: always()
        shell: bash
        run: |
          source .venv/Scripts/activate
          python scripts/db_resources.py destroy \
            --cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
            --limitless \
            --shard-id ${{ env.LIMITLESS_SHARD_ID }}
      # Make sure IP is always deleted
      - name: Get Github Action IP
        if: always()
        id: ip
        uses: haythem/public-ip@v1.3
      # Revoke the runner's temporary security-group ingress rule; all output
      # streams are discarded (*> $null).
      - name: Remove Github Action IP
        if: always()
        run: |
          aws ec2 revoke-security-group-ingress `
            --group-name default `
            --protocol tcp `
            --port 0-65535 `
            --cidr ${{ steps.ip.outputs.ipv4 }}/32 `
            *> $null;
      # Resolve %TEMP% so the artifact upload below can find the driver logs.
      - name: Get log location
        if: always()
        id: log_location
        shell: pwsh
        run: |
          echo "TEMP=$env:TEMP" >> $env:GITHUB_OUTPUT
      - name: Archive log results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: 'windows-limitless-test-logs'
          path: ${{ steps.log_location.outputs.TEMP }}/aws-odbc-wrapper/
          retention-days: 7
macos-limitless-integration-tests:
name: MacOS - Limitless Integration Tests
needs: [build-macos-psqlodbc]
runs-on:
- codebuild-odbc-wrapper-macos-limitless-${{ github.run_id }}-${{ github.run_attempt }}-fleet-macos-fleet-1
buildspec-override:true
env:
LIMITLESS_CLUSTER_ID: ODBC-Limitless-MacOS-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
LIMITLESS_SHARD_ID: ODBC-Limitless-MacOS-shard-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
    steps:
      # Expose the limitless cluster id under the generic name other tooling reads.
      - name: Setup Cluster ID
        run: |
          echo "AURORA_CLUSTER_ID=${{ env.LIMITLESS_CLUSTER_ID }}" >> $GITHUB_ENV
      - name: Checkout aws-advanced-odbc-wrapper
        uses: actions/checkout@v6
      # Cache the AWS SDK for C++ install tree; building it from source is slow.
      - name: Retrieve AWS SDK for C++ Cache
        id: cache-aws-sdk
        uses: actions/cache@v5
        with:
          path: aws_sdk/install
          key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
      # Only build the SDK on a cache miss; SDKROOT pins the macOS SDK for clang.
      - name: Build AWS SDK for C++
        if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
        run: |
          export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
          ./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
      # Build both Ansi and Unicode wrapper drivers; unit tests excluded.
      - name: Build aws-advanced-odbc-wrapper
        run: |
          cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
          cmake --build build
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v6
        with:
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
          role-session-name: odbc-wrapper-macos-limitless
      - name: Setup Python virtual environment
        shell: bash
        run: |
          python3 -m venv .venv
          source .venv/bin/activate
          pip install -r scripts/requirements.txt
      # Provision the Aurora PostgreSQL limitless cluster used by this job;
      # later steps gate on this step's outcome.
      - name: Create Aurora Limitless Resources
        id: AuroraClusterSetup
        shell: bash
        run: |
          source .venv/bin/activate
          python scripts/db_resources.py create \
            --cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
            --engine aurora-postgresql \
            --engine-version ${{ env.ENGINE_LIMITLESS_VERSION }} \
            --database ${{ env.TEST_LIMITLESS_DATABASE }} \
            --username ${{ secrets.TEST_USERNAME }} \
            --password ${{ secrets.TEST_PASSWORD }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --iam-user ${{ secrets.TEST_IAM_USER }} \
            --limitless \
            --shard-id ${{ env.LIMITLESS_SHARD_ID }} \
            --monitoring-role-arn ${{ secrets.AWS_RDS_MONITORING_ROLE_ARN }}
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-macos-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
      # Restore the psqlODBC .so files built by build-macos-psqlodbc; a cache
      # miss fails the job since the tests need the base driver.
      - name: Retrieve Base Driver
        uses: actions/cache@v5
        with:
          path: ${{steps.base_driver_info.outputs.cache_path}}
          key: ${{steps.base_driver_info.outputs.cache_key}}
          fail-on-cache-miss: true
      # Diagnostic only: show driver files and the unixODBC configuration.
      - name: Log base driver location
        run: |
          echo "Base driver path: $(cd ${{steps.base_driver_info.outputs.cache_path}} && pwd)"
          ls ${{steps.base_driver_info.outputs.cache_path}}
          echo "--- ODBC DSN config ---"
          odbcinst -j 2>/dev/null || true
          cat $ODBCINST 2>/dev/null || cat /etc/odbcinst.ini 2>/dev/null || true
      # Build the limitless suite (Ansi) against the built wrapper dylib and
      # the cached psqlODBC base driver, then run it directly (no gdb on macOS).
      # Skipped if cluster creation failed.
      - name: Build and Run Limitless Ansi Integration Tests
        shell: bash
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_ansi \
            -D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON \
            -D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
            -D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.dylib" \
            -D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
            -D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
            -D TEST_DATABASE="${{ env.TEST_LIMITLESS_DATABASE }}"
          cmake --build build_limitless_ansi
          echo "Limitless Ansi Test Built"
          ./build_limitless_ansi/integration-test
        # ODBCINI/ODBCINST point unixODBC at repo-local config instead of
        # system files. AURORA_CLUSTER_* presumably set via GITHUB_ENV during
        # cluster creation — verify.
        env:
          TEST_DSN: "inte-wrapper-dsn"
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
          ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
          ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
      # Same build/run for the Unicode wrapper/base-driver variant.
      - name: Build and Run Limitless Unicode Integration Tests
        shell: bash
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_unicode \
            -D BUILD_UNICODE=ON -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON \
            -D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
            -D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-w.dylib" \
            -D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_unicode}}" \
            -D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
            -D TEST_DATABASE="${{ env.TEST_LIMITLESS_DATABASE }}"
          cmake --build build_limitless_unicode
          echo "Limitless Unicode Test Built"
          ./build_limitless_unicode/integration-test
        env:
          TEST_DSN: "inte-wrapper-dsn"
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
          ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
          ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
      # Resolve the macOS temp dir (falls back to /tmp) for the log upload.
      - name: Get log location
        if: always()
        id: log_location
        run: echo "TEMP=${TMPDIR:-/tmp}" >> $GITHUB_OUTPUT
      # Tear down the Aurora resources unconditionally to avoid leaks.
      - name: Destroy Aurora Resources
        if: always()
        shell: bash
        run: |
          source .venv/bin/activate
          python scripts/db_resources.py destroy \
            --cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
            --limitless \
            --shard-id ${{ env.LIMITLESS_SHARD_ID }}
      - name: Archive log results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: 'mac-limitless-test-logs'
          path: ${{steps.log_location.outputs.TEMP}}/aws-odbc-wrapper/
          retention-days: 7
  linux-limitless-integration-tests:
    name: Linux Ubuntu - Limitless Integration Tests
    needs: [build-linux-psqlodbc]
    runs-on: ubuntu-latest
    env:
      # Unique per-run resource names so concurrent runs cannot collide.
      LIMITLESS_CLUSTER_ID: ODBC-Limitless-Linux-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
      LIMITLESS_SHARD_ID: ODBC-Limitless-Linux-shard-${{ github.run_id }}${{ github.run_number }}${{ github.run_attempt }}
    steps:
      # Expose the limitless cluster id under the generic name other tooling reads.
      - name: Setup Cluster ID
        run: |
          echo "AURORA_CLUSTER_ID=${{ env.LIMITLESS_CLUSTER_ID }}" >> $GITHUB_ENV
      - name: Checkout aws-advanced-odbc-wrapper
        uses: actions/checkout@v6
- name: Install Build Dependencies
run: |
sudo apt update
sudo apt-get install cmake libcurl4-openssl-dev libssl-dev odbcinst unixodbc-dev uuid-dev zlib1g-dev gdb
      # Cache the AWS SDK for C++ install tree; building it from source is slow.
      - name: Retrieve AWS SDK for C++ Cache
        id: cache-aws-sdk
        uses: actions/cache@v5
        with:
          path: aws_sdk/install
          key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
      - name: Build AWS SDK for C++
        if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
        run: |
          ./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
      # Build both Ansi and Unicode wrapper drivers; unit tests excluded.
      - name: Build aws-advanced-odbc-wrapper
        run: |
          cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
          cmake --build build
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v6
        with:
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
          role-session-name: odbc-wrapper-linux-limitless
      - name: Setup Python virtual environment
        shell: bash
        run: |
          python3 -m venv .venv
          source .venv/bin/activate
          pip install -r scripts/requirements.txt
      # Provision the Aurora PostgreSQL limitless cluster; later steps gate on
      # this step's outcome.
      - name: Create Aurora Limitless Resources
        id: AuroraClusterSetup
        shell: bash
        run: |
          source .venv/bin/activate
          python scripts/db_resources.py create \
            --cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
            --engine aurora-postgresql \
            --engine-version ${{ env.ENGINE_LIMITLESS_VERSION }} \
            --database ${{ env.TEST_LIMITLESS_DATABASE }} \
            --username ${{ secrets.TEST_USERNAME }} \
            --password ${{ secrets.TEST_PASSWORD }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --iam-user ${{ secrets.TEST_IAM_USER }} \
            --limitless \
            --shard-id ${{ env.LIMITLESS_SHARD_ID }} \
            --monitoring-role-arn ${{ secrets.AWS_RDS_MONITORING_ROLE_ARN }}
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-linux-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
      # Restore the psqlODBC .so files built by build-linux-psqlodbc; a cache
      # miss fails the job since the tests need the base driver.
      - name: Retrieve Base Driver
        uses: actions/cache@v5
        with:
          path: ${{steps.base_driver_info.outputs.cache_path}}
          key: ${{steps.base_driver_info.outputs.cache_key}}
          fail-on-cache-miss: true
      # Diagnostic only: show driver files and the unixODBC configuration.
      - name: Log base driver location
        run: |
          echo "Base driver path: $(cd ${{steps.base_driver_info.outputs.cache_path}} && pwd)"
          ls ${{steps.base_driver_info.outputs.cache_path}}
          echo "--- ODBC DSN config ---"
          odbcinst -j 2>/dev/null || true
          cat $ODBCINST 2>/dev/null || cat /etc/odbcinst.ini 2>/dev/null || true
      # Allow gdb to auto-load the repo-local .gdbinit used by the test steps.
      - name: Setup GDB
        run: |
          echo "set auto-load safe-path $PWD" > ~/.gdbinit
      # Build the limitless suite (Ansi) and run it under gdb for backtraces.
      # Skipped if cluster creation failed. NOTE(review): assumes ./.gdbinit
      # makes gdb run and exit non-interactively — confirm its contents.
      - name: Build and Run Limitless Ansi Integration Tests
        shell: bash
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_ansi \
            -D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON \
            -D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
            -D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.so" \
            -D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
            -D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
            -D TEST_DATABASE="${{ env.TEST_LIMITLESS_DATABASE }}"
          cmake --build build_limitless_ansi
          echo "Limitless Ansi Test Built"
          ulimit -c unlimited
          gdb -q -x ./.gdbinit ./build_limitless_ansi/integration-test
        # ODBCINI/ODBCINST point unixODBC at repo-local config instead of
        # system files. AURORA_CLUSTER_* presumably set via GITHUB_ENV during
        # cluster creation — verify.
        env:
          TEST_DSN: "inte-wrapper-dsn"
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
          ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
          ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
      # Same build/run for the Unicode wrapper/base-driver variant.
      - name: Build and Run Limitless Unicode Integration Tests
        shell: bash
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_limitless_unicode \
            -D BUILD_UNICODE=ON -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=ON \
            -D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
            -D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-w.so" \
            -D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_unicode}}" \
            -D TEST_SERVER="${{ env.AURORA_CLUSTER_ENDPOINT }}" \
            -D TEST_DATABASE="${{ env.TEST_LIMITLESS_DATABASE }}"
          cmake --build build_limitless_unicode
          echo "Limitless Unicode Test Built"
          ulimit -c unlimited
          gdb -q -x ./.gdbinit ./build_limitless_unicode/integration-test
        env:
          TEST_DSN: "inte-wrapper-dsn"
          TEST_DATABASE: ${{ env.TEST_LIMITLESS_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.AURORA_CLUSTER_ENDPOINT }}
          TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
          ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
          ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
      # Tear down the Aurora resources unconditionally to avoid leaks.
      - name: Destroy Aurora Resources
        if: always()
        shell: bash
        run: |
          source .venv/bin/activate
          python scripts/db_resources.py destroy \
            --cluster-id ${{ env.LIMITLESS_CLUSTER_ID }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
            --limitless \
            --shard-id ${{ env.LIMITLESS_SHARD_ID }}
      - name: Archive log results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: 'linux-limitless-test-logs'
          path: /tmp/aws-odbc-wrapper/
          retention-days: 7
# Blue Green
  windows-bg-integration-tests:
    # MySQL disabled until IAM is resolved
    name: Windows - Blue/Green Integration Tests
    needs: [build-win-psqlodbc]
    runs-on: windows-latest
    strategy:
      fail-fast: false
      matrix:
        # aurora-mysql intentionally absent (see note above); the if/elif
        # branches below still carry its configuration for when it returns.
        rds_engine: [aurora-postgresql]
    steps:
      # Per-run cluster id written via Out-File since the default Windows
      # shell is PowerShell.
      - name: Setup Cluster ID
        run: |
          echo "AURORA_CLUSTER_ID=ODBC-BG-Win-${{matrix.rds_engine}}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" | Out-File -FilePath $env:GITHUB_ENV -Append
      - name: Checkout aws-advanced-odbc-wrapper
        uses: actions/checkout@v6
      # Cache the AWS SDK for C++ install tree; building it from source is slow.
      - name: Retrieve AWS SDK for C++ Cache
        id: cache-aws-sdk
        uses: actions/cache@v5
        with:
          path: aws_sdk/install
          key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
      - name: Build AWS SDK for C++
        if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
        run: |
          ./scripts/compile_aws_sdk_win.ps1 ${{env.BUILD_CONFIG}}
      # WiX (via dotnet tool) builds the MSI installer used to install the
      # wrapper driver system-wide for this test variant.
      - name: Setup Dotnet for WiX
        uses: actions/setup-dotnet@v5
      - name: Install WiX
        shell: cmd
        run: |
          dotnet tool install --global wix --version ${{env.WIX_VERSION}}
          wix extension add --global WixToolset.UI.wixext/${{env.WIX_VERSION}}
      - name: Run build installer script
        shell: pwsh
        run: |
          ./installer/build_installer.ps1 ${{env.BUILD_CONFIG}}
      # Silent MSI install of the wrapper; msiexec logs to test.log (/lp!).
      - name: Install driver
        shell: pwsh
        working-directory: installer
        run: Start-Process msiexec "/lp! .\test.log /i aws-advanced-odbc-wrapper.msi /quiet /norestart" -Wait;
      # Extended 12h session: blue/green switchover tests are long-running.
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v6
        with:
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
          role-session-name: odbc-wrapper-win-inte
          role-duration-seconds: 43200
      - name: Setup Python virtual environment
        shell: bash
        run: |
          python -m venv .venv
          source .venv/Scripts/activate
          pip install -r scripts/requirements.txt
      # Provision the Aurora cluster plus a blue/green deployment; later test
      # steps gate on this step's outcome.
      - name: Create Aurora Resources with Blue/Green Deployment
        id: AuroraClusterSetup
        shell: bash
        run: |
          source .venv/Scripts/activate
          python scripts/db_resources.py create \
            --cluster-id ${{ env.AURORA_CLUSTER_ID }} \
            --engine ${{ matrix.rds_engine }} \
            --engine-version ${{ env.ENGINE_VERSION }} \
            --database ${{ env.TEST_DATABASE }} \
            --username ${{ secrets.TEST_USERNAME }} \
            --password ${{ secrets.TEST_PASSWORD }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --iam-user ${{ secrets.TEST_IAM_USER }} \
            --blue-green \
            --parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
      # Select cache/driver metadata per engine. For PostgreSQL the DLL paths
      # are discovered post-install (pg_driver_paths); for MySQL they are
      # fixed install locations.
      - name: Setup Base Driver Variables
        id: base_driver_info
        shell: bash
        run: |
          if [ "${{ matrix.rds_engine }}" == "aurora-postgresql" ]; then
            echo "cache_path=psqlodbc" >> $GITHUB_OUTPUT
            echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-win-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
            echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
            echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
          elif [ "${{ matrix.rds_engine }}" == "aurora-mysql" ]; then
            echo "cache_path=mysql-connector" >> $GITHUB_OUTPUT
            echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
            echo "driver_path_ansi=C:\\Program Files\\MySQL\\MySQL Connector ODBC 9.5\\myodbc9a.dll" >> $GITHUB_OUTPUT
            echo "driver_path_unicode=C:\\Program Files\\MySQL\\MySQL Connector ODBC 9.5\\myodbc9w.dll" >> $GITHUB_OUTPUT
            echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
            echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
          fi
      - name: Retrieve Base Driver
        uses: actions/cache@v5
        with:
          path: ${{steps.base_driver_info.outputs.cache_path}}
          key: ${{steps.base_driver_info.outputs.cache_key}}
          fail-on-cache-miss: true
      # Silently install psqlODBC and export the discovered DLL paths.
      - name: Install PostgreSQL ODBC Driver
        if: (matrix.rds_engine == 'aurora-postgresql')
        id: pg_driver_paths
        shell: pwsh
        working-directory: ${{steps.base_driver_info.outputs.cache_path}}
        run: |
          Start-Process msiexec "/i psqlodbc_x64.msi /quiet /norestart" -Wait;
          $ansi = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc30a.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
          $unicode = Get-ChildItem "C:\Program Files\psqlODBC" -Recurse -Filter "podbc35w.dll" -ErrorAction SilentlyContinue | Select-Object -First 1
          if (-not $ansi -or -not $unicode) {
            Write-Host "psqlODBC DLLs not found under C:\Program Files\psqlODBC"
            Get-ChildItem "C:\Program Files\psqlODBC" -Recurse | Format-Table FullName
            exit 1
          }
          Write-Host "Ansi driver: $($ansi.FullName)"
          Write-Host "Unicode driver: $($unicode.FullName)"
          echo "driver_path_ansi=$($ansi.FullName)" >> $env:GITHUB_OUTPUT
          echo "driver_path_unicode=$($unicode.FullName)" >> $env:GITHUB_OUTPUT
      # Put the runner's preinstalled PostgreSQL client tools ($PGBIN) on PATH.
      - name: Setup PostgreSQL
        if: (matrix.rds_engine == 'aurora-postgresql')
        shell: bash
        run: |
          echo "$PGBIN" >> $GITHUB_PATH
      - name: Install MySQL
        if: (matrix.rds_engine == 'aurora-mysql')
        shell: pwsh
        working-directory: ${{steps.base_driver_info.outputs.cache_path}}
        run: |
          Start-Process msiexec "/i mysql-connector_x64.msi /quiet /norestart" -Wait;
      # Collapse the per-engine driver paths into one pair of outputs so the
      # DSN steps below are engine-agnostic.
      - name: Resolve Driver Paths
        id: resolved_drivers
        shell: pwsh
        run: |
          if ("${{ matrix.rds_engine }}" -eq "aurora-postgresql") {
            echo "driver_path_ansi=${{ steps.pg_driver_paths.outputs.driver_path_ansi }}" >> $env:GITHUB_OUTPUT
            echo "driver_path_unicode=${{ steps.pg_driver_paths.outputs.driver_path_unicode }}" >> $env:GITHUB_OUTPUT
          } else {
            echo "driver_path_ansi=${{ steps.base_driver_info.outputs.driver_path_ansi }}" >> $env:GITHUB_OUTPUT
            echo "driver_path_unicode=${{ steps.base_driver_info.outputs.driver_path_unicode }}" >> $env:GITHUB_OUTPUT
          }
      # Register user DSNs pointing the wrapper at the Aurora endpoint and the
      # resolved base-driver DLLs. AURORA_CLUSTER_ENDPOINT is presumably
      # written to GITHUB_ENV during cluster creation — verify.
      - name: Install Ansi DSNs
        shell: pwsh
        run: |
          Add-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} `
            -DriverName "${{ env.DRIVER_NAME_ANSI }}" `
            -DsnType User `
            -SetPropertyValue `
            @("RDS_AUTH_TYPE=database", `
            "Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
            "Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
            "SSLMode=prefer", `
            "BASE_DRIVER=${{steps.resolved_drivers.outputs.driver_path_ansi}}")
      - name: Install Unicode DSNs
        shell: pwsh
        run: |
          Add-OdbcDsn -Name ${{ env.TEST_DSN_UNICODE }} `
            -DriverName "${{ env.DRIVER_NAME_UNICODE }}" `
            -DsnType User `
            -SetPropertyValue `
            @("RDS_AUTH_TYPE=database", `
            "Server=${{ env.AURORA_CLUSTER_ENDPOINT }}", `
            "Port=${{steps.base_driver_info.outputs.dialect_default_port}}", `
            "SSLMode=prefer", `
            "BASE_DRIVER=${{steps.resolved_drivers.outputs.driver_path_unicode}}")
      # Diagnostic only: dump registered DSNs and the Ansi DSN's properties.
      - name: Verify DSN Configuration
        shell: pwsh
        run: |
          Write-Host "Registered DSNs:"
          Get-OdbcDsn | Format-Table Name, DriverName, DsnType, Platform
          Write-Host "Ansi DSN properties:"
          Get-OdbcDsn -Name ${{ env.TEST_DSN_ANSI }} | Select-Object -ExpandProperty PropertyValue
- name: Setup GDB
run: |
choco install mingw
echo "set auto-load safe-path /" > "$HOME/.gdbinit"
      # Build only the blue/green suite (Ansi) and run it under gdb. Skipped
      # if cluster setup failed. NOTE(review): no Unicode blue/green step
      # exists in this job — confirm whether that is intentional.
      - name: Build and Run Blue/Green Ansi Integration Tests
        shell: pwsh
        if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
        run: |
          cmake -S test/integration -B build_bg_ansi -D CMAKE_BUILD_TYPE="${{env.BUILD_CONFIG}}" `
            -D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=OFF -D BUILD_BLUE_GREEN=ON
          cmake --build build_bg_ansi --config ${{env.BUILD_CONFIG}}
          echo "Blue/Green Ansi Test Built"
          gdb -q -x ./.gdbinit .\build_bg_ansi\${{env.BUILD_CONFIG}}\integration-test.exe
        # BG_CLUSTER_ENDPOINT / BG_RESOURCE_ID are presumably exported to
        # GITHUB_ENV by the blue/green creation step — verify.
        env:
          TEST_DSN: ${{ env.TEST_DSN_ANSI }}
          TEST_DATABASE: ${{ env.TEST_DATABASE }}
          TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
          TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
          TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
          TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
          TEST_SERVER: ${{ env.BG_CLUSTER_ENDPOINT }}
          TEST_PORT: ${{steps.base_driver_info.outputs.dialect_default_port}}
          TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
          TEST_BG_RESOURCE_ID: ${{ env.BG_RESOURCE_ID }}
      # Tear down cluster, blue/green deployment, and parameter group
      # unconditionally to avoid leaking billable resources.
      - name: Destroy Aurora Resources
        if: always()
        shell: bash
        run: |
          source .venv/Scripts/activate
          python scripts/db_resources.py destroy \
            --cluster-id ${{ env.AURORA_CLUSTER_ID }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
            --blue-green \
            --bg-deployment-id "${{ env.BG_RESOURCE_ID }}" \
            --parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
      # Make sure IP is always deleted
      - name: Get Github Action IP
        if: always()
        id: ip
        uses: haythem/public-ip@v1.3
      # Revoke the runner's temporary security-group ingress rule; all output
      # streams are discarded (*> $null).
      - name: Remove Github Action IP
        if: always()
        run: |
          aws ec2 revoke-security-group-ingress `
            --group-name default `
            --protocol tcp `
            --port 0-65535 `
            --cidr ${{ steps.ip.outputs.ipv4 }}/32 `
            *> $null;
      # Resolve %TEMP% so the artifact upload below can find the driver logs.
      - name: Get log location
        if: always()
        id: log_location
        shell: pwsh
        run: |
          echo "TEMP=$env:TEMP" >> $env:GITHUB_OUTPUT
      - name: Archive log results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: 'windows-${{matrix.rds_engine}}-test-logs'
          path: ${{ steps.log_location.outputs.TEMP }}/aws-odbc-wrapper/
          retention-days: 7
macos-bg-integration-tests:
# MySQL disabled until IAM is resolved
name: MacOS - Blue/Green Integration Tests
needs: [ build-macos-psqlodbc ]
runs-on:
- ${{ matrix.env.project }}-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.env.fleet }}
buildspec-override:true
strategy:
fail-fast: false
matrix:
env: [ {rds_engine: aurora-postgresql, project: codebuild-odbc-wrapper-macos-pg, fleet: fleet-macos-fleet-3} ]
    steps:
      # Per-run cluster id so concurrent runs cannot collide.
      - name: Setup Cluster ID
        run: |
          echo "AURORA_CLUSTER_ID=ODBC-BG-MacOS-${{matrix.env.rds_engine}}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" >> $GITHUB_ENV
      - name: Checkout aws-advanced-odbc-wrapper
        uses: actions/checkout@v6
      # Cache the AWS SDK for C++ install tree; building it from source is slow.
      - name: Retrieve AWS SDK for C++ Cache
        id: cache-aws-sdk
        uses: actions/cache@v5
        with:
          path: aws_sdk/install
          key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
      # Only build the SDK on a cache miss; SDKROOT pins the macOS SDK for clang.
      - name: Build AWS SDK for C++
        if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
        run: |
          export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
          ./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
      # Build both Ansi and Unicode wrapper drivers; unit tests excluded.
      - name: Build aws-advanced-odbc-wrapper
        run: |
          cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
          cmake --build build
      # Extended 12h session: blue/green switchover tests are long-running.
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v6
        with:
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
          role-session-name: odbc-wrapper-macos-inte
          role-duration-seconds: 43200
      - name: Setup Python virtual environment
        shell: bash
        run: |
          python3 -m venv .venv
          source .venv/bin/activate
          pip install -r scripts/requirements.txt
      # Provision the Aurora cluster plus a blue/green deployment; later test
      # steps gate on this step's outcome.
      - name: Create Aurora Resources with Blue/Green Deployment
        id: AuroraClusterSetup
        shell: bash
        run: |
          source .venv/bin/activate
          python3 scripts/db_resources.py create \
            --cluster-id ${{ env.AURORA_CLUSTER_ID }} \
            --engine ${{ matrix.env.rds_engine }} \
            --engine-version ${{ env.ENGINE_VERSION }} \
            --database ${{ env.TEST_DATABASE }} \
            --username ${{ secrets.TEST_USERNAME }} \
            --password ${{ secrets.TEST_PASSWORD }} \
            --region ${{ secrets.AWS_DEFAULT_REGION }} \
            --num-instances 1 \
            --iam-user ${{ secrets.TEST_IAM_USER }} \
            --blue-green \
            --parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
- name: Setup Base Driver Variables
id: base_driver_info
shell: bash
run: |
if [ "${{ matrix.env.rds_engine }}" == "aurora-postgresql" ]; then
echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-macos-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
elif [ "${{ matrix.env.rds_engine }}" == "aurora-mysql" ]; then
echo "cache_path=mysql-connector/" >> $GITHUB_OUTPUT
echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
echo "driver_path_ansi='${{ github.workspace }}/mysql-connector/lib/libmyodbc9a.so'" >> $GITHUB_OUTPUT
echo "driver_path_unicode='${{ github.workspace }}/mysql-connector/lib/libmyodbc9w.so'" >> $GITHUB_OUTPUT
echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
fi
- name: Retrieve Base Driver
uses: actions/cache@v5
with:
path: ${{steps.base_driver_info.outputs.cache_path}}
key: ${{steps.base_driver_info.outputs.cache_key}}
fail-on-cache-miss: true
- name: Build and Run Blue/Green Ansi Integration Tests
shell: bash
if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
run: |
cmake -S test/integration -B build_bg_ansi \
-D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=OFF -D BUILD_BLUE_GREEN=ON \
-D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
-D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.dylib" \
-D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
-D TEST_SERVER="${{ env.BG_CLUSTER_ENDPOINT }}" \
-D TEST_DATABASE="${{ env.TEST_DATABASE }}"
cmake --build build_bg_ansi
echo "Blue/Green Ansi Test Built"
./build_bg_ansi/integration-test
env:
TEST_DSN: "inte-wrapper-dsn"
TEST_DATABASE: ${{ env.TEST_DATABASE }}
TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
TEST_SERVER: ${{ env.BG_CLUSTER_ENDPOINT }}
TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
TEST_BG_RESOURCE_ID: ${{ env.BG_RESOURCE_ID }}
- name: Get log location
if: always()
id: log_location
run: echo "TEMP=${TMPDIR:-/tmp}" >> $GITHUB_OUTPUT
- name: Destroy Aurora Resources
if: always()
shell: bash
run: |
source .venv/bin/activate
python3 scripts/db_resources.py destroy \
--cluster-id ${{ env.AURORA_CLUSTER_ID }} \
--region ${{ secrets.AWS_DEFAULT_REGION }} \
--num-instances 1 \
--secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
--blue-green \
--bg-deployment-id "${{ env.BG_RESOURCE_ID }}" \
--parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
- name: Archive log results
if: always()
uses: actions/upload-artifact@v6
with:
name: 'mac-${{matrix.env.rds_engine}}-bg-test-logs'
path: ${{steps.log_location.outputs.TEMP}}/aws-odbc-wrapper/
retention-days: 7
linux-bg-integration-tests:
  name: Linux Ubuntu - Blue/Green Integration Tests
  needs: [build-linux-psqlodbc]
  runs-on: ubuntu-latest
  strategy:
    fail-fast: false
    matrix:
      rds_engine: [aurora-postgresql]
  steps:
    # Cluster id includes run id/number/attempt so reruns never collide on AWS resources.
    - name: Setup Cluster ID
      run: |
        echo "AURORA_CLUSTER_ID=ODBC-BG-Linux-${{ matrix.rds_engine }}-${{github.run_id}}${{github.run_number}}${{github.run_attempt}}" >> $GITHUB_ENV
    - name: Checkout aws-advanced-odbc-wrapper
      uses: actions/checkout@v6
    - name: Install Build Dependencies
      run: |
        sudo apt-get update
        # -y: apt-get aborts on its confirmation prompt in a non-interactive runner.
        sudo apt-get install -y cmake libcurl4-openssl-dev libssl-dev odbcinst unixodbc-dev uuid-dev zlib1g-dev gdb
    - name: Retrieve AWS SDK for C++ Cache
      id: cache-aws-sdk
      uses: actions/cache@v5
      with:
        path: aws_sdk/install
        key: ${{ runner.os }}-aws-sdk-cpp-${{env.BUILD_CONFIG}}-shared
    # Only rebuild the SDK on a cache miss — it is the slowest part of the job.
    - name: Build AWS SDK for C++
      if: ${{steps.cache-aws-sdk.outputs.cache-hit != 'true'}}
      run: |
        ./scripts/compile_aws_sdk_unix.sh ${{env.BUILD_CONFIG}}
    - name: Build aws-advanced-odbc-wrapper
      run: |
        cmake -S . -B build -DBUILD_UNICODE=ON -DBUILD_ANSI=ON -DBUILD_UNIT_TEST=OFF -DCMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}}
        cmake --build build
    - name: Configure AWS Credentials
      uses: aws-actions/configure-aws-credentials@v6
      with:
        aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
        role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOY_ROLE }}
        role-session-name: odbc-wrapper-linux-inte
        # 12-hour session: blue/green create/switchover/destroy is long-running.
        role-duration-seconds: 43200
    - name: Setup Python virtual environment
      shell: bash
      run: |
        python3 -m venv .venv
        source .venv/bin/activate
        pip install -r scripts/requirements.txt
    # NOTE(review): later steps read BG_CLUSTER_ENDPOINT, AURORA_CLUSTER_SECRETS_ARN
    # and BG_RESOURCE_ID from env — presumably exported to GITHUB_ENV by this
    # script; confirm against scripts/db_resources.py.
    - name: Create Aurora Resources with Blue/Green Deployment
      id: AuroraClusterSetup
      shell: bash
      run: |
        source .venv/bin/activate
        python3 scripts/db_resources.py create \
          --cluster-id ${{ env.AURORA_CLUSTER_ID }} \
          --engine ${{ matrix.rds_engine }} \
          --engine-version ${{ env.ENGINE_VERSION }} \
          --database ${{ env.TEST_DATABASE }} \
          --username ${{ secrets.TEST_USERNAME }} \
          --password ${{ secrets.TEST_PASSWORD }} \
          --region ${{ secrets.AWS_DEFAULT_REGION }} \
          --num-instances 1 \
          --iam-user ${{ secrets.TEST_IAM_USER }} \
          --blue-green \
          --parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
    # Maps the matrix engine to its base ODBC driver cache entry, driver
    # library paths, default port, and dialect identifier.
    - name: Setup Base Driver Variables
      id: base_driver_info
      shell: bash
      run: |
        if [ "${{ matrix.rds_engine }}" == "aurora-postgresql" ]; then
          echo "cache_path=psqlodbc/.libs/" >> $GITHUB_OUTPUT
          echo "cache_key=${{ runner.os }}-psqlodbc-driver-${{ needs.build-linux-psqlodbc.outputs.psqlodbc-version }}" >> $GITHUB_OUTPUT
          echo "driver_path_ansi='${{ github.workspace }}/psqlodbc/.libs/psqlodbca.so'" >> $GITHUB_OUTPUT
          echo "driver_path_unicode='${{ github.workspace }}/psqlodbc/.libs/psqlodbcw.so'" >> $GITHUB_OUTPUT
          echo "dialect_default_port=5432" >> $GITHUB_OUTPUT
          echo "database_dialect=AURORA_POSTGRESQL" >> $GITHUB_OUTPUT
        elif [ "${{ matrix.rds_engine }}" == "aurora-mysql" ]; then
          echo "cache_path=mysql-connector/" >> $GITHUB_OUTPUT
          echo "cache_key=${{ runner.os }}-mysql-driver-9.5.0" >> $GITHUB_OUTPUT
          echo "driver_path_ansi='/usr/lib/x86_64-linux-gnu/odbc/libmyodbc9a.so'" >> $GITHUB_OUTPUT
          echo "driver_path_unicode='/usr/lib/x86_64-linux-gnu/odbc/libmyodbc9w.so'" >> $GITHUB_OUTPUT
          echo "dialect_default_port=3306" >> $GITHUB_OUTPUT
          echo "database_dialect=AURORA_MYSQL" >> $GITHUB_OUTPUT
        fi
    - name: Retrieve Base Driver
      uses: actions/cache@v5
      with:
        path: ${{steps.base_driver_info.outputs.cache_path}}
        key: ${{steps.base_driver_info.outputs.cache_key}}
        # The base driver must have been produced by the needs job; a miss is fatal.
        fail-on-cache-miss: true
    # Kept for when aurora-mysql is re-added to the matrix (currently never runs).
    - name: Install MySQL
      if: (matrix.rds_engine == 'aurora-mysql')
      shell: bash
      run: |
        sudo apt install ./mysql-connector/mysql-connector_x64.deb -y
    # Allow gdb to auto-load the checked-in .gdbinit from the workspace.
    - name: Setup GDB
      run: |
        echo "set auto-load safe-path $PWD" > ~/.gdbinit
    - name: Build and Run Blue/Green Ansi Integration Tests
      shell: bash
      if: ${{steps.AuroraClusterSetup.outcome == 'success'}}
      run: |
        cmake -S test/integration -B build_bg_ansi \
          -D BUILD_UNICODE=OFF -D BUILD_FAILOVER=OFF -D BUILD_LIMITLESS=OFF -D BUILD_BLUE_GREEN=ON \
          -D CMAKE_BUILD_TYPE=${{env.BUILD_CONFIG}} \
          -D TEST_DRIVER_PATH="${{ github.workspace }}/build/driver/aws-advanced-odbc-wrapper-a.so" \
          -D BASE_DRIVER_PATH="${{steps.base_driver_info.outputs.driver_path_ansi}}" \
          -D TEST_SERVER="${{ env.BG_CLUSTER_ENDPOINT }}" \
          -D TEST_DATABASE="${{ env.TEST_DATABASE }}"
        cmake --build build_bg_ansi
        echo "Blue/Green Ansi Test Built"
        # Run under gdb with core dumps enabled so crashes produce a backtrace.
        ulimit -c unlimited
        gdb -q -x ./.gdbinit ./build_bg_ansi/integration-test
      env:
        TEST_DSN: "inte-wrapper-dsn"
        TEST_DATABASE: ${{ env.TEST_DATABASE }}
        TEST_USERNAME: ${{ secrets.TEST_USERNAME }}
        TEST_PASSWORD: ${{ secrets.TEST_PASSWORD }}
        TEST_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
        TEST_IAM_USER: ${{ secrets.TEST_IAM_USER }}
        TEST_SECRET_ARN: ${{ env.AURORA_CLUSTER_SECRETS_ARN }}
        TEST_SERVER: ${{ env.BG_CLUSTER_ENDPOINT }}
        TEST_PORT: "${{ steps.base_driver_info.outputs.dialect_default_port }}"
        TEST_DIALECT: "${{ steps.base_driver_info.outputs.database_dialect }}"
        ODBCINI: "${{ github.workspace }}/test/resources/odbc.ini"
        ODBCINST: "${{ github.workspace }}/test/resources/odbcinst.ini"
        TEST_BG_RESOURCE_ID: ${{ env.BG_RESOURCE_ID }}
    # always() — tear down billable AWS resources even when earlier steps failed.
    - name: Destroy Aurora Resources
      if: always()
      shell: bash
      run: |
        source .venv/bin/activate
        python3 scripts/db_resources.py destroy \
          --cluster-id ${{ env.AURORA_CLUSTER_ID }} \
          --region ${{ secrets.AWS_DEFAULT_REGION }} \
          --num-instances 1 \
          --secrets-arn "${{ env.AURORA_CLUSTER_SECRETS_ARN }}" \
          --blue-green \
          --bg-deployment-id "${{ env.BG_RESOURCE_ID }}" \
          --parameter-group parameter-${{ env.AURORA_CLUSTER_ID }}
    - name: Archive log results
      if: always()
      uses: actions/upload-artifact@v6
      with:
        name: 'linux-${{matrix.rds_engine}}-test-logs'
        path: /tmp/aws-odbc-wrapper/
        retention-days: 7