Bump webpack-cli from 5.1.4 to 6.0.1 #5054
name: CI
on:
  push:
    branches:
      - 'main'
    tags:
      - '**'
  pull_request:
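  # pull_request_target runs workflows in the context of the base branch, with access to repository secrets; the e2e job below additionally skips Dependabot actors.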
  pull_request_target:
  workflow_dispatch:
jobs:
  test-and-package:
    strategy:
      matrix:
        include:
          - os: ubuntu-latest
            target: darwin-x64
            zetasql-lib: remote_server
          - os: ubuntu-latest
            target: darwin-arm64
            zetasql-lib: remote_server_arm
          - os: ubuntu-latest
            target: linux-x64
            zetasql-lib: remote_server.so
          - os: ubuntu-latest
            target: linux-arm64
            zetasql-lib: remote_server_arm.so
          - os: windows-latest
            target: win32-x64
            zetasql-lib: remote_server.so
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: set up node
        uses: actions/setup-node@v4
        with:
          node-version: '19.x'
      - name: build and test
        run: |
          npm install
          npm run build
          npm run lint
          npm run ts-unused-exports
          npm run test
      - name: set environment variable with file name
        run: echo "FILE_NAME=${{ github.sha }}-${{ matrix.target }}.vsix" >> $GITHUB_ENV
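      # Keep only the ZetaSQL remote server library that matches the current target; the other platforms' binaries are removed before packaging.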
      - name: delete libraries for other platforms
        run: |
          find server/node_modules/@fivetrandevelopers/zetasql/lib/zetasql -maxdepth 1 -type f ! -name '${{ matrix.zetasql-lib }}' -exec rm -f {} \;
          find server/node_modules/@fivetrandevelopers/zetasql-snowflake/lib/snowflake -maxdepth 1 -type f ! -name '${{ matrix.zetasql-lib }}' -exec rm -f {} \;
      - name: package to vsix file
        run: |
          npm config set script-shell bash
          npx @vscode/vsce package -o ${FILE_NAME} --target ${{ matrix.target }} --githubBranch main
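      # Cache the packaged .vsix under a commit- and target-specific key so the e2e and publish jobs can restore it.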
      - name: create cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.FILE_NAME }}
          key: ${{ env.FILE_NAME }}
          enableCrossOsArchive: true
  run-e2e-tests:
    if: github.actor != 'dependabot[bot]'
    needs: test-and-package
    strategy:
      fail-fast: false
      matrix:
        include: # Run tests on the latest dbt version and on the version used in https://github.com/fivetran/analytics/blob/main/dbt_ft_prod/dbt_project.yml#L9 (require-dbt-version)
          # https://github.com/fivetran/analytics/blob/a83516d1f662ef4db5750e4bff6735b56b823123/.buildkite/scripts/step_2_run_models_dry.sh#L11
          - install-dbt: '${PYTHON_INSTALL_LOC} -m pip install dbt-bigquery==1.5.3 dbt-snowflake==1.5.2 dbt-postgres==1.5.4'
            os: macos-latest
            target: darwin-x64
            activate-venv: source ~/dbt_1_2_2_env/bin/activate
            python-version: '3.9.12'
          - install-dbt: '${PYTHON_INSTALL_LOC} -m pip install dbt-bigquery dbt-snowflake dbt-postgres'
            os: macos-latest
            target: darwin-x64
            activate-venv: source ~/dbt_1_2_2_env/bin/activate
            python-version: '3.9.12'
          - install-dbt: '${PYTHON_INSTALL_LOC} -m pip install dbt-bigquery dbt-snowflake dbt-postgres'
            os: ubuntu-latest
            target: linux-x64
            prepare-for-tests: |
              Xvfb -ac :99 -screen 0 1280x1024x16 &
              export DISPLAY=:99
            activate-venv: source ~/dbt_1_2_2_env/bin/activate
            python-version: '3.10.7'
            SKIP_TESTS: 'vscode_commands.spec.js'
          - install-dbt: '${PYTHON_INSTALL_LOC} -m pip install dbt-bigquery dbt-snowflake dbt-postgres'
            os: windows-latest
            target: win32-x64
            add-colon-to-key-file-path: |
              homedir=c:/${homedir#*c/}
              python=${homedir}/dbt_1_2_2_env/Scripts/python
              cat <<EOT > ${GITHUB_WORKSPACE}/e2e/projects/special-python-settings/.vscode/settings.json
              {
                "python.defaultInterpreterPath": "${python}"
              }
              EOT
            activate-venv: |
              cd ~/dbt_1_2_2_env/Scripts
              . activate
            python-version: '3.10.7'
    name: e2e-${{ matrix.target }} ${{ matrix.install-dbt }} ${{ matrix.os }} ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    steps:
      - name: set environment variable with file name
        run: echo "FILE_NAME=${{ github.sha }}-${{ matrix.target }}.vsix" >> $GITHUB_ENV
      - name: checkout
        uses: actions/checkout@v4
      - name: load e2e tests and vsix from cache
        uses: actions/cache@v4
        id: cache
        with:
          path: ./${{ env.FILE_NAME }}
          key: ${{ env.FILE_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
      - name: setup WSL
        if: matrix.os == 'windows-latest'
        uses: Vampire/setup-wsl@v3
        with:
          distribution: Ubuntu-20.04
      - uses: actions/setup-node@v4
        with:
          node-version: '19.x'
      - name: build tests
        run: | # ignore postinstall script and install dependencies for e2e
          npm install --ignore-scripts
          cd common
          npx tsc --sourceMap false --project tsconfig.json
          cd ../e2e
          npm install
          npx tsc --sourceMap false --project tsconfig.json
      - uses: actions/setup-python@v5
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
      - name: add custom python venv
        run: | # https://github.com/dbt-labs/dbt-core/issues/4745
          ${PYTHON_INSTALL_LOC} -m venv ~/dbt_1_2_2_env
          ls ~/dbt_1_2_2_env
          ${{ matrix.activate-venv }}
          python -m pip install pytz dbt-core==1.2.2 dbt-bigquery==1.2.0
          python -m pip list | grep dbt
          dbt --version
        env:
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
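      # Write credentials from repository secrets to ~/.dbt and generate a profiles.yml with BigQuery, Postgres, and Snowflake targets for the e2e projects.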
      - name: prepare profile config
        run: |
          mkdir -p ~/.dbt/
          echo "$BQ_SERVICE_ACCOUNT" > ~/.dbt/bq-test-project.json
          echo "$PG_CONNECTION_PARAMS" > ~/.dbt/postgres.json
          echo "$SNOWFLAKE_CONNECTION_PARAMS" > ~/.dbt/snowflake.json
          user=$(whoami)
          homedir=$(eval echo "~${user}")
          ${{ matrix.add-colon-to-key-file-path }}
          KEY_FILE_PATH=${homedir}/.dbt/bq-test-project.json
          echo "KEY_FILE_PATH=${KEY_FILE_PATH}" >> $GITHUB_ENV
          read project_id < <(echo $(echo ${BQ_SERVICE_ACCOUNT} | jq -r '.project_id'))
          dataset="transforms_dbt_default"
          read pg_host pg_user pg_password pg_port pg_dbname pg_schema < <(echo $(echo ${PG_CONNECTION_PARAMS} | jq -r '.host, .user, .password, .port, .dbname, .schema'))
          echo "POSTGRESQL_PORT=${pg_port}" >> $GITHUB_ENV
          read sf_account sf_database sf_password sf_role sf_schema sf_user sf_warehouse < <(echo $(echo ${SNOWFLAKE_CONNECTION_PARAMS} | jq -r '.account, .database, .password, .role, .schema, .username, .warehouse'))
          sf_password="${sf_password//[$'\t\r\n']}" # Fix new line issue on Windows
          cat <<EOT > ~/.dbt/profiles.yml
          e2e-test-project:
            target: prod
            outputs:
              prod:
                type: bigquery
                method: service-account
                project: ${project_id}
                keyfile: "{{ env_var('KEY_FILE_PATH') }}"
                dataset: ${dataset}
                threads: 4
          e2e-test-workspace-project1:
            target: prod
            outputs:
              prod:
                type: bigquery
                method: "{{ env_var('PROJECT1_METHOD') }}" # See .env file in the project root
                project: ${project_id}
                dataset: ${dataset}
                threads: 4
                keyfile_json: "{{ env_var('BQ_SERVICE_ACCOUNT') | as_native }}"
          e2e-test-workspace-project2:
            target: prod
            outputs:
              prod:
                type: bigquery
                method: service-account
                project: ${project_id}
                keyfile: ${KEY_FILE_PATH}
                dataset: ${dataset}
                threads: 4
          e2e-test-project-postgres:
            target: prod
            outputs:
              prod:
                type: postgres
                host: ${pg_host}
                user: ${pg_user}
                password: ${pg_password}
                port: "{{ env_var('POSTGRESQL_PORT') | as_number }}" # For testing | as_number filter
                dbname: ${pg_dbname}
                schema: ${pg_schema}
                threads: 4
                keepalives_idle: 0
                connect_timeout: 10
                search_path: ${pg_schema}
          snowflake:
            target: prod
            outputs:
              prod:
                type: snowflake
                account: ${sf_account}
                database: ${sf_database}
                password: '${sf_password}'
                role: ${sf_role}
                schema: ${sf_schema}
                threads: 4
                user: ${sf_user}
                warehouse: ${sf_warehouse}
          EOT
        env:
          BQ_SERVICE_ACCOUNT: ${{ secrets.BQ_SERVICE_ACCOUNT }}
          PG_CONNECTION_PARAMS: ${{ secrets.PG_CONNECTION_PARAMS }}
          SNOWFLAKE_CONNECTION_PARAMS: ${{ secrets.SNOWFLAKE_CONNECTION_PARAMS }}
          GITHUB_WORKSPACE: ${{ env.GITHUB_WORKSPACE }}
      - name: show prepared files
        run: |
          cd ~/.dbt/
          pwd
          ls -la
      - name: install dbt
        run: ${{ matrix.install-dbt }}
        env:
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      - name: show info
        run: |
          which dbt
          dbt --version
          node -v
          echo ${PYTHON_INSTALL_LOC}
          ${PYTHON_INSTALL_LOC} --version
          ${PYTHON_INSTALL_LOC} -m pip --version
          ${PYTHON_INSTALL_LOC} -m pip list | grep dbt
        env:
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
      # Uncomment the lines below to connect to the job via SSH for debugging
      # - name: Setup SSH via tmate for debugging
      #   uses: rdp-studio/ssh2actions@main
      #   env:
      #     TELEGRAM_BOT_TOKEN: ${{ secrets.BOT_TOKEN }}
      #     TELEGRAM_CHAT_ID: ${{ secrets.CHAT_ID }}
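      # Unpack the cached .vsix; the e2e runner points at the extracted extension in ./e2e-tests/extension.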
      - run: unzip ${{ env.FILE_NAME }} -d e2e-tests
      - name: check WSL
        shell: powershell
        if: matrix.os == 'windows-latest'
        run: |
          wsl -d Ubuntu ls "/mnt/d/a/dbt-language-server/dbt-language-server/e2e-tests/extension/server/"
      - name: run e2e tests
        run: |
          ${{ matrix.prepare-for-tests }}
          node e2e/out/runners/runTest $(pwd)/e2e-tests/extension
        env:
          BQ_SERVICE_ACCOUNT: ${{ secrets.BQ_SERVICE_ACCOUNT }}
          SKIP_TESTS: ${{ matrix.SKIP_TESTS }}
          PYTHON_INSTALL_LOC: '${{ steps.setup-python.outputs.python-path }}'
          WIZARD_FOR_DBT_WSL_UBUNTU_NAME: 'Ubuntu'
      - name: show log files
        if: always()
        run: find .vscode-test/user-data -type f -name '*.log'
      - name: upload logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: e2e-${{ matrix.target }} ${{ matrix.install-dbt }} ${{ matrix.os }} ${{ matrix.python-version }}
          path: |
            .vscode-test/user-data/logs/**/*Wizard for dbt Core (TM).*
            .vscode-test/user-data/logs/**/*Wizard for dbt Core (TM) Trace.*
            .vscode-test/user-data/logs/**/*Install Latest dbt.*
            e2e/projects/completion-jinja/logs/*
            e2e/projects/test-fixture/logs/*
            e2e/projects/two-projects/logs/*
            e2e/projects/without-projects/logs/*
            e2e/projects/special-python-settings/logs/*
            e2e/projects/postgres/logs/*
            e2e/projects/project-with-packages/logs/*
            e2e/projects/snowflake/logs/*
  publish-vsix:
    if: ${{ github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v') }} # main branch or any tag starting with 'v'
    needs: [test-and-package, run-e2e-tests]
    runs-on: ubuntu-latest
    steps:
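      # Restore the per-platform .vsix files that were packaged and cached by the test-and-package job.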
      - name: load darwin-x64 vsix from cache
        id: darwin-x64-cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.VSIX_NAME }}
          key: ${{ env.VSIX_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
        env:
          VSIX_NAME: ${{ github.sha }}-darwin-x64.vsix
      - name: load darwin-arm64 vsix from cache
        id: darwin-arm64-cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.VSIX_NAME }}
          key: ${{ env.VSIX_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
        env:
          VSIX_NAME: ${{ github.sha }}-darwin-arm64.vsix
      - name: load linux-x64 vsix from cache
        id: linux-x64-cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.VSIX_NAME }}
          key: ${{ env.VSIX_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
        env:
          VSIX_NAME: ${{ github.sha }}-linux-x64.vsix
      - name: load linux-arm64 vsix from cache
        id: linux-arm64-cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.VSIX_NAME }}
          key: ${{ env.VSIX_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
        env:
          VSIX_NAME: ${{ github.sha }}-linux-arm64.vsix
      - name: load win32-x64 vsix from cache
        id: windows-cache
        uses: actions/cache@v4
        with:
          path: ./${{ env.VSIX_NAME }}
          key: ${{ env.VSIX_NAME }}
          enableCrossOsArchive: true
          fail-on-cache-miss: true
        env:
          VSIX_NAME: ${{ github.sha }}-win32-x64.vsix
      - name: 'google auth using service account json'
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GCS_SERVICE_ACCOUNT }}'
      - name: push vsix files to GCS
        if: ${{ github.ref == 'refs/heads/main' }}
        uses: google-github-actions/upload-cloud-storage@v2
        with:
          path: .
          glob: '*.vsix'
          destination: dbt-language-server
          process_gcloudignore: false
      - name: set up node
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        uses: actions/setup-node@v4
        with:
          node-version: '19.x'
      - name: publish tagged (latest) extension
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        env:
          VSCE_PAT: ${{ secrets.VSCE_TOKEN }}
        run: npx @vscode/vsce publish --githubBranch main --packagePath $(find . -iname "*.vsix")
  publish-npm:
    if: ${{ startsWith(github.ref, 'refs/tags/v') }}
    needs: [test-and-package, run-e2e-tests]
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: build and test
        run: |
          npm install
          npm run build
          npm run lint
          npm run ts-unused-exports
          npm run test
      - name: set up node
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'
          registry-url: 'https://registry.npmjs.org'
      - name: publish dbt-language-server-common npm package
        run: |
          cd common
          npm publish --verbose
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
      - name: publish dbt-language-server npm package
        run: |
          cd server
          npm publish --verbose
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}