From 6698d797d75c54b6e70eb9ee396d4159eec6768e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 1 May 2025 21:21:35 +0000 Subject: [PATCH 01/43] chore(release): update version to 0.0.73 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 36e6a204e..2225cdf17 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.72 +0.0.73 From 5424d2988be3ca496a28fc59595a09b129f5532d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 1 May 2025 22:19:27 +0000 Subject: [PATCH 02/43] chore(release): update version to 0.0.74 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 2225cdf17..30eb585d8 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.73 +0.0.74 From ddb320557d14c97014e8e56a90d39299fa0949b3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 3 May 2025 14:04:12 +0000 Subject: [PATCH 03/43] chore(release): update version to 0.0.75 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 30eb585d8..fb1e9b1d2 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.74 +0.0.75 From 1aa6ba40ede543e51df08f2a9d73d25e4ea60ffe Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 5 May 2025 20:43:30 +0000 Subject: [PATCH 04/43] chore(release): update version to 0.0.76 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index fb1e9b1d2..7818a4fc9 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.75 +0.0.76 From a9b7c16097f8ac5dc8b32cdc97d8cfa34bdc83ae Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 5 May 2025 22:32:27 +0000 Subject: [PATCH 05/43] chore(release): update version to 0.0.77 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 7818a4fc9..b76f49a4b 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.76 +0.0.77 From 0197335f0116ade8c0dd3ee3be8eb89ec25ea722 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 5 May 2025 23:49:47 +0000 Subject: [PATCH 06/43] chore(release): update version to 0.0.78 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index b76f49a4b..4ed248ba7 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.77 +0.0.78 From d4f5849f405a558b246c6e5cbec6d6488647062f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 5 May 2025 23:52:30 +0000 Subject: [PATCH 07/43] chore(release): update version to 0.0.79 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 4ed248ba7..2786fad6b 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.78 +0.0.79 From 894e4f8b6610b7788482bcceb076c0047c780f1b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 6 May 2025 20:10:39 +0000 Subject: [PATCH 08/43] chore(release): update version to 0.0.80 --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index 2786fad6b..ee92deb82 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.79 +0.0.80 From 4adca4ecc57f9bd82a744ef0cf8a18fedbc74889 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 07:23:51 -0600 Subject: [PATCH 09/43] docker release fix --- .github/workflows/docker-release.yml | 6 +++- .github/workflows/porter_app_evals_3155.yml | 38 --------------------- 2 files changed, 5 
insertions(+), 39 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 798669ff0..0a120fb1c 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -5,6 +5,10 @@ on: branches: - main # Trigger when PR from staging is merged to main +permissions: + contents: read + packages: write + env: # Placeholder for Docker Hub username/organization or GHCR owner DOCKER_REGISTRY_OWNER: ghcr.io/${{ github.repository_owner }} @@ -14,7 +18,7 @@ env: jobs: prepare_docker_release_info: name: Prepare Docker Release Information - runs-on: ubuntu-latest + runs-on: blacksmith-32vcpu-ubuntu-2204 outputs: api_version: ${{ steps.version_info.outputs.api_version }} web_version: ${{ steps.version_info.outputs.web_version }} diff --git a/.github/workflows/porter_app_evals_3155.yml b/.github/workflows/porter_app_evals_3155.yml index 51f95af25..6d0d60e1b 100644 --- a/.github/workflows/porter_app_evals_3155.yml +++ b/.github/workflows/porter_app_evals_3155.yml @@ -6,44 +6,6 @@ - 'api/**' name: Deploy to evals jobs: - database-deploy: - runs-on: blacksmith-16vcpu-ubuntu-2204 - environment: staging - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - profile: minimal - override: true - - - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2 - - - name: Install Diesel CLI - run: cargo install diesel_cli --no-default-features --features postgres - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - - name: Download Postgres certificate from S3 - run: | - mkdir -p ~/.postgresql - aws s3 cp ${{ secrets.CERT_S3_URL }} ~/.postgresql/root.crt - - - name: Run migrations - working-directory: ./api - run: diesel migration run - env: - DATABASE_URL: ${{ secrets.EVAL_DB_URL }} - PGSSLMODE: disable porter-deploy: runs-on: blacksmith-32vcpu-ubuntu-2204 steps: From f9d644d03081680163f707a59c09dbd21b462c30 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 07:29:51 -0600 Subject: [PATCH 10/43] caching and cli fix --- .github/workflows/cli-release.yml | 4 ++++ .github/workflows/docker-release.yml | 4 ---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 85778b2af..f4b75a2ed 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -77,6 +77,10 @@ jobs: with: ref: ${{ github.sha }} + - name: Install libpq (macOS) + if: runner.os == 'macOS' + run: brew install libpq + - name: Install Rust uses: actions-rs/toolchain@v1 with: diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 0a120fb1c..9ba226ea7 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -106,8 +106,6 @@ jobs: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest - cache-from: type=gha - cache-to: type=gha,mode=max build_and_push_web: name: Build and Push Web Image @@ -150,5 +148,3 @@ jobs: NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} NEXT_PUBLIC_SUPABASE_URL=${{ secrets.NEXT_PUBLIC_SUPABASE_URL }} 
NEXT_PUBLIC_SUPABASE_ANON_KEY=${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} - cache-from: type=gha - cache-to: type=gha,mode=max From a96a422b63a464fc28a92bcd5182d679133e5cfd Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 07:43:24 -0600 Subject: [PATCH 11/43] new docker image locations and cli fix --- .github/workflows/cli-release.yml | 216 +++++++++------------------ .github/workflows/docker-release.yml | 4 +- 2 files changed, 75 insertions(+), 145 deletions(-) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index f4b75a2ed..d065e2398 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -3,54 +3,20 @@ name: CLI Release on: push: branches: - - main # Trigger when PR from staging (or any other) is merged to main + - main + paths: + - 'cli/**' + - '.github/workflows/cli-release.yml' + workflow_dispatch: # Add permissions for creating releases permissions: contents: write - # pull-requests: write # Not typically needed for a tag-triggered release workflow + pull-requests: write # As per old workflow jobs: - prepare_cli_release_info: - name: Prepare CLI Release Information - runs-on: ubuntu-latest - outputs: - cli_version: ${{ steps.version_info.outputs.cli_version }} - cli_tag_name: ${{ steps.version_info.outputs.cli_tag_name }} - steps: - - name: Checkout code from main - uses: actions/checkout@v4 - with: - ref: ${{ github.sha }} # Checkout the specific commit on main (merge commit) - - - name: Read CLI Version and Determine Tag - id: version_info - shell: bash - run: | - CLI_VERSION="" - if [ -f cli/cli/Cargo.toml ]; then - CLI_VERSION=$(grep '^version' cli/cli/Cargo.toml | head -n 1 | sed 's/version = \"\(.*\)\"/\1/') - echo "Read CLI version '$CLI_VERSION' from cli/cli/Cargo.toml" - else - echo "Error: cli/cli/Cargo.toml not found!" - exit 1 - fi - - if [ -z "$CLI_VERSION" ]; then - echo "Error: Could not determine CLI version from Cargo.toml." 
- exit 1 - fi - - CLI_TAG_NAME="cli/v$CLI_VERSION" - echo "Determined CLI Version: $CLI_VERSION" - echo "Determined CLI Tag Name: $CLI_TAG_NAME" - echo "cli_version=$CLI_VERSION" >> $GITHUB_OUTPUT - echo "cli_tag_name=$CLI_TAG_NAME" >> $GITHUB_OUTPUT - -# Separate Build Job (similar to original) - build_cli: + build: name: Build CLI Binaries - needs: prepare_cli_release_info # Does not strictly need outputs, but runs after version is confirmed runs-on: ${{ matrix.os }} strategy: matrix: @@ -59,23 +25,27 @@ jobs: target: x86_64-unknown-linux-gnu artifact_name: buster-cli-linux-x86_64.tar.gz use_tar: true + binary_name: buster-cli - os: macos-latest target: x86_64-apple-darwin artifact_name: buster-cli-darwin-x86_64.tar.gz use_tar: true + binary_name: buster-cli - os: macos-latest target: aarch64-apple-darwin artifact_name: buster-cli-darwin-arm64.tar.gz use_tar: true + binary_name: buster-cli - os: windows-latest target: x86_64-pc-windows-msvc artifact_name: buster-cli-windows-x86_64.zip use_tar: false + binary_name: buster-cli.exe steps: - - name: Checkout code from main + - name: Checkout code uses: actions/checkout@v4 with: - ref: ${{ github.sha }} + fetch-depth: 0 # As per old workflow - name: Install libpq (macOS) if: runner.os == 'macOS' @@ -101,54 +71,32 @@ jobs: echo 'panic = "abort"' >> .cargo/config.toml echo 'opt-level = 3' >> .cargo/config.toml echo 'strip = true' >> .cargo/config.toml - + - name: Build optimized release - working-directory: ./cli # Assuming this is the workspace root for the cli crate + # Builds the buster-cli package from cli/cli/Cargo.toml + working-directory: ./cli run: cargo build --release --target ${{ matrix.target }} --manifest-path ./cli/Cargo.toml - - name: Determine Binary Name and Path - id: binary_info - shell: bash - run: | - mkdir -p cli/target/${{ matrix.target }}/release - # Default to 'buster' if find command fails or returns empty - CRATE_NAME_OUTPUT=$(basename $(find cli/target/${{ matrix.target }}/release -maxdepth 1 -type f -executable ! -name '*.dSYM' ! -name '*.pdb' 2>/dev/null) || echo "buster") - if [ "$CRATE_NAME_OUTPUT" == "." ] || [ -z "$CRATE_NAME_OUTPUT" ]; then CRATE_NAME_OUTPUT="buster"; fi # Further fallback for empty/dot - - # Check if the determined/fallback name actually exists as a file - if [[ "${{ matrix.os }}" == "windows-latest" ]] && [[ "$CRATE_NAME_OUTPUT" != *.exe ]]; then - EXECUTABLE_NAME="${CRATE_NAME_OUTPUT}.exe" - else - EXECUTABLE_NAME="$CRATE_NAME_OUTPUT" - fi - - if ! [ -f "cli/target/${{ matrix.target }}/release/$EXECUTABLE_NAME" ]; then - echo "Warning: Binary '$EXECUTABLE_NAME' not found after build. Defaulting to 'buster' or 'buster.exe'." 
- if [[ "${{ matrix.os }}" == "windows-latest" ]]; then CRATE_NAME_FINAL="buster.exe"; else CRATE_NAME_FINAL="buster"; fi - else - CRATE_NAME_FINAL=$EXECUTABLE_NAME - fi - echo "Final binary name for packaging: $CRATE_NAME_FINAL" - echo "binary_name=$CRATE_NAME_FINAL" >> $GITHUB_OUTPUT - # GITHUB_OUTPUT for binary_path_val is not strictly needed by subsequent steps if using artifact names directly - # echo "binary_path_val=cli/target/${{ matrix.target }}/release/$CRATE_NAME_FINAL" >> $GITHUB_OUTPUT - - name: Compress binary (Unix) if: matrix.use_tar + # working-directory: ./cli # Old: This was ./cli shell: bash run: | - cd cli/target/${{ matrix.target }}/release - # Use the exact binary name determined (could be buster or buster.exe from binary_info) - tar czf ${{ matrix.artifact_name }} ${{ steps.binary_info.outputs.binary_name }} - if [[ "${{ runner.os }}" == "macOS" ]]; then shasum -a 256 ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256; else sha256sum ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256; fi + cd cli/target/${{ matrix.target }}/release # Adjusted path to be from repo root + tar czf ${{ matrix.artifact_name }} ${{ matrix.binary_name }} + if [[ "${{ runner.os }}" == "macOS" ]]; then + shasum -a 256 ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 + else + sha256sum ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 + fi - name: Compress binary (Windows) if: matrix.use_tar == false + # working-directory: ./cli # Old: This was ./cli shell: pwsh run: | - cd cli/target/${{ matrix.target }}/release - # Use the exact binary name, which should include .exe on Windows from binary_info - Compress-Archive -Path ${{ steps.binary_info.outputs.binary_name }} -DestinationPath ${{ matrix.artifact_name }} + cd cli/target/${{ matrix.target }}/release # Adjusted path to be from repo root + Compress-Archive -Path ${{ matrix.binary_name }} -DestinationPath ${{ matrix.artifact_name }} Get-FileHash -Algorithm SHA256 ${{ matrix.artifact_name }} | Select-Object -ExpandProperty Hash > ${{ matrix.artifact_name }}.sha256 - name: Upload artifacts @@ -160,47 +108,18 @@ jobs: cli/target/${{ matrix.target }}/release/${{ matrix.artifact_name }}.sha256 retention-days: 1 - # This job now handles tagging and creating the GitHub release - tag_and_release_cli: - name: Create Git Tag and GitHub Release for CLI - needs: [prepare_cli_release_info, build_cli] + release: + name: Create GitHub Release for CLI + needs: build runs-on: ubuntu-latest outputs: - cli_version: ${{ needs.prepare_cli_release_info.outputs.cli_version }} - cli_tag_name: ${{ needs.prepare_cli_release_info.outputs.cli_tag_name }} + cli_version: ${{ steps.get_version.outputs.version }} + cli_tag_name: v${{ steps.get_version.outputs.version }} # Matches old tag format steps: - - name: Checkout code from main (for tagging context) + - name: Checkout code uses: actions/checkout@v4 with: - ref: ${{ github.sha }} - fetch-depth: 0 - # IMPORTANT: Use a PAT with repo scope to push tags, especially if main is protected - # or if the default GITHUB_TOKEN doesn't have tag push permissions. 
- # token: ${{ secrets.REPO_ACCESS_PAT }} - - - name: Configure Git User - run: | - git config user.name "github-actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - - - name: Create and Push Git Tag - env: - # Get tag name from the prepare_cli_release_info job - CLI_TAG_NAME: ${{ needs.prepare_cli_release_info.outputs.cli_tag_name }} - # Ensure PAT is used if GITHUB_TOKEN is insufficient for pushing tags: - # GH_TOKEN: ${{ secrets.REPO_ACCESS_PAT }} # Uncomment and use your PAT secret - run: | - echo "Creating Git tag: $CLI_TAG_NAME on commit ${{ github.sha }}" - # Create tag pointing to the current commit on main (merge commit) - git tag "$CLI_TAG_NAME" ${{ github.sha }} - echo "Pushing Git tag: $CLI_TAG_NAME" - # If using PAT for push, uncomment the following lines after setting GH_TOKEN env var: - # git remote set-url origin https://x-access-token:${GH_TOKEN}@github.com/${{ github.repository }} - # git push origin "refs/tags/$CLI_TAG_NAME" - - # For now, using default GITHUB_TOKEN. THIS MIGHT NOT WORK FOR PROTECTED BRANCHES/TAGS - # OR IF THE TOKEN LACKS PERMISSION. REPLACE WITH PAT PUSH. - git push origin "refs/tags/$CLI_TAG_NAME" + fetch-depth: 0 - name: Download all build artifacts uses: actions/download-artifact@v4 @@ -210,11 +129,24 @@ jobs: - name: List downloaded artifacts (for debugging) run: ls -R downloaded-artifacts + - name: Extract version from cli/cli/Cargo.toml + id: get_version + shell: bash + run: | + # Correctly extract from the package manifest, not the workspace + VERSION=$(grep '^version' cli/cli/Cargo.toml | head -n 1 | sed 's/version = "\(.*\)"/\1/') + if [ -z "$VERSION" ]; then + echo "Error: Could not determine CLI version from cli/cli/Cargo.toml." + exit 1 + fi + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Extracted version: $VERSION" + - name: Create GitHub Release uses: softprops/action-gh-release@v1 with: - tag_name: ${{ needs.prepare_cli_release_info.outputs.cli_tag_name }} - name: CLI Release v${{ needs.prepare_cli_release_info.outputs.cli_version }} + tag_name: v${{ steps.get_version.outputs.version }} # Uses version from cli/cli/Cargo.toml + name: CLI Release v${{ steps.get_version.outputs.version }} files: | downloaded-artifacts/**/buster-cli-linux-x86_64.tar.gz downloaded-artifacts/**/buster-cli-linux-x86_64.tar.gz.sha256 @@ -228,32 +160,32 @@ jobs: prerelease: false generate_release_notes: true env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Default token is usually fine for softprops action if tag exists + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} update_homebrew_tap: name: Update Homebrew Tap - needs: tag_and_release_cli # Trigger after tag_and_release_cli which now outputs version and tag + needs: release runs-on: ubuntu-latest - if: needs.tag_and_release_cli.outputs.cli_tag_name != '' + if: needs.release.outputs.cli_tag_name != '' && needs.release.outputs.cli_version != '' steps: - name: Get release version and tag from previous job id: release_info run: | - echo "RELEASE_VERSION=${{ needs.tag_and_release_cli.outputs.cli_version }}" >> $GITHUB_ENV - echo "RELEASE_TAG=${{ needs.tag_and_release_cli.outputs.cli_tag_name }}" >> $GITHUB_ENV - echo "Using version: ${{ needs.tag_and_release_cli.outputs.cli_version }} from tag: ${{ needs.tag_and_release_cli.outputs.cli_tag_name }}" + echo "RELEASE_VERSION=${{ needs.release.outputs.cli_version }}" >> $GITHUB_ENV + echo "RELEASE_TAG=${{ needs.release.outputs.cli_tag_name }}" >> $GITHUB_ENV + echo "Using version: ${{ needs.release.outputs.cli_version }} from tag: 
${{ needs.release.outputs.cli_tag_name }}" - name: Set up GitHub CLI - uses: actions/setup-node@v4 # gh is often bundled, but this ensures it's available or can be installed + uses: actions/setup-node@v4 with: - node-version: '20' # Or any version that ensures gh is available + node-version: '20' - name: Download SHA256 sums from GitHub Release env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Use GITHUB_TOKEN to interact with the current repo's release + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_REPO: ${{ github.repository }} run: | - gh release download ${{ env.RELEASE_TAG }} --pattern '*.sha256' -R $GH_REPO + gh release download ${{ env.RELEASE_TAG }} --pattern '*.sha256' -R $GH_REPO --clobber echo "Downloaded SHA256 files:" ls -la *.sha256 @@ -273,8 +205,8 @@ jobs: uses: actions/checkout@v4 with: repository: buster-so/buster-homebrew - token: ${{ secrets.HOMEBREW_TAP_TOKEN }} # PAT with repo scope for buster-so/buster-homebrew - path: buster-homebrew # Checkout to a specific path + token: ${{ secrets.HOMEBREW_TAP_TOKEN }} + path: buster-homebrew - name: Configure Git working-directory: ./buster-homebrew @@ -286,16 +218,17 @@ jobs: working-directory: ./buster-homebrew env: VERSION: ${{ env.RELEASE_VERSION }} - TAG: ${{ env.RELEASE_TAG }} + TAG: ${{ env.RELEASE_TAG }} # This will be vX.Y.Z SHA_ARM64: ${{ env.SHA_ARM64 }} SHA_INTEL: ${{ env.SHA_INTEL }} SHA_LINUX: ${{ env.SHA_LINUX }} + REPO_OWNER: ${{ github.repository_owner }} # Needed for URLs run: | FORMULA_FILE="Formula/buster.rb" TEMP_FORMULA_FILE="Formula/buster.rb.tmp" - # URLs for artifacts - URL_BASE="https://github.com/${{ github.repository_owner }}/buster/releases/download/$TAG" + # URLs for artifacts, using REPO_OWNER and TAG + URL_BASE="https://github.com/$REPO_OWNER/buster/releases/download/$TAG" URL_ARM64="$URL_BASE/buster-cli-darwin-arm64.tar.gz" URL_INTEL="$URL_BASE/buster-cli-darwin-x86_64.tar.gz" URL_LINUX="$URL_BASE/buster-cli-linux-x86_64.tar.gz" @@ -307,17 +240,16 @@ jobs: # Update version sed "s/^ version .*/ version \\"$VERSION\\"/" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" - - # Update top-level (defaults to ARM usually, as per your formula) + + # Update top-level URL and SHA (typically ARM) sed -E "s#^ url .*# url \\"$URL_ARM64\\"#" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" sed "s/^ sha256 .*/ sha256 \\"$SHA_ARM64\\"/" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" # Update on_macos -> on_arm - # Use a block to target sed within the on_arm block. Delimit with unique markers. 
awk ' - BEGIN { printing = 1; in_arm_block = 0; } + BEGIN { in_arm_block = 0; } /on_macos do/,/end/ { - if (/on_arm do/) { in_arm_block = 1; } + if (/on_arm do/) { in_arm_block = 1; print; next; } if (in_arm_block && /url /) { print " url \\"\\"" ENVIRON["URL_ARM64"] "\\"\\"" next @@ -333,9 +265,9 @@ jobs: # Update on_macos -> on_intel awk ' - BEGIN { printing = 1; in_intel_block = 0; } + BEGIN { in_intel_block = 0; } /on_macos do/,/end/ { - if (/on_intel do/) { in_intel_block = 1; } + if (/on_intel do/) { in_intel_block = 1; print; next; } if (in_intel_block && /url /) { print " url \\"\\"" ENVIRON["URL_INTEL"] "\\"\\"" next @@ -351,10 +283,9 @@ jobs: # Update on_linux awk ' - BEGIN { printing = 1; in_linux_block = 0; } + BEGIN { in_linux_block = 0; } /on_linux do/,/end/ { - if (/url / && !in_linux_block) { next } # Skip top-level url if not already processed - if (/on_linux do/) { in_linux_block = 1; } + if (/on_linux do/) { in_linux_block = 1; print; next; } if (in_linux_block && /url /) { print " url \\"\\"" ENVIRON["URL_LINUX"] "\\"\\"" next @@ -376,7 +307,6 @@ jobs: working-directory: ./buster-homebrew run: | git add Formula/buster.rb - # Check if there are changes to commit if git diff --staged --quiet; then echo "No changes to commit to Homebrew tap." else diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 9ba226ea7..5fe2d13b1 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -12,8 +12,8 @@ permissions: env: # Placeholder for Docker Hub username/organization or GHCR owner DOCKER_REGISTRY_OWNER: ghcr.io/${{ github.repository_owner }} - API_IMAGE_NAME: api-service - WEB_IMAGE_NAME: web-service + API_IMAGE_NAME: buster/api + WEB_IMAGE_NAME: buster/web jobs: prepare_docker_release_info: From 3410475c2d3cf04c46b74f6980d3c1291417381e Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 07:56:09 -0600 Subject: [PATCH 12/43] docker release public --- .github/workflows/docker-release.yml | 38 +++++++++++++++ api/libs/semantic_layer/spec.yml | 71 ++++++++++++++-------------- 2 files changed, 73 insertions(+), 36 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 5fe2d13b1..0fc8aaed6 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -106,6 +106,25 @@ jobs: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest + + - name: Set API Package Visibility to Public + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ORG_NAME: ${{ github.repository_owner }} + run: | + echo "Attempting to set visibility for $ORG_NAME/${{ env.API_IMAGE_NAME }}" + RESPONSE_CODE=$(curl -L -s -o /dev/null -w "%{http_code}" -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/orgs/$ORG_NAME/packages/container/${{ env.API_IMAGE_NAME }}" \ + -d '{"visibility":"public"}') + if [ "$RESPONSE_CODE" -eq 200 ] || [ "$RESPONSE_CODE" -eq 204 ]; then + echo "Package $ORG_NAME/${{ env.API_IMAGE_NAME }} visibility set to public successfully." + else + echo "Failed to set package $ORG_NAME/${{ env.API_IMAGE_NAME }} visibility to public. 
HTTP Status: $RESPONSE_CODE" + # Optionally, fail the step: exit 1 + fi build_and_push_web: name: Build and Push Web Image @@ -148,3 +167,22 @@ jobs: NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} NEXT_PUBLIC_SUPABASE_URL=${{ secrets.NEXT_PUBLIC_SUPABASE_URL }} NEXT_PUBLIC_SUPABASE_ANON_KEY=${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} + + - name: Set Web Package Visibility to Public + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ORG_NAME: ${{ github.repository_owner }} + run: | + echo "Attempting to set visibility for $ORG_NAME/${{ env.WEB_IMAGE_NAME }}" + RESPONSE_CODE=$(curl -L -s -o /dev/null -w "%{http_code}" -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/orgs/$ORG_NAME/packages/container/${{ env.WEB_IMAGE_NAME }}" \ + -d '{"visibility":"public"}') + if [ "$RESPONSE_CODE" -eq 200 ] || [ "$RESPONSE_CODE" -eq 204 ]; then + echo "Package $ORG_NAME/${{ env.WEB_IMAGE_NAME }} visibility set to public successfully." + else + echo "Failed to set package $ORG_NAME/${{ env.WEB_IMAGE_NAME }} visibility to public. HTTP Status: $RESPONSE_CODE" + # Optionally, fail the step: exit 1 + fi diff --git a/api/libs/semantic_layer/spec.yml b/api/libs/semantic_layer/spec.yml index e730eafcf..81d710161 100644 --- a/api/libs/semantic_layer/spec.yml +++ b/api/libs/semantic_layer/spec.yml @@ -1,37 +1,36 @@ # Schema specification for the model structure -models: - - name: string # Required - description: string # Optional - dimensions: - - name: string # Required - description: string # Optional - type: string # Optional, inferred if omitted - searchable: boolean # Optional, default: false - options: [string] # Optional, default: null - measures: - - name: string # Required - description: string # Optional - type: string # Optional, inferred if omitted - metrics: - - name: string # Required - expr: string # Required, can use model.column from entities - description: string # Optional - args: # Optional, required only if expr contains arguments, default: null - - name: string # Required - type: string # Required - description: string # Optional - filters: - - name: string # Required - expr: string # Required, can use model.column from entities - description: string # Optional - args: # Optional, required only if expr contains arguments, default: null - - name: string # Required - type: string # Required - description: string # Optional - entities: - - name: string # Required - primary_key: string # Required - foreign_key: string # Required - type: string # Optional, e.g., "LEFT", "INNER"; LLM decides if omitted - cardinality: string # Optional, e.g., "one-to-many", "many-to-many" - description: string # Optional \ No newline at end of file +- name: string # Required + description: string # Optional + dimensions: + - name: string # Required + description: string # Optional + type: string # Optional, inferred if omitted + searchable: boolean # Optional, default: false + options: [string] # Optional, default: null + measures: + - name: string # Required + description: string # Optional + type: string # Optional, inferred if omitted + metrics: + - name: string # Required + expr: string # Required, can use model.column from entities + description: string # Optional + args: # Optional, required only if expr contains arguments, default: null + - name: string # Required + type: string # Required + description: string # Optional + filters: + - name: string # Required + expr: string # Required, can use 
model.column from entities + description: string # Optional + args: # Optional, required only if expr contains arguments, default: null + - name: string # Required + type: string # Required + description: string # Optional + entities: + - name: string # Required + primary_key: string # Required + foreign_key: string # Required + type: string # Optional, e.g., "LEFT", "INNER"; LLM decides if omitted + cardinality: string # Optional, e.g., "one-to-many", "many-to-many" + description: string # Optional From 8d28993bb4832805254a3adb9c95c6d4d65ea80b Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:00:52 -0600 Subject: [PATCH 13/43] ok libpq error --- .github/workflows/cli-release.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index d065e2398..0247be934 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -26,21 +26,29 @@ jobs: artifact_name: buster-cli-linux-x86_64.tar.gz use_tar: true binary_name: buster-cli + rust_flags: "" + pkg_config_path: "" - os: macos-latest target: x86_64-apple-darwin artifact_name: buster-cli-darwin-x86_64.tar.gz use_tar: true binary_name: buster-cli + rust_flags: "-L/usr/local/opt/libpq/lib" + pkg_config_path: "/usr/local/opt/libpq/lib/pkgconfig" - os: macos-latest target: aarch64-apple-darwin artifact_name: buster-cli-darwin-arm64.tar.gz use_tar: true binary_name: buster-cli + rust_flags: "-L/opt/homebrew/opt/libpq/lib" + pkg_config_path: "/opt/homebrew/opt/libpq/lib/pkgconfig" - os: windows-latest target: x86_64-pc-windows-msvc artifact_name: buster-cli-windows-x86_64.zip use_tar: false binary_name: buster-cli.exe + rust_flags: "" + pkg_config_path: "" steps: - name: Checkout code uses: actions/checkout@v4 @@ -75,6 +83,9 @@ jobs: - name: Build optimized release # Builds the buster-cli package from cli/cli/Cargo.toml working-directory: ./cli + env: + RUSTFLAGS: ${{ matrix.rust_flags }} + PKG_CONFIG_PATH: ${{ matrix.pkg_config_path }} run: cargo build --release --target ${{ matrix.target }} --manifest-path ./cli/Cargo.toml - name: Compress binary (Unix) From 13c12be36e33d4c0651d3e1ad2b5e2a84f606e19 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:07:15 -0600 Subject: [PATCH 14/43] platform support for docker images --- .github/workflows/docker-release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 0fc8aaed6..535c81c0b 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -102,6 +102,7 @@ jobs: context: ./api file: ./api/Dockerfile push: true + platforms: linux/amd64,linux/arm64,linux/arm/v7 tags: | ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} @@ -158,6 +159,7 @@ jobs: context: ./web file: ./web/Dockerfile push: true + platforms: linux/amd64,linux/arm64,linux/arm/v7 tags: | ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }} From 932d6caad1627644acfdf1a2777b1b891c89d74d Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:11:39 -0600 Subject: [PATCH 15/43] remove cargo build deps --- cli/Cargo.toml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 69ddab820..95fb749c8 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -5,13 
+5,6 @@ members = [ # Add "libs/*" or specific lib crates here later ] -[profile.release] -lto = true -strip = true -opt-level = "z" -codegen-units = 1 -panic = "abort" - [workspace.dependencies] anyhow = "1.0.79" async-trait = "0.1.80" From 4751cf13f07646ae4bb0a9fd86517e4bdfeec8f6 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:15:46 -0600 Subject: [PATCH 16/43] remove linux arm v7 --- .github/workflows/docker-release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 535c81c0b..8efaa77f7 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -102,7 +102,7 @@ jobs: context: ./api file: ./api/Dockerfile push: true - platforms: linux/amd64,linux/arm64,linux/arm/v7 + platforms: linux/amd64,linux/arm64 tags: | ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} @@ -159,7 +159,7 @@ jobs: context: ./web file: ./web/Dockerfile push: true - platforms: linux/amd64,linux/arm64,linux/arm/v7 + platforms: linux/amd64,linux/arm64 tags: | ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }} ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }} From 256bbbb93424871a68109eaa910b2c3dc9e03dc1 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:19:46 -0600 Subject: [PATCH 17/43] threw in old workflow with the github tap.... --- .github/workflows/cli-release.yml | 95 +++++++++---------------------- 1 file changed, 27 insertions(+), 68 deletions(-) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 0247be934..7dc9d6994 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -12,12 +12,10 @@ on: # Add permissions for creating releases permissions: contents: write - pull-requests: write # As per old workflow + pull-requests: write jobs: build: - name: Build CLI Binaries - runs-on: ${{ matrix.os }} strategy: matrix: include: @@ -25,39 +23,24 @@ jobs: target: x86_64-unknown-linux-gnu artifact_name: buster-cli-linux-x86_64.tar.gz use_tar: true - binary_name: buster-cli - rust_flags: "" - pkg_config_path: "" - os: macos-latest target: x86_64-apple-darwin artifact_name: buster-cli-darwin-x86_64.tar.gz use_tar: true - binary_name: buster-cli - rust_flags: "-L/usr/local/opt/libpq/lib" - pkg_config_path: "/usr/local/opt/libpq/lib/pkgconfig" - os: macos-latest target: aarch64-apple-darwin artifact_name: buster-cli-darwin-arm64.tar.gz use_tar: true - binary_name: buster-cli - rust_flags: "-L/opt/homebrew/opt/libpq/lib" - pkg_config_path: "/opt/homebrew/opt/libpq/lib/pkgconfig" - os: windows-latest target: x86_64-pc-windows-msvc artifact_name: buster-cli-windows-x86_64.zip use_tar: false - binary_name: buster-cli.exe - rust_flags: "" - pkg_config_path: "" + runs-on: ${{ matrix.os }} steps: - name: Checkout code uses: actions/checkout@v4 with: - fetch-depth: 0 # As per old workflow - - - name: Install libpq (macOS) - if: runner.os == 'macOS' - run: brew install libpq + fetch-depth: 0 - name: Install Rust uses: actions-rs/toolchain@v1 @@ -79,37 +62,29 @@ jobs: echo 'panic = "abort"' >> .cargo/config.toml echo 'opt-level = 3' >> .cargo/config.toml echo 'strip = true' >> .cargo/config.toml - - name: Build optimized release - # Builds the buster-cli package from cli/cli/Cargo.toml - working-directory: ./cli - env: - RUSTFLAGS: ${{ 
matrix.rust_flags }} - PKG_CONFIG_PATH: ${{ matrix.pkg_config_path }} - run: cargo build --release --target ${{ matrix.target }} --manifest-path ./cli/Cargo.toml + working-directory: ./cli + run: cargo build --release --target ${{ matrix.target }} - name: Compress binary (Unix) if: matrix.use_tar - # working-directory: ./cli # Old: This was ./cli - shell: bash + working-directory: ./cli run: | - cd cli/target/${{ matrix.target }}/release # Adjusted path to be from repo root - tar czf ${{ matrix.artifact_name }} ${{ matrix.binary_name }} + cd target/${{ matrix.target }}/release + tar czf ${{ matrix.artifact_name }} buster-cli if [[ "${{ runner.os }}" == "macOS" ]]; then shasum -a 256 ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 else sha256sum ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 fi - - name: Compress binary (Windows) if: matrix.use_tar == false - # working-directory: ./cli # Old: This was ./cli + working-directory: ./cli shell: pwsh run: | - cd cli/target/${{ matrix.target }}/release # Adjusted path to be from repo root - Compress-Archive -Path ${{ matrix.binary_name }} -DestinationPath ${{ matrix.artifact_name }} + cd target/${{ matrix.target }}/release + Compress-Archive -Path buster-cli.exe -DestinationPath ${{ matrix.artifact_name }} Get-FileHash -Algorithm SHA256 ${{ matrix.artifact_name }} | Select-Object -ExpandProperty Hash > ${{ matrix.artifact_name }}.sha256 - - name: Upload artifacts uses: actions/upload-artifact@v4 with: @@ -120,58 +95,42 @@ jobs: retention-days: 1 release: - name: Create GitHub Release for CLI needs: build runs-on: ubuntu-latest - outputs: - cli_version: ${{ steps.get_version.outputs.version }} - cli_tag_name: v${{ steps.get_version.outputs.version }} # Matches old tag format steps: - name: Checkout code uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Download all build artifacts + - name: Download all artifacts uses: actions/download-artifact@v4 - with: - path: downloaded-artifacts # Download all artifacts to this directory - - - name: List downloaded artifacts (for debugging) - run: ls -R downloaded-artifacts - - name: Extract version from cli/cli/Cargo.toml + - name: Extract version from Cargo.toml id: get_version - shell: bash run: | - # Correctly extract from the package manifest, not the workspace - VERSION=$(grep '^version' cli/cli/Cargo.toml | head -n 1 | sed 's/version = "\(.*\)"/\1/') - if [ -z "$VERSION" ]; then - echo "Error: Could not determine CLI version from cli/cli/Cargo.toml." 
- exit 1 - fi + VERSION=$(grep '^version =' cli/Cargo.toml | sed 's/version = "\(.*\)"/\1/') echo "version=$VERSION" >> $GITHUB_OUTPUT echo "Extracted version: $VERSION" - - - name: Create GitHub Release + - name: Create Release uses: softprops/action-gh-release@v1 with: - tag_name: v${{ steps.get_version.outputs.version }} # Uses version from cli/cli/Cargo.toml - name: CLI Release v${{ steps.get_version.outputs.version }} + tag_name: v${{ steps.get_version.outputs.version }} + name: Release v${{ steps.get_version.outputs.version }} files: | - downloaded-artifacts/**/buster-cli-linux-x86_64.tar.gz - downloaded-artifacts/**/buster-cli-linux-x86_64.tar.gz.sha256 - downloaded-artifacts/**/buster-cli-darwin-x86_64.tar.gz - downloaded-artifacts/**/buster-cli-darwin-x86_64.tar.gz.sha256 - downloaded-artifacts/**/buster-cli-darwin-arm64.tar.gz - downloaded-artifacts/**/buster-cli-darwin-arm64.tar.gz.sha256 - downloaded-artifacts/**/buster-cli-windows-x86_64.zip - downloaded-artifacts/**/buster-cli-windows-x86_64.zip.sha256 + **/buster-cli-linux-x86_64.tar.gz + **/buster-cli-linux-x86_64.tar.gz.sha256 + **/buster-cli-darwin-x86_64.tar.gz + **/buster-cli-darwin-x86_64.tar.gz.sha256 + **/buster-cli-darwin-arm64.tar.gz + **/buster-cli-darwin-arm64.tar.gz.sha256 + **/buster-cli-windows-x86_64.zip + **/buster-cli-windows-x86_64.zip.sha256 draft: false prerelease: false - generate_release_notes: true + generate_release_notes: true env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} update_homebrew_tap: name: Update Homebrew Tap From 771bc3b7d234ece0bebda36b66aa94a19238723b Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:33:17 -0600 Subject: [PATCH 18/43] cli release with libpq error resolve --- .github/workflows/cli-release.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 7dc9d6994..6526914fd 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -53,6 +53,23 @@ jobs: - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2 + - name: Install libpq (macOS and Linux) + run: | + if [[ "${{ runner.os }}" == "macOS" ]]; then + brew install libpq + echo "PKG_CONFIG_PATH=$(brew --prefix libpq)/lib/pkgconfig" >> $GITHUB_ENV + echo "LIBRARY_PATH=$(brew --prefix libpq)/lib" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(brew --prefix libpq)/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV + # For macOS, we might need to explicitly tell rustc where to find the library. 
+ # Adding common libpq paths to rustflags + echo "RUSTFLAGS=-L $(brew --prefix libpq)/lib" >> $GITHUB_ENV + elif [[ "${{ runner.os }}" == "Linux" ]]; then + sudo apt-get update -y + sudo apt-get install -y libpq-dev + fi + env: + HOMEBREW_NO_INSTALL_CLEANUP: 1 # Recommended for CI to speed up + - name: Configure Cargo for optimized build run: | mkdir -p .cargo From a24033fb63ea6f6561d5554405fb41de4d1463a0 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:37:11 -0600 Subject: [PATCH 19/43] window error --- .github/workflows/cli-release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 6526914fd..748cd6296 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -54,6 +54,7 @@ jobs: uses: Swatinem/rust-cache@v2 - name: Install libpq (macOS and Linux) + if: runner.os != 'Windows' run: | if [[ "${{ runner.os }}" == "macOS" ]]; then brew install libpq From 1b16398413d2b163d740a91ffb807cc304a464c2 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:47:08 -0600 Subject: [PATCH 20/43] docker release with arm64 --- .github/workflows/docker-release.yml | 48 ++++++++++++++++++---------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 8efaa77f7..584e795c9 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -74,7 +74,18 @@ jobs: name: Build and Push API Image needs: prepare_docker_release_info if: needs.prepare_docker_release_info.outputs.api_version_found == 'true' - runs-on: blacksmith-32vcpu-ubuntu-2204 + strategy: + fail-fast: false + matrix: + platform: [amd64, arm64] + include: + - platform: amd64 + runner: blacksmith-8vcpu-ubuntu-2204 + docker_platform: linux/amd64 + - platform: arm64 + runner: blacksmith-8vcpu-ubuntu-2204-arm + docker_platform: linux/arm64 + runs-on: ${{ matrix.runner }} env: API_VERSION: ${{ needs.prepare_docker_release_info.outputs.api_version }} steps: @@ -83,9 +94,6 @@ jobs: with: ref: ${{ github.sha }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -102,11 +110,11 @@ jobs: context: ./api file: ./api/Dockerfile push: true - platforms: linux/amd64,linux/arm64 + platforms: ${{ matrix.docker_platform }} tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest-${{ matrix.platform }} - name: Set API Package Visibility to Public env: @@ -131,7 +139,18 @@ jobs: name: Build and Push Web Image needs: prepare_docker_release_info if: needs.prepare_docker_release_info.outputs.web_version_found == 'true' - runs-on: blacksmith-32vcpu-ubuntu-2204 + strategy: + fail-fast: false + matrix: + platform: [amd64, arm64] + include: + - platform: amd64 + runner: blacksmith-8vcpu-ubuntu-2204 + docker_platform: linux/amd64 + - platform: arm64 + runner: blacksmith-8vcpu-ubuntu-2204-arm + docker_platform: linux/arm64 + runs-on: ${{ matrix.runner }} env: WEB_VERSION: ${{ 
needs.prepare_docker_release_info.outputs.web_version }} steps: @@ -140,9 +159,6 @@ jobs: with: ref: ${{ github.sha }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -159,11 +175,11 @@ jobs: context: ./web file: ./web/Dockerfile push: true - platforms: linux/amd64,linux/arm64 + platforms: ${{ matrix.docker_platform }} tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:latest + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:latest-${{ matrix.platform }} build-args: | NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} From d8ea573c56ea42901c41b14b2afdf24107749ab8 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 08:54:21 -0600 Subject: [PATCH 21/43] os fix on ssh tunneling --- .../data_source_connections/ssh_tunneling.rs | 48 ++++++++++--------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs index 30d1b1978..ece4bae8d 100644 --- a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs +++ b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs @@ -4,9 +4,10 @@ use std::{ fs, io::Write, net::TcpListener, - os::unix::fs::PermissionsExt, process::{Child, Command}, }; +#[cfg(unix)] +use std::os::unix::fs::PermissionsExt; use tempfile::NamedTempFile; pub fn establish_ssh_tunnel( @@ -61,29 +62,32 @@ pub fn establish_ssh_tunnel( } }; - let mut perms = match fs::metadata(temp_ssh_key.path()) { - Ok(p) => p.permissions(), - Err(e) => { - tracing::error!( - "There was a problem while getting the metadata of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; + #[cfg(unix)] + { + let mut perms = match fs::metadata(temp_ssh_key.path()) { + Ok(p) => p.permissions(), + Err(e) => { + tracing::error!( + "There was a problem while getting the metadata of the temp file: {}", + e + ); + return Err(anyhow!(e)); + } + }; - perms.set_mode(0o600); + perms.set_mode(0o600); - match fs::set_permissions(temp_ssh_key.path(), perms) { - Ok(_) => {} - Err(e) => { - tracing::error!( - "There was a problem while setting the permissions of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; + match fs::set_permissions(temp_ssh_key.path(), perms) { + Ok(_) => {} + Err(e) => { + tracing::error!( + "There was a problem while setting the permissions of the temp file: {}", + e + ); + return Err(anyhow!(e)); + } + }; + } let ssh_tunnel = match Command::new("ssh") .arg("-T") From 38ac1ec47fa69b9eaf4fefd2148575707fca4459 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 09:14:02 -0600 Subject: [PATCH 22/43] no unix commands --- .../data_source_connections/ssh_tunneling.rs | 29 ------------------- 1 file changed, 29 deletions(-) diff --git a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs index ece4bae8d..7ec8d5608 100644 --- a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs +++ 
b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs @@ -6,8 +6,6 @@ use std::{ net::TcpListener, process::{Child, Command}, }; -#[cfg(unix)] -use std::os::unix::fs::PermissionsExt; use tempfile::NamedTempFile; pub fn establish_ssh_tunnel( @@ -62,33 +60,6 @@ pub fn establish_ssh_tunnel( } }; - #[cfg(unix)] - { - let mut perms = match fs::metadata(temp_ssh_key.path()) { - Ok(p) => p.permissions(), - Err(e) => { - tracing::error!( - "There was a problem while getting the metadata of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; - - perms.set_mode(0o600); - - match fs::set_permissions(temp_ssh_key.path(), perms) { - Ok(_) => {} - Err(e) => { - tracing::error!( - "There was a problem while setting the permissions of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; - } - let ssh_tunnel = match Command::new("ssh") .arg("-T") .arg("-i") From 0487dc9286bf501f210a5aa4501995d1a1d78625 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 7 May 2025 16:49:44 +0000 Subject: [PATCH 23/43] chore(versions): bump api to v0.1.1; bump web to v0.1.1; bump cli to v0.1.1 [skip ci] --- api/server/Cargo.toml | 2 +- cli/cli/Cargo.toml | 2 +- web/package-lock.json | 4 ++-- web/package.json | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/api/server/Cargo.toml b/api/server/Cargo.toml index 39bd57adb..d8eb7a028 100644 --- a/api/server/Cargo.toml +++ b/api/server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "buster_server" -version = "0.1.0" +version = "0.1.1" edition = "2021" default-run = "buster_server" diff --git a/cli/cli/Cargo.toml b/cli/cli/Cargo.toml index ef12b4f9e..2b6f11506 100644 --- a/cli/cli/Cargo.toml +++ b/cli/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "buster-cli" -version = "0.1.0" +version = "0.1.1" edition = "2021" build = "build.rs" diff --git a/web/package-lock.json b/web/package-lock.json index 55934f40f..a333eb8ab 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "web", - "version": "0.1.5", + "version": "0.1.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "web", - "version": "0.1.5", + "version": "0.1.1", "dependencies": { "@dnd-kit/core": "^6.3.1", "@dnd-kit/modifiers": "^9.0.0", diff --git a/web/package.json b/web/package.json index 8c65831a9..1fcd65c9f 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "web", - "version": "0.1.0", + "version": "0.1.1", "private": true, "scripts": { "dev": "next dev --turbo", From d726202c270218722ddaee3b28a1bc6409aa7368 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 7 May 2025 16:49:45 +0000 Subject: [PATCH 24/43] chore: update tag_info.json with potential release versions [skip ci] --- tag_info.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 tag_info.json diff --git a/tag_info.json b/tag_info.json new file mode 100644 index 000000000..c41501083 --- /dev/null +++ b/tag_info.json @@ -0,0 +1,7 @@ +{ + "api_tag": "api/v0.1.1", "api_version": "0.1.1" +, + "web_tag": "web/v0.1.1", "web_version": "0.1.1" +, + "cli_tag": "cli/v0.1.1", "cli_version": "0.1.1" +} From a901ee382ea982d11171e4009685a985873881b3 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 12:16:25 -0600 Subject: [PATCH 25/43] main env on local build --- .github/workflows/docker-release.yml | 1 + supabase/.env.example | 167 ++++++++++++++------------- 2 files changed, 90 insertions(+), 78 deletions(-) diff --git a/.github/workflows/docker-release.yml 
b/.github/workflows/docker-release.yml index 584e795c9..9bb5d4dbf 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -139,6 +139,7 @@ jobs: name: Build and Push Web Image needs: prepare_docker_release_info if: needs.prepare_docker_release_info.outputs.web_version_found == 'true' + environment: main strategy: fail-fast: false matrix: diff --git a/supabase/.env.example b/supabase/.env.example index 3d856d987..ed8f8fd57 100644 --- a/supabase/.env.example +++ b/supabase/.env.example @@ -1,58 +1,85 @@ +# General Application Settings +ENVIRONMENT="development" +BUSTER_URL="http://web:3000" +BUSTER_WH_TOKEN="buster-wh-token" + +# --- API Service Specific --- +# Direct Database Connection (for API service and potentially others) +DATABASE_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +# Pooled Database Connection (for API service, uses Supavisor) +POOLER_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +# Redis Connection +REDIS_URL="redis://buster-redis:6379" +# Supabase Connection for API service +SUPABASE_URL="http://kong:8000" +SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" + +# --- LLM / AI Services --- +EMBEDDING_PROVIDER="ollama" +EMBEDDING_MODEL="mxbai-embed-large" +COHERE_API_KEY="" +OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant +LLM_API_KEY="test-key" +LLM_BASE_URL="http://litellm:4001" + +# --- Web Client (Next.js) Specific --- +NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) +NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) +NEXT_PUBLIC_SUPABASE_URL="http://localhost:8000" # External URL for Supabase (Kong proxy) +NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" +NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" + +################################################# +# Supabase Stack Configuration Variables +# These are primarily used by the Supabase services themselves +# (defined in supabase/docker-compose.yml) +# and are sourced from this .env file when `docker compose up` is run. 
+################################################# + ############ # Secrets -# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION ############ - -POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password -JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long -ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE -SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q -DASHBOARD_USERNAME=supabase -DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated +POSTGRES_PASSWORD="your-super-secret-and-long-postgres-password" +JWT_SECRET="your-super-secret-jwt-token-with-at-least-32-characters-long" +ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" +SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" +DASHBOARD_USERNAME="supabase" +DASHBOARD_PASSWORD="this_password_is_insecure_and_should_be_updated" ############ -# Database - You can change these to any PostgreSQL database that has logical replication enabled. +# Database ############ - -POSTGRES_HOST=db -POSTGRES_DB=postgres -POSTGRES_PORT=5432 -# default user is postgres +POSTGRES_HOST="db" +POSTGRES_DB="postgres" +POSTGRES_PORT="5432" ############ # Supavisor -- Database pooler ############ -POOLER_PROXY_PORT_TRANSACTION=6543 -POOLER_DEFAULT_POOL_SIZE=20 -POOLER_MAX_CLIENT_CONN=100 -POOLER_TENANT_ID=your-tenant-id - +POOLER_PROXY_PORT_TRANSACTION="6543" +POOLER_DEFAULT_POOL_SIZE="20" +POOLER_MAX_CLIENT_CONN="100" +POOLER_TENANT_ID="your-tenant-id" ############ -# API Proxy - Configuration for the Kong Reverse proxy. +# API Proxy - Kong ############ - -KONG_HTTP_PORT=8000 -KONG_HTTPS_PORT=8443 - +KONG_HTTP_PORT="8000" +KONG_HTTPS_PORT="8443" ############ -# API - Configuration for PostgREST. +# API - PostgREST ############ - -PGRST_DB_SCHEMAS=public,storage,graphql_public - +PGRST_DB_SCHEMAS="public,storage,graphql_public" ############ -# Auth - Configuration for the GoTrue authentication server. +# Auth - GoTrue ############ - -## General -SITE_URL=http://localhost:3003 -ADDITIONAL_REDIRECT_URLS= -JWT_EXPIRY=3600 -DISABLE_SIGNUP=false -API_EXTERNAL_URL=http://localhost:8000 +SITE_URL="http://localhost:3000" # Default base URL for the site (used in emails, etc.) 
+ADDITIONAL_REDIRECT_URLS="" +JWT_EXPIRY="3600" +DISABLE_SIGNUP="false" +API_EXTERNAL_URL="http://localhost:8000" # Publicly accessible URL for the Supabase API (via Kong) ## Mailer Config MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" @@ -61,57 +88,41 @@ MAILER_URLPATHS_RECOVERY="/auth/v1/verify" MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" ## Email auth -ENABLE_EMAIL_SIGNUP=true -ENABLE_EMAIL_AUTOCONFIRM=false -SMTP_ADMIN_EMAIL=admin@buster.so -SMTP_HOST=supabase-mail -SMTP_PORT=2500 -SMTP_USER= -SMTP_PASS= -SMTP_SENDER_NAME=Buster -ENABLE_ANONYMOUS_USERS=true +ENABLE_EMAIL_SIGNUP="true" +ENABLE_EMAIL_AUTOCONFIRM="false" +SMTP_ADMIN_EMAIL="admin@buster.so" +SMTP_HOST="supabase-mail" +SMTP_PORT="2500" +SMTP_USER="" +SMTP_PASS="" +SMTP_SENDER_NAME="Buster" +ENABLE_ANONYMOUS_USERS="true" ## Phone auth -ENABLE_PHONE_SIGNUP=true -ENABLE_PHONE_AUTOCONFIRM=true - +ENABLE_PHONE_SIGNUP="true" +ENABLE_PHONE_AUTOCONFIRM="true" ############ -# Studio - Configuration for the Dashboard +# Studio - Supabase Dashboard ############ +STUDIO_DEFAULT_ORGANIZATION="Default Organization" +STUDIO_DEFAULT_PROJECT="Default Project" +STUDIO_PORT="3003" +SUPABASE_PUBLIC_URL="http://localhost:8000" # Public URL for Supabase (Kong), used by Studio -STUDIO_DEFAULT_ORGANIZATION=Default Organization -STUDIO_DEFAULT_PROJECT=Default Project - -STUDIO_PORT=3003 -# replace if you intend to use Studio outside of localhost -SUPABASE_PUBLIC_URL=http://localhost:8000 - -# Enable webp support -IMGPROXY_ENABLE_WEBP_DETECTION=true - -# Add your OpenAI API key to enable SQL Editor Assistant -OPENAI_API_KEY= +# Image Proxy +IMGPROXY_ENABLE_WEBP_DETECTION="true" ############ -# Functions - Configuration for Functions +# Functions - Supabase Edge Functions ############ -# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet. 
-FUNCTIONS_VERIFY_JWT=false +FUNCTIONS_VERIFY_JWT="false" ############ -# Logs - Configuration for Logflare -# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction +# Logs - Logflare ############ - -LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key - -# Change vector.toml sinks to reflect this change -LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key - -# Docker socket location - this value will differ depending on your OS -DOCKER_SOCKET_LOCATION=/var/run/docker.sock - -# Google Cloud Project details -GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID -GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER +LOGFLARE_LOGGER_BACKEND_API_KEY="your-super-secret-and-long-logflare-key" +LOGFLARE_API_KEY="your-super-secret-and-long-logflare-key" +DOCKER_SOCKET_LOCATION="/var/run/docker.sock" +GOOGLE_PROJECT_ID="GOOGLE_PROJECT_ID" +GOOGLE_PROJECT_NUMBER="GOOGLE_PROJECT_NUMBER" \ No newline at end of file From d3f637e0e82e115dcf3bf5a9bcfd7cbd50ad2db5 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 12:47:07 -0600 Subject: [PATCH 26/43] next public web socket url --- .github/workflows/docker-release.yml | 1 + docker-compose.yml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 9bb5d4dbf..bce380d86 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -186,6 +186,7 @@ jobs: NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} NEXT_PUBLIC_SUPABASE_URL=${{ secrets.NEXT_PUBLIC_SUPABASE_URL }} NEXT_PUBLIC_SUPABASE_ANON_KEY=${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} + NEXT_PUBLIC_WEB_SOCKET_URL=${{ secrets.NEXT_PUBLIC_WEB_SOCKET_URL }} - name: Set Web Package Visibility to Public env: diff --git a/docker-compose.yml b/docker-compose.yml index 3bb5bb055..8d873a282 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,7 +16,7 @@ services: retries: 30 api: - image: ghcr.io/buster-so/buster/api:latest + image: ghcr.io/buster-so/buster/api:latest-arm64 container_name: buster-api env_file: - .env @@ -54,7 +54,7 @@ services: condition: service_healthy web: - image: ghcr.io/buster-so/buster/web:latest + image: ghcr.io/buster-so/buster/web:latest-arm64 container_name: buster-web ports: - "3000:3000" From aa3de5bf3553e694e48ac04a6aa7502ac6a358e1 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 16:13:57 -0600 Subject: [PATCH 27/43] finally have all the networking communicating --- .env.example | 9 +-- docker-compose.yml | 28 ++------ start.sh | 15 ----- supabase/.env.example | 128 ------------------------------------ supabase/docker-compose.yml | 106 ++++++++++++++++++++++++++++- 5 files changed, 115 insertions(+), 171 deletions(-) delete mode 100644 start.sh delete mode 100644 supabase/.env.example diff --git a/.env.example b/.env.example index ed8f8fd57..bf3ece2a7 100644 --- a/.env.example +++ b/.env.example @@ -1,13 +1,13 @@ # General Application Settings ENVIRONMENT="development" -BUSTER_URL="http://web:3000" +BUSTER_URL="http://localhost:3000" BUSTER_WH_TOKEN="buster-wh-token" # --- API Service Specific --- # Direct Database Connection (for API service and potentially others) -DATABASE_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +DATABASE_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" # Pooled Database Connection (for API service, uses Supavisor) 
-POOLER_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +POOLER_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" # Redis Connection REDIS_URL="redis://buster-redis:6379" # Supabase Connection for API service @@ -25,7 +25,8 @@ LLM_BASE_URL="http://litellm:4001" # --- Web Client (Next.js) Specific --- NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) -NEXT_PUBLIC_SUPABASE_URL="http://localhost:8000" # External URL for Supabase (Kong proxy) +NEXT_PUBLIC_SUPABASE_URL="http://kong:8000" # External URL for Supabase (Kong proxy) +NEXT_PUBLIC_WS_URL="ws://localhost:3001" NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" diff --git a/docker-compose.yml b/docker-compose.yml index 8d873a282..cbb05bfe4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -34,12 +34,7 @@ services: - COHERE_API_KEY=${COHERE_API_KEY} ports: - "3001:3001" - deploy: - resources: - limits: - memory: 4G - reservations: - memory: 2G + - "3000:3000" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:3001/health"] interval: 30s @@ -52,29 +47,16 @@ services: condition: service_healthy kong: condition: service_healthy - + web: image: ghcr.io/buster-so/buster/web:latest-arm64 container_name: buster-web - ports: - - "3000:3000" + env_file: + - .env depends_on: api: condition: service_healthy - - litellm: - image: ghcr.io/berriai/litellm:main-stable - container_name: buster-litellm - restart: always - ports: - - "4001:4001" - env_file: - - .env - environment: - - LITELLM_ENV=local - depends_on: - supavisor: - condition: service_healthy + network_mode: "service:api" volumes: buster_redis_data: \ No newline at end of file diff --git a/start.sh b/start.sh deleted file mode 100644 index 06023d123..000000000 --- a/start.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -echo "Starting Supabase..." -cd supabase -docker compose up -d - -echo "Waiting for Supabase to be healthy..." -until curl -s http://localhost:54321/rest/v1/ > /dev/null; do - echo "Waiting for Supabase..." - sleep 5 -done - -echo "Supabase is ready! Starting main services..." -cd .. 
-docker compose up \ No newline at end of file diff --git a/supabase/.env.example b/supabase/.env.example deleted file mode 100644 index ed8f8fd57..000000000 --- a/supabase/.env.example +++ /dev/null @@ -1,128 +0,0 @@ -# General Application Settings -ENVIRONMENT="development" -BUSTER_URL="http://web:3000" -BUSTER_WH_TOKEN="buster-wh-token" - -# --- API Service Specific --- -# Direct Database Connection (for API service and potentially others) -DATABASE_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" -# Pooled Database Connection (for API service, uses Supavisor) -POOLER_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" -# Redis Connection -REDIS_URL="redis://buster-redis:6379" -# Supabase Connection for API service -SUPABASE_URL="http://kong:8000" -SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" - -# --- LLM / AI Services --- -EMBEDDING_PROVIDER="ollama" -EMBEDDING_MODEL="mxbai-embed-large" -COHERE_API_KEY="" -OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant -LLM_API_KEY="test-key" -LLM_BASE_URL="http://litellm:4001" - -# --- Web Client (Next.js) Specific --- -NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) -NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) -NEXT_PUBLIC_SUPABASE_URL="http://localhost:8000" # External URL for Supabase (Kong proxy) -NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" -NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" - -################################################# -# Supabase Stack Configuration Variables -# These are primarily used by the Supabase services themselves -# (defined in supabase/docker-compose.yml) -# and are sourced from this .env file when `docker compose up` is run. 
-################################################# - -############ -# Secrets -############ -POSTGRES_PASSWORD="your-super-secret-and-long-postgres-password" -JWT_SECRET="your-super-secret-jwt-token-with-at-least-32-characters-long" -ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" -SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" -DASHBOARD_USERNAME="supabase" -DASHBOARD_PASSWORD="this_password_is_insecure_and_should_be_updated" - -############ -# Database -############ -POSTGRES_HOST="db" -POSTGRES_DB="postgres" -POSTGRES_PORT="5432" - -############ -# Supavisor -- Database pooler -############ -POOLER_PROXY_PORT_TRANSACTION="6543" -POOLER_DEFAULT_POOL_SIZE="20" -POOLER_MAX_CLIENT_CONN="100" -POOLER_TENANT_ID="your-tenant-id" - -############ -# API Proxy - Kong -############ -KONG_HTTP_PORT="8000" -KONG_HTTPS_PORT="8443" - -############ -# API - PostgREST -############ -PGRST_DB_SCHEMAS="public,storage,graphql_public" - -############ -# Auth - GoTrue -############ -SITE_URL="http://localhost:3000" # Default base URL for the site (used in emails, etc.) -ADDITIONAL_REDIRECT_URLS="" -JWT_EXPIRY="3600" -DISABLE_SIGNUP="false" -API_EXTERNAL_URL="http://localhost:8000" # Publicly accessible URL for the Supabase API (via Kong) - -## Mailer Config -MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" -MAILER_URLPATHS_INVITE="/auth/v1/verify" -MAILER_URLPATHS_RECOVERY="/auth/v1/verify" -MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" - -## Email auth -ENABLE_EMAIL_SIGNUP="true" -ENABLE_EMAIL_AUTOCONFIRM="false" -SMTP_ADMIN_EMAIL="admin@buster.so" -SMTP_HOST="supabase-mail" -SMTP_PORT="2500" -SMTP_USER="" -SMTP_PASS="" -SMTP_SENDER_NAME="Buster" -ENABLE_ANONYMOUS_USERS="true" - -## Phone auth -ENABLE_PHONE_SIGNUP="true" -ENABLE_PHONE_AUTOCONFIRM="true" - -############ -# Studio - Supabase Dashboard -############ -STUDIO_DEFAULT_ORGANIZATION="Default Organization" -STUDIO_DEFAULT_PROJECT="Default Project" -STUDIO_PORT="3003" -SUPABASE_PUBLIC_URL="http://localhost:8000" # Public URL for Supabase (Kong), used by Studio - -# Image Proxy -IMGPROXY_ENABLE_WEBP_DETECTION="true" - -############ -# Functions - Supabase Edge Functions -############ -FUNCTIONS_VERIFY_JWT="false" - -############ -# Logs - Logflare -############ -LOGFLARE_LOGGER_BACKEND_API_KEY="your-super-secret-and-long-logflare-key" -LOGFLARE_API_KEY="your-super-secret-and-long-logflare-key" -DOCKER_SOCKET_LOCATION="/var/run/docker.sock" -GOOGLE_PROJECT_ID="GOOGLE_PROJECT_ID" -GOOGLE_PROJECT_NUMBER="GOOGLE_PROJECT_NUMBER" \ No newline at end of file diff --git a/supabase/docker-compose.yml b/supabase/docker-compose.yml index 23ba55227..88c4f85ca 100644 --- a/supabase/docker-compose.yml +++ b/supabase/docker-compose.yml @@ -16,6 +16,46 @@ services: - '9000:9000' # web interface - '1100:1100' # POP3 + studio: + container_name: supabase-studio + image: supabase/studio:20241202-71e5240 + restart: unless-stopped + healthcheck: + test: + [ + "CMD", + "node", + "-e", + "fetch('http://studio:3000/api/profile').then((r) => {if (r.status !== 200) throw new Error(r.status)})" + ] + timeout: 10s + interval: 5s + retries: 3 + depends_on: + analytics: + condition: service_healthy + environment: + 
STUDIO_PG_META_URL: http://meta:8080 + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + + DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION} + DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT} + OPENAI_API_KEY: ${OPENAI_API_KEY:-} + + SUPABASE_URL: http://kong:8000 + SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL} + SUPABASE_ANON_KEY: ${ANON_KEY} + SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} + AUTH_JWT_SECRET: ${JWT_SECRET} + + LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} + LOGFLARE_URL: http://analytics:4000 + NEXT_PUBLIC_ENABLE_LOGS: true + # Comment to use Big Query backend for analytics + NEXT_ANALYTICS_BACKEND_PROVIDER: postgres + # Uncomment to use Big Query backend for analytics + # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery + kong: container_name: supabase-kong image: kong:2.8.1 @@ -147,6 +187,52 @@ services: PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY} command: "postgrest" + realtime: + # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain + container_name: realtime-dev.supabase-realtime + image: supabase/realtime:v2.33.70 + depends_on: + db: + # Disable this if you are using an external Postgres database + condition: service_healthy + analytics: + condition: service_healthy + healthcheck: + test: + [ + "CMD", + "curl", + "-sSfL", + "--head", + "-o", + "/dev/null", + "-H", + "Authorization: Bearer ${ANON_KEY}", + "http://localhost:4000/api/tenants/realtime-dev/health" + ] + timeout: 5s + interval: 5s + retries: 3 + restart: unless-stopped + environment: + PORT: 4000 + DB_HOST: ${POSTGRES_HOST} + DB_PORT: ${POSTGRES_PORT} + DB_USER: supabase_admin + DB_PASSWORD: ${POSTGRES_PASSWORD} + DB_NAME: ${POSTGRES_DB} + DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime' + DB_ENC_KEY: supabaserealtime + API_JWT_SECRET: ${JWT_SECRET} + SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq + ERL_AFLAGS: -proto_dist inet_tcp + DNS_NODES: "''" + RLIMIT_NOFILE: "10000" + APP_NAME: realtime + SEED_SELF_HOST: true + RUN_JANITOR: true + + # To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up storage: container_name: supabase-storage image: supabase/storage-api:v1.11.13 @@ -206,6 +292,24 @@ services: volumes: - ./volumes/storage:/var/lib/storage:z + meta: + container_name: supabase-meta + image: supabase/postgres-meta:v0.84.2 + depends_on: + db: + # Disable this if you are using an external Postgres database + condition: service_healthy + analytics: + condition: service_healthy + restart: unless-stopped + environment: + PG_META_PORT: 8080 + PG_META_DB_HOST: ${POSTGRES_HOST} + PG_META_DB_PORT: ${POSTGRES_PORT} + PG_META_DB_NAME: ${POSTGRES_DB} + PG_META_DB_USER: supabase_admin + PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD} + functions: container_name: supabase-edge-functions image: supabase/edge-runtime:v1.65.3 @@ -379,7 +483,7 @@ services: - POOLER_TENANT_ID=${POOLER_TENANT_ID} - POOLER_DEFAULT_POOL_SIZE=${POOLER_DEFAULT_POOL_SIZE} - POOLER_MAX_CLIENT_CONN=${POOLER_MAX_CLIENT_CONN} - - POOLER_POOL_MODE=transaction + - POOLER_POOL_MODE=session volumes: - ./volumes/pooler/pooler.exs:/etc/pooler/pooler.exs:ro From e616c4a166165b7ecf0d0e2d782e4c3711c5ebe9 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 17:10:57 -0600 Subject: [PATCH 28/43] run temp fix --- cli/Cargo.toml | 1 + cli/cli/Cargo.toml | 1 + cli/cli/src/commands/run.rs | 177 +++++++++++++++++++++++++++--------- 3 files changed, 136 insertions(+), 43 deletions(-) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 
95fb749c8..fa07be4e0 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -42,6 +42,7 @@ crossterm = "0.29" # Add crossterm explicitly rustyline = "15.0.0" once_cell = "1.19.0" pathdiff = "0.2.1" +rust-embed = { version = "8.7.1", features = ["include-exclude"] } # Keep dev-dependencies separate if they aren't shared # tempfile = "3.16.0" \ No newline at end of file diff --git a/cli/cli/Cargo.toml b/cli/cli/Cargo.toml index 2b6f11506..0e512d46e 100644 --- a/cli/cli/Cargo.toml +++ b/cli/cli/Cargo.toml @@ -46,6 +46,7 @@ once_cell = { workspace = true } pathdiff = { workspace = true } # Add the shared query engine library query_engine = { workspace = true } +rust-embed = { workspace = true, features = ["include-exclude"] } litellm = { path = "../../api/libs/litellm" } diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index d498607f1..965fcad20 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -1,55 +1,146 @@ +use std::fs; +use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use crate::error::BusterError; +use indicatif::{ProgressBar, ProgressStyle}; +use std::time::Duration; +use tempfile::TempDir; +use rust_embed::RustEmbed; -const DOCKER_COMPOSE_FILE: &str = "docker-compose.yml"; // Or the path you decide for it +#[derive(RustEmbed)] +#[folder = "../../"] +#[include = "docker-compose.yml"] +#[include = "supabase/**/*"] +#[exclude = "supabase/volumes/db/data/**/*"] +#[exclude = "supabase/volumes/storage/**/*"] +#[exclude = "supabase/.env"] +#[exclude = "supabase/test.http"] +#[exclude = "supabase/docker-compose.override.yml"] +struct StaticAssets; -pub async fn start() -> Result<(), BusterError> { - println!("Attempting to start services with docker-compose..."); - - let mut cmd = Command::new("docker-compose"); - cmd.arg("-f") - .arg(DOCKER_COMPOSE_FILE) - .arg("up") - .arg("-d"); - - cmd.stdout(Stdio::inherit()); - cmd.stderr(Stdio::inherit()); - - let status = cmd.status().map_err(|e| { - BusterError::CommandError(format!("Failed to execute docker-compose up: {}", e)) +async fn setup_temporary_compose_environment() -> Result { + let temp_dir = TempDir::new().map_err(|e| { + BusterError::CommandError(format!("Failed to create temporary directory: {}", e)) })?; + let base_path = temp_dir.path(); - if status.success() { - println!("Services started successfully in detached mode."); - Ok(()) + for filename_cow in StaticAssets::iter() { + let filename = filename_cow.as_ref(); + let asset = StaticAssets::get(filename).ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", filename)))?; + let target_file_path = base_path.join(filename); + + if let Some(parent) = target_file_path.parent() { + fs::create_dir_all(parent).map_err(|e| { + BusterError::CommandError(format!( + "Failed to create directory {}: {}", + parent.display(), + e + )) + })?; + } + + fs::write(&target_file_path, asset.data).map_err(|e| { + BusterError::CommandError(format!( + "Failed to write embedded file {} to {}: {}", + filename, + target_file_path.display(), + e + )) + })?; + } + + let supabase_volumes_path = base_path.join("supabase/volumes"); + fs::create_dir_all(supabase_volumes_path.join("functions")).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions: {}", e)))?; + + let local_dotenv_path = PathBuf::from("/Users/dallin/buster/buster/.env"); + if local_dotenv_path.exists() { + let target_dotenv_path = base_path.join(".env"); + fs::copy(&local_dotenv_path, &target_dotenv_path).map_err(|e| { + 
BusterError::CommandError(format!( + "Failed to copy local .env from {} to {}: {}", + local_dotenv_path.display(), + target_dotenv_path.display(), + e + )) + })?; } else { - Err(BusterError::CommandError( - format!("docker-compose up -d failed with status: {}", status) - )) + println!("Warning: Specified .env file not found at {}. Services might not configure correctly if .env is required by docker-compose.yml.", local_dotenv_path.display()); + } + + Ok(temp_dir) +} + +async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Result<(), BusterError> { + let temp_dir = setup_temporary_compose_environment().await?; + let temp_dir_path = temp_dir.path(); + + let persistent_project_path = PathBuf::from("/Users/dallin/buster/buster"); + + let supabase_volumes_persistent_path = persistent_project_path.join("supabase/volumes"); + fs::create_dir_all(supabase_volumes_persistent_path.join("db/data")) + .map_err(|e| BusterError::CommandError(format!("Failed to create persistent supabase/volumes/db/data: {}", e)))?; + fs::create_dir_all(supabase_volumes_persistent_path.join("storage")) + .map_err(|e| BusterError::CommandError(format!("Failed to create persistent supabase/volumes/storage: {}", e)))?; + + let pb = ProgressBar::new_spinner(); + pb.enable_steady_tick(Duration::from_millis(120)); + pb.set_style( + ProgressStyle::default_spinner() + .tick_strings(&["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸", "✔"]) + .template("{spinner:.blue} {msg}") + .expect("Failed to set progress bar style"), + ); + if operation_name == "Starting" { + pb.set_message(format!("{} Buster services... (this may take a few minutes)", operation_name)); + } else { + pb.set_message(format!("{} Buster services...", operation_name)); + } + + let mut cmd = Command::new("docker"); + cmd.arg("compose") + .arg("-p") + .arg("buster") + .arg("--project-directory") + .arg(persistent_project_path) + .arg("-f") + .arg(temp_dir_path.join("docker-compose.yml")) + .args(args) + .stdout(Stdio::null()) + .stderr(Stdio::null()); + + let status_result = cmd.status(); + + match status_result { + Ok(status) => { + if status.success() { + pb.finish_with_message(format!( + "Buster services {} successfully.", + operation_name.to_lowercase() + )); + Ok(()) + } else { + let err_msg = format!( + "docker compose {} failed (status: {}). 
Check Docker logs for details (context: {}).",
+                args.join(" "),
+                status,
+                temp_dir_path.display()
+            );
+            pb.abandon_with_message(format!("Error: {}", err_msg));
+            Err(BusterError::CommandError(err_msg))
+        }
+        Err(e) => {
+            let err_msg = format!("Failed to execute docker compose {}: {}", args.join(" "), e);
+            pb.abandon_with_message(format!("Error: {}", err_msg));
+            Err(BusterError::CommandError(err_msg))
+        }
     }
 }
 
+pub async fn start() -> Result<(), BusterError> {
+    run_docker_compose_command(&["up", "-d"], "Starting").await
+}
+
 pub async fn stop() -> Result<(), BusterError> {
-    println!("Attempting to stop services with docker-compose...");
-
-    let mut cmd = Command::new("docker-compose");
-    cmd.arg("-f")
-        .arg(DOCKER_COMPOSE_FILE)
-        .arg("down");
-
-    cmd.stdout(Stdio::inherit());
-    cmd.stderr(Stdio::inherit());
-
-    let status = cmd.status().map_err(|e| {
-        BusterError::CommandError(format!("Failed to execute docker-compose down: {}", e))
-    })?;
-
-    if status.success() {
-        println!("Services stopped successfully.");
-        Ok(())
-    } else {
-        Err(BusterError::CommandError(
-            format!("docker-compose down failed with status: {}", status)
-        ))
-    }
+    run_docker_compose_command(&["down"], "Stopping").await
 }
\ No newline at end of file

From c62d09536c91a35cfa5d0aaa9c1a3d71b88fd6f3 Mon Sep 17 00:00:00 2001
From: dal
Date: Wed, 7 May 2025 17:27:04 -0600
Subject: [PATCH 29/43] ok buster run and stop working well

---
 cli/cli/src/commands/run.rs | 102 ++++++++++++++++++------------------
 1 file changed, 51 insertions(+), 51 deletions(-)

diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs
index 965fcad20..520f30a69 100644
--- a/cli/cli/src/commands/run.rs
+++ b/cli/cli/src/commands/run.rs
@@ -4,8 +4,8 @@ use std::process::{Command, Stdio};
 use crate::error::BusterError;
 use indicatif::{ProgressBar, ProgressStyle};
 use std::time::Duration;
-use tempfile::TempDir;
 use rust_embed::RustEmbed;
+use dirs;
 
 #[derive(RustEmbed)]
 #[folder = "../../"]
@@ -18,16 +18,24 @@ use rust_embed::RustEmbed;
 #[exclude = "supabase/docker-compose.override.yml"]
 struct StaticAssets;
 
-async fn setup_temporary_compose_environment() -> Result<TempDir, BusterError> {
-    let temp_dir = TempDir::new().map_err(|e| {
-        BusterError::CommandError(format!("Failed to create temporary directory: {}", e))
+async fn setup_persistent_app_environment() -> Result<PathBuf, BusterError> {
+    let home_dir = dirs::home_dir()
+        .ok_or_else(|| BusterError::CommandError("Failed to get home directory. 
Cannot set up persistent app path.".to_string()))?; + let app_base_dir = home_dir.join(".buster"); + + fs::create_dir_all(&app_base_dir).map_err(|e| { + BusterError::CommandError(format!( + "Failed to create persistent app directory at {}: {}", + app_base_dir.display(), + e + )) })?; - let base_path = temp_dir.path(); for filename_cow in StaticAssets::iter() { let filename = filename_cow.as_ref(); - let asset = StaticAssets::get(filename).ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", filename)))?; - let target_file_path = base_path.join(filename); + let asset = StaticAssets::get(filename) + .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", filename)))?; + let target_file_path = app_base_dir.join(filename); if let Some(parent) = target_file_path.parent() { fs::create_dir_all(parent).map_err(|e| { @@ -49,12 +57,12 @@ async fn setup_temporary_compose_environment() -> Result { })?; } - let supabase_volumes_path = base_path.join("supabase/volumes"); - fs::create_dir_all(supabase_volumes_path.join("functions")).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions: {}", e)))?; + let supabase_volumes_functions_path = app_base_dir.join("supabase/volumes/functions"); + fs::create_dir_all(supabase_volumes_functions_path).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions in persistent app dir: {}", e)))?; let local_dotenv_path = PathBuf::from("/Users/dallin/buster/buster/.env"); if local_dotenv_path.exists() { - let target_dotenv_path = base_path.join(".env"); + let target_dotenv_path = app_base_dir.join(".env"); fs::copy(&local_dotenv_path, &target_dotenv_path).map_err(|e| { BusterError::CommandError(format!( "Failed to copy local .env from {} to {}: {}", @@ -67,20 +75,19 @@ async fn setup_temporary_compose_environment() -> Result { println!("Warning: Specified .env file not found at {}. 
Services might not configure correctly if .env is required by docker-compose.yml.", local_dotenv_path.display()); } - Ok(temp_dir) + Ok(app_base_dir) } async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Result<(), BusterError> { - let temp_dir = setup_temporary_compose_environment().await?; - let temp_dir_path = temp_dir.path(); + let persistent_app_dir = setup_persistent_app_environment().await?; - let persistent_project_path = PathBuf::from("/Users/dallin/buster/buster"); + let data_db_path = persistent_app_dir.join("supabase/volumes/db/data"); + fs::create_dir_all(&data_db_path) + .map_err(|e| BusterError::CommandError(format!("Failed to create persistent data directory at {}: {}", data_db_path.display(), e)))?; - let supabase_volumes_persistent_path = persistent_project_path.join("supabase/volumes"); - fs::create_dir_all(supabase_volumes_persistent_path.join("db/data")) - .map_err(|e| BusterError::CommandError(format!("Failed to create persistent supabase/volumes/db/data: {}", e)))?; - fs::create_dir_all(supabase_volumes_persistent_path.join("storage")) - .map_err(|e| BusterError::CommandError(format!("Failed to create persistent supabase/volumes/storage: {}", e)))?; + let data_storage_path = persistent_app_dir.join("supabase/volumes/storage"); + fs::create_dir_all(&data_storage_path) + .map_err(|e| BusterError::CommandError(format!("Failed to create persistent data directory at {}: {}", data_storage_path.display(), e)))?; let pb = ProgressBar::new_spinner(); pb.enable_steady_tick(Duration::from_millis(120)); @@ -97,43 +104,36 @@ async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Resu } let mut cmd = Command::new("docker"); + cmd.current_dir(&persistent_app_dir); cmd.arg("compose") .arg("-p") .arg("buster") - .arg("--project-directory") - .arg(persistent_project_path) - .arg("-f") - .arg(temp_dir_path.join("docker-compose.yml")) - .args(args) - .stdout(Stdio::null()) - .stderr(Stdio::null()); + .arg("-f") + .arg("docker-compose.yml") + .args(args); - let status_result = cmd.status(); + let output = cmd.output().map_err(|e| { + BusterError::CommandError(format!("Failed to execute docker compose {}: {}", args.join(" "), e)) + })?; - match status_result { - Ok(status) => { - if status.success() { - pb.finish_with_message(format!( - "Buster services {} successfully.", - operation_name.to_lowercase() - )); - Ok(()) - } else { - let err_msg = format!( - "docker compose {} failed (status: {}). Check Docker logs for details (context: {}).", - args.join(" "), - status, - temp_dir_path.display() - ); - pb.abandon_with_message(format!("Error: {}", err_msg)); - Err(BusterError::CommandError(err_msg)) - } - } - Err(e) => { - let err_msg = format!("Failed to execute docker compose {}: {}", args.join(" "), e); - pb.abandon_with_message(format!("Error: {}", err_msg)); - Err(BusterError::CommandError(err_msg)) - } + if output.status.success() { + pb.finish_with_message(format!( + "Buster services {} successfully.", + operation_name.to_lowercase() + )); + Ok(()) + } else { + let err_msg = format!( + "docker compose {} failed (status: {}). Logs:\nWorking directory: {}\nStdout:\n{}\nStderr:\n{}", + args.join(" "), + output.status, + persistent_app_dir.display(), + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + pb.abandon_with_message(format!("Error: docker compose {} failed. 
See console for details.", args.join(" "))); + println!("\nDocker Compose Error Details:\n{}", err_msg); + Err(BusterError::CommandError(err_msg)) } } From 87fcc41e2deaa6a79fd48d036b9cba68dd081a5f Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 18:13:56 -0600 Subject: [PATCH 30/43] buster start, stop, restart handled well --- api/libs/middleware/src/auth.rs | 76 +++++++------ cli/cli/src/commands/run.rs | 193 +++++++++++++++++++++++++++++--- cli/cli/src/main.rs | 5 + docker-compose.yml | 1 + supabase/.env.example | 129 +++++++++++++++++++++ 5 files changed, 353 insertions(+), 51 deletions(-) create mode 100644 supabase/.env.example diff --git a/api/libs/middleware/src/auth.rs b/api/libs/middleware/src/auth.rs index e6f34bcc5..8c1103363 100644 --- a/api/libs/middleware/src/auth.rs +++ b/api/libs/middleware/src/auth.rs @@ -94,48 +94,50 @@ pub async fn auth(mut req: Request, next: Next) -> Result }; // --- Payment Required Check START --- - if let Some(org_membership) = user.organizations.get(0) { - let org_id = org_membership.id; - let pg_pool = get_pg_pool(); - let mut conn = match pg_pool.get().await { - Ok(conn) => conn, - Err(e) => { - tracing::error!("Failed to get DB connection for payment check: {}", e); - return Err(StatusCode::INTERNAL_SERVER_ERROR); - } - }; + if env::var("ENVIRONMENT").unwrap_or_default() == "production" { + if let Some(org_membership) = user.organizations.get(0) { + let org_id = org_membership.id; + let pg_pool = get_pg_pool(); + let mut conn = match pg_pool.get().await { + Ok(conn) => conn, + Err(e) => { + tracing::error!("Failed to get DB connection for payment check: {}", e); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; - match database::schema::organizations::table - .filter(database::schema::organizations::id.eq(org_id)) - .select(database::schema::organizations::payment_required) - .first::(&mut conn) - .await - { - Ok(payment_required) => { - if payment_required { - tracing::warn!( + match database::schema::organizations::table + .filter(database::schema::organizations::id.eq(org_id)) + .select(database::schema::organizations::payment_required) + .first::(&mut conn) + .await + { + Ok(payment_required) => { + if payment_required { + tracing::warn!( + user_id = %user.id, + org_id = %org_id, + "Access denied due to payment requirement for organization." + ); + return Err(StatusCode::PAYMENT_REQUIRED); + } + } + Err(diesel::NotFound) => { + tracing::error!( user_id = %user.id, org_id = %org_id, - "Access denied due to payment requirement for organization." + "Organization not found during payment check." ); - return Err(StatusCode::PAYMENT_REQUIRED); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + Err(e) => { + tracing::error!( + user_id = %user.id, + org_id = %org_id, + "Database error during payment check: {}", e + ); + return Err(StatusCode::INTERNAL_SERVER_ERROR); } - } - Err(diesel::NotFound) => { - tracing::error!( - user_id = %user.id, - org_id = %org_id, - "Organization not found during payment check." 
- ); - return Err(StatusCode::INTERNAL_SERVER_ERROR); - } - Err(e) => { - tracing::error!( - user_id = %user.id, - org_id = %org_id, - "Database error during payment check: {}", e - ); - return Err(StatusCode::INTERNAL_SERVER_ERROR); } } } diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index 520f30a69..bce1471f1 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -1,4 +1,5 @@ use std::fs; +use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use crate::error::BusterError; @@ -10,6 +11,7 @@ use dirs; #[derive(RustEmbed)] #[folder = "../../"] #[include = "docker-compose.yml"] +#[include = "supabase/.env.example"] #[include = "supabase/**/*"] #[exclude = "supabase/volumes/db/data/**/*"] #[exclude = "supabase/volumes/storage/**/*"] @@ -60,20 +62,32 @@ async fn setup_persistent_app_environment() -> Result { let supabase_volumes_functions_path = app_base_dir.join("supabase/volumes/functions"); fs::create_dir_all(supabase_volumes_functions_path).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions in persistent app dir: {}", e)))?; - let local_dotenv_path = PathBuf::from("/Users/dallin/buster/buster/.env"); - if local_dotenv_path.exists() { - let target_dotenv_path = app_base_dir.join(".env"); - fs::copy(&local_dotenv_path, &target_dotenv_path).map_err(|e| { - BusterError::CommandError(format!( - "Failed to copy local .env from {} to {}: {}", - local_dotenv_path.display(), - target_dotenv_path.display(), - e - )) - })?; - } else { - println!("Warning: Specified .env file not found at {}. Services might not configure correctly if .env is required by docker-compose.yml.", local_dotenv_path.display()); - } + let target_dotenv_path = app_base_dir.join(".env"); + + // Always use .env.example from embedded assets + let example_env_filename = "supabase/.env.example"; + let asset = StaticAssets::get(example_env_filename) + .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", example_env_filename)))?; + + fs::write(&target_dotenv_path, asset.data).map_err(|e| { + BusterError::CommandError(format!( + "Failed to write {} to {}: {}", + example_env_filename, + target_dotenv_path.display(), + e + )) + })?; + + // Additionally copy the .env to the supabase subdirectory + let supabase_dotenv_path = app_base_dir.join("supabase/.env"); + fs::copy(&target_dotenv_path, &supabase_dotenv_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to copy .env from {} to {}: {}", + target_dotenv_path.display(), + supabase_dotenv_path.display(), + e + )) + })?; Ok(app_base_dir) } @@ -143,4 +157,155 @@ pub async fn start() -> Result<(), BusterError> { pub async fn stop() -> Result<(), BusterError> { run_docker_compose_command(&["down"], "Stopping").await +} + +pub async fn restart() -> Result<(), BusterError> { + println!("WARNING: This command will stop all Buster services, attempt to remove their current images, and then restart them."); + println!("This can lead to a complete wipe of the Buster database and any other local service data."); + println!("This action is irreversible."); + print!("Are you sure you want to proceed? 
(yes/No): "); + io::stdout().flush().map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?; + + let mut confirmation = String::new(); + io::stdin().read_line(&mut confirmation).map_err(|e| BusterError::CommandError(format!("Failed to read user input: {}", e)))?; + + if confirmation.trim().to_lowercase() != "yes" { + println!("Restart cancelled by user."); + return Ok(()); + } + + let persistent_app_dir = setup_persistent_app_environment().await?; + + let pb = ProgressBar::new_spinner(); + pb.enable_steady_tick(Duration::from_millis(120)); + pb.set_style( + ProgressStyle::default_spinner() + .tick_strings(&["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸", "✔"]) + .template("{spinner:.blue} {msg}") + .expect("Failed to set progress bar style"), + ); + + pb.set_message("Rebuilding Buster services (step 1/4): Stopping services..."); + + let mut down_cmd = Command::new("docker"); + down_cmd.current_dir(&persistent_app_dir) + .arg("compose") + .arg("-p") + .arg("buster") + .arg("-f") + .arg("docker-compose.yml") + .arg("down"); + + let down_output = down_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose down: {}", e)))?; + if !down_output.status.success() { + let err_msg = format!( + "docker compose down failed (status: {}). Logs: +Working directory: {} +Stdout: +{} +Stderr: +{}", + down_output.status, + persistent_app_dir.display(), + String::from_utf8_lossy(&down_output.stdout), + String::from_utf8_lossy(&down_output.stderr) + ); + pb.abandon_with_message("Error: docker compose down failed. See console for details."); + println!("\nDocker Compose Down Error Details:\n{}", err_msg); + return Err(BusterError::CommandError(err_msg)); + } + + pb.set_message("Rebuilding Buster services (step 2/4): Identifying service images..."); + let mut config_images_cmd = Command::new("docker"); + config_images_cmd.current_dir(&persistent_app_dir) + .arg("compose") + .arg("-p") + .arg("buster") + .arg("-f") + .arg("docker-compose.yml") + .arg("config") + .arg("--images"); + + let config_images_output = config_images_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose config --images: {}", e)))?; + if !config_images_output.status.success() { + let err_msg = format!( + "docker compose config --images failed (status: {}). Logs: +Working directory: {} +Stdout: +{} +Stderr: +{}", + config_images_output.status, + persistent_app_dir.display(), + String::from_utf8_lossy(&config_images_output.stdout), + String::from_utf8_lossy(&config_images_output.stderr) + ); + pb.abandon_with_message("Error: Failed to identify service images. See console for details."); + println!("\nDocker Compose Config --images Error Details:\n{}", err_msg); + return Err(BusterError::CommandError(err_msg)); + } + + let image_list_str = String::from_utf8_lossy(&config_images_output.stdout); + let image_names: Vec<&str> = image_list_str.lines().filter(|line| !line.trim().is_empty()).collect(); + + if image_names.is_empty() { + pb.println("No images identified by docker-compose config --images. 
Skipping image removal."); + } else { + pb.set_message(format!("Rebuilding Buster services (step 3/4): Removing {} service image(s)...", image_names.len())); + for (index, image_name) in image_names.iter().enumerate() { + let current_image_name = image_name.trim(); + if current_image_name.is_empty() { + continue; + } + pb.set_message(format!( + "Rebuilding Buster services (step 3/4): Removing image {}/{} ('{}')...", + index + 1, + image_names.len(), + current_image_name + )); + let mut rmi_cmd = Command::new("docker"); + rmi_cmd.arg("image").arg("rm").arg(current_image_name); + + let rmi_output = rmi_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker image rm {}: {}", current_image_name, e)))?; + + // Log warning on failure but continue, as image might not exist or be in use by other non-project containers + if !rmi_output.status.success() { + let rmi_stderr = String::from_utf8_lossy(&rmi_output.stderr); + if !rmi_stderr.trim().is_empty() && !rmi_stderr.contains("No such image") { // Don't warn if image was already gone + pb.println(format!("Warning: Could not remove image '{}'. It might be in use or already removed. Stderr: {}", current_image_name, rmi_stderr.trim())); + } + } + } + } + + pb.set_message("Rebuilding Buster services (step 4/4): Starting services (pulling images if needed)..."); + let mut up_cmd = Command::new("docker"); + up_cmd.current_dir(&persistent_app_dir) + .arg("compose") + .arg("-p") + .arg("buster") + .arg("-f") + .arg("docker-compose.yml") + .arg("up") + .arg("-d") + .arg("--pull") // Ensure latest images are pulled + .arg("--force-recreate"); // Recreate containers even if config hasn't changed + + let up_output = up_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose up: {}", e)))?; + + if up_output.status.success() { + pb.finish_with_message("Buster services rebuilt and started successfully."); + Ok(()) + } else { + let err_msg = format!( + "docker compose up failed after image purge (status: {}). Logs:\nWorking directory: {}\nStdout:\n{}\nStderr:\n{}", + up_output.status, + persistent_app_dir.display(), + String::from_utf8_lossy(&up_output.stdout), + String::from_utf8_lossy(&up_output.stderr) + ); + pb.abandon_with_message("Error: docker compose up failed after image purge. 
See console for details."); + println!("\nDocker Compose Up Error Details:\n{}", err_msg); + Err(BusterError::CommandError(err_msg)) + } } \ No newline at end of file diff --git a/cli/cli/src/main.rs b/cli/cli/src/main.rs index 9a0bdaa1f..5ed284f80 100644 --- a/cli/cli/src/main.rs +++ b/cli/cli/src/main.rs @@ -80,8 +80,12 @@ pub enum Commands { #[arg(long)] path: Option, }, + /// Start the Buster services Start, + /// Stop the Buster services Stop, + /// Restart the Buster services + Restart, } #[derive(Parser)] @@ -140,6 +144,7 @@ async fn main() { Commands::Parse { path } => commands::parse::parse_models_command(path).await, Commands::Start => run::start().await.map_err(anyhow::Error::from), Commands::Stop => run::stop().await.map_err(anyhow::Error::from), + Commands::Restart => run::restart().await.map_err(anyhow::Error::from), }; if let Err(e) = result { diff --git a/docker-compose.yml b/docker-compose.yml index cbb05bfe4..537b46609 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,6 +32,7 @@ services: - EMBEDDING_PROVIDER=${EMBEDDING_PROVIDER} - EMBEDDING_MODEL=${EMBEDDING_MODEL} - COHERE_API_KEY=${COHERE_API_KEY} + - ENVIRONMENT=${ENVIRONMENT} ports: - "3001:3001" - "3000:3000" diff --git a/supabase/.env.example b/supabase/.env.example new file mode 100644 index 000000000..5048b562e --- /dev/null +++ b/supabase/.env.example @@ -0,0 +1,129 @@ +# General Application Settings +ENVIRONMENT="development" +BUSTER_URL="http://localhost:3000" +BUSTER_WH_TOKEN="buster-wh-token" + +# --- API Service Specific --- +# Direct Database Connection (for API service and potentially others) +DATABASE_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" +# Pooled Database Connection (for API service, uses Supavisor) +POOLER_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" +# Redis Connection +REDIS_URL="redis://buster-redis:6379" +# Supabase Connection for API service +SUPABASE_URL="http://kong:8000" +SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" + +# --- LLM / AI Services --- +EMBEDDING_PROVIDER="ollama" +EMBEDDING_MODEL="mxbai-embed-large" +COHERE_API_KEY="" +OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant +LLM_API_KEY="test-key" +LLM_BASE_URL="http://litellm:4001" + +# --- Web Client (Next.js) Specific --- +NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) +NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) +NEXT_PUBLIC_SUPABASE_URL="http://kong:8000" # External URL for Supabase (Kong proxy) +NEXT_PUBLIC_WS_URL="ws://localhost:3001" +NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" +NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" + +################################################# +# Supabase Stack Configuration Variables +# These are primarily used by the 
Supabase services themselves +# (defined in supabase/docker-compose.yml) +# and are sourced from this .env file when `docker compose up` is run. +################################################# + +############ +# Secrets +############ +POSTGRES_PASSWORD="your-super-secret-and-long-postgres-password" +JWT_SECRET="your-super-secret-jwt-token-with-at-least-32-characters-long" +ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" +SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" +DASHBOARD_USERNAME="supabase" +DASHBOARD_PASSWORD="this_password_is_insecure_and_should_be_updated" + +############ +# Database +############ +POSTGRES_HOST="db" +POSTGRES_DB="postgres" +POSTGRES_PORT="5432" + +############ +# Supavisor -- Database pooler +############ +POOLER_PROXY_PORT_TRANSACTION="6543" +POOLER_DEFAULT_POOL_SIZE="20" +POOLER_MAX_CLIENT_CONN="100" +POOLER_TENANT_ID="your-tenant-id" + +############ +# API Proxy - Kong +############ +KONG_HTTP_PORT="8000" +KONG_HTTPS_PORT="8443" + +############ +# API - PostgREST +############ +PGRST_DB_SCHEMAS="public,storage,graphql_public" + +############ +# Auth - GoTrue +############ +SITE_URL="http://localhost:3000" # Default base URL for the site (used in emails, etc.) +ADDITIONAL_REDIRECT_URLS="" +JWT_EXPIRY="3600" +DISABLE_SIGNUP="false" +API_EXTERNAL_URL="http://localhost:8000" # Publicly accessible URL for the Supabase API (via Kong) + +## Mailer Config +MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" +MAILER_URLPATHS_INVITE="/auth/v1/verify" +MAILER_URLPATHS_RECOVERY="/auth/v1/verify" +MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" + +## Email auth +ENABLE_EMAIL_SIGNUP="true" +ENABLE_EMAIL_AUTOCONFIRM="true" +SMTP_ADMIN_EMAIL="admin@buster.so" +SMTP_HOST="supabase-mail" +SMTP_PORT="2500" +SMTP_USER="" +SMTP_PASS="" +SMTP_SENDER_NAME="Buster" +ENABLE_ANONYMOUS_USERS="true" + +## Phone auth +ENABLE_PHONE_SIGNUP="true" +ENABLE_PHONE_AUTOCONFIRM="true" + +############ +# Studio - Supabase Dashboard +############ +STUDIO_DEFAULT_ORGANIZATION="Default Organization" +STUDIO_DEFAULT_PROJECT="Default Project" +STUDIO_PORT="3003" +SUPABASE_PUBLIC_URL="http://localhost:8000" # Public URL for Supabase (Kong), used by Studio + +# Image Proxy +IMGPROXY_ENABLE_WEBP_DETECTION="true" + +############ +# Functions - Supabase Edge Functions +############ +FUNCTIONS_VERIFY_JWT="false" + +############ +# Logs - Logflare +############ +LOGFLARE_LOGGER_BACKEND_API_KEY="your-super-secret-and-long-logflare-key" +LOGFLARE_API_KEY="your-super-secret-and-long-logflare-key" +DOCKER_SOCKET_LOCATION="/var/run/docker.sock" +GOOGLE_PROJECT_ID="GOOGLE_PROJECT_ID" +GOOGLE_PROJECT_NUMBER="GOOGLE_PROJECT_NUMBER" \ No newline at end of file From fb0077c5836429faf2ab3c7693fd381445688cf1 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 18:25:34 -0600 Subject: [PATCH 31/43] docker release update to merge to latest. 
run updates, migrate rerank --- .github/workflows/docker-release.yml | 199 +++++++++++++++++- api/libs/agents/Cargo.toml | 1 + .../file_tools/search_data_catalog.rs | 39 ++-- cli/cli/src/commands/run.rs | 6 +- docker-compose.yml | 4 +- 5 files changed, 221 insertions(+), 28 deletions(-) diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index bce380d86..675f8b8f2 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -94,6 +94,16 @@ jobs: with: ref: ${{ github.sha }} + - name: Docker meta for API + id: meta_api + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }} + tags: | + type=semver,pattern={{version}},value=${{ env.API_VERSION }} + type=sha,format=short + type=raw,value=latest + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -105,16 +115,32 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push API image + id: build_api_image_platform uses: useblacksmith/build-push-action@v1 with: context: ./api file: ./api/Dockerfile push: true platforms: ${{ matrix.docker_platform }} - tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }}-${{ matrix.platform }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest-${{ matrix.platform }} + tags: ${{ steps.meta_api.outputs.tags }} + labels: ${{ steps.meta_api.outputs.labels }} + outputs: type=image,name=${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }},push-by-digest=true,name-canonical=true + + - name: Export API digest + run: | + mkdir -p ${{ runner.temp }}/digests + digest_full="${{ steps.build_api_image_platform.outputs.digest }}" + digest_sha="${digest_full#sha256:}" + echo "Digest SHA for API ${{ matrix.platform }}: ${digest_sha}" + echo "${digest_sha}" > "${{ runner.temp }}/digests/api-${{ matrix.platform }}.sha" + + - name: Upload API digest file + uses: actions/upload-artifact@v4 + with: + name: api-digest-${{ matrix.platform }} + path: ${{ runner.temp }}/digests/api-${{ matrix.platform }}.sha + if-no-files-found: error + retention-days: 1 - name: Set API Package Visibility to Public env: @@ -160,6 +186,16 @@ jobs: with: ref: ${{ github.sha }} + - name: Docker meta for Web + id: meta_web + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }} + tags: | + type=semver,pattern={{version}},value=${{ env.WEB_VERSION }} + type=sha,format=short + type=raw,value=latest + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -171,16 +207,16 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build and push Web image + id: build_web_image_platform uses: useblacksmith/build-push-action@v1 with: context: ./web file: ./web/Dockerfile push: true platforms: ${{ matrix.docker_platform }} - tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }}-${{ matrix.platform }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:latest-${{ matrix.platform }} + tags: ${{ steps.meta_web.outputs.tags }} + labels: ${{ steps.meta_web.outputs.labels }} + outputs: type=image,name=${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }},push-by-digest=true,name-canonical=true build-args: | NEXT_PUBLIC_API_URL=${{ 
secrets.NEXT_PUBLIC_API_URL }} NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} @@ -188,6 +224,22 @@ jobs: NEXT_PUBLIC_SUPABASE_ANON_KEY=${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} NEXT_PUBLIC_WEB_SOCKET_URL=${{ secrets.NEXT_PUBLIC_WEB_SOCKET_URL }} + - name: Export Web digest + run: | + mkdir -p ${{ runner.temp }}/digests + digest_full="${{ steps.build_web_image_platform.outputs.digest }}" + digest_sha="${digest_full#sha256:}" + echo "Digest SHA for Web ${{ matrix.platform }}: ${digest_sha}" + echo "${digest_sha}" > "${{ runner.temp }}/digests/web-${{ matrix.platform }}.sha" + + - name: Upload Web digest file + uses: actions/upload-artifact@v4 + with: + name: web-digest-${{ matrix.platform }} + path: ${{ runner.temp }}/digests/web-${{ matrix.platform }}.sha + if-no-files-found: error + retention-days: 1 + - name: Set Web Package Visibility to Public env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -206,3 +258,134 @@ jobs: echo "Failed to set package $ORG_NAME/${{ env.WEB_IMAGE_NAME }} visibility to public. HTTP Status: $RESPONSE_CODE" # Optionally, fail the step: exit 1 fi + + merge_api_manifests: + name: Merge API Manifests + runs-on: blacksmith-4vcpu-ubuntu-2204 + needs: [prepare_docker_release_info, build_and_push_api] + if: needs.prepare_docker_release_info.outputs.api_version_found == 'true' + steps: + - name: Download API digests + uses: actions/download-artifact@v4 + with: + path: ${{ runner.temp }}/all_api_digests + pattern: api-digest-* + merge-multiple: true + + - name: Log in to Docker Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Docker meta for API Manifest + id: meta_api_manifest + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }} + tags: | + type=semver,pattern={{version}},value=${{ needs.prepare_docker_release_info.outputs.api_version }} + type=sha,format=short + type=raw,value=latest + # Ensure DOCKER_METADATA_OUTPUT_JSON is populated for the next step + # outputs: | + # json + + - name: Create and push API manifest list + env: + API_IMAGE_FULL_NAME: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }} + working-directory: ${{ runner.temp }}/all_api_digests + run: | + echo "Listing downloaded API digests in $(pwd):" + ls -lR . + + TAG_ARGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") + echo "Generated tag arguments for API manifest: $TAG_ARGS" + + DIGEST_FILES_FOUND=$(find . -type f -name '*.sha' -print) + if [ -z "$DIGEST_FILES_FOUND" ]; then + echo "Error: No API digest files (*.sha) found." + exit 1 + fi + + IMAGE_PLUS_DIGEST_ARGS="" + for digest_file_path in $DIGEST_FILES_FOUND; do + sha_value=$(cat "$digest_file_path") + IMAGE_PLUS_DIGEST_ARGS="$IMAGE_PLUS_DIGEST_ARGS ${API_IMAGE_FULL_NAME}@sha256:${sha_value}" + done + echo "API Manifest images with digests: $IMAGE_PLUS_DIGEST_ARGS" + + if [ -z "$IMAGE_PLUS_DIGEST_ARGS" ]; then + echo "Error: No API digests were processed to create the manifest." 
+ exit 1 + fi + docker buildx imagetools create $TAG_ARGS $IMAGE_PLUS_DIGEST_ARGS + + merge_web_manifests: + name: Merge Web Manifests + runs-on: blacksmith-4vcpu-ubuntu-2204 + needs: [prepare_docker_release_info, build_and_push_web] + if: needs.prepare_docker_release_info.outputs.web_version_found == 'true' + steps: + - name: Download Web digests + uses: actions/download-artifact@v4 + with: + path: ${{ runner.temp }}/all_web_digests + pattern: web-digest-* + merge-multiple: true + + - name: Log in to Docker Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Docker meta for Web Manifest + id: meta_web_manifest + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }} + tags: | + type=semver,pattern={{version}},value=${{ needs.prepare_docker_release_info.outputs.web_version }} + type=sha,format=short + type=raw,value=latest + # outputs: | + # json + + - name: Create and push Web manifest list + env: + WEB_IMAGE_FULL_NAME: ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }} + working-directory: ${{ runner.temp }}/all_web_digests + run: | + echo "Listing downloaded Web digests in $(pwd):" + ls -lR . + + TAG_ARGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") + echo "Generated tag arguments for Web manifest: $TAG_ARGS" + + DIGEST_FILES_FOUND=$(find . -type f -name '*.sha' -print) + if [ -z "$DIGEST_FILES_FOUND" ]; then + echo "Error: No Web digest files (*.sha) found." + exit 1 + fi + + IMAGE_PLUS_DIGEST_ARGS="" + for digest_file_path in $DIGEST_FILES_FOUND; do + sha_value=$(cat "$digest_file_path") + IMAGE_PLUS_DIGEST_ARGS="$IMAGE_PLUS_DIGEST_ARGS ${WEB_IMAGE_FULL_NAME}@sha256:${sha_value}" + done + echo "Web Manifest images with digests: $IMAGE_PLUS_DIGEST_ARGS" + + if [ -z "$IMAGE_PLUS_DIGEST_ARGS" ]; then + echo "Error: No Web digests were processed to create the manifest." 
+ exit 1 + fi + docker buildx imagetools create $TAG_ARGS $IMAGE_PLUS_DIGEST_ARGS diff --git a/api/libs/agents/Cargo.toml b/api/libs/agents/Cargo.toml index af9b3de18..5b029498d 100644 --- a/api/libs/agents/Cargo.toml +++ b/api/libs/agents/Cargo.toml @@ -36,6 +36,7 @@ tokio-retry = { workspace = true } thiserror = { workspace = true } raindrop = { path = "../raindrop" } sql_analyzer = { path = "../sql_analyzer" } +rerank = { path = "../rerank" } # Development dependencies [dev-dependencies] diff --git a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs index fb681e729..84978eb26 100644 --- a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs +++ b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs @@ -27,6 +27,7 @@ use uuid::Uuid; use dataset_security::{get_permissioned_datasets, PermissionedDataset}; use sqlx::PgPool; use stored_values; +use rerank::Reranker; use crate::{agent::Agent, tools::ToolExecutor}; @@ -883,25 +884,29 @@ async fn rerank_datasets( if documents.is_empty() || all_datasets.is_empty() { return Ok(vec![]); } - let co = Cohere::default(); - let request = ReRankRequest { - query, - documents, - model: ReRankModel::EnglishV3, - top_n: Some(35), - ..Default::default() - }; + // Initialize your custom reranker + let reranker = Reranker::new() + .map_err(|e| anyhow::anyhow!("Failed to initialize custom Reranker: {}", e))?; - let rerank_results = match co.rerank(&request).await { + // Convert documents from Vec to Vec<&str> for the rerank library + let doc_slices: Vec<&str> = documents.iter().map(AsRef::as_ref).collect(); + + // Define top_n, e.g., 35 as used with Cohere + let top_n = 35; + + // Call your custom reranker's rerank method + let rerank_results = match reranker.rerank(query, &doc_slices, top_n).await { Ok(results) => results, Err(e) => { - error!(error = %e, query = query, "Cohere rerank API call failed"); - return Err(anyhow::anyhow!("Cohere rerank failed: {}", e)); + error!(error = %e, query = query, "Custom reranker API call failed"); + return Err(anyhow::anyhow!("Custom reranker failed: {}", e)); } }; let mut ranked_datasets = Vec::new(); + // The structure of RerankResult from your library (index, relevance_score) + // is compatible with the existing loop logic. for result in rerank_results { if let Some(dataset) = all_datasets.get(result.index as usize) { ranked_datasets.push(RankedDataset { @@ -909,17 +914,19 @@ async fn rerank_datasets( }); } else { error!( - "Invalid dataset index {} from Cohere for query '{}'. Max index: {}", + "Invalid dataset index {} from custom reranker for query '{}'. Max index: {}", result.index, query, - all_datasets.len() - 1 + all_datasets.len().saturating_sub(1) // Avoid panic on empty all_datasets (though guarded above) ); } } - let relevant_datasets = ranked_datasets.into_iter().collect::>(); - - Ok(relevant_datasets) + // The original code collected into Vec<_> then returned. This is fine. 
+    // let relevant_datasets = ranked_datasets.into_iter().collect::<Vec<_>>();
+    // Ok(relevant_datasets)
+    // Simpler:
+    Ok(ranked_datasets)
 }
 
 async fn llm_filter_helper(
diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs
index bce1471f1..3b39a89a1 100644
--- a/cli/cli/src/commands/run.rs
+++ b/cli/cli/src/commands/run.rs
@@ -288,8 +288,10 @@ Stderr:
         .arg("docker-compose.yml")
         .arg("up")
         .arg("-d")
-        .arg("--pull") // Ensure latest images are pulled
-        .arg("--force-recreate"); // Recreate containers even if config hasn't changed
+        .arg("--pull")
+        .arg("always")
+        .arg("--force-recreate")
+        .arg("--remove-orphans");
 
     let up_output = up_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose up: {}", e)))?;
 
diff --git a/docker-compose.yml b/docker-compose.yml
index 537b46609..5ce16d86e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -16,7 +16,7 @@ services:
       retries: 30
 
   api:
-    image: ghcr.io/buster-so/buster/api:latest-arm64
+    image: ghcr.io/buster-so/buster/api:latest
     container_name: buster-api
     env_file:
       - .env
@@ -50,7 +50,7 @@ services:
         condition: service_healthy
 
   web:
-    image: ghcr.io/buster-so/buster/web:latest-arm64
+    image: ghcr.io/buster-so/buster/web:latest
     container_name: buster-web
     env_file:
       - .env

From 46cb2c3b3b87ca52bdd9e3f68436f51e5f2487fc Mon Sep 17 00:00:00 2001
From: dal
Date: Wed, 7 May 2025 18:31:37 -0600
Subject: [PATCH 32/43] rerank test and tweak

---
 api/libs/rerank/Cargo.toml                |  4 ++
 api/libs/rerank/src/lib.rs                | 22 +--------
 api/libs/rerank/tests/integration_test.rs | 56 +++++++++++++++++++++++
 3 files changed, 61 insertions(+), 21 deletions(-)
 create mode 100644 api/libs/rerank/tests/integration_test.rs

diff --git a/api/libs/rerank/Cargo.toml b/api/libs/rerank/Cargo.toml
index 7476f5f3c..44adfc306 100644
--- a/api/libs/rerank/Cargo.toml
+++ b/api/libs/rerank/Cargo.toml
@@ -7,3 +7,7 @@ edition = "2021"
 reqwest = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
+
+[dev-dependencies]
+dotenv = { workspace = true }
+tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
diff --git a/api/libs/rerank/src/lib.rs b/api/libs/rerank/src/lib.rs
index 4df2ba84f..67177db37 100644
--- a/api/libs/rerank/src/lib.rs
+++ b/api/libs/rerank/src/lib.rs
@@ -2,15 +2,7 @@ use reqwest::Client;
 use serde::{Deserialize, Serialize};
 use std::error::Error;
 
-#[derive(Debug)]
-pub enum RerankerType {
-    Cohere,
-    Mxbai,
-    Jina,
-}
-
 pub struct Reranker {
-    reranker_type: RerankerType,
     api_key: String,
     base_url: String,
     model: String,
@@ -19,23 +11,11 @@ pub struct Reranker {
 
 impl Reranker {
     pub fn new() -> Result<Self, Box<dyn Error>> {
-        let provider = std::env::var("RERANK_PROVIDER")?;
-        let reranker_type = match provider.to_lowercase().as_str() {
-            "cohere" => RerankerType::Cohere,
-            "mxbai" => RerankerType::Mxbai,
-            "jina" => RerankerType::Jina,
-            _ => return Err("Invalid provider specified".into()),
-        };
         let api_key = std::env::var("RERANK_API_KEY")?;
         let model = std::env::var("RERANK_MODEL")?;
-        let base_url = match reranker_type {
-            RerankerType::Cohere => "https://api.cohere.com/v2/rerank",
-            RerankerType::Mxbai => "https://api.mixedbread.ai/v1/rerank",
-            RerankerType::Jina => "https://api.jina.ai/v1/rerank",
-        }.to_string();
+        let base_url = std::env::var("RERANK_BASE_URL")?;
         let client = Client::new();
         Ok(Self {
-            reranker_type,
             api_key,
             base_url,
             model,
diff --git a/api/libs/rerank/tests/integration_test.rs b/api/libs/rerank/tests/integration_test.rs
new file mode 100644
index 000000000..d208044f3
--- /dev/null
+++ b/api/libs/rerank/tests/integration_test.rs
@@ -0,0 +1,56 @@
+use rerank::{Reranker, RerankResult};
+use std::error::Error;
+
+#[tokio::test]
+async fn test_reranker_integration() -> Result<(), Box<dyn Error>> {
+    // Load environment variables from .env file
+    dotenv::dotenv().ok();
+
+    // Initialize the reranker
+    let reranker = Reranker::new()?;
+
+    // Define a sample query and documents
+    let query = "What is the capital of France?";
+    let documents = vec![
+        "Paris is a major European city and a global center for art, fashion, gastronomy and culture.",
+        "London is the capital and largest city of England and the United Kingdom.",
+        "The Eiffel Tower is a wrought-iron lattice tower on the Champ de Mars in Paris, France.",
+        "Berlin is the capital and largest city of Germany by both area and population.",
+    ];
+    let top_n = 2;
+
+    // Perform reranking
+    let results: Vec<RerankResult> = reranker.rerank(query, &documents, top_n).await?;
+
+    // Assertions
+    assert_eq!(results.len(), top_n, "Should return top_n results");
+
+    // Check that indices are within the bounds of the original documents
+    for result in &results {
+        assert!(result.index < documents.len(), "Result index should be valid");
+    }
+
+    // Optional: Print results for manual verification (can be removed later)
+    println!("Query: {}", query);
+    for result in &results {
+        println!(
+            "Document Index: {}, Score: {:.4}, Document: {}",
+            result.index,
+            result.relevance_score,
+            documents[result.index]
+        );
+    }
+
+    // Example assertion: if we expect Paris-related documents to be ranked higher.
+    // This is a very basic check and might need adjustment based on actual model behavior.
+    if !results.is_empty() {
+        let first_result_doc = documents[results[0].index];
+        assert!(
+            first_result_doc.to_lowercase().contains("paris"),
+            "The top result for 'capital of France' should ideally mention Paris.
Model output: {}", + first_result_doc + ); + } + + Ok(()) +} \ No newline at end of file From 6bdd3d74d208da3ec6dbf52bd6fe963a4ad0180a Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 18:53:53 -0600 Subject: [PATCH 33/43] dev mode and env example update --- .env.example | 144 +++++++-------------------------------------------- Makefile | 35 +++++++++++++ api/makefile | 10 ++-- 3 files changed, 60 insertions(+), 129 deletions(-) create mode 100644 Makefile diff --git a/.env.example b/.env.example index bf3ece2a7..04012d965 100644 --- a/.env.example +++ b/.env.example @@ -1,129 +1,25 @@ -# General Application Settings +# API VARS ENVIRONMENT="development" +DATABASE_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres" +POOLER_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres" +JWT_SECRET="super-secret-jwt-token-with-at-least-32-characters-long" +REDIS_URL="redis://localhost:6379" BUSTER_URL="http://localhost:3000" BUSTER_WH_TOKEN="buster-wh-token" +LOG_LEVEL="debug" +SUPABASE_URL="http://localhost:54321" +SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU" -# --- API Service Specific --- -# Direct Database Connection (for API service and potentially others) -DATABASE_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" -# Pooled Database Connection (for API service, uses Supavisor) -POOLER_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" -# Redis Connection -REDIS_URL="redis://buster-redis:6379" -# Supabase Connection for API service -SUPABASE_URL="http://kong:8000" -SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" +# AI VARS +RERANK_API_KEY="your_rerank_api_key" +RERANK_MODEL="rerank-v3.5" +RERANK_BASE_URL="https://api.cohere.com/v2/rerank" +LLM_API_KEY="your_llm_api_key" +LLM_BASE_URL="http://localhost:4000" -# --- LLM / AI Services --- -EMBEDDING_PROVIDER="ollama" -EMBEDDING_MODEL="mxbai-embed-large" -COHERE_API_KEY="" -OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant -LLM_API_KEY="test-key" -LLM_BASE_URL="http://litellm:4001" - -# --- Web Client (Next.js) Specific --- -NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) -NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) -NEXT_PUBLIC_SUPABASE_URL="http://kong:8000" # External URL for Supabase (Kong proxy) -NEXT_PUBLIC_WS_URL="ws://localhost:3001" -NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" -NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" - -################################################# -# Supabase Stack Configuration Variables -# These are primarily used by the Supabase services themselves -# (defined in supabase/docker-compose.yml) -# and are 
sourced from this .env file when `docker compose up` is run. -################################################# - -############ -# Secrets -############ -POSTGRES_PASSWORD="your-super-secret-and-long-postgres-password" -JWT_SECRET="your-super-secret-jwt-token-with-at-least-32-characters-long" -ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" -SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" -DASHBOARD_USERNAME="supabase" -DASHBOARD_PASSWORD="this_password_is_insecure_and_should_be_updated" - -############ -# Database -############ -POSTGRES_HOST="db" -POSTGRES_DB="postgres" -POSTGRES_PORT="5432" - -############ -# Supavisor -- Database pooler -############ -POOLER_PROXY_PORT_TRANSACTION="6543" -POOLER_DEFAULT_POOL_SIZE="20" -POOLER_MAX_CLIENT_CONN="100" -POOLER_TENANT_ID="your-tenant-id" - -############ -# API Proxy - Kong -############ -KONG_HTTP_PORT="8000" -KONG_HTTPS_PORT="8443" - -############ -# API - PostgREST -############ -PGRST_DB_SCHEMAS="public,storage,graphql_public" - -############ -# Auth - GoTrue -############ -SITE_URL="http://localhost:3000" # Default base URL for the site (used in emails, etc.) -ADDITIONAL_REDIRECT_URLS="" -JWT_EXPIRY="3600" -DISABLE_SIGNUP="false" -API_EXTERNAL_URL="http://localhost:8000" # Publicly accessible URL for the Supabase API (via Kong) - -## Mailer Config -MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" -MAILER_URLPATHS_INVITE="/auth/v1/verify" -MAILER_URLPATHS_RECOVERY="/auth/v1/verify" -MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" - -## Email auth -ENABLE_EMAIL_SIGNUP="true" -ENABLE_EMAIL_AUTOCONFIRM="false" -SMTP_ADMIN_EMAIL="admin@buster.so" -SMTP_HOST="supabase-mail" -SMTP_PORT="2500" -SMTP_USER="" -SMTP_PASS="" -SMTP_SENDER_NAME="Buster" -ENABLE_ANONYMOUS_USERS="true" - -## Phone auth -ENABLE_PHONE_SIGNUP="true" -ENABLE_PHONE_AUTOCONFIRM="true" - -############ -# Studio - Supabase Dashboard -############ -STUDIO_DEFAULT_ORGANIZATION="Default Organization" -STUDIO_DEFAULT_PROJECT="Default Project" -STUDIO_PORT="3003" -SUPABASE_PUBLIC_URL="http://localhost:8000" # Public URL for Supabase (Kong), used by Studio - -# Image Proxy -IMGPROXY_ENABLE_WEBP_DETECTION="true" - -############ -# Functions - Supabase Edge Functions -############ -FUNCTIONS_VERIFY_JWT="false" - -############ -# Logs - Logflare -############ -LOGFLARE_LOGGER_BACKEND_API_KEY="your-super-secret-and-long-logflare-key" -LOGFLARE_API_KEY="your-super-secret-and-long-logflare-key" -DOCKER_SOCKET_LOCATION="/var/run/docker.sock" -GOOGLE_PROJECT_ID="GOOGLE_PROJECT_ID" -GOOGLE_PROJECT_NUMBER="GOOGLE_PROJECT_NUMBER" \ No newline at end of file +# WEB VARS +NEXT_PUBLIC_API_URL="http://127.0.0.1:3001" +NEXT_PUBLIC_URL="http://localhost:3000" +NEXT_PUBLIC_SUPABASE_URL="http://127.0.0.1:54321" +NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0" +NEXT_PUBLIC_WEB_SOCKET_URL="ws://127.0.0.1:3001" \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..6369431ba --- /dev/null +++ b/Makefile @@ -0,0 +1,35 @@ +# Makefile (root) + +.PHONY: dev api-dev-fg 
web-dev-fg stop + +# Main development target: runs both API and Web dev servers in parallel. +# They will run until Ctrl+C is pressed. +dev: + @echo "Starting API and Web development servers..." + @echo "Press Ctrl+C to stop all." + # Start API dev server in the background + # The subshell ( ... ) ensures that 'cd' doesn't affect subsequent commands at this level. + (cd api && $(MAKE) dev) & \ + # Start Web dev server in the background + (cd web && $(MAKE) dev) & \ + # Wait for all background jobs of this shell to complete. + # Since dev servers run indefinitely, this 'wait' will also run indefinitely until interrupted (Ctrl+C). + wait + @echo "Development servers stopped or shell command finished." + +# Target to stop API-specific services (like Docker containers, Supabase). +# The web dev server (npm run dev) is expected to be stopped when 'make dev' is interrupted (Ctrl+C). +stop: + @echo "Stopping API services (Redis, Supabase)..." + $(MAKE) -C api stop + @echo "API services stopped. If 'make dev' was running, web server should also be stopped." + +# Individual targets if you want to run them separately (e.g., in different terminal tabs) +# These are foreground targets. +api-dev-fg: + @echo "Starting API development server (foreground)..." + cd api && $(MAKE) dev + +web-dev-fg: + @echo "Starting Web development server (foreground)..." + cd web && $(MAKE) dev \ No newline at end of file diff --git a/api/makefile b/api/makefile index 1505fbf1b..885fd4d49 100644 --- a/api/makefile +++ b/api/makefile @@ -1,5 +1,5 @@ dev: - cd .. && docker compose up -d redis && cd api && \ + cd .. && docker run -d --name buster-redis-make -p 6379:6379 -v buster_redis_data:/data redis && cd api && \ supabase start supabase db reset export DATABASE_URL=postgres://postgres:postgres@127.0.0.1:54322/postgres && \ @@ -21,12 +21,12 @@ update-seed: > libs/database/seed.sql stop: - docker compose down && \ - supabase stop && \ - pkill ollama + docker stop buster-redis-make || true && \ + docker rm buster-redis-make || true && \ + cd .. && docker compose down && supabase stop fast: - cd .. && docker compose up -d redis && cd api && \ + cd .. && docker run -d --name buster-redis-make -p 6379:6379 -v buster_redis_data:/data redis && cd api && \ export RUST_LOG=debug && \ export CARGO_INCREMENTAL=1 && \ nice cargo watch -C server -x run \ No newline at end of file From 024daa9960be1587ba8149b1d3c110c4e8b0682d Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 18:59:59 -0600 Subject: [PATCH 34/43] flash preview for review and data catalog search --- api/libs/agents/src/agents/modes/data_catalog_search.rs | 2 +- api/libs/agents/src/agents/modes/review.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/libs/agents/src/agents/modes/data_catalog_search.rs b/api/libs/agents/src/agents/modes/data_catalog_search.rs index 8aeb28569..c162c4c6d 100644 --- a/api/libs/agents/src/agents/modes/data_catalog_search.rs +++ b/api/libs/agents/src/agents/modes/data_catalog_search.rs @@ -32,7 +32,7 @@ pub fn get_configuration(agent_data: &ModeAgentData, _data_source_syntax: Option // Note: This prompt doesn't use {TODAYS_DATE} // 2. Define the model for this mode - let model = "gemini-2.5-pro-exp-03-25".to_string(); // Use gemini-2.5-pro-exp-03-25 as requested + let model = "gemini-2.5-flash-preview-04-17".to_string(); // Use gemini-2.5-pro-exp-03-25 as requested // 3. 
Define the tool loader closure let tool_loader: Box) -> Pin> + Send>> + Send + Sync> = diff --git a/api/libs/agents/src/agents/modes/review.rs b/api/libs/agents/src/agents/modes/review.rs index c819c3f58..3f1d6d1ac 100644 --- a/api/libs/agents/src/agents/modes/review.rs +++ b/api/libs/agents/src/agents/modes/review.rs @@ -24,7 +24,7 @@ pub fn get_configuration(_agent_data: &ModeAgentData, _data_source_syntax: Optio let prompt = REVIEW_PROMPT.to_string(); // Use the correct constant // 2. Define the model for this mode (From original MODEL const) - let model = "gemini-2.5-pro-exp-03-25".to_string(); + let model = "gemini-2.5-flash-preview-04-17".to_string(); // 3. Define the tool loader closure let tool_loader: Box< From 011a10dd41e16b59e13a6a09c9d839b6328c492b Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 19:01:18 -0600 Subject: [PATCH 35/43] fix the stop in makefile --- api/makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/makefile b/api/makefile index 885fd4d49..2551b88d9 100644 --- a/api/makefile +++ b/api/makefile @@ -23,7 +23,7 @@ update-seed: stop: docker stop buster-redis-make || true && \ docker rm buster-redis-make || true && \ - cd .. && docker compose down && supabase stop + supabase stop fast: cd .. && docker run -d --name buster-redis-make -p 6379:6379 -v buster_redis_data:/data redis && cd api && \ From 6c037d992a04466b89c01674a3e75bb9096bddc3 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 23:34:59 -0600 Subject: [PATCH 36/43] Update LLM_BASE_URL in environment files and rename restart command to reset in CLI --- .env.example | 2 +- cli/cli/src/commands/run.rs | 20 ++++++++++---------- cli/cli/src/main.rs | 4 ++-- docker-compose.yml | 14 ++++++++++++++ supabase/.env.example | 2 +- 5 files changed, 28 insertions(+), 14 deletions(-) diff --git a/.env.example b/.env.example index 04012d965..2d1362e35 100644 --- a/.env.example +++ b/.env.example @@ -15,7 +15,7 @@ RERANK_API_KEY="your_rerank_api_key" RERANK_MODEL="rerank-v3.5" RERANK_BASE_URL="https://api.cohere.com/v2/rerank" LLM_API_KEY="your_llm_api_key" -LLM_BASE_URL="http://localhost:4000" +LLM_BASE_URL="http://buster-litellm:4001" # WEB VARS NEXT_PUBLIC_API_URL="http://127.0.0.1:3001" diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index 3b39a89a1..6a71d00d0 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -159,18 +159,18 @@ pub async fn stop() -> Result<(), BusterError> { run_docker_compose_command(&["down"], "Stopping").await } -pub async fn restart() -> Result<(), BusterError> { - println!("WARNING: This command will stop all Buster services, attempt to remove their current images, and then restart them."); +pub async fn reset() -> Result<(), BusterError> { + println!("WARNING: This command will stop all Buster services, attempt to remove their current images, and then restart them from scratch."); println!("This can lead to a complete wipe of the Buster database and any other local service data."); println!("This action is irreversible."); - print!("Are you sure you want to proceed? (yes/No): "); + print!("Are you sure you want to proceed with resetting? 
(yes/No): "); io::stdout().flush().map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?; let mut confirmation = String::new(); io::stdin().read_line(&mut confirmation).map_err(|e| BusterError::CommandError(format!("Failed to read user input: {}", e)))?; if confirmation.trim().to_lowercase() != "yes" { - println!("Restart cancelled by user."); + println!("Reset cancelled by user."); return Ok(()); } @@ -185,7 +185,7 @@ pub async fn restart() -> Result<(), BusterError> { .expect("Failed to set progress bar style"), ); - pb.set_message("Rebuilding Buster services (step 1/4): Stopping services..."); + pb.set_message("Resetting Buster services (step 1/4): Stopping services..."); let mut down_cmd = Command::new("docker"); down_cmd.current_dir(&persistent_app_dir) @@ -215,7 +215,7 @@ Stderr: return Err(BusterError::CommandError(err_msg)); } - pb.set_message("Rebuilding Buster services (step 2/4): Identifying service images..."); + pb.set_message("Resetting Buster services (step 2/4): Identifying service images..."); let mut config_images_cmd = Command::new("docker"); config_images_cmd.current_dir(&persistent_app_dir) .arg("compose") @@ -251,14 +251,14 @@ Stderr: if image_names.is_empty() { pb.println("No images identified by docker-compose config --images. Skipping image removal."); } else { - pb.set_message(format!("Rebuilding Buster services (step 3/4): Removing {} service image(s)...", image_names.len())); + pb.set_message(format!("Resetting Buster services (step 3/4): Removing {} service image(s)...", image_names.len())); for (index, image_name) in image_names.iter().enumerate() { let current_image_name = image_name.trim(); if current_image_name.is_empty() { continue; } pb.set_message(format!( - "Rebuilding Buster services (step 3/4): Removing image {}/{} ('{}')...", + "Resetting Buster services (step 3/4): Removing image {}/{} ('{}')...", index + 1, image_names.len(), current_image_name @@ -278,7 +278,7 @@ Stderr: } } - pb.set_message("Rebuilding Buster services (step 4/4): Starting services (pulling images if needed)..."); + pb.set_message("Resetting Buster services (step 4/4): Starting services (pulling images if needed)..."); let mut up_cmd = Command::new("docker"); up_cmd.current_dir(&persistent_app_dir) .arg("compose") @@ -296,7 +296,7 @@ Stderr: let up_output = up_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose up: {}", e)))?; if up_output.status.success() { - pb.finish_with_message("Buster services rebuilt and started successfully."); + pb.finish_with_message("Buster services reset and started successfully."); Ok(()) } else { let err_msg = format!( diff --git a/cli/cli/src/main.rs b/cli/cli/src/main.rs index 5ed284f80..182f3a82f 100644 --- a/cli/cli/src/main.rs +++ b/cli/cli/src/main.rs @@ -85,7 +85,7 @@ pub enum Commands { /// Stop the Buster services Stop, /// Restart the Buster services - Restart, + Reset, } #[derive(Parser)] @@ -144,7 +144,7 @@ async fn main() { Commands::Parse { path } => commands::parse::parse_models_command(path).await, Commands::Start => run::start().await.map_err(anyhow::Error::from), Commands::Stop => run::stop().await.map_err(anyhow::Error::from), - Commands::Restart => run::restart().await.map_err(anyhow::Error::from), + Commands::Reset => run::reset().await.map_err(anyhow::Error::from), }; if let Err(e) = result { diff --git a/docker-compose.yml b/docker-compose.yml index 5ce16d86e..250583c4c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -59,5 +59,19 @@ services: 
condition: service_healthy network_mode: "service:api" + litellm: + image: ghcr.io/berriai/litellm:main-latest + container_name: buster-litellm + ports: + - "4001:4001" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:4001/health/readiness"] + interval: 30s + timeout: 10s + retries: 3 + depends_on: + api: + condition: service_healthy + volumes: buster_redis_data: \ No newline at end of file diff --git a/supabase/.env.example b/supabase/.env.example index 5048b562e..d2cb2c943 100644 --- a/supabase/.env.example +++ b/supabase/.env.example @@ -20,7 +20,7 @@ EMBEDDING_MODEL="mxbai-embed-large" COHERE_API_KEY="" OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant LLM_API_KEY="test-key" -LLM_BASE_URL="http://litellm:4001" +LLM_BASE_URL="http://buster-litellm:4001" # --- Web Client (Next.js) Specific --- NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) From 7519e066f90dbfba25622d6659d3cae729eb2236 Mon Sep 17 00:00:00 2001 From: dal Date: Wed, 7 May 2025 23:44:50 -0600 Subject: [PATCH 37/43] flash on conversation title --- .../handlers/src/chats/helpers/generate_conversation_title.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs b/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs index 6bd08c671..15d3c7060 100644 --- a/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs +++ b/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs @@ -71,7 +71,7 @@ pub async fn generate_conversation_title( // Create the request let request = ChatCompletionRequest { - model: "gpt-4o-mini".to_string(), + model: "gemini-2.0-flash-001".to_string(), messages: vec![LiteLLMAgentMessage::User { id: None, content: prompt, From 2d1ded6643793fd00362dfede6b88c3648268a20 Mon Sep 17 00:00:00 2001 From: dal Date: Thu, 8 May 2025 00:03:15 -0600 Subject: [PATCH 38/43] lets test the cli release --- .github/workflows/cli-release.yml | 22 +-- cli/cli/src/commands/run.rs | 236 ++++++++++++++++++++---------- 2 files changed, 164 insertions(+), 94 deletions(-) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 748cd6296..e1742cb3c 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -53,24 +53,6 @@ jobs: - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2 - - name: Install libpq (macOS and Linux) - if: runner.os != 'Windows' - run: | - if [[ "${{ runner.os }}" == "macOS" ]]; then - brew install libpq - echo "PKG_CONFIG_PATH=$(brew --prefix libpq)/lib/pkgconfig" >> $GITHUB_ENV - echo "LIBRARY_PATH=$(brew --prefix libpq)/lib" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=$(brew --prefix libpq)/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV - # For macOS, we might need to explicitly tell rustc where to find the library. 
- # Adding common libpq paths to rustflags - echo "RUSTFLAGS=-L $(brew --prefix libpq)/lib" >> $GITHUB_ENV - elif [[ "${{ runner.os }}" == "Linux" ]]; then - sudo apt-get update -y - sudo apt-get install -y libpq-dev - fi - env: - HOMEBREW_NO_INSTALL_CLEANUP: 1 # Recommended for CI to speed up - - name: Configure Cargo for optimized build run: | mkdir -p .cargo @@ -115,6 +97,9 @@ jobs: release: needs: build runs-on: ubuntu-latest + outputs: # Outputs for downstream jobs + cli_version: ${{ steps.get_version.outputs.version }} + cli_tag_name: ${{ steps.create_release.outputs.tag_name }} steps: - name: Checkout code uses: actions/checkout@v4 @@ -131,6 +116,7 @@ jobs: echo "version=$VERSION" >> $GITHUB_OUTPUT echo "Extracted version: $VERSION" - name: Create Release + id: create_release uses: softprops/action-gh-release@v1 with: tag_name: v${{ steps.get_version.outputs.version }} diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index 6a71d00d0..0f1a80680 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -1,12 +1,12 @@ +use crate::error::BusterError; +use dirs; +use indicatif::{ProgressBar, ProgressStyle}; +use rust_embed::RustEmbed; use std::fs; use std::io::{self, Write}; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use crate::error::BusterError; -use indicatif::{ProgressBar, ProgressStyle}; +use std::path::PathBuf; +use std::process::Command; use std::time::Duration; -use rust_embed::RustEmbed; -use dirs; #[derive(RustEmbed)] #[folder = "../../"] @@ -21,8 +21,11 @@ use dirs; struct StaticAssets; async fn setup_persistent_app_environment() -> Result { - let home_dir = dirs::home_dir() - .ok_or_else(|| BusterError::CommandError("Failed to get home directory. Cannot set up persistent app path.".to_string()))?; + let home_dir = dirs::home_dir().ok_or_else(|| { + BusterError::CommandError( + "Failed to get home directory. 
Cannot set up persistent app path.".to_string(), + ) + })?; let app_base_dir = home_dir.join(".buster"); fs::create_dir_all(&app_base_dir).map_err(|e| { @@ -35,8 +38,9 @@ async fn setup_persistent_app_environment() -> Result { for filename_cow in StaticAssets::iter() { let filename = filename_cow.as_ref(); - let asset = StaticAssets::get(filename) - .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", filename)))?; + let asset = StaticAssets::get(filename).ok_or_else(|| { + BusterError::CommandError(format!("Failed to get embedded asset: {}", filename)) + })?; let target_file_path = app_base_dir.join(filename); if let Some(parent) = target_file_path.parent() { @@ -60,15 +64,24 @@ async fn setup_persistent_app_environment() -> Result { } let supabase_volumes_functions_path = app_base_dir.join("supabase/volumes/functions"); - fs::create_dir_all(supabase_volumes_functions_path).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions in persistent app dir: {}", e)))?; - + fs::create_dir_all(supabase_volumes_functions_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to create supabase/volumes/functions in persistent app dir: {}", + e + )) + })?; + let target_dotenv_path = app_base_dir.join(".env"); // Always use .env.example from embedded assets let example_env_filename = "supabase/.env.example"; - let asset = StaticAssets::get(example_env_filename) - .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", example_env_filename)))?; - + let asset = StaticAssets::get(example_env_filename).ok_or_else(|| { + BusterError::CommandError(format!( + "Failed to get embedded asset: {}", + example_env_filename + )) + })?; + fs::write(&target_dotenv_path, asset.data).map_err(|e| { BusterError::CommandError(format!( "Failed to write {} to {}: {}", @@ -92,16 +105,29 @@ async fn setup_persistent_app_environment() -> Result { Ok(app_base_dir) } -async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Result<(), BusterError> { +async fn run_docker_compose_command( + args: &[&str], + operation_name: &str, +) -> Result<(), BusterError> { let persistent_app_dir = setup_persistent_app_environment().await?; let data_db_path = persistent_app_dir.join("supabase/volumes/db/data"); - fs::create_dir_all(&data_db_path) - .map_err(|e| BusterError::CommandError(format!("Failed to create persistent data directory at {}: {}", data_db_path.display(), e)))?; + fs::create_dir_all(&data_db_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to create persistent data directory at {}: {}", + data_db_path.display(), + e + )) + })?; let data_storage_path = persistent_app_dir.join("supabase/volumes/storage"); - fs::create_dir_all(&data_storage_path) - .map_err(|e| BusterError::CommandError(format!("Failed to create persistent data directory at {}: {}", data_storage_path.display(), e)))?; + fs::create_dir_all(&data_storage_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to create persistent data directory at {}: {}", + data_storage_path.display(), + e + )) + })?; let pb = ProgressBar::new_spinner(); pb.enable_steady_tick(Duration::from_millis(120)); @@ -112,7 +138,10 @@ async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Resu .expect("Failed to set progress bar style"), ); if operation_name == "Starting" { - pb.set_message(format!("{} Buster services... 
(this may take a few minutes)", operation_name)); + pb.set_message(format!( + "{} Buster services... (this may take a few minutes)", + operation_name + )); } else { pb.set_message(format!("{} Buster services...", operation_name)); } @@ -127,7 +156,11 @@ async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Resu .args(args); let output = cmd.output().map_err(|e| { - BusterError::CommandError(format!("Failed to execute docker compose {}: {}", args.join(" "), e)) + BusterError::CommandError(format!( + "Failed to execute docker compose {}: {}", + args.join(" "), + e + )) })?; if output.status.success() { @@ -145,7 +178,10 @@ async fn run_docker_compose_command(args: &[&str], operation_name: &str) -> Resu String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) ); - pb.abandon_with_message(format!("Error: docker compose {} failed. See console for details.", args.join(" "))); + pb.abandon_with_message(format!( + "Error: docker compose {} failed. See console for details.", + args.join(" ") + )); println!("\nDocker Compose Error Details:\n{}", err_msg); Err(BusterError::CommandError(err_msg)) } @@ -161,15 +197,21 @@ pub async fn stop() -> Result<(), BusterError> { pub async fn reset() -> Result<(), BusterError> { println!("WARNING: This command will stop all Buster services, attempt to remove their current images, and then restart them from scratch."); - println!("This can lead to a complete wipe of the Buster database and any other local service data."); + println!( + "This can lead to a complete wipe of the Buster database and any other local service data." + ); println!("This action is irreversible."); - print!("Are you sure you want to proceed with resetting? (yes/No): "); - io::stdout().flush().map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?; + print!("Are you sure you want to proceed with resetting? (y/n): "); + io::stdout() + .flush() + .map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?; let mut confirmation = String::new(); - io::stdin().read_line(&mut confirmation).map_err(|e| BusterError::CommandError(format!("Failed to read user input: {}", e)))?; + io::stdin() + .read_line(&mut confirmation) + .map_err(|e| BusterError::CommandError(format!("Failed to read user input: {}", e)))?; - if confirmation.trim().to_lowercase() != "yes" { + if confirmation.trim().to_lowercase() != "y" { println!("Reset cancelled by user."); return Ok(()); } @@ -185,10 +227,12 @@ pub async fn reset() -> Result<(), BusterError> { .expect("Failed to set progress bar style"), ); + // Step 1: Stop services pb.set_message("Resetting Buster services (step 1/4): Stopping services..."); let mut down_cmd = Command::new("docker"); - down_cmd.current_dir(&persistent_app_dir) + down_cmd + .current_dir(&persistent_app_dir) .arg("compose") .arg("-p") .arg("buster") @@ -196,7 +240,9 @@ pub async fn reset() -> Result<(), BusterError> { .arg("docker-compose.yml") .arg("down"); - let down_output = down_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose down: {}", e)))?; + let down_output = down_cmd.output().map_err(|e| { + BusterError::CommandError(format!("Failed to execute docker compose down: {}", e)) + })?; if !down_output.status.success() { let err_msg = format!( "docker compose down failed (status: {}). 
Logs: @@ -215,9 +261,52 @@ Stderr: return Err(BusterError::CommandError(err_msg)); } - pb.set_message("Resetting Buster services (step 2/4): Identifying service images..."); + // Step 2: Clear persistent data volumes + pb.set_message("Resetting Buster services (step 2/4): Clearing persistent data volumes..."); + let db_volume_path = persistent_app_dir.join("supabase/volumes/db/data"); + let storage_volume_path = persistent_app_dir.join("supabase/volumes/storage"); + + if db_volume_path.exists() { + fs::remove_dir_all(&db_volume_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to remove db volume at {}: {}", + db_volume_path.display(), + e + )) + })?; + } + fs::create_dir_all(&db_volume_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to recreate db volume at {}: {}", + db_volume_path.display(), + e + )) + })?; + pb.println(format!("Successfully cleared and recreated database volume: {}", db_volume_path.display())); + + if storage_volume_path.exists() { + fs::remove_dir_all(&storage_volume_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to remove storage volume at {}: {}", + storage_volume_path.display(), + e + )) + })?; + } + fs::create_dir_all(&storage_volume_path).map_err(|e| { + BusterError::CommandError(format!( + "Failed to recreate storage volume at {}: {}", + storage_volume_path.display(), + e + )) + })?; + pb.println(format!("Successfully cleared and recreated storage volume: {}", storage_volume_path.display())); + + // Step 3: Identify service images + pb.set_message("Resetting Buster services (step 3/4): Identifying service images..."); let mut config_images_cmd = Command::new("docker"); - config_images_cmd.current_dir(&persistent_app_dir) + config_images_cmd + .current_dir(&persistent_app_dir) .arg("compose") .arg("-p") .arg("buster") @@ -226,7 +315,12 @@ Stderr: .arg("config") .arg("--images"); - let config_images_output = config_images_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose config --images: {}", e)))?; + let config_images_output = config_images_cmd.output().map_err(|e| { + BusterError::CommandError(format!( + "Failed to execute docker compose config --images: {}", + e + )) + })?; if !config_images_output.status.success() { let err_msg = format!( "docker compose config --images failed (status: {}). Logs: @@ -240,25 +334,39 @@ Stderr: String::from_utf8_lossy(&config_images_output.stdout), String::from_utf8_lossy(&config_images_output.stderr) ); - pb.abandon_with_message("Error: Failed to identify service images. See console for details."); - println!("\nDocker Compose Config --images Error Details:\n{}", err_msg); + pb.abandon_with_message( + "Error: Failed to identify service images. See console for details.", + ); + println!( + "\nDocker Compose Config --images Error Details:\n{}", + err_msg + ); return Err(BusterError::CommandError(err_msg)); } let image_list_str = String::from_utf8_lossy(&config_images_output.stdout); - let image_names: Vec<&str> = image_list_str.lines().filter(|line| !line.trim().is_empty()).collect(); + let image_names: Vec<&str> = image_list_str + .lines() + .filter(|line| !line.trim().is_empty()) + .collect(); + // Step 4: Remove service images if image_names.is_empty() { - pb.println("No images identified by docker-compose config --images. Skipping image removal."); + pb.println( + "No images identified by docker-compose config --images. 
Skipping image removal.", + ); } else { - pb.set_message(format!("Resetting Buster services (step 3/4): Removing {} service image(s)...", image_names.len())); + pb.set_message(format!( + "Resetting Buster services (step 4/4): Removing {} service image(s)...", + image_names.len() + )); for (index, image_name) in image_names.iter().enumerate() { let current_image_name = image_name.trim(); if current_image_name.is_empty() { continue; } pb.set_message(format!( - "Resetting Buster services (step 3/4): Removing image {}/{} ('{}')...", + "Resetting Buster services (step 4/4): Removing image {}/{} ('{}')...", index + 1, image_names.len(), current_image_name @@ -266,48 +374,24 @@ Stderr: let mut rmi_cmd = Command::new("docker"); rmi_cmd.arg("image").arg("rm").arg(current_image_name); - let rmi_output = rmi_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker image rm {}: {}", current_image_name, e)))?; - + let rmi_output = rmi_cmd.output().map_err(|e| { + BusterError::CommandError(format!( + "Failed to execute docker image rm {}: {}", + current_image_name, e + )) + })?; + // Log warning on failure but continue, as image might not exist or be in use by other non-project containers if !rmi_output.status.success() { let rmi_stderr = String::from_utf8_lossy(&rmi_output.stderr); - if !rmi_stderr.trim().is_empty() && !rmi_stderr.contains("No such image") { // Don't warn if image was already gone - pb.println(format!("Warning: Could not remove image '{}'. It might be in use or already removed. Stderr: {}", current_image_name, rmi_stderr.trim())); + if !rmi_stderr.trim().is_empty() && !rmi_stderr.contains("No such image") { + // Don't warn if image was already gone + pb.println(format!("Warning: Could not remove image '{}'. It might be in use or already removed. Stderr: {}", current_image_name, rmi_stderr.trim())); } } } } - pb.set_message("Resetting Buster services (step 4/4): Starting services (pulling images if needed)..."); - let mut up_cmd = Command::new("docker"); - up_cmd.current_dir(&persistent_app_dir) - .arg("compose") - .arg("-p") - .arg("buster") - .arg("-f") - .arg("docker-compose.yml") - .arg("up") - .arg("-d") - .arg("--pull") - .arg("always") - .arg("--force-recreate") - .arg("--remove-orphans"); - - let up_output = up_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose up: {}", e)))?; - - if up_output.status.success() { - pb.finish_with_message("Buster services reset and started successfully."); - Ok(()) - } else { - let err_msg = format!( - "docker compose up failed after image purge (status: {}). Logs:\nWorking directory: {}\nStdout:\n{}\nStderr:\n{}", - up_output.status, - persistent_app_dir.display(), - String::from_utf8_lossy(&up_output.stdout), - String::from_utf8_lossy(&up_output.stderr) - ); - pb.abandon_with_message("Error: docker compose up failed after image purge. 
See console for details."); - println!("\nDocker Compose Up Error Details:\n{}", err_msg); - Err(BusterError::CommandError(err_msg)) - } -} \ No newline at end of file + pb.finish_with_message("Buster services stopped, volumes cleared, and images removed successfully."); + Ok(()) +} From 3d19d8eab05ad095846fa7c0df1bc373ebccc732 Mon Sep 17 00:00:00 2001 From: dal Date: Thu, 8 May 2025 01:12:57 -0600 Subject: [PATCH 39/43] cli release and locla oai setup --- .env.example | 4 +- .github/workflows/cli-release.yml | 58 ++++- Dockerfile | 61 ----- .../agents/src/agents/buster_multi_agent.rs | 9 +- api/libs/agents/src/agents/modes/analysis.rs | 9 +- .../src/agents/modes/data_catalog_search.rs | 9 +- .../agents/modes/follow_up_initialization.rs | 222 ++++++++++++------ .../agents/src/agents/modes/initialization.rs | 19 +- api/libs/agents/src/agents/modes/planning.rs | 20 +- api/libs/agents/src/agents/modes/review.rs | 14 +- .../file_tools/search_data_catalog.rs | 8 +- .../planning_tools/helpers/todo_generator.rs | 10 +- api/libs/handlers/Cargo.toml | 1 + .../helpers/generate_conversation_title.rs | 8 +- .../handlers/src/chats/post_chat_handler.rs | 9 +- .../routes/helpers/search_data_catalog.rs | 14 +- cli/cli/src/commands/run.rs | 1 + docker-compose.yml | 30 ++- start | 15 -- 19 files changed, 332 insertions(+), 189 deletions(-) delete mode 100644 Dockerfile delete mode 100755 start diff --git a/.env.example b/.env.example index 2d1362e35..7133a3dd4 100644 --- a/.env.example +++ b/.env.example @@ -1,5 +1,5 @@ # API VARS -ENVIRONMENT="development" +ENVIRONMENT="local" DATABASE_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres" POOLER_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres" JWT_SECRET="super-secret-jwt-token-with-at-least-32-characters-long" @@ -15,7 +15,7 @@ RERANK_API_KEY="your_rerank_api_key" RERANK_MODEL="rerank-v3.5" RERANK_BASE_URL="https://api.cohere.com/v2/rerank" LLM_API_KEY="your_llm_api_key" -LLM_BASE_URL="http://buster-litellm:4001" +LLM_BASE_URL="https://api.openai.com/v1" # WEB VARS NEXT_PUBLIC_API_URL="http://127.0.0.1:3001" diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index e1742cb3c..3c4252ea1 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -53,6 +53,60 @@ jobs: - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2 + - name: Install libpq (macOS and Linux) + if: runner.os != 'Windows' + run: | + if [[ "${{ runner.os }}" == "macOS" ]]; then + brew install libpq + echo "PKG_CONFIG_PATH=$(brew --prefix libpq)/lib/pkgconfig" >> $GITHUB_ENV + echo "LIBRARY_PATH=$(brew --prefix libpq)/lib" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(brew --prefix libpq)/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV + # For macOS, we might need to explicitly tell rustc where to find the library. 
+ # Adding common libpq paths to rustflags + echo "RUSTFLAGS=-L $(brew --prefix libpq)/lib" >> $GITHUB_ENV + elif [[ "${{ runner.os }}" == "Linux" ]]; then + sudo apt-get update -y + sudo apt-get install -y libpq-dev + fi + env: + HOMEBREW_NO_INSTALL_CLEANUP: 1 # Recommended for CI to speed up + + - name: Install libpq (Windows) + if: runner.os == 'Windows' + shell: pwsh + run: | + $ErrorActionPreference = "Stop" + $ProgressPreference = "SilentlyContinue" # Optional: for cleaner logs + + $PG_VERSION = "16.2.0" # Specify the desired PostgreSQL version + $PG_TARGET = "x86_64-pc-windows-msvc" + $PG_ARCHIVE_NAME = "postgresql-${PG_VERSION}-${PG_TARGET}.tar.gz" + $PG_DOWNLOAD_URL = "https://github.com/theseus-rs/postgresql-binaries/releases/download/v${PG_VERSION}/${PG_ARCHIVE_NAME}" + $PG_INSTALL_DIR = "C:/pgsql" + + Write-Host "Downloading PostgreSQL binaries from $PG_DOWNLOAD_URL" + Invoke-WebRequest -Uri $PG_DOWNLOAD_URL -OutFile $PG_ARCHIVE_NAME + + Write-Host "Extracting PostgreSQL binaries to $PG_INSTALL_DIR" + New-Item -ItemType Directory -Force -Path $PG_INSTALL_DIR + tar -xzf $PG_ARCHIVE_NAME -C $PG_INSTALL_DIR --strip-components=1 # Assumes archive has a top-level dir + + Write-Host "Setting up environment variables for libpq" + $PG_LIB_PATH = Join-Path $PG_INSTALL_DIR "lib" + $PG_BIN_PATH = Join-Path $PG_INSTALL_DIR "bin" + + echo "RUSTFLAGS=-L ${PG_LIB_PATH}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + echo "PATH=${PG_BIN_PATH};${env:PATH}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + + Write-Host "PostgreSQL lib path: $PG_LIB_PATH" + Write-Host "PostgreSQL bin path added to PATH: $PG_BIN_PATH" + Write-Host "Contents of $PG_LIB_PATH:" + Get-ChildItem $PG_LIB_PATH + Write-Host "Contents of $PG_BIN_PATH:" + Get-ChildItem $PG_BIN_PATH + Write-Host "Updated RUSTFLAGS: $env:RUSTFLAGS" + Write-Host "Updated PATH: $env:PATH" + - name: Configure Cargo for optimized build run: | mkdir -p .cargo @@ -97,9 +151,6 @@ jobs: release: needs: build runs-on: ubuntu-latest - outputs: # Outputs for downstream jobs - cli_version: ${{ steps.get_version.outputs.version }} - cli_tag_name: ${{ steps.create_release.outputs.tag_name }} steps: - name: Checkout code uses: actions/checkout@v4 @@ -116,7 +167,6 @@ jobs: echo "version=$VERSION" >> $GITHUB_OUTPUT echo "Extracted version: $VERSION" - name: Create Release - id: create_release uses: softprops/action-gh-release@v1 with: tag_name: v${{ steps.get_version.outputs.version }} diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 7ce46e2bc..000000000 --- a/Dockerfile +++ /dev/null @@ -1,61 +0,0 @@ -# Build stage for the API -FROM lukemathwalker/cargo-chef AS api-builder -WORKDIR /app/api -COPY api/ . -RUN cargo install diesel_cli --no-default-features --features postgres -RUN cargo build --release --bin bi_api - -# Build stage for the web app -FROM node:18 AS web-builder -WORKDIR /app/web -COPY web/ . 
-RUN npm ci -RUN npm run build -RUN npm prune --production - -# Final stage -FROM debian:bookworm-slim -WORKDIR /app - -# Install runtime dependencies -RUN apt-get update && apt-get install -y \ - ca-certificates \ - curl \ - postgresql-client \ - libpq-dev \ - nodejs \ - npm \ - && rm -rf /var/lib/apt/lists/* - -# Copy built artifacts -COPY --from=api-builder /app/api/target/release/bi_api ./api/ -COPY --from=api-builder /usr/local/cargo/bin/diesel /usr/local/bin/diesel -COPY --from=web-builder /app/web/.next ./web/.next -COPY --from=web-builder /app/web/public ./web/public -COPY --from=web-builder /app/web/package.json ./web/ -COPY --from=web-builder /app/web/node_modules ./web/node_modules -COPY docker-compose.yml . -COPY api/migrations ./migrations/ -COPY api/diesel.toml . - -# Copy entrypoint script -COPY <) -> Pin> + Send>> + Send + Sync> = diff --git a/api/libs/agents/src/agents/modes/follow_up_initialization.rs b/api/libs/agents/src/agents/modes/follow_up_initialization.rs index 39af16fc7..b6d51d7ff 100644 --- a/api/libs/agents/src/agents/modes/follow_up_initialization.rs +++ b/api/libs/agents/src/agents/modes/follow_up_initialization.rs @@ -1,9 +1,10 @@ use anyhow::Result; use serde_json::Value; use std::collections::HashMap; -use std::sync::Arc; -use std::pin::Pin; +use std::env; use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; // Import necessary types from the parent module (modes/mod.rs) use super::{ModeAgentData, ModeConfiguration}; @@ -13,102 +14,178 @@ use crate::{Agent, ToolExecutor}; use crate::tools::{ categories::{ file_tools::{ - CreateDashboardFilesTool, - CreateMetricFilesTool, - ModifyDashboardFilesTool, - ModifyMetricFilesTool, - SearchDataCatalogTool, - }, - planning_tools::{ - CreatePlanInvestigative, - CreatePlanStraightforward, - }, - response_tools::{ - Done, - MessageUserClarifyingQuestion, + CreateDashboardFilesTool, CreateMetricFilesTool, ModifyDashboardFilesTool, + ModifyMetricFilesTool, SearchDataCatalogTool, }, + planning_tools::{CreatePlanInvestigative, CreatePlanStraightforward}, + response_tools::{Done, MessageUserClarifyingQuestion}, utility_tools::no_search_needed::NoSearchNeededTool, }, planning_tools::ReviewPlan, IntoToolCallExecutor, }; - // Function to get the configuration for the FollowUpInitialization mode pub fn get_configuration(agent_data: &ModeAgentData) -> ModeConfiguration { // 1. Get the prompt, formatted with current data let prompt = FOLLOW_UP_INTIALIZATION_PROMPT - .replace("{DATASETS}", &agent_data.dataset_with_descriptions.join("\n\n")) + .replace( + "{DATASETS}", + &agent_data.dataset_with_descriptions.join("\n\n"), + ) .replace("{TODAYS_DATE}", &agent_data.todays_date); // 2. Define the model for this mode (Using a default, adjust if needed) - let model = "gemini-2.5-pro-exp-03-25".to_string(); // Assuming default based on original MODEL = None + + let model = + if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "o4-mini".to_string() + } else { + "gemini-2.5-pro-exp-03-25".to_string() + }; // 3. 
Define the tool loader closure - let tool_loader: Box) -> Pin> + Send>> + Send + Sync> = - Box::new(|agent_arc: &Arc| { - let agent_clone = Arc::clone(agent_arc); // Clone Arc for the async block - Box::pin(async move { - // Clear existing tools before loading mode-specific ones - agent_clone.clear_tools().await; + let tool_loader: Box< + dyn Fn(&Arc) -> Pin> + Send>> + Send + Sync, + > = Box::new(|agent_arc: &Arc| { + let agent_clone = Arc::clone(agent_arc); // Clone Arc for the async block + Box::pin(async move { + // Clear existing tools before loading mode-specific ones + agent_clone.clear_tools().await; - // Instantiate all potentially relevant tools for follow-up state - let search_data_catalog_tool = SearchDataCatalogTool::new(agent_clone.clone()); - let no_search_needed_tool = NoSearchNeededTool::new(agent_clone.clone()); - let create_plan_straightforward_tool = CreatePlanStraightforward::new(agent_clone.clone()); - let create_plan_investigative_tool = CreatePlanInvestigative::new(agent_clone.clone()); - let create_metric_files_tool = CreateMetricFilesTool::new(agent_clone.clone()); - let modify_metric_files_tool = ModifyMetricFilesTool::new(agent_clone.clone()); - let create_dashboard_files_tool = CreateDashboardFilesTool::new(agent_clone.clone()); - let modify_dashboard_files_tool = ModifyDashboardFilesTool::new(agent_clone.clone()); - let message_user_clarifying_question_tool = MessageUserClarifyingQuestion::new(); - let done_tool = Done::new(agent_clone.clone()); - let review_tool = ReviewPlan::new(agent_clone.clone()); + // Instantiate all potentially relevant tools for follow-up state + let search_data_catalog_tool = SearchDataCatalogTool::new(agent_clone.clone()); + let no_search_needed_tool = NoSearchNeededTool::new(agent_clone.clone()); + let create_plan_straightforward_tool = + CreatePlanStraightforward::new(agent_clone.clone()); + let create_plan_investigative_tool = CreatePlanInvestigative::new(agent_clone.clone()); + let create_metric_files_tool = CreateMetricFilesTool::new(agent_clone.clone()); + let modify_metric_files_tool = ModifyMetricFilesTool::new(agent_clone.clone()); + let create_dashboard_files_tool = CreateDashboardFilesTool::new(agent_clone.clone()); + let modify_dashboard_files_tool = ModifyDashboardFilesTool::new(agent_clone.clone()); + let message_user_clarifying_question_tool = MessageUserClarifyingQuestion::new(); + let done_tool = Done::new(agent_clone.clone()); + let review_tool = ReviewPlan::new(agent_clone.clone()); - // --- Define Conditions based on Agent State (as per original load_tools) --- - let search_condition = Some(|state: &HashMap| -> bool { - !state.get("searched_data_catalog").and_then(Value::as_bool).unwrap_or(false) - }); - let planning_condition = Some(|state: &HashMap| -> bool { - let searched = state.get("searched_data_catalog").and_then(Value::as_bool).unwrap_or(false); - let has_context = state.contains_key("data_context"); // Assuming context presence implies adequacy - let has_plan = state.contains_key("plan_available"); - searched && has_context && !has_plan - }); - let analysis_condition = Some(|state: &HashMap| -> bool { - state.contains_key("data_context") && state.contains_key("plan_available") - }); - let modify_condition = Some(|state: &HashMap| -> bool { - state.contains_key("metrics_available") - }); - let review_condition = Some(|state: &HashMap| -> bool { - state.get("review_needed").and_then(Value::as_bool).unwrap_or(false) - }); - let always_available = Some(|_state: &HashMap| -> bool { true }); // For 
done/clarify + // --- Define Conditions based on Agent State (as per original load_tools) --- + let search_condition = Some(|state: &HashMap| -> bool { + !state + .get("searched_data_catalog") + .and_then(Value::as_bool) + .unwrap_or(false) + }); + let planning_condition = Some(|state: &HashMap| -> bool { + let searched = state + .get("searched_data_catalog") + .and_then(Value::as_bool) + .unwrap_or(false); + let has_context = state.contains_key("data_context"); // Assuming context presence implies adequacy + let has_plan = state.contains_key("plan_available"); + searched && has_context && !has_plan + }); + let analysis_condition = Some(|state: &HashMap| -> bool { + state.contains_key("data_context") && state.contains_key("plan_available") + }); + let modify_condition = Some(|state: &HashMap| -> bool { + state.contains_key("metrics_available") + }); + let review_condition = Some(|state: &HashMap| -> bool { + state + .get("review_needed") + .and_then(Value::as_bool) + .unwrap_or(false) + }); + let always_available = Some(|_state: &HashMap| -> bool { true }); // For done/clarify - // Add tools with their respective conditions - agent_clone.add_tool(search_data_catalog_tool.get_name(), search_data_catalog_tool.into_tool_call_executor(), search_condition.clone()).await; - agent_clone.add_tool(no_search_needed_tool.get_name(), no_search_needed_tool.into_tool_call_executor(), search_condition).await; - agent_clone.add_tool(create_plan_straightforward_tool.get_name(), create_plan_straightforward_tool.into_tool_call_executor(), planning_condition.clone()).await; - agent_clone.add_tool(create_plan_investigative_tool.get_name(), create_plan_investigative_tool.into_tool_call_executor(), planning_condition).await; - agent_clone.add_tool(create_metric_files_tool.get_name(), create_metric_files_tool.into_tool_call_executor(), analysis_condition.clone()).await; - agent_clone.add_tool(modify_metric_files_tool.get_name(), modify_metric_files_tool.into_tool_call_executor(), modify_condition.clone()).await; - agent_clone.add_tool(create_dashboard_files_tool.get_name(), create_dashboard_files_tool.into_tool_call_executor(), analysis_condition.clone()).await; - agent_clone.add_tool(modify_dashboard_files_tool.get_name(), modify_dashboard_files_tool.into_tool_call_executor(), modify_condition.clone()).await; - agent_clone.add_tool(review_tool.get_name(), review_tool.into_tool_call_executor(), review_condition).await; - agent_clone.add_tool(message_user_clarifying_question_tool.get_name(), message_user_clarifying_question_tool.into_tool_call_executor(), always_available.clone()).await; - agent_clone.add_tool(done_tool.get_name(), done_tool.into_tool_call_executor(), always_available).await; + // Add tools with their respective conditions + agent_clone + .add_tool( + search_data_catalog_tool.get_name(), + search_data_catalog_tool.into_tool_call_executor(), + search_condition.clone(), + ) + .await; + agent_clone + .add_tool( + no_search_needed_tool.get_name(), + no_search_needed_tool.into_tool_call_executor(), + search_condition, + ) + .await; + agent_clone + .add_tool( + create_plan_straightforward_tool.get_name(), + create_plan_straightforward_tool.into_tool_call_executor(), + planning_condition.clone(), + ) + .await; + agent_clone + .add_tool( + create_plan_investigative_tool.get_name(), + create_plan_investigative_tool.into_tool_call_executor(), + planning_condition, + ) + .await; + agent_clone + .add_tool( + create_metric_files_tool.get_name(), + create_metric_files_tool.into_tool_call_executor(), + 
analysis_condition.clone(), + ) + .await; + agent_clone + .add_tool( + modify_metric_files_tool.get_name(), + modify_metric_files_tool.into_tool_call_executor(), + modify_condition.clone(), + ) + .await; + agent_clone + .add_tool( + create_dashboard_files_tool.get_name(), + create_dashboard_files_tool.into_tool_call_executor(), + analysis_condition.clone(), + ) + .await; + agent_clone + .add_tool( + modify_dashboard_files_tool.get_name(), + modify_dashboard_files_tool.into_tool_call_executor(), + modify_condition.clone(), + ) + .await; + agent_clone + .add_tool( + review_tool.get_name(), + review_tool.into_tool_call_executor(), + review_condition, + ) + .await; + agent_clone + .add_tool( + message_user_clarifying_question_tool.get_name(), + message_user_clarifying_question_tool.into_tool_call_executor(), + always_available.clone(), + ) + .await; + agent_clone + .add_tool( + done_tool.get_name(), + done_tool.into_tool_call_executor(), + always_available, + ) + .await; - Ok(()) - }) - }); + Ok(()) + }) + }); // 4. Define terminating tools for this mode let terminating_tools = vec![ // From original load_tools // Use hardcoded names if static access isn't available "message_user_clarifying_question".to_string(), // Assuming this is the name - "finish_and_respond".to_string(), // Assuming this is the name for Done tool + "finish_and_respond".to_string(), // Assuming this is the name for Done tool ]; // 5. Construct and return the ModeConfiguration @@ -120,7 +197,6 @@ pub fn get_configuration(agent_data: &ModeAgentData) -> ModeConfiguration { } } - // Keep the prompt constant, but it's no longer pub const FOLLOW_UP_INTIALIZATION_PROMPT: &str = r##"## Overview You are Buster, an AI assistant and expert in **data analytics, data science, and data engineering**. You operate within the **Buster platform**, the world's best BI tool, assisting non-technical users with their analytics tasks. Your capabilities include: diff --git a/api/libs/agents/src/agents/modes/initialization.rs b/api/libs/agents/src/agents/modes/initialization.rs index 30f6a9fa2..b86595b0a 100644 --- a/api/libs/agents/src/agents/modes/initialization.rs +++ b/api/libs/agents/src/agents/modes/initialization.rs @@ -1,6 +1,7 @@ use anyhow::Result; use serde_json::Value; use std::collections::HashMap; +use std::env; use std::future::Future; use std::pin::Pin; use std::sync::Arc; @@ -18,16 +19,28 @@ use crate::tools::{ }; // Function to get the configuration for the Initialization mode -pub fn get_configuration(agent_data: &ModeAgentData, _data_source_syntax: Option) -> ModeConfiguration { +pub fn get_configuration( + agent_data: &ModeAgentData, + _data_source_syntax: Option, +) -> ModeConfiguration { // 1. Get the prompt, formatted with current data let prompt = INTIALIZATION_PROMPT - .replace("{DATASETS}", &agent_data.dataset_with_descriptions.join("\n\n")) + .replace( + "{DATASETS}", + &agent_data.dataset_with_descriptions.join("\n\n"), + ) .replace("{TODAYS_DATE}", &agent_data.todays_date); // 2. Define the model for this mode (Using a default, adjust if needed) // Since the original MODEL was None, we might use the agent's default // or specify a standard one like "gemini-2.5-pro-exp-03-25". Let's use "gemini-2.5-pro-exp-03-25". - let model = "gemini-2.5-pro-exp-03-25".to_string(); + + let model = + if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "o4-mini".to_string() + } else { + "gemini-2.5-pro-exp-03-25".to_string() + }; // 3. 
Define the tool loader closure let tool_loader: Box< diff --git a/api/libs/agents/src/agents/modes/planning.rs b/api/libs/agents/src/agents/modes/planning.rs index c91c50296..b227fe60a 100644 --- a/api/libs/agents/src/agents/modes/planning.rs +++ b/api/libs/agents/src/agents/modes/planning.rs @@ -1,6 +1,6 @@ use anyhow::Result; use serde_json::Value; -use std::collections::HashMap; +use std::{collections::HashMap, env}; use std::future::Future; use std::pin::Pin; use std::sync::Arc; @@ -21,14 +21,26 @@ use crate::tools::{ }; // Function to get the configuration for the Planning mode -pub fn get_configuration(agent_data: &ModeAgentData, _data_source_syntax: Option) -> ModeConfiguration { +pub fn get_configuration( + agent_data: &ModeAgentData, + _data_source_syntax: Option, +) -> ModeConfiguration { // 1. Get the prompt, formatted with current data let prompt = PLANNING_PROMPT .replace("{TODAYS_DATE}", &agent_data.todays_date) - .replace("{DATASETS}", &agent_data.dataset_with_descriptions.join("\n\n")); + .replace( + "{DATASETS}", + &agent_data.dataset_with_descriptions.join("\n\n"), + ); // 2. Define the model for this mode (Using default based on original MODEL = None) - let model = "gemini-2.5-pro-exp-03-25".to_string(); + + let model = + if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "o4-mini".to_string() + } else { + "gemini-2.5-pro-exp-03-25".to_string() + }; // 3. Define the tool loader closure let tool_loader: Box< diff --git a/api/libs/agents/src/agents/modes/review.rs b/api/libs/agents/src/agents/modes/review.rs index 3f1d6d1ac..834b38ff7 100644 --- a/api/libs/agents/src/agents/modes/review.rs +++ b/api/libs/agents/src/agents/modes/review.rs @@ -1,6 +1,6 @@ use anyhow::Result; use serde_json::Value; -use std::collections::HashMap; +use std::{collections::HashMap, env}; use std::future::Future; use std::pin::Pin; use std::sync::Arc; @@ -19,12 +19,20 @@ use crate::tools::{ }; // Function to get the configuration for the Review mode -pub fn get_configuration(_agent_data: &ModeAgentData, _data_source_syntax: Option) -> ModeConfiguration { +pub fn get_configuration( + _agent_data: &ModeAgentData, + _data_source_syntax: Option, +) -> ModeConfiguration { // 1. Get the prompt (doesn't need formatting for this mode) let prompt = REVIEW_PROMPT.to_string(); // Use the correct constant // 2. Define the model for this mode (From original MODEL const) - let model = "gemini-2.5-flash-preview-04-17".to_string(); + let model = + if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "o4-mini".to_string() + } else { + "gemini-2.5-flash-preview-04-17".to_string() + }; // 3. 
Define the tool loader closure let tool_loader: Box< diff --git a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs index 84978eb26..f002f2dfe 100644 --- a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs +++ b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs @@ -984,8 +984,14 @@ async fn llm_filter_helper( let llm_client = LiteLLMClient::new(None, None); + let model = if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "gpt-4.1-mini".to_string() + } else { + "gemini-2.0-flash-001".to_string() + }; + let request = ChatCompletionRequest { - model: "gemini-2.0-flash-001".to_string(), + model, messages: vec![AgentMessage::User { id: None, content: prompt, diff --git a/api/libs/agents/src/tools/categories/planning_tools/helpers/todo_generator.rs b/api/libs/agents/src/tools/categories/planning_tools/helpers/todo_generator.rs index b7d96e629..8a7e38978 100644 --- a/api/libs/agents/src/tools/categories/planning_tools/helpers/todo_generator.rs +++ b/api/libs/agents/src/tools/categories/planning_tools/helpers/todo_generator.rs @@ -1,3 +1,5 @@ +use std::env; + use anyhow::Result; use litellm::{AgentMessage, ChatCompletionRequest, LiteLLMClient, Metadata, ResponseFormat}; use serde_json::Value; @@ -61,8 +63,14 @@ Example Output for the above plan: `["Create line chart visualization 'Daily Tra plan ); + let model = if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "gpt-4.1-mini".to_string() + } else { + "gemini-2.0-flash-001".to_string() + }; + let request = ChatCompletionRequest { - model: "gemini-2.0-flash-001".to_string(), + model, messages: vec![AgentMessage::User { id: None, content: prompt, name: None }], stream: Some(false), response_format: Some(ResponseFormat { type_: "json_object".to_string(), json_schema: None }), diff --git a/api/libs/handlers/Cargo.toml b/api/libs/handlers/Cargo.toml index 67cf66dcd..4aed53257 100644 --- a/api/libs/handlers/Cargo.toml +++ b/api/libs/handlers/Cargo.toml @@ -20,6 +20,7 @@ regex = { workspace = true } indexmap = { workspace = true } async-trait = { workspace = true } + # Local dependencies database = { path = "../database" } agents = { path = "../agents" } diff --git a/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs b/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs index 15d3c7060..9d44d0e5a 100644 --- a/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs +++ b/api/libs/handlers/src/chats/helpers/generate_conversation_title.rs @@ -69,9 +69,15 @@ pub async fn generate_conversation_title( // Set up LiteLLM client let llm_client = LiteLLMClient::new(None, None); + let model = if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "gpt-4.1-mini".to_string() + } else { + "gemini-2.0-flash-001".to_string() + }; + // Create the request let request = ChatCompletionRequest { - model: "gemini-2.0-flash-001".to_string(), + model, messages: vec![LiteLLMAgentMessage::User { id: None, content: prompt, diff --git a/api/libs/handlers/src/chats/post_chat_handler.rs b/api/libs/handlers/src/chats/post_chat_handler.rs index cd4ff69d2..1ac711a3d 100644 --- a/api/libs/handlers/src/chats/post_chat_handler.rs +++ b/api/libs/handlers/src/chats/post_chat_handler.rs @@ -2,6 +2,7 @@ use agents::tools::file_tools::common::{generate_deterministic_uuid, ModifyFiles use dashmap::DashMap; use 
middleware::AuthenticatedUser; use std::collections::HashSet; +use std::env; use std::{collections::HashMap, time::{Instant, Duration}}; use std::sync::Arc; @@ -2712,9 +2713,15 @@ pub async fn generate_conversation_title( // Set up LiteLLM client let llm_client = LiteLLMClient::new(None, None); + let model = if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "gpt-4.1-mini".to_string() + } else { + "gemini-2.0-flash-001".to_string() + }; + // Create the request let request = ChatCompletionRequest { - model: "gemini-2.0-flash-001".to_string(), + model, messages: vec![LiteLLMAgentMessage::User { id: None, content: prompt, diff --git a/api/server/src/routes/rest/routes/helpers/search_data_catalog.rs b/api/server/src/routes/rest/routes/helpers/search_data_catalog.rs index 56ad3bae6..8a432e45f 100644 --- a/api/server/src/routes/rest/routes/helpers/search_data_catalog.rs +++ b/api/server/src/routes/rest/routes/helpers/search_data_catalog.rs @@ -10,7 +10,10 @@ use futures::stream::{self, StreamExt}; use litellm::{AgentMessage, ChatCompletionRequest, LiteLLMClient, Metadata, ResponseFormat}; use middleware::types::AuthenticatedUser; use serde::{Deserialize, Serialize}; -use std::collections::{HashMap, HashSet}; +use std::{ + collections::{HashMap, HashSet}, + env, +}; use tracing::{debug, error, info, warn}; use uuid::Uuid; @@ -318,9 +321,16 @@ async fn filter_datasets_with_llm( // Initialize LiteLLM client let llm_client = LiteLLMClient::new(None, None); + let model = + if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" { + "gpt-4.1-mini".to_string() + } else { + "gemini-2.0-flash-001".to_string() + }; + // Create the request let request = ChatCompletionRequest { - model: "gemini-2.0-flash-001".to_string(), // Using a small model for cost efficiency + model, // Using a small model for cost efficiency messages: vec![AgentMessage::User { id: None, content: prompt, diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index 0f1a80680..e919832df 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -11,6 +11,7 @@ use std::time::Duration; #[derive(RustEmbed)] #[folder = "../../"] #[include = "docker-compose.yml"] +#[include = "litellm_vertex_config.yaml"] #[include = "supabase/.env.example"] #[include = "supabase/**/*"] #[exclude = "supabase/volumes/db/data/**/*"] diff --git a/docker-compose.yml b/docker-compose.yml index 250583c4c..2954a83dc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -59,19 +59,23 @@ services: condition: service_healthy network_mode: "service:api" - litellm: - image: ghcr.io/berriai/litellm:main-latest - container_name: buster-litellm - ports: - - "4001:4001" - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:4001/health/readiness"] - interval: 30s - timeout: 10s - retries: 3 - depends_on: - api: - condition: service_healthy + # Pausing this for local deployments until we can build out better multi-model support. 
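+  # The block below references litellm_vertex_config.yaml, which this same
+  # change embeds into the CLI binary (see the run.rs hunk above). As a rough,
+  # illustrative sketch only (placeholder values, not the repository's actual
+  # file), a minimal LiteLLM proxy config for Vertex AI might look like:
+  #   model_list:
+  #     - model_name: gemini-2.0-flash-001
+  #       litellm_params:
+  #         model: vertex_ai/gemini-2.0-flash-001
+  #         vertex_project: your-gcp-project   # placeholder
+  #         vertex_location: us-central1       # placeholder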
+ # litellm: + # image: ghcr.io/berriai/litellm:main-latest + # container_name: buster-litellm + # volumes: + # - ./litellm_vertex_config.yaml:/litellm_vertex_config.yaml + # command: ["--config", "/litellm_vertex_config.yaml", "--port", "4001"] + # ports: + # - "4001:4001" + # healthcheck: + # test: ["CMD", "curl", "-f", "http://localhost:4001/health/readiness"] + # interval: 30s + # timeout: 10s + # retries: 3 + # depends_on: + # api: + # condition: service_healthy volumes: buster_redis_data: \ No newline at end of file diff --git a/start b/start deleted file mode 100755 index 06023d123..000000000 --- a/start +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -echo "Starting Supabase..." -cd supabase -docker compose up -d - -echo "Waiting for Supabase to be healthy..." -until curl -s http://localhost:54321/rest/v1/ > /dev/null; do - echo "Waiting for Supabase..." - sleep 5 -done - -echo "Supabase is ready! Starting main services..." -cd .. -docker compose up \ No newline at end of file From 9a8297c7474771b41a110dce03e6b83098f66f45 Mon Sep 17 00:00:00 2001 From: dal Date: Thu, 8 May 2025 01:24:46 -0600 Subject: [PATCH 40/43] cli with config commands --- .github/workflows/cli-release.yml | 4 +- cli/cli/src/commands/config.rs | 163 +++++++++++++++++++ cli/cli/src/commands/config_utils.rs | 230 +++++++++++++++++++++++++++ cli/cli/src/commands/mod.rs | 2 + cli/cli/src/commands/run.rs | 54 ++++--- cli/cli/src/main.rs | 3 + 6 files changed, 434 insertions(+), 22 deletions(-) create mode 100644 cli/cli/src/commands/config.rs create mode 100644 cli/cli/src/commands/config_utils.rs diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 3c4252ea1..5ae9ad253 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -100,9 +100,9 @@ jobs: Write-Host "PostgreSQL lib path: $PG_LIB_PATH" Write-Host "PostgreSQL bin path added to PATH: $PG_BIN_PATH" - Write-Host "Contents of $PG_LIB_PATH:" + Write-Host "Contents of ${PG_LIB_PATH}:" Get-ChildItem $PG_LIB_PATH - Write-Host "Contents of $PG_BIN_PATH:" + Write-Host "Contents of ${PG_BIN_PATH}:" Get-ChildItem $PG_BIN_PATH Write-Host "Updated RUSTFLAGS: $env:RUSTFLAGS" Write-Host "Updated PATH: $env:PATH" diff --git a/cli/cli/src/commands/config.rs b/cli/cli/src/commands/config.rs new file mode 100644 index 000000000..8a75777d4 --- /dev/null +++ b/cli/cli/src/commands/config.rs @@ -0,0 +1,163 @@ +use crate::error::BusterError; +use dirs; +use std::fs; +use std::path::PathBuf; +use crate::commands::config_utils; + +async fn get_app_base_dir() -> Result { + let home_dir = dirs::home_dir().ok_or_else(|| { + BusterError::CommandError( + "Failed to get home directory. Cannot determine app directory for config reset.".to_string(), + ) + })?; + Ok(home_dir.join(".buster")) +} + +pub async fn reset_llm_settings() -> Result<(), BusterError> { + let app_base_dir = get_app_base_dir().await?; + + println!("Resetting LLM and Reranker configurations..."); + + let files_to_delete = [ + ".openai_api_key", + ".reranker_provider", + ".reranker_api_key", + ".reranker_model", + ".reranker_base_url", + ]; + + let mut all_successful = true; + let mut any_deleted = false; + + for file_name in files_to_delete.iter() { + let file_path = app_base_dir.join(file_name); + if file_path.exists() { + match fs::remove_file(&file_path) { + Ok(_) => { + println!("Successfully deleted {}", file_path.display()); + any_deleted = true; + } + Err(e) => { + eprintln!("Failed to delete {}: {}. 
Please remove it manually.", file_path.display(), e); + all_successful = false; + } + } + } + } + + if !any_deleted && all_successful { + println!("No cached LLM or Reranker configurations found to reset."); + } else if all_successful { + println!("LLM and Reranker configurations have been reset successfully."); + println!("You will be prompted to enter them again on the next relevant command (e.g., buster start)."); + } else { + println!("Some configurations could not be automatically reset. Please check messages above."); + } + + Ok(()) +} + +// Function to get current LLM API key (from cache or .env as a fallback display) +fn get_current_llm_api_key_display(app_base_dir: &PathBuf) -> Result { + match config_utils::get_cached_value(app_base_dir, ".openai_api_key")? { + Some(key) => Ok(if key.len() > 4 { format!("...{}", &key[key.len()-4..]) } else { "****".to_string() }), + None => Ok("Not set".to_string()), // Or try to read from .env if complex display needed + } +} + +// Function to get current Reranker config display (from cache or .env) +fn get_current_reranker_config_display(app_base_dir: &PathBuf) -> Result { + let provider = config_utils::get_cached_value(app_base_dir, ".reranker_provider")?; + let model = config_utils::get_cached_value(app_base_dir, ".reranker_model")?; + if let (Some(p), Some(m)) = (provider, model) { + Ok(format!("Provider: {}, Model: {}", p, m)) + } else { + Ok("Not fully set".to_string()) + } +} + +pub async fn manage_settings_interactive() -> Result<(), BusterError> { + let app_base_dir = config_utils::get_app_base_dir().map_err(|e| { + BusterError::CommandError(format!("Failed to get app base directory: {}", e)) + })?; + let target_dotenv_path = app_base_dir.join(".env"); + + println!("--- Buster Interactive Configuration ---"); + + // Manage OpenAI API Key + let current_llm_key_display = get_current_llm_api_key_display(&app_base_dir)?; + let update_llm = config_utils::prompt_for_input( + &format!("Current OpenAI API Key: {}. Update? (y/n)", current_llm_key_display), + Some("n"), + false + )?.to_lowercase(); + + let mut llm_api_key_to_set: Option = None; + if update_llm == "y" { + // Call with force_prompt = true, but the function itself will ask for confirmation if a key exists + // For a cleaner flow here, we handle the top-level decision to update. + let new_key = config_utils::prompt_for_input("Enter new OpenAI API Key:", None, true)?; + config_utils::cache_value(&app_base_dir, ".openai_api_key", &new_key)?; + llm_api_key_to_set = Some(new_key); + println!("OpenAI API Key updated and cached."); + } else { + // If not updating, we still need the current key for .env update + llm_api_key_to_set = config_utils::get_cached_value(&app_base_dir, ".openai_api_key")?; + } + + // Manage Reranker Settings + let current_reranker_display = get_current_reranker_config_display(&app_base_dir)?; + let update_reranker = config_utils::prompt_for_input( + &format!("Current Reranker settings: {}. Update? 
(y/n)", current_reranker_display), + Some("n"), + false + )?.to_lowercase(); + + let mut reranker_config_to_set: Option = None; + if update_reranker == "y" { + // This function internally handles its own detailed prompting flow + let new_reranker_config = config_utils::prompt_and_manage_reranker_settings(&app_base_dir, true)?; + reranker_config_to_set = Some(new_reranker_config); + println!("Reranker settings updated and cached."); + } else { + // If not updating, get current cached values for .env update + let p = config_utils::get_cached_value(&app_base_dir, ".reranker_provider")?; + let k = config_utils::get_cached_value(&app_base_dir, ".reranker_api_key")?; + let m = config_utils::get_cached_value(&app_base_dir, ".reranker_model")?; + let u = config_utils::get_cached_value(&app_base_dir, ".reranker_base_url")?; + if let (Some(provider), Some(api_key), Some(model), Some(base_url)) = (p,k,m,u) { + reranker_config_to_set = Some(config_utils::RerankerConfig { provider, api_key, model, base_url }); + } + } + + // Update .env file with the (potentially new) settings + // We need to ensure we have values for all fields update_env_file expects, + // even if only some were updated in this session. + let final_llm_api_key = llm_api_key_to_set.clone(); + + let final_rerank_api_key = reranker_config_to_set.as_ref().map(|c| c.api_key.clone()); + let final_rerank_model = reranker_config_to_set.as_ref().map(|c| c.model.clone()); + let final_rerank_base_url = reranker_config_to_set.as_ref().map(|c| c.base_url.clone()); + + // Default LLM_BASE_URL if not set (important if .env is created from scratch) + // The update_env_file function also has a fallback for this. + let llm_base_url_default = "https://api.openai.com/v1".to_string(); + let current_llm_base_url = if target_dotenv_path.exists() { + let env_content = std::fs::read_to_string(&target_dotenv_path).map_err(|e| { + BusterError::CommandError(format!("Failed to read .env file: {}", e)) + })?; + env_content.lines().find(|line| line.starts_with("LLM_BASE_URL=")).map_or(None, |line| line.split_once('=').map(|(_,v)| v.trim_matches('"').to_string())) + } else { None }; + + config_utils::update_env_file( + &target_dotenv_path, + final_llm_api_key.as_deref(), + final_rerank_api_key.as_deref(), + final_rerank_model.as_deref(), + final_rerank_base_url.as_deref(), + current_llm_base_url.as_deref().or(Some(&llm_base_url_default)) // Ensure LLM_BASE_URL is present + )?; + + println!("Configuration saved to {}.", target_dotenv_path.display()); + Ok(()) +} \ No newline at end of file diff --git a/cli/cli/src/commands/config_utils.rs b/cli/cli/src/commands/config_utils.rs new file mode 100644 index 000000000..0c37ebe74 --- /dev/null +++ b/cli/cli/src/commands/config_utils.rs @@ -0,0 +1,230 @@ +use crate::error::BusterError; +use dirs; +use std::fs; +use std::io::{self, Write}; +use std::path::{Path, PathBuf}; + +// Moved from run.rs +pub fn prompt_for_input(prompt_message: &str, default_value: Option<&str>, is_sensitive: bool) -> Result { + if let Some(def_val) = default_value { + print!("{} (default: {}): ", prompt_message, def_val); + } else { + print!("{}: ", prompt_message); + } + io::stdout().flush().map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?; + + let mut input = String::new(); + // Simple masking for sensitive input is complex in raw terminal io without extra crates. + // For a real CLI, rpassword or similar would be used. + // Here, we just read the line. 
+ io::stdin().read_line(&mut input).map_err(|e| BusterError::CommandError(format!("Failed to read line: {}", e)))?; + let trimmed_input = input.trim().to_string(); + + if trimmed_input.is_empty() { + if let Some(def_val) = default_value { + Ok(def_val.to_string()) + } else { + println!("Input cannot be empty. Please try again."); + prompt_for_input(prompt_message, default_value, is_sensitive) // Recurse + } + } else { + Ok(trimmed_input) + } +} + +pub fn get_app_base_dir() -> Result { + dirs::home_dir() + .map(|home| home.join(".buster")) + .ok_or_else(|| BusterError::CommandError("Failed to get home directory.".to_string())) +} + +pub fn get_cached_value(app_base_dir: &Path, cache_file_name: &str) -> Result, BusterError> { + let cache_file_path = app_base_dir.join(cache_file_name); + if cache_file_path.exists() { + fs::read_to_string(cache_file_path) + .map(|val| Some(val.trim().to_string())) + .map_err(|e| BusterError::CommandError(format!("Failed to read cached file {}: {}", cache_file_name, e))) + } else { + Ok(None) + } +} + +pub fn cache_value(app_base_dir: &Path, cache_file_name: &str, value: &str) -> Result<(), BusterError> { + let cache_file_path = app_base_dir.join(cache_file_name); + fs::create_dir_all(app_base_dir).map_err(|e| BusterError::CommandError(format!("Failed to create app base dir {}: {}", app_base_dir.display(), e)))?; + fs::write(cache_file_path, value) + .map_err(|e| BusterError::CommandError(format!("Failed to cache value to {}: {}", cache_file_name, e))) +} + +pub fn update_env_file( + target_dotenv_path: &Path, + llm_api_key: Option<&str>, + rerank_api_key: Option<&str>, + rerank_model: Option<&str>, + rerank_base_url: Option<&str>, + llm_base_url: Option<&str> // Added for completeness, though not prompted by user yet +) -> Result<(), BusterError> { + let mut new_env_lines: Vec = Vec::new(); + let mut llm_key_updated = false; + let mut rerank_key_updated = false; + let mut rerank_model_updated = false; + let mut rerank_base_updated = false; + let mut llm_base_updated = false; + + if target_dotenv_path.exists() { + let env_content = fs::read_to_string(target_dotenv_path).map_err(|e| { + BusterError::CommandError(format!("Failed to read .env file at {}: {}", target_dotenv_path.display(), e)) + })?; + + for line in env_content.lines() { + if line.starts_with("LLM_API_KEY=") && llm_api_key.is_some() { + new_env_lines.push(format!("LLM_API_KEY=\"{}\"", llm_api_key.unwrap())); + llm_key_updated = true; + } else if line.starts_with("RERANK_API_KEY=") && rerank_api_key.is_some() { + new_env_lines.push(format!("RERANK_API_KEY=\"{}\"", rerank_api_key.unwrap())); + rerank_key_updated = true; + } else if line.starts_with("RERANK_MODEL=") && rerank_model.is_some() { + new_env_lines.push(format!("RERANK_MODEL=\"{}\"", rerank_model.unwrap())); + rerank_model_updated = true; + } else if line.starts_with("RERANK_BASE_URL=") && rerank_base_url.is_some() { + new_env_lines.push(format!("RERANK_BASE_URL=\"{}\"", rerank_base_url.unwrap())); + rerank_base_updated = true; + } else if line.starts_with("LLM_BASE_URL=") && llm_base_url.is_some() { + new_env_lines.push(format!("LLM_BASE_URL=\"{}\"", llm_base_url.unwrap())); + llm_base_updated = true; + } else { + new_env_lines.push(line.to_string()); + } + } + } + + // Add any keys that were not found and updated, if new values are provided + if !llm_key_updated && llm_api_key.is_some() { + new_env_lines.push(format!("LLM_API_KEY=\"{}\"", llm_api_key.unwrap())); + } + if !rerank_key_updated && rerank_api_key.is_some() { + 
new_env_lines.push(format!("RERANK_API_KEY=\"{}\"", rerank_api_key.unwrap())); + } + if !rerank_model_updated && rerank_model.is_some() { + new_env_lines.push(format!("RERANK_MODEL=\"{}\"", rerank_model.unwrap())); + } + if !rerank_base_updated && rerank_base_url.is_some() { + new_env_lines.push(format!("RERANK_BASE_URL=\"{}\"", rerank_base_url.unwrap())); + } + if !llm_base_updated && llm_base_url.is_some() { + new_env_lines.push(format!("LLM_BASE_URL=\"{}\"", llm_base_url.unwrap())); + } else if !llm_base_updated && llm_base_url.is_none() && !target_dotenv_path.exists() { + // Ensure default LLM_BASE_URL if .env is being created from scratch and no override provided + new_env_lines.push("LLM_BASE_URL=\"https://api.openai.com/v1\"".to_string()); + } + + fs::write(target_dotenv_path, new_env_lines.join("\n")).map_err(|e| { + BusterError::CommandError(format!("Failed to write updated .env file to {}: {}", target_dotenv_path.display(), e)) + }) +} + +pub fn prompt_and_manage_openai_api_key(app_base_dir: &Path, force_prompt: bool) -> Result { + let cache_file = ".openai_api_key"; + let mut current_key = get_cached_value(app_base_dir, cache_file)?; + + if force_prompt || current_key.is_none() { + if current_key.is_some() { + let key_display = current_key.as_ref().map_or("", |k| if k.len() > 4 { &k[k.len()-4..] } else { "****" }); + let update_choice = prompt_for_input(&format!("Current OpenAI API key ends with ...{}. Update? (y/n)", key_display), Some("n"), false)?.to_lowercase(); + if update_choice != "y" { + return Ok(current_key.unwrap()); + } + } + let new_key = prompt_for_input("Enter your OpenAI API Key:", None, true)?; + cache_value(app_base_dir, cache_file, &new_key)?; + current_key = Some(new_key); + } + current_key.ok_or_else(|| BusterError::CommandError("OpenAI API Key setup failed.".to_string())) +} + +pub struct RerankerConfig { + pub provider: String, + pub api_key: String, + pub model: String, + pub base_url: String, +} + +pub fn prompt_and_manage_reranker_settings(app_base_dir: &Path, force_prompt: bool) -> Result { + let provider_cache = ".reranker_provider"; + let key_cache = ".reranker_api_key"; + let model_cache = ".reranker_model"; + let url_cache = ".reranker_base_url"; + + let mut current_provider = get_cached_value(app_base_dir, provider_cache)?; + let mut current_key = get_cached_value(app_base_dir, key_cache)?; + let mut current_model = get_cached_value(app_base_dir, model_cache)?; + let mut current_url = get_cached_value(app_base_dir, url_cache)?; + + let mut needs_update = force_prompt; + if !needs_update && (current_provider.is_none() || current_key.is_none() || current_model.is_none() || current_url.is_none()) { + needs_update = true; // If any part is missing, force update flow for initial setup + } + + if needs_update { + if !force_prompt && current_provider.is_some() && current_model.is_some() { // Already prompted if force_prompt is true + let update_choice = prompt_for_input(&format!("Current Reranker: {} (Model: {}). Update settings? 
(y/n)", current_provider.as_ref().unwrap_or(&"N/A".to_string()), current_model.as_ref().unwrap_or(&"N/A".to_string())), Some("n"), false)?.to_lowercase(); + if update_choice != "y" && current_provider.is_some() && current_key.is_some() && current_model.is_some() && current_url.is_some(){ + return Ok(RerankerConfig { + provider: current_provider.unwrap(), + api_key: current_key.unwrap(), + model: current_model.unwrap(), + base_url: current_url.unwrap(), + }); + } + } else if force_prompt && current_provider.is_some() && current_model.is_some() { + let update_choice = prompt_for_input(&format!("Current Reranker: {} (Model: {}). Update settings? (y/n)", current_provider.as_ref().unwrap_or(&"N/A".to_string()), current_model.as_ref().unwrap_or(&"N/A".to_string())), Some("n"), false)?.to_lowercase(); + if update_choice != "y" && current_provider.is_some() && current_key.is_some() && current_model.is_some() && current_url.is_some(){ + return Ok(RerankerConfig { + provider: current_provider.unwrap(), + api_key: current_key.unwrap(), + model: current_model.unwrap(), + base_url: current_url.unwrap(), + }); + } + } + + println!("--- Reranker Setup ---"); + println!("Choose your reranker provider:"); + println!("1: Cohere"); + println!("2: Mixedbread"); + println!("3: Jina"); + let provider_choice = loop { + match prompt_for_input("Enter choice (1-3):", Some("1"), false)?.parse::() { + Ok(choice @ 1..=3) => break choice, + _ => println!("Invalid choice. Please enter a number between 1 and 3."), + } + }; + + let (new_provider, default_model, default_url) = match provider_choice { + 1 => ("Cohere", "rerank-english-v3.0", "https://api.cohere.com/v1/rerank"), // user asked for v3.5 but official docs say v3.0 for rerank model + 2 => ("Mixedbread", "mixedbread-ai/mxbai-rerank-xsmall-v1", "https://api.mixedbread.ai/v1/reranking"), + 3 => ("Jina", "jina-reranker-v1-base-en", "https://api.jina.ai/v1/rerank"), + _ => unreachable!(), + }; + + let new_key_val = prompt_for_input(&format!("Enter your {} API Key:", new_provider), None, true)?; + let new_model_val = prompt_for_input(&format!("Enter {} model name:", new_provider), Some(default_model), false)?; + let new_url_val = prompt_for_input(&format!("Enter {} rerank base URL:", new_provider), Some(default_url), false)?; + + cache_value(app_base_dir, provider_cache, new_provider)?; + cache_value(app_base_dir, key_cache, &new_key_val)?; + cache_value(app_base_dir, model_cache, &new_model_val)?; + cache_value(app_base_dir, url_cache, &new_url_val)?; + + current_provider = Some(new_provider.to_string()); + current_key = Some(new_key_val); + current_model = Some(new_model_val); + current_url = Some(new_url_val); + } + + if let (Some(prov), Some(key), Some(model), Some(url)) = (current_provider, current_key, current_model, current_url) { + Ok(RerankerConfig { provider: prov, api_key: key, model, base_url: url }) + } else { + Err(BusterError::CommandError("Reranker configuration setup failed. 
Some values are missing.".to_string())) + } +} \ No newline at end of file diff --git a/cli/cli/src/commands/mod.rs b/cli/cli/src/commands/mod.rs index 99c423026..3772f9564 100644 --- a/cli/cli/src/commands/mod.rs +++ b/cli/cli/src/commands/mod.rs @@ -1,4 +1,6 @@ pub mod auth; +pub mod config; +pub mod config_utils; pub mod deploy; pub mod generate; pub mod init; diff --git a/cli/cli/src/commands/run.rs b/cli/cli/src/commands/run.rs index e919832df..dfdfdbebe 100644 --- a/cli/cli/src/commands/run.rs +++ b/cli/cli/src/commands/run.rs @@ -1,3 +1,4 @@ +use crate::commands::config_utils; use crate::error::BusterError; use dirs; use indicatif::{ProgressBar, ProgressStyle}; @@ -22,12 +23,9 @@ use std::time::Duration; struct StaticAssets; async fn setup_persistent_app_environment() -> Result { - let home_dir = dirs::home_dir().ok_or_else(|| { - BusterError::CommandError( - "Failed to get home directory. Cannot set up persistent app path.".to_string(), - ) + let app_base_dir = config_utils::get_app_base_dir().map_err(|e| { + BusterError::CommandError(format!("Failed to get app base directory: {}", e)) })?; - let app_base_dir = home_dir.join(".buster"); fs::create_dir_all(&app_base_dir).map_err(|e| { BusterError::CommandError(format!( @@ -74,24 +72,32 @@ async fn setup_persistent_app_environment() -> Result { let target_dotenv_path = app_base_dir.join(".env"); - // Always use .env.example from embedded assets - let example_env_filename = "supabase/.env.example"; - let asset = StaticAssets::get(example_env_filename).ok_or_else(|| { - BusterError::CommandError(format!( - "Failed to get embedded asset: {}", - example_env_filename - )) - })?; + // --- BEGIN API Key and Reranker Setup using config_utils --- + println!("--- Buster Configuration Setup ---"); - fs::write(&target_dotenv_path, asset.data).map_err(|e| { + let llm_api_key = config_utils::prompt_and_manage_openai_api_key(&app_base_dir, false)?; + let reranker_config = config_utils::prompt_and_manage_reranker_settings(&app_base_dir, false)?; + + // Update .env file + config_utils::update_env_file( + &target_dotenv_path, + Some(&llm_api_key), + Some(&reranker_config.api_key), + Some(&reranker_config.model), + Some(&reranker_config.base_url), + None, // Not prompting for LLM_BASE_URL in this flow yet, example has it. 
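+        // Note: passing None here leaves any existing LLM_BASE_URL line in the
+        // .env untouched; update_env_file only falls back to the default
+        // https://api.openai.com/v1 when it has to create the file from scratch.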
+ ) + .map_err(|e| { BusterError::CommandError(format!( - "Failed to write {} to {}: {}", - example_env_filename, + "Failed to ensure .env file configurations in {}: {}", target_dotenv_path.display(), e )) })?; + println!("--- Configuration Setup Complete ---"); + // --- END API Key and Reranker Setup using config_utils --- + // Additionally copy the .env to the supabase subdirectory let supabase_dotenv_path = app_base_dir.join("supabase/.env"); fs::copy(&target_dotenv_path, &supabase_dotenv_path).map_err(|e| { @@ -227,7 +233,7 @@ pub async fn reset() -> Result<(), BusterError> { .template("{spinner:.blue} {msg}") .expect("Failed to set progress bar style"), ); - + // Step 1: Stop services pb.set_message("Resetting Buster services (step 1/4): Stopping services..."); @@ -283,7 +289,10 @@ Stderr: e )) })?; - pb.println(format!("Successfully cleared and recreated database volume: {}", db_volume_path.display())); + pb.println(format!( + "Successfully cleared and recreated database volume: {}", + db_volume_path.display() + )); if storage_volume_path.exists() { fs::remove_dir_all(&storage_volume_path).map_err(|e| { @@ -301,7 +310,10 @@ Stderr: e )) })?; - pb.println(format!("Successfully cleared and recreated storage volume: {}", storage_volume_path.display())); + pb.println(format!( + "Successfully cleared and recreated storage volume: {}", + storage_volume_path.display() + )); // Step 3: Identify service images pb.set_message("Resetting Buster services (step 3/4): Identifying service images..."); @@ -393,6 +405,8 @@ Stderr: } } - pb.finish_with_message("Buster services stopped, volumes cleared, and images removed successfully."); + pb.finish_with_message( + "Buster services stopped, volumes cleared, and images removed successfully.", + ); Ok(()) } diff --git a/cli/cli/src/main.rs b/cli/cli/src/main.rs index 182f3a82f..45704fe2a 100644 --- a/cli/cli/src/main.rs +++ b/cli/cli/src/main.rs @@ -80,6 +80,8 @@ pub enum Commands { #[arg(long)] path: Option, }, + /// Interactively manage LLM and Reranker configurations + Config, /// Start the Buster services Start, /// Stop the Buster services @@ -142,6 +144,7 @@ async fn main() { target_semantic_file, } => commands::generate::generate_semantic_models_command(path, target_semantic_file).await, Commands::Parse { path } => commands::parse::parse_models_command(path).await, + Commands::Config => commands::config::manage_settings_interactive().await.map_err(anyhow::Error::from), Commands::Start => run::start().await.map_err(anyhow::Error::from), Commands::Stop => run::stop().await.map_err(anyhow::Error::from), Commands::Reset => run::reset().await.map_err(anyhow::Error::from), From 415b964930b65fd7305462e8d262ee4854dc3f78 Mon Sep 17 00:00:00 2001 From: dal Date: Thu, 8 May 2025 01:34:55 -0600 Subject: [PATCH 41/43] ok remove windows and test --- .github/workflows/cli-release.yml | 64 +++++++------------------------ 1 file changed, 14 insertions(+), 50 deletions(-) diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 5ae9ad253..529c9d7ae 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -31,10 +31,10 @@ jobs: target: aarch64-apple-darwin artifact_name: buster-cli-darwin-arm64.tar.gz use_tar: true - - os: windows-latest - target: x86_64-pc-windows-msvc - artifact_name: buster-cli-windows-x86_64.zip - use_tar: false + # - os: windows-latest + # target: x86_64-pc-windows-msvc + # artifact_name: buster-cli-windows-x86_64.zip + # use_tar: false runs-on: ${{ matrix.os }} steps: - name: 
Checkout code @@ -71,42 +71,6 @@ jobs: env: HOMEBREW_NO_INSTALL_CLEANUP: 1 # Recommended for CI to speed up - - name: Install libpq (Windows) - if: runner.os == 'Windows' - shell: pwsh - run: | - $ErrorActionPreference = "Stop" - $ProgressPreference = "SilentlyContinue" # Optional: for cleaner logs - - $PG_VERSION = "16.2.0" # Specify the desired PostgreSQL version - $PG_TARGET = "x86_64-pc-windows-msvc" - $PG_ARCHIVE_NAME = "postgresql-${PG_VERSION}-${PG_TARGET}.tar.gz" - $PG_DOWNLOAD_URL = "https://github.com/theseus-rs/postgresql-binaries/releases/download/v${PG_VERSION}/${PG_ARCHIVE_NAME}" - $PG_INSTALL_DIR = "C:/pgsql" - - Write-Host "Downloading PostgreSQL binaries from $PG_DOWNLOAD_URL" - Invoke-WebRequest -Uri $PG_DOWNLOAD_URL -OutFile $PG_ARCHIVE_NAME - - Write-Host "Extracting PostgreSQL binaries to $PG_INSTALL_DIR" - New-Item -ItemType Directory -Force -Path $PG_INSTALL_DIR - tar -xzf $PG_ARCHIVE_NAME -C $PG_INSTALL_DIR --strip-components=1 # Assumes archive has a top-level dir - - Write-Host "Setting up environment variables for libpq" - $PG_LIB_PATH = Join-Path $PG_INSTALL_DIR "lib" - $PG_BIN_PATH = Join-Path $PG_INSTALL_DIR "bin" - - echo "RUSTFLAGS=-L ${PG_LIB_PATH}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append - echo "PATH=${PG_BIN_PATH};${env:PATH}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append - - Write-Host "PostgreSQL lib path: $PG_LIB_PATH" - Write-Host "PostgreSQL bin path added to PATH: $PG_BIN_PATH" - Write-Host "Contents of ${PG_LIB_PATH}:" - Get-ChildItem $PG_LIB_PATH - Write-Host "Contents of ${PG_BIN_PATH}:" - Get-ChildItem $PG_BIN_PATH - Write-Host "Updated RUSTFLAGS: $env:RUSTFLAGS" - Write-Host "Updated PATH: $env:PATH" - - name: Configure Cargo for optimized build run: | mkdir -p .cargo @@ -131,14 +95,14 @@ jobs: else sha256sum ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 fi - - name: Compress binary (Windows) - if: matrix.use_tar == false - working-directory: ./cli - shell: pwsh - run: | - cd target/${{ matrix.target }}/release - Compress-Archive -Path buster-cli.exe -DestinationPath ${{ matrix.artifact_name }} - Get-FileHash -Algorithm SHA256 ${{ matrix.artifact_name }} | Select-Object -ExpandProperty Hash > ${{ matrix.artifact_name }}.sha256 + # - name: Compress binary (Windows) + # if: matrix.use_tar == false + # working-directory: ./cli + # shell: pwsh + # run: | + # cd target/${{ matrix.target }}/release + # Compress-Archive -Path buster-cli.exe -DestinationPath ${{ matrix.artifact_name }} + # Get-FileHash -Algorithm SHA256 ${{ matrix.artifact_name }} | Select-Object -ExpandProperty Hash > ${{ matrix.artifact_name }}.sha256 - name: Upload artifacts uses: actions/upload-artifact@v4 with: @@ -178,8 +142,8 @@ jobs: **/buster-cli-darwin-x86_64.tar.gz.sha256 **/buster-cli-darwin-arm64.tar.gz **/buster-cli-darwin-arm64.tar.gz.sha256 - **/buster-cli-windows-x86_64.zip - **/buster-cli-windows-x86_64.zip.sha256 + # **/buster-cli-windows-x86_64.zip + # **/buster-cli-windows-x86_64.zip.sha256 draft: false prerelease: false generate_release_notes: true From 358fa304b22baa68e7e3ae0f93038502288da328 Mon Sep 17 00:00:00 2001 From: dal Date: Thu, 8 May 2025 01:45:21 -0600 Subject: [PATCH 42/43] deploy fix with model name --- cli/cli/src/commands/deploy/deploy.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cli/cli/src/commands/deploy/deploy.rs b/cli/cli/src/commands/deploy/deploy.rs index b18721341..77c52d215 100644 --- a/cli/cli/src/commands/deploy/deploy.rs +++ 
b/cli/cli/src/commands/deploy/deploy.rs
@@ -411,7 +411,7 @@ fn to_deploy_request(model: &Model, sql_content: String) -> DeployDatasetsReques
         env: "dev".to_string(), // Assuming "dev" environment for now, might need configuration
         type_: "view".to_string(), // Assuming models are deployed as views, might need configuration
         name: model.name.clone(),
-        model: None, // This seems to be for a different kind of model (e.g. Python model), not semantic layer model name itself
+        model: Some(model.name.clone()), // Use the model's name for the 'model' field
         schema,
         database: model.database.clone(),
         description: model.description.clone().unwrap_or_default(),
@@ -639,7 +639,7 @@ pub async fn deploy(path: Option<&str>, dry_run: bool, recursive: bool) -> Resul
     match client.deploy_datasets(deploy_requests_final).await {
         Ok(response) => handle_deploy_response(&response, &mut result, &model_mappings_final, &progress),
         Err(e) => {
-            eprintln!("❌ Critical error during deployment API call: {}", e.to_string().red());
+            eprintln!("❌ Critical error during deployment API call: {}\nDetailed error: {:?}", e.to_string().red(), e);
             // Populate failures for all models that were attempted if a general API error occurs
             for mapping in model_mappings_final {
                 result.failures.push((

From f08ef35270b301fa64bf1af6377478255db4cee2 Mon Sep 17 00:00:00 2001
From: dal
Date: Thu, 8 May 2025 01:55:58 -0600
Subject: [PATCH 43/43] cli release for homebrew deploy, backwards compatibility on model types

---
 .github/workflows/cli-release.yml            |   4 +
 api/libs/agents/Cargo.toml                   |   1 +
 .../agents/src/agents/buster_multi_agent.rs  |  35 +--
 .../file_tools/search_data_catalog.rs        | 211 +++++++++++-------
 web/package-lock.json                        |   4 +-
 5 files changed, 155 insertions(+), 100 deletions(-)

diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml
index 529c9d7ae..3f02a5b6e 100644
--- a/.github/workflows/cli-release.yml
+++ b/.github/workflows/cli-release.yml
@@ -115,6 +115,9 @@ jobs:
   release:
     needs: build
     runs-on: ubuntu-latest
+    outputs:
+      cli_version: ${{ steps.get_version.outputs.version }}
+      cli_tag_name: ${{ steps.create_the_release.outputs.tag_name }}
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
@@ -131,6 +134,7 @@ jobs:
           echo "version=$VERSION" >> $GITHUB_OUTPUT
           echo "Extracted version: $VERSION"
       - name: Create Release
+        id: create_the_release
         uses: softprops/action-gh-release@v1
         with:
           tag_name: v${{ steps.get_version.outputs.version }}
diff --git a/api/libs/agents/Cargo.toml b/api/libs/agents/Cargo.toml
index 5b029498d..25febaf8c 100644
--- a/api/libs/agents/Cargo.toml
+++ b/api/libs/agents/Cargo.toml
@@ -37,6 +37,7 @@ thiserror = { workspace = true }
 raindrop = { path = "../raindrop" }
 sql_analyzer = { path = "../sql_analyzer" }
 rerank = { path = "../rerank" }
+semantic_layer = { path = "../semantic_layer" }

 # Development dependencies
 [dev-dependencies]
diff --git a/api/libs/agents/src/agents/buster_multi_agent.rs b/api/libs/agents/src/agents/buster_multi_agent.rs
index c15a13a1a..16d380d16 100644
--- a/api/libs/agents/src/agents/buster_multi_agent.rs
+++ b/api/libs/agents/src/agents/buster_multi_agent.rs
@@ -24,6 +24,9 @@ use crate::{agent::ModeProvider, Agent, AgentError, AgentExt, AgentThread}; // A

 use litellm::AgentMessage;

+// Import the semantic layer models
+use semantic_layer::models::SemanticLayerSpec; // Assuming models.rs is accessible like this
+
 // Import AgentState and determine_agent_state (assuming they are pub in modes/mod.rs or similar)
 // If not, they might need to be moved or re-exported.
// For now, let's assume they are accessible via crate::agents::modes::{AgentState, determine_agent_state} @@ -35,6 +38,7 @@ pub struct BusterSuperAgentOutput { pub duration: i64, pub thread_id: Uuid, pub messages: Vec, + pub message_id: Option, } #[derive(Debug, Deserialize, Serialize)] @@ -115,16 +119,16 @@ impl DatasetWithDescriptions { } // Define structs for YAML parsing -#[derive(Debug, Deserialize)] -struct YamlRoot { - models: Vec, -} +// #[derive(Debug, Deserialize)] +// struct YamlRoot { +// models: Vec, +// } -#[derive(Debug, Deserialize)] -struct ModelInfo { - name: String, - description: String, -} +// #[derive(Debug, Deserialize)] +// struct ModelInfo { +// name: String, +// description: String, +// } impl BusterMultiAgent { pub async fn new(user_id: Uuid, session_id: Uuid, is_follow_up: bool) -> Result { @@ -136,14 +140,19 @@ impl BusterMultiAgent { let dataset_descriptions: Vec = permissioned_datasets .into_iter() .filter_map(|ds| ds.yml_content) // Get Some(String), filter out None - .map(|content| serde_yaml::from_str::(&content)) // Parse String -> Result + .map(|content| serde_yaml::from_str::(&content)) // Parse String -> Result .filter_map(|result| { // Handle Result match result { - Ok(parsed_root) => { + Ok(parsed_spec) => { // Extract info from the first model if available - if let Some(model) = parsed_root.models.first() { - Some(format!("{}: {}", model.name, model.description)) + if let Some(model) = parsed_spec.models.first() { + // model.description is Option, handle it + let description = model + .description + .as_deref() + .unwrap_or("No description available"); + Some(format!("{}: {}", model.name, description)) } else { tracing::warn!("Parsed YAML has no models"); None diff --git a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs index f002f2dfe..ea6b8b5a8 100644 --- a/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs +++ b/api/libs/agents/src/tools/categories/file_tools/search_data_catalog.rs @@ -29,6 +29,9 @@ use sqlx::PgPool; use stored_values; use rerank::Reranker; +// Import SemanticLayerSpec +use semantic_layer::models::SemanticLayerSpec; + use crate::{agent::Agent, tools::ToolExecutor}; // NEW: Structure to represent found values with their source information @@ -1173,115 +1176,153 @@ async fn generate_embeddings_batch(texts: Vec) -> Result Result> { - let yaml: serde_yaml::Value = serde_yaml::from_str(yml_content) - .context("Failed to parse dataset YAML content")?; - let mut searchable_dimensions = Vec::new(); - - // Check if models field exists - if let Some(models) = yaml["models"].as_sequence() { - for model in models { - let model_name = model["name"].as_str().unwrap_or("unknown_model").to_string(); - - // Check if dimensions field exists - if let Some(dimensions) = model["dimensions"].as_sequence() { - for dimension in dimensions { - // Check if dimension has searchable: true - if let Some(true) = dimension["searchable"].as_bool() { - let dimension_name = dimension["name"].as_str().unwrap_or("unknown_dimension").to_string(); - - // Store this dimension as searchable + + // Try parsing with SemanticLayerSpec first + match serde_yaml::from_str::(yml_content) { + Ok(spec) => { + debug!("Successfully parsed yml_content with SemanticLayerSpec for extract_searchable_dimensions"); + for model in spec.models { + for dimension in model.dimensions { + if dimension.searchable { searchable_dimensions.push(SearchableDimension { - model_name: 
model_name.clone(), // Clone here to avoid move - dimension_name: dimension_name.clone(), - dimension_path: vec!["models".to_string(), model_name.clone(), "dimensions".to_string(), dimension_name], + model_name: model.name.clone(), + dimension_name: dimension.name.clone(), + // The dimension_path might need adjustment if its usage relies on the old dynamic structure. + // For now, creating a simplified path. This might need review based on how dimension_path is consumed. + dimension_path: vec!["models".to_string(), model.name.clone(), "dimensions".to_string(), dimension.name], }); } } } } + Err(e_spec) => { + warn!( + "Failed to parse yml_content with SemanticLayerSpec (error: {}), falling back to generic serde_yaml::Value for extract_searchable_dimensions. Consider updating YAML to new spec.", + e_spec + ); + // Fallback to original dynamic parsing logic + let yaml: serde_yaml::Value = serde_yaml::from_str(yml_content) + .context("Failed to parse dataset YAML content (fallback)")?; + + if let Some(models) = yaml["models"].as_sequence() { + for model_val in models { + let model_name = model_val["name"].as_str().unwrap_or("unknown_model").to_string(); + if let Some(dimensions) = model_val["dimensions"].as_sequence() { + for dimension_val in dimensions { + if let Some(true) = dimension_val["searchable"].as_bool() { + let dimension_name = dimension_val["name"].as_str().unwrap_or("unknown_dimension").to_string(); + searchable_dimensions.push(SearchableDimension { + model_name: model_name.clone(), + dimension_name: dimension_name.clone(), + dimension_path: vec!["models".to_string(), model_name.clone(), "dimensions".to_string(), dimension_name], + }); + } + } + } + } + } + } } - Ok(searchable_dimensions) } /// Extract database structure from YAML content based on actual model structure fn extract_database_info_from_yaml(yml_content: &str) -> Result>>>> { - let yaml: serde_yaml::Value = serde_yaml::from_str(yml_content) - .context("Failed to parse dataset YAML content")?; - - // Structure: database -> schema -> table -> columns - let mut database_info = HashMap::new(); - - // Process models - if let Some(models) = yaml["models"].as_sequence() { - for model in models { - // Extract database, schema, and model name (which acts as table name) - let database_name = model["database"].as_str().unwrap_or("unknown").to_string(); - let schema_name = model["schema"].as_str().unwrap_or("public").to_string(); - let table_name = model["name"].as_str().unwrap_or("unknown_model").to_string(); - - // Initialize the nested structure if needed - database_info - .entry(database_name.clone()) - .or_insert_with(HashMap::new) - .entry(schema_name.clone()) - .or_insert_with(HashMap::new); - - // Collect column names from dimensions, measures, and metrics - let mut columns = Vec::new(); - - // Add dimensions - if let Some(dimensions) = model["dimensions"].as_sequence() { - for dim in dimensions { - if let Some(dim_name) = dim["name"].as_str() { - columns.push(dim_name.to_string()); - - // Also add the expression as a potential column to search - if let Some(expr) = dim["expr"].as_str() { - if expr != dim_name { - columns.push(expr.to_string()); + let mut database_info: HashMap>>> = HashMap::new(); + + match serde_yaml::from_str::(yml_content) { + Ok(spec) => { + debug!("Successfully parsed yml_content with SemanticLayerSpec for extract_database_info_from_yaml"); + for model in spec.models { + let db_name = model.database.as_deref().unwrap_or("unknown_db").to_string(); + let sch_name = 
model.schema.as_deref().unwrap_or("unknown_schema").to_string(); + let tbl_name = model.name.clone(); // model.name is table name + + let mut columns = Vec::new(); + for dim in model.dimensions { + columns.push(dim.name); + // Assuming 'expr' is not directly a column name in SemanticLayerSpec's Dimension for this purpose. + // If dimensions can have expressions that resolve to column names, adjust here. + } + for measure in model.measures { + columns.push(measure.name); + // Assuming 'expr' is not directly a column name here either. + } + for metric in model.metrics { + columns.push(metric.name); // Metrics usually have names, expressions might be too complex for simple column list + } + + database_info + .entry(db_name) + .or_default() + .entry(sch_name) + .or_default() + .insert(tbl_name, columns); + } + } + Err(e_spec) => { + warn!( + "Failed to parse yml_content with SemanticLayerSpec (error: {}), falling back to generic serde_yaml::Value for extract_database_info_from_yaml. Consider updating YAML to new spec.", + e_spec + ); + let yaml: serde_yaml::Value = serde_yaml::from_str(yml_content) + .context("Failed to parse dataset YAML content (fallback)")?; + + if let Some(models) = yaml["models"].as_sequence() { + for model_val in models { + let database_name = model_val["database"].as_str().unwrap_or("unknown").to_string(); + let schema_name = model_val["schema"].as_str().unwrap_or("public").to_string(); + let table_name = model_val["name"].as_str().unwrap_or("unknown_model").to_string(); + + database_info + .entry(database_name.clone()) + .or_insert_with(HashMap::new) + .entry(schema_name.clone()) + .or_insert_with(HashMap::new); + + let mut columns = Vec::new(); + if let Some(dimensions) = model_val["dimensions"].as_sequence() { + for dim in dimensions { + if let Some(dim_name) = dim["name"].as_str() { + columns.push(dim_name.to_string()); + if let Some(expr) = dim["expr"].as_str() { + if expr != dim_name { + columns.push(expr.to_string()); + } + } } } } - } - } - - // Add measures - if let Some(measures) = model["measures"].as_sequence() { - for measure in measures { - if let Some(measure_name) = measure["name"].as_str() { - columns.push(measure_name.to_string()); - - // Also add the expression as a potential column to search - if let Some(expr) = measure["expr"].as_str() { - if expr != measure_name { - columns.push(expr.to_string()); + if let Some(measures) = model_val["measures"].as_sequence() { + for measure in measures { + if let Some(measure_name) = measure["name"].as_str() { + columns.push(measure_name.to_string()); + if let Some(expr) = measure["expr"].as_str() { + if expr != measure_name { + columns.push(expr.to_string()); + } + } } } } - } - } - - // Add metrics - if let Some(metrics) = model["metrics"].as_sequence() { - for metric in metrics { - if let Some(metric_name) = metric["name"].as_str() { - columns.push(metric_name.to_string()); + if let Some(metrics) = model_val["metrics"].as_sequence() { + for metric in metrics { + if let Some(metric_name) = metric["name"].as_str() { + columns.push(metric_name.to_string()); + } + } } + database_info + .get_mut(&database_name) + .unwrap() + .get_mut(&schema_name) + .unwrap() + .insert(table_name, columns); } } - - // Store columns for this model - database_info - .get_mut(&database_name) - .unwrap() - .get_mut(&schema_name) - .unwrap() - .insert(table_name, columns); } } - Ok(database_info) } diff --git a/web/package-lock.json b/web/package-lock.json index 95d9e4ddb..a333eb8ab 100644 --- a/web/package-lock.json +++ 
b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "web", - "version": "0.1.0", + "version": "0.1.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "web", - "version": "0.1.0", + "version": "0.1.1", "dependencies": { "@dnd-kit/core": "^6.3.1", "@dnd-kit/modifiers": "^9.0.0",