diff --git a/.env.example b/.env.example index ed8f8fd57..bf3ece2a7 100644 --- a/.env.example +++ b/.env.example @@ -1,13 +1,13 @@ # General Application Settings ENVIRONMENT="development" -BUSTER_URL="http://web:3000" +BUSTER_URL="http://localhost:3000" BUSTER_WH_TOKEN="buster-wh-token" # --- API Service Specific --- # Direct Database Connection (for API service and potentially others) -DATABASE_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +DATABASE_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" # Pooled Database Connection (for API service, uses Supavisor) -POOLER_URL="postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres" +POOLER_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres" # Redis Connection REDIS_URL="redis://buster-redis:6379" # Supabase Connection for API service @@ -25,7 +25,8 @@ LLM_BASE_URL="http://litellm:4001" # --- Web Client (Next.js) Specific --- NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api) NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web) -NEXT_PUBLIC_SUPABASE_URL="http://localhost:8000" # External URL for Supabase (Kong proxy) +NEXT_PUBLIC_SUPABASE_URL="http://kong:8000" # Supabase URL (Kong proxy) — NOTE(review): "kong" only resolves inside the Docker network; a browser-facing NEXT_PUBLIC_ value usually needs an externally reachable URL such as http://localhost:8000 — confirm +NEXT_PUBLIC_WS_URL="ws://localhost:3001" NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE" NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q" diff 
--git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml index 9c9caa3b6..748cd6296 100644 --- a/.github/workflows/cli-release.yml +++ b/.github/workflows/cli-release.yml @@ -2,17 +2,20 @@ name: CLI Release on: push: - tags: - - 'cli/v*' # Trigger on tags like cli/v1.2.3 + branches: + - main + paths: + - 'cli/**' + - '.github/workflows/cli-release.yml' + workflow_dispatch: # Add permissions for creating releases permissions: contents: write - # pull-requests: write # Not typically needed for a tag-triggered release workflow + pull-requests: write jobs: build: - # No specific if condition needed here based on event, tag push is the trigger strategy: matrix: include: @@ -34,11 +37,10 @@ jobs: use_tar: false runs-on: ${{ matrix.os }} steps: - - name: Checkout code at the specific tag + - name: Checkout code uses: actions/checkout@v4 with: - ref: ${{ github.ref }} # Checks out the specific tag that triggered the workflow - fetch-depth: 0 # Useful for some build processes or if release notes need history + fetch-depth: 0 - name: Install Rust uses: actions-rs/toolchain@v1 @@ -51,6 +53,24 @@ jobs: - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2 + - name: Install libpq (macOS and Linux) + if: runner.os != 'Windows' + run: | + if [[ "${{ runner.os }}" == "macOS" ]]; then + brew install libpq + echo "PKG_CONFIG_PATH=$(brew --prefix libpq)/lib/pkgconfig" >> $GITHUB_ENV + echo "LIBRARY_PATH=$(brew --prefix libpq)/lib" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(brew --prefix libpq)/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV + # For macOS, we might need to explicitly tell rustc where to find the library. 
+ # Adding common libpq paths to rustflags + echo "RUSTFLAGS=-L $(brew --prefix libpq)/lib" >> $GITHUB_ENV + elif [[ "${{ runner.os }}" == "Linux" ]]; then + sudo apt-get update -y + sudo apt-get install -y libpq-dev + fi + env: + HOMEBREW_NO_INSTALL_CLEANUP: 1 # Recommended for CI to speed up + - name: Configure Cargo for optimized build run: | mkdir -p .cargo @@ -60,65 +80,29 @@ jobs: echo 'panic = "abort"' >> .cargo/config.toml echo 'opt-level = 3' >> .cargo/config.toml echo 'strip = true' >> .cargo/config.toml - - name: Build optimized release - working-directory: ./cli # Assuming this is the workspace root for the cli crate - # If your CLI project is in cli/cli, adjust working-directory to ./cli/cli - run: cargo build --release --target ${{ matrix.target }} --manifest-path ./cli/Cargo.toml - - - name: Determine Binary Name and Path - id: binary_info - shell: bash - run: | - # Ensure cli/target directory exists before find, in case of clean builds or different structures - mkdir -p cli/target/${{ matrix.target }}/release - CRATE_NAME_OUTPUT=$(basename $(find cli/target/${{ matrix.target }}/release -maxdepth 1 -type f -executable ! -name '*.dSYM' ! -name '*.pdb' 2>/dev/null || echo "buster")) # Default to buster if not found - # If find returns nothing (e.g. build failed or path is wrong), CRATE_NAME_OUTPUT could be empty or an error message. - # Fallback to a known name or fail if necessary. For now, using "buster" as a placeholder. - if [ -z "$CRATE_NAME_OUTPUT" ] || ! [ -f "cli/target/${{ matrix.target }}/release/$CRATE_NAME_OUTPUT" ]; then - echo "Warning: Could not automatically determine binary name. Assuming 'buster'." 
- # Attempt to find 'buster' or 'buster.exe' directly if primary find fails - if [[ "${{ matrix.os }}" == "windows-latest" ]]; then - CRATE_NAME_CANDIDATE="buster.exe" - else - CRATE_NAME_CANDIDATE="buster" - fi - if [ -f "cli/target/${{ matrix.target }}/release/$CRATE_NAME_CANDIDATE" ]; then - CRATE_NAME_OUTPUT=$CRATE_NAME_CANDIDATE - else - # If even the fallback isn't found, this will cause issues later. - # Consider failing the step: echo "Error: Binary not found."; exit 1 - # For now, proceeding with a default name and letting later steps handle missing file - echo "Fallback binary '$CRATE_NAME_CANDIDATE' also not found. Proceeding with this name." - CRATE_NAME_OUTPUT=${CRATE_NAME_CANDIDATE%.exe} # Store without .exe for consistency if needed elsewhere - fi - fi - echo "CRATE_NAME=$CRATE_NAME_OUTPUT" - echo "Binary name: $CRATE_NAME_OUTPUT" - echo "binary_name=$CRATE_NAME_OUTPUT" >> $GITHUB_OUTPUT - echo "binary_path=cli/target/${{ matrix.target }}/release/$CRATE_NAME_OUTPUT" - echo "binary_path_val=cli/target/${{ matrix.target }}/release/$CRATE_NAME_OUTPUT" >> $GITHUB_OUTPUT + working-directory: ./cli + run: cargo build --release --target ${{ matrix.target }} - name: Compress binary (Unix) if: matrix.use_tar - shell: bash + working-directory: ./cli run: | - cd cli/target/${{ matrix.target }}/release - tar czf ${{ matrix.artifact_name }} ${{ steps.binary_info.outputs.binary_name }} + cd target/${{ matrix.target }}/release + tar czf ${{ matrix.artifact_name }} buster-cli if [[ "${{ runner.os }}" == "macOS" ]]; then shasum -a 256 ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 else sha256sum ${{ matrix.artifact_name }} > ${{ matrix.artifact_name }}.sha256 fi - - name: Compress binary (Windows) if: matrix.use_tar == false + working-directory: ./cli shell: pwsh run: | - cd cli/target/${{ matrix.target }}/release - Compress-Archive -Path ${{ steps.binary_info.outputs.binary_name }}.exe -DestinationPath ${{ matrix.artifact_name }} + cd target/${{ 
matrix.target }}/release + Compress-Archive -Path buster-cli.exe -DestinationPath ${{ matrix.artifact_name }} Get-FileHash -Algorithm SHA256 ${{ matrix.artifact_name }} | Select-Object -ExpandProperty Hash > ${{ matrix.artifact_name }}.sha256 - - name: Upload artifacts uses: actions/upload-artifact@v4 with: @@ -131,45 +115,26 @@ jobs: release: needs: build runs-on: ubuntu-latest - # No specific if condition needed here based on event, tag push is the trigger - outputs: - release_tag: ${{ steps.get_tag_info.outputs.cli_tag_name }} - release_version: ${{ steps.get_tag_info.outputs.cli_version }} steps: - - name: Checkout code at the specific tag + - name: Checkout code uses: actions/checkout@v4 with: - ref: ${{ github.ref }} # Checks out the specific tag that triggered the workflow fetch-depth: 0 - - name: Extract CLI Tag and Version from Git Ref - id: get_tag_info - shell: bash - run: | - CLI_TAG_NAME="${{ github.ref_name }}" - # Validate tag format if necessary (e.g., ensure it starts with cli/v) - if [[ ! 
"$CLI_TAG_NAME" =~ ^cli/v[0-9]+\.[0-9]+\.[0-9]+(.*)$ ]]; then - echo "Error: Tag $CLI_TAG_NAME does not match the expected format 'cli/vX.Y.Z'" - # exit 1 # Optionally fail the job - # For now, we'll proceed and let release creation fail if tag is not suitable - fi - - CLI_VERSION=$(echo "$CLI_TAG_NAME" | sed 's#^cli/v##') - - echo "cli_tag_name=$CLI_TAG_NAME" >> $GITHUB_OUTPUT - echo "cli_version=$CLI_VERSION" >> $GITHUB_OUTPUT - echo "Extracted from Git Ref - CLI Tag: $CLI_TAG_NAME, CLI Version: $CLI_VERSION" - - - name: Download build artifacts + - name: Download all artifacts uses: actions/download-artifact@v4 - # No specific path needed, it downloads all to a directory named after the artifact + - name: Extract version from Cargo.toml + id: get_version + run: | + VERSION=$(grep '^version =' cli/Cargo.toml | sed 's/version = "\(.*\)"/\1/') + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Extracted version: $VERSION" - name: Create Release - # if: steps.get_tag_info.outputs.cli_tag_name != '' # This check is implicitly handled by the tag trigger uses: softprops/action-gh-release@v1 with: - tag_name: ${{ steps.get_tag_info.outputs.cli_tag_name }} # Should be same as github.ref_name - name: CLI Release v${{ steps.get_tag_info.outputs.cli_version }} + tag_name: v${{ steps.get_version.outputs.version }} + name: Release v${{ steps.get_version.outputs.version }} files: | **/buster-cli-linux-x86_64.tar.gz **/buster-cli-linux-x86_64.tar.gz.sha256 @@ -183,32 +148,32 @@ jobs: prerelease: false generate_release_notes: true env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + outputs: + # Declared so the downstream update_homebrew_tap job's `needs.release.outputs.*` references resolve; without these the gate below is always empty and the job never runs. + cli_tag_name: v${{ steps.get_version.outputs.version }} + cli_version: ${{ steps.get_version.outputs.version }} - update-homebrew-tap: + update_homebrew_tap: name: Update Homebrew Tap - needs: release + needs: release runs-on: ubuntu-latest - if: needs.release.outputs.release_tag != '' # Run only if a CLI tag was processed and release was attempted + if: needs.release.outputs.cli_tag_name != '' && needs.release.outputs.cli_version != '' steps: - name: Get
release version and tag from previous job id: release_info run: | - echo "RELEASE_VERSION=${{ needs.release.outputs.release_version }}" >> $GITHUB_ENV - echo "RELEASE_TAG=${{ needs.release.outputs.release_tag }}" >> $GITHUB_ENV - echo "Using version: ${{ needs.release.outputs.release_version }} from tag: ${{ needs.release.outputs.release_tag }}" + echo "RELEASE_VERSION=${{ needs.release.outputs.cli_version }}" >> $GITHUB_ENV + echo "RELEASE_TAG=${{ needs.release.outputs.cli_tag_name }}" >> $GITHUB_ENV + echo "Using version: ${{ needs.release.outputs.cli_version }} from tag: ${{ needs.release.outputs.cli_tag_name }}" - name: Set up GitHub CLI - uses: actions/setup-node@v4 # gh is often bundled, but this ensures it's available or can be installed + uses: actions/setup-node@v4 with: - node-version: '20' # Or any version that ensures gh is available + node-version: '20' - name: Download SHA256 sums from GitHub Release env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Use GITHUB_TOKEN to interact with the current repo's release + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_REPO: ${{ github.repository }} run: | - gh release download ${{ env.RELEASE_TAG }} --pattern '*.sha256' -R $GH_REPO + gh release download ${{ env.RELEASE_TAG }} --pattern '*.sha256' -R $GH_REPO --clobber echo "Downloaded SHA256 files:" ls -la *.sha256 @@ -228,8 +193,8 @@ jobs: uses: actions/checkout@v4 with: repository: buster-so/buster-homebrew - token: ${{ secrets.HOMEBREW_TAP_TOKEN }} # PAT with repo scope for buster-so/buster-homebrew - path: buster-homebrew # Checkout to a specific path + token: ${{ secrets.HOMEBREW_TAP_TOKEN }} + path: buster-homebrew - name: Configure Git working-directory: ./buster-homebrew @@ -241,16 +206,17 @@ jobs: working-directory: ./buster-homebrew env: VERSION: ${{ env.RELEASE_VERSION }} - TAG: ${{ env.RELEASE_TAG }} + TAG: ${{ env.RELEASE_TAG }} # This will be vX.Y.Z SHA_ARM64: ${{ env.SHA_ARM64 }} SHA_INTEL: ${{ env.SHA_INTEL }} SHA_LINUX: ${{ env.SHA_LINUX }} + REPO_OWNER: 
${{ github.repository_owner }} # Needed for URLs run: | FORMULA_FILE="Formula/buster.rb" TEMP_FORMULA_FILE="Formula/buster.rb.tmp" - # URLs for artifacts - URL_BASE="https://github.com/${{ github.repository_owner }}/buster/releases/download/$TAG" + # URLs for artifacts, using REPO_OWNER and TAG + URL_BASE="https://github.com/$REPO_OWNER/buster/releases/download/$TAG" URL_ARM64="$URL_BASE/buster-cli-darwin-arm64.tar.gz" URL_INTEL="$URL_BASE/buster-cli-darwin-x86_64.tar.gz" URL_LINUX="$URL_BASE/buster-cli-linux-x86_64.tar.gz" @@ -262,17 +228,16 @@ jobs: # Update version sed "s/^ version .*/ version \\"$VERSION\\"/" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" - - # Update top-level (defaults to ARM usually, as per your formula) + + # Update top-level URL and SHA (typically ARM) sed -E "s#^ url .*# url \\"$URL_ARM64\\"#" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" sed "s/^ sha256 .*/ sha256 \\"$SHA_ARM64\\"/" "$FORMULA_FILE" > "$TEMP_FORMULA_FILE" && mv "$TEMP_FORMULA_FILE" "$FORMULA_FILE" # Update on_macos -> on_arm - # Use a block to target sed within the on_arm block. Delimit with unique markers. 
awk ' - BEGIN { printing = 1; in_arm_block = 0; } + BEGIN { in_arm_block = 0; } /on_macos do/,/end/ { - if (/on_arm do/) { in_arm_block = 1; } + if (/on_arm do/) { in_arm_block = 1; print; next; } if (in_arm_block && /url /) { print " url \\"\\"" ENVIRON["URL_ARM64"] "\\"\\"" next @@ -288,9 +253,9 @@ jobs: # Update on_macos -> on_intel awk ' - BEGIN { printing = 1; in_intel_block = 0; } + BEGIN { in_intel_block = 0; } /on_macos do/,/end/ { - if (/on_intel do/) { in_intel_block = 1; } + if (/on_intel do/) { in_intel_block = 1; print; next; } if (in_intel_block && /url /) { print " url \\"\\"" ENVIRON["URL_INTEL"] "\\"\\"" next @@ -306,10 +271,9 @@ jobs: # Update on_linux awk ' - BEGIN { printing = 1; in_linux_block = 0; } + BEGIN { in_linux_block = 0; } /on_linux do/,/end/ { - if (/url / && !in_linux_block) { next } # Skip top-level url if not already processed - if (/on_linux do/) { in_linux_block = 1; } + if (/on_linux do/) { in_linux_block = 1; print; next; } if (in_linux_block && /url /) { print " url \\"\\"" ENVIRON["URL_LINUX"] "\\"\\"" next @@ -331,7 +295,6 @@ jobs: working-directory: ./buster-homebrew run: | git add Formula/buster.rb - # Check if there are changes to commit if git diff --staged --quiet; then echo "No changes to commit to Homebrew tap." 
else diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index 97cdaffc7..bce380d86 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -2,47 +2,101 @@ name: Docker Release on: push: - tags: - - 'api/v*' - - 'web/v*' + branches: + - main # Trigger when PR from staging is merged to main + +permissions: + contents: read + packages: write env: # Placeholder for Docker Hub username/organization or GHCR owner DOCKER_REGISTRY_OWNER: ghcr.io/${{ github.repository_owner }} - API_IMAGE_NAME: api-service - WEB_IMAGE_NAME: web-service + API_IMAGE_NAME: buster/api + WEB_IMAGE_NAME: buster/web jobs: - build_and_push_api: - name: Build and Push API Image - if: startsWith(github.ref, 'refs/tags/api/v') # Trigger only for API tags - runs-on: blacksmith-32vcpu-ubuntu-2204 # Updated runner + prepare_docker_release_info: + name: Prepare Docker Release Information + runs-on: blacksmith-32vcpu-ubuntu-2204 + outputs: + api_version: ${{ steps.version_info.outputs.api_version }} + web_version: ${{ steps.version_info.outputs.web_version }} + api_version_found: ${{ steps.version_info.outputs.api_version_found }} + web_version_found: ${{ steps.version_info.outputs.web_version_found }} steps: - - name: Checkout code at the specific tag + - name: Checkout code from main uses: actions/checkout@v4 with: - ref: ${{ github.ref }} # Checks out the specific API tag + ref: ${{ github.sha }} # Checkout the specific commit on main (merge commit) - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + - name: Read API and Web Versions + id: version_info + shell: bash + run: | + API_VERSION="" + WEB_VERSION="" + API_VERSION_FOUND="false" + WEB_VERSION_FOUND="false" + + # Read API version from api/server/Cargo.toml + if [ -f api/server/Cargo.toml ]; then + API_VERSION=$(grep '^version' api/server/Cargo.toml | head -n 1 | sed 's/version = \"\(.*\)\"/\1/') + if [ -n "$API_VERSION" ]; then + echo "Read API version 
'$API_VERSION' from api/server/Cargo.toml" + API_VERSION_FOUND="true" + else + echo "API version string not found in api/server/Cargo.toml despite file existing." + fi + else + echo "Warning: api/server/Cargo.toml not found. Cannot determine API version." + fi + + # Read Web version from web/package.json + if [ -f web/package.json ]; then + WEB_VERSION=$(jq -r '.version // empty' web/package.json) + if [ -n "$WEB_VERSION" ]; then + echo "Read Web version '$WEB_VERSION' from web/package.json" + WEB_VERSION_FOUND="true" + else + echo "Web version string not found in web/package.json despite file existing." + fi + else + echo "Warning: web/package.json not found. Cannot determine Web version." + fi + + echo "api_version=$API_VERSION" >> $GITHUB_OUTPUT + echo "web_version=$WEB_VERSION" >> $GITHUB_OUTPUT + echo "api_version_found=$API_VERSION_FOUND" >> $GITHUB_OUTPUT + echo "web_version_found=$WEB_VERSION_FOUND" >> $GITHUB_OUTPUT + + build_and_push_api: + name: Build and Push API Image + needs: prepare_docker_release_info + if: needs.prepare_docker_release_info.outputs.api_version_found == 'true' + strategy: + fail-fast: false + matrix: + platform: [amd64, arm64] + include: + - platform: amd64 + runner: blacksmith-8vcpu-ubuntu-2204 + docker_platform: linux/amd64 + - platform: arm64 + runner: blacksmith-8vcpu-ubuntu-2204-arm + docker_platform: linux/arm64 + runs-on: ${{ matrix.runner }} + env: + API_VERSION: ${{ needs.prepare_docker_release_info.outputs.api_version }} + steps: + - name: Checkout code from main + uses: actions/checkout@v4 + with: + ref: ${{ github.sha }} - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Extract API version from Git tag - id: api_version_extractor # Renamed for clarity - run: | - # github.ref_name will be like "api/v1.2.3" - VERSION=$(echo "${{ github.ref_name }}" | sed 's#^api/v##') - if [ -z "$VERSION" ]; then # Should not happen due to startsWith condition - echo "Could not extract version from tag: ${{ 
github.ref_name }}" - VERSION="unknown" - fi - echo "API_VERSION_ENV=$VERSION" >> $GITHUB_ENV # Set for current job - echo "api_version_output=$VERSION" >> $GITHUB_OUTPUT # Output for other steps if needed - echo "Extracted API version: $VERSION" - shell: bash - - name: Log in to Docker Registry uses: docker/login-action@v3 with: @@ -54,45 +108,61 @@ jobs: uses: useblacksmith/build-push-action@v1 with: context: ./api - file: ./api/Dockerfile # Assuming this Dockerfile is for api/server + file: ./api/Dockerfile push: true + platforms: ${{ matrix.docker_platform }} tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION_ENV }} - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }} # SHA of the tag commit - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest - cache-from: type=gha - cache-to: type=gha,mode=max + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ env.API_VERSION }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.API_IMAGE_NAME }}:latest-${{ matrix.platform }} + + - name: Set API Package Visibility to Public + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ORG_NAME: ${{ github.repository_owner }} + run: | + echo "Attempting to set visibility for $ORG_NAME/${{ env.API_IMAGE_NAME }}" + # Package names containing "/" (e.g. buster/api) must be URL-encoded in the Packages REST path + PACKAGE_NAME=$(printf '%s' "${{ env.API_IMAGE_NAME }}" | sed 's#/#%2F#g') + RESPONSE_CODE=$(curl -L -s -o /dev/null -w "%{http_code}" -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/orgs/$ORG_NAME/packages/container/$PACKAGE_NAME" \ + -d '{"visibility":"public"}') + if [ "$RESPONSE_CODE" -eq 200 ] || [ "$RESPONSE_CODE" -eq 204 ]; then + echo "Package $ORG_NAME/${{ env.API_IMAGE_NAME }} visibility set to public successfully." + else + echo "Failed to set package $ORG_NAME/${{ env.API_IMAGE_NAME }} visibility to public.
HTTP Status: $RESPONSE_CODE" + # Optionally, fail the step: exit 1 + fi build_and_push_web: name: Build and Push Web Image - if: startsWith(github.ref, 'refs/tags/web/v') # Trigger only for Web tags - runs-on: blacksmith-32vcpu-ubuntu-2204 # Updated runner + needs: prepare_docker_release_info + if: needs.prepare_docker_release_info.outputs.web_version_found == 'true' + environment: main + strategy: + fail-fast: false + matrix: + platform: [amd64, arm64] + include: + - platform: amd64 + runner: blacksmith-8vcpu-ubuntu-2204 + docker_platform: linux/amd64 + - platform: arm64 + runner: blacksmith-8vcpu-ubuntu-2204-arm + docker_platform: linux/arm64 + runs-on: ${{ matrix.runner }} + env: + WEB_VERSION: ${{ needs.prepare_docker_release_info.outputs.web_version }} steps: - - name: Checkout code at the specific tag + - name: Checkout code from main uses: actions/checkout@v4 with: - ref: ${{ github.ref }} # Checks out the specific Web tag + ref: ${{ github.sha }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Extract Web version from Git tag - id: web_version_extractor # Renamed for clarity - run: | - # github.ref_name will be like "web/v1.2.3" - VERSION=$(echo "${{ github.ref_name }}" | sed 's#^web/v##') - if [ -z "$VERSION" ]; then # Should not happen due to startsWith condition - echo "Could not extract version from tag: ${{ github.ref_name }}" - VERSION="unknown" - fi - echo "WEB_VERSION_ENV=$VERSION" >> $GITHUB_ENV # Set for current job - echo "web_version_output=$VERSION" >> $GITHUB_OUTPUT # Output for other steps if needed - echo "Extracted Web version: $VERSION" - shell: bash - - name: Log in to Docker Registry uses: docker/login-action@v3 with: @@ -106,14 +176,33 @@ jobs: context: ./web file: ./web/Dockerfile push: true + platforms: ${{ matrix.docker_platform }} tags: | - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION_ENV }} - ${{ 
env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }} # SHA of the tag commit - ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:latest + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ env.WEB_VERSION }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:${{ github.sha }}-${{ matrix.platform }} + ${{ env.DOCKER_REGISTRY_OWNER }}/${{ env.WEB_IMAGE_NAME }}:latest-${{ matrix.platform }} build-args: | NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} NEXT_PUBLIC_URL=${{ secrets.NEXT_PUBLIC_URL }} NEXT_PUBLIC_SUPABASE_URL=${{ secrets.NEXT_PUBLIC_SUPABASE_URL }} NEXT_PUBLIC_SUPABASE_ANON_KEY=${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} - cache-from: type=gha - cache-to: type=gha,mode=max + NEXT_PUBLIC_WEB_SOCKET_URL=${{ secrets.NEXT_PUBLIC_WEB_SOCKET_URL }} + + - name: Set Web Package Visibility to Public + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ORG_NAME: ${{ github.repository_owner }} + run: | + echo "Attempting to set visibility for $ORG_NAME/${{ env.WEB_IMAGE_NAME }}" + # Package names containing "/" (e.g. buster/web) must be URL-encoded in the Packages REST path + PACKAGE_NAME=$(printf '%s' "${{ env.WEB_IMAGE_NAME }}" | sed 's#/#%2F#g') + RESPONSE_CODE=$(curl -L -s -o /dev/null -w "%{http_code}" -X PATCH \ + -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/orgs/$ORG_NAME/packages/container/$PACKAGE_NAME" \ + -d '{"visibility":"public"}') + if [ "$RESPONSE_CODE" -eq 200 ] || [ "$RESPONSE_CODE" -eq 204 ]; then + echo "Package $ORG_NAME/${{ env.WEB_IMAGE_NAME }} visibility set to public successfully." + else + echo "Failed to set package $ORG_NAME/${{ env.WEB_IMAGE_NAME }} visibility to public.
HTTP Status: $RESPONSE_CODE" + # Optionally, fail the step: exit 1 + fi diff --git a/.github/workflows/manage-versions.yml b/.github/workflows/manage-versions.yml index e42aae880..e338a7edb 100644 --- a/.github/workflows/manage-versions.yml +++ b/.github/workflows/manage-versions.yml @@ -2,63 +2,66 @@ name: Manage Versions on: pull_request: - types: [closed] + types: [opened, synchronize] branches: - - main + - staging workflow_dispatch: inputs: component: - description: 'Component to version bump' + description: 'Component to version bump (if running manually)' required: true default: 'all' type: choice - options: - - all - - api - - web - - cli + options: [all, api, web, cli] version_spec: - description: 'Version bump type (patch, minor, major) or specific version (e.g., 1.2.3)' + description: 'Version bump type or specific version (if running manually)' required: true default: 'patch' type: string + pr_branch: + description: 'Name of the PR source branch (required if dispatching for a PR)' + required: false + type: string permissions: - contents: write # To push commits and tags + contents: write # To push commits back to the PR branch jobs: - bump_versions: + bump_versions_in_pr: runs-on: blacksmith - if: (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event_name == 'workflow_dispatch' + if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' outputs: new_api_version: ${{ steps.bump.outputs.new_api_version }} new_web_version: ${{ steps.bump.outputs.new_web_version }} new_cli_version: ${{ steps.bump.outputs.new_cli_version }} - api_tag_created: ${{ steps.tag.outputs.api_tag_created }} - web_tag_created: ${{ steps.tag.outputs.web_tag_created }} - cli_tag_created: ${{ steps.tag.outputs.cli_tag_created }} + versions_bumped: ${{ steps.bump.outputs.versions_bumped }} steps: - - name: Determine Branch Name and SHA - id: branch_info + - name: Determine Target Branch for Checkout and Push + id: 
pr_branch_info shell: bash run: | + TARGET_BRANCH="" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - echo "branch_name=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT - echo "checkout_sha=${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT + TARGET_BRANCH="${{ github.head_ref }}" + echo "Detected PR event. Will operate on PR source branch: $TARGET_BRANCH" elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then - echo "branch_name=${{ github.ref_name }}" >> $GITHUB_OUTPUT - echo "checkout_sha=${{ github.sha }}" >> $GITHUB_OUTPUT + if [[ -z "${{ github.event.inputs.pr_branch }}" ]]; then + echo "Error: 'pr_branch' input is required for manual dispatch to update a PR." + exit 1 + fi + TARGET_BRANCH="${{ github.event.inputs.pr_branch }}" + echo "Detected workflow_dispatch event. Will operate on specified PR branch: $TARGET_BRANCH" else - echo "branch_name=${{ github.ref_name }}" >> $GITHUB_OUTPUT - echo "checkout_sha=${{ github.sha }}" >> $GITHUB_OUTPUT + echo "Error: Unhandled event type '${{ github.event_name }}'" + exit 1 fi + echo "target_branch_name=$TARGET_BRANCH" >> $GITHUB_OUTPUT - - name: Checkout code + - name: Checkout PR source branch uses: actions/checkout@v4 with: - ref: ${{ github.sha }} + ref: ${{ steps.pr_branch_info.outputs.target_branch_name }} token: ${{ secrets.GITHUB_TOKEN }} - fetch-depth: 0 - name: Set up Node.js uses: actions/setup-node@v4 @@ -79,74 +82,42 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - - name: Determine Version Spec and Component from Event + - name: Determine Version Spec and Component id: event_params shell: bash run: | VERSION_SPEC="" COMPONENT="" - COMMIT_MESSAGE_TEXT="" - - echo "Event name: ${{ github.event_name }}" if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then VERSION_SPEC="${{ github.event.inputs.version_spec }}" COMPONENT="${{ github.event.inputs.component }}" echo "Using 
workflow_dispatch inputs: version_spec='$VERSION_SPEC', component='$COMPONENT'" - elif [[ "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.merged }}" == "true" ]]; then + elif [[ "${{ github.event_name }}" == "pull_request" ]]; then PR_TITLE=$(echo "${{ github.event.pull_request.title }}" | tr '[:upper:]' '[:lower:]') - echo "Pull Request title (lowercase): $PR_TITLE" COMPONENT="all" - if echo "$PR_TITLE" | grep -q -E "breaking change|feat!:"; then - VERSION_SPEC="major" - elif echo "$PR_TITLE" | grep -q -E "^feat\\([^)]+\\)!:"; then - VERSION_SPEC="major" - elif echo "$PR_TITLE" | grep -q -E "^feat:"; then - VERSION_SPEC="minor" - elif echo "$PR_TITLE" | grep -q -E "^fix:"; then - VERSION_SPEC="patch" + if echo "$PR_TITLE" | grep -q -E "breaking change|feat!:"; then VERSION_SPEC="major"; + elif echo "$PR_TITLE" | grep -q -E "^feat\\([^)]+\\)!:"; then VERSION_SPEC="major"; + elif echo "$PR_TITLE" | grep -q -E "^feat:"; then VERSION_SPEC="minor"; + elif echo "$PR_TITLE" | grep -q -E "^fix:"; then VERSION_SPEC="patch"; else - echo "No major/minor/fix keyword found in PR title. Defaulting to patch for merged PR." + echo "No conventional commit keyword (major/minor/patch) found in PR title '$PR_TITLE'." + echo "Defaulting to a patch version bump." VERSION_SPEC="patch" fi - echo "Determined for PR merge: version_spec='$VERSION_SPEC', component='$COMPONENT'" - elif [[ "${{ github.event_name }}" == "push" ]]; then - COMMIT_MESSAGE_TEXT=$(echo "${{ github.event.head_commit.message }}" | tr '[:upper:]' '[:lower:]') - echo "Push event. 
Analyzing commit message (lowercase): $COMMIT_MESSAGE_TEXT" - COMPONENT="all" - - if echo "$COMMIT_MESSAGE_TEXT" | grep -q -E "breaking change|feat!:"; then - VERSION_SPEC="major" - elif echo "$COMMIT_MESSAGE_TEXT" | grep -q -E "^feat\\([^)]+\\)!:"; then - VERSION_SPEC="major" - elif echo "$COMMIT_MESSAGE_TEXT" | grep -q -E "^feat:"; then - VERSION_SPEC="minor" - elif echo "$COMMIT_MESSAGE_TEXT" | grep -q -E "^fix:"; then - VERSION_SPEC="patch" - else - echo "No major/minor/fix keyword found in commit message. Defaulting to patch." - VERSION_SPEC="patch" - fi - echo "Determined for push: version_spec='$VERSION_SPEC', component='$COMPONENT'" - else - echo "Unhandled event type: ${{ github.event_name }}. Defaulting to patch and all." - VERSION_SPEC="patch" - COMPONENT="all" + echo "Determined for PR to staging: version_spec='$VERSION_SPEC', component='$COMPONENT'" fi - + if [[ -z "$VERSION_SPEC" ]]; then - echo "Warning: VERSION_SPEC is empty after evaluation. Defaulting to patch." + echo "Warning: VERSION_SPEC is empty. Defaulting to patch." VERSION_SPEC="patch" fi if [[ -z "$COMPONENT" ]]; then - echo "Warning: COMPONENT is empty after evaluation. Defaulting to all." + echo "Warning: COMPONENT is empty. Defaulting to all." 
COMPONENT="all" fi - echo "Final determined version_spec: $VERSION_SPEC" - echo "Final determined component: $COMPONENT" - echo "version_spec=$VERSION_SPEC" >> $GITHUB_OUTPUT echo "component=$COMPONENT" >> $GITHUB_OUTPUT @@ -231,85 +202,83 @@ jobs: if [[ "$COMMIT_CHANGES" == true ]]; then FINAL_COMMIT_MESSAGE=$(echo "$COMMIT_MESSAGE_PREFIX" | sed 's/;$//') - echo "Final Commit Message: $FINAL_COMMIT_MESSAGE [skip ci]" - echo "COMMIT_MESSAGE_CONTENT=$FINAL_COMMIT_MESSAGE [skip ci]" >> $GITHUB_ENV - echo "commit_message=$FINAL_COMMIT_MESSAGE [skip ci]" >> $GITHUB_OUTPUT + echo "Final Commit Message for version files: $FINAL_COMMIT_MESSAGE [skip ci]" + echo "commit_message=$FINAL_COMMIT_MESSAGE [skip ci]" >> $GITHUB_OUTPUT # For the version files commit + echo "versions_bumped=true" >> $GITHUB_OUTPUT else - echo "No version changes detected." - echo "COMMIT_MESSAGE_CONTENT=" >> $GITHUB_ENV + echo "No version changes detected for source files." echo "commit_message=" >> $GITHUB_OUTPUT + echo "versions_bumped=false" >> $GITHUB_OUTPUT fi + echo "New API Version Output: $NEW_API_VERSION" echo "New Web Version Output: $NEW_WEB_VERSION" echo "New CLI Version Output: $NEW_CLI_VERSION" - - name: Commit version changes - if: steps.bump.outputs.commit_message != '' + - name: Commit and Push Version File Changes to PR Branch + if: steps.bump.outputs.versions_bumped == 'true' env: - COMMIT_MESSAGE_TO_USE: ${{ steps.bump.outputs.commit_message }} + TARGET_BRANCH: ${{ steps.pr_branch_info.outputs.target_branch_name }} + COMMIT_MESSAGE_CONTENT: ${{ steps.bump.outputs.commit_message }} # This is the commit message for version files run: | - git commit -m "$COMMIT_MESSAGE_TO_USE" + echo "Committing version file changes with message: $COMMIT_MESSAGE_CONTENT" + git commit -m "$COMMIT_MESSAGE_CONTENT" # Files were already added by the 'bump' step + echo "Pushing version file changes to PR branch: $TARGET_BRANCH" + git push origin HEAD:"$TARGET_BRANCH" - - name: Create and Push Tags - if: 
steps.bump.outputs.commit_message != '' + - name: Prepare, Commit, and Push Tag Information File + if: steps.bump.outputs.versions_bumped == 'true' # Only run if versions were actually bumped + id: prepare_tag_info_file + env: + TARGET_BRANCH: ${{ steps.pr_branch_info.outputs.target_branch_name }} run: | - echo "Creating and pushing tags..." + echo "Preparing tag_info.json file..." TAG_INFO_FILE="tag_info.json" echo "{" > $TAG_INFO_FILE - FIRST_TAG=true + FIRST_ENTRY=true NEW_API_VERSION="${{ steps.bump.outputs.new_api_version }}" NEW_WEB_VERSION="${{ steps.bump.outputs.new_web_version }}" NEW_CLI_VERSION="${{ steps.bump.outputs.new_cli_version }}" if [[ -n "$NEW_API_VERSION" ]]; then - TAG_NAME="api/v$NEW_API_VERSION" - echo "Tagging API: $TAG_NAME" - git tag "$TAG_NAME" - if [ "$FIRST_TAG" = false ]; then echo "," >> $TAG_INFO_FILE; fi - echo " \"api_tag\": \"$TAG_NAME\"" >> $TAG_INFO_FILE - FIRST_TAG=false + POTENTIAL_TAG_NAME="api/v$NEW_API_VERSION" + if [ "$FIRST_ENTRY" = false ]; then echo "," >> $TAG_INFO_FILE; fi + echo " \"api_tag\": \"$POTENTIAL_TAG_NAME\", \"api_version\": \"$NEW_API_VERSION\"" >> $TAG_INFO_FILE + FIRST_ENTRY=false fi if [[ -n "$NEW_WEB_VERSION" ]]; then - TAG_NAME="web/v$NEW_WEB_VERSION" - echo "Tagging Web: $TAG_NAME" - git tag "$TAG_NAME" - if [ "$FIRST_TAG" = false ]; then echo "," >> $TAG_INFO_FILE; fi - echo " \"web_tag\": \"$TAG_NAME\"" >> $TAG_INFO_FILE - FIRST_TAG=false + POTENTIAL_TAG_NAME="web/v$NEW_WEB_VERSION" + if [ "$FIRST_ENTRY" = false ]; then echo "," >> $TAG_INFO_FILE; fi + echo " \"web_tag\": \"$POTENTIAL_TAG_NAME\", \"web_version\": \"$NEW_WEB_VERSION\"" >> $TAG_INFO_FILE + FIRST_ENTRY=false fi if [[ -n "$NEW_CLI_VERSION" ]]; then - TAG_NAME="cli/v$NEW_CLI_VERSION" - echo "Tagging CLI: $TAG_NAME" - git tag "$TAG_NAME" - if [ "$FIRST_TAG" = false ]; then echo "," >> $TAG_INFO_FILE; fi - echo " \"cli_tag\": \"$TAG_NAME\"" >> $TAG_INFO_FILE - FIRST_TAG=false + POTENTIAL_TAG_NAME="cli/v$NEW_CLI_VERSION" + if [ 
"$FIRST_ENTRY" = false ]; then echo "," >> $TAG_INFO_FILE; fi + echo " \"cli_tag\": \"$POTENTIAL_TAG_NAME\", \"cli_version\": \"$NEW_CLI_VERSION\"" >> $TAG_INFO_FILE + FIRST_ENTRY=false fi echo "}" >> $TAG_INFO_FILE - echo "Created tag info file:" + echo "Created tag_info.json:" cat $TAG_INFO_FILE + + echo "Committing and pushing tag_info.json to $TARGET_BRANCH..." + git add $TAG_INFO_FILE + # Check if there are changes to commit for tag_info.json to avoid empty commit + if ! git diff --staged --quiet; then + git commit -m "chore: update tag_info.json with potential release versions [skip ci]" + git push origin HEAD:"$TARGET_BRANCH" + echo "tag_info.json pushed to $TARGET_BRANCH." + else + echo "No changes to tag_info.json to commit." + fi - BRANCH_TO_PUSH="${{ steps.branch_info.outputs.branch_name }}" - echo "Pushing commit and tags to branch: $BRANCH_TO_PUSH" - git push origin HEAD:"refs/heads/$BRANCH_TO_PUSH" --follow-tags - - echo "api_tag_created=$API_TAG_CREATED" >> $GITHUB_OUTPUT - echo "web_tag_created=$WEB_TAG_CREATED" >> $GITHUB_OUTPUT - echo "cli_tag_created=$CLI_TAG_CREATED" >> $GITHUB_OUTPUT - - - name: Upload Tag Information Artifact - if: steps.tag.outputs.api_tag_created == 'true' || steps.tag.outputs.web_tag_created == 'true' || steps.tag.outputs.cli_tag_created == 'true' + - name: Upload Tag Information Artifact (for reference) + if: steps.bump.outputs.versions_bumped == 'true' # Or based on whether tag_info.json was actually changed/committed uses: actions/upload-artifact@v4 with: - name: version-tag-info - path: tag_info.json - retention-days: 1 - - - name: Push changes (if only commit, no tags yet or if tag push failed) - if: steps.bump.outputs.commit_message != '' && (steps.bump.outputs.new_api_version == '' && steps.bump.outputs.new_web_version == '' && steps.bump.outputs.new_cli_version == '') - run: | - BRANCH_TO_PUSH="${{ steps.branch_info.outputs.branch_name }}" - echo "Pushing commit to branch: $BRANCH_TO_PUSH (changes made but no 
version tags generated)." - git push origin HEAD:"refs/heads/$BRANCH_TO_PUSH" \ No newline at end of file + name: version-tag-info-potential + path: tag_info.json # This will upload the latest version from the workspace + retention-days: 7 \ No newline at end of file diff --git a/.github/workflows/porter_app_evals_3155.yml b/.github/workflows/porter_app_evals_3155.yml index 51f95af25..6d0d60e1b 100644 --- a/.github/workflows/porter_app_evals_3155.yml +++ b/.github/workflows/porter_app_evals_3155.yml @@ -6,44 +6,6 @@ - 'api/**' name: Deploy to evals jobs: - database-deploy: - runs-on: blacksmith-16vcpu-ubuntu-2204 - environment: staging - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - profile: minimal - override: true - - - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2 - - - name: Install Diesel CLI - run: cargo install diesel_cli --no-default-features --features postgres - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} - - - name: Download Postgres certificate from S3 - run: | - mkdir -p ~/.postgresql - aws s3 cp ${{ secrets.CERT_S3_URL }} ~/.postgresql/root.crt - - - name: Run migrations - working-directory: ./api - run: diesel migration run - env: - DATABASE_URL: ${{ secrets.EVAL_DB_URL }} - PGSSLMODE: disable porter-deploy: runs-on: blacksmith-32vcpu-ubuntu-2204 steps: diff --git a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs index 30d1b1978..7ec8d5608 100644 --- a/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs +++ b/api/libs/query_engine/src/data_source_connections/ssh_tunneling.rs @@ -4,7 +4,6 @@ use std::{ fs, io::Write, net::TcpListener, - 
os::unix::fs::PermissionsExt, process::{Child, Command}, }; use tempfile::NamedTempFile; @@ -61,30 +60,6 @@ pub fn establish_ssh_tunnel( } }; - let mut perms = match fs::metadata(temp_ssh_key.path()) { - Ok(p) => p.permissions(), - Err(e) => { - tracing::error!( - "There was a problem while getting the metadata of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; - - perms.set_mode(0o600); - - match fs::set_permissions(temp_ssh_key.path(), perms) { - Ok(_) => {} - Err(e) => { - tracing::error!( - "There was a problem while setting the permissions of the temp file: {}", - e - ); - return Err(anyhow!(e)); - } - }; - let ssh_tunnel = match Command::new("ssh") .arg("-T") .arg("-i") diff --git a/api/libs/semantic_layer/spec.yml b/api/libs/semantic_layer/spec.yml index e730eafcf..81d710161 100644 --- a/api/libs/semantic_layer/spec.yml +++ b/api/libs/semantic_layer/spec.yml @@ -1,37 +1,36 @@ # Schema specification for the model structure -models: - - name: string # Required - description: string # Optional - dimensions: - - name: string # Required - description: string # Optional - type: string # Optional, inferred if omitted - searchable: boolean # Optional, default: false - options: [string] # Optional, default: null - measures: - - name: string # Required - description: string # Optional - type: string # Optional, inferred if omitted - metrics: - - name: string # Required - expr: string # Required, can use model.column from entities - description: string # Optional - args: # Optional, required only if expr contains arguments, default: null - - name: string # Required - type: string # Required - description: string # Optional - filters: - - name: string # Required - expr: string # Required, can use model.column from entities - description: string # Optional - args: # Optional, required only if expr contains arguments, default: null - - name: string # Required - type: string # Required - description: string # Optional - entities: - - name: string # Required 
- primary_key: string # Required - foreign_key: string # Required - type: string # Optional, e.g., "LEFT", "INNER"; LLM decides if omitted - cardinality: string # Optional, e.g., "one-to-many", "many-to-many" - description: string # Optional \ No newline at end of file +- name: string # Required + description: string # Optional + dimensions: + - name: string # Required + description: string # Optional + type: string # Optional, inferred if omitted + searchable: boolean # Optional, default: false + options: [string] # Optional, default: null + measures: + - name: string # Required + description: string # Optional + type: string # Optional, inferred if omitted + metrics: + - name: string # Required + expr: string # Required, can use model.column from entities + description: string # Optional + args: # Optional, required only if expr contains arguments, default: null + - name: string # Required + type: string # Required + description: string # Optional + filters: + - name: string # Required + expr: string # Required, can use model.column from entities + description: string # Optional + args: # Optional, required only if expr contains arguments, default: null + - name: string # Required + type: string # Required + description: string # Optional + entities: + - name: string # Required + primary_key: string # Required + foreign_key: string # Required + type: string # Optional, e.g., "LEFT", "INNER"; LLM decides if omitted + cardinality: string # Optional, e.g., "one-to-many", "many-to-many" + description: string # Optional diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 69ddab820..95fb749c8 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -5,13 +5,6 @@ members = [ # Add "libs/*" or specific lib crates here later ] -[profile.release] -lto = true -strip = true -opt-level = "z" -codegen-units = 1 -panic = "abort" - [workspace.dependencies] anyhow = "1.0.79" async-trait = "0.1.80" diff --git a/docker-compose.yml b/docker-compose.yml index 3bb5bb055..cbb05bfe4 100644 
--- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,7 +16,7 @@ services: retries: 30 api: - image: ghcr.io/buster-so/buster/api:latest + image: ghcr.io/buster-so/buster/api:latest-arm64 container_name: buster-api env_file: - .env @@ -34,12 +34,7 @@ services: - COHERE_API_KEY=${COHERE_API_KEY} ports: - "3001:3001" - deploy: - resources: - limits: - memory: 4G - reservations: - memory: 2G + - "3000:3000" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:3001/health"] interval: 30s @@ -52,29 +47,16 @@ services: condition: service_healthy kong: condition: service_healthy - + web: - image: ghcr.io/buster-so/buster/web:latest + image: ghcr.io/buster-so/buster/web:latest-arm64 container_name: buster-web - ports: - - "3000:3000" + env_file: + - .env depends_on: api: condition: service_healthy - - litellm: - image: ghcr.io/berriai/litellm:main-stable - container_name: buster-litellm - restart: always - ports: - - "4001:4001" - env_file: - - .env - environment: - - LITELLM_ENV=local - depends_on: - supavisor: - condition: service_healthy + network_mode: "service:api" volumes: buster_redis_data: \ No newline at end of file diff --git a/start.sh b/start.sh deleted file mode 100644 index 06023d123..000000000 --- a/start.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -echo "Starting Supabase..." -cd supabase -docker compose up -d - -echo "Waiting for Supabase to be healthy..." -until curl -s http://localhost:54321/rest/v1/ > /dev/null; do - echo "Waiting for Supabase..." - sleep 5 -done - -echo "Supabase is ready! Starting main services..." -cd .. 
-docker compose up \ No newline at end of file diff --git a/supabase/.env.example b/supabase/.env.example deleted file mode 100644 index 3d856d987..000000000 --- a/supabase/.env.example +++ /dev/null @@ -1,117 +0,0 @@ -############ -# Secrets -# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION -############ - -POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password -JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long -ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE -SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.ey AgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q -DASHBOARD_USERNAME=supabase -DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated - -############ -# Database - You can change these to any PostgreSQL database that has logical replication enabled. -############ - -POSTGRES_HOST=db -POSTGRES_DB=postgres -POSTGRES_PORT=5432 -# default user is postgres - -############ -# Supavisor -- Database pooler -############ -POOLER_PROXY_PORT_TRANSACTION=6543 -POOLER_DEFAULT_POOL_SIZE=20 -POOLER_MAX_CLIENT_CONN=100 -POOLER_TENANT_ID=your-tenant-id - - -############ -# API Proxy - Configuration for the Kong Reverse proxy. -############ - -KONG_HTTP_PORT=8000 -KONG_HTTPS_PORT=8443 - - -############ -# API - Configuration for PostgREST. -############ - -PGRST_DB_SCHEMAS=public,storage,graphql_public - - -############ -# Auth - Configuration for the GoTrue authentication server. 
-############ - -## General -SITE_URL=http://localhost:3003 -ADDITIONAL_REDIRECT_URLS= -JWT_EXPIRY=3600 -DISABLE_SIGNUP=false -API_EXTERNAL_URL=http://localhost:8000 - -## Mailer Config -MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" -MAILER_URLPATHS_INVITE="/auth/v1/verify" -MAILER_URLPATHS_RECOVERY="/auth/v1/verify" -MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" - -## Email auth -ENABLE_EMAIL_SIGNUP=true -ENABLE_EMAIL_AUTOCONFIRM=false -SMTP_ADMIN_EMAIL=admin@buster.so -SMTP_HOST=supabase-mail -SMTP_PORT=2500 -SMTP_USER= -SMTP_PASS= -SMTP_SENDER_NAME=Buster -ENABLE_ANONYMOUS_USERS=true - -## Phone auth -ENABLE_PHONE_SIGNUP=true -ENABLE_PHONE_AUTOCONFIRM=true - - -############ -# Studio - Configuration for the Dashboard -############ - -STUDIO_DEFAULT_ORGANIZATION=Default Organization -STUDIO_DEFAULT_PROJECT=Default Project - -STUDIO_PORT=3003 -# replace if you intend to use Studio outside of localhost -SUPABASE_PUBLIC_URL=http://localhost:8000 - -# Enable webp support -IMGPROXY_ENABLE_WEBP_DETECTION=true - -# Add your OpenAI API key to enable SQL Editor Assistant -OPENAI_API_KEY= - -############ -# Functions - Configuration for Functions -############ -# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet. 
-FUNCTIONS_VERIFY_JWT=false - -############ -# Logs - Configuration for Logflare -# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction -############ - -LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key - -# Change vector.toml sinks to reflect this change -LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key - -# Docker socket location - this value will differ depending on your OS -DOCKER_SOCKET_LOCATION=/var/run/docker.sock - -# Google Cloud Project details -GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID -GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER diff --git a/supabase/docker-compose.yml b/supabase/docker-compose.yml index 23ba55227..88c4f85ca 100644 --- a/supabase/docker-compose.yml +++ b/supabase/docker-compose.yml @@ -16,6 +16,46 @@ services: - '9000:9000' # web interface - '1100:1100' # POP3 + studio: + container_name: supabase-studio + image: supabase/studio:20241202-71e5240 + restart: unless-stopped + healthcheck: + test: + [ + "CMD", + "node", + "-e", + "fetch('http://studio:3000/api/profile').then((r) => {if (r.status !== 200) throw new Error(r.status)})" + ] + timeout: 10s + interval: 5s + retries: 3 + depends_on: + analytics: + condition: service_healthy + environment: + STUDIO_PG_META_URL: http://meta:8080 + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + + DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION} + DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT} + OPENAI_API_KEY: ${OPENAI_API_KEY:-} + + SUPABASE_URL: http://kong:8000 + SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL} + SUPABASE_ANON_KEY: ${ANON_KEY} + SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY} + AUTH_JWT_SECRET: ${JWT_SECRET} + + LOGFLARE_API_KEY: ${LOGFLARE_API_KEY} + LOGFLARE_URL: http://analytics:4000 + NEXT_PUBLIC_ENABLE_LOGS: true + # Comment to use Big Query backend for analytics + NEXT_ANALYTICS_BACKEND_PROVIDER: postgres + # Uncomment to use Big Query backend for analytics + # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery + kong: 
container_name: supabase-kong image: kong:2.8.1 @@ -147,6 +187,52 @@ services: PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY} command: "postgrest" + realtime: + # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain + container_name: realtime-dev.supabase-realtime + image: supabase/realtime:v2.33.70 + depends_on: + db: + # Disable this if you are using an external Postgres database + condition: service_healthy + analytics: + condition: service_healthy + healthcheck: + test: + [ + "CMD", + "curl", + "-sSfL", + "--head", + "-o", + "/dev/null", + "-H", + "Authorization: Bearer ${ANON_KEY}", + "http://localhost:4000/api/tenants/realtime-dev/health" + ] + timeout: 5s + interval: 5s + retries: 3 + restart: unless-stopped + environment: + PORT: 4000 + DB_HOST: ${POSTGRES_HOST} + DB_PORT: ${POSTGRES_PORT} + DB_USER: supabase_admin + DB_PASSWORD: ${POSTGRES_PASSWORD} + DB_NAME: ${POSTGRES_DB} + DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime' + DB_ENC_KEY: supabaserealtime + API_JWT_SECRET: ${JWT_SECRET} + SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq + ERL_AFLAGS: -proto_dist inet_tcp + DNS_NODES: "''" + RLIMIT_NOFILE: "10000" + APP_NAME: realtime + SEED_SELF_HOST: true + RUN_JANITOR: true + + # To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up storage: container_name: supabase-storage image: supabase/storage-api:v1.11.13 @@ -206,6 +292,24 @@ services: volumes: - ./volumes/storage:/var/lib/storage:z + meta: + container_name: supabase-meta + image: supabase/postgres-meta:v0.84.2 + depends_on: + db: + # Disable this if you are using an external Postgres database + condition: service_healthy + analytics: + condition: service_healthy + restart: unless-stopped + environment: + PG_META_PORT: 8080 + PG_META_DB_HOST: ${POSTGRES_HOST} + PG_META_DB_PORT: ${POSTGRES_PORT} + PG_META_DB_NAME: ${POSTGRES_DB} + PG_META_DB_USER: supabase_admin + 
PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD} + functions: container_name: supabase-edge-functions image: supabase/edge-runtime:v1.65.3 @@ -379,7 +483,7 @@ services: - POOLER_TENANT_ID=${POOLER_TENANT_ID} - POOLER_DEFAULT_POOL_SIZE=${POOLER_DEFAULT_POOL_SIZE} - POOLER_MAX_CLIENT_CONN=${POOLER_MAX_CLIENT_CONN} - - POOLER_POOL_MODE=transaction + - POOLER_POOL_MODE=session volumes: - ./volumes/pooler/pooler.exs:/etc/pooler/pooler.exs:ro diff --git a/version.txt b/version.txt index a24809adf..ee92deb82 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.0.48 +0.0.80 diff --git a/web/Dockerfile b/web/Dockerfile index f200c3c78..32e19720a 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,5 +1,5 @@ # Build stage -FROM node:20-alpine AS builder +FROM node:22-alpine AS builder WORKDIR /app diff --git a/web/package-lock.json b/web/package-lock.json index a333eb8ab..95d9e4ddb 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "web", - "version": "0.1.1", + "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "web", - "version": "0.1.1", + "version": "0.1.0", "dependencies": { "@dnd-kit/core": "^6.3.1", "@dnd-kit/modifiers": "^9.0.0", diff --git a/web/src/components/features/auth/LoginForm.tsx b/web/src/components/features/auth/LoginForm.tsx index 230db1d7c..6d42fa4c0 100644 --- a/web/src/components/features/auth/LoginForm.tsx +++ b/web/src/components/features/auth/LoginForm.tsx @@ -50,9 +50,9 @@ export const LoginForm: React.FC<{}> = ({}) => { async ({ email, password }: { email: string; password: string }) => { setLoading('email'); try { - const res = await signInWithEmailAndPassword({ email, password }); - if (res?.error) throw res.error; + await signInWithEmailAndPassword({ email, password }); } catch (error: any) { + console.error(error); errorFallback(error); setLoading(null); } @@ -62,9 +62,9 @@ export const LoginForm: React.FC<{}> = ({}) => { const onSignInWithGoogle = useMemoizedFn(async 
() => { setLoading('google'); try { - const res = await signInWithGoogle(); - if (res?.error) throw res.error; + await signInWithGoogle(); } catch (error: any) { + console.error(error); errorFallback(error); setLoading(null); } @@ -74,8 +74,8 @@ export const LoginForm: React.FC<{}> = ({}) => { setLoading('github'); try { const res = await signInWithGithub(); - if (res?.error) throw res.error; } catch (error: any) { + console.error(error); errorFallback(error); setLoading(null); } @@ -84,8 +84,7 @@ export const LoginForm: React.FC<{}> = ({}) => { const onSignInWithAzure = useMemoizedFn(async () => { setLoading('azure'); try { - const res = await signInWithAzure(); - if (res?.error) throw res.error; + await signInWithAzure(); } catch (error: any) { errorFallback(error); setLoading(null); @@ -95,11 +94,10 @@ export const LoginForm: React.FC<{}> = ({}) => { const onSignUp = useMemoizedFn(async (d: { email: string; password: string }) => { setLoading('email'); try { - const res = await signUp(d); - if (res?.error) throw res.error; - + await signUp(d); setSignUpSuccess(true); } catch (error: any) { + console.error(error); errorFallback(error); setLoading(null); } @@ -113,6 +111,7 @@ export const LoginForm: React.FC<{}> = ({}) => { if (signUpFlow) onSignUp(d); else onSignInWithUsernameAndPassword(d); } catch (error: any) { + console.error(error); const errorMessage = rustErrorHandler(error); if (errorMessage?.message == 'User already registered') { onSignInWithUsernameAndPassword(d); diff --git a/web/src/lib/supabase/signIn.ts b/web/src/lib/supabase/signIn.ts index 5af5e0cb6..2dbf9d17d 100644 --- a/web/src/lib/supabase/signIn.ts +++ b/web/src/lib/supabase/signIn.ts @@ -17,7 +17,6 @@ export const signInWithEmailAndPassword = async ({ password: string; }) => { 'use server'; - const supabase = await createClient(); const { data, error } = await supabase.auth.signInWithPassword({ @@ -26,7 +25,7 @@ export const signInWithEmailAndPassword = async ({ }); if (error) { - return { 
error: error.message }; + throw error; } revalidatePath('/', 'layout'); @@ -50,7 +49,7 @@ export const signInWithGoogle = async () => { }); if (error) { - return { error: error.message }; + throw error; } revalidatePath('/', 'layout'); @@ -70,7 +69,7 @@ export const signInWithGithub = async () => { }); if (error) { - return { error: error.message }; + throw error; } revalidatePath('/', 'layout'); @@ -91,7 +90,7 @@ export const signInWithAzure = async () => { }); if (error) { - return { error: error.message }; + throw error; } revalidatePath('/', 'layout'); return redirect(data.url); @@ -99,12 +98,15 @@ export const signInWithAzure = async () => { export const signUp = async ({ email, password }: { email: string; password: string }) => { 'use server'; - + console.log('signUp', email, password); const supabase = await createClient(); + console.log('supabase', supabase); const authURL = createBusterRoute({ route: BusterRoutes.AUTH_CONFIRM }); + console.log('authURL', authURL); const authURLFull = `${process.env.NEXT_PUBLIC_URL}${authURL}`; + console.log('authURLFull', authURLFull); const { error } = await supabase.auth.signUp({ email, @@ -113,9 +115,9 @@ export const signUp = async ({ email, password }: { email: string; password: str emailRedirectTo: authURLFull } }); - + console.log('error', error); if (error) { - return { error: error.message }; + throw error; } revalidatePath('/', 'layout');