diff --git a/.claude/skills/ambient-pr-test/SKILL.md b/.claude/skills/ambient-pr-test/SKILL.md index b42eeacc6..0847da7b4 100644 --- a/.claude/skills/ambient-pr-test/SKILL.md +++ b/.claude/skills/ambient-pr-test/SKILL.md @@ -16,7 +16,6 @@ with .claude/skills/ambient-pr-test https://github.com/ambient-code/platform/pu ``` Optional modifiers the user may specify: -- **`--force-build`** — rebuild and push images even if CI already pushed them - **`--keep-alive`** — do not tear down after the workflow; leave the instance online for human access - **`provision-only`** / **`deploy-only`** / **`teardown-only`** — run a single phase instead of the full workflow @@ -56,7 +55,7 @@ This cluster's tenant operator does not emit `Ready` conditions on `TenantNamesp ## Full Workflow ``` -0. Build: skip if CI pushed images (or --force-build to always rebuild) +0. Build: always run build.sh to build and push images tagged pr- 1. Derive instance-id from PR number 2. Provision: bash components/pr-test/provision.sh create 3. Deploy: bash components/pr-test/install.sh @@ -70,21 +69,15 @@ Phases can be run individually — see **Individual Phases** below. ## Step 0: Build and Push Images -Check CI first: -```bash -gh run list --repo ambient-code/platform \ - --workflow "Build and Push Component Docker Images" \ - --branch --limit 1 -``` - -**Skip** if the latest run shows `completed / success`. Otherwise build: +Always run `build.sh` — CI may skip builds when no component source files changed (e.g. sync/merge branches), so never rely on CI to have pushed images: ```bash bash components/pr-test/build.sh https://github.com/ambient-code/platform/pull/1005 ``` -**`--force-build`**: skip the CI check and always run `build.sh` regardless. Use when: -- Images exist but were built from a different commit (e.g. 
after a force-push) -- CI built images but from a stale SHA +This builds and pushes 3 images tagged `pr-`: +- `quay.io/ambient_code/vteam_api_server:pr-` +- `quay.io/ambient_code/vteam_control_plane:pr-` +- `quay.io/ambient_code/vteam_claude_runner:pr-` Builds 3 images: `vteam_api_server`, `vteam_control_plane`, `vteam_claude_runner`. diff --git a/.github/workflows/ci-failure-resolver-with-agent.yml b/.github/workflows/ci-failure-resolver-with-agent.yml index 5166d1807..fdda8e660 100644 --- a/.github/workflows/ci-failure-resolver-with-agent.yml +++ b/.github/workflows/ci-failure-resolver-with-agent.yml @@ -89,7 +89,7 @@ jobs: - name: Checkout & label if: steps.analyze.outputs.action == 'fix' - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: ref: refs/pull/${{ matrix.pr }}/head fetch-depth: 0 diff --git a/.github/workflows/components-build-deploy.yml b/.github/workflows/components-build-deploy.yml index 5acd360d2..bbfc764bb 100644 --- a/.github/workflows/components-build-deploy.yml +++ b/.github/workflows/components-build-deploy.yml @@ -2,7 +2,7 @@ name: Build and Push Component Docker Images on: push: - branches: [main] + branches: [main, alpha] paths: - '.github/workflows/components-build-deploy.yml' - 'components/manifests/**' @@ -15,7 +15,7 @@ on: - 'components/ambient-control-plane/**' - 'components/ambient-mcp/**' pull_request: - branches: [main] + branches: [main, alpha] paths: - '.github/workflows/components-build-deploy.yml' - 'components/manifests/**' @@ -29,11 +29,6 @@ on: - 'components/ambient-mcp/**' workflow_dispatch: inputs: - force_build_all: - description: 'Force rebuild all components' - required: false - type: boolean - default: false components: description: 'Components to build (comma-separated: frontend,backend,operator,ambient-runner,state-sync,public-api,ambient-api-server,ambient-control-plane,ambient-mcp) - leave empty for all' required: false @@ -107,29 +102,9 @@ jobs: 
{"name":"ambient-mcp","context":"./components/ambient-mcp","image":"quay.io/ambient_code/vteam_mcp","dockerfile":"./components/ambient-mcp/Dockerfile"}
           ]'
-          FORCE_ALL="${{ github.event.inputs.force_build_all }}"
           SELECTED="${{ github.event.inputs.components }}"
-          EVENT="${{ github.event_name }}"
-
-          # Map component names to paths-filter output names
-          # (ambient-runner uses claude-runner filter)
-          declare -A FILTER_MAP=(
-            [frontend]="${{ steps.filter.outputs.frontend }}"
-            [backend]="${{ steps.filter.outputs.backend }}"
-            [operator]="${{ steps.filter.outputs.operator }}"
-            [ambient-runner]="${{ steps.filter.outputs.claude-runner }}"
-            [state-sync]="${{ steps.filter.outputs.state-sync }}"
-            [public-api]="${{ steps.filter.outputs.public-api }}"
-            [ambient-api-server]="${{ steps.filter.outputs.ambient-api-server }}"
-          )
-
-          if [ "$FORCE_ALL" == "true" ]; then
-            # Force build all
-            FILTERED="$ALL_COMPONENTS"
-          elif [ "$EVENT" == "workflow_dispatch" ] && [ -z "$SELECTED" ] && [ "$FORCE_ALL" != "true" ]; then
-            # Dispatch with no selection and no force — build all
-            FILTERED="$ALL_COMPONENTS"
-          elif [ -n "$SELECTED" ]; then
+          EVENT="${{ github.event_name }}"
+          if [ -n "$SELECTED" ]; then
             # Dispatch with specific components
             FILTERED=$(echo "$ALL_COMPONENTS" | jq -c --arg sel "$SELECTED" '[.[] | select(.name as $n | $sel | split(",") | map(gsub("^\\s+|\\s+$";"")) | index($n))]')
           elif [ "$EVENT" == "pull_request" ]; then
@@ -215,7 +190,7 @@ jobs:
         cache-from: type=gha,scope=${{ matrix.component.name }}-${{ matrix.arch.suffix }}
         cache-to: type=gha,mode=max,scope=${{ matrix.component.name }}-${{ matrix.arch.suffix }}
 
-      - name: Build ${{ matrix.component.name }} (${{ matrix.arch.suffix }}) for pull request
+      - name: Build and push ${{ matrix.component.name }} (${{ matrix.arch.suffix }}) for pull request
         if: github.event_name == 'pull_request'
         uses: docker/build-push-action@v7
         with:
@@ -228,10 +203,11 @@ jobs:
           ${{ matrix.component.image }}:pr-${{ github.event.pull_request.number }}-${{ github.sha }}-${{ matrix.arch.suffix }}
         build-args: AMBIENT_VERSION=${{ github.sha }}
           cache-from: type=gha,scope=${{ matrix.component.name }}-${{ matrix.arch.suffix }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.component.name }}-${{ matrix.arch.suffix }}
 
 merge-manifests:
   needs: [detect-changes, build]
-  if: github.event_name != 'pull_request' && needs.detect-changes.outputs.has-builds == 'true'
+  if: needs.detect-changes.outputs.has-builds == 'true'
   runs-on: ubuntu-latest
   permissions:
     contents: read
@@ -251,7 +227,8 @@ jobs:
           username: ${{ secrets.QUAY_USERNAME }}
           password: ${{ secrets.QUAY_PASSWORD }}
 
-      - name: Create multi-arch manifest for ${{ matrix.component.name }}
+      - name: Create multi-arch manifest for ${{ matrix.component.name }} (main)
+        if: github.event_name != 'pull_request' && github.ref == 'refs/heads/main'
         # Suffixes (-amd64, -arm64) must match the arch matrix in the build job above.
         # Arch-suffixed tags remain in the registry after merging. Clean these up
         # via Quay tag expiration policies or a periodic job.
@@ -263,6 +240,23 @@ jobs: ${{ matrix.component.image }}:${{ github.sha }}-amd64 \ ${{ matrix.component.image }}:${{ github.sha }}-arm64 + - name: Create multi-arch manifest for ${{ matrix.component.name }} (alpha) + if: github.event_name != 'pull_request' && github.ref == 'refs/heads/alpha' + run: | + docker buildx imagetools create \ + -t ${{ matrix.component.image }}:alpha \ + -t ${{ matrix.component.image }}:${{ github.sha }} \ + ${{ matrix.component.image }}:${{ github.sha }}-amd64 \ + ${{ matrix.component.image }}:${{ github.sha }}-arm64 + + - name: Create multi-arch manifest for ${{ matrix.component.name }} (pull request) + if: github.event_name == 'pull_request' + run: | + docker buildx imagetools create \ + -t ${{ matrix.component.image }}:pr-${{ github.event.pull_request.number }} \ + ${{ matrix.component.image }}:pr-${{ github.event.pull_request.number }}-amd64 \ + ${{ matrix.component.image }}:pr-${{ github.event.pull_request.number }}-arm64 + update-rbac-and-crd: runs-on: ubuntu-latest needs: [detect-changes, merge-manifests] diff --git a/.github/workflows/feedback-loop.yml b/.github/workflows/feedback-loop.yml index 54d3381fb..42487d927 100644 --- a/.github/workflows/feedback-loop.yml +++ b/.github/workflows/feedback-loop.yml @@ -36,7 +36,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Python uses: actions/setup-python@v6 diff --git a/.github/workflows/sync-alpha-from-main.yml b/.github/workflows/sync-alpha-from-main.yml new file mode 100644 index 000000000..545545769 --- /dev/null +++ b/.github/workflows/sync-alpha-from-main.yml @@ -0,0 +1,199 @@ +name: Sync Alpha from Main + +on: + push: + branches: [main] + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +concurrency: + group: sync-alpha-from-main + cancel-in-progress: false + +jobs: + sync: + name: Rebase main into alpha + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: Checkout 
repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Check if alpha is already up to date + id: check + run: | + MAIN_SHA="$(git rev-parse origin/main)" + ALPHA_SHA="$(git rev-parse origin/alpha)" + COMMIT_COUNT="$(git rev-list --count "${ALPHA_SHA}..${MAIN_SHA}")" + + echo "main_sha=${MAIN_SHA}" >> "$GITHUB_OUTPUT" + echo "alpha_sha=${ALPHA_SHA}" >> "$GITHUB_OUTPUT" + echo "commit_count=${COMMIT_COUNT}" >> "$GITHUB_OUTPUT" + + if [ "${COMMIT_COUNT}" -eq 0 ]; then + echo "needs_sync=false" >> "$GITHUB_OUTPUT" + echo "alpha is already up to date with main" + else + echo "needs_sync=true" >> "$GITHUB_OUTPUT" + echo "Commits in main not in alpha: ${COMMIT_COUNT}" + fi + + - name: Check for existing open sync PR + if: steps.check.outputs.needs_sync == 'true' + id: existing_pr + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + EXISTING=$(gh pr list \ + --base alpha \ + --state open \ + --json headRefName \ + --jq '[.[] | select(.headRefName | startswith("chore/sync-alpha-from-main-"))] | length') + + if [ "${EXISTING}" -gt 0 ]; then + echo "Open sync PR already exists — skipping" + echo "pr_exists=true" >> "$GITHUB_OUTPUT" + else + echo "pr_exists=false" >> "$GITHUB_OUTPUT" + fi + + - name: Create work branch off alpha + if: steps.check.outputs.needs_sync == 'true' && steps.existing_pr.outputs.pr_exists == 'false' + id: branch + run: | + TIMESTAMP="$(date +%Y%m%d-%H%M%S)" + WORK_BRANCH="chore/sync-alpha-from-main-${TIMESTAMP}" + echo "work_branch=${WORK_BRANCH}" >> "$GITHUB_OUTPUT" + + git checkout -b "${WORK_BRANCH}" origin/alpha + echo "Created ${WORK_BRANCH} from origin/alpha" + + - name: Attempt rebase of main onto work branch + if: steps.check.outputs.needs_sync == 'true' && 
steps.existing_pr.outputs.pr_exists == 'false'
+        id: rebase
+        env:
+          WORK_BRANCH: ${{ steps.branch.outputs.work_branch }}
+          MAIN_SHA: ${{ steps.check.outputs.main_sha }}
+          ALPHA_SHA: ${{ steps.check.outputs.alpha_sha }}
+        run: |
+          MERGE_BASE="$(git merge-base "${ALPHA_SHA}" "${MAIN_SHA}")"
+
+          git rebase --onto "${WORK_BRANCH}" "${MERGE_BASE}" origin/main && {
+            echo "rebase_clean=true" >> "$GITHUB_OUTPUT"
+            git checkout -B "${WORK_BRANCH}"
+            echo "Rebase completed cleanly"
+          } || {
+            echo "rebase_clean=false" >> "$GITHUB_OUTPUT"
+            git rebase --abort 2>/dev/null || true
+            git checkout "${WORK_BRANCH}"
+            echo "Rebase had conflicts — falling back to merge"
+            MERGE_MSG=$(cat <<'MSG'
+chore: merge main into alpha (conflict resolution required)
+
+Automated merge of origin/main into origin/alpha.
+Rebase encountered conflicts; falling back to merge.
+A human must resolve conflict markers before merging this PR.
+MSG
+)
+          git merge --no-ff --allow-unrelated-histories origin/main -m "${MERGE_MSG}" || {
+            git add -A
+            CONFLICT_MSG=$(cat <<'MSG'
+chore: best-effort merge main into alpha (conflicts present)
+
+Automated merge of origin/main into origin/alpha.
+Both rebase and merge encountered conflicts. Conflict markers
+are present and must be resolved before this PR can be merged.
+MSG +) + git commit --no-verify -m "${CONFLICT_MSG}" + } + } + + - name: Push work branch + if: steps.check.outputs.needs_sync == 'true' && steps.existing_pr.outputs.pr_exists == 'false' + env: + WORK_BRANCH: ${{ steps.branch.outputs.work_branch }} + run: | + git push origin "${WORK_BRANCH}" + + - name: Open PR against alpha + if: steps.check.outputs.needs_sync == 'true' && steps.existing_pr.outputs.pr_exists == 'false' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORK_BRANCH: ${{ steps.branch.outputs.work_branch }} + COMMIT_COUNT: ${{ steps.check.outputs.commit_count }} + REBASE_CLEAN: ${{ steps.rebase.outputs.rebase_clean }} + MAIN_SHA: ${{ steps.check.outputs.main_sha }} + ALPHA_SHA: ${{ steps.check.outputs.alpha_sha }} + run: | + if [ "${REBASE_CLEAN}" = "true" ]; then + CONFLICT_NOTE="Rebase completed cleanly — no conflicts detected. This PR can be merged directly." + else + CONFLICT_NOTE="⚠️ **Conflicts detected.** Rebase fell back to merge. Search for \`<<<<<<<\` conflict markers and resolve before merging." + fi + + gh pr create \ + --base alpha \ + --head "${WORK_BRANCH}" \ + --title "chore: sync alpha from main ($(date +%Y-%m-%d))" \ + --body "## Summary + +Automated sync of \`main\` into \`alpha\` triggered by push to \`main\`. + +| | | +|---|---| +| Commits synced | ${COMMIT_COUNT} | +| origin/main | \`${MAIN_SHA:0:8}\` | +| origin/alpha | \`${ALPHA_SHA:0:8}\` | + +## Status + +${CONFLICT_NOTE} + +## Review Instructions + +1. Check for conflict markers (\`<<<<<<<\`) in changed files. +2. Resolve any conflicts and push to this branch. +3. Verify the build passes. +4. Merge into \`alpha\`. 
+ +--- +*Auto-generated by \`.github/workflows/sync-alpha-from-main.yml\`*" + + - name: Summary + if: always() + env: + NEEDS_SYNC: ${{ steps.check.outputs.needs_sync }} + PR_EXISTS: ${{ steps.existing_pr.outputs.pr_exists || 'false' }} + COMMIT_COUNT: ${{ steps.check.outputs.commit_count || '0' }} + REBASE_CLEAN: ${{ steps.rebase.outputs.rebase_clean || 'n/a' }} + JOB_STATUS: ${{ job.status }} + run: | + if [ "${NEEDS_SYNC}" = "false" ]; then + echo "## ✅ Already in sync" >> "$GITHUB_STEP_SUMMARY" + echo "alpha is up to date with main — nothing to do." >> "$GITHUB_STEP_SUMMARY" + elif [ "${PR_EXISTS}" = "true" ]; then + echo "## ℹ️ Sync PR already open" >> "$GITHUB_STEP_SUMMARY" + echo "An open sync PR already exists against alpha — skipped." >> "$GITHUB_STEP_SUMMARY" + elif [ "${JOB_STATUS}" = "failure" ]; then + echo "## ❌ Sync failed" >> "$GITHUB_STEP_SUMMARY" + echo "Check the logs above for details." >> "$GITHUB_STEP_SUMMARY" + elif [ "${REBASE_CLEAN}" = "true" ]; then + echo "## ✅ PR opened — clean rebase" >> "$GITHUB_STEP_SUMMARY" + echo "${COMMIT_COUNT} commits synced from main to alpha with no conflicts." >> "$GITHUB_STEP_SUMMARY" + else + echo "## ⚠️ PR opened — conflicts require resolution" >> "$GITHUB_STEP_SUMMARY" + echo "${COMMIT_COUNT} commits from main; rebase had conflicts. PR opened for human resolution." 
>> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.gitignore b/.gitignore index 408320472..02a896e87 100644 --- a/.gitignore +++ b/.gitignore @@ -79,6 +79,8 @@ Thumbs.db # IDE / AI assistant configuration .cursor/ .tessl/ +.idea/ + # mypy .mypy_cache/ diff --git a/Makefile b/Makefile index ccb6351d9..44c9f4513 100644 --- a/Makefile +++ b/Makefile @@ -1,14 +1,14 @@ .PHONY: help setup build-all build-frontend build-backend build-operator build-runner build-state-sync build-public-api build-cli deploy clean check-architecture -.PHONY: local-up local-down local-clean local-status local-rebuild local-reload-backend local-reload-frontend local-reload-operator local-reload-api-server +.PHONY: local-down local-status local-reload-api-server local-up local-clean local-rebuild local-reload-backend local-reload-frontend local-reload-operator .PHONY: local-dev-token .PHONY: local-logs local-logs-backend local-logs-frontend local-logs-operator local-shell local-shell-frontend -.PHONY: local-test local-test-dev local-test-quick test-all local-url local-troubleshoot local-port-forward local-stop-port-forward -.PHONY: push-all registry-login setup-hooks remove-hooks lint check-minikube check-kind check-kubectl check-local-context dev-bootstrap kind-rebuild kind-status kind-login +.PHONY: local-test local-test-dev local-test-quick test-all local-troubleshoot local-port-forward local-stop-port-forward +.PHONY: push-all registry-login setup-hooks remove-hooks lint check-kind check-kubectl check-local-context dev-bootstrap kind-rebuild kind-reload-backend kind-reload-frontend kind-reload-operator kind-status kind-login .PHONY: e2e-test e2e-setup e2e-clean deploy-langfuse-openshift .PHONY: unleash-port-forward unleash-status .PHONY: setup-minio minio-console minio-logs minio-status .PHONY: validate-makefile lint-makefile check-shell makefile-health -.PHONY: _create-operator-config _auto-port-forward _show-access-info _build-and-load _kind-load-images +.PHONY: _create-operator-config 
_auto-port-forward _show-access-info _kind-load-images # Default target .DEFAULT_GOAL := help @@ -131,7 +131,7 @@ help: ## Display this help message @echo '$(COLOR_BOLD)Ambient Code Platform - Development Makefile$(COLOR_RESET)' @echo '' @echo '$(COLOR_BOLD)Quick Start:$(COLOR_RESET)' - @echo ' $(COLOR_GREEN)make local-up$(COLOR_RESET) Start local development environment' + @echo ' $(COLOR_GREEN)make kind-up$(COLOR_RESET) Start local development environment' @echo ' $(COLOR_GREEN)make local-status$(COLOR_RESET) Check status of local environment' @echo ' $(COLOR_GREEN)make local-logs$(COLOR_RESET) View logs from all components' @echo ' $(COLOR_GREEN)make local-down$(COLOR_RESET) Stop local environment' @@ -156,8 +156,8 @@ help: ## Display this help message @echo ' make kind-up LOCAL_IMAGES=true Build from source and deploy to kind (requires podman)' @echo ' make kind-rebuild Rebuild and reload all components in kind' @echo ' make kind-status Show all kind clusters and their ports' - @echo ' make local-up CONTAINER_ENGINE=docker' - @echo ' make local-reload-backend' + @echo ' make kind-up CONTAINER_ENGINE=docker' + @echo ' make kind-rebuild' @echo ' make build-all PLATFORM=linux/arm64' ##@ Building @@ -318,69 +318,14 @@ grafana-dashboard: ## Open Grafana (create route first) @echo " URL: https://$$(oc get route grafana -n $(NAMESPACE) -o jsonpath='{.spec.host}')" @echo " Login: admin/admin" -##@ Local Development (Minikube) +##@ Local Development -local-up: check-minikube check-kubectl ## Start local development environment (minikube) - @echo "$(COLOR_BOLD)🚀 Starting Ambient Code Platform Local Environment$(COLOR_RESET)" - @echo "" - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 1/8: Starting minikube..." 
- @if [ "$(CONTAINER_ENGINE)" = "docker" ]; then \ - minikube start --driver=docker --memory=4096 --cpus=2 $(QUIET_REDIRECT) || \ - (minikube status >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) Minikube already running") || \ - (echo "$(COLOR_RED)✗$(COLOR_RESET) Failed to start minikube" && exit 1); \ - else \ - minikube start --driver=podman --memory=4096 --cpus=2 --kubernetes-version=v1.35.0 --container-runtime=cri-o $(QUIET_REDIRECT) || \ - (minikube status >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) Minikube already running") || \ - (echo "$(COLOR_RED)✗$(COLOR_RESET) Failed to start minikube" && exit 1); \ - fi - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 2/8: Enabling addons..." - @minikube addons enable ingress $(QUIET_REDIRECT) || true - @minikube addons enable storage-provisioner $(QUIET_REDIRECT) || true - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 3/8: Building images..." - @$(MAKE) --no-print-directory _build-and-load - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 4/8: Creating namespace..." - @kubectl create namespace $(NAMESPACE) --dry-run=client -o yaml | kubectl apply -f - $(QUIET_REDIRECT) - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 5/8: Applying CRDs and RBAC..." - @kubectl apply -f components/manifests/base/crds/ $(QUIET_REDIRECT) || true - @kubectl apply -f components/manifests/base/rbac/ $(QUIET_REDIRECT) || true - @kubectl apply -f components/manifests/minikube/local-dev-rbac.yaml $(QUIET_REDIRECT) || true - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 6/8: Creating storage..." - @kubectl apply -f components/manifests/base/workspace-pvc.yaml -n $(NAMESPACE) $(QUIET_REDIRECT) || true - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 6.5/8: Configuring operator..." - @$(MAKE) --no-print-directory _create-operator-config - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 7/8: Deploying services..." 
- @kubectl apply -f components/manifests/minikube/backend-deployment.yaml $(QUIET_REDIRECT) - @kubectl apply -f components/manifests/minikube/backend-service.yaml $(QUIET_REDIRECT) - @kubectl apply -f components/manifests/minikube/frontend-deployment.yaml $(QUIET_REDIRECT) - @kubectl apply -f components/manifests/minikube/frontend-service.yaml $(QUIET_REDIRECT) - @kubectl apply -f components/manifests/minikube/operator-deployment.yaml $(QUIET_REDIRECT) - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Step 8/8: Setting up ingress..." - @kubectl wait --namespace ingress-nginx --for=condition=ready pod \ - --selector=app.kubernetes.io/component=controller --timeout=90s >/dev/null 2>&1 || true - @kubectl apply -f components/manifests/minikube/ingress.yaml $(QUIET_REDIRECT) || true - @echo "" - @echo "$(COLOR_GREEN)✓ Ambient Code Platform is starting up!$(COLOR_RESET)" - @echo "" - @$(MAKE) --no-print-directory _show-access-info - @$(MAKE) --no-print-directory _auto-port-forward - @echo "" - @echo "$(COLOR_YELLOW)⚠ Next steps:$(COLOR_RESET)" - @echo " • Wait ~30s for pods to be ready" - @echo " • Run: $(COLOR_BOLD)make local-status$(COLOR_RESET) to check deployment" - @echo " • Run: $(COLOR_BOLD)make local-logs$(COLOR_RESET) to view logs" - -local-down: check-kubectl check-local-context ## Stop Ambient Code Platform (keep minikube running) +local-down: check-kubectl check-local-context ## Stop Ambient Code Platform (keep cluster running) @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Stopping Ambient Code Platform..." @$(MAKE) --no-print-directory local-stop-port-forward @kubectl delete namespace $(NAMESPACE) --ignore-not-found=true --timeout=60s - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Ambient Code Platform stopped (minikube still running)" - @echo " To stop minikube: $(COLOR_BOLD)make local-clean$(COLOR_RESET)" - -local-clean: check-minikube ## Delete minikube cluster completely - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Deleting minikube cluster..." 
- @$(MAKE) --no-print-directory local-stop-port-forward - @minikube delete - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Minikube cluster deleted" + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Ambient Code Platform stopped (cluster still running)" + @echo " To delete kind cluster: $(COLOR_BOLD)make kind-down$(COLOR_RESET)" local-status: check-kubectl ## Show status of local deployment @echo "$(COLOR_BOLD)📊 Ambient Code Platform Status$(COLOR_RESET)" @@ -388,11 +333,8 @@ local-status: check-kubectl ## Show status of local deployment @if $(if $(filter podman,$(CONTAINER_ENGINE)),KIND_EXPERIMENTAL_PROVIDER=podman) kind get clusters 2>/dev/null | grep -q '^$(KIND_CLUSTER_NAME)$$'; then \ echo "$(COLOR_BOLD)Kind:$(COLOR_RESET)"; \ echo "$(COLOR_GREEN)✓$(COLOR_RESET) Cluster '$(KIND_CLUSTER_NAME)' running"; \ - elif command -v minikube >/dev/null 2>&1; then \ - echo "$(COLOR_BOLD)Minikube:$(COLOR_RESET)"; \ - minikube status 2>/dev/null || echo "$(COLOR_RED)✗$(COLOR_RESET) Minikube not running"; \ else \ - echo "$(COLOR_RED)✗$(COLOR_RESET) No local cluster found (kind or minikube)"; \ + echo "$(COLOR_RED)✗$(COLOR_RESET) No kind cluster found. 
Run 'make kind-up' first."; \ fi @echo "" @echo "$(COLOR_BOLD)Pods:$(COLOR_RESET)" @@ -406,76 +348,8 @@ local-status: check-kubectl ## Show status of local deployment echo " Run in another terminal: $(COLOR_BLUE)make kind-port-forward$(COLOR_RESET)"; \ echo " Frontend: $(COLOR_BLUE)http://localhost:$(KIND_FWD_FRONTEND_PORT)$(COLOR_RESET)"; \ echo " Backend: $(COLOR_BLUE)http://localhost:$(KIND_FWD_BACKEND_PORT)$(COLOR_RESET)"; \ - else \ - $(MAKE) --no-print-directory _show-access-info; \ - fi - - -local-rebuild: check-local-context ## Rebuild and reload all components - @echo "$(COLOR_BOLD)🔄 Rebuilding all components...$(COLOR_RESET)" - @$(MAKE) --no-print-directory _build-and-load - @$(MAKE) --no-print-directory _restart-all - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) All components rebuilt and reloaded" - -local-reload-backend: check-local-context ## Rebuild and reload backend only - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding backend..." - @cd components/backend && $(CONTAINER_ENGINE) build --build-arg AMBIENT_VERSION=$(shell git describe --tags --always --dirty) -t $(BACKEND_IMAGE) . >/dev/null 2>&1 - @$(CONTAINER_ENGINE) tag $(BACKEND_IMAGE) localhost/$(BACKEND_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) save -o /tmp/backend-reload.tar localhost/$(BACKEND_IMAGE) - @minikube image load /tmp/backend-reload.tar >/dev/null 2>&1 - @rm -f /tmp/backend-reload.tar - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting backend..." 
- @kubectl rollout restart deployment/backend-api -n $(NAMESPACE) >/dev/null 2>&1 - @kubectl rollout status deployment/backend-api -n $(NAMESPACE) --timeout=60s - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Backend reloaded" - @OS=$$(uname -s); \ - if [ "$$OS" = "Darwin" ] && [ "$(CONTAINER_ENGINE)" = "podman" ]; then \ - echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting backend port forward..."; \ - if [ -f /tmp/ambient-code/port-forward-backend.pid ]; then \ - kill $$(cat /tmp/ambient-code/port-forward-backend.pid) 2>/dev/null || true; \ - fi; \ - kubectl port-forward -n $(NAMESPACE) svc/backend-service 8080:8080 > /tmp/ambient-code/port-forward-backend.log 2>&1 & \ - echo $$! > /tmp/ambient-code/port-forward-backend.pid; \ - sleep 2; \ - echo "$(COLOR_GREEN)✓$(COLOR_RESET) Backend port forward restarted"; \ fi -local-reload-frontend: check-local-context ## Rebuild and reload frontend only - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding frontend..." - @cd components/frontend && $(CONTAINER_ENGINE) build -t $(FRONTEND_IMAGE) . >/dev/null 2>&1 - @$(CONTAINER_ENGINE) tag $(FRONTEND_IMAGE) localhost/$(FRONTEND_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) save -o /tmp/frontend-reload.tar localhost/$(FRONTEND_IMAGE) - @minikube image load /tmp/frontend-reload.tar >/dev/null 2>&1 - @rm -f /tmp/frontend-reload.tar - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting frontend..." 
- @kubectl rollout restart deployment/frontend -n $(NAMESPACE) >/dev/null 2>&1 - @kubectl rollout status deployment/frontend -n $(NAMESPACE) --timeout=60s - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Frontend reloaded" - @OS=$$(uname -s); \ - if [ "$$OS" = "Darwin" ] && [ "$(CONTAINER_ENGINE)" = "podman" ]; then \ - echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting frontend port forward..."; \ - if [ -f /tmp/ambient-code/port-forward-frontend.pid ]; then \ - kill $$(cat /tmp/ambient-code/port-forward-frontend.pid) 2>/dev/null || true; \ - fi; \ - kubectl port-forward -n $(NAMESPACE) svc/frontend-service 3000:3000 > /tmp/ambient-code/port-forward-frontend.log 2>&1 & \ - echo $$! > /tmp/ambient-code/port-forward-frontend.pid; \ - sleep 2; \ - echo "$(COLOR_GREEN)✓$(COLOR_RESET) Frontend port forward restarted"; \ - fi - - -local-reload-operator: check-local-context ## Rebuild and reload operator only - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding operator..." - @cd components/operator && $(CONTAINER_ENGINE) build -t $(OPERATOR_IMAGE) . >/dev/null 2>&1 - @$(CONTAINER_ENGINE) tag $(OPERATOR_IMAGE) localhost/$(OPERATOR_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) save -o /tmp/operator-reload.tar localhost/$(OPERATOR_IMAGE) - @minikube image load /tmp/operator-reload.tar >/dev/null 2>&1 - @rm -f /tmp/operator-reload.tar - @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting operator..." - @kubectl rollout restart deployment/agentic-operator -n $(NAMESPACE) >/dev/null 2>&1 - @kubectl rollout status deployment/agentic-operator -n $(NAMESPACE) --timeout=60s - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Operator reloaded" - local-reload-api-server: check-local-context ## Rebuild and reload ambient-api-server only @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding ambient-api-server..." 
@$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) -t $(API_SERVER_IMAGE) components/ambient-api-server >/dev/null 2>&1 @@ -549,11 +423,11 @@ check-shell: ## Validate shell scripts with shellcheck (if available) echo " Install with: brew install shellcheck (macOS) or apt-get install shellcheck (Linux)"; \ fi -makefile-health: check-minikube check-kubectl ## Run comprehensive Makefile health check +makefile-health: check-kind check-kubectl ## Run comprehensive Makefile health check @echo "$(COLOR_BOLD)🏥 Makefile Health Check$(COLOR_RESET)" @echo "" @echo "$(COLOR_BOLD)Prerequisites:$(COLOR_RESET)" - @minikube version >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) minikube available" || echo "$(COLOR_RED)✗$(COLOR_RESET) minikube missing" + @kind version >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) kind available" || echo "$(COLOR_RED)✗$(COLOR_RESET) kind missing" @kubectl version --client >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) kubectl available" || echo "$(COLOR_RED)✗$(COLOR_RESET) kubectl missing" @command -v $(CONTAINER_ENGINE) >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) $(CONTAINER_ENGINE) available" || echo "$(COLOR_RED)✗$(COLOR_RESET) $(CONTAINER_ENGINE) missing" @echo "" @@ -570,18 +444,14 @@ local-test-dev: ## Run local developer experience tests @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Running local developer experience tests..." @./tests/local-dev-test.sh $(if $(filter true,$(CI_MODE)),--ci,) -local-test-quick: check-kubectl ## Quick smoke test of local environment (kind or minikube) +local-test-quick: check-kubectl ## Quick smoke test of local environment @echo "$(COLOR_BOLD)🧪 Quick Smoke Test$(COLOR_RESET)" @echo "" @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Detecting cluster type..." 
@if kind get clusters 2>/dev/null | grep -q .; then \ echo "$(COLOR_GREEN)✓$(COLOR_RESET) Kind cluster running"; \ - CLUSTER_TYPE=kind; \ - elif command -v minikube >/dev/null 2>&1 && minikube status >/dev/null 2>&1; then \ - echo "$(COLOR_GREEN)✓$(COLOR_RESET) Minikube running"; \ - CLUSTER_TYPE=minikube; \ else \ - echo "$(COLOR_RED)✗$(COLOR_RESET) No local cluster found (kind or minikube)"; exit 1; \ + echo "$(COLOR_RED)✗$(COLOR_RESET) No kind cluster found. Run 'make kind-up' first."; exit 1; \ fi @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Testing namespace..." @kubectl get namespace $(NAMESPACE) >/dev/null 2>&1 && echo "$(COLOR_GREEN)✓$(COLOR_RESET) Namespace exists" || (echo "$(COLOR_RED)✗$(COLOR_RESET) Namespace missing" && exit 1) @@ -648,9 +518,6 @@ local-shell-frontend: check-kubectl ## Open shell in frontend pod local-test: local-test-quick ## Alias for local-test-quick (backward compatibility) -local-url: check-minikube ## Display access URLs - @$(MAKE) --no-print-directory _show-access-info - local-port-forward: check-kubectl ## Port-forward for direct access (8080→backend, 3000→frontend) @echo "$(COLOR_BOLD)🔌 Setting up port forwarding$(COLOR_RESET)" @echo "" @@ -891,6 +758,49 @@ kind-rebuild: check-kind check-kubectl check-local-context build-all ## Rebuild, @kubectl rollout status deployment -n $(NAMESPACE) --timeout=120s $(QUIET_REDIRECT) @echo "$(COLOR_GREEN)✓$(COLOR_RESET) All components rebuilt and restarted" +kind-reload-backend: check-kind check-kubectl check-local-context ## Rebuild and reload backend only (kind) + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding backend..." + @cd components/backend && $(CONTAINER_ENGINE) build $(PLATFORM_FLAG) \ + --build-arg AMBIENT_VERSION=$(shell git describe --tags --always --dirty) \ + -t $(BACKEND_IMAGE) . $(QUIET_REDIRECT) + @$(CONTAINER_ENGINE) tag $(BACKEND_IMAGE) localhost/$(BACKEND_IMAGE) 2>/dev/null || true + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Loading image into kind cluster ($(KIND_CLUSTER_NAME))..." 
+ @$(CONTAINER_ENGINE) save localhost/$(BACKEND_IMAGE) | \ + $(CONTAINER_ENGINE) exec -i $(KIND_CLUSTER_NAME)-control-plane \ + ctr --namespace=k8s.io images import - + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting backend..." + @kubectl rollout restart deployment/backend-api -n $(NAMESPACE) $(QUIET_REDIRECT) + @kubectl rollout status deployment/backend-api -n $(NAMESPACE) --timeout=60s + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Backend reloaded" + +kind-reload-frontend: check-kind check-kubectl check-local-context ## Rebuild and reload frontend only (kind) + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding frontend..." + @cd components/frontend && $(CONTAINER_ENGINE) build $(PLATFORM_FLAG) \ + -t $(FRONTEND_IMAGE) . $(QUIET_REDIRECT) + @$(CONTAINER_ENGINE) tag $(FRONTEND_IMAGE) localhost/$(FRONTEND_IMAGE) 2>/dev/null || true + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Loading image into kind cluster ($(KIND_CLUSTER_NAME))..." + @$(CONTAINER_ENGINE) save localhost/$(FRONTEND_IMAGE) | \ + $(CONTAINER_ENGINE) exec -i $(KIND_CLUSTER_NAME)-control-plane \ + ctr --namespace=k8s.io images import - + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting frontend..." + @kubectl rollout restart deployment/frontend -n $(NAMESPACE) $(QUIET_REDIRECT) + @kubectl rollout status deployment/frontend -n $(NAMESPACE) --timeout=60s + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Frontend reloaded" + +kind-reload-operator: check-kind check-kubectl check-local-context ## Rebuild and reload operator only (kind) + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Rebuilding operator..." + @cd components/operator && $(CONTAINER_ENGINE) build $(PLATFORM_FLAG) \ + -t $(OPERATOR_IMAGE) . $(QUIET_REDIRECT) + @$(CONTAINER_ENGINE) tag $(OPERATOR_IMAGE) localhost/$(OPERATOR_IMAGE) 2>/dev/null || true + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Loading image into kind cluster ($(KIND_CLUSTER_NAME))..." 
+ @$(CONTAINER_ENGINE) save localhost/$(OPERATOR_IMAGE) | \ + $(CONTAINER_ENGINE) exec -i $(KIND_CLUSTER_NAME)-control-plane \ + ctr --namespace=k8s.io images import - + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Restarting operator..." + @kubectl rollout restart deployment/agentic-operator -n $(NAMESPACE) $(QUIET_REDIRECT) + @kubectl rollout status deployment/agentic-operator -n $(NAMESPACE) --timeout=60s + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Operator reloaded" + kind-status: ## Show all kind clusters and their port assignments @echo "$(COLOR_BOLD)Kind Cluster Status$(COLOR_RESET)" @echo "" @@ -944,11 +854,37 @@ unleash-status: check-kubectl ## Show Unleash deployment status @kubectl get deployment,pod,svc -l 'app.kubernetes.io/name in (unleash,postgresql)' -n $${NAMESPACE:-ambient-code} 2>/dev/null || \ echo "$(COLOR_RED)✗$(COLOR_RESET) Unleash not found. Run 'make deploy' first." -##@ Internal Helpers (do not call directly) +##@ Deprecated Aliases +# These targets preserve backward compatibility with the old minikube-based +# workflow. Each prints a deprecation notice and delegates to the kind +# equivalent. A follow-up issue tracks updating docs that still reference +# the old names. + +local-up: ## Deprecated: use kind-up + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-up' instead." + @$(MAKE) --no-print-directory kind-up + +local-clean: ## Deprecated: use kind-down + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-down' instead." + @$(MAKE) --no-print-directory kind-down -check-minikube: ## Check if minikube is installed - @command -v minikube >/dev/null 2>&1 || \ - (echo "$(COLOR_RED)✗$(COLOR_RESET) minikube not found. Install: https://minikube.sigs.k8s.io/docs/start/" && exit 1) +local-rebuild: ## Deprecated: use kind-rebuild + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-rebuild' instead." 
+ @$(MAKE) --no-print-directory kind-rebuild + +local-reload-backend: ## Deprecated: use kind-reload-backend + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-reload-backend' instead." + @$(MAKE) --no-print-directory kind-reload-backend + +local-reload-frontend: ## Deprecated: use kind-reload-frontend + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-reload-frontend' instead." + @$(MAKE) --no-print-directory kind-reload-frontend + +local-reload-operator: ## Deprecated: use kind-reload-operator + @echo "$(COLOR_YELLOW)Warning:$(COLOR_RESET) '$@' is deprecated. Use 'make kind-reload-operator' instead." + @$(MAKE) --no-print-directory kind-reload-operator + +##@ Internal Helpers (do not call directly) check-kind: ## Check if kind is installed @command -v kind >/dev/null 2>&1 || \ @@ -958,14 +894,14 @@ check-kubectl: ## Check if kubectl is installed @command -v kubectl >/dev/null 2>&1 || \ (echo "$(COLOR_RED)✗$(COLOR_RESET) kubectl not found. 
Install: https://kubernetes.io/docs/tasks/tools/" && exit 1) -check-local-context: ## Verify kubectl context points to a local cluster (kind or minikube) +check-local-context: ## Verify kubectl context points to a local kind cluster ifneq ($(SKIP_CONTEXT_CHECK),true) @ctx=$$(kubectl config current-context 2>/dev/null || echo ""); \ - if echo "$$ctx" | grep -qE '^(kind-|minikube$$)'; then \ + if echo "$$ctx" | grep -qE '^kind-'; then \ : ; \ else \ echo "$(COLOR_RED)✗$(COLOR_RESET) Current kubectl context '$$ctx' does not look like a local cluster."; \ - echo " Expected a context starting with 'kind-' or named 'minikube'."; \ + echo " Expected a context starting with 'kind-'."; \ echo " Switch context first, e.g.: kubectl config use-context kind-ambient-local"; \ echo ""; \ echo " To bypass this check: make SKIP_CONTEXT_CHECK=true"; \ @@ -1004,38 +940,6 @@ _kind-load-images: ## Internal: Load images into kind cluster done @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Images loaded" -_build-and-load: ## Internal: Build and load images - @echo " Building backend ($(PLATFORM))..." - @$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) --build-arg AMBIENT_VERSION=$(shell git describe --tags --always --dirty) -t $(BACKEND_IMAGE) components/backend $(QUIET_REDIRECT) - @echo " Building frontend ($(PLATFORM))..." - @$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) -t $(FRONTEND_IMAGE) components/frontend $(QUIET_REDIRECT) - @echo " Building operator ($(PLATFORM))..." - @$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) -t $(OPERATOR_IMAGE) components/operator $(QUIET_REDIRECT) - @echo " Building runner ($(PLATFORM))..." - @$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) -t $(RUNNER_IMAGE) -f components/runners/ambient-runner/Dockerfile components/runners $(QUIET_REDIRECT) - @echo " Building api-server ($(PLATFORM))..." - @$(CONTAINER_ENGINE) build $(PLATFORM_FLAG) -t $(API_SERVER_IMAGE) components/ambient-api-server $(QUIET_REDIRECT) - @echo " Tagging images with localhost prefix..." 
- @$(CONTAINER_ENGINE) tag $(BACKEND_IMAGE) localhost/$(BACKEND_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) tag $(FRONTEND_IMAGE) localhost/$(FRONTEND_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) tag $(OPERATOR_IMAGE) localhost/$(OPERATOR_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) tag $(RUNNER_IMAGE) localhost/$(RUNNER_IMAGE) 2>/dev/null || true - @$(CONTAINER_ENGINE) tag $(API_SERVER_IMAGE) localhost/$(API_SERVER_IMAGE) 2>/dev/null || true - @echo " Loading images into minikube..." - @mkdir -p /tmp/minikube-images - @$(CONTAINER_ENGINE) save -o /tmp/minikube-images/backend.tar localhost/$(BACKEND_IMAGE) - @$(CONTAINER_ENGINE) save -o /tmp/minikube-images/frontend.tar localhost/$(FRONTEND_IMAGE) - @$(CONTAINER_ENGINE) save -o /tmp/minikube-images/operator.tar localhost/$(OPERATOR_IMAGE) - @$(CONTAINER_ENGINE) save -o /tmp/minikube-images/runner.tar localhost/$(RUNNER_IMAGE) - @$(CONTAINER_ENGINE) save -o /tmp/minikube-images/api-server.tar localhost/$(API_SERVER_IMAGE) - @minikube image load /tmp/minikube-images/backend.tar $(QUIET_REDIRECT) - @minikube image load /tmp/minikube-images/frontend.tar $(QUIET_REDIRECT) - @minikube image load /tmp/minikube-images/operator.tar $(QUIET_REDIRECT) - @minikube image load /tmp/minikube-images/runner.tar $(QUIET_REDIRECT) - @minikube image load /tmp/minikube-images/api-server.tar $(QUIET_REDIRECT) - @rm -rf /tmp/minikube-images - @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Images built and loaded" - _restart-all: ## Internal: Restart all deployments @kubectl rollout restart deployment -n $(NAMESPACE) >/dev/null 2>&1 @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Waiting for deployments to be ready..." 
@@ -1043,34 +947,16 @@ _restart-all: ## Internal: Restart all deployments _show-access-info: ## Internal: Show access information @echo "$(COLOR_BOLD)🌐 Access URLs:$(COLOR_RESET)" - @OS=$$(uname -s); \ - if [ "$$OS" = "Darwin" ] && [ "$(CONTAINER_ENGINE)" = "podman" ]; then \ - echo " $(COLOR_YELLOW)Note:$(COLOR_RESET) Port forwarding will start automatically"; \ - echo " Once pods are ready, access at:"; \ - echo " Frontend: $(COLOR_BLUE)http://localhost:3000$(COLOR_RESET)"; \ - echo " Backend: $(COLOR_BLUE)http://localhost:8080$(COLOR_RESET)"; \ - echo ""; \ - echo " $(COLOR_BOLD)To manage port forwarding:$(COLOR_RESET)"; \ - echo " Stop: $(COLOR_BOLD)make local-stop-port-forward$(COLOR_RESET)"; \ - echo " Restart: $(COLOR_BOLD)make local-port-forward$(COLOR_RESET)"; \ - else \ - MINIKUBE_IP=$$(minikube ip 2>/dev/null) && \ - echo " Frontend: $(COLOR_BLUE)http://$$MINIKUBE_IP:30030$(COLOR_RESET)" && \ - echo " Backend: $(COLOR_BLUE)http://$$MINIKUBE_IP:30080$(COLOR_RESET)" || \ - echo " $(COLOR_RED)✗$(COLOR_RESET) Cannot get minikube IP"; \ - echo ""; \ - echo "$(COLOR_BOLD)Alternative:$(COLOR_RESET) Port forward for localhost access"; \ - echo " Run: $(COLOR_BOLD)make local-port-forward$(COLOR_RESET)"; \ - echo " Then access:"; \ - echo " Frontend: $(COLOR_BLUE)http://localhost:3000$(COLOR_RESET)"; \ - echo " Backend: $(COLOR_BLUE)http://localhost:8080$(COLOR_RESET)"; \ - fi + @echo " Run: $(COLOR_BOLD)make kind-port-forward$(COLOR_RESET)" + @echo " Then access:" + @echo " Frontend: $(COLOR_BLUE)http://localhost:$(KIND_FWD_FRONTEND_PORT)$(COLOR_RESET)" + @echo " Backend: $(COLOR_BLUE)http://localhost:$(KIND_FWD_BACKEND_PORT)$(COLOR_RESET)" @echo "" @echo "$(COLOR_YELLOW)⚠ SECURITY NOTE:$(COLOR_RESET) Authentication is DISABLED for local development." 
local-dev-token: check-kubectl ## Print a TokenRequest token for local-dev-user (for local dev API calls) @kubectl get serviceaccount local-dev-user -n $(NAMESPACE) >/dev/null 2>&1 || \ - (echo "$(COLOR_RED)✗$(COLOR_RESET) local-dev-user ServiceAccount not found in namespace $(NAMESPACE). Run 'make local-up' first." && exit 1) + (echo "$(COLOR_RED)✗$(COLOR_RESET) local-dev-user ServiceAccount not found in namespace $(NAMESPACE). Run 'make kind-up' first." && exit 1) @TOKEN=$$(kubectl -n $(NAMESPACE) create token local-dev-user 2>/dev/null); \ if [ -z "$$TOKEN" ]; then \ echo "$(COLOR_RED)✗$(COLOR_RESET) Failed to mint token (kubectl create token). Ensure TokenRequest is supported and kubectl is v1.24+"; \ diff --git a/components/ambient-control-plane/internal/reconciler/kube_reconciler.go b/components/ambient-control-plane/internal/reconciler/kube_reconciler.go index 5caf299ba..0e472c18a 100644 --- a/components/ambient-control-plane/internal/reconciler/kube_reconciler.go +++ b/components/ambient-control-plane/internal/reconciler/kube_reconciler.go @@ -624,7 +624,7 @@ func (r *SimpleKubeReconciler) assembleInitialPrompt(ctx context.Context, sessio parts = append(parts, agent.Prompt) } - msgs, err := sdk.InboxMessages().List(ctx, session.ProjectID, session.AgentID, &types.ListOptions{Size: 100}) + msgs, err := sdk.InboxMessages().List(ctx, &types.ListOptions{Size: 100, Search: fmt.Sprintf("project_id = '%s' and agent_id = '%s'", session.ProjectID, session.AgentID)}) if err != nil { r.logger.Warn().Err(err).Str("agent_id", session.AgentID).Msg("assembleInitialPrompt: failed to fetch inbox messages") } else { diff --git a/components/ambient-sdk/ts-sdk/package-lock.json b/components/ambient-sdk/ts-sdk/package-lock.json index 799dc6cbd..cd260d5ff 100644 --- a/components/ambient-sdk/ts-sdk/package-lock.json +++ b/components/ambient-sdk/ts-sdk/package-lock.json @@ -1853,9 +1853,9 @@ "license": "ISC" }, "node_modules/handlebars": { - "version": "4.7.8", - "resolved": 
"https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", - "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "version": "4.7.9", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.9.tgz", + "integrity": "sha512-4E71E0rpOaQuJR2A3xDZ+GM1HyWYv1clR58tC8emQNeQe3RH7MAzSbat+V0wG78LQBo6m6bzSG/L4pBuCsgnUQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3130,9 +3130,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "dev": true, "license": "MIT", "engines": { diff --git a/components/backend/handlers/integration_validation.go b/components/backend/handlers/integration_validation.go index 7b034e75e..d66d406f9 100644 --- a/components/backend/handlers/integration_validation.go +++ b/components/backend/handlers/integration_validation.go @@ -2,13 +2,25 @@ package handlers import ( "context" + "errors" "fmt" "net/http" + "net/url" "time" "github.com/gin-gonic/gin" ) +// networkError extracts the inner error from a *url.Error, stripping the +// URL and HTTP method so they cannot leak into user-facing messages. 
+func networkError(err error) error { + var urlErr *url.Error + if errors.As(err, &urlErr) { + return urlErr.Err + } + return err +} + // ValidateGitHubToken checks if a GitHub token is valid by calling the GitHub API func ValidateGitHubToken(ctx context.Context, token string) (bool, error) { if token == "" { @@ -26,8 +38,7 @@ func ValidateGitHubToken(ctx context.Context, token string) (bool, error) { resp, err := client.Do(req) if err != nil { - // Don't wrap error - could leak token from request details - return false, fmt.Errorf("request failed") + return false, fmt.Errorf("request failed: %w", networkError(err)) } defer resp.Body.Close() @@ -56,8 +67,7 @@ func ValidateGitLabToken(ctx context.Context, token, instanceURL string) (bool, resp, err := client.Do(req) if err != nil { - // Don't wrap error - could leak token from request details - return false, fmt.Errorf("request failed") + return false, fmt.Errorf("request failed: %w", networkError(err)) } defer resp.Body.Close() @@ -81,6 +91,8 @@ func ValidateJiraToken(ctx context.Context, url, email, apiToken string) (bool, } var got401 bool + var lastNetErr error + var sawHTTPResponse bool for _, apiURL := range apiURLs { req, err := http.NewRequestWithContext(ctx, "GET", apiURL, nil) @@ -94,8 +106,10 @@ func ValidateJiraToken(ctx context.Context, url, email, apiToken string) (bool, resp, err := client.Do(req) if err != nil { + lastNetErr = networkError(err) continue } + sawHTTPResponse = true defer resp.Body.Close() // 200 = valid, 401 = invalid, 404 = wrong API version (try next) @@ -113,6 +127,11 @@ func ValidateJiraToken(ctx context.Context, url, email, apiToken string) (bool, return false, nil } + // If all attempts failed with network errors, surface the cause + if lastNetErr != nil && !sawHTTPResponse { + return false, fmt.Errorf("request failed: %w", lastNetErr) + } + // Couldn't validate - assume valid to avoid false negatives return true, nil } @@ -134,8 +153,7 @@ func ValidateGoogleToken(ctx 
context.Context, accessToken string) (bool, error) resp, err := client.Do(req) if err != nil { - // Don't wrap error - could leak token from request details - return false, fmt.Errorf("request failed") + return false, fmt.Errorf("request failed: %w", networkError(err)) } defer resp.Body.Close() diff --git a/components/backend/handlers/integration_validation_test.go b/components/backend/handlers/integration_validation_test.go new file mode 100644 index 000000000..88594e68a --- /dev/null +++ b/components/backend/handlers/integration_validation_test.go @@ -0,0 +1,132 @@ +//go:build test + +package handlers + +import ( + test_constants "ambient-code-backend/tests/constants" + "context" + "net" + "net/http" + "net/http/httptest" + "strings" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Integration Validation", Label(test_constants.LabelUnit, test_constants.LabelHandlers), func() { + // connRefusedURL returns a URL whose port is closed, triggering + // "connection refused" from the dialer. + connRefusedURL := func() string { + ln, err := net.Listen("tcp", "127.0.0.1:0") + Expect(err).NotTo(HaveOccurred()) + addr := ln.Addr().String() + ln.Close() // close immediately so the port is unreachable + return "http://" + addr + } + + Describe("networkError", func() { + It("strips the URL and method from a *url.Error", func() { + // Make a request to a closed port to produce a *url.Error. 
+ target := connRefusedURL() + client := &http.Client{} + req, err := http.NewRequestWithContext(context.Background(), "GET", target, nil) + Expect(err).NotTo(HaveOccurred()) + + _, doErr := client.Do(req) + Expect(doErr).To(HaveOccurred()) + + inner := networkError(doErr) + Expect(inner.Error()).NotTo(ContainSubstring(target)) + Expect(inner.Error()).NotTo(ContainSubstring("GET")) + }) + + It("returns non-url.Error values unchanged", func() { + original := net.UnknownNetworkError("test") + Expect(networkError(original)).To(Equal(original)) + }) + }) + + Describe("ValidateGitLabToken", func() { + It("surfaces the network cause on connection refused", func() { + target := connRefusedURL() + _, err := ValidateGitLabToken(context.Background(), "glpat-secret-token", target) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("request failed:")) + Expect(err.Error()).To(ContainSubstring("refused")) + }) + + It("does not leak the full request URL path", func() { + target := connRefusedURL() + _, err := ValidateGitLabToken(context.Background(), "glpat-secret-token", target) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).NotTo(ContainSubstring("/api/v4/")) + }) + + It("does not leak the token", func() { + target := connRefusedURL() + token := "glpat-secret-token-value" + _, err := ValidateGitLabToken(context.Background(), token, target) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).NotTo(ContainSubstring(token)) + }) + + It("returns true for a valid token", func() { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Header.Get("Authorization") == "Bearer valid-token" { + w.WriteHeader(http.StatusOK) + } else { + w.WriteHeader(http.StatusUnauthorized) + } + })) + defer ts.Close() + + valid, err := ValidateGitLabToken(context.Background(), "valid-token", ts.URL) + Expect(err).NotTo(HaveOccurred()) + Expect(valid).To(BeTrue()) + }) + + It("returns false for an invalid token", func() { + ts 
:= httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusUnauthorized) + })) + defer ts.Close() + + valid, err := ValidateGitLabToken(context.Background(), "bad-token", ts.URL) + Expect(err).NotTo(HaveOccurred()) + Expect(valid).To(BeFalse()) + }) + }) + + Describe("ValidateJiraToken", func() { + It("surfaces the network cause when all endpoints fail", func() { + target := connRefusedURL() + _, err := ValidateJiraToken(context.Background(), target, "user@example.com", "api-token") + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("request failed:")) + Expect(err.Error()).To(ContainSubstring("refused")) + }) + + It("does not leak the full request URL path", func() { + target := connRefusedURL() + _, err := ValidateJiraToken(context.Background(), target, "user@example.com", "api-token") + Expect(err).To(HaveOccurred()) + Expect(err.Error()).NotTo(ContainSubstring("/rest/api/")) + }) + + It("returns true for valid credentials", func() { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if strings.Contains(r.URL.Path, "/rest/api/3/myself") { + w.WriteHeader(http.StatusOK) + } else { + w.WriteHeader(http.StatusNotFound) + } + })) + defer ts.Close() + + valid, err := ValidateJiraToken(context.Background(), ts.URL, "user@example.com", "api-token") + Expect(err).NotTo(HaveOccurred()) + Expect(valid).To(BeTrue()) + }) + }) +}) diff --git a/components/frontend/package-lock.json b/components/frontend/package-lock.json index e172b6302..5a8dd28a1 100644 --- a/components/frontend/package-lock.json +++ b/components/frontend/package-lock.json @@ -11363,9 +11363,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "dev": true, "license": "MIT", "engines": { @@ -12871,9 +12871,9 @@ } }, "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -13583,9 +13583,9 @@ } }, "node_modules/vite/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -13674,9 +13674,9 @@ } }, "node_modules/vitest/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { diff --git a/components/frontend/src/lib/__tests__/status-colors.test.ts b/components/frontend/src/lib/__tests__/status-colors.test.ts index 
881355343..d6020103d 100644 --- a/components/frontend/src/lib/__tests__/status-colors.test.ts +++ b/components/frontend/src/lib/__tests__/status-colors.test.ts @@ -64,8 +64,8 @@ describe('getK8sResourceStatusColor', () => { expect(getK8sResourceStatusColor('Not Found')).toBe(STATUS_COLORS.warning); }); - it('returns default for unrecognized status', () => { - expect(getK8sResourceStatusColor('SomethingElse')).toBe(STATUS_COLORS.default); + it('returns error for unrecognized status', () => { + expect(getK8sResourceStatusColor('SomethingElse')).toBe(STATUS_COLORS.error); }); }); diff --git a/components/manifests/overlays/mpp-openshift/ambient-api-server-args-patch.yaml b/components/manifests/overlays/mpp-openshift/ambient-api-server-args-patch.yaml index 41776e0e5..c9a8caa22 100644 --- a/components/manifests/overlays/mpp-openshift/ambient-api-server-args-patch.yaml +++ b/components/manifests/overlays/mpp-openshift/ambient-api-server-args-patch.yaml @@ -31,7 +31,5 @@ spec: - --jwk-cert-file=/configs/authentication/jwks.json - --enable-grpc=true - --grpc-server-bindaddress=:9000 - - --grpc-jwk-cert-file=/configs/authentication/jwks.json - - --grpc-jwk-cert-url=https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/certs,https://kubernetes.default.svc/openid/v1/jwks - --alsologtostderr - -v=4 diff --git a/components/manifests/overlays/mpp-openshift/ambient-control-plane.yaml b/components/manifests/overlays/mpp-openshift/ambient-control-plane.yaml index eb9f60950..c8546e7fd 100644 --- a/components/manifests/overlays/mpp-openshift/ambient-control-plane.yaml +++ b/components/manifests/overlays/mpp-openshift/ambient-control-plane.yaml @@ -92,7 +92,7 @@ spec: volumes: - name: project-kube-token secret: - secretName: tenantaccess-ambient-control-plane-token + secretName: ambient-control-plane-token - name: vertex-credentials secret: secretName: ambient-vertex diff --git a/components/pr-test/install.sh b/components/pr-test/install.sh index 
a099898db..0c1807aba 100755 --- a/components/pr-test/install.sh +++ b/components/pr-test/install.sh @@ -11,7 +11,6 @@ REQUIRED_SOURCE_SECRETS=( ambient-vertex ambient-api-server ambient-api-server-db - tenantaccess-ambient-control-plane-token ) usage() { @@ -47,6 +46,9 @@ del s['metadata']['creationTimestamp'] s['metadata'].pop('ownerReferences', None) s['metadata'].pop('annotations', None) s.pop('status', None) +# Service account token secrets cannot be applied directly; re-create as Opaque +if s.get('type') == 'kubernetes.io/service-account-token': + s['type'] = 'Opaque' print(json.dumps(s)) " | oc apply -n "$NAMESPACE" -f - } @@ -144,7 +146,7 @@ oc set env deployment/ambient-control-plane -n "$NAMESPACE" \ CP_RUNTIME_NAMESPACE="$NAMESPACE" KUBE_HOST=$(oc whoami --show-server) -KUBE_CA=$(oc get secret tenantaccess-ambient-control-plane-token -n "$NAMESPACE" \ +KUBE_CA=$(oc get secret ambient-control-plane-token -n "$NAMESPACE" \ -o jsonpath='{.data.ca\.crt}') python3 - << PYEOF diff --git a/components/runners/ambient-runner/ambient_runner/bridges/claude/grpc_transport.py b/components/runners/ambient-runner/ambient_runner/bridges/claude/grpc_transport.py index 059e0b856..aed305c04 100644 --- a/components/runners/ambient-runner/ambient_runner/bridges/claude/grpc_transport.py +++ b/components/runners/ambient-runner/ambient_runner/bridges/claude/grpc_transport.py @@ -380,7 +380,7 @@ async def _write_message(self, status: str) -> None: assistant_text = next( ( - m.get("content", "") + m.get("content") or "" for m in self._accumulated_messages if m.get("role") == "assistant" ), diff --git a/components/runners/ambient-runner/tests/test_app_initial_prompt.py b/components/runners/ambient-runner/tests/test_app_initial_prompt.py index 69e901bcc..c296e6dca 100644 --- a/components/runners/ambient-runner/tests/test_app_initial_prompt.py +++ b/components/runners/ambient-runner/tests/test_app_initial_prompt.py @@ -338,7 +338,7 @@ async def 
test_uses_agentic_session_namespace_fallback_for_project(self): @pytest.mark.asyncio class TestAutoExecuteInitialPrompt: - async def test_routes_to_grpc_when_url_set(self): + async def test_skips_push_when_grpc_url_set(self): with ( patch( "ambient_runner.app._push_initial_prompt_via_grpc", @@ -354,7 +354,7 @@ async def test_routes_to_grpc_when_url_set(self): "hello", "sess-1", grpc_url="localhost:9000" ) - mock_grpc.assert_awaited_once_with("hello", "sess-1") + mock_grpc.assert_not_awaited() mock_http.assert_not_awaited() async def test_routes_to_http_when_no_grpc_url(self): diff --git a/components/runners/ambient-runner/tests/test_grpc_transport.py b/components/runners/ambient-runner/tests/test_grpc_transport.py index 852cf997d..cee6b846c 100644 --- a/components/runners/ambient-runner/tests/test_grpc_transport.py +++ b/components/runners/ambient-runner/tests/test_grpc_transport.py @@ -522,18 +522,14 @@ async def test_run_finished_pushes_completed(self): call = client.session_messages.push.call_args assert call[0][0] == "s-1" assert call[1]["event_type"] == "assistant" - payload = json.loads(call[1]["payload"]) - assert payload["status"] == "completed" - assert payload["run_id"] == "r-1" - assert len(payload["messages"]) == 1 + assert call[1]["payload"] == "done" async def test_run_error_pushes_error_status(self): writer, client = self._writer() await writer.consume(self._make_run_error_event()) client.session_messages.push.assert_called_once() - payload = json.loads(client.session_messages.push.call_args[1]["payload"]) - assert payload["status"] == "error" + assert client.session_messages.push.call_args[1]["event_type"] == "assistant" async def test_non_terminal_events_do_not_push(self): writer, client = self._writer() @@ -550,16 +546,15 @@ async def test_unknown_event_type_ignored(self): async def test_latest_snapshot_replaces_previous(self): writer, client = self._writer() msg1 = MagicMock() - msg1.model_dump.return_value = {"content": "first"} + 
msg1.model_dump.return_value = {"role": "assistant", "content": "first"} msg2 = MagicMock() - msg2.model_dump.return_value = {"content": "second"} + msg2.model_dump.return_value = {"role": "assistant", "content": "second"} await writer.consume(self._make_messages_snapshot([msg1])) await writer.consume(self._make_messages_snapshot([msg2])) await writer.consume(self._make_run_finished_event()) - payload = json.loads(client.session_messages.push.call_args[1]["payload"]) - assert payload["messages"][0]["content"] == "second" + assert client.session_messages.push.call_args[1]["payload"] == "second" async def test_no_grpc_client_write_skipped(self): writer = GRPCMessageWriter(session_id="s-1", run_id="r-1", grpc_client=None) diff --git a/components/runners/ambient-runner/uv.lock b/components/runners/ambient-runner/uv.lock index 61c938162..67b949620 100644 --- a/components/runners/ambient-runner/uv.lock +++ b/components/runners/ambient-runner/uv.lock @@ -690,61 +690,61 @@ toml = [ [[package]] name = "cryptography" -version = "46.0.5" +version = "46.0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, - { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, - { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, - { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, - { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, - { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, - { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, - { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, - { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, - { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, - { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, - { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, - { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, - { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, - { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, - { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, - { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, - { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, - { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = "https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" }, + { url = "https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" }, ] [[package]] diff --git a/docs/package-lock.json 
b/docs/package-lock.json index f9f919627..1149e83af 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,7 +9,7 @@ "version": "0.0.1", "dependencies": { "@astrojs/starlight": "^0.34", - "astro": "^5.7", + "astro": "^5.18", "playwright": "^1.58.2", "sharp": "^0.33.0" }, @@ -2341,9 +2341,9 @@ } }, "node_modules/anymatch/node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "license": "MIT", "engines": { "node": ">=8.6" @@ -2393,14 +2393,14 @@ } }, "node_modules/astro": { - "version": "5.18.0", - "resolved": "https://registry.npmjs.org/astro/-/astro-5.18.0.tgz", - "integrity": "sha512-CHiohwJIS4L0G6/IzE1Fx3dgWqXBCXus/od0eGUfxrZJD2um2pE7ehclMmgL/fXqbU7NfE1Ze2pq34h2QaA6iQ==", + "version": "5.18.1", + "resolved": "https://registry.npmjs.org/astro/-/astro-5.18.1.tgz", + "integrity": "sha512-m4VWilWZ+Xt6NPoYzC4CgGZim/zQUO7WFL0RHCH0AiEavF1153iC3+me2atDvXpf/yX4PyGUeD8wZLq1cirT3g==", "license": "MIT", "dependencies": { "@astrojs/compiler": "^2.13.0", - "@astrojs/internal-helpers": "0.7.5", - "@astrojs/markdown-remark": "6.3.10", + "@astrojs/internal-helpers": "0.7.6", + "@astrojs/markdown-remark": "6.3.11", "@astrojs/telemetry": "3.3.0", "@capsizecss/unpack": "^4.0.0", "@oslojs/encoding": "^1.1.0", @@ -2490,6 +2490,41 @@ "astro": "^4.0.0-beta || ^5.0.0-beta || ^3.3.0 || ^6.0.0-beta" } }, + "node_modules/astro/node_modules/@astrojs/internal-helpers": { + "version": "0.7.6", + "resolved": "https://registry.npmjs.org/@astrojs/internal-helpers/-/internal-helpers-0.7.6.tgz", + "integrity": 
"sha512-GOle7smBWKfMSP8osUIGOlB5kaHdQLV3foCsf+5Q9Wsuu+C6Fs3Ez/ttXmhjZ1HkSgsogcM1RXSjjOVieHq16Q==", + "license": "MIT" + }, + "node_modules/astro/node_modules/@astrojs/markdown-remark": { + "version": "6.3.11", + "resolved": "https://registry.npmjs.org/@astrojs/markdown-remark/-/markdown-remark-6.3.11.tgz", + "integrity": "sha512-hcaxX/5aC6lQgHeGh1i+aauvSwIT6cfyFjKWvExYSxUhZZBBdvCliOtu06gbQyhbe0pGJNoNmqNlQZ5zYUuIyQ==", + "license": "MIT", + "dependencies": { + "@astrojs/internal-helpers": "0.7.6", + "@astrojs/prism": "3.3.0", + "github-slugger": "^2.0.0", + "hast-util-from-html": "^2.0.3", + "hast-util-to-text": "^4.0.2", + "import-meta-resolve": "^4.2.0", + "js-yaml": "^4.1.1", + "mdast-util-definitions": "^6.0.0", + "rehype-raw": "^7.0.0", + "rehype-stringify": "^10.0.1", + "remark-gfm": "^4.0.1", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.1.2", + "remark-smartypants": "^3.0.2", + "shiki": "^3.21.0", + "smol-toml": "^1.6.0", + "unified": "^11.0.5", + "unist-util-remove-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "unist-util-visit-parents": "^6.0.2", + "vfile": "^6.0.3" + } + }, "node_modules/astro/node_modules/@img/sharp-darwin-arm64": { "version": "0.34.5", "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", @@ -4092,9 +4127,9 @@ } }, "node_modules/dompurify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", - "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.3.tgz", + "integrity": "sha512-Oj6pzI2+RqBfFG+qOaOLbFXLQ90ARpcGG6UePL82bJLtdsa6CYJD7nmiU8MW9nQNOtCHV3lZ/Bzq1X0QYbBZCA==", "dev": true, "license": "(MPL-2.0 OR Apache-2.0)", "optionalDependencies": { @@ -6640,9 +6675,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.3", - "resolved": 
"https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "license": "MIT", "engines": { "node": ">=12" diff --git a/docs/package.json b/docs/package.json index b1eefbdee..8f84a2ad2 100644 --- a/docs/package.json +++ b/docs/package.json @@ -12,7 +12,7 @@ }, "dependencies": { "@astrojs/starlight": "^0.34", - "astro": "^5.7", + "astro": "^5.18", "playwright": "^1.58.2", "sharp": "^0.33.0" }, diff --git a/e2e/package-lock.json b/e2e/package-lock.json index 6b2136e49..40cd22ce6 100644 --- a/e2e/package-lock.json +++ b/e2e/package-lock.json @@ -5132,9 +5132,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { diff --git a/scripts/rebase-main-to-alpha.sh b/scripts/rebase-main-to-alpha.sh new file mode 100755 index 000000000..7d22bb303 --- /dev/null +++ b/scripts/rebase-main-to-alpha.sh @@ -0,0 +1,137 @@ +#!/bin/bash +# rebase-main-to-alpha.sh - Rebase all commits from main into the alpha branch. +# +# Creates a working branch off alpha, replays all commits from main that are +# not yet in alpha, and opens a PR against alpha for human review and merge. +# Merge conflicts are left in-place with conflict markers for the reviewer. 
+# +# Usage: +# ./scripts/rebase-main-to-alpha.sh +# +# Requirements: +# - git, gh (GitHub CLI) +# - Authenticated to GitHub: gh auth status +# - Remote named 'upstream' pointing to ambient-code/platform + +set -euo pipefail + +UPSTREAM="${UPSTREAM:-upstream}" +MAIN_BRANCH="main" +ALPHA_BRANCH="alpha" +TIMESTAMP="$(date +%Y%m%d-%H%M%S)" +WORK_BRANCH="chore/rebase-main-to-alpha-${TIMESTAMP}" + +log() { echo "[rebase-main-to-alpha] $*"; } +die() { echo "[rebase-main-to-alpha] ERROR: $*" >&2; exit 1; } + +# Verify required tools +command -v git >/dev/null || die "git not found" +command -v gh >/dev/null || die "gh (GitHub CLI) not found — install from https://cli.github.com" + +# Verify upstream remote exists +git remote get-url "${UPSTREAM}" >/dev/null 2>&1 || \ + die "Remote '${UPSTREAM}' not found. Add it: git remote add upstream git@github.com:ambient-code/platform.git" + +log "Fetching ${UPSTREAM}..." +git fetch "${UPSTREAM}" "${MAIN_BRANCH}" "${ALPHA_BRANCH}" + +MAIN_SHA="$(git rev-parse "${UPSTREAM}/${MAIN_BRANCH}")" +ALPHA_SHA="$(git rev-parse "${UPSTREAM}/${ALPHA_BRANCH}")" +MERGE_BASE="$(git merge-base "${ALPHA_SHA}" "${MAIN_SHA}")" + +log "upstream/${MAIN_BRANCH}: ${MAIN_SHA}" +log "upstream/${ALPHA_BRANCH}: ${ALPHA_SHA}" +log "merge-base: ${MERGE_BASE}" + +# Count commits in main not yet in alpha +COMMIT_COUNT="$(git rev-list --count "${ALPHA_SHA}..${MAIN_SHA}")" +if [ "${COMMIT_COUNT}" -eq 0 ]; then + log "alpha is already up to date with main. Nothing to do." + exit 0 +fi +log "Commits in main not in alpha: ${COMMIT_COUNT}" + +# Create work branch off alpha +log "Creating work branch '${WORK_BRANCH}' from ${UPSTREAM}/${ALPHA_BRANCH}..." +git checkout -b "${WORK_BRANCH}" "${UPSTREAM}/${ALPHA_BRANCH}" + +# Attempt rebase of main onto the work branch (best-effort) +log "Rebasing ${UPSTREAM}/${MAIN_BRANCH} onto ${WORK_BRANCH} (best-effort)..." +REBASE_EXIT=0 +git rebase --onto "${WORK_BRANCH}" "${MERGE_BASE}" "${UPSTREAM}/${MAIN_BRANCH}" || REBASE_EXIT=$? 
+ +if [ "${REBASE_EXIT}" -ne 0 ]; then + log "Rebase encountered conflicts. Collecting conflict state..." + + # Stage all files — conflict markers will be preserved in working tree + git add -A || true + + CONFLICT_FILES="$(git diff --name-only --diff-filter=U HEAD 2>/dev/null || git status --short | grep '^UU' | awk '{print $2}' || echo "(see git status)")" + + # Abort the rebase so we're on a clean branch, then merge instead as fallback + git rebase --abort 2>/dev/null || true + + log "Rebase aborted due to conflicts. Falling back to merge for best-effort commit..." + git merge --no-ff --allow-unrelated-histories "${UPSTREAM}/${MAIN_BRANCH}" \ + -m "chore: merge main into alpha (best-effort — conflicts require human resolution) + +Automated merge of upstream/main into upstream/alpha. +Conflicts detected during rebase; falling back to merge. + +Conflicting files: +${CONFLICT_FILES} + +Please resolve conflicts and merge this PR manually. +Generated by scripts/rebase-main-to-alpha.sh on ${TIMESTAMP}." || { + # Merge also has conflicts — stage everything and commit with markers + git add -A + git commit --no-verify -m "chore: best-effort merge main into alpha (conflicts present) + +Automated merge of upstream/main into upstream/alpha. +Both rebase and merge encountered conflicts. Conflict markers are +present in the files listed below. A human must resolve these before +merging this PR. + +Generated by scripts/rebase-main-to-alpha.sh on ${TIMESTAMP}." + } +else + log "Rebase completed cleanly." + # Rebase leaves us in detached-ish state — need to update work branch + git checkout -B "${WORK_BRANCH}" +fi + +# Push work branch +log "Pushing ${WORK_BRANCH} to ${UPSTREAM}..." +git push "${UPSTREAM}" "${WORK_BRANCH}" --force-with-lease + +# Open PR against alpha +log "Opening PR against ${ALPHA_BRANCH}..." 
+PR_URL="$(gh pr create \ + --repo ambient-code/platform \ + --base "${ALPHA_BRANCH}" \ + --head "${WORK_BRANCH}" \ + --title "chore: rebase main into alpha (${TIMESTAMP})" \ + --body "$(cat <