78 changes: 78 additions & 0 deletions .github/scripts/matrix_generator.py
@@ -0,0 +1,78 @@
import os
import json
import argparse
from typing import List, Dict, Tuple

def calculate_package_weight(pkg_path: str) -> int:
    """
    Dynamically profiles a package to determine its computational weight.
    GAPIC clients are lightweight. Handwritten clients are heavy (based on test count).
    """
    base_weight = 1

    meta_path = os.path.join(pkg_path, ".repo-metadata.json")
    if os.path.isfile(meta_path):
        try:
            with open(meta_path, 'r') as f:
                if json.load(f).get("library_type") == "GAPIC_AUTO":
                    return base_weight
        except Exception:
            pass

    test_dir = os.path.join(pkg_path, "tests")
    test_file_count = 0
    if os.path.isdir(test_dir):
        for root, _, files in os.walk(test_dir):
            test_file_count += sum(1 for f in files if f.endswith(".py"))

    return base_weight + test_file_count

def create_balanced_buckets(packages: List[str], max_buckets: int) -> List[str]:
    """
    Distributes packages using the Longest Processing Time (LPT) algorithm.
    """
    valid_pkgs = [p for p in packages if os.path.isfile(os.path.join(p, "noxfile.py"))]
    if not valid_pkgs:
        return []

    pkg_weights: List[Tuple[str, int]] = []
    for pkg in valid_pkgs:
        pkg_weights.append((pkg, calculate_package_weight(pkg)))

    pkg_weights.sort(key=lambda x: x[1], reverse=True)

    # Do not spin up empty VMs if we have fewer packages than max_buckets
    num_buckets = min(len(valid_pkgs), max_buckets)
    buckets: List[Dict] = [{"packages": [], "total_weight": 0} for _ in range(num_buckets)]

    for pkg, weight in pkg_weights:
        lightest_bucket = min(buckets, key=lambda b: b["total_weight"])
        lightest_bucket["packages"].append(pkg)
        lightest_bucket["total_weight"] += weight

    return [" ".join(b["packages"]) for b in buckets]

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--matrix-multiplier", type=int, required=True,
                        help="Number of matrix permutations (e.g., 6 for Py versions)")
    parser.add_argument("--max-vms", type=int, default=40,
                        help="Hard cap on VMs to protect the organization concurrency limit")
    args = parser.parse_args()

    # The limit math: a matrix can generate at most 256 jobs per workflow run,
    # so keep bucket count * multiplier under a ~250-job safety margin.
    safe_github_limit = 250 // args.matrix_multiplier
    max_allowed_buckets = min(safe_github_limit, args.max_vms)

    changed_dirs = os.environ.get("CHANGED_DIRS", "").split()
    buckets = create_balanced_buckets(changed_dirs, max_allowed_buckets)

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output:
        with open(github_output, "a") as f:
            f.write(f"buckets={json.dumps(buckets)}\n")
    else:
        print(json.dumps(buckets, indent=2))


if __name__ == "__main__":
    main()
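For reference, the bucketing above is plain LPT (greedy longest-processing-time) scheduling: sort packages by weight descending, then always drop the next package into the currently lightest bucket. Below is a minimal standalone sketch of just that step, using hypothetical package names and weights instead of filesystem probing:

```python
# Standalone sketch of the LPT step, with hypothetical weights (no filesystem probing).
from typing import Dict, List, Tuple


def lpt_buckets(pkg_weights: List[Tuple[str, int]], num_buckets: int) -> List[Dict]:
    buckets: List[Dict] = [{"packages": [], "total_weight": 0} for _ in range(num_buckets)]
    # Heaviest first; each package lands in the currently lightest bucket.
    for pkg, weight in sorted(pkg_weights, key=lambda x: x[1], reverse=True):
        lightest = min(buckets, key=lambda b: b["total_weight"])
        lightest["packages"].append(pkg)
        lightest["total_weight"] += weight
    return buckets


if __name__ == "__main__":
    sample = [
        ("packages/handwritten-a", 40),   # heavy handwritten client (many test files)
        ("packages/handwritten-b", 25),
        ("packages/gapic-c", 1),          # GAPIC_AUTO clients keep the base weight of 1
        ("packages/gapic-d", 1),
        ("packages/gapic-e", 1),
    ]
    for b in lpt_buckets(sample, num_buckets=2):
        print(b["total_weight"], b["packages"])
    # Totals come out 40 vs 28: the heaviest package sits alone while the
    # lighter ones stack up in the other bucket.
```

With `--matrix-multiplier 6` the script allows at most `250 // 6 = 41` buckets, capped to 40 by `--max-vms`, so a worst-case run fans out to `40 × 6 = 240` jobs, safely under GitHub's 256-job matrix limit.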
107 changes: 107 additions & 0 deletions .github/workflows/experiment.yaml
@@ -0,0 +1,107 @@
name: CI Unit
on:
  pull_request:
    branches: [ main, preview ]

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  # ==========================================
  # 1. DISCOVERY & BUCKETING
  # ==========================================
  discover:
    runs-on: ubuntu-latest
    outputs:
      buckets: ${{ steps.generate-matrix.outputs.buckets }}
    steps:
      - uses: actions/checkout@v4

      - name: Detect Changed Packages
        id: changes
        uses: tj-actions/changed-files@v44
        with:
          files: packages/**
          dir_names: true
          dir_names_max_depth: 2
          matrix: false

      - name: Generate Balanced Buckets
        id: generate-matrix
        env:
          CHANGED_DIRS: ${{ steps.changes.outputs.all_changed_files }}
        run: python .github/scripts/matrix_generator.py --matrix-multiplier 6 --max-vms 40

  # ==========================================
  # 2. HORIZONTAL EXECUTION
  # ==========================================
  unit-tests:
    needs: discover
    if: ${{ needs.discover.outputs.buckets != '[]' }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      max-parallel: 60
      matrix:
        chunk: ${{ fromJSON(needs.discover.outputs.buckets) }}
        python: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]

    name: Unit (Py ${{ matrix.python }})
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python }}
          enable-cache: true

      - name: Optimize Core Dependencies
        run: git config --global url."${GITHUB_WORKSPACE}".insteadOf "https://github.com/googleapis/google-cloud-python"

      - name: Execute Chunk
        run: |
          # The Engine: uv handles virtualenvs, nox delegates to uv
          export NOX_DEFAULT_VENV_BACKEND=uv

          # The Fix: Forces uv to install `pip` in the venv so legacy `pip freeze` commands don't crash
          export UV_VENV_SEED=1

          # Speed optimization: Pre-compile python bytecode
          export UV_COMPILE_BYTECODE=1

          FAILED=0

          for pkg in ${{ matrix.chunk }}; do
            echo "=========================================================="
            echo "🚀 TESTING: $pkg (Python ${{ matrix.python }})"
            echo "=========================================================="
            cd "$pkg"

            # Check if the session exists before running it
            if uvx --with 'nox[uv]' nox -l | grep -q "unit-${{ matrix.python }}"; then
              # Run the test and stream logs directly to the UI
              uvx --with 'nox[uv]' nox -s "unit-${{ matrix.python }}" || FAILED=1
            else
              echo "⏭️ Session 'unit-${{ matrix.python }}' not defined for $pkg. Safely skipping."
            fi

            cd "$GITHUB_WORKSPACE"
          done

          exit $FAILED

  # ==========================================
  # 3. GATEKEEPER
  # ==========================================
  presubmit-passed:
    if: always()
    needs: [discover, unit-tests]
    runs-on: ubuntu-latest
    steps:
      - name: Evaluate Pipeline Status
        run: |
          if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" || "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
            echo "::error::One or more required CI jobs failed or were cancelled."
            exit 1
          fi
          echo "All dynamically generated CI jobs completed successfully."