-
Notifications
You must be signed in to change notification settings - Fork 9
115 lines (101 loc) · 3.42 KB
/
e2e.yml
File metadata and controls
115 lines (101 loc) · 3.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
---
# E2E test workflow: runs the pytest e2e suite against the STAGING database,
# reached through an SSH tunnel (localhost:5432 -> staging EC2 -> RDS:5432).
# A throwaway Redis service container backs Celery + the app cache.
name: E2E Tests

on:
  workflow_dispatch:
    inputs:
      reason:
        description: "Reason for manual run"
        required: false
  push:
    branches: [main]
    paths:
      - 'src/**'
      - 'tests/**'
      - 'pyproject.toml'
      - '.github/workflows/e2e.yml'

# Only one run per ref at a time; a new push cancels the in-flight run
# (important because runs share the staging DB).
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  e2e-tests:
    runs-on: ubuntu-latest
    name: E2E Tests (Staging DB)
    timeout-minutes: 120

    services:
      redis:
        image: redis:7
        ports:
          # Quoted to avoid YAML's sexagesimal/number traps on port mappings.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          # uv >= 0.5 installs to ~/.local/bin; older installers used
          # ~/.cargo/bin. Add both so the step works regardless of version.
          echo "$HOME/.local/bin" >> "$GITHUB_PATH"
          echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Install dependencies
        run: |
          uv pip install -e '.[dev,test]' --system

      - name: Set up SSH tunnel to staging DB
        run: |
          mkdir -p ~/.ssh
          chmod 700 ~/.ssh
          echo "${{ secrets.STAGING_SSH_KEY }}" > ~/.ssh/staging_key
          chmod 600 ~/.ssh/staging_key
          # Tunnel: localhost:5432 -> staging EC2 -> RDS:5432
          # NOTE(review): StrictHostKeyChecking=no accepts any host key;
          # consider pinning the staging host key via known_hosts.
          ssh -f -N -L 5432:${{ secrets.STAGING_DB_HOST }}:5432 \
            -i ~/.ssh/staging_key \
            -o StrictHostKeyChecking=no \
            -o ServerAliveInterval=30 \
            ec2-user@${{ secrets.STAGING_HOST }}
          # Wait for the tunnel; fail fast instead of letting later steps
          # die with confusing connection errors.
          ready=0
          for i in $(seq 1 10); do
            if pg_isready -h localhost -p 5432 >/dev/null 2>&1; then
              echo "SSH tunnel ready"
              ready=1
              break
            fi
            sleep 1
          done
          if [ "$ready" -ne 1 ]; then
            echo "::error::SSH tunnel to staging DB did not become ready"
            exit 1
          fi

      - name: Flush caches before E2E
        env:
          DATABASE_URL: postgresql+psycopg://openarg:${{ secrets.STAGING_DB_PASSWORD }}@localhost:5432/openarg_staging
        run: |
          # Read DATABASE_URL from os.environ instead of shell-interpolating
          # it into the Python source: a password containing quotes, '$' or
          # backslashes would otherwise break (or corrupt) the command.
          python -c "
          import asyncio
          import os
          from sqlalchemy.ext.asyncio import create_async_engine
          from sqlalchemy import text
          async def flush():
              engine = create_async_engine(os.environ['DATABASE_URL'])
              async with engine.begin() as conn:
                  r = await conn.execute(text('DELETE FROM query_cache'))
                  print(f'Flushed {r.rowcount} query_cache entries')
              await engine.dispose()
          asyncio.run(flush())
          "
          # Best-effort Redis flush; Redis may be cold on first run.
          redis-cli -h localhost -p 6379 FLUSHALL || true
          echo "Caches flushed"

      - name: Run E2E tests
        env:
          APP_ENV: e2e
          DATABASE_URL: postgresql+psycopg://openarg:${{ secrets.STAGING_DB_PASSWORD }}@localhost:5432/openarg_staging
          SANDBOX_DATABASE_URL: postgresql+psycopg://openarg:${{ secrets.STAGING_DB_PASSWORD }}@localhost:5432/openarg_staging
          CELERY_BROKER_URL: redis://localhost:6379/0
          CELERY_RESULT_BACKEND: redis://localhost:6379/1
          REDIS_CACHE_URL: redis://localhost:6379/2
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: us-east-1
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
          BACKEND_API_KEY: ${{ secrets.BACKEND_API_KEY }}
        run: |
          # -o "addopts=" clears pyproject addopts so only these flags apply;
          # flaky-network retries via --reruns.
          pytest tests/e2e/ -v --tb=short --timeout=300 --reruns 2 --reruns-delay 5 -m e2e -o "addopts="

      - name: Cleanup SSH tunnel
        if: always()
        run: |
          pkill -f "ssh.*-L 5432" || true