
Commit 2652f78

updates
1 parent 24d07f1 commit 2652f78

File tree

15 files changed: +1360 -5 lines changed


.github/workflows/e2e-tests.yml

Lines changed: 124 additions & 0 deletions
@@ -0,0 +1,124 @@
name: E2E Tests

on:
  # Run on demand via workflow_dispatch
  workflow_dispatch:
    inputs:
      test_username:
        description: "Twitter username for testing (without @)"
        required: false
        default: "X"
      test_user_id:
        description: "Twitter user ID for testing"
        required: false
        default: "783214"

  # Run on schedule (e.g., daily at midnight UTC)
  schedule:
    - cron: "0 0 * * *"

  # Run on push to main (optional - can be removed if too expensive)
  push:
    branches: [main]
    paths:
      - "src/**"
      - "tests/e2e/**"
      - ".github/workflows/e2e-tests.yml"

concurrency:
  group: e2e-${{ github.ref }}
  cancel-in-progress: true

env:
  SCRAPEBADGER_API_KEY: ${{ secrets.SCRAPEBADGER_API_KEY }}
  SCRAPEBADGER_BASE_URL: ${{ secrets.SCRAPEBADGER_BASE_URL || 'https://scrapebadger.com' }}

jobs:
  e2e:
    name: E2E Tests
    runs-on: ubuntu-latest
    # Only run if we have the API key secret
    if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || (github.event_name == 'push' && github.repository == 'scrapebadger/scrapebadger-python') }}

    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v4
        with:
          enable-cache: true

      - name: Set up Python 3.12
        run: uv python install 3.12

      - name: Install dependencies
        run: uv sync --extra dev

      - name: Check API key is set
        run: |
          if [ -z "$SCRAPEBADGER_API_KEY" ]; then
            echo "::error::SCRAPEBADGER_API_KEY secret is not set"
            exit 1
          fi
          echo "API key is configured"

      - name: Run E2E tests
        env:
          TEST_USERNAME: ${{ github.event.inputs.test_username || 'X' }}
          TEST_USER_ID: ${{ github.event.inputs.test_user_id || '783214' }}
          # Optional: Additional test data can be configured here
          # TEST_TWEET_ID: ${{ secrets.TEST_TWEET_ID }}
          # TEST_LIST_ID: ${{ secrets.TEST_LIST_ID }}
          # TEST_COMMUNITY_ID: ${{ secrets.TEST_COMMUNITY_ID }}
        run: |
          uv run --frozen pytest tests/e2e -v --tb=short -x

      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: e2e-test-results
          path: |
            .pytest_cache/
          retention-days: 7

  notify:
    name: Notify on failure
    needs: e2e
    runs-on: ubuntu-latest
    if: failure() && github.event_name == 'schedule'
    steps:
      - name: Create issue on failure
        uses: actions/github-script@v7
        with:
          script: |
            const title = `E2E Tests Failed - ${new Date().toISOString().split('T')[0]}`;
            const body = `
            ## E2E Test Failure

            The scheduled E2E tests have failed.

            **Workflow Run:** [View Run](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})

            Please investigate and fix any issues.
            `;

            // Check if an issue with this title already exists (to avoid duplicates)
            const issues = await github.rest.issues.listForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              state: 'open',
              labels: 'e2e-failure'
            });

            const existingIssue = issues.data.find(issue => issue.title === title);

            if (!existingIssue) {
              await github.rest.issues.create({
                owner: context.repo.owner,
                repo: context.repo.repo,
                title: title,
                body: body,
                labels: ['e2e-failure', 'bug']
              });
            }

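The "Run E2E tests" step can be reproduced locally when debugging failures. Below is a minimal sketch, assuming the dev dependencies (pytest) are installed in the active environment and SCRAPEBADGER_API_KEY is already exported; the workflow itself invokes pytest through uv run --frozen.

# Local approximation of the workflow's "Run E2E tests" step (sketch only).
import os

import pytest

# Same fallback values the workflow_dispatch inputs default to.
os.environ.setdefault("TEST_USERNAME", "X")
os.environ.setdefault("TEST_USER_ID", "783214")

# Without SCRAPEBADGER_API_KEY the e2e fixtures skip rather than fail.
raise SystemExit(pytest.main(["tests/e2e", "-v", "--tb=short", "-x"]))
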
.github/workflows/test.yml

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ jobs:
         run: uv run --frozen mypy src/
 
       - name: Run tests with coverage
-        run: uv run --frozen pytest --cov=src/scrapebadger --cov-report=xml --cov-report=term-missing
+        run: uv run --frozen pytest --cov=src/scrapebadger --cov-report=xml --cov-report=term-missing --ignore=tests/e2e
 
       - name: Upload coverage to Codecov
         if: matrix.python-version == '3.12'

README.md

Lines changed: 1 addition & 1 deletion
@@ -302,7 +302,7 @@ from scrapebadger._internal import ClientConfig
 
 config = ClientConfig(
     api_key="your-key",
-    base_url="https://api.scrapebadger.com",
+    base_url="https://scrapebadger.com",
     timeout=300.0,
     connect_timeout=10.0,
     max_retries=3,

pyproject.toml

Lines changed: 10 additions & 0 deletions
@@ -74,6 +74,9 @@ addopts = [
     "-ra",
     "-q",
 ]
+markers = [
+    "e2e: mark test as end-to-end test requiring real API access",
+]
 
 [tool.coverage.run]
 source = ["src/scrapebadger"]

@@ -116,6 +119,13 @@ ignore = [
     "A003", # Class attribute shadowing builtin
 ]
 
+[tool.ruff.lint.per-file-ignores]
+"tests/**" = [
+    "TCH001", # Imports needed at runtime for isinstance checks
+    "TCH002",
+    "TCH003",
+]
+
 [tool.ruff.lint.isort]
 known-first-party = ["scrapebadger"]

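The new markers entry registers "e2e" so pytest does not warn about an unknown marker, and it gives a second way (besides the --ignore=tests/e2e flag added to test.yml above) to keep end-to-end tests out of ordinary runs. A minimal sketch of deselecting them programmatically, equivalent to running pytest -m "not e2e" on the command line:

# Run the suite while deselecting anything carrying the registered "e2e" marker.
import pytest

raise SystemExit(pytest.main(["-m", "not e2e", "-q"]))
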
src/scrapebadger/_internal/config.py

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@
 from typing import Any
 
 # Default API base URL
-DEFAULT_BASE_URL = "https://api.scrapebadger.com"
+DEFAULT_BASE_URL = "https://scrapebadger.com"
 
 # Default timeout in seconds
 DEFAULT_TIMEOUT = 300.0  # 5 minutes (matching server MAX_POLL_TIME)

src/scrapebadger/twitter/models.py

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ class TrendCategory(StrEnum):
     """
 
     TRENDING = "trending"
-    FOR_YOU = "for_you"
+    FOR_YOU = "for-you"
     NEWS = "news"
     SPORTS = "sports"
     ENTERTAINMENT = "entertainment"

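Because TrendCategory is a StrEnum, each member's value is the literal string that ends up in serialized requests, which is presumably why the slug was corrected to the hyphenated form. A quick check, as a sketch:

from scrapebadger.twitter.models import TrendCategory

# StrEnum members compare equal to, and stringify as, their values,
# so FOR_YOU now carries the hyphenated slug.
assert TrendCategory.FOR_YOU == "for-you"
assert str(TrendCategory.FOR_YOU) == "for-you"
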
tests/e2e/__init__.py

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
"""End-to-end tests for ScrapeBadger SDK.

These tests make real API calls against the ScrapeBadger API.
They require a valid API key set via the SCRAPEBADGER_API_KEY environment variable.
"""

tests/e2e/conftest.py

Lines changed: 130 additions & 0 deletions
@@ -0,0 +1,130 @@
"""E2E test configuration and fixtures.

These tests make real API calls against the ScrapeBadger API.
Requires SCRAPEBADGER_API_KEY and SCRAPEBADGER_BASE_URL environment variables.
"""

from __future__ import annotations

import os
from dataclasses import dataclass
from typing import TYPE_CHECKING

import pytest

from scrapebadger import ScrapeBadger

if TYPE_CHECKING:
    from collections.abc import AsyncGenerator


def pytest_configure(config: pytest.Config) -> None:
    """Register custom markers."""
    config.addinivalue_line(
        "markers",
        "e2e: mark test as end-to-end test requiring real API access",
    )


def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None:
    """Add e2e marker to all tests in e2e directory."""
    for item in items:
        if "e2e" in str(item.fspath):
            item.add_marker(pytest.mark.e2e)


@dataclass(frozen=True)
class E2ETestData:
    """Test data for e2e tests.

    Uses stable public accounts and resources that are unlikely to change.
    Override via environment variables if needed.
    """

    # User test data
    username: str = "X"  # Twitter/X official account (stable)
    username_alt: str = "elonmusk"  # Elon Musk (active, stable)
    user_id: str = "783214"  # @X user ID

    # Tweet test data - use recent tweets from stable accounts
    # These may need periodic updates if tweets are deleted
    tweet_id: str = os.environ.get("TEST_TWEET_ID", "1802331592918618529")

    # List test data - use public lists
    list_id: str = os.environ.get("TEST_LIST_ID", "1736495155853967360")

    # Community test data
    community_id: str = os.environ.get("TEST_COMMUNITY_ID", "1493016274714259462")

    # Geo test data
    place_id: str = "5a110d312052166f"  # San Francisco
    lat: float = 37.7749
    long: float = -122.4194

    # Trend test data
    woeid_us: int = 23424977  # United States
    woeid_worldwide: int = 1  # Worldwide

    @classmethod
    def from_env(cls) -> E2ETestData:
        """Create test data from environment variables with fallbacks."""
        return cls(
            username=os.environ.get("TEST_USERNAME", cls.username),
            username_alt=os.environ.get("TEST_USERNAME_ALT", cls.username_alt),
            user_id=os.environ.get("TEST_USER_ID", cls.user_id),
            tweet_id=os.environ.get("TEST_TWEET_ID", cls.tweet_id),
            list_id=os.environ.get("TEST_LIST_ID", cls.list_id),
            community_id=os.environ.get("TEST_COMMUNITY_ID", cls.community_id),
            place_id=os.environ.get("TEST_PLACE_ID", cls.place_id),
        )


@pytest.fixture(scope="session")
def api_key() -> str:
    """Get API key from environment."""
    key = os.environ.get("SCRAPEBADGER_API_KEY")
    if not key:
        pytest.skip("SCRAPEBADGER_API_KEY environment variable not set")
    return key


@pytest.fixture(scope="session")
def base_url() -> str:
    """Get base URL from environment."""
    return os.environ.get("SCRAPEBADGER_BASE_URL", "https://scrapebadger.com")


@pytest.fixture(scope="session")
def test_data() -> E2ETestData:
    """Get test data with environment variable overrides."""
    return E2ETestData.from_env()


@pytest.fixture
async def client(api_key: str, base_url: str) -> AsyncGenerator[ScrapeBadger, None]:
    """Create a ScrapeBadger client for e2e tests.

    This fixture creates a new client for each test to ensure isolation.
    """
    async with ScrapeBadger(
        api_key=api_key,
        base_url=base_url,
        timeout=60.0,  # Longer timeout for e2e tests
        max_retries=2,
    ) as client:
        yield client


@pytest.fixture(scope="session")
async def session_client(api_key: str, base_url: str) -> AsyncGenerator[ScrapeBadger, None]:
    """Create a session-scoped ScrapeBadger client.

    Use this for tests that can share a client to reduce connection overhead.
    """
    async with ScrapeBadger(
        api_key=api_key,
        base_url=base_url,
        timeout=60.0,
        max_retries=2,
    ) as client:
        yield client

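For reference, a hypothetical test module built on these fixtures could look like the sketch below. The get_user call is an illustrative placeholder rather than a method confirmed by this diff, and an async test plugin (e.g. pytest-asyncio in auto mode) is assumed because the client fixtures are async generators.

# tests/e2e/test_users.py (hypothetical example, not part of this commit)
from __future__ import annotations

import pytest


@pytest.mark.e2e  # also applied automatically by pytest_collection_modifyitems
async def test_lookup_official_account(client, test_data) -> None:
    # `client` is the per-test ScrapeBadger fixture; `test_data` holds the stable IDs.
    # The method name below is a placeholder for whatever the SDK actually exposes.
    user = await client.get_user(test_data.username)  # hypothetical API
    assert user is not None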