Skip to content

Commit aac1478

Browse files
committed
Revert "ci: reduce test matrix to Python 3.12 only and fix CI failures"
This reverts commit 4305e32.
1 parent 4305e32 commit aac1478

File tree

11 files changed

+39
-41
lines changed

11 files changed

+39
-41
lines changed

.github/workflows/test.yml

Lines changed: 11 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -9,34 +9,36 @@ on:
99
jobs:
1010
test:
1111
runs-on: ubuntu-latest
12+
strategy:
13+
matrix:
14+
python-version: ["3.10", "3.11", "3.12"]
1215

1316
steps:
1417
- uses: actions/checkout@v4
1518

16-
- name: Set up Python
19+
- name: Set up Python ${{ matrix.python-version }}
1720
uses: actions/setup-python@v4
1821
with:
19-
python-version: "3.12"
22+
python-version: ${{ matrix.python-version }}
2023

2124
- name: Cache pip dependencies
2225
uses: actions/cache@v3
2326
with:
2427
path: ~/.cache/pip
25-
key: ${{ runner.os }}-pip-3.12-${{ hashFiles('**/pyproject.toml') }}
28+
key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml') }}
2629
restore-keys: |
27-
${{ runner.os }}-pip-3.12-
30+
${{ runner.os }}-pip-${{ matrix.python-version }}-
2831
2932
- name: Install dependencies
3033
run: |
3134
python -m pip install --upgrade pip
32-
pip install pytest pytest-asyncio responses aioresponses
35+
pip install pytest pytest-asyncio responses
3336
cd scrapegraph-py
3437
pip install -e ".[html]"
3538
36-
- name: Run tests
39+
- name: Run mocked tests with coverage
3740
run: |
38-
cd scrapegraph-py
39-
pytest tests/ -v --ignore=tests/test_integration_v2.py
41+
pytest
4042
- name: Upload coverage to Codecov
4143
uses: codecov/codecov-action@v3
4244
with:
@@ -66,7 +68,7 @@ jobs:
6668
- name: Run linting
6769
run: |
6870
cd scrapegraph-py
69-
flake8 scrapegraph_py/ tests/ --max-line-length=120 --extend-ignore=E203,W503,E501,F401,F841
71+
flake8 scrapegraph_py/ tests/ --max-line-length=88 --extend-ignore=E203,W503
7072
black --check scrapegraph_py/ tests/
7173
isort --check-only scrapegraph_py/ tests/
7274
mypy scrapegraph_py/ --ignore-missing-imports

scrapegraph-py/scrapegraph_py/async_client.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -121,9 +121,7 @@ async def create(
121121
async def list(self) -> Dict[str, Any]:
122122
"""List all monitors."""
123123
logger.info("Listing monitors")
124-
return await self._client._make_request(
125-
"GET", f"{self._client.base_url}/monitor"
126-
)
124+
return await self._client._make_request("GET", f"{self._client.base_url}/monitor")
127125

128126
async def get(self, monitor_id: str) -> Dict[str, Any]:
129127
"""Get a specific monitor."""

scrapegraph-py/scrapegraph_py/client.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -83,9 +83,7 @@ def status(self, crawl_id: str) -> Dict[str, Any]:
8383
crawl_id: The crawl job ID
8484
"""
8585
logger.info(f"Fetching crawl status for {crawl_id}")
86-
return self._client._make_request(
87-
"GET", f"{self._client.base_url}/crawl/{crawl_id}"
88-
)
86+
return self._client._make_request("GET", f"{self._client.base_url}/crawl/{crawl_id}")
8987

9088
def stop(self, crawl_id: str) -> Dict[str, Any]:
9189
"""Stop a running crawl job.

scrapegraph-py/scrapegraph_py/logger.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,6 @@
1515
Disable logging:
1616
>>> sgai_logger.disable()
1717
"""
18-
1918
import logging
2019
import logging.handlers
2120
from typing import Dict, Optional

scrapegraph-py/scrapegraph_py/models/__init__.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -2,13 +2,13 @@
22
Pydantic models for the ScrapeGraphAI v2 API.
33
"""
44

5-
from .crawl import CrawlFormat, CrawlRequest
5+
from .shared import FetchConfig, LlmConfig
6+
from .scrape import ScrapeFormat, ScrapeRequest, GetScrapeRequest
67
from .extract import ExtractRequest
7-
from .history import HistoryFilter
8-
from .monitor import MonitorCreateRequest
9-
from .scrape import GetScrapeRequest, ScrapeFormat, ScrapeRequest
108
from .search import SearchRequest
11-
from .shared import FetchConfig, LlmConfig
9+
from .crawl import CrawlFormat, CrawlRequest
10+
from .monitor import MonitorCreateRequest
11+
from .history import HistoryFilter
1212

1313
__all__ = [
1414
# Shared

scrapegraph-py/scrapegraph_py/models/extract.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -15,9 +15,7 @@ class ExtractRequest(BaseModel):
1515
"""Request model for POST /v2/extract."""
1616

1717
url: str = Field(..., description="URL of the page to extract data from")
18-
prompt: str = Field(
19-
..., description="Natural language prompt describing what to extract"
20-
)
18+
prompt: str = Field(..., description="Natural language prompt describing what to extract")
2119
output_schema: Optional[Dict[str, Any]] = Field(
2220
default=None,
2321
description="JSON Schema defining the structure of the extracted data",

scrapegraph-py/scrapegraph_py/models/history.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,9 @@ class HistoryFilter(BaseModel):
1515
endpoint: Optional[str] = Field(
1616
default=None, description="Filter by endpoint name (e.g. 'scrape', 'extract')"
1717
)
18-
status: Optional[str] = Field(default=None, description="Filter by request status")
18+
status: Optional[str] = Field(
19+
default=None, description="Filter by request status"
20+
)
1921
limit: Optional[int] = Field(
2022
default=None, ge=1, le=100, description="Maximum number of results (1-100)"
2123
)

scrapegraph-py/scrapegraph_py/utils/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -3,4 +3,4 @@
33
44
This module contains helper functions for API key validation,
55
HTTP response handling, and other common operations used throughout the SDK.
6-
"""
6+
"""

scrapegraph-py/scrapegraph_py/utils/toon_converter.py

Lines changed: 11 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -4,12 +4,10 @@
44
This module provides utilities to convert API responses to TOON format,
55
which reduces token usage by 30-60% compared to JSON.
66
"""
7-
87
from typing import Any, Dict, Optional
98

109
try:
1110
from toon import encode as toon_encode
12-
1311
TOON_AVAILABLE = True
1412
except ImportError:
1513
TOON_AVAILABLE = False
@@ -19,44 +17,44 @@
1917
def convert_to_toon(data: Any, options: Optional[Dict[str, Any]] = None) -> str:
2018
"""
2119
Convert data to TOON format.
22-
20+
2321
Args:
2422
data: Python dict or list to convert to TOON format
2523
options: Optional encoding options for TOON
2624
- delimiter: 'comma' (default), 'tab', or 'pipe'
2725
- indent: Number of spaces per level (default: 2)
2826
- key_folding: 'off' (default) or 'safe'
2927
- flatten_depth: Max depth for key folding (default: None)
30-
28+
3129
Returns:
3230
TOON formatted string
33-
31+
3432
Raises:
3533
ImportError: If toonify library is not installed
3634
"""
3735
if not TOON_AVAILABLE or toon_encode is None:
3836
raise ImportError(
39-
"toonify library is not installed. " "Install it with: pip install toonify"
37+
"toonify library is not installed. "
38+
"Install it with: pip install toonify"
4039
)
41-
40+
4241
return toon_encode(data, options=options)
4342

4443

45-
def process_response_with_toon(
46-
response: Dict[str, Any], return_toon: bool = False
47-
) -> Any:
44+
def process_response_with_toon(response: Dict[str, Any], return_toon: bool = False) -> Any:
4845
"""
4946
Process API response and optionally convert to TOON format.
50-
47+
5148
Args:
5249
response: The API response dictionary
5350
return_toon: If True, convert the response to TOON format
54-
51+
5552
Returns:
5653
Either the original response dict or TOON formatted string
5754
"""
5855
if not return_toon:
5956
return response
60-
57+
6158
# Convert the response to TOON format
6259
return convert_to_toon(response)
60+

scrapegraph-py/tests/test_integration_v2.py

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -14,9 +14,12 @@
1414

1515
from scrapegraph_py.client import Client
1616

17+
1718
pytestmark = pytest.mark.integration
1819

19-
BASE_URL = os.getenv("SGAI_API_BASE_URL", "https://sgai-api-dev-v2.onrender.com/api/v1")
20+
BASE_URL = os.getenv(
21+
"SGAI_API_BASE_URL", "https://sgai-api-dev-v2.onrender.com/api/v1"
22+
)
2023

2124

2225
@pytest.fixture
@@ -27,7 +30,6 @@ def client():
2730

2831
# Patch the base URL for dev testing
2932
import scrapegraph_py.config as cfg
30-
3133
original = cfg.API_BASE_URL
3234
cfg.API_BASE_URL = BASE_URL
3335

0 commit comments

Comments (0)