1. How to make async scrape requests
2. How to process multiple URLs concurrently
3. How to use render_heavy_js for JavaScript-heavy websites
4. How to use branding parameter
5. How to add custom headers in async mode
910
Equivalent curl command:
curl -X POST https://api.scrapegraphai.com/v1/scrape \
  -H "Content-Type: application/json" \
  -H "SGAI-APIKEY: your-api-key-here" \
  -d '{
    "website_url": "https://www.cubic.dev/",
    "render_heavy_js": false,
    "branding": true
  }'
1820
1921Requirements:
@@ -165,6 +167,31 @@ async def concurrent_scraping_example():
165167 return results
166168
167169
async def async_scrape_with_branding():
    """Demonstrate async scraping with the branding option enabled.

    Opens an AsyncClient from environment configuration, requests the
    target page without heavy JS rendering but with branding=True, and
    reports the size of the returned HTML plus the request ID.

    Returns:
        The raw scrape result dict on success, or None if the request
        raised any exception (errors are printed, not re-raised, to
        match the other examples in this script).
    """
    print("\n🏷️ Async Branding Example")
    print("=" * 30)

    # Context manager guarantees the client session is closed on exit.
    async with AsyncClient.from_env() as client:
        try:
            print("Making async scrape request with branding enabled...")
            result = await client.scrape(
                website_url="https://www.cubic.dev/",
                render_heavy_js=False,
                branding=True,
            )

            html_content = result.get("html", "")
            print(f"✅ Success! Received {len(html_content):,} characters of HTML")
            print(f"Request ID: {result.get('request_id', 'N/A')}")

            return result

        except Exception as e:
            # Best-effort example: report the failure and signal it with None.
            print(f"❌ Error: {str(e)}")
            return None
193+
194+
168195async def async_scrape_with_custom_headers ():
169196 """Demonstrate async scraping with custom headers."""
170197 print ("\n 🔧 Async Custom Headers Example" )
@@ -228,7 +255,17 @@ def demonstrate_curl_equivalent():
228255 print (" \" render_heavy_js\" : false" )
229256 print (" }'" )
230257
231- print ("\n 2. Multiple concurrent requests:" )
258+ print ("\n 2. With branding enabled:" )
259+ print ("curl -X POST https://api.scrapegraphai.com/v1/scrape \\ " )
260+ print (" -H \" Content-Type: application/json\" \\ " )
261+ print (" -H \" SGAI-APIKEY: your-api-key-here\" \\ " )
262+ print (" -d '{" )
263+ print (" \" website_url\" : \" https://www.cubic.dev/\" ," )
264+ print (" \" render_heavy_js\" : false," )
265+ print (" \" branding\" : true" )
266+ print (" }'" )
267+
268+ print ("\n 3. Multiple concurrent requests:" )
232269 print ("# Run multiple curl commands in parallel:" )
233270 print ("curl -X POST https://api.scrapegraphai.com/v1/scrape \\ " )
234271 print (" -H \" Content-Type: application/json\" \\ " )
@@ -253,7 +290,8 @@ async def main():
253290 # Run async examples
254291 result1 = await basic_async_scrape ()
255292 result2 = await async_scrape_with_heavy_js ()
256- result3 = await async_scrape_with_custom_headers ()
293+ result3 = await async_scrape_with_branding ()
294+ result4 = await async_scrape_with_custom_headers ()
257295 concurrent_results = await concurrent_scraping_example ()
258296
259297 # Save results if successful
@@ -265,12 +303,18 @@ async def main():
265303 if result3 :
266304 html3 = result3 .get ("html" , "" )
267305 if html3 :
268- await save_html_to_file_async (html3 , "custom_headers_async_scrape" )
306+ await save_html_to_file_async (html3 , "branding_async_scrape" )
307+
308+ if result4 :
309+ html4 = result4 .get ("html" , "" )
310+ if html4 :
311+ await save_html_to_file_async (html4 , "custom_headers_async_scrape" )
269312
270313 print ("\n 🎯 Summary:" )
271314 print (f"✅ Basic async scrape: { 'Success' if result1 else 'Failed' } " )
272315 print (f"✅ Heavy JS async scrape: { 'Success' if result2 else 'Failed' } " )
273- print (f"✅ Custom headers async scrape: { 'Success' if result3 else 'Failed' } " )
316+ print (f"✅ Branding async scrape: { 'Success' if result3 else 'Failed' } " )
317+ print (f"✅ Custom headers async scrape: { 'Success' if result4 else 'Failed' } " )
274318 print (f"✅ Concurrent scraping: { 'Success' if concurrent_results else 'Failed' } " )
275319
276320 except Exception as e :
0 commit comments