@@ -383,29 +383,29 @@ async def main():
383383 scroll_delay = 0.2 ,
384384 )
385385
386- # # Execute market data extraction
387- # results: List[CrawlResult] = await crawler.arun(
388- # url="https://coinmarketcap.com/?page=1", config=crawl_config
389- # )
390-
391- # # Process results
392- # raw_df = pd.DataFrame()
393- # for result in results:
394- # if result.success and result.media["tables"]:
395- # # Extract primary market table
396- # # DataFrame
397- # raw_df = pd.DataFrame(
398- # result.media["tables"][0]["rows"],
399- # columns=result.media["tables"][0]["headers"],
400- # )
401- # break
386+ # Execute market data extraction
387+ results : List [CrawlResult ] = await crawler .arun (
388+ url = "https://coinmarketcap.com/?page=1" , config = crawl_config
389+ )
390+
391+ # Process results
392+ raw_df = pd .DataFrame ()
393+ for result in results :
394+ if result .success and result .media ["tables" ]:
395+ # Extract primary market table
396+ # DataFrame
397+ raw_df = pd .DataFrame (
398+ result .media ["tables" ][0 ]["rows" ],
399+ columns = result .media ["tables" ][0 ]["headers" ],
400+ )
401+ break
402402
403403
404404 # This is for debugging only
405405 # ////// Remove this in production from here..
406406 # Save raw data for debugging
407- # raw_df.to_csv(f"{__current_dir__}/tmp/raw_crypto_data.csv", index=False)
408- # print("🔍 Raw data saved to 'raw_crypto_data.csv'")
407+ raw_df .to_csv (f"{ __current_dir__ } /tmp/raw_crypto_data.csv" , index = False )
408+ print ("🔍 Raw data saved to 'raw_crypto_data.csv'" )
409409
410410 # Read from file for debugging
411411 raw_df = pd .read_csv (f"{ __current_dir__ } /tmp/raw_crypto_data.csv" )