@@ -221,26 +221,34 @@ Tables that score above the threshold (default: 7) are extracted and stored in r
222222### Accessing Table data:
223223``` python
- async with AsyncWebCrawler() as crawler:
-     result = await crawler.arun(
-         url="https://example.com/",
-         config=CrawlerRunConfig(
-             table_score_threshold=7  # Minimum score for table detection
+ import asyncio
+ from crawl4ai import AsyncWebCrawler, CrawlerRunConfig
+
+ async def main():
+     async with AsyncWebCrawler() as crawler:
+         result = await crawler.arun(
+             url="https://www.w3schools.com/html/html_tables.asp",
+             config=CrawlerRunConfig(
+                 table_score_threshold=7  # Minimum score for table detection
+             )
          )
-     )
-
-     if result.success and result.tables:
-         print(f"Found {len(result.tables)} tables")
-
-         for i, table in enumerate(result.tables):
-             print(f"\nTable {i+1}:")
-             print(f"Caption: {table.get('caption', 'No caption')}")
-             print(f"Headers: {table['headers']}")
-             print(f"Rows: {len(table['rows'])}")
-
-             # Print first few rows as example
-             for j, row in enumerate(table['rows'][:3]):
-                 print(f"Row {j+1}: {row}")
+
+         if result.success and result.tables:
+             print(f"Found {len(result.tables)} tables")
+
+             for i, table in enumerate(result.tables):
+                 print(f"\nTable {i+1}:")
+                 print(f"Caption: {table.get('caption', 'No caption')}")
+                 print(f"Headers: {table['headers']}")
+                 print(f"Rows: {len(table['rows'])}")
+
+                 # Print first few rows as example
+                 for j, row in enumerate(table['rows'][:3]):
+                     print(f"Row {j+1}: {row}")
+
+ if __name__ == "__main__":
+     asyncio.run(main())
+
244252```
245253
246254### Configuring Table Extraction: