"""
Basic example of scraping pipeline using SmartScraper from text
"""

import os

from dotenv import load_dotenv
from scrapegraphai.graphs import SmartScraperGraph
from scrapegraphai.utils import prettify_exec_info
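
# Load the OpenAI API key (and any other variables) from a local .env file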
load_dotenv()

# ************************************************
# Define the text files to read and the scraping tasks
# ************************************************

files = ["inputs/example_1.txt", "inputs/example_2.txt"]
tasks = ["List me all the projects with their description.",
         "List me all the articles with their description."]

# ************************************************
# Define the configuration for the graph
# ************************************************

openai_key = os.getenv("OPENAI_APIKEY")

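# Only the LLM settings are provided; any other graph options keep their defaults.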
graph_config = {
    "llm": {
        "api_key": openai_key,
        "model": "gpt-4o",
    },
}

# ************************************************
# Create the SmartScraperGraph instance and run it
# ************************************************

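# Each text file is paired with its own prompt; a new graph is built and run
# for every (file, task) pair.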
for file_path, task in zip(files, tasks):
    with open(file_path, "r", encoding="utf-8") as file:
        text = file.read()

    smart_scraper_graph = SmartScraperGraph(
        prompt=task,
        source=text,
        config=graph_config
    )

    result = smart_scraper_graph.run()
    print(result)

    # ************************************************
    # Get graph execution info
    # ************************************************

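    # Execution info holds per-node token usage, cost, and timing;
    # prettify_exec_info renders it in a readable tabular form.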
    graph_exec_info = smart_scraper_graph.get_execution_info()
    print(prettify_exec_info(graph_exec_info))