From 8932d5fc350b91a064c02874537a516009f9adab Mon Sep 17 00:00:00 2001
From: garciasces
Date: Sun, 16 Mar 2025 14:15:56 +0100
Subject: [PATCH] Update llm.rst

In the latest version, if you don't prefix the model name with the model
provider followed by a slash, creating the graph fails:

  File ".venv/lib/python3.11/site-packages/scrapegraphai/graphs/abstract_graph.py", line 180, in _create_llm
    f"""Provider {llm_params['model_provider']} is not supported.
                  ~~~~~~~~~~^^^^^^^^^^^^^^^^^^
KeyError: 'model_provider'

I tested adding "ollama/" in front of the model name and it works. I
assume the same prefix is needed for the other providers too, but I
haven't tested those personally.
---
 docs/source/scrapers/llm.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/scrapers/llm.rst b/docs/source/scrapers/llm.rst
index bc0ed9bb..080daeeb 100644
--- a/docs/source/scrapers/llm.rst
+++ b/docs/source/scrapers/llm.rst
@@ -30,7 +30,7 @@ Then we can use them in the graph configuration as follows:
 
    graph_config = {
       "llm": {
-         "model": "llama3",
+         "model": "ollama/llama3",
          "temperature": 0.0,
          "format": "json",
       },
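
For reference, here is a minimal sketch of the corrected config in use,
assuming ScrapeGraphAI's SmartScraperGraph API; the prompt and source URL
below are illustrative, and I have only verified the "ollama/" prefix:

from scrapegraphai.graphs import SmartScraperGraph

# Graph config with the provider prefix ("ollama/") on the model name,
# which avoids the KeyError: 'model_provider' described above.
graph_config = {
    "llm": {
        "model": "ollama/llama3",
        "temperature": 0.0,
        "format": "json",
    },
}

# Illustrative prompt and source URL, just to show where the config goes.
scraper = SmartScraperGraph(
    prompt="List the article titles on the page",
    source="https://example.com",
    config=graph_config,
)
print(scraper.run())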