# SPDX-FileCopyrightText: Copyright (c) 2025 Cisco and/or its affiliates.
# SPDX-License-Identifier: Apache-2.0

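"""Two-step LlamaIndex Workflow: research a topic with Azure OpenAI, then turn the findings into a report."""
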
import asyncio
from dataclasses import dataclass
from os import environ

from llama_index.core.workflow import (
    Event,
    StartEvent,
    StopEvent,
    Workflow,
    step,
)
from llama_index.llms.azure_openai import AzureOpenAI


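# Records the prompts, model name, and final result of a workflow run.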
@dataclass
class ResearchLog:
    research_system_prompt: str = ""
    research_prompt: str = ""

    create_report_system_prompt: str = ""
    create_report_prompt: str = ""

    llm_model: str = ""

    result: str = ""


class ResearchEvent(Event):
    research: str
    topic: str

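# Module-level log instance that the workflow steps write into.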
log = ResearchLog()


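# Two-step workflow: research a topic, then turn the findings into a report.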
class ResearchFlow(Workflow):
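    # Build an AzureOpenAI client from environment variables, using the given system prompt.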
    def set_llm(self, sys_prompt: str):
        azure_openai_api_key = environ.get("AZURE_OPENAI_API_KEY")
        azure_openai_endpoint = environ.get("AZURE_OPENAI_ENDPOINT")
        openai_api_version = environ.get(
            "AZURE_OPENAI_API_VERSION", "2025-02-01-preview"
        )
        azure_deployment_name = environ.get("AZURE_DEPLOYMENT_NAME", "gpt-4o-mini")
        azure_model_version = environ.get("AZURE_MODEL_VERSION", "gpt-4o-mini")

        llm = AzureOpenAI(
            # ... (remaining client arguments elided; presumably the Azure settings gathered above) ...
            system_prompt=sys_prompt,
        )

        log.llm_model = azure_model_version

        return llm

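    # Step 1: research the topic, with the LLM acting as a senior data researcher.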
    @step
    async def research(self, ev: StartEvent) -> ResearchEvent:
        topic = ev.topic

        sys_prompt = f"""
        You are a {topic} Senior Data Researcher. Goal: Uncover cutting-edge developments in {topic}
        You are a seasoned researcher known for finding the most relevant information and presenting it clearly.
        """

        # ... (research prompt construction elided; it assigns the `prompt` string sent to the LLM below) ...

        llm = self.set_llm(sys_prompt)

        response = await llm.acomplete(prompt)

        log.research_system_prompt = sys_prompt
        log.research_prompt = prompt

        return ResearchEvent(research=str(response), topic=str(topic))

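    # Step 2: turn the research findings into a report, with the LLM acting as a reporting analyst.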
    @step
    async def create_report(self, ev: ResearchEvent) -> StopEvent:
        research = ev.research
        topic = ev.topic

        sys_prompt = f"""
        You are a {topic} Reporting Analyst.
        Goal: Create detailed reports based on {topic} data analysis and research findings
        You are known for turning complex data into clear, concise reports.
        """

        # ... (report prompt construction elided; it assigns the `prompt` string sent to the LLM below) ...

        llm = self.set_llm(sys_prompt)

        response = await llm.acomplete(prompt)

        log.create_report_system_prompt = sys_prompt
        log.create_report_prompt = prompt

        return StopEvent(result=str(response))


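# Run the workflow for the given topic, print the report, and record it on the shared log.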
async def run(topic: str):
    w = ResearchFlow(timeout=60, verbose=False)
    result = await w.run(topic=topic)
    print(str(result))

    log.result = result


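# Synchronous entry point wrapping the async run.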
def main(topic: str):
    asyncio.run(run(topic))


if __name__ == "__main__":
    main(topic="Artificial Intelligence")