
Commit 78e1b2a

Merge pull request #1176 from c-bata/follow-up-plotly-graph
Follow-up #1171: Add some improvements on automatic plotly chart generation
2 parents: 427934b + cb53808


2 files changed: 30 additions, 4 deletions


optuna_dashboard/llm/_api_views.py

Lines changed: 15 additions & 4 deletions
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import logging
 from typing import TYPE_CHECKING
 
 from bottle import Bottle
@@ -19,6 +20,9 @@
 from .llm.provider import LLMProvider
 
 
+_logger = logging.getLogger(__name__)
+
+
 def register_llm_route(app: Bottle, llm_provider: LLMProvider | None) -> None:
     @app.post("/api/llm/trial_filter_query")
     @json_api_view
@@ -71,10 +75,6 @@ def get_generate_plotly_graph_func_str() -> dict[str, str]:
         try:
             prompt_for_func = get_generate_plotly_graph_prompt(user_query, func_str, err_msg)
             generate_plotly_graph_func_str = llm_provider.call(prompt_for_func)
-            prompt_for_title = get_generate_plotly_graph_title_prompt(
-                user_query, generate_plotly_graph_func_str
-            )
-            generated_plotly_graph_title = llm_provider.call(prompt_for_title)
         except RateLimitExceeded as e:
             response.status = 429  # Too Many Requests
             reason = f"Rate limit exceeded. Try again later. The actual error: {str(e)}"
@@ -87,6 +87,17 @@ def get_generate_plotly_graph_func_str() -> dict[str, str]:
             response.status = 500
             return {"reason": str(e)}
 
+        try:
+            # TODO(c-bata): Consider removing the generate_plotly_graph_func_str argument
+            # to enable parallel llm provider calls
+            prompt_for_title = get_generate_plotly_graph_title_prompt(
+                user_query, generate_plotly_graph_func_str
+            )
+            generated_plotly_graph_title = llm_provider.call(prompt_for_title)
+        except Exception as e:
+            _logger.error("Failed to generate title for plotly graph: %s", e)
+            generated_plotly_graph_title = f'Graph generated by "{user_query}"'
+
         response.status = 200
         return {
             "generate_plotly_graph_func_str": generate_plotly_graph_func_str,

optuna_dashboard/ts/components/StudyDetail.tsx

Lines changed: 15 additions & 0 deletions
@@ -1,10 +1,12 @@
 import ChevronRightIcon from "@mui/icons-material/ChevronRight"
 import HomeIcon from "@mui/icons-material/Home"
 import {
+  Alert,
   Box,
   Card,
   CardContent,
   IconButton,
+  Link as MuiLink,
   Typography,
   useTheme,
 } from "@mui/material"
@@ -150,6 +152,19 @@ export const StudyDetail: FC<{
           >
             Graph Generated by LLM
           </Typography>
+          <Alert severity="info" sx={{ m: theme.spacing(0, 2) }}>
+            This feature enables the LLM to create Plotly charts based on
+            natural language descriptions, helping you visualize study results
+            interactively. For more details, please refer to{" "}
+            <MuiLink
+              href="https://optuna-dashboard.readthedocs.io/en/latest/tutorials/llm-integration.html#automatic-plotly-charts-generation"
+              target="_blank"
+              color="inherit"
+            >
+              the documentation
+            </MuiLink>
+            .
+          </Alert>
           <GraphByLLM study={studyDetail} />
         </>
       )}
