From 5ff6d5abd432bc19fb496cb93245708e474c2eb3 Mon Sep 17 00:00:00 2001
From: H9660
Date: Wed, 7 May 2025 03:00:37 +0530
Subject: [PATCH 1/3] Add Gemini script for API calls

---
 script/gemini_call/customize.py   | 63 ++++++++++++++++++++++
 script/gemini_call/gemini_call.py | 94 +++++++++++++++++++++++++++++++
 script/gemini_call/meta.yaml      | 36 ++++++++++++
 script/gemini_call/run.bat        |  1 +
 script/gemini_call/run.sh         | 17 ++++++
 5 files changed, 211 insertions(+)
 create mode 100644 script/gemini_call/customize.py
 create mode 100644 script/gemini_call/gemini_call.py
 create mode 100644 script/gemini_call/meta.yaml
 create mode 100644 script/gemini_call/run.bat
 create mode 100644 script/gemini_call/run.sh

diff --git a/script/gemini_call/customize.py b/script/gemini_call/customize.py
new file mode 100644
index 000000000..37e1bb064
--- /dev/null
+++ b/script/gemini_call/customize.py
@@ -0,0 +1,63 @@
+from mlc import utils
+import os
+import json
+import yaml
+
+
+def write_gemini_yaml(model, system_prompt, user_prompt,
+                      filename='gemini-prompt.yaml'):
+    data = {
+        'model': model,
+        'contents': [
+            {
+                'role': 'user',
+                'parts': [
+                    {'text': f"{system_prompt}\n\n{user_prompt}"}
+                ]
+            }
+        ],
+        'generationConfig': {
+            'temperature': 0.7,
+            'maxOutputTokens': 200
+        }
+    }
+
+    with open(filename, 'w', encoding='utf-8') as f:
+        yaml.dump(data, f, sort_keys=False, allow_unicode=True)
+
+
+def preprocess(i):
+    env = i['env']
+    state = i['state']
+
+    if 'MLC_GEMINI_CONFIG_PATH' not in env or not os.path.exists(
+            env['MLC_GEMINI_CONFIG_PATH']):
+        if 'user_prompt' in state:
+            model = env.get('MLC_GEMINI_MODEL', 'gemini-2.0-flash')
+            user_prompt = state['user_prompt']
+            system_prompt = state.get(
+                'system_prompt',
+                'You are an AI agent expected to answer questions correctly')
+            write_gemini_yaml(
+                model,
+                system_prompt,
+                user_prompt,
+                'tmp-gemini-prompt.yaml')
+            env['MLC_GEMINI_CONFIG_PATH'] = 'tmp-gemini-prompt.yaml'
+
+    env['MLC_RUN_CMD'] = f'{env["MLC_PYTHON_BIN_WITH_PATH"]} "{os.path.join(env["MLC_TMP_CURRENT_SCRIPT_PATH"], "gemini_call.py")}"'
+
+    return {'return': 0}
+
+
+def postprocess(i):
+    env = i['env']
+    state = i['state']
+
+    filename = 'tmp-gemini-results.json'
+    with open(filename, 'r', encoding='utf-8') as f:
+        data = json.load(f)
+
+    state['MLC_GEMINI_RESPONSE'] = data['content']
+    os_info = i['os_info']
+    return {'return': 0}
diff --git a/script/gemini_call/gemini_call.py b/script/gemini_call/gemini_call.py
new file mode 100644
index 000000000..6cf6fbf5e
--- /dev/null
+++ b/script/gemini_call/gemini_call.py
@@ -0,0 +1,94 @@
+import requests
+import os
+import json
+import yaml
+
+import yaml
+
+def extract_prompts(yaml_path):
+    with open(yaml_path, 'r', encoding='utf-8') as f:
+        data = yaml.safe_load(f)
+
+    full_text = data['contents'][0]['parts'][0]['text']
+
+    # Split at "Question Text:"
+    if "Question Text:" not in full_text:
+        raise ValueError("Expected 'Question Text:' marker not found.")
+
+    system_prompt, question_part = full_text.split("Question Text:", 1)
+
+    # Trim whitespace
+    system_prompt = system_prompt.strip()
+    user_prompt = question_part.strip()
+
+    return system_prompt, user_prompt
+
+
+def gemini_call(message=None):
+    try:
+        api_key = os.environ['MLC_GEMINI_API_KEY']
+        url = f"https://generativelanguage.googleapis.com/v1beta/models/{os.environ.get('MLC_GEMINI_MODEL', 'gemini-2.0-flash')}:generateContent?key={api_key}"
+        config_path = os.environ.get('MLC_GEMINI_CONFIG_PATH')
+        # Load config if it exists
+        if config_path and os.path.exists(config_path):
+            try:
+                with open(config_path, 'r') as file:
+                    data = yaml.safe_load(file)
+            except Exception as e:
+                return {"error": f"Error reading config file: {str(e)}"}
+
+            if os.environ.get('MLC_GEMINI_CONFIG_MODIFY', '') == 'yes':
+                try:
+                    data['contents'][0]['parts'][0]['text'] = data['contents'][0]['parts'][0]['text'].replace(
+                        "{{ MESSAGE }}", message or "")
+                except Exception as e:
+                    return {"error": f"Config format issue: {str(e)}"}
+        # Load prompts
+        system_prompt, user_prompt = extract_prompts(config_path)
+        # Combine both in first message
+        full_prompt = f"{system_prompt}\n\n{user_prompt}"
+
+        data = {
+            "contents": [
+                {
+                    "role": "user",
+                    "parts": [
+                        {"text": full_prompt}
+                    ]
+                }
+            ]
+        }
+
+        headers = {
+            'Content-Type': 'application/json'
+        }
+
+        response = requests.post(url, json=data, headers=headers)
+        response.raise_for_status()
+        result = response.json()
+
+        content = result['candidates'][0]['content']['parts'][0]['text']
+
+        with open('tmp-gemini-results.json', 'w', encoding='utf-8') as f:
+            json.dump({'content': content}, f, ensure_ascii=False, indent=2)
+
+        return {"content": content}
+
+    except requests.exceptions.RequestException as e:
+        return {"error": f"Request error: {str(e)}"}
+
+    except KeyError as e:
+        return {"error": f"Missing key in response: {str(e)}"}
+
+    except Exception as e:
+        return {"error": f"Unexpected error: {str(e)}"}
+
+
+def main():
+    result = gemini_call()
+    if 'error' in result:
+        raise Exception(result['error'])
+
+
+if __name__ == '__main__':
+    main()
diff --git a/script/gemini_call/meta.yaml b/script/gemini_call/meta.yaml
new file mode 100644
index 000000000..e663a65c0
--- /dev/null
+++ b/script/gemini_call/meta.yaml
@@ -0,0 +1,36 @@
+alias: gemini_call
+automation_alias: script
+automation_uid: 5b4e0237da074764
+category: MLC Script Template
+deps:
+  - tags: get,python3
+    names:
+      - python
+      - python3
+new_env_keys: []
+new_state_keys:
+- MLC_GEMINI_RESPONSE
+post_deps: []
+posthook_deps: []
+prehook_deps: []
+tags:
+- gemini-call
+- query
+input_mapping:
+  api_key: MLC_GEMINI_API_KEY
+  config_path: MLC_GEMINI_CONFIG_PATH
+  system_prompt: MLC_GEMINI_SYSTEM_PROMPT
+  user_prompt: MLC_GEMINI_USER_PROMPT
+  model: MLC_GEMINI_MODEL
+tests:
+  run_inputs: []
+uid: 1cfe3d0658364a2b
+variations:
+  gemini:
+    group: api_provider
+    default: true
+    env:
+      MLC_GEMINI_API_URL: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key='
+
+
+
diff --git a/script/gemini_call/run.bat b/script/gemini_call/run.bat
new file mode 100644
index 000000000..4509961ce
--- /dev/null
+++ b/script/gemini_call/run.bat
@@ -0,0 +1 @@
+%MLC_RUN_CMD%
\ No newline at end of file
diff --git a/script/gemini_call/run.sh b/script/gemini_call/run.sh
new file mode 100644
index 000000000..fa21de70c
--- /dev/null
+++ b/script/gemini_call/run.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+function exit_if_error() {
+  test $? -eq 0 || exit $?
+}
+
+function run() {
+  echo "Running: "
+  echo "$1"
+  echo ""
+  if [[ ${MLC_FAKE_RUN} != 'yes' ]]; then
+    eval "$1"
+    exit_if_error
+  fi
+}
+
+# Run the command prepared by customize.py
+run "$MLC_RUN_CMD"
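
Editor's note (not part of the patch series): a minimal usage sketch for the script added above, assuming the mlcflow mlcr runner and the config_path input mapping declared in meta.yaml; exact CLI syntax may vary across mlcflow versions. Because extract_prompts() splits the prompt on the literal marker "Question Text:", a hand-written config has to contain it:

    # prompt.yaml, the same shape that write_gemini_yaml() emits
    contents:
      - role: user
        parts:
          - text: |
              You are an AI agent expected to answer questions correctly.

              Question Text: What is MLPerf?

    # hypothetical invocation; gemini_call.py reads the key from the
    # MLC_GEMINI_API_KEY environment variable
    export MLC_GEMINI_API_KEY=<your-gemini-api-key>
    mlcr gemini-call,query --config_path=prompt.yaml
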
From a5ce9a349237f1483fd353b95b3460cbe25c47b4 Mon Sep 17 00:00:00 2001
From: H9660
Date: Fri, 9 May 2025 11:08:46 +0530
Subject: [PATCH 2/3] Change encoding to support more characters

---
 script/gemini_call/gemini_call.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/script/gemini_call/gemini_call.py b/script/gemini_call/gemini_call.py
index 6cf6fbf5e..d2bad375f 100644
--- a/script/gemini_call/gemini_call.py
+++ b/script/gemini_call/gemini_call.py
@@ -32,7 +32,7 @@ def gemini_call(message=None):
         # Load config if it exists
         if config_path and os.path.exists(config_path):
             try:
-                with open(config_path, 'r') as file:
+                with open(config_path, 'r', encoding="utf-8") as file:
                     data = yaml.safe_load(file)
             except Exception as e:
                 return {"error": f"Error reading config file: {str(e)}"}

From 94b0f5364529cb0b06e21716fcc42f4d6f1a4632 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Fri, 9 May 2025 05:40:59 +0000
Subject: [PATCH 3/3] [Automated Commit] Format Codebase [skip ci]

---
 script/gemini_call/gemini_call.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/script/gemini_call/gemini_call.py b/script/gemini_call/gemini_call.py
index d2bad375f..1f4791b97 100644
--- a/script/gemini_call/gemini_call.py
+++ b/script/gemini_call/gemini_call.py
@@ -5,6 +5,7 @@
 
 import yaml
 
+
 def extract_prompts(yaml_path):
     with open(yaml_path, 'r', encoding='utf-8') as f:
         data = yaml.safe_load(f)
@@ -36,7 +37,7 @@ def gemini_call(message=None):
                     data = yaml.safe_load(file)
             except Exception as e:
                 return {"error": f"Error reading config file: {str(e)}"}
-
+
             if os.environ.get('MLC_GEMINI_CONFIG_MODIFY', '') == 'yes':
                 try:
                     data['contents'][0]['parts'][0]['text'] = data['contents'][0]['parts'][0]['text'].replace(
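
Editor's note (not part of the patch series): on success, gemini_call.py writes the model reply to tmp-gemini-results.json, and postprocess() in customize.py lifts it into the automation state as MLC_GEMINI_RESPONSE, the key declared under new_state_keys in meta.yaml. The file holds a single field, shown here with an illustrative reply:

    {
      "content": "MLPerf is a suite of industry benchmarks for machine learning..."
    }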