|
1 | | -from llama3.generic_llama import GenericLlama |
| 1 | +import logging |
2 | 2 |
|
| 3 | +from llama3.generic_llama import ( |
| 4 | + GenericLlama, |
| 5 | + LlamaDisabledError, |
| 6 | + LlamaNotInstalledError, |
| 7 | + LlamaModelNotFoundError, |
| 8 | +) |
3 | 9 | from reference.config import MESSAGES, RESPONSE_FORMAT |
| 10 | +from tracker.models import GeneralEvent |
4 | 11 |
|
5 | 12 |
|
def mark_reference(reference_text):
    """Run *reference_text* through the Llama reference marker.

    Yields the ``content`` string of each choice returned by the model.
    On failure, logs the error with its traceback, records a
    ``GeneralEvent``, and yields a single human-readable error message
    instead of raising — callers always receive at least one string.

    :param reference_text: raw reference text to be marked by the model.
    """
    try:
        reference_marker = GenericLlama(MESSAGES, RESPONSE_FORMAT)
        output = reference_marker.run(reference_text)
        # Defensive access: tolerate a missing/partial response shape.
        for item in output.get("choices", []):
            yield item.get("message", {}).get("content", "")

    except (LlamaDisabledError, LlamaNotInstalledError, LlamaModelNotFoundError) as e:
        # Known, expected unavailability conditions — degrade gracefully.
        _record_marking_failure("Error marking reference", e, reference_text)
        if isinstance(e, LlamaModelNotFoundError):
            yield f"Llama model file not found: {str(e)}"
        else:
            yield f"Llama model is not available: {str(e)}"

    except Exception as e:
        # Top-level boundary: anything else is logged and surfaced as text.
        _record_marking_failure("Unexpected error marking reference", e, reference_text)
        yield f"An unexpected error occurred: {str(e)}"


def _record_marking_failure(message, exc, reference_text):
    """Log *exc* (with traceback) and persist it as a GeneralEvent.

    :param message: short description used as the log-message prefix.
    :param exc: the exception being recorded.
    :param reference_text: original input, stored in the event detail.
    """
    # logging.exception captures the active traceback; lazy %-args avoid
    # string formatting when the log level is disabled.
    logging.exception("%s: %s", message, exc)
    GeneralEvent.create(
        exception=exc,
        # Pass the real traceback rather than None so the stored event
        # is actually debuggable.
        exc_traceback=exc.__traceback__,
        item=None,
        action="mark_reference",
        detail={"reference_text": reference_text},
    )
12 | 44 |
|
13 | 45 |
|
14 | 46 | def mark_references(reference_block): |
|
0 commit comments