diff --git a/src/ezpyai/exceptions.py b/src/ezpyai/exceptions.py
index 913d658..76d4138 100644
--- a/src/ezpyai/exceptions.py
+++ b/src/ezpyai/exceptions.py
@@ -1,3 +1,4 @@
+# LLMProvider Exceptions
 class UnsupportedModelError(Exception):
     """Exception raised when an unsupported model is used."""
 
@@ -12,6 +13,29 @@ def __init__(self, message="Unsupported lora", *args):
         super().__init__(message, *args)
 
 
+# LLM Exceptions
+class PromptUserMessageMissingError(Exception):
+    """Exception raised when there is no user message in the prompt."""
+
+    def __init__(self, message="Prompt user message missing", *args):
+        super().__init__(message, *args)
+
+
+class LLMResponseEmptyError(Exception):
+    """Exception raised when there is no LLM response message."""
+
+    def __init__(self, message="LLM response empty", *args):
+        super().__init__(message, *args)
+
+
+class LLMInferenceError(Exception):
+    """Exception raised when there is an error during the LLM inference."""
+
+    def __init__(self, message="LLM inference error", *args):
+        super().__init__(message, *args)
+
+
+# General Exceptions
 class JSONParseError(Exception):
     """Exception raised when a JSON parse error occurs."""
 
@@ -38,3 +62,10 @@ class FileProcessingError(Exception):
 
     def __init__(self, message="Error during file processing", *args):
         super().__init__(message, *args)
+
+
+class FileNotFoundError(Exception):
+    """Exception raised when a file is not found."""
+
+    def __init__(self, message="File not found", *args):
+        super().__init__(message, *args)
diff --git a/src/ezpyai/llm/dataset/sources/telegram.py b/src/ezpyai/llm/dataset/sources/telegram.py
index f586354..4e58814 100644
--- a/src/ezpyai/llm/dataset/sources/telegram.py
+++ b/src/ezpyai/llm/dataset/sources/telegram.py
@@ -1,3 +1,11 @@
+import os
+
+from ezpyai.exceptions import FileNotFoundError
+
+
 class DatasetSourceTelegram:
     def __init__(self, json_export_file_path: str) -> None:
-        pass
+        if not os.path.exists(json_export_file_path):
+            raise FileNotFoundError(f"File not found: {json_export_file_path}")
+
+        self._json_export_file_path = json_export_file_path
diff --git a/src/ezpyai/llm/exceptions.py b/src/ezpyai/llm/exceptions.py
deleted file mode 100644
index a72915d..0000000
--- a/src/ezpyai/llm/exceptions.py
+++ /dev/null
@@ -1,19 +0,0 @@
-class NoUserMessage(Exception):
-    """Exception raised when there is no user message."""
-
-    def __init__(self, message="No user message", *args):
-        super().__init__(message, *args)
-
-
-class NoLLMResponseMessage(Exception):
-    """Exception raised when there is no response from the LLM."""
-
-    def __init__(self, message="No LLM response message", *args):
-        super().__init__(message, *args)
-
-
-class InvokeError(Exception):
-    """Exception raised when an invocation error occurs."""
-
-    def __init__(self, message="Invocation error", *args):
-        super().__init__(message, *args)
diff --git a/src/ezpyai/llm/knowledge/_knowledge_gatherer.py b/src/ezpyai/llm/knowledge/_knowledge_gatherer.py
index 9e88e1a..19f0b0e 100644
--- a/src/ezpyai/llm/knowledge/_knowledge_gatherer.py
+++ b/src/ezpyai/llm/knowledge/_knowledge_gatherer.py
@@ -7,7 +7,12 @@ import hashlib
 import pandas as pd
 import xml.etree.ElementTree as ET
-import ezpyai.exceptions as exceptions
+
+from ezpyai.exceptions import (
+    UnsupportedFileTypeError,
+    FileReadError,
+    FileProcessingError,
+)
 
 from bs4 import BeautifulSoup
 from typing import Dict
 
@@ -184,13 +189,9 @@ def _process_file(self, file_path: str):
                 self._process_zip(file_path)
                 return
             else:
-                raise exceptions.UnsupportedFileTypeError(
-                    f"Unsupported file type for {file_path}"
-                )
+                raise UnsupportedFileTypeError(f"Unsupported file type for {file_path}")
         except Exception as e:
-            raise exceptions.FileReadError(
-                f"Error reading {file_path}: {str(e)}"
-            ) from e
+            raise FileReadError(f"Error reading {file_path}: {str(e)}") from e
 
         paragraphs = content.split("\n")
         paragraph_counter = 1
@@ -235,7 +236,7 @@ def _process_zip(self, zip_path: str):
                 self._process_directory(temp_dir)
 
         except Exception as e:
-            raise exceptions.FileProcessingError(
+            raise FileProcessingError(
                 f"Error processing ZIP file {zip_path}: {str(e)}"
             ) from e
         finally:
diff --git a/src/ezpyai/llm/providers/openai.py b/src/ezpyai/llm/providers/openai.py
index 30a99d3..a48e1bd 100644
--- a/src/ezpyai/llm/providers/openai.py
+++ b/src/ezpyai/llm/providers/openai.py
@@ -1,12 +1,17 @@
 import os
-import ezpyai.llm.exceptions as exceptions
-
-from typing import Annotated
+from typing import List, Dict
 from openai import OpenAI as _OpenAI
 
 from ezpyai._logger import logger
 from ezpyai.llm.providers._llm_provider import BaseLLMProvider
 from ezpyai.llm.prompt import Prompt
+
+from ezpyai.exceptions import (
+    PromptUserMessageMissingError,
+    LLMInferenceError,
+    LLMResponseEmptyError,
+)
+
 from ezpyai._constants import (
     ENV_VAR_NAME_OPENAI_API_KEY,
     ENV_VAR_NAME_OPENAI_ORGANIZATION,
@@ -91,9 +96,7 @@ def _get_system_message(self, message: str) -> dict:
     def _get_user_message(self, message: str) -> dict:
         return {"role": "user", "content": message}
 
-    def _prompt_to_messages(
-        self, prompt: Prompt
-    ) -> Annotated[list[dict], "Raises exceptions.NoUserMessage"]:
+    def _prompt_to_messages(self, prompt: Prompt) -> List[Dict]:
         messages = []
         if prompt.has_system_message():
             messages.append(self._get_system_message(prompt.get_system_message()))
@@ -102,16 +105,13 @@ def _prompt_to_messages(self, prompt: Prompt) -> List[Dict]:
             messages.append(self._get_user_message(prompt.get_context_as_string()))
 
         if not prompt.has_user_message():
-            raise exceptions.NoUserMessage()
+            raise PromptUserMessageMissingError()
 
         messages.append(self._get_user_message(prompt.get_user_message()))
 
         return messages
 
-    def get_response(self, prompt: Prompt) -> Annotated[
-        str,
-        "Raises exceptions.NoUserMessage, exceptions.NoLLMResponseMessage, exceptions.InvokeError",
-    ]:
+    def get_response(self, prompt: Prompt) -> str:
         messages = self._prompt_to_messages(prompt)
 
         try:
@@ -124,9 +124,9 @@ def get_response(self, prompt: Prompt) -> Annotated[
                 messages=messages,
             )
         except Exception as e:
-            raise exceptions.InvokeError() from e
+            raise LLMInferenceError() from e
 
         if not response.choices:
-            raise exceptions.NoLLMResponseMessage()
+            raise LLMResponseEmptyError()
 
         return response.choices[0].message.content