Commit

fix imports via ruff
Signed-off-by: Stephanie <yangcao@redhat.com>
yangcao77 committed Jan 22, 2025
1 parent c147719 commit e0be713
Showing 15 changed files with 19 additions and 22 deletions.
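
All of the diffs below are mechanical re-orderings produced by ruff's import sorting. As a hedged illustration, the sketch that follows shows the layout those rules enforce; the command and the first-party grouping of "ols" are assumptions about typical ruff usage, not something recorded in this commit.

# Minimal sketch of the import layout enforced by ruff's isort rules
# (rule group "I"), typically applied with:  ruff check --select I --fix .
# Groups: standard library, then third-party, then first-party ("ols" is
# assumed to be configured as first-party), alphabetized within each group
# and separated by a single blank line.

# standard library
import logging
from unittest.mock import patch

# third-party
import pytest
from langchain_core.messages import AIMessage, HumanMessage

# first-party (assumed grouping for this repository)
from ols import config
from ols.app.models.models import CacheEntry
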
10 changes: 5 additions & 5 deletions ols/app/endpoints/conversations.py
@@ -11,18 +11,18 @@

from ols import config
from ols.app.endpoints.ols import (
-    retrieve_user_id,
    retrieve_previous_input,
    retrieve_skip_user_id_check,
+    retrieve_user_id,
)
from ols.app.models.models import (
-    ErrorResponse,
-    ForbiddenResponse,
-    UnauthorizedResponse,
+    CacheEntry,
    ChatHistoryResponse,
    ConversationDeletionResponse,
+    ErrorResponse,
+    ForbiddenResponse,
    ListConversationsResponse,
-    CacheEntry,
+    UnauthorizedResponse,
)
from ols.src.auth.auth import get_auth_dependency

1 change: 0 additions & 1 deletion ols/app/endpoints/ols.py
@@ -37,7 +37,6 @@
from ols.utils import errors_parsing, suid
from ols.utils.token_handler import PromptTooLongError

-
logger = logging.getLogger(__name__)

router = APIRouter(tags=["query"])
3 changes: 1 addition & 2 deletions ols/app/models/models.py
@@ -5,11 +5,10 @@
from typing import Any, Dict, Optional, Self, Union

from langchain.llms.base import LLM
+from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from pydantic import BaseModel, field_validator, model_validator
from pydantic.dataclasses import dataclass

-from langchain_core.messages import AIMessage, HumanMessage, BaseMessage
-
from ols.constants import MEDIA_TYPE_JSON, MEDIA_TYPE_TEXT
from ols.customize import prompts
from ols.utils import suid
2 changes: 1 addition & 1 deletion ols/app/routers.py
@@ -4,11 +4,11 @@

from ols.app.endpoints import (
    authorized,
+    conversations,
    feedback,
    health,
    ols,
    streaming_ols,
-    conversations,
)
from ols.app.metrics import metrics

2 changes: 1 addition & 1 deletion tests/benchmarks/test_prompt_generator.py
@@ -3,6 +3,7 @@
# pylint: disable=W0621

import pytest
+from langchain_core.messages import AIMessage, HumanMessage

from ols.constants import (
    GPT35_TURBO,
@@ -16,7 +17,6 @@
    PROVIDER_WATSONX,
)
from ols.src.prompts.prompt_generator import GeneratePrompt
-from langchain_core.messages import AIMessage, HumanMessage

# providers and models used by parametrized benchmarks
provider_and_model = (
1 change: 1 addition & 0 deletions tests/integration/test_conversations.py
@@ -1,6 +1,7 @@
"""Integration tests for /conversations REST API endpoints."""

from unittest.mock import patch
+
import pytest
import requests
from fastapi.testclient import TestClient
1 change: 0 additions & 1 deletion tests/integration/test_redis.py
@@ -7,7 +7,6 @@
from ols.app.models.models import CacheEntry
from ols.src.cache.redis_cache import RedisCache

-
USER_ID = "00000000-0000-0000-0000-000000000001"
CONVERSATION_ID = "00000000-0000-0000-0000-000000000002"

2 changes: 1 addition & 1 deletion tests/unit/app/endpoints/test_ols.py
@@ -8,6 +8,7 @@

import pytest
from fastapi import HTTPException
+from langchain_core.messages import AIMessage, HumanMessage

from ols import config, constants
from ols.app.endpoints import ols
@@ -25,7 +26,6 @@
from ols.utils.errors_parsing import DEFAULT_ERROR_MESSAGE
from ols.utils.redactor import Redactor, RegexFilter
from ols.utils.token_handler import PromptTooLongError
-from langchain_core.messages import AIMessage, HumanMessage


@pytest.fixture(scope="function")
2 changes: 1 addition & 1 deletion tests/unit/app/models/test_models.py
@@ -1,6 +1,7 @@
"""Unit tests for the API models."""

import pytest
+from langchain_core.messages import AIMessage, HumanMessage
from pydantic import ValidationError

from ols.app.models.models import (
@@ -18,7 +19,6 @@
)
from ols.constants import MEDIA_TYPE_JSON, MEDIA_TYPE_TEXT
from ols.utils import suid
-from langchain_core.messages import AIMessage, HumanMessage


class TestLLM:
2 changes: 1 addition & 1 deletion tests/unit/cache/test_in_memory_cache.py
@@ -1,13 +1,13 @@
"""Unit tests for InMemoryCache class."""

import pytest
+from langchain_core.messages import AIMessage, HumanMessage

from ols import constants
from ols.app.models.config import InMemoryCacheConfig
from ols.app.models.models import CacheEntry
from ols.src.cache.in_memory_cache import InMemoryCache
from ols.utils import suid
-from langchain_core.messages import AIMessage, HumanMessage

conversation_id = suid.get_suid()
user_provided_user_id = "test-user1"
4 changes: 2 additions & 2 deletions tests/unit/cache/test_postgres_cache.py
@@ -5,13 +5,13 @@

import psycopg2
import pytest
+from langchain_core.messages import AIMessage, HumanMessage

from ols.app.models.config import PostgresConfig
-from ols.app.models.models import CacheEntry, MessageEncoder, MessageDecoder
+from ols.app.models.models import CacheEntry, MessageDecoder, MessageEncoder
from ols.src.cache.cache_error import CacheError
from ols.src.cache.postgres_cache import PostgresCache
from ols.utils import suid
-from langchain_core.messages import AIMessage, HumanMessage

user_id = suid.get_suid()
conversation_id = suid.get_suid()
4 changes: 2 additions & 2 deletions tests/unit/cache/test_redis_cache.py
@@ -3,14 +3,14 @@
from unittest.mock import patch

import pytest
+from langchain_core.messages import AIMessage, HumanMessage

from ols import constants
from ols.app.models.config import RedisConfig
-from ols.app.models.models import CacheEntry, MessageEncoder, MessageDecoder
+from ols.app.models.models import CacheEntry, MessageDecoder, MessageEncoder
from ols.src.cache.redis_cache import RedisCache
from ols.utils import suid
from tests.mock_classes.mock_redis_client import MockRedisClient
-from langchain_core.messages import AIMessage, HumanMessage

conversation_id = suid.get_suid()
cache_entry_1 = CacheEntry(
3 changes: 1 addition & 2 deletions tests/unit/prompts/test_prompt_generator.py
@@ -8,6 +8,7 @@
    PromptTemplate,
    SystemMessagePromptTemplate,
)
+from langchain_core.messages import AIMessage, HumanMessage

from ols.constants import (
    GPT35_TURBO,
@@ -21,8 +22,6 @@
    restructure_rag_context_pre,
)

-from langchain_core.messages import AIMessage, HumanMessage
-
model = [GRANITE_13B_CHAT_V2, GPT35_TURBO]

system_instruction = """
2 changes: 1 addition & 1 deletion tests/unit/query_helpers/test_docs_summarizer.py
@@ -4,6 +4,7 @@
from unittest.mock import ANY, patch

import pytest
+from langchain_core.messages import HumanMessage

from ols import config
from ols.app.models.config import LoggingConfig
@@ -15,7 +16,6 @@
from tests.mock_classes.mock_llama_index import MockLlamaIndex
from tests.mock_classes.mock_llm_chain import mock_llm_chain
from tests.mock_classes.mock_llm_loader import mock_llm_loader
-from langchain_core.messages import HumanMessage

conversation_id = suid.get_suid()

2 changes: 1 addition & 1 deletion tests/unit/utils/test_token_handler.py
@@ -4,11 +4,11 @@
from unittest import TestCase, mock

import pytest
+from langchain_core.messages import AIMessage, HumanMessage

from ols.constants import TOKEN_BUFFER_WEIGHT, ModelFamily
from ols.utils.token_handler import PromptTooLongError, TokenHandler
from tests.mock_classes.mock_retrieved_node import MockRetrievedNode
-from langchain_core.messages import HumanMessage, AIMessage


class TestTokenHandler(TestCase):