Commit: Various UX improvements

Various improvements

pablonyx authored Feb 6, 2025
2 parents 396f096 + f137fc7 commit a0a1b43
Showing 29 changed files with 466 additions and 316 deletions.
11 changes: 11 additions & 0 deletions backend/ee/onyx/db/persona.py
@@ -2,8 +2,11 @@
 
 from sqlalchemy.orm import Session
 
+from onyx.configs.constants import NotificationType
 from onyx.db.models import Persona__User
 from onyx.db.models import Persona__UserGroup
+from onyx.db.notification import create_notification
+from onyx.server.features.persona.models import PersonaSharedNotificationData
 
 
 def make_persona_private(
@@ -23,6 +26,14 @@ def make_persona_private(
         for user_uuid in user_ids:
             db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
 
+            create_notification(
+                user_id=user_uuid,
+                notif_type=NotificationType.PERSONA_SHARED,
+                db_session=db_session,
+                additional_data=PersonaSharedNotificationData(
+                    persona_id=persona_id,
+                ).model_dump(),
+            )
     if group_ids:
         for group_id in group_ids:
             db_session.add(
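The new create_notification call runs once per user the persona is shared with, storing a PERSONA_SHARED notification whose payload is the serialized PersonaSharedNotificationData. A minimal sketch of that payload, assuming PersonaSharedNotificationData is a small Pydantic model carrying only the persona id (consistent with the .model_dump() call above):

```python
# Sketch only: the real PersonaSharedNotificationData lives in
# onyx.server.features.persona.models; this assumes it is a plain
# Pydantic model with a single persona_id field.
from pydantic import BaseModel


class PersonaSharedNotificationData(BaseModel):
    persona_id: int


# model_dump() (Pydantic v2) is what gets stored as additional_data
payload = PersonaSharedNotificationData(persona_id=42).model_dump()
print(payload)  # {'persona_id': 42}
```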
5 changes: 3 additions & 2 deletions backend/ee/onyx/server/middleware/tenant_tracking.py
@@ -87,13 +87,14 @@ async def _get_tenant_id_from_request(
         if not is_valid_schema_name(tenant_id):
             raise HTTPException(status_code=400, detail="Invalid tenant ID format")
 
+        return tenant_id
+
     except Exception as e:
         logger.error(f"Unexpected error in _get_tenant_id_from_request: {str(e)}")
         raise HTTPException(status_code=500, detail="Internal server error")
 
-    finally:
-        if tenant_id:
-            return tenant_id
 
     # As a final step, check for explicit tenant_id cookie
     tenant_id_cookie = request.cookies.get(TENANT_ID_COOKIE_NAME)
     if tenant_id_cookie and is_valid_schema_name(tenant_id_cookie):
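This is more than a cleanup: in Python, a return inside a finally block overrides any exception that is mid-flight, so the old code swallowed both the 400 raised for an invalid tenant ID and the 500 re-raised in the except block whenever tenant_id was truthy, returning the (possibly invalid) value instead. Moving the return into the try body restores normal exception propagation. A self-contained illustration of the footgun:

```python
# Standalone demo of why `return` in `finally` is dangerous: the
# exception raised in `try` never reaches the caller.
def swallowed() -> str:
    try:
        raise ValueError("boom")  # stands in for the HTTPException above
    finally:
        return "no error escapes"  # discards the in-flight ValueError


print(swallowed())  # prints "no error escapes"; nothing is raised
```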
Changed file (path not captured in this excerpt):

@@ -12,8 +12,9 @@
 from onyx.agents.agent_search.deep_search.main.models import (
     AgentRefinedMetrics,
 )
-from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
+from onyx.agents.agent_search.deep_search.main.operations import (
+    dispatch_subquestion,
+    dispatch_subquestion_sep,
+)
 from onyx.agents.agent_search.deep_search.main.states import (
     InitialQuestionDecompositionUpdate,
@@ -111,7 +112,9 @@ def decompose_orig_question(
     )
     # dispatches custom events for subquestion tokens, adding in subquestion ids.
     streamed_tokens = dispatch_separated(
-        model.stream(msg), dispatch_subquestion(0, writer)
+        model.stream(msg),
+        dispatch_subquestion(0, writer),
+        sep_callback=dispatch_subquestion_sep(0, writer),
     )
 
     stop_event = StreamStopInfo(
Changed file (path not captured in this excerpt):

@@ -9,8 +9,9 @@
 from onyx.agents.agent_search.deep_search.main.models import (
     RefinementSubQuestion,
 )
-from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
+from onyx.agents.agent_search.deep_search.main.operations import (
+    dispatch_subquestion,
+    dispatch_subquestion_sep,
+)
 from onyx.agents.agent_search.deep_search.main.states import MainState
 from onyx.agents.agent_search.deep_search.main.states import (
@@ -96,7 +97,9 @@ def create_refined_sub_questions(
     model = graph_config.tooling.fast_llm
 
     streamed_tokens = dispatch_separated(
-        model.stream(msg), dispatch_subquestion(1, writer)
+        model.stream(msg),
+        dispatch_subquestion(1, writer),
+        sep_callback=dispatch_subquestion_sep(1, writer),
     )
     response = merge_content(*streamed_tokens)
19 changes: 19 additions & 0 deletions backend/onyx/agents/agent_search/deep_search/main/operations.py
@@ -9,6 +9,9 @@
     SubQuestionAnswerResults,
 )
 from onyx.agents.agent_search.shared_graph_utils.utils import write_custom_event
+from onyx.chat.models import StreamStopInfo
+from onyx.chat.models import StreamStopReason
+from onyx.chat.models import StreamType
 from onyx.chat.models import SubQuestionPiece
 from onyx.context.search.models import IndexFilters
 from onyx.tools.models import SearchQueryInfo
@@ -34,6 +37,22 @@ def _helper(sub_question_part: str, sep_num: int) -> None:
     return _helper
 
 
+def dispatch_subquestion_sep(level: int, writer: StreamWriter) -> Callable[[int], None]:
+    def _helper(sep_num: int) -> None:
+        write_custom_event(
+            "stream_finished",
+            StreamStopInfo(
+                stop_reason=StreamStopReason.FINISHED,
+                stream_type=StreamType.SUB_QUESTIONS,
+                level=level,
+                level_question_num=sep_num,
+            ),
+            writer,
+        )
+
+    return _helper
+
+
 def calculate_initial_agent_stats(
     decomp_answer_results: list[SubQuestionAnswerResults],
     original_question_stats: AgentChunkRetrievalStats,
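dispatch_subquestion_sep mirrors the currying style of dispatch_subquestion just above it: level and writer are bound once up front, and the returned callback needs only the separator number, emitting a stream_finished stop event for that (level, question) pair each time a sub-question boundary is crossed. A toy version of the pattern, with the stream writer replaced by a plain list (names are illustrative, not from the codebase):

```python
from typing import Callable


def make_sep_handler(level: int, sink: list[dict]) -> Callable[[int], None]:
    # `level` and `sink` play the roles of `level` and `writer`
    def _helper(sep_num: int) -> None:
        sink.append(
            {"event": "stream_finished", "level": level, "question": sep_num}
        )

    return _helper


events: list[dict] = []
on_sep = make_sep_handler(0, events)
on_sep(1)  # first sub-question finished streaming
on_sep(2)  # second sub-question finished streaming
print(events)
```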
3 changes: 3 additions & 0 deletions backend/onyx/agents/agent_search/shared_graph_utils/utils.py
@@ -295,6 +295,7 @@ def _dispatch_nonempty(
 def dispatch_separated(
     tokens: Iterator[BaseMessage],
     dispatch_event: Callable[[str, int], None],
+    sep_callback: Callable[[int], None] | None = None,
     sep: str = DISPATCH_SEP_CHAR,
 ) -> list[BaseMessage_Content]:
     num = 1
@@ -304,6 +305,8 @@ def dispatch_separated(
         if sep in content:
             sub_question_parts = content.split(sep)
             _dispatch_nonempty(sub_question_parts[0], dispatch_event, num)
+            if sep_callback:
+                sep_callback(num)
             num += 1
             _dispatch_nonempty(
                 "".join(sub_question_parts[1:]).strip(), dispatch_event, num
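With the optional sep_callback hook, dispatch_separated now notifies the caller each time it crosses a separator, i.e. whenever one sub-question's token stream is complete, before bumping the counter for the next one. A self-contained sketch of the control flow, with onyx's message types replaced by plain strings and the separator assumed to be a newline (the actual DISPATCH_SEP_CHAR is not shown in this diff):

```python
from typing import Callable, Iterator

SEP = "\n"  # assumption: stand-in for DISPATCH_SEP_CHAR


def dispatch_separated(
    tokens: Iterator[str],
    dispatch_event: Callable[[str, int], None],
    sep_callback: Callable[[int], None] | None = None,
    sep: str = SEP,
) -> list[str]:
    num = 1
    collected: list[str] = []
    for content in tokens:
        if sep in content:
            parts = content.split(sep)
            if parts[0]:
                dispatch_event(parts[0], num)
            if sep_callback:
                sep_callback(num)  # fires once per completed sub-question
            num += 1
            rest = "".join(parts[1:]).strip()
            if rest:
                dispatch_event(rest, num)
        elif content:
            dispatch_event(content, num)
        collected.append(content)
    return collected


dispatch_separated(
    iter(["What is X?", "\nWhat is Y?"]),
    lambda text, n: print(f"sub-question {n} token: {text!r}"),
    sep_callback=lambda n: print(f"sub-question {n} finished"),
)
```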
12 changes: 12 additions & 0 deletions backend/onyx/db/persona.py
@@ -20,6 +20,7 @@
 from onyx.configs.chat_configs import BING_API_KEY
 from onyx.configs.chat_configs import CONTEXT_CHUNKS_ABOVE
 from onyx.configs.chat_configs import CONTEXT_CHUNKS_BELOW
+from onyx.configs.constants import NotificationType
 from onyx.context.search.enums import RecencyBiasSetting
 from onyx.db.constants import SLACK_BOT_PERSONA_PREFIX
 from onyx.db.models import DocumentSet
@@ -33,6 +34,8 @@
 from onyx.db.models import User
 from onyx.db.models import User__UserGroup
 from onyx.db.models import UserGroup
+from onyx.db.notification import create_notification
+from onyx.server.features.persona.models import PersonaSharedNotificationData
 from onyx.server.features.persona.models import PersonaSnapshot
 from onyx.server.features.persona.models import PersonaUpsertRequest
 from onyx.utils.logger import setup_logger
@@ -170,6 +173,15 @@ def make_persona_private(
         for user_uuid in user_ids:
             db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
 
+            create_notification(
+                user_id=user_uuid,
+                notif_type=NotificationType.PERSONA_SHARED,
+                db_session=db_session,
+                additional_data=PersonaSharedNotificationData(
+                    persona_id=persona_id,
+                ).model_dump(),
+            )
+
     db_session.commit()
 
     # May cause error if someone switches down to MIT from EE
17 changes: 8 additions & 9 deletions backend/onyx/server/query_and_chat/chat_backend.py
@@ -717,15 +717,14 @@ def upload_files_for_chat(
         else ChatFileType.PLAIN_TEXT
     )
 
-    if file_type == ChatFileType.IMAGE:
-        file_content_io = file.file
-        # NOTE: Image conversion to JPEG used to be enforced here.
-        # This was removed to:
-        # 1. Preserve original file content for downloads
-        # 2. Maintain transparency in formats like PNG
-        # 3. Ameliorate issue with file conversion
-    else:
-        file_content_io = io.BytesIO(file.file.read())
+    file_content = file.file.read()  # Read the file content
+
+    # NOTE: Image conversion to JPEG used to be enforced here.
+    # This was removed to:
+    # 1. Preserve original file content for downloads
+    # 2. Maintain transparency in formats like PNG
+    # 3. Ameliorate issue with file conversion
+    file_content_io = io.BytesIO(file_content)
 
     new_content_type = file.content_type
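The rewrite also fixes a subtle hazard: file.file is a one-shot stream, so reading it per-branch made the bytes unavailable to any later consumer. Reading once and wrapping the bytes in io.BytesIO yields as many independent, seekable readers over the original content as needed. A minimal sketch:

```python
import io

raw = b"\x89PNG\r\n\x1a\n..."  # stands in for file.file.read()

store_copy = io.BytesIO(raw)  # e.g. persisted to the file store as-is
parse_copy = io.BytesIO(raw)  # e.g. inspected/previewed independently

assert store_copy.read() == parse_copy.read() == raw
```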
1 change: 1 addition & 0 deletions backend/tests/integration/Dockerfile
@@ -71,6 +71,7 @@ COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
 COPY ./onyx /app/onyx
 COPY ./shared_configs /app/shared_configs
 COPY ./alembic /app/alembic
+COPY ./alembic_tenants /app/alembic_tenants
 COPY ./alembic.ini /app/alembic.ini
 COPY ./pytest.ini /app/pytest.ini
 COPY supervisord.conf /usr/etc/supervisord.conf
50 changes: 29 additions & 21 deletions web/src/app/admin/configuration/llm/interfaces.ts
@@ -11,6 +11,7 @@ import {
   GeminiIcon,
   OpenSourceIcon,
   AnthropicSVG,
+  IconProps,
 } from "@/components/icons/icons";
 import { FaRobot } from "react-icons/fa";
@@ -74,37 +75,44 @@ export interface LLMProviderDescriptor {
 }
 
 export const getProviderIcon = (providerName: string, modelName?: string) => {
+  const modelNameToIcon = (
+    modelName: string,
+    fallbackIcon: ({ size, className }: IconProps) => JSX.Element
+  ): (({ size, className }: IconProps) => JSX.Element) => {
+    if (modelName?.toLowerCase().includes("amazon")) {
+      return AmazonIcon;
+    }
+    if (modelName?.toLowerCase().includes("phi")) {
+      return MicrosoftIconSVG;
+    }
+    if (modelName?.toLowerCase().includes("mistral")) {
+      return MistralIcon;
+    }
+    if (modelName?.toLowerCase().includes("llama")) {
+      return MetaIcon;
+    }
+    if (modelName?.toLowerCase().includes("gemini")) {
+      return GeminiIcon;
+    }
+    if (modelName?.toLowerCase().includes("claude")) {
+      return AnthropicIcon;
+    } else {
+      return fallbackIcon;
+    }
+  };
+
   switch (providerName) {
     case "openai":
-      // Special cases for openai based on modelName
-      if (modelName?.toLowerCase().includes("amazon")) {
-        return AmazonIcon;
-      }
-      if (modelName?.toLowerCase().includes("phi")) {
-        return MicrosoftIconSVG;
-      }
-      if (modelName?.toLowerCase().includes("mistral")) {
-        return MistralIcon;
-      }
-      if (modelName?.toLowerCase().includes("llama")) {
-        return MetaIcon;
-      }
-      if (modelName?.toLowerCase().includes("gemini")) {
-        return GeminiIcon;
-      }
-      if (modelName?.toLowerCase().includes("claude")) {
-        return AnthropicIcon;
-      }
-
-      return OpenAIIcon; // Default for openai
+      return modelNameToIcon(modelName || "", OpenAIIcon);
     case "anthropic":
       return AnthropicSVG;
     case "bedrock":
       return AWSIcon;
     case "azure":
       return AzureIcon;
     default:
-      return CPUIcon;
+      return modelNameToIcon(modelName || "", CPUIcon);
   }
 };
2 changes: 1 addition & 1 deletion web/src/app/admin/settings/SettingsForm.tsx
@@ -231,7 +231,7 @@ export function SettingsForm() {
             <Checkbox
               label="Pro Search Disabled"
               sublabel="If set, users will not be able to use Pro Search."
-              checked={settings.pro_search_disabled}
+              checked={settings.pro_search_disabled ?? false}
               onChange={(e) =>
                 handleToggleSettingsField("pro_search_disabled", e.target.checked)
               }
2 changes: 1 addition & 1 deletion web/src/app/admin/settings/interfaces.ts
@@ -10,7 +10,7 @@ export interface Settings {
   notifications: Notification[];
   needs_reindexing: boolean;
   gpu_enabled: boolean;
-  pro_search_disabled: boolean;
+  pro_search_disabled: boolean | null;
   product_gating: GatingType;
   auto_scroll: boolean;
 }
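Typing pro_search_disabled as boolean | null lets the flag be absent until an admin explicitly sets it, which is why SettingsForm above coalesces it with ?? false. A hypothetical Pydantic counterpart on the server side (an assumption for illustration; the backend model is not part of this diff):

```python
# Assumption: the backend settings model makes the flag optional,
# defaulting to None (i.e. "never configured") rather than False.
from pydantic import BaseModel


class Settings(BaseModel):
    pro_search_disabled: bool | None = None


# The UI's `?? false` has the same effect as this coalescing:
ui_checked = Settings().pro_search_disabled or False
print(ui_checked)  # False when the flag was never set
```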