Skip to content

Commit

Permalink
Improve RAG preset experience (#617)
Browse files Browse the repository at this point in the history
* Default ML response processors to append outputs to ext in response

Signed-off-by: Tyler Ohlsen <ohltyler@amazon.com>

* Auto-navigate to ml outputs tab if found; minor reformatting

Signed-off-by: Tyler Ohlsen <ohltyler@amazon.com>

* Update presets to omit ext.ml_inference

Signed-off-by: Tyler Ohlsen <ohltyler@amazon.com>

---------

Signed-off-by: Tyler Ohlsen <ohltyler@amazon.com>
  • Loading branch information
ohltyler authored Feb 13, 2025
1 parent 474a8a7 commit 2d1d174
Show file tree
Hide file tree
Showing 5 changed files with 65 additions and 35 deletions.
4 changes: 2 additions & 2 deletions common/interfaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -281,8 +281,8 @@ export type SearchPipelineConfig = {
export type MLInferenceProcessor = IngestProcessor & {
ml_inference: {
model_id: string;
input_map?: {};
output_map?: {};
input_map?: {}[];
output_map?: {}[];
[key: string]: any;
};
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,34 +18,26 @@ import {
ML_RESPONSE_PROCESSOR_EXAMPLE_DOCS_LINK,
} from '../../../common';

interface MLResponseProps {
mlResponse: {};
interface MLOutputsProps {
mlOutputs: {};
}

/**
* Small component to render the ML response within a raw search response.
* Small component to render the ML outputs within a raw search response.
*/
export function MLResponse(props: MLResponseProps) {
export function MLOutputs(props: MLOutputsProps) {
return (
<>
<EuiSpacer size="s" />
<EuiText size="s">
Showing results stored in <EuiCode>ext.ml_inference</EuiCode> from the
search response.{' '}
<EuiLink href={ML_RESPONSE_PROCESSOR_EXAMPLE_DOCS_LINK} target="_blank">
See an example
</EuiLink>
</EuiText>
<EuiSpacer size="m" />
{isEmpty(props.mlResponse) ? (
<EuiEmptyPrompt title={<h2>No response found</h2>} titleSize="s" />
{isEmpty(props.mlOutputs) ? (
<EuiEmptyPrompt title={<h2>No outputs found</h2>} titleSize="s" />
) : (
<EuiCodeEditor
mode="json"
theme="textmate"
width="100%"
height="100%"
value={customStringify(props.mlResponse)}
value={customStringify(props.mlOutputs)}
readOnly={true}
setOptions={{
fontSize: '12px',
Expand All @@ -55,6 +47,14 @@ export function MLResponse(props: MLResponseProps) {
tabSize={2}
/>
)}
<EuiSpacer size="s" />
<EuiText size="s" color="subdued">
Showing ML outputs stored in <EuiCode>ext.ml_inference</EuiCode> from
the search response.{' '}
<EuiLink href={ML_RESPONSE_PROCESSOR_EXAMPLE_DOCS_LINK} target="_blank">
See an example
</EuiLink>
</EuiText>
</>
);
}
34 changes: 20 additions & 14 deletions public/general_components/results/results.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
* SPDX-License-Identifier: Apache-2.0
*/

import React, { useState } from 'react';
import { get } from 'lodash';
import React, { useEffect, useState } from 'react';
import { get, isEmpty } from 'lodash';
import {
EuiPanel,
EuiFlexGroup,
Expand All @@ -14,25 +14,31 @@ import {
import { SearchResponse } from '../../../common';
import { ResultsTable } from './results_table';
import { ResultsJSON } from './results_json';
import { MLResponse } from './ml_response';
import { MLOutputs } from './ml_outputs';

interface ResultsProps {
response: SearchResponse;
}

enum VIEW {
HITS_TABLE = 'hits_table',
ML_OUTPUTS = 'ml_outputs',
RAW_JSON = 'raw_json',
ML_RESPONSE = 'ml_response',
}

/**
* Basic component to view OpenSearch response results. Can view hits in a tabular format,
* or the raw JSON response.
*/
export function Results(props: ResultsProps) {
// selected view state
// selected view state. auto-navigate to the ML outputs tab if there are values found
// under "ext.ml_inference" in the search response.
const [selectedView, setSelectedView] = useState<VIEW>(VIEW.HITS_TABLE);
useEffect(() => {
if (!isEmpty(get(props.response, 'ext.ml_inference', {}))) {
setSelectedView(VIEW.ML_OUTPUTS);
}
}, [props.response]);

return (
<EuiPanel
Expand All @@ -55,12 +61,12 @@ export function Results(props: ResultsProps) {
label: 'Hits',
},
{
id: VIEW.RAW_JSON,
label: 'Raw JSON',
id: VIEW.ML_OUTPUTS,
label: 'ML outputs',
},
{
id: VIEW.ML_RESPONSE,
label: 'ML response',
id: VIEW.RAW_JSON,
label: 'Raw JSON',
},
]}
idSelected={selectedView}
Expand All @@ -73,14 +79,14 @@ export function Results(props: ResultsProps) {
{selectedView === VIEW.HITS_TABLE && (
<ResultsTable hits={props.response?.hits?.hits || []} />
)}
{selectedView === VIEW.ML_OUTPUTS && (
<MLOutputs
mlOutputs={getMLResponseFromSearchResponse(props.response)}
/>
)}
{selectedView === VIEW.RAW_JSON && (
<ResultsJSON response={props.response} />
)}
{selectedView === VIEW.ML_RESPONSE && (
<MLResponse
mlResponse={getMLResponseFromSearchResponse(props.response)}
/>
)}
</>
</EuiFlexItem>
</EuiFlexGroup>
Expand Down
5 changes: 2 additions & 3 deletions public/pages/workflows/new_workflow/quick_configure_modal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -741,8 +741,7 @@ function updateRAGSearchResponseProcessors(
llmInterface: ModelInterface | undefined
): WorkflowConfig {
config.search.enrichResponse.processors.forEach((processor, idx) => {
// prefill ML inference. By default, store the inference results
// under the `ext.ml_inference` response body.
// prefill ML inference
if (processor.type === PROCESSOR_TYPE.ML) {
config.search.enrichResponse.processors[idx].fields.forEach((field) => {
if (field.id === 'model' && fields.llmId) {
Expand Down Expand Up @@ -785,7 +784,7 @@ function updateRAGSearchResponseProcessors(
...outputMap[0],
value: {
transformType: TRANSFORM_TYPE.FIELD,
value: `ext.ml_inference.${fields.llmResponseField}`,
value: fields.llmResponseField,
},
};
} else {
Expand Down
27 changes: 26 additions & 1 deletion public/utils/config_to_template_utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
*/

import { FormikValues } from 'formik';
import { isEmpty } from 'lodash';
import { get, isEmpty } from 'lodash';
import {
TemplateFlows,
TemplateNode,
Expand Down Expand Up @@ -233,6 +233,31 @@ export function processorConfigsToTemplateProcessors(
);
}

// process where the returned values from the output map should be stored.
// by default, if many-to-one, prefix each output key with "ext.ml_inference." so that
// the outputs will be stored in a standalone field in the search response, instead of
// being appended to each document redundantly.
const oneToOne = formValues?.one_to_one as boolean | undefined;
if (
oneToOne !== undefined &&
oneToOne === false &&
processor.ml_inference?.output_map !== undefined
) {
const updatedOutputMap = processor.ml_inference.output_map?.map(
(mapEntry) => {
let updatedMapEntry = {};
Object.keys(mapEntry).forEach((key) => {
updatedMapEntry = {
...updatedMapEntry,
[`ext.ml_inference.${key}`]: get(mapEntry, key),
};
});
return updatedMapEntry;
}
);
processor.ml_inference.output_map = updatedOutputMap;
}

// process optional fields
let additionalFormValues = {} as FormikValues;
Object.keys(formValues).forEach((formKey: string) => {
Expand Down

0 comments on commit 2d1d174

Please sign in to comment.