Skip to content

Commit

Permalink
feat: asChatEngine function for index (#1640)
Browse files Browse the repository at this point in the history
  • Loading branch information
thucpn authored Feb 11, 2025
1 parent 83cff12 commit d924c63
Show file tree
Hide file tree
Showing 10 changed files with 146 additions and 10 deletions.
6 changes: 6 additions & 0 deletions .changeset/tall-kids-prove.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
"@llamaindex/core": patch
"llamaindex": patch
---

feat: asChatEngine function for index
15 changes: 15 additions & 0 deletions examples/chat-engine/keyword-index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { Document, KeywordTableIndex } from "llamaindex";
import essay from "../essay";

/** Build a KeywordTableIndex over the bundled essay and ask it a question. */
async function main() {
  const index = await KeywordTableIndex.fromDocuments([
    new Document({ text: essay }),
  ]);
  const chatEngine = index.asChatEngine();

  const { message } = await chatEngine.chat({
    message: "What is Harsh Mistress?",
  });
  console.log(message.content);
}

main().catch(console.error);
17 changes: 17 additions & 0 deletions examples/chat-engine/summary-index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import { Document, SummaryIndex, SummaryRetrieverMode } from "llamaindex";
import essay from "../essay";

async function main() {
const document = new Document({ text: essay });
const index = await SummaryIndex.fromDocuments([document]);
const chatEngine = index.asChatEngine({
mode: SummaryRetrieverMode.LLM,
});

const response = await chatEngine.chat({
message: "Summary about the author",
});
console.log(response.message.content);
}

main().catch(console.error);
15 changes: 15 additions & 0 deletions examples/chat-engine/vector-store-index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { Document, VectorStoreIndex } from "llamaindex";
import essay from "../essay";

async function main() {
const document = new Document({ text: essay });
const index = await VectorStoreIndex.fromDocuments([document]);
const chatEngine = index.asChatEngine({ similarityTopK: 5 });

const response = await chatEngine.chat({
message: "What did I work on in February 2021?",
});
console.log(response.message.content);
}

main().catch(console.error);
20 changes: 11 additions & 9 deletions packages/core/src/chat-engine/context-chat-engine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,16 @@ import type {
import { DefaultContextGenerator } from "./default-context-generator";
import type { ContextGenerator } from "./type";

/**
 * Constructor options for the ContextChatEngine.
 *
 * Only `retriever` is required; every other field has a fallback in the
 * constructor (e.g. `chatModel` defaults to `Settings.llm`).
 */
export type ContextChatEngineOptions = {
  /** Retriever used to fetch context nodes for each chat message. */
  retriever: BaseRetriever;
  /** LLM used to generate responses; defaults to `Settings.llm`. */
  chatModel?: LLM | undefined;
  /** Initial messages to seed the chat memory buffer. */
  chatHistory?: ChatMessage[] | undefined;
  /** Prompt template used to render retrieved context into the system message. */
  contextSystemPrompt?: ContextSystemPrompt | undefined;
  /** Postprocessors applied to retrieved nodes before prompting. */
  nodePostprocessors?: BaseNodePostprocessor[] | undefined;
  /** Extra system prompt prepended to the conversation. */
  systemPrompt?: string | undefined;
  /** Role assigned to the injected context message — presumably "system" by default; confirm in DefaultContextGenerator. */
  contextRole?: MessageType | undefined;
};

/**
* ContextChatEngine uses the Index to get the appropriate context for each query.
* The context is stored in the system prompt, and the chat history is chunk,
Expand All @@ -35,15 +45,7 @@ export class ContextChatEngine extends PromptMixin implements BaseChatEngine {
return this.memory.getMessages();
}

constructor(init: {
retriever: BaseRetriever;
chatModel?: LLM | undefined;
chatHistory?: ChatMessage[] | undefined;
contextSystemPrompt?: ContextSystemPrompt | undefined;
nodePostprocessors?: BaseNodePostprocessor[] | undefined;
systemPrompt?: string | undefined;
contextRole?: MessageType | undefined;
}) {
constructor(init: ContextChatEngineOptions) {
super();
this.chatModel = init.chatModel ?? Settings.llm;
this.memory = new ChatMemoryBuffer({ chatHistory: init?.chatHistory });
Expand Down
5 changes: 4 additions & 1 deletion packages/core/src/chat-engine/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@ export {
type NonStreamingChatEngineParams,
type StreamingChatEngineParams,
} from "./base";
export { ContextChatEngine } from "./context-chat-engine";
export {
ContextChatEngine,
type ContextChatEngineOptions,
} from "./context-chat-engine";
export { DefaultContextGenerator } from "./default-context-generator";
export { SimpleChatEngine } from "./simple-chat-engine";
12 changes: 12 additions & 0 deletions packages/llamaindex/src/indices/BaseIndex.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
import type {
BaseChatEngine,
ContextChatEngineOptions,
} from "@llamaindex/core/chat-engine";
import type { BaseQueryEngine } from "@llamaindex/core/query-engine";
import type { BaseSynthesizer } from "@llamaindex/core/response-synthesizers";
import type { BaseRetriever } from "@llamaindex/core/retriever";
Expand Down Expand Up @@ -53,6 +57,14 @@ export abstract class BaseIndex<T> {
responseSynthesizer?: BaseSynthesizer;
}): BaseQueryEngine;

/**
 * Create a chat engine that answers queries using this index's retriever
 * to supply context for each message.
 * @param options ContextChatEngine options; the retriever is provided by
 *   the index itself and therefore omitted here.
 * @returns a chat engine backed by this index
 */
abstract asChatEngine(
  options?: Omit<ContextChatEngineOptions, "retriever">,
): BaseChatEngine;

/**
* Insert a document into the index.
* @param document
Expand Down
17 changes: 17 additions & 0 deletions packages/llamaindex/src/indices/keyword/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,11 @@ import { BaseRetriever } from "@llamaindex/core/retriever";
import type { BaseDocumentStore } from "@llamaindex/core/storage/doc-store";
import { extractText } from "@llamaindex/core/utils";
import { llmFromSettingsOrContext } from "../../Settings.js";
import {
ContextChatEngine,
type BaseChatEngine,
type ContextChatEngineOptions,
} from "../../engines/chat/index.js";

export interface KeywordIndexOptions {
nodes?: BaseNode[];
Expand Down Expand Up @@ -152,6 +157,10 @@ const KeywordTableRetrieverMap = {
[KeywordTableRetrieverMode.RAKE]: KeywordTableRAKERetriever,
};

/**
 * Options for `KeywordTableIndex.asChatEngine`: ContextChatEngine options
 * where the retriever is optional — when omitted, the index's own default
 * retriever is used.
 */
export type KeywordTableIndexChatEngineOptions = {
  /** Custom retriever; defaults to `index.asRetriever()`. */
  retriever?: BaseRetriever;
} & Omit<ContextChatEngineOptions, "retriever">;

/**
* The KeywordTableIndex, an index that extracts keywords from each Node and builds a mapping from each keyword to the corresponding Nodes of that keyword.
*/
Expand Down Expand Up @@ -251,6 +260,14 @@ export class KeywordTableIndex extends BaseIndex<KeywordTable> {
);
}

/**
 * Create a chat engine backed by this keyword table index.
 * Falls back to the index's default retriever when none is supplied.
 */
asChatEngine(options?: KeywordTableIndexChatEngineOptions): BaseChatEngine {
  const { retriever: customRetriever, ...engineOptions } = options ?? {};
  const retriever = customRetriever ?? this.asRetriever();
  return new ContextChatEngine({ retriever, ...engineOptions });
}

static async extractKeywords(
text: string,
serviceContext?: ServiceContext,
Expand Down
20 changes: 20 additions & 0 deletions packages/llamaindex/src/indices/summary/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@ import {
llmFromSettingsOrContext,
nodeParserFromSettingsOrContext,
} from "../../Settings.js";
import type {
BaseChatEngine,
ContextChatEngineOptions,
} from "../../engines/chat/index.js";
import { ContextChatEngine } from "../../engines/chat/index.js";
import { RetrieverQueryEngine } from "../../engines/query/index.js";
import type { StorageContext } from "../../storage/StorageContext.js";
import { storageContextFromDefaults } from "../../storage/StorageContext.js";
Expand All @@ -44,6 +49,11 @@ export enum SummaryRetrieverMode {
LLM = "llm",
}

/**
 * Options for `SummaryIndex.asChatEngine`: ContextChatEngine options with an
 * optional retriever override and a retriever `mode` used when building the
 * default retriever.
 */
export type SummaryIndexChatEngineOptions = {
  /** Custom retriever; defaults to `index.asRetriever({ mode })`. */
  retriever?: BaseRetriever;
  /** Retriever mode used when no retriever is given; defaults to `SummaryRetrieverMode.DEFAULT`. */
  mode?: SummaryRetrieverMode;
} & Omit<ContextChatEngineOptions, "retriever">;

export interface SummaryIndexOptions {
nodes?: BaseNode[] | undefined;
indexStruct?: IndexList | undefined;
Expand Down Expand Up @@ -193,6 +203,16 @@ export class SummaryIndex extends BaseIndex<IndexList> {
);
}

/**
 * Create a chat engine backed by this summary index.
 * When no retriever is supplied, one is built from the requested mode
 * (defaulting to `SummaryRetrieverMode.DEFAULT`).
 */
asChatEngine(options?: SummaryIndexChatEngineOptions): BaseChatEngine {
  const { retriever, mode, ...engineOptions } = options ?? {};
  const contextRetriever =
    retriever ?? this.asRetriever({ mode: mode ?? SummaryRetrieverMode.DEFAULT });
  return new ContextChatEngine({
    retriever: contextRetriever,
    ...engineOptions,
  });
}

static async buildIndexFromNodes(
nodes: BaseNode[],
docStore: BaseDocumentStore,
Expand Down
29 changes: 29 additions & 0 deletions packages/llamaindex/src/indices/vectorStore/index.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
import {
ContextChatEngine,
type ContextChatEngineOptions,
} from "@llamaindex/core/chat-engine";
import { IndexDict, IndexStructType } from "@llamaindex/core/data-structs";
import {
DEFAULT_SIMILARITY_TOP_K,
Expand Down Expand Up @@ -59,6 +63,12 @@ export interface VectorIndexConstructorProps extends BaseIndexInit<IndexDict> {
vectorStores?: VectorStoreByType | undefined;
}

/**
 * Options for `VectorStoreIndex.asChatEngine`: ContextChatEngine options with
 * an optional retriever override, plus retrieval tuning parameters used when
 * building the default retriever.
 */
export type VectorIndexChatEngineOptions = {
  /** Custom retriever; defaults to `index.asRetriever({ similarityTopK, filters: preFilters })`. */
  retriever?: BaseRetriever;
  /** Number of nodes to retrieve per query when using the default retriever. */
  similarityTopK?: number;
  /** Metadata filters applied by the default retriever. */
  preFilters?: MetadataFilters;
} & Omit<ContextChatEngineOptions, "retriever">;

/**
* The VectorStoreIndex, an index that stores the nodes only according to their vector embeddings.
*/
Expand Down Expand Up @@ -309,6 +319,25 @@ export class VectorStoreIndex extends BaseIndex<IndexDict> {
);
}

/**
 * Convert the index to a chat engine.
 *
 * Builds a retriever from `similarityTopK`/`preFilters` unless a custom one
 * is supplied, then wires it into a ContextChatEngine so every chat message
 * is answered with retrieved context.
 * @param options chat engine and retrieval options
 * @returns a ContextChatEngine backed by this index's retriever
 */
asChatEngine(options: VectorIndexChatEngineOptions = {}) {
  const { retriever, similarityTopK, preFilters, ...engineOptions } = options;
  const contextRetriever =
    retriever ?? this.asRetriever({ similarityTopK, filters: preFilters });
  return new ContextChatEngine({
    retriever: contextRetriever,
    ...engineOptions,
  });
}

protected async insertNodesToStore(
newIds: string[],
nodes: BaseNode[],
Expand Down

0 comments on commit d924c63

Please sign in to comment.