Skip to content

Commit

Permalink
Merge branch 'develop' into sif-dev
Browse files Browse the repository at this point in the history
  • Loading branch information
actions-user committed Jan 3, 2025
2 parents d67fc11 + 7cb04dc commit cecad33
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 1 deletion.
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ IMAGE_OPENAI_MODEL= # Default: dall-e-3
ETERNALAI_URL=
ETERNALAI_MODEL= # Default: "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16"
ETERNALAI_API_KEY=
ETERNAL_AI_LOG_REQUEST=false # Default: false

GROK_API_KEY= # GROK API Key
GROQ_API_KEY= # Starts with gsk_
Expand Down
47 changes: 46 additions & 1 deletion packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,6 @@ export async function generateText({
switch (provider) {
// OPENAI & LLAMACLOUD shared same structure.
case ModelProviderName.OPENAI:
case ModelProviderName.ETERNALAI:
case ModelProviderName.ALI_BAILIAN:
case ModelProviderName.VOLENGINE:
case ModelProviderName.LLAMACLOUD:
Expand Down Expand Up @@ -347,6 +346,52 @@ export async function generateText({
break;
}

            // EternalAI serves an OpenAI-compatible API, so the OpenAI client
            // is reused here with the EternalAI endpoint; a custom fetch wrapper
            // adds opt-in request/response wire logging.
            case ModelProviderName.ETERNALAI: {
                elizaLogger.debug("Initializing EternalAI model.");
                const openai = createOpenAI({
                    apiKey,
                    baseURL: endpoint,
                    fetch: async (url: string, options: any) => {
                        // Delegate the actual HTTP call to the runtime's fetch.
                        const fetching = await runtime.fetch(url, options);
                        // Wire logging is gated by the ETERNAL_AI_LOG_REQUEST
                        // setting (default false, per .env.example).
                        // NOTE(review): key name is inconsistent with the other
                        // ETERNALAI_* settings — confirm the intended spelling.
                        if (
                            parseBooleanFromText(
                                runtime.getSetting("ETERNAL_AI_LOG_REQUEST")
                            )
                        ) {
                            elizaLogger.info(
                                "Request data: ",
                                JSON.stringify(options, null, 2)
                            );
                            // Clone before reading so the body stream returned
                            // to the caller is not consumed by the logger.
                            const clonedResponse = fetching.clone();
                            // NOTE(review): fire-and-forget with no .catch — a
                            // non-JSON body would surface as an unhandled
                            // rejection; confirm this is acceptable.
                            clonedResponse.json().then((data) => {
                                elizaLogger.info(
                                    "Response data: ",
                                    JSON.stringify(data, null, 2)
                                );
                            });
                        }
                        return fetching;
                    },
                });

                // Generate the completion via the Vercel AI SDK, preferring the
                // character's own system prompt, then the global setting.
                const { text: openaiResponse } = await aiGenerateText({
                    model: openai.languageModel(model),
                    prompt: context,
                    system:
                        runtime.character.system ??
                        settings.SYSTEM_PROMPT ??
                        undefined,
                    temperature: temperature,
                    maxTokens: max_response_length,
                    frequencyPenalty: frequency_penalty,
                    presencePenalty: presence_penalty,
                });

                response = openaiResponse;
                elizaLogger.debug("Received response from EternalAI model.");
                break;
            }

case ModelProviderName.GOOGLE: {
const google = createGoogleGenerativeAI({
apiKey,
Expand Down

0 comments on commit cecad33

Please sign in to comment.