Merge pull request #2082 from francis2tm/feat/atoma-provider
feat: atoma provider
shakkernerd authored Jan 16, 2025
2 parents 3b334cb + 5c0f8f3 commit ac87ec7
Showing 5 changed files with 77 additions and 1 deletion.
7 changes: 7 additions & 0 deletions .env.example
@@ -87,6 +87,13 @@ EMBEDDING_OPENAI_MODEL= # Default: text-embedding-3-small
IMAGE_OPENAI_MODEL= # Default: dall-e-3
USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for local

# Atoma SDK Configuration
ATOMASDK_BEARER_AUTH= # Atoma SDK Bearer Auth token
ATOMA_API_URL= # Default: https://api.atoma.network/v1
SMALL_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct
MEDIUM_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct
LARGE_ATOMA_MODEL= # Default: meta-llama/Llama-3.3-70B-Instruct

# Eternal AI's Decentralized Inference API
ETERNALAI_URL=
ETERNALAI_MODEL= # Default: "NousResearch/Hermes-3-Llama-3.1-70B-FP8"
5 changes: 5 additions & 0 deletions agent/src/index.ts
@@ -476,6 +476,11 @@ export function getTokenForProvider(
character.settings?.secrets?.VENICE_API_KEY ||
settings.VENICE_API_KEY
);
case ModelProviderName.ATOMA:
return (
character.settings?.secrets?.ATOMASDK_BEARER_AUTH ||
settings.ATOMASDK_BEARER_AUTH
);
case ModelProviderName.AKASH_CHAT_API:
return (
character.settings?.secrets?.AKASH_CHAT_API_KEY ||
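
For illustration, a minimal TypeScript sketch of the resolution order the new ATOMA case follows: a character-scoped secret takes precedence over the global ATOMASDK_BEARER_AUTH setting. The character object and token value below are hypothetical, not taken from the repository.

// Hypothetical character fragment; only the fields relevant to token lookup are shown.
const character = {
    settings: {
        secrets: {
            ATOMASDK_BEARER_AUTH: "character-scoped-token", // placeholder value
        },
    },
};

// Global settings, typically populated from the .env entries shown above.
const settings = {
    ATOMASDK_BEARER_AUTH: process.env.ATOMASDK_BEARER_AUTH,
};

// Same precedence as the new case in getTokenForProvider:
const token =
    character.settings?.secrets?.ATOMASDK_BEARER_AUTH ||
    settings.ATOMASDK_BEARER_AUTH;
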
32 changes: 31 additions & 1 deletion packages/core/src/generation.ts
@@ -1032,6 +1032,36 @@ export async function generateText({
break;
}

case ModelProviderName.ATOMA: {
elizaLogger.debug("Initializing Atoma model.");
const atoma = createOpenAI({
apiKey,
baseURL: endpoint,
fetch: runtime.fetch,
});

const { text: atomaResponse } = await aiGenerateText({
model: atoma.languageModel(model),
prompt: context,
system:
runtime.character.system ??
settings.SYSTEM_PROMPT ??
undefined,
tools: tools,
onStepFinish: onStepFinish,
maxSteps: maxSteps,
temperature: temperature,
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = atomaResponse;
elizaLogger.debug("Received response from Atoma model.");
break;
}

case ModelProviderName.GALADRIEL: {
elizaLogger.debug("Initializing Galadriel model.");
const headers = {};
@@ -2417,4 +2447,4 @@ export async function generateTweetActions({
await new Promise((resolve) => setTimeout(resolve, retryDelay));
retryDelay *= 2;
}
}
}
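
A self-contained sketch of the pattern the ATOMA case above relies on: Atoma exposes an OpenAI-compatible API, so the Vercel AI SDK's createOpenAI client is simply pointed at the Atoma endpoint and the model is addressed by name. The endpoint and model name mirror the defaults added in this PR; the prompt and environment-variable handling are illustrative assumptions.

import { createOpenAI } from "@ai-sdk/openai";
import { generateText } from "ai";

// OpenAI-compatible client aimed at Atoma's API (default URL matches models.ts below).
const atoma = createOpenAI({
    apiKey: process.env.ATOMASDK_BEARER_AUTH,
    baseURL: process.env.ATOMA_API_URL ?? "https://api.atoma.network/v1",
});

// Default model name used for all three Atoma model classes in this PR.
const { text } = await generateText({
    model: atoma.languageModel("meta-llama/Llama-3.3-70B-Instruct"),
    prompt: "Say hello from the Atoma provider.", // illustrative prompt
    temperature: 0.7,
    maxTokens: 8192,
});

console.log(text);
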
32 changes: 32 additions & 0 deletions packages/core/src/models.ts
@@ -998,6 +998,38 @@ export const models: Models = {
},
},
},
[ModelProviderName.ATOMA]: {
endpoint: settings.ATOMA_API_URL || "https://api.atoma.network/v1",
model: {
[ModelClass.SMALL]: {
name:
settings.SMALL_ATOMA_MODEL ||
"meta-llama/Llama-3.3-70B-Instruct",
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
temperature: 0.7,
},
[ModelClass.MEDIUM]: {
name:
settings.MEDIUM_ATOMA_MODEL ||
"meta-llama/Llama-3.3-70B-Instruct",
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
temperature: 0.7,
},
[ModelClass.LARGE]: {
name:
settings.LARGE_ATOMA_MODEL ||
"meta-llama/Llama-3.3-70B-Instruct",
stop: [],
maxInputTokens: 128000,
maxOutputTokens: 8192,
temperature: 0.7,
},
},
},
};

export function getModelSettings(
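
A short sketch of reading the new provider entry back out of the models map, assuming models, ModelClass, and ModelProviderName are re-exported from @elizaos/core like the rest of the core symbols:

import { models, ModelClass, ModelProviderName } from "@elizaos/core"; // assumed package export

// The ATOMA entry added above keys all three model classes to the same default model.
const atomaSmall = models[ModelProviderName.ATOMA].model[ModelClass.SMALL];

console.log(atomaSmall.name);            // "meta-llama/Llama-3.3-70B-Instruct" unless SMALL_ATOMA_MODEL is set
console.log(atomaSmall.maxOutputTokens); // 8192
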
2 changes: 2 additions & 0 deletions packages/core/src/types.ts
@@ -229,6 +229,7 @@ export type Models = {
[ModelProviderName.LIVEPEER]: Model;
[ModelProviderName.DEEPSEEK]: Model;
[ModelProviderName.INFERA]: Model;
[ModelProviderName.ATOMA]: Model;
};

/**
@@ -264,6 +265,7 @@ export enum ModelProviderName {
LETZAI = "letzai",
DEEPSEEK = "deepseek",
INFERA = "infera",
ATOMA = "atoma",
}

/**
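
Finally, a hypothetical character fragment showing how the new enum member selects the provider (again assuming the @elizaos/core package name); in character JSON this serializes to the string "atoma":

import { ModelProviderName } from "@elizaos/core"; // assumed package export

const characterFragment = {
    name: "atoma-agent",                    // hypothetical character name
    modelProvider: ModelProviderName.ATOMA, // "atoma", matching the enum value added above
};
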
