Skip to content

Commit

Permalink
fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
alex-dixon committed Oct 12, 2024
1 parent bf9c5c5 commit d9614eb
Show file tree
Hide file tree
Showing 4 changed files with 141 additions and 112 deletions.
4 changes: 2 additions & 2 deletions typescript/test/image.mocha.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import assert from 'assert'

test('detectImageFormatFromBase64', () => {
// Test empty string
assert.equal(imageFormatFromBase64(''), 'unknown')
assert.equal(imageFormatFromBase64(''), null)

// Test JPEG
assert.equal(imageFormatFromBase64('/9j/4AAQSkZJRgABAQEAYABgAAD'), 'jpeg')
Expand All @@ -29,5 +29,5 @@ test('detectImageFormatFromBase64', () => {
assert.equal(imageFormatFromBase64('UklGRh4AAABXRUJQVlA4'), 'webp')

// Test unknown format
assert.equal(imageFormatFromBase64('SGVsbG8gV29ybGQh'), 'unknown')
assert.equal(imageFormatFromBase64('SGVsbG8gV29ybGQh'), null)
})
108 changes: 55 additions & 53 deletions typescript/test/runtime.mocha.ts
Original file line number Diff line number Diff line change
@@ -1,69 +1,71 @@
import * as logging from '../src/util/_logging'
logging.setGlobalLevel(logging.LogLevel.DEBUG)
import { test, before } from 'mocha'
import { test, beforeEach } from 'mocha'
import OpenAI from 'openai'
import { config } from '../src/configurator'
import { Message } from '../src/types'
import { complex, simple } from 'ell-ai'
import assert from 'assert'
import {chatCompletionsToStream} from "./util";

// this runs like...everywhere. in every test
// before(() => {
// config.defaultClient = config.defaultClient || new OpenAI({ apiKey: 'test' })
// // @ts-expect-error
// config.defaultClient.chat.completions.create = async (...args) => {
// return <OpenAI.Chat.Completions.ChatCompletion>{
// usage: {
// prompt_tokens: 10,
// completion_tokens: 10,
// latency_ms: 10,
// total_tokens: 20,
// },
// id: 'chatcmpl-123',
// created: 1677652288,
// model: 'gpt-3.5-turbo-0125',
// object: 'chat.completion',
// choices: [
// <OpenAI.Chat.Completions.ChatCompletion.Choice>{
// index: 0,
// finish_reason: 'stop',
// logprobs: null,
// message: {
// // @ts-expect-error
// content: args[0].messages[0].content[0].text,
// role: 'assistant',
// refusal: null,
// },
// },
// ],
// }
// }
// })

const logger = logging.getLogger('runtime.test')
test('runtime', async () => {
logger.debug('runtime', { test: 'test' })
const child = simple({ model: 'gpt-4o-mini' }, async (a: string) => {
return 'child'
})
const hello = simple({ model: 'gpt-4o' }, async (a: { a: string }) => {
const ok = await child(a.a)
return a.a + ok

describe('lmp', () => {
beforeEach(() => {
config.defaultClient = config.defaultClient || new OpenAI({ apiKey: 'test' })
// @ts-expect-error
config.defaultClient.chat.completions.create = async (...args) => {
return chatCompletionsToStream([<OpenAI.Chat.Completions.ChatCompletion>{
usage: {
prompt_tokens: 10,
completion_tokens: 10,
latency_ms: 10,
total_tokens: 20,
},
id: 'chatcmpl-123',
created: 1677652288,
model: 'gpt-3.5-turbo-0125',
object: 'chat.completion',
choices: [
<OpenAI.Chat.Completions.ChatCompletion.Choice>{
index: 0,
finish_reason: 'stop',
logprobs: null,
message: {
// @ts-expect-error
content: args[0].messages[0].content[0].text,
role: 'assistant',
refusal: null,
},
},
],
}])
}
})

const result = await hello({ a: 'world' })
test('runtime', async () => {
const child = simple({ model: 'gpt-4o-mini' }, async (a: string) => {
return 'child'
})
const hello = simple({ model: 'gpt-4o' }, async (a: { a: string }) => {
const ok = await child(a.a)
return a.a + ok
})

assert.equal(result, 'worldchild')
const result = await hello({ a: 'world' })

assert.ok(hello.__ell_lmp_id__?.startsWith('lmp-'))
assert.equal(hello.__ell_lmp_name__, 'hello')
assert.equal(result, 'worldchild')

assert.ok(child.__ell_lmp_id__?.startsWith('lmp-'))
assert.equal(child.__ell_lmp_name__, 'child')
})
assert.ok(hello.__ell_lmp_id__?.startsWith('lmp-'))
assert.equal(hello.__ell_lmp_name__, 'hello')

test('complex', async () => {
const child2 = complex({ model: 'gpt-4o-mini' }, async (a: string) => [new Message('assistant', 'child')])
const result = await child2('world')
assert.deepStrictEqual(result, [new Message('assistant', 'child')])
assert.ok(child.__ell_lmp_id__?.startsWith('lmp-'))
assert.equal(child.__ell_lmp_name__, 'child')
})

test('complex', async () => {
const child2 = complex({ model: 'gpt-4o-mini' }, async (a: string) => [new Message('assistant', 'child')])
const result = await child2('world')
assert.deepStrictEqual(result, new Message('assistant', 'child'))
})
})
118 changes: 61 additions & 57 deletions typescript/test/tools.mocha.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,66 +7,70 @@ import { Message } from '../src/types'
import { complex, tool } from 'ell-ai'
import assert from 'assert'

before(() => {
config.defaultClient = config.defaultClient || new OpenAI({ apiKey: 'test' })
// @ts-expect-error
config.defaultClient.chat.completions.create = async (...args) => {
return <OpenAI.Chat.Completions.ChatCompletion>{
usage: {
prompt_tokens: 10,
completion_tokens: 10,
latency_ms: 10,
total_tokens: 20,
},
id: 'chatcmpl-123',
created: 1677652288,
model: 'gpt-3.5-turbo-0125',
object: 'chat.completion',
choices: [
<OpenAI.Chat.Completions.ChatCompletion.Choice>{
index: 0,
finish_reason: 'tool_calls',
logprobs: null,
message: {
tool_calls: [
{
type: 'function',
id: '123',
function: { name: 'getWeather', arguments: JSON.stringify({ place: 'santa cruz' }) },
},
],
},
},
],
}
}
})

test('tools', async () => {
const getWeather = tool(
async ({ place }: { place: string }) => {
return `The weather in ${place} is pretty nice.`
},
{
description: 'Get the weather in a given place',
paramDescriptions: {
place: 'The place to get the weather for',
},

describe("tools", () => {
before(() => {
config.defaultClient = config.defaultClient || new OpenAI({apiKey: 'test'})
// @ts-expect-error
config.defaultClient.chat.completions.create = async (...args) => {
return <OpenAI.Chat.Completions.ChatCompletion>{
usage: {
prompt_tokens: 10,
completion_tokens: 10,
latency_ms: 10,
total_tokens: 20,
},
id: 'chatcmpl-123',
created: 1677652288,
model: 'gpt-3.5-turbo-0125',
object: 'chat.completion',
choices: [
<OpenAI.Chat.Completions.ChatCompletion.Choice>{
index: 0,
finish_reason: 'tool_calls',
logprobs: null,
message: {
tool_calls: [
{
type: 'function',
id: '123',
function: {name: 'getWeather', arguments: JSON.stringify({place: 'santa cruz'})},
},
],
},
},
],
}
}
)
const hello = complex({ model: 'gpt-4o', tools: [getWeather] }, async (place: string) => {
return [new Message('user', `Can you tell me the weather in ${place}?`)]
})
test('getWeather', async () => {
const getWeather = tool(
async ({place}: { place: string }) => {
return `The weather in ${place} is pretty nice.`
},
{
description: 'Get the weather in a given place',
paramDescriptions: {
place: 'The place to get the weather for',
},
}
)
const hello = complex({model: 'gpt-4o', tools: [getWeather]}, async (place: string) => {
return [new Message('user', `Can you tell me the weather in ${place}?`)]
})

const result = await hello('santa cruz')
assert.equal(
await result.callToolsAndCollectAsMessage().then((x) => x.toolResults?.[0]?.result.map((x) => x.text).join('')),
'"The weather in santa cruz is pretty nice."'
)

const result = await hello('santa cruz')
assert.equal(
await result.callToolsAndCollectAsMessage().then((x) => x.toolResults?.[0]?.result.map((x) => x.text).join('')),
'"The weather in santa cruz is pretty nice."'
)

// @ts-expect-error
assert.ok(getWeather.__ell_lmp_id__?.startsWith('lmp-'))
// @ts-expect-error
assert.equal(getWeather.__ell_lmp_name__, 'getWeather')
})

// @ts-expect-error
assert.ok(getWeather.__ell_lmp_id__?.startsWith('lmp-'))
// @ts-expect-error
assert.equal(getWeather.__ell_lmp_name__, 'getWeather')
})
})
23 changes: 23 additions & 0 deletions typescript/test/util.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import OpenAI from "openai";

/**
 * Converts non-streaming chat completions into the equivalent stream chunks,
 * so tests can mock the streaming `chat.completions.create` API with plain
 * `ChatCompletion` fixtures.
 *
 * Each completion becomes a single `chat.completion.chunk` whose deltas carry
 * the full message content (i.e. the entire response arrives in one chunk).
 *
 * @param completions - Fully-formed completions to convert.
 * @returns One `ChatCompletionChunk` per input completion.
 */
export const chatCompletionsToStream = (completions: OpenAI.Chat.Completions.ChatCompletion[]) => {
  return completions.map((completion): OpenAI.ChatCompletionChunk => {
    return {
      id: completion.id,
      created: completion.created,
      model: completion.model,
      object: 'chat.completion.chunk',
      choices: completion.choices.map((choice, i): OpenAI.ChatCompletionChunk.Choice => {
        return {
          delta: {
            content: choice.message.content,
            role: choice.message.role,
            refusal: choice.message.refusal,
          },
          // `??` (not `||`): a choice index of 0 is valid and must not be
          // replaced by the array position.
          index: choice.index ?? i,
          finish_reason: choice.finish_reason,
        }
      }),
    }
  })
}

0 comments on commit d9614eb

Please sign in to comment.