Skip to content

Commit

Permalink
Add init tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ehsanmok committed Feb 16, 2025
1 parent 56445ce commit 56f1ed6
Show file tree
Hide file tree
Showing 13 changed files with 150 additions and 32 deletions.
40 changes: 27 additions & 13 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,30 +26,44 @@ on:
- 'CODE_OF_CONDUCT.md'

jobs:
lint:
name: Lint
validate-and-test:
name: Validate and Test
runs-on: ubuntu-latest
container:
image: ubuntu:latest
image: amd64/ubuntu:latest
steps:
- uses: actions/checkout@v4

- name: Install prerequisites
run: |
apt-get update
apt-get install -y curl
apt-get install -y curl git
- name: Install Magic CLI
run: curl -ssL https://magic.modular.com/ | bash

- name: Add Magic to PATH
shell: bash
run: |
source /root/.bashrc
echo "$HOME/.modular/bin" >> $GITHUB_PATH
curl -ssL https://magic.modular.com/ | bash
echo "/root/.modular/bin" >> $GITHUB_PATH
export PATH="/root/.modular/bin:$PATH"
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Run Metadata Validation
shell: bash
run: magic run lint

- name: Get changed directories
id: changed-dirs
run: |
source /root/.bashrc
$HOME/.modular/bin/magic run lint
if [ "${{ github.event_name }}" == "pull_request" ]; then
CHANGED_DIRS=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.sha }} | grep -v '^\.github/' | cut -d'/' -f1 | sort -u | tr '\n' ' ')
else
CHANGED_DIRS=$(git diff --name-only ${{ github.event.before }} ${{ github.sha }} | grep -v '^\.github/' | cut -d'/' -f1 | sort -u | tr '\n' ' ')
fi
echo "Changed directories: $CHANGED_DIRS"
echo "dirs=$CHANGED_DIRS" >> $GITHUB_OUTPUT
- name: Run Tests
if: steps.changed-dirs.outputs.dirs != ''
shell: bash
run: magic run test ${{ steps.changed-dirs.outputs.dirs }}
1 change: 1 addition & 0 deletions ai-weather-agent/pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ version = "0.0.0"
[tasks]
app = "honcho start"
clean = "honcho -f Procfile.clean start"
test = "echo 'test passed'"

[dependencies]
docker-compose = ">=2.29"
Expand Down
1 change: 1 addition & 0 deletions max-offline-inference/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ max_offline_inference = { path = ".", editable = true }

[tool.pixi.tasks]
app = "python main.py"
test = "echo 'test passed'"

[tool.pixi.dependencies]
max-pipelines = "*"
Expand Down
1 change: 1 addition & 0 deletions max-serve-continuous-chat/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ max-serve-continuous-chat = { path = ".", editable = true }
[tool.pixi.tasks]
app = "honcho start"
clean = "honcho -f Procfile.clean start"
test = "echo 'test passed'"

[tool.pixi.dependencies]
docker-compose = ">=2.32.4,<3"
Expand Down
1 change: 1 addition & 0 deletions max-serve-open-webui/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ max-pipelines = "*"
[tool.pixi.tasks]
app = { cmd = "honcho start", depends-on = "set-env" }
clean = "honcho -f Procfile.clean start"
test = "echo 'test passed'"

### Dependencies
[tool.pixi.dependencies]
Expand Down
4 changes: 2 additions & 2 deletions max-serve-openai-embeddings/Procfile
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
llm: (magic global install max-pipelines && magic global update max-pipelines) || true; MAX_SERVE_PORT=8000 MAX_SERVE_HOST=127.0.0.1 HUGGING_FACE_HUB_TOKEN=$(cat .env | grep HUGGING_FACE_HUB_TOKEN | cut -d '=' -f2) && max-pipelines serve --huggingface-repo-id sentence-transformers/all-mpnet-base-v2
main: magic run python main.py
llm: for i in $(seq 1 3); do (magic global install max-pipelines && magic global update max-pipelines) || true; MAX_SERVE_PORT=8000 MAX_SERVE_HOST=127.0.0.1 HUGGING_FACE_HUB_TOKEN=$(cat .env | grep HUGGING_FACE_HUB_TOKEN | cut -d '=' -f2) && max-pipelines serve --huggingface-repo-id sentence-transformers/all-mpnet-base-v2 && break || (echo "Attempt $i failed, retrying..." && sleep 5); done
main: magic run python main.py && kill -2 $(pgrep -f "max-pipelines serve")
63 changes: 46 additions & 17 deletions max-serve-openai-embeddings/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,16 @@


logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)

MODEL_NAME = "sentence-transformers/all-mpnet-base-v2" #1
BASE_URL=os.getenv('BASE_URL', 'http://0.0.0.0:8000/v1')
API_KEY="local"
MODEL_NAME = "sentence-transformers/all-mpnet-base-v2"
BASE_URL = os.getenv("BASE_URL", "http://0.0.0.0:8000/v1")
API_KEY = "local"

client = OpenAI(base_url=BASE_URL, api_key=API_KEY)

client = OpenAI(base_url=BASE_URL, api_key=API_KEY) #2

def wait_for_healthy(base_url: str):
@retry(
Expand All @@ -40,29 +40,58 @@ def _check_health():

return _check_health()


def main():
wait_for_healthy(BASE_URL)
"""Test embeddings using OpenAI client"""
sentences = [ #3
response = wait_for_healthy(BASE_URL)
assert response.status_code == 200, "Server health check failed"

sentences = [
"Rice is often served in round bowls.",
"The juice of lemons makes fine punch.",
"The bright sun shines on the old garden.",
"The soft breeze came across the meadow.",
"The small pup gnawed a hole in the sock."
"The small pup gnawed a hole in the sock.",
]

try:
response = client.embeddings.create( #4
model=MODEL_NAME,
input=sentences
)
response = client.embeddings.create(model=MODEL_NAME, input=sentences)
logger.info("\n=== Generated embeddings with OpenAI client ===")
logger.info("Successfully generated embeddings!")
logger.info(f"Number of embeddings: {len(response.data)}") #5
logger.info(f"Embedding dimension: {len(response.data[0].embedding)}")
logger.info(f"First embedding, first few values: {response.data[0].embedding[:5]}")

assert len(response.data) == len(
sentences
), f"Expected {len(sentences)} embeddings, got {len(response.data)}"

embedding_dim = len(response.data[0].embedding)
assert (
embedding_dim == 768
), f"Expected embedding dimension 768, got {embedding_dim}"

logger.info(f"Number of embeddings: {len(response.data)}")
logger.info(f"Embedding dimension: {embedding_dim}")
logger.info(
f"First embedding, first few values: {response.data[0].embedding[:5]}"
)

# Verify consistency with more relaxed tolerance
repeat_response = client.embeddings.create(
model=MODEL_NAME, input=[sentences[0]]
)
similarity = sum(
a * b
for a, b in zip(
response.data[0].embedding, repeat_response.data[0].embedding
)
)
assert (
similarity > 0.95
), f"Embedding consistency check failed (similarity={similarity})"

logger.info("All validation checks passed!")

except Exception as e:
logger.error(f"Error using client: {str(e)}")
raise


if __name__ == "__main__":
Expand Down
1 change: 1 addition & 0 deletions max-serve-openai-embeddings/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ max_embeddings = { path = ".", editable = true }
[tool.pixi.tasks]
app = "honcho start"
clean = "honcho -f Procfile.clean start"
test = "honcho start"

[tool.pixi.dependencies]
docker-compose = ">=2.32.4,<3"
Expand Down
1 change: 1 addition & 0 deletions max-serve-openai-function-calling/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ max_serve_openai_function_calling = { path = ".", editable = true }
single_function_call = "bash run.sh single_function_call.py"
multi_function_calls = "bash run.sh multi_function_calls.py"
app = "bash run.sh app.py"
test = "bash run.sh single_function_call.py"

[tool.pixi.dependencies]
bash = ">=5.2.21,<6"
1 change: 1 addition & 0 deletions max-serve-openai-function-calling/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ cleanup() {
}

setup_max_serve() {
export HF_HUB_ENABLE_HF_TRANSFER=1
export HUGGING_FACE_HUB_TOKEN=$(cat .env | grep HUGGING_FACE_HUB_TOKEN | cut -d '=' -f2)
magic global install max-pipelines && magic global update max-pipelines
}
Expand Down
5 changes: 5 additions & 0 deletions max-serve-openai-function-calling/single_function_call.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,12 @@ def main():
for tool_call in output.tool_calls:
if tool_call.function.name == "get_weather":
city = eval(tool_call.function.arguments)["city"]
assert city == "San Francisco"
weather_response = get_weather(city)
assert (
weather_response
== "The weather in San Francisco is sunny with a temperature of 72°F"
)
print("\nWeather response:", weather_response)


Expand Down
7 changes: 7 additions & 0 deletions pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,13 @@ version = "0.0.0"
[dependencies]
python = ">=3.10,<3.13"

[tasks]
test = "python scripts/run_tests.py $1"

[environments]
default = { solve-group = "default" }
lint = { features = ["lint"], solve-group = "default" }
test = { features = ["test"], solve-group = "default" }

[feature.lint.tasks]
lint = """
Expand All @@ -20,3 +24,6 @@ python scripts/validate_metadata.py
[feature.lint.pypi-dependencies]
pyyaml = "*"
jsonschema = "*"

[feature.test.pypi-dependencies]
pyyaml = "*"
56 changes: 56 additions & 0 deletions scripts/run_tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import sys
import os
import subprocess
from pathlib import Path


def run_tests_for_directory(directory: str) -> bool:
    """
    Run tests for a specific directory.

    Changes into *directory*, locates the first project manifest found
    (pixi.toml, pyproject.toml, mojoproject.toml), and invokes
    ``magic run test`` with PIXI_PROJECT_MANIFEST pointing at it.

    Returns True if all tests pass, False otherwise.
    """
    print(f"Running tests for {directory}...")
    original_dir = os.getcwd()
    os.chdir(directory)

    try:
        # Check for project file in order of preference after changing directory
        project_files = ["pixi.toml", "pyproject.toml", "mojoproject.toml"]
        project_file = next((f for f in project_files if Path(f).exists()), None)

        # Pass the manifest via a per-call environment instead of mutating
        # os.environ, so a stale manifest path from one directory never
        # leaks into the test run of a later directory that has no manifest.
        env = os.environ.copy()
        if project_file:
            manifest = str(Path(project_file).absolute())
            print("PIXI_PROJECT_MANIFEST: ", manifest)
            env["PIXI_PROJECT_MANIFEST"] = manifest

        try:
            subprocess.run(["magic", "run", "test"], check=True, env=env)
            return True
        except subprocess.CalledProcessError as e:
            print(f"Test failed: {e}")
            return False
    finally:
        # Always restore the caller's working directory — previously a
        # FileNotFoundError (magic CLI missing) or any non-CalledProcessError
        # exception skipped the restore and broke every subsequent call.
        os.chdir(original_dir)


def main():
    """Run tests for the directories given on the command line.

    With no (or an empty) argument, every top-level project directory is
    tested. Exits with status 1 if any directory's tests fail, 0 otherwise.
    """
    args = sys.argv[1:]
    if args and args[0]:
        directories = args
    else:
        # Default: every top-level directory except infrastructure ones.
        skip = {".git", "scripts", ".github", ".magic"}
        directories = [
            entry
            for entry in os.listdir()
            if os.path.isdir(entry) and entry not in skip
        ]

    print(f"Testing directories: {directories}")

    # Run every directory (no short-circuiting) so a single failure does
    # not hide results from the remaining directories.
    results = [run_tests_for_directory(directory) for directory in directories]

    sys.exit(0 if all(results) else 1)


# Script entry point: only run the test driver when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()

0 comments on commit 56f1ed6

Please sign in to comment.