diff --git a/README.md b/README.md index 2789043..07cf06c 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,7 @@ An AI-based CLI assistant to help you with command line & shell. ## Demo -Would be adding install instructions and better demo soon, until then check this out. -### Setup -![image](https://github.com/user-attachments/assets/87d3ba64-ecb7-43c6-9863-a62c39396ac5) - -### Inferences -![image](https://github.com/user-attachments/assets/7af58310-183a-429b-aa66-e6abe36713fb) -![image](https://github.com/user-attachments/assets/20062ac2-1057-4139-9f60-990bd41605da) +https://github.com/user-attachments/assets/effefe1a-c0ed-4b60-838c-98f992f6c25f ## Installation @@ -80,8 +74,23 @@ $ how to [OPTIONS] TASK ## Providers -`how-cli` uses ChatModels as they support chat messages as opposed to TextModels and below model providers and their corresponding models are available to use. +`how-cli` uses ChatModels as they support chat messages as opposed to TextModels and below model providers and their corresponding models are available to use. If you could test the models that are marked as ❌, please create an issue or pull request along with the test results. + +| Provider | Model | Package | Tested | |:--------:|:-----:|:-------:|:------:| | GoogleGenAI | `gemini-1.5-flash` | `langchain-google-genai` | ✅ | | GoogleVertexAI | `gemini-1.5-flash` | `langchain-google-vertexai` | ❌ | | GroqMistralAI | `mixtral-8x7b-32768` | `langchain-groq` | ✅ | | GroqLLaMa | `llama3-70b-8192` | `langchain-groq` | ✅ | | OpenAI | `gpt-4o` | `langchain-openai` | ❌ | | Anthropic | `claude-3-5-sonnet-20240620` | `langchain-anthropic` | ❌ | + + +# License +`how-cli` is licensed under the MIT License; it can be found [here](https://github.com/FireHead90544/how-cli/blob/main/LICENSE). + + +# Honourable Mentions +This project is greatly inspired by [kynnyhsap's](https://github.com/kynnyhsap) [how](https://github.com/kynnyhsap/how). 
Though my implementation is completely different (refer to the below image for architectural details), at the core both projects aim to do the same thing. Also, check out LangChain & Typer using which this project was built. -| Provider | Model | Package | -|:--------:|:-----:|:-------:| -| Google | `gemini-1.5-flash` | `langchain-google-genai` | \ No newline at end of file +![arch](https://github.com/user-attachments/assets/5335fb1d-7899-4ebf-9ff3-dfa139a9c5f8) diff --git a/how/core/__init__.py b/how/core/__init__.py index 911be7c..1f44d28 100644 --- a/how/core/__init__.py +++ b/how/core/__init__.py @@ -2,5 +2,5 @@ Package containing the core modules of the project. """ -__version__ = "0.1.0" +__version__ = "0.2.0" __author__ = "Rudransh Joshi (FireHead90544)" \ No newline at end of file diff --git a/how/core/providers.py b/how/core/providers.py index 38bff74..401b879 100644 --- a/how/core/providers.py +++ b/how/core/providers.py @@ -1,8 +1,17 @@ import os from langchain_google_genai import ChatGoogleGenerativeAI +from langchain_google_vertexai import ChatVertexAI +from langchain_openai import ChatOpenAI +from langchain_anthropic import ChatAnthropic +from langchain_groq import ChatGroq os.environ["GRPC_VERBOSITY"] = "NONE" LLM_PROVIDERS = { - "Gemini": { "provider": ChatGoogleGenerativeAI, "model": "gemini-1.5-flash" }, + "GoogleGenAI": { "provider": ChatGoogleGenerativeAI, "model": "gemini-1.5-flash" }, + "GoogleVertexAI": { "provider": ChatVertexAI, "model": "gemini-1.5-flash" }, + "GroqMistralAI": { "provider": ChatGroq, "model": "mixtral-8x7b-32768" }, + "GroqLLaMa": { "provider": ChatGroq, "model": "llama3-70b-8192" }, + "OpenAI": { "provider": ChatOpenAI, "model": "gpt-4o" }, + "Anthropic": { "provider": ChatAnthropic, "model": "claude-3-5-sonnet-20240620" } } diff --git a/requirements.txt b/requirements.txt index 7cc09b5..046493b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,7 @@ typer langchain -langchain-google-genai \ No 
newline at end of file +langchain-google-genai +langchain-google-vertexai +langchain-groq +langchain-openai +langchain-anthropic \ No newline at end of file diff --git a/setup.py b/setup.py index 0feea77..86e2e44 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from how import __version__, __author__ from setuptools import setup, find_packages -with open("README.md", "r") as f: +with open("README.md", "r", encoding="utf-8") as f: readme = f.read() with open("requirements.txt", "r") as f: