Exploring the tunable knobs via morphik.toml
morphik.toml
[registered_models]
# OpenAI
openai_gpt4 = { model_name = "gpt-4" }
openai_gpt4_mini = { model_name = "gpt-4-0125-preview" }

# Anthropic
claude_3_opus = { model_name = "claude-3-opus-20240229" }
claude_3_sonnet = { model_name = "claude-3-sonnet-20240229" }

# Google
gemini_pro = { model_name = "gemini/gemini-pro" }
gemini_flash = { model_name = "gemini/gemini-1.5-flash" }

# Azure OpenAI
azure_gpt4 = { model_name = "azure/gpt-4", api_base = "YOUR_AZURE_URL", api_key = "YOUR_KEY" }

# AWS Bedrock
bedrock_claude = { model_name = "bedrock/anthropic.claude-v2" }

# And 100+ more providers...
[completion]
model = "claude_3_opus"        # Use any registered model

[embedding]
model = "openai_embedding"     # Use any registered embedding model
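Whatever name the `[embedding]` section references must also exist under `[registered_models]`; the `openai_embedding` entry is not shown in the snippet above. A minimal sketch, assuming OpenAI's `text-embedding-3-small` is the model you want to register under that name:

[registered_models]
# Hypothetical registration matching the [embedding] section above
openai_embedding = { model_name = "text-embedding-3-small" }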
[registered_models]
# Ollama models
ollama_llama = { model_name = "ollama_chat/llama3.2", api_base = "http://localhost:11434" }
ollama_qwen_vision = { model_name = "ollama_chat/qwen2.5:72b", api_base = "http://localhost:11434", vision = true }
ollama_embedding = { model_name = "ollama/nomic-embed-text", api_base = "http://localhost:11434" }
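To route completions and embeddings through these local models, point the `[completion]` and `[embedding]` sections at the registered names, exactly as in the cloud example above. A sketch using the names registered in this snippet:

[completion]
model = "ollama_llama"         # local Llama 3.2 via Ollama

[embedding]
model = "ollama_embedding"     # local nomic-embed-text via Ollama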
[registered_models]
# Lemonade models
lemonade_qwen = { model_name = "openai/Qwen2.5-VL-7B-Instruct-GGUF", api_base = "http://localhost:8020/api/v1", vision = true }
lemonade_embedding = { model_name = "openai/nomic-embed-text-v1-GGUF", api_base = "http://localhost:8020/api/v1" }
When Morphik itself runs inside Docker, `localhost` in an `api_base` points at the Morphik container, not your machine. Use `http://host.docker.internal:PORT` to reach a model server running on the host, or the service name (for example `http://ollama:11434`) when the model server runs as another container in the same Docker Compose network.
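As a concrete sketch, reusing the `ollama_llama` entry from above and assuming Ollama is listening on its default port 11434:

[registered_models]
# Ollama running on the host machine, Morphik running in Docker
ollama_llama = { model_name = "ollama_chat/llama3.2", api_base = "http://host.docker.internal:11434" }

# Ollama running as a service in the same Docker Compose network
# ollama_llama = { model_name = "ollama_chat/llama3.2", api_base = "http://ollama:11434" }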