Update config.yaml
config.yaml  CHANGED  +0 -6

@@ -3,7 +3,6 @@ model_list:
     litellm_params:
       model: ollama/mistral:7b
       api_base: https://zhengr-ollama.hf.space
-      api_key: sk-1234
       # Model-specific parameters
       #model: "huggingface/mistralai/Mistral-7B-Instruct-v0.1"
       #api_base: "<your-api-base>"
@@ -14,11 +13,6 @@ model_list:
       #bos_token: "<s>"
       #eos_token: "</s>"
       #max_tokens: 4096
-  - model_name: xinference-llama-3-instruct
-    litellm_params:
-      model: xinference/mistral-instruct-v0.3
-      api_base: https://zhengr-xinference.hf.space/api
-      api_key: sk-1234
 
 litellm_settings: # module level litellm settings - https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py
   drop_params: True
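
The commit drops the hard-coded api_key from the Ollama entry and removes the xinference-llama-3-instruct model block entirely, leaving a single Ollama-backed model behind the LiteLLM proxy. As a rough usage sketch (not part of this Space), a proxy started from this config could be queried through its OpenAI-compatible API; the port, client key, and the surviving entry's model_name are assumptions here, since none of them appear in this diff:

# Minimal sketch: query a LiteLLM proxy started with
#   litellm --config config.yaml
# Assumptions not present in the diff: the proxy listens on the default
# port 4000, no master_key is configured, and the surviving model_list
# entry is registered under the placeholder model_name "mistral-7b".
from openai import OpenAI

client = OpenAI(
    base_url="http://0.0.0.0:4000",  # assumed default LiteLLM proxy address
    api_key="sk-anything",           # placeholder; no proxy master_key is shown in this config
)

response = client.chat.completions.create(
    model="mistral-7b",  # placeholder model_name for the remaining Ollama entry
    messages=[{"role": "user", "content": "Say hello through the proxy."}],
)
print(response.choices[0].message.content)

The retained drop_params: True in litellm_settings tells LiteLLM to silently drop request parameters the upstream provider does not support, which helps when OpenAI-style requests like the one above are forwarded to backends such as Ollama.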