Upload agent
- agent.json (+5 -5)
- app.py (+5 -5)
agent.json (CHANGED)

@@ -5,8 +5,8 @@
   "model": {
     "class": "HfApiModel",
     "data": {
-      "last_input_token_count":
-      "last_output_token_count":
+      "last_input_token_count": null,
+      "last_output_token_count": null,
       "model_id": "Qwen/Qwen2.5-Coder-32B-Instruct",
       "provider": null
     }
@@ -31,11 +31,11 @@
       "post_messages": "Based on the above, please provide an answer to the following user task:\n{{task}}"
     }
   },
-  "max_steps":
-  "verbosity_level":
+  "max_steps": 5,
+  "verbosity_level": 2,
   "grammar": null,
   "planning_interval": null,
-  "name":
+  "name": "test",
   "description": null,
   "requirements": [
     "smolagents"
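The agent.json fields touched by this commit map one-to-one onto the CodeAgent constructor used in app.py. As a rough illustration only (this hand-rolled loader is not smolagents' own loading path, and the tools list is left empty because the Space wires tools up in app.py), the config could be read back like this:

import json

from smolagents import CodeAgent, HfApiModel

# Read the serialized config shown in the diff above.
with open("agent.json") as f:
    cfg = json.load(f)

# "model" -> "data" -> "model_id" is Qwen/Qwen2.5-Coder-32B-Instruct after this commit.
model = HfApiModel(model_id=cfg["model"]["data"]["model_id"])

# Rebuild an equivalent agent from the top-level fields the commit fills in.
agent = CodeAgent(
    model=model,
    tools=[],                                # tools are wired up in app.py, not here
    max_steps=cfg["max_steps"],              # 5
    verbosity_level=cfg["verbosity_level"],  # 2
    planning_interval=cfg["planning_interval"],
    name=cfg["name"],                        # "test"
    description=cfg["description"],
)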
app.py (CHANGED)

@@ -20,15 +20,15 @@ final_answer = FinalAnswer()
 with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
-agent = CodeAgent(
+agent_test = CodeAgent(
     model=model,
     tools=[],
     managed_agents=[],
-    max_steps=
-    verbosity_level=
+    max_steps=5,
+    verbosity_level=2,
     grammar=None,
     planning_interval=None,
-    name=
+    name='test',
     description=None,
     executor_type='local',
     executor_kwargs={},
@@ -36,4 +36,4 @@ agent = CodeAgent(
     prompt_templates=prompt_templates
 )
 if __name__ == "__main__":
-    GradioUI(
+    GradioUI(agent_test).launch()
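Stripped of the prompt-template loading and the FinalAnswer tool, app.py after this commit boils down to the following shape. This is a condensed sketch, assuming the GradioUI class comes from smolagents (the Space may instead import a local copy) and that a Hugging Face token is available to HfApiModel:

from smolagents import CodeAgent, GradioUI, HfApiModel

# Same model id that agent.json records.
model = HfApiModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct")

# Same constructor arguments as the diff above, minus the elided prompt templates.
agent_test = CodeAgent(
    model=model,
    tools=[],
    managed_agents=[],
    max_steps=5,
    verbosity_level=2,
    planning_interval=None,
    name="test",
    description=None,
)

if __name__ == "__main__":
    # Serve the agent behind the Gradio chat UI, as app.py does,
    # or call agent_test.run("...") directly for a headless check.
    GradioUI(agent_test).launch()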