Spaces: Runtime error
import gradio as gr
import json
import ssl
import http.client
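
# get_api_key(): fetch an API key from the Neural Internet test server.
# The /admin/api-keys/ route is expected to return a JSON list of key
# objects; the first entry's "api_key" field is used.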
def get_api_key():
    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    conn.request("GET", "/admin/api-keys/")
    api_key_resp = conn.getresponse()
    api_key_string = api_key_resp.read().decode("utf-8").replace("\n", "").replace("\t", "")
    api_key_json = json.loads(api_key_string)
    api_key = api_key_json[0]['api_key']
    conn.close()
    return api_key
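
# generate_top_response(): POST the prompt to the /chat endpoint with
# top_n=100 (presumably asking the backend to query its top-ranked miners)
# and return the uid and message content of the first choice.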
def generate_top_response(system_prompt, model_input, api_key):
    payload = json.dumps(
        {"top_n": 100, "messages": [{"role": "system", "content": system_prompt}, {"role": "user", "content": model_input}]}
    )
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }
    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    conn.request("POST", "/chat", payload, headers)
    response = conn.getresponse()
    utf_string = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
    print(utf_string)
    json_resp = json.loads(utf_string)
    conn.close()
    for choice in json_resp['choices']:
        # Returns on the first iteration, i.e. only the first choice is used.
        uid = choice['uid']
        return uid, choice['message']['content']
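
# generate_benchmark_response(): fetch the current top miner UIDs from
# /top_miner_uids, send the same prompt to those specific UIDs via /chat,
# and return the uid and content of the last choice in the response.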
def generate_benchmark_response(system_prompt, model_input, api_key):
    context = ssl.create_default_context()
    context.check_hostname = True
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    conn.request("GET", "/top_miner_uids")
    benchmark_uid_resp = conn.getresponse()
    benchmark_uid_string = benchmark_uid_resp.read().decode("utf-8").replace("\n", "").replace("\t", "")
    benchmark_uid_json = json.loads(benchmark_uid_string)
    conn.close()
    payload = json.dumps(
        {"uids": benchmark_uid_json, "messages": [{"role": "system", "content": system_prompt}, {"role": "user", "content": model_input}]}
    )
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
        "Endpoint-Version": "2023-05-19",
    }
    conn = http.client.HTTPSConnection("test.neuralinternet.ai", context=context)
    conn.request("POST", "/chat", payload, headers)
    response = conn.getresponse()
    utf_string = response.read().decode("utf-8").replace("\n", "").replace("\t", "")
    json_resp = json.loads(utf_string)
    # print(utf_string)
    conn.close()
    for choice in json_resp['choices']:
        # After the loop, uid and model_resp hold the last choice in the list.
        uid = choice['uid']
        model_resp = choice['message']['content']
    return uid, model_resp
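
# dynamic_function(): the Gradio callback. It falls back to a default
# system prompt when none is given, fetches an API key, and concatenates
# the "top" and "benchmark" responses so they can be compared side by side.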
def dynamic_function(system_prompt, prompt):
    if len(system_prompt) == 0:
        system_prompt = "You are an AI Assistant, created by Bittensor and powered by NI (Neural Internet). Your task is to provide concise responses to the user's prompt."
    api_key = get_api_key()
    top_uid, top_response = generate_top_response(system_prompt, prompt, api_key)
    benchmark_uid, benchmark_response = generate_benchmark_response(system_prompt, prompt, api_key)
    return f"TOP_{top_uid}: {top_response}\n\n\nBenchmark_{benchmark_uid}: {benchmark_response}"
interface = gr.Interface(
    fn=dynamic_function,
    inputs=[
        gr.Textbox(label="System Prompt"),  # optional; an empty string triggers the default prompt
        gr.Textbox(label="Enter your question"),
    ],
    outputs=gr.Textbox(label="Responses"),
    title="Bittensor Compare Util",
)

# Launch the Gradio Interface with request queueing enabled
interface.queue()
interface.launch(share=False)
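
# Note: gradio is the only third-party dependency here; json, ssl, and
# http.client are part of the Python standard library.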