|
|
import networkx as nx |
|
|
import matplotlib.pyplot as plt |
|
|
import json |
|
|
import requests |
|
|
import pandas as pd |
|
|
import os |
|
|
import gradio as gr |
|
|
import openai |
|
|
import scipy as sp |
|
|
|
|
|
|
|
|
# OpenAI client, authenticated via the "OAIK" environment variable.
API_KEY = os.getenv("OAIK")

client = openai.OpenAI(api_key=API_KEY)

# Airtable credentials and target location.
api_key_airt = os.getenv("AIRT_KEY")

AIRT_DBASE = 'appUuBVTJR5ju0y6J'  # Airtable base id (the "app..." value)

AIRT_TABLE = 'foros_postdoc'  # table name within that base

# REST endpoint and auth headers shared by every Airtable request below.
url = f"https://api.airtable.com/v0/{AIRT_DBASE}/{AIRT_TABLE}"

headers = {
    "Authorization": f"Bearer {api_key_airt}",
    "Content-Type": "application/json"
}
|
|
|
|
|
def cargar_vocabulario(archivo):
    """Load the controlled-vocabulary term list from a text file.

    Each non-empty line, stripped of surrounding whitespace, becomes one
    term; blank lines are skipped.
    """
    with open(archivo, "r", encoding="utf-8") as fh:
        return [linea.strip() for linea in fh if linea.strip()]
|
|
|
|
|
def cargar_nombres(archivo):
    """Load the participant-name list from a text file.

    Returns one entry per non-empty line, with surrounding whitespace
    removed.
    """
    with open(archivo, "r", encoding="utf-8") as fh:
        return [linea.strip() for linea in fh if linea.strip()]
|
|
|
|
|
# Participant names and the controlled vocabulary, loaded once at import time
# from sibling text files (one entry per line).
students = cargar_nombres("nombres_postdoc.txt")

VOCABULARY = cargar_vocabulario("vocabulario_postdoc.txt")

# Global directed graph: edges run from a participant to each concept they
# mentioned. Mutated by inicializar_grafo / agregar_aporte / reload_data.
G = nx.DiGraph()
|
|
|
|
|
def extract_concepts(text):
    """Ask the OpenAI chat API which vocabulary terms appear in *text*.

    The model is prompted (in Spanish) to return a comma-separated list of
    terms from VOCABULARY found in the text. System instructions come from
    the 'instrucciones_postdoc' environment variable.

    Returns a list of term strings split on commas; items may still carry
    surrounding whitespace — callers strip them.
    """
    instrucciones = os.getenv('instrucciones_postdoc')
    prompt = f"""Utilizando como base este vocabulario de términos, {VOCABULARY}, explora el siguiente texto: '{text}', y genera una \
lista corta de los conceptos presentes en el texto coincidentes con los términos del vocabulario, la lista debe ser una \
secuencia simple de términos encontrados coincidentes separados por comas."""

    version_model = 'gpt-3.5-turbo-0125'
    response = client.chat.completions.create(
        model=version_model,
        messages=[{"role": "system", "content": instrucciones},
                  {"role": "user", "content": prompt}],
        temperature=0.8,
        max_tokens=300,
    )
    # Bug fix: this local was previously named `extract_concepts`, shadowing
    # the function itself inside its own body — renamed for clarity.
    conceptos = response.choices[0].message.content.split(',')
    return conceptos
|
|
|
|
|
def cargar_desde_airtable():
    """Fetch all records from the Airtable table as a two-column DataFrame.

    Columns are "Nombre" and "Conceptos" (both stripped of whitespace).
    On any non-200 response the error is printed and an empty DataFrame
    with the same columns is returned so downstream code keeps working.
    """
    respuesta = requests.get(url, headers=headers)
    if respuesta.status_code != 200:
        print(f"Error: {respuesta.status_code} - {respuesta.text}")
        return pd.DataFrame(columns=["Nombre", "Conceptos"])

    filas = [
        [rec["fields"].get("Nombre", "").strip(),
         rec["fields"].get("Conceptos", "").strip()]
        for rec in respuesta.json().get("records", [])
    ]

    df = pd.DataFrame(filas, columns=["Nombre", "Conceptos"])
    print("Loaded Airtable Data:")
    print(df)
    return df
|
|
|
|
|
def inicializar_grafo():
    """Populate the global graph G from the rows stored in Airtable.

    Each row contributes a blue participant node plus a green node per
    comma-separated concept, with an edge participant -> concept. Rows
    missing a name or concepts are skipped; duplicates are not re-added.
    """
    df = cargar_desde_airtable()

    for _, fila in df.iterrows():
        nombre = fila["Nombre"].strip()
        conceptos = fila["Conceptos"].strip()
        if not (nombre and conceptos):
            continue

        if not G.has_node(nombre):
            G.add_node(nombre, color='lightblue')

        for termino in (t.strip() for t in conceptos.split(',')):
            if not termino:
                continue
            if not G.has_node(termino):
                G.add_node(termino, color='lightgreen')
            if not G.has_edge(nombre, termino):
                G.add_edge(nombre, termino)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def visualizar_grafo():
    """Render the global graph G to 'graph.png' and return that filename.

    Empty graphs produce a placeholder image with a "No data available"
    message instead of failing.
    """
    plt.figure(figsize=(12, 8))

    if len(G.nodes) == 0:
        print("⚠️ Warning: The graph is empty! Check if data was loaded correctly.")
        plt.text(0.5, 0.5, "No data available", fontsize=12, ha='center')
        plt.savefig("graph.png")
        plt.close()
        return "graph.png"

    centrality = nx.betweenness_centrality(G)
    pos = nx.kamada_kawai_layout(G, scale=2.0)

    # Participants are blue, vocabulary concepts green; node size grows with
    # betweenness centrality so structurally important nodes stand out.
    node_colors = ['lightblue' if nodo in students else 'lightgreen'
                   for nodo in G.nodes()]
    node_sizes = [800 + 5000 * centrality.get(nodo, 0) for nodo in G.nodes()]

    nx.draw(
        G, pos, with_labels=True, node_color=node_colors, edge_color='gray',
        node_size=node_sizes, font_size=8
    )

    # NOTE(review): no edge 'label' attribute is ever set in this file, so
    # this currently draws nothing — kept for parity with the original.
    edge_labels = nx.get_edge_attributes(G, 'label')
    nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels, font_size=7)

    plt.title("Red de Aportes - Optimizada y Expandida")
    plt.savefig("graph.png")
    plt.close()
    return "graph.png"
|
|
|
|
|
def guardar_en_airtable(nombre, conceptos, texto):
    """Persist one contribution (name, concepts, raw text) to Airtable.

    `conceptos` may be a list of terms or a single comma-separated string;
    both forms are normalized (whitespace stripped, quotes removed, empty
    items dropped) before being joined with ", " for the "Conceptos" field.
    Errors are reported on stdout; nothing is returned.
    """
    if isinstance(conceptos, str):
        # Bug fix: a bare string used to reach ", ".join() directly, which
        # joins its individual CHARACTERS. Split it into terms first.
        conceptos = conceptos.split(',')
    conceptos = [c.strip().replace("'", "").replace('"', '') for c in conceptos if c.strip()]
    data = {"fields": {"Nombre": nombre, "Conceptos": ", ".join(conceptos), "Texto_Aporte": texto}}
    response = requests.post(url, headers=headers, json=data)
    # Airtable's create-record endpoint answers 200 on success.
    if response.status_code != 200:
        print(f"Error saving to Airtable: {response.status_code} - {response.text}")
|
|
|
|
|
def agregar_aporte(nombre, texto):
    """Register a new contribution from a participant.

    Extracts concepts from *texto* via the OpenAI API, grows the global
    graph G with the participant and each concept, persists the record to
    Airtable, and returns the path of a freshly rendered graph image.
    """
    conceptos = extract_concepts(texto)
    print(f"Extracted Concepts: {conceptos}")

    if not G.has_node(nombre):
        G.add_node(nombre, color='lightblue')

    for crudo in conceptos:
        termino = crudo.strip()
        # NOTE(review): empty terms are not filtered here (matches original
        # behavior), so a trailing comma yields a "" node.
        if not G.has_node(termino):
            G.add_node(termino, color='lightgreen')
        if not G.has_edge(nombre, termino):
            G.add_edge(nombre, termino)

    guardar_en_airtable(nombre, conceptos, texto)
    return visualizar_grafo()
|
|
|
|
|
def reload_data():
    """Wipe the in-memory graph, re-read Airtable, and return a fresh plot."""
    # G is only mutated (never rebound), so no `global` declaration is needed.
    G.clear()
    inicializar_grafo()
    return visualizar_grafo()
|
|
|