| | |
| | """ |
| | Trek WhatsApp AI Asistani - Hybrid Model Version |
| | Gorsel varsa: GPT-4o (vision) |
| | Metin varsa: GPT-5.2 (daha akilli) |
| | """ |
| |
|
| | import os |
| | import json |
| | import re |
| | import requests |
| | import xml.etree.ElementTree as ET |
| | import warnings |
| | import time |
| | import threading |
| | import datetime |
| | import unicodedata |
| | from concurrent.futures import ThreadPoolExecutor, as_completed |
| | from fastapi import FastAPI, Request |
| | from twilio.rest import Client |
| | from twilio.twiml.messaging_response import MessagingResponse |
| |
|
| | |
| | from prompts import get_active_prompts |
| | from whatsapp_renderer import extract_product_info_whatsapp |
| | from whatsapp_passive_profiler import ( |
| | analyze_user_message, get_user_profile_summary, get_personalized_recommendations |
| | ) |
| |
|
| | |
| | import logging |
| | logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') |
| | logger = logging.getLogger(__name__) |
| |
|
| | |
| | |
# Feature flag for an experimental improved-search path (currently disabled).
USE_IMPROVED_SEARCH = False

# Silence all warnings (e.g. urllib3 InsecureRequestWarning from the many
# verify=False HTTP calls below).
warnings.simplefilter('ignore')

# Inbound messages from these WhatsApp-format numbers are ignored entirely.
BLOCKED_NUMBERS = {
    "whatsapp:+905376627860",
}

# Simple in-memory rate limiting: at most this many messages per number per
# rolling minute.  Tracker maps phone_number -> list of epoch timestamps.
RATE_LIMIT_PER_MINUTE = 10
rate_limit_tracker = {}
| |
|
def is_blocked(phone_number):
    """Return True (and log a warning) when the number is on the blocklist."""
    blocked = phone_number in BLOCKED_NUMBERS
    if blocked:
        logger.warning(f"π« ENGELLENEN NUMARA: {phone_number} - mesaj ignore edildi")
    return blocked
| |
|
def is_rate_limited(phone_number):
    """Temporarily reject numbers exceeding the per-minute message quota."""
    now = time.time()
    # Keep only this number's timestamps from the last 60 seconds.
    recent = [t for t in rate_limit_tracker.get(phone_number, []) if now - t < 60]
    rate_limit_tracker[phone_number] = recent

    if len(recent) >= RATE_LIMIT_PER_MINUTE:
        logger.warning(f"β±οΈ RATE LIMIT: {phone_number} - dakikada {RATE_LIMIT_PER_MINUTE}+ mesaj")
        return True

    # Not limited: record this message's timestamp.
    recent.append(now)
    return False
| |
|
| | |
| | |
| | |
# Hybrid model routing: image messages go to the vision-capable model, plain
# text to the text model; "fallback" is intended for error paths elsewhere.
MODEL_CONFIG = {
    "vision": "gpt-4o",
    "text": "gpt-5.2-chat-latest",
    "fallback": "gpt-4o"
}
| |
|
| | |
def get_model_for_request(has_media=False):
    """Select the model for a request.

    has_media=True  -> vision-capable model (images attached)
    has_media=False -> text model
    """
    if has_media:
        logger.info(f"πΌοΈ Gorsel tespit edildi -> Model: {MODEL_CONFIG['vision']}")
        return MODEL_CONFIG["vision"]
    logger.info(f"π Metin mesaji -> Model: {MODEL_CONFIG['text']}")
    return MODEL_CONFIG["text"]
| |
|
| | |
| | |
| | |
# --- OpenAI / Twilio configuration (secrets come from environment variables) ---
API_URL = "https://api.openai.com/v1/chat/completions"
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
logger.info(f"OpenAI API Key var mi: {'Evet' if OPENAI_API_KEY else 'Hayir'}")

TWILIO_ACCOUNT_SID = os.getenv("TWILIO_ACCOUNT_SID")
TWILIO_AUTH_TOKEN = os.getenv("TWILIO_AUTH_TOKEN")
# NOTE(review): hard-coded Messaging Service SID fallback lives in source - confirm intentional.
TWILIO_MESSAGING_SERVICE_SID = os.getenv("TWILIO_MESSAGING_SERVICE_SID", "MG11c1dfac28ad5f81908ec9ede0f7247f")
TWILIO_WHATSAPP_NUMBER = "whatsapp:+905332047254"

logger.info(f"Twilio SID var mi: {'Evet' if TWILIO_ACCOUNT_SID else 'Hayir'}")
logger.info(f"Twilio Auth Token var mi: {'Evet' if TWILIO_AUTH_TOKEN else 'Hayir'}")
logger.info(f"Messaging Service SID var mi: {'Evet' if TWILIO_MESSAGING_SERVICE_SID else 'Hayir'}")

# Build the Twilio client once at import time; twilio_client stays None when
# credentials are missing or construction fails - callers must handle None.
if not TWILIO_ACCOUNT_SID or not TWILIO_AUTH_TOKEN:
    logger.error("β Twilio bilgileri eksik!")
    twilio_client = None
else:
    try:
        twilio_client = Client(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
        logger.info("β Twilio client basariyla olusturuldu!")
    except Exception as e:
        logger.error(f"β Twilio client hatasi: {e}")
        twilio_client = None
| |
|
| | |
| | |
| | |
# --- Optional feature modules ---------------------------------------------
# Each import is guarded by a feature flag so the app still boots when the
# module is absent from the deployment.

try:
    from smart_warehouse_with_price import get_warehouse_stock_smart_with_price
    USE_GPT5_SEARCH = True
    logger.info("β GPT-5 complete smart warehouse with price (BF algorithm) loaded")
except ImportError:
    USE_GPT5_SEARCH = False
    logger.info("β GPT-5 search not available")

try:
    from media_queue_v2 import media_queue
    USE_MEDIA_QUEUE = True
    logger.info("β Media Queue V2 loaded successfully")
except ImportError:
    USE_MEDIA_QUEUE = False
    logger.info("β Media Queue V2 not available")

try:
    from store_notification import (
        notify_product_reservation,
        notify_price_inquiry,
        notify_stock_inquiry,
        send_test_notification,
        send_store_notification,
        should_notify_mehmet_bey
    )
    USE_STORE_NOTIFICATION = True
    logger.info("β Store Notification System loaded")
except ImportError:
    USE_STORE_NOTIFICATION = False
    logger.info("β Store Notification System not available")

try:
    from follow_up_system import (
        FollowUpManager,
        analyze_message_for_follow_up,
        FollowUpType
    )
    USE_FOLLOW_UP = True
    # Manager instance is created eagerly; None when the module is missing.
    follow_up_manager = FollowUpManager()
    logger.info("β Follow-Up System loaded")
except ImportError:
    USE_FOLLOW_UP = False
    follow_up_manager = None
    logger.info("β Follow-Up System not available")

try:
    from intent_analyzer import (
        analyze_customer_intent,
        should_notify_store,
        get_smart_notification_message
    )
    USE_INTENT_ANALYZER = True
    logger.info("β GPT-5 Intent Analyzer loaded")
except ImportError:
    USE_INTENT_ANALYZER = False
    logger.info("β Intent Analyzer not available")
|
| | |
| | |
| | |
# --- Real-time stock API ---------------------------------------------------
STOCK_API_BASE = "https://video.trek-turkey.com/bizimhesap-proxy.php"

# In-memory stock cache: normalized product name -> (result_text, fetch_time).
stock_cache = {}
CACHE_DURATION = 300  # seconds

# Fallback map for Turkish characters that survive Unicode NFD stripping
# (dotless/dotted 'i' variants in particular); used by normalize_turkish.
turkish_map = {'Δ±': 'i', 'Δ': 'g', 'ΓΌ': 'u', 'Ε': 's', 'ΓΆ': 'o', 'Γ§': 'c', 'Δ°': 'i', 'I': 'i'}
| |
|
def normalize_turkish(text):
    """Normalize Turkish text for matching: strip accents, fold Turkish
    letters to ASCII, and lowercase.

    Returns "" for falsy input (None / empty string).

    Improvement over the previous version: no dependence on the mutable
    module-level `turkish_map` dict and no chained per-character `.replace`
    calls - accent stripping handles g/u/s/o/c variants, and the remaining
    dotless/dotted 'i' forms (which do not decompose, or decompose to 'I')
    are folded in a single `str.translate` pass.
    """
    if not text:
        return ""
    # Decompose accented characters and drop combining marks ('ğ' -> 'g', 'ş' -> 's', ...).
    decomposed = unicodedata.normalize('NFD', text)
    stripped = ''.join(ch for ch in decomposed if unicodedata.category(ch) != 'Mn')
    # 'ı' never decomposes and 'İ' decomposes to 'I' + dot mark; fold all to 'i'.
    stripped = stripped.translate(str.maketrans({'ı': 'i', 'İ': 'i', 'I': 'i'}))
    return stripped.lower()
| |
|
def fetch_warehouse_inventory(warehouse, product_name, search_terms):
    """Fetch one warehouse's inventory and collect the variants matching the search.

    Args:
        warehouse: dict with at least 'id' and 'title' (from the warehouses API).
        product_name: original product name as typed by the customer.
        search_terms: normalized lower-cased tokens derived from product_name.

    Returns one of:
        {'warehouse': name, 'variants': [...], 'is_dsw': False}  - store with stock,
        {'dsw_stock': n, 'is_dsw': True}                          - pre-order/DSW stock only,
        None - no match, HTTP error, malformed payload, or any exception.
    """
    try:
        warehouse_id = warehouse['id']
        warehouse_name = warehouse['title']

        # DSW / pre-order warehouses are aggregated as a count, not listed as stores.
        is_dsw = 'DSW' in warehouse_name or 'ΓN SΔ°PARΔ°Ε' in warehouse_name.upper()

        inventory_url = f"{STOCK_API_BASE}?action=inventory&warehouse={warehouse_id}&endpoint=inventory/{warehouse_id}"
        # NOTE(review): verify=False disables TLS certificate checks - confirm intentional.
        inventory_response = requests.get(inventory_url, timeout=3, verify=False)

        if inventory_response.status_code != 200:
            return None

        inventory_data = inventory_response.json()

        if 'data' not in inventory_data or 'inventory' not in inventory_data['data']:
            return None

        products_list = inventory_data['data']['inventory']

        # Tokens that denote a size rather than part of a product name.
        size_terms = ['xs', 's', 'm', 'ml', 'l', 'xl', 'xxl', '2xl', '3xl', 'small', 'medium', 'large']
        size_numbers = ['44', '46', '48', '50', '52', '54', '56', '58', '60']

        # Detect whether a size was requested; only the first size token counts.
        has_size_query = False
        size_query = None
        for term in search_terms:
            if term in size_terms or term in size_numbers:
                has_size_query = True
                size_query = term
                break

        # Query consisting of nothing but a size token (e.g. just "54").
        is_only_size_query = len(search_terms) == 1 and has_size_query

        warehouse_variants = []
        dsw_stock_count = 0

        for product in products_list:
            product_title = normalize_turkish(product.get('title', '')).lower()
            original_title = product.get('title', '')

            if is_only_size_query:
                # Size-only query: accept the size wherever it appears as a word,
                # parenthesized, space-delimited, or as the title's last token.
                if size_query in product_title.split() or f'({size_query})' in product_title or f' {size_query} ' in product_title or product_title.endswith(f' {size_query}'):
                    qty = int(product.get('qty', 0))
                    stock = int(product.get('stock', 0))
                    # 'qty' and 'stock' can disagree; take the larger value.
                    actual_stock = max(qty, stock)

                    if actual_stock > 0:
                        if is_dsw:
                            dsw_stock_count += actual_stock
                            continue
                        warehouse_variants.append(f"{original_title}: β Stokta")
            else:
                if has_size_query:
                    # Product terms must ALL appear in the title, plus the size.
                    non_size_terms = [t for t in search_terms if t != size_query]
                    product_matches = all(term in product_title for term in non_size_terms)
                    size_matches = size_query in product_title.split() or f'({size_query})' in product_title or f' {size_query} ' in product_title or product_title.endswith(f' {size_query}')

                    if product_matches and size_matches:
                        qty = int(product.get('qty', 0))
                        stock = int(product.get('stock', 0))
                        actual_stock = max(qty, stock)

                        if actual_stock > 0:
                            if is_dsw:
                                dsw_stock_count += actual_stock
                                continue

                            # Strip the product name (in several casings, including
                            # Turkish I/İ swaps) so only the variant text remains.
                            variant_info = original_title
                            possible_names = [
                                product_name.upper(),
                                product_name.lower(),
                                product_name.title(),
                                product_name.upper().replace('I', 'Δ°'),
                                product_name.upper().replace('Δ°', 'I'),
                            ]

                            # Special-case: feed titles spell out "FX Sport AL 3".
                            if 'fx sport' in product_name.lower():
                                possible_names.extend(['FX Sport AL 3', 'FX SPORT AL 3', 'Fx Sport Al 3'])

                            for possible_name in possible_names:
                                variant_info = variant_info.replace(possible_name, '').strip()

                            # Collapse runs of whitespace left by the removals.
                            variant_info = ' '.join(variant_info.split())

                            if variant_info and variant_info != original_title:
                                warehouse_variants.append(f"{variant_info}: β Stokta")
                            else:
                                warehouse_variants.append(f"{original_title}: β Stokta")
                else:
                    # Plain query: every search term must appear in the title.
                    if all(term in product_title for term in search_terms):
                        qty = int(product.get('qty', 0))
                        stock = int(product.get('stock', 0))
                        actual_stock = max(qty, stock)

                        if actual_stock > 0:
                            if is_dsw:
                                dsw_stock_count += actual_stock
                                continue

                            # Same variant-extraction logic as the sized branch above.
                            variant_info = original_title
                            possible_names = [
                                product_name.upper(),
                                product_name.lower(),
                                product_name.title(),
                                product_name.upper().replace('I', 'Δ°'),
                                product_name.upper().replace('Δ°', 'I'),
                            ]

                            if 'fx sport' in product_name.lower():
                                possible_names.extend(['FX Sport AL 3', 'FX SPORT AL 3', 'Fx Sport Al 3'])

                            for possible_name in possible_names:
                                variant_info = variant_info.replace(possible_name, '').strip()

                            variant_info = ' '.join(variant_info.split())

                            if variant_info and variant_info != original_title:
                                warehouse_variants.append(f"{variant_info}: β Stokta")
                            else:
                                warehouse_variants.append(f"{original_title}: β Stokta")

        # Store results take precedence; DSW stock is reported as a bare count.
        if warehouse_variants and not is_dsw:
            return {'warehouse': warehouse_name, 'variants': warehouse_variants, 'is_dsw': False}
        elif dsw_stock_count > 0:
            return {'dsw_stock': dsw_stock_count, 'is_dsw': True}

        return None

    except Exception:
        # Best-effort per-warehouse fetch: any failure is treated as "no data".
        return None
| |
|
def get_realtime_stock_parallel(product_name):
    """Fetch real-time stock across all warehouses in parallel, with caching.

    Returns a human-readable multi-line summary string, a cached copy when the
    entry is younger than CACHE_DURATION seconds, or None on API failure.
    """
    try:
        # Cache key is the normalized product name.
        cache_key = normalize_turkish(product_name).lower()
        current_time = time.time()

        if cache_key in stock_cache:
            cached_data, cached_time = stock_cache[cache_key]
            if current_time - cached_time < CACHE_DURATION:
                logger.info(f"Cache'den donduruluyor: {product_name}")
                return cached_data

        # 1) Get the warehouse list.
        warehouses_url = f"{STOCK_API_BASE}?action=warehouses&endpoint=warehouses"
        warehouses_response = requests.get(warehouses_url, timeout=3, verify=False)

        if warehouses_response.status_code != 200:
            logger.error(f"Magaza listesi alinamadi: {warehouses_response.status_code}")
            return None

        warehouses_data = warehouses_response.json()

        if 'data' not in warehouses_data or 'warehouses' not in warehouses_data['data']:
            logger.error("Magaza verisi bulunamadi")
            return None

        warehouses = warehouses_data['data']['warehouses']

        search_terms = normalize_turkish(product_name).lower().split()
        logger.info(f"Aranan urun: {product_name} -> {search_terms}")

        stock_info = {}
        total_dsw_stock = 0
        # NOTE: counts warehouses that have stock, not total units.
        total_stock = 0

        # 2) Query every warehouse concurrently (each call has its own 3s timeout).
        with ThreadPoolExecutor(max_workers=10) as executor:
            futures = {
                executor.submit(fetch_warehouse_inventory, warehouse, product_name, search_terms): warehouse
                for warehouse in warehouses
            }

            for future in as_completed(futures):
                result = future.result()
                if result:
                    if result.get('is_dsw'):
                        total_dsw_stock += result.get('dsw_stock', 0)
                    else:
                        warehouse_name = result['warehouse']
                        stock_info[warehouse_name] = result['variants']
                        total_stock += 1

        # 3) Render the summary text.
        if not stock_info:
            if total_dsw_stock > 0:
                result = f"{product_name}: Su anda magazalarda stokta yok, ancak yakinda gelecek. On siparis verebilirsiniz."
            else:
                result = f"{product_name}: Su anda hicbir magazada stokta bulunmuyor."
        else:
            prompt_lines = [f"{product_name} stok durumu:"]
            for warehouse, variants in stock_info.items():
                if isinstance(variants, list):
                    prompt_lines.append(f"- {warehouse}:")
                    for variant in variants:
                        prompt_lines.append(f"  β’ {variant}")
                else:
                    prompt_lines.append(f"- {warehouse}: {variants}")

            if total_stock > 0:
                prompt_lines.append(f"β Urun stokta mevcut")

            result = "\n".join(prompt_lines)

        # Store the rendered text in the cache.
        stock_cache[cache_key] = (result, current_time)

        return result

    except Exception as e:
        logger.error(f"API hatasi: {e}")
        return None
| |
|
def is_stock_query(message):
    """Fallback keyword check: does the message look like a stock question?"""
    lowered = message.lower()
    for keyword in ('stok', 'stock', 'var mΔ±', 'mevcut'):
        if keyword in lowered:
            return True
    return False
| |
|
| | |
| | |
| | |
def get_warehouse_stock(product_name):
    """Fetch per-store stock info from the B2B XML API, preferring the GPT-5 path.

    Returns a list of display strings (one per store / result), or None on failure.
    Matching falls through three passes: variant (size/color) match, exact
    two-word prefix match, then fuzzy shared-word match.
    """
    # Preferred path: delegate to the smart search module when available.
    if USE_GPT5_SEARCH:
        try:
            gpt5_result = get_warehouse_stock_smart_with_price(product_name)
            if gpt5_result and isinstance(gpt5_result, list):
                # Already-formatted strings pass straight through.
                if all(isinstance(item, str) for item in gpt5_result):
                    return gpt5_result
                # Otherwise render dict results into display strings.
                warehouse_info = []
                for item in gpt5_result:
                    if isinstance(item, dict):
                        info = f"π¦ {item.get('name', '')}"
                        if item.get('variant'):
                            info += f" ({item['variant']})"
                        if item.get('warehouses'):
                            info += f"\nπ Mevcut: {', '.join(item['warehouses'])}"
                        if item.get('price'):
                            info += f"\nπ° {item['price']}"
                        warehouse_info.append(info)
                    else:
                        warehouse_info.append(str(item))
                return warehouse_info if warehouse_info else None
        except Exception as e:
            # Fall through to the XML path below on any smart-search failure.
            logger.error(f"GPT-5 warehouse search error: {e}")

    # Fallback path: parse the B2B warehouse XML feed directly.
    try:
        import re  # NOTE: shadows the module-level import; kept as-is.
        warehouse_url = 'https://video.trek-turkey.com/bizimhesap-warehouse-xml-b2b-api-v2.php'
        response = requests.get(warehouse_url, verify=False, timeout=15)

        if response.status_code != 200:
            return None

        root = ET.fromstring(response.content)

        # Normalize the query and drop model-year / generation suffixes.
        search_name = normalize_turkish(product_name.lower().strip())
        search_name = search_name.replace('(2026)', '').replace('(2025)', '').replace(' gen 3', '').replace(' gen', '').strip()
        search_words = search_name.split()

        best_matches = []
        exact_matches = []
        variant_matches = []
        candidates = []

        # Tokens that describe a size or color rather than the product itself.
        size_color_words = ['s', 'm', 'l', 'xl', 'xs', 'small', 'medium', 'large',
                            'turuncu', 'siyah', 'beyaz', 'mavi', 'kirmizi', 'yesil',
                            'orange', 'black', 'white', 'blue', 'red', 'green']

        variant_words = [word for word in search_words if word in size_color_words]
        product_words = [word for word in search_words if word not in size_color_words]

        # Short queries containing a size/color token get variant-aware matching.
        is_size_color_query = len(variant_words) > 0 and len(search_words) <= 4

        # Pass 1: match on ProductVariant when a size/color was requested.
        if is_size_color_query:
            for product in root.findall('Product'):
                product_name_elem = product.find('ProductName')
                variant_elem = product.find('ProductVariant')

                if product_name_elem is not None and product_name_elem.text:
                    xml_product_name = product_name_elem.text.strip()
                    normalized_product_name = normalize_turkish(xml_product_name.lower())

                    product_name_matches = True
                    if product_words:
                        product_name_matches = all(word in normalized_product_name for word in product_words)

                    if product_name_matches:
                        if variant_elem is not None and variant_elem.text:
                            # Hyphens in variants ("54-siyah") become spaces for matching.
                            variant_text = normalize_turkish(variant_elem.text.lower().replace('-', ' '))

                            if all(word in variant_text for word in variant_words):
                                variant_matches.append((product, xml_product_name, variant_text))

            if variant_matches:
                candidates = variant_matches
            else:
                # No variant hit: fall back to name-only matching below.
                is_size_color_query = False

        # Pass 2: exact match on the first two words of the product name.
        if not is_size_color_query or not candidates:
            for product in root.findall('Product'):
                product_name_elem = product.find('ProductName')
                if product_name_elem is not None and product_name_elem.text:
                    xml_product_name = product_name_elem.text.strip()
                    normalized_xml = normalize_turkish(xml_product_name.lower())
                    normalized_xml = normalized_xml.replace('(2026)', '').replace('(2025)', '').replace(' gen 3', '').replace(' gen', '').strip()
                    xml_words = normalized_xml.split()

                    if len(search_words) >= 2 and len(xml_words) >= 2:
                        search_key = f"{search_words[0]} {search_words[1]}"
                        xml_key = f"{xml_words[0]} {xml_words[1]}"

                        if search_key == xml_key:
                            exact_matches.append((product, xml_product_name, normalized_xml))

        if not candidates:
            candidates = exact_matches if exact_matches else []

        # Pass 3: fuzzy match - same first word plus at least two shared words;
        # keep only the candidates with the highest overlap count.
        if not candidates:
            for product in root.findall('Product'):
                product_name_elem = product.find('ProductName')
                if product_name_elem is not None and product_name_elem.text:
                    xml_product_name = product_name_elem.text.strip()
                    normalized_xml = normalize_turkish(xml_product_name.lower())
                    normalized_xml = normalized_xml.replace('(2026)', '').replace('(2025)', '').replace(' gen 3', '').replace(' gen', '').strip()
                    xml_words = normalized_xml.split()

                    common_words = set(search_words) & set(xml_words)

                    if (len(common_words) >= 2 and
                        len(search_words) > 0 and len(xml_words) > 0 and
                        search_words[0] == xml_words[0]):
                        best_matches.append((product, xml_product_name, normalized_xml, len(common_words)))

            if best_matches:
                max_common = max(match[3] for match in best_matches)
                candidates = [(match[0], match[1], match[2]) for match in best_matches if match[3] == max_common]

        # Aggregate positive stock counts per warehouse across all candidates.
        warehouse_stock_map = {}

        for product, xml_name, _ in candidates:
            for warehouse in product.findall('Warehouse'):
                name_elem = warehouse.find('Name')
                stock_elem = warehouse.find('Stock')

                if name_elem is not None and stock_elem is not None:
                    warehouse_name = name_elem.text if name_elem.text else "Bilinmeyen"
                    try:
                        stock_count = int(stock_elem.text) if stock_elem.text else 0
                        if stock_count > 0:
                            if warehouse_name in warehouse_stock_map:
                                warehouse_stock_map[warehouse_name] += stock_count
                            else:
                                warehouse_stock_map[warehouse_name] = stock_count
                    except (ValueError, TypeError):
                        # Non-numeric stock value: skip this warehouse entry.
                        pass

        if warehouse_stock_map:
            # Counts are intentionally not shown - only availability per store.
            all_warehouse_info = []
            for warehouse_name, total_stock in warehouse_stock_map.items():
                all_warehouse_info.append(f"{warehouse_name}: Stokta var")
            return all_warehouse_info
        else:
            return ["Hicbir magazada stokta bulunmuyor"]

    except Exception as e:
        logger.error(f"Magaza stok bilgisi cekme hatasi: {e}")
        return None
| |
|
| | |
| | |
| | |
# --- Load the public product feed (names, links, prices, stock) at import time ---
# On any failure `products` ends up as an empty list so the app still boots.
try:
    url = 'https://www.trekbisiklet.com.tr/output/8582384479'
    response = requests.get(url, verify=False, timeout=10)
    root = ET.fromstring(response.content)
    all_items = root.findall('item')

    # Each entry: (first_word_of_name_lowercased, item_info_tuple, full_name).
    products = []

    for item in all_items:
        # Per-item defaults; flag fields default to "0" (string, as in the feed).
        stock_number = 0
        stock_amount = "stokta degil"
        price = ""
        price_eft = ""
        product_link = ""
        picture_url = ""
        category_tree = ""
        category_label = ""
        stock_code = ""
        root_product_stock_code = ""
        is_option_of_product = "0"
        is_optioned_product = "0"

        # Items without a root label cannot be indexed - skip them.
        rootlabel = item.find('rootlabel')
        if rootlabel is None or not rootlabel.text:
            continue

        full_name = rootlabel.text.strip()
        name_words = full_name.lower().split()
        name = name_words[0] if name_words else "unknown"

        # Stock amount: numeric text -> "stokta" / "stokta degil".
        stock_element = item.find('stockAmount')
        if stock_element is not None and stock_element.text:
            try:
                stock_number = int(stock_element.text.strip())
                stock_amount = "stokta" if stock_number > 0 else "stokta degil"
            except (ValueError, TypeError):
                stock_number = 0
                stock_amount = "stokta degil"

        link_element = item.find('productLink')
        product_link = link_element.text if link_element is not None and link_element.text else ""

        picture_element = item.find('picture1Path')
        picture_url = picture_element.text if picture_element is not None and picture_element.text else ""

        category_tree_element = item.find('categoryTree')
        category_tree = category_tree_element.text if category_tree_element is not None and category_tree_element.text else ""

        category_label_element = item.find('productCategoryLabel')
        category_label = category_label_element.text if category_label_element is not None and category_label_element.text else ""

        stock_code_element = item.find('stockCode')
        stock_code = stock_code_element.text if stock_code_element is not None and stock_code_element.text else ""

        root_product_stock_code_element = item.find('rootProductStockCode')
        root_product_stock_code = root_product_stock_code_element.text if root_product_stock_code_element is not None and root_product_stock_code_element.text else ""

        is_option_of_product_element = item.find('isOptionOfAProduct')
        is_option_of_product = is_option_of_product_element.text if is_option_of_product_element is not None and is_option_of_product_element.text else "0"

        is_optioned_product_element = item.find('isOptionedProduct')
        is_optioned_product = is_optioned_product_element.text if is_optioned_product_element is not None and is_optioned_product_element.text else "0"

        # Prices are only computed for in-stock items; otherwise they stay "".
        if stock_amount == "stokta":
            price_element = item.find('priceTaxWithCur')
            price_str = price_element.text if price_element is not None and price_element.text else "0"

            # Prefer the rebate price when it is strictly lower than the list price.
            price_rebate_element = item.find('priceRebateWithTax')
            price_rebate_str = price_rebate_element.text if price_rebate_element is not None and price_rebate_element.text else ""

            final_price_str = price_str
            if price_rebate_str:
                try:
                    normal_price = float(price_str)
                    rebate_price = float(price_rebate_str)
                    if rebate_price < normal_price:
                        final_price_str = price_rebate_str
                except (ValueError, TypeError):
                    final_price_str = price_str

            price_eft_element = item.find('priceEft')
            price_eft_str = price_eft_element.text if price_eft_element is not None and price_eft_element.text else ""

            # Round display prices to a coarser step the higher the price.
            try:
                price_float = float(final_price_str)
                if price_float > 200000:
                    price = str(round(price_float / 5000) * 5000)
                elif price_float > 30000:
                    price = str(round(price_float / 1000) * 1000)
                elif price_float > 10000:
                    price = str(round(price_float / 100) * 100)
                else:
                    price = str(round(price_float / 10) * 10)
            except (ValueError, TypeError):
                price = final_price_str

            # EFT (bank transfer) price: same rounding scheme, or 2.5% off the
            # list price when the feed provides no explicit EFT price.
            if price_eft_str:
                try:
                    price_eft_float = float(price_eft_str)
                    if price_eft_float > 200000:
                        price_eft = str(round(price_eft_float / 5000) * 5000)
                    elif price_eft_float > 30000:
                        price_eft = str(round(price_eft_float / 1000) * 1000)
                    elif price_eft_float > 10000:
                        price_eft = str(round(price_eft_float / 100) * 100)
                    else:
                        price_eft = str(round(price_eft_float / 10) * 10)
                except (ValueError, TypeError):
                    price_eft = price_eft_str
            else:
                try:
                    price_eft_float = float(price_str)
                    price_eft = str(round(price_eft_float * 0.975 / 10) * 10)
                except:  # NOTE(review): bare except - narrow to (ValueError, TypeError)?
                    price_eft = ""

        # 12-tuple consumed elsewhere; field order is part of the contract.
        item_info = (stock_amount, price, product_link, price_eft, str(stock_number),
                     picture_url, category_tree, category_label, stock_code,
                     root_product_stock_code, is_option_of_product, is_optioned_product)
        products.append((name, item_info, full_name))

    logger.info(f"β {len(products)} urun yuklendi")

except Exception as e:
    logger.error(f"β Urun yukleme hatasi: {e}")
    import traceback
    traceback.print_exc()
    products = []
| |
|
| | |
| | |
| | |
def get_system_messages():
    """Return the active system prompt messages for the chat completion call.

    Delegates to the modular prompts package; falls back to a single built-in
    system prompt when that fails for any reason.

    Fix: the previous bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.
    """
    try:
        return get_active_prompts()
    except Exception:
        # Prompts module unavailable or broken: minimal built-in fallback.
        return [
            {"role": "system", "content": "Sen Trek bisiklet uzmani AI asistanisin. Trek ve Electra bisikletler konusunda uzmansin. Stokta bulunan urunlerin fiyat bilgilerini verebilirsin."}
        ]
| |
|
| | |
| | |
| | |
# Per-user conversation state: phone_number -> {messages, current_category,
# current_product, current_product_link, current_product_price, last_activity}.
conversation_memory = {}
| |
|
def get_conversation_context(phone_number):
    """Return (creating on first use) the conversation state for a number."""
    context = conversation_memory.get(phone_number)
    if context is None:
        context = {
            "messages": [],
            "current_category": None,
            "current_product": None,
            "current_product_link": None,
            "current_product_price": None,
            "last_activity": None
        }
        conversation_memory[phone_number] = context
    return context
| |
|
def add_to_conversation(phone_number, user_message, ai_response):
    """Record one user/AI exchange and refresh derived conversation state."""
    context = get_conversation_context(phone_number)
    context["last_activity"] = datetime.datetime.now()

    entry = {
        "user": user_message,
        "ai": ai_response,
        "timestamp": datetime.datetime.now()
    }
    context["messages"].append(entry)

    # Cap the history at the 10 most recent exchanges.
    if len(context["messages"]) > 10:
        context["messages"] = context["messages"][-10:]

    # Update category/product tracking based on the new message.
    detect_category(phone_number, user_message, ai_response)
| |
|
def detect_category(phone_number, user_message, ai_response):
    """Detect which bike category (and, when possible, exact model) is being discussed.

    Side effects: updates context["current_category"] and context["current_product"].
    Returns the detected category name, or the previously stored category when
    no keyword matches.  ``ai_response`` is currently unused but kept for
    interface compatibility.

    Fixes: the keyword is now passed through ``re.escape`` before being
    interpolated into the regex (keywords like "marlin+" contain regex
    metacharacters), and the redundant in-loop ``import re`` was removed
    (``re`` is imported at module level).
    """
    context = get_conversation_context(phone_number)

    # Category -> trigger keywords; dict order decides precedence.
    categories = {
        "marlin": ["marlin", "marlin+"],
        "madone": ["madone"],
        "emonda": ["emonda", "Γ©monda"],
        "domane": ["domane"],
        "checkpoint": ["checkpoint"],
        "fuel": ["fuel", "fuel ex", "fuel exe"],
        "procaliber": ["procaliber"],
        "supercaliber": ["supercaliber"],
        "fx": ["fx"],
        "ds": ["ds", "dual sport"],
        "powerfly": ["powerfly"],
        "rail": ["rail"],
        "verve": ["verve"],
        "townie": ["townie"]
    }

    user_lower = user_message.lower()
    for category, keywords in categories.items():
        for keyword in keywords:
            if keyword in user_lower:
                context["current_category"] = category

                # Try to capture the full model string (e.g. "marlin 7",
                # "madone slr 9") following the keyword.
                pattern = rf'{re.escape(keyword)}\s*\+?\s*(?:slr\s*)?(\d+)?'
                match = re.search(pattern, user_lower)
                if match and match.group(1):
                    # Keyword plus a model number: store the whole match.
                    context["current_product"] = match.group(0).strip()
                else:
                    # No model number found: store just the keyword.
                    context["current_product"] = keyword

                return category

    # Nothing matched: keep whatever category was tracked before.
    return context.get("current_category")
| |
|
def build_context_messages(phone_number, current_message):
    """Assemble the chat-completion message list: system prompts, tracked
    category/product context, the last 3 exchanges, then the new user message.
    """
    context = get_conversation_context(phone_number)
    system_messages = get_system_messages()

    # Pin the conversation to the tracked category so follow-up questions
    # ("fiyat ne?") are answered in that category's context.
    if context.get("current_category"):
        cat = context['current_category'].upper()
        category_msg = f"""KRITIK BAGLAIM BILGISI:
Musteri su anda {cat} modelleri hakkinda konusuyor.
Butun sorulari bu baglamda cevapla.
"Hangi model var", "stok var mi", "fiyat ne" gibi sorular {cat} icin sorulmus demektir.
DS, FX, Verve gibi BASKA kategorilerden bahsetme - sadece {cat} hakkinda konusuyoruz!"""
        system_messages.append({"role": "system", "content": category_msg})

    # Inject the last discussed product (plus link/price when known).
    if context.get("current_product"):
        product_context = f"Son konusulan urun: {context['current_product']}"
        if context.get("current_product_link"):
            product_context += f"\nUrun linki: {context['current_product_link']}"
        if context.get("current_product_price"):
            product_context += f"\nUrun fiyati: {context['current_product_price']}"
        system_messages.append({"role": "system", "content": product_context})

    # Only the 3 most recent exchanges are replayed to keep the prompt short.
    recent_messages = context["messages"][-3:] if context["messages"] else []

    all_messages = system_messages.copy()

    for msg in recent_messages:
        all_messages.append({"role": "user", "content": msg["user"]})
        all_messages.append({"role": "assistant", "content": msg["ai"]})

    all_messages.append({"role": "user", "content": current_message})

    return all_messages
| |
|
| | |
| | |
| | |
| |
|
| |
|
def extract_product_from_vision_response(response):
    """Extract a Trek product name from a GPT Vision reply.

    Example: "Trek Domane+ SLR 7 AXS" -> "Domane+ SLR 7 AXS".
    Returns None when no known model name is found.

    Cleanup: removed the unused ``response_lower`` local and the redundant
    in-function ``import re`` (``re`` is imported at module level).
    """
    # Ordered from most to least specific; the first matching pattern wins.
    patterns = [
        # "Trek <family> [SLR|SL|Gen|EX|EXe] [number] [AXS|Di2|eTap|Frameset]"
        r'Trek\s+((?:Domane|Madone|Emonda|Marlin|Fuel|Rail|Powerfly|Checkpoint|FX|Verve|Dual\s*Sport|Procaliber|Supercaliber|Roscoe|Top\s*Fuel|Slash|Remedy|X-?Caliber|Allant)[+]?\s*(?:SLR|SL|Gen|EX|EXe)?\s*\d*\s*(?:AXS|Di2|eTap|Frameset)?)',
        # Same families without the leading "Trek", but requiring a model number.
        r'((?:Domane|Madone|Emonda|Marlin|Fuel|Rail|Powerfly|Checkpoint|FX|Verve|Dual\s*Sport|Procaliber|Supercaliber|Roscoe|Top\s*Fuel|Slash|Remedy|X-?Caliber|Allant)[+]?\s*(?:SLR|SL|Gen|EX|EXe)?\s*\d+\s*(?:AXS|Di2|eTap|Frameset)?)',
        # Road families with an SLR/SL tier and a number.
        r'((?:Domane|Madone|Emonda)[+]?\s+(?:SLR|SL)\s*\d+)',
        # Bare "Marlin <n>" / "FX [Sport] <n>".
        r'(Marlin\s*\d+)',
        r'(FX\s*(?:Sport)?\s*\d+)',
    ]

    for pattern in patterns:
        match = re.search(pattern, response, re.IGNORECASE)
        if match:
            product = match.group(1).strip()
            # Defensive: drop a leading "Trek " if the group captured it.
            product = re.sub(r'^Trek\s+', '', product, flags=re.IGNORECASE)
            return product

    return None
| |
|
| |
|
def process_whatsapp_message_with_media(user_message, phone_number, media_urls, media_types):
    """
    Handle an incoming WhatsApp message that carries media attachments.

    Forwards the image(s) plus conversation context to the vision-capable
    model, optionally enriches the answer with warehouse stock data, formats
    it for WhatsApp, and records the exchange in conversation history.

    Args:
        user_message: Caption text sent with the media (may be empty/None).
        phone_number: Sender id, e.g. "whatsapp:+90...".
        media_urls: Twilio media URLs extracted from the webhook form.
        media_types: Content types parallel to media_urls (may be shorter).

    Returns:
        Formatted reply text, or a Turkish-language error message on failure.
    """
    try:
        logger.info(f"πΌοΈ Gorsel analizi basliyor: {len(media_urls)} medya")
        logger.info(f"π Medya URL'leri: {media_urls}")
        logger.info(f"π Medya tipleri: {media_types}")

        # Passive profiling is best-effort; never let it break the reply path.
        # NOTE(review): bare except hides real errors - consider logging.
        try:
            profile_analysis = analyze_user_message(phone_number, user_message)
            logger.info(f"π Profil analizi: {phone_number} -> {profile_analysis}")
        except:
            pass

        # Media present -> route to the vision model.
        model = get_model_for_request(has_media=True)

        # Conversation history plus the current (or a default) user prompt.
        messages = build_context_messages(phone_number, user_message if user_message else "Gonderilen gorseli analiz et")

        # Multi-part user message: one text part + one image part per media.
        vision_message = {
            "role": "user",
            "content": []
        }

        if user_message and user_message.strip():
            vision_message["content"].append({
                "type": "text",
                "text": user_message
            })
        else:
            # No caption: fall back to a generic "describe this image" prompt.
            vision_message["content"].append({
                "type": "text",
                "text": "Bu gorselde ne var? Eger bisiklet veya bisiklet parcasi ise detayli acikla."
            })

        valid_images = 0
        for i, media_url in enumerate(media_urls):
            # media_types can be shorter than media_urls; default to JPEG.
            media_type = media_types[i] if i < len(media_types) else "image/jpeg"

            # Only images are forwarded; audio/video/documents are skipped.
            if media_type and media_type.startswith('image/'):
                try:
                    # Twilio media URLs require HTTP auth the vision API does
                    # not have, so rewrite them through a public proxy.
                    if 'api.twilio.com' in media_url:
                        import re
                        match = re.search(r'/Messages/([^/]+)/Media/([^/]+)', media_url)
                        if match:
                            message_sid = match.group(1)
                            media_sid = match.group(2)
                            proxy_url = f"https://video.trek-turkey.com/twilio-media-proxy.php?action=media&message={message_sid}&media={media_sid}"
                            logger.info(f"π Proxy URL olusturuldu: {proxy_url}")

                            # Probe the proxy; on any failure fall back to the
                            # raw Twilio URL rather than dropping the image.
                            # NOTE(review): verify=False disables TLS cert
                            # verification for the probe - confirm intended.
                            try:
                                test_response = requests.head(proxy_url, timeout=5, verify=False)
                                if test_response.status_code == 200:
                                    vision_message["content"].append({
                                        "type": "image_url",
                                        "image_url": {"url": proxy_url}
                                    })
                                    valid_images += 1
                                    logger.info(f"β Proxy URL gecerli: {proxy_url}")
                                else:
                                    logger.error(f"β Proxy URL calismiyor: {test_response.status_code}")
                                    vision_message["content"].append({
                                        "type": "image_url",
                                        "image_url": {"url": media_url}
                                    })
                                    valid_images += 1
                            except Exception as proxy_error:
                                logger.error(f"β Proxy test hatasi: {proxy_error}")
                                vision_message["content"].append({
                                    "type": "image_url",
                                    "image_url": {"url": media_url}
                                })
                                valid_images += 1
                        else:
                            logger.error(f"β Twilio URL parse edilemedi: {media_url}")
                    else:
                        # Non-Twilio URL: assume it is publicly fetchable.
                        vision_message["content"].append({
                            "type": "image_url",
                            "image_url": {"url": media_url}
                        })
                        valid_images += 1
                        logger.info(f"β Dogrudan URL eklendi: {media_url}")
                except Exception as url_error:
                    logger.error(f"β URL isleme hatasi: {url_error}")
            else:
                logger.warning(f"β οΈ Gorsel olmayan medya atlandi: {media_type}")

        # Nothing usable -> ask the customer to retry or switch to text.
        if valid_images == 0:
            logger.error("β Hic gecerli gorsel bulunamadi")
            return "Gonderdiginiz gorsel islenemedi. Lutfen farkli bir gorsel gonderin veya sorunuzu yazili olarak iletin."

        logger.info(f"β {valid_images} gorsel islenecek")

        # Replace the plain-text current user message (appended by
        # build_context_messages) with the multi-part vision message.
        # NOTE(review): the filter drops ANY history entry equal to the last
        # message when it is user-role, not only the final one - confirm.
        messages = [msg for msg in messages if not (msg.get("role") == "user" and msg == messages[-1])]
        messages.append(vision_message)

        # Prepend bike-recognition instructions so the model answers with
        # brand/model details instead of asking for a clearer photo.
        bike_recognition_prompt = {
            "role": "system",
            "content": """Gonderilen gorselleri dikkatle analiz et:
1. Eger bisiklet veya bisiklet parcasi goruyorsan, detaylica tanimla (marka, model, renk, beden, ozellikler)
2. Trek bisiklet ise modeli tahmin etmeye calis
3. Stok veya fiyat sorulursa, gorseldeki bisikletin ozelliklerini belirterek bilgi ver
4. Gorsel net degilse veya tanimlanamiyorsa, kullanicidan daha net bir gorsel istemek yerine, gorselde gorduklerini acikla
5. Eger gorsel bisikletle ilgili degilse, ne gordugunu kisaca acikla"""
        }
        messages.insert(0, bike_recognition_prompt)

        if not OPENAI_API_KEY:
            logger.error("β OpenAI API anahtari eksik")
            return "Sistem hatasi olustu. Lutfen daha sonra tekrar deneyin."

        logger.info(f"π€ GPT Vision API'ye gonderiliyor: {len(messages)} mesaj, {valid_images} gorsel")

        payload = {
            "model": model,
            "messages": messages,
            "max_tokens": 800,
            "temperature": 0.3
        }

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {OPENAI_API_KEY}"
        }

        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)

        logger.info(f"π₯ API yaniti: {response.status_code}")

        if response.status_code == 200:
            result = response.json()
            ai_response = result['choices'][0]['message']['content']
            logger.info(f"β Gorsel analizi basarili: {ai_response[:100]}...")

            # Best-effort: if the answer names a product, cross-check real
            # warehouse stock and correct/augment the model's stock claims.
            try:
                from smart_warehouse_with_price import get_warehouse_stock

                product_name = extract_product_from_vision_response(ai_response)

                if product_name:
                    logger.info(f"π Gorselden tespit edilen urun: {product_name}")

                    stock_info = get_warehouse_stock(product_name)

                    if stock_info:
                        logger.info(f"β Stok bilgisi bulundu: {stock_info[:100]}...")

                        # Model claimed "out of stock" but warehouse says
                        # otherwise: strip the wrong sentence, then append
                        # the authoritative stock line if still missing.
                        if "stokta bulunmuyor" in ai_response.lower() or "stokta yok" in ai_response.lower():
                            if "stokta bulunmuyor" not in stock_info.lower():
                                ai_response = re.sub(
                                    r'[^.]*stok[^.]*bulunmuyor[^.]*[.]?',
                                    '',
                                    ai_response,
                                    flags=re.IGNORECASE
                                )
                                ai_response = ai_response.strip()

                                if "Stok:" not in ai_response and "stokta" not in stock_info.lower():
                                    ai_response = ai_response + "\n\n" + stock_info

                        # No stock line at all: append warehouse data as-is.
                        elif "Stok:" not in ai_response:
                            ai_response = ai_response + "\n\n" + stock_info

                    else:
                        logger.info(f"β οΈ Urun icin stok bilgisi bulunamadi: {product_name}")
                else:
                    logger.info("β οΈ Gorselden urun adi cikarilamadi")
            except Exception as stock_error:
                logger.error(f"β Vision stok kontrolu hatasi: {stock_error}")

            # WhatsApp formatting is optional; fall back to the raw answer.
            # NOTE(review): bare except - consider narrowing.
            try:
                formatted_response = extract_product_info_whatsapp(ai_response)
            except:
                formatted_response = ai_response

            # Persist the exchange so later text turns keep the context.
            add_to_conversation(phone_number, f"[Gorsel gonderildi] {user_message if user_message else ''}", formatted_response)

            return formatted_response
        else:
            error_detail = response.text[:500] if response.text else "Detay yok"
            logger.error(f"β OpenAI API Error: {response.status_code} - {error_detail}")

            # Map common API failures to customer-friendly Turkish messages.
            if response.status_code == 400:
                return "Gorsel formati desteklenmiyor. Lutfen JPG veya PNG formatinda bir gorsel gonderin."
            elif response.status_code == 413:
                return "Gorsel boyutu cok buyuk. Lutfen daha kucuk bir gorsel gonderin."
            elif response.status_code == 429:
                return "Sistem su anda yogun. Lutfen birkac saniye sonra tekrar deneyin."
            else:
                return "Gorsel su anda analiz edilemiyor. Sorunuzu yazili olarak iletebilir misiniz?"

    except requests.exceptions.Timeout:
        logger.error("β API timeout hatasi")
        return "Islem zaman asimina ugradi. Lutfen tekrar deneyin."
    except Exception as e:
        logger.error(f"β Medya isleme hatasi: {e}")
        import traceback
        traceback.print_exc()
        return "Gorsel islenirken bir sorun olustu. Lutfen sorunuzu yazili olarak iletin veya farkli bir gorsel deneyin."
| |
|
| |
|
def process_whatsapp_message_with_memory(user_message, phone_number):
    """
    Handle a text-only WhatsApp message using the (smarter) text model.

    Pipeline: passive profiling -> optional store notification / follow-up ->
    intent + context product detection -> local product-list matching ->
    warehouse XML stock lookup -> optional realtime stock API -> chat
    completion with a Turkish style reminder -> formatting and history.

    Args:
        user_message: The customer's text message.
        phone_number: Sender id, e.g. "whatsapp:+90...".

    Returns:
        Reply text, or a (reply_text, product_image_url) tuple when a
        specific in-stock product with an image was matched, or a Turkish
        error message on failure.
    """
    try:
        # Text-only -> text model.
        model = get_model_for_request(has_media=False)

        # Passive profiling is best-effort; never block the reply path.
        # NOTE(review): bare except hides real errors - consider logging.
        try:
            profile_analysis = analyze_user_message(phone_number, user_message)
            logger.info(f"π Profil analizi: {phone_number}")
        except:
            pass

        # Optionally alert the store owner ("Mehmet Bey") about high-value
        # messages (reservations, store/price questions). Entirely optional:
        # failures are logged and the customer flow continues.
        if USE_STORE_NOTIFICATION:
            try:
                should_notify_mehmet, notification_reason, urgency = should_notify_mehmet_bey(user_message)

                # Keyword pass said no: retry with intent analysis if enabled.
                if not should_notify_mehmet and USE_INTENT_ANALYZER:
                    context = get_conversation_context(phone_number)
                    intent_analysis = analyze_customer_intent(user_message, context)
                    should_notify_mehmet, notification_reason, urgency = should_notify_mehmet_bey(user_message, intent_analysis)
                else:
                    intent_analysis = None

                if should_notify_mehmet:
                    # Resolve the product name from intent or conversation
                    # context, with Turkish "not specified" fallbacks.
                    if intent_analysis:
                        product = intent_analysis.get("product") or "Belirtilmemis"
                    else:
                        context = get_conversation_context(phone_number)
                        product = context.get("current_category") or "Urun belirtilmemis"

                    # Map the notification reason to a coarse action code.
                    if "rezervasyon" in notification_reason.lower() or urgency == "high":
                        action = "reserve"
                    elif "magaza" in notification_reason.lower() or "lokasyon" in notification_reason.lower():
                        action = "info"
                    elif "fiyat" in notification_reason.lower() or "odeme" in notification_reason.lower():
                        action = "price"
                    else:
                        action = "info"

                    additional_info = f"{notification_reason}\n\nMusteri Mesaji: '{user_message}'"
                    if urgency == "high":
                        additional_info = "β οΈ YUKSEK ONCELIK β οΈ\n" + additional_info

                    result = send_store_notification(
                        customer_phone=phone_number,
                        customer_name=None,
                        product_name=product,
                        action=action,
                        store_name=None,
                        additional_info=additional_info
                    )

                    if result:
                        logger.info(f"β Mehmet Bey'e bildirim gonderildi!")
                        logger.info(f" π Sebep: {notification_reason}")
                        logger.info(f" β‘ Oncelik: {urgency}")
                        logger.info(f" π¦ Urun: {product}")

                    # Schedule a delayed follow-up if the message warrants one.
                    if USE_FOLLOW_UP and follow_up_manager:
                        try:
                            follow_up_analysis = analyze_message_for_follow_up(user_message)
                            if follow_up_analysis and follow_up_analysis.get("needs_follow_up"):
                                follow_up = follow_up_manager.create_follow_up(
                                    customer_phone=phone_number,
                                    product_name=product,
                                    follow_up_type=follow_up_analysis["follow_up_type"],
                                    original_message=user_message,
                                    follow_up_hours=follow_up_analysis.get("follow_up_hours", 24),
                                    notes=follow_up_analysis.get("reason", "")
                                )
                                logger.info(f"π Takip olusturuldu: {follow_up_analysis.get('reason', '')}")
                        except Exception as follow_up_error:
                            logger.error(f"Takip sistemi hatasi: {follow_up_error}")
            except Exception as notify_error:
                logger.error(f"Bildirim hatasi: {notify_error}")

        # Conversation history + current message for the chat completion.
        messages = build_context_messages(phone_number, user_message)

        # Try to identify which product the customer is talking about:
        # first from intent analysis, then from conversation context.
        detected_product = None
        intent_analysis = None
        if USE_INTENT_ANALYZER:
            try:
                context = get_conversation_context(phone_number)
                intent_analysis = analyze_customer_intent(user_message, context)
                if intent_analysis and intent_analysis.get("product"):
                    detected_product = intent_analysis.get("product")
                    logger.info(f"π― Intent'ten tespit edilen urun: {detected_product}")
            except Exception as e:
                logger.error(f"Intent analiz hatasi: {e}")
                intent_analysis = None

        # Fall back to the conversation context: a full product name is
        # preferred over just a category.
        if not detected_product:
            context = get_conversation_context(phone_number)

            if context.get("current_product"):
                detected_product = context.get("current_product")
                logger.info(f"π― Context'ten tespit edilen TAM URUN: {detected_product}")
            elif context.get("current_category"):
                detected_product = context.get("current_category")
                logger.info(f"π― Context'ten tespit edilen kategori: {detected_product}")

        # The stock lookups below search by this string.
        stock_query_product = detected_product if detected_product else user_message

        # Word bag used by the exact-key fallback match further down.
        input_words = user_message.lower().split()

        if detected_product:
            input_words.extend(detected_product.lower().split())

        # Best local-catalog match bookkeeping.
        found_product_link = None
        found_product_image = None
        found_product_name = None
        best_match_score = 0

        # Words used for fuzzy scoring against catalog product names.
        search_text = (detected_product or user_message).lower()
        search_words = search_text.split()

        def is_bicycle_search(text):
            """Return True when the search looks like a bike (not accessory) query."""
            bike_indicators = ['madone', 'domane', 'emonda', 'checkpoint', 'fuel', 'slash',
                               'marlin', 'procaliber', 'supercaliber', 'fx', 'verve', 'dual sport',
                               'powerfly', 'rail', 'allant', 'bisiklet', 'bike']
            text_lower = text.lower()
            return any(ind in text_lower for ind in bike_indicators)

        def get_product_type(category_tree, product_name):
            """Classify a catalog entry from its category tree and name.

            Returns one of: 'road_bike', 'mtb', 'ebike', 'hybrid', 'bicycle',
            'accessory', 'unknown'.
            """
            cat_lower = (category_tree or "").lower()
            name_lower = (product_name or "").lower()

            # Prefer the category tree when it is informative.
            if 'bisiklet' in cat_lower or 'bike' in cat_lower:
                if 'yol' in cat_lower or 'road' in cat_lower:
                    return 'road_bike'
                elif 'dag' in cat_lower or 'mountain' in cat_lower or 'mtb' in cat_lower:
                    return 'mtb'
                elif 'elektrik' in cat_lower or 'e-bike' in cat_lower:
                    return 'ebike'
                elif 'sehir' in cat_lower or 'hybrid' in cat_lower:
                    return 'hybrid'
                return 'bicycle'
            elif 'aksesuar' in cat_lower or 'parΓ§a' in cat_lower or 'parca' in cat_lower or 'accessory' in cat_lower or 'yedek' in cat_lower:
                return 'accessory'

            # Category tree not helpful: guess from Turkish accessory words
            # and known Trek bike family names in the product name itself.
            accessory_keywords = ['sele', 'gidon', 'pedal', 'zincir', 'lastik', 'jant', 'fren', 'vites',
                                  'kadro kulaΔΔ±', 'kadro kulagi', 'kulak', 'kablo', 'kasnak', 'diΕli',
                                  'zil', 'far', 'lamba', 'pompa', 'kilit', 'Γ§anta', 'canta', 'suluk',
                                  'gΓΆzlΓΌk', 'gozluk', 'kask', 'eldiven', 'ayakkabΔ±', 'ayakkabi',
                                  'forma', 'tayt', 'Εort', 'sort', 'mont', 'yaΔmurluk', 'yagmurluk']
            if any(x in name_lower for x in accessory_keywords):
                return 'accessory'
            if any(x in name_lower for x in ['madone', 'domane', 'emonda', 'checkpoint', 'fuel', 'marlin', 'fx']):
                return 'bicycle'

            return 'unknown'

        def calculate_smart_match_score(search_words, product_name, product_category, is_bike_search):
            """Score how well a catalog product matches the search words.

            Word overlap gives the base score; bike searches boost bikes and
            heavily penalize accessories; mismatched groupset variants
            (AXS/eTap/Di2/Frameset) and mismatched size digits are penalized
            so e.g. "Domane SL 6" never matches "Domane SL 7 AXS".
            """
            product_name_lower = product_name.lower()
            product_words = product_name_lower.split()
            product_type = get_product_type(product_category, product_name)

            # Base: one point per search word found in the product name.
            base_score = sum(1 for word in search_words if word in product_name_lower)

            # Bike query: prefer bikes, effectively exclude accessories.
            if is_bike_search:
                if product_type == 'bicycle':
                    base_score += 2
                elif product_type == 'accessory':
                    base_score -= 100

            # Variant suffixes must match in BOTH directions.
            critical_variants = ['axs', 'etap', 'di2', 'frameset']

            product_critical_variants = [v for v in critical_variants if v in product_name_lower]

            user_critical_variants = [v for v in critical_variants if v in ' '.join(search_words)]

            # Product has a variant the user did not ask for.
            for variant in product_critical_variants:
                if variant not in user_critical_variants:
                    base_score -= 50

            # User asked for a variant the product does not have.
            for variant in user_critical_variants:
                if variant not in product_critical_variants:
                    base_score -= 50

            # Bonus when every meaningful (len > 2) search word matched.
            all_words_match = all(word in product_name_lower for word in search_words if len(word) > 2)
            if all_words_match and len(search_words) > 1:
                base_score += 3

            # Numbers are model tiers: exact match rewarded, different
            # numbers heavily penalized.
            for word in search_words:
                if word.isdigit():
                    if word in product_words:
                        base_score += 2
                    else:
                        for pword in product_words:
                            if pword.isdigit() and pword != word:
                                base_score -= 20

            return base_score

        is_bike_search = is_bicycle_search(search_text)

        # Scan the local catalog; keep the best in-stock match and inject its
        # availability/price into the system context for the model.
        # NOTE(review): products row layout assumed as
        # (key, (stock, price, link, eft_price, ?, image, category), name)
        # based on the indices used here - confirm against the loader.
        for product_info in products:
            product_full_name = product_info[2]
            product_category = product_info[1][6] if len(product_info[1]) > 6 else ""

            match_score = calculate_smart_match_score(
                search_words,
                product_full_name,
                product_category,
                is_bike_search
            )

            if match_score > best_match_score and product_info[1][0] == "stokta":
                best_match_score = match_score

                normal_price = f"Fiyat: {product_info[1][1]} TL"
                if product_info[1][3]:
                    eft_price = f"Havale: {product_info[1][3]} TL"
                    price_info = f"{normal_price}, {eft_price}"
                else:
                    price_info = normal_price

                # Remember link/image so they can be attached to the reply.
                if product_info[1][2]:
                    found_product_link = product_info[1][2]
                if product_info[1][5]:
                    found_product_image = product_info[1][5]
                found_product_name = product_info[2]

                new_msg = f"{product_info[2]} {product_info[1][0]} - {price_info}"
                messages.append({"role": "system", "content": new_msg})

        # Fuzzy scoring found nothing: try an exact catalog-key match.
        if best_match_score == 0:
            for product_info in products:
                if product_info[0] in input_words and product_info[1][0] == "stokta":
                    normal_price = f"Fiyat: {product_info[1][1]} TL"
                    if product_info[1][3]:
                        eft_price = f"Havale: {product_info[1][3]} TL"
                        price_info = f"{normal_price}, {eft_price}"
                    else:
                        price_info = normal_price

                    if product_info[1][2]:
                        found_product_link = product_info[1][2]
                    if product_info[1][5]:
                        found_product_image = product_info[1][5]
                    found_product_name = product_info[2]

                    new_msg = f"{product_info[2]} {product_info[1][0]} - {price_info}"
                    messages.append({"role": "system", "content": new_msg})
                    break

        # Warehouse XML feed: authoritative store stock, when available.
        warehouse_info = get_warehouse_stock(stock_query_product)
        xml_has_valid_stock = False

        if warehouse_info:
            stock_msg = "Magaza Stok Durumu:\n" + "\n".join(warehouse_info) if isinstance(warehouse_info, list) else str(warehouse_info)
            messages.append({"role": "system", "content": stock_msg})

            # "Valid" means the feed did not answer with a not-found /
            # sold-out phrase.
            stock_msg_lower = stock_msg.lower()
            negative_indicators = ["mevcut deΔil", "mevcut degil", "bulunamadi", "bulunmuyor", "tΓΌkendi", "tukendi", "stokta yok"]
            xml_has_valid_stock = not any(indicator in stock_msg_lower for indicator in negative_indicators)

            if xml_has_valid_stock:
                logger.info(f"β XML'den stok bilgisi bulundu: {stock_query_product}")

                # Persist link/price/product into conversation context so
                # later turns ("fiyati ne?") can resolve them.
                context = get_conversation_context(phone_number)
                link_match = re.search(r'Link: (https?://[^\s]+)', stock_msg)
                if link_match:
                    context["current_product_link"] = link_match.group(1)
                    logger.info(f"π Link context'e kaydedildi: {link_match.group(1)}")
                price_match = re.search(r'Fiyat: ([^\n]+)', stock_msg)
                if price_match:
                    context["current_product_price"] = price_match.group(1)

                if stock_query_product:
                    context["current_product"] = stock_query_product

        # Decide whether the slower realtime stock API is worth calling
        # (only when the XML feed produced nothing usable).
        should_query_stock = False

        if not xml_has_valid_stock:
            if intent_analysis:
                # Intent says the customer asks about stock/price/info.
                intents = intent_analysis.get("intents", [])

                stock_related_intents = ["stock", "info", "price", "availability"]
                should_query_stock = any(intent in intents for intent in stock_related_intents)

            # A concretely detected product always justifies a lookup.
            if detected_product:
                should_query_stock = True

            # No intent analysis: fall back to keyword-based detection.
            if not intent_analysis and is_stock_query(user_message):
                should_query_stock = True

        if should_query_stock and stock_query_product and not xml_has_valid_stock:
            realtime_stock = get_realtime_stock_parallel(stock_query_product)
            if realtime_stock:
                messages.append({"role": "system", "content": f"Gercek Zamanli Stok:\n{realtime_stock}"})
                logger.info(f"π¦ API'den stok bilgisi eklendi: {stock_query_product}")

        if not OPENAI_API_KEY:
            return "Sistem hatasi olustu."

        # Final system reminder enforcing the formal-Turkish house style
        # (polite "siz", no trailing questions, keep prior context).
        turkish_reminder = """KRITIK KURALLAR (HER YANIT ICIN GECERLI):
1. ASLA 'sen' kullanma, HER ZAMAN 'siz' kullan (istersen -> isterseniz, sana -> size)
2. ASLA soru ile bitirme (ayirtayim mi?, ister misiniz?, bakar misiniz? YASAK)
3. Bilgiyi ver ve sus, musteri karar versin
4. ONEMLI: Onceki mesajlarda bahsedilen urunleri UNUTMA! "Hangi model var" gibi sorular onceki konudan devam eder.
YANLIS: "Istersen beden ve magaza bazli stok bilgisini de netlestirebilirim."
DOGRU: "Beden ve magaza bazli stok bilgisi icin yazabilirsiniz." """
        messages.append({"role": "system", "content": turkish_reminder})

        # Newer reasoning-style models take max_completion_tokens and do not
        # accept a custom temperature; older chat models take max_tokens.
        if "gpt-5" in model or "o1" in model or "o3" in model:
            payload = {
                "model": model,
                "messages": messages,
                "max_completion_tokens": 1000
            }
        else:
            payload = {
                "model": model,
                "messages": messages,
                "temperature": 0.3,
                "max_tokens": 1000
            }

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {OPENAI_API_KEY}"
        }

        logger.info(f"π€ API istegi gonderiliyor - Model: {model}")

        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)

        if response.status_code == 200:
            result = response.json()
            ai_response = result['choices'][0]['message']['content']

            # The API may silently substitute a model; log what actually ran.
            used_model = result.get('model', model)
            logger.info(f"β Yanit alindi - Kullanilan model: {used_model}")

            # NOTE(review): bare except - consider narrowing.
            try:
                formatted_response = extract_product_info_whatsapp(ai_response)
            except:
                formatted_response = ai_response

            # Only attach link/image when the match was confident enough to
            # be a specific product (threshold 3), not a loose keyword hit.
            is_specific_product_query = best_match_score >= 3

            if found_product_link and is_specific_product_query:
                formatted_response += f"\n\nπ {found_product_link}"

            add_to_conversation(phone_number, user_message, formatted_response)

            # Tuple return signals the webhook to send a follow-up image.
            if found_product_image and is_specific_product_query:
                return (formatted_response, found_product_image)

            return formatted_response
        else:
            error_msg = response.text[:200] if response.text else "Bilinmeyen hata"
            logger.error(f"β API hatasi {response.status_code}: {error_msg}")

            # One retry with the configured fallback model before giving up.
            if model != MODEL_CONFIG["fallback"]:
                logger.info(f"π Fallback modele geciliyor: {MODEL_CONFIG['fallback']}")
                fallback_model = MODEL_CONFIG["fallback"]

                # Fallback is a classic chat model: temperature + max_tokens.
                fallback_payload = {
                    "model": fallback_model,
                    "messages": messages,
                    "temperature": 0.3,
                    "max_tokens": 1000
                }

                response = requests.post(API_URL, headers=headers, json=fallback_payload, timeout=30)

                if response.status_code == 200:
                    result = response.json()
                    ai_response = result['choices'][0]['message']['content']
                    add_to_conversation(phone_number, user_message, ai_response)
                    return ai_response

            return "Su anda bir sorun yasiyorum. Lutfen tekrar deneyin."

    except requests.exceptions.Timeout:
        logger.error("β API timeout hatasi")
        return "Islem zaman asimina ugradi. Lutfen tekrar deneyin."
    except Exception as e:
        logger.error(f"β Mesaj isleme hatasi: {e}")
        import traceback
        traceback.print_exc()
        return "Teknik bir sorun olustu. Lutfen tekrar deneyin."
| |
|
| |
|
| | |
| | |
| | |
# FastAPI application instance; routes below attach to it and
# `uvicorn.run(app, ...)` in the __main__ guard serves it.
app = FastAPI(title="Trek WhatsApp Bot - Hybrid Model")
| |
|
| | @app.post("/whatsapp-webhook") |
| | async def whatsapp_webhook(request: Request): |
| | """WhatsApp webhook - Hybrid model ile""" |
| | try: |
| | form_data = await request.form() |
| |
|
| | from_number = form_data.get('From') |
| | to_number = form_data.get('To') |
| | message_body = form_data.get('Body', '') |
| | message_status = form_data.get('MessageStatus') |
| |
|
| | |
| | num_media = int(form_data.get('NumMedia', 0)) |
| | media_urls = [] |
| | media_types = [] |
| |
|
| | for i in range(num_media): |
| | media_url = form_data.get(f'MediaUrl{i}') |
| | media_type = form_data.get(f'MediaContentType{i}') |
| | if media_url: |
| | media_urls.append(media_url) |
| | media_types.append(media_type) |
| |
|
| | logger.info(f"π± Webhook - From: {from_number}, Body: {message_body[:50] if message_body else 'N/A'}, Media: {num_media}") |
| |
|
| | |
| | if from_number and is_blocked(from_number): |
| | return {"status": "blocked", "message": "Blocked number"} |
| |
|
| | |
| | if from_number and is_rate_limited(from_number): |
| | return {"status": "rate_limited", "message": "Too many messages"} |
| |
|
| | |
| | if message_status in ['sent', 'delivered', 'read', 'failed']: |
| | return {"status": "ignored", "message": f"Status: {message_status}"} |
| |
|
| | |
| | if to_number != TWILIO_WHATSAPP_NUMBER: |
| | return {"status": "ignored", "message": "Outgoing message"} |
| |
|
| | |
| | if not message_body and num_media == 0: |
| | return {"status": "ignored", "message": "Empty message"} |
| |
|
| | logger.info(f"β
MESAJ ALINDI: {from_number} -> Metin: {bool(message_body)}, Medya: {num_media}") |
| |
|
| | if not twilio_client: |
| | return {"status": "error", "message": "Twilio yapilandirmasi eksik"} |
| |
|
| | |
| | |
| | |
| | product_image_url = None |
| |
|
| | if num_media > 0 and media_urls: |
| | |
| | logger.info("πΌοΈ GORSEL TESPIT EDILDI -> GPT-4o Vision kullanilacak") |
| | ai_response = process_whatsapp_message_with_media( |
| | message_body, |
| | from_number, |
| | media_urls, |
| | media_types |
| | ) |
| | else: |
| | |
| | logger.info("π METIN MESAJI -> GPT-5.2 kullanilacak") |
| | result = process_whatsapp_message_with_memory( |
| | message_body, |
| | from_number |
| | ) |
| |
|
| | |
| | if isinstance(result, tuple): |
| | ai_response, product_image_url = result |
| | logger.info(f"πΌοΈ Urun gorseli bulundu: {product_image_url}") |
| | else: |
| | ai_response = result |
| |
|
| | |
| | if len(ai_response) > 1500: |
| | ai_response = ai_response[:1500] + "...\n\nDetayli bilgi: trekbisiklet.com.tr" |
| |
|
| | |
| | |
| | message = twilio_client.messages.create( |
| | messaging_service_sid=TWILIO_MESSAGING_SERVICE_SID, |
| | body=ai_response, |
| | to=from_number |
| | ) |
| |
|
| | logger.info(f"β
YANIT GONDERILDI: {ai_response[:100]}...") |
| |
|
| | |
| | if product_image_url: |
| | try: |
| | image_message = twilio_client.messages.create( |
| | messaging_service_sid=TWILIO_MESSAGING_SERVICE_SID, |
| | media_url=[product_image_url], |
| | to=from_number |
| | ) |
| | logger.info(f"πΌοΈ URUN GORSELI GONDERILDI: {product_image_url}") |
| | except Exception as img_error: |
| | logger.error(f"β Gorsel gonderme hatasi: {img_error}") |
| |
|
| | return {"status": "success", "message_sid": message.sid} |
| |
|
| | except Exception as e: |
| | logger.error(f"β Webhook hatasi: {str(e)}") |
| | import traceback |
| | traceback.print_exc() |
| | return {"status": "error", "message": str(e)} |
| |
|
| |
|
| | @app.get("/") |
| | async def root(): |
| | return { |
| | "message": "Trek WhatsApp Bot - Hybrid Model calisiyor!", |
| | "status": "active", |
| | "models": { |
| | "vision": MODEL_CONFIG["vision"], |
| | "text": MODEL_CONFIG["text"], |
| | "fallback": MODEL_CONFIG["fallback"] |
| | } |
| | } |
| |
|
| |
|
| | @app.get("/health") |
| | async def health(): |
| | return { |
| | "status": "healthy", |
| | "twilio_configured": twilio_client is not None, |
| | "openai_configured": OPENAI_API_KEY is not None, |
| | "models": MODEL_CONFIG, |
| | "products_loaded": len(products), |
| | "modules": { |
| | "gpt5_search": USE_GPT5_SEARCH, |
| | "media_queue": USE_MEDIA_QUEUE, |
| | "store_notification": USE_STORE_NOTIFICATION, |
| | "follow_up": USE_FOLLOW_UP, |
| | "intent_analyzer": USE_INTENT_ANALYZER |
| | } |
| | } |
| |
|
| |
|
| | @app.get("/test-models") |
| | async def test_models(): |
| | """Model durumlarini test et""" |
| | results = {} |
| |
|
| | for model_type, model_name in MODEL_CONFIG.items(): |
| | try: |
| | payload = { |
| | "model": model_name, |
| | "messages": [{"role": "user", "content": "Merhaba, test mesaji."}], |
| | "max_tokens": 10 |
| | } |
| | headers = { |
| | "Content-Type": "application/json", |
| | "Authorization": f"Bearer {OPENAI_API_KEY}" |
| | } |
| | response = requests.post(API_URL, headers=headers, json=payload, timeout=10) |
| | results[model_type] = { |
| | "model": model_name, |
| | "status": "OK" if response.status_code == 200 else f"Error: {response.status_code}", |
| | "available": response.status_code == 200 |
| | } |
| | except Exception as e: |
| | results[model_type] = { |
| | "model": model_name, |
| | "status": f"Error: {str(e)}", |
| | "available": False |
| | } |
| |
|
| | return results |
| |
|
| |
|
| | @app.get("/cache-status") |
| | async def cache_status(): |
| | """Cache durumunu goster""" |
| | return { |
| | "cache_size": len(stock_cache), |
| | "cache_duration_seconds": CACHE_DURATION, |
| | "cached_products": list(stock_cache.keys())[:10] |
| | } |
| |
|
| |
|
| | @app.post("/clear-cache") |
| | async def clear_cache(): |
| | """Cache'i temizle""" |
| | global stock_cache |
| | old_size = len(stock_cache) |
| | stock_cache = {} |
| | return {"message": f"Cache temizlendi. {old_size} kayit silindi."} |
| |
|
| |
|
| | if __name__ == "__main__": |
| | import uvicorn |
| | print("=" * 60) |
| | print(" Trek WhatsApp Bot - HYBRID MODEL") |
| | print("=" * 60) |
| | print(f" πΌοΈ Gorsel mesajlar -> {MODEL_CONFIG['vision']}") |
| | print(f" π Metin mesajlar -> {MODEL_CONFIG['text']}") |
| | print(f" π Fallback -> {MODEL_CONFIG['fallback']}") |
| | print("=" * 60) |
| | print(f" π¦ Yuklenen urun sayisi: {len(products)}") |
| | print(f" π GPT-5 Search: {'Aktif' if USE_GPT5_SEARCH else 'Pasif'}") |
| | print(f" π Store Notification: {'Aktif' if USE_STORE_NOTIFICATION else 'Pasif'}") |
| | print(f" π Follow-Up System: {'Aktif' if USE_FOLLOW_UP else 'Pasif'}") |
| | print(f" π§ Intent Analyzer: {'Aktif' if USE_INTENT_ANALYZER else 'Pasif'}") |
| | print("=" * 60) |
| | uvicorn.run(app, host="0.0.0.0", port=7860) |
| |
|