File size: 7,143 Bytes
8a87574
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
"""
OpenRouter client for Pip's fallback responses.
OpenRouter provides access to multiple LLM providers with a unified API.
Uses OpenAI-compatible API.
"""

import asyncio
import json
import os
from typing import AsyncGenerator, Optional

from openai import AsyncOpenAI


class OpenRouterClient:
    """OpenRouter-powered fallback for Pip.

    Talks to OpenRouter's OpenAI-compatible endpoint via ``AsyncOpenAI``.
    Every public method degrades gracefully: when no API key is configured,
    or a request fails, a safe canned value is returned instead of raising.
    """

    # Free/cheap models on OpenRouter
    DEFAULT_MODEL = "meta-llama/llama-3.1-8b-instruct:free"
    FALLBACK_MODEL = "meta-llama/llama-3.2-3b-instruct:free"

    # Attribution headers OpenRouter uses for app identification/rankings.
    # Hoisted here — previously duplicated verbatim in every request call.
    _EXTRA_HEADERS = {
        "HTTP-Referer": "https://huggingface.co/spaces/MCP-1st-Birthday/pipV1",
        "X-Title": "Pip Emotional Companion",
    }

    def __init__(self, api_key: Optional[str] = None):
        """Create the client.

        Args:
            api_key: OpenRouter API key. Falls back to the
                ``OPENROUTER_API_KEY`` environment variable; when neither is
                set the client is disabled and all methods return fallbacks.
        """
        api_key = api_key or os.getenv("OPENROUTER_API_KEY")
        self.available = bool(api_key)

        if self.available:
            self.client = AsyncOpenAI(
                api_key=api_key,
                base_url="https://openrouter.ai/api/v1"
            )
        else:
            self.client = None
            print("⚠️ OpenRouter: No API key found - service disabled")

        self.model = self.DEFAULT_MODEL

    async def _chat(self, messages: list, max_tokens: int, stream: bool = False):
        """Send one chat-completion request with the standard headers.

        Centralizes the ``model`` / ``extra_headers`` boilerplate that was
        repeated in every public method. Exceptions propagate to callers,
        which each apply their own fallback.
        """
        return await self.client.chat.completions.create(
            model=self.model,
            max_tokens=max_tokens,
            stream=stream,
            messages=messages,
            extra_headers=self._EXTRA_HEADERS,
        )

    async def quick_acknowledge(self, user_input: str, system_prompt: str) -> str:
        """Generate a quick acknowledgment (capped at 50 tokens)."""
        fallback = "I hear you..."
        if not self.available or not self.client:
            return fallback

        try:
            response = await self._chat(
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input}
                ],
                max_tokens=50,
            )
            # content may be None on an empty completion; never return None.
            return response.choices[0].message.content or fallback
        except Exception as e:
            print(f"OpenRouter quick_acknowledge error: {e}")
            return fallback

    async def analyze_emotion_fast(self, user_input: str, system_prompt: str) -> dict:
        """Quick emotion analysis.

        Returns a dict with keys ``primary_emotions``, ``intensity``,
        ``pip_expression`` and ``intervention_needed``. Falls back to a
        neutral reading on any failure (no key, network error, bad JSON).
        """
        default_response = {
            "primary_emotions": ["neutral"],
            "intensity": 5,
            "pip_expression": "neutral",
            "intervention_needed": False
        }

        if not self.available or not self.client:
            return default_response

        try:
            response = await self._chat(
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_input}
                ],
                max_tokens=256,
            )

            content = response.choices[0].message.content
            # Guard: empty/None content would raise on the `in` checks below.
            if not content:
                return default_response
            # Models often wrap JSON in markdown fences; strip them first.
            if "```json" in content:
                content = content.split("```json")[1].split("```")[0]
            elif "```" in content:
                content = content.split("```")[1].split("```")[0]
            return json.loads(content.strip())
        except Exception as e:
            print(f"OpenRouter analyze_emotion error: {e}")
            return default_response

    async def generate_response_stream(
        self,
        user_input: str,
        emotion_state: dict,
        system_prompt: str
    ) -> AsyncGenerator[str, None]:
        """Generate conversational response with streaming.

        Yields text deltas as they arrive; yields a single canned sentence
        when the service is unavailable or the request fails mid-stream.
        """
        if not self.available or not self.client:
            yield "I'm here with you. Sometimes words take a moment to find..."
            return

        context = f"""
User's emotions: {emotion_state.get('primary_emotions', [])}
Intensity: {emotion_state.get('intensity', 5)}/10

User said: {user_input}
"""

        try:
            stream = await self._chat(
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context}
                ],
                max_tokens=512,
                stream=True,
            )

            async for chunk in stream:
                # Streams can include keep-alive chunks with no choices;
                # guard before indexing to avoid an IndexError mid-stream.
                if chunk.choices and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            print(f"OpenRouter generate_response_stream error: {e}")
            yield "I'm here with you. Let me gather my thoughts..."

    async def enhance_prompt(
        self,
        user_input: str,
        emotion_state: dict,
        mode: str,
        system_prompt: str
    ) -> str:
        """Transform user context into a detailed image prompt.

        Args:
            user_input: Raw user utterance.
            emotion_state: Emotion-analysis dict (``primary_emotions``,
                ``intensity``).
            mode: Current app mode, interpolated into the model context.
            system_prompt: Instructions for the prompt-writing model.

        Returns:
            The model's prompt text, or a generic calming-scene prompt on
            any failure.
        """
        emotions = emotion_state.get('primary_emotions', ['peaceful'])
        fallback = f"A beautiful, calming scene representing {emotions[0] if emotions else 'peace'}, soft colors, dreamy atmosphere"

        if not self.available or not self.client:
            return fallback

        context = f"""
User said: "{user_input}"

Detected emotions: {emotion_state.get('primary_emotions', [])}
Emotional intensity: {emotion_state.get('intensity', 5)}/10
Current mode: {mode}

Generate a vivid, specific image prompt based on THIS user's context.
"""

        try:
            response = await self._chat(
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": context}
                ],
                max_tokens=300,
            )
            return response.choices[0].message.content or fallback
        except Exception as e:
            print(f"OpenRouter enhance_prompt error: {e}")
            return fallback

    async def generate_text(self, prompt: str) -> str:
        """Generate free-form text for various purposes.

        Returns an empty string when the service is disabled or the
        request fails.
        """
        if not self.available or not self.client:
            return ""

        try:
            response = await self._chat(
                messages=[
                    {"role": "user", "content": prompt}
                ],
                max_tokens=512,
            )
            return response.choices[0].message.content or ""
        except Exception as e:
            print(f"OpenRouter generate_text error: {e}")
            return ""