173 lines
4.6 KiB
Python
173 lines
4.6 KiB
Python
|
|
"""
|
||
|
|
Tikker AI Microservice
|
||
|
|
|
||
|
|
Provides AI-powered analysis of keystroke data using OpenAI API.
|
||
|
|
Handles text analysis, pattern detection, and insights generation.
|
||
|
|
"""
|
||
|
|
|
||
|
|
import json
import logging
import os
from typing import Any, Dict, List, Optional

from fastapi import FastAPI, HTTPException, Query
from pydantic import BaseModel
|
||
|
|
|
||
|
|
# Optional dependency: the service must still start when the `openai`
# package is not installed. `OpenAI` is set to None so the startup code
# below can detect the missing package instead of crashing on import.
try:
    from openai import OpenAI
except ImportError:
    OpenAI = None
|
||
|
|
|
||
|
|
# Configure root logging before any other module code emits records.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(
    title="Tikker AI Service",
    description="AI analysis for keystroke data",
    version="1.0.0"
)

# OpenAI client, initialized at import time. Stays None when the API key
# or the `openai` package is unavailable; endpoints degrade gracefully
# (health reports ai_available=False, /analyze returns 503).
client = None
api_key = os.getenv("OPENAI_API_KEY")

if api_key:
    if OpenAI is None:
        # BUG FIX: previously OpenAI(api_key=...) was called even when the
        # import above failed, raising a misleading "'NoneType' object is
        # not callable" TypeError. Detect the missing package explicitly.
        logger.error("OPENAI_API_KEY is set but the openai package is not installed")
    else:
        try:
            client = OpenAI(api_key=api_key)
        except Exception as e:
            logger.error(f"Failed to initialize OpenAI client: {e}")
else:
    # Make the degraded mode visible in the logs instead of failing silently.
    logger.warning("OPENAI_API_KEY is not set; AI analysis will be unavailable")
|
||
|
|
|
||
|
|
|
||
|
|
class TextAnalysisRequest(BaseModel):
    """Request body for POST /analyze."""

    # Raw text to analyze; rejected with HTTP 400 when empty/whitespace-only.
    text: str
    # Analysis mode: "activity", "productivity", or anything else for a
    # general-purpose analysis (compared case-insensitively).
    analysis_type: str = "general"
|
||
|
|
|
||
|
|
|
||
|
|
class AnalysisResult(BaseModel):
    """Response body for POST /analyze."""

    # Echo of the analyzed input text.
    text: str
    # Lower-cased analysis type that was actually applied.
    analysis_type: str
    # Brief AI-generated summary (falls back to a prefix of the raw model
    # output when the model response is not valid JSON).
    summary: str
    # Key terms or themes extracted by the model.
    keywords: List[str]
    # Only the "activity" prompt asks for sentiment, so this is usually
    # None for other analysis types — depends on model output; not enforced.
    sentiment: Optional[str] = None
    # AI-generated observations/insights.
    insights: List[str]
|
||
|
|
|
||
|
|
|
||
|
|
class HealthResponse(BaseModel):
    """Response body for GET /health."""

    # Always "healthy" when the process is able to answer at all.
    status: str
    # True when the OpenAI client was successfully initialized at startup.
    ai_available: bool
    # Service API version string.
    api_version: str
|
||
|
|
|
||
|
|
|
||
|
|
@app.get("/health", response_model=HealthResponse)
async def health_check() -> HealthResponse:
    """Report process liveness and whether the OpenAI client is configured."""
    ai_ready = client is not None
    return HealthResponse(
        status="healthy",
        ai_available=ai_ready,
        api_version="1.0.0",
    )
|
||
|
|
|
||
|
|
|
||
|
|
def _build_prompt(analysis_type: str, text: str) -> str:
    """Return the LLM prompt for the given (already lower-cased) analysis type."""
    if analysis_type == "activity":
        return f"""Analyze this keystroke activity log and provide:
1. A brief summary (1-2 sentences)
2. Key patterns or observations (3-4 bullet points)
3. Sentiment or work intensity assessment

Text: {text}

Respond in JSON format with keys: summary, keywords (list), insights (list), sentiment"""
    if analysis_type == "productivity":
        return f"""Analyze this text for productivity patterns and provide:
1. Summary of productivity indicators
2. Key terms related to productivity
3. Specific insights about work patterns

Text: {text}

Respond in JSON format with keys: summary, keywords (list), insights (list)"""
    return f"""Provide a general analysis of this text:
1. Brief summary (1-2 sentences)
2. Important keywords or themes
3. Key insights

Text: {text}

Respond in JSON format with keys: summary, keywords (list), insights (list)"""


@app.post("/analyze", response_model=AnalysisResult)
async def analyze_text(request: TextAnalysisRequest) -> AnalysisResult:
    """
    Analyze text using AI.

    Args:
        request: Text analysis request with text and analysis type

    Returns:
        Analysis result with summary, keywords, and insights

    Raises:
        HTTPException: 503 when no OpenAI client is configured, 400 when
            the text is empty, 500 when the upstream call or parsing fails.
    """
    if not client:
        raise HTTPException(
            status_code=503,
            detail="AI service not available - no API key configured"
        )

    if not request.text or len(request.text.strip()) == 0:
        raise HTTPException(status_code=400, detail="Text cannot be empty")

    try:
        analysis_type = request.analysis_type.lower()
        prompt = _build_prompt(analysis_type, request.text)

        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a helpful analyst. Always respond in valid JSON format."},
                {"role": "user", "content": prompt}
            ],
            temperature=0.7,
            max_tokens=500
        )

        # content may be None (e.g. content-filtered response); normalize to
        # a string so the fallback slice below cannot raise TypeError.
        result_text = response.choices[0].message.content or ""

        # BUG FIX: was a bare `except:` (swallowed KeyboardInterrupt etc.);
        # only JSON decode failures should trigger the raw-text fallback.
        try:
            parsed = json.loads(result_text)
        except json.JSONDecodeError:
            parsed = None
        if not isinstance(parsed, dict):
            # Model returned invalid JSON or a non-object (list/scalar);
            # fall back to wrapping the raw text.
            parsed = {
                "summary": result_text[:100],
                "keywords": ["analysis"],
                "insights": [result_text]
            }

        return AnalysisResult(
            text=request.text,
            analysis_type=analysis_type,
            summary=parsed.get("summary", ""),
            keywords=parsed.get("keywords", []),
            sentiment=parsed.get("sentiment"),
            insights=parsed.get("insights", [])
        )

    except Exception as e:
        logger.exception("Analysis error")
        raise HTTPException(status_code=500, detail=f"Analysis failed: {str(e)}") from e
|
||
|
|
|
||
|
|
|
||
|
|
@app.get("/")
async def root() -> Dict[str, Any]:
    """Root endpoint with service information."""
    endpoints = {
        "health": "/health",
        "analyze": "/analyze"
    }
    info: Dict[str, Any] = {
        "name": "Tikker AI Service",
        "version": "1.0.0",
        "status": "running",
        "ai_available": client is not None,
        "endpoints": endpoints,
    }
    return info
|
||
|
|
|
||
|
|
|
||
|
|
# Development entry point: serve on all interfaces, port 8001.
# (In production this is typically launched via an external uvicorn/gunicorn
# command instead.)
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8001)
|