Bendt
2025-12-29 16:40:40 -05:00
parent 2f002081e5
commit 8a121d7fec
6 changed files with 205 additions and 128 deletions

BIN
.coverage Normal file

Binary file not shown.

View File

@@ -425,7 +425,7 @@ async def _sync_outlook_data(
# Define scopes for Microsoft Graph API
scopes = [
"https://graph.microsoft.com/Calendars.Read",
"https://graph.microsoft.com/Calendars.ReadWrite",
"https://graph.microsoft.com/Mail.ReadWrite",
]
@@ -721,7 +721,7 @@ def sync(
# This prevents the TUI from appearing to freeze during device flow auth
if not demo:
scopes = [
"https://graph.microsoft.com/Calendars.Read",
"https://graph.microsoft.com/Calendars.ReadWrite",
"https://graph.microsoft.com/Mail.ReadWrite",
]
if not has_valid_cached_token(scopes):
@@ -963,7 +963,7 @@ def interactive(org, vdir, notify, dry_run, demo):
# This prevents the TUI from appearing to freeze during device flow auth
if not demo:
scopes = [
"https://graph.microsoft.com/Calendars.Read",
"https://graph.microsoft.com/Calendars.ReadWrite",
"https://graph.microsoft.com/Mail.ReadWrite",
]
if not has_valid_cached_token(scopes):
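Both entry points use the same pre-flight pattern: check for a cached token before the TUI claims the terminal, so the MSAL device-code prompt stays readable. A standalone sketch of that pattern, using the auth helpers imported elsewhere in this commit (the print fallback is illustrative, not the CLI's actual wording):

# Sketch only: pre-flight auth check before launching the TUI.
from src.services.microsoft_graph.auth import get_access_token, has_valid_cached_token

SCOPES = [
    "https://graph.microsoft.com/Calendars.ReadWrite",
    "https://graph.microsoft.com/Mail.ReadWrite",
]

def ensure_authenticated():
    """Return auth headers, prompting in the plain terminal if needed."""
    if not has_valid_cached_token(SCOPES):
        print("Sign-in required; follow the device-code prompt below.")
    # get_access_token() is assumed to serve from the cache when possible
    # and fall back to the interactive device flow otherwise.
    _, headers = get_access_token(SCOPES)
    return headers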

View File

@@ -1103,7 +1103,7 @@ async def run_dashboard_sync(
# Get auth token
scopes = [
"https://graph.microsoft.com/Calendars.Read",
"https://graph.microsoft.com/Calendars.ReadWrite",
"https://graph.microsoft.com/Mail.ReadWrite",
]
access_token, headers = get_access_token(scopes)

View File

@@ -4,144 +4,162 @@ Allows responding to calendar invites directly from email.
"""
import asyncio
import aiohttp
import logging
import re
import os
from typing import Optional, Tuple
logger = logging.getLogger(__name__)
# Set up dedicated RSVP logger
rsvp_logger = logging.getLogger("calendar_rsvp")
rsvp_logger.setLevel(logging.DEBUG)
# Create file handler if not already set up
if not rsvp_logger.handlers:
log_dir = os.path.expanduser("~/.local/share/luk")
os.makedirs(log_dir, exist_ok=True)
log_file = os.path.join(log_dir, "calendar_rsvp.log")
handler = logging.FileHandler(log_file)
handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
rsvp_logger.addHandler(handler)
# Timeout for API calls (seconds)
API_TIMEOUT = 15
# Required scopes for calendar operations
CALENDAR_SCOPES = [
"https://graph.microsoft.com/Calendars.ReadWrite",
]
def detect_calendar_invite(message_content: str, headers: dict) -> Optional[str]:
"""Detect if a message is a calendar invite and extract event ID if possible.
def _get_auth_headers_sync() -> Optional[dict]:
"""Get auth headers synchronously using cached token only.
Calendar invites from Microsoft/Outlook typically have:
- Content-Type: text/calendar or multipart with text/calendar part
- Meeting ID patterns in the content
- Teams/Outlook meeting links
Args:
message_content: The message body content
headers: Message headers
Returns:
Event identifier hint if detected, None otherwise
Returns None if no valid cached token exists (to avoid blocking on device flow).
"""
# Check for calendar-related content patterns
calendar_patterns = [
r"Microsoft Teams meeting",
r"Join the meeting",
r"Meeting ID:",
r"teams\.microsoft\.com/l/meetup-join",
r"Accept\s+Tentative\s+Decline",
r"VEVENT",
r"BEGIN:VCALENDAR",
]
from src.services.microsoft_graph.auth import (
has_valid_cached_token,
get_access_token,
)
content_lower = message_content.lower() if message_content else ""
rsvp_logger.debug("Checking for valid cached token...")
for pattern in calendar_patterns:
if re.search(pattern, message_content or "", re.IGNORECASE):
return "calendar_invite_detected"
if not has_valid_cached_token(CALENDAR_SCOPES):
rsvp_logger.warning("No valid cached token found")
return None
return None
try:
rsvp_logger.debug("Getting access token from cache...")
_, headers = get_access_token(CALENDAR_SCOPES)
rsvp_logger.debug("Got auth headers successfully")
return headers
except Exception as e:
rsvp_logger.error(f"Failed to get auth headers: {e}")
return None
async def find_event_by_subject(
subject: str, organizer_email: Optional[str] = None
) -> Optional[dict]:
"""Find a calendar event by subject and optionally organizer.
async def find_event_by_uid(uid: str, headers: dict) -> Optional[dict]:
"""Find a calendar event by its iCalUId.
Args:
subject: Event subject to search for
organizer_email: Optional organizer email to filter by
uid: The iCalendar UID from the ICS file
headers: Auth headers for MS Graph API
Returns:
Event dict if found, None otherwise
"""
rsvp_logger.info(f"Looking up event by UID: {uid}")
try:
from src.services.microsoft_graph.auth import get_access_token
from src.services.microsoft_graph.client import fetch_with_aiohttp
from datetime import datetime, timedelta
scopes = ["https://graph.microsoft.com/Calendars.Read"]
_, headers = get_access_token(scopes)
# Search for events in the next 60 days with matching subject
start_date = datetime.now()
end_date = start_date + timedelta(days=60)
start_str = start_date.strftime("%Y-%m-%dT00:00:00Z")
end_str = end_date.strftime("%Y-%m-%dT23:59:59Z")
# URL encode the subject for the filter
subject_escaped = subject.replace("'", "''")
# Search by iCalUId - this is the unique identifier that should match
uid_escaped = uid.replace("'", "''")
url = (
f"https://graph.microsoft.com/v1.0/me/calendarView?"
f"startDateTime={start_str}&endDateTime={end_str}&"
f"$filter=contains(subject,'{subject_escaped}')&"
f"$select=id,subject,organizer,start,end,responseStatus&"
f"$top=10"
f"https://graph.microsoft.com/v1.0/me/events?"
f"$filter=iCalUId eq '{uid_escaped}'&"
f"$select=id,subject,organizer,start,end,responseStatus,iCalUId"
)
response = await fetch_with_aiohttp(url, headers)
if not response:
return None
events = response.get("value", [])
rsvp_logger.debug(f"Request URL: {url}")
if events:
# If organizer email provided, try to match
if organizer_email:
for event in events:
org_email = (
event.get("organizer", {})
.get("emailAddress", {})
.get("address", "")
# Use aiohttp directly with timeout
timeout = aiohttp.ClientTimeout(total=API_TIMEOUT)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url, headers=headers) as response:
rsvp_logger.debug(f"Response status: {response.status}")
if response.status != 200:
error_text = await response.text()
rsvp_logger.error(f"API error: {response.status} - {error_text}")
return None
data = await response.json()
events = data.get("value", [])
rsvp_logger.info(f"Found {len(events)} events matching UID")
if events:
event = events[0]
rsvp_logger.debug(
f"Event found: {event.get('subject')} - ID: {event.get('id')}"
)
if organizer_email.lower() in org_email.lower():
return event
return event
# Return first match
return events[0]
return None
except asyncio.TimeoutError:
rsvp_logger.error(f"Timeout after {API_TIMEOUT}s looking up event by UID")
return None
except Exception as e:
logger.error(f"Error finding event by subject: {e}")
rsvp_logger.error(f"Error finding event by UID: {e}", exc_info=True)
return None
async def respond_to_calendar_invite(event_id: str, response: str) -> Tuple[bool, str]:
async def respond_to_calendar_invite(
event_id: str, response: str, headers: dict
) -> Tuple[bool, str]:
"""Respond to a calendar invite.
Args:
event_id: Microsoft Graph event ID
response: Response type - 'accept', 'tentativelyAccept', or 'decline'
headers: Auth headers for MS Graph API
Returns:
Tuple of (success, message)
"""
rsvp_logger.info(f"Responding to event {event_id} with: {response}")
try:
from src.services.microsoft_graph.auth import get_access_token
from src.services.microsoft_graph.calendar import respond_to_invite
response_url = (
f"https://graph.microsoft.com/v1.0/me/events/{event_id}/{response}"
)
rsvp_logger.debug(f"Response URL: {response_url}")
scopes = ["https://graph.microsoft.com/Calendars.ReadWrite"]
_, headers = get_access_token(scopes)
# Use aiohttp directly with timeout
timeout = aiohttp.ClientTimeout(total=API_TIMEOUT)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.post(response_url, headers=headers, json={}) as resp:
rsvp_logger.debug(f"Response status: {resp.status}")
success = await respond_to_invite(headers, event_id, response)
if success:
response_text = {
"accept": "accepted",
"tentativelyAccept": "tentatively accepted",
"decline": "declined",
}.get(response, response)
return True, f"Successfully {response_text} the meeting"
else:
return False, "Failed to respond to the meeting invite"
if resp.status in (200, 202):
response_text = {
"accept": "accepted",
"tentativelyAccept": "tentatively accepted",
"decline": "declined",
}.get(response, response)
rsvp_logger.info(f"Successfully {response_text} the meeting")
return True, f"Successfully {response_text} the meeting"
else:
error_text = await resp.text()
rsvp_logger.error(
f"Failed to respond: {resp.status} - {error_text}"
)
return False, f"Failed to respond: {resp.status}"
except asyncio.TimeoutError:
rsvp_logger.error(f"Timeout after {API_TIMEOUT}s responding to invite")
return False, f"Request timed out after {API_TIMEOUT}s"
except Exception as e:
logger.error(f"Error responding to invite: {e}")
rsvp_logger.error(f"Error responding to invite: {e}", exc_info=True)
return False, f"Error: {str(e)}"
@@ -162,70 +180,97 @@ def action_tentative_invite(app):
def _respond_to_current_invite(app, response: str):
"""Helper to respond to the current message's calendar invite."""
from src.mail.widgets.ContentContainer import ContentContainer
rsvp_logger.info(f"Starting invite response: {response}")
current_message_id = app.current_message_id
if not current_message_id:
rsvp_logger.warning("No message selected")
app.notify("No message selected", severity="warning")
return
# Get message metadata
metadata = app.message_store.get_metadata(current_message_id)
if not metadata:
app.notify("Could not load message metadata", severity="error")
return
subject = metadata.get("subject", "")
from_addr = metadata.get("from", {}).get("addr", "")
if not subject:
# Get auth headers FIRST (synchronously, before spawning worker)
# This uses cached token only - won't block on device flow
headers = _get_auth_headers_sync()
if not headers:
rsvp_logger.error("No valid auth token - user needs to run luk sync first")
app.notify(
"No subject found - cannot match to calendar event", severity="warning"
"Not authenticated. Run 'luk sync' first to login.", severity="error"
)
return
# Run the async response in a worker
# Get the parsed calendar event from ContentContainer
# This has the UID from the ICS which we can use for direct lookup
calendar_event = None
try:
content_container = app.query_one(ContentContainer)
calendar_event = content_container.current_calendar_event
except Exception as e:
rsvp_logger.error(f"Failed to get ContentContainer: {e}")
if not calendar_event:
rsvp_logger.warning("No calendar event data found in current message")
app.notify("No calendar invite found in this message", severity="warning")
return
event_uid = calendar_event.uid
event_summary = calendar_event.summary or "(no subject)"
rsvp_logger.info(f"Calendar event: {event_summary}, UID: {event_uid}")
if not event_uid:
rsvp_logger.warning("No UID found in calendar event")
app.notify("Calendar invite missing UID - cannot respond", severity="warning")
return
app.run_worker(
_async_respond_to_invite(app, subject, from_addr, response),
_async_respond_to_invite(app, event_uid, event_summary, response, headers),
exclusive=True,
name="respond_invite",
)
async def _async_respond_to_invite(
app, subject: str, organizer_email: str, response: str
app, event_uid: str, event_summary: str, response: str, headers: dict
):
"""Async worker to find and respond to calendar invite."""
# First, find the event
app.notify(f"Searching for calendar event: {subject[:40]}...")
"""Async worker to find and respond to calendar invite using UID."""
rsvp_logger.info(f"Async response started for UID: {event_uid}")
event = await find_event_by_subject(subject, organizer_email)
app.notify(f"Looking up event...")
if not event:
# Find event by UID (direct lookup, no search needed)
graph_event = await find_event_by_uid(event_uid, headers)
if not graph_event:
rsvp_logger.warning(f"Event not found for UID: {event_uid}")
app.notify(
f"Could not find calendar event matching: {subject[:40]}",
f"Event not found in calendar: {event_summary[:40]}",
severity="warning",
)
return
event_id = event.get("id")
event_id = graph_event.get("id")
if not event_id:
app.notify(
"Could not get event ID from calendar",
severity="error",
)
rsvp_logger.error("No event ID in response")
app.notify("Could not get event ID from calendar", severity="error")
return
current_response = event.get("responseStatus", {}).get("response", "")
current_response = graph_event.get("responseStatus", {}).get("response", "")
rsvp_logger.debug(f"Current response status: {current_response}")
# Check if already responded
if current_response == "accepted" and response == "accept":
rsvp_logger.info("Already accepted")
app.notify("Already accepted this invite", severity="information")
return
elif current_response == "declined" and response == "decline":
rsvp_logger.info("Already declined")
app.notify("Already declined this invite", severity="information")
return
# Respond to the invite
success, message = await respond_to_calendar_invite(event_id, response)
success, message = await respond_to_calendar_invite(event_id, response, headers)
severity = "information" if success else "error"
app.notify(message, severity=severity)
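End to end, the new helpers can be driven outside the TUI roughly as follows. This is a usage sketch, assuming the module is importable as calendar_rsvp (the file path is not shown in this diff) and using a placeholder UID:

# Rough usage sketch of the UID-based RSVP flow introduced above.
import asyncio
from calendar_rsvp import (
    _get_auth_headers_sync,
    find_event_by_uid,
    respond_to_calendar_invite,
)

async def rsvp_by_uid(uid: str, response: str = "accept") -> None:
    # Cached token only; returns None instead of blocking on a device flow.
    headers = _get_auth_headers_sync()
    if not headers:
        print("Not authenticated. Run 'luk sync' first.")
        return
    event = await find_event_by_uid(uid, headers)
    if not event:
        print("No matching calendar event found.")
        return
    # response is one of 'accept', 'tentativelyAccept', 'decline'.
    ok, message = await respond_to_calendar_invite(event["id"], response, headers)
    print(message)

# asyncio.run(rsvp_by_uid("placeholder-uid@example.com", "decline"))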

View File

@@ -405,7 +405,6 @@ class ContentContainer(Vertical):
if is_calendar and raw_success and raw_content:
calendar_event = parse_calendar_from_raw_message(raw_content)
if calendar_event:
self.current_calendar_event = calendar_event
self._show_calendar_panel(calendar_event)
else:
self._hide_calendar_panel()
@@ -777,6 +776,9 @@ class ContentContainer(Vertical):
# Remove existing panel if any
self._hide_calendar_panel()
# Store the calendar event for RSVP actions
self.current_calendar_event = event
# Create and mount new panel at the beginning of the scroll container
# Don't use a fixed ID to avoid DuplicateIds errors when panels are
# removed asynchronously
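parse_calendar_from_raw_message itself is outside this diff; all the RSVP path needs from it is the UID (and summary) of the text/calendar part. A minimal sketch of that extraction, assuming the raw message is an RFC 822 string and ignoring ICS line folding — the repo's real parser may differ:

# Hypothetical sketch of UID extraction from a raw MIME message.
import email
import re
from email import policy
from typing import Optional

def extract_ical_uid(raw_message: str) -> Optional[str]:
    """Return the UID of the first text/calendar part, or None."""
    msg = email.message_from_string(raw_message, policy=policy.default)
    for part in msg.walk():
        if part.get_content_type() != "text/calendar":
            continue
        ics = part.get_content()
        match = re.search(r"^UID:(.+)$", ics, re.MULTILINE)
        if match:
            return match.group(1).strip()
    return None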

View File

@@ -19,12 +19,42 @@ logging.getLogger("asyncio").setLevel(logging.ERROR)
logging.getLogger("azure").setLevel(logging.ERROR)
logging.getLogger("azure.core").setLevel(logging.ERROR)
# Token cache location - use consistent path regardless of working directory
TOKEN_CACHE_DIR = os.path.expanduser("~/.local/share/luk")
TOKEN_CACHE_FILE = os.path.join(TOKEN_CACHE_DIR, "token_cache.bin")
# Legacy cache file (in current working directory) - for migration
LEGACY_CACHE_FILE = "token_cache.bin"
def ensure_directory_exists(path):
if not os.path.exists(path):
os.makedirs(path)
def _get_cache_file():
"""Get the token cache file path, migrating from legacy location if needed."""
ensure_directory_exists(TOKEN_CACHE_DIR)
# If new location exists, use it
if os.path.exists(TOKEN_CACHE_FILE):
return TOKEN_CACHE_FILE
# If legacy location exists, migrate it
if os.path.exists(LEGACY_CACHE_FILE):
try:
import shutil
shutil.copy2(LEGACY_CACHE_FILE, TOKEN_CACHE_FILE)
os.remove(LEGACY_CACHE_FILE)
except Exception:
pass # If migration fails, just use new location
return TOKEN_CACHE_FILE
# Default to new location
return TOKEN_CACHE_FILE
def has_valid_cached_token(scopes=None):
"""
Check if we have a valid cached token (without triggering auth flow).
@@ -45,7 +75,7 @@ def has_valid_cached_token(scopes=None):
return False
cache = msal.SerializableTokenCache()
cache_file = "token_cache.bin"
cache_file = _get_cache_file()
if not os.path.exists(cache_file):
return False
@@ -92,9 +122,9 @@ def get_access_token(scopes):
"Please set the AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables."
)
# Token cache
# Token cache - use consistent location
cache = msal.SerializableTokenCache()
cache_file = "token_cache.bin"
cache_file = _get_cache_file()
if os.path.exists(cache_file):
cache.deserialize(open(cache_file, "r").read())
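For context, a cache-only check like has_valid_cached_token() typically boils down to MSAL's acquire_token_silent over the serialized cache. A condensed sketch under the same AZURE_CLIENT_ID / AZURE_TENANT_ID assumptions as get_access_token — the repo's actual implementation may differ:

# Sketch of a cache-only token check with MSAL; details are assumptions.
import os
import msal

def check_cached_token(scopes, cache_file=TOKEN_CACHE_FILE):
    """Return True if a token can be obtained without any interactive prompt."""
    if not os.path.exists(cache_file):
        return False
    cache = msal.SerializableTokenCache()
    with open(cache_file) as fh:
        cache.deserialize(fh.read())
    app = msal.PublicClientApplication(
        os.environ["AZURE_CLIENT_ID"],
        authority=f"https://login.microsoftonline.com/{os.environ['AZURE_TENANT_ID']}",
        token_cache=cache,
    )
    accounts = app.get_accounts()
    if not accounts:
        return False
    # acquire_token_silent uses only the cache (plus a refresh-token exchange);
    # it never starts a device-code or browser flow.
    result = app.acquire_token_silent(scopes, account=accounts[0])
    return bool(result and "access_token" in result)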