This commit is contained in:
Bendt
2025-12-29 16:40:40 -05:00
parent 2f002081e5
commit 8a121d7fec
6 changed files with 205 additions and 128 deletions

.coverage (new binary file, contents not shown)

View File

@@ -425,7 +425,7 @@ async def _sync_outlook_data(
     # Define scopes for Microsoft Graph API
     scopes = [
-        "https://graph.microsoft.com/Calendars.Read",
+        "https://graph.microsoft.com/Calendars.ReadWrite",
         "https://graph.microsoft.com/Mail.ReadWrite",
     ]
@@ -721,7 +721,7 @@ def sync(
     # This prevents the TUI from appearing to freeze during device flow auth
     if not demo:
         scopes = [
-            "https://graph.microsoft.com/Calendars.Read",
+            "https://graph.microsoft.com/Calendars.ReadWrite",
             "https://graph.microsoft.com/Mail.ReadWrite",
         ]
         if not has_valid_cached_token(scopes):
@@ -963,7 +963,7 @@ def interactive(org, vdir, notify, dry_run, demo):
     # This prevents the TUI from appearing to freeze during device flow auth
     if not demo:
         scopes = [
-            "https://graph.microsoft.com/Calendars.Read",
+            "https://graph.microsoft.com/Calendars.ReadWrite",
             "https://graph.microsoft.com/Mail.ReadWrite",
         ]
         if not has_valid_cached_token(scopes):

View File

@@ -1103,7 +1103,7 @@ async def run_dashboard_sync(
     # Get auth token
     scopes = [
-        "https://graph.microsoft.com/Calendars.Read",
+        "https://graph.microsoft.com/Calendars.ReadWrite",
         "https://graph.microsoft.com/Mail.ReadWrite",
     ]
     access_token, headers = get_access_token(scopes)
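
Note: every place a token is requested now asks for Calendars.ReadWrite instead of Calendars.Read, because accepting or declining an invite writes back to the calendar. A minimal sketch of the resulting call pattern; has_valid_cached_token and get_access_token are the repo's own helpers (visible later in this diff), while the wrapper itself is only illustrative:

    # Illustrative wrapper, not part of this commit.
    from src.services.microsoft_graph.auth import get_access_token, has_valid_cached_token

    SCOPES = [
        "https://graph.microsoft.com/Calendars.ReadWrite",  # read + respond to events
        "https://graph.microsoft.com/Mail.ReadWrite",
    ]

    def get_graph_headers() -> dict:
        # Avoid triggering the device-flow prompt from inside the TUI.
        if not has_valid_cached_token(SCOPES):
            raise RuntimeError("Run 'luk sync' first to complete device-flow login.")
        _, headers = get_access_token(SCOPES)
        return headers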

View File

@@ -4,144 +4,162 @@ Allows responding to calendar invites directly from email.
""" """
import asyncio import asyncio
import aiohttp
import logging import logging
import re import os
from typing import Optional, Tuple from typing import Optional, Tuple
logger = logging.getLogger(__name__) # Set up dedicated RSVP logger
rsvp_logger = logging.getLogger("calendar_rsvp")
rsvp_logger.setLevel(logging.DEBUG)
# Create file handler if not already set up
if not rsvp_logger.handlers:
log_dir = os.path.expanduser("~/.local/share/luk")
os.makedirs(log_dir, exist_ok=True)
log_file = os.path.join(log_dir, "calendar_rsvp.log")
handler = logging.FileHandler(log_file)
handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
rsvp_logger.addHandler(handler)
# Timeout for API calls (seconds)
API_TIMEOUT = 15
# Required scopes for calendar operations
CALENDAR_SCOPES = [
"https://graph.microsoft.com/Calendars.ReadWrite",
]
def detect_calendar_invite(message_content: str, headers: dict) -> Optional[str]: def _get_auth_headers_sync() -> Optional[dict]:
"""Detect if a message is a calendar invite and extract event ID if possible. """Get auth headers synchronously using cached token only.
Calendar invites from Microsoft/Outlook typically have: Returns None if no valid cached token exists (to avoid blocking on device flow).
- Content-Type: text/calendar or multipart with text/calendar part
- Meeting ID patterns in the content
- Teams/Outlook meeting links
Args:
message_content: The message body content
headers: Message headers
Returns:
Event identifier hint if detected, None otherwise
""" """
# Check for calendar-related content patterns from src.services.microsoft_graph.auth import (
calendar_patterns = [ has_valid_cached_token,
r"Microsoft Teams meeting", get_access_token,
r"Join the meeting", )
r"Meeting ID:",
r"teams\.microsoft\.com/l/meetup-join",
r"Accept\s+Tentative\s+Decline",
r"VEVENT",
r"BEGIN:VCALENDAR",
]
content_lower = message_content.lower() if message_content else "" rsvp_logger.debug("Checking for valid cached token...")
for pattern in calendar_patterns: if not has_valid_cached_token(CALENDAR_SCOPES):
if re.search(pattern, message_content or "", re.IGNORECASE): rsvp_logger.warning("No valid cached token found")
return "calendar_invite_detected" return None
return None try:
rsvp_logger.debug("Getting access token from cache...")
_, headers = get_access_token(CALENDAR_SCOPES)
rsvp_logger.debug("Got auth headers successfully")
return headers
except Exception as e:
rsvp_logger.error(f"Failed to get auth headers: {e}")
return None
async def find_event_by_subject( async def find_event_by_uid(uid: str, headers: dict) -> Optional[dict]:
subject: str, organizer_email: Optional[str] = None """Find a calendar event by its iCalUId.
) -> Optional[dict]:
"""Find a calendar event by subject and optionally organizer.
Args: Args:
subject: Event subject to search for uid: The iCalendar UID from the ICS file
organizer_email: Optional organizer email to filter by headers: Auth headers for MS Graph API
Returns: Returns:
Event dict if found, None otherwise Event dict if found, None otherwise
""" """
rsvp_logger.info(f"Looking up event by UID: {uid}")
try: try:
from src.services.microsoft_graph.auth import get_access_token # Search by iCalUId - this is the unique identifier that should match
from src.services.microsoft_graph.client import fetch_with_aiohttp uid_escaped = uid.replace("'", "''")
from datetime import datetime, timedelta
scopes = ["https://graph.microsoft.com/Calendars.Read"]
_, headers = get_access_token(scopes)
# Search for events in the next 60 days with matching subject
start_date = datetime.now()
end_date = start_date + timedelta(days=60)
start_str = start_date.strftime("%Y-%m-%dT00:00:00Z")
end_str = end_date.strftime("%Y-%m-%dT23:59:59Z")
# URL encode the subject for the filter
subject_escaped = subject.replace("'", "''")
url = ( url = (
f"https://graph.microsoft.com/v1.0/me/calendarView?" f"https://graph.microsoft.com/v1.0/me/events?"
f"startDateTime={start_str}&endDateTime={end_str}&" f"$filter=iCalUId eq '{uid_escaped}'&"
f"$filter=contains(subject,'{subject_escaped}')&" f"$select=id,subject,organizer,start,end,responseStatus,iCalUId"
f"$select=id,subject,organizer,start,end,responseStatus&"
f"$top=10"
) )
response = await fetch_with_aiohttp(url, headers) rsvp_logger.debug(f"Request URL: {url}")
if not response:
return None
events = response.get("value", [])
if events: # Use aiohttp directly with timeout
# If organizer email provided, try to match timeout = aiohttp.ClientTimeout(total=API_TIMEOUT)
if organizer_email: async with aiohttp.ClientSession(timeout=timeout) as session:
for event in events: async with session.get(url, headers=headers) as response:
org_email = ( rsvp_logger.debug(f"Response status: {response.status}")
event.get("organizer", {})
.get("emailAddress", {}) if response.status != 200:
.get("address", "") error_text = await response.text()
rsvp_logger.error(f"API error: {response.status} - {error_text}")
return None
data = await response.json()
events = data.get("value", [])
rsvp_logger.info(f"Found {len(events)} events matching UID")
if events:
event = events[0]
rsvp_logger.debug(
f"Event found: {event.get('subject')} - ID: {event.get('id')}"
) )
if organizer_email.lower() in org_email.lower(): return event
return event
# Return first match return None
return events[0]
except asyncio.TimeoutError:
rsvp_logger.error(f"Timeout after {API_TIMEOUT}s looking up event by UID")
return None return None
except Exception as e: except Exception as e:
logger.error(f"Error finding event by subject: {e}") rsvp_logger.error(f"Error finding event by UID: {e}", exc_info=True)
return None return None
async def respond_to_calendar_invite(event_id: str, response: str) -> Tuple[bool, str]: async def respond_to_calendar_invite(
event_id: str, response: str, headers: dict
) -> Tuple[bool, str]:
"""Respond to a calendar invite. """Respond to a calendar invite.
Args: Args:
event_id: Microsoft Graph event ID event_id: Microsoft Graph event ID
response: Response type - 'accept', 'tentativelyAccept', or 'decline' response: Response type - 'accept', 'tentativelyAccept', or 'decline'
headers: Auth headers for MS Graph API
Returns: Returns:
Tuple of (success, message) Tuple of (success, message)
""" """
rsvp_logger.info(f"Responding to event {event_id} with: {response}")
try: try:
from src.services.microsoft_graph.auth import get_access_token response_url = (
from src.services.microsoft_graph.calendar import respond_to_invite f"https://graph.microsoft.com/v1.0/me/events/{event_id}/{response}"
)
rsvp_logger.debug(f"Response URL: {response_url}")
scopes = ["https://graph.microsoft.com/Calendars.ReadWrite"] # Use aiohttp directly with timeout
_, headers = get_access_token(scopes) timeout = aiohttp.ClientTimeout(total=API_TIMEOUT)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.post(response_url, headers=headers, json={}) as resp:
rsvp_logger.debug(f"Response status: {resp.status}")
success = await respond_to_invite(headers, event_id, response) if resp.status in (200, 202):
response_text = {
if success: "accept": "accepted",
response_text = { "tentativelyAccept": "tentatively accepted",
"accept": "accepted", "decline": "declined",
"tentativelyAccept": "tentatively accepted", }.get(response, response)
"decline": "declined", rsvp_logger.info(f"Successfully {response_text} the meeting")
}.get(response, response) return True, f"Successfully {response_text} the meeting"
return True, f"Successfully {response_text} the meeting" else:
else: error_text = await resp.text()
return False, "Failed to respond to the meeting invite" rsvp_logger.error(
f"Failed to respond: {resp.status} - {error_text}"
)
return False, f"Failed to respond: {resp.status}"
except asyncio.TimeoutError:
rsvp_logger.error(f"Timeout after {API_TIMEOUT}s responding to invite")
return False, f"Request timed out after {API_TIMEOUT}s"
except Exception as e: except Exception as e:
logger.error(f"Error responding to invite: {e}") rsvp_logger.error(f"Error responding to invite: {e}", exc_info=True)
return False, f"Error: {str(e)}" return False, f"Error: {str(e)}"
@@ -162,70 +180,97 @@ def action_tentative_invite(app):
 def _respond_to_current_invite(app, response: str):
     """Helper to respond to the current message's calendar invite."""
+    from src.mail.widgets.ContentContainer import ContentContainer
+
+    rsvp_logger.info(f"Starting invite response: {response}")
+
     current_message_id = app.current_message_id
     if not current_message_id:
+        rsvp_logger.warning("No message selected")
         app.notify("No message selected", severity="warning")
         return
 
-    # Get message metadata
-    metadata = app.message_store.get_metadata(current_message_id)
-    if not metadata:
-        app.notify("Could not load message metadata", severity="error")
-        return
-
-    subject = metadata.get("subject", "")
-    from_addr = metadata.get("from", {}).get("addr", "")
-
-    if not subject:
+    # Get auth headers FIRST (synchronously, before spawning worker)
+    # This uses cached token only - won't block on device flow
+    headers = _get_auth_headers_sync()
+    if not headers:
+        rsvp_logger.error("No valid auth token - user needs to run luk sync first")
         app.notify(
-            "No subject found - cannot match to calendar event", severity="warning"
+            "Not authenticated. Run 'luk sync' first to login.", severity="error"
         )
         return
 
-    # Run the async response in a worker
+    # Get the parsed calendar event from ContentContainer
+    # This has the UID from the ICS which we can use for direct lookup
+    calendar_event = None
+    try:
+        content_container = app.query_one(ContentContainer)
+        calendar_event = content_container.current_calendar_event
+    except Exception as e:
+        rsvp_logger.error(f"Failed to get ContentContainer: {e}")
+
+    if not calendar_event:
+        rsvp_logger.warning("No calendar event data found in current message")
+        app.notify("No calendar invite found in this message", severity="warning")
+        return
+
+    event_uid = calendar_event.uid
+    event_summary = calendar_event.summary or "(no subject)"
+    rsvp_logger.info(f"Calendar event: {event_summary}, UID: {event_uid}")
+
+    if not event_uid:
+        rsvp_logger.warning("No UID found in calendar event")
+        app.notify("Calendar invite missing UID - cannot respond", severity="warning")
+        return
+
     app.run_worker(
-        _async_respond_to_invite(app, subject, from_addr, response),
+        _async_respond_to_invite(app, event_uid, event_summary, response, headers),
         exclusive=True,
         name="respond_invite",
     )
 
 
 async def _async_respond_to_invite(
-    app, subject: str, organizer_email: str, response: str
+    app, event_uid: str, event_summary: str, response: str, headers: dict
 ):
-    """Async worker to find and respond to calendar invite."""
-    # First, find the event
-    app.notify(f"Searching for calendar event: {subject[:40]}...")
+    """Async worker to find and respond to calendar invite using UID."""
+    rsvp_logger.info(f"Async response started for UID: {event_uid}")
+    app.notify(f"Looking up event...")
 
-    event = await find_event_by_subject(subject, organizer_email)
-
-    if not event:
+    # Find event by UID (direct lookup, no search needed)
+    graph_event = await find_event_by_uid(event_uid, headers)
+
+    if not graph_event:
+        rsvp_logger.warning(f"Event not found for UID: {event_uid}")
         app.notify(
-            f"Could not find calendar event matching: {subject[:40]}",
+            f"Event not found in calendar: {event_summary[:40]}",
             severity="warning",
         )
         return
 
-    event_id = event.get("id")
+    event_id = graph_event.get("id")
     if not event_id:
-        app.notify(
-            "Could not get event ID from calendar",
-            severity="error",
-        )
+        rsvp_logger.error("No event ID in response")
+        app.notify("Could not get event ID from calendar", severity="error")
         return
 
-    current_response = event.get("responseStatus", {}).get("response", "")
+    current_response = graph_event.get("responseStatus", {}).get("response", "")
+    rsvp_logger.debug(f"Current response status: {current_response}")
 
     # Check if already responded
     if current_response == "accepted" and response == "accept":
+        rsvp_logger.info("Already accepted")
         app.notify("Already accepted this invite", severity="information")
         return
     elif current_response == "declined" and response == "decline":
+        rsvp_logger.info("Already declined")
         app.notify("Already declined this invite", severity="information")
         return
 
     # Respond to the invite
-    success, message = await respond_to_calendar_invite(event_id, response)
+    success, message = await respond_to_calendar_invite(event_id, response, headers)
     severity = "information" if success else "error"
     app.notify(message, severity=severity)
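
The commit posts an empty JSON body to the accept/decline/tentativelyAccept endpoints. Per the Microsoft Graph documentation these endpoints also take an optional comment and sendResponse field; a hedged variant of the helper above in case that is ever needed (an assumption, not code from this commit):

    async def respond_with_comment(
        event_id: str, response: str, headers: dict,
        comment: str = "", send_response: bool = True,
    ) -> bool:
        # Same endpoint as respond_to_calendar_invite, with an optional body.
        url = f"https://graph.microsoft.com/v1.0/me/events/{event_id}/{response}"
        payload = {"comment": comment, "sendResponse": send_response}
        timeout = aiohttp.ClientTimeout(total=API_TIMEOUT)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.post(url, headers=headers, json=payload) as resp:
                return resp.status in (200, 202)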

View File

@@ -405,7 +405,6 @@ class ContentContainer(Vertical):
         if is_calendar and raw_success and raw_content:
             calendar_event = parse_calendar_from_raw_message(raw_content)
             if calendar_event:
-                self.current_calendar_event = calendar_event
                 self._show_calendar_panel(calendar_event)
             else:
                 self._hide_calendar_panel()
@@ -777,6 +776,9 @@ class ContentContainer(Vertical):
         # Remove existing panel if any
         self._hide_calendar_panel()
 
+        # Store the calendar event for RSVP actions
+        self.current_calendar_event = event
+
         # Create and mount new panel at the beginning of the scroll container
         # Don't use a fixed ID to avoid DuplicateIds errors when panels are
         # removed asynchronously

View File

@@ -19,12 +19,42 @@ logging.getLogger("asyncio").setLevel(logging.ERROR)
logging.getLogger("azure").setLevel(logging.ERROR) logging.getLogger("azure").setLevel(logging.ERROR)
logging.getLogger("azure.core").setLevel(logging.ERROR) logging.getLogger("azure.core").setLevel(logging.ERROR)
# Token cache location - use consistent path regardless of working directory
TOKEN_CACHE_DIR = os.path.expanduser("~/.local/share/luk")
TOKEN_CACHE_FILE = os.path.join(TOKEN_CACHE_DIR, "token_cache.bin")
# Legacy cache file (in current working directory) - for migration
LEGACY_CACHE_FILE = "token_cache.bin"
def ensure_directory_exists(path): def ensure_directory_exists(path):
if not os.path.exists(path): if not os.path.exists(path):
os.makedirs(path) os.makedirs(path)
def _get_cache_file():
"""Get the token cache file path, migrating from legacy location if needed."""
ensure_directory_exists(TOKEN_CACHE_DIR)
# If new location exists, use it
if os.path.exists(TOKEN_CACHE_FILE):
return TOKEN_CACHE_FILE
# If legacy location exists, migrate it
if os.path.exists(LEGACY_CACHE_FILE):
try:
import shutil
shutil.copy2(LEGACY_CACHE_FILE, TOKEN_CACHE_FILE)
os.remove(LEGACY_CACHE_FILE)
except Exception:
pass # If migration fails, just use new location
return TOKEN_CACHE_FILE
# Default to new location
return TOKEN_CACHE_FILE
def has_valid_cached_token(scopes=None): def has_valid_cached_token(scopes=None):
""" """
Check if we have a valid cached token (without triggering auth flow). Check if we have a valid cached token (without triggering auth flow).
@@ -45,7 +75,7 @@ def has_valid_cached_token(scopes=None):
         return False
 
     cache = msal.SerializableTokenCache()
-    cache_file = "token_cache.bin"
+    cache_file = _get_cache_file()
     if not os.path.exists(cache_file):
         return False
@@ -92,9 +122,9 @@ def get_access_token(scopes):
"Please set the AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables." "Please set the AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables."
) )
# Token cache # Token cache - use consistent location
cache = msal.SerializableTokenCache() cache = msal.SerializableTokenCache()
cache_file = "token_cache.bin" cache_file = _get_cache_file()
if os.path.exists(cache_file): if os.path.exists(cache_file):
cache.deserialize(open(cache_file, "r").read()) cache.deserialize(open(cache_file, "r").read())
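
For completeness, the write-back half of the cache handling is not shown in these hunks; MSAL's SerializableTokenCache exposes has_state_changed and serialize() for that. A sketch of how it would pair with _get_cache_file() (the exact persistence code in this repo is an assumption):

    import msal

    def save_token_cache(cache: msal.SerializableTokenCache, cache_file: str) -> None:
        # Only rewrite the file when MSAL reports the cache changed.
        if cache.has_state_changed:
            with open(cache_file, "w") as f:
                f.write(cache.serialize())

    # Typical pairing:
    #   cache_file = _get_cache_file()
    #   ... acquire a token with a PublicClientApplication built with token_cache=cache ...
    #   save_token_cache(cache, cache_file)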