# Changelog (from the introducing commit):
# - Create InvitesPanel widget showing pending invites from Microsoft Graph
# - Add fetch_pending_invites() and respond_to_invite() API functions
# - Invites load asynchronously in background on app mount
# - Display invite subject, date/time, and organizer
# - Add 'i' keybinding to focus invites panel
# - Style: tentative invites shown in warning color
"""
|
|
Calendar operations for Microsoft Graph API.
|
|
"""
|
|
|
|
import os
|
|
import json
|
|
import re
|
|
import glob
|
|
from datetime import datetime, timedelta
|
|
from dateutil import parser
|
|
|
|
from .client import fetch_with_aiohttp, post_with_aiohttp, delete_with_aiohttp
|
|
|
|
|
|
async def fetch_calendar_events(
    headers, days_back=1, days_forward=6, start_date=None, end_date=None
):
    """
    Fetch calendar events from Microsoft Graph API.

    Args:
        headers (dict): Headers including authentication.
        days_back (int): Number of days to look back.
        days_forward (int): Number of days to look forward.
        start_date (datetime): Optional start date, overrides days_back if provided.
        end_date (datetime): Optional end date, overrides days_forward if provided.

    Returns:
        tuple: (events, total_count) where events is a list of event dictionaries
               and total_count is the total number of events.
    """
    # Calculate date range
    if start_date is None:
        start_date = datetime.now() - timedelta(days=days_back)

    if end_date is None:
        end_date = start_date + timedelta(days=days_forward)

    # Format dates for API
    # NOTE(review): datetime.now() is naive local time but is formatted with a
    # "Z" (UTC) suffix -- confirm callers expect local-time day boundaries.
    start_date_str = start_date.strftime("%Y-%m-%dT00:00:00Z")
    end_date_str = end_date.strftime("%Y-%m-%dT23:59:59Z")

    # Prepare the API query
    calendar_url = (
        f"https://graph.microsoft.com/v1.0/me/calendarView?"
        f"startDateTime={start_date_str}&endDateTime={end_date_str}&"
        f"$select=id,subject,organizer,start,end,location,isAllDay,showAs,sensitivity,iCalUId,lastModifiedDateTime&$count=true"
    )

    events = []

    # Make the API request
    response_data = await fetch_with_aiohttp(calendar_url, headers)
    events.extend(response_data.get("value", []))

    # FIX: "@odata.count" is only guaranteed on the *first* page of results.
    # The previous code read it after the pagination loop, i.e. from the last
    # page, where it is usually missing -- capture it now, before paginating.
    total_count = response_data.get("@odata.count")

    # Check if there are more events (pagination)
    next_link = response_data.get("@odata.nextLink")
    while next_link:
        response_data = await fetch_with_aiohttp(next_link, headers)
        events.extend(response_data.get("value", []))
        next_link = response_data.get("@odata.nextLink")

    # Fall back to the number of events actually fetched
    if total_count is None:
        total_count = len(events)
    return events, total_count


|
def parse_ical_file(file_path):
    """
    Parse a single iCalendar file and extract event data.

    Args:
        file_path (str): Path to the .ics file

    Returns:
        dict: Event data or None if parsing fails
    """
    try:
        with open(file_path, "r", encoding="utf-8") as handle:
            raw = handle.read()

        parsed = {}
        inside_vevent = False

        for raw_line in raw.split("\n"):
            stripped = raw_line.strip()

            if stripped == "BEGIN:VEVENT":
                inside_vevent = True
                continue
            if stripped == "END:VEVENT":
                # Only the first VEVENT in a file is considered.
                break
            if not inside_vevent or ":" not in stripped:
                continue

            key, value = stripped.split(":", 1)

            if key == "UID":
                parsed["uid"] = value
            elif key == "SUMMARY":
                # Undo iCalendar text escaping for commas, semicolons, newlines.
                parsed["subject"] = (
                    value.replace("\\,", ",").replace("\\;", ";").replace("\\n", "\n")
                )
            elif key.startswith("DTSTART"):
                parsed["start"] = _parse_ical_datetime(key, value)
            elif key.startswith("DTEND"):
                parsed["end"] = _parse_ical_datetime(key, value)
            elif key == "LOCATION":
                parsed["location"] = value.replace("\\,", ",").replace("\\;", ";")
            elif key == "DESCRIPTION":
                parsed["description"] = (
                    value.replace("\\,", ",").replace("\\;", ";").replace("\\n", "\n")
                )

        # Record file metadata so later syncs can detect local changes.
        parsed["local_mtime"] = os.path.getmtime(file_path)
        parsed["local_file"] = file_path

        return parsed if "uid" in parsed else None

    except Exception as e:
        print(f"Error parsing {file_path}: {e}")
        return None


|
def _parse_ical_datetime(key, value):
    """
    Parse an iCalendar DTSTART/DTEND value into a datetime.

    Args:
        key (str): Property name, possibly carrying parameters
            (e.g. "DTSTART;TZID=Europe/Berlin").
        value (str): Raw datetime text (e.g. "20240101T090000Z").

    Returns:
        datetime: Parsed datetime, or None if parsing fails.
    """
    try:
        if "TZID=" in key:
            # A TZID parameter is present but currently ignored: the value is
            # parsed as a naive datetime. (The previous implementation
            # extracted the zone name into a variable and then never used it;
            # that dead code has been removed.)
            return parser.parse(value.replace("Z", ""))
        elif value.endswith("Z"):
            # UTC time
            return parser.parse(value)
        else:
            # Naive datetime
            return parser.parse(value.replace("Z", ""))
    except Exception:
        return None


|
def get_local_calendar_events(vdir_path):
    """
    Get all local calendar events from vdir format.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        dict: Dictionary mapping UIDs to event data
    """
    if not os.path.exists(vdir_path):
        return {}

    # Parse every .ics file lazily and keep only those with a usable UID.
    parsed_events = (
        parse_ical_file(ics_path)
        for ics_path in glob.glob(os.path.join(vdir_path, "*.ics"))
    )
    return {
        event["uid"]: event
        for event in parsed_events
        if event and "uid" in event
    }


|
async def create_calendar_event(headers, event_data):
    """
    Create a new calendar event on Microsoft Graph.

    Args:
        headers (dict): Authentication headers
        event_data (dict): Event data from local file; expected to contain
            datetime values under "start" and "end".

    Returns:
        dict: The Graph-formatted payload that was sent (not the server's
            response body), or None if creation failed.
    """
    try:
        start = event_data.get("start")
        end = event_data.get("end")
        # Guard explicitly: events parsed from malformed .ics files can carry
        # missing or unparsable (None) start/end values, which previously
        # surfaced only as an opaque KeyError/AttributeError message.
        if start is None or end is None:
            print("Error creating event: missing start or end datetime")
            return None

        # Convert local event data to Microsoft Graph format
        graph_event = {
            "subject": event_data.get("subject", "Untitled Event"),
            "start": {"dateTime": start.isoformat(), "timeZone": "UTC"},
            "end": {"dateTime": end.isoformat(), "timeZone": "UTC"},
        }

        if event_data.get("location"):
            graph_event["location"] = {"displayName": event_data["location"]}

        if event_data.get("description"):
            graph_event["body"] = {
                "contentType": "text",
                "content": event_data["description"],
            }

        # Create the event; Graph answers 201 Created on success.
        create_url = "https://graph.microsoft.com/v1.0/me/events"
        status = await post_with_aiohttp(create_url, headers, graph_event)

        if status == 201:
            return graph_event
        else:
            print(f"Failed to create event: HTTP {status}")
            return None

    except Exception as e:
        print(f"Error creating event: {e}")
        return None


|
async def delete_calendar_event_by_uid(headers, ical_uid):
    """
    Delete a calendar event by its iCalUId.

    Args:
        headers (dict): Authentication headers
        ical_uid (str): The iCalUId of the event to delete

    Returns:
        bool: True if deleted successfully, False otherwise
    """
    try:
        # FIX: escape single quotes per OData string-literal rules (a quote is
        # doubled) so a UID containing "'" cannot break or inject into $filter.
        safe_uid = ical_uid.replace("'", "''")

        # First, find the event by iCalUId
        search_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=iCalUId eq '{safe_uid}'"
        response = await fetch_with_aiohttp(search_url, headers)

        events = response.get("value", [])
        if not events:
            print(f"Event with UID {ical_uid} not found on server")
            return False

        # Delete the event using its Graph ID
        event_id = events[0]["id"]
        delete_url = f"https://graph.microsoft.com/v1.0/me/events/{event_id}"
        status = await delete_with_aiohttp(delete_url, headers)

        # Graph answers 204 No Content on successful deletion.
        if status == 204:
            print(f"Successfully deleted event with UID {ical_uid}")
            return True
        else:
            print(f"Failed to delete event: HTTP {status}")
            return False

    except Exception as e:
        print(f"Error deleting event: {e}")
        return False


|
def get_sync_timestamp_file(vdir_path):
    """Return the path of the hidden sync-timestamp file inside the vdir."""
    timestamp_name = ".sync_timestamp"
    return os.path.join(vdir_path, timestamp_name)


|
def get_last_sync_time(vdir_path):
    """
    Get the timestamp of the last sync.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        float: Unix timestamp of last sync, or 0 if never synced
    """
    timestamp_file = get_sync_timestamp_file(vdir_path)
    if not os.path.exists(timestamp_file):
        return 0

    try:
        with open(timestamp_file, "r") as handle:
            raw = handle.read()
        return float(raw.strip())
    except (ValueError, IOError):
        # Unreadable or corrupt timestamp is treated as "never synced".
        return 0


|
def update_sync_timestamp(vdir_path):
    """
    Update the sync timestamp to current time.

    Args:
        vdir_path (str): Path to vdir calendar directory
    """
    timestamp_file = get_sync_timestamp_file(vdir_path)
    now_ts = datetime.now().timestamp()
    try:
        with open(timestamp_file, "w") as handle:
            handle.write(str(now_ts))
    except IOError as e:
        # Best-effort: a failed write only degrades deletion detection.
        print(f"Warning: Could not update sync timestamp: {e}")


|
def detect_deleted_events(vdir_path):
    """
    Detect events that have been deleted from vdir since last sync.
    Uses sync state and file modification times to determine deletions.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        list: List of UIDs that were deleted locally
    """
    if not os.path.exists(vdir_path):
        return []

    state_file = os.path.join(vdir_path, ".sync_state.json")
    last_sync_time = get_last_sync_time(vdir_path)

    # Load previous sync state
    previous_state = {}
    if os.path.exists(state_file):
        try:
            with open(state_file, "r") as f:
                previous_state = json.load(f)
        except Exception:
            return []

    if not previous_state:
        return []  # No previous state to compare against

    # Get current local events
    current_local_events = get_local_calendar_events(vdir_path)

    # Only treat missing events as intentional deletions when the directory
    # itself was modified after the last sync (deleting a file updates the
    # parent directory's mtime). The mtime is loop-invariant, so compute it
    # once instead of once per missing UID as before.
    if os.path.getmtime(vdir_path) <= last_sync_time:
        return []

    # Any UID present at the previous sync but absent now was deleted locally.
    return [uid for uid in previous_state if uid not in current_local_events]


|
async def sync_local_calendar_changes(
    headers, vdir_path, progress, task_id, dry_run=False
):
    """
    Sync local calendar changes (new events and deletions) to Microsoft Graph.

    Deletions are pushed before creations, and the on-disk sync state
    (.sync_state.json + .sync_timestamp) is rewritten only on a real
    (non-dry-run) pass.

    Args:
        headers (dict): Authentication headers
        vdir_path (str): Path to local vdir calendar directory
        progress: Progress instance for updates
        task_id: Progress task ID
        dry_run (bool): If True, only report what would be done

    Returns:
        tuple: (created_count, deleted_count)
    """
    if not os.path.exists(vdir_path):
        progress.console.print(
            f"[yellow]Local calendar directory not found: {vdir_path}[/yellow]"
        )
        return 0, 0

    # Track state file for knowing what was previously synced
    state_file = os.path.join(vdir_path, ".sync_state.json")

    # Load previous sync state; on failure we proceed with an empty state,
    # which means no deletions and more events considered "new".
    previous_state = {}
    if os.path.exists(state_file):
        try:
            with open(state_file, "r") as f:
                previous_state = json.load(f)
        except Exception as e:
            progress.console.print(f"[yellow]Could not load sync state: {e}[/yellow]")

    # Detect deleted events using enhanced detection
    deleted_events = detect_deleted_events(vdir_path)

    # Get current local events
    current_local_events = get_local_calendar_events(vdir_path)

    # Get current remote events to avoid duplicates.
    # NOTE(review): the -30/+90 day window is assumed to cover every event a
    # local edit could touch; events outside it may be re-created -- confirm.
    try:
        remote_events, _ = await fetch_calendar_events(
            headers, days_back=30, days_forward=90
        )
        remote_uids = {
            event.get("iCalUId", event.get("id", "")) for event in remote_events
        }
    except Exception as e:
        progress.console.print(f"[red]Error fetching remote events: {e}[/red]")
        return 0, 0

    created_count = 0
    deleted_count = 0

    # Find new local events (not in previous state and not on server)
    new_local_events = []
    for uid, event_data in current_local_events.items():
        if uid not in previous_state and uid not in remote_uids:
            # This is a new local event
            new_local_events.append((uid, event_data))

    progress.update(task_id, total=len(new_local_events) + len(deleted_events))

    # Handle deletions FIRST to clean up server before adding new events
    for uid in deleted_events:
        if dry_run:
            progress.console.print(f"[DRY-RUN] Would delete event with UID: {uid}")
        else:
            result = await delete_calendar_event_by_uid(headers, uid)
            if result:
                deleted_count += 1
                progress.console.print(f"[green]Deleted event with UID: {uid}[/green]")
            else:
                progress.console.print(
                    f"[red]Failed to delete event with UID: {uid}[/red]"
                )

        # Progress advances even on failure so the bar always completes.
        progress.advance(task_id)

    # Create new events on server
    for uid, event_data in new_local_events:
        if dry_run:
            progress.console.print(
                f"[DRY-RUN] Would create event: {event_data.get('subject', 'Untitled')}"
            )
        else:
            result = await create_calendar_event(headers, event_data)
            if result:
                created_count += 1
                progress.console.print(
                    f"[green]Created event: {event_data.get('subject', 'Untitled')}[/green]"
                )
            else:
                progress.console.print(
                    f"[red]Failed to create event: {event_data.get('subject', 'Untitled')}[/red]"
                )

        progress.advance(task_id)

    # Update sync state and timestamp (skipped entirely in dry-run mode so a
    # preview never changes what the next real sync will do).
    if not dry_run:
        new_state = {
            uid: event_data.get("local_mtime", 0)
            for uid, event_data in current_local_events.items()
        }
        try:
            with open(state_file, "w") as f:
                json.dump(new_state, f, indent=2)

            # Update sync timestamp to mark when this sync completed
            update_sync_timestamp(vdir_path)

        except Exception as e:
            progress.console.print(f"[yellow]Could not save sync state: {e}[/yellow]")

    if created_count > 0 or deleted_count > 0:
        progress.console.print(
            f"[cyan]Local calendar sync completed: {created_count} created, {deleted_count} deleted[/cyan]"
        )

    return created_count, deleted_count


|
async def fetch_pending_invites(headers, days_forward=30):
    """
    Fetch calendar invites that need a response (pending/tentative).

    Args:
        headers (dict): Headers including authentication.
        days_forward (int): Number of days to look forward.

    Returns:
        list: List of invite dictionaries with response status info.
    """
    window_start = datetime.now()
    window_end = window_start + timedelta(days=days_forward)

    start_date_str = window_start.strftime("%Y-%m-%dT00:00:00Z")
    end_date_str = window_end.strftime("%Y-%m-%dT23:59:59Z")

    # Ask Graph only for events still awaiting a final response,
    # ordered by start time.
    calendar_url = (
        f"https://graph.microsoft.com/v1.0/me/calendarView?"
        f"startDateTime={start_date_str}&endDateTime={end_date_str}&"
        f"$select=id,subject,organizer,start,end,location,isAllDay,responseStatus,isCancelled&"
        f"$filter=responseStatus/response eq 'notResponded' or responseStatus/response eq 'tentativelyAccepted'&"
        f"$orderby=start/dateTime"
    )

    invites = []

    try:
        # Fetch the first page, then keep following @odata.nextLink links.
        page = await fetch_with_aiohttp(calendar_url, headers)
        while True:
            invites.extend(page.get("value", []))
            follow = page.get("@odata.nextLink")
            if not follow:
                break
            page = await fetch_with_aiohttp(follow, headers)

    except Exception as e:
        # Best-effort: return whatever pages were collected before the error.
        print(f"Error fetching pending invites: {e}")

    return invites


|
async def respond_to_invite(headers, event_id, response):
    """
    Respond to a calendar invite.

    Args:
        headers (dict): Authentication headers
        event_id (str): The ID of the event to respond to
        response (str): Response type - 'accept', 'tentativelyAccept', or 'decline'

    Returns:
        bool: True if response was successful
    """
    # Validate before touching the network; the response name becomes part of
    # the Graph action URL below.
    valid_responses = ["accept", "tentativelyAccept", "decline"]
    if response not in valid_responses:
        print(f"Invalid response type: {response}. Must be one of {valid_responses}")
        return False

    response_url = f"https://graph.microsoft.com/v1.0/me/events/{event_id}/{response}"
    try:
        status = await post_with_aiohttp(response_url, headers, {})
    except Exception as e:
        print(f"Error responding to invite: {e}")
        return False
    return status in (200, 202)