add vdir sync feature
This commit is contained in:
@@ -3,9 +3,13 @@ Calendar operations for Microsoft Graph API.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import glob
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil import parser
|
||||
|
||||
from .client import fetch_with_aiohttp
|
||||
from .client import fetch_with_aiohttp, post_with_aiohttp, delete_with_aiohttp
|
||||
|
||||
|
||||
async def fetch_calendar_events(
|
||||
@@ -40,7 +44,7 @@ async def fetch_calendar_events(
|
||||
calendar_url = (
|
||||
f"https://graph.microsoft.com/v1.0/me/calendarView?"
|
||||
f"startDateTime={start_date_str}&endDateTime={end_date_str}&"
|
||||
f"$select=id,subject,organizer,start,end,location,isAllDay,showAs,sensitivity&$count=true"
|
||||
f"$select=id,subject,organizer,start,end,location,isAllDay,showAs,sensitivity,iCalUId,lastModifiedDateTime&$count=true"
|
||||
)
|
||||
|
||||
events = []
|
||||
@@ -59,3 +63,408 @@ async def fetch_calendar_events(
|
||||
# Return events and total count
|
||||
total_count = response_data.get("@odata.count", len(events))
|
||||
return events, total_count
|
||||
|
||||
|
||||
def parse_ical_file(file_path):
    """
    Parse a single iCalendar file and extract event data.

    Only the first VEVENT is read (vdir stores one event per file).
    Property parameters are tolerated: a line like
    ``SUMMARY;LANGUAGE=en:Standup`` is matched on its base name
    ``SUMMARY`` (the original code only matched bare property names).

    Args:
        file_path (str): Path to the .ics file

    Returns:
        dict: Event data (uid, subject, start, end, location, description,
            local_mtime, local_file) or None if parsing fails or the file
            has no UID.
    """
    try:
        with open(file_path, "r", encoding="utf-8") as f:
            content = f.read()

        event_data = {}
        in_event = False

        for line in content.split("\n"):
            line = line.strip()

            if line == "BEGIN:VEVENT":
                in_event = True
                continue
            elif line == "END:VEVENT":
                # vdir files hold a single event; stop at the first one
                break
            elif not in_event:
                continue

            if ":" in line:
                key, value = line.split(":", 1)
                # Strip property parameters (";PARAM=...") so keys like
                # "SUMMARY;LANGUAGE=en" still match their base property.
                base_key = key.split(";", 1)[0]

                if base_key == "UID":
                    event_data["uid"] = value
                elif base_key == "SUMMARY":
                    event_data["subject"] = _unescape_ical_text(value)
                elif base_key == "DTSTART":
                    # Pass the full key so any TZID parameter stays visible
                    event_data["start"] = _parse_ical_datetime(key, value)
                elif base_key == "DTEND":
                    event_data["end"] = _parse_ical_datetime(key, value)
                elif base_key == "LOCATION":
                    event_data["location"] = _unescape_ical_text(value)
                elif base_key == "DESCRIPTION":
                    event_data["description"] = _unescape_ical_text(value)

        # Get file modification time for tracking local changes
        event_data["local_mtime"] = os.path.getmtime(file_path)
        event_data["local_file"] = file_path

        return event_data if "uid" in event_data else None

    except Exception as e:
        print(f"Error parsing {file_path}: {e}")
        return None


def _unescape_ical_text(value):
    """Undo RFC 5545 TEXT escaping for commas, semicolons and newlines."""
    return value.replace("\\,", ",").replace("\\;", ";").replace("\\n", "\n")
|
||||
|
||||
|
||||
def _parse_ical_datetime(key, value):
    """
    Parse an iCalendar DTSTART/DTEND value.

    The original implementation computed a ``tz_part`` from TZID that was
    never used (and its ``":" in key`` guard could never be true, since the
    key is split off before ":"); that dead code is removed here.

    Args:
        key (str): Full property name, possibly with parameters
            (e.g. ``DTSTART;TZID=Europe/Berlin``).
        value (str): Raw iCalendar datetime text (e.g. ``20240101T120000Z``).

    Returns:
        datetime or None: Parsed datetime — timezone-aware only for plain
        UTC ("Z") values — or None if parsing fails.
    """
    try:
        if "TZID=" not in key and value.endswith("Z"):
            # Plain UTC timestamp: keep it timezone-aware
            return parser.parse(value)
        # TZID-qualified or naive local time: treat as naive for now and
        # strip any stray "Z" so dateutil does not attach UTC.
        return parser.parse(value.replace("Z", ""))
    except Exception:
        return None
|
||||
|
||||
|
||||
def get_local_calendar_events(vdir_path):
    """
    Collect every local calendar event stored in a vdir directory.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        dict: Mapping of event UID to parsed event data; empty when the
            directory does not exist.
    """
    if not os.path.exists(vdir_path):
        return {}

    events_by_uid = {}
    for ics_path in glob.glob(os.path.join(vdir_path, "*.ics")):
        parsed = parse_ical_file(ics_path)
        if parsed and "uid" in parsed:
            events_by_uid[parsed["uid"]] = parsed

    return events_by_uid
|
||||
|
||||
|
||||
async def create_calendar_event(headers, event_data):
    """
    Create a new calendar event on Microsoft Graph.

    Args:
        headers (dict): Authentication headers
        event_data (dict): Event data from local file

    Returns:
        dict: The Graph-formatted payload that was posted, or None if the
            request failed.
    """
    try:
        # Translate the locally parsed event into Graph's event schema
        payload = {
            "subject": event_data.get("subject", "Untitled Event"),
            "start": {"dateTime": event_data["start"].isoformat(), "timeZone": "UTC"},
            "end": {"dateTime": event_data["end"].isoformat(), "timeZone": "UTC"},
        }

        location = event_data.get("location")
        if location:
            payload["location"] = {"displayName": location}

        description = event_data.get("description")
        if description:
            payload["body"] = {
                "contentType": "text",
                "content": description,
            }

        # POST /me/events; 201 Created signals success
        status = await post_with_aiohttp(
            "https://graph.microsoft.com/v1.0/me/events", headers, payload
        )

        if status == 201:
            return payload

        print(f"Failed to create event: HTTP {status}")
        return None

    except Exception as e:
        print(f"Error creating event: {e}")
        return None
|
||||
|
||||
|
||||
async def delete_calendar_event_by_uid(headers, ical_uid):
    """
    Delete a calendar event by its iCalUId.

    The event is located via an OData ``$filter`` on ``iCalUId`` and then
    deleted using its Graph event id.

    Args:
        headers (dict): Authentication headers
        ical_uid (str): The iCalUId of the event to delete

    Returns:
        bool: True if deleted successfully, False otherwise
    """
    try:
        # Escape single quotes per OData string-literal rules: a UID
        # containing "'" would otherwise break (or inject into) the filter.
        safe_uid = ical_uid.replace("'", "''")
        search_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=iCalUId eq '{safe_uid}'"
        response = await fetch_with_aiohttp(search_url, headers)

        events = response.get("value", [])
        if not events:
            print(f"Event with UID {ical_uid} not found on server")
            return False

        # Delete the event using its Graph ID
        event_id = events[0]["id"]
        delete_url = f"https://graph.microsoft.com/v1.0/me/events/{event_id}"
        status = await delete_with_aiohttp(delete_url, headers)

        # 204 No Content is the expected status for a successful DELETE
        if status == 204:
            print(f"Successfully deleted event with UID {ical_uid}")
            return True
        else:
            print(f"Failed to delete event: HTTP {status}")
            return False

    except Exception as e:
        print(f"Error deleting event: {e}")
        return False
|
||||
|
||||
|
||||
def get_sync_timestamp_file(vdir_path):
    """Return the path of the hidden file recording the last sync time."""
    return os.path.join(vdir_path, ".sync_timestamp")
|
||||
|
||||
|
||||
def get_last_sync_time(vdir_path):
    """
    Read the timestamp of the most recent sync.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        float: Unix timestamp of last sync, or 0 if never synced or the
            timestamp file is unreadable.
    """
    # Same location get_sync_timestamp_file() computes
    timestamp_file = os.path.join(vdir_path, ".sync_timestamp")
    if not os.path.exists(timestamp_file):
        return 0
    try:
        with open(timestamp_file, "r") as f:
            return float(f.read().strip())
    except (ValueError, IOError):
        # Corrupt or unreadable marker behaves like "never synced"
        return 0
|
||||
|
||||
|
||||
def update_sync_timestamp(vdir_path):
    """
    Record the current time as the moment of the last completed sync.

    Args:
        vdir_path (str): Path to vdir calendar directory
    """
    # Same location get_sync_timestamp_file() computes
    timestamp_file = os.path.join(vdir_path, ".sync_timestamp")
    try:
        with open(timestamp_file, "w") as f:
            f.write(str(datetime.now().timestamp()))
    except IOError as e:
        # Best-effort: a failed write only degrades deletion detection
        print(f"Warning: Could not update sync timestamp: {e}")
|
||||
|
||||
|
||||
def detect_deleted_events(vdir_path):
    """
    Detect events that have been deleted from vdir since last sync.

    A UID recorded in the previous sync state but missing from the files
    currently on disk counts as deleted — provided the directory was
    modified after the last sync, so stale state alone never triggers
    server-side deletions.

    Args:
        vdir_path (str): Path to vdir calendar directory

    Returns:
        list: List of UIDs that were deleted locally
    """
    if not os.path.exists(vdir_path):
        return []

    state_file = os.path.join(vdir_path, ".sync_state.json")
    last_sync_time = get_last_sync_time(vdir_path)

    # Load previous sync state
    previous_state = {}
    if os.path.exists(state_file):
        try:
            with open(state_file, "r") as f:
                previous_state = json.load(f)
        except Exception:
            # Unreadable state: safer to report no deletions than to guess
            return []

    if not previous_state:
        return []  # No previous state to compare against

    # Get current local events
    current_local_events = get_local_calendar_events(vdir_path)

    # The directory mtime is loop-invariant — compute it once instead of
    # once per missing UID as the original code did. If the directory has
    # not changed since the last sync, nothing was intentionally removed.
    vdir_mtime = os.path.getmtime(vdir_path)
    if vdir_mtime <= last_sync_time:
        return []

    return [uid for uid in previous_state if uid not in current_local_events]
|
||||
|
||||
|
||||
async def sync_local_calendar_changes(
    headers, vdir_path, progress, task_id, dry_run=False
):
    """
    Sync local calendar changes (new events and deletions) to Microsoft Graph.

    Order matters: deletions are pushed before creations, and the sync
    state/timestamp are only written after both phases (and never in a
    dry run), so a failed run can be retried from the old state.

    Args:
        headers (dict): Authentication headers
        vdir_path (str): Path to local vdir calendar directory
        progress: Progress instance for updates
        task_id: Progress task ID
        dry_run (bool): If True, only report what would be done

    Returns:
        tuple: (created_count, deleted_count)
    """
    if not os.path.exists(vdir_path):
        progress.console.print(
            f"[yellow]Local calendar directory not found: {vdir_path}[/yellow]"
        )
        return 0, 0

    # Track state file for knowing what was previously synced
    state_file = os.path.join(vdir_path, ".sync_state.json")

    # Load previous sync state; a load failure is non-fatal — we fall back
    # to an empty state, which means everything local looks "new".
    previous_state = {}
    if os.path.exists(state_file):
        try:
            with open(state_file, "r") as f:
                previous_state = json.load(f)
        except Exception as e:
            progress.console.print(f"[yellow]Could not load sync state: {e}[/yellow]")

    # Detect deleted events using enhanced detection
    deleted_events = detect_deleted_events(vdir_path)

    # Get current local events
    current_local_events = get_local_calendar_events(vdir_path)

    # Get current remote events to avoid duplicates.
    # NOTE(review): the 30-days-back / 90-days-forward window means local
    # events outside that range may be re-created as duplicates — confirm
    # the window against how vdir entries are produced.
    try:
        remote_events, _ = await fetch_calendar_events(
            headers, days_back=30, days_forward=90
        )
        remote_uids = {
            event.get("iCalUId", event.get("id", "")) for event in remote_events
        }
    except Exception as e:
        progress.console.print(f"[red]Error fetching remote events: {e}[/red]")
        return 0, 0

    created_count = 0
    deleted_count = 0

    # Find new local events (not in previous state and not on server)
    new_local_events = []
    for uid, event_data in current_local_events.items():
        if uid not in previous_state and uid not in remote_uids:
            # This is a new local event
            new_local_events.append((uid, event_data))

    progress.update(task_id, total=len(new_local_events) + len(deleted_events))

    # Handle deletions FIRST to clean up server before adding new events
    for uid in deleted_events:
        if dry_run:
            progress.console.print(f"[DRY-RUN] Would delete event with UID: {uid}")
        else:
            result = await delete_calendar_event_by_uid(headers, uid)
            if result:
                deleted_count += 1
                progress.console.print(f"[green]Deleted event with UID: {uid}[/green]")
            else:
                # Failure is reported but does not abort the sync
                progress.console.print(
                    f"[red]Failed to delete event with UID: {uid}[/red]"
                )

        progress.advance(task_id)

    # Create new events on server
    for uid, event_data in new_local_events:
        if dry_run:
            progress.console.print(
                f"[DRY-RUN] Would create event: {event_data.get('subject', 'Untitled')}"
            )
        else:
            result = await create_calendar_event(headers, event_data)
            if result:
                created_count += 1
                progress.console.print(
                    f"[green]Created event: {event_data.get('subject', 'Untitled')}[/green]"
                )
            else:
                progress.console.print(
                    f"[red]Failed to create event: {event_data.get('subject', 'Untitled')}[/red]"
                )

        progress.advance(task_id)

    # Update sync state and timestamp. The state maps UID -> local mtime;
    # it is rewritten from the current on-disk view even if some server
    # operations above failed (those will not be retried next run).
    if not dry_run:
        new_state = {
            uid: event_data.get("local_mtime", 0)
            for uid, event_data in current_local_events.items()
        }
        try:
            with open(state_file, "w") as f:
                json.dump(new_state, f, indent=2)

            # Update sync timestamp to mark when this sync completed
            update_sync_timestamp(vdir_path)

        except Exception as e:
            progress.console.print(f"[yellow]Could not save sync state: {e}[/yellow]")

    if created_count > 0 or deleted_count > 0:
        progress.console.print(
            f"[cyan]Local calendar sync completed: {created_count} created, {deleted_count} deleted[/cyan]"
        )

    return created_count, deleted_count
|
||||
|
||||
Reference in New Issue
Block a user