Complete Phase 1: parallel sync, IPC, theme colors, lazy CLI loading
- Sync: Parallelize message downloads with asyncio.gather (batch size 5)
- Sync: Increase HTTP semaphore from 2 to 5 concurrent requests (see the note after the diff)
- Sync: Add IPC notifications to sync daemon after sync completes
- Mail: Replace all hardcoded RGB colors with theme variables
- Mail: Remove envelope icon/checkbox gap (padding cleanup)
- Mail: Add IPC listener for refresh notifications from sync
- Calendar: Style current time line with error color and solid line
- Tasks: Fix table not displaying (CSS grid to horizontal layout)
- CLI: Implement lazy command loading for faster startup (~12s to ~0.3s; sketch below)
- Add PROJECT_PLAN.md with full improvement roadmap
- Add src/utils/ipc.py for Unix socket cross-app communication (sketch below)
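The commit introduces src/utils/ipc.py for Unix-socket cross-app communication, but that file is not part of the hunks shown below. The following is a minimal sketch of what such a helper could look like, assuming an asyncio line-delimited JSON protocol; the socket path and the notify/listen names are illustrative assumptions, not the actual API added by the commit.

import asyncio
import json
import os

# Assumed socket location; the real module may derive this from a runtime dir.
SOCKET_PATH = "/tmp/pim-ipc.sock"


async def notify(event: str, payload: dict | None = None) -> bool:
    """Send a one-shot JSON event (e.g. after a sync completes). Returns False
    when no listener is running, so the sync daemon never blocks on the UI."""
    try:
        _, writer = await asyncio.open_unix_connection(SOCKET_PATH)
    except (ConnectionRefusedError, FileNotFoundError):
        return False
    message = json.dumps({"event": event, "payload": payload or {}}) + "\n"
    writer.write(message.encode())
    await writer.drain()
    writer.close()
    await writer.wait_closed()
    return True


async def listen(on_event) -> None:
    """Serve the socket and invoke on_event(message_dict) for every line
    received; the mail app would run this in a background task to refresh
    its message list when the sync daemon sends a notification."""
    if os.path.exists(SOCKET_PATH):
        os.unlink(SOCKET_PATH)

    async def _handle(reader, writer):
        line = await reader.readline()
        if line:
            on_event(json.loads(line))
        writer.close()
        await writer.wait_closed()

    server = await asyncio.start_unix_server(_handle, path=SOCKET_PATH)
    async with server:
        await server.serve_forever()

With a helper along these lines, the sync side would call notify("mail_synced") once a sync finishes, and the mail TUI would pass a refresh callback to listen().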
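The ~12s to ~0.3s startup claim in the CLI bullet is the typical payoff of deferring heavy imports until a subcommand is actually dispatched. The project's real CLI framework and module layout are not visible in this commit view, so the dispatcher below is only a sketch of the general pattern, with hypothetical module paths.

import importlib
import sys

# Map subcommand name -> "module:function", imported only when invoked, so
# plain startup and help output never pay for heavy dependencies.
LAZY_COMMANDS = {
    "sync": "pim.sync.cli:main",
    "mail": "pim.mail.cli:main",
    "calendar": "pim.calendar.cli:main",
    "tasks": "pim.tasks.cli:main",
}


def main(argv: list[str] | None = None) -> int:
    args = sys.argv[1:] if argv is None else argv
    if not args or args[0] not in LAZY_COMMANDS:
        print("usage: pim {" + ",".join(LAZY_COMMANDS) + "} ...")
        return 2
    module_name, func_name = LAZY_COMMANDS[args[0]].split(":")
    command = getattr(importlib.import_module(module_name), func_name)
    return command(args[1:]) or 0


if __name__ == "__main__":
    raise SystemExit(main())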
@@ -110,26 +110,47 @@ async def fetch_mail_async(
     progress.update(task_id, total=len(messages_to_download), completed=0)
     downloaded_count = 0

-    for message in messages_to_download:
+    # Download messages in parallel batches for better performance
+    BATCH_SIZE = 5
+
+    for i in range(0, len(messages_to_download), BATCH_SIZE):
         # Check if task was cancelled/disabled
         if is_cancelled and is_cancelled():
             progress.console.print("Task cancelled, stopping inbox fetch")
             break

-        progress.console.print(
-            f"Processing message: {message.get('subject', 'No Subject')}", end="\r"
-        )
-        await save_mime_to_maildir_async(
-            maildir_path,
-            message,
-            attachments_dir,
-            headers,
-            progress,
-            dry_run,
-            download_attachments,
-        )
-        progress.update(task_id, advance=1)
-        downloaded_count += 1
+        batch = messages_to_download[i : i + BATCH_SIZE]
+
+        # Create tasks for parallel download
+        async def download_message(message):
+            progress.console.print(
+                f"Processing message: {message.get('subject', 'No Subject')[:50]}",
+                end="\r",
+            )
+            await save_mime_to_maildir_async(
+                maildir_path,
+                message,
+                attachments_dir,
+                headers,
+                progress,
+                dry_run,
+                download_attachments,
+            )
+            return 1
+
+        # Execute batch in parallel
+        tasks = [download_message(msg) for msg in batch]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        # Count successful downloads
+        batch_success = sum(1 for r in results if r == 1)
+        downloaded_count += batch_success
+        progress.update(task_id, advance=len(batch))
+
+        # Log any errors
+        for idx, result in enumerate(results):
+            if isinstance(result, Exception):
+                progress.console.print(f"Error downloading message: {result}")

     progress.update(task_id, completed=downloaded_count)
     progress.console.print(f"\nFinished downloading {downloaded_count} new messages.")
@@ -461,37 +482,57 @@ async def fetch_archive_mail_async(
     # Update progress to reflect only the messages we actually need to download
     progress.update(task_id, total=len(messages_to_download), completed=0)

-    # Load sync state once, we'll update it incrementally
+    # Load sync state once, we'll update it after each batch for resilience
     synced_ids = _load_archive_sync_state(maildir_path) if not dry_run else set()
     downloaded_count = 0

-    for message in messages_to_download:
+    # Download messages in parallel batches for better performance
+    BATCH_SIZE = 5
+
+    for i in range(0, len(messages_to_download), BATCH_SIZE):
         # Check if task was cancelled/disabled
         if is_cancelled and is_cancelled():
             progress.console.print("Task cancelled, stopping archive fetch")
             break

-        progress.console.print(
-            f"Processing archived message: {message.get('subject', 'No Subject')[:50]}",
-            end="\r",
-        )
-        # Save to .Archive folder instead of main maildir
-        await save_mime_to_maildir_async(
-            archive_dir,  # Use archive_dir instead of maildir_path
-            message,
-            attachments_dir,
-            headers,
-            progress,
-            dry_run,
-            download_attachments,
-        )
-        progress.update(task_id, advance=1)
-        downloaded_count += 1
+        batch = messages_to_download[i : i + BATCH_SIZE]
+        batch_msg_ids = []
-
-        # Update sync state after each message for resilience
-        # This ensures we don't try to re-upload this message in archive_mail_async
-        if not dry_run:
-            synced_ids.add(message["id"])
+        # Create tasks for parallel download
+        async def download_message(message):
+            progress.console.print(
+                f"Processing archived message: {message.get('subject', 'No Subject')[:50]}",
+                end="\r",
+            )
+            # Save to .Archive folder instead of main maildir
+            await save_mime_to_maildir_async(
+                archive_dir,  # Use archive_dir instead of maildir_path
+                message,
+                attachments_dir,
+                headers,
+                progress,
+                dry_run,
+                download_attachments,
+            )
+            return message["id"]
+
+        # Execute batch in parallel
+        tasks = [download_message(msg) for msg in batch]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        # Process results and collect successful message IDs
+        for result in results:
+            if isinstance(result, Exception):
+                progress.console.print(f"Error downloading archived message: {result}")
+            elif result:
+                batch_msg_ids.append(result)
+                downloaded_count += 1
+
+        progress.update(task_id, advance=len(batch))
+
+        # Update sync state after each batch (not each message) for resilience + performance
+        if not dry_run and batch_msg_ids:
+            synced_ids.update(batch_msg_ids)
+            _save_archive_sync_state(maildir_path, synced_ids)

     progress.update(task_id, completed=downloaded_count)
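A note on how the two sync bullets interact: BATCH_SIZE = 5 in both hunks lines up with the HTTP semaphore being raised from 2 to 5, so a gathered batch is never throttled below its own size. The semaphore itself is not visible in these hunks; a common way to enforce such a bound inside the download path looks roughly like the sketch below, where the names and the generic client are illustrative assumptions, not code from this commit.

import asyncio

# Cap on in-flight HTTP requests; per the commit message this was raised
# from 2 to 5 so it matches the asyncio.gather batch size used above.
_HTTP_SEMAPHORE = asyncio.Semaphore(5)


async def _bounded_request(client, method: str, url: str, **kwargs):
    # Every downloader acquires the semaphore before touching the network,
    # so five parallel download_message() tasks still produce at most five
    # concurrent requests, even if each message fetches several attachments.
    async with _HTTP_SEMAPHORE:
        return await client.request(method, url, **kwargs)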