From 523cf78737c632f2149feeba092013283d883a86 Mon Sep 17 00:00:00 2001 From: Tim Bendt Date: Wed, 17 Dec 2025 11:20:28 -0500 Subject: [PATCH] sync fixed --- .coverage | Bin 69632 -> 69632 bytes src/cli/sync.py | 14 +- src/cli/sync_dashboard.py | 476 +++++++++++++++++++++++++++++++++-- tests/test_sync_dashboard.py | 1 + uv.lock | 146 +++++------ 5 files changed, 548 insertions(+), 89 deletions(-) diff --git a/.coverage b/.coverage index 68d4746a1b1af106ce372d55b394ff9d15830862..c5b67e6ba3d5b6e26d59662c595370872d396cec 100644 GIT binary patch delta 244 zcmZozz|ydQWdmD7)C>mxul#TMpYq@3Kg^%U_mgiLp9h}`3>*p#9Et)03JzddB@PY-Mxe?FpoAD>PH5;+m6OcH>>w-BCNuWO Z3NkQAFfbfoU=RQ+XJByHoYz080RS8hII;i$ delta 226 zcmZozz|ydQWdmD7)Jg{aul#TMpYq@3Kg^%U_ls{iUnE~3pC_LKp9P-|?^)ihyo-24 zcs+R?c{O+ycqMoRcz*C4t5= zwQ{qdfG5}H8D0OFqXIct895u3bgUZYD=;LS|L3}Y(Rv0329_K3Oh5_(4zLP<8B7cc z0xXO`rUOF*1BU~HfCGb|lY)Xn0|yh60|NsS!?snck|wDXv4f0EnbaRE&;V4;!@vO2 N0|G3Y^ZF+>000#2I7t8i diff --git a/src/cli/sync.py b/src/cli/sync.py index 9d67ee4..ffbb23f 100644 --- a/src/cli/sync.py +++ b/src/cli/sync.py @@ -704,7 +704,19 @@ def run( if dashboard: from .sync_dashboard import run_dashboard_sync - asyncio.run(run_dashboard_sync()) + sync_config = { + "dry_run": dry_run, + "vdir": vdir, + "icsfile": icsfile, + "org": org, + "days_back": days_back, + "days_forward": days_forward, + "continue_iteration": continue_iteration, + "download_attachments": download_attachments, + "two_way_calendar": two_way_calendar, + "notify": notify, + } + asyncio.run(run_dashboard_sync(notify=notify, sync_config=sync_config)) elif daemon: from .sync_daemon import create_daemon_config, SyncDaemon diff --git a/src/cli/sync_dashboard.py b/src/cli/sync_dashboard.py index 7d12c80..5acd181 100644 --- a/src/cli/sync_dashboard.py +++ b/src/cli/sync_dashboard.py @@ -17,7 +17,11 @@ from textual.binding import Binding from rich.text import Text from datetime import datetime, timedelta import asyncio +import os +import sys +import time from typing import Dict, Any, Optional, List, Callable +from pathlib import Path # Default sync interval in seconds (5 minutes) DEFAULT_SYNC_INTERVAL = 300 @@ -42,6 +46,21 @@ SPINNER_FRAMES = [ # SPINNER_FRAMES = ["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"] # Braille orbit +# Sync configuration defaults +DEFAULT_SYNC_CONFIG = { + "dry_run": False, + "vdir": "~/Calendar", + "icsfile": None, + "org": "corteva", + "days_back": 1, + "days_forward": 30, + "continue_iteration": False, + "download_attachments": False, + "two_way_calendar": False, + "notify": True, +} + + class TaskStatus: """Status constants for tasks.""" @@ -135,6 +154,7 @@ class SyncDashboard(App): Binding("q", "quit", "Quit"), Binding("ctrl+c", "quit", "Quit"), Binding("s", "sync_now", "Sync Now"), + Binding("d", "daemonize", "Daemonize"), Binding("r", "refresh", "Refresh"), Binding("+", "increase_interval", "+Interval"), Binding("-", "decrease_interval", "-Interval"), @@ -162,7 +182,7 @@ class SyncDashboard(App): } .countdown-container { - height: 3; + height: 5; padding: 0 1; border-top: solid $primary; background: $surface; @@ -172,6 +192,15 @@ class SyncDashboard(App): text-align: center; } + .daemon-status { + text-align: center; + color: $text-muted; + } + + .daemon-running { + color: $success; + } + .main-panel { width: 1fr; height: 100%; @@ -231,7 +260,13 @@ class SyncDashboard(App): sync_interval: reactive[int] = reactive(DEFAULT_SYNC_INTERVAL) next_sync_time: reactive[float] = reactive(0.0) - def __init__(self, sync_interval: int = DEFAULT_SYNC_INTERVAL): + def __init__( + 
self, + sync_interval: int = DEFAULT_SYNC_INTERVAL, + notify: bool = True, + sync_config: Optional[Dict[str, Any]] = None, + demo_mode: bool = False, + ): super().__init__() self._mounted: asyncio.Event = asyncio.Event() self._task_logs: Dict[str, List[str]] = {} @@ -240,6 +275,11 @@ class SyncDashboard(App): self._countdown_task: Optional[asyncio.Task] = None self._spinner_task: Optional[asyncio.Task] = None self._initial_sync_interval = sync_interval + self._notify = notify + self._demo_mode = demo_mode + # Merge provided config with defaults + self._sync_config = {**DEFAULT_SYNC_CONFIG, **(sync_config or {})} + self._sync_config["notify"] = notify def compose(self) -> ComposeResult: """Compose the dashboard layout.""" @@ -266,6 +306,9 @@ class SyncDashboard(App): yield Static( "Next sync: --:--", id="countdown", classes="countdown-text" ) + yield Static( + "Daemon: --", id="daemon-status", classes="daemon-status" + ) # Main panel with selected task details with Vertical(classes="main-panel"): @@ -484,6 +527,49 @@ class SyncDashboard(App): f"Sync interval: {self.sync_interval // 60} min", ) + def action_daemonize(self) -> None: + """Start sync daemon in background and exit TUI.""" + from src.cli.sync_daemon import SyncDaemon, create_daemon_config + + # Check if daemon is already running + config = create_daemon_config( + sync_interval=self.sync_interval, + notify=True, # Enable notifications for daemon + ) + daemon = SyncDaemon(config) + + if daemon.is_running(): + self._log_to_task( + self.selected_task, + f"Daemon already running (PID {daemon.get_pid()})", + ) + return + + # Start daemon and exit + self._log_to_task(self.selected_task, "Starting background daemon...") + + # Fork the daemon process + try: + pid = os.fork() + if pid == 0: + # Child process - become the daemon + os.setsid() + # Second fork to prevent zombie processes + pid2 = os.fork() + if pid2 == 0: + # Grandchild - this becomes the daemon + daemon.start() + else: + os._exit(0) + else: + # Parent process - wait briefly then exit TUI + import time + + time.sleep(0.5) + self.exit(message="Daemon started. 
Sync continues in background.") + except OSError as e: + self._log_to_task(self.selected_task, f"Failed to daemonize: {e}") + def set_sync_callback(self, callback: Callable) -> None: """Set the callback to run when sync is triggered.""" self._sync_callback = callback @@ -517,8 +603,6 @@ class SyncDashboard(App): def _update_countdown_display(self) -> None: """Update the countdown display widget.""" - import time - try: countdown_widget = self.query_one("#countdown", Static) remaining = max(0, self.next_sync_time - time.time()) @@ -534,6 +618,29 @@ class SyncDashboard(App): except Exception: pass + # Update daemon status + self._update_daemon_status() + + def _update_daemon_status(self) -> None: + """Update the daemon status indicator.""" + try: + daemon_widget = self.query_one("#daemon-status", Static) + pid_file = Path(os.path.expanduser("~/.config/luk/luk.pid")) + + if pid_file.exists(): + try: + with open(pid_file, "r") as f: + pid = int(f.read().strip()) + # Check if process is running + os.kill(pid, 0) + daemon_widget.update(Text(f"Daemon: PID {pid}", style="green")) + except (ValueError, ProcessLookupError, OSError): + daemon_widget.update(Text("Daemon: stopped", style="dim")) + else: + daemon_widget.update(Text("Daemon: stopped", style="dim")) + except Exception: + pass + async def _animate_spinners(self) -> None: """Animate spinners for running tasks.""" while True: @@ -550,6 +657,63 @@ class SyncDashboard(App): await asyncio.sleep(0.08) +class DashboardProgressAdapter: + """Adapter to make dashboard tracker work with functions expecting Rich Progress.""" + + def __init__(self, tracker: "SyncProgressTracker", task_id: str): + self.tracker = tracker + self.task_id = task_id + self.console = DashboardConsoleAdapter(tracker, task_id) + self._total = 100 + self._completed = 0 + + def update(self, task_id=None, total=None, completed=None, advance=None): + """Update progress (mimics Rich Progress.update).""" + if total is not None: + self._total = total + if completed is not None: + self._completed = completed + if advance is not None: + self._completed += advance + + # Convert to percentage for dashboard + if self._total > 0: + pct = int((self._completed / self._total) * 100) + self.tracker.update_task(self.task_id, pct) + + def advance(self, task_id=None, advance: int = 1): + """Advance progress by a given amount (mimics Rich Progress.advance).""" + self._completed += advance + if self._total > 0: + pct = int((self._completed / self._total) * 100) + self.tracker.update_task(self.task_id, pct) + + def add_task(self, description: str, total: int = 100): + """Mimics Rich Progress.add_task (no-op, we use existing tasks).""" + self._total = total + return None + + +class DashboardConsoleAdapter: + """Adapter that logs console prints to dashboard task log.""" + + def __init__(self, tracker: "SyncProgressTracker", task_id: str): + self.tracker = tracker + self.task_id = task_id + + def print(self, message: str = "", **kwargs): + """Log a message to the task's activity log. + + Accepts **kwargs to handle Rich console.print() arguments like 'end', 'style', etc. 
+ """ + # Strip Rich markup for cleaner logs + import re + + clean_message = re.sub(r"\[.*?\]", "", str(message)) + if clean_message.strip(): + self.tracker.dashboard._log_to_task(self.task_id, clean_message.strip()) + + class SyncProgressTracker: """Track sync progress and update the dashboard.""" @@ -594,24 +758,38 @@ def get_progress_tracker() -> Optional[SyncProgressTracker]: return _progress_tracker -async def run_dashboard_sync(): - """Run sync with dashboard UI.""" +async def run_dashboard_sync( + notify: bool = True, + sync_config: Optional[Dict[str, Any]] = None, + demo_mode: bool = False, +): + """Run sync with dashboard UI. + + Args: + notify: Whether to send notifications for new emails + sync_config: Configuration dict for sync operations (vdir, org, etc.) + demo_mode: If True, use simulated sync instead of real operations + """ global _dashboard_instance, _progress_tracker - dashboard = SyncDashboard() + dashboard = SyncDashboard( + notify=notify, + sync_config=sync_config, + demo_mode=demo_mode, + ) tracker = SyncProgressTracker(dashboard) _dashboard_instance = dashboard _progress_tracker = tracker - async def do_sync(): - """Run the actual sync process.""" + async def do_demo_sync(): + """Run simulated sync for demo/testing.""" + import random + try: # Reset all tasks before starting dashboard.reset_all_tasks() - # Simulate sync progress for demo (replace with actual sync calls) - # Stage 1: Sync local changes to server # Archive mail @@ -630,12 +808,21 @@ async def run_dashboard_sync(): # Stage 2: Fetch from server - # Inbox sync + # Inbox sync - simulate finding new messages tracker.start_task("inbox", 100) for i in range(0, 101, 20): tracker.update_task("inbox", i, f"Fetching emails... {i}%") await asyncio.sleep(0.3) - tracker.complete_task("inbox", "150 emails processed") + + new_message_count = random.randint(0, 5) + if new_message_count > 0: + tracker.complete_task("inbox", f"{new_message_count} new emails") + if dashboard._notify: + from src.utils.notifications import notify_new_emails + + notify_new_emails(new_message_count, "") + else: + tracker.complete_task("inbox", "No new emails") # Calendar sync tracker.start_task("calendar", 100) @@ -667,13 +854,272 @@ async def run_dashboard_sync(): except Exception as e: tracker.error_task("archive", str(e)) + async def do_real_sync(): + """Run the actual sync process using real sync operations.""" + from src.utils.mail_utils.helpers import ensure_directory_exists + from src.services.microsoft_graph.auth import get_access_token + from src.services.microsoft_graph.mail import ( + archive_mail_async, + delete_mail_async, + synchronize_maildir_async, + process_outbox_async, + fetch_mail_async, + ) + from src.services.microsoft_graph.calendar import ( + fetch_calendar_events, + sync_local_calendar_changes, + ) + from src.cli.sync import ( + should_run_godspeed_sync, + should_run_sweep, + run_godspeed_sync, + run_task_sweep, + load_sync_state, + save_sync_state, + get_godspeed_sync_directory, + get_godspeed_credentials, + create_maildir_structure, + ) + from src.utils.calendar_utils import save_events_to_vdir, save_events_to_file + from src.utils.notifications import notify_new_emails + + config = dashboard._sync_config + + try: + # Reset all tasks before starting + dashboard.reset_all_tasks() + + # Setup paths + org = config.get("org", "corteva") + vdir = os.path.expanduser(config.get("vdir", "~/Calendar")) + icsfile = config.get("icsfile") + dry_run = config.get("dry_run", False) + days_back = config.get("days_back", 1) + 
days_forward = config.get("days_forward", 30) + download_attachments = config.get("download_attachments", False) + two_way_calendar = config.get("two_way_calendar", False) + + base_maildir_path = os.getenv("MAILDIR_PATH", os.path.expanduser("~/Mail")) + maildir_path = f"{base_maildir_path}/{org}" + attachments_dir = os.path.join(maildir_path, "attachments") + + # Create directory structure + ensure_directory_exists(attachments_dir) + create_maildir_structure(maildir_path) + + # Get auth token + scopes = [ + "https://graph.microsoft.com/Calendars.Read", + "https://graph.microsoft.com/Mail.ReadWrite", + ] + access_token, headers = get_access_token(scopes) + + # ===== STAGE 1: Sync local changes to server ===== + + # Archive mail + tracker.start_task("archive", 100) + tracker.update_task("archive", 10, "Checking for archived messages...") + try: + archive_progress = DashboardProgressAdapter(tracker, "archive") + await archive_mail_async( + maildir_path, headers, archive_progress, None, dry_run + ) + tracker.complete_task("archive", "Archive sync complete") + except Exception as e: + tracker.error_task("archive", str(e)) + + # Process outbox (send pending emails) + tracker.start_task("outbox", 100) + tracker.update_task("outbox", 10, "Checking outbox...") + try: + outbox_progress = DashboardProgressAdapter(tracker, "outbox") + result = await process_outbox_async( + base_maildir_path, org, headers, outbox_progress, None, dry_run + ) + sent_count, failed_count = result if result else (0, 0) + if sent_count > 0: + tracker.complete_task("outbox", f"{sent_count} emails sent") + else: + tracker.complete_task("outbox", "No pending emails") + except Exception as e: + tracker.error_task("outbox", str(e)) + + # ===== STAGE 2: Fetch from server ===== + + # Count messages before sync for notification + messages_before = 0 + new_dir = os.path.join(maildir_path, "new") + cur_dir = os.path.join(maildir_path, "cur") + if os.path.exists(new_dir): + messages_before += len([f for f in os.listdir(new_dir) if ".eml" in f]) + if os.path.exists(cur_dir): + messages_before += len([f for f in os.listdir(cur_dir) if ".eml" in f]) + + # Inbox sync + tracker.start_task("inbox", 100) + tracker.update_task("inbox", 10, "Fetching emails from server...") + try: + inbox_progress = DashboardProgressAdapter(tracker, "inbox") + await fetch_mail_async( + maildir_path, + attachments_dir, + headers, + inbox_progress, + None, + dry_run, + download_attachments, + ) + tracker.update_task("inbox", 80, "Processing messages...") + + # Count new messages + messages_after = 0 + if os.path.exists(new_dir): + messages_after += len( + [f for f in os.listdir(new_dir) if ".eml" in f] + ) + if os.path.exists(cur_dir): + messages_after += len( + [f for f in os.listdir(cur_dir) if ".eml" in f] + ) + + new_message_count = messages_after - messages_before + + if new_message_count > 0: + tracker.complete_task("inbox", f"{new_message_count} new emails") + if dashboard._notify and not dry_run: + notify_new_emails(new_message_count, org) + else: + tracker.complete_task("inbox", "No new emails") + except Exception as e: + tracker.error_task("inbox", str(e)) + + # Calendar sync + tracker.start_task("calendar", 100) + tracker.update_task("calendar", 10, "Fetching calendar events...") + try: + events, total_events = await fetch_calendar_events( + headers=headers, days_back=days_back, days_forward=days_forward + ) + tracker.update_task( + "calendar", 50, f"Processing {len(events)} events..." 
+ ) + + if not dry_run: + calendar_progress = DashboardProgressAdapter(tracker, "calendar") + org_vdir_path = os.path.join(vdir, org) if vdir else None + if vdir and org_vdir_path: + save_events_to_vdir( + events, org_vdir_path, calendar_progress, None, dry_run + ) + elif icsfile: + save_events_to_file( + events, + f"{icsfile}/events_latest.ics", + calendar_progress, + None, + dry_run, + ) + + tracker.complete_task("calendar", f"{len(events)} events synced") + except Exception as e: + tracker.error_task("calendar", str(e)) + + # ===== STAGE 3: Godspeed operations ===== + + # Godspeed sync (runs every 15 minutes) + tracker.start_task("godspeed", 100) + if should_run_godspeed_sync(): + tracker.update_task("godspeed", 10, "Syncing with Godspeed...") + try: + email, password, token = get_godspeed_credentials() + if token or (email and password): + from src.services.godspeed.client import GodspeedClient + from src.services.godspeed.sync import GodspeedSync + + sync_dir = get_godspeed_sync_directory() + client = GodspeedClient( + email=email, password=password, token=token + ) + sync_engine = GodspeedSync(client, sync_dir) + sync_engine.sync_bidirectional() + + state = load_sync_state() + state["last_godspeed_sync"] = time.time() + save_sync_state(state) + + tracker.complete_task("godspeed", "Sync completed") + else: + tracker.skip_task("godspeed", "No credentials configured") + except Exception as e: + tracker.error_task("godspeed", str(e)) + else: + tracker.skip_task("godspeed", "Not due yet (every 15 min)") + + # Task sweep (runs once daily after 6 PM) + tracker.start_task("sweep", 100) + if should_run_sweep(): + tracker.update_task("sweep", 10, "Sweeping tasks from notes...") + try: + from src.cli.godspeed import TaskSweeper + from datetime import datetime + + notes_dir_env = os.getenv("NOTES_DIR") + if notes_dir_env and Path(notes_dir_env).exists(): + godspeed_dir = get_godspeed_sync_directory() + sweeper = TaskSweeper( + Path(notes_dir_env), godspeed_dir, dry_run=dry_run + ) + result = sweeper.sweep_tasks() + + state = load_sync_state() + state["last_sweep_date"] = datetime.now().strftime("%Y-%m-%d") + save_sync_state(state) + + swept = result.get("swept_tasks", 0) + if swept > 0: + tracker.complete_task("sweep", f"{swept} tasks swept") + else: + tracker.complete_task("sweep", "No tasks to sweep") + else: + tracker.skip_task("sweep", "$NOTES_DIR not configured") + except Exception as e: + tracker.error_task("sweep", str(e)) + else: + from datetime import datetime + + current_hour = datetime.now().hour + if current_hour < 18: + tracker.skip_task("sweep", "Before 6 PM") + else: + tracker.skip_task("sweep", "Already completed today") + + # Schedule next sync + dashboard.schedule_next_sync() + + except Exception as e: + # If we fail early (e.g., auth), log to the first pending task + for task_id in [ + "archive", + "outbox", + "inbox", + "calendar", + "godspeed", + "sweep", + ]: + if task_id in dashboard._task_items: + item = dashboard._task_items[task_id] + if item.status == TaskStatus.PENDING: + tracker.error_task(task_id, str(e)) + break + + # Choose sync function based on mode + do_sync = do_demo_sync if demo_mode else do_real_sync + # Set the sync callback so 's' key triggers it dashboard.set_sync_callback(do_sync) async def sync_loop(): """Run sync on interval.""" - import time - # Wait for the dashboard to be mounted before updating widgets await dashboard._mounted.wait() diff --git a/tests/test_sync_dashboard.py b/tests/test_sync_dashboard.py index 429ba28..e76cd16 100644 --- 
a/tests/test_sync_dashboard.py +++ b/tests/test_sync_dashboard.py @@ -192,6 +192,7 @@ class TestSyncDashboard: assert "q" in binding_keys assert "r" in binding_keys assert "s" in binding_keys # Sync now + assert "d" in binding_keys # Daemonize assert "+" in binding_keys # Increase interval assert "-" in binding_keys # Decrease interval assert "ctrl+c" in binding_keys diff --git a/uv.lock b/uv.lock index b82f0ad..0b4474c 100644 --- a/uv.lock +++ b/uv.lock @@ -585,79 +585,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/71/3e/b04a0adda73bd52b390d730071c0d577073d3d26740ee1bad25c3ad0f37b/frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191", size = 12404, upload-time = "2025-04-17T22:38:51.668Z" }, ] -[[package]] -name = "gtd-terminal-tools" -version = "0.1.0" -source = { virtual = "." } -dependencies = [ - { name = "aiohttp" }, - { name = "certifi" }, - { name = "click" }, - { name = "html2text" }, - { name = "mammoth" }, - { name = "markitdown", extra = ["all"] }, - { name = "msal" }, - { name = "openai" }, - { name = "orjson" }, - { name = "pillow" }, - { name = "python-dateutil" }, - { name = "python-docx" }, - { name = "requests" }, - { name = "rich" }, - { name = "textual" }, - { name = "textual-image" }, - { name = "ticktick-py" }, -] - -[package.dev-dependencies] -dev = [ - { name = "black" }, - { name = "build" }, - { name = "mypy" }, - { name = "pre-commit" }, - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "pytest-cov" }, - { name = "ruff" }, - { name = "textual" }, - { name = "twine" }, -] - -[package.metadata] -requires-dist = [ - { name = "aiohttp", specifier = ">=3.11.18" }, - { name = "certifi", specifier = ">=2025.4.26" }, - { name = "click", specifier = ">=8.1.0" }, - { name = "html2text", specifier = ">=2025.4.15" }, - { name = "mammoth", specifier = ">=1.9.0" }, - { name = "markitdown", extras = ["all"], specifier = ">=0.1.1" }, - { name = "msal", specifier = ">=1.32.3" }, - { name = "openai", specifier = ">=1.78.1" }, - { name = "orjson", specifier = ">=3.10.18" }, - { name = "pillow", specifier = ">=11.2.1" }, - { name = "python-dateutil", specifier = ">=2.9.0.post0" }, - { name = "python-docx", specifier = ">=1.1.2" }, - { name = "requests", specifier = ">=2.31.0" }, - { name = "rich", specifier = ">=14.0.0" }, - { name = "textual", specifier = ">=3.2.0" }, - { name = "textual-image", specifier = ">=0.8.2" }, - { name = "ticktick-py", specifier = ">=2.0.0" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "black", specifier = ">=24.0.0" }, - { name = "build", specifier = ">=1.0.0" }, - { name = "mypy", specifier = ">=1.8.0" }, - { name = "pre-commit", specifier = ">=3.5.0" }, - { name = "pytest", specifier = ">=8.0.0" }, - { name = "pytest-asyncio", specifier = ">=0.24.0" }, - { name = "pytest-cov", specifier = ">=6.0.0" }, - { name = "ruff", specifier = ">=0.11.8" }, - { name = "textual", specifier = ">=3.2.0" }, - { name = "twine", specifier = ">=5.0.0" }, -] - [[package]] name = "h11" version = "0.16.0" @@ -922,6 +849,79 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" }, ] +[[package]] +name = "luk" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "aiohttp" }, + { name = "certifi" }, + { name = "click" }, + { name = "html2text" }, + { name = "mammoth" }, + { name = "markitdown", extra = ["all"] }, + { name = "msal" }, + { name = "openai" }, + { name = "orjson" }, + { name = "pillow" }, + { name = "python-dateutil" }, + { name = "python-docx" }, + { name = "requests" }, + { name = "rich" }, + { name = "textual" }, + { name = "textual-image" }, + { name = "ticktick-py" }, +] + +[package.dev-dependencies] +dev = [ + { name = "black" }, + { name = "build" }, + { name = "mypy" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "textual" }, + { name = "twine" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", specifier = ">=3.11.18" }, + { name = "certifi", specifier = ">=2025.4.26" }, + { name = "click", specifier = ">=8.1.0" }, + { name = "html2text", specifier = ">=2025.4.15" }, + { name = "mammoth", specifier = ">=1.9.0" }, + { name = "markitdown", extras = ["all"], specifier = ">=0.1.1" }, + { name = "msal", specifier = ">=1.32.3" }, + { name = "openai", specifier = ">=1.78.1" }, + { name = "orjson", specifier = ">=3.10.18" }, + { name = "pillow", specifier = ">=11.2.1" }, + { name = "python-dateutil", specifier = ">=2.9.0.post0" }, + { name = "python-docx", specifier = ">=1.1.2" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "rich", specifier = ">=14.0.0" }, + { name = "textual", specifier = ">=3.2.0" }, + { name = "textual-image", specifier = ">=0.8.2" }, + { name = "ticktick-py", specifier = ">=2.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "black", specifier = ">=24.0.0" }, + { name = "build", specifier = ">=1.0.0" }, + { name = "mypy", specifier = ">=1.8.0" }, + { name = "pre-commit", specifier = ">=3.5.0" }, + { name = "pytest", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0" }, + { name = "pytest-cov", specifier = ">=6.0.0" }, + { name = "ruff", specifier = ">=0.11.8" }, + { name = "textual", specifier = ">=3.2.0" }, + { name = "twine", specifier = ">=5.0.0" }, +] + [[package]] name = "lxml" version = "5.4.0"
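
Usage sketch (illustrative only, not applied by this patch): the core of the change is that the CLI options gathered in src/cli/sync.py are now forwarded to the dashboard as a sync_config dict, which SyncDashboard.__init__ merges over DEFAULT_SYNC_CONFIG via {**DEFAULT_SYNC_CONFIG, **(sync_config or {})}. A minimal sketch of driving the entry point directly is below; it assumes the module is importable as src.cli.sync_dashboard (the patch imports src.cli.sync_daemon that way, so this path is an assumption, not something the diff states).

    import asyncio

    from src.cli.sync_dashboard import DEFAULT_SYNC_CONFIG, run_dashboard_sync

    # Only overridden keys need to be supplied; everything else falls back to
    # DEFAULT_SYNC_CONFIG inside SyncDashboard.__init__.
    overrides = {"org": "corteva", "days_forward": 7, "dry_run": True}
    merged = {**DEFAULT_SYNC_CONFIG, **overrides}
    assert merged["days_forward"] == 7 and merged["vdir"] == "~/Calendar"

    # demo_mode=True exercises do_demo_sync() (simulated progress) instead of
    # the real Graph/Godspeed operations in do_real_sync().
    asyncio.run(run_dashboard_sync(notify=False, sync_config=overrides, demo_mode=True))

Note that run_dashboard_sync(notify=..., sync_config=...) re-applies the notify flag after the merge (self._sync_config["notify"] = notify), so the keyword argument wins over any "notify" key supplied in sync_config.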