godspeed app sync
This commit is contained in:
@@ -7,6 +7,7 @@ from .drive import drive
|
||||
from .email import email
|
||||
from .calendar import calendar
|
||||
from .ticktick import ticktick
|
||||
from .godspeed import godspeed
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -20,6 +21,9 @@ cli.add_command(drive)
|
||||
cli.add_command(email)
|
||||
cli.add_command(calendar)
|
||||
cli.add_command(ticktick)
|
||||
cli.add_command(godspeed)
|
||||
|
||||
# Add 'tt' as a short alias for ticktick
|
||||
cli.add_command(ticktick, name="tt")
|
||||
# Add 'gs' as a short alias for godspeed
|
||||
cli.add_command(godspeed, name="gs")
|
||||
|
||||
616
src/cli/godspeed.py
Normal file
616
src/cli/godspeed.py
Normal file
@@ -0,0 +1,616 @@
|
||||
"""CLI interface for Godspeed sync functionality."""
|
||||
|
||||
import click
|
||||
import getpass
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
from ..services.godspeed.client import GodspeedClient
|
||||
from ..services.godspeed.sync import GodspeedSync
|
||||
|
||||
|
||||
def get_credentials():
    """Resolve Godspeed credentials.

    Returns a ``(email, password, token)`` triple. A configured
    ``GODSPEED_TOKEN`` wins outright (email/password come back ``None``);
    otherwise missing email/password values are prompted for interactively.
    """
    token = os.getenv("GODSPEED_TOKEN")
    if token:
        # Token auth supersedes email/password entirely.
        return None, None, token

    email = os.getenv("GODSPEED_EMAIL") or click.prompt("Godspeed email")
    password = os.getenv("GODSPEED_PASSWORD") or click.prompt(
        "Godspeed password", hide_input=True
    )
    return email, password, None
|
||||
|
||||
|
||||
def get_sync_directory():
    """Return the directory used for Godspeed markdown sync files.

    Honors ``$GODSPEED_SYNC_DIR`` when set; otherwise prefers
    ``~/Documents/Godspeed`` when a Documents folder exists, falling back
    to the XDG-style data directory.
    """
    env_dir = os.getenv("GODSPEED_SYNC_DIR")
    if env_dir:
        return Path(env_dir)

    home = Path.home()

    # Prefer a user-visible location when the platform has one.
    documents = home / "Documents"
    if documents.exists():
        return documents / "Godspeed"

    # Otherwise tuck the files away in the app's data directory.
    return home / ".local" / "share" / "gtd-terminal-tools" / "godspeed"
|
||||
|
||||
|
||||
# Top-level Click command group; all subcommands below register onto it
# via the @godspeed.command() decorator.
@click.group()
def godspeed():
    """Godspeed sync tool - bidirectional sync between Godspeed API and markdown files."""
    pass
|
||||
|
||||
|
||||
@godspeed.command()
def download():
    """Download tasks from Godspeed API to local files."""
    email, password, token = get_credentials()
    sync_dir = get_sync_directory()

    try:
        api_client = GodspeedClient(email=email, password=password, token=token)
        engine = GodspeedSync(api_client, sync_dir)
        engine.download_from_api()

        click.echo(f"\nTasks downloaded to: {sync_dir}")
        click.echo(
            "You can now edit the markdown files and run 'godspeed upload' to sync changes back."
        )
    except Exception as exc:
        # Report the failure on stderr and signal it via the exit code.
        click.echo(f"Error during download: {exc}", err=True)
        sys.exit(1)
|
||||
|
||||
|
||||
@godspeed.command()
def upload():
    """Upload local markdown files to Godspeed API."""
    email, password, token = get_credentials()
    sync_dir = get_sync_directory()

    # Uploading requires a sync directory that was previously initialized.
    if not sync_dir.exists():
        click.echo(f"Sync directory does not exist: {sync_dir}", err=True)
        click.echo("Run 'godspeed download' first to initialize the sync directory.")
        sys.exit(1)

    try:
        api_client = GodspeedClient(email=email, password=password, token=token)
        engine = GodspeedSync(api_client, sync_dir)
        engine.upload_to_api()

        click.echo("Local changes uploaded successfully.")
    except Exception as exc:
        click.echo(f"Error during upload: {exc}", err=True)
        sys.exit(1)
|
||||
|
||||
|
||||
@godspeed.command()
def sync():
    """Perform bidirectional sync between local files and Godspeed API."""
    email, password, token = get_credentials()
    sync_dir = get_sync_directory()

    try:
        api_client = GodspeedClient(email=email, password=password, token=token)
        engine = GodspeedSync(api_client, sync_dir)
        engine.sync_bidirectional()

        click.echo(f"\nSync complete. Files are in: {sync_dir}")
    except Exception as exc:
        click.echo(f"Error during sync: {exc}", err=True)
        sys.exit(1)
|
||||
|
||||
|
||||
@godspeed.command()
def status():
    """Show sync status and directory information."""
    sync_dir = get_sync_directory()

    if not sync_dir.exists():
        click.echo(f"Sync directory does not exist: {sync_dir}")
        click.echo("Run 'godspeed download' or 'godspeed sync' to initialize.")
        return

    # Status is purely local, so no API client is needed (None is fine).
    engine = GodspeedSync(None, sync_dir)
    info = engine.get_sync_status()

    # Header block: one line per status field.
    for label, key in (
        ("Sync Directory", "sync_directory"),
        ("Local Files", "local_files"),
        ("Total Local Tasks", "total_local_tasks"),
        ("Tracked Tasks", "tracked_tasks"),
        ("Tracked Lists", "tracked_lists"),
    ):
        click.echo(f"{label}: {info[key]}")

    last_sync = info["last_sync"]
    if last_sync:
        click.echo(f"Last Sync: {last_sync}")
    else:
        click.echo("Last Sync: Never")

    # Per-file completion summary.
    click.echo("\nMarkdown Files:")
    for md_file in engine.list_local_files():
        entries = engine._read_list_file(md_file)
        done = sum(
            1 for _, state, _, _ in entries if state in ["complete", "cleared"]
        )
        click.echo(f"  {md_file.name}: {done}/{len(entries)} completed")
|
||||
|
||||
|
||||
@godspeed.command()
def test_connection():
    """Test connection to Godspeed API with SSL diagnostics."""
    # Local import: only this diagnostic command needs requests directly.
    # (The unused `ssl` and `socket` imports were removed.)
    import requests

    click.echo("Testing connection to Godspeed API...")

    # Check if SSL bypass is enabled first.
    disable_ssl = os.getenv("GODSPEED_DISABLE_SSL_VERIFY", "").lower() == "true"
    if disable_ssl:
        click.echo("⚠️  SSL verification is disabled (GODSPEED_DISABLE_SSL_VERIFY=true)")

    # Test basic connectivity with certificate verification on.
    ssl_error_occurred = False
    try:
        requests.get("https://api.godspeedapp.com", timeout=10)
        click.echo("✓ Basic HTTPS connection successful")
    except requests.exceptions.SSLError as e:
        # Keep going: we may retry below with verification disabled.
        ssl_error_occurred = True
        click.echo(f"✗ SSL Error: {e}")
        if not disable_ssl:
            click.echo("\n💡 Try setting: export GODSPEED_DISABLE_SSL_VERIFY=true")
    except requests.exceptions.ConnectionError as e:
        click.echo(f"✗ Connection Error: {e}")
        return
    except Exception as e:
        click.echo(f"✗ Unexpected Error: {e}")
        return

    # Retry without verification only when the user opted in AND the
    # verified request actually failed with an SSL error.
    if disable_ssl and ssl_error_occurred:
        try:
            requests.get("https://api.godspeedapp.com", verify=False, timeout=10)
            click.echo("✓ Connection successful with SSL bypass")
        except Exception as e:
            click.echo(f"✗ Connection failed even with SSL bypass: {e}")
            return

    # Test authentication if credentials are available.
    email, password, token = get_credentials()
    if token or (email and password):
        try:
            client = GodspeedClient(email=email, password=password, token=token)
            lists = client.get_lists()
            click.echo(f"✓ Authentication successful, found {len(lists)} lists")
        except Exception as e:
            click.echo(f"✗ Authentication failed: {e}")
    else:
        click.echo("ℹ️  No credentials provided for authentication test")

    click.echo("\nConnection test complete!")
|
||||
|
||||
|
||||
@godspeed.command()
def open():
    """Open the sync directory in the default file manager."""
    # NOTE: this function shadows the builtin ``open`` within this module;
    # code elsewhere in the file uses ``builtins.open`` for file I/O.
    sync_dir = get_sync_directory()

    if not sync_dir.exists():
        click.echo(f"Sync directory does not exist: {sync_dir}", err=True)
        click.echo("Run 'godspeed download' or 'godspeed sync' to initialize.")
        return

    import platform
    import subprocess

    # Pick the platform launcher; anything that's not macOS/Windows gets
    # the freedesktop opener.
    launcher = {"Darwin": "open", "Windows": "explorer"}.get(
        platform.system(), "xdg-open"
    )

    try:
        subprocess.run([launcher, str(sync_dir)])
        click.echo(f"Opened sync directory: {sync_dir}")
    except Exception as exc:
        click.echo(f"Could not open directory: {exc}", err=True)
        click.echo(f"Sync directory is: {sync_dir}")
|
||||
|
||||
|
||||
class TaskSweeper:
    """Sweeps incomplete tasks from markdown files into Godspeed Inbox.

    Walks ``notes_dir`` for markdown files, moves every incomplete
    checkbox task into ``<godspeed_dir>/Inbox.md`` (annotated with its
    source file), and rewrites the originals so they keep only non-task
    content plus completed/cleared tasks. With ``dry_run=True`` nothing
    on disk is modified.
    """

    def __init__(self, notes_dir: Path, godspeed_dir: Path, dry_run: bool = False):
        self.notes_dir = Path(notes_dir)
        self.godspeed_dir = Path(godspeed_dir)
        self.dry_run = dry_run
        self.inbox_file = self.godspeed_dir / "Inbox.md"

        # Try to use the sync engine for consistent ID generation and
        # formatting; fall back to local parsing if it cannot be built.
        try:
            self.sync_engine = GodspeedSync(None, str(godspeed_dir))
        except Exception:
            self.sync_engine = None

    def _parse_task_line_fallback(self, line: str):
        """Parse a single markdown task line without the sync engine.

        Returns ``(local_id, status, title, notes)`` or ``None`` when the
        line is not a task. A fresh 8-char ID is generated when the line
        carries no ``<!-- id:... -->`` marker.
        """
        import re

        # Match patterns like: - [ ] Task title <!-- id:abc123 -->
        task_pattern = (
            r"^\s*-\s*\[([xX\s\-])\]\s*(.+?)(?:\s*<!--\s*id:(\w+)\s*-->)?\s*$"
        )
        match = re.match(task_pattern, line.strip())

        if not match:
            return None

        checkbox, title_and_notes, local_id = match.groups()

        # Map the checkbox character to a task status.
        if checkbox.lower() == "x":
            status = "complete"
        elif checkbox == "-":
            status = "cleared"
        else:
            status = "incomplete"

        # Extract title (drop any inline HTML-comment remnants).
        title = title_and_notes.split("<!--")[0].strip()

        # Generate an ID if the line did not carry one.
        # (Duplicate top-of-method `import uuid` removed.)
        if not local_id:
            if hasattr(self, "sync_engine") and self.sync_engine:
                local_id = self.sync_engine._generate_local_id()
            else:
                import uuid

                local_id = str(uuid.uuid4())[:8]

        return local_id, status, title, ""

    def _parse_markdown_file(self, file_path: Path):
        """Parse a markdown file into (tasks, non_task_lines)."""
        if not file_path.exists():
            return [], []

        tasks = []
        non_task_lines = []

        try:
            # builtins.open because this module's `open` command shadows
            # the builtin at module level.
            import builtins

            with builtins.open(str(file_path), "r", encoding="utf-8") as f:
                lines = f.readlines()
        except Exception as e:
            click.echo(f"  ⚠️  Error reading {file_path}: {e}")
            return [], []

        for line in lines:
            line = line.rstrip()

            # Check if this line looks like a task.
            if line.strip().startswith("- ["):
                parsed = self._parse_task_line_fallback(line)
                if parsed:
                    tasks.append(parsed)
                    continue

            # Not a task, keep as regular content.
            non_task_lines.append(line)

        return tasks, non_task_lines

    def _write_tasks_to_file(self, file_path: Path, tasks):
        """Append formatted task lines to *file_path*, creating it if needed."""
        if not tasks:
            return

        file_path.parent.mkdir(parents=True, exist_ok=True)

        import builtins

        # Read existing content if the file exists so we append, not clobber.
        existing_content = ""
        if file_path.exists():
            with builtins.open(str(file_path), "r", encoding="utf-8") as f:
                existing_content = f.read()

        # Format new tasks, preferring the sync engine's canonical format.
        new_task_lines = []
        for local_id, status, title, notes in tasks:
            if self.sync_engine:
                formatted = self.sync_engine._format_task_line(
                    local_id, status, title, notes
                )
            else:
                checkbox = {"incomplete": "[ ]", "complete": "[x]", "cleared": "[-]"}[
                    status
                ]
                formatted = f"- {checkbox} {title} <!-- id:{local_id} -->"
                if notes:
                    formatted += f"\n  {notes}"

            new_task_lines.append(formatted)

        # Combine with existing content, separated by a blank line.
        if existing_content.strip():
            new_content = (
                existing_content.rstrip() + "\n\n" + "\n".join(new_task_lines) + "\n"
            )
        else:
            new_content = "\n".join(new_task_lines) + "\n"

        with builtins.open(str(file_path), "w", encoding="utf-8") as f:
            f.write(new_content)

    def _clean_file(self, file_path: Path, non_task_lines):
        """Remove tasks from the original file, keeping only non-task content."""
        import builtins

        if not non_task_lines or all(not line.strip() for line in non_task_lines):
            # Nothing but whitespace would remain: remove the file.
            # BUGFIX: the message now matches what actually happened —
            # previously "Would delete" was printed even after a real delete.
            if not self.dry_run:
                file_path.unlink()
                click.echo(f"  🗑️  Deleted empty file: {file_path}")
            else:
                click.echo(f"  🗑️  Would delete empty file: {file_path}")
        else:
            # Write back the non-task content only.
            cleaned_content = "\n".join(non_task_lines).strip()
            if cleaned_content:
                cleaned_content += "\n"

            if not self.dry_run:
                with builtins.open(str(file_path), "w", encoding="utf-8") as f:
                    f.write(cleaned_content)
                click.echo(f"  ✂️  Cleaned file (removed tasks): {file_path}")
            else:
                click.echo(f"  ✂️  Would clean file (remove tasks): {file_path}")

    def find_markdown_files(self):
        """Find all markdown files under notes_dir, excluding the Godspeed dir."""
        markdown_files = []

        for md_file in self.notes_dir.rglob("*.md"):
            # Skip files in the Godspeed directory (we must not sweep the Inbox).
            if (
                self.godspeed_dir in md_file.parents
                or md_file.parent == self.godspeed_dir
            ):
                continue

            # Skip hidden files and directories.
            if any(part.startswith(".") for part in md_file.parts):
                continue

            markdown_files.append(md_file)

        return sorted(markdown_files)

    def sweep_tasks(self):
        """Sweep incomplete tasks from all markdown files into the Inbox.

        Returns a summary dict: swept task count, processed file names and
        the inbox path.
        """
        click.echo(f"🧹 Sweeping incomplete tasks from: {self.notes_dir}")
        click.echo(f"📥 Target Inbox: {self.inbox_file}")
        click.echo(f"🔍 Dry run: {self.dry_run}")
        click.echo("=" * 60)

        markdown_files = self.find_markdown_files()
        click.echo(f"\n📁 Found {len(markdown_files)} markdown files to process")

        swept_tasks = []
        processed_files = []

        for file_path in markdown_files:
            try:
                rel_path = file_path.relative_to(self.notes_dir)
                rel_path_str = str(rel_path)
            except Exception as e:
                click.echo(f"Error getting relative path for {file_path}: {e}")
                rel_path_str = str(file_path.name)

            click.echo(f"\n📄 Processing: {rel_path_str}")

            tasks, non_task_lines = self._parse_markdown_file(file_path)

            # (BUGFIX: a second, identical `if not tasks` block was removed.)
            if not tasks:
                click.echo(f"  ℹ️  No tasks found")
                continue

            # Separate incomplete tasks from completed/cleared ones.
            incomplete_tasks = []
            complete_tasks = []
            for task in tasks:
                local_id, status, title, notes = task
                if status == "incomplete":
                    incomplete_tasks.append(task)
                else:
                    complete_tasks.append(task)

            if incomplete_tasks:
                click.echo(f"  🔄 Found {len(incomplete_tasks)} incomplete tasks:")
                for _, status, title, notes in incomplete_tasks:
                    click.echo(f"    • {title}")
                    if notes:
                        click.echo(f"      Notes: {notes}")

                # Re-ID swept tasks and annotate them with their source file.
                annotated_tasks = []
                for local_id, status, title, notes in incomplete_tasks:
                    # Fresh ID avoids conflicts with tasks already in the Inbox.
                    if self.sync_engine:
                        fresh_id = self.sync_engine._generate_local_id()
                    else:
                        import uuid

                        fresh_id = str(uuid.uuid4())[:8]

                    source_notes = f"From: {rel_path_str}"
                    if notes:
                        combined_notes = f"{notes}\n{source_notes}"
                    else:
                        combined_notes = source_notes
                    annotated_tasks.append((fresh_id, status, title, combined_notes))

                swept_tasks.extend(annotated_tasks)
                # BUGFIX: was `str(rel_path)`, which raised NameError when
                # relative_to() failed above; rel_path_str is always bound.
                processed_files.append(rel_path_str)

            if complete_tasks:
                click.echo(
                    f"  ✅ Keeping {len(complete_tasks)} completed/cleared tasks in place"
                )

            # Reconstruct remaining content (non-tasks + completed tasks).
            remaining_content = non_task_lines.copy()

            if complete_tasks:
                remaining_content.append("")  # Empty line before tasks
                for task in complete_tasks:
                    if self.sync_engine:
                        formatted = self.sync_engine._format_task_line(*task)
                    else:
                        local_id, status, title, notes = task
                        checkbox = {
                            "incomplete": "[ ]",
                            "complete": "[x]",
                            "cleared": "[-]",
                        }[status]
                        formatted = f"- {checkbox} {title} <!-- id:{local_id} -->"
                        if notes:
                            formatted += f"\n  {notes}"
                    remaining_content.append(formatted)

            # Only rewrite the file when something was actually swept out.
            if incomplete_tasks:
                self._clean_file(file_path, remaining_content)

        # Write swept tasks to the Inbox.
        if swept_tasks:
            click.echo(f"\n📥 Writing {len(swept_tasks)} tasks to Inbox...")
            if not self.dry_run:
                self._write_tasks_to_file(self.inbox_file, swept_tasks)
                click.echo(f"  ✅ Inbox updated: {self.inbox_file}")

        # Summary
        click.echo(f"\n" + "=" * 60)
        click.echo(f"📊 SWEEP SUMMARY:")
        click.echo(f"  • Files processed: {len(processed_files)}")
        click.echo(f"  • Tasks swept: {len(swept_tasks)}")
        click.echo(f"  • Target: {self.inbox_file}")

        if self.dry_run:
            click.echo(f"\n⚠️  DRY RUN - No files were actually modified")
            click.echo(f"   Run without --dry-run to perform the sweep")

        return {
            "swept_tasks": len(swept_tasks),
            "processed_files": processed_files,
            "inbox_file": str(self.inbox_file),
        }
|
||||
|
||||
|
||||
@godspeed.command()
@click.argument(
    "notes_dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=False,
)
@click.argument(
    "godspeed_dir",
    type=click.Path(file_okay=False, dir_okay=True, path_type=Path),
    required=False,
)
@click.option(
    "--dry-run", is_flag=True, help="Show what would be done without making changes"
)
def sweep(notes_dir, godspeed_dir, dry_run):
    """Sweep incomplete tasks from markdown files into Godspeed Inbox.

    NOTES_DIR: Directory containing markdown files with tasks to sweep (optional, defaults to $NOTES_DIR)
    GODSPEED_DIR: Godspeed sync directory (optional, defaults to sync directory)
    """
    # Handle notes_dir default from environment.  Click only validates the
    # path when it is given on the command line, so the env-supplied value
    # is validated by hand below.
    if notes_dir is None:
        notes_dir_env = os.getenv("NOTES_DIR")
        if not notes_dir_env:
            click.echo(
                "❌ No notes directory specified and $NOTES_DIR environment variable not set",
                err=True,
            )
            click.echo("Usage: godspeed sweep <notes_dir> [godspeed_dir]", err=True)
            click.echo(
                "   or: export NOTES_DIR=/path/to/notes && godspeed sweep", err=True
            )
            sys.exit(1)
        notes_dir = Path(notes_dir_env)
        # Manual validation of the env-derived path (mirrors click.Path checks).
        if not notes_dir.exists():
            click.echo(
                f"❌ Notes directory from $NOTES_DIR does not exist: {notes_dir}",
                err=True,
            )
            sys.exit(1)
        if not notes_dir.is_dir():
            click.echo(
                f"❌ Notes path from $NOTES_DIR is not a directory: {notes_dir}",
                err=True,
            )
            sys.exit(1)

    if godspeed_dir is None:
        godspeed_dir = get_sync_directory()

    # Ensure we have Path objects regardless of how the values arrived.
    notes_dir = Path(notes_dir)
    godspeed_dir = Path(godspeed_dir)

    try:
        sweeper = TaskSweeper(notes_dir, godspeed_dir, dry_run)
        result = sweeper.sweep_tasks()

        if result["swept_tasks"] > 0:
            click.echo(f"\n🎉 Successfully swept {result['swept_tasks']} tasks!")
            if not dry_run:
                click.echo(f"💡 Next steps:")
                click.echo(f"  1. Review tasks in: {result['inbox_file']}")
                click.echo(f"  2. Run 'godspeed upload' to sync to API")
                click.echo(
                    f"  3. Organize tasks into appropriate lists in Godspeed app"
                )
        else:
            click.echo(f"\n✨ No incomplete tasks found to sweep.")

    except Exception as e:
        # Print the traceback for diagnosis before exiting non-zero.
        click.echo(f"❌ Error during sweep: {e}", err=True)
        import traceback

        traceback.print_exc()
        sys.exit(1)
|
||||
|
||||
|
||||
# Allow running this module directly as a standalone CLI entry point.
if __name__ == "__main__":
    godspeed()
|
||||
298
src/cli/sync.py
298
src/cli/sync.py
@@ -1,8 +1,11 @@
|
||||
import click
|
||||
import asyncio
|
||||
import os
|
||||
from rich.progress import Progress, SpinnerColumn, MofNCompleteColumn
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from rich.progress import Progress, SpinnerColumn, MofNCompleteColumn
|
||||
|
||||
from src.utils.mail_utils.helpers import ensure_directory_exists
|
||||
from src.utils.calendar_utils import save_events_to_vdir, save_events_to_file
|
||||
@@ -21,6 +24,180 @@ from src.services.microsoft_graph.mail import (
|
||||
process_outbox_async,
|
||||
)
|
||||
from src.services.microsoft_graph.auth import get_access_token
|
||||
from src.services.godspeed.client import GodspeedClient
|
||||
from src.services.godspeed.sync import GodspeedSync
|
||||
|
||||
|
||||
# Timing state management
|
||||
def get_sync_state_file():
    """Return the absolute path of the JSON file holding sync timing state."""
    state_path = "~/.local/share/gtd-terminal-tools/sync_state.json"
    return os.path.expanduser(state_path)
|
||||
|
||||
|
||||
def load_sync_state():
    """Load persisted sync state, returning defaults when unavailable.

    Missing, unreadable or corrupt state files all yield the default
    state so callers never have to handle errors.
    """
    defaults = {
        "last_godspeed_sync": 0,
        "last_sweep_date": None,
        "sweep_completed_today": False,
    }

    state_file = get_sync_state_file()
    if not os.path.exists(state_file):
        return defaults

    try:
        with open(state_file, "r") as f:
            return json.load(f)
    except Exception:
        # Corrupt state is treated exactly like missing state.
        return defaults
|
||||
|
||||
|
||||
def save_sync_state(state):
    """Persist *state* as pretty-printed JSON, creating parent dirs as needed."""
    path = get_sync_state_file()
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "w") as f:
        json.dump(state, f, indent=2)
|
||||
|
||||
|
||||
def should_run_godspeed_sync():
    """Return True when at least 15 minutes have passed since the last sync."""
    last_sync = load_sync_state().get("last_godspeed_sync", 0)
    elapsed = time.time() - last_sync
    return elapsed >= 900  # 15 minutes in seconds
|
||||
|
||||
|
||||
def should_run_sweep():
    """Return True when it is 6 PM or later and no sweep has run today."""
    now = datetime.now()

    # Sweeps only happen in the evening.
    if now.hour < 18:
        return False

    # One sweep per calendar day.
    today = now.strftime("%Y-%m-%d")
    return load_sync_state().get("last_sweep_date") != today
|
||||
|
||||
|
||||
def get_godspeed_sync_directory():
    """Return the Godspeed sync directory.

    Honors ``$GODSPEED_SYNC_DIR``; otherwise prefers ``~/Documents/Godspeed``
    when a Documents folder exists, falling back to the app data directory.
    (Mirrors ``get_sync_directory`` in src/cli/godspeed.py.)
    """
    env_dir = os.getenv("GODSPEED_SYNC_DIR")
    if env_dir:
        return Path(env_dir)

    home = Path.home()

    # Prefer the user-visible Documents location when available.
    documents = home / "Documents"
    if documents.exists():
        return documents / "Godspeed"

    return home / ".local" / "share" / "gtd-terminal-tools" / "godspeed"
|
||||
|
||||
|
||||
def get_godspeed_credentials():
    """Read Godspeed credentials from the environment (no prompting).

    Returns ``(email, password, token)``; each slot is ``None`` when its
    environment variable is unset.
    """
    return (
        os.getenv("GODSPEED_EMAIL"),
        os.getenv("GODSPEED_PASSWORD"),
        os.getenv("GODSPEED_TOKEN"),
    )
|
||||
|
||||
|
||||
async def run_godspeed_sync(progress=None):
    """Run Godspeed bidirectional sync.

    Returns True on success, False when skipped (no credentials) or on
    failure. Status lines are printed through *progress* when provided;
    without it the function runs silently, including on failure.
    """
    try:
        email, password, token = get_godspeed_credentials()
        # A token alone, or an email+password pair, is sufficient.
        if not (token or (email and password)):
            if progress:
                progress.console.print(
                    "[yellow]⚠️ Skipping Godspeed sync: No credentials configured[/yellow]"
                )
            return False

        sync_dir = get_godspeed_sync_directory()

        if progress:
            progress.console.print(
                f"[cyan]🔄 Running Godspeed sync to {sync_dir}...[/cyan]"
            )

        client = GodspeedClient(email=email, password=password, token=token)
        sync_engine = GodspeedSync(client, sync_dir)
        # NOTE(review): this call appears synchronous, so it blocks the
        # event loop for the duration of the sync — confirm acceptable.
        sync_engine.sync_bidirectional()

        # Record the sync time so the 15-minute throttle works.
        state = load_sync_state()
        state["last_godspeed_sync"] = time.time()
        save_sync_state(state)

        if progress:
            progress.console.print("[green]✅ Godspeed sync completed[/green]")
        return True

    except Exception as e:
        # Best-effort: failures are reported (when a console is available)
        # but never raised to the caller.
        if progress:
            progress.console.print(f"[red]❌ Godspeed sync failed: {e}[/red]")
        return False
|
||||
|
||||
|
||||
async def run_task_sweep(progress=None):
    """Run task sweep from notes directory to Godspeed inbox.

    Returns True when the sweep ran (even if nothing was swept), False
    when skipped ($NOTES_DIR unset or missing) or on failure. Status is
    printed through *progress* when provided.
    """
    try:
        # Imported lazily to avoid a circular import with src.cli.godspeed.
        from src.cli.godspeed import TaskSweeper

        notes_dir_env = os.getenv("NOTES_DIR")
        if not notes_dir_env:
            if progress:
                progress.console.print(
                    "[yellow]⚠️ Skipping task sweep: $NOTES_DIR not configured[/yellow]"
                )
            return False

        notes_dir = Path(notes_dir_env)
        if not notes_dir.exists():
            if progress:
                progress.console.print(
                    f"[yellow]⚠️ Skipping task sweep: Notes directory does not exist: {notes_dir}[/yellow]"
                )
            return False

        godspeed_dir = get_godspeed_sync_directory()

        if progress:
            progress.console.print(
                f"[cyan]🧹 Running task sweep from {notes_dir} to {godspeed_dir}...[/cyan]"
            )

        sweeper = TaskSweeper(notes_dir, godspeed_dir, dry_run=False)
        result = sweeper.sweep_tasks()

        # Record today's date so the once-per-day throttle works.
        state = load_sync_state()
        state["last_sweep_date"] = datetime.now().strftime("%Y-%m-%d")
        save_sync_state(state)

        if result["swept_tasks"] > 0:
            if progress:
                progress.console.print(
                    f"[green]✅ Task sweep completed: {result['swept_tasks']} tasks swept[/green]"
                )
        else:
            if progress:
                progress.console.print(
                    "[green]✅ Task sweep completed: No tasks to sweep[/green]"
                )
        return True

    except Exception as e:
        # Best-effort: failures are reported (when possible) but never raised.
        if progress:
            progress.console.print(f"[red]❌ Task sweep failed: {e}[/red]")
        return False
|
||||
|
||||
|
||||
# Function to create Maildir structure
|
||||
@@ -362,6 +539,36 @@ async def _sync_outlook_data(
|
||||
notify_new_emails(new_message_count, org)
|
||||
|
||||
progress.console.print("[bold green]Step 2: New data fetched.[/bold green]")
|
||||
|
||||
# Stage 3: Run Godspeed operations based on timing
|
||||
progress.console.print(
|
||||
"\n[bold cyan]Step 3: Running Godspeed operations...[/bold cyan]"
|
||||
)
|
||||
|
||||
# Check if Godspeed sync should run (every 15 minutes)
|
||||
if should_run_godspeed_sync():
|
||||
await run_godspeed_sync(progress)
|
||||
else:
|
||||
progress.console.print("[dim]⏭️ Skipping Godspeed sync (not due yet)[/dim]")
|
||||
|
||||
# Check if task sweep should run (once after 6pm daily)
|
||||
if should_run_sweep():
|
||||
await run_task_sweep(progress)
|
||||
else:
|
||||
current_hour = datetime.now().hour
|
||||
if current_hour < 18:
|
||||
progress.console.print(
|
||||
"[dim]⏭️ Skipping task sweep (before 6 PM)[/dim]"
|
||||
)
|
||||
else:
|
||||
progress.console.print(
|
||||
"[dim]⏭️ Skipping task sweep (already completed today)[/dim]"
|
||||
)
|
||||
|
||||
progress.console.print(
|
||||
"[bold green]Step 3: Godspeed operations completed.[/bold green]"
|
||||
)
|
||||
|
||||
click.echo("Sync complete.")
|
||||
|
||||
|
||||
@@ -656,59 +863,43 @@ async def daemon_mode(
|
||||
pending_email_count = len(pending_emails)
|
||||
outbox_changes = pending_email_count > 0
|
||||
|
||||
# Check Godspeed operations
|
||||
godspeed_sync_due = should_run_godspeed_sync()
|
||||
sweep_due = should_run_sweep()
|
||||
|
||||
# Determine what changed and show appropriate status
|
||||
if mail_changes and calendar_changes and outbox_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Changes detected! Mail: Remote {remote_message_count}, Local {local_message_count} | Calendar: {calendar_change_desc} | Outbox: {pending_email_count} pending. Starting sync...",
|
||||
"yellow",
|
||||
changes_detected = (
|
||||
mail_changes
|
||||
or calendar_changes
|
||||
or outbox_changes
|
||||
or godspeed_sync_due
|
||||
or sweep_due
|
||||
)
|
||||
|
||||
if changes_detected:
|
||||
change_parts = []
|
||||
if mail_changes:
|
||||
change_parts.append(
|
||||
f"Mail: Remote {remote_message_count}, Local {local_message_count}"
|
||||
)
|
||||
)
|
||||
elif mail_changes and calendar_changes:
|
||||
if calendar_changes:
|
||||
change_parts.append(f"Calendar: {calendar_change_desc}")
|
||||
if outbox_changes:
|
||||
change_parts.append(f"Outbox: {pending_email_count} pending")
|
||||
if godspeed_sync_due:
|
||||
change_parts.append("Godspeed sync due")
|
||||
if sweep_due:
|
||||
change_parts.append("Task sweep due")
|
||||
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Changes detected! Mail: Remote {remote_message_count}, Local {local_message_count} | Calendar: {calendar_change_desc}. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
elif mail_changes and outbox_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Changes detected! Mail: Remote {remote_message_count}, Local {local_message_count} | Outbox: {pending_email_count} pending. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
elif calendar_changes and outbox_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Changes detected! Calendar: {calendar_change_desc} | Outbox: {pending_email_count} pending. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
elif mail_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"New messages detected! Remote: {remote_message_count}, Local: {local_message_count}. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
elif calendar_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Calendar changes detected! {calendar_change_desc}. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
elif outbox_changes:
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"Outbound emails detected! {pending_email_count} emails pending. Starting sync...",
|
||||
f"Changes detected! {' | '.join(change_parts)}. Starting sync...",
|
||||
"yellow",
|
||||
)
|
||||
)
|
||||
|
||||
# Sync if any changes detected
|
||||
if mail_changes or calendar_changes or outbox_changes:
|
||||
if changes_detected:
|
||||
await _sync_outlook_data(
|
||||
dry_run,
|
||||
vdir,
|
||||
@@ -732,6 +923,23 @@ async def daemon_mode(
|
||||
|
||||
status_parts.append(f"Outbox: {pending_email_count} pending")
|
||||
|
||||
# Add Godspeed status
|
||||
state = load_sync_state()
|
||||
last_godspeed = state.get("last_godspeed_sync", 0)
|
||||
minutes_since_godspeed = int((time.time() - last_godspeed) / 60)
|
||||
status_parts.append(f"Godspeed: {minutes_since_godspeed}m ago")
|
||||
|
||||
last_sweep = state.get("last_sweep_date")
|
||||
if last_sweep == datetime.now().strftime("%Y-%m-%d"):
|
||||
status_parts.append("Sweep: done today")
|
||||
else:
|
||||
current_hour = datetime.now().hour
|
||||
if current_hour >= 18:
|
||||
status_parts.append("Sweep: due")
|
||||
else:
|
||||
hours_until_sweep = 18 - current_hour
|
||||
status_parts.append(f"Sweep: in {hours_until_sweep}h")
|
||||
|
||||
console.print(
|
||||
create_status_display(
|
||||
f"No changes detected ({', '.join(status_parts)})",
|
||||
|
||||
0
src/services/godspeed/__init__.py
Normal file
0
src/services/godspeed/__init__.py
Normal file
129
src/services/godspeed/client.py
Normal file
129
src/services/godspeed/client.py
Normal file
@@ -0,0 +1,129 @@
|
||||
"""Godspeed API client for task and list management."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Any
|
||||
from datetime import datetime
|
||||
import urllib3
|
||||
|
||||
|
||||
class GodspeedClient:
    """Client for interacting with the Godspeed API.

    Authentication is either token-based (pass ``token`` directly, which is
    installed as a Bearer header) or email/password-based, in which case the
    client signs in immediately during construction via ``_authenticate``.
    """

    BASE_URL = "https://api.godspeedapp.com"

    def __init__(self, email: str = None, password: str = None, token: str = None):
        """Create a client and set up the HTTP session.

        Args:
            email: Account email, used together with ``password`` to sign in.
            password: Account password.
            token: Pre-obtained bearer token; when given, sign-in is skipped.
        """
        self.email = email
        self.password = password
        self.token = token
        self.session = requests.Session()

        # Handle SSL verification bypass for corporate networks
        # (opt-in via GODSPEED_DISABLE_SSL_VERIFY=true).
        disable_ssl = os.getenv("GODSPEED_DISABLE_SSL_VERIFY", "").lower() == "true"
        if disable_ssl:
            self.session.verify = False
            # Suppress only the specific warning about unverified HTTPS requests
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
            print("⚠️ SSL verification disabled for Godspeed API")

        if token:
            self.session.headers.update({"Authorization": f"Bearer {token}"})
        elif email and password:
            self._authenticate()

    def _authenticate(self) -> str:
        """Authenticate with email/password and install the bearer token.

        Returns:
            The access token string.

        Raises:
            ValueError: If email or password is missing.
            RuntimeError: If the API reports an unsuccessful sign-in or the
                response contains no token.
            requests.HTTPError: On a non-2xx HTTP response.
        """
        if not self.email or not self.password:
            raise ValueError("Email and password required for authentication")

        response = self.session.post(
            f"{self.BASE_URL}/sessions/sign_in",
            json={"email": self.email, "password": self.password},
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()

        data = response.json()
        # Specific exception type instead of bare Exception; still caught by
        # any caller doing `except Exception`.
        if not data.get("success"):
            raise RuntimeError("Authentication failed")

        # Guard against a success response that lacks a token, which would
        # previously surface as an opaque KeyError.
        token = data.get("token")
        if not token:
            raise RuntimeError("Authentication response did not include a token")

        self.token = token
        self.session.headers.update({"Authorization": f"Bearer {self.token}"})
        return self.token

    def get_lists(self) -> List[Dict[str, Any]]:
        """Get all lists.

        Raises:
            requests.HTTPError: On a non-2xx HTTP response.
        """
        response = self.session.get(f"{self.BASE_URL}/lists")
        response.raise_for_status()
        return response.json()

    def get_tasks(self, list_id: str = None, status: str = None) -> Dict[str, Any]:
        """Get tasks, optionally filtered by list and/or status.

        Args:
            list_id: Restrict results to a single list.
            status: Restrict results by status (e.g. ``"incomplete"``).
        """
        params = {}
        if list_id:
            params["list_id"] = list_id
        if status:
            params["status"] = status

        response = self.session.get(f"{self.BASE_URL}/tasks", params=params)
        response.raise_for_status()
        return response.json()

    def get_task(self, task_id: str) -> Dict[str, Any]:
        """Get a single task by ID."""
        response = self.session.get(f"{self.BASE_URL}/tasks/{task_id}")
        response.raise_for_status()
        return response.json()

    def create_task(
        self,
        title: str,
        list_id: str = None,
        notes: str = None,
        location: str = "end",
        **kwargs,
    ) -> Dict[str, Any]:
        """Create a new task.

        Args:
            title: Task title (required by the API).
            list_id: Target list; omitted tasks go to the account default.
            notes: Optional free-text notes.
            location: Where in the list to insert ("end" by default).
            **kwargs: Any additional API fields, passed through verbatim.
        """
        data = {"title": title, "location": location}

        if list_id:
            data["list_id"] = list_id
        if notes:
            data["notes"] = notes

        # Add any additional kwargs
        data.update(kwargs)

        response = self.session.post(
            f"{self.BASE_URL}/tasks",
            json=data,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        return response.json()

    def update_task(self, task_id: str, **kwargs) -> Dict[str, Any]:
        """Update an existing task with the given API fields."""
        response = self.session.patch(
            f"{self.BASE_URL}/tasks/{task_id}",
            json=kwargs,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        return response.json()

    def delete_task(self, task_id: str) -> None:
        """Delete a task."""
        response = self.session.delete(f"{self.BASE_URL}/tasks/{task_id}")
        response.raise_for_status()

    def complete_task(self, task_id: str) -> Dict[str, Any]:
        """Mark a task as complete."""
        return self.update_task(task_id, is_complete=True)

    def incomplete_task(self, task_id: str) -> Dict[str, Any]:
        """Mark a task as incomplete."""
        return self.update_task(task_id, is_complete=False)
|
||||
87
src/services/godspeed/config.py
Normal file
87
src/services/godspeed/config.py
Normal file
@@ -0,0 +1,87 @@
|
||||
"""Configuration and credential management for Godspeed sync."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
|
||||
class GodspeedConfig:
    """Manages configuration and credentials for Godspeed sync.

    Settings are persisted as JSON in ``<config_dir>/godspeed_config.json``.
    Environment variables (``GODSPEED_EMAIL``, ``GODSPEED_TOKEN``,
    ``GODSPEED_SYNC_DIR``) always take precedence over stored values.
    """

    def __init__(self, config_dir: Optional[Path] = None):
        """Initialize and load any existing config from disk.

        Args:
            config_dir: Directory holding the config file; defaults to
                ``~/.local/share/gtd-terminal-tools``.
        """
        if config_dir is None:
            config_dir = Path.home() / ".local" / "share" / "gtd-terminal-tools"

        self.config_dir = Path(config_dir)
        self.config_file = self.config_dir / "godspeed_config.json"
        self.config = self._load_config()

    def _load_config(self) -> Dict[str, Any]:
        """Load configuration from file, tolerating a missing/corrupt file."""
        if self.config_file.exists():
            try:
                with open(self.config_file, "r") as f:
                    return json.load(f)
            except (json.JSONDecodeError, OSError):
                # A corrupt or unreadable config file should not crash the
                # CLI; fall back to empty settings (the file is rewritten on
                # the next save).
                return {}
        return {}

    def _save_config(self) -> None:
        """Save configuration to file, creating the directory if needed."""
        self.config_dir.mkdir(parents=True, exist_ok=True)
        with open(self.config_file, "w") as f:
            json.dump(self.config, f, indent=2)

    def get_email(self) -> Optional[str]:
        """Get email from environment (preferred) or stored config."""
        return os.getenv("GODSPEED_EMAIL") or self.config.get("email")

    def set_email(self, email: str) -> None:
        """Store email in config and persist immediately."""
        self.config["email"] = email
        self._save_config()

    def get_token(self) -> Optional[str]:
        """Get token from environment (preferred) or stored config."""
        return os.getenv("GODSPEED_TOKEN") or self.config.get("token")

    def set_token(self, token: str) -> None:
        """Store token in config and persist immediately."""
        self.config["token"] = token
        self._save_config()

    def get_sync_directory(self) -> Path:
        """Get sync directory from environment, config, or a default.

        Resolution order: ``GODSPEED_SYNC_DIR`` env var, stored config value,
        then ``~/Documents/Godspeed`` if ``~/Documents`` exists, otherwise
        ``~/.local/share/gtd-terminal-tools/godspeed``.
        """
        sync_dir = os.getenv("GODSPEED_SYNC_DIR") or self.config.get("sync_directory")

        if sync_dir:
            return Path(sync_dir)

        # Default to ~/Documents/Godspeed or ~/.local/share/gtd-terminal-tools/godspeed
        home = Path.home()

        # Try Documents first
        docs_dir = home / "Documents" / "Godspeed"
        if docs_dir.parent.exists():
            return docs_dir

        # Fall back to data directory
        return home / ".local" / "share" / "gtd-terminal-tools" / "godspeed"

    def set_sync_directory(self, sync_dir: Path) -> None:
        """Store sync directory in config and persist immediately."""
        self.config["sync_directory"] = str(sync_dir)
        self._save_config()

    def clear_credentials(self) -> None:
        """Remove stored email and token (environment values are untouched)."""
        self.config.pop("email", None)
        self.config.pop("token", None)
        self._save_config()

    def get_all_settings(self) -> Dict[str, Any]:
        """Get a summary of the effective settings (token itself is omitted)."""
        return {
            "email": self.get_email(),
            "has_token": bool(self.get_token()),
            "sync_directory": str(self.get_sync_directory()),
            "config_file": str(self.config_file),
        }
|
||||
395
src/services/godspeed/sync.py
Normal file
395
src/services/godspeed/sync.py
Normal file
@@ -0,0 +1,395 @@
|
||||
"""Two-way synchronization engine for Godspeed API and local markdown files."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
from datetime import datetime
|
||||
|
||||
from .client import GodspeedClient
|
||||
|
||||
|
||||
class GodspeedSync:
    """Handles bidirectional sync between Godspeed API and local markdown files.

    Each Godspeed list maps to one ``<list name>.md`` file inside ``sync_dir``;
    tasks are markdown checkboxes carrying an ``<!-- id:... -->`` marker.
    A hidden ``.godspeed_metadata.json`` file tracks local-id -> remote-id and
    list-name -> list-id mappings plus the last sync timestamp.
    """

    def __init__(self, client: GodspeedClient, sync_dir: Path):
        # API client used for all remote reads/writes.
        self.client = client
        # Directory holding one markdown file per list.
        self.sync_dir = Path(sync_dir)
        # Hidden metadata file (starts with "." so upload_to_api skips it).
        self.metadata_file = self.sync_dir / ".godspeed_metadata.json"
        self.metadata = self._load_metadata()

    def _load_metadata(self) -> Dict:
        """Load sync metadata from local file.

        Returns the parsed JSON, or a fresh empty structure when no metadata
        file exists yet.
        """
        if self.metadata_file.exists():
            with open(self.metadata_file, "r") as f:
                return json.load(f)
        return {
            "task_mapping": {},  # local_id -> godspeed_id
            "list_mapping": {},  # list_name -> list_id
            "last_sync": None,
        }

    def _save_metadata(self) -> None:
        """Save sync metadata to local file (creating sync_dir if needed)."""
        self.sync_dir.mkdir(parents=True, exist_ok=True)
        with open(self.metadata_file, "w") as f:
            json.dump(self.metadata, f, indent=2)

    def _sanitize_filename(self, name: str) -> str:
        """Convert list name to safe filename."""
        # Replace special characters with underscores
        sanitized = re.sub(r'[<>:"/\\|?*]', "_", name)
        # Remove multiple underscores
        sanitized = re.sub(r"_+", "_", sanitized)
        # Strip leading/trailing underscores and spaces
        return sanitized.strip("_ ")

    def _generate_local_id(self) -> str:
        """Generate a unique local ID for tracking.

        Uses the first 8 hex chars of a UUID4; collisions are possible in
        principle but unlikely at typical task counts.
        """
        import uuid

        return str(uuid.uuid4())[:8]

    def _parse_task_line(self, line: str) -> Optional[Tuple[str, str, str, str]]:
        """Parse a markdown task line and extract components.

        Returns: (local_id, status, title, notes) or None if invalid
        status can be: 'incomplete', 'complete', or 'cleared'

        NOTE(review): when the line has no ``<!-- id:... -->`` marker, a new
        random local id is generated here on every parse; it is not written
        back to the markdown file by this method — verify the upload path
        persists ids, otherwise unmarked tasks may be re-created per sync.
        """
        # Match patterns like:
        # - [ ] Task title <!-- id:abc123 -->
        # - [x] Completed task <!-- id:def456 -->
        # - [-] Cleared/cancelled task <!-- id:ghi789 -->
        # - [ ] Task with notes <!-- id:jkl012 --> Some notes here

        # DOTALL lets the optional trailing group capture a continuation
        # (notes) line after the first "\n" in a multi-line task block.
        task_pattern = r"^\s*-\s*\[([xX\s\-])\]\s*(.+?)(?:\s*<!--\s*id:(\w+)\s*-->)?\s*(?:\n\s*(.+))?$"
        match = re.match(task_pattern, line.strip(), re.MULTILINE | re.DOTALL)

        if not match:
            return None

        checkbox, title_and_maybe_notes, local_id, extra_notes = match.groups()

        # Determine status from checkbox
        if checkbox.lower() == "x":
            status = "complete"
        elif checkbox == "-":
            status = "cleared"
        else:
            status = "incomplete"

        # Split title and inline notes if present
        # (defensively strip anything after a stray "<!--" from the title).
        title_parts = title_and_maybe_notes.split("<!--")[0].strip()
        notes = extra_notes.strip() if extra_notes else ""

        if not local_id:
            local_id = self._generate_local_id()

        return local_id, status, title_parts, notes

    def _format_task_line(
        self, local_id: str, status: str, title: str, notes: str = ""
    ) -> str:
        """Format a task as a markdown line with ID tracking.

        Inverse of _parse_task_line: notes (if any) are emitted as an
        indented continuation line.
        """
        if status == "complete":
            checkbox = "[x]"
        elif status == "cleared":
            checkbox = "[-]"
        else:
            checkbox = "[ ]"

        line = f"- {checkbox} {title} <!-- id:{local_id} -->"
        if notes:
            line += f"\n {notes}"
        return line

    def _read_list_file(self, list_path: Path) -> List[Tuple[str, str, str, str]]:
        """Read and parse tasks from a markdown file.

        Accumulates each "- [" task line plus its non-empty continuation
        lines into a block, then parses the block; an empty line or the next
        task line terminates the current block.
        """
        if not list_path.exists():
            return []

        tasks = []
        with open(list_path, "r", encoding="utf-8") as f:
            content = f.read()

        # Split into potential task blocks
        lines = content.split("\n")
        current_task_lines = []

        for line in lines:
            if line.strip().startswith("- ["):
                # Process previous task if exists
                if current_task_lines:
                    task_block = "\n".join(current_task_lines)
                    parsed = self._parse_task_line(task_block)
                    if parsed:
                        tasks.append(parsed)
                    current_task_lines = []

                current_task_lines = [line]
            elif current_task_lines and line.strip():
                # Continuation of current task (notes)
                current_task_lines.append(line)
            elif current_task_lines:
                # Empty line ends the current task
                task_block = "\n".join(current_task_lines)
                parsed = self._parse_task_line(task_block)
                if parsed:
                    tasks.append(parsed)
                current_task_lines = []

        # Process last task if exists
        if current_task_lines:
            task_block = "\n".join(current_task_lines)
            parsed = self._parse_task_line(task_block)
            if parsed:
                tasks.append(parsed)

        return tasks

    def _write_list_file(self, list_path: Path, tasks: List[Tuple[str, str, str, str]]) -> None:
        """Write tasks to a markdown file (overwriting any existing content)."""
        list_path.parent.mkdir(parents=True, exist_ok=True)

        with open(list_path, "w", encoding="utf-8") as f:
            for local_id, status, title, notes in tasks:
                f.write(self._format_task_line(local_id, status, title, notes))
                f.write("\n")

    def download_from_api(self) -> None:
        """Download all lists and tasks from Godspeed API to local files.

        Refreshes the list-name mapping, fetches incomplete tasks only,
        groups them per list, writes one markdown file per list (overwriting
        local content), and persists metadata with a new last_sync stamp.
        """
        print("Downloading from Godspeed API...")

        # Get all lists (API may return a bare list or a {"lists": [...]} wrapper)
        lists_data = self.client.get_lists()
        lists = (
            lists_data if isinstance(lists_data, list) else lists_data.get("lists", [])
        )

        # Update list mapping
        for list_item in lists:
            list_name = list_item["name"]
            list_id = list_item["id"]
            self.metadata["list_mapping"][list_name] = list_id

        # Get only incomplete tasks (hide completed/cleared from local files)
        all_tasks_data = self.client.get_tasks(status="incomplete")
        tasks = all_tasks_data.get("tasks", [])
        task_lists = all_tasks_data.get("lists", {})

        # Group tasks by list
        tasks_by_list = {}
        for task in tasks:
            list_id = task.get("list_id")
            if list_id in task_lists:
                list_name = task_lists[list_id]["name"]
            else:
                # Find list name from our mapping
                list_name = None
                for name, lid in self.metadata["list_mapping"].items():
                    if lid == list_id:
                        list_name = name
                        break
                if not list_name:
                    list_name = "Unknown"

            if list_name not in tasks_by_list:
                tasks_by_list[list_name] = []
            tasks_by_list[list_name].append(task)

        # Create directory structure and files
        for list_name, list_tasks in tasks_by_list.items():
            safe_name = self._sanitize_filename(list_name)
            list_path = self.sync_dir / f"{safe_name}.md"

            # Convert API tasks to our format
            local_tasks = []
            for task in list_tasks:
                # Find existing local ID or create new one
                # (reverse lookup: task_mapping is keyed by local id)
                godspeed_id = task["id"]
                local_id = None
                for lid, gid in self.metadata["task_mapping"].items():
                    if gid == godspeed_id:
                        local_id = lid
                        break

                if not local_id:
                    local_id = self._generate_local_id()
                    self.metadata["task_mapping"][local_id] = godspeed_id

                # Convert API task status to our format
                is_complete = task.get("is_complete", False)
                is_cleared = task.get("is_cleared", False)

                if is_cleared:
                    status = "cleared"
                elif is_complete:
                    status = "complete"
                else:
                    status = "incomplete"

                title = task["title"]
                notes = task.get("notes", "")

                local_tasks.append((local_id, status, title, notes))

            self._write_list_file(list_path, local_tasks)
            print(f" Downloaded {len(local_tasks)} tasks to {list_path}")

        self.metadata["last_sync"] = datetime.now().isoformat()
        self._save_metadata()
        print(f"Download complete. Synced {len(tasks_by_list)} lists.")

    def upload_to_api(self) -> None:
        """Upload local markdown files to Godspeed API.

        For each visible *.md file: known tasks (present in task_mapping)
        are updated in place; unknown tasks are created and recorded in the
        mapping. Per-task errors are printed and skipped so one failure does
        not abort the whole upload.
        """
        print("Uploading to Godspeed API...")

        # Find all markdown files
        md_files = list(self.sync_dir.glob("*.md"))

        for md_file in md_files:
            if md_file.name.startswith("."):
                continue  # Skip hidden files

            list_name = md_file.stem
            local_tasks = self._read_list_file(md_file)

            # Get or create list ID
            list_id = self.metadata["list_mapping"].get(list_name)
            if not list_id:
                print(
                    f" Warning: No list ID found for '{list_name}', tasks will go to Inbox"
                )
                list_id = None

            for local_id, status, title, notes in local_tasks:
                # Skip tasks with empty titles
                if not title or not title.strip():
                    print(f" Skipping task with empty title (id: {local_id})")
                    continue

                godspeed_id = self.metadata["task_mapping"].get(local_id)

                if godspeed_id:
                    # Update existing task
                    try:
                        update_data = {"title": title.strip()}

                        # Handle status conversion to API format
                        if status == "complete":
                            update_data["is_complete"] = True
                            update_data["is_cleared"] = False
                        elif status == "cleared":
                            # Note: API requires task to be complete before clearing
                            update_data["is_complete"] = True
                            update_data["is_cleared"] = True
                        else:  # incomplete
                            update_data["is_complete"] = False
                            update_data["is_cleared"] = False

                        if notes and notes.strip():
                            update_data["notes"] = notes.strip()

                        self.client.update_task(godspeed_id, **update_data)

                        action = {
                            "complete": "completed",
                            "cleared": "cleared",
                            "incomplete": "reopened",
                        }[status]
                        print(f" Updated task ({action}): {title}")
                    except Exception as e:
                        # Best-effort: report and continue with the next task.
                        print(f" Error updating task '{title}': {e}")
                else:
                    # Create new task
                    try:
                        create_data = {
                            "title": title.strip(),
                            "list_id": list_id,
                        }

                        # Only add notes if they exist and are not empty
                        if notes and notes.strip():
                            create_data["notes"] = notes.strip()

                        print(f" Creating task: '{title}' with data: {create_data}")
                        response = self.client.create_task(**create_data)
                        print(f" API response: {response}")

                        # Handle different response formats
                        # (bare task object vs. {"task": {...}} wrapper)
                        if isinstance(response, dict):
                            if "id" in response:
                                new_godspeed_id = response["id"]
                            elif "task" in response and "id" in response["task"]:
                                new_godspeed_id = response["task"]["id"]
                            else:
                                print(
                                    f" Warning: No ID found in response: {response}"
                                )
                                continue
                        else:
                            print(
                                f" Warning: Unexpected response format: {response}"
                            )
                            continue

                        self.metadata["task_mapping"][local_id] = new_godspeed_id

                        # Set status if not incomplete
                        if status == "complete":
                            self.client.update_task(new_godspeed_id, is_complete=True)
                            print(f" Created completed task: {title}")
                        elif status == "cleared":
                            # Mark complete first, then clear
                            self.client.update_task(
                                new_godspeed_id, is_complete=True, is_cleared=True
                            )
                            print(f" Created cleared task: {title}")
                        else:
                            print(f" Created task: {title}")
                    except Exception as e:
                        print(f" Error creating task '{title}': {e}")
                        import traceback

                        traceback.print_exc()

        self.metadata["last_sync"] = datetime.now().isoformat()
        self._save_metadata()
        print("Upload complete.")

    def sync_bidirectional(self) -> None:
        """Perform a full bidirectional sync.

        NOTE(review): download overwrites local files before upload runs, so
        local-only edits made since the last download may be lost — confirm
        this ordering is intentional.
        """
        print("Starting bidirectional sync...")

        # Download first to get latest state
        self.download_from_api()

        # Then upload any local changes
        self.upload_to_api()

        print("Bidirectional sync complete.")

    def list_local_files(self) -> List[Path]:
        """List all markdown files in sync directory."""
        if not self.sync_dir.exists():
            return []
        return list(self.sync_dir.glob("*.md"))

    def get_sync_status(self) -> Dict:
        """Get current sync status and statistics.

        Counts are computed by re-parsing every local markdown file, so this
        reflects the files on disk, not the last sync snapshot.
        """
        local_files = self.list_local_files()

        total_local_tasks = 0
        for file_path in local_files:
            tasks = self._read_list_file(file_path)
            total_local_tasks += len(tasks)

        return {
            "sync_directory": str(self.sync_dir),
            "local_files": len(local_files),
            "total_local_tasks": total_local_tasks,
            "tracked_tasks": len(self.metadata["task_mapping"]),
            "tracked_lists": len(self.metadata["list_mapping"]),
            "last_sync": self.metadata.get("last_sync"),
        }
|
||||
Reference in New Issue
Block a user