drive viewer

Tim Bendt
2025-05-09 08:28:46 -06:00
parent 9731c94a21
commit fbb9f875e6
9 changed files with 838 additions and 292 deletions

385
drive_view_tui.py Normal file

@@ -0,0 +1,385 @@
import os
import sys
import json
import asyncio
from datetime import datetime
import msal
import aiohttp
from textual.app import App, ComposeResult
from textual.binding import Binding
from textual.containers import Container, Horizontal, Vertical
from textual.widgets import (
Header,
Footer,
Static,
Label,
DataTable,
Button,
ListView,
ListItem,
LoadingIndicator
)
from textual.reactive import reactive
from textual.worker import Worker, get_current_worker
from textual import work
class OneDriveTUI(App):
"""A Textual app for OneDrive integration with MSAL authentication."""
CSS_PATH = "drive_view_tui.tcss"
# Reactive variables
is_authenticated = reactive(False)
selected_drive_id = reactive("")
drive_name = reactive("")
# App bindings
BINDINGS = [
Binding("q", "quit", "Quit"),
Binding("r", "refresh", "Refresh"),
Binding("f", "toggle_follow", "Toggle Follow"),
]
def __init__(self):
super().__init__()
self.access_token = None
self.drives = []
self.followed_items = []
self.msal_app = None
self.cache = None
# Read Azure app credentials from environment variables
self.client_id = os.getenv("AZURE_CLIENT_ID")
self.tenant_id = os.getenv("AZURE_TENANT_ID")
self.scopes = ["https://graph.microsoft.com/Files.ReadWrite.All"]
self.cache_file = "token_cache.bin"
def compose(self) -> ComposeResult:
"""Create child widgets for the app."""
yield Header(show_clock=True)
with Container(id="main_container"):
yield Label("Authenticating with Microsoft Graph API...", id="status_label")
with Container(id="auth_container"):
yield Label("", id="auth_message")
yield Button("Login", id="login_button", variant="primary")
with Horizontal(id="content_container", classes="hide"):
with Vertical(id="drive_container"):
yield ListView(id="drive_list")
with Vertical(id="items_container"):
yield DataTable(id="items_table")
yield Label("No items found", id="no_items_label", classes="hide")
yield Footer()
def on_mount(self) -> None:
"""Initialize the app when mounted."""
self.cache = msal.SerializableTokenCache()
self.query_one("#auth_container").ALLOW_SELECT = True
# Initialize the table
self.query_one("#drive_list").border_title = "Available Drives"
self.query_one("#content_container").border_title = "Followed Items"
table = self.query_one("#items_table")
table.add_columns("Name", "Type", "Last Modified", "Size", "Web URL")
# Load cached token if available
if os.path.exists(self.cache_file):
with open(self.cache_file, "r") as f:
self.cache.deserialize(f.read())
# Initialize MSAL app
self.initialize_msal()
self.notify("Initializing MSAL app...", severity="info")
def initialize_msal(self) -> None:
"""Initialize the MSAL application."""
if not self.client_id or not self.tenant_id:
self.notify(
"Please set AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables.",
severity="error",
timeout=10,
)
return
authority = f"https://login.microsoftonline.com/{self.tenant_id}"
self.msal_app = msal.PublicClientApplication(
self.client_id, authority=authority, token_cache=self.cache
)
# Try silent authentication first
if not self.msal_app:
return
accounts = self.msal_app.get_accounts()
if accounts:
self.query_one("#status_label").update("Trying silent authentication...")
# get_token_silent runs as a background worker and updates the
# status UI itself once the token has (or has not) been acquired
self.get_token_silent(accounts[0])
else:
self.query_one("#status_label").update("Please log in to continue.")
self.query_one("#auth_container").remove_class("hide")
@work
async def get_token_silent(self, account):
"""Get token silently."""
token_response = self.msal_app.acquire_token_silent(self.scopes, account=account)
if token_response and "access_token" in token_response:
self.access_token = token_response["access_token"]
self.is_authenticated = True
self.query_one("#status_label").update("Authenticated successfully.")
self.query_one("#auth_container").add_class("hide")
self.notify("Authenticated successfully.", severity="success")
self.load_initial_data()
else:
self.query_one("#status_label").update("Silent authentication failed. Please log in.")
self.query_one("#auth_container").remove_class("hide")
self.query_one("#content_container").loading = False
def on_button_pressed(self, event: Button.Pressed) -> None:
"""Handle button presses."""
if event.button.id == "login_button":
self.initiate_device_flow()
def initiate_device_flow(self):
"""Initiate the MSAL device code flow."""
self.notify("Starting device code flow...", severity="info")
self.query_one("#content_container").loading = True
self.query_one("#status_label").update("Initiating device code flow...")
# Initiate device flow
flow = self.msal_app.initiate_device_flow(scopes=self.scopes)
if "user_code" not in flow:
self.notify("Failed to create device flow", severity="error")
return
# self.notify(str(flow), severity="info")
# Display the device code message
self.query_one("#auth_message").update(flow["message"])
self.query_one("#status_label").update("Waiting for authentication...")
self.wait_for_device_code(flow)
@work(thread=True)
def wait_for_device_code(self, flow):
"""Wait for the user to authenticate using the device code."""
# A thread worker should be a plain function, not a coroutine.
# acquire_token_by_device_flow blocks and polls the token endpoint
# internally until the flow completes or expires, so no manual
# authorization_pending/sleep loop is needed here.
result = self.msal_app.acquire_token_by_device_flow(flow)
if "access_token" not in result:
# Route UI calls back to the event loop from this thread
self.call_from_thread(self.notify, f"Authentication failed: {result.get('error_description', 'Unknown error')}", severity="error")
return
self.access_token = result["access_token"]
self.is_authenticated = True
# Save the token to cache
with open(self.cache_file, "w") as f:
f.write(self.cache.serialize())
# Load initial data after authentication, scheduled from the UI thread
self.call_from_thread(self.load_initial_data)
@work
async def load_initial_data(self):
"""Load initial data after authentication."""
# Load drives first
# Hide auth container and show content container
self.query_one("#auth_container").add_class("hide")
self.query_one("#content_container").remove_class("hide")
self.query_one("#content_container").loading = False
worker = self.load_drives()
await worker.wait()
# Find and select the OneDrive drive
for drive in self.drives:
if drive.get("name") == "OneDrive":
self.selected_drive_id = drive.get("id")
self.drive_name = drive.get("name")
break
# If we have a selected drive, load followed items
if self.selected_drive_id:
self.load_followed_items()
@work
async def load_drives(self):
"""Load OneDrive drives."""
if not self.access_token:
return
headers = {"Authorization": f"Bearer {self.access_token}"}
try:
async with aiohttp.ClientSession() as session:
async with session.get(
"https://graph.microsoft.com/v1.0/me/drives",
headers=headers
) as response:
if response.status != 200:
self.notify(f"Failed to load drives: {response.status}", severity="error")
return
drives_data = await response.json()
self.drives = drives_data.get("value", [])
for drive in self.drives:
drive_name = drive.get("name", "Unknown")
drive_id = drive.get("id", "Unknown")
# Add the drive to the list
self.query_one("#drive_list").append(
ListItem(Label(drive_name))
)
# Update the drives label
if self.drives:
self.query_one("#drive_list").border_subtitle = f"Available: {len(self.drives)}"
except Exception as e:
self.notify(f"Error loading drives: {str(e)}", severity="error")
@work
async def load_followed_items(self):
"""Load followed items from the selected drive."""
if not self.access_token or not self.selected_drive_id:
return
self.query_one("#status_label").update("Loading followed items...")
headers = {"Authorization": f"Bearer {self.access_token}"}
# Highlight the first drive in the list
self.query_one("#drive_list").index = 0
try:
url = f"https://graph.microsoft.com/v1.0/me/drives/{self.selected_drive_id}/following"
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as response:
if response.status != 200:
self.notify(f"Failed to load followed items: {await response.text()}", severity="error")
return
items_data = await response.json()
self.followed_items = items_data.get("value", [])
# Update the table with the shared helper defined below
await self.update_items_table()
except Exception as e:
self.notify(f"Error loading followed items: {str(e)}", severity="error")
self.query_one("#status_label").update("Ready")
async def update_items_table(self):
table = self.query_one("#items_table")
table.clear()
if not self.followed_items:
self.query_one("#no_items_label").remove_class("hide")
return
self.query_one("#no_items_label").add_class("hide")
for item in self.followed_items:
name = item.get("name", "Unknown")
item_type = "Folder" if item.get("folder") else "File"
# Format the last modified date
last_modified = item.get("lastModifiedDateTime", "")
if last_modified:
try:
date_obj = datetime.fromisoformat(last_modified.replace('Z', '+00:00'))
last_modified = date_obj.strftime("%Y-%m-%d %H:%M")
except ValueError:
pass
# Format the size
size = item.get("size", 0)
if size:
if size < 1024:
size_str = f"{size} B"
elif size < 1024 * 1024:
size_str = f"{size / 1024:.1f} KB"
elif size < 1024 * 1024 * 1024:
size_str = f"{size / (1024 * 1024):.1f} MB"
else:
size_str = f"{size / (1024 * 1024 * 1024):.1f} GB"
else:
size_str = "N/A"
web_url = item.get("webUrl", "")
table.add_row(name, item_type, last_modified, size_str, web_url)
async def action_refresh(self) -> None:
"""Refresh the data."""
if self.is_authenticated and self.selected_drive_id:
self.load_followed_items()
self.notify("Refreshed followed items")
async def action_toggle_follow(self) -> None:
"""Toggle follow status for selected item."""
# This would be implemented to follow/unfollow the selected item
# Currently just a placeholder for the key binding
self.notify("Toggle follow functionality not implemented yet")
async def action_quit(self) -> None:
"""Quit the application."""
self.exit()
if __name__ == "__main__":
app = OneDriveTUI()
app.run()
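# Running this file directly assumes an Azure (Entra) app registration
# with public client flows enabled, supplied via the two environment
# variables read in __init__ above, e.g. (POSIX shell):
#   AZURE_CLIENT_ID=<app id> AZURE_TENANT_ID=<tenant id> python drive_view_tui.py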

126
drive_view_tui.tcss Normal file

@@ -0,0 +1,126 @@
/* OneDrive TUI App CSS Styles */
/* Main container */
#main_container {
padding: 1 2;
}
/* Authentication container */
#auth_container {
display: block;
align: center middle;
height: 1fr;
margin: 1 2;
padding: 1;
border: heavy $accent;
background: $surface;
&.hide {
display:none;
}
}
#auth_message {
margin-bottom: 1;
width: 100%;
height: auto;
padding: 1;
text-align: center;
}
#login_button {
align: center middle;
margin: 1;
min-width: 20;
}
/* Content container that holds drives and items */
#content_container {
margin-top: 1;
}
/* Status and loading elements */
#status_label {
text-align: center;
margin-bottom: 1;
}
#loading {
align: center middle;
margin: 2;
}
/* Title styles */
.title {
color: $accent;
background: $boost;
text-align: center;
padding: 1;
text-style: bold;
border: heavy $accent;
margin-bottom: 1;
}
/* Drive container styles */
#drive_container {
width: 1fr;
margin-bottom: 1;
height: 100%;
}
#drive_list {
border: round $primary;
padding: 1;
height: 100%;
}
#drive_label {
color: $text;
text-style: bold;
}
/* Items container and table styles */
#items_container {
padding: 0;
width: 3fr;
height: 100%;
}
#items_table {
width: 100%;
height: auto;
}
#no_items_label {
color: $text-muted;
text-align: center;
padding: 2;
}
/* Utility classes */
.hide {
display: none;
}
/* DataTable styling */
DataTable {
border: solid $accent;
background: $primary-background-lighten-1;
margin: 1 0;
}
DataTable > .datatable--header {
background: $primary;
color: $text;
text-style: bold;
}
DataTable > .datatable--cursor {
background: $secondary;
}
/* Override scrollbar styles */
* {
scrollbar-color: $accent $surface;
scrollbar-background: $surface;
scrollbar-color-hover: $accent-lighten-1;
scrollbar-background-hover: $surface-lighten-1;
}


@@ -23,35 +23,27 @@ import msal
import orjson
# Filepath for caching timestamp
cache_timestamp_file = "cache_timestamp.json"
cache_timestamp_file = 'cache_timestamp.json'
# Filepath for sync timestamp
sync_timestamp_file = "sync_timestamp.json"
sync_timestamp_file = 'sync_timestamp.json'
# Function to load the last sync timestamp
def load_last_sync_timestamp():
if os.path.exists(sync_timestamp_file):
with open(sync_timestamp_file, "r") as f:
return json.load(f).get("last_sync", 0)
with open(sync_timestamp_file, 'r') as f:
return json.load(f).get('last_sync', 0)
return 0
# Function to save the current sync timestamp
def save_sync_timestamp():
with open(sync_timestamp_file, "w") as f:
json.dump({"last_sync": time.time()}, f)
with open(sync_timestamp_file, 'w') as f:
json.dump({'last_sync': time.time()}, f)
# Add argument parsing for dry-run mode
arg_parser = argparse.ArgumentParser(description="Fetch and synchronize emails.")
arg_parser.add_argument("--dry-run", action="store_true", help="Run in dry-run mode without making changes.", default=False)
args = arg_parser.parse_args()
dry_run = args.dry_run
@@ -59,52 +51,45 @@ dry_run = args.dry_run
# Define a global semaphore for throttling
semaphore = asyncio.Semaphore(4)
async def fetch_with_aiohttp(url, headers):
async with semaphore:
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as response:
if response.status != 200:
raise Exception(f"Failed to fetch {url}: {response.status} {await response.text()}")
raw_bytes = await response.read()
content_length = response.headers.get("Content-Length")
content_length = response.headers.get('Content-Length')
if content_length and len(raw_bytes) != int(content_length):
print("Warning: Incomplete response received!")
return None
return orjson.loads(raw_bytes)
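# Hypothetical usage of the helper above (names from this file; any
# Graph GET endpoint that returns JSON works the same way):
#   me = await fetch_with_aiohttp(
#       'https://graph.microsoft.com/v1.0/me',
#       {'Authorization': f'Bearer {access_token}'},
#   )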
async def post_with_aiohttp(url, headers, json_data):
async with semaphore:
async with aiohttp.ClientSession() as session:
async with session.post(url, headers=headers, json=json_data) as response:
return response.status
async def patch_with_aiohttp(url, headers, json_data):
async with semaphore:
async with aiohttp.ClientSession() as session:
async with session.patch(url, headers=headers, json=json_data) as response:
return response.status
async def delete_with_aiohttp(url, headers):
async with semaphore:
async with aiohttp.ClientSession() as session:
async with session.delete(url, headers=headers) as response:
return response.status
async def synchronize_maildir_async(maildir_path, headers, progress, task_id):
last_sync = load_last_sync_timestamp()
# Find messages moved from "new" to "cur" and mark them as read
new_dir = os.path.join(maildir_path, "new")
cur_dir = os.path.join(maildir_path, "cur")
new_files = set(glob.glob(os.path.join(new_dir, "*.eml*")))
cur_files = set(glob.glob(os.path.join(cur_dir, "*.eml*")))
new_dir = os.path.join(maildir_path, 'new')
cur_dir = os.path.join(maildir_path, 'cur')
new_files = set(glob.glob(os.path.join(new_dir, '*.eml*')))
cur_files = set(glob.glob(os.path.join(cur_dir, '*.eml*')))
moved_to_cur = [os.path.basename(f) for f in cur_files - new_files]
progress.update(task_id, total=len(moved_to_cur))
@@ -113,22 +98,18 @@ async def synchronize_maildir_async(maildir_path, headers, progress, task_id):
if os.path.getmtime(os.path.join(cur_dir, filename)) < last_sync:
progress.update(task_id, advance=1)
continue
message_id = re.sub(r"\:2.+", "", filename.split('.')[0]) # Extract the Message-ID from the filename
if not dry_run:
status = await patch_with_aiohttp(
f"https://graph.microsoft.com/v1.0/me/messages/{message_id}",
f'https://graph.microsoft.com/v1.0/me/messages/{message_id}',
headers,
{"isRead": True},
{'isRead': True}
)
if status == 404:
os.remove(os.path.join(cur_dir, filename))
else:
progress.console.print(f"[DRY-RUN] Would mark message as read: {message_id}")
progress.advance(task_id)
# Save the current sync timestamp
@@ -137,17 +118,16 @@ async def synchronize_maildir_async(maildir_path, headers, progress, task_id):
else:
progress.console.print("[DRY-RUN] Would save sync timestamp.")
async def fetch_mail_async(maildir_path, attachments_dir, headers, progress, task_id):
mail_url = "https://graph.microsoft.com/v1.0/me/mailFolders/inbox/messages?$top=100&$orderby=receivedDateTime asc&$select=id,subject,from,toRecipients,ccRecipients,receivedDateTime,isRead"
mail_url = 'https://graph.microsoft.com/v1.0/me/mailFolders/inbox/messages?$top=100&$orderby=receivedDateTime asc&$select=id,subject,from,toRecipients,ccRecipients,receivedDateTime,isRead'
messages = []
# Fetch the total count of messages in the inbox
inbox_url = "https://graph.microsoft.com/v1.0/me/mailFolders/inbox"
inbox_url = 'https://graph.microsoft.com/v1.0/me/mailFolders/inbox'
response = await fetch_with_aiohttp(inbox_url, headers)
total_messages = response.get("totalItemCount", 0)
total_messages = response.get('totalItemCount', 0)
progress.update(task_id, total=total_messages)
while mail_url:
@@ -156,24 +136,22 @@ async def fetch_mail_async(maildir_path, attachments_dir, headers, progress, tas
except Exception as e:
progress.console.print(f"Error fetching messages: {e}")
continue
messages.extend(response_data.get("value", []))
progress.advance(task_id, len(response_data.get("value", [])))
messages.extend(response_data.get('value', []))
progress.advance(task_id, len(response_data.get('value', [])))
# Get the next page URL from @odata.nextLink
mail_url = response_data.get("@odata.nextLink")
mail_url = response_data.get('@odata.nextLink')
inbox_msg_ids = set(message["id"] for message in messages)
inbox_msg_ids = set(message['id'] for message in messages)
progress.update(task_id, completed=(len(messages) / 2))
new_dir = os.path.join(maildir_path, "new")
cur_dir = os.path.join(maildir_path, "cur")
new_files = set(glob.glob(os.path.join(new_dir, "*.eml*")))
cur_files = set(glob.glob(os.path.join(cur_dir, "*.eml*")))
new_dir = os.path.join(maildir_path, 'new')
cur_dir = os.path.join(maildir_path, 'cur')
new_files = set(glob.glob(os.path.join(new_dir, '*.eml*')))
cur_files = set(glob.glob(os.path.join(cur_dir, '*.eml*')))
for filename in cur_files | new_files:
message_id = filename.split('.')[0].split('/')[-1] # Extract the Message-ID from the filename
if message_id not in inbox_msg_ids:
if not dry_run:
progress.console.print(f"Deleting {filename} from inbox")
os.remove(filename)
@@ -181,81 +159,57 @@ async def fetch_mail_async(maildir_path, attachments_dir, headers, progress, tas
progress.console.print(f"[DRY-RUN] Would delete {filename} from inbox")
for message in messages:
progress.console.print(f"Processing message: {message.get('subject', 'No Subject')}", end='\r')
await save_mime_to_maildir_async(maildir_path, message, attachments_dir, headers, progress)
progress.update(task_id, advance=0.5)
progress.update(task_id, completed=len(messages))
progress.console.print(f"\nFinished saving {len(messages)} messages.")
async def archive_mail_async(maildir_path, headers, progress, task_id):
archive_dir = os.path.join(maildir_path, ".Archives")
archive_files = glob.glob(os.path.join(archive_dir, "**", "*.eml*"), recursive=True)
archive_dir = os.path.join(maildir_path, '.Archives')
archive_files = glob.glob(os.path.join(archive_dir, '**', '*.eml*'), recursive=True)
progress.update(task_id, total=len(archive_files))
folder_response = await fetch_with_aiohttp('https://graph.microsoft.com/v1.0/me/mailFolders', headers)
folders = folder_response.get('value', [])
archive_folder_id = next((folder.get('id') for folder in folders if folder.get('displayName', '').lower() == 'archive'), None)
if not archive_folder_id:
raise Exception("No folder named 'Archive' found on the server.")
for filepath in archive_files:
message_id = os.path.basename(filepath).split(".")[
0
] # Extract the Message-ID from the filename
message_id = os.path.basename(filepath).split('.')[0] # Extract the Message-ID from the filename
if not dry_run:
status = await post_with_aiohttp(
f"https://graph.microsoft.com/v1.0/me/messages/{message_id}/microsoft.graph.move",
f'https://graph.microsoft.com/v1.0/me/messages/{message_id}/microsoft.graph.move',
headers,
{"destinationId": archive_folder_id},
{'destinationId': archive_folder_id}
)
if status != 201: # 201 Created indicates success
progress.console.print(f"Failed to move message to 'Archive': {message_id}, {status}")
if status == 404:
os.remove(filepath) # Remove the local copy when the message no longer exists on the server
progress.console.print(f"Message not found on server, removed local copy: {message_id}")
elif status == 204:
progress.console.print(f"Moved message to 'Archive': {message_id}")
else:
progress.console.print(f"[DRY-RUN] Would move message to 'Archive' folder: {message_id}")
progress.advance(task_id)
return
async def delete_mail_async(maildir_path, headers, progress, task_id):
trash_dir = os.path.join(maildir_path, ".Trash", "cur")
trash_files = set(glob.glob(os.path.join(trash_dir, "*.eml*")))
trash_dir = os.path.join(maildir_path, '.Trash', 'cur')
trash_files = set(glob.glob(os.path.join(trash_dir, '*.eml*')))
progress.update(task_id, total=len(trash_files))
for filepath in trash_files:
message_id = os.path.basename(filepath).split(".")[
0
] # Extract the Message-ID from the filename
message_id = os.path.basename(filepath).split('.')[0] # Extract the Message-ID from the filename
if not dry_run:
progress.console.print(f"Moving message to trash: {message_id}")
status = await delete_with_aiohttp(
f"https://graph.microsoft.com/v1.0/me/messages/{message_id}", headers
f'https://graph.microsoft.com/v1.0/me/messages/{message_id}',
headers
)
if status == 204 or status == 404:
os.remove(filepath) # Remove the file from local trash
@@ -263,18 +217,17 @@ async def delete_mail_async(maildir_path, headers, progress, task_id):
progress.console.print(f"[DRY-RUN] Would delete message: {message_id}")
progress.advance(task_id)
async def fetch_calendar_async(headers, progress, task_id):
yesterday = datetime.now().replace(hour=0, minute=0, second=0) - timedelta(days=1)
end_of_today = datetime.now().replace(hour=23, minute=59, second=59)
six_days_future = end_of_today + timedelta(days=6)
# example https://graph.microsoft.com/v1.0/me/calendarView?startDateTime=2025-05-06T00:00:00&endDateTime=2025-05-13T23:59:59.999999&$count=true&$select=id
event_base_url = f"https://graph.microsoft.com/v1.0/me/calendarView?startDateTime={yesterday.isoformat()}&endDateTime={six_days_future.isoformat()}"
event_base_url =f"https://graph.microsoft.com/v1.0/me/calendarView?startDateTime={yesterday.isoformat()}&endDateTime={six_days_future.isoformat()}"
total_event_url = f"{event_base_url}&$count=true&$select=id"
total = await fetch_with_aiohttp(total_event_url, headers)
total_events = total.get("@odata.count", 0) + 1
total_events = total.get('@odata.count', 0) + 1
progress.update(task_id, total=total_events)
calendar_url = f"{event_base_url}&$top=100&$select=start,end,iCalUid,subject,bodyPreview,webLink,location,recurrence,showAs,responseStatus,onlineMeeting"
events = []
@@ -282,58 +235,47 @@ async def fetch_calendar_async(headers, progress, task_id):
progress.update(task_id, total=total_events + total_events % 100)
while calendar_url:
response_data = await fetch_with_aiohttp(calendar_url, headers)
events.extend(response_data.get("value", []))
events.extend(response_data.get('value', []))
progress.advance(task_id, 1)
# Get the next page URL from @odata.nextLink
calendar_url = response_data.get("@odata.nextLink")
calendar_url = response_data.get('@odata.nextLink')
output_file = "output_ics/outlook_events_latest.ics"
output_file = 'output_ics/outlook_events_latest.ics'
if not dry_run:
os.makedirs(os.path.dirname(output_file), exist_ok=True)
progress.console.print(f"Saving events to {output_file}...")
with open(output_file, "w") as f:
with open(output_file, 'w') as f:
f.write("BEGIN:VCALENDAR\nVERSION:2.0\n")
for event in events:
progress.advance(task_id)
if "start" in event and "end" in event:
start = parser.isoparse(event["start"]["dateTime"]).astimezone(UTC)
end = parser.isoparse(event["end"]["dateTime"]).astimezone(UTC)
f.write(
f"BEGIN:VEVENT\nSUMMARY:{event['subject']}\nDESCRIPTION:{event.get('bodyPreview', '')}\n"
)
if 'start' in event and 'end' in event:
start = parser.isoparse(event['start']['dateTime']).astimezone(UTC)
end = parser.isoparse(event['end']['dateTime']).astimezone(UTC)
f.write(f"BEGIN:VEVENT\nSUMMARY:{event['subject']}\nDESCRIPTION:{event.get('bodyPreview', '')}\n")
f.write(f"UID:{event.get('iCalUId', '')}\n")
f.write(f"LOCATION:{event.get('location', {})['displayName']}\n")
f.write(f"CLASS:{event.get('showAs', '')}\n")
f.write(f"STATUS:{event.get('responseStatus', {})['response']}\n")
if "onlineMeeting" in event and event["onlineMeeting"]:
f.write(
f"URL:{event.get('onlineMeeting', {}).get('joinUrl', '')}\n"
)
if 'onlineMeeting' in event and event['onlineMeeting']:
f.write(f"URL:{event.get('onlineMeeting', {}).get('joinUrl', '')}\n")
f.write(f"DTSTART:{start.strftime('%Y%m%dT%H%M%S')}\n")
f.write(f"DTEND:{end.strftime('%Y%m%dT%H%M%S')}\n")
if 'recurrence' in event and event['recurrence']: # Check if 'recurrence' exists and is not None
for rule in event['recurrence']:
if rule.startswith('RRULE'):
rule_parts = rule.split(';')
new_rule_parts = []
for part in rule_parts:
if part.startswith("UNTIL="):
until_value = part.split("=")[1]
if part.startswith('UNTIL='):
until_value = part.split('=')[1]
until_date = parser.isoparse(until_value)
if start.tzinfo is not None and until_date.tzinfo is None:
until_date = until_date.replace(tzinfo=UTC)
new_rule_parts.append(f"UNTIL={until_date.strftime('%Y%m%dT%H%M%SZ')}")
else:
new_rule_parts.append(part)
rule = ";".join(new_rule_parts)
rule = ';'.join(new_rule_parts)
f.write(f"{rule}\n")
f.write("END:VEVENT\n")
f.write("END:VCALENDAR\n")
@@ -343,20 +285,18 @@ async def fetch_calendar_async(headers, progress, task_id):
progress.console.print(f"[DRY-RUN] Would save events to {output_file}")
# Function to create Maildir structure
def create_maildir_structure(base_path):
os.makedirs(os.path.join(base_path, "cur"), exist_ok=True)
os.makedirs(os.path.join(base_path, "new"), exist_ok=True)
os.makedirs(os.path.join(base_path, "tmp"), exist_ok=True)
os.makedirs(os.path.join(base_path, 'cur'), exist_ok=True)
os.makedirs(os.path.join(base_path, 'new'), exist_ok=True)
os.makedirs(os.path.join(base_path, 'tmp'), exist_ok=True)
async def save_mime_to_maildir_async(maildir_path, email_data, attachments_dir, headers, progress):
# Create a new EmailMessage object
# Determine the directory based on isRead
target_dir = "cur" if email_data.get("isRead", False) else "new"
id = email_data.get("id", "")
target_dir = 'cur' if email_data.get('isRead', False) else 'new'
id = email_data.get('id', '')
if not id:
progress.console.print("Message ID not found. Skipping save.")
return
@@ -365,67 +305,48 @@ async def save_mime_to_maildir_async(
# Check if the file already exists
if os.path.exists(email_filepath):
progress.console.print(f"Message {id} already exists in {target_dir}. Skipping save.")
return
# Fetch the full MIME payload from the API
mime_url = f"https://graph.microsoft.com/v1.0/me/messages/{id}/$value"
mime_url = f'https://graph.microsoft.com/v1.0/me/messages/{id}/$value'
try:
async with aiohttp.ClientSession() as session:
async with session.get(mime_url, headers=headers) as response:
if response.status != 200:
raise Exception(f"Failed to fetch MIME payload for {id}: {response.status} {await response.text()}")
mime_payload = await response.text()
# Save the MIME payload to the Maildir
os.makedirs(os.path.dirname(email_filepath), exist_ok=True)
with open(email_filepath, "w") as f:
with open(email_filepath, 'w') as f:
f.write(mime_payload)
progress.console.print(f"Saved message {id} to {target_dir}.")
except Exception as e:
progress.console.print(f"Failed to save message {id}: {e}")
def save_email_to_maildir(maildir_path, email_data, attachments_dir, progress):
# Create a new EmailMessage object
msg = EmailMessage()
received_datetime = email_data.get("receivedDateTime", "")
received_datetime = email_data.get('receivedDateTime', '')
if received_datetime:
parsed_datetime = parser.isoparse(received_datetime)
msg["Date"] = format_datetime(parsed_datetime)
msg['Date'] = format_datetime(parsed_datetime)
else:
msg["Date"] = ""
msg['Date'] = ''
msg["Message-ID"] = email_data.get("id", "")
msg["Subject"] = email_data.get("subject", "No Subject")
msg["From"] = (
email_data.get("from", {})
.get("emailAddress", {})
.get("address", "unknown@unknown.com")
)
msg["To"] = ", ".join(
[
recipient["emailAddress"]["address"]
for recipient in email_data.get("toRecipients", [])
]
)
msg["Cc"] = ", ".join(
[
recipient["emailAddress"]["address"]
for recipient in email_data.get("ccRecipients", [])
]
)
msg['Message-ID'] = email_data.get('id', '')
msg['Subject'] = email_data.get('subject', 'No Subject')
msg['From'] = email_data.get('from', {}).get('emailAddress', {}).get('address', 'unknown@unknown.com')
msg['To'] = ', '.join([recipient['emailAddress']['address'] for recipient in email_data.get('toRecipients', [])])
msg['Cc'] = ', '.join([recipient['emailAddress']['address'] for recipient in email_data.get('ccRecipients', [])])
# Convert the email body from HTML to Markdown
body_html = email_data.get("body", {}).get("content", "")
if email_data.get("body", {}).get("contentType", "").lower() == "html":
body_html = email_data.get('body', {}).get('content', '')
if email_data.get('body', {}).get('contentType', '').lower() == 'html':
markdown_converter = html2text.HTML2Text()
markdown_converter.ignore_images = True
markdown_converter.ignore_links = True
@@ -434,45 +355,38 @@ def save_email_to_maildir(maildir_path, email_data, attachments_dir, progress):
body_markdown = body_html
# Remove lines between any alphanumeric BannerStart and BannerEnd
body_markdown = re.sub(r'\w+BannerStart.*?\w+BannerEnd', '', body_markdown, flags=re.DOTALL)
msg.set_content(body_markdown)
# Download attachments
progress.console.print(f"Downloading attachments for message: {msg['Message-ID']}")
for attachment in email_data.get("attachments", []):
attachment_name = attachment.get("name", "unknown")
attachment_content = attachment.get("contentBytes")
for attachment in email_data.get('attachments', []):
attachment_name = attachment.get('name', 'unknown')
attachment_content = attachment.get('contentBytes')
if attachment_content:
attachment_path = os.path.join(attachments_dir, attachment_name)
if not dry_run:
with open(attachment_path, "wb") as f:
f.write(attachment_content.encode("utf-8"))
msg.add_attachment(
attachment_content.encode("utf-8"), filename=attachment_name
)
with open(attachment_path, 'wb') as f:
f.write(attachment_content.encode('utf-8'))
msg.add_attachment(attachment_content.encode('utf-8'), filename=attachment_name)
else:
progress.console.print(f"[DRY-RUN] Would save attachment to {attachment_path}")
# Determine the directory based on isRead
target_dir = "cur" if email_data.get("isRead", False) else "new"
target_dir = 'cur' if email_data.get('isRead', False) else 'new'
email_filename = f"{msg['Message-ID']}.eml"
email_filepath = os.path.join(maildir_path, target_dir, email_filename)
# Check if the file already exists in any subfolder
for root, _, files in os.walk(maildir_path):
if email_filename in files:
progress.console.print(f"Message {msg['Message-ID']} already exists in {root}. Skipping save.")
return
# Save the email to the Maildir
if not dry_run:
with open(email_filepath, "w") as f:
with open(email_filepath, 'w') as f:
f.write(msg.as_string())
progress.console.print(f"Saved message {msg['Message-ID']}")
else:
@@ -480,77 +394,66 @@ def save_email_to_maildir(maildir_path, email_data, attachments_dir, progress):
async def main():
# Save emails to Maildir
maildir_path = os.getenv("MAILDIR_PATH", os.path.expanduser("~/Mail")) + "/corteva"
attachments_dir = os.path.join(maildir_path, "attachments")
maildir_path = os.getenv('MAILDIR_PATH', os.path.expanduser('~/Mail')) + "/corteva"
attachments_dir = os.path.join(maildir_path, 'attachments')
os.makedirs(attachments_dir, exist_ok=True)
create_maildir_structure(maildir_path)
# Read Azure app credentials from environment variables
client_id = os.getenv("AZURE_CLIENT_ID")
tenant_id = os.getenv("AZURE_TENANT_ID")
client_id = os.getenv('AZURE_CLIENT_ID')
tenant_id = os.getenv('AZURE_TENANT_ID')
if not client_id or not tenant_id:
raise ValueError("Please set the AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables.")
# Token cache
cache = msal.SerializableTokenCache()
cache_file = "token_cache.bin"
cache_file = 'token_cache.bin'
if os.path.exists(cache_file):
cache.deserialize(open(cache_file, "r").read())
cache.deserialize(open(cache_file, 'r').read())
# Authentication
authority = f"https://login.microsoftonline.com/{tenant_id}"
scopes = [
"https://graph.microsoft.com/Calendars.Read",
"https://graph.microsoft.com/Mail.ReadWrite",
]
authority = f'https://login.microsoftonline.com/{tenant_id}'
scopes = ['https://graph.microsoft.com/Calendars.Read', 'https://graph.microsoft.com/Mail.ReadWrite']
app = msal.PublicClientApplication(client_id, authority=authority, token_cache=cache)
accounts = app.get_accounts()
if accounts:
token_response = app.acquire_token_silent(scopes, account=accounts[0])
else:
flow = app.initiate_device_flow(scopes=scopes)
if "user_code" not in flow:
if 'user_code' not in flow:
raise Exception("Failed to create device flow")
print(Panel(flow['message'], border_style="magenta", padding=2, title="MSAL Login Flow Link"))
token_response = app.acquire_token_by_device_flow(flow)
if "access_token" not in token_response:
if 'access_token' not in token_response:
raise Exception("Failed to acquire token")
# Save token cache
with open(cache_file, "w") as f:
with open(cache_file, 'w') as f:
f.write(cache.serialize())
access_token = token_response["access_token"]
headers = {
"Authorization": f"Bearer {access_token}",
"Prefer": 'outlook.body-content-type="text"',
}
access_token = token_response['access_token']
headers = {'Authorization': f'Bearer {access_token}', 'Prefer': 'outlook.body-content-type="text"'}
accounts = app.get_accounts()
if not accounts:
raise Exception("No accounts found")
maildir_path = os.getenv("MAILDIR_PATH", os.path.expanduser("~/Mail")) + "/corteva"
maildir_path = os.getenv('MAILDIR_PATH', os.path.expanduser('~/Mail')) + "/corteva"
progress = Progress(
SpinnerColumn(),
MofNCompleteColumn(),
*Progress.get_default_columns()
)
with progress:
task_fetch = progress.add_task("[green]Syncing Inbox...", total=0)
@@ -563,12 +466,9 @@ async def main():
synchronize_maildir_async(maildir_path, headers, progress, task_read),
archive_mail_async(maildir_path, headers, progress, task_archive),
delete_mail_async(maildir_path, headers, progress, task_delete),
fetch_mail_async(maildir_path, attachments_dir, headers, progress, task_fetch),
fetch_calendar_async(headers, progress, task_calendar)
)
if __name__ == "__main__":
asyncio.run(main())


@@ -28,6 +28,7 @@ from actions.delete import delete_current
from actions.open import action_open
from actions.task import action_create_task
from widgets.EnvelopeHeader import EnvelopeHeader
from widgets.ContentContainer import ContentContainer
from maildir_gtd.utils import group_envelopes_by_date
logging.basicConfig(
@@ -106,6 +107,7 @@ class EmailViewerApp(App):
Binding("1", "focus_1", "Focus Accounts Panel"),
Binding("2", "focus_2", "Focus Folders Panel"),
Binding("3", "focus_3", "Focus Envelopes Panel"),
Binding("f", "toggle_mode", "Toggle Content Mode"),
]
BINDINGS.extend(
@@ -129,7 +131,7 @@ class EmailViewerApp(App):
ListView(id="folders_list", classes="list_view"),
id="sidebar",
),
ScrollableContainer(EnvelopeHeader(), Markdown(), id="main_content"),
ContentContainer(id="main_content"),
id="outer-wrapper",
)
yield Footer()
@@ -218,33 +220,33 @@ class EmailViewerApp(App):
if new_message_id == old_message_id:
return
self.msg_worker.cancel() if self.msg_worker else None
logging.info(f"new_message_id: {new_message_id}, type: {type(new_message_id)}")
logging.info(f"message_metadata keys: {list(self.message_metadata.keys())}")
content_container = self.query_one("#main_content")
# display_content is async, so schedule it as a worker instead of
# leaving an un-awaited coroutine behind
self.run_worker(content_container.display_content(new_message_id))
if new_message_id in self.message_metadata:
metadata = self.message_metadata[new_message_id]
self.current_message_index = metadata["index"]
headers.subject = metadata["subject"].strip()
headers.from_ = metadata["from"].get("addr", "")
headers.to = metadata["to"].get("addr", "")
message_date = re.sub(r"[\+\-]\d\d:\d\d", "", metadata["date"])
message_date = datetime.strptime(message_date, "%Y-%m-%d %H:%M").strftime(
"%a %b %d %H:%M"
)
self.current_message_index = metadata["index"]
content_container.update_header(
subject=metadata.get("subject", "").strip(),
from_=metadata["from"].get("addr", ""),
to=metadata["to"].get("addr", ""),
date=message_date,
cc=metadata["cc"].get("addr", "") if "cc" in metadata else "",
)
self.query_one(ListView).index = metadata["index"]
else:
logging.warning(f"Message ID {new_message_id} not found in metadata.")
def on_list_view_selected(self, event: ListView.Selected) -> None:
"""Called when an item in the list view is selected."""
@@ -257,31 +259,31 @@ class EmailViewerApp(App):
return
self.current_message_id = int(self.all_envelopes[event.list_view.index]["id"])
# @work(exclusive=False)
# async def fetch_one_message(self, new_message_id: int) -> None:
# content_container = self.query_one(ContentContainer)
# try:
# process = await asyncio.create_subprocess_shell(
# f"himalaya message read {str(new_message_id)} -p",
# stdout=asyncio.subprocess.PIPE,
# stderr=asyncio.subprocess.PIPE,
# )
# stdout, stderr = await process.communicate()
# logging.info(f"stdout: {stdout.decode()[0:50]}...")
# if process.returncode == 0:
# # Render the email content as Markdown
# fixedText = stdout.decode().replace("(https://urldefense.com/v3/", "(")
# fixedText = re.sub(r"atlOrigin.+?\)", ")", fixedText)
# logging.info(f"rendering fixedText: {fixedText[0:50]}")
# self.message_body_cache[new_message_id] = fixedText
# await content_container.display_content(new_message_id)
# self.query_one("#main_content").loading = False
# logging.info(fixedText)
# except Exception as e:
# self.show_status(f"Error fetching message content: {e}", "error")
# logging.error(f"Error fetching message content: {e}")
@work(exclusive=False)
async def fetch_envelopes(self) -> None:
@@ -429,11 +431,7 @@ class EmailViewerApp(App):
message, title="Status", severity=severity, timeout=2.6, markup=True
)
async def action_toggle_sort_order(self) -> None:
"""Toggle the sort order of the envelope list."""
@@ -447,6 +445,11 @@ class EmailViewerApp(App):
else:
self.action_newest()
async def action_toggle_mode(self) -> None:
"""Toggle the content mode between plaintext and markdown."""
content_container = self.query_one(ContentContainer)
await content_container.toggle_mode()
def action_next(self) -> None:
if not self.current_message_index >= 0:
return


@@ -146,3 +146,22 @@ Label.group_header {
width: 100%;
padding: 0 1;
}
#plaintext_content {
padding: 1 2;
height: auto;
width: 100%;
}
.hidden {
display: none;
}
#markdown_content {
padding: 1 2;
}
ContentContainer {
width: 100%;
height: 1fr;
}

127
widgets/ContentContainer.py Normal file

@@ -0,0 +1,127 @@
import re
import asyncio
import logging
from collections import OrderedDict
from textual.app import ComposeResult
from textual.widgets import Label, Markdown
from textual.containers import ScrollableContainer
from widgets.EnvelopeHeader import EnvelopeHeader
class ContentContainer(ScrollableContainer):
"""A custom container that can switch between plaintext and markdown rendering."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.plaintext_mode = True
self.header_expanded = True
self.markup_worker = None
self.current_text = ""
self.current_id = None
# Manual LRU cache of message bodies (max 100). functools.lru_cache is
# unsuitable here: wrapping the async fetch would cache one-shot
# coroutine objects instead of the decoded message text.
self._body_cache = OrderedDict()
def compose(self) -> ComposeResult:
"""Compose the container with a label for plaintext and markdown for rich content."""
yield EnvelopeHeader()
yield Label(id="plaintext_content")
yield Markdown(id="markdown_content", classes="hidden")
def update_header(self, subject: str = "", date: str = "", from_: str = "", to: str = "", cc: str = "", bcc: str = "") -> None:
"""Update the header with the given email details."""
header = self.query_one(EnvelopeHeader)
header.subject = subject
header.date = date
header.from_ = from_
header.to = to
header.cc = cc
header.bcc = bcc
def action_toggle_header(self) -> None:
"""Toggle the visibility of the EnvelopeHeader panel."""
header = self.query_one(EnvelopeHeader)
header.styles.height = "1" if self.header_expanded else "auto"
self.header_expanded = not self.header_expanded
async def display_content(self, message_id: int) -> None:
"""Display content for the given message ID."""
self.current_id = message_id
# Show loading state
self.loading = True
# Get message body (from cache or fetch new)
message_text = await self.get_message_body(message_id)
self.current_text = message_text
# Update the plaintext content
plaintext = self.query_one("#plaintext_content", Label)
plaintext.update(message_text) # Label.update is synchronous, not awaitable
if not self.plaintext_mode:
# We're in markdown mode, so render the markdown
await self.render_markdown()
else:
# Hide markdown, show plaintext
plaintext.remove_class("hidden")
self.query_one("#markdown_content").add_class("hidden")
self.loading = False
async def get_message_body(self, message_id: int) -> str:
"""Return a message body, keeping up to 100 entries in the LRU cache."""
if message_id in self._body_cache:
self._body_cache.move_to_end(message_id)
return self._body_cache[message_id]
body = await self._get_message_body(message_id)
self._body_cache[message_id] = body
if len(self._body_cache) > 100:
self._body_cache.popitem(last=False)
return body
async def _get_message_body(self, message_id: int) -> str:
"""Fetch the message body from Himalaya CLI."""
try:
process = await asyncio.create_subprocess_shell(
f"himalaya message read {str(message_id)} -p",
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
stdout, stderr = await process.communicate()
logging.info(f"stdout: {stdout.decode()[0:50]}...")
if process.returncode == 0:
# Process the email content
fixedText = stdout.decode().replace("https://urldefense.com/v3/", "")
fixedText = re.sub(r"atlOrigin.+?\w", "", fixedText)
logging.info(f"rendering fixedText: {fixedText[0:50]}")
return fixedText
else:
logging.error(f"Error fetching message: {stderr.decode()}")
return f"Error fetching message content: {stderr.decode()}"
except Exception as e:
logging.error(f"Error fetching message content: {e}")
return f"Error fetching message content: {e}"
async def render_markdown(self) -> None:
"""Render the markdown content asynchronously."""
if self.markup_worker:
self.markup_worker.cancel()
markdown = self.query_one("#markdown_content", Markdown)
plaintext = self.query_one("#plaintext_content", Label)
await markdown.update(self.current_text)
# Show markdown, hide plaintext
markdown.remove_class("hidden")
plaintext.add_class("hidden")
async def toggle_mode(self) -> None:
"""Toggle between plaintext and markdown mode."""
self.plaintext_mode = not self.plaintext_mode
if self.plaintext_mode:
# Switch to plaintext
self.query_one("#plaintext_content").remove_class("hidden")
self.query_one("#markdown_content").add_class("hidden")
else:
# Switch to markdown
await self.render_markdown()
return self.plaintext_mode
def clear_cache(self) -> None:
"""Clear the message body cache."""
self._body_cache.clear()
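For reference, a minimal host app for this widget might look like the sketch below (hypothetical code, assuming the widgets/ package layout used by the imports above and a message id that the himalaya CLI can resolve):

from textual.app import App, ComposeResult

from widgets.ContentContainer import ContentContainer


class PreviewApp(App):
    """Hypothetical harness that shows one message in a ContentContainer."""

    def compose(self) -> ComposeResult:
        yield ContentContainer(id="main_content")

    async def on_mount(self) -> None:
        # 1 is an arbitrary message id; himalaya must be able to read it
        await self.query_one(ContentContainer).display_content(1)


if __name__ == "__main__":
    PreviewApp().run()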

pyproject.toml

@@ -17,4 +17,5 @@ dependencies = [
[dependency-groups]
dev = [
"ruff>=0.11.8",
"textual>=3.2.0",
]

19
tui.py

@@ -1,19 +0,0 @@
from textual.app import App, ComposeResult
from textual.widgets import Header, Footer, Static, Label
class MSALApp(App):
"""A Textual app for MSAL authentication."""
CSS_PATH = "msal_app.tcss" # Optional: For styling
def compose(self) -> ComposeResult:
"""Create child widgets for the app."""
yield Header(show_clock=True)
yield Footer()
yield Static(Label("MSAL Authentication App"), id="main_content")
if __name__ == "__main__":
app = MSALApp()
app.run()

6
uv.lock generated

@@ -213,6 +213,7 @@ dependencies = [
[package.dev-dependencies]
dev = [
{ name = "ruff" },
{ name = "textual" },
]
[package.metadata]
@@ -227,7 +228,10 @@ requires-dist = [
]
[package.metadata.requires-dev]
dev = [{ name = "ruff", specifier = ">=0.11.8" }]
dev = [
{ name = "ruff", specifier = ">=0.11.8" },
{ name = "textual", specifier = ">=3.2.0" },
]
[[package]]
name = "html2text"