dashboard sync app

Tim Bendt
2025-12-16 17:13:26 -05:00
parent 73079f743a
commit d7c82a0da0
25 changed files with 4181 additions and 69 deletions


@@ -9,7 +9,7 @@ class GitLabMonitorConfig:
     def __init__(self, config_path: Optional[str] = None):
         self.config_path = config_path or os.path.expanduser(
-            "~/.config/gtd-tools/gitlab_monitor.yaml"
+            "~/.config/luk/gitlab_monitor.yaml"
         )
         self.config = self._load_config()
@@ -56,9 +56,7 @@ class GitLabMonitorConfig:
             },
             "logging": {
                 "level": "INFO",
-                "log_file": os.path.expanduser(
-                    "~/.config/gtd-tools/gitlab_monitor.log"
-                ),
+                "log_file": os.path.expanduser("~/.local/share/luk/gitlab_monitor.log"),
             },
         }
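
The defaults move from the old gtd-tools locations to luk, and the log file additionally moves out of ~/.config into ~/.local/share, matching the XDG convention of keeping configuration and writable data separate. A minimal sketch of how these locations could be derived in one place, honoring the XDG environment variables; the helper names here are hypothetical and not part of this commit:

import os

def luk_config_path(filename: str) -> str:
    # Config files, e.g. ~/.config/luk/gitlab_monitor.yaml (hypothetical helper)
    base = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
    return os.path.join(base, "luk", filename)

def luk_data_path(filename: str) -> str:
    # Logs and other writable data, e.g. ~/.local/share/luk/gitlab_monitor.log (hypothetical helper)
    base = os.environ.get("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
    return os.path.join(base, "luk", filename)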


@@ -15,7 +15,12 @@ class GodspeedClient:
     BASE_URL = "https://api.godspeedapp.com"

-    def __init__(self, email: str = None, password: str = None, token: str = None):
+    def __init__(
+        self,
+        email: Optional[str] = None,
+        password: Optional[str] = None,
+        token: Optional[str] = None,
+    ):
         self.email = email
         self.password = password
         self.token = token
@@ -60,7 +65,9 @@ class GodspeedClient:
         response.raise_for_status()
         return response.json()

-    def get_tasks(self, list_id: str = None, status: str = None) -> Dict[str, Any]:
+    def get_tasks(
+        self, list_id: Optional[str] = None, status: Optional[str] = None
+    ) -> Dict[str, Any]:
         """Get tasks with optional filtering."""
         params = {}
         if list_id:
@@ -81,8 +88,8 @@
     def create_task(
         self,
         title: str,
-        list_id: str = None,
-        notes: str = None,
+        list_id: Optional[str] = None,
+        notes: Optional[str] = None,
         location: str = "end",
         **kwargs,
     ) -> Dict[str, Any]:
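
Changing the defaults from str = None to Optional[str] = None does not alter runtime behavior; it makes the implicit Optional explicit, which PEP 484 recommends and which current mypy defaults (no_implicit_optional) otherwise flag. A short usage sketch of the updated client; the import path, token, and field values are placeholders, not taken from the commit:

from godspeed_client import GodspeedClient  # import path assumed

client = GodspeedClient(token="YOUR_API_TOKEN")  # or email/password; all three are now Optional
tasks = client.get_tasks(status="incomplete")    # both filters may be omitted
new_task = client.create_task("Review dashboard sync", notes="Created from the GitLab monitor")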


@@ -63,9 +63,22 @@ def get_access_token(scopes):
     )
     accounts = app.get_accounts()
     token_response = None
+
+    # Try silent authentication first
     if accounts:
         token_response = app.acquire_token_silent(scopes, account=accounts[0])
-    else:
+
+    # If silent auth failed or no accounts, clear cache and do device flow
+    if not token_response or "access_token" not in token_response:
+        # Clear the cache to force fresh authentication
+        if os.path.exists(cache_file):
+            os.remove(cache_file)
+        cache = msal.SerializableTokenCache()  # Create new empty cache
+        app = msal.PublicClientApplication(
+            client_id, authority=authority, token_cache=cache
+        )
         flow = app.initiate_device_flow(scopes=scopes)
         if "user_code" not in flow:
             raise Exception("Failed to create device flow")
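
The reworked get_access_token now tries silent authentication against the cached account first and only falls back to the device-code flow after wiping the serialized cache. For context, a sketch of how such a device flow is typically completed and persisted with MSAL; this is a continuation under assumed surrounding code, not lines from this file:

print(flow["message"])  # instructs the user to visit the verification URL and enter the code
token_response = app.acquire_token_by_device_flow(flow)  # blocks until the user completes sign-in

if "access_token" not in token_response:
    raise Exception(f"Authentication failed: {token_response.get('error_description')}")

# Persist the refreshed cache so the next run can authenticate silently again.
if cache.has_state_changed:
    with open(cache_file, "w") as f:
        f.write(cache.serialize())

headers = {"Authorization": f"Bearer {token_response['access_token']}"}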


@@ -18,16 +18,50 @@ semaphore = asyncio.Semaphore(2)
 async def _handle_throttling_retry(func, *args, max_retries=3):
-    """Handle 429 throttling with exponential backoff retry."""
+    """Handle 429 throttling and 401 authentication errors with exponential backoff retry."""
     for attempt in range(max_retries):
         try:
             return await func(*args)
         except Exception as e:
-            if "429" in str(e) and attempt < max_retries - 1:
+            error_str = str(e)
+            if (
+                "429" in error_str
+                or "InvalidAuthenticationToken" in error_str
+                or "401" in error_str
+            ) and attempt < max_retries - 1:
                 wait_time = (2**attempt) + 1  # Exponential backoff: 2, 3, 5 seconds
-                print(
-                    f"Rate limited, waiting {wait_time}s before retry {attempt + 1}/{max_retries}"
-                )
+                if "429" in error_str:
+                    print(
+                        f"Rate limited, waiting {wait_time}s before retry {attempt + 1}/{max_retries}"
+                    )
+                elif "InvalidAuthenticationToken" in error_str or "401" in error_str:
+                    print(
+                        f"Authentication failed (token expired), refreshing token and retrying in {wait_time}s (attempt {attempt + 1}/{max_retries})"
+                    )
+                    # Force re-authentication by clearing cache and getting new token
+                    import os
+                    cache_file = "token_cache.bin"
+                    if os.path.exists(cache_file):
+                        os.remove(cache_file)
+                    # Re-import and call get_access_token to refresh
+                    from src.services.microsoft_graph.auth import get_access_token
+                    # We need to get the scopes from somewhere - for now assume standard scopes
+                    scopes = [
+                        "https://graph.microsoft.com/Calendars.Read",
+                        "https://graph.microsoft.com/Mail.ReadWrite",
+                    ]
+                    try:
+                        new_token, new_headers = get_access_token(scopes)
+                        # Update the headers in args - this is a bit hacky but should work
+                        if len(args) > 1 and isinstance(args[1], dict):
+                            args = list(args)
+                            args[1] = new_headers
+                            args = tuple(args)
+                    except Exception as auth_error:
+                        print(f"Failed to refresh token: {auth_error}")
+                        raise e  # Re-raise original error
                 await asyncio.sleep(wait_time)
                 continue
             raise e
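
With max_retries=3, only the first two failures actually sleep and retry (2 s, then 3 s); the third re-raises. Callers are expected to route every request through this wrapper so throttling and expired tokens are handled in one place. A usage sketch inferred from the signatures above; the public helper names are assumptions, not shown in this hunk:

async def fetch(url, headers):
    # Retries 429s and expired-token 401s transparently before giving up.
    return await _handle_throttling_retry(_fetch_impl, url, headers)

async def post(url, headers, json_data):
    return await _handle_throttling_retry(_post_impl, url, headers, json_data)

Note that the 401 recovery only works because headers is args[1] in every _impl signature; the refresh replaces that positional argument in place, so reordering the parameters would silently break it.
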
@@ -55,10 +89,11 @@ async def _fetch_impl(url, headers):
     async with semaphore:
         async with aiohttp.ClientSession() as session:
             async with session.get(url, headers=headers) as response:
-                if response.status == 429:
-                    # Let the retry handler deal with throttling
+                if response.status in [401, 429]:
+                    # Let the retry handler deal with authentication and throttling
+                    response_text = await response.text()
                     raise Exception(
-                        f"Failed to fetch {url}: {response.status} {await response.text()}"
+                        f"Failed to fetch {url}: {response.status} {response_text}"
                     )
                 elif response.status != 200:
                     raise Exception(
@@ -92,9 +127,10 @@ async def _post_impl(url, headers, json_data):
     async with semaphore:
         async with aiohttp.ClientSession() as session:
             async with session.post(url, headers=headers, json=json_data) as response:
-                if response.status == 429:
+                if response.status in [401, 429]:
+                    response_text = await response.text()
                     raise Exception(
-                        f"Failed to post {url}: {response.status} {await response.text()}"
+                        f"Failed to post {url}: {response.status} {response_text}"
                     )
                 return response.status
@@ -119,9 +155,10 @@ async def _patch_impl(url, headers, json_data):
     async with semaphore:
         async with aiohttp.ClientSession() as session:
             async with session.patch(url, headers=headers, json=json_data) as response:
-                if response.status == 429:
+                if response.status in [401, 429]:
+                    response_text = await response.text()
                     raise Exception(
-                        f"Failed to patch {url}: {response.status} {await response.text()}"
+                        f"Failed to patch {url}: {response.status} {response_text}"
                     )
                 return response.status
@@ -145,9 +182,10 @@ async def _delete_impl(url, headers):
     async with semaphore:
         async with aiohttp.ClientSession() as session:
            async with session.delete(url, headers=headers) as response:
-                if response.status == 429:
+                if response.status in [401, 429]:
+                    response_text = await response.text()
                     raise Exception(
-                        f"Failed to delete {url}: {response.status} {await response.text()}"
+                        f"Failed to delete {url}: {response.status} {response_text}"
                     )
                 return response.status
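
Each wrapper now treats 401 the same as 429: the body is read into response_text first, then an exception whose message contains the numeric status is raised, which is exactly the string _handle_throttling_retry matches on. Since the check is duplicated across the fetch, post, patch, delete, and batch implementations, one possible consolidation is sketched below; this helper is hypothetical and not part of the commit:

async def _raise_for_retryable(action, url, response):
    # Read the body before raising so the message still carries the status
    # code and server text that the retry handler looks for.
    if response.status in [401, 429]:
        response_text = await response.text()
        raise Exception(f"Failed to {action} {url}: {response.status} {response_text}")

# e.g. inside _fetch_impl:
#     await _raise_for_retryable("fetch", url, response)
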
@@ -176,9 +214,10 @@ async def _batch_impl(requests, headers):
             async with session.post(
                 batch_url, headers=headers, json=batch_data
             ) as response:
-                if response.status == 429:
+                if response.status in [401, 429]:
+                    response_text = await response.text()
                     raise Exception(
-                        f"Batch request failed: {response.status} {await response.text()}"
+                        f"Batch request failed: {response.status} {response_text}"
                     )
                 elif response.status != 200:
                     raise Exception(