gitlab feature start
@@ -34,11 +34,11 @@ from textual import work
 from textual.widgets.option_list import Option
 
 # Import file icons utility - note the updated import
-from utils.file_icons import get_file_icon
+from src.utils.file_icons import get_file_icon
 
 # Import our DocumentViewerScreen
-sys.path.append(os.path.join(os.path.dirname(__file__), "maildir_gtd"))
-from maildir_gtd.screens.DocumentViewer import DocumentViewerScreen
+sys.path.append(os.path.join(os.path.dirname(__file__), "src", "maildir_gtd"))
+from screens.DocumentViewer import DocumentViewerScreen
 
 
 class FolderHistoryEntry:
@@ -1,4 +1,5 @@
 [tools]
+bun = "latest"
 node = "22.17.1"
 uv = 'latest'
 
@@ -8,6 +8,7 @@ from .email import email
 from .calendar import calendar
 from .ticktick import ticktick
 from .godspeed import godspeed
+from .gitlab_monitor import gitlab_monitor
 
 
 @click.group()
@@ -22,8 +23,11 @@ cli.add_command(email)
 cli.add_command(calendar)
 cli.add_command(ticktick)
 cli.add_command(godspeed)
+cli.add_command(gitlab_monitor)
 
 # Add 'tt' as a short alias for ticktick
 cli.add_command(ticktick, name="tt")
 # Add 'gs' as a short alias for godspeed
 cli.add_command(godspeed, name="gs")
+# Add 'glm' as a short alias for gitlab_monitor
+cli.add_command(gitlab_monitor, name="glm")
@@ -1,9 +1,12 @@
 import click
 import subprocess
+import os
 
 
 @click.command()
 def drive():
     """View OneDrive files."""
     click.echo("Launching OneDrive viewer...")
-    subprocess.run(["python3", "src/drive_view_tui.py"])
+    # Get the directory containing this file, then go up to project root
+    current_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+    subprocess.run(["python3", "drive_view_tui.py"], cwd=current_dir)
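A minimal sketch of what the triple os.path.dirname above resolves to, assuming the command module sits at <project root>/src/cli/drive.py (the concrete path below is hypothetical):

# Sketch only: three dirname() calls walk from src/cli/drive.py up to the project root.
import os

module_path = "/home/user/gtd-tools/src/cli/drive.py"   # hypothetical location of this file
project_root = os.path.dirname(os.path.dirname(os.path.dirname(module_path)))
print(project_root)  # -> /home/user/gtd-tools, used as cwd when launching drive_view_tui.py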
src/cli/gitlab_monitor.py (new file, 152 lines)
@@ -0,0 +1,152 @@
import click
import asyncio
import os
import signal
import subprocess
import sys
from pathlib import Path


@click.group()
def gitlab_monitor():
    """GitLab pipeline monitoring daemon."""
    pass


@gitlab_monitor.command()
@click.option("--config", help="Path to configuration file")
@click.option("--daemon", "-d", is_flag=True, help="Run in background as daemon")
def start(config, daemon):
    """Start the GitLab pipeline monitoring daemon."""
    daemon_path = os.path.join(
        os.path.dirname(__file__), "..", "services", "gitlab_monitor", "daemon.py"
    )

    if daemon:
        # Run as background daemon
        click.echo("Starting GitLab pipeline monitor daemon in background...")

        cmd = [sys.executable, daemon_path]
        if config:
            cmd.extend(["--config", config])

        # Create pid file
        pid_file = os.path.expanduser("~/.config/gtd-tools/gitlab_monitor.pid")
        Path(pid_file).parent.mkdir(parents=True, exist_ok=True)

        # Start daemon process
        process = subprocess.Popen(
            cmd,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            preexec_fn=os.setsid,
        )

        # Save PID
        with open(pid_file, "w") as f:
            f.write(str(process.pid))

        click.echo(f"Daemon started with PID {process.pid}")
        click.echo(f"PID file: {pid_file}")
    else:
        # Run in foreground
        click.echo("Starting GitLab pipeline monitor (press Ctrl+C to stop)...")

        # Import and run the daemon
        from src.services.gitlab_monitor.daemon import main

        asyncio.run(main())


@gitlab_monitor.command()
def stop():
    """Stop the GitLab pipeline monitoring daemon."""
    pid_file = os.path.expanduser("~/.config/gtd-tools/gitlab_monitor.pid")

    if not os.path.exists(pid_file):
        click.echo("Daemon is not running (no PID file found)")
        return

    try:
        with open(pid_file, "r") as f:
            pid = int(f.read().strip())

        # Send SIGTERM to process group
        os.killpg(os.getpgid(pid), signal.SIGTERM)

        # Remove PID file
        os.unlink(pid_file)

        click.echo(f"Daemon stopped (PID {pid})")
    except (ValueError, ProcessLookupError, OSError) as e:
        click.echo(f"Error stopping daemon: {e}")
        # Clean up stale PID file
        if os.path.exists(pid_file):
            os.unlink(pid_file)


@gitlab_monitor.command()
def status():
    """Check the status of the GitLab pipeline monitoring daemon."""
    pid_file = os.path.expanduser("~/.config/gtd-tools/gitlab_monitor.pid")

    if not os.path.exists(pid_file):
        click.echo("Daemon is not running")
        return

    try:
        with open(pid_file, "r") as f:
            pid = int(f.read().strip())

        # Check if process exists
        os.kill(pid, 0)  # Send signal 0 to check if process exists
        click.echo(f"Daemon is running (PID {pid})")
    except (ValueError, ProcessLookupError, OSError):
        click.echo("Daemon is not running (stale PID file)")
        # Clean up stale PID file
        os.unlink(pid_file)


@gitlab_monitor.command()
@click.option("--config", help="Path to configuration file")
def test(config):
    """Test the configuration and dependencies."""
    from src.services.gitlab_monitor.daemon import GitLabPipelineMonitor

    monitor = GitLabPipelineMonitor(config)

    click.echo("Configuration test:")
    click.echo(
        f"GitLab token configured: {'✓' if monitor.config.get_gitlab_token() else '✗'}"
    )
    click.echo(
        f"OpenAI key configured: {'✓' if monitor.config.get_openai_key() else '✗'}"
    )
    click.echo(f"Subject patterns: {monitor.config.get_subject_patterns()}")
    click.echo(f"Sender patterns: {monitor.config.get_sender_patterns()}")
    click.echo(f"Check interval: {monitor.config.get_check_interval()}s")
    click.echo(f"Config file: {monitor.config.config_path}")


@gitlab_monitor.command()
def config():
    """Show the current configuration file path and create default if needed."""
    from src.services.gitlab_monitor.config import GitLabMonitorConfig

    config = GitLabMonitorConfig()
    click.echo(f"Configuration file: {config.config_path}")

    if os.path.exists(config.config_path):
        click.echo("Configuration file exists")
    else:
        click.echo("Default configuration file created")

    click.echo("\nTo configure the daemon:")
    click.echo("1. Set environment variables:")
    click.echo("   export GITLAB_API_TOKEN='your_gitlab_token'")
    click.echo("   export OPENAI_API_KEY='your_openai_key'")
    click.echo("2. Or edit the configuration file directly")


if __name__ == "__main__":
    gitlab_monitor()
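The background mode above relies on preexec_fn=os.setsid so the daemon becomes the leader of its own process group, which is what lets stop() take down the whole group with os.killpg. A minimal, standalone sketch of that pattern (POSIX only; "sleep 60" stands in for the real daemon):

# Sketch of the PID-file / process-group pattern used by `start -d` and `stop`.
import os, signal, subprocess

process = subprocess.Popen(["sleep", "60"], preexec_fn=os.setsid)  # child starts its own session/group
pid = process.pid  # this is what the CLI writes to the PID file

# Later, "stop": signal the entire group that the child leads, not just the one PID.
os.killpg(os.getpgid(pid), signal.SIGTERM)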
src/services/gitlab_monitor/__init__.py (new file, empty)
src/services/gitlab_monitor/config.py (new file, 111 lines)
@@ -0,0 +1,111 @@
import os
import yaml
from typing import Optional, Dict, Any
from pathlib import Path


class GitLabMonitorConfig:
    """Configuration management for GitLab pipeline monitoring daemon."""

    def __init__(self, config_path: Optional[str] = None):
        self.config_path = config_path or os.path.expanduser(
            "~/.config/gtd-tools/gitlab_monitor.yaml"
        )
        self.config = self._load_config()

    def _load_config(self) -> Dict[str, Any]:
        """Load configuration from file or create default."""
        config_file = Path(self.config_path)

        if config_file.exists():
            try:
                with open(config_file, "r") as f:
                    return yaml.safe_load(f) or {}
            except Exception as e:
                print(f"Error loading config: {e}")
                return self._default_config()
        else:
            # Create default config file
            config = self._default_config()
            self._save_config(config)
            return config

    def _default_config(self) -> Dict[str, Any]:
        """Return default configuration."""
        return {
            "email_monitoring": {
                "subject_patterns": ["Failed pipeline", "Pipeline failed"],
                "sender_patterns": ["*@gitlab.com", "*gitlab*"],
                "check_interval": 30,  # seconds
            },
            "gitlab": {
                "api_token": os.getenv("GITLAB_API_TOKEN", ""),
                "base_url": "https://gitlab.com",
                "default_project_id": None,
            },
            "openai": {
                "api_key": os.getenv("OPENAI_API_KEY", ""),
                "model": "gpt-4",  # GPT-5 not available yet, using GPT-4
                "max_tokens": 1000,
                "temperature": 0.1,
            },
            "notifications": {
                "enabled": True,
                "sound": True,
                "show_summary_window": True,
            },
            "logging": {
                "level": "INFO",
                "log_file": os.path.expanduser(
                    "~/.config/gtd-tools/gitlab_monitor.log"
                ),
            },
        }

    def _save_config(self, config: Dict[str, Any]):
        """Save configuration to file."""
        config_file = Path(self.config_path)
        config_file.parent.mkdir(parents=True, exist_ok=True)

        with open(config_file, "w") as f:
            yaml.dump(config, f, default_flow_style=False)

    def get_gitlab_token(self) -> Optional[str]:
        """Get GitLab API token."""
        return self.config.get("gitlab", {}).get("api_token") or os.getenv(
            "GITLAB_API_TOKEN"
        )

    def get_openai_key(self) -> Optional[str]:
        """Get OpenAI API key."""
        return self.config.get("openai", {}).get("api_key") or os.getenv(
            "OPENAI_API_KEY"
        )

    def get_subject_patterns(self) -> list:
        """Get email subject patterns to monitor."""
        return self.config.get("email_monitoring", {}).get("subject_patterns", [])

    def get_sender_patterns(self) -> list:
        """Get sender patterns to monitor."""
        return self.config.get("email_monitoring", {}).get("sender_patterns", [])

    def get_check_interval(self) -> int:
        """Get email check interval in seconds."""
        return self.config.get("email_monitoring", {}).get("check_interval", 30)

    def get_gitlab_base_url(self) -> str:
        """Get GitLab base URL."""
        return self.config.get("gitlab", {}).get("base_url", "https://gitlab.com")

    def get_openai_model(self) -> str:
        """Get OpenAI model to use."""
        return self.config.get("openai", {}).get("model", "gpt-4")

    def is_notifications_enabled(self) -> bool:
        """Check if notifications are enabled."""
        return self.config.get("notifications", {}).get("enabled", True)

    def save(self):
        """Save current configuration to file."""
        self._save_config(self.config)
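A short sketch of how this config behaves on first use, assuming the package is importable as src.services.gitlab_monitor (the import path the CLI above uses) and no environment variables are set: the constructor writes the default YAML shown in _default_config and the getters fall back to those defaults.

# Sketch: first use writes ~/.config/gtd-tools/gitlab_monitor.yaml with the defaults above.
from src.services.gitlab_monitor.config import GitLabMonitorConfig

cfg = GitLabMonitorConfig()              # creates the default file if it is missing
print(cfg.get_gitlab_base_url())         # https://gitlab.com
print(cfg.get_subject_patterns())        # ['Failed pipeline', 'Pipeline failed']
print(cfg.get_check_interval())          # 30
print(bool(cfg.get_gitlab_token()))      # False until GITLAB_API_TOKEN is set or the file is edited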
src/services/gitlab_monitor/daemon.py (new file, 250 lines)
@@ -0,0 +1,250 @@
import asyncio
import logging
import fnmatch
import re
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta
import time
import os
import sys

# Add src to path for imports
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))

from src.services.himalaya import client as himalaya_client
from .config import GitLabMonitorConfig
from .gitlab_client import GitLabClient
from .openai_analyzer import OpenAIAnalyzer
from .notifications import MacOSNotificationManager


class GitLabPipelineMonitor:
    """Daemon that monitors emails for GitLab pipeline failures and provides AI analysis."""

    def __init__(self, config_path: Optional[str] = None):
        self.config = GitLabMonitorConfig(config_path)
        self.gitlab_client = None
        self.openai_analyzer = None
        self.notifications = MacOSNotificationManager()
        self.last_check_time = datetime.now()
        self.processed_emails = set()  # Track processed email IDs

        self._setup_logging()
        self._initialize_clients()

    def _setup_logging(self):
        """Configure logging."""
        log_level = getattr(
            logging, self.config.config.get("logging", {}).get("level", "INFO")
        )
        log_file = self.config.config.get("logging", {}).get("log_file")

        logging.basicConfig(
            level=log_level,
            format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            handlers=[
                logging.StreamHandler(),
                logging.FileHandler(log_file) if log_file else logging.NullHandler(),
            ],
        )

        self.logger = logging.getLogger(__name__)

    def _initialize_clients(self):
        """Initialize GitLab and OpenAI clients."""
        gitlab_token = self.config.get_gitlab_token()
        openai_key = self.config.get_openai_key()

        if not gitlab_token:
            self.logger.warning(
                "GitLab API token not configured. Set GITLAB_API_TOKEN environment variable."
            )
            return

        if not openai_key:
            self.logger.warning(
                "OpenAI API key not configured. Set OPENAI_API_KEY environment variable."
            )
            return

        self.gitlab_client = GitLabClient(
            self.config.get_gitlab_base_url(), gitlab_token
        )

        self.openai_analyzer = OpenAIAnalyzer(
            openai_key, self.config.get_openai_model()
        )

        self.logger.info("GitLab Pipeline Monitor initialized successfully")

    async def start_monitoring(self):
        """Start the email monitoring daemon."""
        if not self.gitlab_client or not self.openai_analyzer:
            self.logger.error("Cannot start monitoring: missing API tokens")
            return

        self.logger.info("Starting GitLab pipeline monitoring daemon...")

        check_interval = self.config.get_check_interval()

        while True:
            try:
                await self._check_for_pipeline_emails()
                await asyncio.sleep(check_interval)
            except KeyboardInterrupt:
                self.logger.info("Monitoring stopped by user")
                break
            except Exception as e:
                self.logger.error(f"Error in monitoring loop: {e}")
                await asyncio.sleep(check_interval)

    async def _check_for_pipeline_emails(self):
        """Check for new GitLab pipeline failure emails."""
        try:
            # Get recent emails using the existing Himalaya client
            envelopes, success = await himalaya_client.list_envelopes(limit=50)

            if not success or not envelopes:
                return

            for envelope in envelopes:
                # Skip if we've already processed this email
                email_id = envelope.get("id")
                if email_id in self.processed_emails:
                    continue

                # Check if email matches our patterns
                if self._is_pipeline_failure_email(envelope):
                    self.logger.info(f"Found pipeline failure email: {email_id}")
                    await self._process_pipeline_failure_email(envelope)
                    self.processed_emails.add(email_id)

            # Limit the size of processed emails set
            if len(self.processed_emails) > 1000:
                # Keep only the most recent 500
                recent_emails = list(self.processed_emails)[-500:]
                self.processed_emails = set(recent_emails)

        except Exception as e:
            self.logger.error(f"Error checking emails: {e}")

    def _is_pipeline_failure_email(self, envelope: Dict[str, Any]) -> bool:
        """Check if email matches pipeline failure patterns."""
        subject = envelope.get("subject", "").lower()
        sender_addr = envelope.get("from", {}).get("addr", "").lower()

        # Check subject patterns
        subject_patterns = self.config.get_subject_patterns()
        subject_match = any(pattern.lower() in subject for pattern in subject_patterns)

        # Check sender patterns
        sender_patterns = self.config.get_sender_patterns()
        sender_match = any(
            fnmatch.fnmatch(sender_addr, pattern.lower()) for pattern in sender_patterns
        )

        return subject_match and sender_match

    async def _process_pipeline_failure_email(self, envelope: Dict[str, Any]):
        """Process a pipeline failure email."""
        try:
            # Get email content
            email_id = envelope.get("id")
            content, success = await himalaya_client.get_message_content(email_id)

            if not success or not content:
                self.logger.error(f"Failed to get content for email {email_id}")
                return

            # Extract GitLab project and pipeline information
            project_info = self.gitlab_client.extract_project_info_from_email(content)

            if not project_info:
                self.logger.warning(
                    f"Could not extract GitLab info from email {email_id}"
                )
                return

            project_path = project_info.get("project_path")
            pipeline_id = project_info.get("pipeline_id")

            if not project_path or not pipeline_id:
                self.logger.warning(
                    f"Missing project path or pipeline ID in email {email_id}"
                )
                return

            # Get GitLab project
            project = self.gitlab_client.get_project_by_path(project_path)
            if not project:
                self.logger.error(f"Could not find GitLab project: {project_path}")
                return

            project_id = project["id"]
            project_name = project["name"]

            # Get failed jobs with traces
            failed_jobs = self.gitlab_client.get_failed_jobs_with_traces(
                project_id, pipeline_id
            )

            if not failed_jobs:
                self.logger.info(f"No failed jobs found for pipeline {pipeline_id}")
                return

            # Send initial notification
            if self.config.is_notifications_enabled():
                self.notifications.send_pipeline_failure_notification(
                    project_name, pipeline_id, len(failed_jobs)
                )

            # Analyze failures with OpenAI
            analysis = self.openai_analyzer.analyze_pipeline_failures(failed_jobs)

            if analysis:
                self.logger.info(f"Analysis completed for pipeline {pipeline_id}")

                # Show analysis window
                if self.config.is_notifications_enabled():
                    self.notifications.show_failure_analysis(
                        project_name, pipeline_id, analysis
                    )
            else:
                self.logger.error(f"Failed to analyze pipeline {pipeline_id}")

        except Exception as e:
            self.logger.error(f"Error processing pipeline failure email: {e}")


async def main():
    """Main entry point for the daemon."""
    import argparse

    parser = argparse.ArgumentParser(description="GitLab Pipeline Monitoring Daemon")
    parser.add_argument("--config", help="Path to configuration file")
    parser.add_argument(
        "--test", action="store_true", help="Test configuration and exit"
    )

    args = parser.parse_args()

    monitor = GitLabPipelineMonitor(args.config)

    if args.test:
        print("Configuration test:")
        print(
            f"GitLab token configured: {'Yes' if monitor.config.get_gitlab_token() else 'No'}"
        )
        print(
            f"OpenAI key configured: {'Yes' if monitor.config.get_openai_key() else 'No'}"
        )
        print(f"Subject patterns: {monitor.config.get_subject_patterns()}")
        print(f"Sender patterns: {monitor.config.get_sender_patterns()}")
        print(f"Check interval: {monitor.config.get_check_interval()}s")
        return

    await monitor.start_monitoring()


if __name__ == "__main__":
    asyncio.run(main())
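The email filter requires both a subject match and a sender match. A standalone sketch of the check that _is_pipeline_failure_email performs with the default patterns from config.py (the sample subject and sender address below are hypothetical):

# Sketch of the matching logic in _is_pipeline_failure_email, using the default patterns.
import fnmatch

subject = "Failed pipeline for main | my-project | abc123".lower()   # hypothetical subject
sender_addr = "gitlab@mg.gitlab.com".lower()                          # hypothetical sender

subject_patterns = ["Failed pipeline", "Pipeline failed"]
sender_patterns = ["*@gitlab.com", "*gitlab*"]

subject_match = any(p.lower() in subject for p in subject_patterns)                    # True
sender_match = any(fnmatch.fnmatch(sender_addr, p.lower()) for p in sender_patterns)   # True, via "*gitlab*"
print(subject_match and sender_match)  # True -> the email would be processed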
src/services/gitlab_monitor/gitlab_client.py (new file, 106 lines)
@@ -0,0 +1,106 @@
import requests
import logging
from typing import Optional, Dict, Any, List
from urllib.parse import urljoin


class GitLabClient:
    """Client for interacting with GitLab CI API."""

    def __init__(self, base_url: str, api_token: str):
        self.base_url = base_url.rstrip("/")
        self.api_token = api_token
        self.session = requests.Session()
        self.session.headers.update(
            {"Authorization": f"Bearer {api_token}", "Content-Type": "application/json"}
        )

    def _make_request(
        self, endpoint: str, method: str = "GET", **kwargs
    ) -> Optional[Dict]:
        """Make API request to GitLab."""
        url = urljoin(f"{self.base_url}/api/v4/", endpoint)

        try:
            response = self.session.request(method, url, **kwargs)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            logging.error(f"GitLab API request failed: {e}")
            return None

    def extract_project_info_from_email(
        self, email_content: str
    ) -> Optional[Dict[str, Any]]:
        """Extract project ID and pipeline ID from GitLab notification email."""
        import re

        # Common patterns in GitLab emails
        patterns = {
            "project_url": r"https?://[^/]+/([^/]+/[^/]+)/-/pipelines/(\d+)",
            "pipeline_id": r"Pipeline #(\d+)",
            "project_name": r"Project:\s*([^\n]+)",
            "pipeline_url": r"(https?://[^/]+/[^/]+/[^/]+/-/pipelines/\d+)",
        }

        extracted = {}

        for key, pattern in patterns.items():
            match = re.search(pattern, email_content)
            if match:
                if key == "project_url":
                    extracted["project_path"] = match.group(1)
                    extracted["pipeline_id"] = int(match.group(2))
                elif key == "pipeline_id":
                    extracted["pipeline_id"] = int(match.group(1))
                elif key == "project_name":
                    extracted["project_name"] = match.group(1).strip()
                elif key == "pipeline_url":
                    extracted["pipeline_url"] = match.group(1)

        return extracted if extracted else None

    def get_project_by_path(self, project_path: str) -> Optional[Dict]:
        """Get project information by path (namespace/project)."""
        encoded_path = project_path.replace("/", "%2F")
        return self._make_request(f"projects/{encoded_path}")

    def get_pipeline(self, project_id: int, pipeline_id: int) -> Optional[Dict]:
        """Get pipeline information."""
        return self._make_request(f"projects/{project_id}/pipelines/{pipeline_id}")

    def get_pipeline_jobs(
        self, project_id: int, pipeline_id: int
    ) -> Optional[List[Dict]]:
        """Get jobs for a pipeline."""
        return self._make_request(f"projects/{project_id}/pipelines/{pipeline_id}/jobs")

    def get_job_trace(self, project_id: int, job_id: int) -> Optional[str]:
        """Get trace log for a specific job."""
        url = urljoin(
            f"{self.base_url}/api/v4/", f"projects/{project_id}/jobs/{job_id}/trace"
        )

        try:
            response = self.session.get(url)
            response.raise_for_status()
            return response.text
        except requests.exceptions.RequestException as e:
            logging.error(f"Failed to get job trace: {e}")
            return None

    def get_failed_jobs_with_traces(
        self, project_id: int, pipeline_id: int
    ) -> List[Dict]:
        """Get all failed jobs with their trace logs."""
        jobs = self.get_pipeline_jobs(project_id, pipeline_id)
        if not jobs:
            return []

        failed_jobs = [job for job in jobs if job.get("status") == "failed"]

        for job in failed_jobs:
            trace = self.get_job_trace(project_id, job["id"])
            job["trace"] = trace

        return failed_jobs
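A sketch of what the project_url pattern in extract_project_info_from_email captures from a notification body (the email text and project names below are made up for illustration):

# Sketch: the "project_url" regex yields both the project path and the pipeline ID.
import re

email_content = "Pipeline #12345 failed: https://gitlab.com/my-group/my-project/-/pipelines/12345"
match = re.search(r"https?://[^/]+/([^/]+/[^/]+)/-/pipelines/(\d+)", email_content)
print(match.group(1))       # my-group/my-project  -> stored as "project_path"
print(int(match.group(2)))  # 12345                -> stored as "pipeline_id"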
src/services/gitlab_monitor/notifications.py (new file, 69 lines)
@@ -0,0 +1,69 @@
import subprocess
import logging
from typing import Optional


class MacOSNotificationManager:
    """Manager for macOS notifications and display windows."""

    def __init__(self):
        self.app_name = "GitLab Pipeline Monitor"

    def send_notification(
        self, title: str, message: str, sound: bool = True, action_button: bool = True
    ) -> bool:
        """Send macOS notification with optional action button."""
        try:
            cmd = [
                "osascript",
                "-e",
                f'''display notification "{message}" with title "{title}" subtitle "{self.app_name}"''',
            ]

            if sound:
                cmd[-1] += ' sound name "Glass"'

            subprocess.run(cmd, check=True, capture_output=True)
            return True
        except subprocess.CalledProcessError as e:
            logging.error(f"Failed to send notification: {e}")
            return False

    def show_summary_window(self, title: str, summary: str) -> bool:
        """Display a summary window using AppleScript."""
        try:
            # Escape quotes in the summary
            escaped_summary = summary.replace('"', '\\"').replace("\n", "\\n")

            applescript = f'''
            tell application "System Events"
                display dialog "{escaped_summary}" with title "{title}" buttons {{"Copy", "Close"}} default button "Close" with icon note giving up after 300
                set buttonPressed to button returned of result
                if buttonPressed is "Copy" then
                    set the clipboard to "{escaped_summary}"
                end if
            end tell
            '''

            subprocess.run(
                ["osascript", "-e", applescript], check=True, capture_output=True
            )
            return True
        except subprocess.CalledProcessError as e:
            logging.error(f"Failed to show summary window: {e}")
            return False

    def send_pipeline_failure_notification(
        self, project_name: str, pipeline_id: int, job_count: int
    ) -> bool:
        """Send notification specifically for pipeline failures."""
        title = "GitLab Pipeline Failed"
        message = f"{project_name} Pipeline #{pipeline_id} - {job_count} failed job(s)"
        return self.send_notification(title, message, sound=True)

    def show_failure_analysis(
        self, project_name: str, pipeline_id: int, analysis: str
    ) -> bool:
        """Show the AI analysis of pipeline failures."""
        title = f"Pipeline Analysis - {project_name} #{pipeline_id}"
        return self.show_summary_window(title, analysis)
src/services/gitlab_monitor/openai_analyzer.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import openai
import logging
from typing import Dict, List, Optional


class OpenAIAnalyzer:
    """OpenAI client for analyzing pipeline failure logs."""

    def __init__(self, api_key: str, model: str = "gpt-4", max_tokens: int = 1000):
        self.client = openai.OpenAI(api_key=api_key)
        self.model = model
        self.max_tokens = max_tokens

    def analyze_pipeline_failures(self, failed_jobs: List[Dict]) -> Optional[str]:
        """Analyze pipeline failures and provide summary and fix suggestions."""
        if not failed_jobs:
            return None

        # Prepare the analysis prompt
        analysis_prompt = self._build_analysis_prompt(failed_jobs)

        try:
            response = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {
                        "role": "system",
                        "content": "You are a senior DevOps engineer helping to diagnose CI/CD pipeline failures. Provide concise, actionable summaries and solutions.",
                    },
                    {"role": "user", "content": analysis_prompt},
                ],
                max_tokens=self.max_tokens,
                temperature=0.1,
            )

            return response.choices[0].message.content
        except Exception as e:
            logging.error(f"OpenAI analysis failed: {e}")
            return None

    def _build_analysis_prompt(self, failed_jobs: List[Dict]) -> str:
        """Build the analysis prompt for OpenAI."""
        prompt = """Analyze the following GitLab CI pipeline failures and provide:

1. A brief summary of what went wrong (2-3 sentences max)
2. Specific fix recommendations for each job type
3. Organize by job name/type for easy scanning

Failed Jobs:
"""

        for job in failed_jobs:
            job_name = job.get("name", "Unknown Job")
            job_stage = job.get("stage", "Unknown Stage")
            # get_job_trace() may have stored None; fall back to placeholder text
            trace = job.get("trace") or "No trace available"

            # Truncate trace if too long (keep last 2000 chars for most relevant errors)
            if len(trace) > 2000:
                trace = "..." + trace[-2000:]

            prompt += f"""
## {job_name} (Stage: {job_stage})

```
{trace}
```

"""

        prompt += """
Please categorize fixes by job type:
- **Linting/Formatting** (eslint, prettier, black, etc.): Quick syntax fixes
- **Type Checking** (typescript, mypy, etc.): Type annotation issues
- **Tests** (jest, pytest, etc.): Test failures requiring code analysis
- **Build/Deploy**: Configuration or dependency issues

Format your response with clear headings and bullet points for quick scanning."""

        return prompt
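The prompt assembly can be exercised without an API key, since _build_analysis_prompt is pure string building. A sketch with a fabricated job dict, assuming the module is importable as src.services.gitlab_monitor.openai_analyzer:

# Sketch: prompt assembly only; no API call is made, and the job dict is fabricated.
from src.services.gitlab_monitor.openai_analyzer import OpenAIAnalyzer

analyzer = OpenAIAnalyzer(api_key="unused-for-prompt-building")
fake_job = {"name": "pytest", "stage": "test", "trace": "E   AssertionError: expected 200, got 500"}
print(analyzer._build_analysis_prompt([fake_job]))
# -> "Analyze the following GitLab CI pipeline failures..." plus a "## pytest (Stage: test)" section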