Replace emoji and > separator with nerdfont icons in URL shortener

Bendt committed 2025-12-19 16:22:32 -05:00
parent 994e545bd0
commit 98c318af04
2 changed files with 17 additions and 14 deletions
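
Note on the glyphs: both replacements are Nerd Fonts private-use codepoints, so they only render as icons in a terminal or editor using a Nerd Fonts patched font; elsewhere they show up as boxes or blanks. A quick way to eyeball them locally (only the two escapes below come from this commit; the sample strings are made up):

# Prints the new separator and link glyph used in this commit.
# \ueab6 is the nf-cod-chevron_right separator; \uf0c1 is the link icon
# that replaces the 🔗 emoji. Requires a Nerd Fonts patched font to render.
print("github.com" + " \ueab6 " + "owner/repo #123")
print("[\uf0c1 docs.google.com \ueab6 Document](https://docs.google.com/...)")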


@@ -86,6 +86,9 @@ class LinkItem:
         - Keeping first and last path segments, eliding middle only if needed
         - Adapting to available width
         """
+        # Nerdfont chevron separator (nf-cod-chevron_right)
+        sep = " \ueab6 "
 
         # Special handling for common sites
         path = path.strip("/")
@@ -95,26 +98,26 @@ class LinkItem:
             if match:
                 repo, type_, num = match.groups()
                 icon = "#" if type_ == "issues" else "PR#"
-                return f"{domain} > {repo} {icon}{num}"
+                return f"{domain}{sep}{repo} {icon}{num}"
             match = re.match(r"([^/]+/[^/]+)", path)
             if match:
-                return f"{domain} > {match.group(1)}"
+                return f"{domain}{sep}{match.group(1)}"
 
         # Google Docs
         if "docs.google.com" in domain:
             if "/document/" in path:
-                return f"{domain} > Document"
+                return f"{domain}{sep}Document"
             if "/spreadsheets/" in path:
-                return f"{domain} > Spreadsheet"
+                return f"{domain}{sep}Spreadsheet"
             if "/presentation/" in path:
-                return f"{domain} > Slides"
+                return f"{domain}{sep}Slides"
 
         # Jira/Atlassian
         if "atlassian.net" in domain or "jira" in domain.lower():
             match = re.search(r"([A-Z]+-\d+)", path)
             if match:
-                return f"{domain} > {match.group(1)}"
+                return f"{domain}{sep}{match.group(1)}"
 
         # GitLab
         if "gitlab" in domain.lower():
@@ -122,7 +125,7 @@ class LinkItem:
             if match:
                 repo, type_, num = match.groups()
                 icon = "#" if type_ == "issues" else "MR!"
-                return f"{domain} > {repo} {icon}{num}"
+                return f"{domain}{sep}{repo} {icon}{num}"
 
         # Generic shortening - keep URL readable
         if len(url) <= max_len:
@@ -136,31 +139,31 @@ class LinkItem:
         # Try to fit the full path first
         full_path = "/".join(path_parts)
-        result = f"{domain} > {full_path}"
+        result = f"{domain}{sep}{full_path}"
         if len(result) <= max_len:
             return result
 
         # Keep first segment + last two segments if possible
         if len(path_parts) >= 3:
             short_path = f"{path_parts[0]}/.../{path_parts[-2]}/{path_parts[-1]}"
-            result = f"{domain} > {short_path}"
+            result = f"{domain}{sep}{short_path}"
             if len(result) <= max_len:
                 return result
 
         # Keep first + last segment
         if len(path_parts) >= 2:
             short_path = f"{path_parts[0]}/.../{path_parts[-1]}"
-            result = f"{domain} > {short_path}"
+            result = f"{domain}{sep}{short_path}"
             if len(result) <= max_len:
                 return result
 
         # Just last segment
-        result = f"{domain} > .../{path_parts[-1]}"
+        result = f"{domain}{sep}.../{path_parts[-1]}"
         if len(result) <= max_len:
             return result
 
         # Truncate with ellipsis as last resort
-        result = f"{domain} > {path_parts[-1]}"
+        result = f"{domain}{sep}{path_parts[-1]}"
         if len(result) > max_len:
             result = result[: max_len - 3] + "..."
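
For context, the generic fallback above walks through progressively shorter candidates until one fits max_len, and only hard-truncates as a last resort. A standalone sketch of that cascade, using the same nerdfont separator (the function name and sample input are illustrative, not part of the actual LinkItem code):

# Illustrative sketch of the fallback cascade above; not the LinkItem method itself.
def shorten_path(domain: str, path_parts: list[str], max_len: int) -> str:
    sep = " \ueab6 "  # nf-cod-chevron_right, as introduced in this commit
    candidates = [f"{domain}{sep}{'/'.join(path_parts)}"]  # full path first
    if len(path_parts) >= 3:
        candidates.append(f"{domain}{sep}{path_parts[0]}/.../{path_parts[-2]}/{path_parts[-1]}")
    if len(path_parts) >= 2:
        candidates.append(f"{domain}{sep}{path_parts[0]}/.../{path_parts[-1]}")
    candidates.append(f"{domain}{sep}.../{path_parts[-1]}")
    for candidate in candidates:
        if len(candidate) <= max_len:
            return candidate
    # Last resort: drop the path hint entirely and truncate with an ellipsis.
    result = f"{domain}{sep}{path_parts[-1]}"
    return result if len(result) <= max_len else result[: max_len - 3] + "..."

# e.g. shorten_path("docs.example.com", ["guides", "v2", "install", "linux"], 40)
# -> "docs.example.com \ueab6 guides/.../linux"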


@@ -56,7 +56,7 @@ def compress_urls_in_content(content: str, max_url_len: int = 50) -> str:
         # Keep original anchor text, but if it's the same as URL, use short version
         if anchor_text == url or anchor_text.startswith("http"):
-            return f"[🔗 {short_url}]({url})"
+            return f"[\uf0c1 {short_url}]({url})"
         else:
             return match.group(0)  # Keep original if anchor text is meaningful
@@ -74,7 +74,7 @@ def compress_urls_in_content(content: str, max_url_len: int = 50) -> str:
         )
 
         # Return as markdown link with icon
-        return f"[🔗 {short_url}]({url})"
+        return f"[\uf0c1 {short_url}]({url})"
 
     # First, process markdown links
     md_link_pattern = r"\[([^\]]+)\]\((https?://[^)]+)\)"
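
The md_link_pattern above captures the anchor text and the URL of each markdown link; this hunk doesn't show the re.sub call that applies it, so the callback below is only a sketch of how the pattern and the \uf0c1 link format fit together (the shortening rule inside it is simplified, not the real compress logic):

import re

md_link_pattern = r"\[([^\]]+)\]\((https?://[^)]+)\)"

def _rewrite(match: re.Match) -> str:
    anchor_text, url = match.group(1), match.group(2)
    # Mirror the check from the first hunk: only rewrite links whose anchor
    # text is just the URL itself; leave meaningful anchor text alone.
    if anchor_text == url or anchor_text.startswith("http"):
        short_url = url.removeprefix("https://").removeprefix("http://")[:50]
        return f"[\uf0c1 {short_url}]({url})"
    return match.group(0)

sample = "See [https://example.com/some/long/path](https://example.com/some/long/path)."
print(re.sub(md_link_pattern, _rewrite, sample))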