From 8a121d7fecaf698f487810cb43bd3bd345edc6c9 Mon Sep 17 00:00:00 2001
From: Bendt
Date: Mon, 29 Dec 2025 16:40:40 -0500
Subject: [PATCH] wip

---
 .coverage                            | Bin 0 -> 69632 bytes
 src/cli/sync.py                      |   6 +-
 src/cli/sync_dashboard.py            |   2 +-
 src/mail/actions/calendar_invite.py  | 285 ++++++++++++++++-----------
 src/mail/widgets/ContentContainer.py |   4 +-
 src/services/microsoft_graph/auth.py |  36 +++-
 6 files changed, 205 insertions(+), 128 deletions(-)
 create mode 100644 .coverage

diff --git a/.coverage b/.coverage
new file mode 100644
index 0000000000000000000000000000000000000000..07729746398bf2cef23d98cfb17045ee5c6663f0
Binary files /dev/null and b/.coverage differ
zN1x-`SDza&%o@3hfttMNLSVlOaLk59MK}gtHaR)wn8}vcoCa{Q-9Zr4<$X=={(qaD zAcTV226?yH%qLl@-i9l=Jyu}NtfRty|5Eq=2VlZK(+&os7%eGixc@)Eks{49EB*d| zCrsjL+Ptw$Dw2WOcGi3vi4hESQU34e{EYY%%-*+B__=V8uv1{YPk0mFEndd+EzeD! zE$;tvf7SglceU%^UH7{7yULtDcYe`%J>1p*jpOefla6inv-W?mAGO!nehOCrTCBgc zp0sY}-{BwSZ{S7ldG6x?PZ!F-mB7Hz81z=+zyjq4mw1|Da1_AI=05t+Xav*afW8rc zGCMlyPl3=P|RMm(NX4ln&RpsfaW(jl+T4${4~XZAi%DjC#-P*M4{B48vxYo<-NINHMHx2 zk6i{KXFpA`>v{m4Z}aGVqVF)E5%;a)rzv_40S>Y0&3>Aq=cA>d%zm1p>pDO&8xiZx zSokzW+qI;zAOs)k%VnFdF`uuZ*j0PEtnF&DQ)&(l40rQEvQui0z_%SBJEeB<4^G*( zpRB6pWuR){Drg^)O9F+y1?jtwOkaFo1?qL|g)^JY*KjncJt&=f1io_*0GQ3!3IOy+ z;5&Dh3ZXv&-?a-M%o=!s6!}NsJBCW7BKHV<|4slfo9h(_G3ycd?m++|x(=u{iowih zcMt$!Z=>H5D}1=FX9ob89VZKc4B`xa1W?Qd*M%svAFdnRP7I~pq?rA1-Qc#8p=Li^ O*V9f+lO~mcrvC+y`Qm#3 literal 0 HcmV?d00001 diff --git a/src/cli/sync.py b/src/cli/sync.py index 5222787..a8ad815 100644 --- a/src/cli/sync.py +++ b/src/cli/sync.py @@ -425,7 +425,7 @@ async def _sync_outlook_data( # Define scopes for Microsoft Graph API scopes = [ - "https://graph.microsoft.com/Calendars.Read", + "https://graph.microsoft.com/Calendars.ReadWrite", "https://graph.microsoft.com/Mail.ReadWrite", ] @@ -721,7 +721,7 @@ def sync( # This prevents the TUI from appearing to freeze during device flow auth if not demo: scopes = [ - "https://graph.microsoft.com/Calendars.Read", + "https://graph.microsoft.com/Calendars.ReadWrite", "https://graph.microsoft.com/Mail.ReadWrite", ] if not has_valid_cached_token(scopes): @@ -963,7 +963,7 @@ def interactive(org, vdir, notify, dry_run, demo): # This prevents the TUI from appearing to freeze during device flow auth if not demo: scopes = [ - "https://graph.microsoft.com/Calendars.Read", + "https://graph.microsoft.com/Calendars.ReadWrite", "https://graph.microsoft.com/Mail.ReadWrite", ] if not has_valid_cached_token(scopes): diff --git a/src/cli/sync_dashboard.py b/src/cli/sync_dashboard.py index 94d0a4c..369d741 100644 --- a/src/cli/sync_dashboard.py +++ b/src/cli/sync_dashboard.py @@ -1103,7 +1103,7 @@ async def run_dashboard_sync( # Get auth token scopes = [ - "https://graph.microsoft.com/Calendars.Read", + "https://graph.microsoft.com/Calendars.ReadWrite", "https://graph.microsoft.com/Mail.ReadWrite", ] access_token, headers = get_access_token(scopes) diff --git a/src/mail/actions/calendar_invite.py b/src/mail/actions/calendar_invite.py index 2f75b6d..346027f 100644 --- a/src/mail/actions/calendar_invite.py +++ b/src/mail/actions/calendar_invite.py @@ -4,144 +4,162 @@ Allows responding to calendar invites directly from email. """ import asyncio +import aiohttp import logging -import re +import os from typing import Optional, Tuple -logger = logging.getLogger(__name__) +# Set up dedicated RSVP logger +rsvp_logger = logging.getLogger("calendar_rsvp") +rsvp_logger.setLevel(logging.DEBUG) + +# Create file handler if not already set up +if not rsvp_logger.handlers: + log_dir = os.path.expanduser("~/.local/share/luk") + os.makedirs(log_dir, exist_ok=True) + log_file = os.path.join(log_dir, "calendar_rsvp.log") + handler = logging.FileHandler(log_file) + handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")) + rsvp_logger.addHandler(handler) + +# Timeout for API calls (seconds) +API_TIMEOUT = 15 + +# Required scopes for calendar operations +CALENDAR_SCOPES = [ + "https://graph.microsoft.com/Calendars.ReadWrite", +] -def detect_calendar_invite(message_content: str, headers: dict) -> Optional[str]: - """Detect if a message is a calendar invite and extract event ID if possible. 
+def _get_auth_headers_sync() -> Optional[dict]: + """Get auth headers synchronously using cached token only. - Calendar invites from Microsoft/Outlook typically have: - - Content-Type: text/calendar or multipart with text/calendar part - - Meeting ID patterns in the content - - Teams/Outlook meeting links - - Args: - message_content: The message body content - headers: Message headers - - Returns: - Event identifier hint if detected, None otherwise + Returns None if no valid cached token exists (to avoid blocking on device flow). """ - # Check for calendar-related content patterns - calendar_patterns = [ - r"Microsoft Teams meeting", - r"Join the meeting", - r"Meeting ID:", - r"teams\.microsoft\.com/l/meetup-join", - r"Accept\s+Tentative\s+Decline", - r"VEVENT", - r"BEGIN:VCALENDAR", - ] + from src.services.microsoft_graph.auth import ( + has_valid_cached_token, + get_access_token, + ) - content_lower = message_content.lower() if message_content else "" + rsvp_logger.debug("Checking for valid cached token...") - for pattern in calendar_patterns: - if re.search(pattern, message_content or "", re.IGNORECASE): - return "calendar_invite_detected" + if not has_valid_cached_token(CALENDAR_SCOPES): + rsvp_logger.warning("No valid cached token found") + return None - return None + try: + rsvp_logger.debug("Getting access token from cache...") + _, headers = get_access_token(CALENDAR_SCOPES) + rsvp_logger.debug("Got auth headers successfully") + return headers + except Exception as e: + rsvp_logger.error(f"Failed to get auth headers: {e}") + return None -async def find_event_by_subject( - subject: str, organizer_email: Optional[str] = None -) -> Optional[dict]: - """Find a calendar event by subject and optionally organizer. +async def find_event_by_uid(uid: str, headers: dict) -> Optional[dict]: + """Find a calendar event by its iCalUId. Args: - subject: Event subject to search for - organizer_email: Optional organizer email to filter by + uid: The iCalendar UID from the ICS file + headers: Auth headers for MS Graph API Returns: Event dict if found, None otherwise """ + rsvp_logger.info(f"Looking up event by UID: {uid}") + try: - from src.services.microsoft_graph.auth import get_access_token - from src.services.microsoft_graph.client import fetch_with_aiohttp - from datetime import datetime, timedelta - - scopes = ["https://graph.microsoft.com/Calendars.Read"] - _, headers = get_access_token(scopes) - - # Search for events in the next 60 days with matching subject - start_date = datetime.now() - end_date = start_date + timedelta(days=60) - - start_str = start_date.strftime("%Y-%m-%dT00:00:00Z") - end_str = end_date.strftime("%Y-%m-%dT23:59:59Z") - - # URL encode the subject for the filter - subject_escaped = subject.replace("'", "''") - + # Search by iCalUId - this is the unique identifier that should match + uid_escaped = uid.replace("'", "''") url = ( - f"https://graph.microsoft.com/v1.0/me/calendarView?" - f"startDateTime={start_str}&endDateTime={end_str}&" - f"$filter=contains(subject,'{subject_escaped}')&" - f"$select=id,subject,organizer,start,end,responseStatus&" - f"$top=10" + f"https://graph.microsoft.com/v1.0/me/events?" 
+ f"$filter=iCalUId eq '{uid_escaped}'&" + f"$select=id,subject,organizer,start,end,responseStatus,iCalUId" ) - response = await fetch_with_aiohttp(url, headers) - if not response: - return None - events = response.get("value", []) + rsvp_logger.debug(f"Request URL: {url}") - if events: - # If organizer email provided, try to match - if organizer_email: - for event in events: - org_email = ( - event.get("organizer", {}) - .get("emailAddress", {}) - .get("address", "") + # Use aiohttp directly with timeout + timeout = aiohttp.ClientTimeout(total=API_TIMEOUT) + async with aiohttp.ClientSession(timeout=timeout) as session: + async with session.get(url, headers=headers) as response: + rsvp_logger.debug(f"Response status: {response.status}") + + if response.status != 200: + error_text = await response.text() + rsvp_logger.error(f"API error: {response.status} - {error_text}") + return None + + data = await response.json() + events = data.get("value", []) + + rsvp_logger.info(f"Found {len(events)} events matching UID") + + if events: + event = events[0] + rsvp_logger.debug( + f"Event found: {event.get('subject')} - ID: {event.get('id')}" ) - if organizer_email.lower() in org_email.lower(): - return event + return event - # Return first match - return events[0] + return None + except asyncio.TimeoutError: + rsvp_logger.error(f"Timeout after {API_TIMEOUT}s looking up event by UID") return None - except Exception as e: - logger.error(f"Error finding event by subject: {e}") + rsvp_logger.error(f"Error finding event by UID: {e}", exc_info=True) return None -async def respond_to_calendar_invite(event_id: str, response: str) -> Tuple[bool, str]: +async def respond_to_calendar_invite( + event_id: str, response: str, headers: dict +) -> Tuple[bool, str]: """Respond to a calendar invite. 
Args: event_id: Microsoft Graph event ID response: Response type - 'accept', 'tentativelyAccept', or 'decline' + headers: Auth headers for MS Graph API Returns: Tuple of (success, message) """ + rsvp_logger.info(f"Responding to event {event_id} with: {response}") + try: - from src.services.microsoft_graph.auth import get_access_token - from src.services.microsoft_graph.calendar import respond_to_invite + response_url = ( + f"https://graph.microsoft.com/v1.0/me/events/{event_id}/{response}" + ) + rsvp_logger.debug(f"Response URL: {response_url}") - scopes = ["https://graph.microsoft.com/Calendars.ReadWrite"] - _, headers = get_access_token(scopes) + # Use aiohttp directly with timeout + timeout = aiohttp.ClientTimeout(total=API_TIMEOUT) + async with aiohttp.ClientSession(timeout=timeout) as session: + async with session.post(response_url, headers=headers, json={}) as resp: + rsvp_logger.debug(f"Response status: {resp.status}") - success = await respond_to_invite(headers, event_id, response) - - if success: - response_text = { - "accept": "accepted", - "tentativelyAccept": "tentatively accepted", - "decline": "declined", - }.get(response, response) - return True, f"Successfully {response_text} the meeting" - else: - return False, "Failed to respond to the meeting invite" + if resp.status in (200, 202): + response_text = { + "accept": "accepted", + "tentativelyAccept": "tentatively accepted", + "decline": "declined", + }.get(response, response) + rsvp_logger.info(f"Successfully {response_text} the meeting") + return True, f"Successfully {response_text} the meeting" + else: + error_text = await resp.text() + rsvp_logger.error( + f"Failed to respond: {resp.status} - {error_text}" + ) + return False, f"Failed to respond: {resp.status}" + except asyncio.TimeoutError: + rsvp_logger.error(f"Timeout after {API_TIMEOUT}s responding to invite") + return False, f"Request timed out after {API_TIMEOUT}s" except Exception as e: - logger.error(f"Error responding to invite: {e}") + rsvp_logger.error(f"Error responding to invite: {e}", exc_info=True) return False, f"Error: {str(e)}" @@ -162,70 +180,97 @@ def action_tentative_invite(app): def _respond_to_current_invite(app, response: str): """Helper to respond to the current message's calendar invite.""" + from src.mail.widgets.ContentContainer import ContentContainer + + rsvp_logger.info(f"Starting invite response: {response}") + current_message_id = app.current_message_id if not current_message_id: + rsvp_logger.warning("No message selected") app.notify("No message selected", severity="warning") return - # Get message metadata - metadata = app.message_store.get_metadata(current_message_id) - if not metadata: - app.notify("Could not load message metadata", severity="error") - return - - subject = metadata.get("subject", "") - from_addr = metadata.get("from", {}).get("addr", "") - - if not subject: + # Get auth headers FIRST (synchronously, before spawning worker) + # This uses cached token only - won't block on device flow + headers = _get_auth_headers_sync() + if not headers: + rsvp_logger.error("No valid auth token - user needs to run luk sync first") app.notify( - "No subject found - cannot match to calendar event", severity="warning" + "Not authenticated. 
Run 'luk sync' first to login.", severity="error" ) return - # Run the async response in a worker + # Get the parsed calendar event from ContentContainer + # This has the UID from the ICS which we can use for direct lookup + calendar_event = None + try: + content_container = app.query_one(ContentContainer) + calendar_event = content_container.current_calendar_event + except Exception as e: + rsvp_logger.error(f"Failed to get ContentContainer: {e}") + + if not calendar_event: + rsvp_logger.warning("No calendar event data found in current message") + app.notify("No calendar invite found in this message", severity="warning") + return + + event_uid = calendar_event.uid + event_summary = calendar_event.summary or "(no subject)" + + rsvp_logger.info(f"Calendar event: {event_summary}, UID: {event_uid}") + + if not event_uid: + rsvp_logger.warning("No UID found in calendar event") + app.notify("Calendar invite missing UID - cannot respond", severity="warning") + return + app.run_worker( - _async_respond_to_invite(app, subject, from_addr, response), + _async_respond_to_invite(app, event_uid, event_summary, response, headers), exclusive=True, name="respond_invite", ) async def _async_respond_to_invite( - app, subject: str, organizer_email: str, response: str + app, event_uid: str, event_summary: str, response: str, headers: dict ): - """Async worker to find and respond to calendar invite.""" - # First, find the event - app.notify(f"Searching for calendar event: {subject[:40]}...") + """Async worker to find and respond to calendar invite using UID.""" + rsvp_logger.info(f"Async response started for UID: {event_uid}") - event = await find_event_by_subject(subject, organizer_email) + app.notify(f"Looking up event...") - if not event: + # Find event by UID (direct lookup, no search needed) + graph_event = await find_event_by_uid(event_uid, headers) + + if not graph_event: + rsvp_logger.warning(f"Event not found for UID: {event_uid}") app.notify( - f"Could not find calendar event matching: {subject[:40]}", + f"Event not found in calendar: {event_summary[:40]}", severity="warning", ) return - event_id = event.get("id") + event_id = graph_event.get("id") if not event_id: - app.notify( - "Could not get event ID from calendar", - severity="error", - ) + rsvp_logger.error("No event ID in response") + app.notify("Could not get event ID from calendar", severity="error") return - current_response = event.get("responseStatus", {}).get("response", "") + current_response = graph_event.get("responseStatus", {}).get("response", "") + rsvp_logger.debug(f"Current response status: {current_response}") # Check if already responded if current_response == "accepted" and response == "accept": + rsvp_logger.info("Already accepted") app.notify("Already accepted this invite", severity="information") return elif current_response == "declined" and response == "decline": + rsvp_logger.info("Already declined") app.notify("Already declined this invite", severity="information") return # Respond to the invite - success, message = await respond_to_calendar_invite(event_id, response) + success, message = await respond_to_calendar_invite(event_id, response, headers) severity = "information" if success else "error" app.notify(message, severity=severity) diff --git a/src/mail/widgets/ContentContainer.py b/src/mail/widgets/ContentContainer.py index 6c5a1c5..9ef7800 100644 --- a/src/mail/widgets/ContentContainer.py +++ b/src/mail/widgets/ContentContainer.py @@ -405,7 +405,6 @@ class ContentContainer(Vertical): if is_calendar and raw_success 
and raw_content: calendar_event = parse_calendar_from_raw_message(raw_content) if calendar_event: - self.current_calendar_event = calendar_event self._show_calendar_panel(calendar_event) else: self._hide_calendar_panel() @@ -777,6 +776,9 @@ class ContentContainer(Vertical): # Remove existing panel if any self._hide_calendar_panel() + # Store the calendar event for RSVP actions + self.current_calendar_event = event + # Create and mount new panel at the beginning of the scroll container # Don't use a fixed ID to avoid DuplicateIds errors when panels are # removed asynchronously diff --git a/src/services/microsoft_graph/auth.py b/src/services/microsoft_graph/auth.py index d0616e3..af82009 100644 --- a/src/services/microsoft_graph/auth.py +++ b/src/services/microsoft_graph/auth.py @@ -19,12 +19,42 @@ logging.getLogger("asyncio").setLevel(logging.ERROR) logging.getLogger("azure").setLevel(logging.ERROR) logging.getLogger("azure.core").setLevel(logging.ERROR) +# Token cache location - use consistent path regardless of working directory +TOKEN_CACHE_DIR = os.path.expanduser("~/.local/share/luk") +TOKEN_CACHE_FILE = os.path.join(TOKEN_CACHE_DIR, "token_cache.bin") + +# Legacy cache file (in current working directory) - for migration +LEGACY_CACHE_FILE = "token_cache.bin" + def ensure_directory_exists(path): if not os.path.exists(path): os.makedirs(path) +def _get_cache_file(): + """Get the token cache file path, migrating from legacy location if needed.""" + ensure_directory_exists(TOKEN_CACHE_DIR) + + # If new location exists, use it + if os.path.exists(TOKEN_CACHE_FILE): + return TOKEN_CACHE_FILE + + # If legacy location exists, migrate it + if os.path.exists(LEGACY_CACHE_FILE): + try: + import shutil + + shutil.copy2(LEGACY_CACHE_FILE, TOKEN_CACHE_FILE) + os.remove(LEGACY_CACHE_FILE) + except Exception: + pass # If migration fails, just use new location + return TOKEN_CACHE_FILE + + # Default to new location + return TOKEN_CACHE_FILE + + def has_valid_cached_token(scopes=None): """ Check if we have a valid cached token (without triggering auth flow). @@ -45,7 +75,7 @@ def has_valid_cached_token(scopes=None): return False cache = msal.SerializableTokenCache() - cache_file = "token_cache.bin" + cache_file = _get_cache_file() if not os.path.exists(cache_file): return False @@ -92,9 +122,9 @@ def get_access_token(scopes): "Please set the AZURE_CLIENT_ID and AZURE_TENANT_ID environment variables." ) - # Token cache + # Token cache - use consistent location cache = msal.SerializableTokenCache() - cache_file = "token_cache.bin" + cache_file = _get_cache_file() if os.path.exists(cache_file): cache.deserialize(open(cache_file, "r").read())
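--
A minimal sketch (not part of the patch) of how the new UID-based lookup could
be smoke-tested, assuming a token already cached by a prior `luk sync` and a
real iCalUId copied from an invite's ICS part. The imported names come from the
patched calendar_invite.py; the UID value below is a placeholder.

    import asyncio

    from src.mail.actions.calendar_invite import (
        _get_auth_headers_sync,
        find_event_by_uid,
    )

    async def main() -> None:
        # Cached token only; returns None rather than starting a device flow
        headers = _get_auth_headers_sync()
        if headers is None:
            print("No cached token - run `luk sync` first")
            return
        # Placeholder UID: substitute one taken from a real invite
        event = await find_event_by_uid("PLACEHOLDER-ICAL-UID", headers)
        print(event.get("subject") if event else "event not found")

    asyncio.run(main())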