From 80565d79e015f743a8f8a227e8060d89aec89fcc Mon Sep 17 00:00:00 2001 From: Timothy Carambat Date: Mon, 4 Nov 2024 11:34:29 -0800 Subject: [PATCH] 2488 novita ai llm integration (#2582) * feat: add new model provider: Novita AI * feat: finished novita AI * fix: code lint * remove unneeded logging * add back log for novita stream not self closing * Clarify ENV vars for LLM/embedder separation for future Patch ENV check for workspace/agent provider --------- Co-authored-by: Jason Co-authored-by: shatfield4 --- .vscode/settings.json | 1 + README.md | 1 + docker/.env.example | 4 + .../LLMSelection/NovitaLLMOptions/index.jsx | 142 +++++++ frontend/src/hooks/useGetProvidersModels.js | 8 +- frontend/src/media/llmprovider/novita.png | Bin 0 -> 39296 bytes .../GeneralSettings/LLMPreference/index.jsx | 11 + .../Steps/DataHandling/index.jsx | 9 + .../Steps/LLMPreference/index.jsx | 10 + .../AgentConfig/AgentLLMSelection/index.jsx | 2 + locales/README.ja-JP.md | 1 + locales/README.zh-CN.md | 1 + server/.env.example | 4 + server/models/systemSettings.js | 5 + server/storage/models/.gitignore | 3 +- server/utils/AiProviders/novita/index.js | 376 ++++++++++++++++++ server/utils/agents/aibitat/index.js | 2 + .../agents/aibitat/providers/ai-provider.js | 8 + .../utils/agents/aibitat/providers/index.js | 2 + .../utils/agents/aibitat/providers/novita.js | 115 ++++++ server/utils/agents/index.js | 6 + server/utils/helpers/customModels.js | 18 + server/utils/helpers/index.js | 6 + server/utils/helpers/updateENV.js | 15 + 24 files changed, 748 insertions(+), 2 deletions(-) create mode 100644 frontend/src/components/LLMSelection/NovitaLLMOptions/index.jsx create mode 100644 frontend/src/media/llmprovider/novita.png create mode 100644 server/utils/AiProviders/novita/index.js create mode 100644 server/utils/agents/aibitat/providers/novita.js diff --git a/.vscode/settings.json b/.vscode/settings.json index 14efd3fae5..307bbe6c71 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -33,6 +33,7 @@ "Mintplex", "mixtral", "moderations", + "novita", "numpages", "Ollama", "Oobabooga", diff --git a/README.md b/README.md index 4edf494820..861e4fa59e 100644 --- a/README.md +++ b/README.md @@ -96,6 +96,7 @@ AnythingLLM divides your documents into objects called `workspaces`. 
A Workspace - [Text Generation Web UI](https://github.com/oobabooga/text-generation-webui) - [Apipie](https://apipie.ai/) - [xAI](https://x.ai/) +- [Novita AI (chat models)](https://novita.ai/model-api/product/llm-api?utm_source=github_anything-llm&utm_medium=github_readme&utm_campaign=link) **Embedder models:** diff --git a/docker/.env.example b/docker/.env.example index 2f6f896b0f..0580465965 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -90,6 +90,10 @@ GID='1000' # LITE_LLM_BASE_PATH='http://127.0.0.1:4000' # LITE_LLM_API_KEY='sk-123abc' +# LLM_PROVIDER='novita' +# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings#key-management +# NOVITA_LLM_MODEL_PREF='gryphe/mythomax-l2-13b' + # LLM_PROVIDER='cohere' # COHERE_API_KEY= # COHERE_MODEL_PREF='command-r' diff --git a/frontend/src/components/LLMSelection/NovitaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/NovitaLLMOptions/index.jsx new file mode 100644 index 0000000000..26e1fe04b7 --- /dev/null +++ b/frontend/src/components/LLMSelection/NovitaLLMOptions/index.jsx @@ -0,0 +1,142 @@ +import System from "@/models/system"; +import { CaretDown, CaretUp } from "@phosphor-icons/react"; +import { useState, useEffect } from "react"; + +export default function NovitaLLMOptions({ settings }) { + return ( +
+
+
+ + +
+ {!settings?.credentialsOnly && ( + + )} +
+ +
+ ); +} + +function AdvancedControls({ settings }) { + const [showAdvancedControls, setShowAdvancedControls] = useState(false); + + return ( +
+ + +
+ ); +} + +function NovitaModelSelection({ settings }) { + const [groupedModels, setGroupedModels] = useState({}); + const [loading, setLoading] = useState(true); + + useEffect(() => { + async function findCustomModels() { + setLoading(true); + const { models } = await System.customModels("novita"); + if (models?.length > 0) { + const modelsByOrganization = models.reduce((acc, model) => { + acc[model.organization] = acc[model.organization] || []; + acc[model.organization].push(model); + return acc; + }, {}); + + setGroupedModels(modelsByOrganization); + } + + setLoading(false); + } + findCustomModels(); + }, []); + + if (loading || Object.keys(groupedModels).length === 0) { + return ( +
+ + +
+ ); + } + + return ( +
+ + +
+ ); +} diff --git a/frontend/src/hooks/useGetProvidersModels.js b/frontend/src/hooks/useGetProvidersModels.js index a493438c74..8245872455 100644 --- a/frontend/src/hooks/useGetProvidersModels.js +++ b/frontend/src/hooks/useGetProvidersModels.js @@ -63,7 +63,13 @@ function groupModels(models) { }, {}); } -const groupedProviders = ["togetherai", "fireworksai", "openai", "openrouter"]; +const groupedProviders = [ + "togetherai", + "fireworksai", + "openai", + "novita", + "openrouter", +]; export default function useGetProviderModels(provider = null) { const [defaultModels, setDefaultModels] = useState([]); const [customModels, setCustomModels] = useState([]); diff --git a/frontend/src/media/llmprovider/novita.png b/frontend/src/media/llmprovider/novita.png new file mode 100644 index 0000000000000000000000000000000000000000..85f18acb240516dde86aef9c99ceb55cd29523c1 GIT binary patch literal 39296 zcmeF2cUMzgxAsFQx$ozB&nI~QJ7X+L_DDu{nQN|VUh}u&jPK~2W#(cAfk0>VbhYk+ zKr~?|Kc{GcBjP3eB;e0!A6-i%2qa-}@6I0&9(}5M7(Hz`=)xIB2N_1s=QK>*&dz~7a@j7w z{)YajIs7rWIXJvIBPZe?=wC+u@ZH$I{8Ra_-=j*z`w3#33bRg!XxtOB>9!$gw)tu6 zesJsl?8nXibaC?U=26u4@$x2S*eHnryS`tWNWcB!Mo)P?%2yV z5_jAYa*Vt9bPN(g{hLbCe7d>wkttM*O8S`jwBzOCLFkv`m#uvrf9Ex53QB38BmP)@ z1Cb_04wB7%KOKRC2R$uUz-H=-E9t@D?2N zWBl9|Q=P|#Jk!@g91q7$u9!Y}J87%>$${DSa2#{xR?wDdkH6uSSzFbX;vlrj=l0WA zx-O{13)e%8HbrvgY;U~#9OS1%S^d>6O1%>W$(^wM0?A#lz45u&M?3hjbMUeuRojI! zkUxJecMKSL`D5pX)?zw>UF`q;Wk3xhw{^vomru^hGc2il?Jv&e)L(dmG?;dy|-~pfvd->xcH`DX|eQ5 zXAbK6Y4AFgqDMbIwa(fsP~Cqov48uM)gSt%|B&I^pBFrnhXUwB!GZiR^Mg0&o9PlE zVwT5d7Qi@oT)ms=xGrb6Ueh<~(y@*cGRS8sy~*s7x)}3T|K&SBS-nFlE3Z^e^Ac3->48WuO8t-{eGadyj{ z1wMD#a=Awb^3Q9R(*HJHzpPan^?U3Ysg_q+U@#xLhC8+6LXWL!KeALOdmb?UMz+r> z*&XM|+!XH3z1*%M{aO{=itL!1d+7HlCHwLnTedZ+Q%DHoOGxBPNc2kx;{`~}O9<12 z9nxh(+w+Eq^MvcK8|7a&O1y4Vz0}IxYEFN_ym$ZoLxPyXQ-7JKw?T{L5g3mMipSR} ze`*vl*(Uc=(_`QIC)3oOk83xl&i)m}xTThVKZ$s?e4eA5--9Y@=JtPS02UP zXN*n05T?%+0@rJdSTqu5K!HL)f$+AwM6e1*b7a*U_<;Yyu zX#uHm9OUpA<|Qw(wD++*>tmtV(0db#!hB<7V%1_6oQ}>MAUK~th+8b*<|BhZHpRee z$nzm^H<8JKr?kL4m7LB}RN?v{W&0*8(4Jo=1N2UM$vg1?%(>m@Qp9UDj zreXA@f(g$=L)w%H4TxRj4ZM>=cOcLhF&DFrLxe-sc+j0FDP}-0JYM4i4QcjkCi@EK zRK@F1p>*-5<|hLM@eduhB6k4w*Z!L|~%Cnnbv*UvM|oD>Z!?X!%?7 zY@-WquLL^Kog;p3wKt}&j{d#e*fA$85h81O;CO0|$yX=VsBFZAD-Mjz|3ib)2f^A= z+bprjhcB?WHoVvEj_YoCBHoc=pr^(|!nPBZrvlFHs?D-<0B_p)_GS$pP36vT+%2q6 zD2(wDmSFqVHq4?%T$NE~=aLG`Go>q19a*O-z-2yzyD(^>!wyf8SBVch7#`5)A~jv> z)}mh$nLuits3l+uT)YF?Xyb{6Uw{bgBDk`mH>!PdEWl5DD)R=KJ)T`dZ_~vl^Pl0e zR-4mFl%(J7qdh3Y+)}US1D&zrc7&8~w*EEfiGbe)p6mVb!N19{Z^camhrjX2kipJU zdi({A*;_D3olW%K!;U>pw?;Tbqk#c9+EEvJ$|s`JMZdv@iFQ9zV?xdg0}X?cnyZ#I zNTU3^hUX}Jol5)?r235kU3BcB%!Zy-r*J`Z0!k_^Sbl?MSDn=MhZf>7r8(Eh?De!1XpDWka-()M+7U!HEm96uuEA^%#`Y5T=#X$x%_zd2bOo-2v3D+UWz2P+_+ z;87N2nv&n11{9_1y)+wqbC?3*w^Y!#wmgwC)DYTqoNg4uWo@J^IOiRA!{Zx?Y@E)?qMTr6)odrMzV^)CA1c$fZ88yCvh?X3wU zVMrH!DWqzvNGYR|%AM;-DDHB85v)r&hnXJDjyhh7Wa)Cgi`L+UyU6Tc^Gh~ChwW7@ zprz2*W!jQySC|q%9KwH~hU+5H4P|ipN4;c36%g&QM8DL`)IAV|Zx^dQ0K2g~8jqt5 zid5m)RU@X`aI;Z3tu zYR!4%NDj*gQ_Kdc^G=6<@!m-qEx5uC#LC0Yyc}9GU1Nlm5yFjwBw9Uch?KhY!C|%} z2BFtPDjV65=lq3i%(6y*M-iBH=G`peTvNdQ z!;Y?rJV%Pfz>?}ff3j^Tfy|(0=*XY3ZKan?vm|%J^H^qqw02c|NT+Z@ z^OwdDO3TxE_gp5Y%Ez!coL=k=`S14Le6FzP@OIQsmq@IfZ*!ErTobcfKWjYrGFr%O zT;W2h+dR=hXxB4Av%YuRPMJI9bc?|EBe{+AMwQ(aj^vQ8e`& zbkTjn^tcwI;DBRDW6JA775oNHtr|)t2+UnuXn)-Xq#hHr^v-4OEV(aq@099y{155<{Q^zV+lkntk=TaOsv;yxGQ_(!KJ*ne64&3{~{l13VB{ zm44?o?f%^VB`IND^NXXpW;(CB<|G%YfzR_8P|jKJ)C%q|DMyva^R&kUhoYg}7-6UW z;YPY&8bDI=OcI5^s?3Z{j)sD+z?@vH<<9e@1iu01YVtKI8^sR8`3<_|7TpsMHi6Rf 
z`tHBxgUn{Fm<9|?aNnQGk|dqm<*V%Z$?@G}@-^`3E+(we8Q`kJsIT)_e3_s=`OiUk zR}X8U__?R>ER`N;!oPtCCkMnRuR0F`$;hrz&LW)g+kveC|OSXgimpI zJ37~xg%@ac>=nZ!_olKDMe=eRz~_>t+=`#Y8ZG?f7@jJ7h~eCdsxqVLbYTs&#VS2h z84p;GF83>b7LA})R@^v8?&#Klt)BNTtiuVW#iTp0NUr7?3=FbV@iZcxZ`P$#LlbhB!ZeTT!{p)jvHbl4R}lDtZA;6yt=dU>A0 zcXE*DeI0aKsKg}0c73$oS&e6xYC(p6$ASWrz_k+&=Xhp_{A7nGEI8g|Fel6OQb%`t zjpIuXR;g+h3QaKIRF(;_W=%#l_6{#-aQpQsifV$z;F}b6f+i!d6kR=d&6ff7UltSn zXu2(%p+Fq%)>wmYN{%;nnEn#NXV(ldk1*vY!dv>wc#F3;Xh4adI8LVJ!4@5xzqN?s z4u?tzLtuZiNt-}G=ao^Ev7gXxxU`qqLt=^75Yw{>%>@lD9*Y%?GozebQ$B9HZEFb` zSP{(?*9D>&lT`T#IqB7VqXvB|5d{LB!u9&7bhMZEavgK0vlahm)>)}3V)e=D%@&w) zDB{CjGz93;U@3n16q0X4eMESL!P2PMNalUj@4N2ur(|1h&MaRtt^{U*H6!nISxShM zJNZ!kjLg$fe2fN>K%YeNd^%y5uEEUw)=j4UqamGN`vG{vs4O4X(yeyUbf_8mn zW}34zY|1CM%qFV3i`5VOBb6%xx2}GtIcmvm;xrtU$wEZDtimzq%duh~rkkxo#{zB_ zd}eFVqd_@F&G(b}PBP5207D_uNX#oSpZ+_uZ$$P<>;b{!UD=2P+HLuW3C#*7&mq}W zdyGNr7w$*hGc6b-vx#Ga&isx@Ck-t14K+X#p#Cmp>+7CPJBV0Wg1HqSB z;`Vys8P2dxNyp^m2~?+TEJ%wC_2?W%N)@W!SgpRu!-BFu4PPa{%R!E$7$|@adjl-SmZ%SnCuxt2)%6o1nYhbqfoRYC)Xu0&;=?^4S=}Xs@_ z-oPe|K;zcj1sCoYt+d==t$>9xJe22A6B|m`vtD}>?gk5Ifa!+5&_#n#1%J8ROp7ZQdi8t4 z+|%g_9$XG{eAgbZzoZ$FY-{J@0Vc3|y|h9CiK_ax`yS$|qTE4MPmcuB7bl2!XpWll zNnP+1A9@BK59pIMpR+Cl5^htB4^RkQRouyBHbeWQo4vxu9+K|eeWOYuj}%K?fp%9g znSZ(=o1TG{@a9XLa!ZzP!IgA$NJN0isnwNFw9RTbF&l}@Nm7m_)3VesLdUQ6#!z4%o=mTXDONyv=kS-*flHVW7&UZ7>KJdqc#w28tbY3eKvlv9th-4NatSU->%N$z+in$hvHJjNbA>$g@UUV3fN5Y4k`ybWj zSwu5_eDPBsGLt1~Bqu%;uHe-3TUiuN539WK{X912tqt!1FB!BkLaIR|KE4{Ql9ALX zYH?CqR{EJd38m-FU81u}Zox%ekka4&-K_PN{IR2B^WNRLvKzc>l9BZrZ1PmsN}|E* z`Qm--{W^M2m-}m0GO}V>XfLd+l~mqIzsR&Z*>9gou@~4b}uUZqJzAa+XPTk z*Wywyvlr}<DN=`06Eut}o%D#oOyKvPB!>NM9Ia#KSl*jz^6P}8iRirQOpl0v-aO*T?(NJ)%(UD(FR z6X#yvFcrQx1@UutbS-ETE}JUDt5d>msIKw{7uMR`H&z7}`LucZt^vSE&{?$CiAX*L ztL}mh;Hx0h;!>-w6CI2^;v%-MuppG$#&Wup9_yGFgcZwSHGpWm&1LOa%9pVqppdYs ztQk=O>qYIB4Gdy~dKu6$7Dg%rRt3Zsyavk1tI$jfat*>>Z=Rt!k$NXj<~PfglAFwz z0=0lz>m-JuwJ)1=#No|d!LQ*f;~HE9Ka&C?ia%m5K3Fw^xI3f(e`9 zN@vU1c>oP)?J69zWlD*A!Xnxf)9GyBmX!Z41$4HY7;n=%QcAN@p(8$2)y(RZaft=U zAJbV;d#}gB!oMQV?H8&yH3n}cGFQs?WMEz2@lPO^QxX`r1ZL*Poc9o0zH^pIyD@l| z5-v;f*^e}!_rGyVJdgjG5@knSNu$EFkM}!&Ag8-EkX$K%jwvx1&8qz3+@kN^v(LNx z<-D7?1oFx>^L4Q#cgd$%c45TpJm1G2lqpBb>-#sfQHW}vW^}T*WKK%wBv$W1 zP!Z0|e>n(@-0Z=jwjWQZYyA;I;+ajnnRIc&aj7sp`3pkAL_y3~TJfDuLA{&`VVXz0 zFoDx;u&OWVo!&Qmo&1j8`w-r*c79#H@!6xD;_+f-GJ0iFu;(Mo@{m!|5V!|&wFeUE zz_Kpb!x80hcG*Jnd`rTzQ%0nNK|4m9n(B3?<~B8}!!rSG_fmWi#dnnh#m5XQR0Dfq zE*w0ZWoML7OlXQBA~+7J<^=GX-a%15`RT{KVhYvmm&uH0dv8_W2YSF?#mq%`@FoY2 zYS+aKakmB#eo49NI>5T5M#s)GQ@*6RNs=5_r#QbOY*z(UJvg#QQVO&vY#)PsihOj7 zsDdB2OSCA)AE_l;%f=slT#I~yihLT)Z_tKvr^Y53UQ9C7(h+0^#)CUryGeJndX`i; zT7ElR81aoe0tSV;q9q-w!5{i8CJu7o14{}5Bz4l=~p_S zB9N*gckPKYf8hOrtIb;E%~yTM^V@4zg>Y`+Kd^CqNNb^CuS`|xYreJvopYV5za=ja zg@uN_GaGrY`P$hN+BvAUf=jP?xz+XX$xrCyaQUCxl`!ovX{5XUUlw2xb+nwWi@yK6 zqkMc@EHtR(dUL(k6pitJ66bFHC~Qm)r;Gys!6EUHpBAzY}t#(B&*de|Fr~vmbL`yRbG@{U20YhmXG6{4W3W(>!qP zjj1SqL;P-PUZnl z!p-+`r#C2-i1f1>h_x5#QolI*V{OFYB64FZeC9fW6i&-``8Uy@pH04tHW!SZl=250 zPj1jx`hs;MCJYto2ou~Qw|!>zNKdOFOk2B>k$qz8b~!vkIy5F^Y*2{r&h^s2d!d=f zr3<5rp~eRr57Lik|32HFY!&;nbu<0=mwEb+*3jO=rjS3G;3s3Df6LFexNT5)j+3c1 ze+O?`Jow`R-~6)@)B4v9b|h!|Gsbix;^xtVdyCThl(*|WAE~$h{xlzJeX&3A;LeXZ zr9XfFw03>?;910=Cj16A%3E9*!?-1P7Cun*yP7>e9lkUywggv0vxR6Y|5=t&K!@;u zo`}N({a53<{#)N7l=V-kJ%e*>jAV%25VwtqF>&Ra&GHY6_Almd!TCE@2l*H*lP22h zvg5N`mG-W;`n_-Ku@F2mzn-4eVt0iuWfvi^+c{4M%kbJRJg1^zR6(>l4Vyfr1|H47 zb94UPlFg5j$-1{TEcZWigYK0h%;1ie>vGKOJg*55NQUz<2KFAlh0itS+ zqdzTIb*QgBRh{zXEg?KHs`y#^(d{ntr;2~TP5M#%f7364lG*w~ z-n#$1GG<}QXZXmNObx;V)*RavkW++z<0}E-5cdnTSnZ}PA**% z_M<9F^gQT~PU;BgQ 
zeJgK(EBTfe0)&?KEfMQYEwp3m9}#9SPz1UroAvDH;H_>bif~V{UCE67uwc=6*!Awg zQHUG!-{nHuOYeVse=G;n6tpa?U!HXSi+Xf6A_Iy zRwWdbW-b|#&&sK`KmPG^fJL5xkvBp`2&mcKKAQtf1R;YrY1X|cxu<<&o(zgT_w~HI z`Jy=T#+kd@1+t0+vWr*ljRyrUTi?t+&TrE165UCD>7d!lNoUonSe&IZqbcV!8J}$X z{$WYx=O{JwsgT&i$FzUKqZ52}O3tC8C(9l;=zsh*dYPN6Sol+bhUZZ3B;Cs!=|!K-Zle@abT=WN6->RXiuR9gCB z@Z>9ig@4W`aXZbXASPJKwK43tLVa%i*Vu*`DmTAHy<}1-`jOwimBwi`(+6i_sXmVH zH<2bCUFwvB8yO*2iM^|X1=RjtGkX{+;R+V4^L8eDNLhC8;+2Bb=fm!auK64C={G-I z3tX-~dRxkT^C;n63tuGPk@f4WfQPl-pigdIZ|j~oK9OTZ^x^gZe7 zF`s!>$=W2OuWR9zVmqnTZC|ORA8iFexf9RDH`OH?*vzMp#M*Y{!;!PUO)p@(Hj0^S zMUIGn;kWZ3J#oLzs?BGX40R09*~0_|{+uf(o(CK{z#1(Nsg&>7h`yh|&hzEURrrS4 z0b?kMG1S$@p2WfQPnNk2%2HO-3rL39U+=T?%SoV>*P^I<06o-OhukWjwLKav{(i6N zT}doT?ID*?w9P5SE5Tx{{0{cZSiKv={(eaEs!%WawBonLBRxt+)7!ejk<(wf!XDXX zyLmt@Ijg254iN>{$6{@+BLZe`LGu+j_W1}%I#?)4JH7k*`L=t@X>vU|R=h#Dz1yZ0A7gA;QZ!zu5)MSdqkW!tzW{E4;$Mbb_3Prr!6 zY;4QGV)PCC0H$9^6fU?;u`XU3IR1KQqa&=t6+ zC-mz`XDf?*xp#L2wK*0B=qhn|Ku4G9uMqmK4U>%1@i)7)d6f0U@DXl9ITo=b%~G1U zy;Hl!#;Q=FGQ^0EkQv{%6ESzIG9?3b|3dT5pSWMMKNo-NQOrYAqP1DVWUmZ$Ofso{ zAJqgrk1}c)mJ@5ljbaO_$YUvF!s1Yod5On(ZG~F+wZ^`>hyPu^r2`GGa9_&gBqG>y z^vQX}B%$iLvZ~#+c%XEZJj2q7;uW-AvFO9`pj0uZz83P40Ox#80Fq zvxXqN7sai7HddX&W^>~sy9SFitFZL8@UWGv@4cSY&qlXy9zXGw8R`huC|dNsqXkO) zp1V-1SX1$&QEutp=tf<+aWG5x-kA=8&zVh6X?j))?ka4fkt0)zAqo(w_&0astLd;H4!z)n!Y%HC4hz;|rz9NmK<6etW7!sxBBi=F0~d3~vQ{@OoNgahP% z5p1#~CQpH3#=<-$(pZ#aR;!KZpwi}gfP0iz-D5;2yRSaqGl6g<#NlrrZy&PV(pCB_r=`Hys} zFAj1AadPd({>0y9uvsgLHV9@(j>~-~W3FdZWkjdn5p}7vyM2tdIogmo-idjwCM%n^(1N6{Kmg=bGWCr zn>;!7)-{V@nNiT=;C)j3+U|EXj_Gr#PrfsPm)SQZ5#`Y-RM!c)TACRtD#oPb9dV|N z(fe-1^>XzuyrJ9o6&}}I4=Vj{_9^=5d!&w^EekIQb)4*nt+`?Mc z<$MXVlEcC!

Mni`j}&vUY>Be4r!veo)i4+1_ST0s4^vr#DiqH^b!-K1o-kk${TB zc1a0;BB`?GkUnu6J+{f^Zq-P{+0gq^47^`Y9ixw1kB5Gw7mm;tq^1w8D})EJ_-3+L zB;OnyKxmU`9)PQ%omcE&mygudctVY1aZ5wTOOo!S`=Q$$!0_Id`Zs}}DCCvdgcycq zV}0>Woi6tLkOljjP+x#|iEY47Wo>2mej-+HM!jkhe##{?4l_5TERThaBkej$*z*$r z^E4!B{*%7@qJ7LE!Y3AXI*^8_kT~SQ-DH*cAatNp<%g+9_Y|Fs+3`GMfyzYE2Y0B| z1;E{uxJSwxNPfan*QC>6GlvvC3)5^UE3{5Iocj)@WfTPT$jSME%8Gr0z9ekcmv6tm z%B}SZxp~bb3dSgKh5GFCV+Iw-5MDa^U^4gky0^A^j;aO`d?1t{xr zHFdFq!Tb-R_jN1B!ni_6+)Y@0l-CV-D?6Fiwzyl$Qg&kMTFYMC9M50N!qA5Yfs=R< zki5Sz2){iOcESg2+eQUt`SzBag-?0)<**34IK!T)WFNKNcHdS6V!(TwToN(NWcH8p zN1MS5KL=@nQeoprljvv$7=ViLT-q(P-mGtni33-@7*$`DsjdT@{$b}5%cfu|N7F9i z)u^FBz#*?J7zFWnR&ruGWFa43$nIlDT)JW`aN6uYuw^H( z3~aLU42-Vf_x60+B_EI(z!u*t2hJH&92BWB!nB%ak4~?(&j-3Jd@VgQ#22u;)xX&O zT#E;Y6z6=P$3rY=0(|%I!`|5WCD!<%(I-G_Zy2*vCLX|M33Y>I?IB9Q{Bo4SgiVKu z+m1J4Rd0rw_9nDrZnaI_yFD#_xrXJ>=1g|aV(`tsI37cftM5ajbma<~CX*g3mQFG_ zj|-BaLm|3k04}*Kl~*nPsPR-_*s;heocbt+3G_JG;UB$G*RS*Kr82XQ*h82VB@?U9 z91osix%SyuQLNWG=>7Al5j=NpQi9DPLa;j_$-VSv!bx-7~p%O zz87a(cVnY25%#llwUt`qnqse}@1LmU3*xeZq>M@|XmI)$5<2X6ZoB&k`0#wm%`RBh z0G;oz(Q5-=h0`@0(lv;y<*SS@dBuev0^tp~rFAqU5fHCROH5)^1vRBf#{w)4FRWJ> zgNp7UW-N5`oE14{U=Rg_{WeWk%r_{^X;M>avsvm{eS$?Yr@tHKx9lwpEx4`$d4~Dxl&$W189JW0L_W|(dTI{ONtjmKp{ur zDo1T9ODaCW_Dc&Tba&|9+n$s(Eyi$caZs9U@rTB#K@uG!PXhH)SBxrrRdPH&S&^f9 z%5&FQmg>_XSo{H7XoCJ?eQ8bVjBsrO{!ybYh0g=*2J^-0tRcRHfF6I~@6tJuF5^Bn zr6AuW(T6ZCG}ltLm$5@a#3tdPC#GNObampk>`9Vid26+p9l zW+941_W)E7Zf|B3y0d2v6cKdzYww>?Unepb%MTQvhU_CIY@_U8y@6BKBBlYS@%W0E z4Bfpnn;*zNWd>DQj{Ca^MasL^A0I?nCHDePqPun!of`#b!EEsp=qiLSMF7&x<@6Hk zc^(D;?-lMC9vQ64pp(FYGlQm0PO9XZVECYQSz?roQ-)2z zdOd*x^W0ue1ezGX{^esHydiElziU^d4wA4El|6aY4_EOOMd|RJy>r-NnWKYQLW*zju?`G4UfnAKh*I z7GNw)OOT;*4{pLP&Mhx1BY9Q;(|jf7r?^qLEi+&(LW(Gs znn(IRXN(`JHlw)|i3hU2BG5E@kVHa8O=ao%DGoc&`-C^?Xm7eV7ZI((8f%P`r)VB` z+asma^3RCDx4nesX!?OqRYO6%3hz^n>D7gdi~D?PeJLcBAL9Zi$r4r-K#dlv{T185 zMnv!`Eyhnki&Q~sy{Jbu+M8Dudx8OMGPwEpkS&C)37R4N1K$A5Vj0?5g?fpj=GSESF`l_;%k3@f?l8Veb!d~^rUY3|Y7BNAk6LKQ2LaHHF0Sir zhL?v>W@ftcSy=q%C@nMx!}Kk+C$W2=-IUR0EzqfS=ak^G9edJgfIl(o7A9Pfp`2c# zVXPC9OH9yd`THoB79TO+aa|F-zzw2c8AbrIFA`4oiXi9H#7_d)?k*SO3p|dnF!;Xx zY!_+etmiK#^6g!3udP7-f4JC6Ai^aWdFPRKu7$uo)^E-Sk%_{h8}t_P6)`#?IS$5mem0~Gw{>~z^=_d z{R@MtfiZbQ$6rEh|2nsZ)=ai4jr>jdS1L$?zQ^7%1`HcT>NeUYc}kB`$fEQLQ}yi7 zus;^&!pY*)A~k@hzRg?_+S`-ZVr<9)5Wl4YbeIcx0)sGS za_y9A96RG@RL`KZ`F5E(@(^?Y`gS_PFc$yPthn#tW49YKh{-b|p9`OCPlT-myC&GE zWCKZ~ggFa267gGkIjIYX1ab}MHPX{RR(HWFj3=nyKLV@?7W#s)f>0L8^Bj_2rv0kK zQQX#NH)GvvZSzY^lnY(kz*Y-=%szUTp=;ljRCrWnK!fU?^SrP3QG?L2pwW4y8q%J^ zqB?{REv(5ba6uF>Y1T6#^WDBHMvJF2_YIfV zSy?@WnOEFima0D^KG7G1FTAVth`Fv9UYWVCYx0fHobUE@W&MCF44`~JPe(igm~J)~ zxu-nahd=i3g3p5_{Ut5_vFw1L$oX~D*Z&Oa48zJj?0;0m(uX$;e#32b$vz}*t;#s+Ls;MSKnb2 zX8n>4r!74{^h%~h8&$^xUUys(IH530B{lO-plNk za!w6tYe609d;lvg9<5SbNrTRy1tkUm8(}9UL0(&T7RMul-%O;<-_LUD{{VQIKFeuW zN^P?L(BM&Th^R0qo$+5f$#**cy2I`?#@5FdZ^6umJ6&~c#rj~%s-AaZyEQi8lTRqC zGKQnCtF(AtX1MPong^YpN2wA=gV~#LPF=39{d5lk_Mh*_0x~c}-BXC%;RShYE%Uv4 z4tDy0pS*OL{WGVPWCXl7M5Fr(C|6O92UHLMz&Nc*oOn%-vFi#e=-i7ug0eN=DB<-3 zz_PBcLzfaer28a{8YD_v!1U*s>GSyg*3g8EnHRb+!hf7};JMEw9l7-b2GupLH4WCD zeT#j71X<{X0TArxs41`I$zIq+=-Uiz3Kg(hUv${ha1kWm&;xGbXPlD-_ySMdh?>Ll zi;zvE9|5w;NX@ozAAR zwq^~096s_P!eOiLAHrf)tXG@ZWkl*q1oz7Rw^rDh*(p7++Q7_8MML{ekf4FFL3N7X z+h>=hMo|@0GQsKNaMMEAP!YAN%_V<9!%Je^G~g%4`3*j7499d`Ta3;o$H8kSsgdlu zhLmW8#J&YAqX2iR*hq~eH(IEzaw?4LJ0uL)lQYk`ot7Jmv!Nu$2fvxM$1>^I^rBw> zH14?7Skq6kbJ@dR=U?p@)8{IOsxXq?Wqqb+sWV0hcTWDd*sD9 zcW%Tbx^ns_$Fqr*vnf;08|m;npl8DoP!YJ=ld9h{-%Fvt?tA5!tA#(aX>@i^hRNQLJWfM&V)mSFy3o2_BL91yjNDC8j)CWD!N0N 
zu|VfjjH1H5N0`N-4;ohQNu?%`*dbY>Yz0tJFW>pj_wg?uRrWx2a}PE*y5H#0b^>U* zrziIl8;HZko06*5b1?g@kr+MU6-}vPs)ahG*~IPvQ^l$HWG}N#fi|E4kcZUkaC#*c zxhHQnh=pqbdMV=WXzPQ38~>yopuu{S8UM0l z$s#H)p{I6Dzl%x0bVz@)C4wN@fWMpd8x9b$+qcam?p5^&b1cezNQ|2ESwEezKxFP_ z3l!0epp_3wm~RYzCLxI|Y+@oM-$%dDKwCweS-vCg3Y$aLS$8s7;zZPO%nHrzJhtpf zS0UimzjtzF3%2Gp8?Nsy7H{4PAY;jNF@=>>3xR-Wz!iQ{K$W2Yo7hH5y|Wkh%koiE z|23(3(t$0r`6a9Jyfx7KXn^mhz$)Z7&^SFd|<3l z+~KyE=m3ldl|PVVsL-te#Uzi(LcCvEG3Iy4H6||*s}rL}eU_CO0<2fb%jZ;eyS zd=C}s1Mr)GD-bZ<;}RVc*I+y&H5gNR=Ip`w1K!2(+fv-MES3C*Wc7lu<=@-Z1^|oPWgqJ+ zboiBd%|OKxMR6!*<5oZ=;CO15vzP7dA{(N-AmVV{Q2LMqP)Ku2&r!=3P?Bl3&J|? zW>E~IfF@U$j`vG_oM1)ICURZLd~n(muM?YG1>J0p-K#K*g~LJkz2ogY_eSwckGysN zHrgr_^fbkgX&J{q(#bz>kWnOR>;j;rC_D_F95jf6?i~Q5J~(9dPfHJ57}Q`F{&j|k zR{MIg{|PXQo?O8y#V4&6R9d=pdF+lb@|d6r+E6x_E)q?9sz7_ zFb&Y%&P-*O9sof1w{sc}(idH7oVHKiaIUGq$hc5Gd4^Oo7;~QmdY)$$E-=|a2l{T- zQLYIp$R8J``<^*70(%^2`u%Cw*oW7n&t>Jf&}x9B?ui|mWea&jY=4yC(_`eNLwYi| z5j^)JxT0iV`~ySzNkKzn9Q-H=XBtqX>bafTY&KK2R6(86&yYvhdFF5EGvGBF1wdE( z>|90Z9r$=otx}==1^}U%Iw2RG>>DGO?eBN#LjF|R6{&lGXz(_pcg{{rjIx*Zq)W!d z7T{c)q#1d5@H(b>?8n>sq4X%JQ2IRDxplJ+8+tYMPY%8Z!av5>FCE8`3xl%`c#JO< zgg0uEIxe%np$0XZtHt%mUh>(5ozo_Vpo%QN80<1+ZDSG@nd+Mn5p7V}L9AsZ|ENfRC}@B8{wfXo}0^LUi@*b!z?`^rRhjjk~_3fSm^%Cba(lP-D_|*yc zAMkKza4als;<;>%KU=1Xbdf3$4Z$qmBxL(wzZ6BU#)Ow*3INh8!EAxheKyoli*WOTa0sZN^vXY;;8!sq z>w4ja?p2Q}IHZpcPWD;%Vxv5zE4Ddoy`Hf2-PN6AiUNS~wKXs^+e#qf`^FnQ)jXf^2-E1b8!^fwYcc3MeveN%q_G;AQ-SRDCg!l`TCKc ztMcxo+isPbmoF>02i4`Nz?L~(t0YM=obV6@ZLoW$M{WkaB=l%XT9E@@A zI2`w0Yp%KGoX7ZoTLqP0?cgv_!S+{}5F)oZRXI4F5?Io~T%gEwww6uUT4KdxJ%&{- z|J0Q4-IYk7+U!VQW%w2%@Czo0e9xn)1lHlvGex1IXkXk%FcvMo7v!7bU`3#^8GwTu z`V-L2Mqns_!9$4rcj~^wR}2=ODPWYFNW{f*X5S}-iP?-^gTQ7;klF7J%+`%)hbkb8 zb%(yIuC*Zf)s6{RBn2f3{jSdhFCRgdJohgl`_IdFjjNSkc*YtTGwmUk3s}SV$+Q@J zV6R)`p!!kd$(U+fB3m=co$5cub%*`_68E~TSe;%YB6^)yWQX_r+dx{m!C?7em3n_% z?k55`P|?LBj3h>OfLaddy!x{yrJh%tHr5EL7v|Z}VXA%S|76tXhc*MRZ&?5N9mN@> z$-2S_FkvC0;Q_F(0u9v5h14q+tGc2HV);(>b?l)Ij?I5aNL-7%50K8XA$jit4YUrB z1`i$pbNJx_Hy?*+WeZqknFyVnmX&dE1ZO3lGzh6jn6zsCy8wy=HwNo!NH zu*$2q>F^u;}DOSxRblU&S1z5kG?j42g3 zeW`MA_clj`xnK*`=(}QF_eD}#0^w67X$$DwNzVCYwSmdNzu4Y}0LsBFlRI@9=>s__ z&z|pnKkMh$M1l`oWK;!gO(|;aWk!AQj;ysxx=&58wFCjb6=Dv^U77Ps#mSjehr&4r zXaji3a&-Tybhx7mosw$XWFoWO`L$%sqV~_88LU4_SmU`pU6;jxjq zZ$V)jI#06|xqqJu z$^>S!BM8dMbzdI9yH<4f<>%6g-0S|Sl zKOl^9dos0-wSA)LV{jJ)4_yRatVYHai879^xqDIdjqoAPFE#u@f4(41i^j6;Y9Mf@ zMPG2wl(-5$j^%oHtfp5u2IEF~MF?_J&2v|t6AiVt=zmUAZvz8$-oR$w_`bRtk?mjU zc%KLZUF_$%)5T~0L8qXF>rB!NS!~j;!1M}2Qv4Tafh1Ub$_R?KAWr!9E75IW5}g-zWB=Rm zDr@iZ)19?=Jn&ud9J1Jha;skDo;fDrTf9xQGFqCKVQV79JX6XvGJ7X0SJ7dIcHuJY zht<5*$ji$$<*KUZcXmQY6vxjYZ_HU2pHiU=5aUbEuNR*dGr$(jnan}+8GkZ554;oc zOS|KlcBjmOeUzIc-{s^%kJr)(e@sT=5(^zhFWs61>`5g%6w;XDi8#?MdOTzZ{0|b> zF-aF!rnhg;ZDV=p>{rA76ASSu|8g7_5?dD%gO*ove!>gOzBR0tclw^^9m<||Tuy_` zH+Y(dd+=?u%A?1HRoX9!w{ToZ9zS?%^4^D*#xTb1Ub?~DqHA!w@coHj4uxC1B~zu{ ztx;k+SS$(S-S#U}uDk}q4hBWDURFJPHouYV zx!Sww$O3a&?8}?G?u*lN1pQ@@olhI47yFWSWGN+hzLe@yIF~Ex+pzKv_pzl-ci$0l z-Bs{37FZT2XM9^6Z{X*93I*OOW)QQswI- z^4mZCE#_f^{e%0TyT=rzdEJLw#uRe_krpt|h~rMOlU$*xV-s7cuLd@;n`6=M!M*xN zYZUr(6PyC-DZdLvF)rsMRrK zxeJ`|B($?u013ORB(ob(`C7?wiPs;KX<;N$lz)`|z7qk*wghI)Awcb|1w^5~o{e>T z(6k-zT50af?6>oru;i0syW0LgGl~6y`<+hAR^S;9CzMSJQ;TCiBVyp3V@x}1=s;d8 zuu+9kuptR`fh0HmF>5}<6hBz-&i#C4^ov1NN0cM96vkzKq)JJR=kfAOg_@+5d-EyB zeJk%U1Q%i=7P29TxJUcSEDRIA3OamHEgA;L)K?^@dmu0u;=(q=WUK~v+ z%RF^Mvg00er8&qd!H_K4lnU&aynvB)guKttkO+(z%fC(9$6Rmzrt zkKs@H-TP{qrTKo!PfP(GqMpu!UcUC&3pY>TOm6)CP4Zp!h5A-d5UGOcWML%e7_R}1 z?hkH~jQvy^i>pC^%)ZiVBv5+{D_YA>Zy%9IwD1jVxw1-n=8()XqznTuP 
zySk(Fd32pBu(^1;2rA-^LO-y{&NyWxUU{CZxEiULYioY7rJ@~o3j3Ls+#m2r-Emeh zT07QoA6V>18ZXOvzz=m)TsB}|)$O4@{H&CpX=VES3N4`W9Np+5bd2{{p6f}P;WP3p zcB)2QS;pVuOQI6g7K|9CegV(vhq1Fq(A^ThaBQ!3W}9=b_T%8DPWzbQzk6n|fYq-j z&xN*9KO=OGhPDQ@-LReuQVXAnC_R;rFu+2Ef@3@=Wi6d7Nuf;er89EcFyAQdE-)(> zrx-|H~S;U<{3ZE0cFw^MdGCwYtmBQ zzSpW!QvN=wGmWO})x|nI{5$=yPV;j3YS+G5x7+oVvq0a0bE&$z0-BH78Xcy-;L&EH z>#hGda$ymvvUiV`O9z3ZsQ(?SZ?M6b>iW8C(Amy~Dk zKMsqrd3i^D90FeZSLv)3QgOSt#7j0Um_?9h#+68Eh7Shx<)N3s>2zxgydm+&@^BN8 zeCI7P4hRdrHNIp2{US)zsE%H%I+M1&@~1VsOY2WQ(b%#}W-|5#p5;AHu(@*TZxCV4qf=D2QCo z{W2w5X80gxqb-IN5ak;07iX@ST>Mq#J%y~8vXL^oRPBbcPYEGpNlt(KjWND>aRvPF zhArS(4IU7()X-#QhoA}|0GG#ZeH4hAjniI9sc_^fH~%xS%-ZnhtH7URI<15XOYy>m z7pccDgERw@b8#hUAQak;sGjTw=POznJMYIhPXy0cCoZ$P`{{oxHIrNDd{@BYf*vjc z`az|6Sx34i;gV?{c-q57QFrRDX^D-&_?@nV$dB4SnC($y}mdNS)S4 z)RQVY%sjC4Yr&3@{2`8JGa?s&(SG3U?fy8QZ_3uC>9k`b87wYT$m&Bqg+usoK~}wD@Y%2}klfu1{B@YHQXJ5R zO8cf08%QI?u!eV?#kXEUV&3#_@bY^Bh0c`Iy}DiAc=Yw9<1G+lC|8mkIJ>_O1tm2B zFHQOPURWg2^_pUBAO8LD4nf?btaRPj^oYt+;etz852#9%!kQDnKYZZ?wXZqR72|8+ zFt%C!AAOrpg6hoRoN)ArGvN? z6cTs-G!M{nIt-f{of-$}AkCv(o4gx0zzU6Ibfg*zl2-g))6Q-ka~oz3g<@Crz^>P8 zH^6I`kxECGzR2y52`wyGc#-pTwKS9mUe}SHNtqG-A{hP2_oScvio%v4h-xb3OKxSH zPoX~9T#@;2NySJ~OsWeEyIvN-vb=YX832b$&Uk|Nrz^)FBVs&K8X*Zzrc>0%w}AkJ?+LT{ zar#@5!bu*GDURp#K((;40a;;Zk(S!}>+_pIYw^lMe>p|0T0!xVz_*?{o_i>826%Wj-I(s|TXs{1PLvuzh3AzD zPT>Q2myQSE?n(`hDGp^71%t7&I1z9>H)p1jy_a;DFVgH|e4g3*2PLGuTu0or<(o>2 znpKF~E^36F7K<$`9SQI#kUQzmLg)9HwpKH|+RzHGGTe53a{sroIRojw=6al?j;72? zdBxyy|7LxF9PVCRzn<4RoHh(VPzaI^@n&>{g3G{My)40F@K|J7|FbBXH|}ZSHv??mp-WV0xI9&H-O5++QBKGyHjo^*15PRN zSW9q9a9yC&RVLSz_lW=zyGh#&Ka-o_sFl0g()tP}pH%bvrSHMZ{Z}uC`E|B`_}>Eu zL68oy%J?fXJ+$t6KM+kFW%H{IO>|t%?e#gnq_DO7DR_K#bo}W*zKA_xy2tk@Uu2qe zl>JFIm&`~W#_eVwQyLRzSz9vo$E_>FXMlc~3lPHr*kIg@e-nul&X7R!6BFRRW0oRW zm_`9~uYDL|MYLIv&kb&K_?&{ET4o3WDqxEVM6?|WQ?^|>SX>HiShj{@dnF$FKGI_G zvY^X{&>s;p``$xY7962p`7oG!8c0(@Xms;O_G%LcH-jqYwhyfdhwN`?YRSMS=e4Kx zF3k4HQ;gtRectC>I#sU3_O3DlC>OB_&~wx0_K&$9QVaC_21G!dd4?sF`g+Hx8B|AX z45c4@LGGH^t8BbJ%EUw2wxWcQnA#85j)s9l+Mg;kIzxZ3-?yDZ0Pp-W9#is zQ|l*6i`N3ZiRpELAPnl)n{6|ig&N(G54*1zL-Ih~sOtrE!{qYwI$Nsg|3=@?pU+Lv z#X`+OE>~wR*SNvM)%(^F-dto@acODHFBq)Mi{kjR;Jhw*0-)JT!WR4>$;v)SO)d_g zM4hSP9|;9CTPsq9e~~XNOYf}7)6s1eF7fzV zJoun5`s2!+}OaMK%@x}DRP@H){9L4P1S9X%lQz@BJ5$S4$TTF%(M~Hc(bgRpVfUU!= zPGuR9=6)xEAA)#0L>usvXXwhFOiL;CwLjFAW`QWFG_c9KI->m+$s|wpmiOWG-&@^e z;l%98>@!k*e?ysmdx_yvd=5IM+wm>V zB|LF;oU-k@b0~tfKzB~^&|G)BS$v}4535^BN*NS?xhc`NGG@1J z?E*l)+PW-Iy|($SY(iF;rx;VA@=%2Mw*Pp;#rXzb?eT_eLplr>ojq~jchv~s7bIy# zK={p;4&v;T5Ce%aulKRT!ypE9a9hBY>Qy^B?lQKG7QIB`?T>`genTk-Wp-(>0_&fE{FFHWyTK z`qjq`ZX0YCc>PbD${qFIIffTk7dXFi9aP#gs8MTNAn%FwuG}vM2DL{;KPG!i(&mr* zc9@1VLmDnnKj(S-aT#Rbg9CbJgZ7w{D-Ediy;XCk>y2kIEhpKm zlGI8Q2O6gtL!mzqu5>H@9mf=L$waSO^YLx;i-FuADte3PDxE7m6>|)m!`nX!h5H@#b%zom=4>} zh33{1!uI}{`!SMokxEc7&(v7eizxw>)7$Q=d5{{>*DxP{52xR`8D_%{%psLUHoYVYZlU3ZRf$f)OB-@pKDNLp-TU2C;?BU4Ed+5DE)jwbHS?A>cPwZ zqfN8(MW=TqAK_FoSU^4rL)`K-csI;SV)bmGf^`m9b?O)EHNp;;$jTX=M?!-S%k#-( z`Y}a6T>B03|Fi&U-#9{}4+W7Hk5yVA5^-HRM{>X1n5%C8CB=H>1ID)#J?w<2-OGza4b0I|U3$l3eq( z%Z<3z>w6Sz>+o(t<&ps1Sy1Bsr{Y}GPp4tey!#(1G{-N)?qRDItqolC(TkfP`;rtkGka%|8M(`At zVk^e)Zmf$oz*a=egIg4aOa`;WSMW;Lb#b)pWs&mqaQb;)N6aIKk>8@@C$U+R7)CGh zQU4HHL~(?Qd9z>bXonxC-`M(^!d&@gU2_4o)_hfv?swb2?mlNaRzH)+b1q>{gsGKA zxc%h|D*=XsAQnrKH+7{a``S6!3zF1hlgF8-c}}EV=QhnD&8Mvb&FO7mL4KfP@)*)@ zWAMN>Tk$c<__54&e(n*aeY4sYF<8?6?Yr>L9`sWYtzg-Fdy!!(AXF1xW^S=~ya7j= z)4sDEGC(x~SO^Y;P&W`QUUdFwzxGx-_Pg+v3ZS;#-2U7sMDLDE276AD^YJD6z0HEw z`OV+WopA4(dMMqDNfG~{<5XGO!Fbqy-XQ?c1;E7VcOe)kf2qd`FN_qAB2ZD>Q@&F# 
z2`69~h3~r`K6Vy=OEm)!G9|Ej5{Ny4BS?vj`fKHS1!@e?h>_1&SMUeDpORlVk{XmF zyPb@PN-!mmw~bv#fEwrPhs&+c-##Xv>ucfsMUd~d+PZ=lZZNjIu~ispvZPSFo4_QD zT&1VJr2b~2;{?ZHRmkhVWg$R6WvQYcwwz2ooTBtJ$+l4+KQ!IUCo96Q)CY6J84;fR zq!qm6uj`T7+}xTsBv>*RgW4&FTSJ!`mSbCB%Zk*v%00!{v>kd`?l}Bz(jUl)-(I5g zHPb_5zP#Jc+q%%pB#h)9$PjgdB+qFVy4DO-j;{LNo*%Bxn!D3~g|4n9gBtg)W_+!9 zvfCY(Sdvygm(QdPGN<{U8LNUO7r9>>XSubDbO%(>n}j<|bbAx6{~sA&8M144c#KwSsi@yl-0nxLKru`{(0+TCV***`9oy zD(3G`6ut}O_rD()sD&DxkH0v9oiosPQr6hTCUM5#H6* zKMzu+IW7%N4pxR2LV;w54l}YkoU&ITwbd_5S65k0EzcNrrDe%XOXPmp0J*(%0^M=P z{VdzkQ(A|&{T|CRA?~c9{~V#44}e!t4&MAX-HXsM@|!Q7>bH@Nb7*$qg4eac#zBOr zF|PXlQrFY><)%5L&2Z;_lUKG8vs6UTcx&C5(5w1jzU>2Bz)evN zskQx8VadVLB1in}6Q)F+ss<-)h$aE* zLn0qqnR+K8Y^Mq z3?3GIE#uxfI5w>5`rN7)a3mEo|=7fl9w+Q6`U4Y2Z!dtX~Cx_=>H7 z#P3>_JfY&vU@)yfx#V9w|2VjdWQ^$XfASGb{rVxQxkDhm&B$KxqQhDiTOOjU1*r%G z2ZxDaLq=Jy=Z62W`uK@?#H;P#w`9@y4(3OG{A<#pdmzfHc=E=WVlO7SFZCEZkon=> zm&O!MTk}1vs>u42f3)bv-)ZrUU+z9RNB%zfsOo?BmplH2-FnOtq@vuvXHL6e^}18N z|Mu6VQTnaS)uV;f?BvzRTPXQEMx&dU^UFX0#h3qnDuN9ow$6*0U$O#>67CXmOiVEe zPEXyPPdmQJ#JC1wKHJI}Za?#Wx^^v5l5$|ZwHKGjz^ zi&@34R%B<3lGw=kay~L7e?(C)_Gxc=Pi*nrw+YzIRxw@ll^Tc&h0)9Bf^^(Tb}PgU z>*yf1w}FzjRLjot7B$3|y~?sLiDt5-&GuJ+f{*s^Y?w}J2B_YQYqskv^BYKw1v{jT{-PiH^G za`yH6k-H1;e7m7iTPsIGXqIzLFWQUQL0R%JI9X^i!}a$W)n4>C1}HB%7pDRjN8Mov8OYQH=|p5F2VIVhxErj&Zr*n(~zT#RkFG z1FsIcn%A25UdKoz=|Z`LWFFXZgUK)gLHLHTM)1{&g201$#u11-kOm|Rl_S(gz06k| ze8TR|cXzx9f*^%!N#uq)JoTt;3RIE!!GdU5JC>=hiC`q{=Te_%J{3{`ndAA1VTs2E z#wAEL?D&(O==$+j?0oe}40{I>$(rqV7!RG{|ETp9^}6ZGSVs`+G?;|7OjVj;wR)4S z2mI%XLVzX($)}s>ZBqJ70JwO5UAi&IcIDO7gZ1Ja{N0tVlt2SZ33^V~4}0tSI-b&Z za&$~j?^_IA`}|OIsWWWpMJ4suxVE`r^{FtDBkaf@x>%`CTrv7{|moL(~ zZ)!K%B5rIXjosa;Y^d`(h17-qbbKzm^MuVCF+uZA+pO)r9X)Pp;qW|fD|%LukxT2T zvm-i9MHDSTWR{lTHM=#$C066vXj=v?)?~R48FzX9X9eA0dL)bYwSSJ4b?wOYrJaUq zr#+_HG?fSV7L6L;JZAnp%+c{FSiljmr|axnxn}=)msej&YEVe zdYJn@@Tz)+jNZXmbWKxu(!Rv{$^X&%YS-D3TUho{j2&xi#Te7@eaWI~eQtKDc2GFl z+-2cmA&s|$80Ye|v7>v55Qsb7{kq;Xeq`PAsrknseDcJUlb}m?Rx&K7!r#lx@=VX- z(8_!)+eK(Z_y#p=%cXTyX93~-oC@upqSp`R)RIzv8=drgqrU;~B%vENnm7j#QSZ!+ zVpN-?>84rZn4$p!%SDczD=7{9?(|*V!`?OJS$w<2xX1E90r-napVPadM^cWG_OTzI zh*^wJ>tw`a1qb7WM>pqox-JsU8hob}0*|CNe0NNO`@;M{(>e;S#cf}$LFg$oDrUe&ZCm+{BqjHyS7xO4yrqXPoplF_ zfU8xn&dUz1OulL_BDz|=f#Tltw+2^a{DF}njs=d5!k1NH{5eNv@np1f~XrWDZW9!XZW6Y;Ewco3`(kDa;EtF!<^5N%f&HXhE93cUUJNM>z5 z$1gxvpb~Z>Wxv#Of_KuVSg)VCs9ZRwS)|0sYyXvloj=E4Z?aF;WkCwsgDCEj)yVe! zk??>btaRr1Uyda5Zp%{FH&fUi!QyzLsmVkXFC9A%Usa28@l@^Xw;vCaWG>J@TzOty zR4nx{ThZwm{VFyjTQNRMK9k(?GwcARLLHFWrszz_q%FRTbu3hE8lc}4HT?^I$7gBWye?nX%p=A}?3}>ukd7FXZ=#ww=~~WQ${W^phkLC;Zn0OM z6ikR@|B^IR*Q-5_3-r9G!Lb`JA;vDkz% z@3bNSM^TlI} zgIx&`@6^8ld*V)r(T-pG$cwyQJ@KXszkIO?`FM|R)%#*EqaIDl<=poSU|`r;=qjuc?YqKySbm;xk({FS zph+xDlL2VxXci6nR_gSQ;4)W<@78zPgyh+1a~q;!5vR(7?dosAnv5*9X*HBdA2C*% z|Fn&0=v*utUrQX4uh*=1@rGlQMc<=j*bMj@h4AGLeebVw&B%$3<~6JDcF&K&7ZY}2 zrO=P&2drTuQ}2z|^JfpTgY@LR!ajh*PspFx)=CPizjl{D3l`>gpJr5B+DgA%8to|d z7%s$In|1&bDu1-QKcwqttvK-N-Qwk2*8JI6NGMYx1B?X=_Kp43#oR?Kd3X3)4Tx%T7mX|N9Z>5mvmUS@#-^s?? 
zq_jVKx2Gj_(?&&8M|EGjM2po(?sN0i1Zq@PHDBp)?M!mfVu&5=i5x%i&{UVwfkb^~ z8&ruSoLNqZ4by|t2nyuB09!-choL#zSER<17p6IXAyf2y{{KGki;Woq$AC#p4sPaT ztIDHU){?-FD~-(jWks?#-`kkC6&q#@<+P$>3mYuGTXhiP3v99=%~f2Z+)-??WXWg~ z{;Yg^&Or~l#&HX|78o05O~4c=+OdUwT9-8ZYHz|>rU@axOY@m1j$4XTZfY61}_^8V{b2R+7> ziRgy}W;`ewJWOm&^qajXg<>?v>Y)tt+iaKF!htM>JnRct$ojzV-Mn9v6+~+A=~7#e`on&^m7K* zfjoKm(=D2}YW4rsi6lbx?s+~Fik`=UGvVabysI_#ewy>VHGc7I9|&e1h{dxvvSD?V z*Q#1zIe1Gi=z!U6)hAMTnj(iM89#fjW{zl4HEW%Ds-Mb~19=d9cx%he zsfo&Gu}0fh6&8(iSk*?q-6$+7l6{e({cwII{Gi_+Qpl%nn8SBGoN+h<=UThY2AZNC$kz3UZM!uu*~fc>9u3n>qSPu?>OQmkoh^ zO!c1FKDpEABv=Afh1Imw7wORGRwo!aOoCi}@`|!v9?i-;-yj-0Ho3Ts$e2(pY_*58 z(wIciizT`Ml;Ca*3H9ab{HE;b%8;C9iM1+&!Q7b z18?gccX}i9V|FgzNw5^h*2f!VZuF$e_pOPvx!6wET9$DM*E(PTFf8KNA0PU^x0b#)m0RR>UH0!hNKI)rrw^_3(zdR z7rq-2A1(h^hm-C+ojzqKhpe7`)#G1P-`Z=&EiMU(T#!5q^rc-&OH%)6l>%hIT=$yg4Q)dFWpZ1Sbp8HDHsc~^u3_jR)BLs%yIA}My z#mnMRIruZ)em@~Qjb8E>p(Mj2^QQj+kp}OLeuY;xHqVp-5QUy;yl7d+ z>`&gBl@f^1gC@}(tqM+Wp?MEx=4fPBx}%tMoKk*4QO|8dIReQVdhdb4hJ19woQNWG zv_rAiljYK0(f4bD%cb#&KMUR1uGj>2-Ur(#k>ua<@_$?gq~ij_@bP>In?@@Fi#ut=GDU)ryYMB zo9{^++#Hj#hzT{F`r*pa0q%H*xqN*@8a3UzB0>-P_I+#^<$}*ECqaS--tbv@OE``k z{c{24hP^lEqr4OMG$>3+Bmy%py5U(Kc&68(ni7j?TQ|vQ`!;{xYl%jIywD z@K|wD5=j;h{ZJ>cE6~;JBJ=tD#T~0s@-h9h3v~M@#}s}rKGH#Gp)V@z-~1}acwe4T z!#x?{b62&R(hk*)4;!6ktP(WCBI5fjRYEi{~Ag9V=3 z(%IhBhRP_pqJ_xv65;sJ8G}kx8!(IY`i!AC39`aV@1=cn=tq@i=ZzVAZv={x#mIkHA3ZDCXS>4BKfR|d~y1mhW6=XOcvUqM6G z!Yz~xheHPumyEIm;0dCS$Bq`RTGfDztU*dz9jsLLZE0XLKppmdW<24<6U(%#$_=Mr zWJH#q!O1at4#nN++3`2OqLtm}`1R?xpUrqF>zWr>jHd+y+$v#S5m5(Img2h)+lX@> zQ&@Z;;Ql`NrT+;8mZjWCwFTxF-e`5rZnZu+sY0Ja#sNjMzYe zc*;Qv2jA6DnI^Mlu+G>_-5UQoNM=<>KIUKy6bg1@(nZO_Up1TUqqV z?;9E#4O$X2GoKxnG~b0PB_Q>%_+5RGhFhN=jm682Co@TQjYkd(Y}c5bnRV|bEv7zY z{gMc~&$D3`_%t@9rL&e%S^|Uk5uove?#1=w&wd=TKhBHzREhdfAP zaIakse2o*UvQK^W8oG@K&!_?$qp#FWtYSw|A%k%Q+ zN5_98xb~jNdx1uKN%k%CPjVge02SnF)!tdMlPoe1qeYwdx}VM=_rP1kr}+Ww=?Rq~ zuGVy>TxP*!6Q-@^cS$uE2dlyLqa}e?ecqBg*y5Iz*Y`xS7Ydg=amzHVF!Nm45F90k z+_}q~V3X+y6(dcggnnHw&T2J@d6EsgVo?<$O`EsV0}rg;*kZoJ+Q;}FqDNGnNzfOm z93m8=H&?%JTscR+r!W5ja?9P<-@l528C-8fMpyV2QAz~h1$aGH3hW^wIH6hl!dY5{ zkOnxR|CNKJxM$ORV!4THl9#n{b;2;Rkn|4c_Q_0g*`QVImaSt%f~y_?W`9mTW_vf< z9@2vlf7H-hx2)k$!`%wA$CwmomvcqU4O0XZR=wwCf*(2lJOWdYNI z-XvH`*O<_%m^qQ)eg4chz)KY$3`bHd15S{d8^fLWPH?V=pUSapAm2pxTH7_q9+irI z6&v=NXeEjbKRoNIXLNt0IlsZdqfg|qy!Njbpa+G$w2ahiHhabw0KiDGu|yEB$&^Wd zx#t+ghPP$ONJ46;c(z+~C)a?GZfZ`Uxs?UK2k^o82dLa`AN7clK|*-N7*u7I5l?=s zT4gn9K4q2ltibg4SJPjLjpeGp#$G=1-|>l_!Tro%cfWEz226Kka$Og<9h<}JgeHQ$ zL|PXkN@9bfd=M&}J9{qk)S%Ey%7>ea7hZbBH($p%zxnWQ3qG8E{w9KVtV;B7Xl>;- zyQaloPqdXqHddh2_L=r|9u{f2ay0Fmfi`uNSEn&>$+y5{NXRJW=aR3t$H{xX!u=ge zJm1%gXYt>jmmEfkap1+HGL2S=|V#&Zi--cGv-5=2^ zBGCvl00Sok1ls>T(`}BvTBWt0ye6CTzV(b{(zM1#l`72ZMCISGdR z9Y;od!DIjQ<7_VseJmy^LNBn`wsZHxefg8ifE}su^-g5d!*TGw8qocLoS=PG8k5a9 z(VEbF^l=hSf9f?Ssq#NG{^;|wY2y}pxSWxFrf=EW_4X$FmqAi`R>e?_;^}Thfvb{W zZ1TNY8dG66ZxEf0_jWeA)*Wd0AmBJU4}Z1Pc))Cj1&)I2AW&bz5>N;YZ7bT-?@aom^X*g3DP(e@a3<>y2Wj%R=jI-P3`i%5 z)2)yvV%Rs+XJ7dH`TNH=w{H3SpXd%Mqcqtb_n>h{hqSmOEolNbBU(c)(uD_F&b*N?Wwz9XMmED0dwmE+9a zj_(5=ClI=(0ZU}q@fj!Nzx!pN0`!F??0;>aPPwU|A4!szzHMAPxxhM$-@0ZPA=-5(r1<~!#fOE{Ls2*AbawP;zi z%R*08TQn<$Izt$8K(YPj4Ws(EA7zIIv<>x2yUbDpN@PX%So|#OtO!h`N}ZrAiaw83%mFh)6O!BhQXc@nxw8yAFQ2eA*(lCF+8}dpht9^-(Pc(zum5k##xxONe#r=%l3RjaUwmrPCXHWL(z*VA;b&JZKd?Qg&rxAZ`XHh~@n9{}j4vae z5_ay7JUjVQ(u{wV;oYV95^nC+|HfQsUQQks%I|MQqY-$Eo1QMXUBA9#3~aSDo1b( z@dAU!Hx(;#dLYyuE>o#sE8|Uuo?B33&-Sku(KwTouzmLD(AaM0dtKowmsEQuMj)VN@ z+}?*y-XTA+*f{{4RH=jEn)aY7bDolILu2!N#IBg$51@egK3?>-k;G%RfE3o6*>3VT zFVKq_Yjr1S$>Zw1)3g0G_-&DzMuLkL`5n84ya4&nsWFefIq 
zlNXeFJ%-*SNfkQZJ(6|lUz+=1*m&TP7AgSuEQ2A=49lk_GXxsNQ!Mbe_NRI;sRud~ zdjy9$q3ZRH=dGqhWyh~y51V@F$E(qBqwg`BkNLq3SPrR|@u*VIBgoFtZUksJn`R$H znfjV)l>qPL89Ny{o#uY}KCO!UPNb%b;1X^-aIZT_T7*7fzU|E0OVFCydPogra7Hmx z)(u-kRQ?7jhw2g^ST(pw7_pU+GDIh=$3=Qgg!Ps_u$9iHpu}rn zN9NHsCrGWO5|Ff`sW6vf4+2nKh3vjgKo!L-$ANdy$|(jnVHa4XN15|EVI2 zTA$WJj{x5x!03s84x^83ebt#5K1j}T7gKODY( ztZ!T?Eohi$qECvho4CikgMT%OcrhWxpY~$w-sIU54#|eSHGXzZWUYLYHGIBgI@KG| z()mzq=mb%4o~Crcr@8;lYsR7PjCZP`meJwOI= zL_eTv)kbV_uqE{@YdIJ1&`+k3GyNWu-asqD%})Gt!CV#O7ftN_07HMR6;H|iaJ1w04)3BqVdsdPrru|R0nfQXHZ{F#4^6t}Nq znH@>8PHHmWnYeIynKYr@9h@+0`LWlNtkjDCmiFZ_6>wzb!pM^srOeR?4J)We3^LkQ z8bVgQ(*M>1bckwVIcyo`Jx}vrNGrLE_=*%M?Q3^>OJmRV*C2GBdGj4t1I{oS+8W!J zR!#XTXEy{se%wism~R&MKRAUaA>F$g2X1XFKmk}hq&i^g&m~q{Wh^{9hOP~rz=6AR zB~d3(9%1Ce%tuy=Ybv6A^Mo>CEnfzWrKEVZ*aU~|@aj}V9LzGGTy6wkWI$j(mXWew zS&5b2c}qYcbzc(m)PMP9f4wP>SUWpv!|qYM=7Z}%wec!eLo7(P!7hC7HPBqTnk=kh z7?I7CIOfHa!e}!0`W;6-X3vcC9+IJ5hu15&T??88yMNNX9q?6d zJuKEA`Sc#rK>&ss64#XO&>V|6Jk1-uGFK8CcCCl|46SF+x_hU-;%j+S4!FC!%n+<6 z*|wkhde7`Ov1K8VwNLly=Z}fT<6PB5<%b2VePB%AiIX)tX@FRfQONRb=pPp`+>_I(x#Q;ODbN)^1$8DFXbB?^t28uU2oA%66D=6V1`La5lDV3bc8D zC^OrSt&~dx`!A(U&J2;3507ek$msoFP`SH+EJ*15cMWl;1)4Yg7B8E zDU$*myg{z#zyk6Xdz)?a8$O*dfjRsJ=1Z6m-rlYheaH~7#XUG-o-C6|gH@k{O ze$@$MEqErj#!(FQLFi%W7vu?1WQN)gqf;n#a{0sb%IppmT9|;vY!eug)#bZxX8Lvr zXLHa#r-ZsyLkOrW{Gd}%iCP>oT8sA85nfLTH+xd;PTk^Li-Z=kGp&7Z|6vj zRQb%Dj74bpABRiaW*J1~(^!fr1^Zr4v-rb+jZ5*cZFHh_Ca+u$Y>9W@6A~GgfceA_ zsA-k`a~Kk7NVj0$Ct=>G+y&D?+`b$7ehv}E4pv${PO^k*%BxqPm}uj`pswrY%-sEw z8ma1P7_%%=+Vt?n$@O6VZbn6h4b2X^Xq9MlP~6!)60X;#?VDv`23G_p18fJ@go6Br zg6~}1aj1V!bHHr$GaiU8{g#lqcG$F8{~p9I>@KE%>Q1s-G+%E7PB3BQP4@e`6pVJQ zLvh@BSxf-bjx8W6J3Uy2=7}sTBHtu5-VLm~(HGD1gu_to85OrFotu}1(V$;iidS{2OQ)-$bc zDbUAdJn~l9YqkHC_o}JUze%N*c ze(ZiGy;9|%)jJk6IbU7kIHxG-NI%s(2*V0KEf~&cBE>hBrRB3EnH)2hniBfTA|^J? 
zAbRXfh7AE_D4B0W3YJPXe-q3TKv?+uf4Pg_T72wOVgOm_CqR79} zE0uLXHDs~N)V!ynf$z1Riw(=p^m{#vzc~@Gu82xP+dqr!Ifgp}f1K35d_Fp1l2|*b z85dmgLil5AJNtHk48TWE=>O7<;*sSuLN?+u(H8IC)$>nV0)7OehW(1Hrm(JxUezv% zc_OyYTVw9=jHMTRYctF3YcNh9FfK@~6lc`ca*!2qAMwTSEl6ZkRNF2hs2@%#GBtzt zDr16+ww3EScEPHvdNBeTm`v4G_a3;iPSZOz`9D8St@ngN$VA0Fa<7>@A|PJ$ z?f+};%KxEi+juk{Qki)wdkkewMn#Fy7+NR|stJ#5r7RIe_8DUdAxmU=?Q6Cyk!?&3 zJ=Pe8A?p}h$TnuiWGusbdfxxx{XFM$emduK?sK2-=en-@zP{h<+;<}72(Ub_r}#7l zfe{~ZwYE8is2C#aK&~5#Up-b17XSOOb*?9E+>M8L9;UH7vc#S+sjd z2DTk>nC0Y9Za*>KK5gy-eWwFQJ1BaPVVI`R8{WX!xEf-ex|*H#r*8f+O0TClWr4RK zU`8)G^Be3;L-D*vxUT07#J$tmT7PHnRjTZ%4QzUDPH#f!TL8cUM%H3gR;VCWAR>dA4GK* z6nYyYS#G-oN$4ECGY9U`A7lGUjbJjtkq;z4A|7$)R7>pjNHu;43We2MHRgpIzzq~T zOn0WBVtOb69<0l|(PQ7Nvp7y`tD4ya?G9yhLHRAE4{e?BS&E zrSo>?I+qbG6~4DOQh*Xr`MqLP{KQTpTg3fw{y>w|dO6qL8;bfpguCLJsRB z+dW>?r;vdWA^L5u$*HT^8b__8j#xj%Go4xFAL3WDYlXnNfA;Mca)y3Xu4_8wK<%HAf1TV|i=Dn^w@?t7vfu;{8fq1nASS5#9AxtcDm*ziwCS zD*x8dFn25eXPdct^kR&IsC8KVbB+4NGddhT0wFLgEI;fzMYCVC6H-?1BTRbbKGpz(a0l3%I14MLh1=Gf3AjyQZAZ-t4}D}g;C8b?M_QNjca?iB?(L+9N`6&C z*Rd5c55xwoWc*w^B7FIwpYY9$7v!~mK2es?^LSeM08m$R74#`r?~ox`0K)t9ELtFs z8Oj_|+3c$z*36m@o7K(JvVuuoYD9%(Twje^^>}`P#+_U#=q=%~`~n9@bzrp6R zLR`D@!9Qo7w^H#r#+1!z(p#zeHCna{`1x`{Qxa|U0MBZq+;M#03 zj>CT2ra8>5s=})_T}b$gnvA7&91M(MJiP&+It%S=?9~+kg$rb-0n{y*V^>vj-!1KZ#n?Co- zXPkwv-#RYPdpiTCo(%i9!j&*9k$_?ltmkcM3E<;h2ccYPXN5-Fm-di^T7n zr05gHzZCmVrH>WuV!zSoTLI|_HZx@OedjMvEpJj-f&Eh$tO3bB07TfEtNcQ@R(ad9gkKHTeY2JK(;vBo6Ynf*nxMS)m%fl{3XEOsb(ZrOo;dkqWpyN6c`pEs% zG$&x6vHob=%%6nD*^-I(W(~$43oq7?{^oy@@}Gqn5jZQ>9vuf=^UNJRqUycMIly1& z{~CMpY=hy>^@aM;snt0RB$_^>0Sxf2mDzFzZ}ts zirac%xf?gpkRGWm;KT9@q>Lyb+rz4b(|c3LQjQ-LrR&blU=(bxEgGN zj-Q<^(A!x=3?uUWO_^0oJd_B(g#%<$W>BK`;`{B>ksCMeAVUoF1r~RNoiZ9Uy|^>O zp++6Q5J}X@CHLq8?`vGDjelG;*UKgEl;Nh+^wrXiDigJ2YdH*(EynASz<+-!CsAjv za65g*nGy7lfo{GHFV^T@A^Obt^GE5-U09qTY_aw@d*umH>v`J8l}Ag{ z)zPE;z5wH#c+#|lN6|0!kl~5=skeY7e&LsJkC}i@Vs=JYZPfn3wPd+B3HYDVeROYS zi`T~Lo!RJS#GsSnWN?~_U-Qj`U*e`ch=az=*XcS+fEx+|j;`jakVeq-njJ7tNseK2 zjoz1CtW`{ykVLI|a=EaQEcBEH05d^)J-uZuxZ3D&HqZe>c`&@|Z$qT`Z%}}MyAvCQ zoShT>VMV7>h$>RPXKEHcwocAJu3X;qH^UnYTisxvL@U3SPSPp38==7?<&}64#dz~+ znh$W)c)K*@tmY5*wN|>KCRLlQ{{mq8DG8v;4iq~ekQjY248k9|R2>B=`|7AIbwR}j*S5#TvgiWD$hFXxR(w7s3Qy(Y9*;X7CIl5Wq~Gn|P} z4{>A&=_msF=gIqB_GkPiP|_lNzcG_(sXwfdppdpb@3#Ae`15tfpK`x>3>A3ZYiGcmAv|odTZX%wCgvoebrN;b zGjTnm^@Uu{akm*KilOmsr-Yv zlhPYP4R!#`D0;w`>xPkj1BVU#S+=qCQH7}HK$Dat21g8pVL@?0koYNjX(T9w_)ohk z8?_er1lT*dVj)?XV5F%g03Rp>4>ThK89b_w-z^rxaL1$8(E77faB(M<7*ZAb|L=-d3vcN literal 0 HcmV?d00001 diff --git a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx index e7b06e172c..9ca39d52b0 100644 --- a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx +++ b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx @@ -11,6 +11,7 @@ import AzureOpenAiLogo from "@/media/llmprovider/azure.png"; import AnthropicLogo from "@/media/llmprovider/anthropic.png"; import GeminiLogo from "@/media/llmprovider/gemini.png"; import OllamaLogo from "@/media/llmprovider/ollama.png"; +import NovitaLogo from "@/media/llmprovider/novita.png"; import LMStudioLogo from "@/media/llmprovider/lmstudio.png"; import LocalAiLogo from "@/media/llmprovider/localai.png"; import TogetherAILogo from "@/media/llmprovider/togetherai.png"; @@ -39,6 +40,7 @@ import LocalAiOptions from "@/components/LLMSelection/LocalAiOptions"; import NativeLLMOptions from "@/components/LLMSelection/NativeLLMOptions"; import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions"; import 
OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions"; +import NovitaLLMOptions from "@/components/LLMSelection/NovitaLLMOptions"; import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions"; import FireworksAiOptions from "@/components/LLMSelection/FireworksAiOptions"; import MistralOptions from "@/components/LLMSelection/MistralOptions"; @@ -113,6 +115,15 @@ export const AVAILABLE_LLM_PROVIDERS = [ description: "Run LLMs locally on your own machine.", requiredConfig: ["OllamaLLMBasePath"], }, + { + name: "Novita AI", + value: "novita", + logo: NovitaLogo, + options: (settings) => , + description: + "Reliable, Scalable, and Cost-Effective for LLMs from Novita AI", + requiredConfig: ["NovitaLLMApiKey"], + }, { name: "LM Studio", value: "lmstudio", diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx index 33750cba2e..d200f60b1f 100644 --- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx +++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx @@ -15,6 +15,7 @@ import MistralLogo from "@/media/llmprovider/mistral.jpeg"; import HuggingFaceLogo from "@/media/llmprovider/huggingface.png"; import PerplexityLogo from "@/media/llmprovider/perplexity.png"; import OpenRouterLogo from "@/media/llmprovider/openrouter.jpeg"; +import NovitaLogo from "@/media/llmprovider/novita.png"; import GroqLogo from "@/media/llmprovider/groq.png"; import KoboldCPPLogo from "@/media/llmprovider/koboldcpp.png"; import TextGenWebUILogo from "@/media/llmprovider/text-generation-webui.png"; @@ -149,6 +150,14 @@ export const LLM_SELECTION_PRIVACY = { ], logo: OpenRouterLogo, }, + novita: { + name: "Novita AI", + description: [ + "Your chats will not be used for training", + "Your prompts and document text used in response creation are visible to Novita AI", + ], + logo: NovitaLogo, + }, groq: { name: "Groq", description: [ diff --git a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx index cc17acfd31..5f58cba1aa 100644 --- a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx +++ b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx @@ -22,6 +22,7 @@ import LiteLLMLogo from "@/media/llmprovider/litellm.png"; import AWSBedrockLogo from "@/media/llmprovider/bedrock.png"; import DeepSeekLogo from "@/media/llmprovider/deepseek.png"; import APIPieLogo from "@/media/llmprovider/apipie.png"; +import NovitaLogo from "@/media/llmprovider/novita.png"; import XAILogo from "@/media/llmprovider/xai.png"; import CohereLogo from "@/media/llmprovider/cohere.png"; @@ -48,6 +49,7 @@ import LiteLLMOptions from "@/components/LLMSelection/LiteLLMOptions"; import AWSBedrockLLMOptions from "@/components/LLMSelection/AwsBedrockLLMOptions"; import DeepSeekOptions from "@/components/LLMSelection/DeepSeekOptions"; import ApiPieLLMOptions from "@/components/LLMSelection/ApiPieOptions"; +import NovitaLLMOptions from "@/components/LLMSelection/NovitaLLMOptions"; import XAILLMOptions from "@/components/LLMSelection/XAiLLMOptions"; import LLMItem from "@/components/LLMSelection/LLMItem"; @@ -104,6 +106,14 @@ const LLMS = [ options: (settings) => , description: "Run LLMs locally on your own machine.", }, + { + name: "Novita AI", + value: "novita", + logo: NovitaLogo, + options: (settings) => , + description: + "Reliable, Scalable, and Cost-Effective for LLMs from Novita AI", + }, { name: "LM Studio", value: 
"lmstudio", diff --git a/frontend/src/pages/WorkspaceSettings/AgentConfig/AgentLLMSelection/index.jsx b/frontend/src/pages/WorkspaceSettings/AgentConfig/AgentLLMSelection/index.jsx index c59a77e715..1e21e50b35 100644 --- a/frontend/src/pages/WorkspaceSettings/AgentConfig/AgentLLMSelection/index.jsx +++ b/frontend/src/pages/WorkspaceSettings/AgentConfig/AgentLLMSelection/index.jsx @@ -17,6 +17,7 @@ const ENABLED_PROVIDERS = [ "koboldcpp", "togetherai", "openrouter", + "novita", "mistral", "perplexity", "textgenwebui", @@ -40,6 +41,7 @@ const WARN_PERFORMANCE = [ "ollama", "localai", "openrouter", + "novita", "generic-openai", "textgenwebui", ]; diff --git a/locales/README.ja-JP.md b/locales/README.ja-JP.md index e273576af0..9ba566eb91 100644 --- a/locales/README.ja-JP.md +++ b/locales/README.ja-JP.md @@ -85,6 +85,7 @@ AnythingLLMのいくつかのクールな機能 - [Fireworks AI (チャットモデル)](https://fireworks.ai/) - [Perplexity (チャットモデル)](https://www.perplexity.ai/) - [OpenRouter (チャットモデル)](https://openrouter.ai/) +- [Novita AI (チャットモデル)](https://novita.ai/model-api/product/llm-api?utm_source=github_anything-llm&utm_medium=github_readme&utm_campaign=link) - [Mistral](https://mistral.ai/) - [Groq](https://groq.com/) - [Cohere](https://cohere.com/) diff --git a/locales/README.zh-CN.md b/locales/README.zh-CN.md index 03e9ece135..df14a8b622 100644 --- a/locales/README.zh-CN.md +++ b/locales/README.zh-CN.md @@ -81,6 +81,7 @@ AnythingLLM的一些酷炫特性 - [Fireworks AI (聊天模型)](https://fireworks.ai/) - [Perplexity (聊天模型)](https://www.perplexity.ai/) - [OpenRouter (聊天模型)](https://openrouter.ai/) +- [Novita AI (聊天模型)](https://novita.ai/model-api/product/llm-api?utm_source=github_anything-llm&utm_medium=github_readme&utm_campaign=link) - [Mistral](https://mistral.ai/) - [Groq](https://groq.com/) - [Cohere](https://cohere.com/) diff --git a/server/.env.example b/server/.env.example index 1995892780..723b3a644c 100644 --- a/server/.env.example +++ b/server/.env.example @@ -91,6 +91,10 @@ SIG_SALT='salt' # Please generate random string at least 32 chars long. # LITE_LLM_BASE_PATH='http://127.0.0.1:4000' # LITE_LLM_API_KEY='sk-123abc' +# LLM_PROVIDER='novita' +# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings#key-management +# NOVITA_LLM_MODEL_PREF='gryphe/mythomax-l2-13b' + # LLM_PROVIDER='cohere' # COHERE_API_KEY= # COHERE_MODEL_PREF='command-r' diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js index 41dfc92931..f43118c626 100644 --- a/server/models/systemSettings.js +++ b/server/models/systemSettings.js @@ -448,6 +448,11 @@ const SystemSettings = { OllamaLLMKeepAliveSeconds: process.env.OLLAMA_KEEP_ALIVE_TIMEOUT ?? 300, OllamaLLMPerformanceMode: process.env.OLLAMA_PERFORMANCE_MODE ?? 
"base", + // Novita LLM Keys + NovitaLLMApiKey: !!process.env.NOVITA_LLM_API_KEY, + NovitaLLMModelPref: process.env.NOVITA_LLM_MODEL_PREF, + NovitaLLMTimeout: process.env.NOVITA_LLM_TIMEOUT_MS, + // TogetherAI Keys TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY, TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF, diff --git a/server/storage/models/.gitignore b/server/storage/models/.gitignore index b78160e797..e669b51b27 100644 --- a/server/storage/models/.gitignore +++ b/server/storage/models/.gitignore @@ -2,4 +2,5 @@ Xenova downloaded/* !downloaded/.placeholder openrouter -apipie \ No newline at end of file +apipie +novita \ No newline at end of file diff --git a/server/utils/AiProviders/novita/index.js b/server/utils/AiProviders/novita/index.js new file mode 100644 index 0000000000..f15d20d41f --- /dev/null +++ b/server/utils/AiProviders/novita/index.js @@ -0,0 +1,376 @@ +const { NativeEmbedder } = require("../../EmbeddingEngines/native"); +const { v4: uuidv4 } = require("uuid"); +const { + writeResponseChunk, + clientAbortedHandler, +} = require("../../helpers/chat/responses"); +const fs = require("fs"); +const path = require("path"); +const { safeJsonParse } = require("../../http"); +const cacheFolder = path.resolve( + process.env.STORAGE_DIR + ? path.resolve(process.env.STORAGE_DIR, "models", "novita") + : path.resolve(__dirname, `../../../storage/models/novita`) +); + +class NovitaLLM { + constructor(embedder = null, modelPreference = null) { + if (!process.env.NOVITA_LLM_API_KEY) + throw new Error("No Novita API key was set."); + + const { OpenAI: OpenAIApi } = require("openai"); + this.basePath = "https://api.novita.ai/v3/openai"; + this.openai = new OpenAIApi({ + baseURL: this.basePath, + apiKey: process.env.NOVITA_LLM_API_KEY ?? null, + defaultHeaders: { + "HTTP-Referer": "https://anythingllm.com", + "X-Novita-Source": "anythingllm", + }, + }); + this.model = + modelPreference || + process.env.NOVITA_LLM_MODEL_PREF || + "gryphe/mythomax-l2-13b"; + this.limits = { + history: this.promptWindowLimit() * 0.15, + system: this.promptWindowLimit() * 0.15, + user: this.promptWindowLimit() * 0.7, + }; + + this.embedder = embedder ?? new NativeEmbedder(); + this.defaultTemp = 0.7; + this.timeout = this.#parseTimeout(); + + if (!fs.existsSync(cacheFolder)) + fs.mkdirSync(cacheFolder, { recursive: true }); + this.cacheModelPath = path.resolve(cacheFolder, "models.json"); + this.cacheAtPath = path.resolve(cacheFolder, ".cached_at"); + + this.log(`Loaded with model: ${this.model}`); + } + + log(text, ...args) { + console.log(`\x1b[36m[${this.constructor.name}]\x1b[0m ${text}`, ...args); + } + + /** + * Novita has various models that never return `finish_reasons` and thus leave the stream open + * which causes issues in subsequent messages. This timeout value forces us to close the stream after + * x milliseconds. This is a configurable value via the NOVITA_LLM_TIMEOUT_MS value + * @returns {number} The timeout value in milliseconds (default: 500) + */ + #parseTimeout() { + if (isNaN(Number(process.env.NOVITA_LLM_TIMEOUT_MS))) return 500; + const setValue = Number(process.env.NOVITA_LLM_TIMEOUT_MS); + if (setValue < 500) return 500; + return setValue; + } + + // This checks if the .cached_at file has a timestamp that is more than 1Week (in millis) + // from the current date. If it is, then we will refetch the API so that all the models are up + // to date. 
+ #cacheIsStale() { + const MAX_STALE = 6.048e8; // 1 Week in MS + if (!fs.existsSync(this.cacheAtPath)) return true; + const now = Number(new Date()); + const timestampMs = Number(fs.readFileSync(this.cacheAtPath)); + return now - timestampMs > MAX_STALE; + } + + // The Novita model API has a lot of models, so we cache this locally in the directory + // as if the cache directory JSON file is stale or does not exist we will fetch from API and store it. + // This might slow down the first request, but we need the proper token context window + // for each model and this is a constructor property - so we can really only get it if this cache exists. + // We used to have this as a chore, but given there is an API to get the info - this makes little sense. + async #syncModels() { + if (fs.existsSync(this.cacheModelPath) && !this.#cacheIsStale()) + return false; + + this.log("Model cache is not present or stale. Fetching from Novita API."); + await fetchNovitaModels(); + return; + } + + #appendContext(contextTexts = []) { + if (!contextTexts || !contextTexts.length) return ""; + return ( + "\nContext:\n" + + contextTexts + .map((text, i) => { + return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`; + }) + .join("") + ); + } + + models() { + if (!fs.existsSync(this.cacheModelPath)) return {}; + return safeJsonParse( + fs.readFileSync(this.cacheModelPath, { encoding: "utf-8" }), + {} + ); + } + + streamingEnabled() { + return "streamGetChatCompletion" in this; + } + + static promptWindowLimit(modelName) { + const cacheModelPath = path.resolve(cacheFolder, "models.json"); + const availableModels = fs.existsSync(cacheModelPath) + ? safeJsonParse( + fs.readFileSync(cacheModelPath, { encoding: "utf-8" }), + {} + ) + : {}; + return availableModels[modelName]?.maxLength || 4096; + } + + promptWindowLimit() { + const availableModels = this.models(); + return availableModels[this.model]?.maxLength || 4096; + } + + async isValidChatCompletionModel(model = "") { + await this.#syncModels(); + const availableModels = this.models(); + return availableModels.hasOwnProperty(model); + } + + /** + * Generates appropriate content array for a message + attachments. 
+ * @param {{userPrompt:string, attachments: import("../../helpers").Attachment[]}} + * @returns {string|object[]} + */ + #generateContent({ userPrompt, attachments = [] }) { + if (!attachments.length) { + return userPrompt; + } + + const content = [{ type: "text", text: userPrompt }]; + for (let attachment of attachments) { + content.push({ + type: "image_url", + image_url: { + url: attachment.contentString, + detail: "auto", + }, + }); + } + return content.flat(); + } + + constructPrompt({ + systemPrompt = "", + contextTexts = [], + chatHistory = [], + userPrompt = "", + attachments = [], + }) { + const prompt = { + role: "system", + content: `${systemPrompt}${this.#appendContext(contextTexts)}`, + }; + return [ + prompt, + ...chatHistory, + { + role: "user", + content: this.#generateContent({ userPrompt, attachments }), + }, + ]; + } + + async getChatCompletion(messages = null, { temperature = 0.7 }) { + if (!(await this.isValidChatCompletionModel(this.model))) + throw new Error( + `Novita chat: ${this.model} is not valid for chat completion!` + ); + + const result = await this.openai.chat.completions + .create({ + model: this.model, + messages, + temperature, + }) + .catch((e) => { + throw new Error(e.message); + }); + + if (!result.hasOwnProperty("choices") || result.choices.length === 0) + return null; + return result.choices[0].message.content; + } + + async streamGetChatCompletion(messages = null, { temperature = 0.7 }) { + if (!(await this.isValidChatCompletionModel(this.model))) + throw new Error( + `Novita chat: ${this.model} is not valid for chat completion!` + ); + + const streamRequest = await this.openai.chat.completions.create({ + model: this.model, + stream: true, + messages, + temperature, + }); + return streamRequest; + } + + handleStream(response, stream, responseProps) { + const timeoutThresholdMs = this.timeout; + const { uuid = uuidv4(), sources = [] } = responseProps; + + return new Promise(async (resolve) => { + let fullText = ""; + let lastChunkTime = null; // null when first token is still not received. + + // Establish listener to early-abort a streaming response + // in case things go sideways or the user does not like the response. + // We preserve the generated text but continue as if chat was completed + // to preserve previously generated content. + const handleAbort = () => clientAbortedHandler(resolve, fullText); + response.on("close", handleAbort); + + // NOTICE: Not all Novita models will return a stop reason + // which keeps the connection open and so the model never finalizes the stream + // like the traditional OpenAI response schema does. So in the case the response stream + // never reaches a formal close state we maintain an interval timer that if we go >=timeoutThresholdMs with + // no new chunks then we kill the stream and assume it to be complete. Novita is quite fast + // so this threshold should permit most responses, but we can adjust `timeoutThresholdMs` if + // we find it is too aggressive. + const timeoutCheck = setInterval(() => { + if (lastChunkTime === null) return; + + const now = Number(new Date()); + const diffMs = now - lastChunkTime; + if (diffMs >= timeoutThresholdMs) { + this.log( + `Novita stream did not self-close and has been stale for >${timeoutThresholdMs}ms. 
Closing response stream.` + ); + writeResponseChunk(response, { + uuid, + sources, + type: "textResponseChunk", + textResponse: "", + close: true, + error: false, + }); + clearInterval(timeoutCheck); + response.removeListener("close", handleAbort); + resolve(fullText); + } + }, 500); + + try { + for await (const chunk of stream) { + const message = chunk?.choices?.[0]; + const token = message?.delta?.content; + lastChunkTime = Number(new Date()); + + if (token) { + fullText += token; + writeResponseChunk(response, { + uuid, + sources: [], + type: "textResponseChunk", + textResponse: token, + close: false, + error: false, + }); + } + + if (message.finish_reason !== null) { + writeResponseChunk(response, { + uuid, + sources, + type: "textResponseChunk", + textResponse: "", + close: true, + error: false, + }); + response.removeListener("close", handleAbort); + resolve(fullText); + } + } + } catch (e) { + writeResponseChunk(response, { + uuid, + sources, + type: "abort", + textResponse: null, + close: true, + error: e.message, + }); + response.removeListener("close", handleAbort); + resolve(fullText); + } + }); + } + + // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations + async embedTextInput(textInput) { + return await this.embedder.embedTextInput(textInput); + } + async embedChunks(textChunks = []) { + return await this.embedder.embedChunks(textChunks); + } + + async compressMessages(promptArgs = {}, rawHistory = []) { + const { messageArrayCompressor } = require("../../helpers/chat"); + const messageArray = this.constructPrompt(promptArgs); + return await messageArrayCompressor(this, messageArray, rawHistory); + } +} + +async function fetchNovitaModels() { + return await fetch(`https://api.novita.ai/v3/openai/models`, { + method: "GET", + headers: { + "Content-Type": "application/json", + }, + }) + .then((res) => res.json()) + .then(({ data = [] }) => { + const models = {}; + data.forEach((model) => { + models[model.id] = { + id: model.id, + name: model.title, + organization: + model.id.split("/")[0].charAt(0).toUpperCase() + + model.id.split("/")[0].slice(1), + maxLength: model.context_size, + }; + }); + + // Cache all response information + if (!fs.existsSync(cacheFolder)) + fs.mkdirSync(cacheFolder, { recursive: true }); + fs.writeFileSync( + path.resolve(cacheFolder, "models.json"), + JSON.stringify(models), + { + encoding: "utf-8", + } + ); + fs.writeFileSync( + path.resolve(cacheFolder, ".cached_at"), + String(Number(new Date())), + { + encoding: "utf-8", + } + ); + return models; + }) + .catch((e) => { + console.error(e); + return {}; + }); +} + +module.exports = { + NovitaLLM, + fetchNovitaModels, +}; diff --git a/server/utils/agents/aibitat/index.js b/server/utils/agents/aibitat/index.js index 24f027cff6..d61867f4d4 100644 --- a/server/utils/agents/aibitat/index.js +++ b/server/utils/agents/aibitat/index.js @@ -791,6 +791,8 @@ ${this.getHistory({ to: route.to }) return new Providers.ApiPieProvider({ model: config.model }); case "xai": return new Providers.XAIProvider({ model: config.model }); + case "novita": + return new Providers.NovitaProvider({ model: config.model }); default: throw new Error( diff --git a/server/utils/agents/aibitat/providers/ai-provider.js b/server/utils/agents/aibitat/providers/ai-provider.js index c9925d1cd0..1bbf4a0a4e 100644 --- a/server/utils/agents/aibitat/providers/ai-provider.js +++ b/server/utils/agents/aibitat/providers/ai-provider.js @@ -206,6 +206,14 @@ class Provider { apiKey: process.env.LITE_LLM_API_KEY ?? 
null, ...config, }); + case "novita": + return new ChatOpenAI({ + configuration: { + baseURL: "https://api.novita.ai/v3/openai", + }, + apiKey: process.env.NOVITA_LLM_API_KEY ?? null, + ...config, + }); default: throw new Error(`Unsupported provider ${provider} for this task.`); diff --git a/server/utils/agents/aibitat/providers/index.js b/server/utils/agents/aibitat/providers/index.js index 47e2d87168..c454c39387 100644 --- a/server/utils/agents/aibitat/providers/index.js +++ b/server/utils/agents/aibitat/providers/index.js @@ -18,6 +18,7 @@ const DeepSeekProvider = require("./deepseek.js"); const LiteLLMProvider = require("./litellm.js"); const ApiPieProvider = require("./apipie.js"); const XAIProvider = require("./xai.js"); +const NovitaProvider = require("./novita.js"); module.exports = { OpenAIProvider, @@ -40,4 +41,5 @@ module.exports = { LiteLLMProvider, ApiPieProvider, XAIProvider, + NovitaProvider, }; diff --git a/server/utils/agents/aibitat/providers/novita.js b/server/utils/agents/aibitat/providers/novita.js new file mode 100644 index 0000000000..16251aa25e --- /dev/null +++ b/server/utils/agents/aibitat/providers/novita.js @@ -0,0 +1,115 @@ +const OpenAI = require("openai"); +const Provider = require("./ai-provider.js"); +const InheritMultiple = require("./helpers/classes.js"); +const UnTooled = require("./helpers/untooled.js"); + +/** + * The agent provider for the Novita AI provider. + */ +class NovitaProvider extends InheritMultiple([Provider, UnTooled]) { + model; + + constructor(config = {}) { + const { model = "gryphe/mythomax-l2-13b" } = config; + super(); + const client = new OpenAI({ + baseURL: "https://api.novita.ai/v3/openai", + apiKey: process.env.NOVITA_LLM_API_KEY, + maxRetries: 3, + defaultHeaders: { + "HTTP-Referer": "https://anythingllm.com", + "X-Novita-Source": "anythingllm", + }, + }); + + this._client = client; + this.model = model; + this.verbose = true; + } + + get client() { + return this._client; + } + + async #handleFunctionCallChat({ messages = [] }) { + return await this.client.chat.completions + .create({ + model: this.model, + temperature: 0, + messages, + }) + .then((result) => { + if (!result.hasOwnProperty("choices")) + throw new Error("Novita chat: No results!"); + if (result.choices.length === 0) + throw new Error("Novita chat: No results length!"); + return result.choices[0].message.content; + }) + .catch((_) => { + return null; + }); + } + + /** + * Create a completion based on the received messages. + * + * @param messages A list of messages to send to the API. + * @param functions + * @returns The completion. 
+ */ + async complete(messages, functions = null) { + let completion; + if (functions.length > 0) { + const { toolCall, text } = await this.functionCall( + messages, + functions, + this.#handleFunctionCallChat.bind(this) + ); + + if (toolCall !== null) { + this.providerLog(`Valid tool call found - running ${toolCall.name}.`); + this.deduplicator.trackRun(toolCall.name, toolCall.arguments); + return { + result: null, + functionCall: { + name: toolCall.name, + arguments: toolCall.arguments, + }, + cost: 0, + }; + } + completion = { content: text }; + } + + if (!completion?.content) { + this.providerLog("Will assume chat completion without tool call inputs."); + const response = await this.client.chat.completions.create({ + model: this.model, + messages: this.cleanMsgs(messages), + }); + completion = response.choices[0].message; + } + + // The UnTooled class inherited Deduplicator is mostly useful to prevent the agent + // from calling the exact same function over and over in a loop within a single chat exchange + // _but_ we should enable it to call previously used tools in a new chat interaction. + this.deduplicator.reset("runs"); + return { + result: completion.content, + cost: 0, + }; + } + + /** + * Get the cost of the completion. + * + * @param _usage The completion to get the cost for. + * @returns The cost of the completion. + * Stubbed since Novita AI has no cost basis. + */ + getCost() { + return 0; + } +} + +module.exports = NovitaProvider; diff --git a/server/utils/agents/index.js b/server/utils/agents/index.js index fd7d06e8bd..6b1d42af29 100644 --- a/server/utils/agents/index.js +++ b/server/utils/agents/index.js @@ -173,6 +173,10 @@ class AgentHandler { if (!process.env.XAI_LLM_API_KEY) throw new Error("xAI API Key must be provided to use agents."); break; + case "novita": + if (!process.env.NOVITA_LLM_API_KEY) + throw new Error("Novita API Key must be provided to use agents."); + break; default: throw new Error( @@ -234,6 +238,8 @@ class AgentHandler { return process.env.APIPIE_LLM_MODEL_PREF ?? null; case "xai": return process.env.XAI_LLM_MODEL_PREF ?? "grok-beta"; + case "novita": + return process.env.NOVITA_LLM_MODEL_PREF ?? 
"gryphe/mythomax-l2-13b"; default: return null; } diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js index 7ccbf13c73..1639337695 100644 --- a/server/utils/helpers/customModels.js +++ b/server/utils/helpers/customModels.js @@ -4,6 +4,7 @@ const { perplexityModels } = require("../AiProviders/perplexity"); const { togetherAiModels } = require("../AiProviders/togetherAi"); const { fireworksAiModels } = require("../AiProviders/fireworksAi"); const { ElevenLabsTTS } = require("../TextToSpeech/elevenLabs"); +const { fetchNovitaModels } = require("../AiProviders/novita"); const SUPPORT_CUSTOM_MODELS = [ "openai", "localai", @@ -21,6 +22,7 @@ const SUPPORT_CUSTOM_MODELS = [ "groq", "deepseek", "apipie", + "novita", "xai", ]; @@ -61,6 +63,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) { return await getDeepSeekModels(apiKey); case "apipie": return await getAPIPieModels(apiKey); + case "novita": + return await getNovitaModels(); case "xai": return await getXAIModels(apiKey); default: @@ -362,6 +366,20 @@ async function getOpenRouterModels() { return { models, error: null }; } +async function getNovitaModels() { + const knownModels = await fetchNovitaModels(); + if (!Object.keys(knownModels).length === 0) + return { models: [], error: null }; + const models = Object.values(knownModels).map((model) => { + return { + id: model.id, + organization: model.organization, + name: model.name, + }; + }); + return { models, error: null }; +} + async function getAPIPieModels(apiKey = null) { const knownModels = await fetchApiPieModels(apiKey); if (!Object.keys(knownModels).length === 0) diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js index 84f971cc63..57ec191e77 100644 --- a/server/utils/helpers/index.js +++ b/server/utils/helpers/index.js @@ -165,6 +165,9 @@ function getLLMProvider({ provider = null, model = null } = {}) { case "apipie": const { ApiPieLLM } = require("../AiProviders/apipie"); return new ApiPieLLM(embedder, model); + case "novita": + const { NovitaLLM } = require("../AiProviders/novita"); + return new NovitaLLM(embedder, model); case "xai": const { XAiLLM } = require("../AiProviders/xai"); return new XAiLLM(embedder, model); @@ -297,6 +300,9 @@ function getLLMProviderClass({ provider = null } = {}) { case "apipie": const { ApiPieLLM } = require("../AiProviders/apipie"); return ApiPieLLM; + case "novita": + const { NovitaLLM } = require("../AiProviders/novita"); + return NovitaLLM; case "xai": const { XAiLLM } = require("../AiProviders/xai"); return XAiLLM; diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js index 6081159a52..676eb812f5 100644 --- a/server/utils/helpers/updateENV.js +++ b/server/utils/helpers/updateENV.js @@ -395,6 +395,20 @@ const KEY_MAPPING = { checks: [], }, + // Novita Options + NovitaLLMApiKey: { + envKey: "NOVITA_LLM_API_KEY", + checks: [isNotEmpty], + }, + NovitaLLMModelPref: { + envKey: "NOVITA_LLM_MODEL_PREF", + checks: [isNotEmpty], + }, + NovitaLLMTimeout: { + envKey: "NOVITA_LLM_TIMEOUT_MS", + checks: [], + }, + // Groq Options GroqApiKey: { envKey: "GROQ_API_KEY", @@ -655,6 +669,7 @@ function supportedLLM(input = "") { "huggingface", "perplexity", "openrouter", + "novita", "groq", "koboldcpp", "textgenwebui",