From 5c8a45d72b6332baef5fefd2a5a471adb5959779 Mon Sep 17 00:00:00 2001
From: lena-larionova
Date: Thu, 29 Aug 2024 11:37:28 -0700
Subject: [PATCH] initial draft for AI proxy advanced

---
 .../kong-inc/ai-proxy-advanced/_changelog.md  |   6 +
 .../ai-proxy-advanced/_metadata/_index.yml    |  20 ++
 .../how-to/_load-balancing.md                 |   5 +
 .../how-to/_semantic-routing.md               |   5 +
 .../ai-proxy-advanced/overview/_index.md      | 199 ++++++++++++++++++
 .../kong-inc/ai-proxy-advanced/versions.yml   |   3 +
 app/_src/gateway/ai-gateway/index.md          |   4 +
 .../icons/hub/kong-inc_ai-proxy-advanced.png  | Bin 0 -> 13419 bytes
 8 files changed, 242 insertions(+)
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/_changelog.md
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/_metadata/_index.yml
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/how-to/_load-balancing.md
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/how-to/_semantic-routing.md
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/overview/_index.md
 create mode 100644 app/_hub/kong-inc/ai-proxy-advanced/versions.yml
 create mode 100644 app/assets/images/icons/hub/kong-inc_ai-proxy-advanced.png

diff --git a/app/_hub/kong-inc/ai-proxy-advanced/_changelog.md b/app/_hub/kong-inc/ai-proxy-advanced/_changelog.md
new file mode 100644
index 000000000000..cc5e491f0cfc
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/_changelog.md
@@ -0,0 +1,6 @@
+## Changelog
+
+### {{site.base_gateway}} 3.8.x
+
+* Introduced the AI Proxy Advanced plugin, which can mediate request and response formats, as well as authentication between users and LLM services.
+This plugin adds load balancing and semantic routing capabilities on top of the regular AI Proxy plugin.
diff --git a/app/_hub/kong-inc/ai-proxy-advanced/_metadata/_index.yml b/app/_hub/kong-inc/ai-proxy-advanced/_metadata/_index.yml
new file mode 100644
index 000000000000..f6b0113b294f
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/_metadata/_index.yml
@@ -0,0 +1,20 @@
+name: AI Proxy Advanced
+search_aliases:
+  - ai
+  - llm
+  - artificial
+  - intelligence
+  - language
+  - model
+dbless_compatible: yes
+free: false
+enterprise: true
+konnect: true
+network_config_opts: All
+notes: --
+categories:
+  - ai
+weight: 100
+publisher: Kong Inc.
+desc: Route across different LLMs and models using advanced load balancing algorithms, including semantic routing
+type: plugin
\ No newline at end of file
diff --git a/app/_hub/kong-inc/ai-proxy-advanced/how-to/_load-balancing.md b/app/_hub/kong-inc/ai-proxy-advanced/how-to/_load-balancing.md
new file mode 100644
index 000000000000..95820470cd64
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/how-to/_load-balancing.md
@@ -0,0 +1,5 @@
+---
+nav_title: Load Balancing
+title: Load Balance between LLM targets
+---
+
diff --git a/app/_hub/kong-inc/ai-proxy-advanced/how-to/_semantic-routing.md b/app/_hub/kong-inc/ai-proxy-advanced/how-to/_semantic-routing.md
new file mode 100644
index 000000000000..4ad633a7890f
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/how-to/_semantic-routing.md
@@ -0,0 +1,5 @@
+---
+nav_title: Semantic Routing
+title: Semantic Routing
+---
+
diff --git a/app/_hub/kong-inc/ai-proxy-advanced/overview/_index.md b/app/_hub/kong-inc/ai-proxy-advanced/overview/_index.md
new file mode 100644
index 000000000000..212c566c5f10
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/overview/_index.md
@@ -0,0 +1,199 @@
+---
+nav_title: Overview
+---
+
+The AI Proxy Advanced plugin lets you transform and proxy requests to multiple AI providers and models at the same time.
+This lets you set up load balancing between targets.
+
+The plugin accepts requests in one of a few defined and standardized formats, translates them to the configured target format, and then transforms the response back into a standard format.
+
+The following table describes which providers and request types the AI Proxy Advanced plugin supports:
+
+| Provider | Chat | Completion | Streaming |
+| -------- | ---- | ---------- | --------- |
+| OpenAI (GPT-4, GPT-3.5) | ✅ | ✅ | ✅ |
+| OpenAI (GPT-4o and Multi-Modal) | ✅ | ✅ | ✅ |
+| Cohere | ✅ | ✅ | ✅ |
+| Azure | ✅ | ✅ | ✅ |
+| Anthropic | ✅ | ❌ | Only chat type |
+| Mistral (mistral.ai, OpenAI, raw, and OLLAMA formats) | ✅ | ✅ | ✅ |
+| Llama2 (raw, OLLAMA, and OpenAI formats) | ✅ | ✅ | ✅ |
+| Llama3 (OLLAMA and OpenAI formats) | ✅ | ✅ | ✅ |
+
+## How it works
+
+The AI Proxy Advanced plugin will mediate the following for you:
+
+* Request and response formats appropriate for the configured `provider` and `route_type`
+* The following service request coordinates (unless the model is self-hosted):
+  * Protocol
+  * Host name
+  * Port
+  * Path
+  * HTTP method
+* Authentication on behalf of the Kong API consumer
+* Decorating the request with parameters from the `config.options` block, appropriate for the chosen provider
+* Recording of usage statistics of the configured LLM provider and model into your selected [Kong log](/hub/?category=logging) plugin output
+* Optionally, recording all post-transformation request and response messages from users, to and from the configured LLM
+* Fulfillment of requests to self-hosted models, based on select supported format transformations
+
+Flattening all of the provider formats allows you to standardize the manipulation of the data before and after transmission. It also allows you to provide a choice of LLMs to Kong consumers, using consistent request and response formats, regardless of the backend provider or model.
+
+This plugin currently only supports REST-based full text responses.
+
+## Load balancing
+
+This plugin supports the following load balancing algorithms:
+
+* lowest-usage
+* round-robin (weighted)
+* consistent-hashing (sticky sessions based on a given header value)
+
+## Semantic routing
+
+Semantic routing distributes requests based on the similarity between the content of the incoming request and a description you provide for each model, so each prompt is handled by the model best suited to it. The configuration sketch below shows how targets and a balancing algorithm fit together.
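+
+The sketch is illustrative only: it load balances chat traffic across two targets using the weighted round-robin algorithm. The `balancer`, `targets`, and per-target `model`, `auth`, and `weight` field names are assumptions to verify against the [configuration reference](/hub/kong-inc/ai-proxy-advanced/configuration/), and the API keys are placeholders.
+
+```yaml
+# Illustrative sketch only; check field names against the configuration reference.
+plugins:
+  - name: ai-proxy-advanced
+    config:
+      balancer:
+        # One of the algorithms listed above: round-robin (weighted),
+        # lowest-usage, or consistent-hashing.
+        algorithm: round-robin
+      targets:
+        # Roughly 80% of chat requests go to OpenAI...
+        - route_type: llm/v1/chat
+          weight: 80
+          auth:
+            header_name: Authorization
+            header_value: Bearer <OPENAI_API_KEY>   # placeholder
+          model:
+            provider: openai
+            name: gpt-4o
+        # ...and the remaining 20% go to a Mistral endpoint.
+        - route_type: llm/v1/chat
+          weight: 20
+          auth:
+            header_name: Authorization
+            header_value: Bearer <MISTRAL_API_KEY>  # placeholder
+          model:
+            provider: mistral
+            name: mistral-tiny
+            options:
+              # Mistral upstream URLs are user-defined (see the tables below).
+              upstream_url: https://api.mistral.ai/v1/chat/completions
+```
+
+Swapping `algorithm` for another supported value changes how requests are spread across the same targets: with `consistent-hashing`, requests carrying the same value in the configured header stick to the same target, and with `lowest-usage`, traffic is steered toward the target with the least recorded usage.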
+
+## Request and response formats
+
+The plugin's [`config.route_type`](/hub/kong-inc/ai-proxy/configuration/#config-route_type) should be set based on the target upstream endpoint and model, according to this capability matrix:
+
+| Provider Name | Provider Upstream Path | Kong `route_type` | Example Model Name |
+|---------------|----------------------------------------------------------|----------------------|------------------------|
+| OpenAI | `/v1/chat/completions` | `llm/v1/chat` | gpt-4 |
+| OpenAI | `/v1/completions` | `llm/v1/completions` | gpt-3.5-turbo-instruct |
+| Cohere | `/v1/chat` | `llm/v1/chat` | command |
+| Cohere | `/v1/generate` | `llm/v1/completions` | command |
+| Azure | `/openai/deployments/{deployment_name}/chat/completions` | `llm/v1/chat` | gpt-4 |
+| Azure | `/openai/deployments/{deployment_name}/completions` | `llm/v1/completions` | gpt-3.5-turbo-instruct |
+{% if_version gte:3.7.x %}
+| Anthropic | `/v1/messages` | `llm/v1/chat` | claude-2.1 |
+{% endif_version %}
+{% if_version lte:3.6.x %}
+| Anthropic | `/v1/complete` | `llm/v1/chat` | claude-2.1 |
+{% endif_version %}
+| Anthropic | `/v1/complete` | `llm/v1/completions` | claude-2.1 |
+| Llama2 | User-defined | `llm/v1/chat` | User-defined |
+| Llama2 | User-defined | `llm/v1/completions` | User-defined |
+| Mistral | User-defined | `llm/v1/chat` | User-defined |
+| Mistral | User-defined | `llm/v1/completions` | User-defined |
+
+The following upstream URL patterns are used:
+
+| Provider | URL |
+|-----------|--------------------------------------------------------------------------------------------------------|
+| OpenAI | `https://api.openai.com:443/{route_type_path}` |
+| Cohere | `https://api.cohere.com:443/{route_type_path}` |
+| Azure | `https://{azure_instance}.openai.azure.com:443/openai/deployments/{deployment_name}/{route_type_path}` |
+| Anthropic | `https://api.anthropic.com:443/{route_type_path}` |
+| Llama2 | As defined in `config.model.options.upstream_url` |
+| Mistral | As defined in `config.model.options.upstream_url` |
+
+{:.important}
+> While only the **Llama2** and **Mistral** models are classed as self-hosted, the target URL can be overridden for any of the supported providers.
+> For example, a self-hosted or otherwise OpenAI-compatible endpoint can be called by setting the same [`config.model.options.upstream_url`](/hub/kong-inc/ai-proxy/configuration/#config-model-options-upstream_url) plugin option.
+
+### Input formats
+
+Kong will mediate the request and response format based on the selected [`config.provider`](/hub/kong-inc/ai-proxy/configuration/#config-provider) and [`config.route_type`](/hub/kong-inc/ai-proxy/configuration/#config-route_type), as outlined in the table above.
+
+The plugin accepts the following input formats, standardized across all providers. The `config.route_type` must be set to match the request and response format shown in the examples:
+
+{% navtabs %}
+{% navtab llm/v1/chat %}
+```json
+{
+    "messages": [
+        {
+            "role": "system",
+            "content": "You are a scientist."
+        },
+        {
+            "role": "user",
+            "content": "What is the theory of relativity?"
+        }
+    ]
+}
+```
+{% endnavtab %}
+
+{% navtab llm/v1/completions %}
+```json
+{
+    "prompt": "You are a scientist. What is the theory of relativity?"
+}
+```
+{% endnavtab %}
+{% endnavtabs %}
+
+### Response formats
+
+Conversely, the response formats are also transformed to a standard format across all providers:
+
+{% navtabs %}
+{% navtab llm/v1/chat %}
+```json
+{
+    "choices": [
+        {
+            "finish_reason": "stop",
+            "index": 0,
+            "message": {
+                "content": "The theory of relativity is a...",
+                "role": "assistant"
+            }
+        }
+    ],
+    "created": 1707769597,
+    "id": "chatcmpl-ID",
+    "model": "gpt-4-0613",
+    "object": "chat.completion",
+    "usage": {
+        "completion_tokens": 5,
+        "prompt_tokens": 26,
+        "total_tokens": 31
+    }
+}
+```
+{% endnavtab %}
+
+{% navtab llm/v1/completions %}
+
+```json
+{
+    "choices": [
+        {
+            "finish_reason": "stop",
+            "index": 0,
+            "text": "The theory of relativity is a..."
+        }
+    ],
+    "created": 1707769597,
+    "id": "cmpl-ID",
+    "model": "gpt-3.5-turbo-instruct",
+    "object": "text_completion",
+    "usage": {
+        "completion_tokens": 10,
+        "prompt_tokens": 7,
+        "total_tokens": 17
+    }
+}
+```
+{% endnavtab %}
+{% endnavtabs %}
+
+The request and response formats are loosely based on OpenAI.
+See the [sample OpenAPI specification](https://github.com/kong/kong/blob/master/spec/fixtures/ai-proxy/oas.yaml) for more detail on the supported formats.
+
+## Get started with the AI Proxy Advanced plugin
+
+* [Configuration reference](/hub/kong-inc/ai-proxy-advanced/configuration/)
+* [Basic configuration example](/hub/kong-inc/ai-proxy-advanced/how-to/basic-example/)
+* Learn how to use the plugin with different providers:
+  * [LLM provider integration guides](/hub/kong-inc/ai-proxy/how-to/llm-provider-integration-guides/)
+
+### All AI Gateway plugins
+
+{% include_cached /md/ai-plugins-links.md release=page.release %}
+
diff --git a/app/_hub/kong-inc/ai-proxy-advanced/versions.yml b/app/_hub/kong-inc/ai-proxy-advanced/versions.yml
new file mode 100644
index 000000000000..629ede91a3f1
--- /dev/null
+++ b/app/_hub/kong-inc/ai-proxy-advanced/versions.yml
@@ -0,0 +1,3 @@
+strategy: gateway
+releases:
+  minimum_version: '3.8.x'
\ No newline at end of file
diff --git a/app/_src/gateway/ai-gateway/index.md b/app/_src/gateway/ai-gateway/index.md
index 8225b63cea15..106566089722 100644
--- a/app/_src/gateway/ai-gateway/index.md
+++ b/app/_src/gateway/ai-gateway/index.md
@@ -72,8 +72,12 @@ a provider-agnostic API. This normalized API layer affords developers and organi
 * Request routing can be dynamic, allowing AI usage to be optimized based on various metrics: cost, usage, response accuracy, and so on.
 * AI services can be used by other {{site.base_gateway}} plugins to augment non-AI API traffic
 
+{% if_version lte:3.7.x %}
 This core AI Gateway feature is enabled with the [AI Proxy](/hub/kong-inc/ai-proxy/) plugin, which is deployed by default
 in the getting started script referenced above.
+{% elseif_version gte:3.8.x %}
+This core AI Gateway feature is enabled with the [AI Proxy](/hub/kong-inc/ai-proxy/) and [AI Proxy Advanced](/hub/kong-inc/ai-proxy-advanced/) plugins. The getting started script referenced above uses the basic AI Proxy plugin. For load balancing and semantic routing capabilities, check out the AI Proxy Advanced plugin instead.
+{% endif_version %}
 
 The AI Proxy supports two types of LLM requests:
 
diff --git a/app/assets/images/icons/hub/kong-inc_ai-proxy-advanced.png b/app/assets/images/icons/hub/kong-inc_ai-proxy-advanced.png
new file mode 100644
index 0000000000000000000000000000000000000000..328252d7ac1675327cd34f47bfa2d41737cf7b34
GIT binary patch
literal 13419