diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte
index 6432a4a777508..12545e5526ad9 100644
--- a/src/routes/+layout.svelte
+++ b/src/routes/+layout.svelte
@@ -6,11 +6,11 @@
   import { fade } from 'svelte/transition';
   import { page } from '$app/stores';
   export let data;
+  const url = 'https://onnxruntime.ai';
   {@html oneLight}
-  ONNX Runtime | {data.pathname == '/'
     ? 'Home'
@@ -22,24 +22,24 @@
     ? 'Home'
     : data.pathname.substring(1).charAt(0).toUpperCase() + data.pathname.substring(2)}" />
-  <meta
-    name="description"
-    content="Cross-platform accelerated machine learning. Built-in optimizations speed up training and inferencing with your existing technology stack."
-  />
+  <meta name="title" content={"ONNX Runtime | " + (data.pathname == '/'
+    ? 'Home'
+    : data.pathname.substring(1).charAt(0).toUpperCase() + data.pathname.substring(2))} />
+  <meta property="twitter:title" content={"ONNX Runtime | " + (data.pathname == '/'
+    ? 'Home'
+    : data.pathname.substring(1).charAt(0).toUpperCase() + data.pathname.substring(2))} />
+  <meta property="twitter:url" content={url + data.pathname} />
+  <meta property="og:url" content={url + data.pathname} />
+  <meta http-equiv="X-UA-Compatible" content="ie=edge" />
   <meta charset="UTF-8" />
   <meta name="viewport" content="width=device-width, initial-scale=1.0" />
   <meta name="theme-color" content="#B2B2B2" />
   <meta name="msapplication-TileColor" content="#B2B2B2" />
   <meta name="theme-color" content="#B2B2B2" />
-  <!-- OpenGraph meta tags -->
-  <meta
-    property="og:description"
-    content="Cross-platform accelerated machine learning. Built-in optimizations speed up training and inferencing with your existing technology stack."
-  />
-  <meta property="og:image" content="https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png" />
-  <meta property="og:url" content="https://onnxruntime.ai" />
   <meta property="og:type" content="website" />
+
+
 </svelte:head>
 <div class="selection:bg-info">
   {#if !$page.url.pathname.startsWith('/blogs/')}
diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte
index 08b50564bb4c0..84249666de438 100644
--- a/src/routes/+page.svelte
+++ b/src/routes/+page.svelte
@@ -42,7 +42,27 @@
       }
     });
   });
+  let description = 'Cross-platform accelerated machine learning. Built-in optimizations speed up training and inferencing with your existing technology stack.';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnx runtime, onnx, onnxruntime, onnx runtime performance, onnx runtime training, onnx runtime inference, onnx runtime cross-platform, onnx runtime cross platform, onnx runtime crossplatform, onnx runtime training and inference, onnx runtime training & inference, onnx runtime training inference';
 </script>
+<svelte:head>
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
+</svelte:head>
 <Hero />
 <Customers />
diff --git a/src/routes/blogs/+page.svelte b/src/routes/blogs/+page.svelte
index 2fac1a18d20a4..43f3197318b23 100644
--- a/src/routes/blogs/+page.svelte
+++ b/src/routes/blogs/+page.svelte
@@ -373,13 +373,26 @@
       link: 'https://www.linkedin.com/pulse/hcm-sentence-similarity-language-model-using-java-jonathon-palmieri-tdlpc%3FtrackingId=CN2PPVO4Toqh8r6JsAYMIw%253D%253D/?trackingId=ByNomo0pQFKM%2F%2BWEknVs7Q%3D%3D'
     }
   ];
+  let description = 'ONNX Runtime Blogs - your source for staying updated on the latest ONNX Runtime updates and information.';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime blogs, onnx runtime community blogs, onnx runtime community posts, onnx runtime community announcements';
 </script>
-<svelte:head>
-  <meta
-    name="description"
-    content="ONNX Runtime Blogs - your source for staying updated on the latest ONNX Runtime updated and information."
-  />
+<svelte:head>
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
 <div class="container mx-auto">
   <div class="flex">
diff --git a/src/routes/blogs/post.svelte b/src/routes/blogs/post.svelte
index edeb14e0292b4..1b024eb5b2e40 100644
--- a/src/routes/blogs/post.svelte
+++ b/src/routes/blogs/post.svelte
@@ -35,10 +35,6 @@
    * @type {any}
    */
   export let url;
-  /**
-   * @type {any}
-   */
-  export let robots;
   /**
    * @type {any}
    */
@@ -53,22 +49,27 @@
 </script>
 <svelte:head>
-  <meta name="title" content={title} />
-  <meta name="description" content={description} />
-  <meta name="keywords" content={keywords} />
-  <meta name="author" content={authors.join(', ')} />
-  <meta name="date" content={date} />
-  <meta name="og:title" content={title} />
-  <meta name="og:description" content={description} />
-  <meta name="og:type" content="article" />
-  <meta name="og:url" content={url} />
-  <meta name="twitter:title" content={title} />
-  <meta name="twitter:description" content={description} />
-  <meta name="image" content={image} />
-  <meta name="og:image" content={imageSquare ? imageSquare : image} />
-  <meta name="twitter:card" content={imageSquare ? imageSquare : image} />
-  <meta name="twitter:image" content={image} />
-  <meta name="robots" content={robots} />
+  <!-- Primary Meta Tags -->
+  <title>ONNX Runtime | {title}</title>
+  <meta name="title" content={title} />
+  <meta name="description" content={description} />
+  <meta name="keywords" content={keywords} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="date" content={date} />
+  <meta name="image" content={image} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:type" content="article" />
+  <meta property="og:url" content={url} />
+  <meta property="og:title" content={title} />
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={imageSquare ? imageSquare : image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:url" content={url} />
+  <meta property="twitter:title" content={title} />
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare ? imageSquare : image} />
diff --git a/src/routes/components/winarm.svelte b/src/routes/components/winarm.svelte
index 830f228b67005..79274200ccc75 100644
--- a/src/routes/components/winarm.svelte
+++ b/src/routes/components/winarm.svelte
@@ -1,6 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+

ONNX Runtime + Windows Dev Kit 2023 = NPU powered AI

diff --git a/src/routes/events/+page.svelte b/src/routes/events/+page.svelte
index 58c1e56c60225..80bfb1f22e45a 100644
--- a/src/routes/events/+page.svelte
+++ b/src/routes/events/+page.svelte
@@ -31,14 +31,28 @@
       link: 'https://www.youtube-nocookie.com/embed/lOp8WK0fa94?si=rHn8fxAii3Ksr8Ey'
     }
   ];
+  let description = 'ONNX Runtime Events - information on past and future ONNX Runtime Events.';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime events, onnx runtime community events, onnx runtime community meetup, onnx runtime community meetups';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>

Events

diff --git a/src/routes/generative-ai/+page.svelte b/src/routes/generative-ai/+page.svelte
index 72fded08bc7e4..6d779f90d3224 100644
--- a/src/routes/generative-ai/+page.svelte
+++ b/src/routes/generative-ai/+page.svelte
@@ -7,7 +7,26 @@
   const imgalt = 'ONNX Runtime Logo';
   import stablediffusion1 from '../../images/StableDiffusion1.webp';
   import stablediffusion2 from '../../images/StableDiffusion2.webp';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime generative ai, onnx runtime generative ai models, onnx runtime generative ai deployment, onnx runtime generative ai performance, onnx runtime generative ai time to market, onnx runtime generative ai deploy anywhere, onnx runtime generative ai boost performance, onnx runtime generative ai improve time to market, onnx runtime generative ai production ready, onnx runtime generative ai lower latency, onnx runtime generative ai higher throughput, onnx runtime generative ai get innovations into production faster, onnx runtime generative ai testimonials, onnx runtime generative ai performance enhancements';
 </script>
-
-
+<svelte:head>
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
+</svelte:head>
diff --git a/src/routes/huggingface/+page.svelte b/src/routes/huggingface/+page.svelte
index 2560e0038232f..513f2acb2ba60 100644
--- a/src/routes/huggingface/+page.svelte
+++ b/src/routes/huggingface/+page.svelte
@@ -9,13 +9,25 @@
     'ONNX Runtime can be used to accelerate well over 130,000 of the models available on Hugging Face.';
   const imgsrc = 'onnxruntimelogo';
   const imgalt = 'ONNX Runtime Logo';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime hugging face, onnx runtime hugging face models, onnx runtime hugging face deployment, onnx runtime hugging face performance, onnx runtime hugging face time to market, onnx runtime hugging face deploy anywhere, onnx runtime hugging face boost performance, onnx runtime hugging face improve time to market, onnx runtime hugging face production ready, onnx runtime hugging face lower latency, onnx runtime hugging face higher throughput, onnx runtime hugging face get innovations into production faster, onnx runtime hugging face testimonials, onnx runtime hugging face performance enhancements';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
diff --git a/src/routes/inference/+page.svelte b/src/routes/inference/+page.svelte
index ad2d0e95970c0..b9c45332b2873 100644
--- a/src/routes/inference/+page.svelte
+++ b/src/routes/inference/+page.svelte
@@ -9,13 +9,25 @@
     'ONNX Runtime provides a performant solution to inference models from varying source frameworks (PyTorch, Hugging Face, TensorFlow) on different software and hardware stacks. ONNX Runtime Inference takes advantage of hardware accelerators, supports APIs in multiple languages (Python, C++, C#, C, Java, and more), and works on cloud servers, edge and mobile devices, and in web browsers.';
   const imgsrc = 'onnxruntimelogo';
   const imgalt = 'ONNX Runtime Logo';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime inferencing, onnx runtime inferencing models, onnx runtime inferencing deployment, onnx runtime inferencing performance, onnx runtime inferencing time to market, onnx runtime inferencing deploy anywhere, onnx runtime inferencing boost performance, onnx runtime inferencing improve time to market, onnx runtime inferencing production ready, onnx runtime inferencing lower latency, onnx runtime inferencing higher throughput, onnx runtime inferencing get innovations into production faster, onnx runtime inferencing testimonials, onnx runtime inferencing performance enhancements';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
diff --git a/src/routes/pytorch/+page.svelte b/src/routes/pytorch/+page.svelte
index 1366434f712a9..ebfc45c009f31 100644
--- a/src/routes/pytorch/+page.svelte
+++ b/src/routes/pytorch/+page.svelte
@@ -8,19 +8,32 @@
   import ImagePt4 from '../../images/undraw/image_PT4.svelte';
   import ImagePt5 from '../../images/undraw/image_PT5.svelte';
   import prodready from '../../images/prod-ready.png';
+  const title = 'PyTorch + ONNX Runtime';
   const description =
     'PyTorch leads the deep learning landscape with its readily digestible and flexible API; the large number of ready-made models available, particularly in the natural language (NLP) domain; as well as its domain specific libraries.';
   const imgsrc = 'onnxruntimelogo';
   const imgalt = 'ONNX Runtime Logo';
   const pythonCode = 'import torch\n\ntorch.onnx.export(\nmodel,\ninputs,\n"model.onnx")';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime pytorch, onnx runtime pytorch models, onnx runtime pytorch deployment, onnx runtime pytorch performance, onnx runtime pytorch time to market, onnx runtime pytorch deploy anywhere, onnx runtime pytorch boost performance, onnx runtime pytorch improve time to market, onnx runtime pytorch production ready, onnx runtime pytorch lower latency, onnx runtime pytorch higher throughput, onnx runtime pytorch get innovations into production faster, onnx runtime pytorch testimonials, onnx runtime pytorch performance enhancements';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
diff --git a/src/routes/testimonials/+page.svelte b/src/routes/testimonials/+page.svelte
index 8c3a9d05e088f..957f6b63b0ad8 100644
--- a/src/routes/testimonials/+page.svelte
+++ b/src/routes/testimonials/+page.svelte
@@ -278,10 +278,26 @@
       imgalt: 'Xilinx logo'
     }
   ];
+  let description = "ONNX Runtime's many consumer testimonials. Hear from some of the products and companies using ONNX Runtime.";
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnx runtime testimonials';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
diff --git a/src/routes/training/+page.svelte b/src/routes/training/+page.svelte
index 343c79f61e15d..4a79374814646 100644
--- a/src/routes/training/+page.svelte
+++ b/src/routes/training/+page.svelte
@@ -10,13 +10,25 @@
     'ONNX Runtime can be used to accelerate both large model training and on-device training.';
   const imgsrc = 'onnxruntimelogo';
   const imgalt = 'ONNX Runtime Logo';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnx runtime, onnx, onnxruntime, onnx runtime performance, onnx runtime training, onnx runtime cross-platform, onnx runtime cross platform, onnx runtime crossplatform';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>
diff --git a/src/routes/windows/+page.svelte b/src/routes/windows/+page.svelte
index b6c7ab8703e94..d816ac5255624 100644
--- a/src/routes/windows/+page.svelte
+++ b/src/routes/windows/+page.svelte
@@ -6,13 +6,25 @@
     'ONNX Runtime makes it easier for you to create amazing AI experiences on Windows with less engineering effort and better performance.';
   const imgsrc = 'onnxruntimelogo';
   const imgalt = 'ONNX Runtime Logo';
+  let image = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let imageSquare = 'https://i.ibb.co/0YBy62j/ORT-icon-for-light-bg.png';
+  let authors = [''];
+  let keywords = 'onnxruntime, onnx runtime windows, onnx runtime windows models, onnx runtime windows deployment, onnx runtime windows performance, onnx runtime windows time to market, onnx runtime windows deploy anywhere, onnx runtime windows boost performance, onnx runtime windows improve time to market, onnx runtime windows production ready, onnx runtime windows lower latency, onnx runtime windows higher throughput, onnx runtime windows get innovations into production faster, onnx runtime windows testimonials, onnx runtime windows performance enhancements';
 </script>
 <svelte:head>
-
-
+  <!-- Dynamic meta tags -->
+  <meta name="description" content={description} />
+  <meta name="image" content={image} />
+  <meta name="author" content={authors.join(', ')} />
+  <meta name="keywords" content={keywords} />
+  <!-- Open Graph / Facebook -->
+  <meta property="og:description" content={description} />
+  <meta property="og:image" content={image} />
+
+  <!-- Twitter -->
+  <meta property="twitter:description" content={description} />
+  <meta property="twitter:image" content={image} />
+  <meta property="twitter:card" content={imageSquare} />
 </svelte:head>