From 36bfdb89c429ae50fa75039cd93fbc1697f496d4 Mon Sep 17 00:00:00 2001 From: khs1994 Date: Thu, 8 Feb 2024 08:58:55 +0800 Subject: [PATCH] Sync from docker/docker.github.io@9969d8e by PCIT --- SUMMARY.md | 7 + assets/css/typography.css | 41 ++ content/admin/company/new-company.md | 4 +- content/admin/organization/_index.md | 4 +- .../admin/organization/general-settings.md | 17 +- .../compose/multiple-compose-files/extends.md | 34 ++ .../compose/multiple-compose-files/include.md | 41 ++ content/engine/release-notes/25.0.md | 37 ++ content/guides/use-case/nlp/_index.md | 33 ++ .../use-case/nlp/language-translation.md | 339 ++++++++++++++ .../use-case/nlp/named-entity-recognition.md | 349 ++++++++++++++ .../guides/use-case/nlp/sentiment-analysis.md | 359 +++++++++++++++ .../use-case/nlp/text-classification.md | 431 ++++++++++++++++++ .../guides/use-case/nlp/text-summarization.md | 354 ++++++++++++++ content/scout/policy/_index.md | 12 +- content/trusted-content/insights-analytics.md | 55 ++- data/redirects.yml | 3 +- data/toc.yaml | 16 +- layouts/_default/_markup/render-image.html | 2 + layouts/partials/head.html | 5 +- layouts/shortcodes/inline-image.html | 1 + static/assets/fonts/RobotoFlex.woff2 | Bin 0 -> 33500 bytes static/assets/fonts/RobotoFlex_LICENSE.txt | 93 ++++ static/assets/fonts/RobotoMono-Italic.woff2 | Bin 0 -> 14400 bytes static/assets/fonts/RobotoMono-Regular.woff2 | Bin 0 -> 13128 bytes static/assets/fonts/RobotoMono_LICENSE.txt | 202 ++++++++ tailwind.config.js | 2 +- 27 files changed, 2408 insertions(+), 33 deletions(-) create mode 100644 content/guides/use-case/nlp/_index.md create mode 100644 content/guides/use-case/nlp/language-translation.md create mode 100644 content/guides/use-case/nlp/named-entity-recognition.md create mode 100644 content/guides/use-case/nlp/sentiment-analysis.md create mode 100644 content/guides/use-case/nlp/text-classification.md create mode 100644 content/guides/use-case/nlp/text-summarization.md create mode 100644 static/assets/fonts/RobotoFlex.woff2 create mode 100644 static/assets/fonts/RobotoFlex_LICENSE.txt create mode 100644 static/assets/fonts/RobotoMono-Italic.woff2 create mode 100644 static/assets/fonts/RobotoMono-Regular.woff2 create mode 100644 static/assets/fonts/RobotoMono_LICENSE.txt diff --git a/SUMMARY.md b/SUMMARY.md index 29a43acb7..09fe3a156 100644 --- a/SUMMARY.md +++ b/SUMMARY.md @@ -942,6 +942,13 @@ * [Use Containers For Generative AI Development](content/guides/use-case/genai-pdf-bot/develop.md) - Genai Video Bot * [Gen AI Video Transcription And Chat](content/guides/use-case/genai-video-bot/_index.md) + - Nlp + * [Natural Language Processing Guides](content/guides/use-case/nlp/_index.md) + * [Build A Language Translation App](content/guides/use-case/nlp/language-translation.md) + * [Build A Named Entity Recognition App](content/guides/use-case/nlp/named-entity-recognition.md) + * [Build A Sentiment Analysis App](content/guides/use-case/nlp/sentiment-analysis.md) + * [Build A Text Recognition App](content/guides/use-case/nlp/text-classification.md) + * [Build A Text Summarization App](content/guides/use-case/nlp/text-summarization.md) - Walkthroughs * [Access A Local Folder From A Container](content/guides/walkthroughs/access-local-folder.md) * [Containerize Your Application](content/guides/walkthroughs/containerize-your-app.md) diff --git a/assets/css/typography.css b/assets/css/typography.css index c2c81299a..05384ca74 100644 --- a/assets/css/typography.css +++ b/assets/css/typography.css @@ -1,4 +1,45 @@ @layer base { + 
+ /* + * Font faces for Roboto Flex and Roboto Mono. + * + * - https://fonts.google.com/specimen/Roboto+Flex + * - https://fonts.google.com/specimen/Roboto+Mono + * + * The TTF fonts have been compressed to woff2, + * preserving the latin character subset. + * + * */ + + /* Roboto Flex */ + @font-face { + font-family: 'Roboto Flex'; + src: url('/assets/fonts/RobotoFlex.woff2') format('woff2 supports variations'), + url('/assets/fonts/RobotoFlex.woff2') format('woff2-variations'); + font-weight: 100 1000; /* Range of weights Roboto Flex supports */ + font-stretch: 25% 151%; /* Range of width Roboto Flex supports */ + font-style: oblique 0deg 10deg; /* Range of oblique angle Roboto Flex supports */ + font-display: fallback; + } + + /* Roboto Mono */ + @font-face { + font-family: 'Roboto Mono'; + src: url('/assets/fonts/RobotoMono-Regular.woff2') format('woff2'); + font-weight: 100 700; /* Define the range of weight the variable font supports */ + font-style: normal; + font-display: fallback; + } + + /* Roboto Mono Italic */ + @font-face { + font-family: 'Roboto Mono'; + src: url('/assets/fonts/RobotoMono-Italic.woff2') format('woff2'); + font-weight: 100 700; /* Define the range of weight the variable font supports */ + font-style: italic; + font-display: fallback; + } + .prose { li { @apply my-2; diff --git a/content/admin/company/new-company.md b/content/admin/company/new-company.md index a3e02c972..de63e3f78 100644 --- a/content/admin/company/new-company.md +++ b/content/admin/company/new-company.md @@ -39,8 +39,8 @@ To create a new company: To create a new company: 1. In the Admin Console, navigate to the organization you want to place under a company. The organization must have a Business subscription, and you must be an owner of the organization. -2. Under **Organization Settings**, select **General**. -3. In the **Organization management** section, select **Create a company**. +2. Under **Organization settings**, select **Company management**. +3. Select **Create a company**. 4. Enter a unique name for your company, then select **Continue**. > **Tip** diff --git a/content/admin/organization/_index.md b/content/admin/organization/_index.md index 0390af103..6037c7f06 100644 --- a/content/admin/organization/_index.md +++ b/content/admin/organization/_index.md @@ -19,8 +19,8 @@ grid: description: Define which registries your developers can access. icon: home_storage link: /admin/organization/registry-access/ -- title: General settings - description: Configure general information or create a company. +- title: Organization settings + description: Configure information for your organization and manage settings. icon: settings link: /admin/organization/general-settings/ - title: SSO & SCIM diff --git a/content/admin/organization/general-settings.md b/content/admin/organization/general-settings.md index 54234f041..1a86d0e85 100644 --- a/content/admin/organization/general-settings.md +++ b/content/admin/organization/general-settings.md @@ -1,12 +1,12 @@ --- -description: Learn how to manage general settings for organizations. +description: Learn how to manage settings for organizations using Docker Admin Console. keywords: organization, settings, Admin Console title: General organization settings --- {{< include "admin-early-access.md" >}} -This section describes how to manage general settings in the Docker Admin Console. +This section describes how to manage organization settings in the Docker Admin Console. ## Configure general information @@ -23,16 +23,9 @@ To edit this information: 1. 
Sign in to the [Admin Console](https://admin.docker.com). 2. In the left navigation, select your organization in the drop-down menu. -3. Under **Organization Settings**, select **General**. +3. Under **Organization settings**, select **General**. 4. Specify the organization information and select **Save**. -## Create a company +## Next steps -To create a new company: - -1. Sign in to the [Admin Console](https://admin.docker.com). -2. In the left navigation, select your organization in the drop-down menu. -3. Under **Organization Settings**, select **General**. -4. In the **Organization management** section, select **Create a company**. -5. Enter a unique name for your company, then select **Continue**. -6. Review the company migration details and then select **Create company**. \ No newline at end of file +In the **Organization settings** menu, you can also [configure SSO](../../security/for-admins/single-sign-on/configure/) and [set up SCIM](../../security/for-admins/scim.md). If your organization isn't part of a company, from here you can also [audit your domains](../../security/for-admins/domain-audit.md) or [create a company](new-company.md). diff --git a/content/compose/multiple-compose-files/extends.md b/content/compose/multiple-compose-files/extends.md index af67e6634..f6eb61834 100644 --- a/content/compose/multiple-compose-files/extends.md +++ b/content/compose/multiple-compose-files/extends.md @@ -192,6 +192,40 @@ configuration. But this isn’t an acceptable solution when you want to re-use someone else's unfamiliar configurations and you don’t know about its own dependencies. +## Relative paths + +When using `extends` with a `file` attribute which points to another folder, relative paths +declared by the service being extended are converted so they still point to the +same file when used by the extending service. This is illustrated in the following example: + +Base Compose file: +```yaml +services: + webapp: + image: example + extends: + file: ../commons/compose.yaml + service: base +``` + +The `commons/compose.yaml` file: +```yaml +services: + base: + env_file: ./container.env +``` + +The resulting service refers to the original `container.env` file +within the `commons` directory. This can be confirmed with `docker compose config` +which inspects the actual model: +```yaml +services: + webapp: + image: example + env_file: + - ../commons/container.env +``` + ## Reference information - [`extends`](../compose-file/05-services.md#extends) diff --git a/content/compose/multiple-compose-files/include.md b/content/compose/multiple-compose-files/include.md index b49223d77..2dd35664f 100644 --- a/content/compose/multiple-compose-files/include.md +++ b/content/compose/multiple-compose-files/include.md @@ -36,6 +36,47 @@ services: This means the team managing `serviceB` can refactor its own database component to introduce additional services without impacting any dependent teams. It also means that the dependent teams don't need to include additional flags on each Compose command they run. +## Include and overrides + +Compose reports an error if any resource from `include` conflicts with resources from the included Compose file. This rule prevents +unexpected conflicts with resources defined by the included compose file author. However, there may be some circumstances where you might want to tweak the +included model. 
This can be achieved by adding an override file to the include directive: +```yaml +include: + - path : + - third-party/compose.yaml + - override.yaml # local override for third-party model +``` + +The main limitation with this approach is that you need to maintain a dedicated override file per include. For complex projects with multiple +includes this would result into many Compose files. + +The other option is to use a `compose.override.yaml` file. While conflicts will be rejected from the file using `include` when same +resource is declared, a global Compose override file can override the resulting merged model, as demonstrated in following example: + +Main `compose.yaml` file: +```yaml +include: + - team-1/compose.yaml # declare service-1 + - team-2/compose.yaml # declare service-2 +``` + +Override `compose.override.yaml` file: +```yaml +services: + service-1: + # override included service-1 to enable debugger port + ports: + - 2345:2345 + + service-2: + # override included service-2 to use local data folder containing test data + volumes: + - ./data:/data +``` + +Combined together, this allows you to benefit from third-party reusable components, and adjust the Compose model for your needs. + ## Reference information [`include` top-level element](../compose-file/14-include.md) \ No newline at end of file diff --git a/content/engine/release-notes/25.0.md b/content/engine/release-notes/25.0.md index d7f03c946..efbe3722a 100644 --- a/content/engine/release-notes/25.0.md +++ b/content/engine/release-notes/25.0.md @@ -19,6 +19,43 @@ For more information about: - Deprecated and removed features, see [Deprecated Engine Features](../deprecated.md). - Changes to the Engine API, see [Engine API version history](../api/version-history.md). +## 25.0.3 + +{{< release-date date="2024-02-06" >}} + +For a full list of pull requests and changes in this release, refer to the relevant GitHub milestones: + +- [docker/cli, 25.0.3 milestone](https://github.com/docker/cli/issues?q=is%3Aclosed+milestone%3A25.0.3) +- [moby/moby, 25.0.3 milestone](https://github.com/moby/moby/issues?q=is%3Aclosed+milestone%3A25.0.3) + + +### Bug fixes and enhancements + +- containerd image store: Fix a bug where `docker image history` would fail if a manifest wasn't found in the content store. [moby/moby#47348](https://github.com/moby/moby/pull/47348) +- Ensure that a generated MAC address is not restored when a container is restarted, but a configured MAC address is preserved. [moby/moby#47304](https://github.com/moby/moby/pull/47304) + + > **Note** + > + > - Containers created with Docker Engine version 25.0.0 may have duplicate MAC addresses. + > They must be re-created. + > - Containers with user-defined MAC addresses created with Docker Engine versions 25.0.0 or 25.0.1 + > receive new MAC addresses when started using Docker Engine version 25.0.2. + > They must also be re-created. + +- Fix `docker save @` producing an OCI archive with index without manifests. [moby/moby#47294](https://github.com/moby/moby/pull/47294) +- Fix a bug preventing bridge networks from being created with an MTU higher than 1500 on RHEL and CentOS 7. [moby/moby#47308](https://github.com/moby/moby/issues/47308), [moby/moby#47311](https://github.com/moby/moby/pull/47311) +- Fix a bug where containers are unable to communicate over an `internal` network. [moby/moby#47303](https://github.com/moby/moby/pull/47303) +- Fix a bug where the value of the `ipv6` daemon option was ignored. 
[moby/moby#47310](https://github.com/moby/moby/pull/47310) +- Fix a bug where trying to install a pulling using a digest revision would cause a panic. [moby/moby#47323](https://github.com/moby/moby/pull/47323) +- Fix a potential race condition in the managed containerd supervisor. [moby/moby#47313](https://github.com/moby/moby/pull/47313) +- Fix an issue with the `journald` log driver preventing container logs from being followed correctly with systemd version 255. [moby/moby47243](https://github.com/moby/moby/pull/47243) +- seccomp: Update the builtin seccomp profile to include syscalls added in kernel v5.17 - v6.7 to align the profile with the profile used by containerd. [moby/moby#47341](https://github.com/moby/moby/pull/47341) +- Windows: Fix cache not being used when building images based on Windows versions older than the host's version. [moby/moby#47307](https://github.com/moby/moby/pull/47307), [moby/moby#47337](https://github.com/moby/moby/pull/47337) + +### Packaging updates + +- Removed support for Ubuntu Lunar (23.04). [docker/ce-packaging#986](https://github.com/docker/docker-ce-packaging/pull/986) + ## 25.0.2 {{< release-date date="2024-01-31" >}} diff --git a/content/guides/use-case/nlp/_index.md b/content/guides/use-case/nlp/_index.md new file mode 100644 index 000000000..27a746a4d --- /dev/null +++ b/content/guides/use-case/nlp/_index.md @@ -0,0 +1,33 @@ +--- +description: Learn how to build natural language processing (NLP) applications. +keywords: nlp, natural language processing +title: Natural language processing guides +--- + +The natural language processing (NLP) guides teach you how to build and run NLP +applications. NLP applications can interpret and generate human language, both +spoken and written. This is a natural language, as opposed to a formal or +artificial one. NLP is a part of artificial intelligence (AI) and has a long +history that goes back to the origins of linguistics. + +> **Acknowledgment** +> +> These NLP guides are community contributions. Docker would like to thank +> [Harsh Manvar](https://github.com/harsh4870) for his contribution to the NLP +> guides. + +Select one of the guides to get started: + +* [Language translation](language-translation.md): Automatically translate text + or speech from one language to another. +* [Named entity recognition](named-entity-recognition.md): Identify and extract + specific entities or phrases from text, such as person names, locations, + organizations, dates, etc., and classify them into predefined categories. +* [Sentiment analysis](sentiment-analysis.md): Identify and extract emotions, + opinions, and attitudes from text or speech. +* [Text classification](text-classification.md): Assign a label or category to a + text document or a segment of text, based on its content or context. +* [Text summarization](text-summarization.md): Create a concise and informative + summary of a longer text. + + diff --git a/content/guides/use-case/nlp/language-translation.md b/content/guides/use-case/nlp/language-translation.md new file mode 100644 index 000000000..81cb503e9 --- /dev/null +++ b/content/guides/use-case/nlp/language-translation.md @@ -0,0 +1,339 @@ +--- +title: Build a language translation app +keywords: nlp, natural language processing, text summarization, python, language translation, googletrans +description: Learn how to build and run a language translation application using Python, Googletrans, and Docker. 
+--- + +## Overview + +This guide walks you through building and running a language translation +application. You'll build the application using Python with Googletrans, and +then set up the environment and run the application using Docker. + +The application demonstrates a simple but practical use of the Googletrans +library for language translation, showcasing basic Python and Docker concepts. +Googletrans is a free and unlimited Python library that implements the Google +Translate API. It uses the Google Translate Ajax API to make calls to such +methods as detect and translate. + +## Prerequisites + +* You have installed the latest version of [Docker Desktop](../../../get-docker.md). Docker adds new features regularly and some parts of this guide may work only with the latest version of Docker Desktop. +* You have a [Git client](https://git-scm.com/downloads). The examples in this section use a command-line based Git client, but you can use any client. + +## Get the sample application + +1. Open a terminal, and clone the sample application's repository using the + following command. + + ```console + $ git clone https://github.com/harsh4870/Docker-NLP.git + ``` + +2. Verify that you cloned the repository. + + You should see the following files in your `Docker-NLP` directory. + ```text + 01_sentiment_analysis.py + 02_name_entity_recognition.py + 03_text_classification.py + 04_text_summarization.py + 05_language_translation.py + entrypoint.sh + requirements.txt + Dockerfile + README.md + ``` + +## Explore the application code + +The source code for the application is in the +`Docker-NLP/05_language_translation.py` file. Open `05_language_translation.py` +in a text or code editor to explore its contents in the following steps. + +1. Import the required libraries. + + ```python + from googletrans import Translator + ``` + + This line imports the `Translator` class from `googletrans`. + Googletrans is a Python library that provides an interface to Google + Translate's AJAX API. + +2. Specify the main execution block. + ```python + if __name__ == "__main__": + ``` + This Python idiom ensures that the following code block runs only if this + script is the main program. It provides flexibility, allowing the script to + function both as a standalone program and as an imported module. + +3. Create an infinite loop for continuous input. + + ```python + while True: + input_text = input("Enter the text for translation (type 'exit' to end): ") + + if input_text.lower() == 'exit': + print("Exiting...") + break + ``` + + An infinite loop is established here to continuously prompt you for text + input, ensuring interactivity. The loop breaks when you type `exit`, allowing + you to control the application flow effectively. + +4. Create an instance of Translator. + + ```python + translator = Translator() + ``` + + This creates an instance of the Translator class, which + performs the translation. + +5. Translate text. + + ```python + translated_text = translator.translate(input_text, dest='fr').text + ``` + + Here, the `translator.translate` method is called with the user input. The + `dest='fr'` argument specifies that the destination language for translation + is French. The `.text` attribute gets the translated string. For more details + about the available language codes, see the + [Googletrans docs](https://py-googletrans.readthedocs.io/en/latest/). + +6. Print the original and translated text. 
+ + ```python + print(f"Original Text: {input_text}") + print(f"Translated Text: {translated_text}") + ``` + + These two lines print the original text entered by the user and the + translated text. + +7. Create `requirements.txt`. The sample application already contains the + `requirements.txt` file to specify the necessary modules that the + application imports. Open `requirements.txt` in a code or text editor to + explore its contents. + + ```text + ... + + # 05 language_translation + googletrans==4.0.0-rc1 + ``` + + Only `googletrans` is required for the language translation application. + +## Explore the application environment + +You'll use Docker to run the application in a container. Docker lets you +containerize the application, providing a consistent and isolated environment +for running it. This means the application will operate as intended within its +Docker container, regardless of the underlying system differences. + +To run the application in a container, a Dockerfile is required. A Dockerfile is +a text document that contains all the commands you would call on the command +line to assemble an image. An image is a read-only template with instructions +for creating a Docker container. + +The sample application already contains a `Dockerfile`. Open the `Dockerfile` in a code or text editor to explore its contents. + +The following steps explain each part of the `Dockerfile`. For more details, see the [Dockerfile reference](/engine/reference/builder/). + +1. Specify the base image. + + ```dockerfile + FROM python:3.8-slim + ``` + + This command sets the foundation for the build. `python:3.8-slim` is a + lightweight version of the Python 3.8 image, optimized for size and speed. + Using this slim image reduces the overall size of your Docker image, leading + to quicker downloads and less surface area for security vulnerabilities. This + is particularly useful for a Python-based application where you might not + need the full standard Python image. + +2. Set the working directory. + + ```dockerfile + WORKDIR /app + ``` + + `WORKDIR` sets the current working directory within the Docker image. By + setting it to `/app`, you ensure that all subsequent commands in the + Dockerfile + (like `COPY` and `RUN`) are executed in this directory. This also helps in + organizing your Docker image, as all application-related files are contained + in a specific directory. + +3. Copy the requirements file into the image. + + ```dockerfile + COPY requirements.txt /app + ``` + + The `COPY` command transfers the `requirements.txt` file from + your local machine into the Docker image. This file lists all Python + dependencies required by the application. Copying it into the container + lets the next command (`RUN pip install`) install these dependencies + inside the image environment. + +4. Install the Python dependencies in the image. + + ```dockerfile + RUN pip install --no-cache-dir -r requirements.txt + ``` + + This line uses `pip`, Python's package installer, to install the packages + listed in `requirements.txt`. The `--no-cache-dir` option disables + the cache, which reduces the size of the Docker image by not storing the + unnecessary cache data. + +5. Run additional commands. + + ```dockerfile + RUN python -m spacy download en_core_web_sm + ``` + + This step is specific to NLP applications that require the spaCy library. It downloads the `en_core_web_sm` model, which is a small English language model for spaCy. 
While not needed for this app, it's included for compatibility with other NLP applications that might use this Dockerfile. + +6. Copy the application code into the image. + + ```dockerfile + COPY *.py /app + COPY entrypoint.sh /app + ``` + + These commands copy your Python scripts and the `entrypoint.sh` script into the image's `/app` directory. This is crucial because the container needs these scripts to run the application. The `entrypoint.sh` script is particularly important as it dictates how the application starts inside the container. + +7. Set permissions for the `entrypoint.sh` script. + + ```dockerfile + RUN chmod +x /app/entrypoint.sh + ``` + + This command modifies the file permissions of `entrypoint.sh`, making it + executable. This step is necessary to ensure that the Docker container can + run this script to start the application. + +8. Set the entry point. + + ```dockerfile + ENTRYPOINT ["/app/entrypoint.sh"] + ``` + + The `ENTRYPOINT` instruction configures the container to run `entrypoint.sh` + as its default executable. This means that when the container starts, it + automatically executes the script. + + You can explore the `entrypoint.sh` script by opening it in a code or text + editor. As the sample contains several applications, the script lets you + specify which application to run when the container starts. + +## Run the application + +To run the application using Docker: + +1. Build the image. + + In a terminal, run the following command inside the directory of where the `Dockerfile` is located. + + ```console + $ docker build -t basic-nlp . + ``` + + The following is a break down of the command: + + - `docker build`: This is the primary command used to build a Docker image + from a Dockerfile and a context. The context is typically a set of files at + a specified location, often the directory containing the Dockerfile. + - `-t basic-nlp`: This is an option for tagging the image. The `-t` flag + stands for tag. It assigns a name to the image, which in this case is + `basic-nlp`. Tags are a convenient way to reference images later, + especially when pushing them to a registry or running containers. + - `.`: This is the last part of the command and specifies the build context. + The period (`.`) denotes the current directory. Docker will look for a + Dockerfile in this directory. The build context (the current directory, in + this case) is sent to the Docker daemon to enable the build. It includes + all the files and subdirectories in the specified directory. + + For more details, see the [docker build CLI reference](/engine/reference/commandline/image_build/). + + Docker outputs several logs to your console as it builds the image. You'll + see it download and install the dependencies. Depending on your network + connection, this may take several minutes. Docker does have a caching + feature, so subsequent builds can be faster. The console will + return to the prompt when it's complete. + +2. Run the image as a container. + + In a terminal, run the following command. + + ```console + $ docker run -it basic-nlp 05_language_translation.py + ``` + + The following is a break down of the command: + + - `docker run`: This is the primary command used to run a new container from + a Docker image. + - `-it`: This is a combination of two options: + - `-i` or `--interactive`: This keeps the standard input (STDIN) open even + if not attached. It lets the container remain running in the + foreground and be interactive. 
+ - `-t` or `--tty`: This allocates a pseudo-TTY, essentially simulating a + terminal, like a command prompt or a shell. It's what lets you + interact with the application inside the container. + - `basic-nlp`: This specifies the name of the Docker image to use for + creating the container. In this case, it's the image named `basic-nlp` that + you created with the `docker build` command. + - `05_language_translation.py`: This is the script you want to run inside the + Docker container. It gets passed to the `entrypoint.sh` script, which runs + it when the container starts. + + For more details, see the [docker run CLI reference](/engine/reference/commandline/container_run/). + + > **Note** + > + > For Windows users, you may get an error when running the container. Verify + > that the line endings in the `entrypoint.sh` are `LF` (`\n`) and not `CRLF` (`\r\n`), + > then rebuild the image. For more details, see [Avoid unexpected syntax errors, use Unix style line endings for files in containers](/desktop/troubleshoot/topics/#avoid-unexpected-syntax-errors-use-unix-style-line-endings-for-files-in-containers). + + You will see the following in your console after the container starts. + + ```console + Enter the text for translation (type 'exit' to end): + ``` + +3. Test the application. + + Enter some text to get the text summarization. + + ```console + Enter the text for translation (type 'exit' to end): Hello, how are you doing? + Original Text: Hello, how are you doing? + Translated Text: Bonjour comment allez-vous? + ``` + +## Summary + +In this guide, you learned how to build and run a language translation +application. You learned how to build the application using Python with +Googletrans, and then set up the environment and run the application using +Docker. + +Related information: + +* [Docker CLI reference](/engine/reference/commandline/docker/) +* [Dockerfile reference](/engine/reference/builder/) +* [Googletrans](https://github.com/ssut/py-googletrans) +* [Python documentation](https://docs.python.org/3/) + +## Next steps + +Explore more [natural language processing guides](./_index.md). \ No newline at end of file diff --git a/content/guides/use-case/nlp/named-entity-recognition.md b/content/guides/use-case/nlp/named-entity-recognition.md new file mode 100644 index 000000000..c0ea45347 --- /dev/null +++ b/content/guides/use-case/nlp/named-entity-recognition.md @@ -0,0 +1,349 @@ +--- +title: Build a named entity recognition app +keywords: nlp, natural language processing, named entity recognition, python, spacy, ner +description: Learn how to build and run a named entity recognition application using Python, spaCy, and Docker. +--- + +## Overview + +This guide walks you through building and running a named entity recognition +(NER) application. You'll build the application using Python with +spaCy, and then set up the environment and run the application using Docker. + +The application processes input text to identify and print named entities, like people, organizations, or locations. + +## Prerequisites + +* You have installed the latest version of [Docker Desktop](../../../get-docker.md). Docker adds new features regularly and some parts of this guide may work only with the latest version of Docker Desktop. +* You have a [Git client](https://git-scm.com/downloads). The examples in this section use a command-line based Git client, but you can use any client. + +## Get the sample application + +1. 
Open a terminal, and clone the sample application's repository using the + following command. + + ```console + $ git clone https://github.com/harsh4870/Docker-NLP.git + ``` + +2. Verify that you cloned the repository. + + You should see the following files in your `Docker-NLP` directory. + + ```text + 01_sentiment_analysis.py + 02_name_entity_recognition.py + 03_text_classification.py + 04_text_summarization.py + 05_language_translation.py + entrypoint.sh + requirements.txt + Dockerfile + README.md + ``` + +## Explore the application code + +The source code for the name recognition application is in the `Docker-NLP/02_name_entity_recognition.py` file. Open `02_name_entity_recognition.py` in a text or code editor to explore its contents in the following steps. + +1. Import the required libraries. + + ```python + import spacy + ``` + + This line imports the `spaCy` library. `spaCy` is a popular library in Python + used for natural language processing (NLP). + +2. Load the language model. + + ```python + nlp = spacy.load("en_core_web_sm") + ``` + + Here, the `spacy.load` function loads a language model. The `en_core_web_sm` + model is a small English language model. You can use this model for various + NLP tasks, including tokenization, part-of-speech tagging, and named entity + recognition. + +3. Specify the main execution block. + + ```python + if __name__ == "__main__": + ``` + + This Python idiom ensures that the following code block runs only if this + script is the main program. It provides flexibility, allowing the script to + function both as a standalone program and as an imported module. + +4. Create an infinite loop for continuous input. + + ```python + while True: + ``` + + This while loop runs indefinitely until it's explicitly broken. It lets + the user continuously enter text for entity recognition until they decide + to exit. + +5. Get user input. + + ```python + input_text = input("Enter the text for entity recognition (type 'exit' to end): ") + ``` + + This line prompts the user to enter text. The program will then perform entity recognition on this text. + +6. Define an exit condition. + + ```python + if input_text.lower() == 'exit': + print("Exiting...") + break + ``` + + If the user types something, the program converts the input to lowercase and + compares it to `exit`. If they match, the program prints **Exiting...** and + breaks out of the while loop, effectively ending the program. + +7. Perform named entity recognition. + + ```python + doc = nlp(input_text) + + for ent in doc.ents: + print(f"Entity: {ent.text}, Type: {ent.label_}") + ``` + + - `doc = nlp(input_text)`: Here, the nlp model processes the user-input text. This creates a Doc object which contains various NLP attributes, including identified entities. + - `for ent in doc.ents:`: This loop iterates over the entities found in the text. + - `print(f"Entity: {ent.text}, Type: {ent.label_}")`: For each entity, it prints the entity text and its type (like PERSON, ORG, or GPE). + + +8. Create `requirements.txt`. + + The sample application already contains the `requirements.txt` file to specify the necessary packages that the application imports. Open `requirements.txt` in a code or text editor to explore its contents. + + ```text + # 02 named_entity_recognition + spacy==3.7.2 + + ... + ``` + + Only the `spacy` package is required for the named recognition application. + +## Explore the application environment + +You'll use Docker to run the application in a container. 
Docker lets you +containerize the application, providing a consistent and isolated environment +for running it. This means the application will operate as intended within its +Docker container, regardless of the underlying system differences. + +To run the application in a container, a Dockerfile is required. A Dockerfile is +a text document that contains all the commands you would call on the command +line to assemble an image. An image is a read-only template with instructions +for creating a Docker container. + +The sample application already contains a `Dockerfile`. Open the `Dockerfile` in a code or text editor to explore its contents. + +The following steps explain each part of the `Dockerfile`. For more details, see the [Dockerfile reference](/engine/reference/builder/). + +1. Specify the base image. + + ```dockerfile + FROM python:3.8-slim + ``` + + This command sets the foundation for the build. `python:3.8-slim` is a + lightweight version of the Python 3.8 image, optimized for size and speed. + Using this slim image reduces the overall size of your Docker image, leading + to quicker downloads and less surface area for security vulnerabilities. This + is particularly useful for a Python-based application where you might not + need the full standard Python image. + +2. Set the working directory. + + ```dockerfile + WORKDIR /app + ``` + + `WORKDIR` sets the current working directory within the Docker image. By + setting it to `/app`, you ensure that all subsequent commands in the + Dockerfile (like `COPY` and `RUN`) are executed in this directory. This also + helps in organizing your Docker image, as all application-related files are + contained in a specific directory. + +3. Copy the requirements file into the image. + + ```dockerfile + COPY requirements.txt /app + ``` + + The `COPY` command transfers the `requirements.txt` file from + your local machine into the Docker image. This file lists all Python + dependencies required by the application. Copying it into the container + lets the next command (`RUN pip install`) install these dependencies + inside the image environment. + +4. Install the Python dependencies in the image. + + ```dockerfile + RUN pip install --no-cache-dir -r requirements.txt + ``` + + This line uses `pip`, Python's package installer, to install the packages + listed in `requirements.txt`. The `--no-cache-dir` option disables + the cache, which reduces the size of the Docker image by not storing the + unnecessary cache data. + +5. Run additional commands. + + ```dockerfile + RUN python -m spacy download en_core_web_sm + ``` + + This step is specific to NLP applications that require the spaCy library. It downloads the `en_core_web_sm` model, which is a small English language model for spaCy. + +6. Copy the application code into the image. + + ```dockerfile + COPY *.py /app + COPY entrypoint.sh /app + ``` + + These commands copy your Python scripts and the `entrypoint.sh` script into + the image's `/app` directory. This is crucial because the container needs + these scripts to run the application. The `entrypoint.sh` script is + particularly important as it dictates how the application starts inside the + container. + +7. Set permissions for the `entrypoint.sh` script. + + ```dockerfile + RUN chmod +x /app/entrypoint.sh + ``` + + This command modifies the file permissions of `entrypoint.sh`, making it + executable. This step is necessary to ensure that the Docker container can + run this script to start the application. + +8. Set the entry point. 
+ + ```dockerfile + ENTRYPOINT ["/app/entrypoint.sh"] + ``` + + The `ENTRYPOINT` instruction configures the container to run `entrypoint.sh` + as its default executable. This means that when the container starts, it + automatically executes the script. + + You can explore the `entrypoint.sh` script by opening it in a code or text + editor. As the sample contains several applications, the script lets you + specify which application to run when the container starts. + +## Run the application + +To run the application using Docker: + +1. Build the image. + + In a terminal, run the following command inside the directory of where the `Dockerfile` is located. + + ```console + $ docker build -t basic-nlp . + ``` + + The following is a break down of the command: + + - `docker build`: This is the primary command used to build a Docker image + from a Dockerfile and a context. The context is typically a set of files at + a specified location, often the directory containing the Dockerfile. + - `-t basic-nlp`: This is an option for tagging the image. The `-t` flag + stands for tag. It assigns a name to the image, which in this case is + `basic-nlp`. Tags are a convenient way to reference images later, + especially when pushing them to a registry or running containers. + - `.`: This is the last part of the command and specifies the build context. + The period (`.`) denotes the current directory. Docker will look for a + Dockerfile in this directory. The build context (the current directory, in + this case) is sent to the Docker daemon to enable the build. It includes + all the files and subdirectories in the specified directory. + + For more details, see the [docker build CLI reference](/engine/reference/commandline/image_build/). + + Docker outputs several logs to your console as it builds the image. You'll + see it download and install the dependencies. Depending on your network + connection, this may take several minutes. Docker does have a caching + feature, so subsequent builds can be faster. The console will + return to the prompt when it's complete. + +2. Run the image as a container. + + In a terminal, run the following command. + + ```console + $ docker run -it basic-nlp 02_name_entity_recognition.py + ``` + + The following is a break down of the command: + + - `docker run`: This is the primary command used to run a new container from + a Docker image. + - `-it`: This is a combination of two options: + - `-i` or `--interactive`: This keeps the standard input (STDIN) open even + if not attached. It lets the container remain running in the + foreground and be interactive. + - `-t` or `--tty`: This allocates a pseudo-TTY, essentially simulating a + terminal, like a command prompt or a shell. It's what lets you + interact with the application inside the container. + - `basic-nlp`: This specifies the name of the Docker image to use for + creating the container. In this case, it's the image named `basic-nlp` that + you created with the `docker build` command. + - `02_name_entity_recognition.py`: This is the script you want to run inside + the Docker container. It gets passed to the `entrypoint.sh` script, which + runs it when the container starts. + + For more details, see the [docker run CLI reference](/engine/reference/commandline/container_run/). + + + > **Note** + > + > For Windows users, you may get an error when running the container. Verify + > that the line endings in the `entrypoint.sh` are `LF` (`\n`) and not `CRLF` (`\r\n`), + > then rebuild the image. 
For more details, see [Avoid unexpected syntax errors, use Unix style line endings for files in containers](/desktop/troubleshoot/topics/#avoid-unexpected-syntax-errors-use-unix-style-line-endings-for-files-in-containers). + + You will see the following in your console after the container starts. + + ```console + Enter the text for entity recognition (type 'exit' to end): + ``` + +3. Test the application. + + Enter some information to get the named entity recognition. + + ```console + Enter the text for entity recognition (type 'exit' to end): Apple Inc. is planning to open a new store in San Francisco. Tim Cook is the CEO of Apple. + + Entity: Apple Inc., Type: ORG + Entity: San Francisco, Type: GPE + Entity: Tim Cook, Type: PERSON + Entity: Apple, Type: ORG + ``` + +## Summary + +This guide demonstrated how to build and run a named entity recognition +application. You learned how to build the application using Python with spaCy, +and then set up the environment and run the application using Docker. + +Related information: + +* [Docker CLI reference](/engine/reference/commandline/docker/) +* [Dockerfile reference](/engine/reference/builder/) +* [spaCy](https://spacy.io/) +* [Python documentation](https://docs.python.org/3/) + +## Next steps + +Explore more [natural language processing guides](./_index.md). \ No newline at end of file diff --git a/content/guides/use-case/nlp/sentiment-analysis.md b/content/guides/use-case/nlp/sentiment-analysis.md new file mode 100644 index 000000000..ba718c38d --- /dev/null +++ b/content/guides/use-case/nlp/sentiment-analysis.md @@ -0,0 +1,359 @@ +--- +title: Build a sentiment analysis app +keywords: nlp, natural language processing, sentiment analysis, python, nltk +description: Learn how to build and run a sentiment analysis application using Python, NLTK, and Docker. +--- + +## Overview + +In this guide, you learn how to build and run a sentiment analysis application. +You'll build the application using Python with the Natural Language Toolkit +(NLTK), and then set up the environment and run the application using Docker. + +The application analyzes user input text for sentiment using NLTK's +SentimentIntensityAnalyzer and outputs whether the sentiment is positive, +negative, or neutral. + +## Prerequisites + +* You have installed the latest version of [Docker Desktop](../../../get-docker.md). Docker adds new features regularly and some parts of this guide may work only with the latest version of Docker Desktop. +* You have a [Git client](https://git-scm.com/downloads). The examples in this section use a command-line based Git client, but you can use any client. + +## Get the sample application + +1. Open a terminal, and clone the sample application's repository using the + following command. + + ```console + $ git clone https://github.com/harsh4870/Docker-NLP.git + ``` + +2. Verify that you cloned the repository. + + You should see the following files in your `Docker-NLP` directory. + + ```text + 01_sentiment_analysis.py + 02_name_entity_recognition.py + 03_text_classification.py + 04_text_summarization.py + 05_language_translation.py + entrypoint.sh + requirements.txt + Dockerfile + README.md + ``` + +## Explore the application code + +The source code for the sentiment analysis application is in the `Docker-NLP/01_sentiment_analysis.py` file. Open `01_sentiment_analysis.py` in a text or code editor to explore its contents in the following steps. + +1. Import the required libraries. 
+ + ```python + import nltk + from nltk.sentiment import SentimentIntensityAnalyzer + import ssl + ``` + + - `nltk`: This is the Natural Language Toolkit library used for working with + human language data in Python. + - `SentimentIntensityAnalyzer`: This is a specific tool from NLTK used for + determining the sentiment of a piece of text. + - `ssl`: This module provides access to Transport Layer Security (encryption) + functions used for secure web connections. + +2. Handle SSL certificate verification. + + ```python + try: + _create_unverified_https_context = ssl._create_unverified_context + except AttributeError: + pass + else: + ssl._create_default_https_context = _create_unverified_https_context + ``` + + This block is a workaround for certain environments where downloading data through NLTK might fail due to SSL certificate verification issues. It's telling Python to ignore SSL certificate verification for HTTPS requests. + +3. Download NLTK resources. + + ```python + nltk.download('vader_lexicon') + nltk.download('punkt') + ``` + + - `vader_lexicon`: This is a lexicon used by the `SentimentIntensityAnalyzer` + for sentiment analysis. + - `punkt`: This is used by NLTK for tokenizing sentences. It's necessary for + the `SentimentIntensityAnalyzer` to function correctly. + +4. Create a sentiment analysis function. + + ```python + def perform_semantic_analysis(text): + sid = SentimentIntensityAnalyzer() + sentiment_score = sid.polarity_scores(text) + + if sentiment_score['compound'] >= 0.05: + return "Positive" + elif sentiment_score['compound'] <= -0.05: + return "Negative" + else: + return "Neutral" + ``` + + - `SentimentIntensityAnalyzer()` creates an instance of the + analyzer. + - `polarity_scores(text)` generates a sentiment score for the input text. + + The function returns **Positive**, **Negative**, or **Neutral** based on the + compound score. + +5. Create the main loop. + + ```python + if __name__ == "__main__": + while True: + input_text = input("Enter the text for semantic analysis (type 'exit' to end): ") + + if input_text.lower() == 'exit': + print("Exiting...") + break + + result = perform_semantic_analysis(input_text) + print(f"Sentiment: {result}") + ``` + + This part of the script runs an infinite loop to accept user input for + analysis. If the user types `exit`, the program terminates. Otherwise, it + prints out the sentiment of the provided text. + +6. Create `requirements.txt`. + + The sample application already contains the + `requirements.txt` file to specify the necessary packages that the + application imports. Open `requirements.txt` in a code or text editor to + explore its contents. + + ```text + # 01 sentiment_analysis + nltk==3.6.5 + + ... + ``` + + Only the `nltk` package is required for the sentiment analysis application. + +## Explore the application environment + +You'll use Docker to run the application in a container. Docker lets you +containerize the application, providing a consistent and isolated environment +for running it. This means the application will operate as intended within its +Docker container, regardless of the underlying system differences. + +To run the application in a container, a Dockerfile is required. A Dockerfile is +a text document that contains all the commands you would call on the command +line to assemble an image. An image is a read-only template with instructions +for creating a Docker container. + +The sample application already contains a `Dockerfile`. 
Open the `Dockerfile` in a code or text editor to explore its contents. + +The following steps explain each part of the `Dockerfile`. For more details, see the [Dockerfile reference](/engine/reference/builder/). + +1. Specify the base image. + + ```dockerfile + FROM python:3.8-slim + ``` + + This command sets the foundation for the build. `python:3.8-slim` is a + lightweight version of the Python 3.8 image, optimized for size and speed. + Using this slim image reduces the overall size of your Docker image, leading + to quicker downloads and less surface area for security vulnerabilities. This + is particularly useful for a Python-based application where you might not + need the full standard Python image. + +2. Set the working directory. + + ```dockerfile + WORKDIR /app + ``` + + `WORKDIR` sets the current working directory within the Docker image. By + setting it to `/app`, you ensure that all subsequent commands in the + Dockerfile (like `COPY` and `RUN`) are executed in this directory. This also + helps in organizing your Docker image, as all application-related files are + contained in a specific directory. + +3. Copy the requirements file into the image. + + ```dockerfile + COPY requirements.txt /app + ``` + + The `COPY` command transfers the `requirements.txt` file from + your local machine into the Docker image. This file lists all Python + dependencies required by the application. Copying it into the container + lets the next command (`RUN pip install`) install these dependencies + inside the image environment. + +4. Install the Python dependencies in the image. + + ```dockerfile + RUN pip install --no-cache-dir -r requirements.txt + ``` + + This line uses `pip`, Python's package installer, to install the packages + listed in `requirements.txt`. The `--no-cache-dir` option disables + the cache, which reduces the size of the Docker image by not storing the + unnecessary cache data. + +5. Run additional commands. + + ```dockerfile + RUN python -m spacy download en_core_web_sm + ``` + + This step is specific to NLP applications that require the spaCy library. It downloads the `en_core_web_sm` model, which is a small English language model for spaCy. While not needed for this app, it's included for compatibility with other NLP applications that might use this Dockerfile. + +6. Copy the application code into the image. + + ```dockerfile + COPY *.py /app + COPY entrypoint.sh /app + ``` + + These commands copy your Python scripts and the `entrypoint.sh` script into + the image's `/app` directory. This is crucial because the container needs + these scripts to run the application. The `entrypoint.sh` script is + particularly important as it dictates how the application starts inside the + container. + +7. Set permissions for the `entrypoint.sh` script. + + ```dockerfile + RUN chmod +x /app/entrypoint.sh + ``` + + This command modifies the file permissions of `entrypoint.sh`, making it + executable. This step is necessary to ensure that the Docker container can + run this script to start the application. + +8. Set the entry point. + + ```dockerfile + ENTRYPOINT ["/app/entrypoint.sh"] + ``` + + The `ENTRYPOINT` instruction configures the container to run `entrypoint.sh` + as its default executable. This means that when the container starts, it + automatically executes the script. + + You can explore the `entrypoint.sh` script by opening it in a code or text + editor. As the sample contains several applications, the script lets you + specify which application to run when the container starts. 
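   To give you an idea of how such a dispatcher works, the following is a
   minimal sketch of an entrypoint script that runs whichever Python file you
   pass as the container's first argument. It's an illustration only, and the
   actual `entrypoint.sh` in the sample repository may be organized
   differently.

   ```bash
   #!/bin/bash
   # Illustrative sketch of a dispatcher entrypoint (not the repository's
   # actual script): run the Python script named by the first argument.
   set -e

   if [ $# -lt 1 ]; then
     echo "Usage: docker run -it basic-nlp <script-name>.py" >&2
     exit 1
   fi

   exec python "/app/$1"
   ```

   Using `exec` replaces the shell process with the Python process, so the
   application receives signals directly when the container stops.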
+ +## Run the application + +To run the application using Docker: + +1. Build the image. + + In a terminal, run the following command inside the directory of where the `Dockerfile` is located. + + ```console + $ docker build -t basic-nlp . + ``` + + The following is a break down of the command: + + - `docker build`: This is the primary command used to build a Docker image + from a Dockerfile and a context. The context is typically a set of files at + a specified location, often the directory containing the Dockerfile. + - `-t basic-nlp`: This is an option for tagging the image. The `-t` flag + stands for tag. It assigns a name to the image, which in this case is + `basic-nlp`. Tags are a convenient way to reference images later, + especially when pushing them to a registry or running containers. + - `.`: This is the last part of the command and specifies the build context. + The period (`.`) denotes the current directory. Docker will look for a + Dockerfile in this directory. The build context (the current directory, in + this case) is sent to the Docker daemon to enable the build. It includes + all the files and subdirectories in the specified directory. + + Docker outputs several logs to your console as it builds the image. You'll + see it download and install the dependencies. Depending on your network + connection, this may take several minutes. Docker does have a caching + feature, so subsequent builds can be faster. The console will + return to the prompt when it's complete. + + For more details, see the [docker build CLI reference](/engine/reference/commandline/image_build/). + +2. Run the image as a container. + + In a terminal, run the following command. + + ```console + $ docker run -it basic-nlp 01_sentiment_analysis.py + ``` + + The following is a break down of the command: + + - `docker run`: This is the primary command used to run a new container from + a Docker image. + - `-it`: This is a combination of two options: + - `-i` or `--interactive`: This keeps the standard input (STDIN) open even + if not attached. It lets the container remain running in the + foreground and be interactive. + - `-t` or `--tty`: This allocates a pseudo-TTY, essentially simulating a + terminal, like a command prompt or a shell. It's what lets you + interact with the application inside the container. + - `basic-nlp`: This specifies the name of the Docker image to use for + creating the container. In this case, it's the image named `basic-nlp` that + you created with the `docker build` command. + - `01_sentiment_analysis.py`: This is the script you want to run inside the + Docker container. It gets passed to the `entrypoint.sh` script, which runs + it when the container starts. + + For more details, see the [docker run CLI reference](/engine/reference/commandline/container_run/). + + + > **Note** + > + > For Windows users, you may get an error when running the container. Verify + > that the line endings in the `entrypoint.sh` are `LF` (`\n`) and not `CRLF` (`\r\n`), + > then rebuild the image. For more details, see [Avoid unexpected syntax errors, use Unix style line endings for files in containers](/desktop/troubleshoot/topics/#avoid-unexpected-syntax-errors-use-unix-style-line-endings-for-files-in-containers). + + You will see the following in your console after the container starts. + + ```console + Enter the text for semantic analysis (type 'exit' to end): + ``` + +3. Test the application. + + Enter a comment to get the sentiment analysis. 
+ + ```console + Enter the text for semantic analysis (type 'exit' to end): I love containers! + Sentiment: Positive + Enter the text for semantic analysis (type 'exit' to end): I'm still learning about containers. + Sentiment: Neutral + ``` + +## Summary + +In this guide, you learned how to build and run a sentiment analysis +application. You learned how to build the application using Python with NLTK, +and then set up the environment and run the application using Docker. + +Related information: + +* [Docker CLI reference](/engine/reference/commandline/docker/) +* [Dockerfile reference](/engine/reference/builder/) +* [Natural Language Toolkit](https://www.nltk.org/) +* [Python documentation](https://docs.python.org/3/) + +## Next steps + +Explore more [natural language processing guides](./_index.md). \ No newline at end of file diff --git a/content/guides/use-case/nlp/text-classification.md b/content/guides/use-case/nlp/text-classification.md new file mode 100644 index 000000000..632608ff3 --- /dev/null +++ b/content/guides/use-case/nlp/text-classification.md @@ -0,0 +1,431 @@ +--- +title: Build a text recognition app +keywords: nlp, natural language processing, sentiment analysis, python, nltk, scikit-learn, text classification +description: Learn how to build and run a text recognition application using Python, NLTK, scikit-learn, and Docker. +--- + +## Overview + +In this guide, you'll learn how to create and run a text recognition +application. You'll build the application using Python with scikit-learn and the +Natural Language Toolkit (NLTK). Then you'll set up the environment and run the +application using Docker. + +The application analyzes the sentiment of a user's input text using NLTK's +SentimentIntensityAnalyzer. It lets the user input text, which is then processed +to determine its sentiment, classifying it as either positive or negative. Also, +it displays the accuracy and a detailed classification report of its sentiment +analysis model based on a predefined dataset. + +## Prerequisites + +* You have installed the latest version of [Docker Desktop](../../../get-docker.md). Docker adds new features regularly and some parts of this guide may work only with the latest version of Docker Desktop. +* You have a [Git client](https://git-scm.com/downloads). The examples in this section use a command-line based Git client, but you can use any client. + +## Get the sample application + +1. Open a terminal, and clone the sample application's repository using the + following command. + + ```console + $ git clone https://github.com/harsh4870/Docker-NLP.git + ``` + +2. Verify that you cloned the repository. + + You should see the following files in your `Docker-NLP` directory. + + ```text + 01_sentiment_analysis.py + 02_name_entity_recognition.py + 03_text_classification.py + 04_text_summarization.py + 05_language_translation.py + entrypoint.sh + requirements.txt + Dockerfile + README.md + ``` + +## Explore the application code + +The source code for the text classification application is in the `Docker-NLP/03_text_classification.py` file. Open `03_text_classification.py` in a text or code editor to explore its contents in the following steps. + +1. Import the required libraries. + + ```python + import nltk + from nltk.sentiment import SentimentIntensityAnalyzer + from sklearn.metrics import accuracy_score, classification_report + from sklearn.model_selection import train_test_split + import ssl + ``` + + - `nltk`: A popular Python library for natural language processing (NLP). 
+ - `SentimentIntensityAnalyzer`: A component of `nltk` for sentiment analysis. + - `accuracy_score`, `classification_report`: Functions from scikit-learn for + evaluating the model. + - `train_test_split`: Function from scikit-learn to split datasets into + training and testing sets. + - `ssl`: Used for handling SSL certificate issues which might occur while + downloading data for `nltk`. + +2. Handle SSL certificate verification. + + ```python + try: + _create_unverified_https_context = ssl._create_unverified_context + except AttributeError: + pass + else: + ssl._create_default_https_context = _create_unverified_https_context + ``` + + This block is a workaround for certain environments where downloading data + through NLTK might fail due to SSL certificate verification issues. It's + telling Python to ignore SSL certificate verification for HTTPS requests. + +3. Download NLTK resources. + + ```python + nltk.download('vader_lexicon') + ``` + + The `vader_lexicon` is a lexicon used by the `SentimentIntensityAnalyzer` for + sentiment analysis. + + +4. Define text for testing and corresponding labels. + + ```python + texts = [...] + labels = [0, 1, 2, 0, 1, 2] + ``` + + This section defines a small dataset of texts and their corresponding labels (0 for positive, 1 for negative, and 2 for spam). + +5. Split the test data. + + ```python + X_train, X_test, y_train, y_test = train_test_split(texts, labels, test_size=0.2, random_state=42) + ``` + + This part splits the dataset into training and testing sets, with 20% of data + as the test set. As this application uses a pre-trained model, it doesn't + train the model. + +6. Set up sentiment analysis. + + ```python + sia = SentimentIntensityAnalyzer() + ``` + + This code initializes the `SentimentIntensityAnalyzer` to analyze the + sentiment of text. + +7. Generate predictions and classifications for the test data. + + ```python + vader_predictions = [sia.polarity_scores(text)["compound"] for text in X_test] + threshold = 0.2 + vader_classifications = [0 if score > threshold else 1 for score in vader_predictions] + ``` + + This part generates sentiment scores for each text in the test set and classifies them as positive or negative based on a threshold. + +8. Evaluate the model. + + ```python + accuracy = accuracy_score(y_test, vader_classifications) + report_vader = classification_report(y_test, vader_classifications, zero_division='warn') + ``` + + This part calculates the accuracy and classification report for the predictions. + +9. Specify the main execution block. + + ```python + if __name__ == "__main__": + ``` + + This Python idiom ensures that the following code block runs only if this + script is the main program. It provides flexibility, allowing the script to + function both as a standalone program and as an imported module. + +10. Create an infinite loop for continuous input. + + ```python + while True: + input_text = input("Enter the text for classification (type 'exit' to end): ") + + if input_text.lower() == 'exit': + print("Exiting...") + break + ``` + This while loop runs indefinitely until it's explicitly broken. It lets the + user continuously enter text for entity recognition until they decide to + exit. + +11. Analyze the text. + + ```python + input_text_score = sia.polarity_scores(input_text)["compound"] + input_text_classification = 0 if input_text_score > threshold else 1 + ``` + +12. Print the VADER Classification Report and the sentiment analysis. 
+ + ```python + print(f"Accuracy: {accuracy:.2f}") + print("\nVADER Classification Report:") + print(report_vader) + + print(f"\nTest Text (Positive): '{input_text}'") + print(f"Predicted Sentiment: {'Positive' if input_text_classification == 0 else 'Negative'}") + ``` + +13. Create `requirements.txt`. The sample application already contains the + `requirements.txt` file to specify the necessary packages that the + application imports. Open `requirements.txt` in a code or text editor to + explore its contents. + + ```text + # 01 sentiment_analysis + nltk==3.6.5 + + ... + + # 03 text_classification + scikit-learn==1.3.2 + + ... + ``` + + Both the `nltk` and `scikit-learn` modules are required for the text + classification application. + +## Explore the application environment + +You'll use Docker to run the application in a container. Docker lets you +containerize the application, providing a consistent and isolated environment +for running it. This means the application will operate as intended within its +Docker container, regardless of the underlying system differences. + +To run the application in a container, a Dockerfile is required. A Dockerfile is +a text document that contains all the commands you would call on the command +line to assemble an image. An image is a read-only template with instructions +for creating a Docker container. + +The sample application already contains a `Dockerfile`. Open the `Dockerfile` in a code or text editor to explore its contents. + +The following steps explain each part of the `Dockerfile`. For more details, see the [Dockerfile reference](/engine/reference/builder/). + +1. Specify the base image. + + ```dockerfile + FROM python:3.8-slim + ``` + + This command sets the foundation for the build. `python:3.8-slim` is a + lightweight version of the Python 3.8 image, optimized for size and speed. + Using this slim image reduces the overall size of your Docker image, leading + to quicker downloads and less surface area for security vulnerabilities. This + is particularly useful for a Python-based application where you might not + need the full standard Python image. + +2. Set the working directory. + + ```dockerfile + WORKDIR /app + ``` + + `WORKDIR` sets the current working directory within the Docker image. By + setting it to `/app`, you ensure that all subsequent commands in the + Dockerfile (like `COPY` and `RUN`) are executed in this directory. This also + helps in organizing your Docker image, as all application-related files are + contained in a specific directory. + +3. Copy the requirements file into the image. + + ```dockerfile + COPY requirements.txt /app + ``` + + The `COPY` command transfers the `requirements.txt` file from + your local machine into the Docker image. This file lists all Python + dependencies required by the application. Copying it into the container + lets the next command (`RUN pip install`) install these dependencies + inside the image environment. + +4. Install the Python dependencies in the image. + + ```dockerfile + RUN pip install --no-cache-dir -r requirements.txt + ``` + + This line uses `pip`, Python's package installer, to install the packages + listed in `requirements.txt`. The `--no-cache-dir` option disables + the cache, which reduces the size of the Docker image by not storing the + unnecessary cache data. + +5. Run additional commands. + + ```dockerfile + RUN python -m spacy download en_core_web_sm + ``` + + This step is specific to NLP applications that require the spaCy library. 
It downloads the `en_core_web_sm` model, which is a small English language model for spaCy. While not needed for this app, it's included for compatibility with other NLP applications that might use this Dockerfile. + +6. Copy the application code into the image. + + ```dockerfile + COPY *.py /app + COPY entrypoint.sh /app + ``` + + These commands copy your Python scripts and the `entrypoint.sh` script into + the image's `/app` directory. This is crucial because the container needs + these scripts to run the application. The `entrypoint.sh` script is + particularly important as it dictates how the application starts inside the + container. + +7. Set permissions for the `entrypoint.sh` script. + + ```dockerfile + RUN chmod +x /app/entrypoint.sh + ``` + + This command modifies the file permissions of `entrypoint.sh`, making it + executable. This step is necessary to ensure that the Docker container can + run this script to start the application. + +8. Set the entry point. + + ```dockerfile + ENTRYPOINT ["/app/entrypoint.sh"] + ``` + + The `ENTRYPOINT` instruction configures the container to run `entrypoint.sh` + as its default executable. This means that when the container starts, it + automatically executes the script. + + You can explore the `entrypoint.sh` script by opening it in a code or text + editor. As the sample contains several applications, the script lets you + specify which application to run when the container starts. + + +## Run the application + +To run the application using Docker: + +1. Build the image. + + In a terminal, run the following command inside the directory of where the `Dockerfile` is located. + + ```console + $ docker build -t basic-nlp . + ``` + + The following is a break down of the command: + + - `docker build`: This is the primary command used to build a Docker image + from a Dockerfile and a context. The context is typically a set of files at + a specified location, often the directory containing the Dockerfile. + - `-t basic-nlp`: This is an option for tagging the image. The `-t` flag + stands for tag. It assigns a name to the image, which in this case is + `basic-nlp`. Tags are a convenient way to reference images later, + especially when pushing them to a registry or running containers. + - `.`: This is the last part of the command and specifies the build context. + The period (`.`) denotes the current directory. Docker will look for a + Dockerfile in this directory. The build context (the current directory, in + this case) is sent to the Docker daemon to enable the build. It includes + all the files and subdirectories in the specified directory. + + For more details, see the [docker build CLI reference](/engine/reference/commandline/image_build/). + + Docker outputs several logs to your console as it builds the image. You'll + see it download and install the dependencies. Depending on your network + connection, this may take several minutes. Docker does have a caching + feature, so subsequent builds can be faster. The console will + return to the prompt when it's complete. + +2. Run the image as a container. + + In a terminal, run the following command. + + ```console + $ docker run -it basic-nlp 03_text_classification.py + ``` + + The following is a break down of the command: + + - `docker run`: This is the primary command used to run a new container from + a Docker image. + - `-it`: This is a combination of two options: + - `-i` or `--interactive`: This keeps the standard input (STDIN) open even + if not attached. 
It lets the container remain running in the + foreground and be interactive. + - `-t` or `--tty`: This allocates a pseudo-TTY, essentially simulating a + terminal, like a command prompt or a shell. It's what lets you + interact with the application inside the container. + - `basic-nlp`: This specifies the name of the Docker image to use for + creating the container. In this case, it's the image named `basic-nlp` that + you created with the `docker build` command. + - `03_text_classification.py`: This is the script you want to run inside the + Docker container. It gets passed to the `entrypoint.sh` script, which runs + it when the container starts. + + For more details, see the [docker run CLI reference](/engine/reference/commandline/container_run/). + + > **Note** + > + > For Windows users, you may get an error when running the container. Verify + > that the line endings in the `entrypoint.sh` are `LF` (`\n`) and not `CRLF` (`\r\n`), + > then rebuild the image. For more details, see [Avoid unexpected syntax errors, use Unix style line endings for files in containers](/desktop/troubleshoot/topics/#avoid-unexpected-syntax-errors-use-unix-style-line-endings-for-files-in-containers). + + You will see the following in your console after the container starts. + + ```console + Enter the text for classification (type 'exit' to end): + ``` + +3. Test the application. + + Enter some text to get the text classification. + + ```console + Enter the text for classification (type 'exit' to end): I love containers! + Accuracy: 1.00 + + VADER Classification Report: + precision recall f1-score support + + 0 1.00 1.00 1.00 1 + 1 1.00 1.00 1.00 1 + + accuracy 1.00 2 + macro avg 1.00 1.00 1.00 2 + weighted avg 1.00 1.00 1.00 2 + + Test Text (Positive): 'I love containers!' + Predicted Sentiment: Positive + ``` + +## Summary + +In this guide, you learned how to build and run a text classification +application. You learned how to build the application using Python with +scikit-learn and NLTK. Then you learned how to set up the environment and run +the application using Docker. + +Related information: + +* [Docker CLI reference](/engine/reference/commandline/docker/) +* [Dockerfile reference](/engine/reference/builder/) +* [Natural Language Toolkit](https://www.nltk.org/) +* [Python documentation](https://docs.python.org/3/) +* [scikit-learn](https://scikit-learn.org/) + +## Next steps + +Explore more [natural language processing guides](./_index.md). \ No newline at end of file diff --git a/content/guides/use-case/nlp/text-summarization.md b/content/guides/use-case/nlp/text-summarization.md new file mode 100644 index 000000000..64a095797 --- /dev/null +++ b/content/guides/use-case/nlp/text-summarization.md @@ -0,0 +1,354 @@ +--- +title: Build a text summarization app +keywords: nlp, natural language processing, text summarization, python, bert extractive summarizer +description: Learn how to build and run a text summarization application using Python, Bert Extractive Summarizer, and Docker. +--- + +## Overview + +In this guide, you'll learn how to build and run a text summarization +application. You'll build the application using Python with the Bert Extractive +Summarizer, and then set up the environment and run the application using +Docker. + +The sample text summarization application uses the Bert Extractive Summarizer. +This tool utilizes the HuggingFace Pytorch transformers library to run +extractive summarizations. 
This works by first embedding the sentences, then +running a clustering algorithm, finding the sentences that are closest to the +cluster's centroids. + +## Prerequisites + +* You have installed the latest version of [Docker Desktop](../../../get-docker.md). Docker adds new features regularly and some parts of this guide may work only with the latest version of Docker Desktop. +* You have a [Git client](https://git-scm.com/downloads). The examples in this section use a command-line based Git client, but you can use any client. + +## Get the sample application + +1. Open a terminal, and clone the sample application's repository using the + following command. + + ```console + $ git clone https://github.com/harsh4870/Docker-NLP.git + ``` + +2. Verify that you cloned the repository. + + You should see the following files in your `Docker-NLP` directory. + + ```text + 01_sentiment_analysis.py + 02_name_entity_recognition.py + 03_text_classification.py + 04_text_summarization.py + 05_language_translation.py + entrypoint.sh + requirements.txt + Dockerfile + README.md + ``` + +## Explore the application code + +The source code for the text summarization application is in the `Docker-NLP/04_text_summarization.py` file. Open `04_text_summarization.py` in a text or code editor to explore its contents in the following steps. + +1. Import the required libraries. + + ```python + from summarizer import Summarizer + ``` + + This line of code imports the `Summarizer` class from the `summarizer` + package, essential for your text summarization application. The summarizer + module implements the Bert Extractive Summarizer, leveraging the HuggingFace + Pytorch transformers library, renowned in the NLP (Natural Language + Processing) domain. This library offers access to pre-trained models like + BERT, which revolutionized language understanding tasks, including text + summarization. + + The BERT model, or Bidirectional Encoder Representations from Transformers, + excels in understanding context in language, using a mechanism known as + "attention" to determine the significance of words in a sentence. For + summarization, the model embeds sentences and then uses a clustering + algorithm to identify key sentences, those closest to the centroids of these + clusters, effectively capturing the main ideas of the text. + +2. Specify the main execution block. + + ```python + if __name__ == "__main__": + ``` + + This Python idiom ensures that the following code block runs only if this + script is the main program. It provides flexibility, allowing the script to + function both as a standalone program and as an imported module. + +3. Create an infinite loop for continuous input. + + ```python + while True: + input_text = input("Enter the text for summarization (type 'exit' to end): ") + + if input_text.lower() == 'exit': + print("Exiting...") + break + ``` + + An infinite loop continuously prompts you for text + input, ensuring interactivity. The loop breaks when you type `exit`, allowing + you to control the application flow effectively. + + +4. Create an instance of Summarizer. + + ```python + bert_model = Summarizer() + ``` + + Here, you create an instance of the Summarizer class named `bert_model`. This + instance is now ready to perform the summarization task using the BERT model, + simplifying the complex processes of embedding sentences and clustering into + an accessible interface. + +5. Generate and print a summary. 
+ + ```python + summary = bert_model(input_text) + print(summary) + ``` + + Your input text is processed by the bert_model instance, which then returns a + summarized version. This demonstrates the power of Python's high-level + libraries in enabling complex operations with minimal code. + +6. Create `requirements.txt`. The sample application already contains the + `requirements.txt` file to specify the necessary modules that the + application imports. Open `requirements.txt` in a code or text editor to + explore its contents. + + ```text + ... + + # 04 text_summarization + bert-extractive-summarizer==0.10.1 + + ... + + torch==2.1.2 + ``` + + The `bert-extractive-summarizer` and `torch` modules are required for the + text summarization application. The summarizer module generates a summary of + the input text. This requires PyTorch because the underlying BERT model, + which is used for generating the summary, is implemented in PyTorch. + +## Explore the application environment + +You'll use Docker to run the application in a container. Docker lets you +containerize the application, providing a consistent and isolated environment +for running it. This means the application will operate as intended within its +Docker container, regardless of the underlying system differences. + +To run the application in a container, a Dockerfile is required. A Dockerfile is +a text document that contains all the commands you would call on the command +line to assemble an image. An image is a read-only template with instructions +for creating a Docker container. + +The sample application already contains a `Dockerfile`. Open the `Dockerfile` in a code or text editor to explore its contents. + +The following steps explain each part of the `Dockerfile`. For more details, see the [Dockerfile reference](/engine/reference/builder/). + +1. Specify the base image. + + ```dockerfile + FROM python:3.8-slim + ``` + + This command sets the foundation for the build. `python:3.8-slim` is a + lightweight version of the Python 3.8 image, optimized for size and speed. + Using this slim image reduces the overall size of your Docker image, leading + to quicker downloads and less surface area for security vulnerabilities. This + is particularly useful for a Python-based application where you might not + need the full standard Python image. + +2. Set the working directory. + + ```dockerfile + WORKDIR /app + ``` + + `WORKDIR` sets the current working directory within the Docker image. By + setting it to `/app`, you ensure that all subsequent commands in the + Dockerfile (like `COPY` and `RUN`) are executed in this directory. This also + helps in organizing your Docker image, as all application-related files are + contained in a specific directory. + +3. Copy the requirements file into the image. + + ```dockerfile + COPY requirements.txt /app + ``` + + The `COPY` command transfers the `requirements.txt` file from + your local machine into the Docker image. This file lists all Python + dependencies required by the application. Copying it into the container + lets the next command (`RUN pip install`) to install these dependencies + inside the image environment. + +4. Install the Python dependencies in the image. + + ```dockerfile + RUN pip install --no-cache-dir -r requirements.txt + ``` + + This line uses `pip`, Python's package installer, to install the packages + listed in `requirements.txt`. The `--no-cache-dir` option disables + the cache, which reduces the size of the Docker image by not storing the + unnecessary cache data. 
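+
+   Because the requirements file is copied and installed before the
+   application code, Docker can reuse the cached dependency layer on
+   rebuilds as long as `requirements.txt` is unchanged. The following
+   condensed sketch shows that ordering by combining the instructions from
+   steps 3 and 4 with the `COPY *.py /app` instruction explained in a later
+   step; the comments are illustrative and not part of the sample Dockerfile.
+
+   ```dockerfile
+   # Dependencies first: this layer is rebuilt only when requirements.txt changes
+   COPY requirements.txt /app
+   RUN pip install --no-cache-dir -r requirements.txt
+
+   # Application code last: editing a script doesn't invalidate the layer above
+   COPY *.py /app
+   ```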
+ +5. Run additional commands. + + ```dockerfile + RUN python -m spacy download en_core_web_sm + ``` + + This step is specific to NLP applications that require the spaCy library. It + downloads the `en_core_web_sm` model, which is a small English language model + for spaCy. While not needed for this app, it's included for compatibility + with other NLP applications that might use this Dockerfile. + +6. Copy the application code into the image. + + ```dockerfile + COPY *.py /app + COPY entrypoint.sh /app + ``` + + These commands copy your Python scripts and the `entrypoint.sh` script into the image's `/app` directory. This is crucial because the container needs these scripts to run the application. The `entrypoint.sh` script is particularly important as it dictates how the application starts inside the container. + +7. Set permissions for the `entrypoint.sh` script. + + ```dockerfile + RUN chmod +x /app/entrypoint.sh + ``` + + This command modifies the file permissions of `entrypoint.sh`, making it + executable. This step is necessary to ensure that the Docker container can + run this script to start the application. + +8. Set the entry point. + + ```dockerfile + ENTRYPOINT ["/app/entrypoint.sh"] + ``` + + The `ENTRYPOINT` instruction configures the container to run `entrypoint.sh` + as its default executable. This means that when the container starts, it + automatically executes the script. + + You can explore the `entrypoint.sh` script by opening it in a code or text + editor. As the sample contains several applications, the script lets you + specify which application to run when the container starts. + +## Run the application + +To run the application using Docker: + +1. Build the image. + + In a terminal, run the following command inside the directory of where the `Dockerfile` is located. + + ```console + $ docker build -t basic-nlp . + ``` + + The following is a break down of the command: + + - `docker build`: This is the primary command used to build a Docker image + from a Dockerfile and a context. The context is typically a set of files at + a specified location, often the directory containing the Dockerfile. + - `-t basic-nlp`: This is an option for tagging the image. The `-t` flag + stands for tag. It assigns a name to the image, which in this case is + `basic-nlp`. Tags are a convenient way to reference images later, + especially when pushing them to a registry or running containers. + - `.`: This is the last part of the command and specifies the build context. + The period (`.`) denotes the current directory. Docker will look for a + Dockerfile in this directory. The build context (the current directory, in + this case) is sent to the Docker daemon to enable the build. It includes + all the files and subdirectories in the specified directory. + + For more details, see the [docker build CLI reference](/engine/reference/commandline/image_build/). + + Docker outputs several logs to your console as it builds the image. You'll + see it download and install the dependencies. Depending on your network + connection, this may take several minutes. Docker does have a caching + feature, so subsequent builds can be faster. The console will + return to the prompt when it's complete. + +2. Run the image as a container. + + In a terminal, run the following command. + + ```console + $ docker run -it basic-nlp 04_text_summarization.py + ``` + + The following is a break down of the command: + + - `docker run`: This is the primary command used to run a new container from + a Docker image. 
+ - `-it`: This is a combination of two options: + - `-i` or `--interactive`: This keeps the standard input (STDIN) open even + if not attached. It lets the container remain running in the + foreground and be interactive. + - `-t` or `--tty`: This allocates a pseudo-TTY, essentially simulating a + terminal, like a command prompt or a shell. It's what lets you + interact with the application inside the container. + - `basic-nlp`: This specifies the name of the Docker image to use for + creating the container. In this case, it's the image named `basic-nlp` that + you created with the `docker build` command. + - `04_text_summarization.py`: This is the script you want to run inside the + Docker container. It gets passed to the `entrypoint.sh` script, which runs + it when the container starts. + + For more details, see the [docker run CLI reference](/engine/reference/commandline/container_run/). + + > **Note** + > + > For Windows users, you may get an error when running the container. Verify + > that the line endings in the `entrypoint.sh` are `LF` (`\n`) and not `CRLF` (`\r\n`), + > then rebuild the image. For more details, see [Avoid unexpected syntax errors, use Unix style line endings for files in containers](/desktop/troubleshoot/topics/#avoid-unexpected-syntax-errors-use-unix-style-line-endings-for-files-in-containers). + + You will see the following in your console after the container starts. + + ```console + Enter the text for summarization (type 'exit' to end): + ``` + +3. Test the application. + + Enter some text to get the text summarization. + + ```console + Enter the text for summarization (type 'exit' to end): Artificial intelligence (AI) is a branch of computer science that aims to create machines capable of intelligent behavior. These machines are designed to mimic human cognitive functions such as learning, problem-solving, and decision-making. AI technologies can be classified into two main types: narrow or weak AI, which is designed for a particular task, and general or strong AI, which possesses the ability to understand, learn, and apply knowledge across various domains. One of the most popular approaches in AI is machine learning, where algorithms are trained on large datasets to recognize patterns and make predictions. + + Artificial intelligence (AI) is a branch of computer science that aims to create machines capable of intelligent behavior. These machines are designed to mimic human cognitive functions such as learning, problem-solving, and decision-making. + ``` + +## Summary + +In this guide, you learned how to build and run a text summarization +application. You learned how to build the application using Python with Bert +Extractive Summarizer, and then set up the environment and run the application +using Docker. + +Related information: + +* [Docker CLI reference](/engine/reference/commandline/docker/) +* [Dockerfile reference](/engine/reference/builder/) +* [Bert Extractive Summarizer](https://github.com/dmmiller612/bert-extractive-summarizer) +* [PyTorch](https://pytorch.org/) +* [Python documentation](https://docs.python.org/3/) + +## Next steps + +Explore more [natural language processing guides](./_index.md). \ No newline at end of file diff --git a/content/scout/policy/_index.md b/content/scout/policy/_index.md index d2882eb15..fec0430a3 100644 --- a/content/scout/policy/_index.md +++ b/content/scout/policy/_index.md @@ -293,14 +293,22 @@ An asterisk (`*`) matches up until the character that follows, or until the end of the image reference. 
Note that the `docker.io` prefix is required in order to match Docker Hub images. This is the registry hostname of Docker Hub. +You can also configure the policy to allow only supported tags of Docker +Official Images. When this option is enabled, images using unsupported tags of +official images trigger a policy violation. Supported tags for official images +are listed in the **Supported tags** section of the repository overview on +Docker Hub. + This policy isn't enabled by default. To enable the policy: 1. Go to the [Docker Scout Dashboard](https://scout.docker.com/). 2. Go to the **Policies** section. 3. Select the **Unapproved base images** policy in the list. 4. Enter the patterns that you want to allow. -5. Select **Save and enable**. The policy is now enabled for your current - organization. +5. Select whether you want to allow only supported tags of official images. +6. Select **Save and enable**. + + The policy is now enabled for your current organization. Your images need provenance attestations for this policy to successfully evaluate. For more information, see [No base image data](#no-base-image-data). diff --git a/content/trusted-content/insights-analytics.md b/content/trusted-content/insights-analytics.md index 3218830d8..509532e0a 100644 --- a/content/trusted-content/insights-analytics.md +++ b/content/trusted-content/insights-analytics.md @@ -8,7 +8,7 @@ aliases: --- Insights and analytics provides usage analytics for Docker Verified -Publisher (DVP) and Docker-Sponsored Open Source (DSOS) images on Docker Hub. This includes self-serve access to metrics as both raw data and summary data for a desired time span. You can view the number of image pulls by tag or by digest, and get breakdowns by geolocation, cloud provider, client, and more. +Publisher (DVP) and Docker-Sponsored Open Source (DSOS) images on Docker Hub. This includes self-serve access to image and extension usage metrics for a desired time span. You can also display the number of image pulls by tag or by digest, and get breakdowns by geolocation, cloud provider, client, and more. > **Tip** @@ -18,11 +18,11 @@ Publisher (DVP) and Docker-Sponsored Open Source (DSOS) images on Docker Hub. Th to learn more about the programs. { .tip } -## View the analytics data +## View the image's analytics data You can find analytics data for your repositories on the **Insights and analytics** dashboard at the following URL: -`https://hub.docker.com/orgs/{namespace}/insights`. The dashboard contains a +`https://hub.docker.com/orgs/{namespace}/insights/images`. The dashboard contains a visualization of the usage data and a table where you can download the data as CSV files. @@ -53,6 +53,12 @@ preserves the display selections you made. When someone follows the link, the **Insights and analytics** page opens and displays the chart with the same configuration as you had set up when creating the link. +## Extension analytics data + +If you have published Docker Extensions in the Extension marketplace, you can also get analytics about your extension usage, available as CSV files. +You can download extension CSV reports from the **Insights and analytics** dashboard at the following URL: +`https://hub.docker.com/orgs/{namespace}/insights/extensions`. If your Docker namespace contains extensions known in the marketplace, you will see an **Extensions** tab listing CSV files for your extension(s). 
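+
+Once downloaded, the CSV reports can be inspected with any data tooling. As a
+minimal sketch, assuming a downloaded report saved as
+`extension-core-summary.csv` (a placeholder name) and using pandas as one
+convenient option, the column names below match the extension summary data
+points documented later on this page:
+
+```python
+# Minimal sketch: inspect a downloaded extension core-summary CSV report.
+# "extension-core-summary.csv" is a placeholder for the file you download
+# from the Insights and analytics dashboard.
+import pandas as pd
+
+df = pd.read_csv("extension-core-summary.csv")
+
+# Totals over the rows in the selected time span, using documented columns.
+print("Installs:", df["Installs"].sum())
+print("Uninstalls:", df["Uninstalls"].sum())
+print("Updates:", df["Updates"].sum())
+```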
+ ## Exporting analytics data You can export the analytics data either from the web dashboard, or using the @@ -94,7 +100,7 @@ points and with different structure. The following sections describe the available data points for each format. The **Date added** column shows when the field was first introduced. -### Raw data +### Image pulls raw data The raw data format contains the following data points. Each row in the CSV file represents an image pull. @@ -118,12 +124,12 @@ represents an image pull. | Domain | Request origin domain, see [Privacy](#privacy). | October 11, 2022 | | Owner | The name of the organization that owns the repository. | December 19, 2022 | -[1]: #action-classification-rules +[1]: #image-pulls-action-classification-rules [2]: /registry/spec/api/ [3]: /admin/organization/orgs/ [4]: /docker-hub/repos/ -### Summary data +### Image pulls summary data There are two levels of summary data available: @@ -142,7 +148,7 @@ span: | Version check | HEAD by tag, not followed by a GET | January 1, 2022 | | Owner | The name of the organization that owns the repository. | December 19, 2022 | -### Action classification rules +### Image pulls action classification rules An action represents the multiple request events associated with a `docker pull`. Pulls are grouped by category to make the data more meaningful @@ -174,6 +180,37 @@ pulls. To provide feedback or ask questions about these rules, | HEAD | digest | GET by same digest | Pull by digest | Image is single-arch and/or image is multi-arch but some part of the image already exists on the local machine | | | HEAD | digest | GET by same digest, then a second GET by different digest | Pull by Digest | Image is multi-arch | | +### Extension Summary data + +There are two levels of extension summary data available: + +- Core summary, with basic extension usage information: number of extension installs, uninstalls, and total install all times + +The core-summary-data file contains the following data points for the selected time +span: + +| Data point | Description | Date added | +| ----------------- | ------------------------------------------------------- | ----------------- | +| Installs | Number of installs for the extension | Feb 1, 2024 | +| TotalInstalls | Number of installs for the extension all times | Feb 1, 2024 | +| Uninstalls | Number of uninstalls for the extension | Feb 1, 2024 | +| TotalUninstalls | Number of uninstalls for the extension all times | Feb 1, 2024 | +| Updates | Number of updates for the extension | Feb 1, 2024 | + +- Premium summary, with advanced extension usage information: installs, uninstalls by unique users, extension opening by unique users. 
+ +The core-summary-data file contains the following data points for the selected time +span: + +| Data point | Description | Date added | +| ----------------- | ------------------------------------------------------- | ----------------- | +| Installs | Number of installs for the extension | Feb 1, 2024 | +| UniqueInstalls | Number of unique users installing the extension | Feb 1, 2024 | +| Uninstalls | Number of uninstalls for the extension | Feb 1, 2024 | +| UniqueUninstalls | Number of unique users uninstalling the extension | Feb 1, 2024 | +| Usage | Number of openings of the extension tab | Feb 1, 2024 | +| UniqueUsers | Number of unique users openings the extension tab | Feb 1, 2024 | + ## Changes in data over time The insights and analytics service is continuously improved to increase the @@ -198,11 +235,11 @@ consumers of content on Docker Hub remain completely anonymous. > analytics data. { .important } -The summary dataset includes unique IP address count. This data point only +The image pulls summary dataset includes unique IP address count. This data point only includes the number of distinct unique IP addresses that request an image. Individual IP addresses are never shared. -The raw dataset includes user IP domains as a data point. This is the domain name +The image pulls raw dataset includes user IP domains as a data point. This is the domain name associated with the IP address used to pull an image. If the IP type is `business`, the domain represents the company or organization associated with that IP address (for example, `docker.com`). For any other IP type that's not diff --git a/data/redirects.yml b/data/redirects.yml index 06da0885c..504bdd25e 100644 --- a/data/redirects.yml +++ b/data/redirects.yml @@ -661,8 +661,9 @@ # Build links (external) "https://www.docker.com/build-early-access-program/?utm_campaign=onboard-30-customer-zero&utm_medium=in-product-ad&utm_source=desktop_v4": - /go/build-eap/ -"https://www.docker.com/products/build-cloud/": +"https://www.docker.com/products/build-cloud/?utm_campaign=2024-02-02-banner_dbc_ga&utm_medium=in-product-ad&utm_source=desktop_v4": - /go/build-ga/ +"https://www.docker.com/products/build-cloud/?utm_campaign=2024-02-02-dbc_cli&utm_medium=in-product-ad&utm_source=desktop_v4": - /go/docker-build-cloud/ # CLI backlinks diff --git a/data/toc.yaml b/data/toc.yaml index 66e437c96..b9cd14227 100644 --- a/data/toc.yaml +++ b/data/toc.yaml @@ -165,6 +165,20 @@ Guides: title: Develop your app - path: /guides/use-case/genai-video-bot/ title: Video transcription and chat + - sectiontitle: Natural language processing + section: + - path: /guides/use-case/nlp/ + title: Overview + - path: /guides/use-case/nlp/language-translation/ + title: Language translation + - path: /guides/use-case/nlp/named-entity-recognition/ + title: Named entity recognition + - path: /guides/use-case/nlp/sentiment-analysis/ + title: Sentiment analysis + - path: /guides/use-case/nlp/text-classification/ + title: Text classification + - path: /guides/use-case/nlp/text-summarization/ + title: Text summarization - sectiontitle: Develop with Docker section: @@ -2034,7 +2048,7 @@ Manuals: - path: /admin/organization/activity-logs/ title: Activity logs - path: /admin/organization/general-settings/ - title: General settings + title: Organization settings - sectiontitle: Company administration section: - path: /admin/company/ diff --git a/layouts/_default/_markup/render-image.html b/layouts/_default/_markup/render-image.html index b9d646b15..47d9fd60a 100644 --- 
a/layouts/_default/_markup/render-image.html +++ b/layouts/_default/_markup/render-image.html
@@ -15,6 +15,7 @@ class="cursor-pointer hover:opacity-90" > {{ .Text }} {{ .Text }}
{{ partial "utils/css.html" . }} {{ $theme := resources.Get "js/theme.js" | js.Build (dict "minify" true) }}
diff --git a/layouts/shortcodes/inline-image.html b/layouts/shortcodes/inline-image.html
index 451501233..a281c91cc 100644
--- a/layouts/shortcodes/inline-image.html +++ b/layouts/shortcodes/inline-image.html
@@ -7,6 +7,7 @@ {{ end }} {{ $alt }}
zvoj8W__X?9D6m3szs%kwNz4xn9D;t6?PDC)1|1Qs+@|G}b|UN)~}Zyt#fw(6ZAJ{f*heAhMYzgT8&2`BT+VpqgIbHjt^m~<9OBqRYcEp(zmHz z6(xD;C8;q9-U#2UdLb|ZKm@f4K=7(I7zM$#XY#8puOPX1XE$vU4Ai9;o-`5(1BtEc zr-7@u&{gJVB@NjsUwBkPX|sSPn7?ewBis^Zhh&y< zeg0FRGm(2>{>Q(BYVV@F5VaMHB@WW_>Gn5WHH`qOhv7d$-6XUQ{K=M2txIJprMf|)GPJ!`qO*ZzDq4ibGLBz-#i#Sp z6=Jo#UtqJ^jOL>zYA_v^gXHZD2c*&&{q|EX3%0G{690f8|4aH)*O>UXLs8^^U{iah z*lyQ6>i#8PQ*}6WVF^`b8MbLDSFJhtzZJ5IG<1_xwiqOl~i@if3bHDTGc zcK!T6&YcGutw)|de?{#R``i@5)cyG{v5*wuUnnZp>rF3l&*HYqVhtqf7i01szmeKZ zz@HO;YXlUP1=xi@xUnlw|L=;IkhRhyw_@(%88m0ytZws{Cpk@93&pSG8l-x{_^K!G{q$7;{F`)|o+{Sn_Pxx`H23`d3~~GTm*0%NNeC z{@;wWMf1b1RR6WJ{MQclZ+p8hG=hO3Cc2`9kxmbA7=3;eba`Wa%?|Lt-MW3-EnN`* zXAB?6f5zbd--f34ncN7p+SBpE75GoAQ`4;#FMoqS*}>BR_}5|o$v|nd&cKhNmyuiT z+`yn~D@18C%?k!VD2?$q;syA6JO%@8XY>PY^A?1^kI8teFXVqB?Ewn=cQ&y8J6quY z*nd1lzgqke{+&mm|Kv_wqqt-P+fEGM5g$3(|5=FXpC;WlKmZZu#2MaUFwRbd_}SkwZ{=&R)O%>GQ7; z?e&m4E7rG?QqCP&u_z7`_6m6IjnE-pV0J?S{NR3<8z_cRyw40DmOp^CS?jP zmp0;L(ta?8JEf*WM}}I6!mI&9?ggNvLB&UA2;Od2d5kd8yF~mrJ~imO6hsMb_DXVa%HoEzdA7)M aFNHoXF-imQ`S3|#DSpqz#_j?E0Qg_QO0@9+ literal 0 HcmV?d00001 diff --git a/static/assets/fonts/RobotoFlex_LICENSE.txt b/static/assets/fonts/RobotoFlex_LICENSE.txt new file mode 100644 index 000000000..6ba3f606c --- /dev/null +++ b/static/assets/fonts/RobotoFlex_LICENSE.txt @@ -0,0 +1,93 @@ +Copyright 2017 The Roboto Flex Project Authors (https://github.com/TypeNetwork/Roboto-Flex) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +https://openfontlicense.org + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. 
+ +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
diff --git a/static/assets/fonts/RobotoMono-Italic.woff2 b/static/assets/fonts/RobotoMono-Italic.woff2 new file mode 100644 index 0000000000000000000000000000000000000000..fd16a7138e16c38924053516a3b1b3d015ce1420 GIT binary patch literal 14400 zcmV-GIKRhtPew8T0RR91060JZ4*&oF0A@@805{|S0RR9100000000000000000000 z0000Q78@WOHU?k-fqW4N3W2T|frfJngJu8$HUcCAgK7jI1&TBWg;xv*8@NxiqJFSm z#-b>hoKE(COKyyzHG=jXn3SZ%a@!+xH)@2UG{+624`CA}TC^gyFkASLenB&cN#Uzy zM!)ff`#YVbn-;g5o2u>+TKb6B|Jv_&&+grQUkx#x&TG&Z5<|l`p_W7wCF7Nbhhoy8 z;q?vA&#m_dEQngMQKLqTjg1_m{wj!VurYF@TaaiGgHXUgKrsqu0M;pXol&4ydUty0 zMGGNbc-JJ&+fRl%!+{QzhTrh~_xr(q?fVdu1l^KKB|{T$B3j+5Wrk3^(arz#&#_Ra zX?r63Wk7|1xAHXck8^MR8z)03yx$ZlY5S4fSOOo*L99O~Ps#gxoz`A;Gyh3}LoaZ7 zR8fyb^Q0$7N)&dM(6WDHS2rWKfH85xb_neylC0{A^7?CVxB{Y=b0#*mK`fwi)1=1d zUr1m5nEr&eMWb--{%eoqk0KGEF(Pst7E%qW7tFLd@|qP@CBSn-Rc`UveXF6S zY<6s(Hq1Q^9l#8!s}%&mWKkvGQC?H}+&of3d>fy;W*XX4RSh8i`v1y$9syuW7R`f7 z;#p$YT_e4U+@p!Z5k_Mp_ zMWX*^V>&8`i80fH0@Q%M3i2uMx|bv&>qJ!0+_hRQ-|e zlEIJT@vp5pGn7D`MfyW^Dq>9>jCpiWOCuVt0Wnm-bEZD>2^ENy3z;%NKu}mYsHD;w zxdrvd%n}jEi7qtjc(iTK7Y~UO+5$L-h@k~onn&Um?K(N;=r3^CO+OH!q6LdK>Y?o7 zic;OBdByFqN~9&mTVw5n%!y8Bo_t$pO;;`(swLX71hPX_%mPN9<#DDykftwUr);I!FVYnJ0i6 zz9Yr6`vQ2kdTUMTN%^Y~rfo0aZ>L!|OwRm->w%ex(-00Cz>Rej?*VF&clq3kgY?zXKWC)@3YiwVo<_fRfBC#`R6Befvn^zUxiCDj(<5Yw<~H&&_WWMb5&tP?E* z;wV)XGv0CN>QI&6q{56Ztyb$RJt_^VhB>; z5u5Z)gai38X)vj)u!PXualmmIK*@p>DyjrH+w5o+`kBTpgyBQeU5>F*I7MJ0*(1$K z$zN@++N?{=5d1Jr<}{!V(ohq5^+y$~rnhv?d-X$*5$@VPe1b#Hv;mzNpbu&sMMJ=0 z61Yg*XuBcpbnd+YelMHDpM=yYK1PO#|GKX3gIk&gdth!@hOjd3;~oK7gYF^U7n(H#Mr3#;JEz*UZND}}y5(Ey@CQ5X8|+gXSc5J9X0 zWj_PpB>7Gb#6aSVkFgt%;npbJ^fK=l^J$=SMtcDqzT8&95Q%Z_13yPF7>Zm64@nIG zO}#ok!rX&!z96DjGjG|lc3Ok7i3w2w6L8(s>eG!o_YTdqsbsrLb2_}LEg^PP^7BPg z@drpUQo31iX-M_?w8dfK9lJH&<@u&I`h(;~R+qlo_;zjE<@pCg&C5lOL2=}yu|P^( z$g$cmI7}4N61a8UXXMS~<<<~5(;AC@+eXy@5U{S!XIMjJDQiioy{vR%h1S>RTuSs< zax;ke@G}o_(hHf<1sp8bUGOwo2QHGpL)=wqNWEgb8x&Bq2K6A|@uD2dc5%_Z%N;AN zS5X7gAfl8OzIeR7K#sz!U#WjX+WJ6VSN7XwDN~Y zg4)XPE3vMS4ooV|V;W-XmB~0vw@+Hic?P^vNqZ8Q02!rI6=zBvf<7(5xqgH#R_7^} zXUp3rXN=F`XYv@k(emQxUM|qL6GOUp1~?;~X-yVdwb@%gt3)V(vEoS zOvu~h^kK^`cORe%aCl)h>_=|YvP*O7AutYDnYn)=vIpERQWS5L}nt<_A zt%0jUIH>3r<*d+rqb<7+EE;WW`*=5yV5E9=o88dp+rnfGRFcYl7KvoMR!hA)f#vbJ zGdFvH4)_p!RasN>;$==VxVxML+WC`P(u!$7yB5%4oVMR$&_lCvT9Qmo`s&jX&#sPQ zQVUt(kTvb>G#RDxUN6_5Hbs>Ik2Ww1%wb?&fxJ`M0R=p)FykJFA>}*YkZ^d*>Rc)= zq|?Fs@{^{}Nl3Q>??n?PfmoNJ3Vt?4!wuyT?Al1SGe;-Am$PvsjSm{0paWHuX!{ow z%ZCbzj3VL$Wh7|*(0{fw4^%ddA_4&k^qq2>%f`FJH=s44j5C5_pb<2@(`yJZd+wtl{_;v>yf8tafVY-YQX|k zdOmjpxsTnk&9a>-TWCC3 z$Cx=;G{rgU^)&u%$$K=?v>qbediz(dq~WDk-8I)E!E!(7QxPc)fus$D6AwR(7-p+T zC_zRneXa`GAD1PT)=@baw-S`*@n=V2Q_AGuj81GF)+&g)LaeRpQK|Qm>Q(rmCjQeM zYqPO7SY|*lBJ>tVZI{d9mr5F2zmTleVZ#O0zLJFtayGP5h8lZeBwnkdm7YFz>hx zC;Y!nWxsJ@y*y?SkIg&nahZ*~h$M-p`vDM(Oz`g}GT~R}D%BLRDW{LCH$lTA(k=WT zAL@+ZUR}Suvt6Psh=+}ajk8@p{|iibMfYe+rYY}EK0dxVZeqHmKTlLgYRXVDChkMT z_xM)c?VF5~&;?*}L~Fb99CZf9uLRr>+PT*c&DRScca!a1-B?vb%B;Sp+@oLS%jO-8RebC`LJZ#$K z2F~2m;5k2qzg=|pI6mO{NntxxOxKPD!b$(MWFg?kA?#8?CaWaHQ|K@s$1$X_M3&2K z-!57um>8~r&Xf_Rf$IKymIQ=IK^ILie0-b*O%5Hg=BRdst#^nyi_A&i5e`vjiV$dx zwN+?Z3Wo^OcLwzL&+?C+PGHpBnK zRo_XV_A^&m^`G+#90Z?hv1&NBvdW|#%H4yioyF|i^7{|VRyFIb!~(=a;E z-)oUcoR^a5|69AMk}ZmdyDOoUCeo{BxyLZ$HCFrGyk8DcpYOrGylrv7OcT)c=0U4m z>a?Jy|NjM)0R3<;QXWRizI-{EVZ4_|(});(MMy=ML$2#iKfIW`M;b;!(<)>lntnd9)-UBfkh0OUSrGo?oaBnIPu~ z0Q@d4o=1My+HRent8eq6ccJrPdJP%~N-YjNzPAg?hK#~kPGs@*uJ_3)S zad-;m{Q1MjgV%Z@5b8ygBClNoMTMqo%wTaUt~K>!eo0d*2-vyGzJ9ZfKC-13fMglj zLSuRsqemO)XT3F4ZPmuuP)7M|{2F3>-yORjBpolS9p+*eY=_v4-N3G&d{~-5ZpsMQ 
z)(0oTiTP)}a;dq&w?U9fYQ=>%U}P)TO{+J?h0@D+#`RO@1l++wjOf2kQS24$#>ubC z5*(}6iJpFpT_;;>U;~fp5jwA{43RNXJX&x?+>!1uUp1V*bH;a1+?Zl7pkuA1duFA3 z`Y7}Lapz}a&-c5|13Q}2%thBD)*8X4U|}iTCWnDl*k9aehvcq20&;z3m;w2WHOPL2KIJAgeZSxyWX?ZxJ!OAp2PDrlH8Vk#*z zR8EUXcgEx#TuKb+8NubEiuNZp#=Ac(A?VP2p6-RI3J+G<)Ecp@JFdq5^;W@W!3=Ta z>{q+F73Keb;Psm+klLUDkDx(z5|jfOVLpc+k+chweH5U%tg_y^Qa=&IRr*X#;Zs!2 zobry2^psU3bNm`WHer!B_JhNjZeP9>(@V>P3Oc(=s4&!~^)fDyQ^Sq|)6}qGe(=U{ z9h3zf;_%r#cH$H|C&VU8mLlPKg{H?OO9FD_s1(MqDRzLPgHph*FgS=CoVXE_b_9&E z()0=09+T|}o&#-mDM=`pqrbCI;T8o#m)@CoUb({h{5`I&i=&+7C@+ylnD%3Zo%g|i zFE&scE141qhIC<@x7d~GP+Ue+d23~u1Zq#D9zN%s;v@_e~{Dgy*k2^)BV;8)Q>)xQ+yWFDF6cT)n( z@b@sJifSphtV$}QH|h1I;UWkUd$+baDT|AegIkMYCF$s68K;(f18OFev!n651eDjex4^<8Y&_+2>a zppPw0^3Mq5>!}qK1+`*5Rui8@(+34+dbd#&-fitT4RGFWYcoyU^*6>Gm)yeA(LGeHc8Y$5i(+er^PWKg=s3ey?zIh-F`f%G}%kIT#d0_K-xyGR&FIvw$YjG(lv#cobAlnw2J#cWA<^=7{$nua}V zm1wDirmf^wil}a;+fcXx@IkkGH@+^d);%U3R4}DF@6xhTROgI^kUr)vaTBL?Oiru| zQ9}|ak45As;Qz9!WSh&z#+;S$nLWyZVh<0wTkW6!Xz2Q-Jio4uG1+-}&XrXhq!^F1 zlKuN%qOV(L+xk;sdqmNPE0XC{d|);nhcB2;Qe)Nc-uWqR-juq%xm$O!^4udoCfDQ~ zbA?*vLPTmnVC4#W!$HdsU#-oJNA z$SsP{ojFtNss~)d@q-9gPUYUc6#-(zl9o9?=aeH9nw6Bf-RgF!sn3hfo*pjpOvKPq(}V5kd@vtsE&pT>H+m?4dsvCgzEGEcPFh7%EF>b3(*)8V;L1Q;LP3}lE( z|1Mr&%8|cKI?z0uQVS8i3+<4iujuTf#)B>ZB0fzyZjDQLu-kDw&E}CY$8oDsX96Mv zJIPxWn&pFz$5C1Xum#*y}5q{z+oVmk@H zEx@*ylZ04j`ga+aw|QjTUsA~{sob|-XkX1FuVxZpn{k>{SSFUs9VfJrs8Lm2jyi}6 zF`7U4?I62+@4hl77rLtrn42Tz5N82M4W96G92V%IT0F5-dnBl*jbxP#(c-E!1Xudb8fB1tca8}9JDGA zD9<{bFXWP|qzlHd6=pJ27JF-5?LZStTf5OyS#K z>!~mZt!yZcNV#>Z)T!c^96!%spFGAqWDrjM@N^fP33Sj=mLyQZ${ax&%yiMKb8^-x zq~tppzUd|YN}Z0Ka%iTIlCYHG|Lr4}9N*)J^r&r(edGLeC>c5wA_^78_iD!AWt$LGQaTbY}ksKGc}t9rLTENt$QSZbk6LGgJ>!0O<7F2C*O`ZrqmQvqgN>)K zZy$M-g9gxf^Ek=OIAinn5jGAez}v^$u$?C;xn{X0{1f|tPj=n?@M~UIUA|i!7CaYR zqd&BJ_8RCOoqKxGuDU1PcTWb^>%rE?;GeBwrp>sGZKUy;(1$B(Z{x$Re;!S#9pRvfeFC<_qKbi$Ha&KEdaisK5YAqqYwy+zujj3)}k4(7e~n> zO{5VJW_q>_bMrC={5x-Z36%Dze@APAx?SjN1C)>yY@+@Fdutoe%41+F3--jRu#+Ic z@#INztjaR!7Km)LZX3)E%P(LSn+Q$B%*reC4KzOZrG^r9$g76V@hNTN6YFa6Tv_l{ zq20;8+?7^gn*sItv>-CKJBEIaMg2R$5qCeDSSYo;`b#JsaGrwu?f!NN+XC)+i>=cj+!DpLZUl;^T>ncZH4x zh5u$B0i^)f1}i)c>~~cZtw@nc^-lT}S-4bIlHt#*)?Bh*ke zVj)o|5h+rxSe44erX^E0Jh!MlCNp zG^4ohELE1-&ZqsQ{?bh;

3|+IQ!izA!qDz8WQxq4OChLPQ~=T}d_`SEsar$B$$4 zdssn`G^X;@{_%IV-vb8B94zo0{kW0{gKV&!Gj~O?)iTpu zP9d#x2Q4nRBD0mxZUh$97nCqX>3dQgKjU4G&9H>o#T$24_y{01)E3X+<&{P0F6)b- z0>li=i{Nq+cU$FH2t0PV*O9NUZ`70ESg4IA7AF+HdgZxlkJb|-W)1Ay6o9QTipg^- zk28s%Hr(|5B*#s%B+%VSq0=QDE{s*@PXte_S@baWFfTkwpd#FzQuwIU7HNr9`)4cn zpq0dt@bD&KDwLon@19rifn68JaWc2_&m5VX%;lyh7R}&O?XhZjU@ZFQ2e7SZv^KA=#W*UOr{Rys^A|5Ip_e z=l93;rO=I^2K8Ny<{sPUZ0EpSbSWUz0jJ44tKhRq{rs{1O%HJ2L`(vUW1IsRb;gm~ ze>vV5xSKRJ80Uu`_CJ7r%hS~U#|m_D?V1`&prJcy2hi0%BI|;}1~qF@R2V(HmR8!l z(p3zKxnlsHMOR~Xz|kUMqQjyw)4*PoTsGdmSgBo|6KMn0J(oK}+{VH6VLmDk1T6d76bApvz z`@z8tZ&J+Dpu;{Y(k~Q=+ZO81p>v4xNzGYo)_jbD3hNt61H=%}*>`-{z1INE+6tfT zaV?gU&Lk$|7UQ4}Kaof|H~oUVYLCuSM$ls5=(P3e>6CUD#tsd3d`TxxY(OBSxB;fr zbM0EE*pwn2-~=Z`7Lot_D@mN1N$fvhaf~xtKWCAc8qt$QV1rDnzv)NsW}1T_H)`4X z$|{{r1uMzn&p*86CPx9J5p-HId1L|*L7VVzY`aoRu!dijEG*3Ba)V;PVch^YjV*w7 zrpCk(Htevt$eBY|V@6+w)TIzuOZ>2B@1X)^QTJ?d2U=yj-3_L|jS1k`YOfs50y#Ut zNnpyiFku}8V9$EnHQ@&ln^=?UNXIzSA5H|1Lz@_1)jIAD-DcK@IK36<_dZ&GiDCW0 z@3mp`(&md{?&y$?L2URO^dj*y`5fOaJU-f}&_t|#f z_pWqaac?0#Mq-Mo$ub*_%gBJIEmHA&v8yM0`XKA|T!hYRYRE3D9KRP8N1pKHdUBgI z0Iaq5jvh)OaQL4uTc9@^i7(IK?XXC}@3CJ!)!kP^d zGp9-(k}v$~KC(`O-{*rfk|%Yr`qlRG3AoCwVd+g8A4sykYp||%5i~ZzZa&U%bMp>f zkIuL^!vz+BijcMAf%QnZ!#3N6TUhYL#|dt3Il}sv7sv~YC(uQpGJO5|fJX37mif`g zv+C_9H8Z1NJG)32V*RtU{4;P^HNB(^#|&v%l%gah&B;*s)F33(W@SP#pKI;?c9!}g z%WBfV5l2X5$1JjB0+YN6D>%Gr^_Mutf4imc=;7Rd}?t+9$cq0+U4F{`2R%I2tGm+8YCh!yMI=g`Q%vl{7l(yu`Ctd_oYS zU_jpZjy!S)`Z}-~BLYDKDrL#KmeeLDFM8Nqg^}3|5=3ho$_Ljpuj~pCgE+8`$@SGW ztZmI~3lj*&EwpH{Aa@`S6|5% z&a#CK`1&TdX-ydd-w=Si1GHAN=pDq_`^L`SW;=faeC51?XsBP*%Nn!PuH%0R@m9{m zY;laPA5MJ#u8fedlzdamU8APAMb2^Oe={MH*yo-{ar?cVNmE56y%X_$~_L57RF3m zy0kYe8`43?e5G`WzhONpC)~c^;6%%ZNAaO=?t5zPvYrYEE-sIfljF78fYCi%cUcFr z<3#6Gn-Y)xrH`k3auRU(O=o>#@!lyb>t}bD4on0DoVdGm%8Ge%ap@ok zymzVylx5l3VJZ60H*O$#t|(m)Q6S?S_VxVc{<(IS5`ZPB*BZ5+1B*F~Ft%Tj4yBFw_)9??wBG02 z---%b_!l!^-d`98rHJHan*#fMtAdp-Id4B_`rK_RC7r$*nY?C;dDE51NbdG($tcz?#A3u;$jv&Fi=E_?G*T>5- z8A&s|j&TphW;KxTS~uB&Ny4OSj9iKwipvU&nxKCRcJ2o3&_An=MXn5TCpwY4@16UKNG`W?M>>*+?=C(E^iD0jWg6a6 zOB}7^j_-uW^~6#0w;J*YjXVOvv$hago6ZmTDPl~2r6p2)yp#NC8BWZ*#U6u0!G_Y! 
zprqY4FxPf5sXX#Gt@bZ&1R7Tj!cBg^ThQ<#D=BFn>qAq@bb1miA)DOTAEHX$`bF*^ zNbLXd$8UV1|1K?Uo{R_P)fzB~p*i|QNBx*ymhLq9)4%_kIgWAsG>S-rbArB{{PHFz z#l^%Q_zSPSz>Lr_!0{W-F3tetTJ z<6r}0v8Iw3FfwcN$cO_aU-2Z7lBl#9Oes2txhqvMA;>|90r>wB^G1+AA7(K>F55-H z;EzuKE?$UMG7uY|ym`ysNt0q*8QNDWa9@>o7Go+Gei+#Z-jAXActTbMq32mxej*^4 z)>u}X3Edk~6%|2ZNDj3JI9bLtp{15JNTFMUBCVV_kPfDk6TEWGS&MqDi+J^FfwOU7 zfNX9l^t7xS=5NaSt^`rmUOLl*BxTvgIc#~*_5g#~W~&E6yShrbd@ixqlGMg~Kh-w#sP zy1Fhpnr!G?5B!yru4U@d#?Sb@!H{h#CU@7adW;X3BVL-t&Fv zc-i%53;2l3iU|J-(77wwAK(j9Mj)`h{=DOAdU5vrm56qv9F<7x{rI{kNChQABLOqx zie608QLx%eblgho3Ih=(E!8GABZrwmCdbPYo^Zul4JmC+E$g2p=w|>v0J|tIppZvW zQ|!G+Bc8Eei^-#G)+OV$S-p3Je#koeNZk<-r5^n&FFhq)*YF#Dy>*d!vgJ;q#6g_6 za{ue8&@3ndl(oDdkGxz*9(kPj-DV&}lu#?H!ZN=5+obb2T1$2%+A{*7Ky*S`V?P_o z2+*d??QD!k#kW=>l?{sA&W2c%*tROuSRk}N zD`1<&J68);7Zf^`Ry^|@?LYacEmB*(DV9-w5a?Z)@UT4&Gfy3kFd)Sqo@S!qYX^{8 zS2O3m-ue}r)d81F#<}88(AiKn0py+tm4u2z(x=cxCt2z<2I09SidfB8Qx0!A2AfB$ICX9=D7)^5sHGalb1d89W&)?MqQ zEZ4aUb{`j$Xc3jPUqZIbZ=p!yKu>_P6{pQK-pw_sXM`iIts%v)>xb~E#QfDz81DI`C4=FO7&2yb|?`tt1%SSs#D*~t?sO&@w%{#Y; zmP*oE6GUlr93%#H;gp#g8czi3eCMu&Rv?8lsEk4?Udp7b8|{_JNgKF8v)T95X<3h1 zn!jW5AKzy(U@)n`VG*;3yt*lNakiMFH@M*Gvk8V>o4`U?;G*fQ64N}eY}d;V-tO>M z(v^|^$rKP8ME#fS>(>Oh*#oGI;FAp9>#FdQzq(Y-A~8Xn^1zVt_KgAqUFFxZJn9$keWSFh}W1exB`AhGG+d{>-_zntxqi7 zJ7rD(YH4X4W0FDNFKGvN>)w7kYQ^t=1RL^(7CZANvQr7l`p27wtb6=jV3ufjsWWf9 zM^9>RK%CVI6NK@}*QYz`YqV`^J-OQoCkz$2$kidmGLU&@p~wkh^iF3aQa{;Ka4{Zf>bw7~VW^ zJQfSG)~)kYzkZeE)wU2zCbaGexk8O5sz{U((iJ}o{&0sAPbygp+a9@o3d3{vi1Q*} z;h6I~hMag3={hcD`^oEQzPoz{YSB^n)nFSL_9Z$0Qeii-!XEyA|3i_@ z&*U+%e?#rYK>#Ql9Z+Qcs#Sa^ADZ0d#z{9R8A%eZSiyJkp($K%-1Ly^z47cXA3KP6 z=-|u-tf%*fFE;|-=Jdg1fHV6*@1&-Ne2mk>AKol|X-oz6kkL&o%O)g-eSDQw!8q^# zPi@xg>Xb&_m2NRT1@CRTTD#xoa$Z0sTq9wu$H zNs>~|2MTR-UIidmK^Q(p9NoVg+3&9IHVW`kWHWV}i zt&%u2OB%7!UGukbDfh=cbAYIGia6HCshy2m)afwRjq#Y8#bBqv6XHleYS_G=7y8xq zrMv^R!`wcuMmo3?v+x8UAGIa>k%SE^_)dNcGN+S!DH58mxr4ZIh0ujYmpkt}m zSis#;JT{uzgmfE*zpc3h|FuTkkf9yCxED`{teiAULuuO&>egdcs{uE?cdj# zmD`8BFKpd4KZhU(fAMlyPq+buemQWA-kA;OQax~E&eZ#drV4h`xZd?#?MhzWjz8Nn3Ir_ykzPO|rvDEmo|iH@tZ}0d8t|@)0O1eE zWo)Y8DV<^0TT{-Ma5dU9&Yex_8J8Z%a6>~wlMCdhd1|~Dn3?HX^bwwlKEfH%N0>Ey zoCylU-$d9g`U`7Bf1y+K7sv+7L*cLy=%DPq<#OGB-)RS0C+`Jr?yG^2#`o$Qw;h+TFV5Yo9lOx?9Mzi23Z5>^Q+X5UBw zGA`?YCp(?*NkjZ02Td~sX!1=r`!aW%H~@7!->wQxf12impWq>TnmU=-2KUqSR9?Lv z=x(<3m)O(FLUw0d!JbvFVE1;*o$RWjOE}It<)YXqWi`ZHL_KFz1w}B8b2m)q6fG^c zC*b+N*Aw+EJg_{zA^^OYpQ0-3u=`XRRc! 
z-e+mE3$exyk%jaweVykDqaZfhL=bUe;E;{nST10VD!b%fvrdvD(_3)>fJzYj#I=R*DQ!|-)NB%Dh7j*`l z{n)jF%qg`Abgloq)m!ml?(%fTR_q(=dZ5iOZFcX^c3iK(4e z;FgWG%tP2%V~W510M|S);A&K-#4U8NG#+@;>JzZ+`hz-;t#r{^GrAz_BT)&qoFkk7 ztfU1-h$hMN*mdwD<||*hiJt;5lRohl`#m9J!Aa_C#4=*H&5CI1O;yHr~aPO79#1qV1%AyNh6enUq0tU92|jFjqJUmMDM)3mb1 zoLmm2agxhKol?dTAVWbx)S-u82b1kW%dN*R80tZKDa zB70}VysF?<7_t>6@%vDY1qM+}KFEQK8W^6uwe%8JNp--FTgB7&T;MBLVTCG%@)f8- z5r~N;1Vy47pG#CkvWV2AFh#7RWmn+@RhAUxX|_?As}|J)=jKTDnff@B9jQ2_a|8&G zuTbfhuq=$Zla>hDv29`rNsuh2e1-P)OB0AV;p!sa1+mvLw%CWg?FuWml9S zGQNVbNUCjZA%n8<2A)GW0jRunD)+&<&e&F#P=Bhv=Tbd1@_n&Prgg`v5 zafQ}*Z^&S9Lm5*B$Wy`50warziD!D z^YHTV3kV9aV-E`lkATPl37I1&&Rn>naO2K{CofdqeE9N1tO0`-w>ePF$L8BF#G+XJoDXm(xY4?*4 zPI}Dfa7^gdOHX481q}->xZ!~pKKKzpkPaPhBicHMKLM+9uy~kp@zO?m3HcaGX zm00CEh@%2Y6Kt!icbfHR}2RmJ4-VW!Nvj1 zc<3vOqFi#)?7vRl5Ys!fiXBBYwhc)oDRBitm$*i`)Ch>umrT5$?EImScth8P?c-QD z6&|6nPjcb^`Tc&aea^j45Yx~I(W_aN&@GV&iH27V5h9I5A|!lf(w{p7bL&Qx)JBet zjg0|o)-YDZ$X<~tC@nUjh=2luNhnNv#`~ggRJ|Sl{;NdP+P^(_|FeulaDb!|iC_sx z#)v4^L<-Px`M1k*Rq~}Pq8%t56M?Rno#y9t^Z(o=l=hqid0%ZbA4XdvNX4;!al&1a z6;^7FSWOiB()~WL=bK#QVh#dGdX_r;CApQY(hrG3Vv%HID3XH=M~acv$gTb;lq(AQ>vyRb-&zZ4^p&A;a{{sp(gTS^ zVosHcWdB}}{-kedHmyh%4clLS{`&dm;i-qS505gWNL%#j`j{fp(%lQ6Byze`iO@Ug$fv|K`CQ#z0S)4MW<*QIrUIz-6h3ADa>8jE^x9ga2O(u?Hs1pF*9NoK z_v?cL+yQ?(J24gGUE9(>kyDwDer2VM>nlxswll7#^A@)AW}l>`xuZTU=JO{q=c^fJ z{&G%aW~pnRm!`M*r~D>P55S9~rCz#={^rTr{y6DVASbYVb~oQJTXM>Xm;jhf1iayZ z#Y9S6NqHT=e?+x41#&SjXT;B>r@6}Y{R)6AdpMIE2!4_=6=#_(YFm|C{BOJtHhtFH zNPW@%u;hyvf{#K*Oa0q61c_m44Ekh0C>n8=5xhTD+(K*>EUC=45H0rasDN12!v~|S z!M*ge66-?Sa>hoA?kNI2FKBythVY2oF5oUpJu$sS0;`R?Qk~r~GMn^~wJYRjn}@h(N`rx-ngN_n^pti7SGF|X_ZSd#W##Iy^*;vQ1F7W5l|HoZBB{#KQX zD9E4)Br5E8hIvivXk{*Fvp!q+V?eg{l4E=b<Y=fjk?835Bb$iND$5$&?g#ejw1~ z0?Kb8w`o;R{kIhYFUKxut9^;NLMB!`_y9S4?^dOyIQy6iauvIRQ5ZNJY@XV55a^No zhy~ru#b|I z0sd?A>iq)^@^uD=`Ygz1oZK)m4|wCmr>I-LY{yTRnhTb6e%or~pK`}1Gk0qUol$Iw_tF5qqgB;9lj4@8!N4E}U3$ zA_qi%N?@FVMpFYRlBo{LXoa~9-5pVzmQCeIqjXEtEhX6@Qmn@v2eh$OiQ}UMqB|47 z+hljr5H*UP6(I&1g|I7aUm24O3Lg%#=ian)*Q(JI<6Y{UYllbp9v{quoVi3d^O)Xi zv16-})*DeXwSv$hBF_`B0`o4DICk53_jTLZ@GM8kP;`>PtViXK)ha}0M@@$5>KR6+~muOcCP!+eXE3M_eS++ zg)wZ6sq*AIY_f5CoX^@SMcXu0wy&mUD}gKM{H@dU5Ff zwKa3lQ|nGsfD#!oz}Cl6itd0wu|>wmG2&H80~#HWh}Po0Q-{CW6A7C}?Ov z05}iI1L*e>n-+?;v53DkY>~Z9L89A7VAOg$F~X9TlEdj!QdS8w7}-G3_aY13_C=aV zb@j2rjeAPFQJj~xXA0feS+E8RYhw)??CFk=N1XW%gG7^mnC~>H+q5F<4mPqNt>lw7 z4%s2=9ErO$;war0Q5__d@JPVgYlorl0B`8)B}UR3gxhg&Rh?k5adIk5X7(aZaG|h~ zJDAHqw*ncIfzL?hmlN5811ywL_gFte@|~|kwX~?~(^^p!*KVP*;)TiSq_Gxf`71XX z%bw6(BuL~~JP=Qh!izwW;9BYo5Nd6L_6%HI52s_iS>u`&2QznENX_<3cHet;fPk02 z@1ST1+f^r+6$1ErImcRZ_EuT`Xr%h2KMghHw8Lny$$Fe-MmK+PATTwQ0Ow6iX1yFaQ#r7SB=?EAYPl{FQAiMiXW}Z!f z6ax!)!^LQOXIF-lWkSN^?egsaN(CJXlf6`jHA5@@1CspEa$vWdizrAig+fh+)}Yo$ zM&p0`|IW>@PzUW`u&Ni-FuQmZO|@^u3%5T6yw~%ZGipE(H;rSkijhcEt>fSVG-wV8 z6=0wwKrHyl(QMJGCIHDj9hOvPS_V<~ovOB@{<%As$rl-31iyW}#WxGmFNJvF#4-mn zw;0#IL75Bd8rMbFlzz$H<6A^SnD zq6iZt$|X^cVnhZ7VDJudqTxV^+v=2--?Or;$nwR^*_1S~DNrs2oN(Iy`H;Es&H6RQ z&(H*zY+A7tQ9(s-_~O{oVod%zVmN!=AJm7G-x69@I88>~=rj1@Dy#nOl5VWB62x}vR|a6T-voH>!`0k_c9M=mv{A-8DIeoYz$Zo5W? 
z5{Zmbcd$x0-OI&JHZKFy8vr<(qOsiuLO`Q!CS$2lHtF${TF&Is&*kbmJqaRCy5pj; zZ`QY0Ho7>7$8bj7akHnf5rF_sf$=IjW9bsfPgS`qYglY`Sd{I*-_O>8K-##ELY&M= zlCCc=Cyhu>)K_ct5|=%nAj+l`Gy2w(s4mIs)KX4f^nQ_>|2wf645_TnGXCiRctXpL zo*j?f5mUr=O#x!WWaBNNi|g^TpsUTka&{mN9!yTXQYl>746QHZ;8IExpa4)GyF2&%t@@B1~aesfJh=B(McAW~|nUZ#3;)NvK0*3UAz5pyst>WWiP z@i&$mro;GaWeFjzV<^(XKA{n|ch(WqLLEkDNN?{{JGfDm4x(CmM6Jshfkpo~^FAUV zkf;Bz{Ye@dBs(AC106Y{_pU@TYRK?8`huave}XJwkye>D%1T}J3kO8hQdf?`z~s8o znr+<7T_VS3;U!4G2T=iugA%3MwPsp7V#@UNh&fDc$bk%E%69_xMKU8ku&80dY4%w7 zjZA$oxKycD6z2o#S7c;%eC%4tB3L0vmvE-{mDgwN=7b*NBIe zBQ&xt^#%8h{r#m-h0fzv06D5LTLSm@KnNb$f45?e(O)GaJAqWXgN%VH&*<6@@9RBH zod3a$729H%l+TC-S$XlE?c~te&FJ#hl>w?R5ikZ#gxtrgF|leZ;HhXI2$0I!%H2Smp3O{0iHE=?0|%Cf8?x(@z6tyt558CclW17KXuj8*Xq-wR?EXPtv^Lw zyo)9oY}B%Mxa+6_W#@qBBw~Qa_^lEE3b0=eLkMC%-CKG1W$wSZ1Po8$TxLnd~+h7^sQ@;)K^%{a^^ ztxep6wLUSKtGCL!%W%hV?~LCwU0}DDF5#X^%^ijXyMq6aF5oQv=i3l>l=VmW7KU~E zpFMUz{ef`T5W;roAdVgF6k=_dI^Kcd4p-=NfoCy_^Olzl=V|~1`lmQqxqkW;S1K3C zfxbXYP4!bJlYuk@39&71@@AY)?Q-jFp0Bct>`s@Qd8|5{{Q%Wm?TV*MI2{8(W&*K< z800mlHkw=eZ@JVEJK5}QMDmQdrfz`f>n&N`2sCZ=6`(jbZ!4G`j2`UX=KFuiW3lGN zvU9QPx+iMGD&Wh^Qpa}5dzt06SgJN|*8y1D>zLRiZbTkJ5UTUZDW{Ysm?{KgdP5ge zZGE`!&m^0!Fc)XWlH^oI2bJ2V%Jhr$DrO)^2sJg-H}25IEHpnXo$&nyvRt$eRE8Os z9js%?ZSVHojj>(8z4Vy6C?S=-C|=yB%J5Zq6tSG0{UU+cu}L)>Wq`u;c!5BNhgqTw z?8KjI#R%~gd#@!OYQ*htPwmhFc3+>pLZ~q>o<3XP+t-`tUq@*MgaR!Cg%Jk^(*c4_ zVOX*-3UMnCfc7rAg%BbX@_zR9WeW~n?O4h^?Rf%))k)z!RaNWQI&P(x{MU)SaV7V5 zy^$o#Z?|j#$$|YO>dMS$qe`@UlV1`&BBZQgNfkvF*G_o-hT4{hb5=dUvoi#2ix|~Q zN?^qdotut%_z;mC>Nalnk*oIHE1a%EPVu8%?989xpwcuY9}(iu34r4KJi0e;X?#+h z(T))r2UY^ywH0L}%^-Uz&_~&I)asTjCapytBxu&hsKH6`CVnAHXk5)Qn)ueM)l}61rGx#1vr)@&5t;-d+po zih?VeYlSOHev2A2>DvqYd?$B!ZY*9+p9XlH|!z4*% zM6@*X0^n9s27V5J_jMe3j9jLQ4hvVt$rZAAFg73ajj#!8MF3(mLgF&s^)N-aVmh+C zY&x=sV}02eD_lN0&y2FHe47gRF!}$#e3|j4dVR^ot?9w)aYBI>0}a5HTFcCT6$KAp zbkksX;n^q`5uwyZPB;1{&H+}y(l`fuWnzBMF%Z&!XW^D2?c7XtNRuErytkucV|ZTq zwCvzn-ujIFVpItOQy8QO&%(Kyp|TiwXx#8fVhK-r*QEn^>L{xhNGcI6<#?@Fg4I(f z1|*M{izbt?nQT*zyzk1Dae9t+oOS)0WH^@!GaNdXrI5nL6O@w^Ua7$b-e7O<7)@)~ z7_n(He`WlBHYOeEoP!GEX8HY^p|44aiSy?tX75QZ)kPZPP0JxgqAH>wQ}V!JtK37UfIT5mCb-_#wXOm?{%rN|9|=y zbM>;pzpswdwK-cDPp-%ZbzD@@PscQA-S4{G-}I){nLM19A~2x2{9H7Jg4Oc4SOYE8 z5Xj@^28zTugMTgyo&Fp6QFKNM>cbpV8k>#E`0yba#bBfcfR6#`pn8MbCbwNbw9@FB zc3HxNs7~-s5X}^Aj4Ll6k70^fV%DC#6w)@Tm?|DhC}w5Bbh@jJW)|tBDY0_-eqqcV zk(Tp&PJaVmj-F5O)rAV!dZsP`->(UG;zX)4Qv3jK^w=@oYXM*P{umw7uo4J2IE*!p zF)ytfZ5##Lk1;9pRx&9KhZnxzVO;nb{2DX`hBq{r-+h^*$I8VY$v?fCGdXf{e?#y-1l&5bnP=M0Hm3jhi?9$~Q*R6f`;Mip3)H z9>phNVnp;vk|w855h|fdD5uPufZVN>gj|eqHgyfJ#D#qCO{R%tHNr#ZnLBJsE!EDg zrCG923hK&OcELY+N$%i8_8DN?zqzOZ#*8+n8TZXqWh>Fdn%Fw4* zQJFTt5WPZ36cVTKsbIVP2J65v13D%)INOm+%q}m_3+RDwzVI_9*Q=q3n8;<55{pZ8 zeqd9U}mFctZ_S=i%X8Z8gPktor_)qEAdT;DwUB;6w?baC)zeLA~+Ezf%#p8NKx3 z>G&oeg^$8}6vRw`Siey}Ib4z2c<7JSv{aA+#&D9LMJhhv0ZZddWn$$turyAOC5{2R zr^d0wvc%Ey(af^UF+lG#euZO8*@`@}ke?eQl?3PE58CJz>u;Xg%*{1Sir>DGPeAlv zf!&6^8y@+84*h@rqeM9?x#rmxDSPLIbAR0>&XFa zzCb9%=T=n}VLt%r%9+)U(nOi zTQERq^8)d?Y3`4UJ#ih3m*<|#ZB}-*w_cL+q_SM1L`?Z)GIHL_mkzazHioiZz6h3- zNex7?B$ps9?p}Ct%dQr5$HHTe+S5u*It7o1tC5vPt{2ip^qq7AWC~as_r(nmKMO%Q zx~-j%FBOvXZ7WLxE&@(#d3b2jvgP^S3~^<^uAR($8_^-N%*X)&OCdNZ$xq%!7L&!K zq!n6COs{|{WQ<8;Je^7d-~Qw#IdO=|jU{?GU6_wcNFd}k{Q=R%j)_nVT3@oJr}r*`R+FU{yCakcBsY!s zHeb38(yl6RU-6A!Vdl5h%!BAO@aZr`$fx=6 z7igl-z;wSTZRTSQM#{>DR)1(J=9qvLn6znt)nq@Y^p|q;;rov#)p{#g?rrBJ&pDx<7I9~2H45&ryX)YLu+!iIFRADR2D4tTf$^i zi<4M67$g#x1ZLs{-K#c?hUd)Nn;fp-D!E6Cf{VGcTt&EYZ}PmPVa}01tdO5mOT9QzD{Vge-{H&%8N>` zPe2R4p{4clnd*`V4BfvIEOQ&V-b9ZrPo@e6qjjozOyf3komG^zWqbwe2wTRMPRG<_ 
z8dYMcL>7O(J-9-3SR@fi{!m+PI4#Q!_42}0Qf;MnN=9W}tquof_fL&=k41`62E?x% zKVC$!dna^YwfBM~wGA$3EnQz7K9nCB+#L*-VW8DvCz%R{VqLs0%;33t`nfDq8dxMs zD8L9Y3MW2a#`4TA73a#T4MncOxm&_cnS`5)(Bn2Wfm@mm7Q+Pw+r~2G93S%9h$`6M zPrkc+aQ*k{lSmlZK7_1?RSo9Nx0i>>l^1X&HPwguXC~_L1$YjB$}~|z8!1d1 zO7y3zNx)$jXRQ3+^3kQEnl%@F2t_RBJc8H1KhHLKdv~4v^%A&>S5v{L8k>(mqlQ%H<7h2UvSx%kilZjClH!gtSs>HpXL9&{-<{L>?5siZtK?l zzzXcI1rB?_Soz_pyyJmo4sfP%Hgk-b*&(MLo&5dIzyI(zD#RFUWowD^{Qc{rKjQuG zho4cwMqCuS((QdJlGP%1GiD`UAAe3*A$AQ>fkK`Qm^Rsv;?;xIe& zWHq#&d73VNl-I@Phvj{g$AuB2>MlWw={a`kp1Tf|Is)b;gf|!U zt^}ndWxM6o(bO!HB%=B7=oF_kXN)j7$gh!$Scas;VTR6K<)lR%y;6Zh+`9HxidPsr z4e{^4Xd&7p3vXy_Tom{_r!m$*3G(dFvtl}JU{Y-WdIt$%&uAiTYR>RuU_e-=WMLo z?mgivr!o&^3+QH-h4Ec+d5fuwm5A;I*h;zhb}3xCJbBDaz>m{hZ7oRKlYR zutU@)QX9^60Ultj=tE_#c`1fh z(c;z+F#&2zc2yCoh^4#cPIB$%#qtw)GKhPn-mEj`Fi{vh9y_^WaZBfl$doL{CJxH#}@gWlXexgqSzQ0()TK}sVSr$ z*lKT>Zn!s)IeZqI%_kE_*sN*LwDG<>3R>%@PwnDp`}?NH*)Ba=l9VJPPiF^blV={P zG{J?dd~JFmw{H{7abj1`*^0kZ&sT&E@U1OG0fk>ar(i?LN!~sV*c9IWWM(H-*w4(# z*$}p6TsW3%V=+3g$@&q}I%dO0*=AM06Uaszu}5<~i% z=QT%nrEdPgZgNaa7wJHCecQsic|rV;!KB#0m04w#>MrFKJy=CgOzfkR9x5sA+ed5J z5=jl)I7V()DyepKdktr)t>ugv`8~71$|UWW+cD8mAF=VmRY4RX!Hke7P+#46A-sP6 z#QeGQ!7oRXhiRYjo9P>n31+hR3oblY;Ks&s5Ls`cJnXq^qn7#0XatP9IK3RHQdEZM z4Y*v6DPKJL?CWk;WywzJ@88me2?Ds``yyI0PS4F2bo^--7xH+^GVIPLnSo!2 zFcLB;NL@6q3?U7#^ZocCtT1XHWNzw4IYmz0kJpy$AoGd8q>EBGo|)D5?~b1J7cL6P zgNu1dz7co4xZ48dWNRPcKxnazI@;slMI9n0AwZn02=WpQqLkL^7*F8xHJwnzV9X;B z<}(<@+lcsL24g;+(8yqv0Q#c?rRYx|KE`0Mok2k^lAsivi!=z=iN(Y~K72xzGT`xt zKIHiGBCTno; zy)r9%y&{Z%ShO`sQxkhurjka^CRbD(NtQ)QRk3GlYGThwRWjv~UYoAQ!h=cNkv4tpsq{6?c3!i8b=ZI;A2Y@)rK+(i zsggNcp*oV3+p|eM40ckr4aOffp+ER3E!e0l@&JEJ)xnK*|Cg@x6F?IWR{+LrF= zp3PF}%2sbFjv1H~6;*%~N=k8BT~Ib-1n%tYqQk)7|N8aa6G~KFUfPdvDh9|WhUx@@ z*o}VhsHf+*pNQ4~bSvqn8+3$AO@@A#93T>jMCw|gYD$>94< z(st;jMIz6z*y})ThhInK0R@NR<+CxpXg|uTkgJCZz3GXpkRefg-ZAiX0P3o#Zn@4% z1uhmfHF8qJnS)J~nCM2z!2`qwd5wbDaPUmRM0*}MOKMnus3r(1Eld@{2?UzAAPpuB zstHzFD!w3iq6nT^0F{EKcgLM0@~e(e;h|%XO)KIqS)|7ebPnXy>@mH1Vc|eRqW4z_ zkZ$`y<*SKaoG`Q3Re3{g+TW$6%>fj2l$}o&JS;*NKqLnwQo8K^1HS;$yYtT5FEE#@ zPiN$u^-o(Vvx%nzoG2R~L1O59hd=3&tnREMrYZsFcmBPvK2<7N} zQWZgJYJf7~_pKX?^MBFxeiW=K2$$o$Ds|tuK>65#BmDbsoAhg~#E}V&SZuz+^iYGTegkkN_7fr3V+X`&OXKl|90thw z*Xi34$Pq9Z+YjDp<5FUilfHqzBGBh*V^iXi(?`4?65IT=|LH*g-RFHZkf~^c% z1f?aQlyQhDVM(T9YCxYk*d?!RP{ycY^1Mu4B#)9sEg@9{N!tRjsq>%eznNL1bGo!! 
z>hj<3&hrd!J5bI^7G{S31F7H4oipG5KT4VNI>)ZOneI+T`ux2G;R%v#!T&Tce>?`Y z9~T)tH!VF_;(BCczy^UkS`b4!?8lc%c35MA+yW#> z#O03|7eVkUo?(zoaGAS^pt!@AP)Ge@*MFXOgf*o?cP#}~asLGXX7YO)*UCLrvGb-nqrp{lr+`A_(;(WE- z(C z(HR9eQTUXr3f^kv`m^&Ct4Fh387B)%MEWkBtd^$j;LoGH_o&{Tea>)ISibZ5#NGdp zh79GoKX268gYib_&Z!@Ba)9WQ3$&honkJ{ohhy_Odf4g*%ECw!u-;>`FOc8gWPpX` zu8pTFXbSqteT$*d{@qw?2|tn=>-lxZ4!9NK>E$O;Fp|22N5}eiox6F0&Heq3DFZ2Y zI|tU^=&+Y$6(YLeumpRyUId|_KfiN0>G7AdUqT!ca(f0G7lO8CTnq+A>9<@xSz78DkB@BTlY z9$)X>i-y6n@4vMKp_{Jc!aZ!x-;ad`^5Ks@2;sp*C1!fYFOHDs;mG3SeGajMQ&N>& ze}W1;`%X$99(VGh-@neI1wWivh!JaYG7)a{xx9cD5 z*EhLyu+y%yzjM?y3cNPJI3d2Wi8V@J5{X(S2YqBAFolnNa6$!tDBy#GzH*RA#IyVi zPoJgZxd9iuUi58Uqdze!a19Jk!#}%L)L4z1pnvu>jle}8?)-Ppq4WQE&F}P=Dq$FN z9` zcUM#k>FmOo8nE{29_*dZaw|)iI9X+udDgVS)Zdg~o}RuKFdics+LpG>Yl9@wH~jf$ zBPA-l)RpJ5=3KQiulle=A{L91?$w#?oVRo7&W4?qEs1wTB8hZXR>P~C)8sNpE^!Wv zZVo*<${5SE1l()>OVhl{ySuu&%DvK-EL}{?I}zI!+j}zoB#4+@b|{vF#l~SmFC3rR zfGZcC{y0spSidsx=+Nf9d(6CPzcHTOurojstmpGkU=EzcH(=530lEC3G9TaLjWZ5e zTS?WErzb7{p106(3QtxJa~=R(hN0FZ)VjDK&@e%Tt-%}%fTk8;4lswkDDrX-1Ft^} ztaIhdVgS_2`=g<000G7irweE8wzvPm(JEjFnHHNz?sCe>P!V&S-DDF+_v zPpwnP4L4alxK~jet7bz~ARMkqYSQA>QR zMdMJtMNU*lW|kUg1{RYK9<0mVL@{vG6$ya#NpD>ms`Tu?(Ye=RSiTpv^$lO?c13|f z>=Yjj)W7Q8PHhSoTG7BEIeD%)%xjsVh8mgi7<+&);uVQ;$cS8Irqv!T!W1@C&U9bV zq&$wo@+N3<7I1VfGm?if%M!xe1333cIG>XXgq=Bpp#&p70^j|SnN==lUPwWALygc7 z#iA)8iI2hepggnY3!VYOl6V%lQ<0CrOt%isoM{+M#KlL-LxT?s@y2i$4y z)7(xYZm7_47j!OI+gk=>bE6Pp0L=|^{Imrw0`PvEM;YmD;K*7f_#<}q_uy}xEda8L z_v{xCY?rSgHyS|z9M~K3=U(=l@IN;81ps*b>*4_bUO({4|6BfiYb7hR019S)3W&!5 zD?DxRzm7pEy{bAZc6|KHtDn-swH{tk-pL?7qw1pNc0Tyu3Ku2HqH;i|a@+M)C_Q7+ zHai@~7i+5`YwdBEUZ-hVTOGGbw?chRaaCs0^YnRcZPpnwquVlz3|Ma3 z8CMvck!f0kZI(yDj4r!qk(Ck~jmUf@f(+=tUKR5A=Qm41kQQIAI^?$GK(xb9!X?rg z2VKcOIj6Qb^ATA9tg~Wk0#cDZzG=JsWxfK!ZZgZ}&NXhrkPxEVBJK=TfDA&F2D|V^ z+eTlaGolf5jw4xPschw}Xz|mxId61M2XjFR5k#U+ebyvHwlMNeS}o6V`3cb+n&p`Q zs0RV~wjInOmP-M3v8F2#7+RGEg4jzFj5UMc;EIL7%@QGUG$dqPO$a%R147BlL!p|T zXF?4(AB0-Om?zZ<4h7?HDiaS$2LS?B%@NEvRilDAP8BLxz*Z%KCGVL)n+jA1~4KLq*p)#bpth6j#mGlfTeQ@vl z*2*i`#cH&_8AI^a;#83;^|TV19dsYktE$mM-1u?>Ifyg94T7uH37Akuv4#RVt7tUo|8uG737Rgp4t0aj0ogt0o*VYFgT>j8UoE z3mNFNDg|ml%g<9QRYAg$g^gAoRdSFHPM!j3at$paouDO%Rt7!u6f?*rKD=5Lk}FBG zB)u*{hE&Eeh&gK%)2I|m$u%d8Y?U4$6Q-r4a}c0cc4ks=k{Ilx1+pw;QfD4LEpS*+ z1F2PB+w}}e_`5BFR3;aa%uRiXf_tF2wOu4%C8btSO~w zq#DoYnM$*I30P2|oQAHHK^@^UyMv|5bDyMXU9CTnhK3TC6)msPx0nv*exXPTA`(&J z&nAII%~Vfs+NB4O;&0z$atN}$nrzszW6yyjCv!M+;mVCWC>Rf(ym<5B3(gM$l0Ou* z02o+-aDoI2fk!|@LPkLqiiVDXiG__LOgOFxJbVH|B4QF!GI9z^Dv{KpM2itCPP_z( zk|ax!Dowf!nX+Wdp|RBl3v@VSwHFrZvdpjv8)L>&x3%l_$!DGVEq2s>AB~vw)feBa zx6LUhOw-cop?8MCNvBY9A7y|qN40!2y`D>bT2g>sdu z)Tmag&MR{@s5eie`SuyptVxSjZ@jbL4My8z&Q15+iiM&wAAcw1eeR;(d_c9W9Ga0000KfM%Qk literal 0 HcmV?d00001 diff --git a/static/assets/fonts/RobotoMono_LICENSE.txt b/static/assets/fonts/RobotoMono_LICENSE.txt new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/static/assets/fonts/RobotoMono_LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/tailwind.config.js b/tailwind.config.js index 74cdd805b..daaf363ce 100644 --- a/tailwind.config.js +++ b/tailwind.config.js @@ -264,7 +264,7 @@ module.exports = { fontFamily: { sans: [ - "Roboto", + "Roboto Flex", "system-ui", "-apple-system", "BlinkMacSystemFont",
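
For reference, a minimal illustrative sketch (not part of the patch itself) of what the `fontFamily.sans` stack in `tailwind.config.js` looks like after this change. Only the entries visible in the hunk above come from the diff; the placement under `theme.fontFamily` and the remaining fallbacks are assumptions.

```js
// tailwind.config.js — illustrative sketch only. The patch swaps one entry
// ("Roboto" -> "Roboto Flex"); nesting under `theme.fontFamily` and the
// trailing fallbacks comment are assumptions, not taken from the diff.
module.exports = {
  theme: {
    fontFamily: {
      sans: [
        "Roboto Flex",      // previously "Roboto"
        "system-ui",
        "-apple-system",
        "BlinkMacSystemFont",
        // ...remaining system fallbacks unchanged
      ],
    },
  },
};
```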