diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8d79fa7..83d74d7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,6 @@
name: CI
-on: [push, pull_request, workflow_dispatch]
+on: [pull_request]
jobs:
test:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c39c7c0..d5cc5b9 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,6 +1,6 @@
name: Build distribution
-on: [push, pull_request]
+on: [pull_request]
jobs:
test:
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..caf5390
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,2 @@
+test:
+ poetry run pytest
\ No newline at end of file
diff --git a/docs/README.md b/README.md
similarity index 100%
rename from docs/README.md
rename to README.md
diff --git a/docs/example.ipynb b/docs/example.ipynb
index f854590..b484346 100644
--- a/docs/example.ipynb
+++ b/docs/example.ipynb
@@ -1,155 +1,343 @@
{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "from factryengine import Task, Resource, Scheduler"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [],
- "source": [
- "r1 = Resource(id=1, available_windows=[(0, 10), (20, 30)])\n",
- "r2 = Resource(id=2, available_windows=[(0, 10), (20, 30)])\n",
- "\n",
- "t1 = Task(id=1, duration=5, resources=[r1], priority=1)\n",
- "t2 = Task(id=2, duration=5, resources=[r2], priority=1, predecessors=[t1])\n",
- "tasks = [t1,t2]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [
+ "cells": [
{
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Scheduled 2 of 2 tasks.\n"
- ]
- }
- ],
- "source": [
- "result = Scheduler(tasks=tasks).schedule()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Example 1: allocate to fastest resource\n",
+ "\n",
+ "In this simple example we have 2 tasks which require the same resource. Task 2 depends on Task 1\n"
+ ]
+ },
{
- "data": {
- "text/html": [
- "
\n",
- "\n",
- "
\n",
- " \n",
- " \n",
- " | \n",
- " task_uid | \n",
- " assigned_resource_ids | \n",
- " task_start | \n",
- " task_end | \n",
- " resource_intervals | \n",
- "
\n",
- " \n",
- " \n",
- " \n",
- " 0 | \n",
- " 1 | \n",
- " [1] | \n",
- " 0.0 | \n",
- " 5.0 | \n",
- " ([(0.0, 5.0)]) | \n",
- "
\n",
- " \n",
- " 1 | \n",
- " 2 | \n",
- " [2] | \n",
- " 5.0 | \n",
- " 10.0 | \n",
- " ([(5.0, 10.0)]) | \n",
- "
\n",
- " \n",
- "
\n",
- "
"
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Scheduled 2 of 2 tasks.\n"
+ ]
+ }
],
- "text/plain": [
- " task_uid assigned_resource_ids task_start task_end resource_intervals\n",
- "0 1 [1] 0.0 5.0 ([(0.0, 5.0)])\n",
- "1 2 [2] 5.0 10.0 ([(5.0, 10.0)])"
+ "source": [
+ "from factryengine import Task, Resource, Scheduler, Assignment, ResourceGroup\n",
+ "\n",
+ "# create the resource\n",
+ "resource = Resource(id=1, available_windows=[(0, 10), (20, 30)])\n",
+ "\n",
+ "# create the resource group\n",
+ "resource_group = ResourceGroup(resources=[resource])\n",
+ "\n",
+ "# create the assignment\n",
+ "assignment = Assignment(resource_groups=[resource_group], resource_count=1)\n",
+ "\n",
+ "# create tasks\n",
+ "t1 = Task(id=1, duration=5, priority=1, constraints=[resource], predecessor_ids=[2])\n",
+ "t2 = Task(id=2, duration=5, priority=1, constraints=[resource])\n",
+ "\n",
+ "tasks = [t1, t2]\n",
+ "result = Scheduler(tasks=tasks, resources=[resource]).schedule()"
]
- },
- "execution_count": 10,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "result.to_dataframe()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
+ },
{
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAABKUAAAJOCAYAAABm7rQwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAwHklEQVR4nO3de5TVdaH//9fMIBcFjyWkiXFJhVARCIT0iLcyyZTUo5acNG/hNS2zRD15w0IhRMkUyUgtE83UVaZlVFr9UiwNxF8iAiaYiqChiMhlZr5/+HXW4YvXcXhvZ/N4rMVa7s+ePfMaWZ+1xqef/ZmaxsbGxgAAAABAQbWVHgAAAADAhkeUAgAAAKA4UQoAAACA4kQpAAAAAIoTpQAAAAAoTpQCAAAAoDhRCgAAAIDiRCkAAAAAihOlAAAAACiuTaUHAAC8W0cccUQeeOCBtY7V1NRk4403To8ePfKlL30pn/vc5yq07v3lqaeeyic/+cm1jtXU1KRDhw7p2bNnRowYkUMOOSRJMn369Bx55JG5/vrrM2TIkErMBQA2IKIUANAqbb/99jnvvPOaHtfX1+fZZ5/Ntddem29+85vZbLPNsscee1Rw4fvLiSeemD333DNJ0tjYmOXLl+dnP/tZzjnnnKxZsyZf+MIXKjsQANjgiFIAQKvUsWPH9O/ff53ju+++e3bZZZfceuutotT/0q1bt3X+fe26666ZPXt2rr32WlEKACjOPaUAgKrSrl27tG3bNjU1NU3HGhoaMnny5Oyzzz7Zcccds+++++bHP/7xWq9bsGBBTjjhhAwZMiT9+vXL5z//+dx7771rfcysWbNy7LHHZsiQIfn4xz+eE044IY8//njT87feemt69+6dp556aq3X7b333hk1alTT4969e+eKK67IwQcfnJ122ilXXHFFkmT+/Pk55ZRTMnjw4Oy88845/vjjM2/evKbXrVy5MmPHjs0ee+yRHXfcMQcccEDuvPPOZv+7qq2tTZ8+ffL000+/6cdMmzYtI0aMyIABA7Ljjjtm2LBhueGGG5qenz59enr37p377rsvxxxzTPr165f//M//zLhx41JfX9/sbQBA9ROlAIBWqbGxMWvWrGn6s3LlysyfPz9nnXVWli9fvtY9pc4///xMnDgxw4cPz6RJkzJs2LB85zvfyfe///0kr0Wr448/PitWrMjYsWNz5ZVXZrPNNsuJJ56YJ598Mkly//335/DDD0+SfOc738lFF12UZ555Jl/4whfWCkfv1KRJk3LAAQdk4sSJ2XfffbNo0aJ8/vOfzz//+c+cf/75GTduXJYsWZIvfelLWbp0aRobG3PyySdn6tSpOfroo3PVVVdlwIAB+drXvpbbb7+92f8en3jiiXTr1u0Nn7vnnnty8sknZ4cddsiVV16Z733ve/nIRz6SCy+8MDNnzlzrY88444wMHDgwkyZNyv77759rrrkmP/vZz5q9CwCoft6+BwC0Sn/961+zww47rHWspqYmvXr1yuWXX5699toryWvR5eabb87pp5+ekSNHJkl222231NTU5Oqrr86IESOyZs2azJ8/PyeddFLTW/5ev4Jp1apVSZLx48ene/fumTx5curq6po+zz777JOJEyfm8ssvf1f7Bw0alKOPPrrp8SWXXJJVq1blRz/6Ubp06ZIk+djHPpbDDz88M2fOTJs2bfKnP/0pEyZMyH777ZckGTp0aFasWJHvfve72X///dOmzZv/aNfQ0JA1a9Y0/fOiRYvy4x//OLNnz87555//hq+ZO3duDjrooJxzzjlNxwYMGJAhQ4Zk+vTp6devX9PxQw89NCeffHKSZJdddsm0adNyzz33eFsgAPCmRCkAoFXaYYcdcsEFFyRJnnvuuVx22WVZvXp1Lrvssnz0ox9t+rj7778/jY2N2XvvvZuiTPLaW+quuuqqPPjgg/nkJz+ZbbfdNt/61rfy5z//Obvttlt23333nHXWWUmS
V155JbNmzcopp5zSFKSSZNNNN81ee+21ztv83ok+ffqs9fjBBx9M//79m4JUkmy55Zb5wx/+kCT57ne/m5qamuyxxx7rfB+/+MUv8vjjj6/zOf+3c845Z624lCSdOnXKiSeemM9//vNv+JrjjjsuSbJ8+fI88cQTWbBgQWbNmpUkTbHudQMGDFjr8ZZbbplXXnnlTfcAAIhSAECrtMkmm6Rv375Nj/v165fhw4fnmGOOya233poPfvCDSZKlS5cmST772c++4edZtGhRampqMmXKlFx11VX57W9/m9tvvz0bbbRRPvWpT+WCCy7Iq6++msbGxnTu3Hmd13fu3DnLli171/s33njjtR4vXbo0W2+99Zt+/Otv4fv4xz/+hs8/99xzbxmlTjnllKbfvldbW5tOnTpl6623Tm3tm9/N4YUXXsh5552XadOmpaamJt27d8+gQYOSvPb2yf+tffv2az2ura1d52MAAP43UQoAqAqdO3fOueeem9NOOy3f/va3M378+CSvXc2UJNddd1022WSTdV631VZbJUm22GKLnH/++TnvvPMye/bs/PrXv84PfvCDfOADH8g3vvGN1NTUZMmSJeu8fvHixdlss82SpOnm6g0NDWt9zPLly992f6dOnfLCCy+sc/y+++7L1ltvnU6dOmXjjTfO9ddf/4av7969+1t+/q5du64V8d6JM844I/Pnz8+1116bAQMGpG3btlmxYkVuvvnmd/V5AADeiBudAwBVY9iwYRk6dGjuuOOOPPDAA0nSdGXPv//97/Tt27fpzwsvvJDLL788S5cuzd///vfsuuuuefjhh1NTU5M+ffrka1/7Wnr16pWnn346G2+8cXbcccfcdddda/1GuWXLluWee+7JwIEDkyQdO3ZMkjz77LNNHzNv3rymq7XeyqBBgzJz5sy1wtTzzz+f4447Lvfee28GDx6cV155JY2NjWt9H3PmzMn3v//9td7S11IefPDBfPrTn86QIUPStm3bJMkf//jHJOuGNwCAd8uVUgBAVTn77LMzfPjwXHTRRbntttvSu3fvDB8+PN/61rfyr3/9KzvuuGOeeOKJTJgwIVtvvXV69OiRNWvWpH379vnmN7+Zr3zlK+ncuXP+8pe/5NFHH82RRx6ZJPn617+eY489NiNHjsyIESOyevXqTJ48OatWrWq6wfeQIUPSvn37XHzxxTnttNOyfPnyTJw4selKqrdy1FFH5fbbb89xxx2X448/PhtttFGuuuqqbLnlljnggAPSqVOn7LzzzjnppJNy0kknZZtttsnDDz+ciRMnZujQoU1vV2xJO+20U375y19mhx12yJZbbpmHHnookydPTk1NTVasWNHiXw8A2LCIUgBAVfnoRz+aI444IlOmTMmNN96YL37xixkzZkyuvvrqTJ06Nc8++2w233zz7LfffvnqV7+aurq61NXVZcqUKRk/fny+/e1v56WXXkqPHj1y4YUX5uCDD07y2m+U+9GPfpSJEyfm9NNPT9u2bTNo0KBccskl2W677ZK89lbB733vexk/fnxOPvnkdO3aNaecckpuv/32t9394Q9/OD/96U8zbty4jBo1Km3bts2QIUMyYcKE/Md//EeSZPLkybn88stz9dVX5/nnn88WW2yRo48+uimKtbSLL744o0ePzujRo5MkPXr0yAUXXJBf/OIX+dvf/rZeviYAsOGoaXQHSgAAAAAKc08pAAAAAIoTpQAAAAAoTpQCAAAAoDhRCgAAAIDiRCkAAAAAihOlAAAAACiuTaUHNFdDQ0PWrFmT2tra1NTUVHoOAAAAAEkaGxvT0NCQNm3apLb2za+HarVRas2aNZk1a1alZwAAAADwBvr27Zu2bdu+6fOtNkq9Xtq23377t/wGgXevvr4+s2bNSt++fVNXV1fpOVBVnF+wfji3YP1xfsH6U63n1+vf11tdJZW04ij1+lv26urqquovDt5PnF+w/ji/YP1wbsH64/yC9adaz6+3u92S
G50DAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFtan0AACA97MFCxZkyZIllZ7B26ivr8+cOXPS0NCQurq6Ss+BquL8gvWnvr4+zz33XKVnVIwoBQDwJhYsWJA+H/tYXlmxotJTAIAq1b5du/zj0UfTs2fPSk8pTpQCAHgTS5YsySsrVuTSQwZlmw91qvQcAKDKzHtuWU6/5W9ZsmSJKAUAwLq2+VCn7LjVByo9AwCgqrjROQAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxbWp9AAAAIDW7tmXVmT0r2bmvvmL075NXT7bd+ucsc8OabdRXaWnAbxvVfRKqUWLFuXUU0/N4MGDM3To0IwZMyYrV66s5CQAAIB3pbGxMafcOD0rVtdn6nF75PLPD87vHnsml/7uH5WeBvC+VrEo1djYmFNPPTUrVqzIDTfckAkTJuQPf/hDLrvsskpNAgAAeNfmL3k5f1/4QsYePDC9ttg0O/fonK/uvX1+OXNhpacBvK9VLErNnz8/M2bMyJgxY7Lddttl0KBBOfXUU3PHHXdUahIAAMC71qVju/zoS/+Zzh3br3V82crVFVoE0DpULEp16dIl11xzTTp37rzW8ZdffrlCiwAAAN69TTu0ze7bbdH0uKGhMT+ePi+7fvRDFVwF8P5XsSi16aabZujQoU2PGxoa8pOf/CSf+MQnKjUJAADgPbv4N4/k/396aU7fZ/tKTwF4X3vf/Pa9cePG5R//+EduueWWd/W6+vr61NfXr6dVsGF6/ZxybkHLc361Lv6egHfrkt88kmvvm5uJnx+c3lv8R6XnAK1EQ0NDVf3c8U6/l/dFlBo3blyuu+66TJgwIb169XpXr/3HP/xGC1hfZs2aVekJULWcX63DnDlzKj0BaEXOv2NGfvrAExl/yKAM26FrpecArcjcuXPTps37ItEUVfHvePTo0bnxxhszbty47Lvvvu/69dtvv33atm27HpbBhqu+vj6z
Zs1K3759U1dXV+k5UFWcX61LQ0NDpScArcTE3z+aGx94IpcfNjif2VGQAt6dbbfdNv3796/0jBbz+s+8b6eiUeqKK67I1KlTc+mll2bYsGHN+hx1dXV+qIf1xPkF64/zq3XwdwS8E3OfeylX3DM7J+zeK4O6b57Fy15teq5Lp/Zv8UqA19TW1m6QP3dULErNmzcvV155ZUaOHJmBAwdm8eLFTc916dKlUrMAAADelWmPPpP6hsZ8/57H8v17HlvruXkXHVyhVQDvfxWLUr/73e9SX1+fq666KlddddVazz322GNv8ioAAID3lxP26J0T9uhd6RkArU7FotTIkSMzcuTISn15AAAAACqottIDAAAAANjwiFIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQXJvmvvDBBx/MddddlyeffDKTJk3KL3/5y3Tt2jWf/exnW3IfAEDFzXtuWaUnAABVaEP/GaNZUeruu+/OWWedlcMOOyz33HNP1qxZkzZt2mTUqFF58cUXM2LEiJbeCQBQXOfOnbNxhw45/Za/VXoKAFCl2rdrl86dO1d6RkU0K0pdccUVOf/883PAAQdk6tSpSZJjjjkmXbp0ycSJE0UpAKAqdOvWLY/Onp0lS5ZUegpvo76+PnPmzEmvXr1SV1dX6TlQVZxfsP7U19fnueeeS7du3So9pSKaFaWefPLJ9O/ff53jO+20UxYtWvReNwEAvG9069Ztg/1BsTWpr69PbW1t+vfv7z+aoYU5v2D9qa+vz4wZMyo9o2KadaPzbbfdNn/605/WOX7bbbdl2223fc+jAAAAAKhuzbpS6qyzzsoJJ5yQ+++/P6tXr86kSZPyz3/+M4888kgmTZrU0hsBAAAAqDLNulJq0KBB+fWvf51tttkme++9d5YuXZoBAwbkrrvuyi677NLSGwEAAACoMs26UipJnn/++ey111457bTTkiRTpkzJsmUb9q8yBAAAAOCdadaVUnfeeWcOPfTQPPTQQ03HZs2alcMOOyzTpk1rsXEAAAAAVKdmRamJEyfmggsuyFFHHdV0bMKECTnvvPMyYcKEltoGAAAAQJVqVpR69tlnM2DAgHWODxw4MAsXLnzPowAAAACobs2KUttv
v31+8pOfrHP85ptvzsc+9rH3PAoAAACA6tasG52PGjUqxx57bO6999706dMnSfLYY49l6dKlmTx5cosOBAAAAKD6NCtK7bTTTvnNb36TX/3qV3niiSfSpk2bDBkyJMOHD0+nTp1aeiMAAAAAVaZZUerggw/OmDFjcsQRR7T0HgAAAAA2AM26p9Rzzz2Xurq6lt4CAAAAwAaiWVdKHXjggTnuuOMyfPjwdO3aNe3atVvneQAAAAB4M82KUnfeeWdqa2tzxx13rPNcTU2NKAUAAADAW2pWlPr973/f0jsAAAAA2IA0K0r99a9/fcvnd95552aNAQAAAGDD0Kwo9Wa/da9t27bp0qVLfve7372nUQAAAABUt2ZFqdmzZ6/1uL6+PgsWLMjo0aNzwAEHtMgwAAAAAKpXbUt8krq6uvTs2TOjRo3K5Zdf3hKfEgAAAIAq1iJR6nXPP/98XnrppZb8lAAAAABUoWa9fe+ss85a59jy5cvzl7/8JcOGDXvPowAAAACobs2KUm9ks802y5lnnpnPfe5zLfUpAQAAAKhSzYpSY8aMaekdAAAAAGxAmn2l1LRp03LNNddk/vz5qa+vT8+ePfPFL34xBx54YAvOAwAAAKAaNStKTZ06NZdcckm++MUvZuTIkWloaMhDDz2UCy64IKtXr86hhx7a0jsBAAAAqCLNilLXXHNNzjvvvLWuivrUpz6V7bbbLpMmTRKlAAAAAHhLtc150fPPP5/+/fuvc3zAgAF55pln3usmAAAAAKpcs6JUnz59cvvtt69z/Lbbbsu22277XjcBAAAAUOWa9fa9b3zjGznqqKMyffr09OvXL0kyY8aMzJ49O5MmTWrRgQAAAABUn2ZdKTVgwIDceuut6devX+bNm5ennnoqO++8c+6666584hOfaOmNAAAAAFSZZl0plSTbbLNNRo0alSR59dVXM2fOnHTs2LHFhgEAAABQvZp1pdTcuXNz2GGH5aGHHspLL72Ugw46KIcddlh233333H///S29EQAAAIAq06wodcEFF+QjH/lIevTokVtuuSXLli3Ln//855xwwgm55JJLWnojAAAAAFWmWVHq4Ycfzle/+tV88IMfzLRp07LPPvukc+fO2X///TN//vyW3ggAAABAlWlWlOrUqVOWLFmSZ555JjNmzMiee+6ZJHn00Uez+eabt+Q+AAAAAKpQs250fvDBB+fEE09M27Zts/XWW2e33XbLjTfemLFjx+a0005r6Y0AAAAAVJlmRanTTz89ffv2zb/+9a/sv//+qaury1ZbbZVLL700e+21V0tvBAAAAKDKNCtKJck+++yTl19+OQsWLMimm26agQMHpmPHji25DQAAAIAq1ax7Sq1cuTL/8z//k8GDB+eQQw7JokWLMmrUqBx77LF58cUXW3ojAAAAAFWmWVFq3LhxmTt3bm677ba0a9cuSfKVr3wl//73v3PRRRe16EAAAAAAqk+zotTdd9+dc845J71792461rt374wePTp//OMfW2wcAAAAANWpWVFq+fLl6dChwzrHGxoaUl9f/55HAQAAAFDdmhWl9t5770yYMCEvv/xy07GFCxfmoosuyh577NFi4wAAAACoTs2KUueee25qa2szePDgrFixIv/1X/+VT3/609l0003zrW99q6U3AgAAAFBl2rzbF7z88stp06ZNvve972XhwoWZN29e1qxZk549e6ZTp075zne+k7Fjx66PrQAAAABUiXccpZ599tmMGjUq06dPT5LsvvvuGTt2bPbcc8/U19fn2muvzZVXXpk2bd515wIAAABgA/OO37534YUX5l//+lfGjh2bCRMmZPHixRkzZkwWLVqUQw89NOPHj89nP/vZ/PrXv16fewEAAACoAu/4sqYHH3wwl112WXbZZZckyfbbb5+DDjoos2fPTmNjY2666ab07dt3vQ19MzNnzkxdXV3xrwvVrL6+PnPmzElDQ4PzC1qY86v16dy5c7p161bpGQAA
VecdR6mXXnop22yzTdPjbt26ZfXq1enatWsuu+yybLTRRutl4NvZa6+9snz58op8bQCg+rXvsHEem/2oMAUA0MLecZRqbGxc5//o1tXV5Stf+UrFglSS9N3npGy0yZYV+/oAQPVa9sJTmXHXhCxZskSUAgBoYe/5ruSbbLJJS+xoto4f6Jr2H+hR0Q0AAAAAvDvvKkrddddd6dixY9PjhoaG3H333dl8883X+rgDDzywRcYBAAAAUJ3ecZTaaqutMmXKlLWObb755rnhhhvWOlZTUyNKAQAAAPCW3nGU+v3vf78+dwAAAACwAamt9AAAAAAANjyiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUsMGoX7M69153apYsnFXpKQAAABu8NpUeAFBC/ZpV+fudl2bZ8wsqPQUAAIC8T66UWrVqVfbff/9Mnz690lOAKrTs+YX5/278ZpYvfbbSUwAAAPi/Kh6lVq5cmdNPPz2PP/54pacAVer5px7J5h/pm90Ov6TSUwAAAPi/Kvr2vblz5+brX/96GhsbKzkDqHI9+n2m0hMAAAD4f1T0SqkHHnggQ4YMyU033VTJGQAAAAAUVtErpUaMGFHJLw8A8I7U19envr6+0jN4C6///fh7gpbn/IL1p1rPr3f6/fjtewAAb2POnDmpra34rTh5B2bNmlXpCVC1nF+w/myo55coBQDwNnr16pX+/ftXegZvob6+PrNmzUrfvn1TV1dX6TlQVZxfsP5U6/n1+vf1dkQpAIC3UVdXV1U/KFYzf1ew/ji/YP3ZUM8v16EDAAAAUJwrpYANyv6n317pCQAAAMSVUgAAAABUwPvmSqnHHnus0hMAAAAAKMSVUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdK
AQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcW0qPeC9evnf/8rKVfWVngEAVKFlLzxV6QkAAFWr1UepWb+9MsuXL6/0DACgSrXvsHE6d+5c6RkAAFWn1UepP/zhD6mrq6v0DKgq9fX1mTNnTnr16uX8ghbm/Gp9OnfunG7dulV6BgBA1Wn1Uapfv35p27ZtpWdAVamvr09tbW369+/vP5qhhTm/AADgNW50DgAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMWJUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFNem0gOaq7GxMUlSX1+f+vr6Cq+B6vL6OeXcgpbn/IL1w7kF64/zC9afaj2/Xv9+Xm83b6am8e0+4n1q1apVmTVrVqVnAAAAAPAG+vbtm7Zt277p8602SjU0NGTNmjWpra1NTU1NpecAAAAAkNeukGpoaEibNm1SW/vmd45qtVEKAAAAgNbLjc4BAAAAKE6UAgAAAKA4UQoAAACA4kQpAAAAAIoTpQAAAAAoTpQCAAAAoLhWGaVWrlyZs88+O4MGDcpuu+2WKVOmVHoSVIVFixbl1FNPzeDBgzN06NCMGTMmK1eurPQsqDojR47MqFGjKj0DqsaqVatywQUXZOedd86uu+6aSy+9NI2NjZWeBVXhmWeeyfHHH5+Pf/zj2XvvvXPttddWehK0eqtWrcr++++f6dOnNx1buHBhjjrqqPTv3z/77bdf/vznP1dwYTltKj2gOcaOHZtHHnkk1113XZ5++umceeaZ2WqrrTJs2LBKT4NWq7GxMaeeemo23XTT3HDDDXnxxRdz9tlnp7a2NmeeeWal50HV+NWvfpV77703Bx10UKWnQNW46KKLMn369Pzwhz/M8uXL87WvfS1bbbVVvvCFL1R6GrR6X/3qV7PVVlvl1ltvzdy5c3PGGWeka9eu2WeffSo9DVqllStX5utf/3oef/zxpmONjY05+eST06tXr/z85z/PtGnTcsopp+TOO+/MVlttVcG161+ru1LqlVdeyc9+9rOcc8452WGHHbLPPvvkuOOOyw033FDpadCqzZ8/PzNmzMiYMWOy3XbbZdCgQTn11FNzxx13VHoaVI2lS5dm7Nix6du3b6WnQNVYunRpfv7zn2f06NHZaaedsssuu+SYY47JzJkzKz0NWr0XX3wxM2bMyIknnpgePXrkU5/6VIYOHZr77ruv0tOgVZo7d24OO+ywLFiwYK3j999/fxYuXJgLL7ww22yzTY4//vj0798/P//5zyu0tJxWF6Vmz56dNWvWZMCAAU3HBg4cmJkzZ6ahoaGCy6B169KlS665
5pp07tx5reMvv/xyhRZB9bnkkkvyuc99Lttuu22lp0DVePDBB9OxY8cMHjy46djIkSMzZsyYCq6C6tC+fft06NAht956a1avXp358+fnoYceSp8+fSo9DVqlBx54IEOGDMlNN9201vGZM2dm++23z8Ybb9x0bODAgZkxY0bhheW1uii1ePHifOADH0jbtm2bjnXu3DkrV67M0qVLKzcMWrlNN900Q4cObXrc0NCQn/zkJ/nEJz5RwVVQPe6777787W9/y0knnVTpKVBVFi5cmK5du+b222/PsGHD8slPfjLf//73/c9KaAHt2rXLueeem5tuuin9+vXLZz7zmey+++459NBDKz0NWqURI0bk7LPPTocOHdY6vnjx4nzoQx9a69jmm2+eZ599tuS8imh195RasWLFWkEqSdPjVatWVWISVKVx48blH//4R2655ZZKT4FWb+XKlTnvvPNy7rnnpn379pWeA1XllVdeyZNPPpmpU6dmzJgxWbx4cc4999x06NAhxxxzTKXnQas3b9687LXXXjn66KPz+OOPZ/To0dlll10yfPjwSk+DqvFmnWNDaBytLkq1a9dunb+Y1x/7QR9axrhx43LddddlwoQJ6dWrV6XnQKt3xRVXZMcdd1zrakSgZbRp0yYvv/xyxo8fn65duyZJnn766dx4442iFLxH9913X2655Zbce++9ad++ffr27ZtFixblqquuEqWgBbVr126dd36tWrVqg2gcrS5KbbHFFvn3v/+dNWvWpE2b1+YvXrw47du3z6abblrhddD6jR49OjfeeGPGjRuXfffdt9JzoCr86le/ypIlS5ruh/j6/0z5zW9+k7///e+VnAatXpcuXdKuXbumIJUkPXv2zDPPPFPBVVAdHnnkkXTv3n2t/zDefvvtM2nSpAquguqzxRZbZO7cuWsdW7JkyTpv6atGrS5K9enTJ23atMmMGTMyaNCgJK/d4LJv376prW11t8iC95UrrrgiU6dOzaWXXpphw4ZVeg5UjR//+MdZs2ZN0+Pvfve7SZIzzjijUpOgavTr1y8rV67ME088kZ49eyZ57TfK/u9IBTTPhz70oTz55JNZtWpV01uL5s+fn6233rrCy6C69OvXL5MnT86rr77aFIEffPDBDBw4sMLL1r9WV3E6dOiQAw88MOeff34efvjhTJs2LVOmTMmRRx5Z6WnQqs2bNy9XXnllvvzlL2fgwIFZvHhx0x/gvenatWu6d+/e9GeTTTbJJptsku7du1d6GrR6H/3oR7PnnnvmrLPOyuzZs/OnP/0pkydPzuGHH17padDq7b333tloo43yP//zP3niiSfy+9//PpMmTcoRRxxR6WlQVQYPHpwPf/jDOeuss/L4449n8uTJefjhh3PIIYdUetp6V9PY2NhY6RHv1ooVK3L++efn7rvvTseOHXPsscfmqKOOqvQsaNUmT56c8ePHv+Fzjz32WOE1UN1GjRqVJLn44osrvASqw7JlyzJ69Oj89re/TYcOHTJixIicfPLJqampqfQ0aPXmzp2bb3/723n44YfzwQ9+MP/93/+dL33pS84veI969+6d66+/PkOGDEmSPPnkkznnnHMyc+bMdO/ePWeffXZ23XXXCq9c/1pllAIAAACgdWt1b98DAAAAoPUTpQAAAAAoTpQCAAAAoDhRCgAAAIDiRCkAAAAAihOlAAAAAChOlAIAAACgOFEKAAAAgOLaVHoAAEA1GTVqVG677ba3/Jjf/e532XrrrQstAgB4f6ppbGxsrPQIAIBqsWzZsrz66qtJkjvvvDNTpkzJLbfckiRpaGhIfX19tthii9TV1VVyJgBAxblSCgCgBXXq1CmdOnVq+ue6urp06dKlwqsAAN5/3FMKAKCQp556Kr17985TTz2VJOndu3fuuuuufOYzn0m/fv1y+umnZ+HChTnyyCPTr1+/jBgxIosWLWp6/W9/+9vst99+6devXw455JA88MADlfpWAADeM1EKAKCC
Jk6cmIsvvjhXX3117r777hx++OE5/PDDM3Xq1CxevDg/+MEPkiSzZ8/OmWeemRNPPDG/+MUvMnz48Hz5y1/Ok08+WeHvAACgebx9DwCggo466qj069cvSdKnT5/07Nkzn/nMZ5Ikn/70pzN79uwkyQ9/+MMcdthhOeCAA5IkRx55ZP7617/mxhtvzKhRoyozHgDgPRClAAAq6CMf+UjTP7dv3z5du3Zd6/GqVauSJPPmzctdd92Vm266qen51atXZ7fddis3FgCgBYlSAAAV9P/+Fr7a2je+u0J9fX2+/OUv58ADD1zrePv27dfXNACA9co9pQAAWoGePXvmqaeeSvfu3Zv+3HTTTfnjH/9Y6WkAAM0iSgEAtAJHHXVU7rzzzlx//fVZsGBBrr322lx77bXp0aNHpacBADSLKAUA0Ar0798/Y8eOzU9/+tPst99+ufnmmzN+/PjsvPPOlZ4GANAsNY2NjY2VHgEAAADAhsWVUgAAAAAUJ0oBAAAAUJwoBQAAAEBxohQAAAAAxYlSAAAAABQnSgEAAABQnCgFAAAAQHGiFAAAAADFiVIAAAAAFCdKAQAAAFCcKAUAAABAcaIUAAAAAMX9H4NMjKvTjUZ3AAAAAElFTkSuQmCC",
- "text/plain": [
- ""
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " task_id | \n",
+ " assigned_resource_ids | \n",
+ " task_start | \n",
+ " task_end | \n",
+ " resource_intervals | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 1 | \n",
+ " [1] | \n",
+ " 5 | \n",
+ " 10 | \n",
+ " ((5, 10)) | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " 2 | \n",
+ " [1] | \n",
+ " 0 | \n",
+ " 5 | \n",
+ " ((0, 5)) | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " task_id assigned_resource_ids task_start task_end resource_intervals\n",
+ "0 1 [1] 5 10 ((5, 10))\n",
+ "1 2 [1] 0 5 ((0, 5))"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# we can get the result as a dataframe\n",
+ "result.to_dataframe()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Task 2 is scheduled before task 1 because task 2 is a predecessor of task 1.\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Example 2: allocate to fastest team\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Scheduled 1 of 1 tasks.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " task_id | \n",
+ " assigned_resource_ids | \n",
+ " task_start | \n",
+ " task_end | \n",
+ " resource_intervals | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 1 | \n",
+ " [3, 4] | \n",
+ " 0 | \n",
+ " 3 | \n",
+ " ((0, 3), (0, 3)) | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " task_id assigned_resource_ids task_start task_end resource_intervals\n",
+ "0 1 [3, 4] 0 3 ((0, 3), (0, 3))"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "r1 = Resource(id=1, available_windows=[(2, 10), (20, 30)])\n",
+ "r2 = Resource(id=2, available_windows=[(2, 10), (20, 30)])\n",
+ "r3 = Resource(id=3, available_windows=[(0, 10), (20, 30)])\n",
+ "r4 = Resource(id=4, available_windows=[(0, 10), (20, 30)])\n",
+ "\n",
+ "team1 = ResourceGroup(resources=[r1, r2])\n",
+ "team2 = ResourceGroup(resources=[r3, r4])\n",
+ "\n",
+ "assignment1 = Assignment(resource_groups=[team1, team2], use_all_resources=True)\n",
+ "\n",
+ "t1 = Task(id=1, duration=6, assignments=[assignment1], priority=1)\n",
+ "\n",
+ "tasks = [t1]\n",
+ "resources = [r1, r2, r3, r4]\n",
+ "\n",
+ "result = Scheduler(tasks=tasks, resources=resources).schedule()\n",
+ "result.to_dataframe()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "duration of 6 is cut in half due to there being 2 workers.\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Example 3: Combining Constraint and assignments\n",
+ "\n",
+ "Scenario could be you have a task that require a machine which should be there for the entire duration of the task, and you have a group of resources which can operate the machine but you only need one.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Scheduled 1 of 1 tasks.\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " task_id | \n",
+ " assigned_resource_ids | \n",
+ " task_start | \n",
+ " task_end | \n",
+ " resource_intervals | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 1 | \n",
+ " [4, 1] | \n",
+ " 20 | \n",
+ " 30 | \n",
+ " ((20, 30), (20, 30)) | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " task_id assigned_resource_ids task_start task_end resource_intervals\n",
+ "0 1 [4, 1] 20 30 ((20, 30), (20, 30))"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "machine = Resource(id=1, available_windows=[(20, 30)])\n",
+ "operator1 = Resource(id=2, available_windows=[(0, 10), (20, 28)])\n",
+ "operator2 = Resource(id=3, available_windows=[(15, 25)])\n",
+ "operator3 = Resource(id=4, available_windows=[(0, 10), (20, 30)])\n",
+ "\n",
+ "operator_group = ResourceGroup(resources=[operator1, operator2, operator3])\n",
+ "\n",
+ "assignment = Assignment(resource_groups=[operator_group], resource_count=1)\n",
+ "\n",
+ "# add machine as a constraint\n",
+ "t1 = Task(\n",
+ " id=1, duration=10, assignments=[assignment], priority=1, constraints=[machine]\n",
+ ")\n",
+ "\n",
+ "tasks = [t1]\n",
+ "resources = [operator1, operator2, operator3, machine]\n",
+ "\n",
+ "result = Scheduler(tasks=tasks, resources=resources).schedule()\n",
+ "result.to_dataframe()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "we can see that the task is scheduled to start at 20 and end at 30, which is the only time when the machine is available. Operator with id 4 is selected as he is the only one avaialble.\n"
]
- },
- "metadata": {},
- "output_type": "display_data"
}
- ],
- "source": [
- "result.plot_resource_plan()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "base",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.10.10"
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "base",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.6"
+ },
+ "orig_nbformat": 4
},
- "orig_nbformat": 4
- },
- "nbformat": 4,
- "nbformat_minor": 2
+ "nbformat": 4,
+ "nbformat_minor": 2
}
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..7906747
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,53 @@
+# ⚙️ factryengine
+
+`factryengine` is a high-speed Python package for effortless and efficient task scheduling, specifically tailored for production scheduling. Built with `numpy`, it ensures tasks are executed in the correct order while considering their priorities, resources, and dependencies.
+
+## 🛠 Installation
+
+Install `factryengine` with a simple pip command:
+
+```bash
+pip install factryengine
+```
+
+## 🌟 Features
+
+- **Fast Performance**: Built with `numpy` for high-speed task scheduling.
+- **Production Scheduling**: Specifically designed for seamless production scheduling.
+- **Simple Task Creation**: Easily define tasks with attributes like duration, priority, and resources.
+- **Resource Management**: Assign resources with availability windows to tasks.
+- **Task Dependencies**: Ensure tasks that depend on others are scheduled in the correct order.
+- **Efficient Scheduling**: Automatically schedule tasks while considering their priorities and dependencies.
+
+## 🚀 Quick Start
+
+Get started with `factryengine` with this basic example:
+
+```python
+from factryengine import Task, Resource, Scheduler
+
+# Creating a Resource object
+resource = Resource(id=1, available_windows=[(0,10)])
+
+# Creating Task objects
+task1 = Task(id=1, duration=3, priority=2, resources=[[resource]])
+task2 = Task(id=2, duration=5, priority=1, resources=[[resource]], predecessors=[task1])
+
+# Creating a Scheduler object and scheduling the tasks
+scheduler = Scheduler(tasks=[task1, task2])
+scheduler_result = scheduler.schedule()
+```
+
+In this example, `task1` is scheduled before `task2` as `task2` depends on `task1`, despite its lower priority.
+
+## 📖 Documentation
+
+For more detailed information, check out the [documentation](https://yacobolo.github.io/factryengine/).
+
+## 🤝 Contributing
+
+Contributions, issues, and feature requests are welcome!
+
+## 📝 License
+
+This project is [MIT](LICENSE) licensed.
diff --git a/mkdocs.yml b/mkdocs.yml
index 0235964..56609dd 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -32,7 +32,7 @@ repo_url: https://github.com/Yacobolo/factryengine
repo_name: factryengine
nav:
- - Home: README.md
+ - Home: index.md
- Usage: usage.md
- Example: example.ipynb
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..3f2f9ed
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1479 @@
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.6.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
+ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+]
+
+[[package]]
+name = "appnope"
+version = "0.1.3"
+description = "Disable App Nap on macOS >= 10.9"
+optional = false
+python-versions = "*"
+files = [
+ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"},
+ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
+]
+
+[[package]]
+name = "asttokens"
+version = "2.4.1"
+description = "Annotate AST trees with source code positions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
+ {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
+]
+
+[package.dependencies]
+six = ">=1.12.0"
+
+[package.extras]
+astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
+test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
+
+[[package]]
+name = "cffi"
+version = "1.16.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+ {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+ {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+ {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+ {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+ {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+ {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+ {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+ {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
+ {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
+ {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
+ {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
+ {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
+ {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "comm"
+version = "0.2.1"
+description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"},
+ {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"},
+]
+
+[package.dependencies]
+traitlets = ">=4"
+
+[package.extras]
+test = ["pytest"]
+
+[[package]]
+name = "contourpy"
+version = "1.2.0"
+description = "Python library for calculating contours of 2D quadrilateral grids"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "contourpy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0274c1cb63625972c0c007ab14dd9ba9e199c36ae1a231ce45d725cbcbfd10a8"},
+ {file = "contourpy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab459a1cbbf18e8698399c595a01f6dcc5c138220ca3ea9e7e6126232d102bb4"},
+ {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdd887f17c2f4572ce548461e4f96396681212d858cae7bd52ba3310bc6f00f"},
+ {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d16edfc3fc09968e09ddffada434b3bf989bf4911535e04eada58469873e28e"},
+ {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c203f617abc0dde5792beb586f827021069fb6d403d7f4d5c2b543d87edceb9"},
+ {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b69303ceb2e4d4f146bf82fda78891ef7bcd80c41bf16bfca3d0d7eb545448aa"},
+ {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:884c3f9d42d7218304bc74a8a7693d172685c84bd7ab2bab1ee567b769696df9"},
+ {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1b1208102be6e851f20066bf0e7a96b7d48a07c9b0cfe6d0d4545c2f6cadab"},
+ {file = "contourpy-1.2.0-cp310-cp310-win32.whl", hash = "sha256:34b9071c040d6fe45d9826cbbe3727d20d83f1b6110d219b83eb0e2a01d79488"},
+ {file = "contourpy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd2f1ae63998da104f16a8b788f685e55d65760cd1929518fd94cd682bf03e41"},
+ {file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"},
+ {file = "contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"},
+ {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"},
+ {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"},
+ {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"},
+ {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"},
+ {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"},
+ {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"},
+ {file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"},
+ {file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"},
+ {file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"},
+ {file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"},
+ {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"},
+ {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"},
+ {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"},
+ {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"},
+ {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"},
+ {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"},
+ {file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"},
+ {file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"},
+ {file = "contourpy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5fd1810973a375ca0e097dee059c407913ba35723b111df75671a1976efa04bc"},
+ {file = "contourpy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:999c71939aad2780f003979b25ac5b8f2df651dac7b38fb8ce6c46ba5abe6ae9"},
+ {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7caf9b241464c404613512d5594a6e2ff0cc9cb5615c9475cc1d9b514218ae8"},
+ {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:266270c6f6608340f6c9836a0fb9b367be61dde0c9a9a18d5ece97774105ff3e"},
+ {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbd50d0a0539ae2e96e537553aff6d02c10ed165ef40c65b0e27e744a0f10af8"},
+ {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5"},
+ {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ce96dd400486e80ac7d195b2d800b03e3e6a787e2a522bfb83755938465a819e"},
+ {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d3364b999c62f539cd403f8123ae426da946e142312a514162adb2addd8d808"},
+ {file = "contourpy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:1c88dfb9e0c77612febebb6ac69d44a8d81e3dc60f993215425b62c1161353f4"},
+ {file = "contourpy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:78e6ad33cf2e2e80c5dfaaa0beec3d61face0fb650557100ee36db808bfa6843"},
+ {file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"},
+ {file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"},
+ {file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"},
+ {file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"},
+]
+
+[package.dependencies]
+numpy = ">=1.20,<2.0"
+
+[package.extras]
+bokeh = ["bokeh", "selenium"]
+docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
+mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pillow"]
+test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
+test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"]
+
+[[package]]
+name = "cycler"
+version = "0.12.1"
+description = "Composable style cycles"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
+ {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
+]
+
+[package.extras]
+docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
+tests = ["pytest", "pytest-cov", "pytest-xdist"]
+
+[[package]]
+name = "debugpy"
+version = "1.8.0"
+description = "An implementation of the Debug Adapter Protocol for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"},
+ {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"},
+ {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"},
+ {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"},
+ {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"},
+ {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"},
+ {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"},
+ {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"},
+ {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"},
+ {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"},
+ {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"},
+ {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"},
+ {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"},
+ {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"},
+ {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"},
+ {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"},
+ {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"},
+ {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"},
+]
+
+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
+
+[[package]]
+name = "executing"
+version = "2.0.1"
+description = "Get the currently executing AST node of a frame, and other information"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"},
+ {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"},
+]
+
+[package.extras]
+tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
+
+[[package]]
+name = "fonttools"
+version = "4.47.0"
+description = "Tools to manipulate font files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d2404107626f97a221dc1a65b05396d2bb2ce38e435f64f26ed2369f68675d9"},
+ {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c01f409be619a9a0f5590389e37ccb58b47264939f0e8d58bfa1f3ba07d22671"},
+ {file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d986b66ff722ef675b7ee22fbe5947a41f60a61a4da15579d5e276d897fbc7fa"},
+ {file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8acf6dd0434b211b3bd30d572d9e019831aae17a54016629fa8224783b22df8"},
+ {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:495369c660e0c27233e3c572269cbe520f7f4978be675f990f4005937337d391"},
+ {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59227d7ba5b232281c26ae04fac2c73a79ad0e236bca5c44aae904a18f14faf"},
+ {file = "fonttools-4.47.0-cp310-cp310-win32.whl", hash = "sha256:59a6c8b71a245800e923cb684a2dc0eac19c56493e2f896218fcf2571ed28984"},
+ {file = "fonttools-4.47.0-cp310-cp310-win_amd64.whl", hash = "sha256:52c82df66201f3a90db438d9d7b337c7c98139de598d0728fb99dab9fd0495ca"},
+ {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:854421e328d47d70aa5abceacbe8eef231961b162c71cbe7ff3f47e235e2e5c5"},
+ {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:511482df31cfea9f697930f61520f6541185fa5eeba2fa760fe72e8eee5af88b"},
+ {file = "fonttools-4.47.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0e2c88c8c985b7b9a7efcd06511fb0a1fe3ddd9a6cd2895ef1dbf9059719d7"},
+ {file = "fonttools-4.47.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7a0a8848726956e9d9fb18c977a279013daadf0cbb6725d2015a6dd57527992"},
+ {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e869da810ae35afb3019baa0d0306cdbab4760a54909c89ad8904fa629991812"},
+ {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd23848f877c3754f53a4903fb7a593ed100924f9b4bff7d5a4e2e8a7001ae11"},
+ {file = "fonttools-4.47.0-cp311-cp311-win32.whl", hash = "sha256:bf1810635c00f7c45d93085611c995fc130009cec5abdc35b327156aa191f982"},
+ {file = "fonttools-4.47.0-cp311-cp311-win_amd64.whl", hash = "sha256:61df4dee5d38ab65b26da8efd62d859a1eef7a34dcbc331299a28e24d04c59a7"},
+ {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e3f4d61f3a8195eac784f1d0c16c0a3105382c1b9a74d99ac4ba421da39a8826"},
+ {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:174995f7b057e799355b393e97f4f93ef1f2197cbfa945e988d49b2a09ecbce8"},
+ {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea592e6a09b71cb7a7661dd93ac0b877a6228e2d677ebacbad0a4d118494c86d"},
+ {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bdbe90b33897d9cc4a39f8e415b0fcdeae4c40a99374b8a4982f127ff5c767"},
+ {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:843509ae9b93db5aaf1a6302085e30bddc1111d31e11d724584818f5b698f500"},
+ {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9acfa1cdc479e0dde528b61423855913d949a7f7fe09e276228298fef4589540"},
+ {file = "fonttools-4.47.0-cp312-cp312-win32.whl", hash = "sha256:66c92ec7f95fd9732550ebedefcd190a8d81beaa97e89d523a0d17198a8bda4d"},
+ {file = "fonttools-4.47.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8fa20748de55d0021f83754b371432dca0439e02847962fc4c42a0e444c2d78"},
+ {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c75e19971209fbbce891ebfd1b10c37320a5a28e8d438861c21d35305aedb81c"},
+ {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e79f1a3970d25f692bbb8c8c2637e621a66c0d60c109ab48d4a160f50856deff"},
+ {file = "fonttools-4.47.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:562681188c62c024fe2c611b32e08b8de2afa00c0c4e72bed47c47c318e16d5c"},
+ {file = "fonttools-4.47.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a77a60315c33393b2bd29d538d1ef026060a63d3a49a9233b779261bad9c3f71"},
+ {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4fabb8cc9422efae1a925160083fdcbab8fdc96a8483441eb7457235df625bd"},
+ {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2a78dba8c2a1e9d53a0fb5382979f024200dc86adc46a56cbb668a2249862fda"},
+ {file = "fonttools-4.47.0-cp38-cp38-win32.whl", hash = "sha256:e6b968543fde4119231c12c2a953dcf83349590ca631ba8216a8edf9cd4d36a9"},
+ {file = "fonttools-4.47.0-cp38-cp38-win_amd64.whl", hash = "sha256:4a9a51745c0439516d947480d4d884fa18bd1458e05b829e482b9269afa655bc"},
+ {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:62d8ddb058b8e87018e5dc26f3258e2c30daad4c87262dfeb0e2617dd84750e6"},
+ {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5dde0eab40faaa5476133123f6a622a1cc3ac9b7af45d65690870620323308b4"},
+ {file = "fonttools-4.47.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4da089f6dfdb822293bde576916492cd708c37c2501c3651adde39804630538"},
+ {file = "fonttools-4.47.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:253bb46bab970e8aae254cebf2ae3db98a4ef6bd034707aa68a239027d2b198d"},
+ {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1193fb090061efa2f9e2d8d743ae9850c77b66746a3b32792324cdce65784154"},
+ {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:084511482dd265bce6dca24c509894062f0117e4e6869384d853f46c0e6d43be"},
+ {file = "fonttools-4.47.0-cp39-cp39-win32.whl", hash = "sha256:97620c4af36e4c849e52661492e31dc36916df12571cb900d16960ab8e92a980"},
+ {file = "fonttools-4.47.0-cp39-cp39-win_amd64.whl", hash = "sha256:e77bdf52185bdaf63d39f3e1ac3212e6cfa3ab07d509b94557a8902ce9c13c82"},
+ {file = "fonttools-4.47.0-py3-none-any.whl", hash = "sha256:d6477ba902dd2d7adda7f0fd3bfaeb92885d45993c9e1928c9f28fc3961415f7"},
+ {file = "fonttools-4.47.0.tar.gz", hash = "sha256:ec13a10715eef0e031858c1c23bfaee6cba02b97558e4a7bfa089dba4a8c2ebf"},
+]
+
+[package.extras]
+all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
+graphite = ["lz4 (>=1.7.4.2)"]
+interpolatable = ["munkres", "pycairo", "scipy"]
+lxml = ["lxml (>=4.0,<5)"]
+pathops = ["skia-pathops (>=0.5.0)"]
+plot = ["matplotlib"]
+repacker = ["uharfbuzz (>=0.23.0)"]
+symfont = ["sympy"]
+type1 = ["xattr"]
+ufo = ["fs (>=2.2.0,<3)"]
+unicode = ["unicodedata2 (>=15.1.0)"]
+woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "ipykernel"
+version = "6.28.0"
+description = "IPython Kernel for Jupyter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"},
+ {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"},
+]
+
+[package.dependencies]
+appnope = {version = "*", markers = "platform_system == \"Darwin\""}
+comm = ">=0.1.1"
+debugpy = ">=1.6.5"
+ipython = ">=7.23.1"
+jupyter-client = ">=6.1.12"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+matplotlib-inline = ">=0.1"
+nest-asyncio = "*"
+packaging = "*"
+psutil = "*"
+pyzmq = ">=24"
+tornado = ">=6.1"
+traitlets = ">=5.4.0"
+
+[package.extras]
+cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
+pyqt5 = ["pyqt5"]
+pyside6 = ["pyside6"]
+test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"]
+
+[[package]]
+name = "ipython"
+version = "8.20.0"
+description = "IPython: Productive Interactive Computing"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "ipython-8.20.0-py3-none-any.whl", hash = "sha256:bc9716aad6f29f36c449e30821c9dd0c1c1a7b59ddcc26931685b87b4c569619"},
+ {file = "ipython-8.20.0.tar.gz", hash = "sha256:2f21bd3fc1d51550c89ee3944ae04bbc7bc79e129ea0937da6e6c68bfdbf117a"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+decorator = "*"
+jedi = ">=0.16"
+matplotlib-inline = "*"
+pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
+prompt-toolkit = ">=3.0.41,<3.1.0"
+pygments = ">=2.4.0"
+stack-data = "*"
+traitlets = ">=5"
+
+[package.extras]
+all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
+black = ["black"]
+doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"]
+kernel = ["ipykernel"]
+nbconvert = ["nbconvert"]
+nbformat = ["nbformat"]
+notebook = ["ipywidgets", "notebook"]
+parallel = ["ipyparallel"]
+qtconsole = ["qtconsole"]
+test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
+test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath", "trio"]
+
+[[package]]
+name = "jedi"
+version = "0.19.1"
+description = "An autocompletion tool for Python that can be used for text editors."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
+ {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
+]
+
+[package.dependencies]
+parso = ">=0.8.3,<0.9.0"
+
+[package.extras]
+docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
+
+[[package]]
+name = "jupyter-client"
+version = "8.6.0"
+description = "Jupyter protocol implementation and client libraries"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"},
+ {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"},
+]
+
+[package.dependencies]
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+python-dateutil = ">=2.8.2"
+pyzmq = ">=23.0"
+tornado = ">=6.2"
+traitlets = ">=5.3"
+
+[package.extras]
+docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
+
+[[package]]
+name = "jupyter-core"
+version = "5.7.1"
+description = "Jupyter core package. A base package on which Jupyter projects rely."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"},
+ {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"},
+]
+
+[package.dependencies]
+platformdirs = ">=2.5"
+pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""}
+traitlets = ">=5.3"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"]
+test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
+
+[[package]]
+name = "kiwisolver"
+version = "1.4.5"
+description = "A fast implementation of the Cassowary constraint solver"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"},
+ {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"},
+ {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"},
+ {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"},
+ {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"},
+ {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"},
+ {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"},
+ {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"},
+ {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"},
+ {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"},
+ {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"},
+ {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"},
+ {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"},
+ {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"},
+ {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"},
+ {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"},
+ {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"},
+ {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"},
+ {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"},
+ {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"},
+ {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"},
+ {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"},
+ {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
+]
+
+[[package]]
+name = "matplotlib"
+version = "3.8.2"
+description = "Python plotting package"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "matplotlib-3.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:09796f89fb71a0c0e1e2f4bdaf63fb2cefc84446bb963ecdeb40dfee7dfa98c7"},
+ {file = "matplotlib-3.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9c6976748a25e8b9be51ea028df49b8e561eed7809146da7a47dbecebab367"},
+ {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78e4f2cedf303869b782071b55fdde5987fda3038e9d09e58c91cc261b5ad18"},
+ {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e208f46cf6576a7624195aa047cb344a7f802e113bb1a06cfd4bee431de5e31"},
+ {file = "matplotlib-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46a569130ff53798ea5f50afce7406e91fdc471ca1e0e26ba976a8c734c9427a"},
+ {file = "matplotlib-3.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:830f00640c965c5b7f6bc32f0d4ce0c36dfe0379f7dd65b07a00c801713ec40a"},
+ {file = "matplotlib-3.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d86593ccf546223eb75a39b44c32788e6f6440d13cfc4750c1c15d0fcb850b63"},
+ {file = "matplotlib-3.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a5430836811b7652991939012f43d2808a2db9b64ee240387e8c43e2e5578c8"},
+ {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9576723858a78751d5aacd2497b8aef29ffea6d1c95981505877f7ac28215c6"},
+ {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba9cbd8ac6cf422f3102622b20f8552d601bf8837e49a3afed188d560152788"},
+ {file = "matplotlib-3.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:03f9d160a29e0b65c0790bb07f4f45d6a181b1ac33eb1bb0dd225986450148f0"},
+ {file = "matplotlib-3.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:3773002da767f0a9323ba1a9b9b5d00d6257dbd2a93107233167cfb581f64717"},
+ {file = "matplotlib-3.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c318c1e95e2f5926fba326f68177dee364aa791d6df022ceb91b8221bd0a627"},
+ {file = "matplotlib-3.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:091275d18d942cf1ee9609c830a1bc36610607d8223b1b981c37d5c9fc3e46a4"},
+ {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b0f3b8ea0e99e233a4bcc44590f01604840d833c280ebb8fe5554fd3e6cfe8d"},
+ {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b1704a530395aaf73912be741c04d181f82ca78084fbd80bc737be04848331"},
+ {file = "matplotlib-3.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533b0e3b0c6768eef8cbe4b583731ce25a91ab54a22f830db2b031e83cca9213"},
+ {file = "matplotlib-3.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:0f4fc5d72b75e2c18e55eb32292659cf731d9d5b312a6eb036506304f4675630"},
+ {file = "matplotlib-3.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:deaed9ad4da0b1aea77fe0aa0cebb9ef611c70b3177be936a95e5d01fa05094f"},
+ {file = "matplotlib-3.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:172f4d0fbac3383d39164c6caafd3255ce6fa58f08fc392513a0b1d3b89c4f89"},
+ {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7d36c2209d9136cd8e02fab1c0ddc185ce79bc914c45054a9f514e44c787917"},
+ {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5864bdd7da445e4e5e011b199bb67168cdad10b501750367c496420f2ad00843"},
+ {file = "matplotlib-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8345b48e95cee45ff25192ed1f4857273117917a4dcd48e3905619bcd9c9b8"},
+ {file = "matplotlib-3.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:7c48d9e221b637c017232e3760ed30b4e8d5dfd081daf327e829bf2a72c731b4"},
+ {file = "matplotlib-3.8.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa11b3c6928a1e496c1a79917d51d4cd5d04f8a2e75f21df4949eeefdf697f4b"},
+ {file = "matplotlib-3.8.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1095fecf99eeb7384dabad4bf44b965f929a5f6079654b681193edf7169ec20"},
+ {file = "matplotlib-3.8.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bddfb1db89bfaa855912261c805bd0e10218923cc262b9159a49c29a7a1c1afa"},
+ {file = "matplotlib-3.8.2.tar.gz", hash = "sha256:01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1"},
+]
+
+[package.dependencies]
+contourpy = ">=1.0.1"
+cycler = ">=0.10"
+fonttools = ">=4.22.0"
+kiwisolver = ">=1.3.1"
+numpy = ">=1.21,<2"
+packaging = ">=20.0"
+pillow = ">=8"
+pyparsing = ">=2.3.1"
+python-dateutil = ">=2.7"
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.6"
+description = "Inline Matplotlib backend for Jupyter"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"},
+ {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"},
+]
+
+[package.dependencies]
+traitlets = "*"
+
+[[package]]
+name = "nest-asyncio"
+version = "1.5.8"
+description = "Patch asyncio to allow nested event loops"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"},
+ {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"},
+]
+
+[[package]]
+name = "networkx"
+version = "3.2.1"
+description = "Python package for creating and manipulating graphs and networks"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"},
+ {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"},
+]
+
+[package.extras]
+default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
+developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
+doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
+extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"]
+test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
+
+[[package]]
+name = "numpy"
+version = "1.26.3"
+description = "Fundamental package for array computing in Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"},
+ {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"},
+ {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"},
+ {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"},
+ {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"},
+ {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"},
+ {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"},
+ {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"},
+ {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"},
+ {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"},
+ {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"},
+ {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"},
+ {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"},
+ {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"},
+ {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"},
+ {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"},
+ {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"},
+ {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"},
+ {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"},
+ {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"},
+ {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"},
+ {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"},
+ {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"},
+ {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"},
+ {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"},
+ {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"},
+ {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"},
+ {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"},
+ {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"},
+ {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"},
+ {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"},
+ {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"},
+ {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"},
+ {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pandas"
+version = "2.1.4"
+description = "Powerful data structures for data analysis, time series, and statistics"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"},
+ {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"},
+ {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"},
+ {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"},
+ {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"},
+ {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"},
+ {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"},
+ {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"},
+ {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"},
+ {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"},
+ {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"},
+ {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"},
+ {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"},
+ {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"},
+ {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"},
+ {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"},
+ {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"},
+ {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"},
+ {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"},
+ {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"},
+ {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"},
+ {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"},
+ {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"},
+ {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"},
+ {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"},
+]
+
+[package.dependencies]
+numpy = [
+ {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""},
+ {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""},
+]
+python-dateutil = ">=2.8.2"
+pytz = ">=2020.1"
+tzdata = ">=2022.1"
+
+[package.extras]
+all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"]
+aws = ["s3fs (>=2022.05.0)"]
+clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"]
+compression = ["zstandard (>=0.17.0)"]
+computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"]
+consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
+excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"]
+feather = ["pyarrow (>=7.0.0)"]
+fss = ["fsspec (>=2022.05.0)"]
+gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"]
+hdf5 = ["tables (>=3.7.0)"]
+html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"]
+mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"]
+output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"]
+parquet = ["pyarrow (>=7.0.0)"]
+performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"]
+plot = ["matplotlib (>=3.6.1)"]
+postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"]
+spss = ["pyreadstat (>=1.1.5)"]
+sql-other = ["SQLAlchemy (>=1.4.36)"]
+test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
+xml = ["lxml (>=4.8.0)"]
+
+[[package]]
+name = "parso"
+version = "0.8.3"
+description = "A Python Parser"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
+ {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
+]
+
+[package.extras]
+qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
+testing = ["docopt", "pytest (<6.0.0)"]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+description = "Pexpect allows easy control of interactive console applications."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
+ {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
+]
+
+[package.dependencies]
+ptyprocess = ">=0.5"
+
+[[package]]
+name = "pillow"
+version = "10.2.0"
+description = "Python Imaging Library (Fork)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"},
+ {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"},
+ {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"},
+ {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"},
+ {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"},
+ {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"},
+ {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"},
+ {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"},
+ {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"},
+ {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"},
+ {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"},
+ {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"},
+ {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"},
+ {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"},
+ {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"},
+ {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"},
+ {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"},
+ {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"},
+ {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"},
+ {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"},
+ {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"},
+ {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"},
+ {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"},
+ {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"},
+ {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"},
+ {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"},
+ {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"},
+ {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"},
+ {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"},
+ {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"},
+ {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"},
+ {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"},
+ {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"},
+ {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"},
+ {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"},
+ {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"},
+]
+
+[package.extras]
+docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
+fpx = ["olefile"]
+mic = ["olefile"]
+tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
+typing = ["typing-extensions"]
+xmp = ["defusedxml"]
+
+[[package]]
+name = "platformdirs"
+version = "4.1.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"},
+ {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.3.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.43"
+description = "Library for building powerful interactive command lines in Python"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
+ {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
+]
+
+[package.dependencies]
+wcwidth = "*"
+
+[[package]]
+name = "psutil"
+version = "5.9.7"
+description = "Cross-platform lib for process and system monitoring in Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"},
+ {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"},
+ {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"},
+ {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"},
+ {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"},
+ {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"},
+ {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"},
+ {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"},
+ {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"},
+ {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"},
+ {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"},
+ {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"},
+ {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"},
+ {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"},
+ {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"},
+ {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+description = "Run a subprocess in a pseudo terminal"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
+ {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.2"
+description = "Safely evaluate AST nodes without side effects"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
+ {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
+[[package]]
+name = "pydantic"
+version = "2.5.3"
+description = "Data validation using Python type hints"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"},
+ {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.4.0"
+pydantic-core = "2.14.6"
+typing-extensions = ">=4.6.1"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.14.6"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"},
+ {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"},
+ {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"},
+ {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"},
+ {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"},
+ {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"},
+ {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"},
+ {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"},
+ {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"},
+ {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"},
+ {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"},
+ {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"},
+ {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"},
+ {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"},
+ {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"},
+ {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"},
+ {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"},
+ {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"},
+ {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"},
+ {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"},
+ {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"},
+ {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"},
+ {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"},
+ {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"},
+ {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"},
+ {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
+[[package]]
+name = "pygments"
+version = "2.17.2"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
+ {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
+]
+
+[package.extras]
+plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pyparsing"
+version = "3.1.1"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+optional = false
+python-versions = ">=3.6.8"
+files = [
+ {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
+ {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
+]
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
+[[package]]
+name = "pytest"
+version = "7.4.4"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
+ {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytz"
+version = "2023.3.post1"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
+ {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
+]
+
+[[package]]
+name = "pywin32"
+version = "306"
+description = "Python for Window Extensions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
+ {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
+ {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
+ {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
+ {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
+ {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
+ {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
+ {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
+ {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
+ {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
+ {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
+ {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
+ {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
+ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
+]
+
+[[package]]
+name = "pyzmq"
+version = "25.1.2"
+description = "Python bindings for 0MQ"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"},
+ {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"},
+ {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"},
+ {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"},
+ {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"},
+ {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"},
+ {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"},
+ {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"},
+ {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"},
+ {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"},
+ {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"},
+ {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"},
+ {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"},
+ {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"},
+ {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"},
+ {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"},
+ {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"},
+ {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"},
+ {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"},
+ {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"},
+ {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"},
+ {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"},
+ {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"},
+ {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"},
+ {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"},
+ {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"},
+ {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"},
+ {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"},
+ {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"},
+ {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"},
+ {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"},
+ {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"},
+ {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"},
+ {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"},
+ {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"},
+ {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"},
+ {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"},
+ {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"},
+ {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"},
+ {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"},
+ {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"},
+ {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"},
+ {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"},
+ {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"},
+ {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"},
+ {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"},
+ {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"},
+ {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"},
+ {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"},
+ {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"},
+ {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"},
+ {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"},
+ {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"},
+ {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"},
+ {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"},
+ {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"},
+ {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"},
+ {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"},
+ {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"},
+ {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"},
+ {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"},
+ {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"},
+ {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"},
+ {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"},
+ {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"},
+ {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"},
+ {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"},
+ {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"},
+ {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"},
+ {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"},
+ {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"},
+ {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"},
+]
+
+[package.dependencies]
+cffi = {version = "*", markers = "implementation_name == \"pypy\""}
+
+[[package]]
+name = "seaborn"
+version = "0.13.1"
+description = "Statistical data visualization"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "seaborn-0.13.1-py3-none-any.whl", hash = "sha256:6baa69b6d1169ae59037971491c450c0b73332b42bd4b23570b62a546bc61cb8"},
+ {file = "seaborn-0.13.1.tar.gz", hash = "sha256:bfad65e9c5989e5e1897e61bdbd2f22e62455940ca76fd49eca3ed69345b9179"},
+]
+
+[package.dependencies]
+matplotlib = ">=3.4,<3.6.1 || >3.6.1"
+numpy = ">=1.20,<1.24.0 || >1.24.0"
+pandas = ">=1.2"
+
+[package.extras]
+dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-xdist"]
+docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"]
+stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+description = "Extract data from python stack frames and tracebacks for informative displays"
+optional = false
+python-versions = "*"
+files = [
+ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
+ {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
+]
+
+[package.dependencies]
+asttokens = ">=2.1.0"
+executing = ">=1.2.0"
+pure-eval = "*"
+
+[package.extras]
+tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
+
+[[package]]
+name = "tornado"
+version = "6.4"
+description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
+optional = false
+python-versions = ">= 3.8"
+files = [
+ {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"},
+ {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"},
+ {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"},
+ {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"},
+ {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"},
+ {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"},
+ {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"},
+ {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"},
+ {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"},
+ {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"},
+ {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"},
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.1"
+description = "Traitlets Python configuration system"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"},
+ {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"},
+]
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.9.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
+ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2023.4"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
+ {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+description = "Measures the displayed width of unicode strings in a terminal"
+optional = false
+python-versions = "*"
+files = [
+ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
+ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "17504894e73dc34a8b545bf0b20dbc4bd2f176cdf51990e52104463686088d41"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..51c7528
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,29 @@
+[tool.poetry]
+name = "factryengine"
+version = "0.0.0"
+description = "production / job shop / resource scheduler for Python"
+authors = ["Jacob Østergaard Nielsen "]
+license = "MIT"
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+pandas = "^2.1.4"
+seaborn = "^0.13.1"
+networkx = "^3.2.1"
+pydantic = "^2.5.3"
+
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^7.4.4"
+ipykernel = "^6.28.0"
+
+
+[tool.poetry-dynamic-versioning]
+enable = true
+[build-system]
+requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning>=1.0.0,<2.0.0"]
+build-backend = "poetry_dynamic_versioning.backend"
+
+[tool.ruff]
+ignore-init-module-imports = true
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index dac34af..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-numpy
-networkx
-pandas
-pydantic
-seaborn
-matplotlib
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 43ddaaf..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,21 +0,0 @@
-[versioneer]
-VCS = git
-style = pep440
-versionfile_source = src/factryengine/_version.py
-versionfile_build = factryengine/_version.py
-tag_prefix =
-parentdir_prefix =
-
-[options]
-packages = find:
-python_requires = >=3.6
-package_dir =
- =src
-zip_safe = no
-
-[options.extras_require]
-testing =
- pytest>=6.0
- pytest-cov>=2.0
-
-
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 376758f..0000000
--- a/setup.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import setuptools
-
-import versioneer
-
-with open("docs/README.md", "r", encoding="utf-8") as f:
- long_description = f.read()
-with open("requirements.txt", "r") as fh:
- requirements = [line.strip() for line in fh]
-
-setuptools.setup(
- name="factryengine",
- version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(),
- description="production / job shop / resource scheduler for Python",
- author="Jacob Østergaard Nielsen",
- author_email="jaoe@oestergaard-as.dk",
- url="https://github.com/Yacobolo/factryengine",
- classifiers=[
- "Programming Language :: Python :: 3",
- "Operating System :: OS Independent",
- "License :: OSI Approved :: MIT License",
- ],
- long_description=long_description,
- long_description_content_type="text/markdown",
- packages=setuptools.find_packages("src"),
- package_dir={"": "src"},
- python_requires=">=3.6",
- install_requires=requirements,
- extras_require={
- "testing": [
- "pytest>=6.0",
- "pytest-cov>=2.0",
- ],
- },
-)
diff --git a/src/factryengine/__init__.py b/src/factryengine/__init__.py
index aa41d55..1fe482b 100644
--- a/src/factryengine/__init__.py
+++ b/src/factryengine/__init__.py
@@ -1,6 +1,9 @@
-from .models.resource import Resource # noqa: F401
-from .models.task import Task # noqa: F401
-from .scheduler.core import Scheduler # noqa: F401
+import warnings
-from . import _version
-__version__ = _version.get_versions()['version']
+import numpy as np
+
+from .models import Assignment, Resource, ResourceGroup, Task
+from .scheduler.core import Scheduler
+
+# Ignore numpy's UserWarning
+warnings.filterwarnings("ignore", category=UserWarning, module="numpy.*")
diff --git a/src/factryengine/_version.py b/src/factryengine/_version.py
deleted file mode 100644
index 9c7b0fc..0000000
--- a/src/factryengine/_version.py
+++ /dev/null
@@ -1,716 +0,0 @@
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain.
-# Generated by versioneer-0.29
-# https://github.com/python-versioneer/python-versioneer
-
-"""Git implementation of _version.py."""
-
-import errno
-import functools
-import os
-import re
-import subprocess
-import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple
-
-
-def get_keywords() -> Dict[str, str]:
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "$Format:%d$"
- git_full = "$Format:%H$"
- git_date = "$Format:%ci$"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
- VCS: str
- style: str
- tag_prefix: str
- parentdir_prefix: str
- versionfile_source: str
- verbose: bool
-
-
-def get_config() -> VersioneerConfig:
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = "pep440"
- cfg.tag_prefix = ""
- cfg.parentdir_prefix = ""
- cfg.versionfile_source = "src/factryengine/_version.py"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
-
-
-def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
- """Create decorator to mark a method as the handler of a VCS."""
-
- def decorate(f: Callable) -> Callable:
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
-
- return decorate
-
-
-def run_command(
- commands: List[str],
- args: List[str],
- cwd: Optional[str] = None,
- verbose: bool = False,
- hide_stderr: bool = False,
- env: Optional[Dict[str, str]] = None,
-) -> Tuple[Optional[str], Optional[int]]:
- """Call the given command(s)."""
- assert isinstance(commands, list)
- process = None
-
- popen_kwargs: Dict[str, Any] = {}
- if sys.platform == "win32":
- # This hides the console window if pythonw.exe is used
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- popen_kwargs["startupinfo"] = startupinfo
-
- for command in commands:
- try:
- dispcmd = str([command] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- process = subprocess.Popen(
- [command] + args,
- cwd=cwd,
- env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr else None),
- **popen_kwargs,
- )
- break
- except OSError as e:
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = process.communicate()[0].strip().decode()
- if process.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, process.returncode
- return stdout, process.returncode
-
-
-def versions_from_parentdir(
- parentdir_prefix: str,
- root: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for _ in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {
- "version": dirname[len(parentdir_prefix) :],
- "full-revisionid": None,
- "dirty": False,
- "error": None,
- "date": None,
- }
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords: Dict[str, str] = {}
- try:
- with open(versionfile_abs, "r") as fobj:
- for line in fobj:
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- except OSError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(
- keywords: Dict[str, str],
- tag_prefix: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Get version information from git keywords."""
- if "refnames" not in keywords:
- raise NotThisMethod("Short version file found")
- date = keywords.get("date")
- if date is not None:
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
-
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = {r.strip() for r in refnames.strip("()").split(",")}
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = {r for r in refs if re.search(r"\d", r)}
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix) :]
- # Filter out refs that exactly match prefix or that don't start
- # with a number once the prefix is stripped (mostly a concern
- # when prefix is '')
- if not re.match(r"\d", r):
- continue
- if verbose:
- print("picking %s" % r)
- return {
- "version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": None,
- "date": date,
- }
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {
- "version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": "no suitable tags",
- "date": None,
- }
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(
- tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command
-) -> Dict[str, Any]:
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- # GIT_DIR can interfere with correct operation of Versioneer.
- # It may be intended to be passed to the Versioneer-versioned project,
- # but that should not change where we get our version from.
- env = os.environ.copy()
- env.pop("GIT_DIR", None)
- runner = functools.partial(runner, env=env)
-
- _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = runner(
- GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- f"{tag_prefix}[[:digit:]]*",
- ],
- cwd=root,
- )
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces: Dict[str, Any] = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
- # --abbrev-ref was added in git-1.6.3
- if rc != 0 or branch_name is None:
- raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
- branch_name = branch_name.strip()
-
- if branch_name == "HEAD":
- # If we aren't exactly on a branch, pick a branch which represents
- # the current commit. If all else fails, we are on a branchless
- # commit.
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
- # --contains was added in git-1.5.4
- if rc != 0 or branches is None:
- raise NotThisMethod("'git branch --contains' returned error")
- branches = branches.split("\n")
-
- # Remove the first line if we're running detached
- if "(" in branches[0]:
- branches.pop(0)
-
- # Strip off the leading "* " from the list of branches.
- branches = [branch[2:] for branch in branches]
- if "master" in branches:
- branch_name = "master"
- elif not branches:
- branch_name = None
- else:
- # Pick the first branch that is returned. Good or bad.
- branch_name = branches[0]
-
- pieces["branch"] = branch_name
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[: git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
- if not mo:
- # unparsable. Maybe git-describe is misbehaving?
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix) :]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
- pieces["distance"] = len(out.split()) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces: Dict[str, Any]) -> str:
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces: Dict[str, Any]) -> str:
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_branch(pieces: Dict[str, Any]) -> str:
- """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
-
- The ".dev0" means not master branch. Note that .dev0 sorts backwards
- (a feature branch will appear "older" than the master branch).
-
- Exceptions:
- 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0"
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
- """Split pep440 version string at the post-release segment.
-
- Returns the release segments before the post-release and the
- post-release version number (or -1 if no post-release segment is present).
- """
- vc = str.split(ver, ".post")
- return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
-
-
-def render_pep440_pre(pieces: Dict[str, Any]) -> str:
- """TAG[.postN.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post0.devDISTANCE
- """
- if pieces["closest-tag"]:
- if pieces["distance"]:
- # update the post release segment
- tag_version, post_version = pep440_split_post(pieces["closest-tag"])
- rendered = tag_version
- if post_version is not None:
- rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
- else:
- rendered += ".post0.dev%d" % (pieces["distance"])
- else:
- # no commits, use the tag as the version
- rendered = pieces["closest-tag"]
- else:
- # exception #1
- rendered = "0.post0.dev%d" % pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
-
-
-def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
-
- The ".dev0" means not master branch.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_old(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces: Dict[str, Any]) -> str:
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces: Dict[str, Any]) -> str:
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {
- "version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None,
- }
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-branch":
- rendered = render_pep440_branch(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-post-branch":
- rendered = render_pep440_post_branch(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {
- "version": rendered,
- "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"],
- "error": None,
- "date": pieces.get("date"),
- }
-
-
-def get_versions() -> Dict[str, Any]:
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for _ in cfg.versionfile_source.split("/"):
- root = os.path.dirname(root)
- except NameError:
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None,
- }
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version",
- "date": None,
- }
diff --git a/src/factryengine/models/__init__.py b/src/factryengine/models/__init__.py
index 5b8bd48..7aea31d 100644
--- a/src/factryengine/models/__init__.py
+++ b/src/factryengine/models/__init__.py
@@ -1,2 +1,2 @@
-from .resource import Resource
-from .task import Task
+from .resource import Resource, ResourceGroup
+from .task import Assignment, Task
diff --git a/src/factryengine/models/resource.py b/src/factryengine/models/resource.py
index f5868cf..cee2353 100644
--- a/src/factryengine/models/resource.py
+++ b/src/factryengine/models/resource.py
@@ -1,9 +1,10 @@
import numpy as np
-from pydantic import BaseModel
+from pydantic import BaseModel, Field, validator
class Resource(BaseModel):
id: int
+ name: str = ""
available_windows: list[tuple[int, int]] = []
efficiency_multiplier: float = 1
@@ -15,6 +16,12 @@ def __eq__(self, other):
return self.id == other.id
return False
+ @validator("name", pre=True, always=True)
+ def set_name(cls, v, values):
+ if v == "":
+ return str(values.get("id"))
+ return v
+
def merge_intervals(self) -> None:
"""
Merges overlapping intervals in available_windows.
@@ -42,3 +49,11 @@ def merge_intervals(self) -> None:
self.available_windows = [
(windows[i, 0], windows[j, 1]) for i, j in zip(start_indices, end_indices)
]
+
+
+class ResourceGroup(BaseModel):
+ resources: set[Resource] = Field(..., min_items=1)
+
+ def get_resource_ids(self) -> tuple[int]:
+ """returns a tuple of resource ids"""
+ return tuple([resource.id for resource in self.resources])
diff --git a/src/factryengine/models/task.py b/src/factryengine/models/task.py
index 5e4d8cb..cf04f20 100644
--- a/src/factryengine/models/task.py
+++ b/src/factryengine/models/task.py
@@ -1,99 +1,87 @@
-from itertools import count
-
-from pydantic import BaseModel, Field, PrivateAttr, validator
-
-from .resource import Resource
+from pydantic import BaseModel, Field, model_validator, validator
+
+from .resource import Resource, ResourceGroup
+
+
+class Assignment(BaseModel):
+ """
+ Atlest one assignment is required for the entire duration of the task,
+ it could be an operator or worker. If multiple resources are assigned they
+ minimize the completion time for the task.
+ """
+
+ resource_groups: list[ResourceGroup] = Field(..., min_items=1)
+ resource_count: int = Field(None)
+ use_all_resources: bool = Field(
+ False, description="will use all resources available"
+ )
+
+ @model_validator(mode="after")
+ def check_valid_combinations(self):
+ if self.resource_count is None and self.use_all_resources is False:
+ raise ValueError("Either resource_count or use_all_resources must be set")
+
+ if self.resource_count:
+ if self.resource_count < 1:
+ raise ValueError("resource_count must be greater than 0")
+ return self
+
+ def get_resource_ids(self) -> list[tuple[int]]:
+ """returns a list of tuples of resource ids for each resource group in the assignment"""
+ resource_ids = []
+ for resource_group in self.resource_groups:
+ resource_ids.append(
+ tuple([resource.id for resource in resource_group.resources])
+ )
+ return resource_ids
+
+ def get_unique_resources(self) -> set[Resource]:
+ """returns a set of all unique resources required for the assignment"""
+ unique_resources = set()
+ for resource_group in self.resource_groups:
+ unique_resources.update(resource_group.resources)
+ return unique_resources
class Task(BaseModel):
- id: int | str
+ id: int
+ name: str = ""
duration: int = Field(gt=0)
priority: int = Field(gt=0)
- resources: list[set[Resource]]
- resource_count: int | str = 1
- predecessors: list["Task"] = []
+ assignments: list[Assignment] = []
+ constraints: set[Resource] = set()
+ predecessor_ids: set[int] = set()
predecessor_delay: int = Field(0, gt=0)
- batch_size: int = Field(None, gt=0)
quantity: int = Field(None, gt=0)
- _batch_id: int = PrivateAttr(None)
-
- @property
- def uid(self) -> str:
- """returns the unique id of the task"""
- if self.batch_id is None:
- return str(self.id)
- else:
- return f"{self.id}-{self.batch_id}"
-
- @property
- def batch_id(self):
- """returns the batch id of the task"""
- return self._batch_id
def __hash__(self):
- return hash(self.uid)
+ return hash(self.id)
def __eq__(self, other):
if isinstance(other, Task):
- return self.uid == other.uid
+ return self.id == other.id
return False
- @validator("resources", pre=True)
- def ensure_list(cls, v):
- """ensures that the resources are in the form of a list of lists"""
- if not isinstance(v, list): # if a single resource object is passed
- return [[v]] # make it a list of list
- if (
- isinstance(v, list) and len(v) > 0 and not isinstance(v[0], list)
- ): # if a list of resources is passed
- return [v] # make it a list of list
- return v # if a list of lists is passed, return as it is
-
- @validator("resource_count", always=True)
- def set_resource_count(cls, v, values):
- """
- sets the resource count of the task. If resource_count is set to "all", it is
- set to the maximum number of resources in any resource group
- """
- if isinstance(v, str) and v.lower() == "all":
- if "resources" in values:
- return max(
- len(resource_group) for resource_group in values["resources"]
- )
-
- elif isinstance(v, int):
- return v
- else:
- raise ValueError("Invalid value for resource_count.")
-
- def set_batch_id(self, batch_id):
- """sets the batch id of the task"""
- self._batch_id = batch_id
-
- def get_resources(self):
- """returns a list of all resources required for the task"""
- return [
- resource for resource_list in self.resources for resource in resource_list
- ]
-
- def get_resource_group_count(self):
- """returns the number of resource groups required for the task"""
- return len(self.resources)
-
- def get_resource_group_indices(self) -> list[list[int]]:
- """
- returns a list of lists of indices of resources in each resource group
- """
- counter = count()
- return [[next(counter) for _ in sublist] for sublist in self.resources]
-
- def is_splittable(self):
- """
- Checks if the task is splittable into batches.
- """
- return (
- self.batch_size is not None
- and self.quantity is not None
- and self.batch_size > 0
- and self.quantity > self.batch_size
- )
+ @model_validator(mode="after")
+ def check_assigments_or_constraints_are_set(self):
+ if not self.assignments and not self.constraints:
+ raise ValueError("Either assignments or constraints must be set")
+ return self
+
+ def get_unique_resources(self) -> set[Resource]:
+ """returns a set of all unique resources required for the task"""
+ unique_resources = set()
+ for assignment in self.assignments:
+ unique_resources.update(assignment.get_unique_resources())
+ unique_resources.update(self.constraints)
+ return unique_resources
+
+ @validator("name", pre=True, always=True)
+ def set_name(cls, v, values) -> str:
+ if v == "":
+ return str(values.get("id"))
+ return v
+
+ def get_id(self) -> int:
+ """returns the task id"""
+ return self.id
diff --git a/src/factryengine/scheduler/core.py b/src/factryengine/scheduler/core.py
index 666164d..abb9473 100644
--- a/src/factryengine/scheduler/core.py
+++ b/src/factryengine/scheduler/core.py
@@ -1,39 +1,50 @@
-from ..models import Task
+from ..models import Resource, Task
from .heuristic_solver.main import HeuristicSolver
from .scheduler_result import SchedulerResult
-from .task_batch_processor import TaskBatchProcessor
from .task_graph import TaskGraph
class Scheduler:
- def __init__(self, tasks: list[Task]):
+ def __init__(self, tasks: list[Task], resources: list[Resource]):
self.tasks = tasks
- self.resources = set(
- resource for task in tasks for resource in task.get_resources()
- )
+ self.resources = resources
self.task_dict = self.get_task_dict(tasks)
self.task_graph = TaskGraph(self.task_dict)
- def schedule(self):
- # merge intervals for all resources
+ def schedule(self) -> SchedulerResult:
+ """
+ Schedule tasks based on the task order from the task graph.
+ """
+
+ # Merge intervals for all resources
for resource in self.resources:
resource.merge_intervals()
- # schedule tasks
+ # Get the order in which tasks should be scheduled
task_order = self.task_graph.get_task_order()
- heuristic_solver = HeuristicSolver(self.task_dict, self.resources, task_order)
- result = heuristic_solver.solve()
- scheduler_result = SchedulerResult(result)
+ # Create a heuristic solver with the tasks, resources, and task order
+ heuristic_solver = HeuristicSolver(
+ task_dict=self.task_dict, resources=self.resources, task_order=task_order
+ )
+
+ # Use the heuristic solver to find a solution
+ solver_result = heuristic_solver.solve()
+
+ # Create a scheduler result with the solver result and unscheduled tasks
+ scheduler_result = SchedulerResult(
+ task_vars=solver_result,
+ unscheduled_task_ids=heuristic_solver.unscheduled_task_ids,
+ )
+
+ # Print a summary of the scheduling results
print(scheduler_result.summary())
+
+ # Return the scheduler result
return scheduler_result
def get_task_dict(self, tasks: list[Task]):
"""
- returns the task dictionary with tasks split into batches
+ returns the task dictionary with task id as key and task object as value
"""
- task_dict = {task.uid: task for task in tasks}
- task_graph = TaskGraph(task_dict).graph
- task_batch_processor = TaskBatchProcessor(task_graph, task_dict)
- task_dict_with_batches = task_batch_processor.split_tasks_into_batches()
- return task_dict_with_batches
+ return {task.get_id(): task for task in tasks}
diff --git a/src/factryengine/scheduler/heuristic_solver/exceptions.py b/src/factryengine/scheduler/heuristic_solver/exceptions.py
new file mode 100644
index 0000000..9d420b7
--- /dev/null
+++ b/src/factryengine/scheduler/heuristic_solver/exceptions.py
@@ -0,0 +1,4 @@
+class AllocationError(Exception):
+ """Exception raised when task allocation fails."""
+
+ pass
diff --git a/src/factryengine/scheduler/heuristic_solver/main.py b/src/factryengine/scheduler/heuristic_solver/main.py
index 81552e2..4b5bb52 100644
--- a/src/factryengine/scheduler/heuristic_solver/main.py
+++ b/src/factryengine/scheduler/heuristic_solver/main.py
@@ -1,6 +1,8 @@
import numpy as np
from ...models import Resource, Task
+from ..utils import get_task_predecessors
+from .exceptions import AllocationError
from .task_allocator import TaskAllocator
from .window_manager import WindowManager
@@ -17,95 +19,103 @@ def __init__(
self.task_allocator = TaskAllocator()
self.window_manager = WindowManager(resources)
self.task_vars = {
- task_uid: {
- "task_uid": task_uid,
+ task_id: {
+ "task_id": task_id,
"assigned_resource_ids": None,
"task_start": None,
"task_end": None,
"resource_intervals": None,
+ "error_message": None,
}
- for task_uid in self.task_dict.keys()
+ for task_id in self.task_dict.keys()
}
+ self.unscheduled_task_ids = []
- def solve(self):
- unscheduled_tasks = []
-
- for task_uid in self.task_order:
- task = self.task_dict[task_uid]
-
- # get task resources and windows dict
- task_resource_ids = np.array(
- [resource.id for resource in task.get_resources()]
- )
+ def solve(self) -> list[dict]:
+ """
+ Iterates through the task order and allocates resources to each task
+ """
+ for task_id in self.task_order:
+ task = self.task_dict[task_id]
- task_earliest_start = self._get_task_earliest_start(task)
+ task_earliest_start = self._get_task_earliest_start(task, self.task_dict)
if task_earliest_start is None:
- unscheduled_tasks.append(task_uid)
+ self.mark_task_as_unscheduled(
+ task_id=task_id, error_message="Task has unscheduled predecessors"
+ )
continue
- task_resource_windows = self.window_manager.get_task_resource_windows(
- task_resource_ids, task_earliest_start
+ # get task resources and windows dict
+ task_resource_ids = np.array(
+ [resource.id for resource in task.get_unique_resources()]
)
- if not task_resource_windows:
- unscheduled_tasks.append(task_uid)
- continue
- # allocate task
- allocated_resource_windows_dict = self.task_allocator.allocate_task(
- resource_windows=task_resource_windows,
- resource_ids=task_resource_ids,
- task_duration=task.duration,
- resource_count=task.resource_count,
- resource_group_indices=task.get_resource_group_indices(),
+ task_resource_windows_dict = (
+ self.window_manager.get_task_resource_windows_dict(
+ task_resource_ids, task_earliest_start
+ )
)
- if not allocated_resource_windows_dict:
- unscheduled_tasks.append(task_uid)
+ if task_resource_windows_dict == {}:
+ self.mark_task_as_unscheduled(
+ task_id=task_id, error_message="No available resources"
+ )
continue
- resource_windows_min_max = self.min_max_dict_np(
- allocated_resource_windows_dict
- )
+ # allocate task
+ try:
+ allocated_resource_windows_dict = self.task_allocator.allocate_task(
+ resource_windows_dict=task_resource_windows_dict,
+ assignments=task.assignments,
+ task_duration=task.duration,
+ constraints=task.constraints,
+ )
+ except AllocationError as e:
+ self.mark_task_as_unscheduled(task_id=task_id, error_message=str(e))
+ continue
# update resource windows
- self.window_manager.update_resource_windows(resource_windows_min_max)
+ self.window_manager.update_resource_windows(allocated_resource_windows_dict)
# Append task values
task_values = {
- "task_uid": task_uid,
+ "task_id": task_id,
"assigned_resource_ids": list(allocated_resource_windows_dict.keys()),
"task_start": min(
- start for start, _ in resource_windows_min_max.values()
+ start for start, _ in allocated_resource_windows_dict.values()
+ ),
+ "task_end": max(
+ end for _, end in allocated_resource_windows_dict.values()
),
- "task_end": max(end for _, end in resource_windows_min_max.values()),
"resource_intervals": allocated_resource_windows_dict.values(),
}
- self.task_vars[task_uid] = task_values
+ self.task_vars[task_id] = task_values
return list(
self.task_vars.values()
) # Return values of the dictionary as a list
- def _get_task_earliest_start(self, task):
+ def _get_task_earliest_start(self, task: Task, task_dict: dict) -> int | None:
"""
Retuns the earliest start of a task based on the latest end of its predecessors.
"""
task_ends = []
- for pred in task.predecessors:
- task_end = self.task_vars[pred.uid]["task_end"]
+
+ predecessors = get_task_predecessors(task, task_dict)
+
+ for pred in predecessors:
+ task_end = self.task_vars[pred.id]["task_end"]
if task_end is None:
return None
task_ends.append(task_end + task.predecessor_delay)
return max(task_ends, default=0)
- def min_max_dict_np(self, d):
- result = {}
-
- for key, value_list in d.items():
- min_val = np.min([x[0] for x in value_list])
- max_val = np.max([x[1] for x in value_list])
- result[key] = (min_val, max_val)
-
- return result
+ def mark_task_as_unscheduled(self, task_id: str, error_message: str) -> None:
+ """
+ Updates the error message of a task
+ """
+ self.task_vars[task_id]["error_message"] = error_message
+ self.unscheduled_task_ids.append(task_id)
+ return
diff --git a/src/factryengine/scheduler/heuristic_solver/matrix.py b/src/factryengine/scheduler/heuristic_solver/matrix.py
new file mode 100644
index 0000000..9ada80d
--- /dev/null
+++ b/src/factryengine/scheduler/heuristic_solver/matrix.py
@@ -0,0 +1,65 @@
+from dataclasses import dataclass
+
+import numpy as np
+
+
+@dataclass
+class Matrix:
+ """
+ Datastructure for representing resource windows as a matrix.
+ Used in the allocation of tasks.
+ Uses numpy arrays for efficient computation.
+ """
+
+ resource_ids: np.ndarray # 1d array of resource ids
+ intervals: np.ndarray # 1d array of intervals
+ resource_matrix: np.ma.core.MaskedArray # 2d array of resource windows
+
+ @classmethod
+ def merge(cls, matrices: list["Matrix"]) -> "Matrix":
+ """
+ merges a list of matrices into one matrix.
+ """
+ resource_ids = np.concatenate([matrix.resource_ids for matrix in matrices])
+
+ # Check if intervals are the same
+ first_intervals = matrices[0].intervals
+ if any(
+ not np.array_equal(first_intervals, matrix.intervals) for matrix in matrices
+ ):
+ raise ValueError("All matrices must have the same intervals")
+
+ resource_matrix = np.ma.hstack([matrix.resource_matrix for matrix in matrices])
+ return cls(resource_ids, first_intervals, resource_matrix)
+
+ @classmethod
+ def compare_update_mask_and_merge(cls, matrices: list["Matrix"]) -> "Matrix":
+ """
+ Compares each row of each array in the list and masks the rows with smallest sums.
+ Returns the combined array with the masked rows.
+ """
+ row_sums = [
+ np.sum(matrix.resource_matrix, axis=1, keepdims=True) for matrix in matrices
+ ]
+ max_sum_index = np.argmax(np.hstack(row_sums), axis=1)
+
+ # update the masks
+ for i, matrix in enumerate(matrices):
+ mask = np.ones_like(matrix.resource_matrix) * (max_sum_index[:, None] != i)
+ matrix.resource_matrix.mask = mask
+
+ return cls.merge(matrices)
+
+ @classmethod
+ def trim(cls, original_matrix: "Matrix", trim_matrix: "Matrix") -> "Matrix":
+ """
+ Trims a Matrix based on another
+ """
+
+ return cls(
+ resource_ids=original_matrix.resource_ids,
+ intervals=trim_matrix.intervals,
+ resource_matrix=original_matrix.resource_matrix[
+ : len(trim_matrix.intervals)
+ ],
+ )
diff --git a/src/factryengine/scheduler/heuristic_solver/task_allocator.py b/src/factryengine/scheduler/heuristic_solver/task_allocator.py
index 2818f9e..37c1eb9 100644
--- a/src/factryengine/scheduler/heuristic_solver/task_allocator.py
+++ b/src/factryengine/scheduler/heuristic_solver/task_allocator.py
@@ -1,169 +1,190 @@
-from typing import Optional
+from math import ceil
import numpy as np
+from factryengine.models import Assignment, Resource, ResourceGroup
+
+from .exceptions import AllocationError
+from .matrix import Matrix
+
class TaskAllocator:
def allocate_task(
self,
- resource_windows: list[np.array],
- resource_ids: np.array,
+ resource_windows_dict: dict[int, np.array],
+ assignments: list[Assignment],
+ constraints: set[Resource],
task_duration: float,
- resource_count: int,
- resource_group_indices: list[list[int]],
- ) -> Optional[dict[int, np.array]]:
- matrix = self.create_matrix(resource_windows)
+ ) -> dict[int, tuple[int, int]]:
+ """
+ allocates a task to the resources with the fastest completion based on the resource windows dict.
+ Assignments determine which resources and how many.
+ """
+
+ resource_windows_matrix = self._create_matrix_from_resource_windows_dict(
+ resource_windows_dict
+ )
- solution_matrix, solution_resource_ids = self.solve_matrix(
- matrix=matrix,
+ # create constraints matrix
+ constraints_matrix = self._create_constraints_matrix(
+ resource_constraints=constraints,
+ resource_windows_matrix=resource_windows_matrix,
task_duration=task_duration,
- resource_ids=resource_ids,
- resource_count=resource_count,
- resource_group_indices=resource_group_indices,
)
- if solution_matrix is None:
- return None
- # get allocated windows
- allocated_windows = self._get_resource_intervals(
- solution_matrix, solution_resource_ids
+ if assignments and constraints:
+ # update the resource matrix with the constraint matrix
+ self._apply_constraint_to_resource_windows_matrix(
+ constraints_matrix, resource_windows_matrix
+ )
+
+ # build assignment matrices
+ assignments_matrix = self._create_assignments_matrix(
+ assignments=assignments,
+ resource_windows_matrix=resource_windows_matrix,
+ task_duration=task_duration,
)
- return allocated_windows
+ # matrix to solve
+ matrix_to_solve = assignments_matrix or constraints_matrix
- def create_matrix(self, windows: list[np.array]) -> np.array:
- boundaries = np.unique(
- np.concatenate([window[:, 0:2].flatten() for window in windows])
+ # find the solution
+ solution_matrix = self._solve_matrix(
+ matrix=matrix_to_solve,
+ task_duration=task_duration,
)
- matrix = [boundaries]
- for window in windows:
- window = self._transform_array(window)
- window[:, 1] = self._cumsum_reset_at_minus_one(window[:, 1])
- new_boundaries = np.setdiff1d(boundaries, window[:, 0])
- window = self._expand_array(window, new_boundaries)
- matrix.append(window[:, 1])
+ # process solution to find allocated resource windows
+ allocated_windows = self._get_resource_intervals(
+ matrix=solution_matrix,
+ )
+
+ # add constraints to allocated windows
+ if constraints and assignments:
+ constraints_matrix_trimmed = Matrix.trim(
+ original_matrix=constraints_matrix, trim_matrix=solution_matrix
+ )
+ allocated_windows.update(
+ self._get_resource_intervals(
+ matrix=constraints_matrix_trimmed,
+ )
+ )
- return np.stack(matrix, axis=-1)
+ return allocated_windows
- def solve_matrix(
+ def _solve_matrix(
self,
- matrix: np.array,
task_duration: float,
- resource_ids: np.array,
- resource_count=1,
- resource_group_indices=list[list[int]],
- ) -> Optional[dict[int, np.array]]:
+ matrix: Matrix = None,
+ ) -> Matrix:
"""
- Finds the earliest possible solution for a given task based on its duration and
- the number of resources available. The method uses a matrix representation of
- the resource windows to calculate the optimal allocation of the task.
+ Takes the task matrix as input and finds the earliest solution
+ where the work of the resources equals the task duration.
+ If no solution is found, raises AllocationError.
+ Returns a matrix with the solution as the last row.
"""
- resource_matrix = matrix[:, 1:]
- if resource_count == 1 and task_duration > resource_matrix.max():
- return (None, None)
+ # Check if total resources in assignments_matrix meet task_duration
- # mask all but the largest group per row if there are multiple groups
- if len(resource_group_indices) > 1:
- resource_matrix = self._fill_array_except_largest_group_per_row(
- resource_matrix, resource_group_indices
- )
+ matrix_meet_duration = np.sum(matrix.resource_matrix, axis=1) >= task_duration
- # mask all but the k largest elements per row
- masked_resource_matrix = self._mask_smallest_except_k_largest(
- resource_matrix, resource_count
- )
-
- arr_sum = np.sum(masked_resource_matrix, axis=1)
- if task_duration > arr_sum.max():
- return (None, None)
-
- # get solution index and resource ids
- solution_index = np.argmax(arr_sum >= task_duration)
- solution_resources_mask = ~masked_resource_matrix.mask[solution_index]
- solution_resource_ids = resource_ids[solution_resources_mask]
-
- # solve matrix
- solution_cols_mask = np.concatenate([[True], solution_resources_mask])
- solution_matrix = matrix[: solution_index + 1, solution_cols_mask]
- solution = self._solve_task_end(solution_matrix[-2:], task_duration)
- solution_matrix = np.vstack(
- (solution_matrix[:solution_index], np.atleast_2d(solution))
- )
+ # Find index of first true condition
+ solution_index = np.argmax(matrix_meet_duration)
- return solution_matrix, solution_resource_ids
+ # check if solution exists
+ if solution_index == 0:
+ raise AllocationError("No solution found.")
- def _solve_task_end(self, matrix: np.array, task_duration: int) -> np.array:
- # Calculate slopes and intercepts for all columns after the first one directly
- # into total_m and total_b
- total_m, total_b = 0, 0
- mb_values = []
- for i in range(1, matrix.shape[1]):
- m, b = np.polyfit(matrix[:, 0], matrix[:, i], 1)
- total_m += m
- total_b += b
- mb_values.append((m, b))
+ # select the resources which are part of the solution
+ solution_resources_mask = ~matrix.resource_matrix.mask[solution_index]
+ solution_resource_ids = matrix.resource_ids[solution_resources_mask]
- # Compute the column 0 value that makes the sum of the predicted values equal to
- # the desired sum
- col0 = (task_duration - total_b) / total_m
+ end_index = solution_index + 1
+ # filter resource matrix using solution resource mask
+ solution_resource_matrix = matrix.resource_matrix[
+ :end_index, solution_resources_mask
+ ]
+ # do linear regression to find precise solution
+ # only use the last two rows of the matrix where
+ # where the first row is the solution row
+ last_two_rows = slice(-2, None)
+ interval, solution_row = self._solve_task_end(
+ resource_matrix=solution_resource_matrix[:][last_two_rows],
+ intervals=matrix.intervals[:end_index][last_two_rows],
+ task_duration=task_duration,
+ )
- # Compute the corresponding values for the other columns using matrix operations
- other_cols = [m * col0 + b for m, b in mb_values]
+ # update the solution row
+ solution_resource_matrix[-1] = solution_row
- # Return a numpy array containing the solved value for column 0 and the
- # predicted values for the other columns
- result = np.array([col0] + other_cols)
+ solution_intervals = np.append(matrix.intervals[:solution_index], interval)
- return result
+ return Matrix(
+ resource_ids=solution_resource_ids,
+ intervals=solution_intervals,
+ resource_matrix=solution_resource_matrix,
+ )
- def _get_window_start_index(self, arr):
+ def _solve_task_end(
+ self,
+ resource_matrix: np.ma.MaskedArray,
+ intervals: np.ndarray,
+ task_duration: int,
+ ) -> tuple[int, np.array]:
"""
- returns the index of the first non-zero element in an array from the end.
+ Calculates the end of a task given a resource matrix, intervals, and task duration.
"""
- zero_indices = np.nonzero(arr == 0) # Find the indices of zeros from the end
+ # Initialize total slope and intercept
+ total_slope, total_intercept = 0, 0
- if zero_indices[0].size > 0:
- return zero_indices[0][-1]
- else:
- return 0
+ # List to store slope and intercept values for each column
+ slope_intercept_values = []
- def _get_resource_intervals(self, solution_matrix, resources):
- """
- gets the resource intervals from the solution matrix.
- """
- start_indexes = [
- self._get_window_start_index(resource_arr)
- for resource_arr in solution_matrix[:, 1:].T
+ # Calculate slope and intercept for each column
+ for resource_col in range(resource_matrix.shape[1]):
+ slope, intercept = np.polyfit(
+ x=intervals, y=resource_matrix[:, resource_col], deg=1
+ )
+ total_slope += slope
+ total_intercept += intercept
+ slope_intercept_values.append((slope, intercept))
+
+ # Compute the column 0 value that makes the sum of the predicted values equal to the task duration
+ col0_value = (task_duration - total_intercept) / total_slope
+
+ # Compute the corresponding values for the other columns
+ other_columns_values = [
+ slope * col0_value + intercept
+ for slope, intercept in slope_intercept_values
]
- end_index = solution_matrix.shape[0] - 1
- resource_windows_dict = {
- resource_id: (
- self._split_intervals(solution_matrix[start_index:, [0, i + 1]])
- )
- for i, (resource_id, start_index) in enumerate(
- zip(resources, start_indexes)
- )
- if start_index < end_index
- }
- return resource_windows_dict
+ # Return a numpy array containing the solved value for column 0 and the predicted values for the other columns
+ return col0_value, other_columns_values
- def _split_intervals(self, arr):
+ def _get_resource_intervals(
+ self,
+ matrix: np.array,
+ ) -> dict[int, tuple[int, int]]:
"""
- splits an array into intervals based on the values in the second column.
- Splitting is done when the value in the second column does not change.
+ gets the resource intervals from the solution matrix.
"""
- diff = np.diff(arr[:, 1])
- indices = np.where(diff == 0)[0]
- splits = np.split(arr[:, 0], indices + 1)
- intervals = [
- (round(np.min(split), 2), round(np.max(split), 2))
- for split in splits
- if split.size > 1
- ]
- return intervals
+ end_index = matrix.resource_matrix.shape[0] - 1
+ resource_windows_dict = {}
+ # loop through resource ids and resource intervals
+ for resource_id, resource_intervals in zip(
+ matrix.resource_ids, matrix.resource_matrix.T
+ ):
+ # ensure only continuous intervals are selected
+ indexes = self._find_indexes(resource_intervals.data)
+ if indexes is not None:
+ start_index, end_index = indexes
+ resource_windows_dict[resource_id] = (
+ ceil(round(matrix.intervals[start_index], 1)),
+ ceil(round(matrix.intervals[end_index], 1)),
+ )
+ return resource_windows_dict
- def _mask_smallest_except_k_largest(self, array, k) -> np.ma.core.MaskedArray:
+ def _mask_smallest_elements_except_top_k_per_row(
+ self, array: np.ma.core.MaskedArray, k
+ ) -> np.ma.core.MaskedArray:
"""
Masks the smallest elements in an array, except for the k largest elements on
each row. This is a helper method used in the finding of the earliest solution.
@@ -174,45 +195,10 @@ def _mask_smallest_except_k_largest(self, array, k) -> np.ma.core.MaskedArray:
rows = np.arange(array.shape[0])[:, np.newaxis]
mask[rows, indices[:, -k:]] = False
mask[array == 0] = True
- masked_array = np.ma.masked_array(array, mask=mask)
- return masked_array
+ array.mask = mask
+ return array
- def _fill_array_except_largest_group_per_row(
- self, array, group_indices=list[list[int, int]]
- ) -> np.array:
- """
- Returns an array where all elements in each row are filled with zero except
- those in the group (set of columns) with the largest sum. Groups are defined by
- a list of lists, each inner list containing the indices of columns in that
- group. Originally zero elements and those not in the largest sum group are
- filled withzeros.
- """
- num_rows = array.shape[0]
- # Initialize mask with all True (masked)
- mask = np.ones_like(array, dtype=bool)
-
- # Iterate over each row in the array
- for i in range(num_rows):
- row = array[i]
- # Calculate the sums for each group in the row
- group_sums = [np.sum(row[group]) for group in group_indices]
- # Find the indices of the group with the largest sum
- largest_group = group_indices[np.argmax(group_sums)]
- # Unmask (False) the elements in the largest group
- mask[i, largest_group] = False
-
- # Ensure all zeros in the array are masked
- mask[array == 0] = True
-
- # Apply the mask to the array
- masked_array = np.ma.masked_array(array, mask=mask)
-
- # Fill masked values with zeros
- filled_array = masked_array.filled(0)
-
- return filled_array
-
- def _cumsum_reset_at_minus_one(self, a) -> np.ndarray:
+ def _cumsum_reset_at_minus_one(self, a: np.ndarray) -> np.ndarray:
"""
Computes the cumulative sum of an array but resets the sum to zero whenever a
-1 is encountered. This is a helper method used in the creation of the resource
@@ -224,59 +210,298 @@ def _cumsum_reset_at_minus_one(self, a) -> np.ndarray:
overcount = np.maximum.accumulate(without_reset * reset_at)
return without_reset - overcount
- def _transform_array(self, arr):
- # Separate the start/end values and start/end duration values
- interval_values = arr[:, [0, 1]].ravel()
- durations = arr[:, [3, 2]].ravel()
+ def _cumsum_reset_at_minus_one_2d(self, arr: np.ndarray) -> np.ndarray:
+ return np.apply_along_axis(self._cumsum_reset_at_minus_one, axis=0, arr=arr)
+
+ def _replace_masked_values_with_nan(
+ self, array: np.ndarray, mask: np.ndarray
+ ) -> np.ndarray:
+ """
+ replaces masked values in an array with nan.
+ """
+ result_array = np.full(mask.shape, np.nan) # Initialize with nan
+ result_array[mask] = array
+ return result_array
+
+ def _create_matrix_from_resource_windows_dict(
+ self, windows_dict: dict[int, np.ndarray]
+ ) -> Matrix:
+ """
+ creates a matrix from a dictionary of resource windows.
+ """
+ # convert the dictionary of resource windows to a numpy array
+ resource_windows_list = list(windows_dict.values())
+
+ # get all window interval boundaries
+ all_interval_boundaries = []
+ for window in resource_windows_list:
+ all_interval_boundaries.extend(window["start"].ravel())
+ all_interval_boundaries.extend(window["end"].ravel())
+
+ # find unique values
+ intervals = np.unique(all_interval_boundaries)
+
+ # first column is the interval boundaries
+ matrix = [intervals]
+ # loop through the resource windows and create a column for each resource
+ for window in resource_windows_list:
+ window_boundaries = np.dstack((window["start"], window["end"])).flatten()
+
+ missing_boundaries_mask = np.isin(intervals, window_boundaries)
+
+ window_durations = np.dstack(
+ (window["is_split"], window["duration"])
+ ).flatten()
+
+ # replace masked values with nan
+
+ resource_column = self._replace_masked_values_with_nan(
+ window_durations, missing_boundaries_mask
+ )
+
+ # fill nan values with linear interpolation
+
+ resource_column = self._linear_interpolate_nan(resource_column, intervals)
+
+ # distribute the window durations over the intervals
+ resource_column = self._diff_and_zero_negatives(resource_column)
+
+ matrix.append(resource_column)
- # Stack the interval values and durations together
- result = np.column_stack((interval_values, durations))
+ # create numpy matrix
- return result
+ matrix = np.stack(matrix, axis=1)
+
+ # select only the resource columns
+ resource_matrix = np.ma.MaskedArray(matrix[:, 1:])
+
+ # extract intervals
+ resource_ids = np.array(list(windows_dict.keys()))
+ return Matrix(
+ resource_ids=resource_ids,
+ intervals=intervals,
+ resource_matrix=resource_matrix,
+ )
- def _expand_array(self, arr: np.ndarray, new_boundaries: np.ndarray) -> np.ndarray:
+ def _diff_and_zero_negatives(self, arr):
+ arr = np.diff(arr, prepend=0)
+ # replace negative values with 0
+ arr[arr < 0] = 0
+ return arr
+
+ def _create_resource_group_matrix(
+ self,
+ resource_group: ResourceGroup,
+ resource_count: int,
+ use_all_resources: bool,
+ resource_windows_matrix: Matrix,
+ ) -> Matrix:
"""
- Expands an array with new boundaries and calculates the corresponding durations
- using linear interpolation.
+ Creates a resource group matrix from a resource group and resource windows matrix.
"""
- new_boundaries = np.asarray(new_boundaries)
- # Only keep the boundaries that are not already in the array and within the
- # existing range
- mask = (
- ~np.isin(new_boundaries, arr[:, 0])
- & (new_boundaries >= arr[0, 0])
- & (new_boundaries <= arr[-1, 0])
+ resource_ids = np.array(resource_group.get_resource_ids())
+
+ # find the resources that exist in the windows matrix
+ available_resources = np.intersect1d(
+ resource_ids, resource_windows_matrix.resource_ids
)
- filtered_boundaries = new_boundaries[mask]
+ available_resources_count = len(available_resources)
+ if available_resources_count == 0:
+ return None
- # Find the indices where the new boundaries fit
- idxs = np.searchsorted(arr[:, 0], filtered_boundaries)
+ # Find the indices of the available resources in the windows matrix
+ resource_indexes = np.where(
+ np.isin(resource_windows_matrix.resource_ids, available_resources)
+ )[0]
- # Calculate the weights for linear interpolation
- weights = (filtered_boundaries - arr[idxs - 1, 0]) / (
- arr[idxs, 0] - arr[idxs - 1, 0]
+ # Build the resource_matrix for the resource group matrix
+ resource_matrix = resource_windows_matrix.resource_matrix[:, resource_indexes]
+ # compute the cumulative sum of the resource matrix columns
+ resource_matrix = self._cumsum_reset_at_minus_one_2d(resource_matrix)
+
+ # mask all but the k largest elements per row
+ if use_all_resources is False:
+ if resource_count < available_resources_count:
+ resource_matrix = self._mask_smallest_elements_except_top_k_per_row(
+ resource_matrix, resource_count
+ )
+
+ return Matrix(
+ resource_ids=resource_ids,
+ intervals=resource_windows_matrix.intervals,
+ resource_matrix=resource_matrix,
)
- duration_increase = weights * (arr[idxs, 1] - arr[idxs - 1, 1])
+ def _create_constraints_matrix(
+ self,
+ resource_constraints: set[Resource],
+ resource_windows_matrix: Matrix,
+ task_duration: int,
+ ) -> Matrix:
+ """
+ Checks if the resource constraints are available and updates the resource windows matrix.
+ """
+ if not resource_constraints:
+ return None
+
+ # get the constraint resource ids
+ resource_ids = np.array([resource.id for resource in resource_constraints])
- # ensure duration cannot decrease
- duration_increase[duration_increase < 0] = 0
+ # check if all resource constraints are available
+ if not np.all(np.isin(resource_ids, resource_windows_matrix.resource_ids)):
+ raise AllocationError("All resource constraints are not available")
- # Calculate the new durations using linear interpolation
- new_durations = arr[idxs - 1, 1] + duration_increase
+ # Find the indices of the available resources in the windows matrix
+ resource_indexes = np.where(
+ np.isin(resource_windows_matrix.resource_ids, resource_ids)
+ )[0]
- # Combine the new boundaries and durations into an array
- new_rows_within_range = np.column_stack((filtered_boundaries, new_durations))
+ # get the windows for the resource constraints
+ constraint_windows = resource_windows_matrix.resource_matrix[
+ :, resource_indexes
+ ]
+
+ # Compute the minimum along axis 1, mask values <= 0, and compute the cumulative sum
+ # divide by the number of resources to not increase the task completion time
+ min_values_matrix = (
+ np.min(constraint_windows, axis=1, keepdims=True)
+ * np.ones_like(constraint_windows)
+ / len(resource_ids)
+ )
+
+ resource_matrix = np.ma.masked_less_equal(
+ x=min_values_matrix,
+ value=0,
+ ).cumsum(axis=0)
- # Handle boundaries that are outside the existing range
- new_rows_outside_range = np.column_stack(
- (new_boundaries[~mask], np.zeros(np.sum(~mask)))
+ return Matrix(
+ resource_ids=resource_ids,
+ intervals=resource_windows_matrix.intervals,
+ resource_matrix=resource_matrix,
+ )
+
+ def _apply_constraint_to_resource_windows_matrix(
+ self, constraint_matrix: Matrix, resource_windows_matrix: Matrix
+ ) -> None:
+ """
+ Adds reset to windows where the constraints are not available.
+ Resets are represented by -1.
+ """
+ # create a mask from the constraint matrix
+ mask = (
+ np.ones_like(resource_windows_matrix.resource_matrix.data, dtype=bool)
+ * constraint_matrix.resource_matrix.mask
)
+ # add reset to the resource matrix
+ resource_windows_matrix.resource_matrix[mask] = -1
+
+ def _create_assignments_matrix(
+ self,
+ assignments: list[Assignment],
+ resource_windows_matrix: Matrix,
+ task_duration: int,
+ ) -> Matrix:
+ if assignments == []:
+ return None
+
+ assignment_matrices = []
+ for assignment in assignments:
+ # create resource group matrices
+ resource_group_matrices = []
+ for resource_group in assignment.resource_groups:
+ resource_group_matrix = self._create_resource_group_matrix(
+ resource_group=resource_group,
+ resource_count=assignment.resource_count,
+ use_all_resources=assignment.use_all_resources,
+ resource_windows_matrix=resource_windows_matrix,
+ )
+ if resource_group_matrix is None:
+ continue
+
+ resource_group_matrices.append(resource_group_matrix)
+
+ if resource_group_matrices == []:
+ raise AllocationError("No resource groups with available resources.")
+
+ # keep resource group matrix rows with the fastest completion
+ assignment_matrix = Matrix.compare_update_mask_and_merge(
+ resource_group_matrices
+ )
+ assignment_matrices.append(assignment_matrix)
+
+ # merge assignment matrices
+ assignments_matrix = Matrix.merge(assignment_matrices)
- # Combine the old boundaries/durations and new boundaries/durations and sort by
- # the boundaries
- combined = np.vstack((arr, new_rows_within_range, new_rows_outside_range))
- combined = combined[np.argsort(combined[:, 0])]
+ # check if solution exists
+ if not np.any(assignments_matrix.resource_matrix >= task_duration):
+ raise AllocationError("No solution found.")
- return combined
+ return assignments_matrix
+
+ def _find_indexes(self, arr: np.array) -> tuple[int, int] | None:
+ """
+ Find the start and end indexes from the last zero to the last number with no increase in a NumPy array.
+ """
+ # if last element is zero return None
+ if arr[-1] == 0:
+ return None
+
+ # Find the index of the last zero
+ zero_indexes = np.nonzero(arr == 0)[0]
+ if zero_indexes.size > 0:
+ start_index = zero_indexes[-1]
+ else:
+ return None
+
+ # Use np.diff to find where the array stops increasing
+ diffs = np.diff(arr[start_index:])
+
+ # Find where the difference is less than or equal to zero (non-increasing sequence)
+ non_increasing = np.where(diffs == 0)[0]
+
+ if non_increasing.size > 0:
+ # The end index is the last non-increasing index + 1 to account for the difference in np.diff indexing
+ end_index = non_increasing[0] + start_index
+ else:
+ end_index = (
+ arr.size - 1
+ ) # If the array always increases, end at the last index
+
+ return start_index, end_index
+
+ def _linear_interpolate_nan(self, y: np.ndarray, x: np.ndarray) -> np.ndarray:
+ """
+ Linearly interpolate NaN values in a 1D array.
+ Ignores when the slope is negative.
+ """
+ # fill trailing and ending NaNs with 0
+ start_index = np.argmax(~np.isnan(y))
+ y[:start_index] = 0
+ end_index = len(y) - np.argmax(~np.isnan(y[::-1]))
+ y[end_index:] = 0
+ # Ensure input arrays are numpy arrays
+ nan_mask = np.isnan(y)
+ xp = x[~nan_mask]
+ x = x[nan_mask]
+ yp = y[~nan_mask]
+
+ # Find indices where the right side of the interval for each x would be
+ idx = np.searchsorted(xp, x) - 1
+ idx[idx < 0] = 0
+ idx[idx >= len(xp) - 1] = len(xp) - 2
+
+ # Compute the slope (dy/dx) between the interval points
+ slope = (yp[idx + 1] - yp[idx]) / (xp[idx + 1] - xp[idx])
+ positive_slope_mask = slope > 0
+
+ # Create a combined mask for NaN positions with positive slopes
+ combined_mask = np.zeros_like(y, dtype=bool)
+ combined_mask[nan_mask] = positive_slope_mask
+
+ # Compute the interpolated values
+ interpolated_values = (yp[idx] + slope * (x - xp[idx]))[positive_slope_mask]
+ y[combined_mask] = interpolated_values
+
+ # convert nan to zero
+ return np.nan_to_num(y)
diff --git a/src/factryengine/scheduler/heuristic_solver/window_manager.py b/src/factryengine/scheduler/heuristic_solver/window_manager.py
index 160d0eb..1b90b19 100644
--- a/src/factryengine/scheduler/heuristic_solver/window_manager.py
+++ b/src/factryengine/scheduler/heuristic_solver/window_manager.py
@@ -2,46 +2,44 @@
from ...models.resource import Resource
+window_dtype = [
+ ("start", np.float32),
+ ("end", np.float32),
+ ("duration", np.float32),
+ ("is_split", np.int32),
+]
+
class WindowManager:
def __init__(self, resources: list[Resource]):
self.resources = resources
- self.resource_windows_dict = self.create_resource_windows_dict()
+ self.resource_windows_dict = self._create_resource_windows_dict()
- def create_resource_windows_dict(self) -> dict[int, np.ndarray]:
- """
- Creates a dictionary mapping resource IDs to numpy arrays representing windows.
- """
- return {
- resource.id: self.windows_to_numpy(resource.available_windows)
- for resource in self.resources
- }
-
- def get_task_resource_windows(
+ def get_task_resource_windows_dict(
self, task_resource_ids: list[int], task_earliest_start: int
- ) -> list[np.ndarray]:
+ ) -> dict[int, np.ndarray]:
"""
Returns the resource windows for the resource ids.
The windows are trimmed to the min_task_start.
"""
- return [
- trimmed_window
- for resource_id in task_resource_ids
- if (
- trimmed_window := self.trim_window(
- window=self.resource_windows_dict[resource_id],
- trim_interval=(0, task_earliest_start),
- )
- ).size
- > 0
- ]
+ trimmed_windows_dict = {}
- def windows_to_numpy(self, windows: list[tuple[int, int]]) -> np.ndarray:
- """
- Converts a list of windows to a numpy array.
- """
- arr = np.array(windows)
- return np.concatenate([arr, np.diff(arr), np.zeros((arr.shape[0], 1))], axis=1)
+ # Loop over each resource ID
+ for resource_id in task_resource_ids:
+ # Get the window for the current resource ID
+ resource_windows = self.resource_windows_dict[resource_id]
+
+ # Trim the window to the task's earliest start time
+ trimmed_window = self._trim_window(
+ window=resource_windows,
+ trim_interval=(0, task_earliest_start),
+ )
+
+ # If the trimmed window is not empty, add it to the dictionary
+ if trimmed_window.size > 0:
+ trimmed_windows_dict[resource_id] = trimmed_window
+
+ return trimmed_windows_dict
def update_resource_windows(
self, allocated_resource_windows_dict: dict[int, list[tuple[int, int]]]
@@ -51,11 +49,41 @@ def update_resource_windows(
"""
for resource_id, trim_interval in allocated_resource_windows_dict.items():
window = self.resource_windows_dict[resource_id]
- self.resource_windows_dict[resource_id] = self.trim_window(
+ self.resource_windows_dict[resource_id] = self._trim_window(
window, trim_interval
)
- def trim_window(
+ def _create_resource_windows_dict(self) -> dict[int, np.ndarray]:
+ """
+ Creates a dictionary mapping resource IDs to numpy arrays representing windows.
+ """
+ return {
+ resource.id: self._windows_to_numpy(resource.available_windows)
+ for resource in self.resources
+ }
+
+ def _windows_to_numpy(self, windows: list[tuple[int, int]]) -> np.ndarray:
+ """
+ Converts a list of windows to a numpy array.
+ """
+ # Convert the list of windows to a numpy array
+ arr = np.array(windows)
+
+ # Create an empty structured array with the specified dtype
+ result = np.zeros(arr.shape[0], dtype=window_dtype)
+
+ # Fill the 'start' and 'end' fields with the first and second columns of 'arr', respectively
+ result["start"], result["end"] = arr[:, 0], arr[:, 1]
+
+ # Calculate the duration of each window and fill the 'duration' field
+ result["duration"] = np.diff(arr, axis=1).flatten()
+
+ # Fill the 'is_split' field with zeros
+ result["is_split"] = 0
+
+ return result
+
+ def _trim_window(
self, window: np.ndarray, trim_interval: tuple[int, int]
) -> np.ndarray:
"""
@@ -64,49 +92,61 @@ def trim_window(
window = window.copy()
trim_start, trim_end = trim_interval
- start_idx = np.searchsorted(window[:, 1], trim_start, side="right")
- end_idx = np.searchsorted(window[:, 0], trim_end, side="left")
+ start_idx = np.searchsorted(window["end"], trim_start, side="right")
+ end_idx = np.searchsorted(window["start"], trim_end, side="left")
if start_idx == end_idx:
return window
- overlap_windows = window[start_idx:end_idx]
- mask_end = overlap_windows[:, 1] <= trim_end
- mask_start = overlap_windows[:, 0] >= trim_start
+ overlap_intervals = window[start_idx:end_idx]
+ mask_end = overlap_intervals["end"] <= trim_end
+ mask_start = overlap_intervals["start"] >= trim_start
mask_delete = np.logical_and(mask_end, mask_start)
mask_between = np.logical_and(
- overlap_windows[:, 0] < trim_start, overlap_windows[:, 1] > trim_end
+ overlap_intervals["start"] < trim_start, overlap_intervals["end"] > trim_end
)
slopes = self._calculate_slopes(
- overlap_windows
+ overlap_intervals
) # Compute slopes for all overlap_windows
window = self._handle_mask_between(
- window,
- overlap_windows,
- mask_between,
- slopes,
- trim_start,
- trim_end,
- start_idx,
- end_idx,
+ windows=window,
+ overlap_windows=overlap_intervals,
+ mask_between=mask_between,
+ slopes=slopes,
+ trim_start=trim_start,
+ trim_end=trim_end,
+ start_idx=start_idx,
+ end_idx=end_idx,
)
+
+ window = self._handle_mask_start(
+ windows=window,
+ overlap_windows=overlap_intervals,
+ mask_start=mask_start,
+ trim_end=trim_end,
+ )
+
window = self._handle_mask_end(
- window, overlap_windows, mask_end, trim_start, end_idx
+ windows=window,
+ overlap_windows=overlap_intervals,
+ mask_end=mask_end,
+ trim_start=trim_start,
+ end_idx=end_idx,
)
- window = self._handle_mask_start(window, overlap_windows, mask_start, trim_end)
+
window = self._delete_overlapped_windows(
window, mask_delete, start_idx, end_idx
)
return window
- def _calculate_slopes(self, windows: np.ndarray) -> np.ndarray:
+ def _calculate_slopes(self, window: np.ndarray) -> np.ndarray:
"""
- Calculates the slopes for the given windows.
+ Calculates the slopes for the given intervals.
"""
- return (windows[:, 2] - windows[:, 3]) / (windows[:, 1] - windows[:, 0])
+ return window["duration"] / (window["end"] - window["start"])
def _handle_mask_between(
self,
@@ -123,62 +163,76 @@ def _handle_mask_between(
Handles the case where mask_between is True.
"""
if np.any(mask_between):
+ # Get the slopes between the mask
slopes_between = slopes[mask_between]
+
+ # Duplicate the overlap windows
overlap_windows = np.concatenate([overlap_windows, overlap_windows])
- overlap_windows[0, 1] = trim_start
- overlap_windows[0, 2] = (
- overlap_windows[0, 1] - overlap_windows[0, 0]
+
+ # Update the end and duration of the first overlap window
+ overlap_windows[0]["end"] = trim_start
+ overlap_windows[0]["duration"] = (
+ overlap_windows[0]["end"] - overlap_windows[0]["start"]
) * slopes_between
- overlap_windows[1, 0] = trim_end
- overlap_windows[1, 2] = (
- overlap_windows[1, 1] - overlap_windows[1, 0]
+ # Update the start, duration, and is_split of the second overlap window
+ overlap_windows[1]["start"] = trim_end
+ overlap_windows[1]["duration"] = (
+ overlap_windows[1]["end"] - overlap_windows[1]["start"]
) * slopes_between
- overlap_windows[1, 3] = -1
+ overlap_windows[1]["is_split"] = -1
- return np.concatenate(
+ # Concatenate the windows before the start index, the overlap windows, and the windows after the end index
+ final_windows = np.concatenate(
(windows[:start_idx], overlap_windows, windows[end_idx:])
)
+ return final_windows
+
return windows
- def _handle_mask_end(
+ def _handle_mask_start(
self,
windows: np.ndarray,
overlap_windows: np.ndarray,
- mask_end: np.ndarray,
- trim_start: int,
- end_idx: int,
+ mask_start: np.ndarray,
+ trim_end: int,
) -> np.ndarray:
"""
- Handles the case where mask_end is True.
+ Handles the case where mask_start is True.
"""
- if np.any(mask_end):
- overlap_windows[mask_end, 1] = trim_start
- overlap_windows[mask_end, 2] = (
- overlap_windows[mask_end, 1] - overlap_windows[mask_end, 0]
+ if np.any(mask_start):
+ # Update "start" field
+ overlap_windows["start"][mask_start] = trim_end
+
+ # Update "duration" field based on updated "start" and existing "end"
+ overlap_windows["duration"][mask_start] = (
+ overlap_windows["end"][mask_start] - trim_end
)
- end_idx_temp = min(end_idx, windows.shape[0] - 1) # handle out of bounds
- windows[end_idx_temp, 3] = -1
+
+ # Update "is_split" field
+ overlap_windows["is_split"][mask_start] = -1
return windows
- def _handle_mask_start(
+ def _handle_mask_end(
self,
windows: np.ndarray,
overlap_windows: np.ndarray,
- mask_start: np.ndarray,
- trim_end: int,
+ mask_end: np.ndarray,
+ trim_start: int,
+ end_idx: int,
) -> np.ndarray:
"""
- Handles the case where mask_start is True.
+ Handles the case where mask_end is True.
"""
- if np.any(mask_start):
- overlap_windows[mask_start, 0] = trim_end
- overlap_windows[mask_start, 2] = (
- overlap_windows[mask_start, 1] - overlap_windows[mask_start, 0]
+ if np.any(mask_end):
+ overlap_windows["end"][mask_end] = trim_start
+ overlap_windows["duration"][mask_end] = (
+ trim_start - overlap_windows["start"][mask_end]
)
- overlap_windows[mask_start, 3] = -1
+ end_idx_temp = min(end_idx, windows.shape[0] - 1) # handle out of bounds
+ windows["is_split"][end_idx_temp] = -1
return windows
diff --git a/src/factryengine/scheduler/scheduler_result.py b/src/factryengine/scheduler/scheduler_result.py
index 30b2c82..fdf3737 100644
--- a/src/factryengine/scheduler/scheduler_result.py
+++ b/src/factryengine/scheduler/scheduler_result.py
@@ -5,78 +5,122 @@
class SchedulerResult:
- def __init__(self, task_vars):
+ def __init__(self, task_vars: list[dict], unscheduled_task_ids: list[int]):
self.task_vars = task_vars
- self.unscheduled_tasks = [
- task["task_uid"]
- for task in self.task_vars
- if task["assigned_resource_ids"] is None
- ]
+ self.unscheduled_task_ids = unscheduled_task_ids
- def to_dict(self):
+ def to_dict(self) -> list[dict]:
return self.task_vars
- def to_dataframe(self):
+ def to_dataframe(self) -> pd.DataFrame:
df = pd.DataFrame(self.task_vars)
return df
- def summary(self):
- summary = f"Scheduled {len(self.task_vars) - len(self.unscheduled_tasks)} of {len(self.task_vars)} tasks."
- if self.unscheduled_tasks:
- summary += f"\nNo available resources found for task ids: {', '.join(map(str, self.unscheduled_tasks))}"
+ def summary(self) -> str:
+ """
+ Generate a summary of the scheduling results.
+ """
+
+ # Calculate the number of scheduled and total tasks
+ num_scheduled_tasks = len(self.task_vars) - len(self.unscheduled_task_ids)
+ total_tasks = len(self.task_vars)
+
+ # Start the summary with the number of scheduled tasks
+ summary = f"Scheduled {num_scheduled_tasks} of {total_tasks} tasks."
+
+ # If there are any unscheduled tasks, add them to the summary
+ if self.unscheduled_task_ids:
+ unscheduled_task_ids = ", ".join(map(str, self.unscheduled_task_ids))
+ summary += (
+ f"\nNo available resources found for task ids: {unscheduled_task_ids}"
+ )
+
return summary
def plot_resource_plan(self) -> None:
+ # Get the resource intervals DataFrame
df = self.get_resource_intervals_df()
- # Create a color dictionary for each unique resource for distinction in the plot
- tasks = df["task_uid"].unique()
- colors = sns.color_palette(
- "deep", len(tasks)
- ) # Using seaborn "dark" color palette
- color_dict = dict(zip(tasks, colors))
+ # Create a color dictionary for each unique task for distinction in the plot
+ unique_tasks = df["task_id"].unique()
+ color_palette = sns.color_palette(
+ "deep", len(unique_tasks)
+ ) # Using seaborn "deep" color palette
+ color_dict = dict(zip(unique_tasks, color_palette))
# Set seaborn style
sns.set_style("whitegrid")
+ # Create a new figure with a specific size
plt.figure(figsize=(12, 6))
- for task_uid, group_df in df.groupby("task_uid"):
+ # Iterate over each task group
+ for task_id, group_df in df.groupby("task_id"):
+ # Iterate over each task in the group
for task in group_df.itertuples():
+ # Draw a horizontal bar for the task
plt.barh(
task.resource_id,
left=task.interval_start,
width=task.interval_end - task.interval_start,
- color=color_dict[task_uid],
+ color=color_dict[task_id],
edgecolor="black",
)
+
+ # Add a text label in the middle of the bar
+ text_position = (task.interval_start + task.interval_end) / 2
plt.text(
- x=(task.interval_start + task.interval_end)
- / 2, # x position, in the middle of task bar
- y=task.resource_id, # y position, on the resource line
- s=task.task_uid, # text string, which is task_uid here
- va="center", # vertical alignment
- ha="center", # horizontal alignment
- color="black", # text color
+ x=text_position,
+ y=task.resource_id,
+ s=task.task_id,
+ va="center",
+ ha="center",
+ color="black",
fontsize=10,
- ) # font size
+ )
+ # Set the labels and title of the plot
plt.xlabel("Time")
plt.ylabel("Resource")
plt.title("Resource Plan")
+
+ # Set the y-ticks to the unique resource IDs
plt.yticks(df["resource_id"].unique())
- plt.tight_layout() # adjusts subplot params so that the subplot(s) fits into the figure area.
+
+ # Adjust the layout so everything fits nicely
+ plt.tight_layout()
+
+ # Display the plot
plt.show()
def get_resource_intervals_df(self) -> pd.DataFrame:
+ """
+ Explodes the resource intervals to create a dataframe with one row per resource interval
+ """
+ # Convert the object to a DataFrame
df = self.to_dataframe()
- df = df.explode(["assigned_resource_ids", "resource_intervals"]).explode(
- "resource_intervals"
+
+ # Explode the 'assigned_resource_ids' and 'resource_intervals' columns
+ exploded_df = df.explode(["assigned_resource_ids", "resource_intervals"])
+
+ # Drop any rows with missing values
+ cleaned_df = exploded_df.dropna().copy()
+
+ # Extract the start and end of the interval from the 'resource_intervals' column
+ cleaned_df["interval_start"] = cleaned_df.resource_intervals.apply(
+ lambda x: x[0]
)
- df = df.dropna()
- df["interval_start"] = df.resource_intervals.apply(lambda x: x[0])
- df["interval_end"] = df.resource_intervals.apply(lambda x: x[1])
- df = df.rename(columns={"assigned_resource_ids": "resource_id"})
- df = df[["task_uid", "resource_id", "interval_start", "interval_end"]]
- df = df.infer_objects()
- return df
+ cleaned_df["interval_end"] = cleaned_df.resource_intervals.apply(lambda x: x[1])
+
+ # Rename the 'assigned_resource_ids' column to 'resource_id'
+ renamed_df = cleaned_df.rename(columns={"assigned_resource_ids": "resource_id"})
+
+ # Select only the columns we're interested in
+ selected_columns_df = renamed_df[
+ ["task_id", "resource_id", "interval_start", "interval_end"]
+ ]
+
+ # Infer the best data types for each column
+ final_df = selected_columns_df.infer_objects()
+
+ return final_df
diff --git a/src/factryengine/scheduler/task_batch_processor.py b/src/factryengine/scheduler/task_batch_processor.py
deleted file mode 100644
index 1d3cd19..0000000
--- a/src/factryengine/scheduler/task_batch_processor.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from copy import deepcopy
-
-import networkx as nx
-
-from ..models import Task
-
-
-class TaskBatchProcessor:
- """
- The TaskBatchProcessor class is responsible for preprocessing tasks.
- """
-
- def __init__(self, task_graph: nx.DiGraph, task_dict: dict[str, Task]):
- self.task_graph = task_graph
- self.task_dict = deepcopy(task_dict)
-
- def split_tasks_into_batches(self) -> dict[str, Task]:
- """
- This function performs splitting of tasks into batches if necessary and returns
- an updated task dictionary with possibly split tasks. Tasks are split only if
- they have a batch size and the quantity is greater than the batch size.
- """
- # get the order of tasks based on their dependencies
- task_order = list(nx.topological_sort(self.task_graph))
-
- for task_uid in task_order:
- current_task = self.task_dict[task_uid]
-
- # Skip the current iteration if the task doesn't have a batch size or qty
- if not current_task.is_splittable():
- continue
-
- # split current task into batches
- task_splits = TaskSplitter(task=current_task).split_into_batches()
-
- # update the relationships of predecessor and successor tasks
- self._update_predecessor_successor_relationships(current_task, task_splits)
-
- # remove the current task from the task dictionary
- del self.task_dict[task_uid]
-
- # add the split tasks to the task dictionary
- for split_task in task_splits:
- self.task_dict[split_task.uid] = split_task
-
- return self.task_dict
-
- def _update_predecessor_successor_relationships(
- self, task: Task, batches: list[Task]
- ) -> None:
- """
- This function updates the predecessor and successor relationships of the given
- task
- """
-
- # update predecessors
- if self._are_predecessors_single_task(task):
- for batch, pred in zip(batches, task.predecessors):
- batch.predecessors = [pred]
-
- # update successors
- successor_tasks = self._get_successor_tasks(task.uid)
- for successor in successor_tasks:
- successor.predecessors.remove(task)
- successor.predecessors.extend(batches)
-
- def _are_predecessors_single_task(self, task: Task) -> bool:
- """Checks if the predecessors of the given task are a single task"""
- unique_tasks = len(set(pred_task.id for pred_task in task.predecessors))
- predecessors_quantity = sum(
- pred_task.quantity for pred_task in task.predecessors
- )
- return unique_tasks == 1 and predecessors_quantity == task.quantity
-
- def _get_successor_tasks(self, task_uid: str) -> list[Task]:
- """
- Given a task_uid, this function returns the list of successor tasks.
- """
- successor_uids = list(self.task_graph.successors(task_uid))
- return [self.task_dict[successor_id] for successor_id in successor_uids]
-
-
-class TaskSplitter:
- """
- The TaskSplitter class is responsible for splitting tasks into batches.
- """
-
- def __init__(self, task: Task):
- self.task = task
-
- def split_into_batches(self) -> list[Task]:
- """
- Splits a task into batches.
- """
- num_batches, remaining = divmod(self.task.quantity, self.task.batch_size)
- batches = [
- self._create_new_task(i + 1, self.task.batch_size)
- for i in range(num_batches)
- ]
-
- if remaining > 0:
- batches.append(self._create_new_task(num_batches + 1, remaining))
-
- return batches
-
- def _create_new_task(self, batch_id: int, quantity: int) -> Task:
- """Creates a new task with the given batch_id and quantity."""
- new_task = self.task.copy(deep=True)
- new_task.quantity = quantity
- new_task.duration = (quantity / self.task.quantity) * self.task.duration
- new_task.set_batch_id(batch_id)
- return new_task
diff --git a/src/factryengine/scheduler/task_graph.py b/src/factryengine/scheduler/task_graph.py
index 58672b0..eed4319 100644
--- a/src/factryengine/scheduler/task_graph.py
+++ b/src/factryengine/scheduler/task_graph.py
@@ -1,6 +1,7 @@
import networkx as nx
from ..models import Task
+from .utils import get_task_predecessors
class TaskGraph:
@@ -15,10 +16,14 @@ def _create_task_graph(self):
task_graph = nx.DiGraph()
for task in self.tasks_dict.values():
task_graph.add_node(
- task.uid, duration=task.duration, priority=task.priority
+ task.get_id(), duration=task.duration, priority=task.priority
)
- for predecessor in task.predecessors:
- task_graph.add_edge(predecessor.uid, task.uid)
+
+ predecessors = get_task_predecessors(task, self.tasks_dict)
+
+ for predecessor in predecessors:
+ task_graph.add_edge(predecessor.get_id(), task.get_id())
+
return task_graph
def _compute_longest_paths(self):
@@ -26,7 +31,7 @@ def _compute_longest_paths(self):
Computes the longest path for each node in the task graph using a topological
sort.
"""
- longest_path = {task_uid: 0 for task_uid in self.tasks_dict}
+ longest_path = {task_id: 0 for task_id in self.tasks_dict}
for task in nx.topological_sort(self.graph):
duration = self.graph.nodes[task]["duration"]
for predecessor in self.graph.predecessors(task):
@@ -59,9 +64,12 @@ def visit(node):
for task in sorted(
self.tasks_dict.values(),
- key=lambda t: (self.graph.nodes[t.uid]["priority"], -longest_path[t.uid]),
+ key=lambda t: (
+ self.graph.nodes[t.get_id()]["priority"],
+ -longest_path[t.get_id()],
+ ),
):
- visit(task.uid)
+ visit(task.get_id())
return result
diff --git a/src/factryengine/scheduler/utils.py b/src/factryengine/scheduler/utils.py
new file mode 100644
index 0000000..8b880cc
--- /dev/null
+++ b/src/factryengine/scheduler/utils.py
@@ -0,0 +1,13 @@
+from ..models import Task
+
+
+def get_task_predecessors(task: Task, task_dict: dict) -> list[Task]:
+ """
+ returns a list of tasks that are predecessors of the given task
+ """
+ try:
+ predecessors = [task_dict[pred_id] for pred_id in task.predecessor_ids]
+
+ except KeyError as e:
+ raise ValueError(f"Predecessor with ID {e.args[0]} does not exist.")
+ return predecessors
diff --git a/src/factryengine/scheduler/visualizations.py b/src/factryengine/scheduler/visualizations.py
deleted file mode 100644
index 1d25bd3..0000000
--- a/src/factryengine/scheduler/visualizations.py
+++ /dev/null
@@ -1 +0,0 @@
-# todo add resource gantt and resource utilization heatmap
\ No newline at end of file
diff --git a/tests/scheduler/test_task_allocator.py b/tests/scheduler/test_task_allocator.py
index f8dc8e9..0dce74c 100644
--- a/tests/scheduler/test_task_allocator.py
+++ b/tests/scheduler/test_task_allocator.py
@@ -1,7 +1,7 @@
import numpy as np
import pytest
-
-from factryengine.scheduler.heuristic_solver.task_allocator import TaskAllocator
+from factryengine.scheduler.heuristic_solver.task_allocator import Matrix, TaskAllocator
+from factryengine.scheduler.heuristic_solver.window_manager import window_dtype
@pytest.fixture
@@ -9,165 +9,115 @@ def task_allocator():
return TaskAllocator()
-def test_allocate_task_returns_expected_result(task_allocator):
- resource_windows = [np.array([[0, 1, 1, -1], [2, 3, 1, 0]])]
- resource_ids = np.array([1])
- task_duration = 2
- resource_count = 1
- resource_group_indices = [[0]]
- result = task_allocator.allocate_task(
- resource_windows=resource_windows,
- resource_ids=resource_ids,
- task_duration=task_duration,
- resource_count=resource_count,
- resource_group_indices=resource_group_indices,
- )
- # Check if the result is a dictionary
- assert isinstance(result, dict)
- # Check if the result has the correct keys (allocated resources)
- assert list(result.keys()) == [1]
- # Validate a specific element in the result (replace with your expected values)
- assert result == {1: [(0.0, 1.0), (2.0, 3.0)]}
-
-
-def test_allocate_task_returns_none_when_no_solution(task_allocator):
- resource_windows = [np.array([[0, 1, 1, -1], [2, 3, 1, 0]])]
- resource_ids = np.array([1])
- task_duration = 4
- resource_count = 1
- resource_group_indices = [[0]]
- result = task_allocator.allocate_task(
- resource_windows=resource_windows,
- resource_ids=resource_ids,
- task_duration=task_duration,
- resource_count=resource_count,
- resource_group_indices=resource_group_indices,
+def test_can_allocate_task():
+ pass
+
+
+def test_solve_matrix():
+ pass
+
+
+def test_solve_task_end(task_allocator):
+ resource_matrix = np.ma.array([[0, 0], [10, 10]])
+ intervals = np.array([0, 10])
+ task_duration = 10
+ result_x, result_y = task_allocator._solve_task_end(
+ resource_matrix, intervals, task_duration
)
- assert result is None
-
-
-def test_allocate_task_with_invalid_input(task_allocator):
- resource_windows_dict = {1: np.array([[0, 1, 1, 0], [2, 3, 1, -1]])}
- task_duration = "invalid"
- resource_count = 1
- resource_group_indices = [[0]]
- with pytest.raises(TypeError):
- task_allocator.allocate_task(
- resource_windows_dict, task_duration, resource_count, resource_group_indices
- )
-
-
-def test_allocate_task_with_empty_input(task_allocator):
- resource_windows_dict = {}
- task_duration = 1
- resource_count = 1
- resource_group_indices = [[]]
- with pytest.raises(Exception): # or the specific exception that you expect
- task_allocator.allocate_task(
- resource_windows_dict, task_duration, resource_count, resource_group_indices
- )
-
-
-# def test_allocate_task_with_negative_task_duration(task_allocator):
-# resource_windows_dict = {1: np.array([[0, 1, 1, 0], [2, 3, 1, -1]])}
-# task_duration = -1
-# resource_count = 1
-# resource_group_indices = [[0]]
-# with pytest.raises(Exception): # or your expected behavior
-# task_allocator.allocate_task(
-# resource_windows_dict, task_duration, resource_count, resource_group_indices
-# )
-
-
-def test_fill_array_except_largest_group_per_row(task_allocator):
- array = np.array(
- [
- [5, 0, 10],
- [10, 30, 20],
- [1, 1, 1],
- ]
+ assert result_x == 5
+ assert np.array_equal(result_y, np.array([5, 5]))
+
+
+def test_get_resource_intervals(task_allocator):
+ solution_resource_ids = np.array([1, 2, 3])
+ solution_intervals = np.array([0, 1, 2])
+ resource_matrix = np.ma.array([[0, 0, 0], [1, 0, 0], [2, 1, 0]])
+ solution_matrix = Matrix(
+ resource_ids=solution_resource_ids,
+ intervals=solution_intervals,
+ resource_matrix=resource_matrix,
)
- group_indices = [[0, 1], [2]]
- result = task_allocator._fill_array_except_largest_group_per_row(
- array, group_indices
+ result = task_allocator._get_resource_intervals(solution_matrix)
+ expected = {1: (0, 2), 2: (1, 2)}
+ assert result == expected
+
+
+def test_create_matrix_from_resource_windows_dict(task_allocator):
+ resource_windows_dict = {
+ 1: np.array([(0, 10, 10, 0), (20, 30, 10, -1)], dtype=window_dtype),
+ 2: np.array([(0, 10, 10, 0), (25, 30, 5, 0)], dtype=window_dtype),
+ }
+ result = task_allocator._create_matrix_from_resource_windows_dict(
+ windows_dict=resource_windows_dict
)
- expected = np.array(
+ expected_resource_ids = np.array([1, 2])
+ expected_matrix = np.array(
+ # interval, resource 1, resource 2
[
- [0, 0, 10],
- [10, 30, 0],
- [1, 1, 0],
+ [0, 0, 0],
+ [10, 10, 10],
+ [20, 0, 10], # reset due to -1
+ [25, 5, 10],
+ [30, 10, 15],
]
)
- assert np.array_equal(result, expected)
+ expected_intervals = expected_matrix[:, 0]
+ expected_resource_matrix = np.ma.array(expected_matrix[:, 1:])
+ assert np.array_equal(
+ result.resource_ids, expected_resource_ids
+ ), f"Expected resource_ids to be {expected_resource_ids}, but got {result.resource_ids}"
-def test_expand_array(task_allocator):
- arr = np.array(
- [
- [5, 0],
- [10, 10],
- ]
- )
- new_boundaries = np.array([7, 12])
- result = task_allocator._expand_array(arr, new_boundaries)
- expected = np.array(
- [
- [5, 0],
- [7, 4],
- [10, 10],
- [12, 0],
- ]
- )
- assert np.array_equal(result, expected)
+ assert np.array_equal(
+ result.intervals, expected_intervals
+ ), f"Expected intervals to be {expected_intervals}, but got {result.intervals}"
+ assert np.array_equal(
+ result.resource_matrix, expected_resource_matrix
+ ), f"Expected resource_matrix to be {expected_resource_matrix}, but got {result.resource_matrix}"
-test_case_values = [
- # trim both intervals
- (
- "No Resets",
- [
- np.array([[0, 10, 10, 0], [20, 30, 10, 0]]),
- np.array([[5, 15, 10, 0], [20, 30, 10, 0]]),
- ],
- np.array(
- [
- [0, 0, 0],
- [5, 5, 0],
- [10, 10, 5],
- [15, 10, 10],
- [20, 10, 10],
- [30, 20, 20],
- ]
+
+@pytest.mark.parametrize(
+ "array, k, expected",
+ [
+ (
+ np.ma.array([[5, 0, 10], [10, 30, 20], [1, 1, 1]]),
+ 2,
+ np.array(
+ [[False, True, False], [True, False, False], [True, False, False]]
+ ),
),
- ),
- (
- "With resets and high durations",
- [
- np.array([[0, 10, 20, 0], [20, 30, 10, -1]]),
- np.array([[5, 15, 10, 0], [20, 30, 10, -1]]),
- np.array([[0, 30, 60, 0]]),
- ],
- np.array(
- [
- [0, 0, 0, 0],
- [5, 10, 0, 10],
- [10, 20, 5, 20],
- [15, 20, 10, 30],
- [20, 0, 0, 40],
- [30, 10, 10, 60],
- ]
+ (
+ np.ma.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
+ 1,
+ np.array([[True, True, False], [True, True, False], [True, True, False]]),
+ ),
+ (
+ np.ma.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]]),
+ 3,
+ np.array(
+ [[False, False, False], [False, False, False], [False, False, False]]
+ ),
),
- ),
-]
+ ],
+)
+def test_mask_smallest_elements_except_top_k_per_row(
+ task_allocator, array, k, expected
+):
+ result = task_allocator._mask_smallest_elements_except_top_k_per_row(array, k).mask
+ assert np.array_equal(result, expected)
@pytest.mark.parametrize(
- "test_name, resource_windows_dict, expected",
- test_case_values,
- ids=[case[0] for case in test_case_values],
+ "array, expected",
+ [
+ (np.array([0, 1, 5, -1, 10]), [0, 1, 6, 0, 10]),
+ (np.array([-1, 2, 3, 0, 4]), [0, 2, 5, 5, 9]),
+ (np.array([0, -1, 2, 4, -1]), [0, 0, 2, 6, 0]),
+ ],
)
-def test_create_matrix(test_name, task_allocator, resource_windows_dict, expected):
- result = task_allocator.create_matrix(resource_windows_dict)
+def test_cumsum_reset_at_minus_one(task_allocator, array, expected):
+ result = task_allocator._cumsum_reset_at_minus_one(array)
assert np.array_equal(result, expected)
@@ -179,38 +129,55 @@ def test_create_matrix(test_name, task_allocator, resource_windows_dict, expecte
(np.array([2, 3, 4, 5, 6]), 0),
],
)
-def test_get_window_start_index(task_allocator, array, expected):
- result = task_allocator._get_window_start_index(array)
+def test_find_last_zero_index(task_allocator, array, expected):
+ result = task_allocator._find_last_zero_index(array)
assert np.array_equal(result, expected)
-def test_mask_smallest_except_k_largest(task_allocator):
- array = np.array(
- [
- [5, 0, 10],
- [10, 30, 20],
- [1, 1, 1],
- ]
- )
- result = task_allocator._mask_smallest_except_k_largest(array, 2).mask
- expected = np.array(
- [
- [False, True, False],
- [True, False, False],
- [True, False, False],
- ]
- )
- assert np.array_equal(result, expected)
+@pytest.mark.parametrize(
+ "array, expected",
+ [
+ (
+ np.array([[[0, 0, 0], [2, 2, np.nan], [3, 3, 3]]]),
+ np.array([[[0, 0, 0], [2, 2, 2], [3, 3, 3]]]),
+ ),
+ (
+ np.array([[[0, 0, 0], [5, np.nan, np.nan], [10, 15, 20]]]),
+ np.array([[[0, 0, 0], [5, 7.5, 10], [10, 15, 20]]]),
+ ),
+ (
+ np.array([[[0, 0], [5, np.nan], [10, np.nan], [15, 15]]]),
+ np.array([[[0, 0], [5, 5], [10, 10], [15, 15]]]),
+ ),
+ ],
+)
+def test_interpolate_y_values(array, expected):
+ task_allocator = TaskAllocator()
+ result = task_allocator._interpolate_y_values(array)
+ np.testing.assert_array_equal(result, expected)
@pytest.mark.parametrize(
- "array, expected",
+ "array, mask, expected",
[
- (np.array([0, 1, 5, -1, 10]), [0, 1, 6, 0, 10]),
- (np.array([-1, 2, 3, 0, 4]), [0, 2, 5, 5, 9]),
- (np.array([0, -1, 2, 4, -1]), [0, 0, 2, 6, 0]),
+ (
+ np.array([1, 3]),
+ np.array([True, False, True]),
+ np.array([1, np.nan, 3]),
+ ),
+ (
+ np.array([]),
+ np.array([False, False, False]),
+ np.array([np.nan, np.nan, np.nan]),
+ ),
+ (
+ np.array([1, 2, 3]),
+ np.array([True, True, True]),
+ np.array([1, 2, 3]),
+ ),
],
)
-def test_cumsum_reset_at_minus_one(task_allocator, array, expected):
- result = task_allocator._cumsum_reset_at_minus_one(array)
- assert np.array_equal(result, expected)
+def test_replace_masked_values_with_nan(array, mask, expected):
+ task_allocator = TaskAllocator()
+ result = task_allocator._replace_masked_values_with_nan(array, mask)
+ np.testing.assert_array_equal(result, expected)
diff --git a/tests/scheduler/test_task_batch_processor.py b/tests/scheduler/test_task_batch_processor.py
deleted file mode 100644
index 49ca728..0000000
--- a/tests/scheduler/test_task_batch_processor.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import networkx as nx
-import pytest
-
-from factryengine import Resource, Task
-from factryengine.scheduler.task_batch_processor import TaskBatchProcessor
-from factryengine.scheduler.task_graph import TaskGraph
-
-
-# Replace the following line with actual task and graph creation
-def create_task(id, quantity, batch_size, duration=10):
- resource = Resource(id=1, available_windows=[(1, 100)])
- return Task(
- id=id,
- duration=duration,
- priority=1,
- quantity=quantity,
- batch_size=batch_size,
- resources=resource,
- )
-
-
-def create_graph():
- G = nx.DiGraph()
- # Add edges and nodes as needed
- return G
-
-
-@pytest.fixture
-def task_processor():
- task_dict = {1: create_task(1, 50, 10), 2: create_task(2, 100, 20)}
- task_graph = create_graph()
- return TaskBatchProcessor(task_graph, task_dict)
-
-
-test_case_values = [
- (
- "no batch size",
- {"1": create_task(1, 50, None)},
- {"1": create_task(1, 50, None)},
- ),
- (
- "no quantity",
- {"1": create_task(1, None, 10)},
- {"1": create_task(1, None, 10)},
- ),
- (
- "no batch size or quantity",
- {"1": create_task(1, None, None)},
- {"1": create_task(1, None, None)},
- ),
- (
- "batch size > quantity",
- {"1": create_task(1, 10, 50)},
- {"1": create_task(1, 10, 50)},
- ),
- (
- "batch size = quantity",
- {"1": create_task(1, 50, 50)},
- {"1": create_task(1, 50, 50)},
- ),
- (
- "create two batches",
- {"1": create_task(1, 20, 10, 10)},
- {"1-1": create_task(1, 10, 10, 5), "1-2": create_task(1, 10, 10, 5)},
- ),
- (
- "create two batches with remainder",
- {"1": create_task(1, 19, 10, 19)},
- {"1-1": create_task(1, 10, 10, 10), "1-2": create_task(1, 9, 10, 9)},
- ),
-]
-
-
-@pytest.mark.parametrize(
- "test_name, task_dict, expected",
- test_case_values,
- ids=[case[0] for case in test_case_values],
-)
-def test_split_tasks_into_batches(test_name, task_dict, expected):
- graph = TaskGraph(task_dict).graph
- task_processor = TaskBatchProcessor(task_graph=graph, task_dict=task_dict)
- result = task_processor.split_tasks_into_batches()
-
- assert len(result) == len(expected), "The number of tasks is not equal"
-
- for key in expected.keys():
- assert key in result, f"Key {key} not in result"
- assert result[key].id == expected[key].id
- assert result[key].duration == expected[key].duration
- assert result[key].priority == expected[key].priority
- assert result[key].quantity == expected[key].quantity
- assert result[key].batch_size == expected[key].batch_size
-
-
-def get_task_predecessor_uids(task):
- return [pred.uid for pred in task.predecessors]
-
-
-def test_split_tasks_into_batches_predecessors():
- task_dict = {
- "1": create_task(1, 20, 10),
- "2": create_task(2, 100, 50),
- "3": create_task(3, 100, 50),
- }
- task_dict["2"].predecessors.append(task_dict["1"])
- task_dict["3"].predecessors.append(task_dict["2"])
-
- graph = TaskGraph(task_dict).graph
- task_processor = TaskBatchProcessor(task_graph=graph, task_dict=task_dict)
- result = task_processor.split_tasks_into_batches()
-
- assert (
- len(result) == 6
- ), "The length of the result does not match the expected value."
- assert (
- len(result["1-1"].predecessors) == 0
- ), "The length of predecessors of task '1-1' is not as expected."
- assert get_task_predecessor_uids(result["2-1"]) == [
- "1-1",
- "1-2",
- ], "The predecessors of task '2-1' do not match the expected value."
- assert get_task_predecessor_uids(result["2-2"]) == [
- "1-1",
- "1-2",
- ], "The predecessors of task '2-2' do not match the expected value."
- assert get_task_predecessor_uids(result["3-1"]) == [
- "2-1"
- ], "The predecessors of task '3-1' do not match the expected value."
- assert get_task_predecessor_uids(result["3-2"]) == [
- "2-2"
- ], "The predecessors of task '3-2' do not match the expected value."
diff --git a/tests/scheduler/test_task_graph.py b/tests/scheduler/test_task_graph.py
index c75d2cc..f32b4bb 100644
--- a/tests/scheduler/test_task_graph.py
+++ b/tests/scheduler/test_task_graph.py
@@ -1,5 +1,4 @@
import pytest
-
from factryengine import Task
from factryengine.scheduler.task_graph import TaskGraph
@@ -9,10 +8,8 @@
@pytest.fixture
def tasks_dict() -> dict[str, Task]:
task1 = Task(id=1, duration=2, priority=1, resources=[], predecessors=[])
- task2 = Task(id=2, duration=1, priority=2, resources=[], predecessors=[task1])
- task3 = Task(
- id=3, duration=3, priority=3, resources=[], predecessors=[task1, task2]
- )
+ task2 = Task(id=2, duration=1, priority=2, resources=[], predecessor_ids=[1])
+ task3 = Task(id=3, duration=3, priority=3, resources=[], predecessor_ids=[1, 2])
tasks_dict = {
"1": task1,
"2": task2,
@@ -24,7 +21,6 @@ def tasks_dict() -> dict[str, Task]:
def test_create_task_graph(tasks_dict):
task_graph = TaskGraph(tasks_dict)
-
assert len(task_graph.graph) == 3
assert len(task_graph.graph.edges) == 3
assert "1" in task_graph.graph
diff --git a/tests/scheduler/test_utils.py b/tests/scheduler/test_utils.py
new file mode 100644
index 0000000..2a1d7a7
--- /dev/null
+++ b/tests/scheduler/test_utils.py
@@ -0,0 +1,38 @@
+import pytest
+from factryengine.models import Task
+from factryengine.scheduler.utils import get_task_predecessors
+
+
+@pytest.fixture
+def tasks_dict() -> dict[str, Task]:
+ task1 = Task(id=1, duration=2, priority=1, resources=[], predecessors=[])
+ task2 = Task(id=2, duration=1, priority=2, resources=[], predecessor_ids=[1])
+ task3 = Task(id=3, duration=3, priority=3, resources=[], predecessor_ids=[1, 2])
+ tasks_dict = {
+ "1": task1,
+ "2": task2,
+ "3": task3,
+ }
+
+ return tasks_dict
+
+
+@pytest.mark.parametrize(
+ "task_id,expected_predecessors",
+ [
+ ("1", []),
+ ("2", ["1"]),
+ ("3", ["1", "2"]),
+ ],
+)
+def test_can_get_task_predecessors(task_id, expected_predecessors, tasks_dict):
+ task = tasks_dict[task_id]
+ predecessors = get_task_predecessors(task, tasks_dict)
+
+ assert [pred.get_id() for pred in predecessors] == expected_predecessors
+
+
+def test_get_task_predecessors_raises_error_if_predecessor_does_not_exist(tasks_dict):
+ task = Task(id=4, duration=2, priority=1, resources=[], predecessor_ids=[5])
+ with pytest.raises(ValueError):
+ predecessors = get_task_predecessors(task, tasks_dict)
diff --git a/tests/scheduler/test_window_manager.py b/tests/scheduler/test_window_manager.py
index 9b6c6b6..5580b5d 100644
--- a/tests/scheduler/test_window_manager.py
+++ b/tests/scheduler/test_window_manager.py
@@ -7,6 +7,8 @@
resource1 = Resource(id=1, available_windows=[(1, 5), (7, 9)])
resource2 = Resource(id=2, available_windows=[(2, 6), (8, 10)])
resources = [resource1, resource2]
+window_dtype = [("start", int), ("end", int), ("duration", int), ("is_split", int)]
+
# Test WindowManager
window_manager = WindowManager(resources)
@@ -14,13 +16,13 @@
# Test case values
test_case_values = [
- ([resource1], {1: np.array([(1, 5, 4, 0), (7, 9, 2, 0)])}),
- ([resource2], {2: np.array([(2, 6, 4, 0), (8, 10, 2, 0)])}),
+ ([resource1], {1: np.array([(1, 5, 4, 0), (7, 9, 2, 0)], dtype=window_dtype)}),
+ ([resource2], {2: np.array([(2, 6, 4, 0), (8, 10, 2, 0)], dtype=window_dtype)}),
(
resources,
{
- 1: np.array([(1, 5, 4, 0), (7, 9, 2, 0)]),
- 2: np.array([(2, 6, 4, 0), (8, 10, 2, 0)]),
+ 1: np.array([(1, 5, 4, 0), (7, 9, 2, 0)], dtype=window_dtype),
+ 2: np.array([(2, 6, 4, 0), (8, 10, 2, 0)], dtype=window_dtype),
},
),
]
@@ -30,7 +32,7 @@
@pytest.mark.parametrize("resources, expected_output", test_case_values)
def test_create_resource_windows_dict(resources, expected_output):
window_manager = WindowManager(resources)
- result = window_manager.create_resource_windows_dict()
+ result = window_manager._create_resource_windows_dict()
for key in expected_output:
assert np.array_equal(result[key], expected_output[key])
assert len(result) == len(expected_output)
@@ -38,15 +40,15 @@ def test_create_resource_windows_dict(resources, expected_output):
# Test case values
test_case_values = [
- ([(1, 5), (7, 9)], np.array([(1, 5, 4, 0), (7, 9, 2, 0)])),
- ([(2, 6), (8, 10)], np.array([(2, 6, 4, 0), (8, 10, 2, 0)])),
+ ([(1, 5), (7, 9)], np.array([(1, 5, 4, 0), (7, 9, 2, 0)], dtype=window_dtype)),
+ ([(2, 6), (8, 10)], np.array([(2, 6, 4, 0), (8, 10, 2, 0)], dtype=window_dtype)),
]
# Test the windows_to_numpy method
@pytest.mark.parametrize("windows, expected_output", test_case_values)
def test_windows_to_numpy(windows, expected_output):
- assert np.array_equal(window_manager.windows_to_numpy(windows), expected_output)
+ assert np.array_equal(window_manager._windows_to_numpy(windows), expected_output)
# # Test case values
@@ -124,5 +126,5 @@ def test_windows_to_numpy(windows, expected_output):
ids=[case[0] for case in test_case_values],
)
def test_trim_windows(test_name, windows, trim_interval, expected):
- result = window_manager.trim_window(windows, trim_interval)
+ result = window_manager._trim_window(windows, trim_interval)
assert np.array_equal(result, expected)
diff --git a/versioneer.py b/versioneer.py
deleted file mode 100644
index d46f423..0000000
--- a/versioneer.py
+++ /dev/null
@@ -1,1886 +0,0 @@
-# Version: 0.18
-
-"""The Versioneer - like a rocketeer, but for versions.
-
-The Versioneer
-==============
-
-* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
-* Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
-python projects. The goal is to remove the tedious and error-prone "update
-the embedded version string" step from your release process. Making a new
-release should be as easy as recording a new tag in your version-control
-system, and maybe making new tarballs.
-
-
-## Quick Install
-
-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
-
-## Version Identifiers
-
-Source trees come from a variety of places:
-
-* a version-control system checkout (mostly used by developers)
-* a nightly tarball, produced by build automation
-* a snapshot tarball, produced by a web-based VCS browser, like github's
- "tarball from tag" feature
-* a release tarball, produced by "setup.py sdist", distributed through PyPI
-
-Within each source tree, the version identifier (either a string or a number,
-this tool is format-agnostic) can come from a variety of places:
-
-* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
- about recent "tags" and an absolute revision-id
-* the name of the directory into which the tarball was unpacked
-* an expanded VCS keyword ($Id$, etc)
-* a `_version.py` created by some earlier build step
-
-For released software, the version identifier is closely related to a VCS
-tag. Some projects use tag names that include more than just the version
-string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
-needs to strip the tag prefix to extract the version identifier. For
-unreleased software (between tags), the version identifier should provide
-enough information to help developers recreate the same tree, while also
-giving them an idea of roughly how old the tree is (after version 1.2, before
-version 1.3). Many VCS systems can report a description that captures this,
-for example `git describe --tags --dirty --always` reports things like
-"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
-0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes.
-
-The version identifier is used for multiple purposes:
-
-* to allow the module to self-identify its version: `myproject.__version__`
-* to choose a name and prefix for a 'setup.py sdist' tarball
-
-## Theory of Operation
-
-Versioneer works by adding a special `_version.py` file into your source
-tree, where your `__init__.py` can import it. This `_version.py` knows how to
-dynamically ask the VCS tool for version information at import time.
-
-`_version.py` also contains `$Revision$` markers, and the installation
-process marks `_version.py` to have this marker rewritten with a tag name
-during the `git archive` command. As a result, generated tarballs will
-contain enough information to get the proper version.
-
-To allow `setup.py` to compute a version too, a `versioneer.py` is added to
-the top level of your source tree, next to `setup.py` and the `setup.cfg`
-that configures it. This overrides several distutils/setuptools commands to
-compute the version when invoked, and changes `setup.py build` and `setup.py
-sdist` to replace `_version.py` with a small static file that contains just
-the generated version data.
-
-## Installation
-
-See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
-
-## Version-String Flavors
-
-Code which uses Versioneer can learn about its version string at runtime by
-importing `_version` from your main `__init__.py` file and running the
-`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
-import the top-level `versioneer.py` and run `get_versions()`.
-
-Both functions return a dictionary with different flavors of version
-information:
-
-* `['version']`: A condensed version string, rendered using the selected
- style. This is the most commonly used value for the project's version
- string. The default "pep440" style yields strings like `0.11`,
- `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
- below for alternative styles.
-
-* `['full-revisionid']`: detailed revision identifier. For Git, this is the
- full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
-
-* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
- commit date in ISO 8601 format. This will be None if the date is not
- available.
-
-* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
- this is only accurate if run in a VCS checkout, otherwise it is likely to
- be False or None
-
-* `['error']`: if the version string could not be computed, this will be set
- to a string describing the problem, otherwise it will be None. It may be
- useful to throw an exception in setup.py if this is set, to avoid e.g.
- creating tarballs with a version string of "unknown".
-
-Some variants are more useful than others. Including `full-revisionid` in a
-bug report should allow developers to reconstruct the exact code being tested
-(or indicate the presence of local changes that should be shared with the
-developers). `version` is suitable for display in an "about" box or a CLI
-`--version` output: it can be easily compared against release notes and lists
-of bugs fixed in various releases.
-
-The installer adds the following text to your `__init__.py` to place a basic
-version in `YOURPROJECT.__version__`:
-
- from ._version import get_versions
- __version__ = get_versions()['version']
- del get_versions
-
-## Styles
-
-The setup.cfg `style=` configuration controls how the VCS information is
-rendered into a version string.
-
-The default style, "pep440", produces a PEP440-compliant string, equal to the
-un-prefixed tag name for actual releases, and containing an additional "local
-version" section with more detail for in-between builds. For Git, this is
-TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
-tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
-that this commit is two revisions ("+2") beyond the "0.11" tag. For released
-software (exactly equal to a known tag), the identifier will only contain the
-stripped tag, e.g. "0.11".
-
-Other styles are available. See [details.md](details.md) in the Versioneer
-source tree for descriptions.
-
-## Debugging
-
-Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
-to return a version of "0+unknown". To investigate the problem, run `setup.py
-version`, which will run the version-lookup code in a verbose mode, and will
-display the full contents of `get_versions()` (including the `error` string,
-which may help identify what went wrong).
-
-## Known Limitations
-
-Some situations are known to cause problems for Versioneer. This details the
-most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
-
-### Subprojects
-
-Versioneer has limited support for source trees in which `setup.py` is not in
-the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are
-two common reasons why `setup.py` might not be in the root:
-
-* Source trees which contain multiple subprojects, such as
- [Buildbot](https://github.com/buildbot/buildbot), which contains both
- "master" and "slave" subprojects, each with their own `setup.py`,
- `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
- distributions (and upload multiple independently-installable tarballs).
-* Source trees whose main purpose is to contain a C library, but which also
- provide bindings to Python (and perhaps other langauges) in subdirectories.
-
-Versioneer will look for `.git` in parent directories, and most operations
-should get the right version string. However `pip` and `setuptools` have bugs
-and implementation details which frequently cause `pip install .` from a
-subproject directory to fail to find a correct version string (so it usually
-defaults to `0+unknown`).
-
-`pip install --editable .` should work correctly. `setup.py install` might
-work too.
-
-Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
-some later version.
-
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
-this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
-issue from the Versioneer side in more detail.
-[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
-[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
-pip to let Versioneer work correctly.
-
-Versioneer-0.16 and earlier only looked for a `.git` directory next to the
-`setup.cfg`, so subprojects were completely unsupported with those releases.
-
-### Editable installs with setuptools <= 18.5
-
-`setup.py develop` and `pip install --editable .` allow you to install a
-project into a virtualenv once, then continue editing the source code (and
-test) without re-installing after every change.
-
-"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
-convenient way to specify executable scripts that should be installed along
-with the python package.
-
-These both work as expected when using modern setuptools. When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the install happens with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when then
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
- indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
- `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to make it easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
-
-"""
-
-from __future__ import print_function
-
-try:
- import configparser
-except ImportError:
- import ConfigParser as configparser
-
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
-
-def get_root():
- """Get the project root directory.
-
- We require that all commands are run from the project root, i.e. the
- directory that contains setup.py, setup.cfg, and versioneer.py .
- """
- root = os.path.realpath(os.path.abspath(os.getcwd()))
- setup_py = os.path.join(root, "setup.py")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
- # allow 'python path/to/setup.py COMMAND'
- root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
- setup_py = os.path.join(root, "setup.py")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
- err = (
- "Versioneer was unable to run the project root directory. "
- "Versioneer requires setup.py to be executed from "
- "its immediate directory (like 'python setup.py COMMAND'), "
- "or in a way that lets it use sys.argv[0] to find the root "
- "(like 'python path/to/setup.py COMMAND')."
- )
- raise VersioneerBadRootError(err)
- try:
- # Certain runtime workflows (setup.py install/develop in a setuptools
- # tree) execute all dependencies in a single python process, so
- # "versioneer" may be imported multiple times, and python's shared
- # module-import table will cache the first one. So we can't use
- # os.path.dirname(__file__), as that will find whichever
- # versioneer.py was first imported, even in later projects.
- me = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(me)[0])
- vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
- if me_dir != vsr_dir:
- print(
- "Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(me), versioneer_py)
- )
- except NameError:
- pass
- return root
-
-
-def get_config_from_root(root):
- """Read the project setup.cfg file to determine Versioneer config."""
- # This might raise EnvironmentError (if setup.cfg is missing), or
- # configparser.NoSectionError (if it lacks a [versioneer] section), or
- # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
- # the top of versioneer.py for instructions on writing your setup.cfg .
- setup_cfg = os.path.join(root, "setup.cfg")
- parser = configparser.SafeConfigParser()
- with open(setup_cfg, "r") as f:
- parser.readfp(f)
- VCS = parser.get("versioneer", "VCS") # mandatory
-
- def get(parser, name):
- if parser.has_option("versioneer", name):
- return parser.get("versioneer", name)
- return None
-
- cfg = VersioneerConfig()
- cfg.VCS = VCS
- cfg.style = get(parser, "style") or ""
- cfg.versionfile_source = get(parser, "versionfile_source")
- cfg.versionfile_build = get(parser, "versionfile_build")
- cfg.tag_prefix = get(parser, "tag_prefix")
- if cfg.tag_prefix in ("''", '""'):
- cfg.tag_prefix = ""
- cfg.parentdir_prefix = get(parser, "parentdir_prefix")
- cfg.verbose = get(parser, "verbose")
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
-
- def decorate(f):
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
-
- return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
- """Call the given command(s)."""
- assert isinstance(commands, list)
- p = None
- for c in commands:
- try:
- dispcmd = str([c] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(
- [c] + args,
- cwd=cwd,
- env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr else None),
- )
- break
- except EnvironmentError:
- e = sys.exc_info()[1]
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
-
-
-LONG_VERSION_PY[
- "git"
-] = '''
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
- git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
- git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
-
-def get_config():
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = "%(STYLE)s"
- cfg.tag_prefix = "%(TAG_PREFIX)s"
- cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
- cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
- def decorate(f):
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
- return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
- """Call the given command(s)."""
- assert isinstance(commands, list)
- p = None
- for c in commands:
- try:
- dispcmd = str([c] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
- break
- except EnvironmentError:
- e = sys.exc_info()[1]
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %%s" %% dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %%s" %% (commands,))
- return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %%s (error)" %% dispcmd)
- print("stdout was %%s" %% stdout)
- return None, p.returncode
- return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for i in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print("Tried directories %%s but none started with prefix %%s" %%
- (str(rootdirs), parentdir_prefix))
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords = {}
- try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
- """Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
- date = keywords.get("date")
- if date is not None:
- # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %%d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
- if verbose:
- print("discarding '%%s', no digits" %% ",".join(refs - tags))
- if verbose:
- print("likely tags: %%s" %% ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %%s" %% r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
- if rc != 0:
- if verbose:
- print("Directory %%s not under git control" %% root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%%s*" %% tag_prefix],
- cwd=root)
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
- if not mo:
- # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%%s'"
- %% describe_out)
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%%s' doesn't start with prefix '%%s'"
- print(fmt %% (full_tag, tag_prefix))
- pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
- %% (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
- cwd=root)[0].strip()
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces):
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces):
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post.devDISTANCE
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += ".post.dev%%d" %% pieces["distance"]
- else:
- # exception #1
- rendered = "0.post.dev%%d" %% pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces):
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%%s" %% pieces["short"]
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%%s" %% pieces["short"]
- return rendered
-
-
-def render_pep440_old(pieces):
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Eexceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces):
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces):
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces, style):
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%%s'" %% style)
-
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
-
-
-def get_versions():
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
- verbose)
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
- root = os.path.dirname(root)
- except NameError:
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None}
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version", "date": None}
-'''
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords = {}
- try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
- """Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
- date = keywords.get("date")
- if date is not None:
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r"\d", r)])
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix) :]
- if verbose:
- print("picking %s" % r)
- return {
- "version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": None,
- "date": date,
- }
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {
- "version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": "no suitable tags",
- "date": None,
- }
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(
- GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- "%s*" % tag_prefix,
- ],
- cwd=root,
- )
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[: git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
- if not mo:
- # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix) :]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
- 0
- ].strip()
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def do_vcs_install(manifest_in, versionfile_source, ipy):
- """Git-specific installation logic for Versioneer.
-
- For Git, this means creating/changing .gitattributes to mark _version.py
- for export-subst keyword substitution.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
- files = [manifest_in, versionfile_source]
- if ipy:
- files.append(ipy)
- try:
- me = __file__
- if me.endswith(".pyc") or me.endswith(".pyo"):
- me = os.path.splitext(me)[0] + ".py"
- versioneer_file = os.path.relpath(me)
- except NameError:
- versioneer_file = "versioneer.py"
- files.append(versioneer_file)
- present = False
- try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
- pass
- if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
- files.append(".gitattributes")
- run_command(GITS, ["add", "--"] + files)
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for i in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {
- "version": dirname[len(parentdir_prefix) :],
- "full-revisionid": None,
- "dirty": False,
- "error": None,
- "date": None,
- }
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-import json
-
-version_json = '''
-%s
-''' # END VERSION_JSON
-
-
-def get_versions():
- return json.loads(version_json)
-"""
-
-
-def versions_from_file(filename):
- """Try to determine the version from _version.py if present."""
- try:
- with open(filename) as f:
- contents = f.read()
- except EnvironmentError:
- raise NotThisMethod("unable to read _version.py")
- mo = re.search(
- r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
- )
- if not mo:
- mo = re.search(
- r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
- )
- if not mo:
- raise NotThisMethod("no version_json in _version.py")
- return json.loads(mo.group(1))
-
-
-def write_to_version_file(filename, versions):
- """Write the given version number to the given _version.py file."""
- os.unlink(filename)
- contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": "))
- with open(filename, "w") as f:
- f.write(SHORT_VERSION_PY % contents)
-
- print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def plus_or_dot(pieces):
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces):
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post.devDISTANCE
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
- else:
- # exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces):
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
-
-
-def render_pep440_old(pieces):
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Eexceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces):
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces):
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces, style):
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {
- "version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None,
- }
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {
- "version": rendered,
- "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"],
- "error": None,
- "date": pieces.get("date"),
- }
-
-
-class VersioneerBadRootError(Exception):
- """The project root directory is unknown or missing key files."""
-
-
-def get_versions(verbose=False):
- """Get the project version from whatever source is available.
-
- Returns dict with two keys: 'version' and 'full'.
- """
- if "versioneer" in sys.modules:
- # see the discussion in cmdclass.py:get_cmdclass()
- del sys.modules["versioneer"]
-
- root = get_root()
- cfg = get_config_from_root(root)
-
- assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
- handlers = HANDLERS.get(cfg.VCS)
- assert handlers, "unrecognized VCS '%s'" % cfg.VCS
- verbose = verbose or cfg.verbose
- assert (
- cfg.versionfile_source is not None
- ), "please set versioneer.versionfile_source"
- assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
- versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
- # extract version from first of: _version.py, VCS command (e.g. 'git
- # describe'), parentdir. This is meant to work for developers using a
- # source checkout, for users of a tarball created by 'setup.py sdist',
- # and for users of a tarball/zipball created by 'git archive' or github's
- # download-from-tag feature or the equivalent in other VCSes.
-
- get_keywords_f = handlers.get("get_keywords")
- from_keywords_f = handlers.get("keywords")
- if get_keywords_f and from_keywords_f:
- try:
- keywords = get_keywords_f(versionfile_abs)
- ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
- if verbose:
- print("got version from expanded keyword %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- ver = versions_from_file(versionfile_abs)
- if verbose:
- print("got version from file %s %s" % (versionfile_abs, ver))
- return ver
- except NotThisMethod:
- pass
-
- from_vcs_f = handlers.get("pieces_from_vcs")
- if from_vcs_f:
- try:
- pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
- ver = render(pieces, cfg.style)
- if verbose:
- print("got version from VCS %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- if verbose:
- print("got version from parentdir %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- if verbose:
- print("unable to compute version")
-
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version",
- "date": None,
- }
-
-
-def get_version():
- """Get the short version string for this project."""
- return get_versions()["version"]
-
-
-def get_cmdclass():
- """Get the custom setuptools/distutils subclasses used by Versioneer."""
- if "versioneer" in sys.modules:
- del sys.modules["versioneer"]
- # this fixes the "python setup.py develop" case (also 'install' and
- # 'easy_install .'), in which subdependencies of the main project are
- # built (using setup.py bdist_egg) in the same python process. Assume
- # a main project A and a dependency B, which use different versions
- # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
- # sys.modules by the time B's setup.py is executed, causing B to run
- # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
- # sandbox that restores sys.modules to it's pre-build state, so the
- # parent is protected against the child's "import versioneer". By
- # removing ourselves from sys.modules here, before the child build
- # happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/warner/python-versioneer/issues/52
-
- cmds = {}
-
- # we add "version" to both distutils and setuptools
- from distutils.core import Command
-
- class cmd_version(Command):
- description = "report generated version string"
- user_options = []
- boolean_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- vers = get_versions(verbose=True)
- print("Version: %s" % vers["version"])
- print(" full-revisionid: %s" % vers.get("full-revisionid"))
- print(" dirty: %s" % vers.get("dirty"))
- print(" date: %s" % vers.get("date"))
- if vers["error"]:
- print(" error: %s" % vers["error"])
-
- cmds["version"] = cmd_version
-
- # we override "build_py" in both distutils and setuptools
- #
- # most invocation pathways end up running build_py:
- # distutils/build -> build_py
- # distutils/install -> distutils/build ->..
- # setuptools/bdist_wheel -> distutils/install ->..
- # setuptools/bdist_egg -> distutils/install_lib -> build_py
- # setuptools/install -> bdist_egg ->..
- # setuptools/develop -> ?
- # pip install:
- # copies source tree to a tempdir before running egg_info/etc
- # if .git isn't copied too, 'git describe' will fail
- # then does setup.py bdist_wheel, or sometimes setup.py install
- # setup.py egg_info -> ?
-
- # we override different "build_py" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.build_py import build_py as _build_py
- else:
- from distutils.command.build_py import build_py as _build_py
-
- class cmd_build_py(_build_py):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- _build_py.run(self)
- # now locate _version.py in the new build/ directory and replace
- # it with an updated value
- if cfg.versionfile_build:
- target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- cmds["build_py"] = cmd_build_py
-
- if "cx_Freeze" in sys.modules: # cx_freeze enabled?
- from cx_Freeze.dist import build_exe as _build_exe
-
- # nczeczulin reports that py2exe won't like the pep440-style string
- # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
- # setup(console=[{
- # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
- # "product_version": versioneer.get_version(),
- # ...
-
- class cmd_build_exe(_build_exe):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _build_exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- cmds["build_exe"] = cmd_build_exe
- del cmds["build_py"]
-
- if "py2exe" in sys.modules: # py2exe enabled?
- try:
- from py2exe.distutils_buildexe import py2exe as _py2exe # py3
- except ImportError:
- from py2exe.build_exe import py2exe as _py2exe # py2
-
- class cmd_py2exe(_py2exe):
- def run(self):
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _py2exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- cmds["py2exe"] = cmd_py2exe
-
- # we override different "sdist" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.sdist import sdist as _sdist
- else:
- from distutils.command.sdist import sdist as _sdist
-
- class cmd_sdist(_sdist):
- def run(self):
- versions = get_versions()
- self._versioneer_generated_versions = versions
- # unless we update this, the command will keep using the old
- # version
- self.distribution.metadata.version = versions["version"]
- return _sdist.run(self)
-
- def make_release_tree(self, base_dir, files):
- root = get_root()
- cfg = get_config_from_root(root)
- _sdist.make_release_tree(self, base_dir, files)
- # now locate _version.py in the new base_dir directory
- # (remembering that it may be a hardlink) and replace it with an
- # updated value
- target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(
- target_versionfile, self._versioneer_generated_versions
- )
-
- cmds["sdist"] = cmd_sdist
-
- return cmds
-
-
-CONFIG_ERROR = """
-setup.cfg is missing the necessary Versioneer configuration. You need
-a section like:
-
- [versioneer]
- VCS = git
- style = pep440
- versionfile_source = src/myproject/_version.py
- versionfile_build = myproject/_version.py
- tag_prefix =
- parentdir_prefix = myproject-
-
-You will also need to edit your setup.py to use the results:
-
- import versioneer
- setup(version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(), ...)
-
-Please read the docstring in ./versioneer.py for configuration instructions,
-edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
-"""
-
-SAMPLE_CONFIG = """
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-#VCS = git
-#style = pep440
-#versionfile_source =
-#versionfile_build =
-#tag_prefix =
-#parentdir_prefix =
-
-"""
-
-INIT_PY_SNIPPET = """
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-
-def do_setup():
- """Main VCS-independent setup function for installing Versioneer."""
- root = get_root()
- try:
- cfg = get_config_from_root(root)
- except (
- EnvironmentError,
- configparser.NoSectionError,
- configparser.NoOptionError,
- ) as e:
- if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
- print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
- with open(os.path.join(root, "setup.cfg"), "a") as f:
- f.write(SAMPLE_CONFIG)
- print(CONFIG_ERROR, file=sys.stderr)
- return 1
-
- print(" creating %s" % cfg.versionfile_source)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(
- LONG
- % {
- "DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- }
- )
-
- ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
- if os.path.exists(ipy):
- try:
- with open(ipy, "r") as f:
- old = f.read()
- except EnvironmentError:
- old = ""
- if INIT_PY_SNIPPET not in old:
- print(" appending to %s" % ipy)
- with open(ipy, "a") as f:
- f.write(INIT_PY_SNIPPET)
- else:
- print(" %s unmodified" % ipy)
- else:
- print(" %s doesn't exist, ok" % ipy)
- ipy = None
-
- # Make sure both the top-level "versioneer.py" and versionfile_source
- # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
- # they'll be copied into source distributions. Pip won't be able to
- # install the package without this.
- manifest_in = os.path.join(root, "MANIFEST.in")
- simple_includes = set()
- try:
- with open(manifest_in, "r") as f:
- for line in f:
- if line.startswith("include "):
- for include in line.split()[1:]:
- simple_includes.add(include)
- except EnvironmentError:
- pass
- # That doesn't cover everything MANIFEST.in can do
- # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
- # it might give some false negatives. Appending redundant 'include'
- # lines is safe, though.
- if "versioneer.py" not in simple_includes:
- print(" appending 'versioneer.py' to MANIFEST.in")
- with open(manifest_in, "a") as f:
- f.write("include versioneer.py\n")
- else:
- print(" 'versioneer.py' already in MANIFEST.in")
- if cfg.versionfile_source not in simple_includes:
- print(
- " appending versionfile_source ('%s') to MANIFEST.in"
- % cfg.versionfile_source
- )
- with open(manifest_in, "a") as f:
- f.write("include %s\n" % cfg.versionfile_source)
- else:
- print(" versionfile_source already in MANIFEST.in")
-
- # Make VCS-specific changes. For git, this means creating/changing
- # .gitattributes to mark _version.py for export-subst keyword
- # substitution.
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
- return 0
-
-
-def scan_setup_py():
- """Validate the contents of setup.py against Versioneer's expectations."""
- found = set()
- setters = False
- errors = 0
- with open("setup.py", "r") as f:
- for line in f.readlines():
- if "import versioneer" in line:
- found.add("import")
- if "versioneer.get_cmdclass()" in line:
- found.add("cmdclass")
- if "versioneer.get_version()" in line:
- found.add("get_version")
- if "versioneer.VCS" in line:
- setters = True
- if "versioneer.versionfile_source" in line:
- setters = True
- if len(found) != 3:
- print("")
- print("Your setup.py appears to be missing some important items")
- print("(but I might be wrong). Please make sure it has something")
- print("roughly like the following:")
- print("")
- print(" import versioneer")
- print(" setup( version=versioneer.get_version(),")
- print(" cmdclass=versioneer.get_cmdclass(), ...)")
- print("")
- errors += 1
- if setters:
- print("You should remove lines like 'versioneer.VCS = ' and")
- print("'versioneer.versionfile_source = ' . This configuration")
- print("now lives in setup.cfg, and should be removed from setup.py")
- print("")
- errors += 1
- return errors
-
-
-if __name__ == "__main__":
- cmd = sys.argv[1]
- if cmd == "setup":
- errors = do_setup()
- errors += scan_setup_py()
- if errors:
- sys.exit(1)