diff --git a/404.html b/404.html index 5cf39c13..0cde2353 100644 --- a/404.html +++ b/404.html @@ -5,7 +5,7 @@ Page Not Found | VAME - + diff --git a/assets/js/04a8f406.31392b8a.js b/assets/js/04a8f406.432dd89d.js similarity index 51% rename from assets/js/04a8f406.31392b8a.js rename to assets/js/04a8f406.432dd89d.js index 93dcc48b..9f35134b 100644 --- a/assets/js/04a8f406.31392b8a.js +++ b/assets/js/04a8f406.432dd89d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[88],{445:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>p,frontMatter:()=>s,metadata:()=>c,toc:()=>a});var i=t(4848),r=t(8453);const s={sidebar_label:"new",title:"vame.initialize_project.new"},o=void 0,c={id:"reference/vame/initialize_project/new",title:"vame.initialize_project.new",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/initialize_project/new.md",sourceDirName:"reference/vame/initialize_project",slug:"/reference/vame/initialize_project/new",permalink:"/VAME/docs/reference/vame/initialize_project/new",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"new",title:"vame.initialize_project.new"},sidebar:"docsSidebar",previous:{title:"videowriter",permalink:"/VAME/docs/reference/vame/analysis/videowriter"},next:{title:"logger",permalink:"/VAME/docs/reference/vame/logging/logger"}},l={},a=[{value:"init_new_project",id:"init_new_project",level:4}];function d(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,r.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,i.jsx)(n.p,{children:"The following code is adapted from:"}),"\n",(0,i.jsxs)(n.p,{children:["DeepLabCut2.0 Toolbox (deeplabcut.org)\n\xa9 A. & M. Mathis Labs\n",(0,i.jsx)(n.a,{href:"https://github.com/AlexEMG/DeepLabCut",children:"https://github.com/AlexEMG/DeepLabCut"}),"\nPlease see AUTHORS for contributors.\n",(0,i.jsx)(n.a,{href:"https://github.com/AlexEMG/DeepLabCut/blob/master/AUTHORS",children:"https://github.com/AlexEMG/DeepLabCut/blob/master/AUTHORS"}),"\nLicensed under GNU Lesser General Public License v3.0"]}),"\n",(0,i.jsx)(n.h4,{id:"init_new_project",children:"init_new_project"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def init_new_project(project: str,\n videos: List[str],\n poses_estimations: List[str],\n working_directory: str = None,\n videotype: str = '.mp4') -> str\n"})}),"\n",(0,i.jsx)(n.p,{children:"Creates a new VAME project with the given parameters."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"project"})," ",(0,i.jsx)(n.em,{children:"str"})," - Project name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"videos"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of videos paths to be used in the project. E.g. 
['./sample_data/Session001.mp4']"]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"poses_estimations"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of pose estimation files paths to be used in the project. E.g. ['./sample_data/pose estimation/Session001.csv'] working_directory (str, optional): ",(0,i.jsx)(n.em,{children:"description"}),". Defaults to None."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"videotype"})," ",(0,i.jsx)(n.em,{children:"str, optional"})," - Video extension (.mp4 or .avi). Defaults to '.mp4'."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"projconfigfile"})," ",(0,i.jsx)(n.em,{children:"str"})," - Path to the new vame project config file."]}),"\n"]})]})}function p(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(d,{...e})}):d(e)}},8453:(e,n,t)=>{t.d(n,{R:()=>o,x:()=>c});var i=t(6540);const r={},s=i.createContext(r);function o(e){const n=i.useContext(s);return i.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function c(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(r):e.components||r:o(e.components),i.createElement(s.Provider,{value:n},e.children)}}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[88],{445:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>p,frontMatter:()=>s,metadata:()=>c,toc:()=>a});var i=t(4848),r=t(8453);const s={sidebar_label:"new",title:"vame.initialize_project.new"},o=void 0,c={id:"reference/vame/initialize_project/new",title:"vame.initialize_project.new",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/initialize_project/new.md",sourceDirName:"reference/vame/initialize_project",slug:"/reference/vame/initialize_project/new",permalink:"/VAME/docs/reference/vame/initialize_project/new",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"new",title:"vame.initialize_project.new"},sidebar:"docsSidebar",previous:{title:"videowriter",permalink:"/VAME/docs/reference/vame/analysis/videowriter"},next:{title:"logger",permalink:"/VAME/docs/reference/vame/logging/logger"}},l={},a=[{value:"init_new_project",id:"init_new_project",level:4}];function d(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,r.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,i.jsx)(n.p,{children:"The following code is adapted from:"}),"\n",(0,i.jsxs)(n.p,{children:["DeepLabCut2.0 Toolbox (deeplabcut.org)\n\xa9 A. & M. 
Mathis Labs\n",(0,i.jsx)(n.a,{href:"https://github.com/AlexEMG/DeepLabCut",children:"https://github.com/AlexEMG/DeepLabCut"}),"\nPlease see AUTHORS for contributors.\n",(0,i.jsx)(n.a,{href:"https://github.com/AlexEMG/DeepLabCut/blob/master/AUTHORS",children:"https://github.com/AlexEMG/DeepLabCut/blob/master/AUTHORS"}),"\nLicensed under GNU Lesser General Public License v3.0"]}),"\n",(0,i.jsx)(n.h4,{id:"init_new_project",children:"init_new_project"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def init_new_project(\n project: str,\n videos: List[str],\n poses_estimations: List[str],\n working_directory: str = '.',\n videotype: str = '.mp4',\n paths_to_pose_nwb_series_data: Optional[str] = None) -> str\n"})}),"\n",(0,i.jsx)(n.p,{children:"Creates a new VAME project with the given parameters."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"project"})," ",(0,i.jsx)(n.em,{children:"str"})," - Project name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"videos"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of videos paths to be used in the project. E.g. ['./sample_data/Session001.mp4']"]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"poses_estimations"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of pose estimation files paths to be used in the project. E.g. ['./sample_data/pose estimation/Session001.csv'] working_directory (str, optional): ",(0,i.jsx)(n.em,{children:"description"}),". Defaults to None."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"videotype"})," ",(0,i.jsx)(n.em,{children:"str, optional"})," - Video extension (.mp4 or .avi). 
Defaults to '.mp4'."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"projconfigfile"})," ",(0,i.jsx)(n.em,{children:"str"})," - Path to the new vame project config file."]}),"\n"]})]})}function p(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(d,{...e})}):d(e)}},8453:(e,n,t)=>{t.d(n,{R:()=>o,x:()=>c});var i=t(6540);const r={},s=i.createContext(r);function o(e){const n=i.useContext(s);return i.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function c(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(r):e.components||r:o(e.components),i.createElement(s.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/20a1189d.5177a708.js b/assets/js/20a1189d.57cf727b.js similarity index 54% rename from assets/js/20a1189d.5177a708.js rename to assets/js/20a1189d.57cf727b.js index 4db7f298..f545751d 100644 --- a/assets/js/20a1189d.5177a708.js +++ b/assets/js/20a1189d.57cf727b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[3567],{3072:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>o,contentTitle:()=>l,default:()=>h,frontMatter:()=>t,metadata:()=>c,toc:()=>a});var r=i(4848),s=i(8453);const t={sidebar_label:"gif_creator",title:"vame.analysis.gif_creator"},l=void 0,c={id:"reference/vame/analysis/gif_creator",title:"vame.analysis.gif_creator",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/gif_creator.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/gif_creator",permalink:"/VAME/docs/reference/vame/analysis/gif_creator",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"gif_creator",title:"vame.analysis.gif_creator"},sidebar:"docsSidebar",previous:{title:"generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions"},next:{title:"pose_segmentation",permalink:"/VAME/docs/reference/vame/analysis/pose_segmentation"}},o={},a=[{value:"create_video",id:"create_video",level:4},{value:"gif",id:"gif",level:4}];function d(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,s.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,r.jsx)(n.h4,{id:"create_video",children:"create_video"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:"def create_video(path_to_file: str, file: str, embed: np.ndarray,\n clabel: np.ndarray, frames: List[np.ndarray], start: int,\n length: int, max_lag: int, num_points: int) -> None\n"})}),"\n",(0,r.jsx)(n.p,{children:"Create video frames for the given embedding."}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"path_to_file"})," ",(0,r.jsx)(n.em,{children:"str"})," - Path to the file."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"file"})," ",(0,r.jsx)(n.em,{children:"str"})," - File name."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"embed"})," ",(0,r.jsx)(n.em,{children:"np.ndarray"})," - Embedding array."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"clabel"})," ",(0,r.jsx)(n.em,{children:"np.ndarray"})," - Cluster labels."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"frames"})," ",(0,r.jsx)(n.em,{children:"List[np.ndarray]"})," - List of frames."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"start"})," ",(0,r.jsx)(n.em,{children:"int"})," - Starting index."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"length"})," ",(0,r.jsx)(n.em,{children:"int"})," - Length of the video."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"max_lag"})," ",(0,r.jsx)(n.em,{children:"int"})," - Maximum lag."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"num_points"})," ",(0,r.jsx)(n.em,{children:"int"})," - Number of points."]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,r.jsx)(n.p,{children:"None"}),"\n",(0,r.jsx)(n.h4,{id:"gif",children:"gif"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:"def gif(\n config: str,\n pose_ref_index: int,\n subtract_background: bool = True,\n start: int | None = None,\n length: int = 500,\n max_lag: int = 30,\n label: str = 'community',\n file_format: str = '.mp4',\n crop_size: Tuple[int, int] = (300, 300)) -> None\n"})}),"\n",(0,r.jsx)(n.p,{children:"Create a GIF from the given configuration."}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"config"})," ",(0,r.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"pose_ref_index"})," ",(0,r.jsx)(n.em,{children:"int"})," - Pose reference index."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"subtract_background"})," ",(0,r.jsx)(n.em,{children:"bool, optional"})," - Whether to subtract background. Defaults to True."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"start"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Starting index. 
Defaults to None."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"length"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Length of the video. Defaults to 500."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"max_lag"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Maximum lag. Defaults to 30."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"label"})," ",(0,r.jsx)(n.em,{children:"str, optional"})," - Label type [None, community, motif]. Defaults to 'community'."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"file_format"})," ",(0,r.jsx)(n.em,{children:"str, optional"})," - File format. Defaults to '.mp4'."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"crop_size"})," ",(0,r.jsx)(n.em,{children:"Tuple[int, int], optional"})," - Crop size. Defaults to (300,300)."]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,r.jsx)(n.p,{children:"None"})]})}function h(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(d,{...e})}):d(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>l,x:()=>c});var r=i(6540);const s={},t=r.createContext(s);function l(e){const n=r.useContext(t);return r.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function c(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(s):e.components||s:l(e.components),r.createElement(t.Provider,{value:n},e.children)}}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[3567],{3072:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>a,contentTitle:()=>l,default:()=>h,frontMatter:()=>t,metadata:()=>o,toc:()=>c});var r=i(4848),s=i(8453);const t={sidebar_label:"gif_creator",title:"vame.analysis.gif_creator"},l=void 0,o={id:"reference/vame/analysis/gif_creator",title:"vame.analysis.gif_creator",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/gif_creator.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/gif_creator",permalink:"/VAME/docs/reference/vame/analysis/gif_creator",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"gif_creator",title:"vame.analysis.gif_creator"},sidebar:"docsSidebar",previous:{title:"generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions"},next:{title:"pose_segmentation",permalink:"/VAME/docs/reference/vame/analysis/pose_segmentation"}},a={},c=[{value:"create_video",id:"create_video",level:4},{value:"gif",id:"gif",level:4}];function d(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,s.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,r.jsx)(n.h4,{id:"create_video",children:"create_video"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:"def create_video(path_to_file: str, file: str, embed: np.ndarray,\n clabel: np.ndarray, frames: List[np.ndarray], start: int,\n length: int, max_lag: int, num_points: int) -> None\n"})}),"\n",(0,r.jsx)(n.p,{children:"Create video frames for the given embedding."}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"path_to_file"})," ",(0,r.jsx)(n.em,{children:"str"})," - Path to the file."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"file"})," ",(0,r.jsx)(n.em,{children:"str"})," - File name."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"embed"})," ",(0,r.jsx)(n.em,{children:"np.ndarray"})," - Embedding array."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"clabel"})," ",(0,r.jsx)(n.em,{children:"np.ndarray"})," - Cluster labels."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"frames"})," ",(0,r.jsx)(n.em,{children:"List[np.ndarray]"})," - List of frames."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"start"})," ",(0,r.jsx)(n.em,{children:"int"})," - Starting index."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"length"})," ",(0,r.jsx)(n.em,{children:"int"})," - Length of the video."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"max_lag"})," ",(0,r.jsx)(n.em,{children:"int"})," - Maximum lag."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"num_points"})," ",(0,r.jsx)(n.em,{children:"int"})," - Number of points."]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,r.jsx)(n.p,{children:"None"}),"\n",(0,r.jsx)(n.h4,{id:"gif",children:"gif"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:"def gif(\n config: str,\n pose_ref_index: int,\n parametrization: Parametrizations,\n subtract_background: bool = True,\n start: int | None = None,\n length: int = 500,\n max_lag: int = 30,\n label: str = 'community',\n file_format: str = '.mp4',\n crop_size: Tuple[int, int] = (300, 300)) -> None\n"})}),"\n",(0,r.jsx)(n.p,{children:"Create a GIF from the given configuration."}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"config"})," ",(0,r.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"pose_ref_index"})," ",(0,r.jsx)(n.em,{children:"int"})," - Pose reference index."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"subtract_background"})," ",(0,r.jsx)(n.em,{children:"bool, optional"})," - Whether to subtract background. Defaults to True."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"start"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Starting index. 
Defaults to None."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"length"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Length of the video. Defaults to 500."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"max_lag"})," ",(0,r.jsx)(n.em,{children:"int, optional"})," - Maximum lag. Defaults to 30."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"label"})," ",(0,r.jsx)(n.em,{children:"str, optional"})," - Label type [None, community, motif]. Defaults to 'community'."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"file_format"})," ",(0,r.jsx)(n.em,{children:"str, optional"})," - File format. Defaults to '.mp4'."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.code,{children:"crop_size"})," ",(0,r.jsx)(n.em,{children:"Tuple[int, int], optional"})," - Crop size. Defaults to (300,300)."]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,r.jsx)(n.p,{children:"None"})]})}function h(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(d,{...e})}):d(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>l,x:()=>o});var r=i(6540);const s={},t=r.createContext(s);function l(e){const n=r.useContext(t);return r.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function o(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(s):e.components||s:l(e.components),r.createElement(t.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/62763787.9acdd957.js b/assets/js/62763787.9acdd957.js new file mode 100644 index 00000000..815ccdd3 --- /dev/null +++ b/assets/js/62763787.9acdd957.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4077],{689:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>d,contentTitle:()=>o,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var s=i(4848),t=i(8453);const r={sidebar_label:"videowriter",title:"vame.analysis.videowriter"},o=void 0,l={id:"reference/vame/analysis/videowriter",title:"vame.analysis.videowriter",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/videowriter.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/videowriter",permalink:"/VAME/docs/reference/vame/analysis/videowriter",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"videowriter",title:"vame.analysis.videowriter"},sidebar:"docsSidebar",previous:{title:"umap_visualization",permalink:"/VAME/docs/reference/vame/analysis/umap_visualization"},next:{title:"new",permalink:"/VAME/docs/reference/vame/initialize_project/new"}},d={},c=[{value:"get_cluster_vid",id:"get_cluster_vid",level:4},{value:"motif_videos",id:"motif_videos",level:4},{value:"community_videos",id:"community_videos",level:4}];function a(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"get_cluster_vid",children:"get_cluster_vid"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'def get_cluster_vid(cfg: dict,\n path_to_file: str,\n file: str,\n n_cluster: int,\n videoType: str,\n flag: str,\n param: Parametrizations,\n output_video_type: str = ".mp4",\n tqdm_logger_stream: TqdmToLogger | None = None) -> None\n'})}),"\n",(0,s.jsx)(n.p,{children:"Generate cluster videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"path_to_file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Name of the file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str"})," - Type of input video."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"flag"})," ",(0,s.jsx)(n.em,{children:"str"})," - Flag indicating the type of video (motif or community)."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate cluster videos and save them to fs on project folder."}),"\n",(0,s.jsx)(n.h4,{id:"motif_videos",children:"motif_videos"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=MotifVideosFunctionSchema)\ndef motif_videos(config: Union[str, Path],\n parametrization: Parametrizations,\n videoType: str = '.mp4',\n output_video_type: str = '.mp4',\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate motif videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of video. Default is '.mp4'."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"output_video_type"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of output video. 
Default is '.mp4'."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate motif videos and save them to filesystem on project cluster_videos folder."}),"\n",(0,s.jsx)(n.h4,{id:"community_videos",children:"community_videos"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=CommunityVideosFunctionSchema)\ndef community_videos(config: Union[str, Path],\n parametrization: Parametrizations,\n videoType: str = '.mp4',\n save_logs: bool = False,\n output_video_type: str = '.mp4') -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate community videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of video. Default is '.mp4'."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate community videos and save them to filesystem on project community_videos folder."})]})}function m(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(a,{...e})}):a(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>o,x:()=>l});var s=i(6540);const t={},r=s.createContext(t);function o(e){const n=s.useContext(r);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function l(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(t):e.components||t:o(e.components),s.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/62763787.a2324226.js b/assets/js/62763787.a2324226.js deleted file mode 100644 index e39ab71c..00000000 --- a/assets/js/62763787.a2324226.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4077],{689:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>d,contentTitle:()=>r,default:()=>h,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var s=i(4848),t=i(8453);const o={sidebar_label:"videowriter",title:"vame.analysis.videowriter"},r=void 0,l={id:"reference/vame/analysis/videowriter",title:"vame.analysis.videowriter",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/videowriter.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/videowriter",permalink:"/VAME/docs/reference/vame/analysis/videowriter",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"videowriter",title:"vame.analysis.videowriter"},sidebar:"docsSidebar",previous:{title:"umap_visualization",permalink:"/VAME/docs/reference/vame/analysis/umap_visualization"},next:{title:"new",permalink:"/VAME/docs/reference/vame/initialize_project/new"}},d={},c=[{value:"get_cluster_vid",id:"get_cluster_vid",level:4},{value:"motif_videos",id:"motif_videos",level:4},{value:"community_videos",id:"community_videos",level:4}];function a(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"get_cluster_vid",children:"get_cluster_vid"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'def get_cluster_vid(cfg: dict,\n path_to_file: str,\n file: str,\n n_cluster: int,\n videoType: str,\n flag: str,\n output_video_type: str = ".mp4",\n tqdm_logger_stream: TqdmToLogger | None = None) -> None\n'})}),"\n",(0,s.jsx)(n.p,{children:"Generate cluster videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"path_to_file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Name of the file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str"})," - Type of input video."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"flag"})," ",(0,s.jsx)(n.em,{children:"str"})," - Flag indicating the type of video (motif or community)."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate cluster videos and save them to fs on project folder."}),"\n",(0,s.jsx)(n.h4,{id:"motif_videos",children:"motif_videos"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=MotifVideosFunctionSchema)\ndef motif_videos(config: Union[str, Path],\n videoType: str = '.mp4',\n output_video_type: str = '.mp4',\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate motif videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of video. Default is '.mp4'."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"output_video_type"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of output video. 
Default is '.mp4'."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate motif videos and save them to filesystem on project cluster_videos folder."}),"\n",(0,s.jsx)(n.h4,{id:"community_videos",children:"community_videos"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=CommunityVideosFunctionSchema)\ndef community_videos(config: Union[str, Path],\n videoType: str = '.mp4',\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate community videos."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"videoType"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of video. Default is '.mp4'."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Generate community videos and save them to filesystem on project community_videos folder."})]})}function h(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(a,{...e})}):a(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>r,x:()=>l});var s=i(6540);const t={},o=s.createContext(t);function r(e){const n=s.useContext(o);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function l(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(t):e.components||t:r(e.components),s.createElement(o.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/97262fb0.27469122.js b/assets/js/97262fb0.27469122.js deleted file mode 100644 index a36f3be7..00000000 --- a/assets/js/97262fb0.27469122.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4841],{2383:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>c,contentTitle:()=>o,default:()=>h,frontMatter:()=>r,metadata:()=>t,toc:()=>d});var l=i(4848),s=i(8453);const r={sidebar_label:"align_egocentrical",title:"vame.util.align_egocentrical"},o=void 0,t={id:"reference/vame/util/align_egocentrical",title:"vame.util.align_egocentrical",description:"Variational Animal Motion Embedding 0.1 Toolbox",source:"@site/docs/reference/vame/util/align_egocentrical.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/align_egocentrical",permalink:"/VAME/docs/reference/vame/util/align_egocentrical",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"align_egocentrical",title:"vame.util.align_egocentrical"},sidebar:"docsSidebar",previous:{title:"states",permalink:"/VAME/docs/reference/vame/schemas/states"},next:{title:"auxiliary",permalink:"/VAME/docs/reference/vame/util/auxiliary"}},c={},d=[{value:"align_mouse",id:"align_mouse",level:4},{value:"play_aligned_video",id:"play_aligned_video",level:4},{value:"alignment",id:"alignment",level:4},{value:"egocentric_alignment",id:"egocentric_alignment",level:4}];function a(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,s.R)(),...e.components};return(0,l.jsxs)(l.Fragment,{children:[(0,l.jsx)(n.p,{children:"Variational Animal Motion Embedding 0.1 Toolbox\n\xa9 K. Luxem & J. K\xfcrsch & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,l.jsx)(n.h4,{id:"align_mouse",children:"align_mouse"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-python",children:"def align_mouse(\n path_to_file: str,\n filename: str,\n video_format: str,\n crop_size: Tuple[int, int],\n pose_list: List[np.ndarray],\n pose_ref_index: Tuple[int, int],\n confidence: float,\n pose_flip_ref: Tuple[int, int],\n bg: np.ndarray,\n frame_count: int,\n use_video: bool = True,\n tqdm_stream: TqdmToLogger = None\n) -> Tuple[List[np.ndarray], List[List[np.ndarray]], np.ndarray]\n"})}),"\n",(0,l.jsx)(n.p,{children:"Align the mouse in the video frames."}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"path_to_file"})," ",(0,l.jsx)(n.em,{children:"str"})," - Path to the file directory."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"filename"})," ",(0,l.jsx)(n.em,{children:"str"})," - Name of the video file without the format."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"video_format"})," ",(0,l.jsx)(n.em,{children:"str"})," - Format of the video file."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"crop_size"})," ",(0,l.jsx)(n.em,{children:"Tuple[int, int]"})," - Size to crop the video frames."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"pose_list"})," ",(0,l.jsx)(n.em,{children:"List[np.ndarray]"})," - List of pose coordinates."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"pose_ref_index"})," ",(0,l.jsx)(n.em,{children:"Tuple[int, int]"})," - Pose reference indices."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"confidence"})," ",(0,l.jsx)(n.em,{children:"float"})," - Pose confidence threshold."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"pose_flip_ref"})," ",(0,l.jsx)(n.em,{children:"Tuple[int, int]"})," - Reference indices for flipping."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"bg"})," ",(0,l.jsx)(n.em,{children:"np.ndarray"})," - Background image."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"frame_count"})," ",(0,l.jsx)(n.em,{children:"int"})," - Number of frames to align."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"filename"}),"0 ",(0,l.jsx)(n.em,{children:"bool, optional"})," - bool if video should be cropped or DLC points only. 
Defaults to True."]}),"\n"]}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,l.jsx)(n.p,{children:"Tuple[List[np.ndarray], List[List[np.ndarray]], np.ndarray]: List of aligned images, list of aligned DLC points, and time series data."}),"\n",(0,l.jsx)(n.h4,{id:"play_aligned_video",children:"play_aligned_video"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-python",children:"def play_aligned_video(a: List[np.ndarray], n: List[List[np.ndarray]],\n frame_count: int) -> None\n"})}),"\n",(0,l.jsx)(n.p,{children:"Play the aligned video."}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"a"})," ",(0,l.jsx)(n.em,{children:"List[np.ndarray]"})," - List of aligned images."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"n"})," ",(0,l.jsx)(n.em,{children:"List[List[np.ndarray]]"})," - List of aligned DLC points."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"frame_count"})," ",(0,l.jsx)(n.em,{children:"int"})," - Number of frames in the video."]}),"\n"]}),"\n",(0,l.jsx)(n.h4,{id:"alignment",children:"alignment"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-python",children:"def alignment(\n path_to_file: str,\n filename: str,\n pose_ref_index: List[int],\n video_format: str,\n crop_size: Tuple[int, int],\n confidence: float,\n use_video: bool = False,\n check_video: bool = False,\n tqdm_stream: TqdmToLogger = None\n) -> Tuple[np.ndarray, List[np.ndarray]]\n"})}),"\n",(0,l.jsx)(n.p,{children:"Perform alignment of egocentric data."}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"path_to_file"})," ",(0,l.jsx)(n.em,{children:"str"})," - Path to the file directory."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"filename"})," ",(0,l.jsx)(n.em,{children:"str"})," - Name of the video file without the format."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"pose_ref_index"})," ",(0,l.jsx)(n.em,{children:"List[int]"})," - Pose reference indices."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"video_format"})," ",(0,l.jsx)(n.em,{children:"str"})," - Format of the video file."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"crop_size"})," ",(0,l.jsx)(n.em,{children:"Tuple[int, int]"})," - Size to crop the video frames."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"confidence"})," ",(0,l.jsx)(n.em,{children:"float"})," - Pose confidence threshold."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"use_video"})," ",(0,l.jsx)(n.em,{children:"bool, optional"})," - Whether to use video for alignment. Defaults to False."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"check_video"})," ",(0,l.jsx)(n.em,{children:"bool, optional"})," - Whether to check the aligned video. 
Defaults to False."]}),"\n"]}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,l.jsx)(n.p,{children:"Tuple[np.ndarray, List[np.ndarray]]: Aligned time series data and list of aligned frames."}),"\n",(0,l.jsx)(n.h4,{id:"egocentric_alignment",children:"egocentric_alignment"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-python",children:"@save_state(model=EgocentricAlignmentFunctionSchema)\ndef egocentric_alignment(config: str,\n pose_ref_index: list = [5, 6],\n crop_size: tuple = (300, 300),\n use_video: bool = False,\n video_format: str = '.mp4',\n check_video: bool = False,\n save_logs: bool = False) -> None\n"})}),"\n",(0,l.jsx)(n.p,{children:"Aligns egocentric data for VAME training"}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"config"})," ",(0,l.jsx)(n.em,{children:"str"})," - Path for the project config file."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"pose_ref_index"})," ",(0,l.jsx)(n.em,{children:"list, optional"})," - Pose reference index to be used to align. Defaults to [5,6]."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"crop_size"})," ",(0,l.jsx)(n.em,{children:"tuple, optional"})," - Size to crop the video. Defaults to (300,300)."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"use_video"})," ",(0,l.jsx)(n.em,{children:"bool, optional"})," - Weather to use video to do the post alignment. Defaults to False. # TODO check what to put in this docstring"]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"video_format"})," ",(0,l.jsx)(n.em,{children:"str, optional"})," - Video format, can be .mp4 or .avi. Defaults to '.mp4'."]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"check_video"})," ",(0,l.jsx)(n.em,{children:"bool, optional"})," - Weather to check the video. 
Defaults to False."]}),"\n"]}),"\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.strong,{children:"Raises"}),":"]}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.code,{children:"ValueError"})," - If the config.yaml indicates that the data is not egocentric."]}),"\n"]})]})}function h(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,l.jsx)(n,{...e,children:(0,l.jsx)(a,{...e})}):a(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>o,x:()=>t});var l=i(6540);const s={},r=l.createContext(s);function o(e){const n=l.useContext(r);return l.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function t(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(s):e.components||s:o(e.components),l.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/97262fb0.37c580e0.js b/assets/js/97262fb0.37c580e0.js new file mode 100644 index 00000000..ff81913e --- /dev/null +++ b/assets/js/97262fb0.37c580e0.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4841],{2383:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>c,contentTitle:()=>o,default:()=>h,frontMatter:()=>r,metadata:()=>t,toc:()=>d});var s=i(4848),l=i(8453);const r={sidebar_label:"align_egocentrical",title:"vame.util.align_egocentrical"},o=void 0,t={id:"reference/vame/util/align_egocentrical",title:"vame.util.align_egocentrical",description:"Variational Animal Motion Embedding 0.1 Toolbox",source:"@site/docs/reference/vame/util/align_egocentrical.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/align_egocentrical",permalink:"/VAME/docs/reference/vame/util/align_egocentrical",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"align_egocentrical",title:"vame.util.align_egocentrical"},sidebar:"docsSidebar",previous:{title:"states",permalink:"/VAME/docs/reference/vame/schemas/states"},next:{title:"auxiliary",permalink:"/VAME/docs/reference/vame/util/auxiliary"}},c={},d=[{value:"align_mouse",id:"align_mouse",level:4},{value:"play_aligned_video",id:"play_aligned_video",level:4},{value:"alignment",id:"alignment",level:4},{value:"egocentric_alignment",id:"egocentric_alignment",level:4}];function a(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,l.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 0.1 Toolbox\n\xa9 K. Luxem & J. K\xfcrsch & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"align_mouse",children:"align_mouse"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def align_mouse(\n path_to_file: str,\n filename: str,\n video_format: str,\n crop_size: Tuple[int, int],\n pose_list: List[np.ndarray],\n pose_ref_index: Tuple[int, int],\n confidence: float,\n pose_flip_ref: Tuple[int, int],\n bg: np.ndarray,\n frame_count: int,\n use_video: bool = True,\n tqdm_stream: TqdmToLogger = None\n) -> Tuple[List[np.ndarray], List[List[np.ndarray]], np.ndarray]\n"})}),"\n",(0,s.jsx)(n.p,{children:"Align the mouse in the video frames."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"path_to_file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the file directory."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"filename"})," ",(0,s.jsx)(n.em,{children:"str"})," - Name of the video file without the format."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"video_format"})," ",(0,s.jsx)(n.em,{children:"str"})," - Format of the video file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"crop_size"})," ",(0,s.jsx)(n.em,{children:"Tuple[int, int]"})," - Size to crop the video frames."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"pose_list"})," ",(0,s.jsx)(n.em,{children:"List[np.ndarray]"})," - List of pose coordinates."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"pose_ref_index"})," ",(0,s.jsx)(n.em,{children:"Tuple[int, int]"})," - Pose reference indices."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"confidence"})," ",(0,s.jsx)(n.em,{children:"float"})," - Pose confidence threshold."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"pose_flip_ref"})," ",(0,s.jsx)(n.em,{children:"Tuple[int, int]"})," - Reference indices for flipping."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"bg"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Background image."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"frame_count"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of frames to align."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"filename"}),"0 ",(0,s.jsx)(n.em,{children:"bool, optional"})," - bool if video should be cropped or DLC points only. 
Defaults to True."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"Tuple[List[np.ndarray], List[List[np.ndarray]], np.ndarray]: List of aligned images, list of aligned DLC points, and time series data."}),"\n",(0,s.jsx)(n.h4,{id:"play_aligned_video",children:"play_aligned_video"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def play_aligned_video(a: List[np.ndarray], n: List[List[np.ndarray]],\n frame_count: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Play the aligned video."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"a"})," ",(0,s.jsx)(n.em,{children:"List[np.ndarray]"})," - List of aligned images."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n"})," ",(0,s.jsx)(n.em,{children:"List[List[np.ndarray]]"})," - List of aligned DLC points."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"frame_count"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of frames in the video."]}),"\n"]}),"\n",(0,s.jsx)(n.h4,{id:"alignment",children:"alignment"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def alignment(\n path_to_file: str,\n filename: str,\n pose_ref_index: List[int],\n video_format: str,\n crop_size: Tuple[int, int],\n confidence: float,\n pose_estimation_filetype: PoseEstimationFiletype,\n path_to_pose_nwb_series_data: str = None,\n use_video: bool = False,\n check_video: bool = False,\n tqdm_stream: TqdmToLogger = None\n) -> Tuple[np.ndarray, List[np.ndarray]]\n"})}),"\n",(0,s.jsx)(n.p,{children:"Perform alignment of egocentric data."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"path_to_file"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the file directory."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"filename"})," ",(0,s.jsx)(n.em,{children:"str"})," - Name of the video file without the format."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"pose_ref_index"})," ",(0,s.jsx)(n.em,{children:"List[int]"})," - Pose reference indices."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"video_format"})," ",(0,s.jsx)(n.em,{children:"str"})," - Format of the video file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"crop_size"})," ",(0,s.jsx)(n.em,{children:"Tuple[int, int]"})," - Size to crop the video frames."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"confidence"})," ",(0,s.jsx)(n.em,{children:"float"})," - Pose confidence threshold."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"use_video"})," ",(0,s.jsx)(n.em,{children:"bool, optional"})," - Whether to use video for alignment. Defaults to False."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"check_video"})," ",(0,s.jsx)(n.em,{children:"bool, optional"})," - Whether to check the aligned video. 
Defaults to False."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"Tuple[np.ndarray, List[np.ndarray]]: Aligned time series data and list of aligned frames."}),"\n",(0,s.jsx)(n.h4,{id:"egocentric_alignment",children:"egocentric_alignment"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=EgocentricAlignmentFunctionSchema)\ndef egocentric_alignment(config: str,\n pose_ref_index: list = [5, 6],\n crop_size: tuple = (300, 300),\n use_video: bool = False,\n video_format: str = '.mp4',\n check_video: bool = False,\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Aligns egocentric data for VAME training"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path for the project config file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"pose_ref_index"})," ",(0,s.jsx)(n.em,{children:"list, optional"})," - Pose reference index to be used to align. Defaults to [5,6]."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"crop_size"})," ",(0,s.jsx)(n.em,{children:"tuple, optional"})," - Size to crop the video. Defaults to (300,300)."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"use_video"})," ",(0,s.jsx)(n.em,{children:"bool, optional"})," - Weather to use video to do the post alignment. Defaults to False. # TODO check what to put in this docstring"]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"video_format"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Video format, can be .mp4 or .avi. Defaults to '.mp4'."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"check_video"})," ",(0,s.jsx)(n.em,{children:"bool, optional"})," - Weather to check the video. 
Defaults to False."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Raises"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"ValueError"})," - If the config.yaml indicates that the data is not egocentric."]}),"\n"]})]})}function h(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(a,{...e})}):a(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>o,x:()=>t});var s=i(6540);const l={},r=s.createContext(l);function o(e){const n=s.useContext(r);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function t(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(l):e.components||l:o(e.components),s.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/99e0554e.bb3798a8.js b/assets/js/99e0554e.d5a0b519.js similarity index 80% rename from assets/js/99e0554e.bb3798a8.js rename to assets/js/99e0554e.d5a0b519.js index d6beb37b..f1893624 100644 --- a/assets/js/99e0554e.bb3798a8.js +++ b/assets/js/99e0554e.d5a0b519.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4641],{5733:(e,n,r)=>{r.r(n),r.d(n,{assets:()=>o,contentTitle:()=>c,default:()=>h,frontMatter:()=>l,metadata:()=>t,toc:()=>d});var s=r(4848),i=r(8453);const l={sidebar_label:"generative_functions",title:"vame.analysis.generative_functions"},c=void 0,t={id:"reference/vame/analysis/generative_functions",title:"vame.analysis.generative_functions",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/generative_functions.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"generative_functions",title:"vame.analysis.generative_functions"},sidebar:"docsSidebar",previous:{title:"community_analysis",permalink:"/VAME/docs/reference/vame/analysis/community_analysis"},next:{title:"gif_creator",permalink:"/VAME/docs/reference/vame/analysis/gif_creator"}},o={},d=[{value:"random_generative_samples_motif",id:"random_generative_samples_motif",level:4},{value:"random_generative_samples",id:"random_generative_samples",level:4},{value:"random_reconstruction_samples",id:"random_reconstruction_samples",level:4},{value:"visualize_cluster_center",id:"visualize_cluster_center",level:4},{value:"generative_model",id:"generative_model",level:4}];function a(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"random_generative_samples_motif",children:"random_generative_samples_motif"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_generative_samples_motif(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray,\n labels: np.ndarray,\n n_cluster: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random samples for motifs."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"labels"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"None"})," - Plot of generated samples."]}),"\n"]}),"\n",(0,s.jsx)(n.h4,{id:"random_generative_samples",children:"random_generative_samples"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_generative_samples(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random generative samples."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"random_reconstruction_samples",children:"random_reconstruction_samples"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_reconstruction_samples(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random reconstruction samples."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model to 
use."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"visualize_cluster_center",children:"visualize_cluster_center"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def visualize_cluster_center(cfg: dict, model: torch.nn.Module,\n cluster_center: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize cluster centers."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cluster_center"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Cluster centers."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"generative_model",children:"generative_model"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'@save_state(model=GenerativeModelFunctionSchema)\ndef generative_model(config: str,\n mode: str = "sampling",\n save_logs: bool = False) -> plt.Figure\n'})}),"\n",(0,s.jsx)(n.p,{children:"Generative model."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"mode"})," ",(0,s.jsx)(n.em,{children:"str, optional"}),' - Mode for generating samples. 
Defaults to "sampling".']}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"plt.Figure"})," - Plot of generated samples."]}),"\n"]})]})}function h(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(a,{...e})}):a(e)}},8453:(e,n,r)=>{r.d(n,{R:()=>c,x:()=>t});var s=r(6540);const i={},l=s.createContext(i);function c(e){const n=s.useContext(l);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function t(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(i):e.components||i:c(e.components),s.createElement(l.Provider,{value:n},e.children)}}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[4641],{5733:(e,n,r)=>{r.r(n),r.d(n,{assets:()=>o,contentTitle:()=>c,default:()=>h,frontMatter:()=>l,metadata:()=>t,toc:()=>a});var s=r(4848),i=r(8453);const l={sidebar_label:"generative_functions",title:"vame.analysis.generative_functions"},c=void 0,t={id:"reference/vame/analysis/generative_functions",title:"vame.analysis.generative_functions",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/generative_functions.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"generative_functions",title:"vame.analysis.generative_functions"},sidebar:"docsSidebar",previous:{title:"community_analysis",permalink:"/VAME/docs/reference/vame/analysis/community_analysis"},next:{title:"gif_creator",permalink:"/VAME/docs/reference/vame/analysis/gif_creator"}},o={},a=[{value:"random_generative_samples_motif",id:"random_generative_samples_motif",level:4},{value:"random_generative_samples",id:"random_generative_samples",level:4},{value:"random_reconstruction_samples",id:"random_reconstruction_samples",level:4},{value:"visualize_cluster_center",id:"visualize_cluster_center",level:4},{value:"generative_model",id:"generative_model",level:4}];function d(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"random_generative_samples_motif",children:"random_generative_samples_motif"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_generative_samples_motif(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray,\n labels: np.ndarray,\n n_cluster: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random samples for motifs."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"labels"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"None"})," - Plot of generated samples."]}),"\n"]}),"\n",(0,s.jsx)(n.h4,{id:"random_generative_samples",children:"random_generative_samples"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_generative_samples(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random generative samples."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"random_reconstruction_samples",children:"random_reconstruction_samples"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def random_reconstruction_samples(cfg: dict, model: torch.nn.Module,\n latent_vector: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Generate random reconstruction samples."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model to 
use."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"latent_vector"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Latent vectors."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"visualize_cluster_center",children:"visualize_cluster_center"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def visualize_cluster_center(cfg: dict, model: torch.nn.Module,\n cluster_center: np.ndarray) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize cluster centers."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration dictionary."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model"})," ",(0,s.jsx)(n.em,{children:"torch.nn.Module"})," - PyTorch model."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cluster_center"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Cluster centers."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"generative_model",children:"generative_model"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'@save_state(model=GenerativeModelFunctionSchema)\ndef generative_model(config: str,\n parametrization: Parametrizations,\n mode: str = "sampling",\n save_logs: bool = False) -> Dict[str, plt.Figure]\n'})}),"\n",(0,s.jsx)(n.p,{children:"Generative model."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"mode"})," ",(0,s.jsx)(n.em,{children:"str, optional"}),' - Mode for generating samples. 
Defaults to "sampling".']}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"Dict[str, plt.Figure]: Plots of generated samples for each parametrization."})]})}function h(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(d,{...e})}):d(e)}},8453:(e,n,r)=>{r.d(n,{R:()=>c,x:()=>t});var s=r(6540);const i={},l=s.createContext(i);function c(e){const n=s.useContext(l);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function t(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(i):e.components||i:c(e.components),s.createElement(l.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/a54c3d34.1ae020bc.js b/assets/js/a54c3d34.4bd6b137.js similarity index 91% rename from assets/js/a54c3d34.1ae020bc.js rename to assets/js/a54c3d34.4bd6b137.js index 9684acad..148e5afd 100644 --- a/assets/js/a54c3d34.1ae020bc.js +++ b/assets/js/a54c3d34.4bd6b137.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[1470],{617:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>a,contentTitle:()=>t,default:()=>h,frontMatter:()=>l,metadata:()=>c,toc:()=>d});var i=s(4848),r=s(8453);const l={sidebar_label:"community_analysis",title:"vame.analysis.community_analysis"},t=void 0,c={id:"reference/vame/analysis/community_analysis",title:"vame.analysis.community_analysis",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/community_analysis.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/community_analysis",permalink:"/VAME/docs/reference/vame/analysis/community_analysis",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"community_analysis",title:"vame.analysis.community_analysis"},sidebar:"docsSidebar",previous:{title:"API reference",permalink:"/VAME/docs/category/api-reference"},next:{title:"generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions"}},a={},d=[{value:"get_adjacency_matrix",id:"get_adjacency_matrix",level:4},{value:"get_transition_matrix",id:"get_transition_matrix",level:4},{value:"find_zero_labels",id:"find_zero_labels",level:4},{value:"augment_motif_timeseries",id:"augment_motif_timeseries",level:4},{value:"get_labels",id:"get_labels",level:4},{value:"get_community_label",id:"get_community_label",level:4},{value:"compute_transition_matrices",id:"compute_transition_matrices",level:4},{value:"create_community_bag",id:"create_community_bag",level:4},{value:"create_cohort_community_bag",id:"create_cohort_community_bag",level:4},{value:"get_community_labels",id:"get_community_labels",level:4},{value:"get_cohort_community_labels",id:"get_cohort_community_labels",level:4},{value:"community",id:"community",level:4}];function o(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,r.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xc2\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,i.jsx)(n.p,{children:"Updated 5/11/2022 with PH edits"}),"\n",(0,i.jsx)(n.h4,{id:"get_adjacency_matrix",children:"get_adjacency_matrix"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_adjacency_matrix(\n labels: np.ndarray,\n n_cluster: int) -> Tuple[np.ndarray, np.ndarray, np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Calculate the adjacency matrix, transition matrix, and temporal matrix."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Array of cluster labels."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"Tuple[np.ndarray, np.ndarray, np.ndarray]: Tuple containing adjacency matrix, transition matrix, and temporal matrix."}),"\n",(0,i.jsx)(n.h4,{id:"get_transition_matrix",children:"get_transition_matrix"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_transition_matrix(adjacency_matrix: np.ndarray,\n threshold: float = 0.0) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Compute the transition matrix from the adjacency matrix."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"adjacency_matrix"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Adjacency matrix."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"threshold"})," ",(0,i.jsx)(n.em,{children:"float, optional"})," - Threshold for considering transitions. 
Defaults to 0.0."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - Transition matrix."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"find_zero_labels",children:"find_zero_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def find_zero_labels(motif_usage: Tuple[np.ndarray, np.ndarray],\n n_cluster: int) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Find zero labels in motif usage and fill them."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"motif_usage"})," ",(0,i.jsx)(n.em,{children:"Tuple[np.ndarray, np.ndarray]"})," - 2D list where the first index is a unique list of motif used and the second index is the motif usage in frames."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - List of motif usage frames with 0's where motifs weren't used (array with zero labels filled)."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"augment_motif_timeseries",children:"augment_motif_timeseries"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def augment_motif_timeseries(label: np.ndarray,\n n_cluster: int) -> Tuple[np.ndarray, np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Augment motif time series by filling zero motifs."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"label"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Original label array."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"Tuple[np.ndarray, np.ndarray]: Augmented label array and indices of zero motifs."}),"\n",(0,i.jsx)(n.h4,{id:"get_labels",children:"get_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_labels(cfg: dict, files: List[str], model_name: str, n_cluster: int,\n parametrization: str) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Get cluster labels for given videos files."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cfg"})," ",(0,i.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of video files paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"model_name"})," ",(0,i.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"parametrization"})," ",(0,i.jsx)(n.em,{children:"str"})," - 
parametrization."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of cluster labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_community_label",children:"get_community_label"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_community_label(cfg: dict, files: List[str], model_name: str,\n n_cluster: int, parametrization: str) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Get community labels for given files."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cfg"})," ",(0,i.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of files paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"model_name"})," ",(0,i.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"parametrization"})," ",(0,i.jsx)(n.em,{children:"str"})," - parametrization."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - Array of community labels."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"compute_transition_matrices",children:"compute_transition_matrices"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def compute_transition_matrices(files: List[str], labels: List[np.ndarray],\n n_cluster: int) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Compute transition matrices for given files and labels."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of transition matrices."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"create_community_bag",children:"create_community_bag"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def create_community_bag(files: List[str], labels: List[np.ndarray],\n transition_matrices: List[np.ndarray], cut_tree: int,\n n_cluster: int) -> Tuple\n"})}),"\n",(0,i.jsx)(n.p,{children:"Create community bag for given files and labels (Markov chain to tree -> community detection)."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file 
paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"transition_matrices"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of transition matrices."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int"})," - Cut line for tree."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"Tuple"})," - Tuple containing list of community bags and list of trees."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"create_cohort_community_bag",children:"create_cohort_community_bag"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def create_cohort_community_bag(labels: List[np.ndarray],\n trans_mat_full: np.ndarray, cut_tree: int,\n n_cluster: int) -> Tuple\n"})}),"\n",(0,i.jsx)(n.p,{children:"Create cohort community bag for given labels, transition matrix, cut tree, and number of clusters.\n(markov chain to tree -> community detection)"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"trans_mat_full"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Full transition matrix."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int"})," - Cut line for tree."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"Tuple"})," - Tuple containing list of community bags and list of trees."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_community_labels",children:"get_community_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_community_labels(\n files: List[str], labels: List[np.ndarray],\n communities_all: List[List[List[int]]]) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Transform kmeans parameterized latent vector into communities. 
Get community labels for given files and community bags."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"communities_all"})," ",(0,i.jsx)(n.em,{children:"List[List[List[int]]]"})," - List of community bags."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of community labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_cohort_community_labels",children:"get_cohort_community_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_cohort_community_labels(\n files: List[str], labels: List[np.ndarray],\n communities_all: List[List[List[int]]]) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Transform kmeans parameterized latent vector into communities. Get cohort community labels for given labels, and community bags."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str], deprecated"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"communities_all"})," ",(0,i.jsx)(n.em,{children:"List[List[List[int]]]"})," - List of community bags."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of cohort community labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"community",children:"community"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"@save_state(model=CommunityFunctionSchema)\ndef community(config: str,\n cohort: bool = True,\n cut_tree: int | None = None,\n save_logs: bool = False) -> None\n"})}),"\n",(0,i.jsx)(n.p,{children:"Perform community analysis."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"config"})," ",(0,i.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cohort"})," ",(0,i.jsx)(n.em,{children:"bool, optional"})," - Flag indicating cohort analysis. Defaults to True."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int, optional"})," - Cut line for tree. 
Defaults to None."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"None"})]})}function h(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(o,{...e})}):o(e)}},8453:(e,n,s)=>{s.d(n,{R:()=>t,x:()=>c});var i=s(6540);const r={},l=i.createContext(r);function t(e){const n=i.useContext(l);return i.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function c(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(r):e.components||r:t(e.components),i.createElement(l.Provider,{value:n},e.children)}}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[1470],{617:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>a,contentTitle:()=>t,default:()=>h,frontMatter:()=>l,metadata:()=>c,toc:()=>d});var i=s(4848),r=s(8453);const l={sidebar_label:"community_analysis",title:"vame.analysis.community_analysis"},t=void 0,c={id:"reference/vame/analysis/community_analysis",title:"vame.analysis.community_analysis",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/community_analysis.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/community_analysis",permalink:"/VAME/docs/reference/vame/analysis/community_analysis",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"community_analysis",title:"vame.analysis.community_analysis"},sidebar:"docsSidebar",previous:{title:"API reference",permalink:"/VAME/docs/category/api-reference"},next:{title:"generative_functions",permalink:"/VAME/docs/reference/vame/analysis/generative_functions"}},a={},d=[{value:"get_adjacency_matrix",id:"get_adjacency_matrix",level:4},{value:"get_transition_matrix",id:"get_transition_matrix",level:4},{value:"find_zero_labels",id:"find_zero_labels",level:4},{value:"augment_motif_timeseries",id:"augment_motif_timeseries",level:4},{value:"get_labels",id:"get_labels",level:4},{value:"get_community_label",id:"get_community_label",level:4},{value:"compute_transition_matrices",id:"compute_transition_matrices",level:4},{value:"create_community_bag",id:"create_community_bag",level:4},{value:"create_cohort_community_bag",id:"create_cohort_community_bag",level:4},{value:"get_community_labels",id:"get_community_labels",level:4},{value:"get_cohort_community_labels",id:"get_cohort_community_labels",level:4},{value:"community",id:"community",level:4}];function o(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,r.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xc2\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,i.jsx)(n.p,{children:"Updated 5/11/2022 with PH edits"}),"\n",(0,i.jsx)(n.h4,{id:"get_adjacency_matrix",children:"get_adjacency_matrix"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_adjacency_matrix(\n labels: np.ndarray,\n n_cluster: int) -> Tuple[np.ndarray, np.ndarray, np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Calculate the adjacency matrix, transition matrix, and temporal matrix."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Array of cluster labels."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"Tuple[np.ndarray, np.ndarray, np.ndarray]: Tuple containing adjacency matrix, transition matrix, and temporal matrix."}),"\n",(0,i.jsx)(n.h4,{id:"get_transition_matrix",children:"get_transition_matrix"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_transition_matrix(adjacency_matrix: np.ndarray,\n threshold: float = 0.0) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Compute the transition matrix from the adjacency matrix."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"adjacency_matrix"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Adjacency matrix."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"threshold"})," ",(0,i.jsx)(n.em,{children:"float, optional"})," - Threshold for considering transitions. 
Defaults to 0.0."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - Transition matrix."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"find_zero_labels",children:"find_zero_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def find_zero_labels(motif_usage: Tuple[np.ndarray, np.ndarray],\n n_cluster: int) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Find zero labels in motif usage and fill them."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"motif_usage"})," ",(0,i.jsx)(n.em,{children:"Tuple[np.ndarray, np.ndarray]"})," - 2D list where the first index is a unique list of motif used and the second index is the motif usage in frames."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - List of motif usage frames with 0's where motifs weren't used (array with zero labels filled)."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"augment_motif_timeseries",children:"augment_motif_timeseries"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def augment_motif_timeseries(label: np.ndarray,\n n_cluster: int) -> Tuple[np.ndarray, np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Augment motif time series by filling zero motifs."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"label"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Original label array."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"Tuple[np.ndarray, np.ndarray]: Augmented label array and indices of zero motifs."}),"\n",(0,i.jsx)(n.h4,{id:"get_labels",children:"get_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_labels(cfg: dict, files: List[str], model_name: str, n_cluster: int,\n parametrization: str) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Get cluster labels for given videos files."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cfg"})," ",(0,i.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of video files paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"model_name"})," ",(0,i.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"parametrization"})," ",(0,i.jsx)(n.em,{children:"str"})," - 
parametrization."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of cluster labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_community_label",children:"get_community_label"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_community_label(cfg: dict, files: List[str], model_name: str,\n n_cluster: int, parametrization: str) -> np.ndarray\n"})}),"\n",(0,i.jsx)(n.p,{children:"Get community labels for given files."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cfg"})," ",(0,i.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of files paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"model_name"})," ",(0,i.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"parametrization"})," ",(0,i.jsx)(n.em,{children:"str"})," - parametrization."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"np.ndarray"})," - Array of community labels."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"compute_transition_matrices",children:"compute_transition_matrices"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def compute_transition_matrices(files: List[str], labels: List[np.ndarray],\n n_cluster: int) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Compute transition matrices for given files and labels."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of transition matrices."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"create_community_bag",children:"create_community_bag"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def create_community_bag(files: List[str], labels: List[np.ndarray],\n transition_matrices: List[np.ndarray], cut_tree: int,\n n_cluster: int) -> Tuple\n"})}),"\n",(0,i.jsx)(n.p,{children:"Create community bag for given files and labels (Markov chain to tree -> community detection)."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file 
paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"transition_matrices"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of transition matrices."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int"})," - Cut line for tree."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"Tuple"})," - Tuple containing list of community bags and list of trees."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"create_cohort_community_bag",children:"create_cohort_community_bag"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def create_cohort_community_bag(labels: List[np.ndarray],\n trans_mat_full: np.ndarray, cut_tree: int,\n n_cluster: int) -> Tuple\n"})}),"\n",(0,i.jsx)(n.p,{children:"Create cohort community bag for given labels, transition matrix, cut tree, and number of clusters.\n(markov chain to tree -> community detection)"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"trans_mat_full"})," ",(0,i.jsx)(n.em,{children:"np.ndarray"})," - Full transition matrix."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int"})," - Cut line for tree."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"n_cluster"})," ",(0,i.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"Tuple"})," - Tuple containing list of community bags and list of trees."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_community_labels",children:"get_community_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_community_labels(\n files: List[str], labels: List[np.ndarray],\n communities_all: List[List[List[int]]]) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Transform kmeans parameterized latent vector into communities. 
Get community labels for given files and community bags."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str]"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"communities_all"})," ",(0,i.jsx)(n.em,{children:"List[List[List[int]]]"})," - List of community bags."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of community labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"get_cohort_community_labels",children:"get_cohort_community_labels"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"def get_cohort_community_labels(\n files: List[str], labels: List[np.ndarray],\n communities_all: List[List[List[int]]]) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(n.p,{children:"Transform kmeans parameterized latent vector into communities. Get cohort community labels for given labels, and community bags."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"files"})," ",(0,i.jsx)(n.em,{children:"List[str], deprecated"})," - List of file paths."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"labels"})," ",(0,i.jsx)(n.em,{children:"List[np.ndarray]"})," - List of label arrays."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"communities_all"})," ",(0,i.jsx)(n.em,{children:"List[List[List[int]]]"})," - List of community bags."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"List[np.ndarray]"})," - List of cohort community labels for each file."]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"community",children:"community"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"@save_state(model=CommunityFunctionSchema)\ndef community(config: str,\n parametrization: Parametrizations,\n cohort: bool = True,\n cut_tree: int | None = None,\n save_logs: bool = False) -> None\n"})}),"\n",(0,i.jsx)(n.p,{children:"Perform community analysis."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"config"})," ",(0,i.jsx)(n.em,{children:"str"})," - Path to the configuration file."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cohort"})," ",(0,i.jsx)(n.em,{children:"bool, optional"})," - Flag indicating cohort analysis. Defaults to True."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"cut_tree"})," ",(0,i.jsx)(n.em,{children:"int, optional"})," - Cut line for tree. 
Defaults to None."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(n.p,{children:"None"})]})}function h(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(o,{...e})}):o(e)}},8453:(e,n,s)=>{s.d(n,{R:()=>t,x:()=>c});var i=s(6540);const r={},l=i.createContext(r);function t(e){const n=i.useContext(l);return i.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function c(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(r):e.components||r:t(e.components),i.createElement(l.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/ba2390ac.1d4d8ff9.js b/assets/js/ba2390ac.1d4d8ff9.js deleted file mode 100644 index a42e4ec3..00000000 --- a/assets/js/ba2390ac.1d4d8ff9.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[3274],{2175:(n,e,r)=>{r.r(e),r.d(e,{assets:()=>d,contentTitle:()=>a,default:()=>p,frontMatter:()=>l,metadata:()=>t,toc:()=>c});var i=r(4848),s=r(8453);const l={sidebar_label:"data_manipulation",title:"vame.util.data_manipulation"},a=void 0,t={id:"reference/vame/util/data_manipulation",title:"vame.util.data_manipulation",description:"consecutive",source:"@site/docs/reference/vame/util/data_manipulation.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/data_manipulation",permalink:"/VAME/docs/reference/vame/util/data_manipulation",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"data_manipulation",title:"vame.util.data_manipulation"},sidebar:"docsSidebar",previous:{title:"csv_to_npy",permalink:"/VAME/docs/reference/vame/util/csv_to_npy"},next:{title:"gif_pose_helper",permalink:"/VAME/docs/reference/vame/util/gif_pose_helper"}},d={},c=[{value:"consecutive",id:"consecutive",level:4},{value:"nan_helper",id:"nan_helper",level:4},{value:"interpol_all_nans",id:"interpol_all_nans",level:4},{value:"interpol_first_rows_nans",id:"interpol_first_rows_nans",level:4},{value:"crop_and_flip",id:"crop_and_flip",level:4},{value:"background",id:"background",level:4}];function o(n){const e={code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,s.R)(),...n.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(e.h4,{id:"consecutive",children:"consecutive"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def consecutive(data: np.ndarray, stepsize: int = 1) -> List[np.ndarray]\n"})}),"\n",(0,i.jsx)(e.p,{children:"Find consecutive sequences in the data array."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"data"})," ",(0,i.jsx)(e.em,{children:"np.ndarray"})," - Input array."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"stepsize"})," ",(0,i.jsx)(e.em,{children:"int, optional"})," - Step size. 
Defaults to 1."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"List[np.ndarray]"})," - List of consecutive sequences."]}),"\n"]}),"\n",(0,i.jsx)(e.h4,{id:"nan_helper",children:"nan_helper"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def nan_helper(y: np.ndarray) -> Tuple\n"})}),"\n",(0,i.jsx)(e.p,{children:"Identifies indices of NaN values in an array and provides a function to convert them to non-NaN indices."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"y"})," ",(0,i.jsx)(e.em,{children:"np.ndarray"})," - Input array containing NaN values."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(e.p,{children:"Tuple[np.ndarray, Union[np.ndarray, None]]: A tuple containing two elements:"}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsx)(e.li,{children:"An array of boolean values indicating the positions of NaN values."}),"\n",(0,i.jsx)(e.li,{children:"A lambda function to convert NaN indices to non-NaN indices."}),"\n"]}),"\n",(0,i.jsx)(e.h4,{id:"interpol_all_nans",children:"interpol_all_nans"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def interpol_all_nans(arr: np.ndarray) -> np.ndarray\n"})}),"\n",(0,i.jsx)(e.p,{children:"Interpolates all NaN values in the given array."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"arr"})," ",(0,i.jsx)(e.em,{children:"np.ndarray"})," - Input array containing NaN values."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"np.ndarray"})," - Array with NaN values replaced by interpolated values."]}),"\n"]}),"\n",(0,i.jsx)(e.h4,{id:"interpol_first_rows_nans",children:"interpol_first_rows_nans"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def interpol_first_rows_nans(arr: np.ndarray) -> np.ndarray\n"})}),"\n",(0,i.jsx)(e.p,{children:"Interpolates NaN values in the given array."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"arr"})," ",(0,i.jsx)(e.em,{children:"np.ndarray"})," - Input array with NaN values."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"np.ndarray"})," - Array with interpolated NaN values."]}),"\n"]}),"\n",(0,i.jsx)(e.h4,{id:"crop_and_flip",children:"crop_and_flip"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def crop_and_flip(\n rect: Tuple, src: np.ndarray, points: List[np.ndarray],\n ref_index: Tuple[int, int]) -> Tuple[np.ndarray, List[np.ndarray]]\n"})}),"\n",(0,i.jsx)(e.p,{children:"Crop and flip the image based on the given rectangle and points."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"rect"})," 
",(0,i.jsx)(e.em,{children:"Tuple"})," - Rectangle coordinates (center, size, theta)."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"src"})," ",(0,i.jsx)(e.em,{children:"np.ndarray"})," - Source image."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"points"})," ",(0,i.jsx)(e.em,{children:"List[np.ndarray]"})," - List of points."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"ref_index"})," ",(0,i.jsx)(e.em,{children:"Tuple[int, int]"})," - Reference indices for alignment."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsx)(e.p,{children:"Tuple[np.ndarray, List[np.ndarray]]: Cropped and flipped image, and shifted points."}),"\n",(0,i.jsx)(e.h4,{id:"background",children:"background"}),"\n",(0,i.jsx)(e.pre,{children:(0,i.jsx)(e.code,{className:"language-python",children:"def background(path_to_file: str,\n filename: str,\n file_format: str = '.mp4',\n num_frames: int = 1000,\n save_background: bool = True) -> np.ndarray\n"})}),"\n",(0,i.jsx)(e.p,{children:"Compute background image from fixed camera."}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"path_to_file"})," ",(0,i.jsx)(e.em,{children:"str"})," - Path to the directory containing the video files."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"filename"})," ",(0,i.jsx)(e.em,{children:"str"})," - Name of the video file."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"file_format"})," ",(0,i.jsx)(e.em,{children:"str, optional"})," - Format of the video file. Defaults to '.mp4'."]}),"\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"num_frames"})," ",(0,i.jsx)(e.em,{children:"int, optional"})," - Number of frames to use for background computation. 
Defaults to 1000."]}),"\n"]}),"\n",(0,i.jsxs)(e.p,{children:[(0,i.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,i.jsxs)(e.ul,{children:["\n",(0,i.jsxs)(e.li,{children:[(0,i.jsx)(e.code,{children:"np.ndarray"})," - Background image."]}),"\n"]})]})}function p(n={}){const{wrapper:e}={...(0,s.R)(),...n.components};return e?(0,i.jsx)(e,{...n,children:(0,i.jsx)(o,{...n})}):o(n)}},8453:(n,e,r)=>{r.d(e,{R:()=>a,x:()=>t});var i=r(6540);const s={},l=i.createContext(s);function a(n){const e=i.useContext(l);return i.useMemo((function(){return"function"==typeof n?n(e):{...e,...n}}),[e,n])}function t(n){let e;return e=n.disableParentContext?"function"==typeof n.components?n.components(s):n.components||s:a(n.components),i.createElement(l.Provider,{value:e},n.children)}}}]); \ No newline at end of file diff --git a/assets/js/ba2390ac.9dbc7be1.js b/assets/js/ba2390ac.9dbc7be1.js new file mode 100644 index 00000000..5f771d46 --- /dev/null +++ b/assets/js/ba2390ac.9dbc7be1.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[3274],{2175:(n,e,r)=>{r.r(e),r.d(e,{assets:()=>d,contentTitle:()=>a,default:()=>h,frontMatter:()=>l,metadata:()=>t,toc:()=>c});var s=r(4848),i=r(8453);const l={sidebar_label:"data_manipulation",title:"vame.util.data_manipulation"},a=void 0,t={id:"reference/vame/util/data_manipulation",title:"vame.util.data_manipulation",description:"get\\pose\\data\\from\\nwb\\_file",source:"@site/docs/reference/vame/util/data_manipulation.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/data_manipulation",permalink:"/VAME/docs/reference/vame/util/data_manipulation",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"data_manipulation",title:"vame.util.data_manipulation"},sidebar:"docsSidebar",previous:{title:"csv_to_npy",permalink:"/VAME/docs/reference/vame/util/csv_to_npy"},next:{title:"gif_pose_helper",permalink:"/VAME/docs/reference/vame/util/gif_pose_helper"}},d={},c=[{value:"get_pose_data_from_nwb_file",id:"get_pose_data_from_nwb_file",level:4},{value:"consecutive",id:"consecutive",level:4},{value:"nan_helper",id:"nan_helper",level:4},{value:"interpol_all_nans",id:"interpol_all_nans",level:4},{value:"interpol_first_rows_nans",id:"interpol_first_rows_nans",level:4},{value:"crop_and_flip",id:"crop_and_flip",level:4},{value:"background",id:"background",level:4}];function o(n){const e={code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...n.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(e.h4,{id:"get_pose_data_from_nwb_file",children:"get_pose_data_from_nwb_file"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def get_pose_data_from_nwb_file(\n nwbfile: NWBFile, path_to_pose_nwb_series_data: str) -> LabelledDict\n"})}),"\n",(0,s.jsx)(e.p,{children:"Get pose data from nwb file using a inside path to the nwb data."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"nwbfile"})," ",(0,s.jsx)(e.em,{children:"NWBFile"})," - NWB file object."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"path_to_pose_nwb_series_data"})," ",(0,s.jsx)(e.em,{children:"str"})," - Path to the pose data inside the nwb file."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"LabelledDict"})," - 
Pose data."]}),"\n"]}),"\n",(0,s.jsx)(e.h4,{id:"consecutive",children:"consecutive"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def consecutive(data: np.ndarray, stepsize: int = 1) -> List[np.ndarray]\n"})}),"\n",(0,s.jsx)(e.p,{children:"Find consecutive sequences in the data array."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"data"})," ",(0,s.jsx)(e.em,{children:"np.ndarray"})," - Input array."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"stepsize"})," ",(0,s.jsx)(e.em,{children:"int, optional"})," - Step size. Defaults to 1."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"List[np.ndarray]"})," - List of consecutive sequences."]}),"\n"]}),"\n",(0,s.jsx)(e.h4,{id:"nan_helper",children:"nan_helper"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def nan_helper(y: np.ndarray) -> Tuple\n"})}),"\n",(0,s.jsx)(e.p,{children:"Identifies indices of NaN values in an array and provides a function to convert them to non-NaN indices."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"y"})," ",(0,s.jsx)(e.em,{children:"np.ndarray"})," - Input array containing NaN values."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(e.p,{children:"Tuple[np.ndarray, Union[np.ndarray, None]]: A tuple containing two elements:"}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsx)(e.li,{children:"An array of boolean values indicating the positions of NaN values."}),"\n",(0,s.jsx)(e.li,{children:"A lambda function to convert NaN indices to non-NaN indices."}),"\n"]}),"\n",(0,s.jsx)(e.h4,{id:"interpol_all_nans",children:"interpol_all_nans"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def interpol_all_nans(arr: np.ndarray) -> np.ndarray\n"})}),"\n",(0,s.jsx)(e.p,{children:"Interpolates all NaN values in the given array."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"arr"})," ",(0,s.jsx)(e.em,{children:"np.ndarray"})," - Input array containing NaN values."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"np.ndarray"})," - Array with NaN values replaced by interpolated values."]}),"\n"]}),"\n",(0,s.jsx)(e.h4,{id:"interpol_first_rows_nans",children:"interpol_first_rows_nans"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def interpol_first_rows_nans(arr: np.ndarray) -> np.ndarray\n"})}),"\n",(0,s.jsx)(e.p,{children:"Interpolates NaN values in the given array."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"arr"})," ",(0,s.jsx)(e.em,{children:"np.ndarray"})," - Input array with NaN 
values."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"np.ndarray"})," - Array with interpolated NaN values."]}),"\n"]}),"\n",(0,s.jsx)(e.h4,{id:"crop_and_flip",children:"crop_and_flip"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def crop_and_flip(\n rect: Tuple, src: np.ndarray, points: List[np.ndarray],\n ref_index: Tuple[int, int]) -> Tuple[np.ndarray, List[np.ndarray]]\n"})}),"\n",(0,s.jsx)(e.p,{children:"Crop and flip the image based on the given rectangle and points."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"rect"})," ",(0,s.jsx)(e.em,{children:"Tuple"})," - Rectangle coordinates (center, size, theta)."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"src"})," ",(0,s.jsx)(e.em,{children:"np.ndarray"})," - Source image."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"points"})," ",(0,s.jsx)(e.em,{children:"List[np.ndarray]"})," - List of points."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"ref_index"})," ",(0,s.jsx)(e.em,{children:"Tuple[int, int]"})," - Reference indices for alignment."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(e.p,{children:"Tuple[np.ndarray, List[np.ndarray]]: Cropped and flipped image, and shifted points."}),"\n",(0,s.jsx)(e.h4,{id:"background",children:"background"}),"\n",(0,s.jsx)(e.pre,{children:(0,s.jsx)(e.code,{className:"language-python",children:"def background(path_to_file: str,\n filename: str,\n file_format: str = '.mp4',\n num_frames: int = 1000,\n save_background: bool = True) -> np.ndarray\n"})}),"\n",(0,s.jsx)(e.p,{children:"Compute background image from fixed camera."}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"path_to_file"})," ",(0,s.jsx)(e.em,{children:"str"})," - Path to the directory containing the video files."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"filename"})," ",(0,s.jsx)(e.em,{children:"str"})," - Name of the video file."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"file_format"})," ",(0,s.jsx)(e.em,{children:"str, optional"})," - Format of the video file. Defaults to '.mp4'."]}),"\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"num_frames"})," ",(0,s.jsx)(e.em,{children:"int, optional"})," - Number of frames to use for background computation. 
Defaults to 1000."]}),"\n"]}),"\n",(0,s.jsxs)(e.p,{children:[(0,s.jsx)(e.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(e.ul,{children:["\n",(0,s.jsxs)(e.li,{children:[(0,s.jsx)(e.code,{children:"np.ndarray"})," - Background image."]}),"\n"]})]})}function h(n={}){const{wrapper:e}={...(0,i.R)(),...n.components};return e?(0,s.jsx)(e,{...n,children:(0,s.jsx)(o,{...n})}):o(n)}},8453:(n,e,r)=>{r.d(e,{R:()=>a,x:()=>t});var s=r(6540);const i={},l=s.createContext(i);function a(n){const e=s.useContext(l);return s.useMemo((function(){return"function"==typeof n?n(e):{...e,...n}}),[e,n])}function t(n){let e;return e=n.disableParentContext?"function"==typeof n.components?n.components(i):n.components||i:a(n.components),s.createElement(l.Provider,{value:e},n.children)}}}]); \ No newline at end of file diff --git a/assets/js/c4ea8f9a.26387692.js b/assets/js/c4ea8f9a.26387692.js new file mode 100644 index 00000000..123a8c5b --- /dev/null +++ b/assets/js/c4ea8f9a.26387692.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[5908],{5337:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>c,contentTitle:()=>s,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>l});var o=t(4848),i=t(8453);const r={sidebar_label:"csv_to_npy",title:"vame.util.csv_to_npy"},s=void 0,a={id:"reference/vame/util/csv_to_npy",title:"vame.util.csv_to_npy",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/util/csv_to_npy.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/csv_to_npy",permalink:"/VAME/docs/reference/vame/util/csv_to_npy",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"csv_to_npy",title:"vame.util.csv_to_npy"},sidebar:"docsSidebar",previous:{title:"auxiliary",permalink:"/VAME/docs/reference/vame/util/auxiliary"},next:{title:"data_manipulation",permalink:"/VAME/docs/reference/vame/util/data_manipulation"}},c={},l=[{value:"pose_to_numpy",id:"pose_to_numpy",level:4}];function u(e){const n={a:"a",code:"code",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,o.jsx)(n.h4,{id:"pose_to_numpy",children:"pose_to_numpy"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-python",children:"@save_state(model=PoseToNumpyFunctionSchema)\ndef pose_to_numpy(config: str, save_logs=False) -> None\n"})}),"\n",(0,o.jsx)(n.p,{children:"Converts a pose-estimation.csv file to a numpy array. Note that this code is only useful for data which is a priori egocentric, i.e. 
head-fixed\nor otherwise restrained animals."}),"\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.strong,{children:"Raises"}),":"]}),"\n",(0,o.jsxs)(n.ul,{children:["\n",(0,o.jsxs)(n.li,{children:[(0,o.jsx)(n.code,{children:"ValueError"})," - If the config.yaml file indicates that the data is not egocentric."]}),"\n"]})]})}function d(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(u,{...e})}):u(e)}},8453:(e,n,t)=>{t.d(n,{R:()=>s,x:()=>a});var o=t(6540);const i={},r=o.createContext(i);function s(e){const n=o.useContext(r);return o.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function a(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(i):e.components||i:s(e.components),o.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/c4ea8f9a.8dcedad6.js b/assets/js/c4ea8f9a.8dcedad6.js deleted file mode 100644 index b7e118bc..00000000 --- a/assets/js/c4ea8f9a.8dcedad6.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[5908],{5337:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>c,contentTitle:()=>s,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>l});var i=t(4848),o=t(8453);const r={sidebar_label:"csv_to_npy",title:"vame.util.csv_to_npy"},s=void 0,a={id:"reference/vame/util/csv_to_npy",title:"vame.util.csv_to_npy",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/util/csv_to_npy.md",sourceDirName:"reference/vame/util",slug:"/reference/vame/util/csv_to_npy",permalink:"/VAME/docs/reference/vame/util/csv_to_npy",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"csv_to_npy",title:"vame.util.csv_to_npy"},sidebar:"docsSidebar",previous:{title:"auxiliary",permalink:"/VAME/docs/reference/vame/util/auxiliary"},next:{title:"data_manipulation",permalink:"/VAME/docs/reference/vame/util/data_manipulation"}},c={},l=[{value:"csv_to_numpy",id:"csv_to_numpy",level:4}];function u(e){const n={a:"a",code:"code",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,i.jsx)(n.h4,{id:"csv_to_numpy",children:"csv_to_numpy"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-python",children:"@save_state(model=CsvToNumpyFunctionSchema)\ndef csv_to_numpy(config: str, save_logs=False) -> None\n"})}),"\n",(0,i.jsx)(n.p,{children:"Converts a pose-estimation.csv file to a numpy array. Note that this code is only useful for data which is a priori egocentric, i.e. 
head-fixed\nor otherwise restrained animals."}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Raises"}),":"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.code,{children:"ValueError"})," - If the config.yaml file indicates that the data is not egocentric."]}),"\n"]})]})}function d(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(u,{...e})}):u(e)}},8453:(e,n,t)=>{t.d(n,{R:()=>s,x:()=>a});var i=t(6540);const o={},r=i.createContext(o);function s(e){const n=i.useContext(r);return i.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function a(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(o):e.components||o:s(e.components),i.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/f265b8db.703a06c5.js b/assets/js/f265b8db.703a06c5.js deleted file mode 100644 index 1ab89690..00000000 --- a/assets/js/f265b8db.703a06c5.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[1608],{7346:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>c,contentTitle:()=>a,default:()=>m,frontMatter:()=>r,metadata:()=>d,toc:()=>t});var s=i(4848),l=i(8453);const r={sidebar_label:"umap",title:"vame.analysis.umap"},a=void 0,d={id:"reference/vame/analysis/umap",title:"vame.analysis.umap",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/umap.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/umap",permalink:"/VAME/docs/reference/vame/analysis/umap",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"umap",title:"vame.analysis.umap"},sidebar:"docsSidebar",previous:{title:"tree_hierarchy",permalink:"/VAME/docs/reference/vame/analysis/tree_hierarchy"},next:{title:"umap_visualization",permalink:"/VAME/docs/reference/vame/analysis/umap_visualization"}},c={},t=[{value:"umap_embedding",id:"umap_embedding",level:4},{value:"umap_vis_community_labels",id:"umap_vis_community_labels",level:4},{value:"umap_vis",id:"umap_vis",level:4},{value:"umap_label_vis",id:"umap_label_vis",level:4},{value:"umap_vis_comm",id:"umap_vis_comm",level:4},{value:"visualization",id:"visualization",level:4}];function o(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,l.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"umap_embedding",children:"umap_embedding"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_embedding(cfg: dict, file: str, model_name: str, n_cluster: int,\n parametrization: str) -> np.ndarray\n"})}),"\n",(0,s.jsx)(n.p,{children:"Perform UMAP embedding for given file and parameters."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"file"})," ",(0,s.jsx)(n.em,{children:"str"})," - File path."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model_name"})," ",(0,s.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"parametrization"})," ",(0,s.jsx)(n.em,{children:"str"})," - parametrization."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"np.ndarray"})," - UMAP embedding."]}),"\n"]}),"\n",(0,s.jsx)(n.h4,{id:"umap_vis_community_labels",children:"umap_vis_community_labels"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_vis_community_labels(cfg: dict, embed: np.ndarray,\n community_labels_all: np.ndarray,\n save_path: str | None) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Create plotly visualizaton of UMAP embedding with community labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"community_labels_all"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Community labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"save_path"})," - Path to save the plot. 
If None it will not save the plot."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None"}),"\n",(0,s.jsx)(n.h4,{id:"umap_vis",children:"umap_vis"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_vis(embed: np.ndarray, num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding without labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Plot Visualization of UMAP embedding."}),"\n",(0,s.jsx)(n.h4,{id:"umap_label_vis",children:"umap_label_vis"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_label_vis(embed: np.ndarray, label: np.ndarray, n_cluster: int,\n num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding with motif labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"label"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Motif labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"fig - Plot figure of UMAP visualization embedding with motif labels."}),"\n",(0,s.jsx)(n.h4,{id:"umap_vis_comm",children:"umap_vis_comm"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_vis_comm(embed: np.ndarray, community_label: np.ndarray,\n num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding with community labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"community_label"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Community labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"fig - Plot figure of UMAP visualization embedding with community labels."}),"\n",(0,s.jsx)(n.h4,{id:"visualization",children:"visualization"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=VisualizationFunctionSchema)\ndef visualization(config: Union[str, Path],\n 
label: Optional[str] = None,\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embeddings based on configuration settings."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"label"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of labels to visualize. Default is None."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Plot Visualization of UMAP embeddings."})]})}function m(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(o,{...e})}):o(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>a,x:()=>d});var s=i(6540);const l={},r=s.createContext(l);function a(e){const n=s.useContext(r);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function d(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(l):e.components||l:a(e.components),s.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/f265b8db.a1ee1a3e.js b/assets/js/f265b8db.a1ee1a3e.js new file mode 100644 index 00000000..62e687e7 --- /dev/null +++ b/assets/js/f265b8db.a1ee1a3e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[1608],{7346:(e,n,i)=>{i.r(n),i.d(n,{assets:()=>t,contentTitle:()=>a,default:()=>m,frontMatter:()=>r,metadata:()=>d,toc:()=>c});var s=i(4848),l=i(8453);const r={sidebar_label:"umap",title:"vame.analysis.umap"},a=void 0,d={id:"reference/vame/analysis/umap",title:"vame.analysis.umap",description:"Variational Animal Motion Embedding 1.0-alpha Toolbox",source:"@site/docs/reference/vame/analysis/umap.md",sourceDirName:"reference/vame/analysis",slug:"/reference/vame/analysis/umap",permalink:"/VAME/docs/reference/vame/analysis/umap",draft:!1,unlisted:!1,tags:[],version:"current",frontMatter:{sidebar_label:"umap",title:"vame.analysis.umap"},sidebar:"docsSidebar",previous:{title:"tree_hierarchy",permalink:"/VAME/docs/reference/vame/analysis/tree_hierarchy"},next:{title:"umap_visualization",permalink:"/VAME/docs/reference/vame/analysis/umap_visualization"}},t={},c=[{value:"umap_embedding",id:"umap_embedding",level:4},{value:"umap_vis",id:"umap_vis",level:4},{value:"umap_label_vis",id:"umap_label_vis",level:4},{value:"umap_vis_comm",id:"umap_vis_comm",level:4},{value:"visualization",id:"visualization",level:4}];function o(e){const n={a:"a",code:"code",em:"em",h4:"h4",li:"li",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,l.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"Variational Animal Motion Embedding 1.0-alpha Toolbox\n\xa9 K. Luxem & P. 
Bauer, Department of Cellular Neuroscience\nLeibniz Institute for Neurobiology, Magdeburg, Germany"}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://github.com/LINCellularNeuroscience/VAME",children:"https://github.com/LINCellularNeuroscience/VAME"}),"\nLicensed under GNU General Public License v3.0"]}),"\n",(0,s.jsx)(n.h4,{id:"umap_embedding",children:"umap_embedding"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_embedding(cfg: dict, file: str, model_name: str, n_cluster: int,\n parametrization: str) -> np.ndarray\n"})}),"\n",(0,s.jsx)(n.p,{children:"Perform UMAP embedding for given file and parameters."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"cfg"})," ",(0,s.jsx)(n.em,{children:"dict"})," - Configuration parameters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"file"})," ",(0,s.jsx)(n.em,{children:"str"})," - File path."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"model_name"})," ",(0,s.jsx)(n.em,{children:"str"})," - Model name."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"parametrization"})," ",(0,s.jsx)(n.em,{children:"str"})," - parametrization."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"np.ndarray"})," - UMAP embedding."]}),"\n"]}),"\n",(0,s.jsx)(n.h4,{id:"umap_vis",children:"umap_vis"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_vis(embed: np.ndarray, num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding without labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Plot Visualization of UMAP embedding."}),"\n",(0,s.jsx)(n.h4,{id:"umap_label_vis",children:"umap_label_vis"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_label_vis(embed: np.ndarray, label: np.ndarray, n_cluster: int,\n num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding with motif labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"label"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Motif labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"n_cluster"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of clusters."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to 
visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"fig - Plot figure of UMAP visualization embedding with motif labels."}),"\n",(0,s.jsx)(n.h4,{id:"umap_vis_comm",children:"umap_vis_comm"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"def umap_vis_comm(embed: np.ndarray, community_label: np.ndarray,\n num_points: int) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embedding with community labels."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"embed"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - UMAP embedding."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"community_label"})," ",(0,s.jsx)(n.em,{children:"np.ndarray"})," - Community labels."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"num_points"})," ",(0,s.jsx)(n.em,{children:"int"})," - Number of data points to visualize."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"fig - Plot figure of UMAP visualization embedding with community labels."}),"\n",(0,s.jsx)(n.h4,{id:"visualization",children:"visualization"}),"\n",(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:"@save_state(model=VisualizationFunctionSchema)\ndef visualization(config: Union[str, Path],\n parametrization: Parametrizations,\n label: Optional[str] = None,\n save_logs: bool = False) -> None\n"})}),"\n",(0,s.jsx)(n.p,{children:"Visualize UMAP embeddings based on configuration settings."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Arguments"}),":"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"config"})," ",(0,s.jsx)(n.em,{children:"Union[str, Path]"})," - Path to the configuration file."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.code,{children:"label"})," ",(0,s.jsx)(n.em,{children:"str, optional"})," - Type of labels to visualize. 
Default is None."]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Returns"}),":"]}),"\n",(0,s.jsx)(n.p,{children:"None - Plot Visualization of UMAP embeddings."})]})}function m(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(o,{...e})}):o(e)}},8453:(e,n,i)=>{i.d(n,{R:()=>a,x:()=>d});var s=i(6540);const l={},r=s.createContext(l);function a(e){const n=s.useContext(r);return s.useMemo((function(){return"function"==typeof e?e(n):{...n,...e}}),[n,e])}function d(e){let n;return n=e.disableParentContext?"function"==typeof e.components?e.components(l):e.components||l:a(e.components),s.createElement(r.Provider,{value:n},e.children)}}}]); \ No newline at end of file diff --git a/assets/js/f2b90fa0.403b7410.js b/assets/js/f2b90fa0.79025041.js similarity index 95% rename from assets/js/f2b90fa0.403b7410.js rename to assets/js/f2b90fa0.79025041.js index 2ab299a6..19f7b556 100644 --- a/assets/js/f2b90fa0.403b7410.js +++ b/assets/js/f2b90fa0.79025041.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[2865],{7882:e=>{e.exports=JSON.parse('{"version":{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"docsSidebar":[{"type":"link","label":"Introduction","href":"/VAME/docs/intro","docId":"intro","unlisted":false},{"type":"category","label":"Getting Started","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Installation","href":"/VAME/docs/getting_started/installation","docId":"getting_started/installation","unlisted":false},{"type":"link","label":"Running VAME Workflow","href":"/VAME/docs/getting_started/running","docId":"getting_started/running","unlisted":false}],"href":"/VAME/docs/category/getting-started"},{"type":"category","label":"API 
reference","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"vame","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"analysis","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"community_analysis","href":"/VAME/docs/reference/vame/analysis/community_analysis","docId":"reference/vame/analysis/community_analysis","unlisted":false},{"type":"link","label":"generative_functions","href":"/VAME/docs/reference/vame/analysis/generative_functions","docId":"reference/vame/analysis/generative_functions","unlisted":false},{"type":"link","label":"gif_creator","href":"/VAME/docs/reference/vame/analysis/gif_creator","docId":"reference/vame/analysis/gif_creator","unlisted":false},{"type":"link","label":"pose_segmentation","href":"/VAME/docs/reference/vame/analysis/pose_segmentation","docId":"reference/vame/analysis/pose_segmentation","unlisted":false},{"type":"link","label":"segment_behavior","href":"/VAME/docs/reference/vame/analysis/segment_behavior","docId":"reference/vame/analysis/segment_behavior","unlisted":false},{"type":"link","label":"tree_hierarchy","href":"/VAME/docs/reference/vame/analysis/tree_hierarchy","docId":"reference/vame/analysis/tree_hierarchy","unlisted":false},{"type":"link","label":"umap","href":"/VAME/docs/reference/vame/analysis/umap","docId":"reference/vame/analysis/umap","unlisted":false},{"type":"link","label":"umap_visualization","href":"/VAME/docs/reference/vame/analysis/umap_visualization","docId":"reference/vame/analysis/umap_visualization","unlisted":false},{"type":"link","label":"videowriter","href":"/VAME/docs/reference/vame/analysis/videowriter","docId":"reference/vame/analysis/videowriter","unlisted":false}]},{"type":"category","label":"initialize_project","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"new","href":"/VAME/docs/reference/vame/initialize_project/new","docId":"reference/vame/initialize_project/new","unlisted":false}]},{"type":"category","label":"logging","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"logger","href":"/VAME/docs/reference/vame/logging/logger","docId":"reference/vame/logging/logger","unlisted":false}]},{"type":"category","label":"model","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"create_training","href":"/VAME/docs/reference/vame/model/create_training","docId":"reference/vame/model/create_training","unlisted":false},{"type":"link","label":"dataloader","href":"/VAME/docs/reference/vame/model/dataloader","docId":"reference/vame/model/dataloader","unlisted":false},{"type":"link","label":"evaluate","href":"/VAME/docs/reference/vame/model/evaluate","docId":"reference/vame/model/evaluate","unlisted":false},{"type":"link","label":"rnn_model","href":"/VAME/docs/reference/vame/model/rnn_model","docId":"reference/vame/model/rnn_model","unlisted":false},{"type":"link","label":"rnn_vae","href":"/VAME/docs/reference/vame/model/rnn_vae","docId":"reference/vame/model/rnn_vae","unlisted":false}]},{"type":"category","label":"schemas","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"states","href":"/VAME/docs/reference/vame/schemas/states","docId":"reference/vame/schemas/states","unlisted":false}]},{"type":"category","label":"util","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"align_egocentrical","href":"/VAME/docs/reference/vame/util/align_egocentrical","docId":"reference/vame/util/align_egocentrical","unlisted":false},{"type":"link","label":"auxiliary","href":"/VAME/docs/reference/vame
/util/auxiliary","docId":"reference/vame/util/auxiliary","unlisted":false},{"type":"link","label":"csv_to_npy","href":"/VAME/docs/reference/vame/util/csv_to_npy","docId":"reference/vame/util/csv_to_npy","unlisted":false},{"type":"link","label":"data_manipulation","href":"/VAME/docs/reference/vame/util/data_manipulation","docId":"reference/vame/util/data_manipulation","unlisted":false},{"type":"link","label":"gif_pose_helper","href":"/VAME/docs/reference/vame/util/gif_pose_helper","docId":"reference/vame/util/gif_pose_helper","unlisted":false},{"type":"link","label":"model_util","href":"/VAME/docs/reference/vame/util/model_util","docId":"reference/vame/util/model_util","unlisted":false}]}]}],"href":"/VAME/docs/category/api-reference"},{"type":"link","label":"FAQ","href":"/VAME/docs/faq","docId":"faq","unlisted":false}]},"docs":{"faq":{"id":"faq","title":"FAQ","description":"Frequently Asked Questions","sidebar":"docsSidebar"},"getting_started/installation":{"id":"getting_started/installation","title":"Installation","description":"Installation","sidebar":"docsSidebar"},"getting_started/running":{"id":"getting_started/running","title":"Running VAME Workflow","description":"Workflow Overview","sidebar":"docsSidebar"},"intro":{"id":"intro","title":"Introduction","description":"\ud83c\udf1f Welcome to EthoML/VAME (Variational Animal Motion Encoding), an open-source machine learning tool for behavioral segmentation and analyses.","sidebar":"docsSidebar"},"reference/vame/analysis/community_analysis":{"id":"reference/vame/analysis/community_analysis","title":"vame.analysis.community_analysis","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/generative_functions":{"id":"reference/vame/analysis/generative_functions","title":"vame.analysis.generative_functions","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/gif_creator":{"id":"reference/vame/analysis/gif_creator","title":"vame.analysis.gif_creator","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/pose_segmentation":{"id":"reference/vame/analysis/pose_segmentation","title":"vame.analysis.pose_segmentation","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/segment_behavior":{"id":"reference/vame/analysis/segment_behavior","title":"vame.analysis.segment_behavior","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/tree_hierarchy":{"id":"reference/vame/analysis/tree_hierarchy","title":"vame.analysis.tree_hierarchy","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/umap":{"id":"reference/vame/analysis/umap","title":"vame.analysis.umap","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/umap_visualization":{"id":"reference/vame/analysis/umap_visualization","title":"vame.analysis.umap_visualization","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/videowriter":{"id":"reference/vame/analysis/videowriter","title":"vame.analysis.videowriter","description":"Variational Animal Motion Embedding 1.0-alpha 
Toolbox","sidebar":"docsSidebar"},"reference/vame/initialize_project/new":{"id":"reference/vame/initialize_project/new","title":"vame.initialize_project.new","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/logging/logger":{"id":"reference/vame/logging/logger","title":"vame.logging.logger","description":"TqdmToLogger Objects","sidebar":"docsSidebar"},"reference/vame/model/create_training":{"id":"reference/vame/model/create_training","title":"vame.model.create_training","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/model/dataloader":{"id":"reference/vame/model/dataloader","title":"vame.model.dataloader","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/evaluate":{"id":"reference/vame/model/evaluate","title":"vame.model.evaluate","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/rnn_model":{"id":"reference/vame/model/rnn_model","title":"vame.model.rnn_model","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/rnn_vae":{"id":"reference/vame/model/rnn_vae","title":"vame.model.rnn_vae","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/schemas/states":{"id":"reference/vame/schemas/states","title":"vame.schemas.states","description":"save\\\\_state","sidebar":"docsSidebar"},"reference/vame/util/align_egocentrical":{"id":"reference/vame/util/align_egocentrical","title":"vame.util.align_egocentrical","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/util/auxiliary":{"id":"reference/vame/util/auxiliary","title":"vame.util.auxiliary","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/csv_to_npy":{"id":"reference/vame/util/csv_to_npy","title":"vame.util.csv_to_npy","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/data_manipulation":{"id":"reference/vame/util/data_manipulation","title":"vame.util.data_manipulation","description":"consecutive","sidebar":"docsSidebar"},"reference/vame/util/gif_pose_helper":{"id":"reference/vame/util/gif_pose_helper","title":"vame.util.gif_pose_helper","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/model_util":{"id":"reference/vame/util/model_util","title":"vame.util.model_util","description":"load\\\\_model","sidebar":"docsSidebar"}}}}')}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[2865],{7882:e=>{e.exports=JSON.parse('{"version":{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"docsSidebar":[{"type":"link","label":"Introduction","href":"/VAME/docs/intro","docId":"intro","unlisted":false},{"type":"category","label":"Getting Started","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"Installation","href":"/VAME/docs/getting_started/installation","docId":"getting_started/installation","unlisted":false},{"type":"link","label":"Running VAME 
Workflow","href":"/VAME/docs/getting_started/running","docId":"getting_started/running","unlisted":false}],"href":"/VAME/docs/category/getting-started"},{"type":"category","label":"API reference","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"vame","collapsible":true,"collapsed":true,"items":[{"type":"category","label":"analysis","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"community_analysis","href":"/VAME/docs/reference/vame/analysis/community_analysis","docId":"reference/vame/analysis/community_analysis","unlisted":false},{"type":"link","label":"generative_functions","href":"/VAME/docs/reference/vame/analysis/generative_functions","docId":"reference/vame/analysis/generative_functions","unlisted":false},{"type":"link","label":"gif_creator","href":"/VAME/docs/reference/vame/analysis/gif_creator","docId":"reference/vame/analysis/gif_creator","unlisted":false},{"type":"link","label":"pose_segmentation","href":"/VAME/docs/reference/vame/analysis/pose_segmentation","docId":"reference/vame/analysis/pose_segmentation","unlisted":false},{"type":"link","label":"segment_behavior","href":"/VAME/docs/reference/vame/analysis/segment_behavior","docId":"reference/vame/analysis/segment_behavior","unlisted":false},{"type":"link","label":"tree_hierarchy","href":"/VAME/docs/reference/vame/analysis/tree_hierarchy","docId":"reference/vame/analysis/tree_hierarchy","unlisted":false},{"type":"link","label":"umap","href":"/VAME/docs/reference/vame/analysis/umap","docId":"reference/vame/analysis/umap","unlisted":false},{"type":"link","label":"umap_visualization","href":"/VAME/docs/reference/vame/analysis/umap_visualization","docId":"reference/vame/analysis/umap_visualization","unlisted":false},{"type":"link","label":"videowriter","href":"/VAME/docs/reference/vame/analysis/videowriter","docId":"reference/vame/analysis/videowriter","unlisted":false}]},{"type":"category","label":"initialize_project","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"new","href":"/VAME/docs/reference/vame/initialize_project/new","docId":"reference/vame/initialize_project/new","unlisted":false}]},{"type":"category","label":"logging","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"logger","href":"/VAME/docs/reference/vame/logging/logger","docId":"reference/vame/logging/logger","unlisted":false}]},{"type":"category","label":"model","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"create_training","href":"/VAME/docs/reference/vame/model/create_training","docId":"reference/vame/model/create_training","unlisted":false},{"type":"link","label":"dataloader","href":"/VAME/docs/reference/vame/model/dataloader","docId":"reference/vame/model/dataloader","unlisted":false},{"type":"link","label":"evaluate","href":"/VAME/docs/reference/vame/model/evaluate","docId":"reference/vame/model/evaluate","unlisted":false},{"type":"link","label":"rnn_model","href":"/VAME/docs/reference/vame/model/rnn_model","docId":"reference/vame/model/rnn_model","unlisted":false},{"type":"link","label":"rnn_vae","href":"/VAME/docs/reference/vame/model/rnn_vae","docId":"reference/vame/model/rnn_vae","unlisted":false}]},{"type":"category","label":"schemas","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"states","href":"/VAME/docs/reference/vame/schemas/states","docId":"reference/vame/schemas/states","unlisted":false}]},{"type":"category","label":"util","collapsible":true,"collapsed":true,"items":[{"type":"link","label":"align_egocentrical","href":"/
VAME/docs/reference/vame/util/align_egocentrical","docId":"reference/vame/util/align_egocentrical","unlisted":false},{"type":"link","label":"auxiliary","href":"/VAME/docs/reference/vame/util/auxiliary","docId":"reference/vame/util/auxiliary","unlisted":false},{"type":"link","label":"csv_to_npy","href":"/VAME/docs/reference/vame/util/csv_to_npy","docId":"reference/vame/util/csv_to_npy","unlisted":false},{"type":"link","label":"data_manipulation","href":"/VAME/docs/reference/vame/util/data_manipulation","docId":"reference/vame/util/data_manipulation","unlisted":false},{"type":"link","label":"gif_pose_helper","href":"/VAME/docs/reference/vame/util/gif_pose_helper","docId":"reference/vame/util/gif_pose_helper","unlisted":false},{"type":"link","label":"model_util","href":"/VAME/docs/reference/vame/util/model_util","docId":"reference/vame/util/model_util","unlisted":false}]}]}],"href":"/VAME/docs/category/api-reference"},{"type":"link","label":"FAQ","href":"/VAME/docs/faq","docId":"faq","unlisted":false}]},"docs":{"faq":{"id":"faq","title":"FAQ","description":"Frequently Asked Questions","sidebar":"docsSidebar"},"getting_started/installation":{"id":"getting_started/installation","title":"Installation","description":"Installation","sidebar":"docsSidebar"},"getting_started/running":{"id":"getting_started/running","title":"Running VAME Workflow","description":"Workflow Overview","sidebar":"docsSidebar"},"intro":{"id":"intro","title":"Introduction","description":"\ud83c\udf1f Welcome to EthoML/VAME (Variational Animal Motion Encoding), an open-source machine learning tool for behavioral segmentation and analyses.","sidebar":"docsSidebar"},"reference/vame/analysis/community_analysis":{"id":"reference/vame/analysis/community_analysis","title":"vame.analysis.community_analysis","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/generative_functions":{"id":"reference/vame/analysis/generative_functions","title":"vame.analysis.generative_functions","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/gif_creator":{"id":"reference/vame/analysis/gif_creator","title":"vame.analysis.gif_creator","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/pose_segmentation":{"id":"reference/vame/analysis/pose_segmentation","title":"vame.analysis.pose_segmentation","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/segment_behavior":{"id":"reference/vame/analysis/segment_behavior","title":"vame.analysis.segment_behavior","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/tree_hierarchy":{"id":"reference/vame/analysis/tree_hierarchy","title":"vame.analysis.tree_hierarchy","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/umap":{"id":"reference/vame/analysis/umap","title":"vame.analysis.umap","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/umap_visualization":{"id":"reference/vame/analysis/umap_visualization","title":"vame.analysis.umap_visualization","description":"Variational Animal Motion Embedding 1.0-alpha 
Toolbox","sidebar":"docsSidebar"},"reference/vame/analysis/videowriter":{"id":"reference/vame/analysis/videowriter","title":"vame.analysis.videowriter","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/initialize_project/new":{"id":"reference/vame/initialize_project/new","title":"vame.initialize_project.new","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/logging/logger":{"id":"reference/vame/logging/logger","title":"vame.logging.logger","description":"TqdmToLogger Objects","sidebar":"docsSidebar"},"reference/vame/model/create_training":{"id":"reference/vame/model/create_training","title":"vame.model.create_training","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/model/dataloader":{"id":"reference/vame/model/dataloader","title":"vame.model.dataloader","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/evaluate":{"id":"reference/vame/model/evaluate","title":"vame.model.evaluate","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/rnn_model":{"id":"reference/vame/model/rnn_model","title":"vame.model.rnn_model","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/model/rnn_vae":{"id":"reference/vame/model/rnn_vae","title":"vame.model.rnn_vae","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/schemas/states":{"id":"reference/vame/schemas/states","title":"vame.schemas.states","description":"save\\\\_state","sidebar":"docsSidebar"},"reference/vame/util/align_egocentrical":{"id":"reference/vame/util/align_egocentrical","title":"vame.util.align_egocentrical","description":"Variational Animal Motion Embedding 0.1 Toolbox","sidebar":"docsSidebar"},"reference/vame/util/auxiliary":{"id":"reference/vame/util/auxiliary","title":"vame.util.auxiliary","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/csv_to_npy":{"id":"reference/vame/util/csv_to_npy","title":"vame.util.csv_to_npy","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/data_manipulation":{"id":"reference/vame/util/data_manipulation","title":"vame.util.data_manipulation","description":"get\\\\pose\\\\data\\\\from\\\\nwb\\\\_file","sidebar":"docsSidebar"},"reference/vame/util/gif_pose_helper":{"id":"reference/vame/util/gif_pose_helper","title":"vame.util.gif_pose_helper","description":"Variational Animal Motion Embedding 1.0-alpha Toolbox","sidebar":"docsSidebar"},"reference/vame/util/model_util":{"id":"reference/vame/util/model_util","title":"vame.util.model_util","description":"load\\\\_model","sidebar":"docsSidebar"}}}}')}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.f382ef6c.js b/assets/js/runtime~main.9ca5a090.js similarity index 90% rename from assets/js/runtime~main.f382ef6c.js rename to assets/js/runtime~main.9ca5a090.js index 167897b7..453455a5 100644 --- a/assets/js/runtime~main.f382ef6c.js +++ b/assets/js/runtime~main.9ca5a090.js @@ -1 +1 @@ -(()=>{"use strict";var e,a,f,c,t,d={},r={};function o(e){var a=r[e];if(void 0!==a)return a.exports;var f=r[e]={id:e,loaded:!1,exports:{}};return 
d[e].call(f.exports,f,f.exports,o),f.loaded=!0,f.exports}o.m=d,o.c=r,e=[],o.O=(a,f,c,t)=>{if(!f){var d=1/0;for(i=0;i=t)&&Object.keys(o.O).every((e=>o.O[e](f[b])))?f.splice(b--,1):(r=!1,t0&&e[i-1][2]>t;i--)e[i]=e[i-1];e[i]=[f,c,t]},o.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return o.d(a,{a:a}),a},f=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,o.t=function(e,c){if(1&c&&(e=this(e)),8&c)return e;if("object"==typeof e&&e){if(4&c&&e.__esModule)return e;if(16&c&&"function"==typeof e.then)return e}var t=Object.create(null);o.r(t);var d={};a=a||[null,f({}),f([]),f(f)];for(var r=2&c&&e;"object"==typeof r&&!~a.indexOf(r);r=f(r))Object.getOwnPropertyNames(r).forEach((a=>d[a]=()=>e[a]));return d.default=()=>e,o.d(t,d),t},o.d=(e,a)=>{for(var f in a)o.o(a,f)&&!o.o(e,f)&&Object.defineProperty(e,f,{enumerable:!0,get:a[f]})},o.f={},o.e=e=>Promise.all(Object.keys(o.f).reduce(((a,f)=>(o.f[f](e,a),a)),[])),o.u=e=>"assets/js/"+({88:"04a8f406",134:"3ab23e74",1011:"ea313555",1235:"a7456010",1385:"e56dec7c",1470:"a54c3d34",1608:"f265b8db",1665:"d6fdf9fc",1824:"f2711dff",1903:"acecf23e",1967:"7ceb4274",2185:"af3fc117",2634:"c4f5d8e4",2711:"9e4087bc",2865:"f2b90fa0",2996:"268dfc8f",3100:"498287ba",3249:"ccc49370",3274:"ba2390ac",3567:"20a1189d",3976:"0e384e19",4077:"62763787",4134:"393be207",4641:"99e0554e",4693:"ecdbbd59",4813:"6875c492",4841:"97262fb0",5403:"13dc5c7b",5557:"d9f32620",5742:"aba21aa0",5908:"c4ea8f9a",6061:"1f391b9e",6592:"af23717f",6969:"14eb3368",7098:"a7bd4aaa",7382:"2707696f",7472:"814f3328",7643:"a6aa9e1f",7704:"1a8342bf",7707:"3a52db76",8020:"4ed6af3d",8061:"8adc0ad1",8209:"01a85c17",8247:"17f06cf5",8401:"17896441",8737:"7661071f",8816:"5a083f91",9048:"a94703ab",9266:"770df669",9590:"0a24c0cb",9632:"cebd9e3f",9647:"5e95c892",9858:"36994c47",9888:"f04d77e4",9932:"dfc4de34"}[e]||e)+"."+{88:"31392b8a",134:"5dc6553c",1011:"7d8f581e",1235:"5f9bbb01",1385:"8263ae9a",1470:"1ae020bc",1538:"a265ec9d",1608:"703a06c5",1665:"dcdb2abc",1824:"b582da10",1903:"0f1d688f",1967:"a456d19d",2185:"3be04c86",2237:"fe867cfb",2634:"f0404dde",2711:"9b70b1ae",2865:"403b7410",2996:"a0fa7719",3100:"d3292814",3242:"39aebfdc",3249:"05ee6649",3274:"1d4d8ff9",3567:"5177a708",3976:"203e7f6a",4077:"a2324226",4134:"c90d5c3f",4641:"bb3798a8",4693:"dedc33ec",4813:"80128212",4841:"27469122",5403:"ce3a2712",5557:"0f1c0e08",5742:"ed09cce9",5908:"8dcedad6",6061:"cb5e65be",6592:"28c66cd1",6969:"fff45cd7",7098:"b74e8d88",7382:"3b8a57d4",7472:"f50ea508",7643:"4fa468d7",7704:"8440d4ae",7707:"7dc9146f",8020:"29feac53",8061:"597e988f",8209:"5f985c68",8247:"0d25968e",8401:"ef38c3e6",8737:"c2e3da32",8816:"10d2bdfe",9048:"655717e3",9266:"369213ba",9590:"a59bded3",9632:"9790fec5",9647:"3d1fe17f",9858:"337a7516",9888:"3757e0cc",9932:"91786e1b"}[e]+".js",o.miniCssF=e=>{},o.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),o.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),c={},t="docs:",o.l=(e,a,f,d)=>{if(c[e])c[e].push(a);else{var r,b;if(void 0!==f)for(var n=document.getElementsByTagName("script"),i=0;i{r.onerror=r.onload=null,clearTimeout(s);var t=c[e];if(delete c[e],r.parentNode&&r.parentNode.removeChild(r),t&&t.forEach((e=>e(f))),a)return a(f)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:r}),12e4);r.onerror=l.bind(null,r.onerror),r.onload=l.bind(null,r.onload),b&&document.head.appendChild(r)}},o.r=e=>{"undefined"!=typeof 
Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.p="/VAME/",o.gca=function(e){return e={17896441:"8401",62763787:"4077","04a8f406":"88","3ab23e74":"134",ea313555:"1011",a7456010:"1235",e56dec7c:"1385",a54c3d34:"1470",f265b8db:"1608",d6fdf9fc:"1665",f2711dff:"1824",acecf23e:"1903","7ceb4274":"1967",af3fc117:"2185",c4f5d8e4:"2634","9e4087bc":"2711",f2b90fa0:"2865","268dfc8f":"2996","498287ba":"3100",ccc49370:"3249",ba2390ac:"3274","20a1189d":"3567","0e384e19":"3976","393be207":"4134","99e0554e":"4641",ecdbbd59:"4693","6875c492":"4813","97262fb0":"4841","13dc5c7b":"5403",d9f32620:"5557",aba21aa0:"5742",c4ea8f9a:"5908","1f391b9e":"6061",af23717f:"6592","14eb3368":"6969",a7bd4aaa:"7098","2707696f":"7382","814f3328":"7472",a6aa9e1f:"7643","1a8342bf":"7704","3a52db76":"7707","4ed6af3d":"8020","8adc0ad1":"8061","01a85c17":"8209","17f06cf5":"8247","7661071f":"8737","5a083f91":"8816",a94703ab:"9048","770df669":"9266","0a24c0cb":"9590",cebd9e3f:"9632","5e95c892":"9647","36994c47":"9858",f04d77e4:"9888",dfc4de34:"9932"}[e]||e,o.p+o.u(e)},(()=>{var e={5354:0,1869:0};o.f.j=(a,f)=>{var c=o.o(e,a)?e[a]:void 0;if(0!==c)if(c)f.push(c[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var t=new Promise(((f,t)=>c=e[a]=[f,t]));f.push(c[2]=t);var d=o.p+o.u(a),r=new Error;o.l(d,(f=>{if(o.o(e,a)&&(0!==(c=e[a])&&(e[a]=void 0),c)){var t=f&&("load"===f.type?"missing":f.type),d=f&&f.target&&f.target.src;r.message="Loading chunk "+a+" failed.\n("+t+": "+d+")",r.name="ChunkLoadError",r.type=t,r.request=d,c[1](r)}}),"chunk-"+a,a)}},o.O.j=a=>0===e[a];var a=(a,f)=>{var c,t,d=f[0],r=f[1],b=f[2],n=0;if(d.some((a=>0!==e[a]))){for(c in r)o.o(r,c)&&(o.m[c]=r[c]);if(b)var i=b(o)}for(a&&a(f);n{"use strict";var e,a,f,c,t,d={},r={};function o(e){var a=r[e];if(void 0!==a)return a.exports;var f=r[e]={id:e,loaded:!1,exports:{}};return d[e].call(f.exports,f,f.exports,o),f.loaded=!0,f.exports}o.m=d,o.c=r,e=[],o.O=(a,f,c,t)=>{if(!f){var d=1/0;for(i=0;i=t)&&Object.keys(o.O).every((e=>o.O[e](f[b])))?f.splice(b--,1):(r=!1,t0&&e[i-1][2]>t;i--)e[i]=e[i-1];e[i]=[f,c,t]},o.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return o.d(a,{a:a}),a},f=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,o.t=function(e,c){if(1&c&&(e=this(e)),8&c)return e;if("object"==typeof e&&e){if(4&c&&e.__esModule)return e;if(16&c&&"function"==typeof e.then)return e}var t=Object.create(null);o.r(t);var d={};a=a||[null,f({}),f([]),f(f)];for(var r=2&c&&e;"object"==typeof r&&!~a.indexOf(r);r=f(r))Object.getOwnPropertyNames(r).forEach((a=>d[a]=()=>e[a]));return d.default=()=>e,o.d(t,d),t},o.d=(e,a)=>{for(var f in 
a)o.o(a,f)&&!o.o(e,f)&&Object.defineProperty(e,f,{enumerable:!0,get:a[f]})},o.f={},o.e=e=>Promise.all(Object.keys(o.f).reduce(((a,f)=>(o.f[f](e,a),a)),[])),o.u=e=>"assets/js/"+({88:"04a8f406",134:"3ab23e74",1011:"ea313555",1235:"a7456010",1385:"e56dec7c",1470:"a54c3d34",1608:"f265b8db",1665:"d6fdf9fc",1824:"f2711dff",1903:"acecf23e",1967:"7ceb4274",2185:"af3fc117",2634:"c4f5d8e4",2711:"9e4087bc",2865:"f2b90fa0",2996:"268dfc8f",3100:"498287ba",3249:"ccc49370",3274:"ba2390ac",3567:"20a1189d",3976:"0e384e19",4077:"62763787",4134:"393be207",4641:"99e0554e",4693:"ecdbbd59",4813:"6875c492",4841:"97262fb0",5403:"13dc5c7b",5557:"d9f32620",5742:"aba21aa0",5908:"c4ea8f9a",6061:"1f391b9e",6592:"af23717f",6969:"14eb3368",7098:"a7bd4aaa",7382:"2707696f",7472:"814f3328",7643:"a6aa9e1f",7704:"1a8342bf",7707:"3a52db76",8020:"4ed6af3d",8061:"8adc0ad1",8209:"01a85c17",8247:"17f06cf5",8401:"17896441",8737:"7661071f",8816:"5a083f91",9048:"a94703ab",9266:"770df669",9590:"0a24c0cb",9632:"cebd9e3f",9647:"5e95c892",9858:"36994c47",9888:"f04d77e4",9932:"dfc4de34"}[e]||e)+"."+{88:"432dd89d",134:"5dc6553c",1011:"7d8f581e",1235:"5f9bbb01",1385:"8263ae9a",1470:"4bd6b137",1538:"a265ec9d",1608:"a1ee1a3e",1665:"dcdb2abc",1824:"b582da10",1903:"0f1d688f",1967:"a456d19d",2185:"3be04c86",2237:"fe867cfb",2634:"f0404dde",2711:"9b70b1ae",2865:"79025041",2996:"a0fa7719",3100:"d3292814",3242:"39aebfdc",3249:"05ee6649",3274:"9dbc7be1",3567:"57cf727b",3976:"203e7f6a",4077:"9acdd957",4134:"c90d5c3f",4641:"d5a0b519",4693:"dedc33ec",4813:"80128212",4841:"37c580e0",5403:"ce3a2712",5557:"0f1c0e08",5742:"ed09cce9",5908:"26387692",6061:"cb5e65be",6592:"28c66cd1",6969:"fff45cd7",7098:"b74e8d88",7382:"3b8a57d4",7472:"f50ea508",7643:"4fa468d7",7704:"8440d4ae",7707:"7dc9146f",8020:"29feac53",8061:"597e988f",8209:"5f985c68",8247:"0d25968e",8401:"ef38c3e6",8737:"c2e3da32",8816:"10d2bdfe",9048:"655717e3",9266:"369213ba",9590:"a59bded3",9632:"9790fec5",9647:"3d1fe17f",9858:"337a7516",9888:"3757e0cc",9932:"91786e1b"}[e]+".js",o.miniCssF=e=>{},o.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),o.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),c={},t="docs:",o.l=(e,a,f,d)=>{if(c[e])c[e].push(a);else{var r,b;if(void 0!==f)for(var n=document.getElementsByTagName("script"),i=0;i{r.onerror=r.onload=null,clearTimeout(s);var t=c[e];if(delete c[e],r.parentNode&&r.parentNode.removeChild(r),t&&t.forEach((e=>e(f))),a)return a(f)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:r}),12e4);r.onerror=l.bind(null,r.onerror),r.onload=l.bind(null,r.onload),b&&document.head.appendChild(r)}},o.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.p="/VAME/",o.gca=function(e){return 
e={17896441:"8401",62763787:"4077","04a8f406":"88","3ab23e74":"134",ea313555:"1011",a7456010:"1235",e56dec7c:"1385",a54c3d34:"1470",f265b8db:"1608",d6fdf9fc:"1665",f2711dff:"1824",acecf23e:"1903","7ceb4274":"1967",af3fc117:"2185",c4f5d8e4:"2634","9e4087bc":"2711",f2b90fa0:"2865","268dfc8f":"2996","498287ba":"3100",ccc49370:"3249",ba2390ac:"3274","20a1189d":"3567","0e384e19":"3976","393be207":"4134","99e0554e":"4641",ecdbbd59:"4693","6875c492":"4813","97262fb0":"4841","13dc5c7b":"5403",d9f32620:"5557",aba21aa0:"5742",c4ea8f9a:"5908","1f391b9e":"6061",af23717f:"6592","14eb3368":"6969",a7bd4aaa:"7098","2707696f":"7382","814f3328":"7472",a6aa9e1f:"7643","1a8342bf":"7704","3a52db76":"7707","4ed6af3d":"8020","8adc0ad1":"8061","01a85c17":"8209","17f06cf5":"8247","7661071f":"8737","5a083f91":"8816",a94703ab:"9048","770df669":"9266","0a24c0cb":"9590",cebd9e3f:"9632","5e95c892":"9647","36994c47":"9858",f04d77e4:"9888",dfc4de34:"9932"}[e]||e,o.p+o.u(e)},(()=>{var e={5354:0,1869:0};o.f.j=(a,f)=>{var c=o.o(e,a)?e[a]:void 0;if(0!==c)if(c)f.push(c[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var t=new Promise(((f,t)=>c=e[a]=[f,t]));f.push(c[2]=t);var d=o.p+o.u(a),r=new Error;o.l(d,(f=>{if(o.o(e,a)&&(0!==(c=e[a])&&(e[a]=void 0),c)){var t=f&&("load"===f.type?"missing":f.type),d=f&&f.target&&f.target.src;r.message="Loading chunk "+a+" failed.\n("+t+": "+d+")",r.name="ChunkLoadError",r.type=t,r.request=d,c[1](r)}}),"chunk-"+a,a)}},o.O.j=a=>0===e[a];var a=(a,f)=>{var c,t,d=f[0],r=f[1],b=f[2],n=0;if(d.some((a=>0!==e[a]))){for(c in r)o.o(r,c)&&(o.m[c]=r[c]);if(b)var i=b(o)}for(a&&a(f);n Archive | VAME - + diff --git a/blog/index.html b/blog/index.html index 4e8299b0..a9173ce8 100644 --- a/blog/index.html +++ b/blog/index.html @@ -5,7 +5,7 @@ Blog | VAME - + diff --git a/blog/tags/blog/index.html b/blog/tags/blog/index.html index c6f77e1c..a6939e3d 100644 --- a/blog/tags/blog/index.html +++ b/blog/tags/blog/index.html @@ -5,7 +5,7 @@ One post tagged with "blog" | VAME - + diff --git a/blog/tags/index.html b/blog/tags/index.html index 76989c8a..8607d9d0 100644 --- a/blog/tags/index.html +++ b/blog/tags/index.html @@ -5,7 +5,7 @@ Tags | VAME - + diff --git a/blog/tags/vame/index.html b/blog/tags/vame/index.html index 54c33763..77077719 100644 --- a/blog/tags/vame/index.html +++ b/blog/tags/vame/index.html @@ -5,7 +5,7 @@ One post tagged with "vame" | VAME - + diff --git a/blog/welcome/index.html b/blog/welcome/index.html index e4070c92..c331bf04 100644 --- a/blog/welcome/index.html +++ b/blog/welcome/index.html @@ -5,7 +5,7 @@ Welcome | VAME - + diff --git a/docs/category/api-reference/index.html b/docs/category/api-reference/index.html index 91b5d2f2..479e3118 100644 --- a/docs/category/api-reference/index.html +++ b/docs/category/api-reference/index.html @@ -5,7 +5,7 @@ API reference | VAME - + diff --git a/docs/category/getting-started/index.html b/docs/category/getting-started/index.html index b5d44c87..4fc7c3b2 100644 --- a/docs/category/getting-started/index.html +++ b/docs/category/getting-started/index.html @@ -5,7 +5,7 @@ Getting Started | VAME - + diff --git a/docs/faq/index.html b/docs/faq/index.html index 2c83abe6..af096a2c 100644 --- a/docs/faq/index.html +++ b/docs/faq/index.html @@ -5,7 +5,7 @@ FAQ | VAME - + diff --git a/docs/getting_started/installation/index.html b/docs/getting_started/installation/index.html index 5b2534de..239f8cbc 100644 --- a/docs/getting_started/installation/index.html +++ b/docs/getting_started/installation/index.html @@ -5,7 +5,7 @@ Installation | VAME - + diff --git 
a/docs/getting_started/running/index.html b/docs/getting_started/running/index.html index cfc8dc73..c876e97b 100644 --- a/docs/getting_started/running/index.html +++ b/docs/getting_started/running/index.html @@ -5,7 +5,7 @@ Running VAME Workflow | VAME - + diff --git a/docs/intro/index.html b/docs/intro/index.html index dc2c14c7..12bffe4b 100644 --- a/docs/intro/index.html +++ b/docs/intro/index.html @@ -5,7 +5,7 @@ Introduction | VAME - + diff --git a/docs/reference/vame/analysis/community_analysis/index.html b/docs/reference/vame/analysis/community_analysis/index.html index 8ee42670..9843ef60 100644 --- a/docs/reference/vame/analysis/community_analysis/index.html +++ b/docs/reference/vame/analysis/community_analysis/index.html @@ -5,7 +5,7 @@ vame.analysis.community_analysis | VAME - + @@ -159,7 +159,7 @@

  • List[np.ndarray] - List of cohort community labels for each file.
  • community

    -
    @save_state(model=CommunityFunctionSchema)
    def community(config: str,
    cohort: bool = True,
    cut_tree: int | None = None,
    save_logs: bool = False) -> None
    +
    @save_state(model=CommunityFunctionSchema)
    def community(config: str,
    parametrization: Parametrizations,
    cohort: bool = True,
    cut_tree: int | None = None,
    save_logs: bool = False) -> None

    Perform community analysis.
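A minimal usage sketch of the updated call, showing the new `parametrization` argument (the config path is a placeholder, and the `Parametrizations` import path and member name are assumptions; check your VAME version):

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

vame.community(
    config,
    parametrization=Parametrizations.hmm,  # member name is an assumption
    cohort=True,        # analyse all sessions together as one cohort
    cut_tree=2,         # illustrative value; level at which the community tree is cut
    save_logs=True,
)
```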

    Arguments:

      diff --git a/docs/reference/vame/analysis/generative_functions/index.html b/docs/reference/vame/analysis/generative_functions/index.html index 94e6dfef..e77fab03 100644 --- a/docs/reference/vame/analysis/generative_functions/index.html +++ b/docs/reference/vame/analysis/generative_functions/index.html @@ -5,7 +5,7 @@ vame.analysis.generative_functions | VAME - + @@ -63,7 +63,7 @@

      vis

      Returns:

      None

      generative_model

      -
      @save_state(model=GenerativeModelFunctionSchema)
      def generative_model(config: str,
      mode: str = "sampling",
      save_logs: bool = False) -> plt.Figure
      +
      @save_state(model=GenerativeModelFunctionSchema)
      def generative_model(config: str,
      parametrization: Parametrizations,
      mode: str = "sampling",
      save_logs: bool = False) -> Dict[str, plt.Figure]

      Generative model.

      Arguments:

        @@ -71,8 +71,6 @@

        generative_
      • mode str, optional - Mode for generating samples. Defaults to "sampling".

      Returns:

      -
        -
      • plt.Figure - Plot of generated samples.
      • -
      +

      Dict[str, plt.Figure]: Plots of generated samples for each parametrization.
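A usage sketch for the updated signature, which now returns one figure per parametrization (the config path is a placeholder; the `Parametrizations` import path and member name are assumptions):

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

figures = vame.generative_model(
    config,
    parametrization=Parametrizations.hmm,  # member name is an assumption
    mode="sampling",                        # default mode per the docstring above
)

# The return value is a dict of matplotlib figures; save each one to disk.
for name, fig in figures.items():
    fig.savefig(f"generative_{name}.png")
```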

      \ No newline at end of file diff --git a/docs/reference/vame/analysis/gif_creator/index.html b/docs/reference/vame/analysis/gif_creator/index.html index e81f5d59..981f445f 100644 --- a/docs/reference/vame/analysis/gif_creator/index.html +++ b/docs/reference/vame/analysis/gif_creator/index.html @@ -5,7 +5,7 @@ vame.analysis.gif_creator | VAME - + @@ -32,7 +32,7 @@

create_video

Returns:

      None

      gif

      -
      def gif(
      config: str,
      pose_ref_index: int,
      subtract_background: bool = True,
      start: int | None = None,
      length: int = 500,
      max_lag: int = 30,
      label: str = 'community',
      file_format: str = '.mp4',
      crop_size: Tuple[int, int] = (300, 300)) -> None
      +
      def gif(
      config: str,
      pose_ref_index: int,
      parametrization: Parametrizations,
      subtract_background: bool = True,
      start: int | None = None,
      length: int = 500,
      max_lag: int = 30,
      label: str = 'community',
      file_format: str = '.mp4',
      crop_size: Tuple[int, int] = (300, 300)) -> None

      Create a GIF from the given configuration.
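A hedged usage sketch of the new signature (the config path is a placeholder; the `Parametrizations` import path and member name are assumptions):

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

vame.gif(
    config,
    pose_ref_index=0,     # typed as int above; some releases expect a list of two body-part indices
    parametrization=Parametrizations.hmm,  # member name is an assumption
    subtract_background=True,
    start=None,           # optional start frame
    length=500,           # number of frames to include
    max_lag=30,
    label="community",    # or "motif"
    file_format=".mp4",
    crop_size=(300, 300),
)
```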

      Arguments:

        diff --git a/docs/reference/vame/analysis/pose_segmentation/index.html b/docs/reference/vame/analysis/pose_segmentation/index.html index 0af651da..3c7da52c 100644 --- a/docs/reference/vame/analysis/pose_segmentation/index.html +++ b/docs/reference/vame/analysis/pose_segmentation/index.html @@ -5,7 +5,7 @@ vame.analysis.pose_segmentation | VAME - + diff --git a/docs/reference/vame/analysis/segment_behavior/index.html b/docs/reference/vame/analysis/segment_behavior/index.html index a476df6c..c5ba2cbd 100644 --- a/docs/reference/vame/analysis/segment_behavior/index.html +++ b/docs/reference/vame/analysis/segment_behavior/index.html @@ -5,7 +5,7 @@ vame.analysis.segment_behavior | VAME - + diff --git a/docs/reference/vame/analysis/tree_hierarchy/index.html b/docs/reference/vame/analysis/tree_hierarchy/index.html index f6f01307..7c503c6b 100644 --- a/docs/reference/vame/analysis/tree_hierarchy/index.html +++ b/docs/reference/vame/analysis/tree_hierarchy/index.html @@ -5,7 +5,7 @@ vame.analysis.tree_hierarchy | VAME - + diff --git a/docs/reference/vame/analysis/umap/index.html b/docs/reference/vame/analysis/umap/index.html index 0da56ede..f1c38819 100644 --- a/docs/reference/vame/analysis/umap/index.html +++ b/docs/reference/vame/analysis/umap/index.html @@ -5,7 +5,7 @@ vame.analysis.umap | VAME - + @@ -29,18 +29,6 @@

        umap_embeddin
        • np.ndarray - UMAP embedding.
        -

        umap_vis_community_labels

        -
        def umap_vis_community_labels(cfg: dict, embed: np.ndarray,
        community_labels_all: np.ndarray,
        save_path: str | None) -> None
        -

        Create plotly visualizaton of UMAP embedding with community labels.

        -

        Arguments:

        -
          -
        • cfg dict - Configuration parameters.
        • -
        • embed np.ndarray - UMAP embedding.
        • -
        • community_labels_all np.ndarray - Community labels.
        • -
        • save_path - Path to save the plot. If None it will not save the plot.
        • -
        -

        Returns:

        -

        None

        umap_vis

        def umap_vis(embed: np.ndarray, num_points: int) -> None

        Visualize UMAP embedding without labels.
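For example (toy data, purely illustrative):

```python
import numpy as np
from vame.analysis.umap import umap_vis  # module path taken from this page

embed = np.random.rand(10_000, 2)  # stand-in for a real UMAP embedding

# Plot up to 5,000 of the embedded points, without motif or community labels.
umap_vis(embed, num_points=5_000)
```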

        @@ -75,7 +63,7 @@

        umap_vis_comm<

        Returns:

        fig - Plot figure of UMAP visualization embedding with community labels.

        visualization

        -
        @save_state(model=VisualizationFunctionSchema)
        def visualization(config: Union[str, Path],
        label: Optional[str] = None,
        save_logs: bool = False) -> None
        +
        @save_state(model=VisualizationFunctionSchema)
        def visualization(config: Union[str, Path],
        parametrization: Parametrizations,
        label: Optional[str] = None,
        save_logs: bool = False) -> None

        Visualize UMAP embeddings based on configuration settings.
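A short usage sketch (placeholder path; the `Parametrizations` import path and member name are assumptions):

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

vame.visualization(
    config,
    parametrization=Parametrizations.hmm,  # member name is an assumption
    label="community",                      # e.g. "motif", "community", or None for unlabeled points
)
```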

        Arguments:

          diff --git a/docs/reference/vame/analysis/umap_visualization/index.html b/docs/reference/vame/analysis/umap_visualization/index.html index 3d9c3f06..4bd7333f 100644 --- a/docs/reference/vame/analysis/umap_visualization/index.html +++ b/docs/reference/vame/analysis/umap_visualization/index.html @@ -5,7 +5,7 @@ vame.analysis.umap_visualization | VAME - + diff --git a/docs/reference/vame/analysis/videowriter/index.html b/docs/reference/vame/analysis/videowriter/index.html index 4f29198c..779d3b92 100644 --- a/docs/reference/vame/analysis/videowriter/index.html +++ b/docs/reference/vame/analysis/videowriter/index.html @@ -5,7 +5,7 @@ vame.analysis.videowriter | VAME - + @@ -15,7 +15,7 @@

          https://github.com/LINCellularNeuroscience/VAME Licensed under GNU General Public License v3.0

          get_cluster_vid

          -
          def get_cluster_vid(cfg: dict,
          path_to_file: str,
          file: str,
          n_cluster: int,
          videoType: str,
          flag: str,
          output_video_type: str = ".mp4",
          tqdm_logger_stream: TqdmToLogger | None = None) -> None
          +
          def get_cluster_vid(cfg: dict,
          path_to_file: str,
          file: str,
          n_cluster: int,
          videoType: str,
          flag: str,
          param: Parametrizations,
          output_video_type: str = ".mp4",
          tqdm_logger_stream: TqdmToLogger | None = None) -> None

          Generate cluster videos.

          Arguments:

            @@ -29,7 +29,7 @@

            get_cluster_

            Returns:

None - Generate cluster videos and save them to the filesystem in the project folder.

            motif_videos

            -
            @save_state(model=MotifVideosFunctionSchema)
            def motif_videos(config: Union[str, Path],
            videoType: str = '.mp4',
            output_video_type: str = '.mp4',
            save_logs: bool = False) -> None
            +
            @save_state(model=MotifVideosFunctionSchema)
            def motif_videos(config: Union[str, Path],
            parametrization: Parametrizations,
            videoType: str = '.mp4',
            output_video_type: str = '.mp4',
            save_logs: bool = False) -> None

            Generate motif videos.

            Arguments:

              @@ -40,7 +40,7 @@

motif_videos

Returns:

None - Generate motif videos and save them to the filesystem in the project's cluster_videos folder.
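For example, assuming the same placeholder project as above (the `Parametrizations` import path and member name are assumptions):

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

# Writes one video per motif for the chosen parametrization.
vame.motif_videos(
    config,
    parametrization=Parametrizations.hmm,  # member name is an assumption
    videoType=".mp4",
    output_video_type=".mp4",
)
```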

              community_videos

              -
              @save_state(model=CommunityVideosFunctionSchema)
              def community_videos(config: Union[str, Path],
              videoType: str = '.mp4',
              save_logs: bool = False) -> None
              +
              @save_state(model=CommunityVideosFunctionSchema)
              def community_videos(config: Union[str, Path],
              parametrization: Parametrizations,
              videoType: str = '.mp4',
              save_logs: bool = False,
              output_video_type: str = '.mp4') -> None

              Generate community videos.
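A usage sketch mirroring the motif-video call above; community labels for the chosen parametrization typically need to exist first (i.e. run `community()` beforehand). The `Parametrizations` import path and member name are assumptions:

```python
import vame
from vame.schemas.states import Parametrizations  # import path is an assumption

config = "/path/to/my_vame_project/config.yaml"    # placeholder path

vame.community_videos(
    config,
    parametrization=Parametrizations.hmm,  # member name is an assumption
    videoType=".mp4",
    output_video_type=".mp4",
)
```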

              Arguments:

                diff --git a/docs/reference/vame/initialize_project/new/index.html b/docs/reference/vame/initialize_project/new/index.html index 7e9d8f75..38f252d3 100644 --- a/docs/reference/vame/initialize_project/new/index.html +++ b/docs/reference/vame/initialize_project/new/index.html @@ -5,7 +5,7 @@ vame.initialize_project.new | VAME - + @@ -22,7 +22,7 @@ https://github.com/AlexEMG/DeepLabCut/blob/master/AUTHORS Licensed under GNU Lesser General Public License v3.0

                init_new_project

                -
                def init_new_project(project: str,
                videos: List[str],
                poses_estimations: List[str],
                working_directory: str = None,
                videotype: str = '.mp4') -> str
                +
                def init_new_project(
                project: str,
                videos: List[str],
                poses_estimations: List[str],
                working_directory: str = '.',
                videotype: str = '.mp4',
                paths_to_pose_nwb_series_data: Optional[str] = None) -> str

                Creates a new VAME project with the given parameters.
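For example (all paths are placeholders; `paths_to_pose_nwb_series_data` is only relevant when the pose estimations are NWB files):

```python
import vame

config = vame.init_new_project(
    project="my_vame_project",
    videos=["/data/videos/mouse_01.mp4"],            # placeholder paths
    poses_estimations=["/data/poses/mouse_01.csv"],
    working_directory=".",
    videotype=".mp4",
    paths_to_pose_nwb_series_data=None,  # set only for NWB pose files (path inside the NWB file)
)
print(config)  # path to the newly created config.yaml
```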

                Arguments:

                  diff --git a/docs/reference/vame/logging/logger/index.html b/docs/reference/vame/logging/logger/index.html index 38261fc7..8d2ff7e2 100644 --- a/docs/reference/vame/logging/logger/index.html +++ b/docs/reference/vame/logging/logger/index.html @@ -5,7 +5,7 @@ vame.logging.logger | VAME - + diff --git a/docs/reference/vame/model/create_training/index.html b/docs/reference/vame/model/create_training/index.html index b9410f97..a076f45e 100644 --- a/docs/reference/vame/model/create_training/index.html +++ b/docs/reference/vame/model/create_training/index.html @@ -5,7 +5,7 @@ vame.model.create_training | VAME - + diff --git a/docs/reference/vame/model/dataloader/index.html b/docs/reference/vame/model/dataloader/index.html index 6fc42a7e..8b8e76f7 100644 --- a/docs/reference/vame/model/dataloader/index.html +++ b/docs/reference/vame/model/dataloader/index.html @@ -5,7 +5,7 @@ vame.model.dataloader | VAME - + diff --git a/docs/reference/vame/model/evaluate/index.html b/docs/reference/vame/model/evaluate/index.html index 2163769f..f781cdab 100644 --- a/docs/reference/vame/model/evaluate/index.html +++ b/docs/reference/vame/model/evaluate/index.html @@ -5,7 +5,7 @@ vame.model.evaluate | VAME - + diff --git a/docs/reference/vame/model/rnn_model/index.html b/docs/reference/vame/model/rnn_model/index.html index d3e9e3b2..f69369b3 100644 --- a/docs/reference/vame/model/rnn_model/index.html +++ b/docs/reference/vame/model/rnn_model/index.html @@ -5,7 +5,7 @@ vame.model.rnn_model | VAME - + diff --git a/docs/reference/vame/model/rnn_vae/index.html b/docs/reference/vame/model/rnn_vae/index.html index 8e5d5cb8..14be8e9a 100644 --- a/docs/reference/vame/model/rnn_vae/index.html +++ b/docs/reference/vame/model/rnn_vae/index.html @@ -5,7 +5,7 @@ vame.model.rnn_vae | VAME - + diff --git a/docs/reference/vame/schemas/states/index.html b/docs/reference/vame/schemas/states/index.html index 03c12a3b..4c73dbac 100644 --- a/docs/reference/vame/schemas/states/index.html +++ b/docs/reference/vame/schemas/states/index.html @@ -5,7 +5,7 @@ vame.schemas.states | VAME - + diff --git a/docs/reference/vame/util/align_egocentrical/index.html b/docs/reference/vame/util/align_egocentrical/index.html index 9fde80ab..b14dfc08 100644 --- a/docs/reference/vame/util/align_egocentrical/index.html +++ b/docs/reference/vame/util/align_egocentrical/index.html @@ -5,7 +5,7 @@ vame.util.align_egocentrical | VAME - + @@ -43,7 +43,7 @@

                  play_alig
                • frame_count int - Number of frames in the video.

                alignment

                -
                def alignment(
                path_to_file: str,
                filename: str,
                pose_ref_index: List[int],
                video_format: str,
                crop_size: Tuple[int, int],
                confidence: float,
                use_video: bool = False,
                check_video: bool = False,
                tqdm_stream: TqdmToLogger = None
                ) -> Tuple[np.ndarray, List[np.ndarray]]
                +
                def alignment(
                path_to_file: str,
                filename: str,
                pose_ref_index: List[int],
                video_format: str,
                crop_size: Tuple[int, int],
                confidence: float,
                pose_estimation_filetype: PoseEstimationFiletype,
                path_to_pose_nwb_series_data: str = None,
                use_video: bool = False,
                check_video: bool = False,
                tqdm_stream: TqdmToLogger = None
                ) -> Tuple[np.ndarray, List[np.ndarray]]

                Perform alignment of egocentric data.
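A heavily hedged sketch of a direct call; in practice this helper is usually invoked for you by the higher-level egocentric alignment step, and the `PoseEstimationFiletype` import path and member name below are assumptions:

```python
from vame.util.align_egocentrical import alignment          # module path from this page
from vame.schemas.states import PoseEstimationFiletype      # import path is an assumption

# All values are placeholders.
frames, aligned = alignment(
    path_to_file="/data/my_vame_project/videos",   # placeholder project path
    filename="mouse_01",
    pose_ref_index=[0, 5],          # indices of the two reference body parts
    video_format=".mp4",
    crop_size=(300, 300),
    confidence=0.9,                 # likelihood threshold for pose estimates
    pose_estimation_filetype=PoseEstimationFiletype.csv,  # member name is an assumption
    use_video=False,
)
```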

                Arguments:

                  diff --git a/docs/reference/vame/util/auxiliary/index.html b/docs/reference/vame/util/auxiliary/index.html index 0b0a87e0..1bb990cb 100644 --- a/docs/reference/vame/util/auxiliary/index.html +++ b/docs/reference/vame/util/auxiliary/index.html @@ -5,7 +5,7 @@ vame.util.auxiliary | VAME - + diff --git a/docs/reference/vame/util/csv_to_npy/index.html b/docs/reference/vame/util/csv_to_npy/index.html index c75c6ded..fd10e74b 100644 --- a/docs/reference/vame/util/csv_to_npy/index.html +++ b/docs/reference/vame/util/csv_to_npy/index.html @@ -5,7 +5,7 @@ vame.util.csv_to_npy | VAME - + @@ -14,8 +14,8 @@ Leibniz Institute for Neurobiology, Magdeburg, Germany

                  https://github.com/LINCellularNeuroscience/VAME Licensed under GNU General Public License v3.0

                  -

                  csv_to_numpy

                  -
                  @save_state(model=CsvToNumpyFunctionSchema)
                  def csv_to_numpy(config: str, save_logs=False) -> None
                  +

                  pose_to_numpy

                  +
                  @save_state(model=PoseToNumpyFunctionSchema)
                  def pose_to_numpy(config: str, save_logs=False) -> None

                  Converts a pose-estimation.csv file to a numpy array. Note that this code is only useful for data which is a priori egocentric, i.e. head-fixed or otherwise restrained animals.
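A usage sketch, assuming the function is re-exported at the package top level as its predecessor `csv_to_numpy` was:

```python
import vame  # top-level export of pose_to_numpy is an assumption

config = "/path/to/my_vame_project/config.yaml"  # placeholder path

# Converts the project's pose-estimation files into .npy arrays.
vame.pose_to_numpy(config, save_logs=True)
```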

                  Raises:

                  diff --git a/docs/reference/vame/util/data_manipulation/index.html b/docs/reference/vame/util/data_manipulation/index.html index 9e6ab122..4fc92571 100644 --- a/docs/reference/vame/util/data_manipulation/index.html +++ b/docs/reference/vame/util/data_manipulation/index.html @@ -3,13 +3,25 @@ -vame.util.data_manipulation | VAME +vame.util.data_manipulation | VAME - + -

                  vame.util.data_manipulation

                  consecutive

                  +

                  vame.util.data_manipulation

                  get_pose_data_from_nwb_file

                  +
                  def get_pose_data_from_nwb_file(
                  nwbfile: NWBFile, path_to_pose_nwb_series_data: str) -> LabelledDict
                  +

Get pose data from an NWB file, using a path to the pose series inside the file.

                  +

                  Arguments:

                  +
                    +
                  • nwbfile NWBFile - NWB file object.
                  • +
                  • path_to_pose_nwb_series_data str - Path to the pose data inside the nwb file.
                  • +
                  +

                  Returns:

                  +
                    +
                  • LabelledDict - Pose data.
                  • +
                  +
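A short sketch of reading pose data from an NWB file (the file path and the internal series path are placeholders):

```python
from pynwb import NWBHDF5IO
from vame.util.data_manipulation import get_pose_data_from_nwb_file  # module path from this page

with NWBHDF5IO("/data/mouse_01.nwb", mode="r") as io:  # placeholder file
    nwbfile = io.read()
    pose_data = get_pose_data_from_nwb_file(
        nwbfile,
        path_to_pose_nwb_series_data="behavior/PoseEstimation/pose_series",  # placeholder internal path
    )

print(list(pose_data.keys()))
```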

                  consecutive

                  def consecutive(data: np.ndarray, stepsize: int = 1) -> List[np.ndarray]

                  Find consecutive sequences in the data array.
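For example:

```python
import numpy as np
from vame.util.data_manipulation import consecutive  # module path from this page

data = np.array([1, 2, 3, 7, 8, 20])

# Splits wherever the gap between neighbours differs from `stepsize`.
runs = consecutive(data, stepsize=1)
# Expected: [array([1, 2, 3]), array([7, 8]), array([20])]
```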

                  Arguments:

                  diff --git a/docs/reference/vame/util/gif_pose_helper/index.html b/docs/reference/vame/util/gif_pose_helper/index.html index ff231932..e636cfc1 100644 --- a/docs/reference/vame/util/gif_pose_helper/index.html +++ b/docs/reference/vame/util/gif_pose_helper/index.html @@ -5,7 +5,7 @@ vame.util.gif_pose_helper | VAME - + diff --git a/docs/reference/vame/util/model_util/index.html b/docs/reference/vame/util/model_util/index.html index b9ccc873..f017d618 100644 --- a/docs/reference/vame/util/model_util/index.html +++ b/docs/reference/vame/util/model_util/index.html @@ -5,7 +5,7 @@ vame.util.model_util | VAME - + diff --git a/index.html b/index.html index 868fbc22..e3945873 100644 --- a/index.html +++ b/index.html @@ -5,7 +5,7 @@ VAME | VAME - + diff --git a/markdown-page/index.html b/markdown-page/index.html index 7087b085..3b9cf73b 100644 --- a/markdown-page/index.html +++ b/markdown-page/index.html @@ -5,7 +5,7 @@ Markdown page example | VAME - +