diff --git a/Modules/Apps/PowerShell_Ollama_AI_Blobs.mkape b/Modules/Apps/PowerShell_Ollama_AI_Blobs.mkape
new file mode 100644
index 000000000..d01255609
--- /dev/null
+++ b/Modules/Apps/PowerShell_Ollama_AI_Blobs.mkape
@@ -0,0 +1,17 @@
+Description: Ollama-AI Blob Files
+Category: PowerShell
+Author: DReneau
+Version: 1.0
+Id: a31a4412-f6d4-4098-9ba1-feba2f96ad57
+ExportFormat: txt
+Processors:
+    -
+        Executable: C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe
+        CommandLine: "$destinationPath = '%destinationDirectory%\\ollama_combined_blobs.txt'; $usersPath = Join-Path '%SourceDirectory%' 'Users'; Get-ChildItem -Path $usersPath -Directory | ForEach-Object { $modelsPath = Join-Path $_.FullName '.ollama\\models\\blobs'; if (Test-Path $modelsPath) { Get-ChildItem -Path $modelsPath -File | Where-Object { $_.Length -lt 2KB } | ForEach-Object { $fileContent = Get-Content -Path $_.FullName -Raw -ErrorAction SilentlyContinue; if ($fileContent -match '\"model_format\"') { $entry = ('{0} | {1}' -f $_.Name, $fileContent); Add-Content -Path $destinationPath -Value $entry; Add-Content -Path $destinationPath -Value \"`r`n\"; } } } }"
+        ExportFormat: txt
+
+# Documentation
+# https://ollama.com/blog | https://github.com/ollama/ollama | https://hub.docker.com/r/ollama/ollama
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module powershell_ollama_ai_blobs
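For analysts who want to adapt or test the blob-scan logic outside of KAPE, the following is a rough standalone sketch of what the one-liner above encodes. The $SourceDrive and $OutputDir variables are hypothetical placeholders for KAPE's %SourceDirectory% and %destinationDirectory% values; the module itself remains the authoritative version.

$SourceDrive = 'C:\'                      # hypothetical stand-in for %SourceDirectory%
$OutputDir   = 'K:\case-12345\Kape\mout'  # hypothetical stand-in for %destinationDirectory%

$destinationPath = Join-Path $OutputDir 'ollama_combined_blobs.txt'

Get-ChildItem -Path (Join-Path $SourceDrive 'Users') -Directory | ForEach-Object {
    $blobsPath = Join-Path $_.FullName '.ollama\models\blobs'
    if (Test-Path $blobsPath) {
        # Only read blobs under 2 KB; the large blobs are the model weights themselves.
        Get-ChildItem -Path $blobsPath -File |
            Where-Object { $_.Length -lt 2KB } |
            ForEach-Object {
                $fileContent = Get-Content -Path $_.FullName -Raw -ErrorAction SilentlyContinue
                if ($fileContent -match '"model_format"') {
                    Add-Content -Path $destinationPath -Value ('{0} | {1}' -f $_.Name, $fileContent)
                    Add-Content -Path $destinationPath -Value "`r`n"
                }
            }
    }
}

The 2 KB size filter keeps the scan fast: it skips the multi-gigabyte weight blobs and only reads the small JSON metadata layers, the ones containing "model_format" being the model configuration records of interest.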
diff --git a/Modules/Apps/PowerShell_Ollama_AI_Keys.mkape b/Modules/Apps/PowerShell_Ollama_AI_Keys.mkape
new file mode 100644
index 000000000..ad9b5b2c2
--- /dev/null
+++ b/Modules/Apps/PowerShell_Ollama_AI_Keys.mkape
@@ -0,0 +1,18 @@
+Description: Ollama-AI Private-Public Key Finder
+Category: PowerShell
+Author: DReneau
+Version: 1.0
+Id: f5a65250-42bd-4c11-80dd-4ab621e0c8b8
+ExportFormat: TXT
+Processors:
+    -
+        Executable: C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe
+        CommandLine: "$users = Get-ChildItem -Path '%SourceDirectory%\\Users' -Directory; foreach ($user in $users) { $ollamaPath = Join-Path $user.FullName '.ollama'; $privateKeyPath = Join-Path $ollamaPath 'id_*'; $publicKeyPath = Join-Path $ollamaPath '*.pub'; if (Test-Path $ollamaPath) { $privateKey = Get-ChildItem -Path $privateKeyPath -Exclude *.pub | ForEach-Object { $content = Get-Content -Path $_.FullName -Raw; [PSCustomObject]@{ Name = $_.Name; FilePath = $_.FullName; KeyLocated = $content } }; $privateKeyOutput = $privateKey | ForEach-Object { 'Name: ' + $_.Name + [System.Environment]::NewLine + [System.Environment]::NewLine + 'FilePath: ' + $_.FilePath + [System.Environment]::NewLine + [System.Environment]::NewLine + 'Key: ' + $_.KeyLocated + [System.Environment]::NewLine + [System.Environment]::NewLine }; Set-Content -Path '%destinationDirectory%\\ollama_privatekey.txt' -Value $privateKeyOutput -Encoding UTF8; if (Test-Path $publicKeyPath) { $publicKey = Get-ChildItem -Path $publicKeyPath | ForEach-Object { $pubContent = Get-Content -Path $_.FullName -Raw; 'Name: ' + $_.Name + [System.Environment]::NewLine + [System.Environment]::NewLine + 'FilePath: ' + $_.FullName + [System.Environment]::NewLine + [System.Environment]::NewLine + 'Key: ' + $pubContent + [System.Environment]::NewLine + [System.Environment]::NewLine }; Set-Content -Path '%destinationDirectory%\\ollama_publickey.txt' -Value $publicKey -Encoding UTF8 } } }"
+        ExportFormat: TXT
+
+# Documentation
+# https://ollama.com/blog | https://github.com/ollama/ollama | https://hub.docker.com/r/ollama/ollama
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# The code will locate and export each user's Ollama private and public key files.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module powershell_ollama_ai_keys
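The Keys one-liner can likewise be unrolled into a readable form. Again, $SourceDrive and $OutputDir are hypothetical stand-ins for the KAPE variables, and note that, like the module, Set-Content overwrites the report files for each profile processed, so the last user with keys wins.

$SourceDrive = 'C:\'                      # hypothetical stand-in for %SourceDirectory%
$OutputDir   = 'K:\case-12345\Kape\mout'  # hypothetical stand-in for %destinationDirectory%

foreach ($user in (Get-ChildItem -Path (Join-Path $SourceDrive 'Users') -Directory)) {
    $ollamaPath = Join-Path $user.FullName '.ollama'
    if (-not (Test-Path $ollamaPath)) { continue }

    # Ollama keeps its signing keypair (typically id_ed25519 / id_ed25519.pub) directly under .ollama
    $privateKeys = Get-ChildItem -Path (Join-Path $ollamaPath 'id_*') -Exclude *.pub -ErrorAction SilentlyContinue
    $publicKeys  = Get-ChildItem -Path (Join-Path $ollamaPath '*.pub') -ErrorAction SilentlyContinue

    $privateReport = $privateKeys | ForEach-Object {
        "Name: $($_.Name)`r`nFilePath: $($_.FullName)`r`nKey: $(Get-Content -Path $_.FullName -Raw)`r`n"
    }
    $publicReport = $publicKeys | ForEach-Object {
        "Name: $($_.Name)`r`nFilePath: $($_.FullName)`r`nKey: $(Get-Content -Path $_.FullName -Raw)`r`n"
    }

    if ($privateReport) { Set-Content -Path (Join-Path $OutputDir 'ollama_privatekey.txt') -Value $privateReport -Encoding UTF8 }
    if ($publicReport)  { Set-Content -Path (Join-Path $OutputDir 'ollama_publickey.txt')  -Value $publicReport  -Encoding UTF8 }
}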
diff --git a/Modules/Apps/PowerShell_Ollama_AI_Manifests.mkape b/Modules/Apps/PowerShell_Ollama_AI_Manifests.mkape
new file mode 100644
index 000000000..e54325706
--- /dev/null
+++ b/Modules/Apps/PowerShell_Ollama_AI_Manifests.mkape
@@ -0,0 +1,19 @@
+Description: Ollama-AI Manifests
+Category: PowerShell
+Author: DReneau
+Version: 1.0
+Id: 48146441-174c-43a6-8dd0-8c317f1004e2
+ExportFormat: txt
+Processors:
+    -
+        Executable: C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe
+        CommandLine: "$destinationPath = '%destinationDirectory%\\ollama_combined_manifests.txt'; Remove-Item -Path $destinationPath -ErrorAction SilentlyContinue; $users = Get-ChildItem -Path '%SourceDirectory%\\Users' -Directory; foreach ($user in $users) { $ollamaPath = Join-Path $user.FullName '.ollama'; if (Test-Path $ollamaPath) { $modelsPath = Join-Path $ollamaPath 'models\\manifests'; if (Test-Path $modelsPath) { Get-ChildItem -Path $modelsPath -Recurse -File | ForEach-Object { $modelName = $_.Name; $fileContent = Get-Content -Path $_.FullName -Raw -ErrorAction SilentlyContinue; if ($fileContent -match '\"mediaType\"') { $entry = ('{0} | {1}' -f $_.FullName, $fileContent); Add-Content -Path $destinationPath -Value $entry; Add-Content -Path $destinationPath -Value \"`r`n`r`n\"; } } } } }"
+        ExportFormat: txt
+
+# Documentation
+# https://ollama.com/blog | https://github.com/ollama/ollama | https://hub.docker.com/r/ollama/ollama
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# The code will collect every installed model's manifest into a single combined output file.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module powershell_ollama_ai_manifests
+# https://www.wiz.io/blog/probllama-ollama-vulnerability-cve-2024-37032
diff --git a/Modules/Apps/PowerShell_Ollama_AI_Models.mkape b/Modules/Apps/PowerShell_Ollama_AI_Models.mkape
new file mode 100644
index 000000000..ecf7c9a2f
--- /dev/null
+++ b/Modules/Apps/PowerShell_Ollama_AI_Models.mkape
@@ -0,0 +1,19 @@
+Description: Ollama-AI Installed LLMs
+Category: PowerShell
+Author: DReneau
+Version: 1.0
+Id: d165b496-55d9-480a-a592-b80dd50e2e14
+ExportFormat: CSV
+Processors:
+    -
+        Executable: C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe
+        CommandLine: "$users = Get-ChildItem -Path '%SourceDirectory%\\Users' -Directory; foreach ($user in $users) { $ollamaPath = Join-Path $user.FullName '.ollama'; if (Test-Path $ollamaPath) { $outputPath = '%destinationDirectory%\\ollama_models_identified.csv'; Remove-Item $outputPath -ErrorAction SilentlyContinue; $manifestsPath = Join-Path $ollamaPath 'models\\manifests'; $legitPath = Join-Path $manifestsPath 'registry.ollama.ai\\library'; $allModels = Get-ChildItem -Path $manifestsPath -Directory -Recurse | Where-Object { $_.Name -notin @('registry.ollama.ai', 'library') -and (Get-ChildItem -Path $_.FullName -File -ErrorAction SilentlyContinue | Where-Object { $_.Extension -eq '' }) } | ForEach-Object { $modelName = $_.Name; $modelFile = Get-ChildItem -Path $_.FullName -File -ErrorAction SilentlyContinue | Select-Object -First 1; $isLegit = if ($_.FullName -match [regex]::Escape($legitPath)) { 'Ollama Repo' } else { 'Non Ollama Repo' }; if ($modelFile -and (Test-Path $modelFile.FullName)) { try { $jsonContent = Get-Content -Path $modelFile.FullName -Raw -ErrorAction Stop | ConvertFrom-Json; $modelIntegrity = ($jsonContent.layers | Where-Object { $_.mediaType -eq 'application/vnd.ollama.image.model' }).digest; $sizeInGB = '{0:N1} GB' -f (($jsonContent.layers | Measure-Object -Property size -Sum).Sum / 1GB); $modifiedDate = (Get-Item $modelFile.FullName).LastWriteTime.ToString('g'); } catch { $modelIntegrity = 'Parsing Error'; $sizeInGB = 'Unknown'; $modifiedDate = 'Unknown'; } } else { $modelIntegrity = 'Not Found'; $sizeInGB = 'Unknown'; $modifiedDate = 'Unknown'; $modelFile = 'Access Denied' }; [PSCustomObject]@{ Name = $modelName; Sha = $modelIntegrity.Split(':')[1]; Size = $sizeInGB; Modified = $modifiedDate; Path = $modelFile.FullName; 'Source' = $isLegit } }; $allModels | Export-Csv -Path $outputPath -NoTypeInformation -Force } }"
+        ExportFormat: CSV
+
+# Documentation
+# This module identifies installed Ollama models and whether each was downloaded from the official Ollama repository.
+# https://ollama.com/blog | https://github.com/ollama/ollama | https://hub.docker.com/r/ollama/ollama
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# The code will report each installed model's name, integrity hash, size, and source repository.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module powershell_ollama_ai_models
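The Models module does the most work, so an approximate multi-line rendering of its manifest parsing may help when reviewing or extending it. This sketch mirrors the idea rather than the exact one-liner (it walks manifest files instead of directories), and $SourceDrive / $OutputDir are hypothetical placeholders as before.

$SourceDrive = 'C:\'                      # hypothetical stand-in for %SourceDirectory%
$OutputDir   = 'K:\case-12345\Kape\mout'  # hypothetical stand-in for %destinationDirectory%
$outputPath  = Join-Path $OutputDir 'ollama_models_identified.csv'

$rows = foreach ($user in (Get-ChildItem -Path (Join-Path $SourceDrive 'Users') -Directory)) {
    $manifestsPath = Join-Path $user.FullName '.ollama\models\manifests'
    if (-not (Test-Path $manifestsPath)) { continue }
    $legitPath = Join-Path $manifestsPath 'registry.ollama.ai\library'

    # Each manifest is a JSON document describing one model tag and its layers.
    Get-ChildItem -Path $manifestsPath -Recurse -File -ErrorAction SilentlyContinue | ForEach-Object {
        try {
            $manifest   = Get-Content -Path $_.FullName -Raw -ErrorAction Stop | ConvertFrom-Json
            $modelLayer = $manifest.layers | Where-Object { $_.mediaType -eq 'application/vnd.ollama.image.model' }
            $digest     = ($modelLayer.digest -split ':')[-1]
            $sizeInGB   = '{0:N1} GB' -f (($manifest.layers | Measure-Object -Property size -Sum).Sum / 1GB)
        } catch {
            $digest = 'Parsing Error'; $sizeInGB = 'Unknown'
        }
        $source = if ($_.FullName -like "$legitPath*") { 'Ollama Repo' } else { 'Non Ollama Repo' }
        [PSCustomObject]@{
            Name     = $_.Directory.Name          # model name; the manifest file name itself is the tag
            Sha      = $digest
            Size     = $sizeInGB
            Modified = $_.LastWriteTime.ToString('g')
            Path     = $_.FullName
            Source   = $source
        }
    }
}

$rows | Export-Csv -Path $outputPath -NoTypeInformation -Force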
diff --git a/Modules/Apps/PowerShell_Ollama_AI_cve-2024-37032.mkape b/Modules/Apps/PowerShell_Ollama_AI_cve-2024-37032.mkape
new file mode 100644
index 000000000..72619867e
--- /dev/null
+++ b/Modules/Apps/PowerShell_Ollama_AI_cve-2024-37032.mkape
@@ -0,0 +1,20 @@
+Description: Ollama-AI Manifest Path Traversal Scanner
+Category: PowerShell
+Author: DReneau
+Version: 1.0
+Id: ce916f46-4160-4953-b635-3071494ab50b
+ExportFormat: txt
+Processors:
+    -
+        Executable: C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe
+        CommandLine: "$destinationPath = '%destinationDirectory%\\ollama_path_traversal_check.txt'; Remove-Item -Path $destinationPath -ErrorAction SilentlyContinue; $users = Get-ChildItem -Path '%SourceDirectory%\\Users' -Directory; foreach ($user in $users) { $ollamaPath = Join-Path $user.FullName '.ollama'; if (Test-Path $ollamaPath) { $modelsPath = Join-Path $ollamaPath 'models\\manifests'; if (Test-Path $modelsPath) { Get-ChildItem -Path $modelsPath -Recurse -File | ForEach-Object { $modelName = $_.Name; $fileContent = Get-Content -Path $_.FullName -Raw -ErrorAction SilentlyContinue; if ($fileContent -match '(\\..\\/)+|traversal') { $entry = ('{0} | {1}' -f $_.FullName, $fileContent.Substring(0, [Math]::Min($fileContent.Length, 200)) + '...'); Add-Content -Path $destinationPath -Value $entry; Add-Content -Path $destinationPath -Value \"`r`n`r`n\"; } } } } }"
+        ExportFormat: txt
+
+# Documentation
+# https://ollama.com/blog | https://github.com/ollama/ollama | https://hub.docker.com/r/ollama/ollama
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# The code will scan each model manifest for path traversal indicators associated with CVE-2024-37032.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module powershell_ollama_ai_cve-2024-37032
+# https://www.wiz.io/blog/probllama-ollama-vulnerability-cve-2024-37032
+# https://owasp.org/www-community/attacks/Path_Traversal
diff --git a/Modules/Compound/Ollama-AI.mkape b/Modules/Compound/Ollama-AI.mkape
new file mode 100644
index 000000000..3b19fb7ca
--- /dev/null
+++ b/Modules/Compound/Ollama-AI.mkape
@@ -0,0 +1,37 @@
+Description: Ollama-AI Parsers
+Category: Modules
+Author: DReneau
+Version: 1.0
+Id: 4e934950-54e4-4c6d-a1de-cb24e3872f5e
+ExportFormat: txt
+Processors:
+    -
+        Executable: PowerShell_Ollama_AI_Blobs.mkape
+        CommandLine: ""
+        ExportFormat: ""
+    -
+        Executable: PowerShell_Ollama_AI_Keys.mkape
+        CommandLine: ""
+        ExportFormat: ""
+    -
+        Executable: PowerShell_Ollama_AI_Manifests.mkape
+        CommandLine: ""
+        ExportFormat: ""
+    -
+        Executable: PowerShell_Ollama_AI_Models.mkape
+        CommandLine: ""
+        ExportFormat: ""
+    -
+        Executable: PowerShell_Docker_Containers.mkape
+        CommandLine: ""
+        ExportFormat: ""
+    -
+        Executable: PowerShell_Ollama_AI_cve-2024-37032.mkape
+        CommandLine: ""
+        ExportFormat: ""
+
+# Documentation
+# https://www.youtube.com/watch?v=aHhQvxwkuuw
+# Ollama is used for self-hosted AI inference, and it supports many models out of the box.
+# Ollama serves as the backend for common AI projects such as OpenWebUI, among others.
+# .\kape.exe --msource c:\ --mdest k:\case-12345\Kape\mout --module ollama-ai
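Finally, a minimal sketch of the CVE-2024-37032 manifest check, with the same hypothetical $SourceDrive and $OutputDir placeholders. It deliberately matches literal '../' sequences plus the word 'traversal', which is a slightly tighter pattern than the regex embedded in the module above.

$SourceDrive = 'C:\'                      # hypothetical stand-in for %SourceDirectory%
$OutputDir   = 'K:\case-12345\Kape\mout'  # hypothetical stand-in for %destinationDirectory%
$destinationPath = Join-Path $OutputDir 'ollama_path_traversal_check.txt'

foreach ($user in (Get-ChildItem -Path (Join-Path $SourceDrive 'Users') -Directory)) {
    $manifestsPath = Join-Path $user.FullName '.ollama\models\manifests'
    if (-not (Test-Path $manifestsPath)) { continue }

    Get-ChildItem -Path $manifestsPath -Recurse -File -ErrorAction SilentlyContinue | ForEach-Object {
        $fileContent = Get-Content -Path $_.FullName -Raw -ErrorAction SilentlyContinue
        # A manifest abused via CVE-2024-37032 carries ../ sequences in a digest instead of a plain sha256 value.
        if ($fileContent -match '\.\./' -or $fileContent -match 'traversal') {
            $preview = $fileContent.Substring(0, [Math]::Min($fileContent.Length, 200))
            Add-Content -Path $destinationPath -Value ('{0} | {1}...' -f $_.FullName, $preview)
            Add-Content -Path $destinationPath -Value "`r`n"
        }
    }
}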