diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
new file mode 100644
index 00000000..a1edc0c9
--- /dev/null
+++ b/.github/workflows/python.yml
@@ -0,0 +1,84 @@
+name: Python
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+ release:
+ types: [published]
+
+jobs:
+ detect-packages:
+ runs-on: ubuntu-latest
+ outputs:
+ packages: ${{ steps.find-packages.outputs.packages }}
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Find Python packages
+ id: find-packages
+ working-directory: src
+ run: |
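+          # List every directory under src/ that contains a pyproject.toml and emit it as a JSON array (e.g. ["git", "sqlite"]) for the build matrix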
+ PACKAGES=$(find . -name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]')
+ echo "packages=$PACKAGES" >> $GITHUB_OUTPUT
+
+ build:
+ needs: [detect-packages]
+ strategy:
+ matrix:
+ package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
+ name: Build ${{ matrix.package }}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v3
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version-file: "src/${{ matrix.package }}/.python-version"
+
+ - name: Install dependencies
+ working-directory: src/${{ matrix.package }}
+ run: uv sync --frozen --all-extras --dev
+
+ - name: Run pyright
+ working-directory: src/${{ matrix.package }}
+ run: uv run --frozen pyright
+
+ - name: Build package
+ working-directory: src/${{ matrix.package }}
+ run: uv build
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist-${{ matrix.package }}
+ path: src/${{ matrix.package }}/dist/
+
+ publish:
+ runs-on: ubuntu-latest
+ needs: [build, detect-packages]
+ if: github.event_name == 'release'
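+    # Only runs for GitHub release events; each package's dist/ is pulled from the matching build artifact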
+
+ strategy:
+ matrix:
+ package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
+ name: Publish ${{ matrix.package }}
+
+ environment: release
+ permissions:
+ id-token: write # Required for trusted publishing
+
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ name: dist-${{ matrix.package }}
+ path: dist/
+
+ - name: Publish package to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
diff --git a/.github/workflows/main.yml b/.github/workflows/typescript.yml
similarity index 97%
rename from .github/workflows/main.yml
rename to .github/workflows/typescript.yml
index 7b85236b..07229176 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/typescript.yml
@@ -1,3 +1,5 @@
+name: TypeScript
+
on:
push:
branches:
diff --git a/README.md b/README.md
index 5f46926a..7c1a6f41 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,4 @@
-# MCP servers ![NPM Version](https://img.shields.io/npm/v/%40modelcontextprotocol%2Fexample-servers)
-
-[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)
+# MCP servers
A collection of reference implementations and community-contributed servers for the [Model Context Protocol](https://modelcontextprotocol.io/) (MCP). This repository showcases the versatility and extensibility of MCP, demonstrating how it can be used to give Large Language Models (LLMs) secure, controlled access to tools and data sources.
diff --git a/src/brave-search/package.json b/src/brave-search/package.json
index eb7ec5ac..0877bff4 100644
--- a/src/brave-search/package.json
+++ b/src/brave-search/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-brave-search",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for Brave Search API integration",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/everything/package.json b/src/everything/package.json
index ecad03e4..de83de7d 100644
--- a/src/everything/package.json
+++ b/src/everything/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-everything",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server that exercises all the features of the MCP protocol",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/filesystem/package.json b/src/filesystem/package.json
index dbd43f6c..1e1865bc 100644
--- a/src/filesystem/package.json
+++ b/src/filesystem/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-filesystem",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for filesystem access",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/gdrive/package.json b/src/gdrive/package.json
index 1373a18f..c583ca78 100644
--- a/src/gdrive/package.json
+++ b/src/gdrive/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-gdrive",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for interacting with Google Drive",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/git/README.md b/src/git/README.md
index cb2681f7..35885439 100644
--- a/src/git/README.md
+++ b/src/git/README.md
@@ -1,24 +1,22 @@
-# mcp-git: A git MCP server
+# mcp-server-git: A git MCP server
+
+## Overview
A Model Context Protocol server for Git repository interaction and automation. This server provides tools to read, search, and manipulate Git repositories via Large Language Models.
-Please note that mcp-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server.
+Please note that mcp-server-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server.
## Available Tools
The current list of tools includes:
-- `git_read_file`: Read contents of a file at a specific Git reference
-- `git_list_files`: List all files in a repository or subdirectory
-- `git_file_history`: Get commit history for a specific file
-- `git_commit`: Create Git commits with messages and specified files
-- `git_search_code`: Search repository content with pattern matching
-- `git_get_diff`: View diffs between Git references
-- `git_get_repo_structure`: View repository file structure
-- `git_list_repos`: List available Git repositories
-- `git_log`: Retrieve commit log for the repository
-- `git_list_branches`: List all branches in the repository
-- `git_list_tags`: List all tags in the repository
+- `git_status`: Shows the working tree status
+- `git_diff_unstaged`: Shows changes in the working directory that are not yet staged
+- `git_diff_staged`: Shows changes that are staged for commit
+- `git_commit`: Records changes to the repository
+- `git_add`: Adds file contents to the staging area
+- `git_reset`: Unstages all staged changes
+- `git_log`: Shows the commit logs
This list is expected to grow as we add more functionality to the server. We welcome contributions from the community to expand and enhance the available tools.
@@ -27,77 +25,108 @@ This list is expected to grow as we add more functionality to the server. We wel
### Using uv (recommended)
When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
-use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-git*.
+use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-git*.
### Using PIP
-Alternatively you can install `mcp-git` via pip:
+Alternatively you can install `mcp-server-git` via pip:
```
-pip install mcp-git
+pip install mcp-server-git
```
After installation, you can run it as a script using:
```
-python -m mcp_git
+python -m mcp_server_git
```
## Configuration
+
### Configure for Claude.app
Add to your Claude settings:
+
+Using uvx
+
```json
"mcpServers": {
- "mcp-git": {
+ "git": {
"command": "uvx",
- "args": ["mcp-git", "--repository", "path/to/git/repo"]
+ "args": ["mcp-server-git", "--repository", "path/to/git/repo"]
}
}
```
+
-Alternatively, if using pip installation:
+
+Using pip installation
```json
"mcpServers": {
- "mcp-git": {
+ "git": {
"command": "python",
- "args": ["-m", "mcp_git", "--repository", "path/to/git/repo"]
+ "args": ["-m", "mcp_server_git", "--repository", "path/to/git/repo"]
}
}
```
+
### Configure for Zed
Add to your Zed settings.json:
+
+Using uvx
+
```json
"context_servers": [
- "mcp-git": {
+ "mcp-server-git": {
"command": "uvx",
- "args": ["mcp-git"]
+ "args": ["mcp-server-git"]
}
],
```
+
-Alternatively, if using pip installation:
+
+Using pip installation
```json
"context_servers": {
- "mcp-git": {
+ "mcp-server-git": {
"command": "python",
- "args": ["-m", "mcp-git"]
+ "args": ["-m", "mcp_server_git"]
}
},
```
+
+
+## Debugging
+
+You can use the MCP inspector to debug the server. For uvx installations:
+
+```
+npx @modelcontextprotocol/inspector uvx mcp-server-git
+```
+Or if you've installed the package in a specific directory or are developing on it:
+
+```
+cd path/to/servers/src/git
+npx @modelcontextprotocol/inspector uv run mcp-server-git
+```
## Contributing
-We encourage contributions to help expand and improve mcp-git. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable.
+We encourage contributions to help expand and improve mcp-server-git. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable.
For examples of other MCP servers and implementation patterns, see:
https://github.com/modelcontextprotocol/servers
-Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-git even more powerful and useful.
+Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-git even more powerful and useful.
+
+## License
+
+mcp-server-git is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
diff --git a/src/git/pyproject.toml b/src/git/pyproject.toml
index 3fc2ae8c..85dd3ddf 100644
--- a/src/git/pyproject.toml
+++ b/src/git/pyproject.toml
@@ -1,6 +1,6 @@
[project]
-name = "mcp-git"
-version = "0.2.0"
+name="mcp-server-git"
+version = "0.4.0"
description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs"
readme = "README.md"
requires-python = ">=3.10"
@@ -23,11 +23,11 @@ dependencies = [
]
[project.scripts]
-mcp-git = "mcp_git:main"
+mcp-server-git = "mcp_server_git:main"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.uv]
-dev-dependencies = ["ruff>=0.7.3"]
+dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"]
diff --git a/src/git/src/mcp_git/__init__.py b/src/git/src/mcp_git/__init__.py
deleted file mode 100644
index 25c85d50..00000000
--- a/src/git/src/mcp_git/__init__.py
+++ /dev/null
@@ -1,500 +0,0 @@
-import logging
-import json
-import sys
-import click
-import anyio
-import anyio.lowlevel
-from pathlib import Path
-from git.types import Sequence
-from mcp.server import Server
-from mcp.server.session import ServerSession
-from mcp.server.stdio import stdio_server
-from mcp.types import (
- ClientCapabilities,
- TextContent,
- Tool,
- EmbeddedResource,
- ImageContent,
- ListRootsResult,
- RootsCapability,
-)
-from enum import Enum
-import git
-from git.objects import Blob, Tree
-
-from pydantic import BaseModel, Field
-from typing import List, Optional
-
-
-class ReadFileInput(BaseModel):
- repo_path: str
- file_path: str
- ref: str = "HEAD"
-
-
-class ListFilesInput(BaseModel):
- repo_path: str
- path: str = ""
- ref: str = "HEAD"
-
-
-class FileHistoryInput(BaseModel):
- repo_path: str
- file_path: str
- max_entries: int = 10
-
-
-class CommitInput(BaseModel):
- repo_path: str
- message: str
- files: Optional[List[str]] = Field(
- None,
- description="List of files to stage and commit. If omitted, all changes will be staged.",
- )
-
-
-class SearchCodeInput(BaseModel):
- repo_path: str
- query: str
- file_pattern: str = "*"
- ref: str = "HEAD"
-
-
-class GetDiffInput(BaseModel):
- repo_path: str
- ref1: str
- ref2: str
- file_path: Optional[str] = None
-
-
-class GetRepoStructureInput(BaseModel):
- repo_path: str
- ref: str = "HEAD"
-
-
-class ListReposInput(BaseModel):
- pass
-
-
-class GitLogInput(BaseModel):
- repo_path: str
- max_count: int = 10
- ref: str = "HEAD"
-
-
-class ListBranchesInput(BaseModel):
- repo_path: str
-
-
-class ListTagsInput(BaseModel):
- repo_path: str
-
-
-class GitTools(str, Enum):
- READ_FILE = "git_read_file"
- LIST_FILES = "git_list_files"
- FILE_HISTORY = "git_file_history"
- COMMIT = "git_commit"
- SEARCH_CODE = "git_search_code"
- GET_DIFF = "git_get_diff"
- GET_REPO_STRUCTURE = "git_get_repo_structure"
- LIST_REPOS = "git_list_repos"
- GIT_LOG = "git_log"
- LIST_BRANCHES = "git_list_branches"
- LIST_TAGS = "git_list_tags"
-
-
-def git_read_file(repo: git.Repo, file_path: str, ref: str = "HEAD") -> str:
- tree = repo.commit(ref).tree
- blob = tree / file_path
- try:
- return blob.data_stream.read().decode("utf-8", errors="replace")
- except UnicodeDecodeError:
- # If it's a binary file, return a message indicating that
- return "[Binary file content not shown]"
-
-
-def git_list_files(repo: git.Repo, path: str = "", ref: str = "HEAD") -> Sequence[str]:
- tree = repo.commit(ref).tree
- if path:
- tree = tree / path
- # Use traverse() and isinstance() to get only blobs (files) recursively
- return [str(o.path) for o in tree.traverse() if isinstance(o, Blob)]
-
-
-def git_file_history(
- repo: git.Repo, file_path: str, max_entries: int = 10
-) -> Sequence[str]:
- commits = list(repo.iter_commits(paths=file_path, max_count=max_entries))
- history = []
- for commit in commits:
- history.append(
- f"Commit: {commit.hexsha}\n"
- f"Author: {commit.author}\n"
- f"Date: {commit.authored_datetime}\n"
- f"Message: {commit.message}\n"
- )
- return history
-
-
-def git_commit(repo: git.Repo, message: str, files: list[str] | None = None) -> str:
- if files is not None:
- repo.index.add(files)
- else:
- repo.index.add("*") # Stage all changes
- commit = repo.index.commit(message)
- return f"Changes committed successfully with hash {commit.hexsha}"
-
-
-def git_search_code(
- repo: git.Repo, query: str, file_pattern: str = "*", ref: str = "HEAD"
-) -> list[str]:
- results = []
- tree = repo.commit(ref).tree
- for blob in tree.traverse():
- if isinstance(blob, Blob) and Path(blob.path).match(file_pattern):
- try:
- content = blob.data_stream.read().decode("utf-8", errors="replace")
- for i, line in enumerate(content.splitlines()):
- if query in line:
- results.append(f"{blob.path}:{i+1}: {line}")
- except UnicodeDecodeError:
- # Skip binary files
- continue
- return results
-
-
-def git_get_diff(
- repo: git.Repo, ref1: str, ref2: str, file_path: str | None = None
-) -> str:
- if file_path:
- return repo.git.diff(ref1, ref2, "--", file_path)
- return repo.git.diff(ref1, ref2)
-
-
-def git_get_repo_structure(repo: git.Repo, ref: str = "HEAD") -> str:
- tree = repo.commit(ref).tree
-
- def build_tree(tree_obj: Tree) -> dict:
- result = {}
- for item in tree_obj:
- if isinstance(item, Tree):
- result[item.name] = build_tree(item)
- else:
- result[item.name] = item.type
- return result
-
- structure = build_tree(tree)
- return str(structure)
-
-
-def git_log(repo: git.Repo, max_count: int = 10, ref: str = "HEAD") -> list[str]:
- commits = list(repo.iter_commits(ref, max_count=max_count))
- log = []
- for commit in commits:
- log.append(
- f"Commit: {commit.hexsha}\n"
- f"Author: {commit.author}\n"
- f"Date: {commit.authored_datetime}\n"
- f"Message: {commit.message}\n"
- )
- return log
-
-
-def git_list_branches(repo: git.Repo) -> list[str]:
- return [str(branch) for branch in repo.branches]
-
-
-def git_list_tags(repo: git.Repo) -> list[str]:
- return [str(tag) for tag in repo.tags]
-
-
-async def serve(repository: Path | None) -> None:
- # Set up logging
- logger = logging.getLogger(__name__)
-
- if repository is not None:
- try:
- git.Repo(repository)
- logger.info(f"Using repository at {repository}")
- except git.InvalidGitRepositoryError:
- logger.error(f"{repository} is not a valid Git repository")
- return
-
- # Create server
- server = Server("mcp-git")
-
- @server.list_tools()
- async def list_tools() -> list[Tool]:
- return [
- Tool(
- name=GitTools.READ_FILE,
- description="Retrieves and returns the content of a specified file from "
- "a Git repository at a given reference (commit, branch, or tag). This "
- "allows you to view file contents at any point in the repository's "
- "history.",
- inputSchema=ReadFileInput.schema(),
- ),
- Tool(
- name=GitTools.LIST_FILES,
- description="Enumerates all files in a Git repository or a specific "
- "directory within the repository. This tool can be used to explore the "
- "file structure of a project at a particular reference.",
- inputSchema=ListFilesInput.schema(),
- ),
- Tool(
- name=GitTools.FILE_HISTORY,
- description="Retrieves the commit history for a specific file, showing "
- "how it has changed over time. This includes commit hashes, authors, "
- "dates, and commit messages, allowing you to track the evolution of a "
- "file.",
- inputSchema=FileHistoryInput.schema(),
- ),
- Tool(
- name=GitTools.COMMIT,
- description="Commits changes to the repository. You can "
- "specify particular files to commit or commit all staged changes. This "
- "tool allows you to create new snapshots of your project with "
- "descriptive commit messages.",
- inputSchema=CommitInput.schema(),
- ),
- Tool(
- name=GitTools.SEARCH_CODE,
- description="Searches for specific patterns or text across all files in "
- "the repository. This powerful tool allows you to find occurrences of "
- "code, comments, or any text within your project, optionally filtering "
- "by file patterns and at a specific reference.",
- inputSchema=SearchCodeInput.schema(),
- ),
- Tool(
- name=GitTools.GET_DIFF,
- description="Computes and displays the differences between two Git "
- "references (commits, branches, or tags). This tool is crucial for "
- "understanding changes between different versions of your codebase, "
- "optionally focusing on a specific file.",
- inputSchema=GetDiffInput.schema(),
- ),
- Tool(
- name=GitTools.GET_REPO_STRUCTURE,
- description="Generates a representation of the repository's file and "
- "directory structure at a given reference. This provides a high-level "
- "overview of your project's organization, helping you understand the "
- "layout of your codebase.",
- inputSchema=GetRepoStructureInput.schema(),
- ),
- Tool(
- name=GitTools.LIST_REPOS,
- description="Enumerates all available Git repositories from the "
- "specified roots. This tool helps you manage and navigate multiple "
- "repositories, providing a comprehensive list of Git projects "
- "accessible to the current session.",
- inputSchema=ListReposInput.schema(),
- ),
- Tool(
- name=GitTools.GIT_LOG,
- description="Retrieves the commit log for the repository, showing the "
- "history of commits including commit hashes, authors, dates, and "
- "commit messages. This tool provides an overview of the project's "
- "development history.",
- inputSchema=GitLogInput.schema(),
- ),
- Tool(
- name=GitTools.LIST_BRANCHES,
- description="Lists all branches in the Git repository. This tool "
- "provides an overview of the different lines of development in the "
- "project.",
- inputSchema=ListBranchesInput.schema(),
- ),
- Tool(
- name=GitTools.LIST_TAGS,
- description="Lists all tags in the Git repository. This tool "
- "provides an overview of the tagged versions or releases in the "
- "project.",
- inputSchema=ListTagsInput.schema(),
- ),
- ]
-
- async def list_repos() -> Sequence[str]:
- async def by_roots() -> Sequence[str]:
- if not isinstance(server.request_context.session, ServerSession):
- raise TypeError(
- "server.request_context.session must be a ServerSession"
- )
-
- if not server.request_context.session.check_client_capability(
- ClientCapabilities(roots=RootsCapability())
- ):
- return []
-
- roots_result: ListRootsResult = (
- await server.request_context.session.list_roots()
- )
- logger.debug(f"Roots result: {roots_result}")
- repo_paths = []
- for root in roots_result.roots:
- path = root.uri.path
- try:
- # Verify this is a git repo
- git.Repo(path)
- repo_paths.append(str(path))
- except git.InvalidGitRepositoryError:
- pass
- return repo_paths
-
- def by_commandline() -> Sequence[str]:
- return [str(repository)] if repository is not None else []
-
- cmd_repos = by_commandline()
- root_repos = await by_roots()
- return [*root_repos, *cmd_repos]
-
- @server.call_tool()
- async def call_tool(
- name: str, arguments: dict
- ) -> list[TextContent | ImageContent | EmbeddedResource]:
- if name == GitTools.LIST_REPOS:
- result = await list_repos()
- logging.debug(f"repos={result}")
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a list of git repositories: {json.dumps(result)}",
- )
- ]
-
- repo_path = Path(arguments["repo_path"])
- repo = git.Repo(repo_path)
-
- match name:
- case GitTools.READ_FILE:
- content = git_read_file(
- repo, arguments["file_path"], arguments.get("ref", "HEAD")
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains the contents of a file: {json.dumps({'content': content})}",
- )
- ]
-
- case GitTools.LIST_FILES:
- files = git_list_files(
- repo, arguments.get("path", ""), arguments.get("ref", "HEAD")
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a list of files: {json.dumps({'files': list(files)})}",
- )
- ]
-
- case GitTools.FILE_HISTORY:
- history = git_file_history(
- repo, arguments["file_path"], arguments.get("max_entries", 10)
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a file's history: {json.dumps({'history': list(history)})}",
- )
- ]
-
- case GitTools.COMMIT:
- result = git_commit(repo, arguments["message"], arguments.get("files"))
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains the commit result: {json.dumps({'result': result})}",
- )
- ]
-
- case GitTools.SEARCH_CODE:
- results = git_search_code(
- repo,
- arguments["query"],
- arguments.get("file_pattern", "*"),
- arguments.get("ref", "HEAD"),
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains code search matches: {json.dumps({'matches': results})}",
- )
- ]
-
- case GitTools.GET_DIFF:
- diff = git_get_diff(
- repo,
- arguments["ref1"],
- arguments["ref2"],
- arguments.get("file_path"),
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a diff: {json.dumps({'diff': diff})}",
- )
- ]
-
- case GitTools.GET_REPO_STRUCTURE:
- structure = git_get_repo_structure(repo, arguments.get("ref", "HEAD"))
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains the repository structure: {json.dumps({'structure': structure})}",
- )
- ]
-
- case GitTools.GIT_LOG:
- log = git_log(
- repo, arguments.get("max_count", 10), arguments.get("ref", "HEAD")
- )
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains the git log: {json.dumps({'log': log})}",
- )
- ]
-
- case GitTools.LIST_BRANCHES:
- branches = git_list_branches(repo)
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a list of branches: {json.dumps({'branches': branches})}",
- )
- ]
-
- case GitTools.LIST_TAGS:
- tags = git_list_tags(repo)
- return [
- TextContent(
- type="text",
- text=f"Here is some JSON that contains a list of tags: {json.dumps({'tags': tags})}",
- )
- ]
-
- case _:
- raise ValueError(f"Unknown tool: {name}")
-
- # Run the server
- options = server.create_initialization_options()
- async with stdio_server() as (read_stream, write_stream):
- await server.run(read_stream, write_stream, options, raise_exceptions=True)
-
-
-@click.command()
-@click.option("-r", "--repository", type=click.Path(path_type=Path, dir_okay=True))
-@click.option("-v", "--verbose", count=True)
-def main(repository: Path | None, verbose: int):
- logging_level = logging.WARN
- if verbose == 1:
- logging_level = logging.INFO
- elif verbose >= 2:
- logging_level = logging.DEBUG
- logging.basicConfig(level=logging_level, stream=sys.stderr)
- anyio.run(serve, repository)
-
-
-if __name__ == "__main__":
- main()
diff --git a/src/git/src/mcp_git/__main__.py b/src/git/src/mcp_git/__main__.py
deleted file mode 100644
index 7182e6f7..00000000
--- a/src/git/src/mcp_git/__main__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# __main__.py
-
-from mcp_git import main
-
-main()
diff --git a/src/git/src/mcp_server_git/__init__.py b/src/git/src/mcp_server_git/__init__.py
new file mode 100644
index 00000000..2dfb6e5c
--- /dev/null
+++ b/src/git/src/mcp_server_git/__init__.py
@@ -0,0 +1,25 @@
+import click
+from pathlib import Path
+import logging
+import sys
+from .server import serve
+
+@click.command()
+@click.option("--repository", "-r", type=Path, help="Git repository path")
+@click.option("-v", "--verbose", count=True)
+def main(repository: Path | None, verbose: int) -> None:
+ """MCP Git Server - Git functionality for MCP"""
+ import asyncio
+
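+    # -v maps to INFO and -vv (or more) to DEBUG; logs go to stderr so stdout stays free for the MCP stdio transport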
+ logging_level = logging.WARN
+ if verbose == 1:
+ logging_level = logging.INFO
+ elif verbose >= 2:
+ logging_level = logging.DEBUG
+
+ logging.basicConfig(level=logging_level, stream=sys.stderr)
+ asyncio.run(serve(repository))
+
+if __name__ == "__main__":
+ main()
diff --git a/src/git/src/mcp_server_git/__main__.py b/src/git/src/mcp_server_git/__main__.py
new file mode 100644
index 00000000..beda6b0e
--- /dev/null
+++ b/src/git/src/mcp_server_git/__main__.py
@@ -0,0 +1,5 @@
+# __main__.py
+
+from mcp_server_git import main
+
+main()
diff --git a/src/git/src/mcp_server_git/server.py b/src/git/src/mcp_server_git/server.py
new file mode 100644
index 00000000..d4fe3328
--- /dev/null
+++ b/src/git/src/mcp_server_git/server.py
@@ -0,0 +1,228 @@
+import logging
+import json
+from pathlib import Path
+from typing import Sequence
+from mcp.server import Server
+from mcp.server.session import ServerSession
+from mcp.server.stdio import stdio_server
+from mcp.types import (
+ ClientCapabilities,
+ TextContent,
+ Tool,
+ ListRootsResult,
+ RootsCapability,
+)
+from enum import Enum
+import git
+from pydantic import BaseModel
+from typing import List
+
+class GitStatus(BaseModel):
+ repo_path: str
+
+class GitDiffUnstaged(BaseModel):
+ repo_path: str
+
+class GitDiffStaged(BaseModel):
+ repo_path: str
+
+class GitCommit(BaseModel):
+ repo_path: str
+ message: str
+
+class GitAdd(BaseModel):
+ repo_path: str
+ files: List[str]
+
+class GitReset(BaseModel):
+ repo_path: str
+
+class GitLog(BaseModel):
+ repo_path: str
+ max_count: int = 10
+
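+# Tool names exposed to MCP clients; each corresponds to one of the input models above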
+class GitTools(str, Enum):
+ STATUS = "git_status"
+ DIFF_UNSTAGED = "git_diff_unstaged"
+ DIFF_STAGED = "git_diff_staged"
+ COMMIT = "git_commit"
+ ADD = "git_add"
+ RESET = "git_reset"
+ LOG = "git_log"
+
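+# Thin wrappers around GitPython; each helper returns a plain string that becomes the tool's text response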
+def git_status(repo: git.Repo) -> str:
+ return repo.git.status()
+
+def git_diff_unstaged(repo: git.Repo) -> str:
+ return repo.git.diff()
+
+def git_diff_staged(repo: git.Repo) -> str:
+ return repo.git.diff("--cached")
+
+def git_commit(repo: git.Repo, message: str) -> str:
+ commit = repo.index.commit(message)
+ return f"Changes committed successfully with hash {commit.hexsha}"
+
+def git_add(repo: git.Repo, files: list[str]) -> str:
+ repo.index.add(files)
+ return "Files staged successfully"
+
+def git_reset(repo: git.Repo) -> str:
+ repo.index.reset()
+ return "All staged changes reset"
+
+def git_log(repo: git.Repo, max_count: int = 10) -> list[str]:
+ commits = list(repo.iter_commits(max_count=max_count))
+ log = []
+ for commit in commits:
+ log.append(
+ f"Commit: {commit.hexsha}\n"
+ f"Author: {commit.author}\n"
+ f"Date: {commit.authored_datetime}\n"
+ f"Message: {commit.message}\n"
+ )
+ return log
+
+async def serve(repository: Path | None) -> None:
+ logger = logging.getLogger(__name__)
+
+ if repository is not None:
+ try:
+ git.Repo(repository)
+ logger.info(f"Using repository at {repository}")
+ except git.InvalidGitRepositoryError:
+ logger.error(f"{repository} is not a valid Git repository")
+ return
+
+ server = Server("mcp-git")
+
+ @server.list_tools()
+ async def list_tools() -> list[Tool]:
+ return [
+ Tool(
+ name=GitTools.STATUS,
+ description="Shows the working tree status",
+ inputSchema=GitStatus.schema(),
+ ),
+ Tool(
+ name=GitTools.DIFF_UNSTAGED,
+ description="Shows changes in the working directory that are not yet staged",
+ inputSchema=GitDiffUnstaged.schema(),
+ ),
+ Tool(
+ name=GitTools.DIFF_STAGED,
+ description="Shows changes that are staged for commit",
+ inputSchema=GitDiffStaged.schema(),
+ ),
+ Tool(
+ name=GitTools.COMMIT,
+ description="Records changes to the repository",
+ inputSchema=GitCommit.schema(),
+ ),
+ Tool(
+ name=GitTools.ADD,
+ description="Adds file contents to the staging area",
+ inputSchema=GitAdd.schema(),
+ ),
+ Tool(
+ name=GitTools.RESET,
+ description="Unstages all staged changes",
+ inputSchema=GitReset.schema(),
+ ),
+ Tool(
+ name=GitTools.LOG,
+ description="Shows the commit logs",
+ inputSchema=GitLog.schema(),
+ ),
+ ]
+
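+    # Gathers candidate repositories from the client's advertised roots plus the --repository flag.
+    # Kept from the earlier implementation; no tool currently calls it.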
+ async def list_repos() -> Sequence[str]:
+ async def by_roots() -> Sequence[str]:
+ if not isinstance(server.request_context.session, ServerSession):
+ raise TypeError("server.request_context.session must be a ServerSession")
+
+ if not server.request_context.session.check_client_capability(
+ ClientCapabilities(roots=RootsCapability())
+ ):
+ return []
+
+ roots_result: ListRootsResult = await server.request_context.session.list_roots()
+ logger.debug(f"Roots result: {roots_result}")
+ repo_paths = []
+ for root in roots_result.roots:
+ path = root.uri.path
+ try:
+ git.Repo(path)
+ repo_paths.append(str(path))
+ except git.InvalidGitRepositoryError:
+ pass
+ return repo_paths
+
+ def by_commandline() -> Sequence[str]:
+ return [str(repository)] if repository is not None else []
+
+ cmd_repos = by_commandline()
+ root_repos = await by_roots()
+ return [*root_repos, *cmd_repos]
+
+ @server.call_tool()
+ async def call_tool(name: str, arguments: dict) -> list[TextContent]:
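+        # Every tool receives a repo_path argument; open the repository once, then dispatch on the tool name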
+ repo_path = Path(arguments["repo_path"])
+ repo = git.Repo(repo_path)
+
+ match name:
+ case GitTools.STATUS:
+ status = git_status(repo)
+ return [TextContent(
+ type="text",
+ text=f"Repository status:\n{status}"
+ )]
+
+ case GitTools.DIFF_UNSTAGED:
+ diff = git_diff_unstaged(repo)
+ return [TextContent(
+ type="text",
+ text=f"Unstaged changes:\n{diff}"
+ )]
+
+ case GitTools.DIFF_STAGED:
+ diff = git_diff_staged(repo)
+ return [TextContent(
+ type="text",
+ text=f"Staged changes:\n{diff}"
+ )]
+
+ case GitTools.COMMIT:
+ result = git_commit(repo, arguments["message"])
+ return [TextContent(
+ type="text",
+ text=result
+ )]
+
+ case GitTools.ADD:
+ result = git_add(repo, arguments["files"])
+ return [TextContent(
+ type="text",
+ text=result
+ )]
+
+ case GitTools.RESET:
+ result = git_reset(repo)
+ return [TextContent(
+ type="text",
+ text=result
+ )]
+
+ case GitTools.LOG:
+ log = git_log(repo, arguments.get("max_count", 10))
+ return [TextContent(
+ type="text",
+ text="Commit history:\n" + "\n".join(log)
+ )]
+
+ case _:
+ raise ValueError(f"Unknown tool: {name}")
+
+ options = server.create_initialization_options()
+ async with stdio_server() as (read_stream, write_stream):
+ await server.run(read_stream, write_stream, options, raise_exceptions=True)
diff --git a/src/git/uv.lock b/src/git/uv.lock
index f994f688..8301144f 100644
--- a/src/git/uv.lock
+++ b/src/git/uv.lock
@@ -166,8 +166,8 @@ wheels = [
]
[[package]]
-name = "mcp-git"
-version = "0.2.0"
+name = "mcp-server-git"
+version = "0.3.0"
source = { editable = "." }
dependencies = [
{ name = "click" },
@@ -178,6 +178,7 @@ dependencies = [
[package.dev-dependencies]
dev = [
+ { name = "pyright" },
{ name = "ruff" },
]
@@ -190,7 +191,19 @@ requires-dist = [
]
[package.metadata.requires-dev]
-dev = [{ name = "ruff", specifier = ">=0.7.3" }]
+dev = [
+ { name = "pyright", specifier = ">=1.1.389" },
+ { name = "ruff", specifier = ">=0.7.3" },
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 },
+]
[[package]]
name = "pydantic"
@@ -273,6 +286,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344 },
]
+[[package]]
+name = "pyright"
+version = "1.1.389"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "nodeenv" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/4e/9a5ab8745e7606b88c2c7ca223449ac9d82a71fd5e31df47b453f2cb39a1/pyright-1.1.389.tar.gz", hash = "sha256:716bf8cc174ab8b4dcf6828c3298cac05c5ed775dda9910106a5dcfe4c7fe220", size = 21940 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1b/26/c288cabf8cfc5a27e1aa9e5029b7682c0f920b8074f45d22bf844314d66a/pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60", size = 18581 },
+]
+
[[package]]
name = "ruff"
version = "0.7.4"
diff --git a/src/github/package.json b/src/github/package.json
index f66a97c6..a52534c1 100644
--- a/src/github/package.json
+++ b/src/github/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-github",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for using the GitHub API",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/google-maps/package.json b/src/google-maps/package.json
index 9b3e054a..a3b7854b 100644
--- a/src/google-maps/package.json
+++ b/src/google-maps/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-google-maps",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for using the Google Maps API",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/memory/package.json b/src/memory/package.json
index 89ad4baf..caa6d99c 100644
--- a/src/memory/package.json
+++ b/src/memory/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-memory",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for enabling memory for Claude through a knowledge graph",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/postgres/package.json b/src/postgres/package.json
index eed6ad71..9a46d2f5 100644
--- a/src/postgres/package.json
+++ b/src/postgres/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-postgres",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for interacting with PostgreSQL databases",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/puppeteer/package.json b/src/puppeteer/package.json
index a21981b6..849e169d 100644
--- a/src/puppeteer/package.json
+++ b/src/puppeteer/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-puppeteer",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for browser automation using Puppeteer",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/slack/package.json b/src/slack/package.json
index f6727140..055ad08e 100644
--- a/src/slack/package.json
+++ b/src/slack/package.json
@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-slack",
- "version": "0.2.0",
+ "version": "0.3.0",
"description": "MCP server for interacting with Slack",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",
diff --git a/src/sqlite/README.md b/src/sqlite/README.md
index 5e8e569e..36b63c6e 100644
--- a/src/sqlite/README.md
+++ b/src/sqlite/README.md
@@ -39,16 +39,16 @@ The server offers six core tools:
```bash
# Add the server to your claude_desktop_config.json
"mcpServers": {
- "sqlite": {
- "command": "uv",
- "args": [
- "--directory",
- "parent_of_servers_repo/servers/src/sqlite",
- "run",
- "sqlite",
- "--db-path",
- "~/test.db"
- ]
- }
+ "sqlite": {
+ "command": "uv",
+ "args": [
+ "--directory",
+ "parent_of_servers_repo/servers/src/sqlite",
+ "run",
+ "mcp-server-sqlite",
+ "--db-path",
+ "~/test.db"
+ ]
}
+}
```
diff --git a/src/sqlite/pyproject.toml b/src/sqlite/pyproject.toml
index b0b1b2b6..23c61ecf 100644
--- a/src/sqlite/pyproject.toml
+++ b/src/sqlite/pyproject.toml
@@ -1,16 +1,17 @@
[project]
-name = "sqlite"
-version = "0.1.0"
+name = "mcp-server-sqlite"
+version = "0.4.0"
description = "A simple SQLite MCP server"
readme = "README.md"
requires-python = ">=3.11"
-dependencies = [
- "mcp>=0.9.1",
-]
+dependencies = ["mcp>=0.9.1"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
+[tool.uv]
+dev-dependencies = ["pyright>=1.1.389"]
+
[project.scripts]
-sqlite = "sqlite:main"
\ No newline at end of file
+mcp-server-sqlite = "mcp_server_sqlite:main"
diff --git a/src/sqlite/src/sqlite/__init__.py b/src/sqlite/src/mcp_server_sqlite/__init__.py
similarity index 100%
rename from src/sqlite/src/sqlite/__init__.py
rename to src/sqlite/src/mcp_server_sqlite/__init__.py
diff --git a/src/sqlite/src/mcp_server_sqlite/server.py b/src/sqlite/src/mcp_server_sqlite/server.py
new file mode 100644
index 00000000..257715d5
--- /dev/null
+++ b/src/sqlite/src/mcp_server_sqlite/server.py
@@ -0,0 +1,376 @@
+import sqlite3
+import logging
+from contextlib import closing
+from pathlib import Path
+from mcp.server.models import InitializationOptions
+import mcp.types as types
+from mcp.server import NotificationOptions, Server
+import mcp.server.stdio
+from pydantic import AnyUrl
+from typing import Any
+
+logging.basicConfig(level=logging.DEBUG)
+logger = logging.getLogger('mcp_sqlite_server')
+logger.info("Starting MCP SQLite Server")
+
+PROMPT_TEMPLATE = """
+The assistant's goal is to walk through an informative demo of MCP. To demonstrate the Model Context Protocol (MCP) we will leverage this example server to interact with an SQLite database.
+It is important that you first explain to the user what is going on. The user has downloaded and installed the SQLite MCP Server and is now ready to use it.
+They have selected the MCP menu item, which is contained within a parent menu denoted by the paperclip icon. Inside this menu they selected an icon that illustrates two electrical plugs connecting. This is the MCP menu.
+Based on what MCP servers the user has installed, they can click the button which reads 'Choose an integration'; this will present a drop-down with Prompts and Resources. The user has selected the prompt titled: 'mcp-demo'.
+This text file is that prompt. The goal of the following instructions is to walk the user through the process of using the 3 core aspects of an MCP server. These are: Prompts, Tools, and Resources.
+They have already used a prompt and provided a topic. The topic is: {topic}. The user is now ready to begin the demo.
+Here is some more information about mcp and this specific mcp server:
+
+Prompts:
+This server provides a pre-written prompt called "mcp-demo" that helps users create and analyze database scenarios. The prompt accepts a "topic" argument and guides users through creating tables, analyzing data, and generating insights. For example, if a user provides "retail sales" as the topic, the prompt will help create relevant database tables and guide the analysis process. Prompts basically serve as interactive templates that help structure the conversation with the LLM in a useful way.
+Resources:
+This server exposes one key resource: "memo://insights", which is a business insights memo that gets automatically updated throughout the analysis process. As users analyze the database and discover insights, the memo resource gets updated in real-time to reflect new findings. The memo can even be enhanced with Claude's help if an Anthropic API key is provided, turning raw insights into a well-structured business document. Resources act as living documents that provide context to the conversation.
+Tools:
+This server provides several SQL-related tools:
+"read-query": Executes SELECT queries to read data from the database
+"write-query": Executes INSERT, UPDATE, or DELETE queries to modify data
+"create-table": Creates new tables in the database
+"list-tables": Shows all existing tables
+"describe-table": Shows the schema for a specific table
+"append-insight": Adds a new business insight to the memo resource
+
+
+You are an AI assistant tasked with generating a comprehensive business scenario based on a given topic.
+Your goal is to create a narrative that involves a data-driven business problem, develop a database structure to support it, generate relevant queries, create a dashboard, and provide a final solution.
+
+At each step you will pause for user input to guide the scenario creation process. Overall ensure the scenario is engaging, informative, and demonstrates the capabilities of the SQLite MCP Server.
+You should guide the scenario to completion. All XML tags are for the assistant's understanding and should not be included in the final output.
+
+1. The user has chosen the topic: {topic}.
+
+2. Create a business problem narrative:
+a. Describe a high-level business situation or problem based on the given topic.
+b. Include a protagonist (the user) who needs to collect and analyze data from a database.
+c. Add an external, potentially comedic reason why the data hasn't been prepared yet.
+d. Mention an approaching deadline and the need to use Claude (you) as a business tool to help.
+
+3. Setup the data:
+a. Instead of asking about the data that is required for the scenario, just go ahead and use the tools to create the data. Inform the user you are "Setting up the data".
+b. Design a set of table schemas that represent the data needed for the business problem.
+c. Include at least 2-3 tables with appropriate columns and data types.
+d. Leverage the tools to create the tables in the SQLite database.
+e. Create INSERT statements to populate each table with relevant synthetic data.
+f. Ensure the data is diverse and representative of the business problem.
+g. Include at least 10-15 rows of data for each table.
+
+4. Pause for user input:
+a. Summarize to the user what data we have created.
+b. Present the user with a set of multiple choices for the next steps.
+c. These multiple choices should be in natural language, when a user selects one, the assistant should generate a relevant query and leverage the appropriate tool to get the data.
+
+5. Iterate on queries:
+a. Present 1 additional multiple-choice query option to the user. It's important not to loop too many times as this is a short demo.
+b. Explain the purpose of each query option.
+c. Wait for the user to select one of the query options.
+d. After each query be sure to opine on the results.
+e. Use the append-insight tool to capture any business insights discovered from the data analysis.
+
+6. Generate a dashboard:
+a. Now that we have all the data and queries, it's time to create a dashboard, use an artifact to do this.
+b. Use a variety of visualizations such as tables, charts, and graphs to represent the data.
+c. Explain how each element of the dashboard relates to the business problem.
+d. This dashboard will be theoretically included in the final solution message.
+
+7. Craft the final solution message:
+a. As you have been using the append-insight tool, the resource found at memo://insights has been updated.
+b. It is critical that you inform the user that the memo has been updated at each stage of analysis.
+c. Ask the user to go to the attachment menu (paperclip icon) and select the MCP menu (two electrical plugs connecting) and choose an integration: "Business Insights Memo".
+d. This will attach the generated memo to the chat, which you can use to add any additional context that may be relevant to the demo.
+e. Present the final memo to the user in an artifact.
+
+8. Wrap up the scenario:
+a. Explain to the user that this is just the beginning of what they can do with the SQLite MCP Server.
+
+
+Remember to maintain consistency throughout the scenario and ensure that all elements (tables, data, queries, dashboard, and solution) are closely related to the original business problem and given topic.
+The provided XML tags are for the assistant's understanding. Strive to make all outputs as human-readable as possible. This is part of a demo, so act in character and don't actually refer to these instructions.
+
+Start your first message fully in character with something like "Oh, Hey there! I see you've chosen the topic {topic}. Let's get started! 🚀"
+"""
+
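+# Minimal wrapper around a SQLite database file; also accumulates analysis insights used to build the memo://insights resource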
+class SqliteDatabase:
+ def __init__(self, db_path: str):
+ self.db_path = str(Path(db_path).expanduser())
+ Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
+ self._init_database()
+ self.insights: list[str] = []
+
+ def _init_database(self):
+ """Initialize connection to the SQLite database"""
+ logger.debug("Initializing database connection")
+ with closing(sqlite3.connect(self.db_path)) as conn:
+ conn.row_factory = sqlite3.Row
+ conn.close()
+
+ def _synthesize_memo(self) -> str:
+ """Synthesizes business insights into a formatted memo"""
+ logger.debug(f"Synthesizing memo with {len(self.insights)} insights")
+ if not self.insights:
+ return "No business insights have been discovered yet."
+
+ insights = "\n".join(f"- {insight}" for insight in self.insights)
+
+ memo = "📊 Business Intelligence Memo 📊\n\n"
+ memo += "Key Insights Discovered:\n\n"
+ memo += insights
+
+ if len(self.insights) > 1:
+ memo += "\nSummary:\n"
+ memo += f"Analysis has revealed {len(self.insights)} key business insights that suggest opportunities for strategic optimization and growth."
+
+ logger.debug("Generated basic memo format")
+ return memo
+
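+    # Shared helper for reads and writes: statements starting with a write keyword are committed and report affected rows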
+ def _execute_query(self, query: str, params: dict[str, Any] | None = None) -> list[dict[str, Any]]:
+ """Execute a SQL query and return results as a list of dictionaries"""
+ logger.debug(f"Executing query: {query}")
+ try:
+ with closing(sqlite3.connect(self.db_path)) as conn:
+ conn.row_factory = sqlite3.Row
+ with closing(conn.cursor()) as cursor:
+ if params:
+ cursor.execute(query, params)
+ else:
+ cursor.execute(query)
+
+ if query.strip().upper().startswith(('INSERT', 'UPDATE', 'DELETE', 'CREATE', 'DROP', 'ALTER')):
+ conn.commit()
+ affected = cursor.rowcount
+ logger.debug(f"Write query affected {affected} rows")
+ return [{"affected_rows": affected}]
+
+ results = [dict(row) for row in cursor.fetchall()]
+ logger.debug(f"Read query returned {len(results)} rows")
+ return results
+ except Exception as e:
+ logger.error(f"Database error executing query: {e}")
+ raise
+
+async def main(db_path: str):
+ logger.info(f"Starting SQLite MCP Server with DB path: {db_path}")
+
+ db = SqliteDatabase(db_path)
+ server = Server("sqlite-manager")
+
+ # Register handlers
+ logger.debug("Registering handlers")
+
+ @server.list_resources()
+ async def handle_list_resources() -> list[types.Resource]:
+ logger.debug("Handling list_resources request")
+ return [
+ types.Resource(
+ uri=AnyUrl("memo://insights"),
+ name="Business Insights Memo",
+ description="A living document of discovered business insights",
+ mimeType="text/plain",
+ )
+ ]
+
+ @server.read_resource()
+ async def handle_read_resource(uri: AnyUrl) -> str:
+ logger.debug(f"Handling read_resource request for URI: {uri}")
+ if uri.scheme != "memo":
+ logger.error(f"Unsupported URI scheme: {uri.scheme}")
+ raise ValueError(f"Unsupported URI scheme: {uri.scheme}")
+
+ path = str(uri).replace("memo://", "")
+ if not path or path != "insights":
+ logger.error(f"Unknown resource path: {path}")
+ raise ValueError(f"Unknown resource path: {path}")
+
+ return db._synthesize_memo()
+
+ @server.list_prompts()
+ async def handle_list_prompts() -> list[types.Prompt]:
+ logger.debug("Handling list_prompts request")
+ return [
+ types.Prompt(
+ name="mcp-demo",
+ description="A prompt to seed the database with initial data and demonstrate what you can do with an SQLite MCP Server + Claude",
+ arguments=[
+ types.PromptArgument(
+ name="topic",
+ description="Topic to seed the database with initial data",
+ required=True,
+ )
+ ],
+ )
+ ]
+
+ @server.get_prompt()
+ async def handle_get_prompt(name: str, arguments: dict[str, str] | None) -> types.GetPromptResult:
+ logger.debug(f"Handling get_prompt request for {name} with args {arguments}")
+ if name != "mcp-demo":
+ logger.error(f"Unknown prompt: {name}")
+ raise ValueError(f"Unknown prompt: {name}")
+
+ if not arguments or "topic" not in arguments:
+ logger.error("Missing required argument: topic")
+ raise ValueError("Missing required argument: topic")
+
+ topic = arguments["topic"]
+ prompt = PROMPT_TEMPLATE.format(topic=topic)
+
+ logger.debug(f"Generated prompt template for topic: {topic}")
+ return types.GetPromptResult(
+ description=f"Demo template for {topic}",
+ messages=[
+ types.PromptMessage(
+ role="user",
+ content=types.TextContent(type="text", text=prompt.strip()),
+ )
+ ],
+ )
+
+ @server.list_tools()
+ async def handle_list_tools() -> list[types.Tool]:
+ """List available tools"""
+ return [
+ types.Tool(
+ name="read-query",
+ description="Execute a SELECT query on the SQLite database",
+ inputSchema={
+ "type": "object",
+ "properties": {
+ "query": {"type": "string", "description": "SELECT SQL query to execute"},
+ },
+ "required": ["query"],
+ },
+ ),
+ types.Tool(
+ name="write-query",
+ description="Execute an INSERT, UPDATE, or DELETE query on the SQLite database",
+ inputSchema={
+ "type": "object",
+ "properties": {
+ "query": {"type": "string", "description": "SQL query to execute"},
+ },
+ "required": ["query"],
+ },
+ ),
+ types.Tool(
+ name="create-table",
+ description="Create a new table in the SQLite database",
+ inputSchema={
+ "type": "object",
+ "properties": {
+ "query": {"type": "string", "description": "CREATE TABLE SQL statement"},
+ },
+ "required": ["query"],
+ },
+ ),
+ types.Tool(
+ name="list-tables",
+ description="List all tables in the SQLite database",
+ inputSchema={
+ "type": "object",
+ "properties": {},
+ },
+ ),
+ types.Tool(
+ name="describe-table",
+ description="Get the schema information for a specific table",
+ inputSchema={
+ "type": "object",
+ "properties": {
+ "table_name": {"type": "string", "description": "Name of the table to describe"},
+ },
+ "required": ["table_name"],
+ },
+ ),
+ types.Tool(
+ name="append-insight",
+ description="Add a business insight to the memo",
+ inputSchema={
+ "type": "object",
+ "properties": {
+ "insight": {"type": "string", "description": "Business insight discovered from data analysis"},
+ },
+ "required": ["insight"],
+ },
+ ),
+ ]
+
+ @server.call_tool()
+ async def handle_call_tool(
+ name: str, arguments: dict[str, Any] | None
+ ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
+ """Handle tool execution requests"""
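+        # Tools without a SQL argument are handled first; the query tools below validate their SQL by leading keyword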
+ try:
+ if name == "list-tables":
+ results = db._execute_query(
+ "SELECT name FROM sqlite_master WHERE type='table'"
+ )
+ return [types.TextContent(type="text", text=str(results))]
+
+ elif name == "describe-table":
+ if not arguments or "table_name" not in arguments:
+ raise ValueError("Missing table_name argument")
+ results = db._execute_query(
+ f"PRAGMA table_info({arguments['table_name']})"
+ )
+ return [types.TextContent(type="text", text=str(results))]
+
+ elif name == "append-insight":
+ if not arguments or "insight" not in arguments:
+ raise ValueError("Missing insight argument")
+
+ db.insights.append(arguments["insight"])
+ _ = db._synthesize_memo()
+
+ # Notify clients that the memo resource has changed
+ await server.request_context.session.send_resource_updated(AnyUrl("memo://insights"))
+
+ return [types.TextContent(type="text", text="Insight added to memo")]
+
+ if not arguments:
+ raise ValueError("Missing arguments")
+
+ if name == "read-query":
+ if not arguments["query"].strip().upper().startswith("SELECT"):
+ raise ValueError("Only SELECT queries are allowed for read-query")
+ results = db._execute_query(arguments["query"])
+ return [types.TextContent(type="text", text=str(results))]
+
+ elif name == "write-query":
+ if arguments["query"].strip().upper().startswith("SELECT"):
+ raise ValueError("SELECT queries are not allowed for write-query")
+ results = db._execute_query(arguments["query"])
+ return [types.TextContent(type="text", text=str(results))]
+
+ elif name == "create-table":
+ if not arguments["query"].strip().upper().startswith("CREATE TABLE"):
+ raise ValueError("Only CREATE TABLE statements are allowed")
+ db._execute_query(arguments["query"])
+ return [types.TextContent(type="text", text="Table created successfully")]
+
+ else:
+ raise ValueError(f"Unknown tool: {name}")
+
+ except sqlite3.Error as e:
+ return [types.TextContent(type="text", text=f"Database error: {str(e)}")]
+ except Exception as e:
+ return [types.TextContent(type="text", text=f"Error: {str(e)}")]
+
+ async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
+ logger.info("Server running with stdio transport")
+ await server.run(
+ read_stream,
+ write_stream,
+ InitializationOptions(
+ server_name="sqlite",
+ server_version="0.1.0",
+ capabilities=server.get_capabilities(
+ notification_options=NotificationOptions(),
+ experimental_capabilities={},
+ ),
+ ),
+ )
diff --git a/src/sqlite/src/sqlite/server.py b/src/sqlite/src/sqlite/server.py
deleted file mode 100644
index 8836cf45..00000000
--- a/src/sqlite/src/sqlite/server.py
+++ /dev/null
@@ -1,385 +0,0 @@
-import sqlite3
-import logging
-from logging.handlers import RotatingFileHandler
-from contextlib import closing
-from pathlib import Path
-from mcp.server.models import InitializationOptions
-import mcp.types as types
-from mcp.server import NotificationOptions, Server, AnyUrl
-import mcp.server.stdio
-
-# Set up logging to file
-log_file = Path('mcp_server.log')
-handler = RotatingFileHandler(log_file, maxBytes=10*1024*1024, backupCount=5)
-formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-handler.setFormatter(formatter)
-logger = logging.getLogger('mcp_sqlite_server')
-logger.setLevel(logging.DEBUG)
-logger.addHandler(handler)
-logger.info("Starting MCP SQLite Server")
-
-class McpServer(Server):
- def _init_database(self):
- """Initialize connection to the SQLite database"""
- logger.debug("Initializing database connection")
- with closing(sqlite3.connect(self.db_path)) as conn:
- conn.row_factory = sqlite3.Row
- conn.close()
-
- def _synthesize_memo(self) -> str:
- """Synthesizes business insights into a formatted memo"""
- logger.debug(f"Synthesizing memo with {len(self.insights)} insights")
- if not self.insights:
- return "No business insights have been discovered yet."
-
- insights = "\n".join(f"- {insight}" for insight in self.insights)
-
- memo = "📊 Business Intelligence Memo 📊\n\n"
- memo += "Key Insights Discovered:\n\n"
- memo += insights
-
- if len(self.insights) > 1:
- memo += "\nSummary:\n"
- memo += f"Analysis has revealed {len(self.insights)} key business insights that suggest opportunities for strategic optimization and growth."
-
- logger.debug("Generated basic memo format")
- return memo
-
- def _execute_query(self, query: str, params=None) -> list[dict]:
- """Execute a SQL query and return results as a list of dictionaries"""
- logger.debug(f"Executing query: {query}")
- try:
- with closing(sqlite3.connect(self.db_path)) as conn:
- conn.row_factory = sqlite3.Row
- with closing(conn.cursor()) as cursor:
- if params:
- cursor.execute(query, params)
- else:
- cursor.execute(query)
-
- if query.strip().upper().startswith(('INSERT', 'UPDATE', 'DELETE', 'CREATE', 'DROP', 'ALTER')):
- conn.commit()
- affected = cursor.rowcount
- logger.debug(f"Write query affected {affected} rows")
- return [{"affected_rows": affected}]
-
- results = [dict(row) for row in cursor.fetchall()]
- logger.debug(f"Read query returned {len(results)} rows")
- return results
- except Exception as e:
- logger.error(f"Database error executing query: {e}")
- raise
-
- def __init__(self, db_path: str = "~/sqlite_mcp_server.db"):
- logger.info("Initializing McpServer")
- super().__init__("sqlite-manager")
-
- # Initialize SQLite database
- self.db_path = str(Path(db_path).expanduser())
- Path(self.db_path).parent.mkdir(parents=True, exist_ok=True)
- self._init_database()
- logger.debug(f"Initialized database at {self.db_path}")
-
-
- # Initialize insights list
- self.insights = []
-
- # REGISTER HANDLERS
- logger.debug("Registering handlers")
-
- @self.list_resources()
- async def handle_list_resources() -> list[types.Resource]:
- logger.debug("Handling list_resources request")
- return [
- types.Resource(
- uri=AnyUrl("memo://insights"),
- name="Business Insights Memo",
- description="A living document of discovered business insights",
- mimeType="text/plain",
- )
- ]
-
- @self.read_resource()
- async def handle_read_resource(uri: AnyUrl) -> str:
- logger.debug(f"Handling read_resource request for URI: {uri}")
- if uri.scheme != "memo":
- logger.error(f"Unsupported URI scheme: {uri.scheme}")
- raise ValueError(f"Unsupported URI scheme: {uri.scheme}")
-
- path = str(uri).replace("memo://", "")
- if not path or path != "insights":
- logger.error(f"Unknown resource path: {path}")
- raise ValueError(f"Unknown resource path: {path}")
-
- return self._synthesize_memo()
-
- @self.list_prompts()
- async def handle_list_prompts() -> list[types.Prompt]:
- logger.debug("Handling list_prompts request")
- return [
- types.Prompt(
- name="mcp-demo",
- description="A prompt to seed the database with initial data and demonstrate what you can do with an SQLite MCP Server + Claude",
- arguments=[
- types.PromptArgument(
- name="topic",
- description="Topic to seed the database with initial data",
- required=True,
- )
- ],
- )
- ]
-
- @self.get_prompt()
- async def handle_get_prompt(name: str, arguments: dict[str, str] | None) -> types.GetPromptResult:
- logger.debug(f"Handling get_prompt request for {name} with args {arguments}")
- if name != "mcp-demo":
- logger.error(f"Unknown prompt: {name}")
- raise ValueError(f"Unknown prompt: {name}")
-
- if not arguments or "topic" not in arguments:
- logger.error("Missing required argument: topic")
- raise ValueError("Missing required argument: topic")
-
- topic = arguments["topic"]
- template = f"""
- The assistant's goal is to walk through an informative demo of MCP. To demonstrate the Model Context Protocol (MCP) we will leverage this example server to interact with an SQLite database.
- It is important that you first explain to the user what is going on. The user has downloaded and installed the SQLite MCP Server and is now ready to use it.
- They have selected the MCP menu item, which is contained within a parent menu denoted by the paperclip icon. Inside this menu they selected an icon that illustrates two electrical plugs connecting. This is the MCP menu.
- Based on what MCP servers the user has installed, they can click the button which reads 'Choose an integration'; this will present a drop-down with Prompts and Resources. The user has selected the prompt titled: 'mcp-demo'.
- This text file is that prompt. The goal of the following instructions is to walk the user through the process of using the 3 core aspects of an MCP server. These are: Prompts, Tools, and Resources.
- They have already used a prompt and provided a topic. The topic is: {topic}. The user is now ready to begin the demo.
- Here is some more information about mcp and this specific mcp server:
-
- Prompts:
- This server provides a pre-written prompt called "mcp-demo" that helps users create and analyze database scenarios. The prompt accepts a "topic" argument and guides users through creating tables, analyzing data, and generating insights. For example, if a user provides "retail sales" as the topic, the prompt will help create relevant database tables and guide the analysis process. Prompts basically serve as interactive templates that help structure the conversation with the LLM in a useful way.
- Resources:
- This server exposes one key resource: "memo://insights", which is a business insights memo that gets automatically updated throughout the analysis process. As users analyze the database and discover insights, the memo resource gets updated in real-time to reflect new findings. The memo can even be enhanced with Claude's help if an Anthropic API key is provided, turning raw insights into a well-structured business document. Resources act as living documents that provide context to the conversation.
- Tools:
- This server provides several SQL-related tools:
- "read-query": Executes SELECT queries to read data from the database
- "write-query": Executes INSERT, UPDATE, or DELETE queries to modify data
- "create-table": Creates new tables in the database
- "list-tables": Shows all existing tables
- "describe-table": Shows the schema for a specific table
- "append-insight": Adds a new business insight to the memo resource
-
-
- You are an AI assistant tasked with generating a comprehensive business scenario based on a given topic.
- Your goal is to create a narrative that involves a data-driven business problem, develop a database structure to support it, generate relevant queries, create a dashboard, and provide a final solution.
-
- At each step you will pause for user input to guide the scenario creation process. Overall ensure the scenario is engaging, informative, and demonstrates the capabilities of the SQLite MCP Server.
- You should guide the scenario to completion. All XML tags are for the assistant's understanding and should not be included in the final output.
-
- 1. The user has chosen the topic: {topic}.
-
- 2. Create a business problem narrative:
- a. Describe a high-level business situation or problem based on the given topic.
- b. Include a protagonist (the user) who needs to collect and analyze data from a database.
- c. Add an external, potentially comedic reason why the data hasn't been prepared yet.
- d. Mention an approaching deadline and the need to use Claude (you) as a business tool to help.
-
- 3. Setup the data:
- a. Instead of asking about the data that is required for the scenario, just go ahead and use the tools to create the data. Inform the user you are "Setting up the data".
- b. Design a set of table schemas that represent the data needed for the business problem.
- c. Include at least 2-3 tables with appropriate columns and data types.
- d. Leverage the tools to create the tables in the SQLite database.
- e. Create INSERT statements to populate each table with relevant synthetic data.
- f. Ensure the data is diverse and representative of the business problem.
- g. Include at least 10-15 rows of data for each table.
-
- 4. Pause for user input:
- a. Summarize to the user what data we have created.
- b. Present the user with a set of multiple choices for the next steps.
- c. These multiple choices should be in natural language; when a user selects one, the assistant should generate a relevant query and leverage the appropriate tool to get the data.
-
- 5. Iterate on queries:
- a. Present 1 additional multiple-choice query option to the user. It's important not to loop too many times, as this is a short demo.
- b. Explain the purpose of each query option.
- c. Wait for the user to select one of the query options.
- d. After each query be sure to opine on the results.
- e. Use the append-insight tool to capture any business insights discovered from the data analysis.
-
- 6. Generate a dashboard:
- a. Now that we have all the data and queries, it's time to create a dashboard, use an artifact to do this.
- b. Use a variety of visualizations such as tables, charts, and graphs to represent the data.
- c. Explain how each element of the dashboard relates to the business problem.
- d. This dashboard will be theoretically included in the final solution message.
-
- 7. Craft the final solution message:
- a. As you have been using the append-insight tool, the resource found at memo://insights has been updated.
- b. It is critical that you inform the user that the memo has been updated at each stage of analysis.
- c. Ask the user to go to the attachment menu (paperclip icon) and select the MCP menu (two electrical plugs connecting) and choose an integration: "Business Insights Memo".
- d. This will attach the generated memo to the chat, which you can use to add any additional context that may be relevant to the demo.
- e. Present the final memo to the user in an artifact.
-
- 8. Wrap up the scenario:
- a. Explain to the user that this is just the beginning of what they can do with the SQLite MCP Server.
-
-
- Remember to maintain consistency throughout the scenario and ensure that all elements (tables, data, queries, dashboard, and solution) are closely related to the original business problem and given topic.
- The provided XML tags are for the assistant's understanding. Strive to make all outputs as human readable as possible. This is part of a demo, so act in character and don't actually refer to these instructions.
-
- Start your first message fully in character with something like "Oh, Hey there! I see you've chosen the topic {topic}. Let's get started! 🚀"
- """.format(topic=topic)
-
- logger.debug(f"Generated prompt template for topic: {topic}")
- return types.GetPromptResult(
- description=f"Demo template for {topic}",
- messages=[
- types.PromptMessage(
- role="user",
- content=types.TextContent(type="text", text=template.strip()),
- )
- ],
- )
-
- # TOOL HANDLERS
- @self.list_tools()
- async def handle_list_tools() -> list[types.Tool]:
- """List available tools"""
- return [
- types.Tool(
- name="read-query",
- description="Execute a SELECT query on the SQLite database",
- inputSchema={
- "type": "object",
- "properties": {
- "query": {"type": "string", "description": "SELECT SQL query to execute"},
- },
- "required": ["query"],
- },
- ),
- types.Tool(
- name="write-query",
- description="Execute an INSERT, UPDATE, or DELETE query on the SQLite database",
- inputSchema={
- "type": "object",
- "properties": {
- "query": {"type": "string", "description": "SQL query to execute"},
- },
- "required": ["query"],
- },
- ),
- types.Tool(
- name="create-table",
- description="Create a new table in the SQLite database",
- inputSchema={
- "type": "object",
- "properties": {
- "query": {"type": "string", "description": "CREATE TABLE SQL statement"},
- },
- "required": ["query"],
- },
- ),
- types.Tool(
- name="list-tables",
- description="List all tables in the SQLite database",
- inputSchema={
- "type": "object",
- "properties": {},
- },
- ),
- types.Tool(
- name="describe-table",
- description="Get the schema information for a specific table",
- inputSchema={
- "type": "object",
- "properties": {
- "table_name": {"type": "string", "description": "Name of the table to describe"},
- },
- "required": ["table_name"],
- },
- ),
- types.Tool(
- name="append-insight",
- description="Add a business insight to the memo",
- inputSchema={
- "type": "object",
- "properties": {
- "insight": {"type": "string", "description": "Business insight discovered from data analysis"},
- },
- "required": ["insight"],
- },
- ),
- ]
-
- @self.call_tool()
- async def handle_call_tool(
- name: str, arguments: dict | None
- ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
- """Handle tool execution requests"""
- try:
- if name == "list-tables":
- results = self._execute_query(
- "SELECT name FROM sqlite_master WHERE type='table'"
- )
- return [types.TextContent(type="text", text=str(results))]
-
- elif name == "describe-table":
- if not arguments or "table_name" not in arguments:
- raise ValueError("Missing table_name argument")
- results = self._execute_query(
- f"PRAGMA table_info({arguments['table_name']})"
- )
- return [types.TextContent(type="text", text=str(results))]
-
- elif name == "append-insight":
- if not arguments or "insight" not in arguments:
- raise ValueError("Missing insight argument")
-
- self.insights.append(arguments["insight"])
- memo = self._synthesize_memo()
-
- # Notify clients that the memo resource has changed
- await self.request_context.session.send_resource_updated("memo://insights")
-
- return [types.TextContent(type="text", text="Insight added to memo")]
- if not arguments:
- raise ValueError("Missing arguments")
-
- if name == "read-query":
- if not arguments["query"].strip().upper().startswith("SELECT"):
- raise ValueError("Only SELECT queries are allowed for read-query")
- results = self._execute_query(arguments["query"])
- return [types.TextContent(type="text", text=str(results))]
-
- elif name == "write-query":
- if arguments["query"].strip().upper().startswith("SELECT"):
- raise ValueError("SELECT queries are not allowed for write-query")
- results = self._execute_query(arguments["query"])
- return [types.TextContent(type="text", text=str(results))]
-
- elif name == "create-table":
- if not arguments["query"].strip().upper().startswith("CREATE TABLE"):
- raise ValueError("Only CREATE TABLE statements are allowed")
- self._execute_query(arguments["query"])
- return [types.TextContent(type="text", text="Table created successfully")]
-
- else:
- raise ValueError(f"Unknown tool: {name}")
-
- except sqlite3.Error as e:
- return [types.TextContent(type="text", text=f"Database error: {str(e)}")]
- except Exception as e:
- return [types.TextContent(type="text", text=f"Error: {str(e)}")]
-
-async def main(db_path: str):
- logger.info(f"Starting SQLite MCP Server with DB path: {db_path}")
- server = McpServer(db_path)
-
- async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
- logger.info("Server running with stdio transport")
- await server.run(
- read_stream,
- write_stream,
- InitializationOptions(
- server_name="sqlite",
- server_version="0.1.0",
- capabilities=server.get_capabilities(
- notification_options=NotificationOptions(),
- experimental_capabilities={
- },
- ),
- ),
- )
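The query helper removed above (and re-created in the new module layout) follows a common sqlite3 pattern: rows come back as dictionaries via sqlite3.Row, and a commit is issued only for mutating statements. A minimal standalone sketch of that pattern, using only the standard library (the file and table names here are purely illustrative):

# Standalone illustration of the _execute_query pattern: dict rows,
# commit only for write statements, connections closed via closing().
import sqlite3
from contextlib import closing

WRITE_PREFIXES = ("INSERT", "UPDATE", "DELETE", "CREATE", "DROP", "ALTER")

def run_query(db_path: str, query: str, params: tuple = ()) -> list[dict]:
    with closing(sqlite3.connect(db_path)) as conn:
        conn.row_factory = sqlite3.Row
        with closing(conn.cursor()) as cursor:
            cursor.execute(query, params)
            if query.strip().upper().startswith(WRITE_PREFIXES):
                conn.commit()
                return [{"affected_rows": cursor.rowcount}]
            return [dict(row) for row in cursor.fetchall()]

if __name__ == "__main__":
    run_query("demo.db", "CREATE TABLE IF NOT EXISTS t (x INTEGER)")
    run_query("demo.db", "INSERT INTO t (x) VALUES (?)", (1,))
    print(run_query("demo.db", "SELECT * FROM t"))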
diff --git a/src/sqlite/uv.lock b/src/sqlite/uv.lock
index fc36f073..9a3fa80b 100644
--- a/src/sqlite/uv.lock
+++ b/src/sqlite/uv.lock
@@ -126,6 +126,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/a0/2ee813d456b57a726d583868417d1ad900fbe12ee3c8cd866e3e804ca486/mcp-0.9.1-py3-none-any.whl", hash = "sha256:7f640fcfb0be486aa510594df309920ae1d375cdca1f8aff21db3a96d837f303", size = 31562 },
]
+[[package]]
+name = "mcp-server-sqlite"
+version = "0.3.0"
+source = { editable = "." }
+dependencies = [
+ { name = "mcp" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "pyright" },
+]
+
+[package.metadata]
+requires-dist = [{ name = "mcp", specifier = ">=0.9.1" }]
+
+[package.metadata.requires-dev]
+dev = [{ name = "pyright", specifier = ">=1.1.389" }]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 },
+]
+
[[package]]
name = "pydantic"
version = "2.10.0"
@@ -193,6 +221,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/18/e5744a132b81f98b9f92e15f33f03229a1d254ce7af942b1422ec2ac656f/pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d", size = 1877469 },
]
+[[package]]
+name = "pyright"
+version = "1.1.389"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "nodeenv" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/4e/9a5ab8745e7606b88c2c7ca223449ac9d82a71fd5e31df47b453f2cb39a1/pyright-1.1.389.tar.gz", hash = "sha256:716bf8cc174ab8b4dcf6828c3298cac05c5ed775dda9910106a5dcfe4c7fe220", size = 21940 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1b/26/c288cabf8cfc5a27e1aa9e5029b7682c0f920b8074f45d22bf844314d66a/pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60", size = 18581 },
+]
+
[[package]]
name = "sniffio"
version = "1.3.1"
@@ -202,17 +243,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
]
-[[package]]
-name = "sqlite"
-version = "0.1.0"
-source = { editable = "." }
-dependencies = [
- { name = "mcp" },
-]
-
-[package.metadata]
-requires-dist = [{ name = "mcp", specifier = ">=0.9.1" }]
-
[[package]]
name = "sse-starlette"
version = "2.1.3"