Analog information decoding (#183)
## Description
WIP.
This PR integrates the newest analog information decoding scripts. 

## Checklist:

<!---
This checklist serves as a reminder of a couple of things that ensure
your pull request will be merged swiftly.
-->

- [x] The pull request only contains commits that are related to it.
- [ ] I have added appropriate tests and documentation.
- [ ] I have made sure that all CI jobs on GitHub pass.
- [x] The pull request introduces no new warnings and follows the
project's style guidelines.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: burgholzer <[email protected]>
3 people authored Dec 22, 2023
1 parent d61ccab commit 5c75bc0
Showing 50 changed files with 4,293 additions and 237 deletions.
2 changes: 2 additions & 0 deletions .github/codecov.yml
@@ -3,6 +3,8 @@ ignore:
- "**/python"
- "test/**/*"
- "src/mqt/qecc/cc_decoder/plots.py"
- "src/mqt/qecc/analog_information_decoding/utils/data_utils.py"
- "src/mqt/qecc/analog_information_decoding/code_construction/*"

coverage:
range: 60..90
6 changes: 3 additions & 3 deletions .github/workflows/codeql-analysis.yml
@@ -62,7 +62,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
setup-python-dependencies: false
@@ -79,7 +79,7 @@ jobs:
run: cmake --build build

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3
with:
upload: False
output: sarif-results
@@ -93,6 +93,6 @@ jobs:
output: sarif-results/${{ matrix.language }}.sarif

- name: Upload SARIF
uses: github/codeql-action/upload-sarif@v2
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: sarif-results/${{ matrix.language }}.sarif
2 changes: 1 addition & 1 deletion .github/workflows/cpp-linter.yml
@@ -47,7 +47,7 @@ jobs:
cmake -S . -B build -DCMAKE_BUILD_TYPE=Debug
- name: Run cpp-linter
uses: cpp-linter/cpp-linter-action@v2
uses: cpp-linter/cpp-linter-action@v2.7.2
id: linter
env:
GITHUB_TOKEN: ${{ github.token }}
11 changes: 7 additions & 4 deletions .github/workflows/deploy.yml
@@ -43,8 +43,9 @@ jobs:
- name: Verify clean directory
run: git diff --exit-code
shell: bash
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: cibw-wheels-${{ matrix.runs-on }}-${{ strategy.job-index }}
path: ./wheelhouse/*.whl

build_sdist:
@@ -61,8 +62,9 @@
run: pipx run build --sdist
- name: Check metadata
run: pipx run twine check dist/*
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: cibw-sdist
path: dist/*.tar.gz

upload_pypi:
@@ -76,8 +78,9 @@
permissions:
id-token: write
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: artifact
pattern: cibw-*
path: dist
merge-multiple: true
- uses: pypa/gh-action-pypi-publish@release/v1
29 changes: 11 additions & 18 deletions .pre-commit-config.yaml
@@ -30,13 +30,13 @@ repos:

# Clean jupyter notebooks
- repo: https://github.com/srstevenson/nb-clean
rev: 3.1.0
rev: 3.2.0
hooks:
- id: nb-clean

# Handling unwanted unicode characters
- repo: https://github.com/sirosen/texthooks
rev: 0.6.2
rev: 0.6.3
hooks:
- id: fix-ligatures
- id: fix-smartquotes
@@ -51,7 +51,7 @@ repos:

# Python linting and formatting using ruff
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.4
rev: v0.1.9
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
@@ -68,27 +68,31 @@

# Check static types with mypy
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.6.1
rev: v1.8.0
hooks:
- id: mypy
files: ^(src/mqt|test/python)
exclude: "code_construction* | ^data_utils\\.py$"
args: []
additional_dependencies:
- importlib_resources
- numpy
- pytest
- pytest-mock
- ldpc
- bposd
- numba

# Check for spelling
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
args: ["-L", "wille,linz,applys", "--skip", "*.ipynb"]
args: ["-L", "wille,linz,applys,ser", "--skip", "*.ipynb"]

# Clang-format the C++ part of the code base automatically
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v17.0.4
rev: v17.0.6
hooks:
- id: clang-format
types_or: [c++, c, cuda]
@@ -104,7 +108,7 @@ repos:

# Format configuration files with prettier
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
rev: v3.1.0
hooks:
- id: prettier
types_or: [yaml, markdown, html, css, scss, javascript, json]
@@ -117,14 +121,3 @@ repos:
language: pygrep
entry: PyBind|Numpy|Cmake|CCache|Github|PyTest|Mqt|Tum
exclude: .pre-commit-config.yaml

# Checking sdist validity
- repo: https://github.com/henryiii/check-sdist
rev: v0.1.3
hooks:
- id: check-sdist
args: [--inject-junk]
additional_dependencies:
- scikit-build-core[pyproject]>=0.5.0,<0.6 # TODO: remove upper cap once scikit-build-core is updated
- setuptools-scm>=7
- pybind11>=2.11
15 changes: 15 additions & 0 deletions README.md
@@ -34,6 +34,8 @@ The tool can be used to:
- The framework allows to apply different QECC schemes to quantum circuits and either exports the resulting
circuits or simulates them using Qiskit [[4]](https://qiskit.org/). Currently, six different ECCs are supported
with varying extent of functionality.
- WIP: Decode bosonic quantum LDPC codes and conduct numerical simulations for analog information decoding under phenomenological
(cat qubit) noise.

<p align="center">
<a href="https://mqt.readthedocs.io/projects/qecc">
@@ -132,6 +134,9 @@ Windows support is currently experimental.

If you use our tool for your research, we will be thankful if you refer to it by citing the appropriate publication:

- [![a](https://img.shields.io/static/v1?label=arXiv&message=2311.01328&color=inactive&style=flat-square)](https://arxiv.org/abs/2311.01328)
L. Berent, T. Hillmann, J. Eisert, R. Wille, and J. Roffe, "Analog information decoding of bosonic quantum LDPC codes".

- [![a](https://img.shields.io/static/v1?label=arXiv&message=2303.14237&color=inactive&style=flat-square)](https://arxiv.org/abs/2303.14237)
L. Berent, L. Burgholzer, P.J. Derks, J. Eisert, and R. Wille, "Decoding quantum color codes with MaxSAT".

@@ -144,3 +149,13 @@ If you use our tool for your research, we will be thankful if you refer to it by
L. Berent, L. Burgholzer, and R.
Wille, "[Software Tools for Decoding Quantum Low-Density Parity Check Codes](https://arxiv.org/abs/2209.01180),"
in Asia and South Pacific Design Automation Conference (ASP-DAC), 2023

## Credits

The authors of this software are:

- Lucas Berent
- Lukas Burgholzer
- Thomas Grurl
- Peter-Jan H.S. Derks
- Timo Hillmann
56 changes: 56 additions & 0 deletions docs/source/AnalogInfo.rst
@@ -0,0 +1,56 @@
Analog Information Decoding
===========================
This submodule provides the means to conduct decoding simulations for quantum CSS codes with
analog syndrome information, as proposed in the corresponding paper :cite:labelpar:`berent2023analog`.
Proper integration and setup are a work in progress.
The main functionality is provided in the :code:`simulators` module, which contains the main classes
that can be used to conduct several types of simulations:

- :code:`ATD_Simulator`: analog Tanner graph decoding,
- :code:`Single-Shot Simulator`: analog single-shot decoding with meta checks, and
- :code:`QSS_Simulator`: quasi-single-shot decoding.

Moreover, :code:`memory_experiment` contains methods for analog overlapping window decoding, in
particular the :code:`decode_multiround` method.
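
As a rough, self-contained illustration of the kind of input these simulators operate on
(a hedged sketch, not their actual interface), the snippet below converts noisy analog syndrome
readouts into hard syndrome bits and associated reliabilities, assuming a Gaussian readout
channel with standard deviation :code:`sigma`:

.. code-block:: python

    import numpy as np

    rng = np.random.default_rng(seed=42)

    def analog_syndrome(true_syndrome: np.ndarray, sigma: float) -> np.ndarray:
        """Model an analog readout: ideal +/-1 syndrome values plus Gaussian noise."""
        bpsk = 1.0 - 2.0 * true_syndrome  # bit 0 -> +1, bit 1 -> -1
        return bpsk + rng.normal(0.0, sigma, size=true_syndrome.shape)

    def to_hard_and_reliability(analog: np.ndarray, sigma: float):
        """Threshold the analog values and keep their log-likelihood ratios as reliabilities."""
        hard = (analog < 0).astype(np.uint8)  # sign decision gives the hard syndrome
        llr = 2.0 * analog / sigma**2         # LLR of a +/-1 value observed through AWGN
        return hard, np.abs(llr)

    true_s = np.array([0, 1, 1, 0], dtype=np.uint8)
    noisy = analog_syndrome(true_s, sigma=0.4)
    hard_s, reliability = to_hard_and_reliability(noisy, sigma=0.4)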

Results
-------
The :code:`results` directory contains the results used in the paper :cite:labelpar:`berent2023analog`.

Codes
-----

The :code:`codes` directory contains the parity-check matrices of the codes used in the paper :cite:labelpar:`berent2023analog`.
Three-dimensional toric codes can be constructed either with the hypergraph product construction
or with a library, e.g., panqec :cite:labelpar:`huang2023panceq`.

Code construction
-----------------

The :code:`code_construction` directory contains the code used to construct higher-dimensional hypergraph
product codes, together with the :code:`compute_distances.sh` script, which uses the GAP package
QDistRnd :cite:labelpar:`pryadko2023qdistrnd` to automatically compute bounds on the distances of the constructed codes.
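
For intuition, the following is a minimal sketch of the standard (two-fold) hypergraph product of
two classical parity-check matrices; the actual scripts in this directory target higher-dimensional
products and may use different conventions:

.. code-block:: python

    import numpy as np

    def hypergraph_product(h1: np.ndarray, h2: np.ndarray):
        """Return the X/Z parity-check matrices of the hypergraph product code of h1 and h2."""
        r1, n1 = h1.shape
        r2, n2 = h2.shape
        hx = np.hstack([np.kron(h1, np.eye(n2, dtype=int)),
                        np.kron(np.eye(r1, dtype=int), h2.T)]) % 2
        hz = np.hstack([np.kron(np.eye(n1, dtype=int), h2),
                        np.kron(h1.T, np.eye(r2, dtype=int))]) % 2
        assert not ((hx @ hz.T) % 2).any(), "X and Z checks must commute"
        return hx, hz

    # Product of two cyclic length-3 repetition codes: an 18-qubit toric-code-like CSS code.
    h_rep = np.array([[1, 1, 0], [0, 1, 1], [1, 0, 1]], dtype=int)
    hx, hz = hypergraph_product(h_rep, h_rep)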

Utils
-----
Here we present an overview of the functionality of the :code:`utils` package.

Plotting
++++++++
The :code:`plotting` directory contains the code used to plot the results in the paper :cite:labelpar:`berent2023analog`.

Data Utils
++++++++++

We have implemented several utility functions as part of this package that might be of independent
interest for decoding simulations and data analysis. These are provided in the :code:`data_utils` module.

Simulation Utils
++++++++++++++++
This module contains functionality needed throughout different simulation types.

Dependencies
++++++++++++

The BP+OSD implementation we use, as well as our implementation of the soft-syndrome minimum-sum decoder,
is provided in the ldpc2 package, a preliminary beta version of which is available on GitHub :cite:labelpar:`roffe2023ldpc`.
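
To sketch the underlying idea (a simplified, textbook-style rendition under our own naming, not the
exact update rule or API of the ldpc2 package), a soft-syndrome min-sum decoder can treat the analog
syndrome value of a check as one more incoming message at that check node:

.. code-block:: python

    import numpy as np

    def check_to_variable_messages(var_msgs: np.ndarray, syndrome_llr: float) -> np.ndarray:
        """Min-sum check-node update that folds in a soft (analog) syndrome.

        var_msgs holds the incoming variable-to-check LLRs of a single check;
        syndrome_llr is the LLR of that check's analog syndrome readout
        (its sign corresponds to the hard-syndrome guess).
        """
        msgs = np.append(var_msgs, syndrome_llr)  # treat the syndrome as a virtual edge
        signs = np.sign(msgs)
        total_sign = np.prod(signs)
        abs_msgs = np.abs(msgs)
        out = np.empty_like(var_msgs)
        for i in range(len(var_msgs)):
            # leave-one-out sign product and minimum magnitude
            out[i] = total_sign * signs[i] * np.delete(abs_msgs, i).min()
        return out

    # Three variable messages and a fairly reliable "syndrome = 1" readout (negative LLR).
    print(check_to_variable_messages(np.array([2.0, -0.5, 1.5]), syndrome_llr=-3.0))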
2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -116,7 +116,7 @@
class CDAStyle(UnsrtStyle):
"""Custom style for including PDF links."""

def format_url(self, _e: Entry) -> HRef:
def format_url(self, _e: Entry) -> HRef: # noqa: PLR6301
"""Format URL field as a link to the PDF."""
url = field("url", raw=True)
return href()[url, "[PDF]"]
32 changes: 32 additions & 0 deletions docs/source/refs.bib
@@ -108,3 +108,35 @@ @article{FowlerSurfaceCodes
doi = {10.1103/PhysRevA.80.052312},
url = {https://link.aps.org/doi/10.1103/PhysRevA.80.052312}
}
@article{berent2023analog,
title={Analog information decoding of bosonic quantum LDPC codes},
author={Berent, Lucas and Hillmann, Timo and Eisert, Jens and Wille, Robert and Roffe, Joschka},
journal={arXiv preprint arXiv:2311.01328},
year={2023}
}
@misc{huang2023panceq,
author = {Huang, Eric and Pesah, Arthur},
title = {PanQEC},
year = {2023},
publisher = {GitHub},
journal = {GitHub repository},
howpublished = {\url{https://github.com/panqec/panqec}},
commit = {a9dd27e}
}

@misc{roffe2023ldpc,
author = {Roffe, Joschka},
title = {LDPC},
year = {2023},
publisher = {GitHub},
journal = {GitHub repository},
howpublished = {\url{https://github.com/quantumgizmos/ldpc/tree/ldpc_v2}},
commit = {9fe6d13}
}

@article{pryadko2023qdistrnd,
title={QDistRnd: A GAP package for computing the distance of quantum error-correcting codes},
author={Pryadko, Leonid P and Shabashov, Vadim A and Kozin, Valerii K},
journal={arXiv preprint arXiv:2308.15140},
year={2023}
}
2 changes: 1 addition & 1 deletion extern/mqt-core
Submodule mqt-core updated 217 files
38 changes: 10 additions & 28 deletions noxfile.py
@@ -16,6 +16,12 @@

PYTHON_ALL_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]

BUILD_REQUIREMENTS = [
"scikit-build-core[pyproject]>=0.6.1",
"setuptools_scm>=7",
"pybind11>=2.11",
]

if os.environ.get("CI", None):
nox.options.error_on_missing_interpreters = True

@@ -27,19 +33,6 @@ def lint(session: nox.Session) -> None:
session.run("pre-commit", "run", "--all-files", *session.posargs)


@nox.session(reuse_venv=True)
def pylint(session: nox.Session) -> None:
"""Run PyLint.
Simply execute `nox -rs pylint` to run PyLint.
"""
session.install(
"scikit-build-core[pyproject]<0.6", "setuptools_scm", "pybind11"
) # TODO: remove upper cap once scikit-build-core is updated
session.install("--no-build-isolation", "-ve.", "pylint")
session.run("pylint", "mqt.qecc", *session.posargs)


def _run_tests(
session: nox.Session,
*,
@@ -51,21 +44,15 @@ def _run_tests(
env = {"PIP_DISABLE_PIP_VERSION_CHECK": "1"}

if os.environ.get("CI", None) and sys.platform == "win32":
env["SKBUILD_CMAKE_ARGS"] = "-T ClangCL"
env["CMAKE_GENERATOR"] = "Ninja"

_extras = ["test", *extras]
if "--cov" in posargs:
_extras.append("coverage")
posargs.append("--cov-config=pyproject.toml")

session.install(
"scikit-build-core[pyproject]<0.6",
"setuptools_scm",
"pybind11",
*install_args,
env=env,
) # TODO: remove upper cap once scikit-build-core is updated
install_arg = f"-ve.[{','.join(_extras)}]"
session.install(*BUILD_REQUIREMENTS, *install_args, env=env)
install_arg = f".[{','.join(_extras)}]"
session.install("--no-build-isolation", install_arg, *install_args, env=env)
session.run("pytest", *run_args, *posargs, env=env)

@@ -98,13 +85,8 @@ def docs(session: nox.Session) -> None:
if args.builder != "html" and args.serve:
session.error("Must not specify non-HTML builder with --serve")

build_requirements = [
"scikit-build-core[pyproject]<0.6",
"setuptools_scm",
"pybind11",
] # TODO: remove upper cap once scikit-build-core is updated
extra_installs = ["sphinx-autobuild"] if args.serve else []
session.install(*build_requirements, *extra_installs)
session.install(*BUILD_REQUIREMENTS, *extra_installs)
session.install("--no-build-isolation", "-ve.[docs]")
session.chdir("docs")
