diff --git a/.github/workflows/deploy-develop.yml b/.github/workflows/deploy-develop.yml
index a6fcfbb..f50deb0 100644
--- a/.github/workflows/deploy-develop.yml
+++ b/.github/workflows/deploy-develop.yml
@@ -7,9 +7,9 @@ on:
 
 jobs:
   deploy:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Register SSH key
         env:
           SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
@@ -19,11 +19,11 @@ jobs:
           chmod 600 $HOME/.ssh/id_ed25519
       - name: Add SSH config
         env:
-          SSH_HOST: ${{ secrets.SSH_HOST }}
+          SSH_HOST: ${{ secrets.SSH_HOST_UBUNTU_24_04 }}
           SSH_USER: ${{ secrets.SSH_USER }}
         run: |
           set -ex
-          echo "Host wandbox" >> $HOME/.ssh/config
+          echo "Host wandbox-ubuntu-24.04" >> $HOME/.ssh/config
           echo " HostName $SSH_HOST" >> $HOME/.ssh/config
           echo " User $SSH_USER" >> $HOME/.ssh/config
           echo " ServerAliveInterval 60" >> $HOME/.ssh/config
@@ -33,31 +33,27 @@ jobs:
         run: sudo apt-get install -y libcap-dev
       - name: Get cached tools
         id: cache-tools
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           path: _install
-          key: install-deps-v2
-      - name: Install deps
-        run: ./install_deps.sh --release
+          key: install-deps-v3
       - name: Package cattleshed
-        run: |
-          ./package.sh cattleshed develop
+        run: python3 run.py package --env develop cattleshed
       - name: Package kennel
-        run: |
-          ./package.sh kennel develop
+        run: python3 run.py package --env develop kennel
       - name: Deploy cattleshed to develop.wandbox.org
         run: |
-          ./deploy.sh wandbox cattleshed develop
+          python3 run.py deploy wandbox-ubuntu-24.04 cattleshed --env develop
       - name: Deploy kennel to develop.wandbox.org
         run: |
-          ./deploy.sh wandbox kennel develop
+          python3 run.py deploy wandbox-ubuntu-24.04 kennel --env develop
       - uses: actions/setup-node@v2
         with:
           node-version: '16'
       - name: Cache node modules
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         env:
-          cache-name: cache-node-modules-v1
+          cache-name: cache-node-modules-v2
         with:
           path: ~/.npm
           key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('canine/package-lock.json') }}
diff --git a/.github/workflows/deploy-master.yml b/.github/workflows/deploy-master.yml
index 78d24b7..bf5965a 100644
--- a/.github/workflows/deploy-master.yml
+++ b/.github/workflows/deploy-master.yml
@@ -7,9 +7,9 @@ on:
 
 jobs:
   deploy:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Register SSH key
         env:
           SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
@@ -19,11 +19,11 @@ jobs:
           chmod 600 $HOME/.ssh/id_ed25519
       - name: Add SSH config
         env:
-          SSH_HOST: ${{ secrets.SSH_HOST }}
+          SSH_HOST: ${{ secrets.SSH_HOST_UBUNTU_24_04 }}
           SSH_USER: ${{ secrets.SSH_USER }}
         run: |
           set -ex
-          echo "Host wandbox" >> $HOME/.ssh/config
+          echo "Host wandbox-ubuntu-24.04" >> $HOME/.ssh/config
           echo " HostName $SSH_HOST" >> $HOME/.ssh/config
           echo " User $SSH_USER" >> $HOME/.ssh/config
           echo " ServerAliveInterval 60" >> $HOME/.ssh/config
@@ -33,31 +33,27 @@ jobs:
         run: sudo apt-get install -y libcap-dev
       - name: Get cached tools
         id: cache-tools
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           path: _install
-          key: install-deps-v2
-      - name: Install deps
-        run: ./install_deps.sh --release
+          key: install-deps-v3
       - name: Package cattleshed
-        run: |
-          ./package.sh cattleshed master
+        run: python3 run.py package --env master cattleshed
       - name: Package kennel
-        run: |
-          ./package.sh kennel master
+        run: python3 run.py package --env master kennel
      - name: Deploy cattleshed to wandbox.org
         run: |
-          ./deploy.sh wandbox cattleshed master
+          python3 run.py deploy wandbox-ubuntu-24.04 cattleshed --env master
       - name: Deploy kennel to wandbox.org
         run: |
-          ./deploy.sh wandbox kennel master
+          python3 run.py deploy wandbox-ubuntu-24.04 kennel --env master
       - uses: actions/setup-node@v2
         with:
           node-version: '16'
       - name: Cache node modules
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         env:
-          cache-name: cache-node-modules-v1
+          cache-name: cache-node-modules-v2
         with:
           path: ~/.npm
           key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('canine/package-lock.json') }}
@@ -71,11 +67,11 @@ jobs:
           set -ex
           npm install
           npm run build
-      - name: Publish canine
-        working-directory: canine
-        env:
-          CF_API_TOKEN: ${{ secrets.CF_API_TOKEN }}
-        run: |
-          set -ex
-          npm install -g @cloudflare/wrangler
-          npm run deploy:prod
\ No newline at end of file
+      # - name: Publish canine
+      #   working-directory: canine
+      #   env:
+      #     CF_API_TOKEN: ${{ secrets.CF_API_TOKEN }}
+      #   run: |
+      #     set -ex
+      #     npm install -g @cloudflare/wrangler
+      #     npm run deploy:prod
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index fc886bf..e240d09 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,16 @@
 {
+  "python.analysis.autoImportCompletions": true,
+  "python.analysis.typeCheckingMode": "basic",
+  "[python]": {
+    "editor.formatOnSave": true,
+    "editor.formatOnSaveMode": "file",
+    "editor.defaultFormatter": "charliermarsh.ruff",
+    "editor.autoIndent": "full",
+    "editor.codeActionsOnSave": {
+      "source.fixAll.ruff": "always",
+      "source.organizeImports.ruff": "explicit"
+    }
+  },
   "[javascript]": {
     "editor.formatOnSave": true,
     "editor.defaultFormatter": "esbenp.prettier-vscode"
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..3b2312d
--- /dev/null
+++ b/VERSION
@@ -0,0 +1,11 @@
+WANDBOX_VERSION=0.1.0
+CMAKE_VERSION=3.29.4
+GRPC_VERSION=v1.64.1
+GGRPC_VERSION=0.5.8
+SPDLOG_VERSION=v1.14.1
+BOOST_VERSION=1.86.0
+CLI11_VERSION=v2.4.2
+PROTOC_GEN_JSONIF_VERSION=0.13.0
+SQLITE3_VERSION=3.46.1
+SQLITE3_YEAR=2024
+CPPDB_VERSION=v0.3.1
diff --git a/buildbase.py b/buildbase.py
new file mode 100644
index 0000000..b04d77a
--- /dev/null
+++ b/buildbase.py
@@ -0,0 +1,2062 @@
+# buildbase.py is a template file for build scripts.
+#
+# Copy it into your own repository to use it.
+#
+# The original file lives in the following repository:
+# https://github.com/melpon/buildbase
+#
+# To update it, run the following command:
+# curl -LO https://raw.githubusercontent.com/melpon/buildbase/master/buildbase.py
+#
+# License: Apache License 2.0
+#
+# Copyright 2024 melpon (Wandbox)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import filecmp +import glob +import logging +import multiprocessing +import os +import platform +import shlex +import shutil +import stat +import subprocess +import tarfile +import urllib.parse +import zipfile +from typing import Dict, List, NamedTuple, Optional + +if platform.system() == "Windows": + import winreg + + +class ChangeDirectory(object): + def __init__(self, cwd): + self._cwd = cwd + + def __enter__(self): + self._old_cwd = os.getcwd() + logging.debug(f"pushd {self._old_cwd} --> {self._cwd}") + os.chdir(self._cwd) + + def __exit__(self, exctype, excvalue, trace): + logging.debug(f"popd {self._old_cwd} <-- {self._cwd}") + os.chdir(self._old_cwd) + return False + + +def cd(cwd): + return ChangeDirectory(cwd) + + +def cmd(args, **kwargs): + logging.debug(f"+{args} {kwargs}") + if "check" not in kwargs: + kwargs["check"] = True + if "resolve" in kwargs: + resolve = kwargs["resolve"] + del kwargs["resolve"] + else: + resolve = True + if resolve: + args = [shutil.which(args[0]), *args[1:]] + return subprocess.run(args, **kwargs) + + +# 標準出力をキャプチャするコマンド実行。シェルの `cmd ...` や $(cmd ...) と同じ +def cmdcap(args, **kwargs): + # 3.7 でしか使えない + # kwargs['capture_output'] = True + kwargs["stdout"] = subprocess.PIPE + kwargs["stderr"] = subprocess.PIPE + kwargs["encoding"] = "utf-8" + return cmd(args, **kwargs).stdout.strip() + + +# https://stackoverflow.com/a/2656405 +def onerror(func, path, exc_info): + """ + Error handler for ``shutil.rmtree``. + If the error is due to an access error (read only file) + it attempts to add write permission and then retries. + If the error is for another reason it re-raises the error. + + Usage : ``shutil.rmtree(path, onerror=onerror)`` + """ + import stat + + # Is the error an access error? + if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + +def rm_rf(path: str): + if not os.path.exists(path): + logging.debug(f"rm -rf {path} => path not found") + return + if os.path.isfile(path) or os.path.islink(path): + os.remove(path) + logging.debug(f"rm -rf {path} => file removed") + if os.path.isdir(path): + shutil.rmtree(path, onerror=onerror) + logging.debug(f"rm -rf {path} => directory removed") + + +def mkdir_p(path: str): + if os.path.exists(path): + logging.debug(f"mkdir -p {path} => already exists") + return + os.makedirs(path, exist_ok=True) + logging.debug(f"mkdir -p {path} => directory created") + + +if platform.system() == "Windows": + PATH_SEPARATOR = ";" +else: + PATH_SEPARATOR = ":" + + +def add_path(path: str, is_after=False): + logging.debug(f"add_path: {path}") + if "PATH" not in os.environ: + os.environ["PATH"] = path + return + + if is_after: + os.environ["PATH"] = os.environ["PATH"] + PATH_SEPARATOR + path + else: + os.environ["PATH"] = path + PATH_SEPARATOR + os.environ["PATH"] + + +def download(url: str, output_dir: Optional[str] = None, filename: Optional[str] = None) -> str: + if filename is None: + output_path = urllib.parse.urlparse(url).path.split("/")[-1] + else: + output_path = filename + + if output_dir is not None: + output_path = os.path.join(output_dir, output_path) + + if os.path.exists(output_path): + return output_path + + try: + if shutil.which("curl") is not None: + cmd(["curl", "-fLo", output_path, url]) + else: + cmd(["wget", "-cO", output_path, url]) + except Exception: + # ゴミを残さないようにする + if os.path.exists(output_path): + os.remove(output_path) + raise + + return output_path + + +def read_version_file(path: str) -> Dict[str, str]: + versions = {} + + lines = open(path).readlines() + 
for line in lines: + line = line.strip() + + # コメント行 + if line[:1] == "#": + continue + + # 空行 + if len(line) == 0: + continue + + [a, b] = map(lambda x: x.strip(), line.split("=", 2)) + versions[a] = b.strip('"') + + return versions + + +# dir 以下にある全てのファイルパスを、dir2 からの相対パスで返す +def enum_all_files(dir, dir2): + for root, _, files in os.walk(dir): + for file in files: + yield os.path.relpath(os.path.join(root, file), dir2) + + +def versioned(func): + def wrapper(version, version_file, *args, **kwargs): + if "ignore_version" in kwargs: + if kwargs.get("ignore_version"): + rm_rf(version_file) + del kwargs["ignore_version"] + + if os.path.exists(version_file): + ver = open(version_file).read() + if ver.strip() == version.strip(): + return + + r = func(version=version, *args, **kwargs) + + with open(version_file, "w") as f: + f.write(version) + + return r + + return wrapper + + +# アーカイブが単一のディレクトリに全て格納されているかどうかを調べる。 +# +# 単一のディレクトリに格納されている場合はそのディレクトリ名を返す。 +# そうでない場合は None を返す。 +def _is_single_dir(infos, get_name, is_dir) -> Optional[str]: + # tarfile: ['path', 'path/to', 'path/to/file.txt'] + # zipfile: ['path/', 'path/to/', 'path/to/file.txt'] + # どちらも / 区切りだが、ディレクトリの場合、後ろに / が付くかどうかが違う + dirname = None + for info in infos: + name = get_name(info) + n = name.rstrip("/").find("/") + if n == -1: + # ルートディレクトリにファイルが存在している + if not is_dir(info): + return None + dir = name.rstrip("/") + else: + dir = name[0:n] + # ルートディレクトリに2個以上のディレクトリが存在している + if dirname is not None and dirname != dir: + return None + dirname = dir + + return dirname + + +def is_single_dir_tar(tar: tarfile.TarFile) -> Optional[str]: + return _is_single_dir(tar.getmembers(), lambda t: t.name, lambda t: t.isdir()) + + +def is_single_dir_zip(zip: zipfile.ZipFile) -> Optional[str]: + return _is_single_dir(zip.infolist(), lambda z: z.filename, lambda z: z.is_dir()) + + +# 解凍した上でファイル属性を付与する +def _extractzip(z: zipfile.ZipFile, path: str): + z.extractall(path) + if platform.system() == "Windows": + return + for info in z.infolist(): + if info.is_dir(): + continue + filepath = os.path.join(path, info.filename) + mod = info.external_attr >> 16 + if (mod & 0o120000) == 0o120000: + # シンボリックリンク + with open(filepath, "r") as f: + src = f.read() + os.remove(filepath) + with cd(os.path.dirname(filepath)): + if os.path.exists(src): + os.symlink(src, filepath) + if os.path.exists(filepath): + # 普通のファイル + os.chmod(filepath, mod & 0o777) + + +# zip または tar.gz ファイルを展開する。 +# +# 展開先のディレクトリは {output_dir}/{output_dirname} となり、 +# 展開先のディレクトリが既に存在していた場合は削除される。 +# +# もしアーカイブの内容が単一のディレクトリであった場合、 +# そのディレクトリは無いものとして展開される。 +# +# つまりアーカイブ libsora-1.23.tar.gz の内容が +# ['libsora-1.23', 'libsora-1.23/file1', 'libsora-1.23/file2'] +# であった場合、extract('libsora-1.23.tar.gz', 'out', 'libsora') のようにすると +# - out/libsora/file1 +# - out/libsora/file2 +# が出力される。 +# +# また、アーカイブ libsora-1.23.tar.gz の内容が +# ['libsora-1.23', 'libsora-1.23/file1', 'libsora-1.23/file2', 'LICENSE'] +# であった場合、extract('libsora-1.23.tar.gz', 'out', 'libsora') のようにすると +# - out/libsora/libsora-1.23/file1 +# - out/libsora/libsora-1.23/file2 +# - out/libsora/LICENSE +# が出力される。 +def extract(file: str, output_dir: str, output_dirname: str, filetype: Optional[str] = None): + path = os.path.join(output_dir, output_dirname) + logging.info(f"Extract {file} to {path}") + if filetype == "gzip" or file.endswith(".tar.gz"): + rm_rf(path) + with tarfile.open(file) as t: + dir = is_single_dir_tar(t) + if dir is None: + os.makedirs(path, exist_ok=True) + t.extractall(path) + else: + logging.info(f"Directory {dir} is 
stripped") + path2 = os.path.join(output_dir, dir) + rm_rf(path2) + t.extractall(output_dir) + if path != path2: + logging.debug(f"mv {path2} {path}") + os.replace(path2, path) + elif filetype == "zip" or file.endswith(".zip"): + rm_rf(path) + with zipfile.ZipFile(file) as z: + dir = is_single_dir_zip(z) + if dir is None: + os.makedirs(path, exist_ok=True) + # z.extractall(path) + _extractzip(z, path) + else: + logging.info(f"Directory {dir} is stripped") + path2 = os.path.join(output_dir, dir) + rm_rf(path2) + # z.extractall(output_dir) + _extractzip(z, output_dir) + if path != path2: + logging.debug(f"mv {path2} {path}") + os.replace(path2, path) + else: + raise Exception("file should end with .tar.gz or .zip") + + +def clone_and_checkout(url, version, dir, fetch, fetch_force): + if fetch_force: + rm_rf(dir) + + if not os.path.exists(os.path.join(dir, ".git")): + cmd(["git", "clone", url, dir]) + fetch = True + + if fetch: + with cd(dir): + cmd(["git", "fetch"]) + cmd(["git", "reset", "--hard"]) + cmd(["git", "clean", "-df"]) + cmd(["git", "checkout", "-f", version]) + + +def git_clone_shallow(url, hash, dir, submodule=False): + rm_rf(dir) + mkdir_p(dir) + with cd(dir): + cmd(["git", "init"]) + cmd(["git", "remote", "add", "origin", url]) + cmd(["git", "fetch", "--depth=1", "origin", hash]) + cmd(["git", "reset", "--hard", "FETCH_HEAD"]) + if submodule: + cmd( + [ + "git", + "submodule", + "update", + "--init", + "--recursive", + "--recommend-shallow", + "--depth", + "1", + ] + ) + + +def apply_patch(patch, dir, depth): + with cd(dir): + logging.info(f"patch -p{depth} < {patch}") + if platform.system() == "Windows": + cmd( + [ + "git", + "apply", + f"-p{depth}", + "--ignore-space-change", + "--ignore-whitespace", + "--whitespace=nowarn", + patch, + ] + ) + else: + with open(patch) as stdin: + cmd(["patch", f"-p{depth}"], stdin=stdin) + + +def apply_patch_text(patch_text, dir, depth): + with cd(dir): + logging.info(f"echo '{patch_text[:100]}...' 
| patch -p{depth} -") + directory = cmdcap(["git", "rev-parse", "--show-prefix"]) + if platform.system() == "Windows": + cmd( + [ + "git", + "apply", + f"-p{depth}", + "--ignore-space-change", + "--ignore-whitespace", + "--whitespace=nowarn", + f"--directory={directory}", + "-", + ], + input=patch_text, + text=True, + encoding="utf-8", + ) + else: + cmd(["patch", f"-p{depth}"], input=patch_text, text=True, encoding="utf-8") + + +def copyfile_if_different(src, dst): + if os.path.exists(dst) and filecmp.cmp(src, dst, shallow=False): + return + shutil.copyfile(src, dst) + + +# NOTE(enm10k): shutil.copytree に Python 3.8 で追加された dirs_exist_ok=True を指定して使いたかったが、 +# GitHub Actions の Windows のランナー (widnwos-2019) にインストールされている Python のバージョンが古くて利用できなかった +# actions/setup-python で Python 3.8 を設定してビルドしたところ、 Lyra のビルドがエラーになったためこの関数を自作した +# Windows のランナーを更新した場合は、この関数は不要になる可能性が高い +def copytree(src_dir, dst_dir): + for file_path in glob.glob(src_dir + "/**", recursive=True): + dest_path = os.path.join(dst_dir, os.path.relpath(file_path, src_dir)) + + if os.path.isdir(file_path): + os.makedirs(dest_path, exist_ok=True) + else: + shutil.copy2(file_path, dest_path) + + +def git_get_url_and_revision(dir): + with cd(dir): + rev = cmdcap(["git", "rev-parse", "HEAD"]) + url = cmdcap(["git", "remote", "get-url", "origin"]) + return url, rev + + +def replace_vcproj_static_runtime(project_file: str): + # なぜか MSVC_STATIC_RUNTIME が効かずに DLL ランタイムを使ってしまうので + # 生成されたプロジェクトに対して静的ランタイムを使うように変更する + s = open(project_file, "r", encoding="utf-8").read() + s = s.replace("MultiThreadedDLL", "MultiThreaded") + s = s.replace("MultiThreadedDebugDLL", "MultiThreadedDebug") + open(project_file, "w", encoding="utf-8").write(s) + + +@versioned +def install_webrtc(version, source_dir, install_dir, platform: str): + win = platform.startswith("windows_") + filename = f'webrtc.{platform}.{"zip" if win else "tar.gz"}' + rm_rf(os.path.join(source_dir, filename)) + archive = download( + f"https://github.com/shiguredo-webrtc-build/webrtc-build/releases/download/{version}/{filename}", + output_dir=source_dir, + ) + rm_rf(os.path.join(install_dir, "webrtc")) + extract(archive, output_dir=install_dir, output_dirname="webrtc") + + +def build_webrtc(platform, local_webrtc_build_dir, local_webrtc_build_args, debug): + with cd(local_webrtc_build_dir): + args = ["--webrtc-nobuild-ios-framework", "--webrtc-nobuild-android-aar"] + if debug: + args += ["--debug"] + + args += local_webrtc_build_args + + cmd(["python3", "run.py", "build", platform, *args]) + + # インクルードディレクトリを増やしたくないので、 + # __config_site を libc++ のディレクトリにコピーしておく + webrtc_source_dir = os.path.join(local_webrtc_build_dir, "_source", platform, "webrtc") + src_config = os.path.join( + webrtc_source_dir, "src", "buildtools", "third_party", "libc++", "__config_site" + ) + dst_config = os.path.join( + webrtc_source_dir, "src", "third_party", "libc++", "src", "include", "__config_site" + ) + copyfile_if_different(src_config, dst_config) + + # __assertion_handler をコピーする + src_assertion = os.path.join( + webrtc_source_dir, + "src", + "buildtools", + "third_party", + "libc++", + "__assertion_handler", + ) + dst_assertion = os.path.join( + webrtc_source_dir, + "src", + "third_party", + "libc++", + "src", + "include", + "__assertion_handler", + ) + copyfile_if_different(src_assertion, dst_assertion) + + +class WebrtcInfo(NamedTuple): + version_file: str + deps_file: str + webrtc_include_dir: str + webrtc_source_dir: Optional[str] + webrtc_library_dir: str + clang_dir: str + libcxx_dir: str + + +def 
get_webrtc_info( + platform: str, local_webrtc_build_dir: Optional[str], install_dir: str, debug: bool +) -> WebrtcInfo: + webrtc_install_dir = os.path.join(install_dir, "webrtc") + + if local_webrtc_build_dir is None: + return WebrtcInfo( + version_file=os.path.join(webrtc_install_dir, "VERSIONS"), + deps_file=os.path.join(webrtc_install_dir, "DEPS"), + webrtc_include_dir=os.path.join(webrtc_install_dir, "include"), + webrtc_source_dir=None, + webrtc_library_dir=os.path.join(webrtc_install_dir, "lib"), + clang_dir=os.path.join(install_dir, "llvm", "clang"), + libcxx_dir=os.path.join(install_dir, "llvm", "libcxx"), + ) + else: + webrtc_build_source_dir = os.path.join( + local_webrtc_build_dir, "_source", platform, "webrtc" + ) + configuration = "debug" if debug else "release" + webrtc_build_build_dir = os.path.join( + local_webrtc_build_dir, "_build", platform, configuration, "webrtc" + ) + + return WebrtcInfo( + version_file=os.path.join(local_webrtc_build_dir, "VERSION"), + deps_file=os.path.join(local_webrtc_build_dir, "DEPS"), + webrtc_include_dir=os.path.join(webrtc_build_source_dir, "src"), + webrtc_source_dir=os.path.join(webrtc_build_source_dir, "src"), + webrtc_library_dir=webrtc_build_build_dir, + clang_dir=os.path.join( + webrtc_build_source_dir, "src", "third_party", "llvm-build", "Release+Asserts" + ), + libcxx_dir=os.path.join(webrtc_build_source_dir, "src", "third_party", "libc++", "src"), + ) + + +@versioned +def install_boost(version, source_dir, install_dir, sora_version, platform: str): + win = platform.startswith("windows_") + filename = ( + f'boost-{version}_sora-cpp-sdk-{sora_version}_{platform}.{"zip" if win else "tar.gz"}' + ) + rm_rf(os.path.join(source_dir, filename)) + archive = download( + f"https://github.com/shiguredo/sora-cpp-sdk/releases/download/{sora_version}/{filename}", + output_dir=source_dir, + ) + rm_rf(os.path.join(install_dir, "boost")) + extract(archive, output_dir=install_dir, output_dirname="boost") + + +# 以下の問題を解決するためのパッチ +# +# No support for msvc-toolset 14.4x (VS 2022, 17.10.x): https://github.com/boostorg/boost/issues/914 +BOOST_PATCH_SUPPORT_14_4 = r""" +diff --git a/tools/build/src/engine/config_toolset.bat b/tools/build/src/engine/config_toolset.bat +index 4ba577cac..3e3f6a3a1 100644 +--- a/tools/build/src/engine/config_toolset.bat ++++ b/tools/build/src/engine/config_toolset.bat +@@ -157,7 +157,7 @@ pushd %CD% + if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS% + popd + @REM set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +-set "B2_CXX="%CXX%" /nologo -TP /wd4996 /wd4675 /EHs /GR /Zc:throwingNew /O2 /Ob2 /W3 /MD /Zc:forScope /Zc:wchar_t /Zc:inline /Gw /favor:blend /Feb2" ++set "B2_CXX="%CXX%" /nologo -TP /wd4996 /wd4675 /EHs /GR /Zc:throwingNew /O2 /Ob2 /W3 /MT /Zc:forScope /Zc:wchar_t /Zc:inline /Gw /favor:blend /Feb2" + set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" + set "_known_=1" + goto :Embed_Minafest_Via_Link +diff --git a/tools/build/src/tools/msvc.jam b/tools/build/src/tools/msvc.jam +index 54a6ced32..4bb3810b3 100644 +--- a/tools/build/src/tools/msvc.jam ++++ b/tools/build/src/tools/msvc.jam +@@ -1137,7 +1137,15 @@ local rule generate-setup-cmd ( version : command : parent : options * : cpu : g + } + else + { +- if [ MATCH "(14.3)" : $(version) ] ++ if [ MATCH "(14.4)" : $(version) ] ++ { ++ if $(.debug-configuration) ++ { ++ ECHO "notice: [generate-setup-cmd] $(version) is 14.4" ; ++ } ++ parent = [ path.native [ path.join 
$(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ; ++ } ++ else if [ MATCH "(14.3)" : $(version) ] + { + if $(.debug-configuration) + { +@@ -1316,7 +1324,11 @@ local rule configure-really ( version ? : options * ) + # version from the path. + # FIXME: We currently detect both Microsoft Visual Studio 9.0 and + # 9.0express as 9.0 here. +- if [ MATCH "(MSVC\\\\14.3)" : $(command) ] ++ if [ MATCH "(MSVC\\\\14.4)" : $(command) ] ++ { ++ version = 14.4 ; ++ } ++ else if [ MATCH "(MSVC\\\\14.3)" : $(command) ] + { + version = 14.3 ; + } +@@ -1745,13 +1757,17 @@ local rule default-path ( version ) + # And fortunately, forward slashes do also work in native Windows. + local vswhere = "$(root)/Microsoft Visual Studio/Installer/vswhere.exe" ; + # The check for $(root) is to avoid a segmentation fault if not found. +- if $(version) in 14.1 14.2 14.3 default && $(root) && [ path.exists $(vswhere) ] ++ if $(version) in 14.1 14.2 14.3 14.4 default && $(root) && [ path.exists $(vswhere) ] + { + local req = "-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64" ; + local prop = "-property installationPath" ; + local limit ; + +- if $(version) = 14.3 ++ if $(version) = 14.4 ++ { ++ limit = "-version \"[17.0,18.0)\" -prerelease" ; ++ } ++ else if $(version) = 14.3 + { + limit = "-version \"[17.0,18.0)\" -prerelease" ; + } +@@ -2174,7 +2190,7 @@ for local arch in [ MATCH "^\\.cpus-on-(.*)" : [ VARNAMES $(__name__) ] ] + armv7 armv7s ; + + # Known toolset versions, in order of preference. +-.known-versions = 14.3 14.2 14.1 14.0 12.0 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 ++.known-versions = 14.4 14.3 14.2 14.1 14.0 12.0 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 + 7.1toolkit 7.0 6.0 ; + + # Version aliases. +@@ -2226,6 +2242,11 @@ for local arch in [ MATCH "^\\.cpus-on-(.*)" : [ VARNAMES $(__name__) ] ] + "Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*/bin/Host*/*" + ; + .version-14.3-env = VS170COMNTOOLS ProgramFiles ProgramFiles(x86) ; ++.version-14.4-path = ++ "../../VC/Tools/MSVC/*/bin/Host*/*" ++ "Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*/bin/Host*/*" ++ ; ++.version-14.4-env = VS170COMNTOOLS ProgramFiles ProgramFiles(x86) ; + + # Auto-detect all the available msvc installations on the system. 
+ auto-detect-toolset-versions ; +""" + + +@versioned +def build_and_install_boost( + version: str, + source_dir, + build_dir, + install_dir, + debug: bool, + cxx: str, + cflags: List[str], + cxxflags: List[str], + linkflags: List[str], + toolset, + visibility, + target_os, + architecture, + android_ndk, + native_api_level, + address_model="64", + runtime_link=None, +): + version_underscore = version.replace(".", "_") + archive = download( + f"https://boostorg.jfrog.io/artifactory/main/release/{version}/source/boost_{version_underscore}.tar.gz", + source_dir, + ) + extract(archive, output_dir=build_dir, output_dirname="boost") + with cd(os.path.join(build_dir, "boost")): + bootstrap = ".\\bootstrap.bat" if target_os == "windows" else "./bootstrap.sh" + b2 = "b2" if target_os == "windows" else "./b2" + if runtime_link is None: + runtime_link = "static" if target_os == "windows" else "shared" + + # Windows かつ Boost 1.85.0 の場合はパッチを当てる + if target_os == "windows" and version == "1.85.0": + apply_patch_text(BOOST_PATCH_SUPPORT_14_4, os.path.join(build_dir, "boost"), 1) + + cmd([bootstrap]) + + if target_os == "iphone": + IOS_BUILD_TARGETS = [("arm64", "iphoneos")] + for arch, sdk in IOS_BUILD_TARGETS: + clangpp = cmdcap(["xcodebuild", "-find", "clang++"]) + sysroot = cmdcap(["xcrun", "--sdk", sdk, "--show-sdk-path"]) + boost_arch = "x86" if arch == "x86_64" else "arm" + with open("project-config.jam", "w") as f: + f.write( + f"using clang \ + : iphone \ + : {clangpp} -arch {arch} -isysroot {sysroot} \ + -fembed-bitcode \ + -mios-version-min=10.0 \ + -fvisibility=hidden \ + : {sysroot} \ + ; \ + " + ) + cmd( + [ + b2, + "install", + "-d+0", + f'--build-dir={os.path.join(build_dir, "boost", f"build-{arch}-{sdk}")}', + f'--prefix={os.path.join(build_dir, "boost", f"install-{arch}-{sdk}")}', + "--with-json", + "--with-filesystem", + "--layout=system", + "--ignore-site-config", + f'variant={"debug" if debug else "release"}', + f'cflags={" ".join(cflags)}', + f'cxxflags={" ".join(cxxflags)}', + f'linkflags={" ".join(linkflags)}', + f"toolset={toolset}", + f"visibility={visibility}", + f"target-os={target_os}", + f"address-model={address_model}", + "link=static", + f"runtime-link={runtime_link}", + "threading=multi", + f"architecture={boost_arch}", + ] + ) + arch, sdk = IOS_BUILD_TARGETS[0] + installed_path = os.path.join(build_dir, "boost", f"install-{arch}-{sdk}") + rm_rf(os.path.join(install_dir, "boost")) + cmd(["cp", "-r", installed_path, os.path.join(install_dir, "boost")]) + + for lib in enum_all_files( + os.path.join(installed_path, "lib"), os.path.join(installed_path, "lib") + ): + if not lib.endswith(".a"): + continue + files = [ + os.path.join(build_dir, "boost", f"install-{arch}-{sdk}", "lib", lib) + for arch, sdk in IOS_BUILD_TARGETS + ] + if len(files) == 1: + shutil.copyfile(files[0], os.path.join(install_dir, "boost", "lib", lib)) + else: + cmd( + [ + "lipo", + "-create", + "-output", + os.path.join(install_dir, "boost", "lib", lib), + ] + + files + ) + elif target_os == "android": + # Android の場合、android-ndk を使ってビルドする + with open("project-config.jam", "w") as f: + bin = os.path.join( + android_ndk, "toolchains", "llvm", "prebuilt", "linux-x86_64", "bin" + ) + sysroot = os.path.join( + android_ndk, "toolchains", "llvm", "prebuilt", "linux-x86_64", "sysroot" + ) + f.write( + f"using clang \ + : android \ + : {os.path.join(bin, 'clang++')} \ + --target=aarch64-none-linux-android{native_api_level} \ + --sysroot={sysroot} \ + : {os.path.join(bin, 'llvm-ar')} \ + {os.path.join(bin, 
'llvm-ranlib')} \ + ; \ + " + ) + cmd( + [ + b2, + "install", + "-d+0", + f'--prefix={os.path.join(install_dir, "boost")}', + "--with-json", + "--with-filesystem", + "--layout=system", + "--ignore-site-config", + f'variant={"debug" if debug else "release"}', + f"compileflags=--sysroot={sysroot}", + f'cflags={" ".join(cflags)}', + f'cxxflags={" ".join(cxxflags)}', + f'linkflags={" ".join(linkflags)}', + f"toolset={toolset}", + f"visibility={visibility}", + f"target-os={target_os}", + f"address-model={address_model}", + "link=static", + f"runtime-link={runtime_link}", + "threading=multi", + "architecture=arm", + ] + ) + else: + if len(cxx) != 0: + with open("project-config.jam", "w") as f: + f.write(f"using {toolset} : : {cxx} : ;") + cmd( + [ + b2, + "install", + "-d+0", + f'--prefix={os.path.join(install_dir, "boost")}', + "--with-json", + "--with-filesystem", + "--layout=system", + "--ignore-site-config", + f'variant={"debug" if debug else "release"}', + f'cflags={" ".join(cflags)}', + f'cxxflags={" ".join(cxxflags)}', + f'linkflags={" ".join(linkflags)}', + f"toolset={toolset}", + f"visibility={visibility}", + f"target-os={target_os}", + f"address-model={address_model}", + "link=static", + f"runtime-link={runtime_link}", + "threading=multi", + f"architecture={architecture}", + ] + ) + + +@versioned +def install_sora(version, source_dir, install_dir, platform: str): + win = platform.startswith("windows_") + filename = f'sora-cpp-sdk-{version}_{platform}.{"zip" if win else "tar.gz"}' + rm_rf(os.path.join(source_dir, filename)) + archive = download( + f"https://github.com/shiguredo/sora-cpp-sdk/releases/download/{version}/{filename}", + output_dir=source_dir, + ) + rm_rf(os.path.join(install_dir, "sora")) + extract(archive, output_dir=install_dir, output_dirname="sora") + + +def install_sora_and_deps( + sora_version: str, boost_version: str, platform: str, source_dir: str, install_dir: str +): + # Boost + install_boost_args = { + "version": boost_version, + "version_file": os.path.join(install_dir, "boost.version"), + "source_dir": source_dir, + "install_dir": install_dir, + "sora_version": sora_version, + "platform": platform, + } + install_boost(**install_boost_args) + + # Sora C++ SDK + install_sora_args = { + "version": sora_version, + "version_file": os.path.join(install_dir, "sora.version"), + "source_dir": source_dir, + "install_dir": install_dir, + "platform": platform, + } + install_sora(**install_sora_args) + + +def build_sora( + platform: str, + local_sora_cpp_sdk_dir: str, + local_sora_cpp_sdk_args: List[str], + debug: bool, + local_webrtc_build_dir: Optional[str], +): + if debug and "--debug" not in local_sora_cpp_sdk_args: + local_sora_cpp_sdk_args = ["--debug", *local_sora_cpp_sdk_args] + if local_webrtc_build_dir is not None: + local_sora_cpp_sdk_args = [ + "--local-webrtc-build-dir", + local_webrtc_build_dir, + *local_sora_cpp_sdk_args, + ] + + with cd(local_sora_cpp_sdk_dir): + cmd(["python3", "run.py", platform, *local_sora_cpp_sdk_args]) + + +class SoraInfo(NamedTuple): + sora_install_dir: str + boost_install_dir: str + + +def get_sora_info( + platform: str, local_sora_cpp_sdk_dir: Optional[str], install_dir: str, debug: bool +) -> SoraInfo: + if local_sora_cpp_sdk_dir is not None: + configuration = "debug" if debug else "release" + install_dir = os.path.join(local_sora_cpp_sdk_dir, "_install", platform, configuration) + + return SoraInfo( + sora_install_dir=os.path.join(install_dir, "sora"), + boost_install_dir=os.path.join(install_dir, "boost"), + ) + + +@versioned 
+def install_rootfs(version, install_dir, conf, arch="arm64"): + rootfs_dir = os.path.join(install_dir, "rootfs") + rm_rf(rootfs_dir) + cmd(["multistrap", "--no-auth", "-a", arch, "-d", rootfs_dir, "-f", conf]) + # 絶対パスのシンボリックリンクを相対パスに置き換えていく + for dir, _, filenames in os.walk(rootfs_dir): + for filename in filenames: + linkpath = os.path.join(dir, filename) + # symlink かどうか + if not os.path.islink(linkpath): + continue + target = os.readlink(linkpath) + # 絶対パスかどうか + if not os.path.isabs(target): + continue + # rootfs_dir を先頭に付けることで、 + # rootfs の外から見て正しい絶対パスにする + targetpath = rootfs_dir + target + # 参照先の絶対パスが存在するかどうか + if not os.path.exists(targetpath): + continue + # 相対パスに置き換える + relpath = os.path.relpath(targetpath, dir) + logging.debug(f"{linkpath[len(rootfs_dir):]} targets {target} to {relpath}") + os.remove(linkpath) + os.symlink(relpath, linkpath) + + # なぜかシンボリックリンクが登録されていないので作っておく + link = os.path.join(rootfs_dir, "usr", "lib", "aarch64-linux-gnu", "tegra", "libnvbuf_fdmap.so") + file = os.path.join( + rootfs_dir, "usr", "lib", "aarch64-linux-gnu", "tegra", "libnvbuf_fdmap.so.1.0.0" + ) + if os.path.exists(file) and not os.path.exists(link): + os.symlink(os.path.basename(file), link) + + # JetPack 6 から tegra → nvidia になった + link = os.path.join( + rootfs_dir, "usr", "lib", "aarch64-linux-gnu", "nvidia", "libnvbuf_fdmap.so" + ) + file = os.path.join( + rootfs_dir, "usr", "lib", "aarch64-linux-gnu", "nvidia", "libnvbuf_fdmap.so.1.0.0" + ) + if os.path.exists(file) and not os.path.exists(link): + os.symlink(os.path.basename(file), link) + + +@versioned +def install_android_ndk(version, install_dir, source_dir, platform="linux"): + if platform not in ("darwin", "linux"): + raise Exception(f"Not supported platform: {platform}") + + if platform == "darwin": + url = f"https://dl.google.com/android/repository/android-ndk-{version}-{platform}.dmg" + file = f"android-ndk-{version}-{platform}.dmg" + else: + url = f"https://dl.google.com/android/repository/android-ndk-{version}-{platform}.zip" + archive = download(url, source_dir) + rm_rf(os.path.join(install_dir, "android-ndk")) + if platform == "darwin": + cap = cmdcap(["hdiutil", "attach", os.path.join(source_dir, file)]) + # 以下のような結果が得られるはずなので、ここから /Volumes/Android NDK r26 のところだけ取り出す + # /dev/disk4 GUID_partition_scheme + # /dev/disk4s1 EFI + # /dev/disk4s2 Apple_HFS /Volumes/Android NDK r26 + volume = cap.split("\n")[-1].split("\t")[-1] + # AndroidNDK10792818.app みたいな感じの app があるはず + app = glob.glob("AndroidNDK*.app", root_dir=volume)[0] + # NDK ディレクトリをコピー + cmd( + [ + "cp", + "-r", + os.path.join(volume, app, "Contents", "NDK"), + os.path.join(install_dir, "android-ndk"), + ] + ) + cmdcap(["hdiutil", "detach", volume]) + else: + extract(archive, output_dir=install_dir, output_dirname="android-ndk") + + +@versioned +def install_android_sdk_cmdline_tools(version, install_dir, source_dir): + archive = download( + f"https://dl.google.com/android/repository/commandlinetools-linux-{version}_latest.zip", + source_dir, + ) + tools_dir = os.path.join(install_dir, "android-sdk-cmdline-tools") + rm_rf(tools_dir) + extract(archive, output_dir=tools_dir, output_dirname="cmdline-tools") + sdkmanager = os.path.join(tools_dir, "cmdline-tools", "bin", "sdkmanager") + # ライセンスを許諾する + cmd(["/bin/bash", "-c", f"yes | {sdkmanager} --sdk_root={tools_dir} --licenses"]) + + +@versioned +def install_llvm( + version, + install_dir, + tools_url, + tools_commit, + libcxx_url, + libcxx_commit, + buildtools_url, + buildtools_commit, +): + llvm_dir = 
os.path.join(install_dir, "llvm") + rm_rf(llvm_dir) + mkdir_p(llvm_dir) + with cd(llvm_dir): + # tools の update.py を叩いて特定バージョンの clang バイナリを拾う + git_clone_shallow(tools_url, tools_commit, "tools") + with cd("tools"): + cmd( + [ + "python3", + os.path.join("clang", "scripts", "update.py"), + "--output-dir", + os.path.join(llvm_dir, "clang"), + ] + ) + + # 特定バージョンの libcxx を利用する + git_clone_shallow(libcxx_url, libcxx_commit, "libcxx") + + # __config_site のために特定バージョンの buildtools を取得する + git_clone_shallow(buildtools_url, buildtools_commit, "buildtools") + with cd("buildtools"): + cmd(["git", "reset", "--hard", buildtools_commit]) + shutil.copyfile( + os.path.join(llvm_dir, "buildtools", "third_party", "libc++", "__config_site"), + os.path.join(llvm_dir, "libcxx", "include", "__config_site"), + ) + + # __assertion_handler をコピーする + # 背景: https://source.chromium.org/chromium/_/chromium/external/github.com/llvm/llvm-project/libcxx.git/+/1e5bda0d1ce8e346955aa4a85eaab258785f11f7 + shutil.copyfile( + # NOTE(enm10k): 最初は default_assertion_handler.in をコピーしていたが、 buildtools 以下に + # default_assertion_handler.in から生成されたと思われる __assertion_handler が存在するため、それをコピーする + # os.path.join(llvm_dir, "libcxx", "vendor", "llvm", "default_assertion_handler.in"), + os.path.join(llvm_dir, "buildtools", "third_party", "libc++", "__assertion_handler"), + os.path.join(llvm_dir, "libcxx", "include", "__assertion_handler"), + ) + + +def cmake_path(path: str) -> str: + return path.replace("\\", "/") + + +@versioned +def install_cmake(version, source_dir, install_dir, platform: str, ext): + url = f"https://github.com/Kitware/CMake/releases/download/v{version}/cmake-{version}-{platform}.{ext}" + path = download(url, source_dir) + extract(path, install_dir, "cmake") + # Android で自前の CMake を利用する場合、ninja へのパスが見つけられない問題があるので、同じディレクトリに symlink を貼る + # https://issuetracker.google.com/issues/206099937 + if platform.startswith("linux"): + with cd(os.path.join(install_dir, "cmake", "bin")): + cmd(["ln", "-s", "/usr/bin/ninja", "ninja"]) + + +@versioned +def install_sdl2( + version, source_dir, build_dir, install_dir, debug: bool, platform: str, cmake_args: List[str] +): + url = f"http://www.libsdl.org/release/SDL2-{version}.zip" + path = download(url, source_dir) + sdl2_source_dir = os.path.join(source_dir, "sdl2") + sdl2_build_dir = os.path.join(build_dir, "sdl2") + sdl2_install_dir = os.path.join(install_dir, "sdl2") + rm_rf(sdl2_source_dir) + rm_rf(sdl2_build_dir) + rm_rf(sdl2_install_dir) + extract(path, source_dir, "sdl2") + + mkdir_p(sdl2_build_dir) + with cd(sdl2_build_dir): + configuration = "Debug" if debug else "Release" + cmake_args = cmake_args[:] + cmake_args += [ + sdl2_source_dir, + f"-DCMAKE_BUILD_TYPE={configuration}", + f"-DCMAKE_INSTALL_PREFIX={cmake_path(sdl2_install_dir)}", + "-DBUILD_SHARED_LIBS=OFF", + ] + if platform == "windows": + cmake_args += [ + "-DSDL_FORCE_STATIC_VCRT=ON", + "-DHAVE_LIBC=ON", + ] + elif platform == "macos": + # システムでインストール済みかによって ON/OFF が切り替わってしまうため、 + # どの環境でも同じようにインストールされるようにするため全部 ON/OFF を明示的に指定する + cmake_args += [ + "-DSDL_ATOMIC=OFF", + "-DSDL_AUDIO=OFF", + "-DSDL_VIDEO=ON", + "-DSDL_RENDER=ON", + "-DSDL_EVENTS=ON", + "-DSDL_JOYSTICK=ON", + "-DSDL_HAPTIC=ON", + "-DSDL_POWER=ON", + "-DSDL_THREADS=ON", + "-DSDL_TIMERS=OFF", + "-DSDL_FILE=OFF", + "-DSDL_LOADSO=ON", + "-DSDL_CPUINFO=OFF", + "-DSDL_FILESYSTEM=OFF", + "-DSDL_SENSOR=ON", + "-DSDL_OPENGL=ON", + "-DSDL_OPENGLES=ON", + "-DSDL_RPI=OFF", + "-DSDL_WAYLAND=OFF", + "-DSDL_X11=OFF", + "-DSDL_VULKAN=OFF", + "-DSDL_VIVANTE=OFF", + 
"-DSDL_COCOA=ON", + "-DSDL_METAL=ON", + "-DSDL_KMSDRM=OFF", + ] + elif platform == "linux": + # システムでインストール済みかによって ON/OFF が切り替わってしまうため、 + # どの環境でも同じようにインストールされるようにするため全部 ON/OFF を明示的に指定する + cmake_args += [ + "-DSDL_ATOMIC=OFF", + "-DSDL_AUDIO=OFF", + "-DSDL_VIDEO=ON", + "-DSDL_RENDER=ON", + "-DSDL_EVENTS=ON", + "-DSDL_JOYSTICK=ON", + "-DSDL_HAPTIC=ON", + "-DSDL_POWER=ON", + "-DSDL_THREADS=ON", + "-DSDL_TIMERS=OFF", + "-DSDL_FILE=OFF", + "-DSDL_LOADSO=ON", + "-DSDL_CPUINFO=OFF", + "-DSDL_FILESYSTEM=OFF", + "-DSDL_SENSOR=ON", + "-DSDL_OPENGL=ON", + "-DSDL_OPENGLES=ON", + "-DSDL_RPI=OFF", + "-DSDL_WAYLAND=OFF", + "-DSDL_X11=ON", + "-DSDL_X11_SHARED=OFF", + "-DSDL_X11_XCURSOR=OFF", + "-DSDL_X11_XDBE=OFF", + "-DSDL_X11_XFIXES=OFF", + "-DSDL_X11_XINERAMA=OFF", + "-DSDL_X11_XINPUT=OFF", + "-DSDL_X11_XRANDR=OFF", + "-DSDL_X11_XSCRNSAVER=OFF", + "-DSDL_X11_XSHAPE=OFF", + "-DSDL_X11_XVM=OFF", + "-DSDL_VULKAN=OFF", + "-DSDL_VIVANTE=OFF", + "-DSDL_COCOA=OFF", + "-DSDL_METAL=OFF", + "-DSDL_KMSDRM=OFF", + ] + cmd(["cmake"] + cmake_args) + + cmd( + ["cmake", "--build", ".", "--config", configuration, f"-j{multiprocessing.cpu_count()}"] + ) + cmd(["cmake", "--install", ".", "--config", configuration]) + + +@versioned +def install_cli11(version, install_dir): + cli11_install_dir = os.path.join(install_dir, "cli11") + rm_rf(cli11_install_dir) + cmd( + [ + "git", + "clone", + "--branch", + version, + "--depth", + "1", + "https://github.com/CLIUtils/CLI11.git", + cli11_install_dir, + ] + ) + + +@versioned +def install_cuda_windows(version, source_dir, build_dir, install_dir): + rm_rf(os.path.join(build_dir, "cuda")) + rm_rf(os.path.join(install_dir, "cuda")) + if version == "10.2.89-1": + url = "http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_441.22_win10.exe" # noqa: E501 + elif version == "11.8.0-1": + url = "https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_522.06_windows.exe" # noqa: E501 + else: + raise Exception(f"Unknown CUDA version {version}") + file = download(url, source_dir) + + mkdir_p(os.path.join(build_dir, "cuda")) + mkdir_p(os.path.join(install_dir, "cuda")) + with cd(os.path.join(build_dir, "cuda")): + cmd(["7z", "x", file]) + copytree( + os.path.join(build_dir, "cuda", "cuda_nvcc", "nvcc"), os.path.join(install_dir, "cuda") + ) + copytree( + os.path.join(build_dir, "cuda", "cuda_cudart", "cudart"), os.path.join(install_dir, "cuda") + ) + + +@versioned +def install_vpl(version, configuration, source_dir, build_dir, install_dir, cmake_args): + vpl_source_dir = os.path.join(source_dir, "vpl") + vpl_build_dir = os.path.join(build_dir, "vpl") + vpl_install_dir = os.path.join(install_dir, "vpl") + rm_rf(vpl_source_dir) + rm_rf(vpl_build_dir) + rm_rf(vpl_install_dir) + git_clone_shallow("https://github.com/intel/libvpl.git", version, vpl_source_dir) + + mkdir_p(vpl_build_dir) + with cd(vpl_build_dir): + cmd( + [ + "cmake", + f"-DCMAKE_INSTALL_PREFIX={cmake_path(vpl_install_dir)}", + f"-DCMAKE_BUILD_TYPE={configuration}", + "-DBUILD_SHARED_LIBS=OFF", + "-DBUILD_TOOLS=OFF", + "-DBUILD_EXAMPLES=OFF", + "-DBUILD_PREVIEW=OFF", + "-DINSTALL_EXAMPLE_CODE=OFF", + "-DBUILD_TOOLS_ONEVPL_EXPERIMENTAL=OFF", + "-DUSE_MSVC_STATIC_RUNTIME=ON", + vpl_source_dir, + *cmake_args, + ] + ) + # 生成されたプロジェクトに対して静的ランタイムを使うように変更する + vpl_path = os.path.join("libvpl", "VPL.vcxproj") + if os.path.exists(vpl_path): + replace_vcproj_static_runtime(vpl_path) + + cmd( + ["cmake", "--build", ".", f"-j{multiprocessing.cpu_count()}", "--config", 
configuration] + ) + cmd(["cmake", "--install", ".", "--config", configuration]) + + +@versioned +def install_blend2d( + version, + configuration, + source_dir, + build_dir, + install_dir, + blend2d_version, + asmjit_version, + ios, + cmake_args, +): + rm_rf(os.path.join(source_dir, "blend2d")) + rm_rf(os.path.join(build_dir, "blend2d")) + rm_rf(os.path.join(install_dir, "blend2d")) + + git_clone_shallow( + "https://github.com/blend2d/blend2d", blend2d_version, os.path.join(source_dir, "blend2d") + ) + mkdir_p(os.path.join(source_dir, "blend2d", "3rdparty")) + git_clone_shallow( + "https://github.com/asmjit/asmjit", + asmjit_version, + os.path.join(source_dir, "blend2d", "3rdparty", "asmjit"), + ) + + mkdir_p(os.path.join(build_dir, "blend2d")) + with cd(os.path.join(build_dir, "blend2d")): + cmd( + [ + "cmake", + os.path.join(source_dir, "blend2d"), + f"-DCMAKE_BUILD_TYPE={configuration}", + f"-DCMAKE_INSTALL_PREFIX={cmake_path(os.path.join(install_dir, 'blend2d'))}", + "-DBLEND2D_STATIC=ON", + *cmake_args, + ] + ) + # 生成されたプロジェクトに対して静的ランタイムを使うように変更する + project_path = os.path.join(build_dir, "blend2d", "blend2d.vcxproj") + if os.path.exists(project_path): + replace_vcproj_static_runtime(project_path) + + if ios: + cmd( + [ + "cmake", + "--build", + ".", + f"-j{multiprocessing.cpu_count()}", + "--config", + configuration, + "--target", + "blend2d", + "--", + "-arch", + "arm64", + "-sdk", + "iphoneos", + ] + ) + cmd(["cmake", "--build", ".", "--target", "install", "--config", configuration]) + else: + cmd( + [ + "cmake", + "--build", + ".", + f"-j{multiprocessing.cpu_count()}", + "--config", + configuration, + ] + ) + cmd(["cmake", "--build", ".", "--target", "install", "--config", configuration]) + + +@versioned +def install_openh264(version, source_dir, install_dir, is_windows): + rm_rf(os.path.join(source_dir, "openh264")) + rm_rf(os.path.join(install_dir, "openh264")) + git_clone_shallow( + "https://github.com/cisco/openh264.git", version, os.path.join(source_dir, "openh264") + ) + with cd(os.path.join(source_dir, "openh264")): + if is_windows: + # Windows は make が無いので手動でコピーする + # install-headers: + # mkdir -p $(DESTDIR)$(PREFIX)/include/wels + # install -m 644 $(SRC_PATH)/codec/api/wels/codec*.h $(DESTDIR)$(PREFIX)/include/wels + mkdir_p(os.path.join(install_dir, "openh264", "include", "wels")) + with cd(os.path.join("codec", "api", "wels")): + for file in glob.glob("codec*.h"): + shutil.copyfile( + file, os.path.join(install_dir, "openh264", "include", "wels", file) + ) + else: + cmd(["make", f'PREFIX={os.path.join(install_dir, "openh264")}', "install-headers"]) + + +@versioned +def install_yaml(version, source_dir, build_dir, install_dir, cmake_args): + rm_rf(os.path.join(source_dir, "yaml")) + rm_rf(os.path.join(install_dir, "yaml")) + rm_rf(os.path.join(build_dir, "yaml")) + git_clone_shallow( + "https://github.com/jbeder/yaml-cpp.git", version, os.path.join(source_dir, "yaml") + ) + + mkdir_p(os.path.join(build_dir, "yaml")) + with cd(os.path.join(build_dir, "yaml")): + cmd( + [ + "cmake", + os.path.join(source_dir, "yaml"), + "-DCMAKE_BUILD_TYPE=Release", + f"-DCMAKE_INSTALL_PREFIX={install_dir}/yaml", + "-DYAML_CPP_BUILD_TESTS=OFF", + "-DYAML_CPP_BUILD_TOOLS=OFF", + *cmake_args, + ] + ) + cmd(["cmake", "--build", ".", f"-j{multiprocessing.cpu_count()}"]) + cmd(["cmake", "--build", ".", "--target", "install"]) + + +@versioned +def install_catch2(version, source_dir, build_dir, install_dir, configuration, cmake_args): + rm_rf(os.path.join(source_dir, "catch2")) + 
rm_rf(os.path.join(install_dir, "catch2")) + rm_rf(os.path.join(build_dir, "catch2")) + git_clone_shallow( + "https://github.com/catchorg/Catch2.git", version, os.path.join(source_dir, "catch2") + ) + + mkdir_p(os.path.join(build_dir, "catch2")) + with cd(os.path.join(build_dir, "catch2")): + cmd( + [ + "cmake", + os.path.join(source_dir, "catch2"), + f"-DCMAKE_BUILD_TYPE={configuration}", + f"-DCMAKE_INSTALL_PREFIX={install_dir}/catch2", + "-DCATCH_BUILD_TESTING=OFF", + *cmake_args, + ] + ) + # 生成されたプロジェクトに対して静的ランタイムを使うように変更する + project_path = os.path.join("src", "Catch2.vcxproj") + if os.path.exists(project_path): + replace_vcproj_static_runtime(project_path) + project_path = os.path.join("src", "Catch2WithMain.vcxproj") + if os.path.exists(project_path): + replace_vcproj_static_runtime(project_path) + cmd( + ["cmake", "--build", ".", "--config", configuration, f"-j{multiprocessing.cpu_count()}"] + ) + cmd(["cmake", "--build", ".", "--config", configuration, "--target", "install"]) + + +@versioned +def install_protobuf(version, source_dir, install_dir, platform: str): + # platform: + # - linux-aarch_64 + # - linux-ppcle_64 + # - linux-s390_64 + # - linux-x86_32 + # - linux-x86_64 + # - osx-aarch_64 + # - osx-universal_binary + # - osx-x86_64 + # - win32 + # - win64 + url = f"https://github.com/protocolbuffers/protobuf/releases/download/v{version}/protoc-{version}-{platform}.zip" + path = download(url, source_dir) + rm_rf(os.path.join(install_dir, "protobuf")) + extract(path, install_dir, "protobuf") + # なぜか実行属性が消えてるので入れてやる + for file in os.scandir(os.path.join(install_dir, "protobuf", "bin")): + if file.is_file(): + os.chmod(file.path, file.stat().st_mode | stat.S_IXUSR) + + +@versioned +def install_protoc_gen_jsonif(version, source_dir, install_dir, platform: str): + # platform: + # - darwin-amd64 + # - darwin-arm64 + # - linux-amd64 + # - windows-amd64 + url = f"https://github.com/melpon/protoc-gen-jsonif/releases/download/{version}/protoc-gen-jsonif.tar.gz" + rm_rf(os.path.join(source_dir, "protoc-gen-jsonif.tar.gz")) + path = download(url, source_dir) + jsonif_install_dir = os.path.join(install_dir, "protoc-gen-jsonif") + rm_rf(jsonif_install_dir) + extract(path, install_dir, "protoc-gen-jsonif") + # 自分の環境のバイナリを /bin に配置する + shutil.copytree( + os.path.join(jsonif_install_dir, *platform.split("-")), + os.path.join(jsonif_install_dir, "bin"), + ) + # なぜか実行属性が消えてるので入れてやる + for file in os.scandir(os.path.join(jsonif_install_dir, "bin")): + if file.is_file(): + os.chmod(file.path, file.stat().st_mode | stat.S_IXUSR) + + +# iOS, Android などのクロスコンパイル環境で実行可能ファイルを生成しようとしてエラーになるのを防止するパッチ +# +# v1.64.1 をベースにパッチを当てている +# +# MEMO: gRPC の、submodule を含めて全ての diff を取得するコマンド +# git --no-pager diff --ignore-submodules && git submodule foreach --recursive 'git --no-pager diff --ignore-submodules --src-prefix a/$path/ --dst-prefix b/$path/' | grep -v '^Entering' +GRPC_PATCH_NO_EXECUTABLE = r""" +diff --git a/third_party/boringssl-with-bazel/CMakeLists.txt b/third_party/boringssl-with-bazel/CMakeLists.txt +index 6464e200f..c7bc417a1 100644 +--- a/third_party/boringssl-with-bazel/CMakeLists.txt ++++ b/third_party/boringssl-with-bazel/CMakeLists.txt +@@ -543,30 +543,6 @@ add_library( + + target_link_libraries(ssl crypto) + +-add_executable( +- bssl +- +- src/tool/args.cc +- src/tool/ciphers.cc +- src/tool/client.cc +- src/tool/const.cc +- src/tool/digest.cc +- src/tool/fd.cc +- src/tool/file.cc +- src/tool/generate_ech.cc +- src/tool/generate_ed25519.cc +- src/tool/genrsa.cc +- src/tool/pkcs12.cc +- 
src/tool/rand.cc +- src/tool/server.cc +- src/tool/sign.cc +- src/tool/speed.cc +- src/tool/tool.cc +- src/tool/transport_common.cc +-) +- +-target_link_libraries(bssl ssl crypto) +- + if(NOT CMAKE_SYSTEM_NAME STREQUAL "Android") + find_package(Threads REQUIRED) + target_link_libraries(crypto Threads::Threads) +diff --git a/third_party/zlib/CMakeLists.txt b/third_party/zlib/CMakeLists.txt +index 7f1b69f..bcf5577 100644 +--- a/third_party/zlib/CMakeLists.txt ++++ b/third_party/zlib/CMakeLists.txt +@@ -147,10 +147,7 @@ if(MINGW) + set(ZLIB_DLL_SRCS ${CMAKE_CURRENT_BINARY_DIR}/zlib1rc.obj) + endif(MINGW) + +-add_library(zlib SHARED ${ZLIB_SRCS} ${ZLIB_DLL_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS}) + add_library(zlibstatic STATIC ${ZLIB_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS}) +-set_target_properties(zlib PROPERTIES DEFINE_SYMBOL ZLIB_DLL) +-set_target_properties(zlib PROPERTIES SOVERSION 1) + + if(NOT CYGWIN) + # This property causes shared libraries on Linux to have the full version +@@ -160,22 +157,16 @@ if(NOT CYGWIN) + # + # This has no effect with MSVC, on that platform the version info for + # the DLL comes from the resource file win32/zlib1.rc +- set_target_properties(zlib PROPERTIES VERSION ${ZLIB_FULL_VERSION}) + endif() + + if(UNIX) + # On unix-like platforms the library is almost always called libz +- set_target_properties(zlib zlibstatic PROPERTIES OUTPUT_NAME z) +- if(NOT APPLE) +- set_target_properties(zlib PROPERTIES LINK_FLAGS "-Wl,--version-script,\"${CMAKE_CURRENT_SOURCE_DIR}/zlib.map\"") +- endif() + elseif(BUILD_SHARED_LIBS AND WIN32) + # Creates zlib1.dll when building shared library version +- set_target_properties(zlib PROPERTIES SUFFIX "1.dll") + endif() + + if(NOT SKIP_INSTALL_LIBRARIES AND NOT SKIP_INSTALL_ALL ) +- install(TARGETS zlib zlibstatic ++ install(TARGETS zlibstatic + RUNTIME DESTINATION "${INSTALL_BIN_DIR}" + ARCHIVE DESTINATION "${INSTALL_LIB_DIR}" + LIBRARY DESTINATION "${INSTALL_LIB_DIR}" ) +@@ -193,21 +184,3 @@ endif() + #============================================================================ + # Example binaries + #============================================================================ +- +-add_executable(example test/example.c) +-target_link_libraries(example zlib) +-add_test(example example) +- +-add_executable(minigzip test/minigzip.c) +-target_link_libraries(minigzip zlib) +- +-if(HAVE_OFF64_T) +- add_executable(example64 test/example.c) +- target_link_libraries(example64 zlib) +- set_target_properties(example64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64") +- add_test(example64 example64) +- +- add_executable(minigzip64 test/minigzip.c) +- target_link_libraries(minigzip64 zlib) +- set_target_properties(minigzip64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64") +-endif() +""" + + +@versioned +def install_grpc(version, source_dir, build_dir, install_dir, debug: bool, cmake_args: List[str]): + grpc_source_dir = os.path.join(source_dir, "grpc") + grpc_build_dir = os.path.join(build_dir, "grpc") + grpc_install_dir = os.path.join(install_dir, "grpc") + rm_rf(grpc_source_dir) + rm_rf(grpc_build_dir) + rm_rf(grpc_install_dir) + git_clone_shallow("https://github.com/grpc/grpc.git", version, grpc_source_dir, submodule=True) + apply_patch_text(GRPC_PATCH_NO_EXECUTABLE, grpc_source_dir, 1) + mkdir_p(grpc_build_dir) + with cd(grpc_build_dir): + configuration = "Debug" if debug else "Release" + cmd( + [ + "cmake", + grpc_source_dir, + f"-DCMAKE_INSTALL_PREFIX={cmake_path(grpc_install_dir)}", + 
f"-DCMAKE_BUILD_TYPE={configuration}", + *cmake_args, + ] + ) + cmd( + ["cmake", "--build", ".", f"-j{multiprocessing.cpu_count()}", "--config", configuration] + ) + cmd(["cmake", "--install", ".", "--config", configuration]) + + +@versioned +def install_ggrpc(version, install_dir): + ggrpc_install_dir = os.path.join(install_dir, "ggrpc") + rm_rf(ggrpc_install_dir) + git_clone_shallow("https://github.com/melpon/ggrpc.git", version, ggrpc_install_dir) + + +@versioned +def install_spdlog(version, install_dir): + spdlog_install_dir = os.path.join(install_dir, "spdlog") + rm_rf(spdlog_install_dir) + git_clone_shallow("https://github.com/gabime/spdlog.git", version, spdlog_install_dir) + + +@versioned +def install_sqlite3(version, year, source_dir, build_dir, install_dir, debug, configure_args): + sqlite3_source_dir = os.path.join(source_dir, "sqlite3") + sqlite3_build_dir = os.path.join(build_dir, "sqlite3") + sqlite3_install_dir = os.path.join(install_dir, "sqlite3") + rm_rf(sqlite3_source_dir) + rm_rf(sqlite3_build_dir) + rm_rf(sqlite3_install_dir) + + # 3.46.1 → 3460100 + a, b, c, *_ = version.split(".") + version_number = f"{a}{b:>02}{c:>02}00" + + url = f"https://www.sqlite.org/{year}/sqlite-autoconf-{version_number}.tar.gz" + path = download(url, source_dir) + extract(path, source_dir, "sqlite3") + + mkdir_p(sqlite3_build_dir) + with cd(sqlite3_build_dir): + cmd( + [ + os.path.join(sqlite3_source_dir, "configure"), + "--prefix", + sqlite3_install_dir, + "--disable-shared", + "--enable-static", + *(["--enable-debug"] if debug else []), + *configure_args, + ] + ) + cmd(["make", f"-j{multiprocessing.cpu_count()}"]) + cmd(["make", "install"]) + + +# CppDB を動的ライブラリではなく静的ライブラリとしてビルドするパッチ +CPPDB_PATCH_NOSHARED = r""" +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 5e81b09..d94bb84 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -231,16 +231,16 @@ set(CPPDB_SRC + ${INTERNAL_SOURCES} + ) + +-add_library(cppdb SHARED ${CPPDB_SRC}) ++#add_library(cppdb SHARED ${CPPDB_SRC}) + add_library(cppdb-static STATIC ${CPPDB_SRC}) +-set_target_properties(cppdb PROPERTIES COMPILE_DEFINITIONS CPPDB_EXPORTS) ++#set_target_properties(cppdb PROPERTIES COMPILE_DEFINITIONS CPPDB_EXPORTS) + +-if(NOT WIN32) +- if(DL_LIB) +- target_link_libraries(cppdb ${DL_LIB}) +- endif() +- target_link_libraries(cppdb ${PTHREAD_LIB}) +-endif() ++#if(NOT WIN32) ++# if(DL_LIB) ++# target_link_libraries(cppdb ${DL_LIB}) ++# endif() ++# target_link_libraries(cppdb ${PTHREAD_LIB}) ++#endif() + + # Link and backends configuration + +@@ -250,49 +250,50 @@ set(INST_LIBS) + + foreach(LIB ${BACKEND_LIBRARIES} cppdb) + +- set(INST_LIBS ${INST_LIBS} ${LIB} ${LIB}-static) ++# set(INST_LIBS ${INST_LIBS} ${LIB} ${LIB}-static) ++ set(INST_LIBS ${INST_LIBS} ${LIB}-static) + +- if(WIN32) +- set_target_properties(${LIB} PROPERTIES VERSION ${CPPDB_SOVERSION} SOVERSION ${CPPDB_SOVERSION}) +- else() +- set_target_properties(${LIB} PROPERTIES VERSION ${CPPDB_VERSION} SOVERSION ${CPPDB_SOVERSION}) +- endif() ++# if(WIN32) ++# set_target_properties(${LIB} PROPERTIES VERSION ${CPPDB_SOVERSION} SOVERSION ${CPPDB_SOVERSION}) ++# else() ++# set_target_properties(${LIB} PROPERTIES VERSION ${CPPDB_VERSION} SOVERSION ${CPPDB_SOVERSION}) ++# endif() + + +- set_target_properties(${LIB} PROPERTIES CLEAN_DIRECT_OUTPUT 1) +- set_target_properties(${LIB} PROPERTIES OUTPUT_NAME "${LIB}") ++# set_target_properties(${LIB} PROPERTIES CLEAN_DIRECT_OUTPUT 1) ++# set_target_properties(${LIB} PROPERTIES OUTPUT_NAME "${LIB}") + set_target_properties(${LIB}-static 
PROPERTIES CLEAN_DIRECT_OUTPUT 1) + set_target_properties(${LIB}-static PROPERTIES OUTPUT_NAME "${LIB}") + set_target_properties(${LIB}-static PROPERTIES PREFIX "lib") # Make sure import and normal library do not collide + + endforeach() + +-foreach(LIB ${BACKEND_LIBRARIES}) +- set_target_properties(${LIB} PROPERTIES COMPILE_DEFINITIONS CPPDB_DRIVER_EXPORTS) +- target_link_libraries(${LIB} cppdb) +-endforeach() +- +-foreach(LIB ${INTERNAL_LIBRARIES}) +- target_link_libraries(cppdb ${LIB}) +-endforeach() ++#foreach(LIB ${BACKEND_LIBRARIES}) ++# set_target_properties(${LIB} PROPERTIES COMPILE_DEFINITIONS CPPDB_DRIVER_EXPORTS) ++# target_link_libraries(${LIB} cppdb) ++#endforeach() ++# ++#foreach(LIB ${INTERNAL_LIBRARIES}) ++# target_link_libraries(cppdb ${LIB}) ++#endforeach() + + # tests + +-add_executable(test_perf test/test_perf.cpp) +-add_executable(test_basic test/test_basic.cpp) +-add_executable(test_backend test/test_backend.cpp) +-add_executable(test_caching test/test_caching.cpp) +-add_executable(example examples/example1.cpp) +- +-set_target_properties( test_perf test_backend test_basic test_caching example +- PROPERTIES +- COMPILE_DEFINITIONS CPPDB_EXPORTS) +- +-target_link_libraries(test_perf cppdb) +-target_link_libraries(test_basic cppdb) +-target_link_libraries(test_backend cppdb) +-target_link_libraries(test_caching cppdb) +-target_link_libraries(example cppdb) ++#add_executable(test_perf test/test_perf.cpp) ++#add_executable(test_basic test/test_basic.cpp) ++#add_executable(test_backend test/test_backend.cpp) ++#add_executable(test_caching test/test_caching.cpp) ++#add_executable(example examples/example1.cpp) ++# ++#set_target_properties( test_perf test_backend test_basic test_caching example ++# PROPERTIES ++# COMPILE_DEFINITIONS CPPDB_EXPORTS) ++# ++#target_link_libraries(test_perf cppdb) ++#target_link_libraries(test_basic cppdb) ++#target_link_libraries(test_backend cppdb) ++#target_link_libraries(test_caching cppdb) ++#target_link_libraries(example cppdb) + + install(TARGETS ${INST_LIBS} + RUNTIME DESTINATION bin +""" + + +@versioned +def install_cppdb( + version, source_dir, build_dir, install_dir, debug, sqlite3_install_dir, cmake_args +): + cppdb_source_dir = os.path.join(source_dir, "cppdb") + cppdb_build_dir = os.path.join(build_dir, "cppdb") + cppdb_install_dir = os.path.join(install_dir, "cppdb") + rm_rf(cppdb_source_dir) + rm_rf(cppdb_build_dir) + rm_rf(cppdb_install_dir) + + git_clone_shallow("https://github.com/melpon/cppdb.git", version, cppdb_source_dir) + apply_patch_text(CPPDB_PATCH_NOSHARED, cppdb_source_dir, 1) + + mkdir_p(cppdb_build_dir) + with cd(cppdb_build_dir): + configuration = "Debug" if debug else "Release" + cmd( + [ + "cmake", + cppdb_source_dir, + f"-DCMAKE_BUILD_TYPE={configuration}", + f"-DCMAKE_INSTALL_PREFIX={cppdb_install_dir}", + f"-DCMAKE_PREFIX_PATH={sqlite3_install_dir}", + "-DDISABLE_MYSQL=ON", + "-DDISABLE_PQ=ON", + "-DDISABLE_ODBC=ON", + "-DSQLITE_BACKEND_INTERNAL=ON", + *cmake_args, + ] + ) + cmd( + ["cmake", "--build", ".", f"-j{multiprocessing.cpu_count()}", "--config", configuration] + ) + cmd(["cmake", "--install", ".", "--config", configuration]) + + +class PlatformTarget(object): + def __init__(self, os, osver, arch, extra=None): + self.os = os + self.osver = osver + self.arch = arch + self.extra = extra + + @property + def package_name(self): + if self.os == "windows": + return f"windows_{self.arch}" + if self.os == "macos": + return f"macos_{self.arch}" + if self.os == "ubuntu": + return 
f"ubuntu-{self.osver}_{self.arch}" + if self.os == "ios": + return "ios" + if self.os == "android": + return "android" + if self.os == "raspberry-pi-os": + return f"raspberry-pi-os_{self.arch}" + if self.os == "jetson": + if self.extra is None: + ubuntu_version = "ubuntu-20.04" + else: + ubuntu_version = self.extra + if self.osver is None: + return f"{ubuntu_version}_armv8_jetson" + return f"{ubuntu_version}_armv8_jetson_{self.osver}" + raise Exception("error") + + +def get_windows_osver(): + osver = platform.release() + # Windows 以外の環境だと reportAttributeAccessIssue を報告されてしまうので ignore する + with winreg.OpenKeyEx( # type: ignore[reportAttributeAccessIssue] + winreg.HKEY_LOCAL_MACHINE, # type: ignore[reportAttributeAccessIssue] + "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion", + ) as key: + return osver + "." + winreg.QueryValueEx(key, "ReleaseId")[0] # type: ignore[reportAttributeAccessIssue] + + +def get_macos_osver(): + platform.mac_ver()[0] + + +def get_build_platform() -> PlatformTarget: + os = platform.system() + if os == "Windows": + os = "windows" + osver = get_windows_osver() + elif os == "Darwin": + os = "macos" + osver = get_macos_osver() + elif os == "Linux": + release = read_version_file("/etc/os-release") + os = release["NAME"] + if os == "Ubuntu": + os = "ubuntu" + osver = release["VERSION_ID"] + else: + raise Exception(f"OS {os} not supported") + pass + else: + raise Exception(f"OS {os} not supported") + + arch = platform.machine() + if arch in ("AMD64", "x86_64"): + arch = "x86_64" + elif arch in ("aarch64", "arm64"): + arch = "arm64" + else: + raise Exception(f"Arch {arch} not supported") + + return PlatformTarget(os, osver, arch) + + +def get_clang_version(clang): + version_str = cmdcap([clang, "--version"]) + + # version_str は以下のような文字列になっているので、ここからバージョンを取る + # + # clang version 16.0.0 (...) + # Target: x86_64-unknown-linux-gnu + # Thread model: posix + # InstalledDir: /path/to/clang/bin + # + # Android 版だと以下のような文字列になっている + # + # Android (8490178, based on r450784d) clang version 14.0.6 (...) 
+ # Target: aarch64-unknown-linux-android29 + # Thread model: posix + # InstalledDir: /path/to/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/bin + + # clang version の次の文字列を取る + xs = version_str.split("\n")[0].split(" ") + for i in range(2, len(xs)): + if xs[i - 2] == "clang" and xs[i - 1] == "version": + return xs[i] + + raise Exception("Failed to get clang version") + + +def fix_clang_version(clang_dir, clang_version): + # /lib/clang//include または + # /lib64/clang//include が存在するか調べて、 + # 存在しない場合は clang_version を調節して、存在するバージョンに変換する + # + # /lib/clang/16.0.0/include になっている場合と + # /lib/clang/16/include になっている場合があるため + paths = [os.path.join(clang_dir, "lib", "clang"), os.path.join(clang_dir, "lib64", "clang")] + exists = any(map(lambda x: os.path.exists(os.path.join(x, clang_version, "include")), paths)) + if exists: + return clang_version + + fixed_clang_version = clang_version.split(".")[0] + exists = any( + map(lambda x: os.path.exists(os.path.join(x, fixed_clang_version, "include")), paths) + ) + if exists: + return fixed_clang_version + + raise Exception( + f"Failed to fix clang version: clang_dir={clang_dir} clang_version={clang_version}" + ) + + +class Platform(object): + def _check(self, flag): + if not flag: + raise Exception("Not supported") + + def _check_platform_target(self, p: PlatformTarget): + if p.os == "raspberry-pi-os": + self._check(p.arch in ("armv6", "armv7", "armv8")) + elif p.os == "jetson": + self._check(p.arch == "armv8") + elif p.os in ("ios", "android"): + self._check(p.arch is None) + elif p.os == "ubuntu": + self._check(p.arch in ("x86_64", "armv8")) + else: + self._check(p.arch in ("x86_64", "arm64", "hololens2")) + + def __init__(self, target_os, target_osver, target_arch, target_extra=None): + build = get_build_platform() + target = PlatformTarget(target_os, target_osver, target_arch, target_extra) + + self._check_platform_target(build) + self._check_platform_target(target) + + if target.os == "windows": + self._check(target.arch in ("x86_64", "arm64", "hololens2")) + self._check(build.os == "windows") + self._check(build.arch == "x86_64") + if target.os == "macos": + self._check(build.os == "macos") + self._check(build.arch in ("x86_64", "arm64")) + if target.os == "ios": + self._check(build.os == "macos") + self._check(build.arch in ("x86_64", "arm64")) + if target.os == "android": + self._check(build.os in ("ubuntu", "macos")) + if build.os == "ubuntu": + self._check(build.arch == "x86_64") + elif build.os == "macos": + self._check(build.arch in ("x86_64", "arm64")) + if target.os == "ubuntu": + self._check(build.os == "ubuntu") + self._check(build.arch == "x86_64") + if target.arch == "x86_64": + self._check(build.osver == target.osver) + if target.os == "raspberry-pi-os": + self._check(build.os == "ubuntu") + self._check(build.arch == "x86_64") + if target.os == "jetson": + self._check(build.os == "ubuntu") + self._check(build.arch == "x86_64") + + self.build = build + self.target = target + + +def get_webrtc_platform(platform: Platform) -> str: + # WebRTC + if platform.target.os == "windows": + return f"windows_{platform.target.arch}" + elif platform.target.os == "macos": + return f"macos_{platform.target.arch}" + elif platform.target.os == "ios": + return "ios" + elif platform.target.os == "android": + return "android" + elif platform.target.os == "ubuntu": + return f"ubuntu-{platform.target.osver}_{platform.target.arch}" + elif platform.target.os == "raspberry-pi-os": + return f"raspberry-pi-os_{platform.target.arch}" + elif platform.target.os == 
"jetson": + if platform.target.extra is None: + return "ubuntu-20.04_armv8" + else: + return f"{platform.target.extra}_armv8" + else: + raise Exception(f"Unknown platform {platform.target.os}") + + +# 内部で os.path.abspath() を利用しており、 os.path.abspath() はカレントディレクトリに依存するため、 +# この関数を利用する場合は ArgumentParser.parse_args() 実行前にカレントディレクトリを変更してはならない +# +# また、 --sora-args の指定には `--sora-args='--test'` のように `=` を使う必要がある +# `--sora-args '--test'` のようにスペースを使うと、ハイフンから始まるオプションが正しく解釈されない +def add_sora_arguments(parser): + parser.add_argument( + "--local-sora-cpp-sdk-dir", + type=os.path.abspath, + default=None, + help="Refer to local Sora C++ SDK. " + "When this option is specified, Sora C++ SDK will also be built.", + ) + parser.add_argument( + "--local-sora-cpp-sdk-args", + type=shlex.split, + default=[], + help="Options for building local Sora C++ SDK when `--local-sora-cpp-sdk-dir` is specified.", + ) + + +# add_sora_arguments と同様の注意点があるので注意すること +def add_webrtc_build_arguments(parser): + parser.add_argument( + "--local-webrtc-build-dir", + type=os.path.abspath, + default=None, + help="Refer to local webrtc-build. " + "When this option is specified, webrtc-build will also be built.", + ) + parser.add_argument( + "--local-webrtc-build-args", + type=shlex.split, + default=[], + help="Options for building local webrtc-build when `--local-webrtc-build-dir` is specified.", + ) diff --git a/cattleshed/CMakeLists.txt b/cattleshed/CMakeLists.txt index ba43f16..c15d103 100644 --- a/cattleshed/CMakeLists.txt +++ b/cattleshed/CMakeLists.txt @@ -10,6 +10,7 @@ find_package(CAP REQUIRED) find_package(Threads REQUIRED) find_package(Protobuf REQUIRED) find_package(gRPC REQUIRED) +find_package(utf8_range REQUIRED) find_package(Spdlog REQUIRED) find_package(CLI11 REQUIRED) find_package(Ggrpc REQUIRED) @@ -25,7 +26,7 @@ if(NOT CATTLESHED_SYSCONFDIR) endif() if(NOT CATTLESHED_STOREDIR) - set(CATTLESHED_STOREDIR "/var/log/wandbox/ran") + set(CATTLESHED_STOREDIR "/opt/wandbox/_log/ran") endif() if(NOT CATTLESHED_BASEDIR) @@ -84,7 +85,7 @@ add_executable(cattleshed src/server.cc src/load_config.cc ${CATTLESHED_PROTO}) -set_target_properties(cattleshed PROPERTIES CXX_STANDARD 14 C_STANDARD 99) +set_target_properties(cattleshed PROPERTIES CXX_STANDARD 17 C_STANDARD 99) target_include_directories(cattleshed PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/proto") target_compile_definitions(cattleshed PRIVATE @@ -94,10 +95,9 @@ target_link_libraries(cattleshed Boost::boost RT::RT Threads::Threads - gRPC::grpc++ + Ggrpc::ggrpc Spdlog::Spdlog - CLI11::CLI11 - Ggrpc::Ggrpc) + CLI11::CLI11) set_sanitizer(cattleshed) diff --git a/cattleshed/cmake.sh b/cattleshed/cmake.sh deleted file mode 100755 index fbe3a2e..0000000 --- a/cattleshed/cmake.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash - -set -ex - -cd `dirname $0` -INSTALL_DIR="`pwd`/../_install" -MODULE_PATH="`pwd`/../cmake" -PROJECT_DIR="`pwd`" - -BUILD_DIR="`pwd`/_build/local" -GRPC_DIR="$INSTALL_DIR/grpc" -CMAKE_BUILD_TYPE=Release -ENABLE_TSAN=OFF -ENABLE_ASAN=OFF -CMAKE_INSTALL_PREFIX="$PROJECT_DIR/_install" -CMAKE_OPTS=" \ - -DCATTLESHED_STOREDIR=$BUILD_DIR/cattleshed-develop-log - -DCATTLESHED_BASEDIR=$BUILD_DIR/cattleshed-develop - -DCATTLESHED_BINDIR=$BUILD_DIR -" -LOCAL=1 -RUN_AFTER_BUILD=0 - -while [ $# -ne 0 ]; do - case "$1" in - "--help" ) - echo "$0 [--tsan] [--asan] [--local] [--develop] [--master] [--prefix ] [--run] [--help]" - exit 0 - ;; - - "--prefix" ) - CMAKE_INSTALL_PREFIX="$2" - shift 1 - ;; - - "--local" ) - LOCAL=1 - BUILD_DIR="`pwd`/_build/local" - CMAKE_OPTS=" \ - 
-DCATTLESHED_STOREDIR=$BUILD_DIR/cattleshed-develop-log - -DCATTLESHED_BASEDIR=$BUILD_DIR/cattleshed-develop - -DCATTLESHED_BINDIR=$BUILD_DIR - " - ;; - "--develop" ) - LOCAL=0 - BUILD_DIR="`pwd`/_build/develop" - CMAKE_OPTS=" \ - -DCATTLESHED_STOREDIR=/tmp/cattleshed-develop-log - -DCATTLESHED_BASEDIR=/tmp/cattleshed-develop - -DCATTLESHED_LISTEN_PORT=50052 - " - CATTLESHED_BINDIR="$CMAKE_INSTALL_PREFIX/bin" - ;; - "--master" ) - LOCAL=0 - BUILD_DIR="`pwd`/_build/master" - CMAKE_OPTS=" \ - " - CATTLESHED_BINDIR="$CMAKE_INSTALL_PREFIX/bin" - ;; - - "--tsan" ) - ENABLE_TSAN=ON - BUILD_DIR="`pwd`/_build/tsan" - GRPC_DIR="$INSTALL_DIR/grpc-tsan" - CMAKE_BUILD_TYPE=Debug - ;; - "--asan" ) - ENABLE_ASAN=ON - BUILD_DIR="`pwd`/_build/asan" - GRPC_DIR="$INSTALL_DIR/grpc-asan" - CMAKE_BUILD_TYPE=Debug - ;; - - "--run" ) - RUN_AFTER_BUILD=1 - - esac - shift 1 -done - -export PATH=$INSTALL_DIR/cmake/bin:$PATH - -mkdir -p $BUILD_DIR -pushd $BUILD_DIR - cmake $PROJECT_DIR \ - -DCLI11_ROOT_DIR="$INSTALL_DIR/CLI11" \ - -DSPDLOG_ROOT_DIR="$INSTALL_DIR/spdlog" \ - -DGGRPC_ROOT_DIR="$INSTALL_DIR/ggrpc" \ - -DCMAKE_PREFIX_PATH="$INSTALL_DIR/boost;$GRPC_DIR" \ - -DCMAKE_MODULE_PATH=$MODULE_PATH \ - -DCMAKE_INSTALL_PREFIX=$CMAKE_INSTALL_PREFIX \ - -DCMAKE_BUILD_TYPE=$CMAKE_BUILD_TYPE \ - -DENABLE_TSAN=$ENABLE_TSAN \ - -DENABLE_ASAN=$ENABLE_ASAN \ - -DCATTLESHED_BINDIR=$CATTLESHED_BINDIR \ - $CMAKE_OPTS - cmake --build . -j`nproc` -popd - -if [ $LOCAL -eq 1 -a $RUN_AFTER_BUILD -eq 1 ]; then - sudo setcap cap_sys_admin,cap_chown,cap_setuid,cap_setgid,cap_sys_chroot,cap_mknod,cap_net_admin=p $BUILD_DIR/cattlegrid - exec $BUILD_DIR/cattleshed -c $BUILD_DIR/cattleshed.conf -c $PROJECT_DIR/compiler.default -fi \ No newline at end of file diff --git a/cattleshed/src/cattleshed_server.h b/cattleshed/src/cattleshed_server.h index aba55c7..0278ba3 100644 --- a/cattleshed/src/cattleshed_server.h +++ b/cattleshed/src/cattleshed_server.h @@ -24,7 +24,7 @@ #include // protobuf -#include +#include #include "cattleshed.grpc.pb.h" #include "cattleshed.pb.h" @@ -567,14 +567,14 @@ class RunJobHandler logdir_ = logdirbase; loginfoname_ = unique_name + ".json"; { - google::protobuf::util::JsonPrintOptions opt; + google::protobuf::json::PrintOptions opt; opt.add_whitespace = true; - opt.always_print_primitive_fields = true; + opt.always_print_fields_with_no_presence = true; // ソースの情報を以外を JSON 化する auto req = *req_; req.clear_default_source(); req.clear_sources(); - google::protobuf::util::MessageToJsonString(req, &loginfocontent_, opt); + google::protobuf::json::MessageToJsonString(req, &loginfocontent_, opt); } // まずソース以外の情報を書き込む diff --git a/cmake/FindGgrpc.cmake b/cmake/FindGgrpc.cmake index d0afcf1..fac13a0 100644 --- a/cmake/FindGgrpc.cmake +++ b/cmake/FindGgrpc.cmake @@ -1,9 +1,17 @@ find_package(Spdlog REQUIRED) # 頑張って探したりせず、単純に GGRPC_ROOT_DIR を見る -if(NOT TARGET Ggrpc::Ggrpc) - add_library(Ggrpc::Ggrpc INTERFACE IMPORTED) - set_target_properties(Ggrpc::Ggrpc PROPERTIES +if(NOT TARGET Ggrpc::ggrpc) + add_library(Ggrpc::ggrpc INTERFACE IMPORTED) + set_target_properties(Ggrpc::ggrpc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${GGRPC_ROOT_DIR}/include") -target_link_libraries(Ggrpc::Ggrpc INTERFACE Spdlog::Spdlog) + target_link_libraries(Ggrpc::ggrpc INTERFACE + gRPC::grpc++ + protobuf::libprotobuf + gRPC::utf8_range_lib + utf8_range::utf8_validity + absl::log + absl::cord + absl::cord_internal + Spdlog::Spdlog) endif() diff --git a/deploy.sh b/deploy.sh deleted file mode 100755 index d5a3356..0000000 --- a/deploy.sh +++ /dev/null 
@@ -1,55 +0,0 @@ -#!/bin/bash - -PROG=$0 - -function show_help() { - echo "$PROG " -} - -if [ $# -lt 3 ]; then - show_help - exit 1 -fi - -REMOTE=$1 -APP=$2 -ENV=$3 - -cd `dirname $0` -PACKAGE_DIR="`pwd`/_package" - -set -ex - -if [ "$APP" != "kennel" -a "$APP" != "cattleshed" ]; then - show_help - exit 1 -fi - -if [ "$ENV" != "develop" -a "$ENV" != "master" ]; then - show_help - exit 1 -fi - -scp $PACKAGE_DIR/$APP-$ENV.tar.gz $REMOTE:/tmp/$APP-$ENV.tar.gz -ssh $REMOTE /bin/bash -c " - set -ex - mkdir -p /opt/wandbox-data/release - pushd /opt/wandbox-data/release - tar xf /tmp/$APP-$ENV.tar.gz - rm /tmp/$APP-$ENV.tar.gz - - pushd $APP-$ENV - if [ "$APP" = "cattleshed" ]; then - setcap cap_sys_admin,cap_chown,cap_setuid,cap_setgid,cap_sys_chroot,cap_mknod,cap_net_admin=p bin/cattlegrid - fi - if [ "$APP" = "kennel" ]; then - # データ置き場を作る - mkdir -p var/lib/kennel - chown -R ubuntu:ubuntu var/ - fi - popd - popd - cp /opt/wandbox-data/release/$APP-$ENV/etc/$APP.service /etc/systemd/system/$APP-$ENV.service - systemctl enable $APP-$ENV - systemctl restart $APP-$ENV -" diff --git a/install_deps.sh b/install_deps.sh deleted file mode 100755 index 5749981..0000000 --- a/install_deps.sh +++ /dev/null @@ -1,308 +0,0 @@ -#!/bin/bash - -SOURCE_DIR="`pwd`/_source" -BUILD_DIR="`pwd`/_build" -INSTALL_DIR="`pwd`/_install" - -set -ex - -mkdir -p $SOURCE_DIR -mkdir -p $BUILD_DIR -mkdir -p $INSTALL_DIR - -RELEASE_MODE=0 -if [ "$1" = "--release" ]; then - RELEASE_MODE=1 -fi - -CMAKE_VERSION="3.22.2" -CMAKE_VERSION_FILE="$INSTALL_DIR/cmake.version" -CMAKE_CHANGED=0 -if [ ! -e $CMAKE_VERSION_FILE -o "$CMAKE_VERSION" != "`cat $CMAKE_VERSION_FILE`" ]; then - CMAKE_CHANGED=1 -fi - -BOOST_VERSION="1.78.0" -BOOST_VERSION_FILE="$INSTALL_DIR/boost.version" -BOOST_CHANGED=0 -if [ ! -e $BOOST_VERSION_FILE -o "$BOOST_VERSION" != "`cat $BOOST_VERSION_FILE`" ]; then - BOOST_CHANGED=1 -fi - -CPPDB_VERSION="0.3.1" -CPPDB_VERSION_FILE="$INSTALL_DIR/cppdb.version" -CPPDB_CHANGED=0 -if [ ! -e $CPPDB_VERSION_FILE -o "$CPPDB_VERSION" != "`cat $CPPDB_VERSION_FILE`" ]; then - CPPDB_CHANGED=1 -fi - -GRPC_VERSION="1.44.0" -GRPC_VERSION_FILE="$INSTALL_DIR/grpc.version" -GRPC_CHANGED=0 -if [ ! -e $GRPC_VERSION_FILE -o "$GRPC_VERSION" != "`cat $GRPC_VERSION_FILE`" ]; then - GRPC_CHANGED=1 -fi - -SQLITE3_VERSION="3.38.0" -SQLITE3_VERSION_NUMBER="3380000" -SQLITE3_YEAR="2022" -SQLITE3_VERSION_FILE="$INSTALL_DIR/sqlite3.version" -SQLITE3_CHANGED=0 -if [ ! -e $SQLITE3_VERSION_FILE -o "$SQLITE3_VERSION" != "`cat $SQLITE3_VERSION_FILE`" ]; then - SQLITE3_CHANGED=1 -fi - -CLI11_VERSION="2.1.2" -CLI11_VERSION_FILE="$INSTALL_DIR/cli11.version" -CLI11_CHANGED=0 -if [ ! -e $CLI11_VERSION_FILE -o "$CLI11_VERSION" != "`cat $CLI11_VERSION_FILE`" ]; then - CLI11_CHANGED=1 -fi - -SPDLOG_VERSION="1.9.2" -SPDLOG_VERSION_FILE="$INSTALL_DIR/spdlog.version" -SPDLOG_CHANGED=0 -if [ ! -e $SPDLOG_VERSION_FILE -o "$SPDLOG_VERSION" != "`cat $SPDLOG_VERSION_FILE`" ]; then - SPDLOG_CHANGED=1 -fi - -GGRPC_VERSION="0.5.7" -GGRPC_VERSION_FILE="$INSTALL_DIR/ggrpc.version" -GGRPC_CHANGED=0 -if [ ! -e $GGRPC_VERSION_FILE -o "$GGRPC_VERSION" != "`cat $GGRPC_VERSION_FILE`" ]; then - GGRPC_CHANGED=1 -fi - -PROTOC_GEN_JSONIF_VERSION="0.5.0" -PROTOC_GEN_JSONIF_VERSION_FILE="$INSTALL_DIR/protoc-gen-jsonif.version" -PROTOC_GEN_JSONIF_CHANGED=0 -if [ ! 
-e $PROTOC_GEN_JSONIF_VERSION_FILE -o "$PROTOC_GEN_JSONIF_VERSION" != "`cat $PROTOC_GEN_JSONIF_VERSION_FILE`" ]; then - PROTOC_GEN_JSONIF_CHANGED=1 -fi - -if [ -z "$JOBS" ]; then - # Linux - JOBS=`nproc 2>/dev/null` - if [ -z "$JOBS" ]; then - # macOS - JOBS=`sysctl -n hw.logicalcpu_max 2>/dev/null` - if [ -z "$JOBS" ]; then - JOBS=1 - fi - fi -fi - -# CMake が古いとビルド出来ないので、インストール済み CMake から新しい CMake をインストールする -if [ $CMAKE_CHANGED -eq 1 -o ! -e $INSTALL_DIR/cmake/bin/cmake ]; then - _URL=https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-x86_64.tar.gz - _FILE=$SOURCE_DIR/cmake-${CMAKE_VERSION}-linux-x86_64.tar.gz - if [ ! -e $_FILE ]; then - echo "file(DOWNLOAD $_URL $_FILE)" > $SOURCE_DIR/tmp.cmake - cmake -P $SOURCE_DIR/tmp.cmake - rm $SOURCE_DIR/tmp.cmake - fi - - pushd $SOURCE_DIR - rm -rf cmake-${CMAKE_VERSION}-linux-x86_64 - cmake -E tar xf $_FILE - popd - - rm -rf $INSTALL_DIR/cmake - mv $SOURCE_DIR/cmake-${CMAKE_VERSION}-linux-x86_64 $INSTALL_DIR/cmake -fi -echo $CMAKE_VERSION > $CMAKE_VERSION_FILE - -export PATH=$INSTALL_DIR/cmake/bin:$PATH - -# grpc (cmake) -if [ $GRPC_CHANGED -eq 1 -o ! -e $INSTALL_DIR/grpc/lib/libgrpc.a ]; then - # gRPC のソース取得 - if [ ! -e $SOURCE_DIR/grpc/.git ]; then - git clone https://github.com/grpc/grpc.git $SOURCE_DIR/grpc - fi - pushd $SOURCE_DIR/grpc - git fetch - git reset --hard v$GRPC_VERSION - git submodule update -i --recursive - popd - - # RELEASE_MODE=1 の場合は tsan, asan は入れない - _BUILDTYPE="release tsan asan" - if [ $RELEASE_MODE -eq 1 ]; then - _BUILDTYPE="release" - fi - for buildtype in $_BUILDTYPE; do - case "$buildtype" in - "release" ) - _POSTFIX="" - _OPTS=" - -DCMAKE_BUILD_TYPE=Release \ - " - ;; - "tsan" ) - _POSTFIX="-tsan" - _OPTS=" - -DCMAKE_BUILD_TYPE=Debug \ - -DCMAKE_C_FLAGS="-fsanitize=thread" \ - -DCMAKE_CXX_FLAGS="-fsanitize=thread" \ - " - ;; - "asan" ) - _POSTFIX="-asan" - _OPTS=" - -DCMAKE_BUILD_TYPE=Debug \ - -DCMAKE_C_FLAGS="-fsanitize=address" \ - -DCMAKE_CXX_FLAGS="-fsanitize=address" \ - " - ;; - esac - - rm -rf $BUILD_DIR/grpc-build$_POSTFIX - mkdir -p $BUILD_DIR/grpc-build$_POSTFIX - pushd $BUILD_DIR/grpc-build$_POSTFIX - cmake $SOURCE_DIR/grpc \ - -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR/grpc$_POSTFIX \ - -DgRPC_BUILD_CSHARP_EXT=OFF \ - $_OPTS - # なんかセグフォで死ぬことがある(再度実行すると成功する)っぽいので、2回実行する - make -j$JOBS || make -j$JOBS - make install - # boringssl のヘッダーも入れておく - cp -r $SOURCE_DIR/grpc/third_party/boringssl-with-bazel/src/include/openssl $INSTALL_DIR/grpc/include/openssl - popd - done -fi -echo $GRPC_VERSION > $GRPC_VERSION_FILE - -# boost -if [ $BOOST_CHANGED -eq 1 -o ! -e $INSTALL_DIR/boost/lib/libboost_filesystem.a ]; then - rm -rf $SOURCE_DIR/boost_${_VERSION_UNDERSCORE} - rm -rf $INSTALL_DIR/boost - - _VERSION_UNDERSCORE=${BOOST_VERSION//./_} - _URL=https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${_VERSION_UNDERSCORE}.tar.gz - _FILE=$SOURCE_DIR/boost_${_VERSION_UNDERSCORE}.tar.gz - if [ ! 
-e $_FILE ]; then - echo "file(DOWNLOAD $_URL $_FILE)" > $BUILD_DIR/tmp.cmake - cmake -P $BUILD_DIR/tmp.cmake - rm $BUILD_DIR/tmp.cmake - fi - pushd $SOURCE_DIR - rm -rf boost_${_VERSION_UNDERSCORE} - cmake -E tar xf $_FILE - popd - - pushd $SOURCE_DIR/boost_${_VERSION_UNDERSCORE} - ./bootstrap.sh - ./b2 install \ - --prefix=$INSTALL_DIR/boost \ - --build-dir=$BUILD_DIR/boost-build \ - --with-filesystem \ - --with-program_options \ - --with-json \ - target-os=linux \ - address-model=64 \ - variant=release \ - link=static - popd -fi -echo $BOOST_VERSION > $BOOST_VERSION_FILE - -# sqlite3 -if [ $SQLITE3_CHANGED -eq 1 -o ! -e $INSTALL_DIR/sqlite3/lib/libsqlite3.a ]; then - _URL=https://www.sqlite.org/$SQLITE3_YEAR/sqlite-autoconf-$SQLITE3_VERSION_NUMBER.tar.gz - _FILE=$SOURCE_DIR/sqlite-autoconf-$SQLITE3_VERSION_NUMBER.tar.gz - if [ ! -e $_FILE ]; then - echo "file(DOWNLOAD $_URL $_FILE)" > $BUILD_DIR/tmp.cmake - cmake -P $BUILD_DIR/tmp.cmake - rm $BUILD_DIR/tmp.cmake - fi - - pushd $SOURCE_DIR - rm -rf sqlite-autoconf-$SQLITE3_VERSION_NUMBER - cmake -E tar xf $_FILE - popd - - rm -rf $BUILD_DIR/sqlite-build - mkdir -p $BUILD_DIR/sqlite-build - pushd $BUILD_DIR/sqlite-build - $SOURCE_DIR/sqlite-autoconf-$SQLITE3_VERSION_NUMBER/configure \ - --prefix=$INSTALL_DIR/sqlite3 \ - --disable-shared \ - --enable-static - make -j$JOBS - make install - popd -fi -echo $SQLITE3_VERSION > $SQLITE3_VERSION_FILE - -# cppdb -if [ $CPPDB_CHANGED -eq 1 -o ! -e $INSTALL_DIR/cppdb/lib/libcppdb.a ]; then - rm -rf $SOURCE_DIR/cppdb - git clone --branch v$CPPDB_VERSION --depth 1 https://github.com/melpon/cppdb.git $SOURCE_DIR/cppdb - - PATCH_DIR=`pwd`/patch - # パッチの適用 - pushd $SOURCE_DIR/cppdb - patch -p1 < $PATCH_DIR/004_cppdb_noshared.patch - popd - - # ビルドとインストール - rm -rf $BUILD_DIR/cppdb-build - mkdir -p $BUILD_DIR/cppdb-build - pushd $BUILD_DIR/cppdb-build - cmake $SOURCE_DIR/cppdb \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR/cppdb \ - -DCMAKE_PREFIX_PATH="$INSTALL_DIR/sqlite3" \ - -DDISABLE_MYSQL=ON \ - -DDISABLE_PQ=ON \ - -DDISABLE_ODBC=ON \ - -DSQLITE_BACKEND_INTERNAL=ON - make - make install - popd -fi -echo $CPPDB_VERSION > $CPPDB_VERSION_FILE - -# CLI11 -if [ $CLI11_CHANGED -eq 1 -o ! -e $INSTALL_DIR/CLI11/include ]; then - rm -rf $INSTALL_DIR/CLI11 - git clone --branch v$CLI11_VERSION --depth 1 https://github.com/CLIUtils/CLI11.git $INSTALL_DIR/CLI11 -fi -echo $CLI11_VERSION > $CLI11_VERSION_FILE - -# spdlog -if [ $SPDLOG_CHANGED -eq 1 -o ! -e $INSTALL_DIR/spdlog/include ]; then - rm -rf $INSTALL_DIR/spdlog - git clone --branch v$SPDLOG_VERSION --depth 1 https://github.com/gabime/spdlog.git $INSTALL_DIR/spdlog -fi -echo $SPDLOG_VERSION > $SPDLOG_VERSION_FILE - -# ggrpc -if [ $GGRPC_CHANGED -eq 1 -o ! -e $INSTALL_DIR/ggrpc/include/server.h ]; then - rm -rf $INSTALL_DIR/ggrpc - git clone --branch $GGRPC_VERSION --depth 1 https://github.com/melpon/ggrpc.git $INSTALL_DIR/ggrpc -fi -echo $GGRPC_VERSION > $GGRPC_VERSION_FILE - -# protoc-gen-jsonif -if [ $PROTOC_GEN_JSONIF_CHANGED -eq 1 -o ! -e $INSTALL_DIR/protoc-gen-jsonif/linux/amd64/protoc-gen-jsonif-cpp ]; then - _URL=https://github.com/melpon/protoc-gen-jsonif/releases/download/$PROTOC_GEN_JSONIF_VERSION/protoc-gen-jsonif.tar.gz - _FILE=$BUILD_DIR/protoc-gen-jsonif.tar.gz - mkdir -p $BUILD_DIR - rm -f $_FILE - if [ ! 
-e $_FILE ]; then - echo "file(DOWNLOAD $_URL $_FILE)" > $BUILD_DIR/tmp.cmake - cmake -P $BUILD_DIR/tmp.cmake - rm $BUILD_DIR/tmp.cmake - fi - rm -rf $BUILD_DIR/protoc-gen-jsonif - rm -rf $INSTALL_DIR/protoc-gen-jsonif - pushd $BUILD_DIR - tar -xf $_FILE - mv protoc-gen-jsonif $INSTALL_DIR/protoc-gen-jsonif - chmod +x $INSTALL_DIR/protoc-gen-jsonif/linux/amd64/protoc-gen-jsonif-cpp - popd -fi -echo $PROTOC_GEN_JSONIF_VERSION > $PROTOC_GEN_JSONIF_VERSION_FILE \ No newline at end of file diff --git a/kennel/CMakeLists.txt b/kennel/CMakeLists.txt index 0dcf499..060f9f1 100644 --- a/kennel/CMakeLists.txt +++ b/kennel/CMakeLists.txt @@ -11,6 +11,7 @@ find_package(CppDB REQUIRED) find_package(Threads REQUIRED) find_package(Protobuf REQUIRED) find_package(gRPC REQUIRED) +find_package(utf8_range REQUIRED) find_package(Spdlog REQUIRED) find_package(CLI11 REQUIRED) find_package(Ggrpc REQUIRED) @@ -90,7 +91,7 @@ add_custom_command( COMMAND $ ARGS --jsonif-cpp_out "${CMAKE_CURRENT_BINARY_DIR}/proto" - -I "${CMAKE_CURRENT_SOURCE_DIR}/../_install/protoc-gen-jsonif/proto" + -I "${PROTOC_GEN_JSONIF_DIR}/proto" -I "${CMAKE_CURRENT_SOURCE_DIR}/../proto" --plugin=protoc-gen-jsonif-cpp="${PROTOC_GEN_JSONIF_CPP}" "${CMAKE_CURRENT_SOURCE_DIR}/../proto/kennel.proto" @@ -131,14 +132,12 @@ target_compile_definitions(kennel PRIVATE SPDLOG_ACTIVE_LEVEL=SPDLOG_LEVEL_TRACE target_include_directories(kennel PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/proto") target_link_libraries(kennel Boost::json - protobuf::libprotobuf - gRPC::grpc++ + Ggrpc::ggrpc CppDB::CppDB SQLite::SQLite3 Threads::Threads Spdlog::Spdlog - CLI11::CLI11 - Ggrpc::Ggrpc) + CLI11::CLI11) set_sanitizer(kennel) diff --git a/kennel/cmake.sh b/kennel/cmake.sh deleted file mode 100755 index afd55d6..0000000 --- a/kennel/cmake.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/bin/bash - -set -ex - -cd `dirname $0` -INSTALL_DIR="`pwd`/../_install" -MODULE_PATH="`pwd`/../cmake" -PROJECT_DIR="`pwd`" - -BUILD_DIR="_build/local" -GRPC_DIR="$INSTALL_DIR/grpc" -CMAKE_BUILD_TYPE=Debug -ENABLE_TSAN=OFF -ENABLE_ASAN=OFF -CMAKE_INSTALL_PREFIX=$PROJECT_DIR/_install -CMAKE_OPTS=" \ -" -LOCAL=1 -GDB=0 -RUN_AFTER_BUILD=0 - -while [ $# -ne 0 ]; do - case "$1" in - "--help" ) - echo "$0 [--tsan] [--asan] [--local] [--develop] [--master] [--prefix ] [--run] [--gdb] [--help]" - exit 0 - ;; - - "--prefix" ) - CMAKE_INSTALL_PREFIX="$2" - shift 1 - ;; - - "--local" ) - LOCAL=1 - BUILD_DIR="_build/local" - CMAKE_BUILD_TYPE=Debug - CMAKE_OPTS=" \ - " - ;; - - "--develop" ) - LOCAL=0 - BUILD_DIR="_build/develop" - CMAKE_BUILD_TYPE=Release - CMAKE_OPTS=" \ - -DKENNEL_PORT=3501 \ - -DKENNEL_CATTLESHED_PORT=50052 \ - -DKENNEL_URL=https://develop.wandbox.org \ - " - ;; - "--master" ) - LOCAL=0 - BUILD_DIR="_build/master" - CMAKE_BUILD_TYPE=Release - CMAKE_OPTS=" \ - " - ;; - - "--tsan" ) - ENABLE_TSAN=ON - BUILD_DIR="_build/tsan" - GRPC_DIR="$INSTALL_DIR/grpc-tsan" - CMAKE_BUILD_TYPE=Debug - ;; - "--asan" ) - ENABLE_ASAN=ON - BUILD_DIR="_build/asan" - GRPC_DIR="$INSTALL_DIR/grpc-asan" - CMAKE_BUILD_TYPE=Debug - ;; - - "--run" ) - RUN_AFTER_BUILD=1 - ;; - "--gdb" ) - GDB=1 - ;; - - * ) - echo "Unknown option $1" 1>&2 - exit 1 - ;; - - esac - shift 1 -done - -export PATH=$INSTALL_DIR/cmake/bin:$PATH - -mkdir -p $BUILD_DIR -pushd $BUILD_DIR - cmake $PROJECT_DIR \ - -DPROTOC_GEN_JSONIF_CPP="$INSTALL_DIR/protoc-gen-jsonif/linux/amd64/protoc-gen-jsonif-cpp" \ - -DCppDB_ROOT_DIR="$INSTALL_DIR/cppdb" \ - -DSQLite3_INCLUDE_DIR="$INSTALL_DIR/sqlite3/include" \ - -DSPDLOG_ROOT_DIR="$INSTALL_DIR/spdlog" \ - 
-DCLI11_ROOT_DIR="$INSTALL_DIR/CLI11" \ - -DGGRPC_ROOT_DIR="$INSTALL_DIR/ggrpc" \ - -DCMAKE_PREFIX_PATH="$INSTALL_DIR/boost;$INSTALL_DIR/cppdb;$INSTALL_DIR/sqlite3;$GRPC_DIR" \ - -DCMAKE_INSTALL_PREFIX=$CMAKE_INSTALL_PREFIX \ - -DCMAKE_MODULE_PATH=$MODULE_PATH \ - -DCMAKE_BUILD_TYPE=$CMAKE_BUILD_TYPE \ - -DENABLE_TSAN=$ENABLE_TSAN \ - -DENABLE_ASAN=$ENABLE_ASAN \ - $CMAKE_OPTS - cmake --build . -j`nproc` -popd - -if [ $LOCAL -eq 1 -a $RUN_AFTER_BUILD -eq 1 ]; then - if [ $GDB -eq 1 ]; then - exec gdb -ex r --args $BUILD_DIR/kennel --log-level debug - else - exec $BUILD_DIR/kennel --log-level debug - fi -fi \ No newline at end of file diff --git a/kennel/src/kennel_session.h b/kennel/src/kennel_session.h index 6b7336f..0715f61 100644 --- a/kennel/src/kennel_session.h +++ b/kennel/src/kennel_session.h @@ -29,7 +29,7 @@ boost::beast::http::response BadRequest( res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING); res.set(boost::beast::http::field::content_type, "text/html"); res.keep_alive(req.keep_alive()); - res.body() = why.to_string(); + res.body() = why; res.prepare_payload(); return res; } @@ -42,7 +42,7 @@ boost::beast::http::response NotFound( res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING); res.set(boost::beast::http::field::content_type, "text/html"); res.keep_alive(req.keep_alive()); - res.body() = "The resource '" + target.to_string() + "' was not found."; + res.body() = "The resource '" + std::string(target) + "' was not found."; res.prepare_payload(); return res; } @@ -55,7 +55,7 @@ boost::beast::http::response ServerError( res.set(boost::beast::http::field::server, BOOST_BEAST_VERSION_STRING); res.set(boost::beast::http::field::content_type, "text/html"); res.keep_alive(req.keep_alive()); - res.body() = "An error occurred: '" + what.to_string() + "'"; + res.body() = "An error occurred: '" + std::string(what) + "'"; res.prepare_payload(); return res; } @@ -136,13 +136,13 @@ static wandbox::cattleshed::Issuer make_issuer( issuer.set_remote_addr(""); auto it = req.find("X-Real-IP"); if (it != req.end()) { - issuer.set_real_ip(it->value().to_string()); + issuer.set_real_ip(std::string(it->value())); } it = req.find("X-Forwarded-For"); if (it != req.end()) { - issuer.set_forwarded_for(it->value().to_string()); + issuer.set_forwarded_for(std::string(it->value())); } - issuer.set_path_info(req.target().to_string()); + issuer.set_path_info(std::string(req.target())); issuer.set_github_username(std::move(github_user)); return issuer; } @@ -285,7 +285,7 @@ class KennelSession : public std::enable_shared_from_this { return; } - SPDLOG_DEBUG("[{}] requested", req_.target().to_string()); + SPDLOG_DEBUG("[{}] requested", std::string(req_.target())); // 念のため catch しておく(投げっぱなしだとプロセスごと落ちてしまうので) try { @@ -315,15 +315,15 @@ class KennelSession : public std::enable_shared_from_this { SendResponse(NotFound(req_, req_.target())); } } catch (const char* e) { - SPDLOG_ERROR("[{}] Unexpected exception: {}", req_.target().to_string(), + SPDLOG_ERROR("[{}] Unexpected exception: {}", std::string(req_.target()), e); SendResponse(ServerError(req_, e)); } catch (std::exception& e) { - SPDLOG_ERROR("[{}] Unexpected exception: {}", req_.target().to_string(), + SPDLOG_ERROR("[{}] Unexpected exception: {}", std::string(req_.target()), e.what()); SendResponse(ServerError(req_, e.what())); } catch (...) 
{ - SPDLOG_ERROR("[{}] Unexpected exception", req_.target().to_string()); + SPDLOG_ERROR("[{}] Unexpected exception", std::string(req_.target())); SendResponse(ServerError(req_, "unexpected")); } } @@ -367,7 +367,7 @@ class KennelSession : public std::enable_shared_from_this { permlink pl(config_.database); wandbox::kennel::GetPermlinkResponse presp; try { - presp = pl.get_permlink(permlink_id.to_string()); + presp = pl.get_permlink(permlink_id); } catch (cppdb::null_value_fetch) { SendResponse(NotFound(req_, req_.target())); return; @@ -651,7 +651,7 @@ class KennelSession : public std::enable_shared_from_this { std::placeholders::_1, std::placeholders::_2, sp->need_eof())); - SPDLOG_INFO("[{}] responsed {}", req_.target().to_string(), + SPDLOG_INFO("[{}] responsed {}", std::string(req_.target()), (int)sp->result()); } diff --git a/package.sh b/package.sh deleted file mode 100755 index dda9adb..0000000 --- a/package.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -PROG=$0 - -function show_help() { - echo "$PROG " -} - -if [ $# -lt 2 ]; then - show_help - exit 1 -fi - -APP=$1 -ENV=$2 - -cd `dirname $0` -INSTALL_DIR="`pwd`/_install" - -set -ex - -if [ "$APP" != "kennel" -a "$APP" != "cattleshed" ]; then - show_help - exit 1 -fi - -if [ "$ENV" != "develop" -a "$ENV" != "master" ]; then - show_help - exit 1 -fi - -PREFIX="/opt/wandbox-data/release/$APP-$ENV" - -./install_deps.sh - -if [ "$APP" = "kennel" ]; then - pushd kennel - rm -rf _build/$ENV - ./cmake.sh --prefix $PREFIX --$ENV - sudo $INSTALL_DIR/cmake/bin/cmake --install _build/$ENV - popd -elif [ "$APP" = "cattleshed" ]; then - pushd cattleshed - rm -rf _build/$ENV - ./cmake.sh --prefix $PREFIX --$ENV - sudo $INSTALL_DIR/cmake/bin/cmake --install _build/$ENV - popd -fi - -mkdir -p _package -TARFILE="`pwd`/_package/$APP-$ENV.tar.gz" -pushd `dirname $PREFIX` - tar czf $TARFILE `basename $PREFIX` -popd -sudo rm -rf $PREFIX diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..71de868 --- /dev/null +++ b/ruff.toml @@ -0,0 +1 @@ +line-length = 100 diff --git a/run.py b/run.py new file mode 100644 index 0000000..adfbae1 --- /dev/null +++ b/run.py @@ -0,0 +1,333 @@ +import argparse +import logging +import multiprocessing +import os + +from buildbase import ( + add_path, + build_and_install_boost, + cd, + cmd, + install_cli11, + install_cmake, + install_cppdb, + install_ggrpc, + install_grpc, + install_protoc_gen_jsonif, + install_spdlog, + install_sqlite3, + mkdir_p, + read_version_file, + rm_rf, +) + +logging.basicConfig(level=logging.DEBUG) + + +BASE_DIR = os.path.abspath(os.path.dirname(__file__)) + + +def install_deps(source_dir: str, build_dir: str, install_dir: str, debug: bool): + with cd(BASE_DIR): + version = read_version_file("VERSION") + + # CMake + install_cmake_args = { + "version": version["CMAKE_VERSION"], + "version_file": os.path.join(install_dir, "cmake.version"), + "source_dir": source_dir, + "install_dir": install_dir, + "platform": "linux-x86_64", + "ext": "tar.gz", + } + install_cmake(**install_cmake_args) + add_path(os.path.join(install_dir, "cmake", "bin")) + + # gRPC + install_grpc_args = { + "version": version["GRPC_VERSION"], + "version_file": os.path.join(install_dir, "grpc.version"), + "source_dir": source_dir, + "build_dir": build_dir, + "install_dir": install_dir, + "debug": debug, + "cmake_args": [], + } + install_grpc(**install_grpc_args) + + # ggrpc + install_ggrpc_args = { + "version": version["GGRPC_VERSION"], + "version_file": os.path.join(install_dir, "ggrpc.version"), + "install_dir": 
install_dir, + } + install_ggrpc(**install_ggrpc_args) + + # spdlog + install_spdlog_args = { + "version": version["SPDLOG_VERSION"], + "version_file": os.path.join(install_dir, "spdlog.version"), + "install_dir": install_dir, + } + install_spdlog(**install_spdlog_args) + + # Boost + install_boost_args = { + "version": version["BOOST_VERSION"], + "version_file": os.path.join(install_dir, "boost.version"), + "source_dir": source_dir, + "build_dir": build_dir, + "install_dir": install_dir, + "cxx": "", + "cflags": [], + "cxxflags": [], + "linkflags": [], + "toolset": "gcc", + "visibility": "global", + "target_os": "linux", + "debug": debug, + "android_ndk": "", + "native_api_level": "", + "architecture": "x86", + } + build_and_install_boost(**install_boost_args) + + # CLI11 + install_cli11_args = { + "version": version["CLI11_VERSION"], + "version_file": os.path.join(install_dir, "cli11.version"), + "install_dir": install_dir, + } + install_cli11(**install_cli11_args) + + # protoc-gen-jsonif + install_jsonif_args = { + "version": version["PROTOC_GEN_JSONIF_VERSION"], + "version_file": os.path.join(install_dir, "protoc-gen-jsonif.version"), + "source_dir": source_dir, + "install_dir": install_dir, + "platform": "linux-amd64", + } + install_protoc_gen_jsonif(**install_jsonif_args) + + # SQLite3 + install_sqlite3_args = { + "version": version["SQLITE3_VERSION"], + "version_file": os.path.join(install_dir, "sqlite3.version"), + "year": version["SQLITE3_YEAR"], + "source_dir": source_dir, + "build_dir": build_dir, + "install_dir": install_dir, + "debug": debug, + "configure_args": [], + } + install_sqlite3(**install_sqlite3_args) + + # CppDB + install_cppdb_args = { + "version": version["CPPDB_VERSION"], + "version_file": os.path.join(install_dir, "cppdb.version"), + "source_dir": source_dir, + "build_dir": build_dir, + "install_dir": install_dir, + "debug": debug, + "sqlite3_install_dir": os.path.join(install_dir, "sqlite3"), + "cmake_args": [], + } + install_cppdb(**install_cppdb_args) + + +def do_build( + debug: bool, target: str, env: str, cattleshed_install_dir=None, kennel_install_dir=None +): + configuration = "debug" if debug else "release" + cmake_configuration = "Debug" if debug else "Release" + source_dir = os.path.join(BASE_DIR, "_source", configuration) + build_dir = os.path.join(BASE_DIR, "_build", configuration) + install_dir = os.path.join(BASE_DIR, "_install", configuration) + # package_dir = os.path.join(BASE_DIR, "_package", dir, configuration) + mkdir_p(source_dir) + mkdir_p(build_dir) + mkdir_p(install_dir) + + install_deps(source_dir, build_dir, install_dir, debug) + + if target == "cattleshed": + cattleshed_source_dir = os.path.join(BASE_DIR, "cattleshed") + cattleshed_build_dir = os.path.join(build_dir, "cattleshed") + if cattleshed_install_dir is None: + cattleshed_install_dir = os.path.join(install_dir, "cattleshed") + mkdir_p(cattleshed_build_dir) + with cd(cattleshed_build_dir): + cmd( + [ + "cmake", + cattleshed_source_dir, + f"-DCLI11_ROOT_DIR={os.path.join(install_dir, 'cli11')}", + f"-DSPDLOG_ROOT_DIR={os.path.join(install_dir, 'spdlog')}", + f"-DGGRPC_ROOT_DIR={os.path.join(install_dir, 'ggrpc')}", + f"-DCMAKE_PREFIX_PATH={os.path.join(install_dir, 'boost')};{os.path.join(install_dir, 'grpc')}", + f"-DCMAKE_MODULE_PATH={os.path.join(BASE_DIR, 'cmake')}", + f"-DCMAKE_INSTALL_PREFIX={cattleshed_install_dir}", + f"-DCMAKE_BUILD_TYPE={cmake_configuration}", + *( + [ + "-DCATTLESHED_STOREDIR=/tmp/cattleshed-develop-log", + 
"-DCATTLESHED_BASEDIR=/tmp/cattleshed-develop", + "-DCATTLESHED_LISTEN_PORT=50052", + ] + if env == "develop" + else [] + ), + ] + ) + cmd( + [ + "cmake", + "--build", + ".", + "--config", + cmake_configuration, + f"-j{multiprocessing.cpu_count()}", + ] + ) + + # 各種権限を設定 + cmd( + [ + "sudo", + "setcap", + "cap_sys_admin,cap_chown,cap_setuid,cap_setgid,cap_sys_chroot,cap_mknod,cap_net_admin=p", + os.path.join(cattleshed_build_dir, "cattlegrid"), + ] + ) + + elif target == "kennel": + kennel_source_dir = os.path.join(BASE_DIR, "kennel") + kennel_build_dir = os.path.join(build_dir, "kennel") + if kennel_install_dir is None: + kennel_install_dir = os.path.join(install_dir, "kennel") + mkdir_p(kennel_build_dir) + with cd(kennel_build_dir): + cmd( + [ + "cmake", + kennel_source_dir, + f"-DCLI11_ROOT_DIR={os.path.join(install_dir, 'cli11')}", + f"-DSPDLOG_ROOT_DIR={os.path.join(install_dir, 'spdlog')}", + f"-DGGRPC_ROOT_DIR={os.path.join(install_dir, 'ggrpc')}", + f"-DCMAKE_PREFIX_PATH={os.path.join(install_dir, 'boost')};{os.path.join(install_dir, 'grpc')};{os.path.join(install_dir, 'cppdb')};{os.path.join(install_dir, 'sqlite3')}", + f"-DCMAKE_MODULE_PATH={os.path.join(BASE_DIR, 'cmake')}", + f"-DCMAKE_INSTALL_PREFIX={kennel_install_dir}", + f"-DCMAKE_BUILD_TYPE={cmake_configuration}", + f"-DPROTOC_GEN_JSONIF_DIR={os.path.join(install_dir, 'protoc-gen-jsonif')}", + f"-DPROTOC_GEN_JSONIF_CPP={os.path.join(install_dir, 'protoc-gen-jsonif', 'bin', 'protoc-gen-jsonif-cpp')}", + f"-DCppDB_ROOT_DIR={os.path.join(install_dir, 'cppdb')}", + f"-DSQLite3_INCLUDE_DIR={os.path.join(install_dir, 'sqlite3', 'include')}", + *( + [ + "-DKENNEL_PORT=3501", + "-DKENNEL_CATTLESHED_PORT=50052", + "-DKENNEL_URL=https://develop.wandbox.org", + ] + if env == "develop" + else [] + ), + ] + ) + cmd( + [ + "cmake", + "--build", + ".", + "--config", + cmake_configuration, + f"-j{multiprocessing.cpu_count()}", + ] + ) + + +def do_package(debug: bool, target: str, env: str, prefix: str): + configuration = "debug" if debug else "release" + cmake_configuration = "Debug" if debug else "Release" + build_dir = os.path.join(BASE_DIR, "_build", configuration) + package_dir = os.path.join(BASE_DIR, "_package") + mkdir_p(package_dir) + do_build(debug, target, env, f"{prefix}/cattleshed-{env}", f"{prefix}/kennel-{env}") + cmd(["cmake", "--install", os.path.join(build_dir, target), "--config", cmake_configuration]) + with cd(prefix): + cmd(["tar", "czf", os.path.join(package_dir, f"{target}-{env}.tar.gz"), f"{target}-{env}"]) + rm_rf(os.path.join(prefix, f"{target}-{env}")) + + +def do_deploy(remote: str, target: str, env: str, prefix: str): + package_dir = os.path.join(BASE_DIR, "_package") + + cmd( + [ + "scp", + os.path.join(package_dir, f"{target}-{env}.tar.gz"), + f"{remote}:/tmp/{target}-{env}.tar.gz", + ] + ) + remote_command = f""" +set -ex +mkdir -p {prefix} +pushd {prefix} + tar xf /tmp/{target}-{env}.tar.gz + rm /tmp/{target}-{env}.tar.gz + + pushd {target}-{env} + if [ "{target}" = "cattleshed" ]; then + setcap cap_sys_admin,cap_chown,cap_setuid,cap_setgid,cap_sys_chroot,cap_mknod,cap_net_admin=p bin/cattlegrid + fi + if [ "{target}" = "kennel" ]; then + # データ置き場を作る + mkdir -p var/lib/kennel + chown -R ubuntu:ubuntu var/ + fi + popd +popd +cp /opt/wandbox-data/release/{target}-{env}/etc/{target}.service /etc/systemd/system/{target}-{env}.service +systemctl enable {target}-{env} +systemctl restart {target}-{env} +""" + cmd(["ssh", remote, "/bin/bash", "-c", remote_command]) + + +PACKAGE_PREFIX = "/opt/wandbox-data/release" 
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    sp = parser.add_subparsers()
+    bp = sp.add_parser("build")
+    bp.set_defaults(op="build")
+    bp.add_argument("target", choices=["kennel", "cattleshed"])
+    bp.add_argument("--env", choices=["master", "develop"], required=True)
+    bp.add_argument("--debug", action="store_true")
+    pp = sp.add_parser("package")
+    pp.set_defaults(op="package")
+    pp.add_argument("target", choices=["kennel", "cattleshed"])
+    pp.add_argument("--env", choices=["master", "develop"], required=True)
+    pp.add_argument("--debug", action="store_true")
+    pp.add_argument("--prefix", default=PACKAGE_PREFIX)
+    dp = sp.add_parser("deploy")
+    dp.set_defaults(op="deploy")
+    dp.add_argument("remote")
+    dp.add_argument("target", choices=["kennel", "cattleshed"])
+    dp.add_argument("--env", choices=["master", "develop"], required=True)
+    dp.add_argument("--prefix", default=PACKAGE_PREFIX)
+
+    args = parser.parse_args()
+
+    if args.op == "build":
+        do_build(debug=args.debug, target=args.target, env=args.env)
+    elif args.op == "package":
+        do_package(debug=args.debug, target=args.target, env=args.env, prefix=args.prefix)
+    elif args.op == "deploy":
+        do_deploy(remote=args.remote, target=args.target, env=args.env, prefix=args.prefix)
+
+
+if __name__ == "__main__":
+    main()
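Note (editor's sketch, not part of the diff): install_deps() in run.py resolves every dependency version through read_version_file("VERSION"), a buildbase.py helper that this diff does not show. Judging from its other use on /etc/os-release in get_build_platform(), it presumably parses simple KEY=VALUE lines. The sketch below only documents that assumption and the keys install_deps() expects; read_version_file_sketch is a hypothetical stand-in, not the real helper.

# Assumed behaviour of buildbase.read_version_file (simple KEY=VALUE parser); illustrative only.
def read_version_file_sketch(path: str) -> dict:
    versions = {}
    with open(path) as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith("#"):
                continue  # skip blanks and comments
            key, _, value = line.partition("=")
            versions[key.strip()] = value.strip().strip('"')  # /etc/os-release quotes its values
    return versions

# Keys read by install_deps(): CMAKE_VERSION, GRPC_VERSION, GGRPC_VERSION, SPDLOG_VERSION,
# BOOST_VERSION, CLI11_VERSION, PROTOC_GEN_JSONIF_VERSION, SQLITE3_VERSION, SQLITE3_YEAR,
# CPPDB_VERSION.

With a VERSION file providing those keys, the subcommands defined in main() cover the whole flow locally, for example "python3 run.py build cattleshed --env develop", "python3 run.py package kennel --env master", and "python3 run.py deploy <remote-host> kennel --env master".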