diff --git a/.github/workflows/arma.yml b/.github/workflows/arma.yml index 8bc07984..2abf697c 100644 --- a/.github/workflows/arma.yml +++ b/.github/workflows/arma.yml @@ -1,29 +1,34 @@ -name: Arma - +name: Testing on: - push: - branches: - - main pull_request: + types: [opened, synchronize, ready_for_review] + push: + branches: main jobs: validate: + name: Validation runs-on: ubuntu-latest steps: - name: Checkout the source code uses: actions/checkout@master - name: Validate SQF + if: always() run: python3 tools/sqf_validator.py - name: Validate Config + if: always() run: python3 tools/config_style_checker.py - name: Validate Stringtables + if: always() run: python3 tools/stringtable_validator.py - name: Check Strings + if: always() run: python3 tools/check_strings.py # - name: Check for BOM # uses: arma-actions/bom-check@master lint: + name: Linting runs-on: ubuntu-latest steps: - name: Checkout the source code @@ -33,6 +38,7 @@ jobs: continue-on-error: true # No failure due to many false-positives hemtt: + name: HEMTT Check runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 76efc176..35ac7944 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,20 +1,39 @@ -name: Build +name: Build and upload artifacts + on: - release: - types: [ published ] + push: + branches: + - main + pull_request_target: + jobs: build: - runs-on: ubuntu-latest + name: Build + runs-on: windows-latest steps: - - uses: actions/checkout@v2 - - name: Setup HEMTT - uses: arma-actions/hemtt@v1 - - name: Run HEMTT build - run: ./tools/deploy.sh ${{ github.ref_name }} - - name: Upload release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - tag: ${{ github.ref_name }} - file: release/7CavAddon_${{ github.ref_name }}-hemtt.zip - overwrite: true + - name: Checkout the source code + uses: actions/checkout@v4 + - name: Setup HEMTT + uses: 
arma-actions/hemtt@v1 + - name: Checkout pull request + uses: actions/checkout@v4 + if: ${{ github.event_name == 'pull_request_target' }} + with: + path: pullrequest + ref: 'refs/pull/${{ github.event.number }}/merge' + - name: Replace addons with pull request addons + if: ${{ github.event_name == 'pull_request_target' }} + run: | + rm -r addons\ + rm -r include\ + xcopy /e /h /q pullrequest\addons addons\ + xcopy /e /h /q pullrequest\include include\ + - name: Run HEMTT build + run: hemtt build + - name: Rename build folder + run: mv .hemttout/build .hemttout/@cav + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: cav-${{ github.sha }} + path: .hemttout/@* \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 00000000..2ff57fb5 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,25 @@ +name: Deploy Release +on: + release: + types: [published] + +jobs: + release: + runs-on: windows-latest + steps: + - name: Checkout the source code + uses: actions/checkout@v4 + - name: Setup HEMTT + uses: arma-actions/hemtt@v1 + - name: Run HEMTT release + run: | + .\tools\deploy-setVersion.ps1 + hemtt release + Rename-Item release\cav-latest.zip 7CavAddon-${{github.ref_name}}.zip + - name: Upload release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + tag: ${{ github.ref }} + asset_name: 7CavAddon-${{github.ref_name}}.zip + file: release/7CavAddon-${{github.ref_name}}.zip diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..a6e3392c --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,51 @@ +name: Publish +run-name: Publishing ${{github.ref_name}} to ${{ inputs.environment }} + +on: + workflow_dispatch: + inputs: + environment: + description: "Selected deploy environment" + required: true + default: "dev" + type: choice + options: + - dev + - live +jobs: + 
publish: + name: Publish to steam workshop + runs-on: windows-latest + steps: + - name: Checkout the source code + uses: actions/checkout@v4 + - name: Setup HEMTT + uses: arma-actions/hemtt@v1 + - name: Setup steamcmd + uses: CyberAndrii/setup-steamcmd@v1 + + - name: Obtaining release + if: ${{ inputs.environment == 'live' }} + run: | + .\tools\deploy-getRelease.ps1 ${{github.ref_name}} + Get-ChildItem -Path . -ErrorAction SilentlyContinue + - name: Deploy release build to workshop + if: ${{ inputs.environment == 'live' }} + run: | + Write-Host "Yey i did it!" + env: + ENVIRONMENT: ${{ inputs.environment }} + + - name: Run HEMTT build + if: ${{ inputs.environment == 'dev' }} + run: | + hemtt build + env: + ENVIRONMENT: ${{ inputs.environment }} + - name: Deploy dev build to workshop + if: ${{ inputs.environment == 'dev' }} + run: | + steamcmd.exe +login $env:STEAM_USERNAME $env:STEAM_PASSWORD +workshop_build_item resources\dev-payload.vdf +quit + env: + ENVIRONMENT: ${{ inputs.environment }} + \ No newline at end of file diff --git a/addons/insignia/data/Bravo_2.paa b/addons/insignia/data/Bravo_2.paa index f0e24db3..a2e4bfe6 100644 Binary files a/addons/insignia/data/Bravo_2.paa and b/addons/insignia/data/Bravo_2.paa differ diff --git a/addons/pronelauncher/functions/fnc_onKeyDown.sqf b/addons/pronelauncher/functions/fnc_onKeyDown.sqf index fc92c1ed..7f35b081 100644 --- a/addons/pronelauncher/functions/fnc_onKeyDown.sqf +++ b/addons/pronelauncher/functions/fnc_onKeyDown.sqf @@ -1,4 +1,4 @@ -#include "script_component.hpp" +#include "..\script_component.hpp" /* * Author: PiZZADOX, Jonpas * Handles keyDown EH for overriding engine stance changes when in AT launcher stance. 
diff --git a/addons/pronelauncher/functions/script_component.hpp b/addons/pronelauncher/functions/script_component.hpp deleted file mode 100644 index cdc3dede..00000000 --- a/addons/pronelauncher/functions/script_component.hpp +++ /dev/null @@ -1 +0,0 @@ -#include "../script_component.hpp" diff --git a/resources/dev-description.txt b/resources/dev-description.txt new file mode 100644 index 00000000..1753f4e4 --- /dev/null +++ b/resources/dev-description.txt @@ -0,0 +1,4 @@ +[h1]7th Cavalry Gaming Community Addon Development Build[/h1] +This is the development and experimental build of the 7th Cavalry Gaming Community Addon or 7CavAddon for short. This build contains experimental features and/or items and is not recommended to be used unless you know what you're doing. + +To get the regular build go [url=https://steamcommunity.com/sharedfiles/filedetails/?id=3298466460]here[/url]. \ No newline at end of file diff --git a/resources/dev-payload.vdf b/resources/dev-payload.vdf new file mode 100644 index 00000000..772bbe23 --- /dev/null +++ b/resources/dev-payload.vdf @@ -0,0 +1,10 @@ +"workshopitem" +{ + "appid" "107410" + "publishedfileid" "3298481411" + "contentfolder" "D:\\a\\7CavAddon\\7CavAddon\\.hemttout\\build" + "previewfile" "D:\\a\\7CavAddon\\7CavAddon\\resources\\steam_preview-dev.jpg" + "title" "7CavAddon DevBuild [7CAV]" + "description" "" + "changenote" "" +} \ No newline at end of file diff --git a/tools/check_strings.py b/tools/check_strings.py index 09897781..bfe7ddf8 100644 --- a/tools/check_strings.py +++ b/tools/check_strings.py @@ -11,14 +11,15 @@ def getDefinedStrings(filepath): # print("getDefinedStrings {0}".format(filepath)) with open(filepath, 'r', encoding="latin-1") as file: content = file.read() - srch = re.compile('Key ID\=\"(STR_CAV_[_a-zA-Z0-9]*)"', re.IGNORECASE) + srch = re.compile('Key ID\=\"(STR_ACE_[_a-zA-Z0-9]*)"', re.IGNORECASE) modStrings = srch.findall(content) modStrings = [s.lower() for s in modStrings] return modStrings def 
getStringUsage(filepath): - selfmodule = (re.search('addons[\W]*([_a-zA-Z0-9]*)', filepath)).group(1) - # print("Checking {0} from {1}".format(filepath,selfmodule)) + selfmodule = (re.search('(addons|optionals)[\W]*([_a-zA-Z0-9]*)', filepath)).group(2) + submodule = (re.search(f'(addons|optionals)[\W]*{selfmodule}[\W]*([_a-zA-Z0-9]*)', filepath)).group(2) + # print(f"Checking {filepath} from {selfmodule} ({submodule})") fileStrings = [] with open(filepath, 'r') as file: @@ -27,7 +28,7 @@ def getStringUsage(filepath): srch = re.compile('(STR_CAV_[_a-zA-Z0-9]*)', re.IGNORECASE) fileStrings = srch.findall(content) - srch = re.compile('[^E][CL]STRING\(([_a-zA-Z0-9]*)\)', re.IGNORECASE) + srch = re.compile('[^EB][CL]STRING\(([_a-zA-Z0-9]*)\)', re.IGNORECASE) modStrings = srch.findall(content) for localString in modStrings: fileStrings.append("STR_CAV_{0}_{1}".format(selfmodule, localString)) @@ -37,6 +38,11 @@ def getStringUsage(filepath): for (exModule, exString) in exStrings: fileStrings.append("STR_CAV_{0}_{1}".format(exModule, exString)) + srch = re.compile('SUB[CL]STRING\(([_a-zA-Z0-9]*)\)') + subStrings = srch.findall(content) + for (subString) in subStrings: + fileStrings.append(f"STR_CAV_{submodule}_{subString}") + srch = re.compile('IGNORE_STRING_WARNING\([\'"]*([_a-zA-Z0-9]*)[\'"]*\)') ignoreWarnings = srch.findall(content) @@ -51,23 +57,24 @@ def main(argv): allDefinedStrings = [] allUsedStrings = [] - # Allow running from root directory as well as from inside the tools directory - rootDir = "../addons" - if (os.path.exists("addons")): - rootDir = "addons" - - for root, dirnames, filenames in os.walk(rootDir): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.cpp'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.hpp'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, 
'*.h'): - sqf_list.append(os.path.join(root, filename)) - - for filename in fnmatch.filter(filenames, '*.xml'): - xml_list.append(os.path.join(root, filename)) + for folder in ['addons', 'optionals']: + # Allow running from root directory as well as from inside the tools directory + rootDir = "../" + folder + if (os.path.exists(folder)): + rootDir = folder + + for root, dirnames, filenames in os.walk(rootDir): + for filename in fnmatch.filter(filenames, '*.sqf'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.cpp'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.hpp'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.h'): + sqf_list.append(os.path.join(root, filename)) + + for filename in fnmatch.filter(filenames, '*.xml'): + xml_list.append(os.path.join(root, filename)) for filename in xml_list: allDefinedStrings = allDefinedStrings + getDefinedStrings(filename) @@ -77,6 +84,8 @@ def main(argv): allDefinedStrings = list(sorted(set(allDefinedStrings))) allUsedStrings = list(sorted(set(allUsedStrings))) + if ("str_cav_tagging_name" in allUsedStrings): allUsedStrings.remove("str_cav_tagging_name") # Handle tagging macro + print("-----------") countUnusedStrings = 0 countUndefinedStrings = 0 @@ -98,4 +107,4 @@ def main(argv): return countUndefinedStrings if __name__ == "__main__": - main(sys.argv) \ No newline at end of file + main(sys.argv) diff --git a/tools/compileExtensions.py b/tools/compileExtensions.py new file mode 100644 index 00000000..8864d98e --- /dev/null +++ b/tools/compileExtensions.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +# vim: set fileencoding=utf-8 : + +# compileExtensions.py (from acre2's make.py) + +############################################################################### + +# The MIT License (MIT) + +# Copyright (c) 2013-2014 Ryan Schultz + +# Permission is hereby granted, free of charge, to any 
person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +############################################################################### + +import sys +import os.path +import subprocess +import shutil +import time +import timeit + +if sys.platform == "win32": + import winreg + + +def compile_extensions(force_build): + originalDir = os.getcwd() + extensions_root = os.path.join(os.path.dirname(os.getcwd()), "extensions") + os.chdir(extensions_root) + print("\nCompiling extensions in {} with rebuild:{}\n".format(extensions_root, force_build)) + + if shutil.which("git") == None: + print("Failed to find Git!") + return + if shutil.which("cmake") == None: + print("Failed to find CMake!") + return + if shutil.which("msbuild") == None: + print("Failed to find MSBuild!") + return + + try: + buildType = "rebuild" if force_build else "build" + # 32-bit + vcproj32 = os.path.join(extensions_root, "vcproj32") + if not os.path.exists(vcproj32): + os.mkdir(vcproj32) + os.chdir(vcproj32) + subprocess.call(["cmake", "..", "-A", "Win32"]) # note: cmake will update ace_version stuff + subprocess.call(["msbuild", "ACE.sln", "/m", "/t:{}".format(buildType), "/p:Configuration=Release"]) + + # 64-bit + vcproj64 = os.path.join(extensions_root, "vcproj64") + if not os.path.exists(vcproj64): + os.mkdir(vcproj64) + os.chdir(vcproj64) + subprocess.call(["cmake", "..", "-A", "x64"]) + subprocess.call(["msbuild", "ACE.sln", "/m", "/t:{}".format(buildType), "/p:Configuration=Release"]) + except Exception as e: + print("Error: COMPILING EXTENSIONS - {}".format(e)) + raise + finally: + os.chdir(originalDir) + + +def main(argv): + if "force" in argv: + argv.remove("force") + force_build = True + else: + force_build = False + + compile_extensions(force_build) + + +if __name__ == "__main__": + start_time = timeit.default_timer() + main(sys.argv) + print("\nTotal Program time elapsed: {0} sec".format(timeit.default_timer() - start_time)) + input("Press Enter to continue...") diff --git a/tools/config_style_checker.py b/tools/config_style_checker.py index 
03d8f222..afa78a2d 100644 --- a/tools/config_style_checker.py +++ b/tools/config_style_checker.py @@ -16,6 +16,14 @@ def pushClosing(t): def popClosing(): closing << closingStack.pop() + reIsClass = re.compile(r'^\s*class(.*)') + reIsClassInherit = re.compile(r'^\s*class(.*):') + reIsClassBody = re.compile(r'^\s*class(.*){') + reBadColon = re.compile(r'\s*class (.*) :') + reSpaceAfterColon = re.compile(r'\s*class (.*): ') + reSpaceBeforeCurly = re.compile(r'\s*class (.*) {') + reClassSingleLine = re.compile(r'\s*class (.*)[{;]') + with open(filepath, 'r', encoding='utf-8', errors='ignore') as file: content = file.read() @@ -118,6 +126,23 @@ def popClosing(): if brackets_list.count('{') != brackets_list.count('}'): print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath,brackets_list.count('{'),brackets_list.count('}'))) bad_count_file += 1 + + file.seek(0) + for lineNumber, line in enumerate(file.readlines()): + if reIsClass.match(line): + if reBadColon.match(line): + print(f"WARNING: bad class colon {filepath} Line number: {lineNumber+1}") + # bad_count_file += 1 + if reIsClassInherit.match(line): + if not reSpaceAfterColon.match(line): + print(f"WARNING: bad class missing space after colon {filepath} Line number: {lineNumber+1}") + if reIsClassBody.match(line): + if not reSpaceBeforeCurly.match(line): + print(f"WARNING: bad class inherit missing space before curly braces {filepath} Line number: {lineNumber+1}") + if not reClassSingleLine.match(line): + print(f"WARNING: bad class braces placement {filepath} Line number: {lineNumber+1}") + # bad_count_file += 1 + return bad_count_file def main(): @@ -131,16 +156,17 @@ def main(): parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="") args = parser.parse_args() - # Allow running from root directory as well as from inside the tools directory - rootDir = "../addons" - if (os.path.exists("addons")): - rootDir = 
"addons" - - for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.cpp'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.hpp'): - sqf_list.append(os.path.join(root, filename)) + for folder in ['addons', 'optionals']: + # Allow running from root directory as well as from inside the tools directory + rootDir = "../" + folder + if (os.path.exists(folder)): + rootDir = folder + + for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): + for filename in fnmatch.filter(filenames, '*.cpp'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.hpp'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: bad_count = bad_count + check_config_style(filename) diff --git a/tools/config_validator.py b/tools/config_validator.py index 71076007..50722319 100644 --- a/tools/config_validator.py +++ b/tools/config_validator.py @@ -13,7 +13,7 @@ ######## GLOBALS ######### MAINPREFIX = "Z" -PREFIX = "cav" +PREFIX = "ACE" ########################## def Fract_Sec(s): diff --git a/tools/deploy-getRelease.ps1 b/tools/deploy-getRelease.ps1 new file mode 100644 index 00000000..4c797d0e --- /dev/null +++ b/tools/deploy-getRelease.ps1 @@ -0,0 +1,39 @@ +# Download latest dotnet/codeformatter release from github +# https://gist.github.com/MarkTiedemann/c0adc1701f3f5c215fc2c2d5b1d5efd3 + +$version = $args[0] + +$repo = "7Cav/7CavAddon" +$file = "7CavAddon-$version.zip" + +$releases = "https://api.github.com/repos/$repo/releases" + +Write-Host "Looking for $version..." +$releases = (Invoke-WebRequest $releases | ConvertFrom-Json) +forEach ($tagName in $releases ) { + $tagName = $tagName[0].tag_name + $tag = if ( "$version" -eq "$tagName" ) { + return "$tagName" + } else { + "false" + } +} +if ( "$tag" -eq "false") { + Write-Error "No release tag $version detected" + exit 1 +} +Write-Host "Release $tag found..." 
+ +$download = "https://github.com/$repo/releases/download/$tag/$file" +$name = $file.Split(".")[0] +$zip = "$name-$tag.zip" +$dir = "$name-$tag" + +#Write-Host Downloading latest release +Invoke-WebRequest $download -Out $zip +# +#Write-Host Extracting release files +Expand-Archive $zip -Force + +Get-ChildItem -Path . -ErrorAction SilentlyContinue +Get-ChildItem -Path $dir -ErrorAction SilentlyContinue diff --git a/tools/deploy-setVersion.ps1 b/tools/deploy-setVersion.ps1 new file mode 100644 index 00000000..17577e10 --- /dev/null +++ b/tools/deploy-setVersion.ps1 @@ -0,0 +1,21 @@ +cd $PSScriptRoot +cd .. + +# Set version +$tagVersion = git describe --tags --abbrev=0 +Write-Host "Build version $tagVersion" -ForegroundColor yellow + +$version = $tagVersion.Split(".") +$versionMajor = $version[0] +$versionMinor = $version[1] +$versionPatch = $version[2] +$versionBuild = 0 + +sed -e "s/DevBuild/$tagVersion/g" "mod.cpp" | Set-Content "mod.cpp" + +sed -e "s/major = 0/major = $versionMajor/g" ".hemtt/project.toml" | Set-Content ".hemtt/project.toml" +sed -e "s/minor = 0/minor = $versionMinor/g" ".hemtt/project.toml" | Set-Content ".hemtt/project.toml" +sed -e "s/patch = 0/patch = $versionPatch/g" ".hemtt/project.toml" | Set-Content ".hemtt/project.toml" +sed -e "s/build = 0/build = $versionBuild/g" ".hemtt/project.toml" | Set-Content ".hemtt/project.toml" + +return $tagVersion \ No newline at end of file diff --git a/tools/deploy_both.ps1 b/tools/deploy_both.ps1 deleted file mode 100644 index c22ec925..00000000 --- a/tools/deploy_both.ps1 +++ /dev/null @@ -1,46 +0,0 @@ -if ( ![bool](Test-Path -Path "P:") ) { - Write-Host "Arma 3 P: is not mounted..." -ForegroundColor Red - exit 1 -} - -cd $PSScriptRoot\.. 
- -# Clear folder -Remove-Item releases -Force -Recurse -ErrorAction 'silentlycontinue' - -$tagVersion = git describe --tags --abbrev=0 -Write-Host "Build release versions for tag $tagVersion" -ForegroundColor Yellow - -$version = $tagVersion.Split(".") -$versionMajor = $version[0] -$versionMinor = $version[1] -$versionPatch = $version[2] -$versionBuild = 0 - -# Set version -Write-Host "Build both Hemtt and PBO Project Releases" -ForegroundColor Blue - -# Hemtt -Write-Host "Build Hemtt" -ForegroundColor Blue -cd $PSScriptRoot -.\deploy.ps1 - -# Rename hemtt zip -cd $PSScriptRoot\.. - -Write-Host "Renaming and saving hemtt release in temp" -New-Item -ItemType Directory -Path tools\temp -Force >$null 2>&1 -Rename-Item -Path .\releases\7CavAddon_$tagVersion.zip -NewName 7CavAddon_$tagVersion-hemtt.zip -Move-Item -Path .\releases\7CavAddon_$tagVersion-hemtt.zip -Destination .\tools\temp\. - - -# Pbo Project -Write-Host "Build Pbo Project" -ForegroundColor Blue -cd $PSScriptRoot -.\deploy_legacy.ps1 -cd $PSScriptRoot\.. - -# Rename hemtt zip -Rename-Item -Path .\releases\7CavAddon_$tagVersion.zip -NewName 7CavAddon_$tagVersion-pboProject.zip -Move-Item -Path .\tools\temp\7CavAddon_$tagVersion-hemtt.zip -Destination .\releases\. -Remove-Item .\tools\temp -Force -Recurse -ErrorAction 'SilentlyContinue' \ No newline at end of file diff --git a/tools/deploy_legacy.ps1 b/tools/deploy_legacy.ps1 deleted file mode 100644 index 9a0da7d7..00000000 --- a/tools/deploy_legacy.ps1 +++ /dev/null @@ -1,29 +0,0 @@ -if ( ![bool](Test-Path -Path "P:") ) { - Write-Host "Arma 3 P: is not mounted..." 
-ForegroundColor Red - exit 1 -} - -# Set version -$tagVersion = git describe --tags --abbrev=0 -Write-Host "Build version $tagVersion" - -$version = $tagVersion.Split(".") -$versionMajor = $version[0] -$versionMinor = $version[1] -$versionPatch = $version[2] -$versionBuild = 0 - -sed -e "s/DevBuild/$tagVersion/g" "../mod.cpp" | Set-Content "../mod.cpp" - -Set-Content -Path '../addons/main/script_version.hpp' -Value "#define MAJOR $versionMajor -#define MINOR $versionMinor -#define PATCHLVL $versionPatch -#define BUILD $versionBuild" - -# Build release -py make.py release ci - -# Clean up -Write-Host "Restoring version files..." -git checkout origin/main ../addons/main/script_version.hpp -git checkout origin/main ../mod.cpp diff --git a/tools/extract_dependencies.py b/tools/extract_dependencies.py new file mode 100644 index 00000000..e28eb6cd --- /dev/null +++ b/tools/extract_dependencies.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +# Author: Jonpas +# Extracts dependencies to "docs/_includes/dependencies_list.md" for use with Jekyll include statement. 
+# Use the following line to add dependencies to an ACE3 feature page: {% include dependencies_list.md component="" %} + + +import os +import sys +import re + + +def get_dependencies(line): + dependencies = re.findall(r'"(.*?)"', line) + return dependencies + + +def main(): + if "--markdown" not in sys.argv: + print(""" + ############################################# + # Extract ACE3 Module Dependencies # + # (for Jekyll include) # + ############################################# + """) + + # Mod paths + script_path = os.path.realpath(__file__) + project_path = os.path.dirname(os.path.dirname(script_path)) + addons_path = os.path.join(project_path, "addons") + optionals_path = os.path.join(project_path, "optionals") + + # Documentation paths + include_path = os.path.join(project_path, "docs", "_includes") + dependencies_path = os.path.join(include_path, "dependencies_list.md") + + # Prepare files and paths list + if not os.path.exists(include_path): + print("Jekyll documentation not found!") + sys.exit(0) + + open(dependencies_path, "w", newline="\n").close() + if os.path.exists(addons_path): + addons = sorted(next(os.walk(addons_path))[1]) + if os.path.exists(optionals_path): + addons += ["."] + sorted(next(os.walk(optionals_path))[1]) + + dependencies_path_current = dependencies_path + addons_path_current = addons_path + + # Iterate through folders in the addons directories + for folder in addons: + # Ignore "main" component + if folder == "main": + continue + + # Change to optionals list on "." 
separator + if folder == ".": + if addons_path_current == addons_path: + addons_path_current = optionals_path + continue + + # Open config.cpp file and extract dependencies + data = [] + configfile = os.path.join(addons_path_current, folder, "config.cpp") + + if os.path.exists(configfile): + with open(os.path.join(addons_path_current, folder, "config.cpp")) as file: + match = False + for line in file: + # One-line + if not match and re.match(r"\s+requiredAddons\[\]\ = {.+?};", line): + data += get_dependencies(line) + break + # Multi-line + else: + if re.match(r"\s+requiredAddons\[\]\ = {", line): + # First line + match = True + data += get_dependencies(line) + continue + elif match and re.match(r"\s+};", line): + # Final line + data += get_dependencies(line) + match = False + break + elif match: + # All lines between + data += get_dependencies(line) + continue + + data = "`, `".join(data) + data = "`{}`".format(data) + + jekyll_statement = "".join([ + "{% if include.component == \"" + folder + "\" %}\n", + "- {}\n".format(data.replace(", ", "\n- ")), + "{% endif %}\n" + ]) + + with open(dependencies_path_current, "a", newline="\n") as file: + file.writelines([jekyll_statement, "\n"]) + + if "--markdown" not in sys.argv: + print("{}: {}".format(folder, data)) + else: + print(jekyll_statement) + + +if __name__ == "__main__": + main() diff --git a/tools/publish.py b/tools/publish.py new file mode 100644 index 00000000..5ddc6980 --- /dev/null +++ b/tools/publish.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +# Author: PabstMirror + +# Uploads ace relases to workshop +# Will slice up compats to their own folders + +import sys + +if sys.version_info[0] == 2: + print("Python 3 is required.") + sys.exit(1) + +import os +import os.path +import shutil +import platform +import glob +import subprocess +import hashlib +import configparser +import json +import traceback +import time +import timeit +import re +import fnmatch + +if sys.platform == "win32": + import winreg + +def 
find_bi_tools(): + reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) + try: + k = winreg.OpenKey(reg, r"Software\bohemia interactive\arma 3 tools") + arma3tools_path = winreg.QueryValueEx(k, "path")[0] + winreg.CloseKey(k) + except: + raise Exception("BadTools","Arma 3 Tools are not installed correctly or the P: drive needs to be created.") + + publisher_path = os.path.join(arma3tools_path, "Publisher", "PublisherCmd.exe") + + if os.path.isfile(publisher_path): + return publisher_path + else: + raise Exception("BadTools","Arma 3 Tools are not installed correctly or the P: drive needs to be created.") + + +def publishFolder(folder,modID,changeNotes): + cmd = [publisherTool_path, "update", "/id:{}".format(modID), "/changeNoteFile:{}".format(changeNotes), "/path:{}".format(folder)] + + print ("running: {}".format(cmd)) + + print("") + print("Publishing folder {} to workshop ID {}".format(folder,modID)) + print("") + if (not do_publish): + print("Just doing test build") + return + ret = subprocess.call(cmd) + if ret != 0: + print("publisher faild with code {}".format(ret)) + raise Exception("Publisher","Publisher had problems") + + +#GLOBALS +release_dir = "P:\\z\\ace\\release" +project = "@ace" +publisherTool_path = find_bi_tools() +changelog_path = os.path.join(release_dir,"changelog.txt") +ace_release_dir = os.path.join(release_dir, project) +ace_optionals_dir = os.path.join(ace_release_dir, "optionals") + +do_publish = True +# do_publish = False #will let you just build dirs and test without running publisher + + +def main(argv): + if not os.path.exists(ace_release_dir): + raise Exception("ace_release_dir not found","ACE not built or in wrong path") + if not os.path.exists(ace_optionals_dir): + raise Exception("ace_optionals_dir not found","ACE not built or in wrong path") + if not os.path.exists(publisherTool_path): + raise Exception("publisherTool_path not found","Arma Tools not found") + if not os.path.exists(changelog_path): + raise 
Exception("changelog_path not found","Requires changelog.txt be present in the release dir") + + if do_publish: + repl = input("\nThis will publish to steam, are you positive release dir has correct files? (y/n): ") + if repl.lower() != "y": + return 0 + + #ACE Main - http://steamcommunity.com/sharedfiles/filedetails/?id=463939057 + # Note: command line publisher doesn't like our file structure, just upload this one manually + + #noactionmenu: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202412030 + publishFolder(os.path.join(ace_optionals_dir,"@ace_noactionmenu"), "2202412030", changelog_path) + + #nocrosshair: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202412481 + publishFolder(os.path.join(ace_optionals_dir,"@ace_nocrosshair"), "2202412481", changelog_path) + + #nomedical: - https://steamcommunity.com/sharedfiles/filedetails/?id=3053169823 + publishFolder(os.path.join(ace_optionals_dir,"@ace_nomedical"), "3053169823", changelog_path) + + #norealisticnames: - https://steamcommunity.com/sharedfiles/filedetails/?id=3053177117 + publishFolder(os.path.join(ace_optionals_dir,"@ace_norealisticnames"), "3053177117", changelog_path) + + #nouniformrestrictions: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202413047 + publishFolder(os.path.join(ace_optionals_dir,"@ace_nouniformrestrictions"), "2202413047", changelog_path) + + #particles: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202413537 + publishFolder(os.path.join(ace_optionals_dir,"@ace_particles"), "2202413537", changelog_path) + + #realisticdispersion: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202414018 + publishFolder(os.path.join(ace_optionals_dir,"@ace_realisticdispersion"), "2202414018", changelog_path) + + #tracers: - https://steamcommunity.com/sharedfiles/filedetails/?id=2202414450 + publishFolder(os.path.join(ace_optionals_dir,"@ace_tracers"), "2202414450", changelog_path) + + +if __name__ == "__main__": + main(sys.argv) diff --git 
a/tools/search_undefinedFunctions.py b/tools/search_undefinedFunctions.py index 2c4fbeb5..6789bbe9 100644 --- a/tools/search_undefinedFunctions.py +++ b/tools/search_undefinedFunctions.py @@ -7,86 +7,104 @@ import sys import argparse +# handle x64 python clipboard, ref https://forums.autodesk.com/t5/maya-programming/ctypes-bug-cannot-copy-data-to-clipboard-via-python/m-p/9197068/highlight/true#M10992 import ctypes - -#from http://stackoverflow.com/a/3429034 -#Get required functions, strcpy.. -strcpy = ctypes.cdll.msvcrt.strcpy -ocb = ctypes.windll.user32.OpenClipboard #Basic Clipboard functions -ecb = ctypes.windll.user32.EmptyClipboard -gcd = ctypes.windll.user32.GetClipboardData -scd = ctypes.windll.user32.SetClipboardData -ccb = ctypes.windll.user32.CloseClipboard -ga = ctypes.windll.kernel32.GlobalAlloc # Global Memory allocation -gl = ctypes.windll.kernel32.GlobalLock # Global Memory Locking -gul = ctypes.windll.kernel32.GlobalUnlock -GMEM_DDESHARE = 0x2000 - -def Get( ): - ocb(None) # Open Clip, Default task - pcontents = gcd(1) # 1 means CF_TEXT.. too lazy to get the token thingy ... - data = ctypes.c_char_p(pcontents).value - #gul(pcontents) ? 
- ccb() - return data +from ctypes import wintypes +CF_UNICODETEXT = 13 + + +user32 = ctypes.WinDLL('user32') +kernel32 = ctypes.WinDLL('kernel32') + +OpenClipboard = user32.OpenClipboard +OpenClipboard.argtypes = wintypes.HWND, +OpenClipboard.restype = wintypes.BOOL +CloseClipboard = user32.CloseClipboard +CloseClipboard.restype = wintypes.BOOL +EmptyClipboard = user32.EmptyClipboard +EmptyClipboard.restype = wintypes.BOOL +GetClipboardData = user32.GetClipboardData +GetClipboardData.argtypes = wintypes.UINT, +GetClipboardData.restype = wintypes.HANDLE +SetClipboardData = user32.SetClipboardData +SetClipboardData.argtypes = (wintypes.UINT, wintypes.HANDLE) +SetClipboardData.restype = wintypes.HANDLE +GlobalLock = kernel32.GlobalLock +GlobalLock.argtypes = wintypes.HGLOBAL, +GlobalLock.restype = wintypes.LPVOID +GlobalUnlock = kernel32.GlobalUnlock +GlobalUnlock.argtypes = wintypes.HGLOBAL, +GlobalUnlock.restype = wintypes.BOOL +GlobalAlloc = kernel32.GlobalAlloc +GlobalAlloc.argtypes = (wintypes.UINT, ctypes.c_size_t) +GlobalAlloc.restype = wintypes.HGLOBAL +GlobalSize = kernel32.GlobalSize +GlobalSize.argtypes = wintypes.HGLOBAL, +GlobalSize.restype = ctypes.c_size_t + +GMEM_MOVEABLE = 0x0002 +GMEM_ZEROINIT = 0x0040 def Paste( data ): - ocb(None) # Open Clip, Default task - ecb() - hCd = ga( GMEM_DDESHARE, len( bytes(data,"ascii") )+1 ) - pchData = gl(hCd) - strcpy(ctypes.c_char_p(pchData),bytes(data,"ascii")) - gul(hCd) - scd(1,hCd) - ccb() + data = data.encode('utf-16le') + OpenClipboard(None) + EmptyClipboard() + handle = GlobalAlloc(GMEM_MOVEABLE | GMEM_ZEROINIT, len(data) + 2) + pcontents = GlobalLock(handle) + ctypes.memmove(pcontents, data, len(data)) + GlobalUnlock(handle) + SetClipboardData(CF_UNICODETEXT, handle) + CloseClipboard() def getFunctions(filepath): - selfmodule = (re.search('addons[\W]*([_a-zA-Z0-9]*)', filepath)).group(1) + selfmodule = (re.search(r'addons[\W]*([_a-zA-Z0-9]*)', filepath)).group(1) # print("Checking {0} from 
{1}".format(filepath,selfmodule)) + if (selfmodule.startswith("compat")): return [] with open(filepath, 'r') as file: content = file.read() - srch = re.compile('[^E]FUNC\(([_a-zA-Z0-9]*)\)') + srch = re.compile(r'[^E]FUNC\(([_a-zA-Z0-9]*)\)') modfuncs = srch.findall(content) modfuncs = sorted(set(modfuncs)) - srch = re.compile('EFUNC\(([_a-zA-Z0-9]*),([_a-zA-Z0-9]*)\)') + srch = re.compile(r'EFUNC\(([_a-zA-Z0-9]*),([_a-zA-Z0-9]*)\)') exfuncs = srch.findall(content) exfuncs = sorted(set(exfuncs)) fileFuncs = [] for func in modfuncs: - fileFuncs.append("cav_{0}_fnc_{1}".format(selfmodule,func)) + fileFuncs.append("cav_{0}_fnc_{1}".format(selfmodule,func)) for exModule,func in exfuncs: - fileFuncs.append("cav_{0}_fnc_{1}".format(exModule, func)) + fileFuncs.append("cav_{0}_fnc_{1}".format(exModule, func)) return fileFuncs def getStrings(filepath): - selfmodule = (re.search('addons[\W]*([_a-zA-Z0-9]*)', filepath)).group(1) + selfmodule = (re.search(r'addons[\W]*([_a-zA-Z0-9]*)', filepath)).group(1) # print("Checking {0} from {1}".format(filepath,selfmodule)) + if (selfmodule.startswith("compat")): return [] with open(filepath, 'r') as file: content = file.read() - srch = re.compile('[^E][CL]STRING\(([_a-zA-Z0-9]*)\)') + srch = re.compile(r'[^E][CL]STRING\(([_a-zA-Z0-9]*)\)') modStrings = srch.findall(content) modStrings = sorted(set(modStrings)) - srch = re.compile('E[CL]STRING\(([_a-zA-Z0-9]*),([_a-zA-Z0-9]*)\)') + srch = re.compile(r'E[CL]STRING\(([_a-zA-Z0-9]*),([_a-zA-Z0-9]*)\)') exStrings = srch.findall(content) exStrings = sorted(set(exStrings)) fileStrings = [] for localString in modStrings: - fileStrings.append("STR_CAV_{0}_{1}".format(selfmodule, localString)) + fileStrings.append("STR_CAV_{0}_{1}".format(selfmodule, localString)) for (exModule, exString) in exStrings: - fileStrings.append("STR_CAV_{0}_{1}".format(exModule, exString)) + fileStrings.append("STR_CAV_{0}_{1}".format(exModule, exString)) return fileStrings @@ -105,7 +123,8 @@ def main(): 
parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default=".") args = parser.parse_args() - for root, dirnames, filenames in os.walk('../addons' + '/' + args.module): + addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + for root, dirnames, filenames in os.walk(addon_base_path +"/" + 'addons' + '/' + args.module): for filename in fnmatch.filter(filenames, '*.sqf'): sqf_list.append(os.path.join(root, filename)) for filename in fnmatch.filter(filenames, '*.cpp'): @@ -126,7 +145,7 @@ def main(): outputCode = "{0} allFunctions = {1}; allStrings = {2}; {3} {4}".format(codeHeader, list(set(allFunctions)), list(set(allStrings)), codeFuncCheck, codeStringCheck) print(outputCode) - Paste(outputCode); + Paste(outputCode) print ("") print ("Copied to clipboard, [funcs {0} / strings {1}]'".format(len(set(allFunctions)), len(set(allStrings)))) diff --git a/tools/setup.bat b/tools/setup.bat new file mode 100644 index 00000000..8d7245ab --- /dev/null +++ b/tools/setup.bat @@ -0,0 +1,16 @@ +;@Findstr -bv ;@F "%~f0" | powershell -Command - & pause & goto:eof + +Write-Output "=> Downloading ..." +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +$url = "https://github.com/BrettMayson/HEMTT/releases/latest/download/windows-x64.zip" +(New-Object Net.WebClient).DownloadFile($url, "hemtt.zip"); Write-Output "$url => hemtt.zip" + +Write-Output "`n=> Extracting ..." +Expand-Archive -Path "hemtt.zip" -DestinationPath "..\." -Force; Write-Output "hemtt.zip" +Remove-Item "hemtt.zip" + +Write-Output "`n=> Verifying ..." +Start-Process -FilePath ..\hemtt.exe -ArgumentList --version -NoNewWindow -Wait + +Write-Output "`nTools successfully installed to project!" 
diff --git a/tools/setupEditorPreviewImages.py b/tools/setupEditorPreviewImages.py deleted file mode 100644 index b659ad89..00000000 --- a/tools/setupEditorPreviewImages.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 - -# requires python-resize-image (PIL) -# pip install python-resize-image - -import os -import sys -from resizeimage import resizeimage -from PIL import Image - -__version__ = 0.1 - -######## GLOBALS ######### -DIRECTORY = "C:\\Users\\andre\\OneDrive\\Dokument\\Arma 3 - Other Profiles\\Camera\\Screenshots\\EditorPreviews\\@BlackOrder" -SIZE = [455,256] -########################## - -# Sets up arguments -PARAMS = sys.argv -paramsList = ["-config"] -param_config = [paramsList[0],"creates and add each object to a config file"] - -def main(): - print(""" -Editor Preview Image Creator {} -This script resize and convert the Arma 3 Snapshots to correct size and format. -Created by: Andreas Brostrom | Evul - """.format(__version__)) - - if "-h" in PARAMS or "--help" in PARAMS: - print("usage: {}".format(sys.argv[0]),end=' ') - for p in paramsList: - print("[{}]".format(p),end=' ') - print("[-h, --help]\n") - print("optional arguments:\n -h, --help show this help message and exit") - print(" {} {}".format(param_config[0],param_config[1])) - sys.exit() - - os.chdir(DIRECTORY) - imageList = [f for f in os.listdir(DIRECTORY) if os.path.isfile(os.path.join(DIRECTORY, f)) and ".png" in f] - - - if len(imageList) >= 1: - print("Found {} images".format(len(imageList))) - else: - sys.exit("No image files could be found in directory:\n\"{}\"\n\nPlease check the path or consult the BIS guide for how to set up Eden Preview images:\nhttps://community.bistudio.com/wiki/Eden_Editor:_Configuring_Asset_Previews".format(DIRECTORY)) - print("Prepering resize") - - for pic in imageList: - print ('resizing "{}" to {}x{}'.format(pic,SIZE[0],SIZE[1])) - - with open(pic, 'r+b') as f: - with Image.open(pic) as image: - # resizing - cover = resizeimage.resize_cover(image, 
[SIZE[0],SIZE[1]]) - cover.save(pic, image.format) - # converting image - with Image.open(pic) as image: - print("Converting and saving {} [{}] to jpg".format(pic[:-4],cover.mode)) - cover = image.convert('RGB') - cover.save('{}.jpg'.format(pic[:-4]), quality=95) - print("{}.jpg [{}] is converted and saved".format(pic[:-4],cover.mode)) - - # Remove png - for file in imageList: - print("Removing {}".format(file)) - os.remove(file) - print("All files are replaced with jpg") - - # Create config - if "-config" in PARAMS: - print("Creating config file") - configFile = open('EditorPreview.hpp', 'w') - configFile.write('class CfgVehicles'+' {\n') - for file in imageList: - print("Writing and adding class \"{}\" to config".format(file[:-4])) - configFile.write(' class {}'.format(file[:-4])+' {\n') - configFile.write(' QPATHTOF(EditorPreviews\\{}.jpg);\n'.format(file[:-4])) - configFile.write(' };\n') - configFile.write('};\n') - configFile.close() - print("Config created") -if __name__ == "__main__": - sys.exit(main()) diff --git a/tools/sqf_linter.py b/tools/sqf_linter.py index 79df3919..c71c7209 100644 --- a/tools/sqf_linter.py +++ b/tools/sqf_linter.py @@ -1,66 +1,83 @@ #!/usr/bin/env python3 - # Requires: https://github.com/LordGolias/sqf -import fnmatch import os import sys import argparse +import concurrent.futures from sqf.parser import parse import sqf.analyzer from sqf.exceptions import SQFParserError +addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + + +def get_files_to_process(basePath): + arma_files = [] + for (root, _dirs, files) in os.walk(basePath): + for file in files: + if file.endswith(".sqf"): + if file.endswith(".inc.sqf"): + continue + filePath = os.path.join(root, file) + arma_files.append(filePath) + return arma_files -def analyze(filename, writer=sys.stdout): - warnings = 0 - errors = 0 - with open(filename, 'r') as file: - code = file.read() - try: - result = parse(code) - except SQFParserError as e: - 
print("{}:".format(filename)) - writer.write(' [%d,%d]:%s\n' % (e.position[0], e.position[1] - 1, e.message)) - return 0, 1 - exceptions = sqf.analyzer.analyze(result).exceptions +def process_file(filePath): + errors = [] + warnings = [] + try: + with open(filePath, "r", encoding="utf-8", errors="ignore") as file: + content = file.read() + if "#ASC_ignoreFile" in content: + return (filePath, errors, warnings) + sqfLintParse = parse(content) + exceptions = sqf.analyzer.analyze(sqfLintParse).exceptions if (exceptions): - print("{}:".format(filename)) for e in exceptions: - if (e.message.startswith("error")): - errors += 1 - else: - warnings += 1 - writer.write(' [%d,%d]:%s\n' % (e.position[0], e.position[1] - 1, e.message)) + if ("assigned to an outer scope" in e.message): + warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") + if ("is not from this scope" in e.message): + warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") + if ("not used" in e.message): + warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - return warnings, errors + # most of this is just noise about macro parsing: + # if (e.message.startswith("error")): + # errors.append(f"[{e.position[0]},{e.position[1]}] {e.message}") + # else: + # warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") + except Exception as e: + # errors.append(f"Exception {e}") + pass + return (filePath, errors, warnings) -def main(): - print("#########################") - print("# Lint Check #") - print("#########################") - sqf_list = [] - all_warnings = 0 - all_errors = 0 +def main(): parser = argparse.ArgumentParser() - parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default=".") + parser.add_argument('-m', '--module', help='only search specified module addon folder', required=False, default=".") args = parser.parse_args() - for root, dirnames, filenames in os.walk('../addons' + '/' + args.module): - for 
filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) - - for filename in sqf_list: - warnings, errors = analyze(filename) - all_warnings += warnings - all_errors += errors + error_count = 0 + addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + if (args.module): addon_base_path = os.path.join(addon_base_path, "addons", args.module) + arma_files = get_files_to_process(addon_base_path) + print(f"Checking {len(arma_files)} files from {addon_base_path}") + with concurrent.futures.ThreadPoolExecutor(max_workers=12) as executor: + for (filePath, errors, warnings) in executor.map(process_file, arma_files): + if errors or warnings: + error_count += 1 + print(f"{filePath}") + for e in errors: + print(f" {e}") + for e in warnings: + print(f" {e}") - print ("Parse Errors {0} - Warnings {1}".format(all_errors,all_warnings)) + print("Errors: {}".format(error_count)) + return error_count - # return (all_errors + all_warnings) - return all_errors if __name__ == "__main__": - main() + sys.exit(main()) diff --git a/tools/sqf_validator.py b/tools/sqf_validator.py index 6686004a..facdb114 100644 --- a/tools/sqf_validator.py +++ b/tools/sqf_validator.py @@ -149,6 +149,10 @@ def popClosing(): if pattern.match(content): print("ERROR: A found #include after block comment in file {0}".format(filepath)) bad_count_file += 1 + if ("functions" in filepath): + if (content.startswith("#include \"script_component.hpp\"")): + print(f"ERROR: Using old script_component.hpp in {filepath}") + bad_count_file += 1 @@ -165,14 +169,15 @@ def main(): parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="") args = parser.parse_args() - # Allow running from root directory as well as from inside the tools directory - rootDir = "../addons" - if (os.path.exists("addons")): - rootDir = "addons" + for folder in ['addons', 'optionals']: + # Allow running from root directory as well as 
from inside the tools directory + rootDir = "../" + folder + if (os.path.exists(folder)): + rootDir = folder - for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) + for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): + for filename in fnmatch.filter(filenames, '*.sqf'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: bad_count = bad_count + check_sqf_syntax(filename) diff --git a/tools/sqfvmChecker.py b/tools/sqfvmChecker.py new file mode 100644 index 00000000..69d75661 --- /dev/null +++ b/tools/sqfvmChecker.py @@ -0,0 +1,96 @@ +import os +import sys +import subprocess +import concurrent.futures +import tomllib + +addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + +sqfvm_exe = os.path.join(addon_base_path, "sqfvm.exe") +virtual_paths = [ + # would need to add more even more to /include to use it + "P:/a3|/a3", # "{}|/a3".format(os.path.join(addon_base_path, "include", "a3")), + "P:/a3|/A3", + "P:/x/cba|/x/cba", + "{}|/z/ace".format(addon_base_path), +] + + +def get_files_to_process(basePath): + arma_files = [] + for root, _dirs, files in os.walk(os.path.join(addon_base_path, "addons")): + for file in files: + if file.endswith(".sqf") or file == "config.cpp": + if file.endswith(".inc.sqf"): + continue + skipPreprocessing = False + for addonTomlPath in [os.path.join(root, "addon.toml"), os.path.join(os.path.dirname(root), "addon.toml")]: + if os.path.isfile(addonTomlPath): + with open(addonTomlPath, "rb") as f: + tomlFile = tomllib.load(f) + try: + skipPreprocessing = tomlFile.get('tools')['sqfvm_skipConfigChecks'] + except: + pass + if file == "config.cpp" and skipPreprocessing: + continue # ignore configs with __has_include + filePath = os.path.join(root, file) + arma_files.append(filePath) + return arma_files + + +def process_file(filePath, skipA3Warnings=True, 
skipPragmaHemtt=True): + with open(filePath, "r", encoding="utf-8", errors="ignore") as file: + content = file.read() + if content.startswith("//pragma SKIP_COMPILE"): + return False + cmd = [sqfvm_exe, "--input", filePath, "--parse-only", "--automated"] + for v in virtual_paths: + cmd.append("-v") + cmd.append(v) + # cmd.append("-V") + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True) + try: + ret = proc.wait(12) # max wait - seconds + except Exception as _e: + print("sqfvm timed out: {}".format(filePath)) + return True + # print("{} = {}".format(filePath, ret)) + + fileHasError = False + keepReadingLines = True + while keepReadingLines: + line = proc.stdout.readline() + if not line: + keepReadingLines = False + else: + line = line.rstrip() + if line.startswith("[ERR]"): + fileHasError = True + if not ( + (skipA3Warnings and line.startswith("[WRN]") and ("a3/" in line) and (("Unexpected IFDEF" in line) or ("defined twice" in line))) + or (skipPragmaHemtt and line.startswith("[WRN]") and ("Unknown pragma instruction 'hemtt'" in line)) + ): + print(" {}".format(line)) + return fileHasError + + +def main(): + if not os.path.isfile(sqfvm_exe): + print("Error: sqfvm.exe not found in base folder [{}]".format(sqfvm_exe)) + return 1 + + error_count = 0 + arma_files = get_files_to_process(addon_base_path) + print("Checking {} files".format(len(arma_files))) + with concurrent.futures.ThreadPoolExecutor(max_workers=12) as executor: + for fileError in executor.map(process_file, arma_files): + if fileError: + error_count += 1 + + print("Errors: {}".format(error_count)) + return error_count + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tools/stringtable_validator.py b/tools/stringtable_validator.py index d86bbd96..8cd00014 100644 --- a/tools/stringtable_validator.py +++ b/tools/stringtable_validator.py @@ -16,15 +16,15 @@ ######## GLOBALS ######### -PROJECT_NAME = "cav" +PROJECT_NAME = "CAV" ########################## def 
check_stringtable(filepath): try: tree = ET.parse(filepath) - except: - print(" ERROR: Failed to parse file.") + except Exception as e: + print(" ERROR: Failed to parse file. {}".format(e)) return 1 errors = 0 @@ -53,7 +53,8 @@ def check_stringtable(filepath): print(" ERROR: Package name attribute '{}' is all lowercase, should be in titlecase.".format(package_name)) errors += 1 - if package_name.lower() != os.path.basename(os.path.dirname(filepath)): + component_folder = os.path.basename(os.path.dirname(filepath)) + if package_name.lower() != component_folder: print(" ERROR: Package name attribute '{}' does not match the component folder name.".format(package_name)) errors += 1 @@ -115,6 +116,29 @@ def check_stringtable(filepath): print(" ERROR: Key '{}' is defined {} times.".format(id, count)) errors += 1 + # Check whitespace for tabs and correct number of indenting spaces + with open(filepath, "r", encoding = "utf-8") as file: + spacing_depth = 0 + + for line_number, line in enumerate(file, 1): + if "\t" in line: + print(" ERROR: Found a tab on line {}.".format(line_number)) + errors += 1 + + line_clean = line.lstrip().lower() + + if line_clean.startswith("