diff --git a/.github/workflows/build-and-publish.yml b/.github/workflows/build-and-publish.yml
new file mode 100644
index 0000000..568c863
--- /dev/null
+++ b/.github/workflows/build-and-publish.yml
@@ -0,0 +1,32 @@
+# This workflow will build a .NET project
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net
+
+name: Build Project and Publish
+on:
+ push:
+ tags:
+ - "[0-9]+.[0-9]+.[0-9]+"
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ submodules: 'recursive'
+
+ - name: "Setup Library SDKs & Components"
+ uses: X-Hax/SA3D.ProjectConfigurations/.github/actions/setup-sdks-components@main
+
+ - name: Build
+ run: dotnet build -c Release ./src
+
+ - name: "Upload Packages"
+ uses: X-Hax/SA3D.ProjectConfigurations/.github/actions/upload-packages@main
+ with:
+ nuget-key: ${{ secrets.NUGET_KEY }}
+ is-release: ${{ startsWith(github.ref, 'refs/tags/') }}
+ release-tag: ${{ github.ref_name }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..1483ff0
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,27 @@
+# This workflow will build a .NET project
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net
+
+name: Build Project
+
+on:
+ push:
+ branches: [ "dev", "main" ]
+ pull_request:
+ branches: [ "dev", "main" ]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ submodules: 'recursive'
+
+ - name: "Setup Library SDKs & Components"
+ uses: X-Hax/SA3D.ProjectConfigurations/.github/actions/setup-sdks-components@main
+
+ - name: Build
+ run: dotnet build -c Release ./src
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..8a30d25
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,398 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Ww][Ii][Nn]32/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# ASP.NET Scaffolding
+ScaffoldingReadMe.txt
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.tlog
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Coverlet is a free, cross platform Code Coverage Tool
+coverage*.json
+coverage*.xml
+coverage*.info
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio 6 auto-generated project file (contains which files were open etc.)
+*.vbp
+
+# Visual Studio 6 workspace and project file (working project files containing files to include in project)
+*.dsw
+*.dsp
+
+# Visual Studio 6 technical files
+*.ncb
+*.aps
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# Visual Studio History (VSHistory) files
+.vshistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+# Fody - auto-generated XML schema
+FodyWeavers.xsd
+
+# VS Code files for those working on multiple tools
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+*.code-workspace
+
+# Local History for Visual Studio Code
+.history/
+
+# Windows Installer files from build outputs
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# JetBrains Rider
+*.sln.iml
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..6ed3e9c
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "src/SA3D.ProjectConfigurations"]
+ path = src/SA3D.ProjectConfigurations
+ url = https://github.com/X-Hax/SA3D.ProjectConfigurations.git
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..6b0c013
--- /dev/null
+++ b/README.md
@@ -0,0 +1,27 @@
+# SA3D.Modeling
+A Sonic Adventure modeling library with support for all game related model formats. Also contains support for various other SEGA based games, although support is not guaranteed.
+
+## Contents
+| Namespace (SA3D.Modeling.*) | Description |
+|----------------------------- |-------------------------------------------------------------------------------------------------------------------------------------- |
+| File | Model data storage file handlers for select native- and X-Hax custom file-formats. |
+| Mesh | Library for handling, reading and writing mesh data. |
+| Mesh.Basic | Basic mesh data library. Used in SA1 (everything) and SA2 (collision geometry only) |
+| Mesh.Chunk | Chunk mesh data library. Used in SA2. |
+| Mesh.Gamecube | Gamecube-like mesh data library. Used in SA2B and its ports. |
+| Mesh.Buffer | SA3D internal mesh format. Used for conversion and rendering purposes. Is a simplified version of Chunk and mixes in Basic elements. |
+| Mesh.Weighted | SA3D internal mesh format. Used only for conversion purposes, as it is more in line with most modern mesh formats. |
+| ObjectData | Library for handling, reading and writing node and geometry container data. |
+| Animation | Library for handling, reading and writing animation data. |
+| Structs | Common structure code between all namespaces. |
+| Strippify | Triangle strip generating and handling code. |
+
+## Releasing
+!! Requires authorization via the X-Hax organisation
+
+1. Edit the version number in src/SA3D.Modeling/SA3D.Modeling.csproj; Example: `1.0.0` -> `2.0.0`
+2. Commit the change but don't push yet.
+3. Tag the commit: `git tag -a [version number] HEAD -m "Release version [version number]"`
+4. Push with tags: `git push --follow-tags`
+
+This will automatically start the Github `Build and Publish` workflow
\ No newline at end of file
diff --git a/src/.editorconfig b/src/.editorconfig
new file mode 120000
index 0000000..29c6ba7
--- /dev/null
+++ b/src/.editorconfig
@@ -0,0 +1 @@
+SA3D.ProjectConfigurations/.editorconfig
\ No newline at end of file
diff --git a/src/SA3D.Modeling.sln b/src/SA3D.Modeling.sln
new file mode 100644
index 0000000..f70b8d7
--- /dev/null
+++ b/src/SA3D.Modeling.sln
@@ -0,0 +1,25 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.7.34202.233
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SA3D.Modeling", "SA3D.Modeling\SA3D.Modeling.csproj", "{7AD0BC1C-070B-4CB3-B1D2-98EB451DBEC4}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {7AD0BC1C-070B-4CB3-B1D2-98EB451DBEC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7AD0BC1C-070B-4CB3-B1D2-98EB451DBEC4}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7AD0BC1C-070B-4CB3-B1D2-98EB451DBEC4}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7AD0BC1C-070B-4CB3-B1D2-98EB451DBEC4}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {3D099321-2535-4D95-9B92-FDCD041E8841}
+ EndGlobalSection
+EndGlobal
diff --git a/src/SA3D.Modeling/Animation/Enums.cs b/src/SA3D.Modeling/Animation/Enums.cs
new file mode 100644
index 0000000..d3c11a4
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Enums.cs
@@ -0,0 +1,130 @@
+using SA3D.Common;
+using System;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Depicts keyframe contents.
+ ///
+ [Flags]
+ public enum KeyframeAttributes : ushort
+ {
+ ///
+ /// Animation includes position keyframes.
+ ///
+ Position = Flag16.B0,
+
+ ///
+ /// Animation includes rotation (euler angles) keyframes.
+ ///
+ EulerRotation = Flag16.B1,
+
+ ///
+ /// Animation includes scale keyframes.
+ ///
+ Scale = Flag16.B2,
+
+ ///
+ /// Animation includes vector keyframes.
+ ///
+ Vector = Flag16.B3,
+
+ ///
+ /// Animation includes vertex keyframes.
+ ///
+ Vertex = Flag16.B4,
+
+ ///
+ /// Animation includes normal keyframes.
+ ///
+ Normal = Flag16.B5,
+
+ ///
+ /// Animation includes target keyframes.
+ ///
+ Target = Flag16.B6,
+
+ ///
+ /// Animation includes roll keyframes.
+ ///
+ Roll = Flag16.B7,
+
+ ///
+ /// Animation includes angle keyframes.
+ ///
+ Angle = Flag16.B8,
+
+ ///
+ /// Animation includes light color keyframes.
+ ///
+ LightColor = Flag16.B9,
+
+ ///
+ /// Animation includes intensity keyframes.
+ ///
+ Intensity = Flag16.B10,
+
+ ///
+ /// Animation includes spotlight keyframes.
+ ///
+ Spot = Flag16.B11,
+
+ ///
+ /// Animation includes point keyframes.
+ ///
+ Point = Flag16.B12,
+
+ ///
+ /// Animation includes rotation (quaternion) keyframes.
+ ///
+ QuaternionRotation = Flag16.B13
+ }
+
+ ///
+ /// Keyframe interpolation mode.
+ ///
+ public enum InterpolationMode
+ {
+ ///
+ /// Linear interpolation.
+ ///
+ Linear,
+
+ ///
+ /// Spline interpolation (?).
+ ///
+ Spline,
+
+ ///
+ /// User defined interpolation.
+ ///
+ User
+ }
+
+ ///
+ /// Animation enum extension methods.
+ ///
+ public static class EnumExtensions
+ {
+ ///
+ /// Counts the number of channels defined in animation attributes.
+ ///
+ /// Animation attributes to count.
+ /// The number of channel defined in animation attributes.
+ public static int ChannelCount(this KeyframeAttributes attributes)
+ {
+ int channels = 0;
+
+ ushort value = (ushort)attributes;
+ for(int i = 0; i < 14; i++, value >>= 1)
+ {
+ if((value & 1) != 0)
+ {
+ channels++;
+ }
+ }
+
+ return channels;
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Frame.cs b/src/SA3D.Modeling/Animation/Frame.cs
new file mode 100644
index 0000000..e672032
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Frame.cs
@@ -0,0 +1,87 @@
+using SA3D.Modeling.Structs;
+using System.Numerics;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Frame on timeline with interpolated values from a keyframe storage.
+ ///
+ public struct Frame
+ {
+ ///
+ /// Position on the timeline.
+ ///
+ public float FrameTime { get; set; }
+
+ ///
+ /// Position at the frame.
+ ///
+ public Vector3? Position { get; set; }
+
+ ///
+ /// Rotation (euler angles) at the frame.
+ ///
+ public Vector3? EulerRotation { get; set; }
+
+ ///
+ /// Scale at the frame.
+ ///
+ public Vector3? Scale { get; set; }
+
+ ///
+ /// Vector at the frame.
+ ///
+ public Vector3? Vector { get; set; }
+
+ ///
+ /// Vertex positions at the frame.
+ ///
+ public Vector3[]? Vertex { get; set; }
+
+ ///
+ /// Vertex normals at the frame.
+ ///
+ public Vector3[]? Normal { get; set; }
+
+ ///
+ /// Camera target position at the frame.
+ ///
+ public Vector3? Target { get; set; }
+
+ ///
+ /// Camera roll at the frame.
+ ///
+ public float? Roll { get; set; }
+
+ ///
+ /// Camera FOV at the frame.
+ ///
+ public float? Angle { get; set; }
+
+ ///
+ /// Light color at the frame.
+ ///
+ public Color? Color { get; set; }
+
+ ///
+ /// Light intensity at the frame.
+ ///
+ public float? Intensity { get; set; }
+
+ ///
+ /// Spotlight at the frame.
+ ///
+ public Spotlight? Spotlight { get; set; }
+
+ ///
+ /// Point light stuff at the frame.
+ ///
+ public Vector2? Point { get; set; }
+
+ ///
+ /// Rotation (quaternion) at the frame.
+ ///
+ public Quaternion? QuaternionRotation { get; set; }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Keyframes.cs b/src/SA3D.Modeling/Animation/Keyframes.cs
new file mode 100644
index 0000000..30d49de
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Keyframes.cs
@@ -0,0 +1,475 @@
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Animation.Utilities;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Keyframe storage for an animation.
+ ///
+ public class Keyframes
+ {
+ /// <summary>
+ /// Transform position keyframes.
+ /// </summary>
+ public SortedDictionary<uint, Vector3> Position { get; private set; }
+
+ /// <summary>
+ /// Transform rotation (euler angles) keyframes.
+ /// </summary>
+ public SortedDictionary<uint, Vector3> EulerRotation { get; private set; }
+
+ /// <summary>
+ /// Transform scale keyframes.
+ /// </summary>
+ public SortedDictionary<uint, Vector3> Scale { get; private set; }
+
+ /// <summary>
+ /// General vector3 keyframes.
+ /// </summary>
+ public SortedDictionary<uint, Vector3> Vector { get; private set; }
+
+ /// <summary>
+ /// Mesh vertex positions.
+ /// </summary>
+ public SortedDictionary<uint, ILabeledArray<Vector3>> Vertex { get; private set; }
+
+ /// <summary>
+ /// Mesh vertex normals.
+ /// </summary>
+ public SortedDictionary<uint, ILabeledArray<Vector3>> Normal { get; private set; }
+
+ /// <summary>
+ /// Camera lookat target.
+ /// </summary>
+ public SortedDictionary<uint, Vector3> Target { get; private set; }
+
+ /// <summary>
+ /// Camera Roll (euler angle).
+ /// </summary>
+ public SortedDictionary<uint, float> Roll { get; private set; }
+
+ /// <summary>
+ /// Camera field of view (radians).
+ /// </summary>
+ public SortedDictionary<uint, float> Angle { get; private set; }
+
+ /// <summary>
+ /// Light Color.
+ /// </summary>
+ public SortedDictionary<uint, Color> LightColor { get; private set; }
+
+ /// <summary>
+ /// Light intensity.
+ /// </summary>
+ public SortedDictionary<uint, float> Intensity { get; private set; }
+
+ /// <summary>
+ /// Spotlights.
+ /// </summary>
+ public SortedDictionary<uint, Spotlight> Spot { get; private set; }
+
+ /// <summary>
+ /// Point light positions.
+ /// </summary>
+ public SortedDictionary<uint, Vector2> Point { get; private set; }
+
+ /// <summary>
+ /// Rotation (quaternion) keyframes.
+ /// </summary>
+ public SortedDictionary<uint, Quaternion> QuaternionRotation { get; private set; }
+
+ ///
+ /// Whether any keyframes exist in this keyframe set
+ ///
+ public bool HasKeyframes
+ => GetKeyEnumerable().Any(x => x.Any());
+
+ ///
+ /// Returns the number of keyframes in the biggest keyframe dictionary.
+ ///
+ public uint KeyframeCount
+ {
+ get
+ {
+ bool hasKeys = false;
+ uint maxKey = 0;
+ foreach(IEnumerable<uint> keys in GetKeyEnumerable())
+ {
+ if(!keys.Any())
+ {
+ continue;
+ }
+
+ hasKeys = true;
+ maxKey = uint.Max(maxKey, keys.Last());
+ }
+
+ if(hasKeys)
+ {
+ return maxKey + 1;
+ }
+ else
+ {
+ return 0;
+ }
+ }
+ }
+
+ ///
+ /// Channels that contain keyframes.
+ ///
+ public KeyframeAttributes Type
+ {
+ get
+ {
+ KeyframeAttributes attribs = 0;
+
+ foreach((KeyframeAttributes type, IEnumerable<uint> keys) in GetTypeKeyEnumerable())
+ {
+ if(keys.Any())
+ {
+ attribs |= type;
+ }
+ }
+
+ return attribs;
+ }
+ }
+
+
+ ///
+ /// Creates an empty keyframe storage
+ ///
+ public Keyframes()
+ {
+ Position = new();
+ EulerRotation = new();
+ Scale = new();
+ Vector = new();
+ Vertex = new();
+ Normal = new();
+ Target = new();
+ Roll = new();
+ Angle = new();
+ LightColor = new();
+ Intensity = new();
+ Spot = new();
+ Point = new();
+ QuaternionRotation = new();
+ }
+
+
+ private IEnumerable<IEnumerable<uint>> GetKeyEnumerable()
+ {
+ yield return Position.Keys;
+ yield return EulerRotation.Keys;
+ yield return Scale.Keys;
+ yield return Vector.Keys;
+ yield return Vertex.Keys;
+ yield return Normal.Keys;
+ yield return Target.Keys;
+ yield return Roll.Keys;
+ yield return Angle.Keys;
+ yield return LightColor.Keys;
+ yield return Intensity.Keys;
+ yield return Spot.Keys;
+ yield return Point.Keys;
+ yield return QuaternionRotation.Keys;
+ }
+
+ private IEnumerable<(KeyframeAttributes type, IEnumerable<uint> keys)> GetTypeKeyEnumerable()
+ {
+ uint current = 1;
+ foreach(IEnumerable<uint> keys in GetKeyEnumerable())
+ {
+ yield return ((KeyframeAttributes)current, keys);
+ current <<= 1;
+ }
+ }
+
+ ///
+ /// Returns a all values at a specific frame
+ ///
+ /// Frame to get the values of
+ ///
+ public Frame GetFrameAt(float frame)
+ {
+ return new()
+ {
+ FrameTime = frame,
+ Position = Position.ValueAtFrame(frame),
+ EulerRotation = EulerRotation.ValueAtFrame(frame),
+ Scale = Scale.ValueAtFrame(frame),
+ Vector = Vector.ValueAtFrame(frame),
+ Vertex = Vertex.ValueAtFrame(frame),
+ Normal = Normal.ValueAtFrame(frame),
+ Target = Target.ValueAtFrame(frame),
+ Roll = Roll.ValueAtFrame(frame),
+ Angle = Angle.ValueAtFrame(frame),
+ Color = LightColor.ValueAtFrame(frame),
+ Intensity = Intensity.ValueAtFrame(frame),
+ Spotlight = Spot.ValueAtFrame(frame),
+ Point = Point.ValueAtFrame(frame),
+ QuaternionRotation = QuaternionRotation.ValueAtFrame(frame),
+ };
+ }
+
+ ///
+ /// Optimizes the keyframes.
+ ///
+ ///
+ /// Difference threshold to use between quaternion keyframes.
+ /// Difference threshold to use between colors.
+ /// Compare angle keyframes as degrees and not as radians.
+ /// Frame from which to start optimizing. uses default.
+ /// Frame at which to end optimizing. uses default.
+ public void Optimize(
+ float generalThreshold,
+ float quaternionThreshold,
+ float colorThreshold,
+ bool asDegrees,
+ uint? start = null,
+ uint? end = null)
+ {
+ Position.OptimizeVector3(generalThreshold, start, end);
+
+ if(asDegrees)
+ {
+ EulerRotation.OptimizeVector3Degrees(generalThreshold, start, end);
+ Roll.OptimizeFloat(generalThreshold, start, end);
+ Angle.OptimizeFloat(generalThreshold, start, end);
+ }
+ else
+ {
+ EulerRotation.OptimizeVector3(generalThreshold, start, end);
+ Roll.OptimizeFloat(generalThreshold, start, end);
+ Angle.OptimizeFloat(generalThreshold, start, end);
+ }
+
+ Scale.OptimizeVector3(generalThreshold, start, end);
+ Vector.OptimizeVector3(generalThreshold, start, end);
+ Target.OptimizeVector3(generalThreshold, start, end);
+ LightColor.OptimizeColor(colorThreshold, start, end);
+ Intensity.OptimizeFloat(generalThreshold, start, end);
+ Spot.OptimizeSpotlight(generalThreshold, start, end);
+ Point.OptimizeVector2(generalThreshold, start, end);
+ QuaternionRotation.OptimizeQuaternion(quaternionThreshold, start, end);
+ }
+
+ ///
+ /// Ensures that specified node transform properties have start- and end-frames.
+ ///
+ /// The node for which to ensure frames. If no keyframes exist, then they will be added with the values from this node.
+ /// Keyframe types to target.
+ /// The frame until which keyframes need to exist.
+ public void EnsureNodeKeyframes(Node node, KeyframeAttributes targets, uint endFrame)
+ {
+ void Ensure<T>(SortedDictionary<uint, T> keyframes, KeyframeAttributes type, T value)
+ {
+ if(!targets.HasFlag(type))
+ {
+ return;
+ }
+
+ if(!keyframes.ContainsKey(0))
+ {
+ keyframes.Add(0, value);
+ }
+
+ if(keyframes.Keys.Max() < endFrame)
+ {
+ keyframes.Add(endFrame, value);
+ }
+ }
+
+ Ensure(Position, KeyframeAttributes.Position, node.Position);
+ Ensure(EulerRotation, KeyframeAttributes.EulerRotation, node.EulerRotation);
+ Ensure(QuaternionRotation, KeyframeAttributes.QuaternionRotation, node.QuaternionRotation);
+ Ensure(Scale, KeyframeAttributes.Scale, node.Scale);
+ }
+
+
+ ///
+ /// Writes the keyframe set to an endian stack writer.
+ ///
+ /// The writer to write to.
+ /// Which channels should be written
+ /// Pointer references to utilize.
+ /// Whether to write euler rotations 16-bit instead of 32-bit.
+ public (uint address, uint count)[] Write(EndianStackWriter writer, KeyframeAttributes writeAttributes, PointerLUT lut, bool shortRot = false)
+ {
+ int channels = writeAttributes.ChannelCount();
+ (uint address, uint count)[] keyframeLocs = new (uint address, uint count)[channels];
+ int channelIndex = -1;
+
+ foreach((KeyframeAttributes type, IEnumerable<uint> keys) in GetTypeKeyEnumerable())
+ {
+ if(!writeAttributes.HasFlag(type))
+ {
+ continue;
+ }
+
+ channelIndex++;
+
+ int count = keys.Count();
+ if(count == 0)
+ {
+ continue;
+ }
+
+ uint[]? arrayData = null;
+ if(type == KeyframeAttributes.Vertex)
+ {
+ arrayData = writer.WriteVector3ArrayData(Vertex, lut);
+ }
+ else if(type == KeyframeAttributes.Normal)
+ {
+ arrayData = writer.WriteVector3ArrayData(Normal, lut);
+ }
+
+ keyframeLocs[channelIndex] = (writer.PointerPosition, (uint)count);
+
+ switch(type)
+ {
+ case KeyframeAttributes.Position:
+ writer.WriteVector3Set(Position, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.EulerRotation:
+ writer.WriteVector3Set(EulerRotation, shortRot ? FloatIOType.BAMS16 : FloatIOType.BAMS32);
+ break;
+ case KeyframeAttributes.Scale:
+ writer.WriteVector3Set(Scale, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Vector:
+ writer.WriteVector3Set(Vector, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Vertex:
+ case KeyframeAttributes.Normal:
+ writer.WriteVector3ArraySet(arrayData!);
+ break;
+ case KeyframeAttributes.Target:
+ writer.WriteVector3Set(Target, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Roll:
+ writer.WriteFloatSet(Roll, true);
+ break;
+ case KeyframeAttributes.Angle:
+ writer.WriteFloatSet(Angle, true);
+ break;
+ case KeyframeAttributes.LightColor:
+ writer.WriteColorSet(LightColor, ColorIOType.ARGB8_32);
+ break;
+ case KeyframeAttributes.Intensity:
+ writer.WriteFloatSet(Intensity, false);
+ break;
+ case KeyframeAttributes.Spot:
+ writer.WriteSpotlightSet(Spot);
+ break;
+ case KeyframeAttributes.Point:
+ writer.WriteVector2Set(Point, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.QuaternionRotation:
+ writer.WriteQuaternionSet(QuaternionRotation);
+ break;
+ default:
+ break;
+ }
+ }
+
+ return keyframeLocs;
+ }
+
+ ///
+ /// Reads a set of keyframes off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// Channels that the keyframes contain.
+ /// Pointer references to utilize.
+ /// Whether to write euler rotations 16-bit instead of 32-bit.
+ /// The keyframes that were read.
+ public static Keyframes Read(EndianStackReader reader, ref uint address, KeyframeAttributes type, PointerLUT lut, bool shortRot = false)
+ {
+ int channelCount = type.ChannelCount();
+ uint keyframePointerArray = address;
+ uint keyframeCountArray = (uint)(address + (4 * channelCount));
+
+ Keyframes result = new();
+
+ foreach(KeyframeAttributes flag in Enum.GetValues<KeyframeAttributes>())
+ {
+ if(!type.HasFlag(flag))
+ {
+ continue;
+ }
+
+ if(reader.TryReadPointer(keyframePointerArray, out uint setAddress))
+ {
+ uint frameCount = reader.ReadUInt(keyframeCountArray);
+ switch(flag)
+ {
+ case KeyframeAttributes.Position:
+ reader.ReadVector3Set(setAddress, frameCount, result.Position, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.EulerRotation:
+ reader.ReadVector3Set(setAddress, frameCount, result.EulerRotation, shortRot ? FloatIOType.BAMS16 : FloatIOType.BAMS32);
+ break;
+ case KeyframeAttributes.Scale:
+ reader.ReadVector3Set(setAddress, frameCount, result.Scale, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Vector:
+ reader.ReadVector3Set(setAddress, frameCount, result.Vector, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Vertex:
+ reader.ReadVector3ArraySet(setAddress, frameCount, "vertex_", result.Vertex, lut);
+ break;
+ case KeyframeAttributes.Normal:
+ reader.ReadVector3ArraySet(setAddress, frameCount, "normal_", result.Normal, lut);
+ break;
+ case KeyframeAttributes.Target:
+ reader.ReadVector3Set(setAddress, frameCount, result.Target, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.Roll:
+ reader.ReadFloatSet(setAddress, frameCount, result.Roll, true);
+ break;
+ case KeyframeAttributes.Angle:
+ reader.ReadFloatSet(setAddress, frameCount, result.Angle, true);
+ break;
+ case KeyframeAttributes.LightColor:
+ reader.ReadColorSet(setAddress, frameCount, result.LightColor, ColorIOType.ARGB8_32);
+ break;
+ case KeyframeAttributes.Intensity:
+ reader.ReadFloatSet(setAddress, frameCount, result.Intensity, false);
+ break;
+ case KeyframeAttributes.Spot:
+ reader.ReadSpotSet(setAddress, frameCount, result.Spot);
+ break;
+ case KeyframeAttributes.Point:
+ reader.ReadVector2Set(setAddress, frameCount, result.Point, FloatIOType.Float);
+ break;
+ case KeyframeAttributes.QuaternionRotation:
+ reader.ReadQuaternionSet(setAddress, frameCount, result.QuaternionRotation);
+ break;
+ default:
+ break;
+ }
+ }
+
+ keyframePointerArray += 4;
+ keyframeCountArray += 4;
+ }
+
+ address = keyframeCountArray;
+
+ return result;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/src/SA3D.Modeling/Animation/LandentryMotion.cs b/src/SA3D.Modeling/Animation/LandentryMotion.cs
new file mode 100644
index 0000000..095944d
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/LandentryMotion.cs
@@ -0,0 +1,160 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.Structs;
+using System;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Level geometry animation (only used in sa1)
+ ///
+ public class LandEntryMotion
+ {
+ ///
+ /// Size of the structure in bytes.
+ ///
+ public static uint StructSize => 24;
+
+ ///
+ /// First keyframe / Keyframe to start the animation at.
+ ///
+ public float Frame { get; set; }
+
+ ///
+ /// Keyframes traversed per frame-update / Animation Speed.
+ ///
+ public float Step { get; set; }
+
+ ///
+ /// Last keyframe / Length of the animation.
+ ///
+ public float MaxFrame { get; set; }
+
+ ///
+ /// Model that is being animated.
+ ///
+ public Node Model { get; set; }
+
+ ///
+ /// The corresponding node motion pair.
+ ///
+ public NodeMotion NodeMotion { get; set; }
+
+ ///
+ /// Texture list address to use.
+ ///
+ public uint TextureListPointer { get; set; }
+
+
+ ///
+ /// Creates a new geometry animation
+ ///
+ /// First keyframe / Keyframe to start the animation at.
+ /// Keyframes traversed per frame-update / Animation Speed.
+ /// Last keyframe / Length of the animation.
+ /// Model that is being animated.
+ /// Animation to play.
+ /// Texture list address to use.
+ public LandEntryMotion(float frame, float step, float maxFrame, Node model, Motion motion, uint textureListPointer)
+ : this(frame, step, maxFrame, model, new NodeMotion(model, motion), textureListPointer) { }
+
+ ///
+ /// Creates a new geometry animation
+ ///
+ /// First keyframe / Keyframe to start the animation at.
+ /// Keyframes traversed per frame-update / Animation Speed.
+ /// Last keyframe / Length of the animation.
+ /// Model and animation to use.
+ /// Texture list address to use.
+ public LandEntryMotion(float frame, float step, float maxFrame, NodeMotion nodeMotion, uint textureListPointer)
+ : this(frame, step, maxFrame, nodeMotion.Model, nodeMotion, textureListPointer) { }
+
+ ///
+ /// Creates a new geometry animation
+ ///
+ /// First keyframe / Keyframe to start the animation at.
+ /// Keyframes traversed per frame-update / Animation Speed.
+ /// Last keyframe / Length of the animation.
+ /// Model that is being animated.
+ /// Model and animation to use.
+ /// Texture list address to use.
+ public LandEntryMotion(float frame, float step, float maxFrame, Node model, NodeMotion nodeMotion, uint textureListPointer)
+ {
+ Frame = frame;
+ Step = step;
+ MaxFrame = maxFrame;
+ Model = model;
+ NodeMotion = nodeMotion;
+ TextureListPointer = textureListPointer;
+ }
+
+
+
+ ///
+ /// Reads a geometry animation from a byte array
+ ///
+ /// Byte source
+ /// Address at which the geometry animation is located
+ /// Attach format
+ ///
+ ///
+ public static LandEntryMotion Read(EndianStackReader data, uint address, ModelFormat format, PointerLUT lut)
+ {
+ float frame = data.ReadFloat(address);
+ float step = data.ReadFloat(address + 4);
+ float maxFrame = data.ReadFloat(address + 8);
+
+ uint modelAddress = data.ReadPointer(address + 0xC);
+ Node model = Node.Read(data, modelAddress, format, lut);
+
+ uint motionAddress = data.ReadPointer(address + 0x10);
+ NodeMotion action = NodeMotion.Read(data, motionAddress, format, lut);
+
+ uint texListPtr = data.ReadUInt(address + 0x14);
+
+ return new LandEntryMotion(frame, step, maxFrame, model, action, texListPtr);
+ }
+
+ ///
+ /// Write the model and animation data to an endian stack writer.
+ ///
+ /// The writer to write to.
+ /// The format to write the model data in.
+ /// Pointer references to utilize.
+ public void WriteData(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
+ {
+ Model.Write(writer, format, lut);
+ NodeMotion.Write(writer, format, lut);
+ }
+
+ ///
+ /// Writes the landentry motion structure to an endian stack writer.
+ ///
+ ///
+ /// Requires the data to be written before via
+ ///
+ /// The writer to write to.
+ /// Pointer references to utilize.
+ ///
+ public void Write(EndianStackWriter writer, PointerLUT lut)
+ {
+ if(!lut.Nodes.TryGetAddress(Model, out uint mdlAddress))
+ {
+ throw new NullReferenceException($"Model \"{Model.Label}\" has not been written yet / cannot be found in the pointer LUT!");
+ }
+
+ if(!lut.NodeMotions.TryGetAddress(NodeMotion, out uint actionAddress))
+ {
+ throw new NullReferenceException($"Nodemotion has not been written yet / cannot be found in the pointer LUT!");
+ }
+
+ writer.WriteFloat(Frame);
+ writer.WriteFloat(Step);
+ writer.WriteFloat(MaxFrame);
+ writer.WriteUInt(mdlAddress);
+ writer.WriteUInt(actionAddress);
+ writer.WriteUInt(TextureListPointer);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Motion.cs b/src/SA3D.Modeling/Animation/Motion.cs
new file mode 100644
index 0000000..5101a89
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Motion.cs
@@ -0,0 +1,292 @@
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.Structs;
+using System.Collections.Generic;
+using System.Linq;
+using static SA3D.Common.StringExtensions;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Animation data for various targets.
+ ///
+ public class Motion : ILabel
+ {
+ ///
+ /// Size of the motion struct in bytes.
+ ///
+ public const uint StructSize = 16;
+
+ ///
+ public string Label { get; set; }
+
+ ///
+ /// Number of models (nodes) that this animation targets.
+ ///
+ public uint ModelCount { get; set; }
+
+ ///
+ /// Interpolation mode between keyframes.
+ ///
+ public InterpolationMode InterpolationMode { get; set; }
+
+ ///
+ /// Whether to use 16-bit for euler rotation BAMS values.
+ ///
+ public bool ShortRot { get; set; }
+
+ ///
+ /// Keyframes based on their model id
+ ///
+ public Dictionary Keyframes { get; }
+
+ ///
+ /// Types of keyframe stored in this animation.
+ ///
+ public KeyframeAttributes KeyframeTypes
+ {
+ get
+ {
+ KeyframeAttributes type = 0;
+ foreach(Keyframes kf in Keyframes.Values)
+ {
+ type |= kf.Type;
+ }
+
+ return type | ManualKeyframeTypes;
+ }
+ }
+
+ ///
+ /// Manually enforced keyframe types.
+ ///
+ public KeyframeAttributes ManualKeyframeTypes { get; set; }
+
+
+ ///
+ /// Whether the motion transforms nodes.
+ ///
+ public bool IsNodeMotion
+ => !IsShapeMotion && !IsCameraMotion && !IsSpotLightMotion && !IsLightMotion;
+
+ ///
+ /// Whether the motion alters vertex positions and/or normals of meshes.
+ ///
+ public bool IsShapeMotion
+ => HasAnyAttributes(KeyframeAttributes.Vertex | KeyframeAttributes.Normal);
+
+ ///
+ /// Whether the motion transforms a camera.
+ ///
+ public bool IsCameraMotion
+ => HasAnyAttributes(KeyframeAttributes.Angle | KeyframeAttributes.Roll | KeyframeAttributes.Target);
+
+ ///
+ /// Whether the motion targets a spotlight
+ ///
+ public bool IsSpotLightMotion
+ => HasAnyAttributes(KeyframeAttributes.Spot);
+
+ ///
+ /// Whether the motion targets lights
+ ///
+ public bool IsLightMotion
+ => HasAnyAttributes(KeyframeAttributes.Intensity | KeyframeAttributes.LightColor | KeyframeAttributes.Vector);
+
+
+ ///
+ /// Creates a new empty motion.
+ ///
+ public Motion()
+ {
+ Label = "animation_" + GenerateIdentifier();
+ Keyframes = new();
+ }
+
+
+ private bool HasAnyAttributes(KeyframeAttributes attributes)
+ {
+ return (KeyframeTypes & attributes) != 0;
+ }
+
+ ///
+ /// Returns the number of frames in this motion.
+ ///
+ ///
+ public uint GetFrameCount()
+ {
+ uint result = 0;
+ foreach(Keyframes k in Keyframes.Values)
+ {
+ result = uint.Max(result, k.KeyframeCount);
+ }
+
+ return result;
+ }
+
+ ///
+ /// Optimizes all keyframes across the motion.
+ ///
+ ///
+ /// Difference threshold to use between quaternion keyframes.
+ /// Difference threshold to use between colors.
+ /// Compare angle keyframes as degrees and not as radians.
+ /// Frame from which to start optimizing. uses default.
+ /// Frame at which to end optimizing. uses default.
+ public void Optimize(
+ float generalThreshold,
+ float quaternionThreshold,
+ float colorThreshold,
+ bool asDegrees,
+ uint? start = null,
+ uint? end = null)
+ {
+ foreach(Keyframes keyframes in Keyframes.Values)
+ {
+ keyframes.Optimize(generalThreshold, quaternionThreshold, colorThreshold, asDegrees, start, end);
+ }
+ }
+
+ ///
+ /// Ensures that the transform properties of all nodes in a model tree have start- and end-frames.
+ ///
+ /// Any node from a tree for which keyframes should be ensured.
+ /// Keyframe types to target.
+ /// If enabled, new keyframe sets will be created for any node that does not have any yet. Otherwise, only preexisting keyframe sets will be ensured to have start and end.
+ public void EnsureNodeKeyframes(Node model, KeyframeAttributes targetTypes, bool createKeyframes)
+ {
+ if(default == (targetTypes & (
+ KeyframeAttributes.Position
+ | KeyframeAttributes.EulerRotation
+ | KeyframeAttributes.QuaternionRotation
+ | KeyframeAttributes.Scale)))
+ {
+ return;
+ }
+
+ uint maxFrame = GetFrameCount() - 1;
+
+ int i = 0;
+ foreach(Node node in model.GetAnimTreeNodes())
+ {
+ if(!Keyframes.TryGetValue(i, out Keyframes? keyframes))
+ {
+ if(!createKeyframes)
+ {
+ continue;
+ }
+
+ keyframes = new();
+ Keyframes.Add(i, keyframes);
+ }
+
+ keyframes.EnsureNodeKeyframes(node, targetTypes, maxFrame);
+
+ i++;
+ }
+ }
+
+
+ ///
+ /// Writes the motion to an endian stack writer.
+ ///
+ /// The writer to write to.
+ /// Pointer references to utilize.
+ public uint Write(EndianStackWriter writer, PointerLUT lut)
+ {
+ uint onWrite()
+ {
+ KeyframeAttributes type = KeyframeTypes;
+ int channels = type.ChannelCount();
+
+ uint keyframeCount = uint.Max(ModelCount, (uint)Keyframes.Keys.Max() + 1u);
+
+ (uint address, uint count)[][] keyFrameLocations = new (uint addr, uint count)[keyframeCount][];
+
+ for(int i = 0; i < keyframeCount; i++)
+ {
+ keyFrameLocations[i] = !Keyframes.ContainsKey(i)
+ ? new (uint, uint)[channels]
+ : Keyframes[i].Write(writer, type, lut, ShortRot);
+ }
+
+ uint keyframesAddr = writer.PointerPosition;
+
+ foreach((uint addr, uint count)[] kf in keyFrameLocations)
+ {
+ for(int i = 0; i < kf.Length; i++)
+ {
+ writer.WriteUInt(kf[i].addr);
+ }
+
+ for(int i = 0; i < kf.Length; i++)
+ {
+ writer.WriteUInt(kf[i].count);
+ }
+ }
+
+ uint result = writer.PointerPosition;
+
+ writer.WriteUInt(keyframesAddr);
+ writer.WriteUInt(GetFrameCount());
+ writer.WriteUShort((ushort)type);
+ writer.WriteUShort((ushort)((channels & 0xF) | ((int)InterpolationMode << 6)));
+
+ return result;
+
+ }
+
+ return lut.GetAddAddress(this, onWrite);
+ }
+
+ ///
+ /// Reads a motion off an endian stack reader.
+ ///
+ /// Byte source
+ /// Address at which to start reading.
+ /// Number of nodes in the tree of the targeted model.
+ /// Pointer references to utilize.
+ /// Whether euler rotations are stored in 16-bit instead of 32-bit.
+ /// The motion that was read
+ public static Motion Read(EndianStackReader reader, uint address, uint modelCount, PointerLUT lut, bool shortRot = false)
+ {
+ Motion onRead()
+ {
+ uint keyframeAddr = reader.ReadPointer(address);
+ // offset 4 is frame count. We don't need to read that.
+ KeyframeAttributes keyframeType = (KeyframeAttributes)reader.ReadUShort(address + 8);
+
+ ushort tmp = reader.ReadUShort(address + 10);
+ InterpolationMode mode = (InterpolationMode)((tmp >> 6) & 0x3);
+ int channels = tmp & 0xF;
+
+ Motion result = new()
+ {
+ InterpolationMode = mode,
+ ModelCount = modelCount,
+ ShortRot = shortRot,
+ ManualKeyframeTypes = keyframeType
+ };
+
+ for(int i = 0; i < modelCount; i++)
+ {
+ Keyframes kf = Animation.Keyframes.Read(reader, ref keyframeAddr, keyframeType, lut, shortRot);
+ result.Keyframes.Add(i, kf);
+ }
+
+ return result;
+
+ }
+
+ return lut.GetAddLabeledValue(address, "animation_", onRead);
+ }
+
+
+ ///
+ public override string ToString()
+ {
+ return $"{Label} : {ModelCount} - {Keyframes.Count}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/NodeMotion.cs b/src/SA3D.Modeling/Animation/NodeMotion.cs
new file mode 100644
index 0000000..60e7101
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/NodeMotion.cs
@@ -0,0 +1,89 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.Structs;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Pairs a node and motion together.
+ ///
+ public class NodeMotion : ILabel
+ {
+ ///
+ public string Label { get; set; }
+
+ ///
+ /// Assigned node.
+ ///
+ public Node Model { get; set; }
+
+ ///
+ /// Assigned motion.
+ ///
+ public Motion Animation { get; set; }
+
+
+ ///
+ /// Creates a new node motion.
+ ///
+ /// The model of the pair.
+ /// The animation of the pair.
+ public NodeMotion(Node model, Motion animation)
+ {
+ Label = "action_" + StringExtensions.GenerateIdentifier();
+ Model = model;
+ Animation = animation;
+ }
+
+
+ ///
+ /// Writes the node motion and its contents to an endian stack writer.
+ ///
+ /// The writer to write to.
+ /// The format in which the model should be written.
+ /// Pointer references to utilize.
+ /// Address at which the node motion was written.
+ public uint Write(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
+ {
+ uint OnWrite(NodeMotion nodeMotion)
+ {
+ uint nodeAddress = Model.Write(writer, format, lut);
+ uint motionAddress = Animation.Write(writer, lut);
+
+ uint result = writer.PointerPosition;
+
+ writer.WriteUInt(nodeAddress);
+ writer.WriteUInt(motionAddress);
+
+ return result;
+ }
+
+ return lut.GetAddAddress(this, OnWrite);
+ }
+
+ ///
+ /// Reads a NodeMotion off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The format that the node should be read in.
+ /// Pointer references to utilize.
+ /// The node motion pair that was read
+ public static NodeMotion Read(EndianStackReader reader, uint address, ModelFormat format, PointerLUT lut)
+ {
+ NodeMotion onRead()
+ {
+ Node mdl = Node.Read(reader, reader.ReadPointer(address), format, lut);
+ Motion mtn = Motion.Read(reader, reader.ReadPointer(address + 4), (uint)mdl.GetTreeNodeCount(), lut);
+
+ return new NodeMotion(mdl, mtn);
+ }
+
+ return lut.GetAddLabeledValue(address, "action_", onRead);
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Spotlight.cs b/src/SA3D.Modeling/Animation/Spotlight.cs
new file mode 100644
index 0000000..9506e72
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Spotlight.cs
@@ -0,0 +1,101 @@
+using SA3D.Common.IO;
+using System;
+using static SA3D.Common.MathHelper;
+
+namespace SA3D.Modeling.Animation
+{
+ ///
+ /// Spotlight for cutscenes.
+ ///
+ public struct Spotlight
+ {
+ ///
+ /// Size of the spotlight struct.
+ ///
+ public static uint StructSize => 16;
+
+ ///
+ /// Closest light distance.
+ ///
+ public float near;
+
+ ///
+ /// Furthest light distance.
+ ///
+ public float far;
+
+ ///
+ /// Inner cone angle.
+ ///
+ public float insideAngle;
+
+ ///
+ /// Outer cone angle.
+ ///
+ public float outsideAngle;
+
+ ///
+ /// Linearly interpolate between two spotlights.
+ ///
+ /// Spotlight from which to start interpolating.
+ /// Spotlight to which to interpolate.
+ /// Value by which to interpolate
+ /// The interpolated spotlight.
+ public static Spotlight Lerp(Spotlight from, Spotlight to, float time)
+ {
+ float inverse = 1 - time;
+ return new Spotlight()
+ {
+ near = (to.near * time) + (from.near * inverse),
+ far = (to.far * time) + (from.far * inverse),
+ insideAngle = (to.insideAngle * time) + (from.insideAngle * inverse),
+ outsideAngle = (to.outsideAngle * time) + (from.outsideAngle * inverse),
+ };
+ }
+
+ ///
+ /// Calculates the distance between two spotlight values (handled like a Vector4).
+ ///
+ /// First spotlight.
+ /// Second spotlight.
+ /// The distance
+ public static float Distance(Spotlight from, Spotlight to)
+ {
+ return MathF.Sqrt(
+ MathF.Pow(from.near - to.near, 2) +
+ MathF.Pow(from.far - to.far, 2) +
+ MathF.Pow(from.insideAngle - to.insideAngle, 2) +
+ MathF.Pow(from.outsideAngle - to.outsideAngle, 2)
+ );
+ }
+
+ ///
+ /// Reads a spotlight off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The spotlight that was read.
+ public static Spotlight Read(EndianStackReader reader, uint address)
+ {
+ return new Spotlight()
+ {
+ near = reader.ReadFloat(address),
+ far = reader.ReadFloat(address + 4),
+ insideAngle = BAMSToRad(reader.ReadInt(address + 8)),
+ outsideAngle = BAMSToRad(reader.ReadInt(address + 12))
+ };
+ }
+
+ ///
+ /// Writes the spotlight to an endian stack writer.
+ ///
+ /// The writer to write to.
+ public readonly void Write(EndianStackWriter writer)
+ {
+ writer.WriteFloat(near);
+ writer.WriteFloat(far);
+ writer.WriteInt(RadToBAMS(insideAngle));
+ writer.WriteInt(RadToBAMS(outsideAngle));
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Utilities/KeyframeInterpolate.cs b/src/SA3D.Modeling/Animation/Utilities/KeyframeInterpolate.cs
new file mode 100644
index 0000000..7a0f1ff
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Utilities/KeyframeInterpolate.cs
@@ -0,0 +1,225 @@
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Animation.Utilities
+{
+ ///
+ /// Keyframe interpolation methods
+ ///
+ internal static class KeyframeInterpolate
+ {
+ ///
+ /// Searches through a keyframe dictionary and returns the interpolation between the values last and next.
+ /// If the returned float is 0, then next will be default (as it's not used)
+ ///
+ /// Type of the Keyframe values
+ /// Keyframes to iterate through
+ /// Current frame to get
+ ///
+ /// Last Keyframe before given frame
+ /// Next Keyframe after given frame
+ ///
+ private static bool GetNearestFrames(SortedDictionary keyframes, float timestamp, out float interpolation, out T before, [MaybeNullWhen(false)] out T next)
+ {
+ if(timestamp < 0)
+ {
+ timestamp = 0;
+ }
+
+ // if there is only one frame, we can take that one
+ next = default;
+ interpolation = 0;
+
+ if(keyframes.Count == 1)
+ {
+ foreach(T val in keyframes.Values) // faster than converting to an array and accessing the first index
+ {
+ before = val;
+ return false;
+ }
+ }
+
+ // if the given frame is spot on and exists, then we can use it
+ uint baseFrame = (uint)Math.Floor(timestamp);
+ if(timestamp == baseFrame && keyframes.ContainsKey(baseFrame))
+ {
+ before = keyframes[baseFrame];
+ return false;
+ }
+
+ // we gotta find the frames that the given frame is between
+ // this is pretty easy thanks to the fact that the dictionary is always sorted
+
+ // getting the first frame index
+ SortedDictionary.KeyCollection keys = keyframes.Keys;
+ uint nextSmallestFrame = keys.First();
+
+ // if the smallest frame is greater than the frame we are at right now, then we can just return the frame
+ if(nextSmallestFrame > baseFrame)
+ {
+ before = keyframes[nextSmallestFrame];
+ return false;
+ }
+
+ // getting the actual next smallest and biggest frames
+ uint nextBiggestFrame = baseFrame;
+ foreach(uint key in keyframes.Keys)
+ {
+ if(key > nextSmallestFrame && key <= baseFrame)
+ {
+ nextSmallestFrame = key;
+ }
+ else if(key > baseFrame)
+ {
+ // the first bigger value must be the next biggest frame
+ nextBiggestFrame = key;
+ break;
+ }
+ }
+
+ // if the next biggest frame hasn't changed, then that means we are past the last frame
+ before = keyframes[nextSmallestFrame];
+ if(nextBiggestFrame == baseFrame)
+ {
+ return false;
+ }
+
+ // the regular result
+ next = keyframes[nextBiggestFrame];
+
+ // getting the interpolation between the two frames
+ float duration = nextBiggestFrame - nextSmallestFrame;
+ interpolation = (timestamp - nextSmallestFrame) / duration;
+ return true;
+ }
+
+ public static Vector3? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out Vector3 before, out Vector3 next))
+ {
+ return before;
+ }
+ else
+ {
+ return Vector3.Lerp(before, next, interpolation);
+ }
+ }
+
+ public static Vector3[]? ValueAtFrame(this SortedDictionary> keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out ILabeledArray before, out ILabeledArray? next))
+ {
+ return before.ToArray();
+ }
+
+ Vector3[] result = new Vector3[before.Length];
+ for(int i = 0; i < result.Length; i++)
+ {
+ result[i] = Vector3.Lerp(before[i], next[i], interpolation);
+ }
+
+ return result;
+ }
+
+ public static Vector2? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out Vector2 before, out Vector2 next))
+ {
+ return before;
+ }
+ else
+ {
+ return Vector2.Lerp(before, next, interpolation);
+ }
+ }
+
+ public static Color? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out Color before, out Color next))
+ {
+ return before;
+ }
+ else
+ {
+ return Color.Lerp(before, next, interpolation);
+ }
+ }
+
+ public static float? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out float before, out float next))
+ {
+ return before;
+ }
+ else
+ {
+ return (next * interpolation) + (before * (1 - interpolation));
+ }
+ }
+
+ public static Spotlight? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out Spotlight before, out Spotlight next))
+ {
+ return before;
+ }
+ else
+ {
+ return Spotlight.Lerp(before, next, interpolation);
+ }
+ }
+
+ public static Quaternion? ValueAtFrame(this SortedDictionary keyframes, float frame)
+ {
+ if(keyframes.Count == 0)
+ {
+ return null;
+ }
+
+ if(!GetNearestFrames(keyframes, frame, out float interpolation, out Quaternion before, out Quaternion next))
+ {
+ return before;
+ }
+ else
+ {
+ return Quaternion.Lerp(before, next, interpolation);
+ }
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Utilities/KeyframeOptimizationUtils.cs b/src/SA3D.Modeling/Animation/Utilities/KeyframeOptimizationUtils.cs
new file mode 100644
index 0000000..39072f0
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Utilities/KeyframeOptimizationUtils.cs
@@ -0,0 +1,131 @@
+using SA3D.Common;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Animation.Utilities
+{
+ internal static class KeyframeOptimizationUtils
+ {
+ private static void RemoveDeviations(
+ this SortedDictionary keyframes,
+ uint? start,
+ uint? end,
+ float deviationThreshold,
+ Func lerp,
+ Func calculateDeviation)
+ {
+ start ??= keyframes.Keys.FirstOrDefault();
+ end ??= keyframes.Keys.LastOrDefault();
+
+ if(end - start < 2 || deviationThreshold <= 0.0)
+ {
+ return;
+ }
+
+ List frames = new();
+ for(uint frame = start.Value; frame <= end; frame++)
+ {
+ if(keyframes.ContainsKey(frame))
+ {
+ frames.Add(frame);
+ }
+ }
+
+ // whenever a frame is removed, we skip the next one.
+ // repeat that until we reach an iteration where no frame was removed
+
+ bool done;
+ do
+ {
+ done = true;
+ for(int i = 1; i < frames.Count - 1; i++)
+ {
+ uint previous = frames[i - 1];
+ uint current = frames[i];
+ uint next = frames[i + 1];
+
+ float linearFac = (current - previous) / (float)(next - previous);
+
+ T linear = lerp(keyframes[previous], keyframes[next], linearFac);
+ T actual = keyframes[current];
+
+ float deviation = calculateDeviation(linear, actual);
+ if(deviation < deviationThreshold)
+ {
+ keyframes.Remove(current);
+ frames.RemoveAt(i);
+ done = false;
+ }
+ }
+ }
+ while(!done);
+ }
+
+ public static void OptimizeFloat(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ (a, b, t) => (a * (1 - t)) + (b * t),
+ (a, b) => Math.Abs(a - b));
+ }
+
+ public static void OptimizeFloatDegrees(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ (a, b, t) => (a * (1 - t)) + (b * t),
+ (a, b) => MathHelper.RadToDeg(Math.Abs(a - b)));
+ }
+
+ public static void OptimizeVector2(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ Vector2.Lerp,
+ (a, b) => (a - b).Length());
+ }
+
+ public static void OptimizeVector3(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ Vector3.Lerp,
+ (a, b) => (a - b).Length());
+ }
+
+ public static void OptimizeVector3Degrees(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ Vector3.Lerp,
+ (a, b) => MathHelper.RadToDeg((a - b).Length()));
+ }
+
+ public static void OptimizeColor(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ Color.Lerp,
+ Color.Distance);
+ }
+
+ public static void OptimizeQuaternion(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ QuaternionUtilities.RealLerp,
+ (a, b) => (a - b).Length());
+ }
+
+ public static void OptimizeSpotlight(this SortedDictionary keyframes, float deviationThreshold, uint? start, uint? end)
+ {
+ keyframes.RemoveDeviations(
+ start, end, deviationThreshold,
+ Spotlight.Lerp,
+ Spotlight.Distance);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Utilities/KeyframeRead.cs b/src/SA3D.Modeling/Animation/Utilities/KeyframeRead.cs
new file mode 100644
index 0000000..8587994
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Utilities/KeyframeRead.cs
@@ -0,0 +1,142 @@
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Structs;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+using static SA3D.Common.MathHelper;
+
+namespace SA3D.Modeling.Animation.Utilities
+{
+ internal static class KeyframeRead
+ {
+ public static void ReadVector3Set(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary, FloatIOType type)
+ {
+ if(type == FloatIOType.BAMS16)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUShort(address);
+ address += 2;
+ dictionary.Add(frame, reader.ReadVector3(ref address, type));
+ }
+ }
+ else
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ address += 4;
+ dictionary.Add(frame, reader.ReadVector3(ref address, type));
+ }
+ }
+
+ }
+
+ public static void ReadVector3ArraySet(this EndianStackReader reader, uint address, uint count, string labelPrefix, SortedDictionary> dictionary, PointerLUT lut)
+ {
+ if(count == 0)
+ {
+ return;
+ }
+
+ uint startAddr = address;
+
+ //
+ SortedDictionary frameAddresses = new();
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ uint ptr = reader.ReadPointer(address += 4);
+ address += 4;
+
+ frameAddresses.Add(frame, ptr);
+ }
+
+ uint[] addresses = frameAddresses.Values.Distinct().Order().ToArray();
+ // get the smallest array size
+ uint size = (startAddr - addresses[^1]) / 12;
+ for(int i = 1; i < addresses.Length; i++)
+ {
+ for(int j = 0; j < i; j++)
+ {
+ uint newSize = (addresses[i] - addresses[j]) / 12;
+ if(newSize < size)
+ {
+ size = newSize;
+ }
+ }
+ }
+
+ foreach(KeyValuePair item in frameAddresses)
+ {
+ ILabeledArray vectors = lut.GetAddLabeledValue(item.Value, labelPrefix, () =>
+ {
+ LabeledArray result = new(size);
+
+ uint ptr = item.Value;
+ for(int j = 0; j < size; j++)
+ {
+ result[j] = reader.ReadVector3(ref ptr);
+ }
+
+ return result;
+ });
+
+ dictionary.Add(item.Key, vectors);
+ }
+ }
+
+ public static void ReadVector2Set(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary, FloatIOType type)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ address += 4;
+ dictionary.Add(frame, reader.ReadVector2(ref address, type));
+ }
+ }
+
+ public static void ReadColorSet(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary, ColorIOType type)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ address += 4;
+ dictionary.Add(frame, reader.ReadColor(ref address, type));
+ }
+ }
+
+ public static void ReadFloatSet(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary, bool BAMS)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ float value = BAMS ? BAMSToRad(reader.ReadInt(address + 4)) : reader.ReadFloat(address + 4);
+ address += 8;
+ dictionary.Add(frame, value);
+ }
+ }
+
+ public static void ReadSpotSet(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ Spotlight value = Spotlight.Read(reader, address + 4);
+ address += 8 + Spotlight.StructSize;
+ dictionary.Add(frame, value);
+ }
+ }
+
+ public static void ReadQuaternionSet(this EndianStackReader reader, uint address, uint count, SortedDictionary dictionary)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ uint frame = reader.ReadUInt(address);
+ address += 4;
+ dictionary.Add(frame, reader.ReadQuaternion(ref address));
+ }
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Utilities/KeyframeRotationUtils.cs b/src/SA3D.Modeling/Animation/Utilities/KeyframeRotationUtils.cs
new file mode 100644
index 0000000..a9cfb72
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Utilities/KeyframeRotationUtils.cs
@@ -0,0 +1,433 @@
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Numerics;
+using Matrix4x4KF = System.Collections.Generic.SortedDictionary;
+using QuaternionKF = System.Collections.Generic.SortedDictionary;
+using EulerKF = System.Collections.Generic.SortedDictionary;
+
+namespace SA3D.Modeling.Animation.Utilities
+{
+ ///
+ /// Utility methods for converting keyframe rotations from and to matrices and each other.
+ ///
+ public static class KeyframeRotationUtils
+ {
+ #region Quaternion -> Euler
+
+ /// <summary>
+ /// Converts quaternion rotation keyframes to euler rotation keyframes.
+ /// Interpolates every whole frame between neighboring keyframes so that each converted euler
+ /// stays "compatible" (continuous, no 180°+ jumps) with the previous one, then optimizes
+ /// redundant in-between frames away again.
+ /// </summary>
+ /// <param name="source">The quaternion keyframes to convert.</param>
+ /// <param name="deviationThreshold">The deviation threshold below which converted values should be ignored.</param>
+ /// <param name="rotateZYX">Whether euler angles are applied in ZYX order.</param>
+ /// <param name="result">Keyframes in which the result should be stored.</param>
+ public static void QuaternionToEuler(QuaternionKF source, float deviationThreshold, bool rotateZYX, EulerKF result)
+ {
+ if(source.Count == 0)
+ {
+ return;
+ }
+
+ // Negative thresholds are treated as "no optimization".
+ deviationThreshold = Math.Max(deviationThreshold, 0);
+
+ // uint.MaxValue marks "no previous keyframe yet"; assumes no real frame uses that value.
+ uint previousFrame = uint.MaxValue;
+ foreach(KeyValuePair item in source)
+ {
+ if(previousFrame != uint.MaxValue)
+ {
+ float frameCount = item.Key - previousFrame;
+
+ // The previous keyframe was already added to result; use it as the continuity anchor.
+ Vector3 previousEuler = result[previousFrame];
+ for(uint i = 1; i <= frameCount; i++)
+ {
+ float fac = i / frameCount;
+ Quaternion lerp = Quaternion.Lerp(source[previousFrame], item.Value, fac);
+
+ // Pick the euler representation closest to the previous frame's euler.
+ previousEuler = lerp.QuaternionToCompatibleEuler(previousEuler, rotateZYX);
+
+ // i == frameCount lands exactly on item.Key, adding the keyframe itself.
+ result.Add(previousFrame + i, previousEuler);
+ }
+
+ if(deviationThreshold > 0)
+ {
+ // Drop interpolated frames that deviate less than the threshold.
+ result.OptimizeVector3(deviationThreshold, previousFrame, item.Key);
+ }
+ }
+ else
+ {
+ // First keyframe: direct conversion, nothing to stay compatible with.
+ Vector3 rotation = item.Value.QuaternionToEuler(rotateZYX);
+ result.Add(item.Key, rotation);
+ }
+
+ previousFrame = item.Key;
+ }
+ }
+
+ ///
+ /// Converts quaternion rotation keyframes to euler rotation keyframes.
+ ///
+ /// The quaternion keyframes to convert.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether euler angles are applied in ZYX order.
+ /// The converted euler rotation keyframes.
+ public static EulerKF QuaternionToEuler(QuaternionKF source, float deviationThreshold, bool rotateZYX)
+ {
+ EulerKF result = new();
+ QuaternionToEuler(source, deviationThreshold, rotateZYX, result);
+ return result;
+ }
+
+ ///
+ /// Converts quaternion rotation keyframes to euler rotation keyframes.
+ ///
+ /// The keyframes to convert and output to.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether euler angles are applied in ZYX order.
+ /// Whether quaternion keyframes should be cleared after converting.
+ public static void QuaternionToEuler(this Keyframes keyframes, float deviationThreshold, bool rotateZYX, bool clearQuaternion)
+ {
+ keyframes.EulerRotation.Clear();
+ QuaternionToEuler(keyframes.QuaternionRotation, deviationThreshold, rotateZYX, keyframes.EulerRotation);
+
+ if(clearQuaternion)
+ {
+ keyframes.QuaternionRotation.Clear();
+ }
+ }
+
+ #endregion
+
+
+ #region Euler -> Quaternion
+
+ /// <summary>
+ /// Converts euler rotation keyframes to quaternion rotation keyframes.
+ /// Interpolates every whole frame between neighboring keyframes (lerping in euler space,
+ /// which preserves multi-revolution differences), then optimizes redundant frames away.
+ /// </summary>
+ /// <param name="source">The euler keyframes to convert.</param>
+ /// <param name="deviationThreshold">The deviation threshold below which converted values should be ignored.</param>
+ /// <param name="rotateZYX">Whether euler angles are applied in ZYX order.</param>
+ /// <param name="result">Keyframes in which the result should be stored.</param>
+ public static void EulerToQuaternion(EulerKF source, float deviationThreshold, bool rotateZYX, QuaternionKF result)
+ {
+ if(source.Count == 0)
+ {
+ return;
+ }
+
+ // Negative thresholds are treated as "no optimization".
+ deviationThreshold = Math.Max(deviationThreshold, 0);
+
+ // uint.MaxValue marks "no previous keyframe yet".
+ uint previousFrame = uint.MaxValue;
+ foreach(KeyValuePair item in source)
+ {
+ if(previousFrame != uint.MaxValue)
+ {
+ float frameCount = item.Key - previousFrame;
+
+ for(uint i = 1; i <= frameCount; i++)
+ {
+ float fac = i / frameCount;
+ // Lerp in euler space, convert each in-between frame; i == frameCount adds item.Key itself.
+ Vector3 lerp = Vector3.Lerp(source[previousFrame], item.Value, fac);
+ result.Add(previousFrame + i, lerp.EulerToQuaternion(rotateZYX));
+ }
+
+ if(deviationThreshold > 0)
+ {
+ result.OptimizeQuaternion(deviationThreshold, previousFrame, item.Key);
+ }
+ }
+ else
+ {
+ // First keyframe: direct conversion.
+ Quaternion quaternion = item.Value.EulerToQuaternion(rotateZYX);
+ result.Add(item.Key, quaternion);
+ }
+
+ previousFrame = item.Key;
+ }
+ }
+
+ ///
+ /// Converts euler rotation keyframes to quaternion rotation keyframes.
+ ///
+ /// The euler keyframes to convert.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether euler angles are applied in ZYX order.
+ /// The converted quaternion rotation keyframes.
+ public static QuaternionKF EulerToQuaternion(EulerKF source, float deviationThreshold, bool rotateZYX)
+ {
+ QuaternionKF result = new();
+ EulerToQuaternion(source, deviationThreshold, rotateZYX, result);
+ return result;
+ }
+
+ ///
+ /// Converts euler rotation keyframes to quaternion rotation keyframes.
+ ///
+ /// The keyframes to convert and output to.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether euler angles are applied in ZYX order.
+ /// Whether euler keyframes should be cleared after converting.
+ public static void EulerToQuaternion(this Keyframes keyframes, float deviationThreshold, bool rotateZYX, bool clearEuler)
+ {
+ keyframes.QuaternionRotation.Clear();
+ EulerToQuaternion(keyframes.EulerRotation, deviationThreshold, rotateZYX, keyframes.QuaternionRotation);
+
+ if(clearEuler)
+ {
+ keyframes.EulerRotation.Clear();
+ }
+ }
+
+ #endregion
+
+
+ #region Euler / Quaternion -> Matrix
+
+ /// <summary>
+ /// Builds in-between rotation matrices for two successive euler keyframes whose difference
+ /// exceeds 180° on some axis. Matrices alone cannot express >180° deltas unambiguously, so
+ /// these intermediate steps preserve the intended winding when converting back to euler.
+ /// Returns null when the delta is small enough to need no complementary steps.
+ /// </summary>
+ private static Matrix4x4[]? GetComplementaryMatrices(Vector3 previous, Vector3 current, bool rotateZYX)
+ {
+ Vector3 dif = current - previous;
+ float maxDif = Vector3.Abs(dif).GreatestValue();
+
+ // One intermediate step per half revolution (PI) of the largest axis delta.
+ int complementary_len = (int)MathF.Floor(maxDif / MathF.PI);
+ if(complementary_len == 0)
+ {
+ return null;
+ }
+
+ complementary_len++;
+ float dif_fac = 1.0f / (complementary_len + 1);
+
+ Matrix4x4[] result = new Matrix4x4[complementary_len];
+
+ for(int i = 0; i < complementary_len; i++)
+ {
+ // Evenly spaced fractions of the delta, exclusive of both endpoints.
+ Vector3 compl_euler = previous + (dif * (dif_fac * (i + 1)));
+ result[i] = MatrixUtilities.CreateRotationMatrix(compl_euler, rotateZYX);
+ }
+
+ return result;
+ }
+
+
+ /// <summary>
+ /// Converts rotation keyframes to rotation matrices. Will use Euler or Quaternion rotations. If both are present, euler is used regardless.
+ /// </summary>
+ /// <param name="keyframes">The keyframes to convert.</param>
+ /// <param name="targetQuaternion">Whether the result is intended to be handled like quaternion rotations.</param>
+ /// <param name="deviationThreshold">The deviation threshold below which converted values should be ignored.</param>
+ /// <param name="rotateZYX">Whether euler angles are applied in ZYX order.</param>
+ /// <param name="converted">Whether the output was converted between euler and quaternion.</param>
+ /// <param name="complementary">Complementary matrices for >180° euler deltas, keyed by the frame preceding the gap; null for quaternion targets or when none were needed.</param>
+ /// <returns>The converted rotation matrix keyframes.</returns>
+ public static Matrix4x4KF GetRotationMatrices(this Keyframes keyframes, bool targetQuaternion, float deviationThreshold, bool rotateZYX, out bool converted, out Dictionary? complementary)
+ {
+ Matrix4x4KF result = new();
+ complementary = null;
+ converted = false;
+
+ if(keyframes.EulerRotation.Count == 0 && keyframes.QuaternionRotation.Count == 0)
+ {
+ return result;
+ }
+
+ if(targetQuaternion)
+ {
+ QuaternionKF output = keyframes.QuaternionRotation;
+
+ if(keyframes.EulerRotation.Count > 0) // If eulers exist, convert to quaternion regardless of whether quaternions had values before
+ {
+ converted = true;
+ output = EulerToQuaternion(keyframes.EulerRotation, deviationThreshold, rotateZYX);
+ }
+
+ foreach(KeyValuePair quaternion in output)
+ {
+ result.Add(quaternion.Key, Matrix4x4.CreateFromQuaternion(quaternion.Value));
+ }
+ }
+ else
+ {
+ EulerKF output = keyframes.EulerRotation;
+
+ // Euler target: only fall back to quaternions when no euler keyframes exist.
+ if(output.Count == 0)
+ {
+ converted = true;
+ output = QuaternionToEuler(keyframes.QuaternionRotation, deviationThreshold, rotateZYX);
+ }
+
+ Vector3? previous = null;
+ uint previousFrame = 0;
+ complementary = new();
+
+ foreach(KeyValuePair rotation in output)
+ {
+ result.Add(rotation.Key, MatrixUtilities.CreateRotationMatrix(rotation.Value, rotateZYX));
+
+ if(previous != null)
+ {
+ // Record in-between matrices when the euler delta exceeds 180° on an axis,
+ // so a later matrix->euler pass can reconstruct the intended winding.
+ Matrix4x4[]? compl_matrices = GetComplementaryMatrices(previous.Value, rotation.Value, rotateZYX);
+ if(compl_matrices != null)
+ {
+ complementary.Add(previousFrame, compl_matrices);
+ }
+ }
+
+ previous = rotation.Value;
+ previousFrame = rotation.Key;
+ }
+
+ // Normalize "no complementary data" to null for callers.
+ if(complementary.Count == 0)
+ {
+ complementary = null;
+ }
+ }
+
+ return result;
+ }
+
+ /// <summary>
+ /// Decomposes each rotation matrix keyframe into its quaternion component.
+ /// Scale and translation components of the matrices are discarded.
+ /// </summary>
+ private static void ConvertMatrixToQuaternion(Matrix4x4KF source, QuaternionKF result)
+ {
+ foreach(KeyValuePair item in source)
+ {
+ Matrix4x4.Decompose(item.Value, out _, out Quaternion value, out _);
+ result.Add(item.Key, value);
+ }
+ }
+
+ /// <summary>
+ /// Converts rotation matrix keyframes to euler keyframes, keeping each result "compatible"
+ /// (continuous) with the previous one. Complementary matrices recorded between keyframes are
+ /// folded into the running euler so >180° deltas keep their intended winding.
+ /// </summary>
+ private static void ConvertMatrixToRotation(Matrix4x4KF source, bool rotateZYX, Dictionary? complementary, EulerKF result)
+ {
+ Vector3 previousEuler = default;
+
+ foreach(KeyValuePair item in source)
+ {
+ previousEuler = MatrixUtilities.ToCompatibleEuler(item.Value, previousEuler, rotateZYX);
+ result.Add(item.Key, previousEuler);
+
+ if(complementary?.TryGetValue(item.Key, out Matrix4x4[]? matrices) == true)
+ {
+ // Not added to result; only advances the continuity anchor across the gap.
+ for(int i = 0; i < matrices.Length; i++)
+ {
+ previousEuler = MatrixUtilities.ToCompatibleEuler(matrices[i], previousEuler, rotateZYX);
+ }
+ }
+ }
+ }
+
+ #endregion
+
+
+ #region Matrix -> Quaternion
+
+ ///
+ /// Converts rotation matrix keyframes to quaternion rotation keyframes.
+ ///
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ /// The keyframes to output to.
+ public static void MatrixToQuaternion(Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX, QuaternionKF result)
+ {
+ if(wasQuaternion)
+ {
+ ConvertMatrixToQuaternion(source, result);
+ }
+ else
+ {
+ EulerKF rotations = new();
+ ConvertMatrixToRotation(source, rotateZYX, null, rotations);
+ EulerToQuaternion(rotations, deviationThreshold, rotateZYX, result);
+ }
+ }
+
+ ///
+ /// Converts rotation matrix keyframes to quaternion rotation keyframes.
+ ///
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ /// The converted quaternion keyframes.
+ public static QuaternionKF MatrixToQuaternion(Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX)
+ {
+ QuaternionKF result = new();
+ MatrixToQuaternion(source, wasQuaternion, deviationThreshold, rotateZYX, result);
+ return result;
+ }
+
+ ///
+ /// Converts rotation matrix keyframes to quaternion rotation keyframes.
+ ///
+ /// The keyframes to store the converted rotation into.
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ public static void MatrixToQuaternion(this Keyframes keyframes, Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX)
+ {
+ keyframes.QuaternionRotation.Clear();
+ MatrixToQuaternion(source, wasQuaternion, deviationThreshold, rotateZYX, keyframes.QuaternionRotation);
+ }
+
+ #endregion
+
+
+ #region Matrix -> Euler
+
+ ///
+ /// Converts rotation matrix keyframes to euler rotation keyframes.
+ ///
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ /// Rotation matrices to be applied in between keyframes. Used for achieving angle differences greater than 180 degrees.
+ /// The keyframes to output to.
+ public static void MatrixToEuler(Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX, Dictionary? complementary, EulerKF result)
+ {
+ if(wasQuaternion)
+ {
+ QuaternionKF quaternions = new();
+ ConvertMatrixToQuaternion(source, quaternions);
+ QuaternionToEuler(quaternions, deviationThreshold, rotateZYX, result);
+ }
+ else
+ {
+ ConvertMatrixToRotation(source, rotateZYX, complementary, result);
+ }
+ }
+
+ ///
+ /// Converts rotation matrix keyframes to euler rotation keyframes.
+ ///
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ /// Rotation matrices to be applied in between keyframes. Used for achieving angle differences greater than 180 degrees.
+ /// The converted euler keyframes.
+ public static EulerKF MatrixToEuler(Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX, Dictionary? complementary)
+ {
+ EulerKF result = new();
+ MatrixToEuler(source, wasQuaternion, deviationThreshold, rotateZYX, complementary, result);
+ return result;
+ }
+
+ ///
+ /// Converts rotation matrix keyframes to euler rotation keyframes.
+ ///
+ /// The keyframes to store the converted rotation into.
+ /// Rotation matrix keyframes to convert.
+ /// Whether the matrices should be handled as quaternion rotations.
+ /// The deviation threshold below which converted values should be ignored.
+ /// Whether the euler angles should be applied in ZYX order.
+ /// Rotation matrices to be applied in between keyframes. Used for achieving angle differences greater than 180 degrees.
+ public static void MatrixToEuler(this Keyframes keyframes, Matrix4x4KF source, bool wasQuaternion, float deviationThreshold, bool rotateZYX, Dictionary? complementary)
+ {
+ keyframes.EulerRotation.Clear();
+ MatrixToEuler(source, wasQuaternion, deviationThreshold, rotateZYX, complementary, keyframes.EulerRotation);
+ }
+
+ #endregion
+ }
+}
diff --git a/src/SA3D.Modeling/Animation/Utilities/KeyframeWrite.cs b/src/SA3D.Modeling/Animation/Utilities/KeyframeWrite.cs
new file mode 100644
index 0000000..e632386
--- /dev/null
+++ b/src/SA3D.Modeling/Animation/Utilities/KeyframeWrite.cs
@@ -0,0 +1,107 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Structs;
+using System.Collections.Generic;
+using System.Numerics;
+
+namespace SA3D.Modeling.Animation.Utilities
+{
+ internal static class KeyframeWrite
+ {
+ public static void WriteVector3Set(this EndianStackWriter writer, SortedDictionary dict, FloatIOType ioType)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ writer.WriteVector3(pair.Value, ioType);
+ }
+ }
+
+ public static void WriteVector2Set(this EndianStackWriter writer, SortedDictionary dict, FloatIOType ioType)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ writer.WriteVector2(pair.Value, ioType);
+ }
+ }
+
+ public static void WriteColorSet(this EndianStackWriter writer, SortedDictionary dict, ColorIOType ioType)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ writer.WriteColor(pair.Value, ioType);
+ }
+ }
+
+ /// <summary>
+ /// Writes the Vector3 arrays of the keyframe set to the writer and returns the
+ /// (frame, array address) pairs that make up the keyframe set itself.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ /// <param name="dict">Keyframes mapping frame numbers to Vector3 arrays.</param>
+ /// <param name="lut">Pointer lookup table; arrays already written are reused by address.</param>
+ /// <returns>Alternating frame numbers and array addresses, two entries per keyframe.</returns>
+ public static uint[] WriteVector3ArrayData(this EndianStackWriter writer, SortedDictionary<uint, ILabeledArray<Vector3>> dict, PointerLUT lut)
+ {
+     // Two uints per keyframe: the frame number and the address of its vector array.
+     // (Previously sized dict.Count and the key was overwritten by the address on the
+     // same index before a second increment overran the array.)
+     uint[] result = new uint[dict.Count * 2];
+     int i = 0;
+
+     foreach(KeyValuePair<uint, ILabeledArray<Vector3>> pair in dict)
+     {
+         result[i++] = pair.Key;
+         result[i++] = lut.GetAddAddress(pair.Value, (array) =>
+         {
+             // Local renamed from "result" — it shadowed the outer array (CS0136).
+             uint arrayAddress = writer.PointerPosition;
+
+             foreach(Vector3 v in pair.Value)
+             {
+                 writer.WriteVector3(v);
+             }
+
+             return arrayAddress;
+         });
+     }
+
+     return result;
+ }
+
+ public static void WriteVector3ArraySet(this EndianStackWriter writer, uint[] arrayData)
+ {
+ foreach(uint value in arrayData)
+ {
+ writer.WriteUInt(value);
+ }
+ }
+
+ public static void WriteFloatSet(this EndianStackWriter writer, SortedDictionary dict, bool BAMS)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ if(BAMS)
+ {
+ writer.WriteInt(MathHelper.RadToBAMS(pair.Value));
+ }
+ else
+ {
+ writer.WriteFloat(pair.Value);
+ }
+ }
+ }
+
+ public static void WriteSpotlightSet(this EndianStackWriter writer, SortedDictionary dict)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ pair.Value.Write(writer);
+ }
+ }
+
+ public static void WriteQuaternionSet(this EndianStackWriter writer, SortedDictionary dict)
+ {
+ foreach(KeyValuePair pair in dict)
+ {
+ writer.WriteUInt(pair.Key);
+ writer.WriteQuaternion(pair.Value);
+ }
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/File/AnimationFile.cs b/src/SA3D.Modeling/File/AnimationFile.cs
new file mode 100644
index 0000000..1c7e56d
--- /dev/null
+++ b/src/SA3D.Modeling/File/AnimationFile.cs
@@ -0,0 +1,357 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Animation;
+using SA3D.Modeling.Structs;
+using System;
+using System.IO;
+using static SA3D.Modeling.File.FileHeaders;
+using SA3D.Common;
+
+namespace SA3D.Modeling.File
+{
+ ///
+ /// Animation file contents.
+ ///
+ public class AnimationFile
+ {
+ ///
+ /// Animation of the file.
+ ///
+ public Motion Animation { get; }
+
+ ///
+ /// Metadata in the file.
+ ///
+ public MetaData MetaData { get; }
+
+
+ private AnimationFile(Motion animation, MetaData metaData)
+ {
+ Animation = animation;
+ MetaData = metaData;
+ }
+
+
+ ///
+ /// Checks whether data is formatted as a animation file.
+ ///
+ /// The data to check.
+ public static bool CheckIsAnimationFile(byte[] data)
+ {
+ return CheckIsAnimationFile(data, 0);
+ }
+
+ ///
+ /// Checks whether data is formatted as a animation file.
+ ///
+ /// The data to check.
+ /// Address at which to check.
+ public static bool CheckIsAnimationFile(byte[] data, uint address)
+ {
+ return CheckIsAnimationFile(new EndianStackReader(data), address);
+ }
+
+ ///
+ /// Checks whether data is formatted as a animation file.
+ ///
+ /// The reader to read from.
+ public static bool CheckIsAnimationFile(EndianStackReader reader)
+ {
+ return CheckIsAnimationFile(reader, 0);
+ }
+
+ ///
+ /// Checks whether data is formatted as a animation file.
+ ///
+ /// The reader to read from.
+ /// Address at which to check.
+ public static bool CheckIsAnimationFile(EndianStackReader reader, uint address)
+ {
+ return reader.ReadUInt(address) == NMDM || (
+ (reader.ReadULong(address) & HeaderMask) == SAANIM
+ && reader[address + 7] <= CurrentAnimVersion);
+ }
+
+
+ ///
+ /// Reads a animation file.
+ ///
+ /// Path to the file to read.
+ /// The animation file that was read.
+ public static AnimationFile ReadFromFile(string filepath)
+ {
+ return ReadFromData(System.IO.File.ReadAllBytes(filepath), 0);
+ }
+
+ ///
+ /// Reads a animation file.
+ ///
+ /// Path to the file to read.
+ /// Number of nodes in the targeted model node tree.
Only acts as fallback, in case the file does not contain the value.
+ /// Whether euler rotations are stored in 16-bit instead of 32-bit.
Only acts as fallback, in case the file does not contain the value.
+ /// The animation file that was read.
+ public static AnimationFile ReadFromFile(string filepath, uint? nodeCount, bool shortRot)
+ {
+ return ReadFromData(System.IO.File.ReadAllBytes(filepath), 0, nodeCount, shortRot);
+ }
+
+ ///
+ /// Reads a animation file off byte data.
+ ///
+ /// The data to read.
+ /// The animation file that was read.
+ public static AnimationFile ReadFromData(byte[] data)
+ {
+ return ReadFromData(data, 0, null, false);
+ }
+
+ ///
+ /// Reads a animation file off byte data.
+ ///
+ /// The data to read.
+ /// Address at which to start reading.
+ /// The animation file that was read.
+ public static AnimationFile ReadFromData(byte[] data, uint address)
+ {
+ return ReadFromData(data, address, null, false);
+ }
+
+ ///
+ /// Reads a animation file off byte data.
+ ///
+ /// The data to read.
+ /// Address at which to start reading.
+ /// Number of nodes in the targeted model node tree.
Only acts as fallback, in case the file does not contain the value.
+ /// Whether euler rotations are stored in 16-bit instead of 32-bit.
Only acts as fallback, in case the file does not contain the value.
+ /// The animation file that was read.
+ public static AnimationFile ReadFromData(byte[] data, uint address, uint? nodeCount, bool shortRot)
+ {
+ using(EndianStackReader reader = new(data))
+ {
+ return Read(reader, address, nodeCount, shortRot);
+ }
+ }
+
+ ///
+ /// Reads a animation file off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// The animation file that was read.
+ public static AnimationFile Read(EndianStackReader reader)
+ {
+ return Read(reader, 0, null, false);
+ }
+
+ ///
+ /// Reads a animation file off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The animation file that was read.
+ public static AnimationFile Read(EndianStackReader reader, uint address)
+ {
+ return Read(reader, address, null, false);
+ }
+
+ ///
+ /// Reads a animation file off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// Number of nodes in the targeted model node tree.
Only acts as fallback, in case the file does not contain the value.
+ /// Whether euler rotations are stored in 16-bit instead of 32-bit.
Only acts as fallback, in case the file does not contain the value.
+ /// The animation file that was read.
+ public static AnimationFile Read(EndianStackReader reader, uint address, uint? nodeCount, bool shortRot)
+ {
+ reader.PushBigEndian(false);
+
+ try
+ {
+ if(reader.ReadUInt(address) == NMDM)
+ {
+ return ReadNM(reader, address, nodeCount);
+ }
+ else if((reader.ReadULong(address) & HeaderMask) == SAANIM)
+ {
+ return ReadSA(reader, address, nodeCount, shortRot);
+ }
+ else
+ {
+ throw new FormatException("Animation file invalid!");
+ }
+ }
+ finally
+ {
+ reader.PopEndian();
+ }
+ }
+
+ /// <summary>
+ /// Reads an NMDM ("NM" motion) animation file. Requires the caller-supplied node count,
+ /// since the NM format does not store one.
+ /// </summary>
+ private static AnimationFile ReadNM(EndianStackReader reader, uint address, uint? nodeCount)
+ {
+ if(nodeCount == null)
+ {
+ throw new ArgumentException("Cannot read NMDM animations without providing node count!");
+ }
+
+ // Determines big endian via the framecount.
+ // As long as that one is not bigger than 65,535 or 18 minutes of animation at 60fps, we good
+ reader.PushBigEndian(reader.CheckBigEndian32(address + 0xC));
+ uint prevImageBase = reader.ImageBase;
+
+ try
+ {
+ uint dataAddress = address + 8;
+ // NM pointers are relative to the data start; a negated image base makes
+ // absolute-pointer resolution (addr + ImageBase) wrap back to dataAddress-relative.
+ reader.ImageBase = unchecked((uint)-dataAddress);
+ Motion motion = Motion.Read(reader, dataAddress, nodeCount.Value, new(), true);
+ return new(motion, new());
+ }
+ finally
+ {
+ // Restore reader state even when Motion.Read throws.
+ reader.ImageBase = prevImageBase;
+ reader.PopEndian();
+ }
+ }
+
+ /// <summary>
+ /// Reads a SAANIM animation file. Version 1+ files carry their own node count and
+ /// short-rotation flag; version 0 files require the caller-supplied fallbacks.
+ /// </summary>
+ private static AnimationFile ReadSA(EndianStackReader reader, uint address, uint? nodeCount, bool shortRot)
+ {
+     // Version byte sits at offset 7 of the 8-byte header, relative to `address`
+     // (matches CheckIsAnimationFile; previously read reader[7] regardless of address).
+     byte version = reader[address + 7];
+     if(version > CurrentAnimVersion)
+     {
+         throw new FormatException("Not a valid SAANIM file.");
+     }
+
+     uint motionAddress = reader.ReadUInt(address + 8);
+
+     MetaData metaData = new();
+     if(version >= 2)
+     {
+         // motion v2 uses metadata v3
+         metaData = MetaData.Read(reader, address + 0xC, 3, false);
+     }
+     else if(reader.TryReadPointer(address + 0xC, out uint labelAddr))
+     {
+         // v0/v1: the slot holds a single label pointer instead of full metadata.
+         metaData.Labels.Add(motionAddress, reader.ReadNullterminatedString(labelAddr));
+     }
+
+     if(version > 0)
+     {
+         // Top bit of the node count field flags 16-bit (short) euler rotations.
+         const uint shortRotMask = (uint)Flag32.B31;
+         // Relative to `address` (previously read absolute offset 0x10).
+         uint fileNodeCount = reader.ReadUInt(address + 0x10);
+         shortRot = (fileNodeCount & shortRotMask) != 0;
+         nodeCount = fileNodeCount & ~shortRotMask;
+     }
+     else if(nodeCount == null)
+     {
+         throw new ArgumentException("Cannot open version 0 animations without providing node count!");
+     }
+
+     PointerLUT lut = new(metaData.Labels);
+     Motion motion = Motion.Read(reader, motionAddress, nodeCount.Value, lut, shortRot);
+
+     return new(motion, metaData);
+ }
+
+
+ ///
+ /// Write the animation file to a file. Previous labels may get lost.
+ ///
+ /// Path to the file to write to.
+ ///
+ public void WriteToFile(string filepath)
+ {
+ WriteToFile(filepath, Animation, MetaData);
+ }
+
+ ///
+ /// Writes the animation file to a byte array. Previous labels may get lost.
+ ///
+ ///
+ ///
+ public byte[] WriteToData()
+ {
+ return WriteToData(Animation, MetaData);
+ }
+
+ ///
+ /// Writes the animation file to an endian stack writer. Previous labels may get lost.
+ ///
+ /// The writer to write to.
+ ///
+ public void Write(EndianStackWriter writer)
+ {
+ Write(writer, Animation, MetaData);
+ }
+
+
+ ///
+ /// Write a animation file to a file.
+ ///
+ /// Path to the file to write to.
+ /// The animation to write.
+ /// The metadata to include.
+ ///
+ public static void WriteToFile(string filepath, Motion animation, MetaData? metaData = null)
+ {
+ using(FileStream stream = System.IO.File.Create(filepath))
+ {
+ EndianStackWriter writer = new(stream);
+ Write(writer, animation, metaData);
+ }
+ }
+
+ ///
+ /// Writes a animation file to a byte array.
+ ///
+ /// The animation to write.
+ /// The metadata to include.
+ /// The written byte data.
+ ///
+ public static byte[] WriteToData(Motion animation, MetaData? metaData = null)
+ {
+ using(MemoryStream stream = new())
+ {
+ EndianStackWriter writer = new(stream);
+ Write(writer, animation, metaData);
+ return stream.ToArray();
+ }
+ }
+
+ /// <summary>
+ /// Writes a animation file to an endian stack writer.
+ /// Layout: 8-byte SAANIM header (with version), motion address, metadata address,
+ /// node-count/short-rot field, then the motion data and metadata.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ /// <param name="animation">The animation to write.</param>
+ /// <param name="metaData">The metadata to include. NOTE(review): its Labels property is
+ /// replaced with the labels gathered while writing — callers passing a shared MetaData
+ /// instance see that mutation.</param>
+ public static void Write(EndianStackWriter writer, Motion animation, MetaData? metaData = null)
+ {
+ writer.WriteULong(SAANIMVer);
+
+ uint placeholderAddr = writer.Position;
+ // 4 bytes: motion address placeholder
+ // 4 bytes: metadata placeholder
+ writer.WriteEmpty(8);
+
+ // Node count with the short-rotation flag packed into the top bit.
+ uint animFileInfo = animation.ModelCount;
+ if(animation.ShortRot)
+ {
+ animFileInfo |= (uint)Flag32.B31;
+ }
+
+ writer.WriteUInt(animFileInfo);
+
+ PointerLUT lut = new();
+
+ uint motionAddress = animation.Write(writer, lut);
+
+ metaData ??= new();
+ metaData.Labels = lut.Labels.GetDictFrom();
+ uint metaDataAddress = metaData.Write(writer);
+
+ // Backpatch the two placeholder addresses, then restore the write position.
+ uint end = writer.Position;
+ writer.Seek(placeholderAddr, SeekOrigin.Begin);
+ writer.WriteUInt(motionAddress);
+ writer.WriteUInt(metaDataAddress);
+ writer.Seek(end, SeekOrigin.Begin);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/File/FileHeaders.cs b/src/SA3D.Modeling/File/FileHeaders.cs
new file mode 100644
index 0000000..03e29ec
--- /dev/null
+++ b/src/SA3D.Modeling/File/FileHeaders.cs
@@ -0,0 +1,94 @@
+namespace SA3D.Modeling.File
+{
+ internal class FileHeaders
+ {
+ public const ulong HeaderMask = ~((ulong)0xFF << 56);
+
+ public const ulong LVL = 0x4C564Cu;
+ public const ulong MDL = 0x4C444Du;
+
+ public const ulong SA1 = 0x314153u;
+ public const ulong SADX = 0x58444153u;
+ public const ulong SA2 = 0x324153u;
+ public const ulong SA2B = 0x42324153u;
+ public const ulong BUF = 0x465542u;
+
+ #region Landtable
+
+ public const ulong SA1LVL = (LVL << 24) | SA1;
+ public const ulong SADXLVL = (LVL << 32) | SADX;
+ public const ulong SA2LVL = (LVL << 24) | SA2;
+ public const ulong SA2BLVL = (LVL << 32) | SA2B;
+ public const ulong BUFLVL = (LVL << 24) | BUF;
+
+ public const ulong CurrentLandtableVersion = 3;
+ public const ulong CurrentLandtableVersionShifted = CurrentLandtableVersion << 56;
+
+ public const ulong SA1LVLVer = SA1LVL | CurrentLandtableVersionShifted;
+ public const ulong SADXLVLVer = SADXLVL | CurrentLandtableVersionShifted;
+ public const ulong SA2LVLVer = SA2LVL | CurrentLandtableVersionShifted;
+ public const ulong SA2BLVLVer = SA2BLVL | CurrentLandtableVersionShifted;
+ public const ulong BUFLVLVer = BUFLVL | CurrentLandtableVersionShifted;
+
+ #endregion
+
+ #region Model
+
+ public const ulong SA1MDL = (MDL << 24) | SA1;
+ public const ulong SADXMDL = (MDL << 32) | SADX;
+ public const ulong SA2MDL = (MDL << 24) | SA2;
+ public const ulong SA2BMDL = (MDL << 32) | SA2B;
+ public const ulong BUFMDL = (MDL << 24) | BUF;
+
+ public const ulong CurrentModelVersion = 3;
+ public const ulong CurrentModelVersionShifted = CurrentModelVersion << 56;
+
+ public const ulong SA1MDLVer = SA1MDL | CurrentModelVersionShifted;
+ public const ulong SADXMDLVer = SADXMDL | CurrentModelVersionShifted;
+ public const ulong SA2MDLVer = SA2MDL | CurrentModelVersionShifted;
+ public const ulong SA2BMDLVer = SA2BMDL | CurrentModelVersionShifted;
+ public const ulong BUFMDLVer = BUFMDL | CurrentModelVersionShifted;
+
+ #endregion
+
+ #region Animation
+
+ public const ulong SAANIM = 0x4D494E414153u;
+
+ public const ulong CurrentAnimVersion = 2;
+ public const ulong CurrentAnimVersionShifted = CurrentAnimVersion << 56;
+
+ public const ulong SAANIMVer = SAANIM | CurrentAnimVersionShifted;
+
+
+ #endregion
+
+ #region Other
+
+ ///
+ /// NJ header.
+ ///
+ public const ushort NJ = (ushort)0x4A4Eu;
+
+ ///
+ /// GJ Header.
+ ///
+ public const ushort GJ = (ushort)0x4A47u;
+
+ ///
+ /// Chunk model block header.
+ ///
+ public const ushort CM = (ushort)0x4D43u;
+
+ ///
+ /// Basic model block header.
+ ///
+ public const ushort BM = (ushort)0x4D42u;
+
+ ///
+ /// NM (motion) file header.
+ ///
+ public const uint NMDM = 0x4D444D4Eu;
+ #endregion
+ }
+}
diff --git a/src/SA3D.Modeling/File/LevelFile.cs b/src/SA3D.Modeling/File/LevelFile.cs
new file mode 100644
index 0000000..9ae6a17
--- /dev/null
+++ b/src/SA3D.Modeling/File/LevelFile.cs
@@ -0,0 +1,290 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.Structs;
+using System;
+using System.IO;
+using static SA3D.Modeling.File.FileHeaders;
+
+namespace SA3D.Modeling.File
+{
+ ///
+ /// Level geometry file contents.
+ ///
+	public class LevelFile
+	{
+		/// <summary>
+		/// Landtable of the file.
+		/// </summary>
+		public LandTable Level { get; }
+
+		/// <summary>
+		/// MetaData of/for a LVL file.
+		/// </summary>
+		public MetaData MetaData { get; }
+
+
+		private LevelFile(LandTable level, MetaData metaData)
+		{
+			Level = level;
+			MetaData = metaData;
+		}
+
+
+		/// <summary>
+		/// Checks whether data is formatted as a level file.
+		/// </summary>
+		/// <param name="data">The data to check.</param>
+		public static bool CheckIsLevelFile(byte[] data)
+		{
+			return CheckIsLevelFile(data, 0);
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a level file.
+		/// </summary>
+		/// <param name="data">The data to check.</param>
+		/// <param name="address">Address at which to check.</param>
+		public static bool CheckIsLevelFile(byte[] data, uint address)
+		{
+			// Dispose the temporary reader instead of leaking it
+			// (matches the reader handling in ReadFromData).
+			using(EndianStackReader reader = new(data))
+			{
+				return CheckIsLevelFile(reader, address);
+			}
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a level file.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		public static bool CheckIsLevelFile(EndianStackReader reader)
+		{
+			return CheckIsLevelFile(reader, 0);
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a level file.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to check.</param>
+		public static bool CheckIsLevelFile(EndianStackReader reader, uint address)
+		{
+			switch(reader.ReadULong(address) & HeaderMask)
+			{
+				case SA1LVL:
+				case SADXLVL:
+				case SA2LVL:
+				case SA2BLVL:
+				case BUFLVL:
+					break;
+				default:
+					return false;
+			}
+
+			// The 8th header byte stores the file version.
+			return reader[address + 7] <= CurrentLandtableVersion;
+		}
+
+
+		/// <summary>
+		/// Reads a level file.
+		/// </summary>
+		/// <param name="filepath">Path to the file to read.</param>
+		/// <returns>The level file that was read.</returns>
+		public static LevelFile ReadFromFile(string filepath)
+		{
+			return ReadFromData(System.IO.File.ReadAllBytes(filepath));
+		}
+
+		/// <summary>
+		/// Reads a level file off byte data.
+		/// </summary>
+		/// <param name="data">The data to read.</param>
+		/// <returns>The level file that was read.</returns>
+		public static LevelFile ReadFromData(byte[] data)
+		{
+			return ReadFromData(data, 0);
+		}
+
+		/// <summary>
+		/// Reads a level file off byte data.
+		/// </summary>
+		/// <param name="data">The data to read.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <returns>The level file that was read.</returns>
+		public static LevelFile ReadFromData(byte[] data, uint address)
+		{
+			using(EndianStackReader reader = new(data))
+			{
+				return Read(reader, address);
+			}
+		}
+
+		/// <summary>
+		/// Reads a level file off an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <returns>The level file that was read.</returns>
+		public static LevelFile Read(EndianStackReader reader)
+		{
+			return Read(reader, 0);
+		}
+
+		/// <summary>
+		/// Reads a level file off an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <returns>The level file that was read.</returns>
+		public static LevelFile Read(EndianStackReader reader, uint address)
+		{
+			reader.PushBigEndian(false);
+
+			try
+			{
+				// Fix: header and version were previously read from offset 0
+				// regardless of the given address parameter.
+				ulong header = reader.ReadULong(address) & HeaderMask;
+				byte version = reader[address + 7];
+
+				ModelFormat format = header switch
+				{
+					SA1LVL => ModelFormat.SA1,
+					SADXLVL => ModelFormat.SADX,
+					SA2LVL => ModelFormat.SA2,
+					SA2BLVL => ModelFormat.SA2B,
+					BUFLVL => ModelFormat.Buffer,
+					_ => throw new FormatException("File invalid; Header malformed"),
+				};
+
+				if(version > CurrentLandtableVersion)
+				{
+					throw new FormatException("File invalid; Version not supported");
+				}
+
+				MetaData metaData = MetaData.Read(reader, address + 0xC, version, false);
+				PointerLUT lut = new(metaData.Labels);
+
+				// NOTE(review): unlike ModelFile.ReadSA, the landtable pointer is
+				// read as an absolute offset; nonzero `address` bases are not
+				// adjusted via ImageBase — confirm intended for embedded files.
+				uint ltblAddress = reader.ReadUInt(address + 8);
+				LandTable table = LandTable.Read(reader, ltblAddress, format, lut);
+
+				return new(table, metaData);
+			}
+			finally
+			{
+				reader.PopEndian();
+			}
+		}
+
+
+		/// <summary>
+		/// Write the level file to a file. Previous labels may get lost.
+		/// </summary>
+		/// <param name="filepath">Path to the file to write to.</param>
+		public void WriteToFile(string filepath)
+		{
+			WriteToFile(filepath, Level, MetaData);
+		}
+
+		/// <summary>
+		/// Writes the level file to a byte array. Previous labels may get lost.
+		/// </summary>
+		/// <returns>The written byte data.</returns>
+		public byte[] WriteToData()
+		{
+			return WriteToData(Level, MetaData);
+		}
+
+		/// <summary>
+		/// Writes the level file to an endian stack writer. Previous labels may get lost.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		public void Write(EndianStackWriter writer)
+		{
+			Write(writer, Level, MetaData);
+		}
+
+
+		/// <summary>
+		/// Write a level file to a file.
+		/// </summary>
+		/// <param name="filepath">Path to the file to write to.</param>
+		/// <param name="level">The level to write.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		public static void WriteToFile(string filepath, LandTable level, MetaData? metaData = null)
+		{
+			using(FileStream stream = System.IO.File.Create(filepath))
+			{
+				EndianStackWriter writer = new(stream);
+				Write(writer, level, metaData);
+			}
+		}
+
+		/// <summary>
+		/// Writes a level file to a byte array.
+		/// </summary>
+		/// <param name="level">The level to write.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		/// <returns>The written byte data.</returns>
+		public static byte[] WriteToData(LandTable level, MetaData? metaData = null)
+		{
+			using(MemoryStream stream = new())
+			{
+				EndianStackWriter writer = new(stream);
+				Write(writer, level, metaData);
+				return stream.ToArray();
+			}
+		}
+
+		/// <summary>
+		/// Writes a level file to an endian stack writer.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		/// <param name="level">The level to write.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		public static void Write(EndianStackWriter writer, LandTable level, MetaData? metaData = null)
+		{
+			// writing indicator
+			switch(level.Format)
+			{
+				case ModelFormat.SA1:
+					writer.WriteULong(SA1LVLVer);
+					break;
+				case ModelFormat.SADX:
+					writer.WriteULong(SADXLVLVer);
+					break;
+				case ModelFormat.SA2:
+					writer.WriteULong(SA2LVLVer);
+					break;
+				case ModelFormat.SA2B:
+					writer.WriteULong(SA2BLVLVer);
+					break;
+				case ModelFormat.Buffer:
+					writer.WriteULong(BUFLVLVer);
+					break;
+				default:
+					// Fix: previously fell through silently and produced a
+					// headerless (malformed) file; now consistent with ModelFile.WriteSA.
+					throw new ArgumentException($"Model format {level.Format} not supported for level files");
+			}
+
+			uint placeholderAddr = writer.Position;
+			// 4 bytes: landtable address placeholder
+			// 4 bytes: metadata placeholder
+			writer.WriteEmpty(8);
+
+			PointerLUT lut = new();
+
+			uint ltblAddress = level.Write(writer, lut);
+
+			metaData ??= new();
+			metaData.Labels = lut.Labels.GetDictFrom();
+			uint metaDataAddress = metaData.Write(writer);
+
+			uint end = writer.Position;
+			writer.Seek(placeholderAddr, SeekOrigin.Begin);
+			writer.WriteUInt(ltblAddress);
+			writer.WriteUInt(metaDataAddress);
+			writer.Seek(end, SeekOrigin.Begin);
+		}
+
+	}
+}
diff --git a/src/SA3D.Modeling/File/MetaBlockType.cs b/src/SA3D.Modeling/File/MetaBlockType.cs
new file mode 100644
index 0000000..56d6278
--- /dev/null
+++ b/src/SA3D.Modeling/File/MetaBlockType.cs
@@ -0,0 +1,58 @@
+namespace SA3D.Modeling.File
+{
+ ///
+ /// Meta data type
+ ///
+	/// <summary>
+	/// Meta data type. Each value is the little-endian ASCII magic of the block.
+	/// </summary>
+	public enum MetaBlockType : uint
+	{
+		/// <summary>
+		/// Data labels. ("LABL")
+		/// </summary>
+		Label = 0x4C42414C,
+
+		/// <summary>
+		/// List of animation files paths. ("ANIM")
+		/// </summary>
+		Animation = 0x4D494E41,
+
+		/// <summary>
+		/// List of morph animation file paths. ("MORF")
+		/// </summary>
+		Morph = 0x46524F4D,
+
+		/// <summary>
+		/// Author of the file. ("AUTH")
+		/// </summary>
+		Author = 0x48545541,
+
+		/// <summary>
+		/// Tool used to create the file. ("TOOL")
+		/// </summary>
+		Tool = 0x4C4F4F54,
+
+		/// <summary>
+		/// Description given to the file. ("DESC")
+		/// </summary>
+		Description = 0x43534544,
+
+		/// <summary>
+		/// Texture info. ("TEX\0")
+		/// </summary>
+		Texture = 0x584554,
+
+		/// <summary>
+		/// Name of the action that the data belongs to. ("NTCA")
+		/// </summary>
+		ActionName = 0x4143544E,
+
+		/// <summary>
+		/// Name of the object that the data belongs to. ("NJBO")
+		/// </summary>
+		ObjectName = 0x4F424A4E,
+
+		/// <summary>
+		/// End marker. ("END\0")
+		/// </summary>
+		End = 0x444E45
+	}
+}
diff --git a/src/SA3D.Modeling/File/MetaData.cs b/src/SA3D.Modeling/File/MetaData.cs
new file mode 100644
index 0000000..a34277f
--- /dev/null
+++ b/src/SA3D.Modeling/File/MetaData.cs
@@ -0,0 +1,397 @@
+using SA3D.Common.IO;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+
+
+namespace SA3D.Modeling.File
+{
+ ///
+ /// Meta data storage for files.
+ ///
+	public class MetaData
+	{
+		/// <summary>
+		/// Author of the file.
+		/// </summary>
+		public string? Author { get; set; }
+
+		/// <summary>
+		/// Description of the files contents.
+		/// </summary>
+		public string? Description { get; set; }
+
+		/// <summary>
+		/// Action name.
+		/// </summary>
+		public string? ActionName { get; set; }
+
+		/// <summary>
+		/// Object name.
+		/// </summary>
+		public string? ObjectName { get; set; }
+
+		/// <summary>
+		/// C struct labels (only for reading); maps data address to label text.
+		/// </summary>
+		public Dictionary<uint, string> Labels { get; set; }
+
+		/// <summary>
+		/// Animation file paths.
+		/// </summary>
+		public List<string> AnimFiles { get; }
+
+		/// <summary>
+		/// Morph animation file paths.
+		/// </summary>
+		public List<string> MorphFiles { get; }
+
+		/// <summary>
+		/// Other chunk blocks that have no implementation; maps block type to raw block data.
+		/// </summary>
+		public Dictionary<uint, byte[]> Other { get; set; }
+
+
+		/// <summary>
+		/// Creates a new empty set of meta data.
+		/// </summary>
+		public MetaData()
+		{
+			AnimFiles = new();
+			MorphFiles = new();
+			Other = new();
+			Labels = new();
+		}
+
+
+		// Version 0 layout: two optional pointers to -1 terminated path pointer lists.
+		private void ReadVersion0(EndianStackReader reader, uint address)
+		{
+			// reading animation locations
+			if(reader.TryReadPointer(address, out uint dataAddr))
+			{
+				uint pathAddr = reader.ReadPointer(dataAddr);
+				while(pathAddr != uint.MaxValue)
+				{
+					AnimFiles.Add(reader.ReadNullterminatedString(pathAddr));
+					pathAddr = reader.ReadPointer(dataAddr += 4);
+				}
+			}
+
+			// reading morph locations
+			if(reader.TryReadPointer(address + 4, out dataAddr))
+			{
+				uint pathAddr = reader.ReadPointer(dataAddr);
+				while(pathAddr != uint.MaxValue)
+				{
+					MorphFiles.Add(reader.ReadNullterminatedString(pathAddr));
+					pathAddr = reader.ReadPointer(dataAddr += 4);
+				}
+			}
+		}
+
+		// Version 1 layout: optional version-0 block, followed by a pointer to a
+		// -1 terminated (address, text pointer) label table.
+		private void ReadVersion1(EndianStackReader reader, uint address, bool hasAnimMorphFiles)
+		{
+			if(hasAnimMorphFiles)
+			{
+				ReadVersion0(reader, address);
+				address += 8;
+			}
+
+			if(!reader.TryReadPointer(address, out uint labelsAddress))
+			{
+				return;
+			}
+
+			uint labelPointer = reader.ReadUInt(labelsAddress);
+			while(labelPointer != uint.MaxValue)
+			{
+				uint labelTextPointer = reader.ReadUInt(labelsAddress + 4);
+				string labelText = reader.ReadNullterminatedString(labelTextPointer);
+				Labels.Add(labelPointer, labelText);
+
+				labelsAddress += 8;
+				labelPointer = reader.ReadUInt(labelsAddress);
+			}
+		}
+
+		// Reads the contents of a single meta block. Returns false for
+		// unrecognized block types so the caller can store them raw.
+		private bool ReadMetaBlockType(EndianStackReader reader, ref uint address, MetaBlockType type)
+		{
+			switch(type)
+			{
+				case MetaBlockType.Label:
+					while(reader.ReadULong(address) != ulong.MaxValue)
+					{
+						uint labelAddress = reader.ReadUInt(address);
+						string label = reader.ReadNullterminatedString(reader.ReadPointer(address += 4));
+						// last occurrence of an address wins
+						if(!Labels.TryAdd(labelAddress, label))
+						{
+							Labels[labelAddress] = label;
+						}
+
+						address += 4;
+					}
+
+					break;
+				case MetaBlockType.Animation:
+					while(reader.ReadUInt(address) != uint.MaxValue)
+					{
+						AnimFiles.Add(
+							reader.ReadNullterminatedString(
+								reader.ReadPointer(address)));
+						address += 4;
+					}
+
+					break;
+				case MetaBlockType.Morph:
+					while(reader.ReadUInt(address) != uint.MaxValue)
+					{
+						MorphFiles.Add(
+							reader.ReadNullterminatedString(
+								reader.ReadPointer(address)));
+						address += 4;
+					}
+
+					break;
+				case MetaBlockType.Author:
+					Author = reader.ReadNullterminatedString(address);
+					break;
+				case MetaBlockType.Description:
+					Description = reader.ReadNullterminatedString(address);
+					break;
+				case MetaBlockType.ActionName:
+					ActionName = reader.ReadNullterminatedString(address);
+					break;
+				case MetaBlockType.ObjectName:
+					ObjectName = reader.ReadNullterminatedString(address);
+					break;
+				case MetaBlockType.Tool:
+					break;
+				case MetaBlockType.Texture:
+					break;
+				case MetaBlockType.End:
+					break;
+				default:
+					return false;
+			}
+
+			return true;
+		}
+
+		// Version 2 layout: pointer to a sequence of (type, size, data) blocks
+		// terminated by an End block.
+		private void ReadVersion2(EndianStackReader reader, uint address)
+		{
+			if(!reader.TryReadPointer(address, out uint tmpAddr))
+			{
+				return;
+			}
+
+			MetaBlockType type = (MetaBlockType)reader.ReadUInt(tmpAddr);
+
+			while(type != MetaBlockType.End)
+			{
+				uint blockSize = reader.ReadUInt(tmpAddr + 4);
+				uint blockStart = tmpAddr + 8;
+				tmpAddr = blockStart + blockSize;
+
+				ReadMetaBlockType(reader, ref blockStart, type);
+
+				type = (MetaBlockType)reader.ReadUInt(tmpAddr);
+			}
+		}
+
+		// Version 3 layout: like version 2, but pointers inside blocks are
+		// relative to the block start (handled via ImageBase), and unknown
+		// blocks are preserved raw in `Other`.
+		private void ReadVersion3(EndianStackReader reader, uint address)
+		{
+			if(!reader.TryReadPointer(address, out uint metaAddr))
+			{
+				return;
+			}
+
+			MetaBlockType type = (MetaBlockType)reader.ReadUInt(metaAddr);
+
+			while(type != MetaBlockType.End)
+			{
+				uint blockSize = reader.ReadUInt(metaAddr += 4);
+				metaAddr += 4;
+
+				// two's complement negation; makes pointers block-relative
+				reader.ImageBase = ~metaAddr + 1;
+				uint blockAddr = metaAddr;
+
+				if(!ReadMetaBlockType(reader, ref blockAddr, type))
+				{
+					byte[] block = reader.Slice((int)blockAddr, (int)blockSize).ToArray();
+					Other.Add((uint)type, block);
+				}
+
+				metaAddr += blockSize;
+				type = (MetaBlockType)reader.ReadUInt(metaAddr);
+			}
+		}
+
+		/// <summary>
+		/// Reads meta data off an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <param name="version">File version.</param>
+		/// <param name="hasAnimMorphFiles">Whether the file contains animation and morph animation file paths (only applicable to Version 1).</param>
+		/// <returns>The meta data that was read.</returns>
+		public static MetaData Read(EndianStackReader reader, uint address, int version, bool hasAnimMorphFiles)
+		{
+			MetaData result = new();
+			uint prevImageBase = reader.ImageBase;
+			reader.ImageBase = 0;
+
+			switch(version)
+			{
+				case 0:
+					result.ReadVersion0(reader, address);
+					break;
+				case 1:
+					result.ReadVersion1(reader, address, hasAnimMorphFiles);
+					break;
+				case 2:
+					result.ReadVersion2(reader, address);
+					break;
+				case 3:
+					result.ReadVersion3(reader, address);
+					break;
+				default:
+					break;
+			}
+
+			reader.ImageBase = prevImageBase;
+			return result;
+		}
+
+
+		/// <summary>
+		/// Writes the meta data to an endian stack writer (version 3 block layout).
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		/// <returns>The (pointer) address at which the meta data was written.</returns>
+		public uint Write(EndianStackWriter writer)
+		{
+			uint result = writer.PointerPosition;
+
+			if(Labels.Count > 0)
+			{
+				WriteBlock(writer, MetaBlockType.Label, () =>
+				{
+					// string data starts after all (address, offset) pairs
+					// plus the 8 byte terminator
+					uint straddr = (uint)((Labels.Count * 8) + 8);
+
+					foreach(KeyValuePair<uint, string> label in Labels)
+					{
+						writer.WriteUInt(label.Key);
+						writer.WriteUInt(straddr);
+						straddr += CalcStringLength(label.Value);
+					}
+
+					writer.WriteLong(-1L);
+
+					foreach(KeyValuePair<uint, string> label in Labels)
+					{
+						WriteString(writer, label.Value);
+					}
+				});
+			}
+
+			if(AnimFiles.Count > 0)
+			{
+				WriteStringList(writer, MetaBlockType.Animation, AnimFiles);
+			}
+
+			if(MorphFiles.Count > 0)
+			{
+				WriteStringList(writer, MetaBlockType.Morph, MorphFiles);
+			}
+
+			WriteStringBlock(writer, MetaBlockType.Author, Author);
+			WriteStringBlock(writer, MetaBlockType.Description, Description);
+			WriteStringBlock(writer, MetaBlockType.ActionName, ActionName);
+			WriteStringBlock(writer, MetaBlockType.ObjectName, ObjectName);
+
+			// unknown blocks are written back verbatim
+			foreach(KeyValuePair<uint, byte[]> item in Other)
+			{
+				writer.WriteUInt(item.Key);
+				writer.WriteUInt((uint)item.Value.Length);
+				writer.Write(item.Value);
+			}
+
+			writer.WriteUInt((uint)MetaBlockType.End);
+			writer.WriteUInt(0);
+
+			return result;
+		}
+
+		#region Utility methods
+
+		// Writes a (type, size, payload) block, backfilling the size afterwards.
+		private static void WriteBlock(EndianStackWriter writer, MetaBlockType type, Action write)
+		{
+			uint start = writer.Position;
+
+			writer.WriteUInt((uint)type);
+			writer.WriteEmpty(4);
+
+			write();
+
+			uint bytesWritten = writer.Position - start - 8;
+
+			uint prevPos = writer.Position;
+			writer.Seek(start + 4, SeekOrigin.Begin);
+			writer.WriteUInt(bytesWritten);
+			writer.Seek(prevPos, SeekOrigin.Begin);
+		}
+
+		// Writes a null terminated UTF8 string padded to 4 bytes.
+		private static void WriteString(EndianStackWriter writer, string value)
+		{
+			byte[] bytes = Encoding.UTF8.GetBytes(value + "\0");
+			uint start = writer.Position;
+			writer.Write(bytes);
+			writer.AlignFrom(4, start);
+		}
+
+		// UTF8 byte length incl. null terminator, padded to 4 bytes;
+		// must match what WriteString emits.
+		private static uint CalcStringLength(string value)
+		{
+			uint length = (uint)Encoding.UTF8.GetBytes(value).Length + 1;
+			uint padding = length % 4;
+			if(padding > 0)
+			{
+				length += 4 - padding;
+			}
+
+			return length;
+		}
+
+		private static void WriteStringBlock(EndianStackWriter writer, MetaBlockType type, string? value)
+		{
+			if(string.IsNullOrEmpty(value))
+			{
+				return;
+			}
+
+			WriteBlock(writer, type, () => WriteString(writer, value));
+		}
+
+		private static void WriteStringList(EndianStackWriter writer, MetaBlockType type, List<string> values)
+		{
+			WriteBlock(writer, type, () =>
+			{
+				// string data starts after the pointer list and its terminator
+				uint straddr = (uint)((values.Count + 1) * 4);
+
+				foreach(string value in values)
+				{
+					writer.WriteUInt(straddr);
+					straddr += CalcStringLength(value);
+				}
+
+				writer.WriteUInt(uint.MaxValue);
+
+				foreach(string value in values)
+				{
+					WriteString(writer, value);
+				}
+			});
+		}
+
+		#endregion
+	}
+}
diff --git a/src/SA3D.Modeling/File/ModelFile.cs b/src/SA3D.Modeling/File/ModelFile.cs
new file mode 100644
index 0000000..cf06926
--- /dev/null
+++ b/src/SA3D.Modeling/File/ModelFile.cs
@@ -0,0 +1,437 @@
+using SA3D.Modeling.Mesh;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.Structs;
+using SA3D.Common.IO;
+using System.IO;
+using System;
+using static SA3D.Modeling.File.FileHeaders;
+
+namespace SA3D.Modeling.File
+{
+ ///
+ /// Node model with attach data file contents.
+ ///
+	public class ModelFile
+	{
+		/// <summary>
+		/// Whether the file is an NJ binary.
+		/// </summary>
+		public bool NJFile { get; }
+
+		/// <summary>
+		/// Attach format of the file.
+		/// </summary>
+		public ModelFormat Format { get; }
+
+		/// <summary>
+		/// Hierarchy tip of the file.
+		/// </summary>
+		public Node Model { get; }
+
+		/// <summary>
+		/// Meta data of the file.
+		/// </summary>
+		public MetaData MetaData { get; }
+
+
+		private ModelFile(ModelFormat format, Node model, MetaData metaData, bool nj)
+		{
+			Format = format;
+			Model = model;
+			MetaData = metaData;
+			NJFile = nj;
+		}
+
+
+		/// <summary>
+		/// Checks whether data is formatted as a model file.
+		/// </summary>
+		/// <param name="data">The data to check.</param>
+		public static bool CheckIsModelFile(byte[] data)
+		{
+			return CheckIsModelFile(data, 0);
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a model file.
+		/// </summary>
+		/// <param name="data">The data to check.</param>
+		/// <param name="address">Address at which to check.</param>
+		public static bool CheckIsModelFile(byte[] data, uint address)
+		{
+			// Dispose the temporary reader instead of leaking it.
+			using(EndianStackReader reader = new(data))
+			{
+				return CheckIsModelFile(reader, address);
+			}
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a model file.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		public static bool CheckIsModelFile(EndianStackReader reader)
+		{
+			return CheckIsModelFile(reader, 0);
+		}
+
+		/// <summary>
+		/// Checks whether data is formatted as a model file.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to check.</param>
+		public static bool CheckIsModelFile(EndianStackReader reader, uint address)
+		{
+			// NJ binaries have no SAMDL header; probing for a model block
+			// is the only way to identify them.
+			try
+			{
+				_ = GetNJModelBlockAddress(reader, address);
+				return true;
+			}
+			catch(FormatException) { }
+
+			switch(reader.ReadULong(address) & HeaderMask)
+			{
+				case SA1MDL:
+				case SADXMDL:
+				case SA2MDL:
+				case SA2BMDL:
+				case BUFMDL:
+					break;
+				default:
+					return false;
+			}
+
+			// The 8th header byte stores the file version.
+			return reader[address + 7] <= CurrentModelVersion;
+		}
+
+
+		/// <summary>
+		/// Reads a model file.
+		/// </summary>
+		/// <param name="filepath">The path to the file that should be read.</param>
+		/// <returns>The model file that was read.</returns>
+		public static ModelFile ReadFromFile(string filepath)
+		{
+			return ReadFromData(System.IO.File.ReadAllBytes(filepath));
+		}
+
+		/// <summary>
+		/// Reads a model file off byte data.
+		/// </summary>
+		/// <param name="data">Data to read.</param>
+		/// <returns>The model file that was read.</returns>
+		public static ModelFile ReadFromData(byte[] data)
+		{
+			return ReadFromData(data, 0);
+		}
+
+		/// <summary>
+		/// Reads a model file off byte data.
+		/// </summary>
+		/// <param name="data">The data to read from.</param>
+		/// <param name="address">The address at which to start reading.</param>
+		/// <returns>The model file that was read.</returns>
+		public static ModelFile ReadFromData(byte[] data, uint address)
+		{
+			using(EndianStackReader reader = new(data))
+			{
+				return Read(reader, address);
+			}
+		}
+
+		/// <summary>
+		/// Reads a model file off an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <returns>The model file that was read.</returns>
+		public static ModelFile Read(EndianStackReader reader)
+		{
+			return Read(reader, 0);
+		}
+
+		/// <summary>
+		/// Reads a model file off an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <returns>The model file that was read.</returns>
+		public static ModelFile Read(EndianStackReader reader, uint address)
+		{
+			reader.PushBigEndian(false);
+
+			try
+			{
+				return reader.ReadUShort(address) is NJ or GJ
+					? ReadNJ(reader, address)
+					: ReadSA(reader, address);
+			}
+			finally
+			{
+				reader.PopEndian();
+			}
+		}
+
+
+		// Walks the NJ chunk list until a model block (BM/CM) is found and
+		// returns its address. Throws if the data is not valid NJ data or
+		// contains no model block.
+		private static uint GetNJModelBlockAddress(EndianStackReader reader, uint address)
+		{
+			uint blockAddress = address;
+
+			// Fix: the loop previously tested `address`, which never changes,
+			// so it could only terminate via an out-of-range read.
+			while(blockAddress + 8 <= reader.Length)
+			{
+				ushort njHeader = reader.ReadUShort(blockAddress);
+
+				// Fix: `is not NJ or GJ` parsed as `(not NJ) or GJ` and
+				// incorrectly rejected GJ headers; parentheses are required.
+				if(njHeader is not (NJ or GJ))
+				{
+					throw new FormatException("Malformatted NJ data.");
+				}
+
+				ushort blockHeader = reader.ReadUShort(blockAddress + 2);
+
+				if(blockHeader is BM or CM)
+				{
+					return blockAddress;
+				}
+
+				uint blockSize = reader.ReadUInt(blockAddress + 4);
+				blockAddress += 8 + blockSize;
+			}
+
+			throw new FormatException("No model block found");
+		}
+
+		private static ModelFile ReadNJ(EndianStackReader reader, uint address)
+		{
+			uint blockAddress = GetNJModelBlockAddress(reader, address);
+			ushort blockHeader = reader.ReadUShort(blockAddress + 2);
+
+			uint modelAddress = blockAddress + 8;
+			bool fileEndian = reader.CheckBigEndian32(modelAddress);
+			reader.PushBigEndian(fileEndian);
+
+			try
+			{
+				// make pointers relative to the model block
+				reader.ImageBase = unchecked((uint)-modelAddress);
+
+				ModelFormat format = blockHeader switch
+				{
+					BM => ModelFormat.SA1,
+					CM => ModelFormat.SA2,
+					_ => throw new FormatException()
+				};
+
+				Node model = Node.Read(reader, modelAddress, format, new());
+
+				return new(format, model, new(), true);
+			}
+			finally
+			{
+				// Fix: the endian pushed above was previously never popped.
+				reader.PopEndian();
+			}
+		}
+
+		private static ModelFile ReadSA(EndianStackReader reader, uint address)
+		{
+			ulong header8 = reader.ReadULong(address) & HeaderMask;
+			ModelFormat format = header8 switch
+			{
+				SA1MDL => ModelFormat.SA1,
+				SADXMDL => ModelFormat.SADX,
+				SA2MDL => ModelFormat.SA2,
+				SA2BMDL => ModelFormat.SA2B,
+				BUFMDL => ModelFormat.Buffer,
+				_ => throw new FormatException("File invalid; Header malformed"),
+			};
+
+			// checking the version
+			byte version = reader[address + 7];
+			if(version > CurrentModelVersion)
+			{
+				throw new FormatException("File invalid; Unsupported version");
+			}
+
+			MetaData metaData = MetaData.Read(reader, address + 0xC, version, true);
+			PointerLUT lut = new(metaData.Labels);
+
+			uint prevImageBase = reader.ImageBase;
+			if(address != 0)
+			{
+				// make pointers relative to the file start
+				reader.ImageBase = unchecked((uint)-address);
+			}
+
+			uint modelAddr = reader.ReadPointer(address + 8);
+			Node model = Node.Read(reader, modelAddr, format, lut);
+
+			reader.ImageBase = prevImageBase;
+
+			return new(format, model, metaData, false);
+		}
+
+
+		/// <summary>
+		/// Write the model file to a file. Previous labels may get lost.
+		/// </summary>
+		/// <param name="filepath">Path to the file to write to.</param>
+		public void WriteToFile(string filepath)
+		{
+			WriteToFile(filepath, Model, NJFile, MetaData, Format);
+		}
+
+		/// <summary>
+		/// Writes the model file to a byte array. Previous labels may get lost.
+		/// </summary>
+		/// <returns>The written byte data.</returns>
+		public byte[] WriteToData()
+		{
+			return WriteToData(Model, NJFile, MetaData, Format);
+		}
+
+		/// <summary>
+		/// Writes the model file to an endian stack writer. Previous labels may get lost.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		public void Write(EndianStackWriter writer)
+		{
+			Write(writer, Model, NJFile, MetaData, Format);
+		}
+
+
+		/// <summary>
+		/// Write a model file to a file.
+		/// </summary>
+		/// <param name="filepath">Path to the file to write to.</param>
+		/// <param name="model">The model to write.</param>
+		/// <param name="nj">Whether to format as an NJ file.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		/// <param name="format">The format to write in.</param>
+		public static void WriteToFile(string filepath, Node model, bool nj = false, MetaData? metaData = null, ModelFormat? format = null)
+		{
+			using(FileStream stream = System.IO.File.Create(filepath))
+			{
+				EndianStackWriter writer = new(stream);
+				Write(writer, model, nj, metaData, format);
+			}
+		}
+
+		/// <summary>
+		/// Writes a model file to a byte array.
+		/// </summary>
+		/// <param name="model">The model to write.</param>
+		/// <param name="nj">Whether to format as an NJ file.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		/// <param name="format">The format to write in.</param>
+		/// <returns>The written byte data.</returns>
+		public static byte[] WriteToData(Node model, bool nj = false, MetaData? metaData = null, ModelFormat? format = null)
+		{
+			using(MemoryStream stream = new())
+			{
+				EndianStackWriter writer = new(stream);
+				Write(writer, model, nj, metaData, format);
+				return stream.ToArray();
+			}
+		}
+
+		/// <summary>
+		/// Writes a model file to an endian stack writer.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		/// <param name="model">The model to write.</param>
+		/// <param name="nj">Whether to format as an NJ file.</param>
+		/// <param name="metaData">The metadata to include.</param>
+		/// <param name="format">The format to write in. Derived from the model's attach format if null.</param>
+		public static void Write(EndianStackWriter writer, Node model, bool nj = false, MetaData? metaData = null, ModelFormat? format = null)
+		{
+			format ??= model.GetAttachFormat() switch
+			{
+				AttachFormat.Buffer => ModelFormat.Buffer,
+				AttachFormat.BASIC => ModelFormat.SA1,
+				AttachFormat.CHUNK => ModelFormat.SA2,
+				AttachFormat.GC => ModelFormat.SA2B,
+				_ => throw new InvalidOperationException(),
+			};
+
+			if(nj)
+			{
+				WriteNJ(writer, model, format.Value);
+			}
+			else
+			{
+				WriteSA(writer, model, format.Value, metaData);
+			}
+		}
+
+		private static void WriteNJ(EndianStackWriter writer, Node model, ModelFormat format)
+		{
+			writer.WriteUShort(NJ);
+			switch(format)
+			{
+				case ModelFormat.SA1 or ModelFormat.SADX:
+					writer.WriteUShort(BM);
+					break;
+				case ModelFormat.SA2:
+					writer.WriteUShort(CM);
+					break;
+				default:
+					throw new ArgumentException($"Attach format {format} not supported for NJ binaries");
+			}
+
+			uint placeholderAddress = writer.Position;
+			writer.WriteEmpty(4); // file length placeholder
+
+			uint nodeStart = writer.Position;
+			writer.ImageBase = unchecked((uint)-nodeStart);
+			writer.WriteEmpty(Node.StructSize);
+
+			PointerLUT lut = new();
+
+			// write dependencies first so the root node can point at them
+			model.Child?.Write(writer, format, lut);
+			model.Next?.Write(writer, format, lut);
+			model.Attach?.Write(writer, format, lut);
+
+			uint byteSize = writer.Position - nodeStart;
+
+			// write the root node to the start
+			uint end = writer.Position;
+			writer.Seek(placeholderAddress, SeekOrigin.Begin);
+			writer.WriteUInt(byteSize);
+			model.Write(writer, format, lut);
+
+			// replace size
+			writer.Seek(end, SeekOrigin.Begin);
+		}
+
+		private static void WriteSA(EndianStackWriter writer, Node model, ModelFormat format, MetaData? metadata)
+		{
+			ulong header = format switch
+			{
+				ModelFormat.SA1 => SA1MDLVer,
+				ModelFormat.SADX => SADXMDLVer,
+				ModelFormat.SA2 => SA2MDLVer,
+				ModelFormat.SA2B => SA2BMDLVer,
+				ModelFormat.Buffer => BUFMDLVer,
+				_ => throw new ArgumentException($"Model format {format} not supported for SAMDL files"),
+			};
+
+			writer.WriteULong(header);
+
+			uint placeholderAddr = writer.Position;
+			// 4 bytes: node address placeholder
+			// 4 bytes: metadata placeholder
+			writer.WriteEmpty(8);
+
+			PointerLUT lut = new();
+			uint modelAddress = model.Write(writer, format, lut);
+
+			metadata ??= new();
+			metadata.Labels = lut.Labels.GetDictFrom();
+			uint metadataAddress = metadata.Write(writer);
+
+			uint end = writer.Position;
+			writer.Seek(placeholderAddr, SeekOrigin.Begin);
+			writer.WriteUInt(modelAddress);
+			writer.WriteUInt(metadataAddress);
+			writer.Seek(end, SeekOrigin.Begin);
+		}
+
+
+		/// <inheritdoc/>
+		public override string ToString()
+		{
+			// Fix: the ternary was inverted and labeled non-NJ files as "NJ".
+			return $"{(NJFile ? "NJ" : "")} Modelfile - {Format}";
+		}
+	}
+}
diff --git a/src/SA3D.Modeling/Mesh/Attach.cs b/src/SA3D.Modeling/Mesh/Attach.cs
new file mode 100644
index 0000000..753e7c2
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Attach.cs
@@ -0,0 +1,265 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Mesh.Buffer;
+using SA3D.Modeling.Mesh.Basic;
+using SA3D.Modeling.Mesh.Chunk;
+using SA3D.Modeling.Mesh.Gamecube;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using static SA3D.Common.StringExtensions;
+
+namespace SA3D.Modeling.Mesh
+{
+ ///
+ /// 3D mesh data attach. Its possible for multiple attaches to make up one full mesh.
+ ///
+	public class Attach : ICloneable, ILabel
+	{
+		/// <inheritdoc/>
+		public string Label { get; set; }
+
+		/// <summary>
+		/// Format of the attach.
+		/// </summary>
+		public virtual AttachFormat Format => AttachFormat.Buffer;
+
+		/// <summary>
+		/// Bounding sphere of the attach.
+		/// </summary>
+		public Bounds MeshBounds { get; set; }
+
+		/// <summary>
+		/// Mesh data ready to draw and used for converting to other attach formats.
+		/// </summary>
+		public BufferMesh[] MeshData { get; set; }
+
+
+		/// <summary>
+		/// Base constructor for derived attach types.
+		/// </summary>
+		protected Attach()
+		{
+			// restored generic argument that was lost (Array.Empty())
+			MeshData = Array.Empty<BufferMesh>();
+			Label = "attach_" + GenerateIdentifier();
+		}
+
+		/// <summary>
+		/// Create a new attach using existing meshdata.
+		/// </summary>
+		/// <param name="meshdata">The meshdata to use.</param>
+		public Attach(BufferMesh[] meshdata)
+		{
+			MeshData = meshdata;
+			Label = "attach_" + GenerateIdentifier();
+		}
+
+
+		/// <summary>
+		/// Checks whether the attaches mesh data has/relies on weights.
+		/// </summary>
+		/// <returns>Whether the attaches mesh data has/relies on weights.</returns>
+		public virtual bool CheckHasWeights()
+		{
+			return MeshData.Any(x => x.ContinueWeight || x.IndexList == null || x.IndexList.Length == 0);
+		}
+
+		/// <summary>
+		/// Returns opaque and transparent buffer meshes from <see cref="MeshData"/>. Meshes without polygons will be ignored.
+		/// </summary>
+		/// <returns>Opaque and transparent meshes, each as their own array.</returns>
+		public (BufferMesh[] opaque, BufferMesh[] transparent) GetDisplayMeshes()
+		{
+			List<BufferMesh> opaque = new();
+			List<BufferMesh> transparent = new();
+
+			foreach(BufferMesh mesh in MeshData)
+			{
+				// no corners -> no polygons to display
+				if(mesh.Corners == null)
+				{
+					continue;
+				}
+
+				if(mesh.Material.UseAlpha)
+				{
+					transparent.Add(mesh);
+				}
+				else
+				{
+					opaque.Add(mesh);
+				}
+			}
+
+			return (opaque.ToArray(), transparent.ToArray());
+		}
+
+		/// <summary>
+		/// Recalculates <see cref="MeshBounds"/> from the attach data.
+		/// </summary>
+		public virtual void RecalculateBounds()
+		{
+			if(MeshData.Length == 0)
+			{
+				MeshBounds = default;
+				return;
+			}
+
+			MeshBounds = Bounds.FromPoints(MeshData.SelectManyIgnoringNull(x => x.Vertices).Select(x => x.Position));
+		}
+
+
+		/// <summary>
+		/// Reads an attach from an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <param name="format">Type of attach to read.</param>
+		/// <param name="lut">Pointer references to use.</param>
+		/// <returns>The attach that was read.</returns>
+		public static Attach Read(EndianStackReader reader, uint address, ModelFormat format, PointerLUT lut)
+		{
+			return format switch
+			{
+				ModelFormat.SA1 or ModelFormat.SADX => BasicAttach.Read(reader, address, format == ModelFormat.SADX, lut),
+				ModelFormat.SA2 => ChunkAttach.Read(reader, address, lut),
+				ModelFormat.SA2B => GCAttach.Read(reader, address, lut),
+				ModelFormat.Buffer => ReadBuffer(reader, address, lut),
+				_ => throw new ArgumentException("Invalid format.", nameof(format)),
+			};
+		}
+
+		/// <summary>
+		/// Reads a buffer attach from an endian stack reader.
+		/// </summary>
+		/// <param name="reader">The reader to read from.</param>
+		/// <param name="address">Address at which to start reading.</param>
+		/// <param name="lut">Pointer references to use.</param>
+		/// <returns>The buffer attach that was read.</returns>
+		public static Attach ReadBuffer(EndianStackReader reader, uint address, PointerLUT lut)
+		{
+			Attach onRead()
+			{
+				uint meshCount = reader.ReadUInt(address);
+				uint meshAddr = reader.ReadPointer(address + 4);
+
+				uint[] meshAddresses = new uint[meshCount];
+				for(int i = 0; i < meshCount; i++)
+				{
+					meshAddresses[i] = reader.ReadPointer(meshAddr);
+					meshAddr += 4;
+				}
+
+				BufferMesh[] meshes = new BufferMesh[meshCount];
+
+				for(int i = 0; i < meshCount; i++)
+				{
+					meshes[i] = BufferMesh.Read(reader, meshAddresses[i]);
+				}
+
+				return new Attach(meshes);
+			}
+
+			return lut.GetAddLabeledValue(address, "attach_", onRead);
+		}
+
+
+		/// <summary>
+		/// Checks whether the attach can be written in the given model format.
+		/// </summary>
+		/// <param name="format">The format to check.</param>
+		/// <returns>Whether the attach can be written.</returns>
+		public virtual bool CanWrite(ModelFormat format)
+		{
+			return format is ModelFormat.Buffer;
+		}
+
+		/// <summary>
+		/// Writes the attach and returns the address to the attach.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		/// <param name="format">The model format to write as.</param>
+		/// <param name="lut">Pointer references to use.</param>
+		/// <returns>Address pointing to the attach.</returns>
+		public uint Write(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
+		{
+			if(!CanWrite(format))
+			{
+				throw new ArgumentException($"Attach type \"{Format}\" does not support writing in model format \"{format}\".");
+			}
+
+			uint onWrite()
+			{
+				if(format == ModelFormat.Buffer)
+				{
+					return WriteBuffer(writer);
+				}
+				else
+				{
+					return WriteInternal(writer, format, lut);
+				}
+			}
+
+			return lut.GetAddAddress(this, onWrite);
+		}
+
+		/// <summary>
+		/// The internal method for writing attach data.
+		/// </summary>
+		/// <param name="writer">The writer to write to.</param>
+		/// <param name="format">The model format to write as.</param>
+		/// <param name="lut">Pointer references to use.</param>
+		/// <returns>The address at which the attach was written.</returns>
+		protected virtual uint WriteInternal(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
+		{
+			return WriteBuffer(writer);
+		}
+
+		private uint WriteBuffer(EndianStackWriter writer)
+		{
+			// write the meshes first
+			uint[] meshAddresses = new uint[MeshData.Length];
+			for(int i = 0; i < MeshData.Length; i++)
+			{
+				meshAddresses[i] = MeshData[i].Write(writer);
+			}
+
+			// write the pointer array
+			uint arrayAddr = writer.Position + writer.ImageBase;
+			for(int i = 0; i < MeshData.Length; i++)
+			{
+				writer.WriteUInt(meshAddresses[i]);
+			}
+
+			uint address = writer.PointerPosition;
+
+			writer.WriteUInt((uint)meshAddresses.Length);
+			writer.WriteUInt(arrayAddr);
+
+			return address;
+		}
+
+
+		object ICloneable.Clone()
+		{
+			return Clone();
+		}
+
+		/// <summary>
+		/// Creates a deep clone of the attach.
+		/// </summary>
+		/// <returns>The cloned attach.</returns>
+		public virtual Attach Clone()
+		{
+			return new(MeshData.ContentClone()) { Label = Label };
+		}
+
+		/// <inheritdoc/>
+		public override string ToString()
+		{
+			return $"{Label} - Buffer";
+		}
+	}
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/BasicAttach.cs b/src/SA3D.Modeling/Mesh/Basic/BasicAttach.cs
new file mode 100644
index 0000000..997b5f5
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/BasicAttach.cs
@@ -0,0 +1,222 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Mesh.Buffer;
+using SA3D.Modeling.Mesh.Converters;
+using SA3D.Modeling.ObjectData.Enums;
+using SA3D.Modeling.Structs;
+using System;
+using System.Numerics;
+using static SA3D.Common.StringExtensions;
+
+namespace SA3D.Modeling.Mesh.Basic
+{
+ /// <summary>
+ /// Mesh data format used by SA1 and SA2
+ /// </summary>
+ public sealed class BasicAttach : Attach
+ {
+ /// <summary>
+ /// Vertex positions.
+ /// </summary>
+ public ILabeledArray<Vector3> Positions { get; }
+
+ /// <summary>
+ /// Vertex normals.
+ /// </summary>
+ public ILabeledArray<Vector3> Normals { get; }
+
+ /// <summary>
+ /// Polygon structures.
+ /// </summary>
+ public ILabeledArray<BasicMesh> Meshes { get; }
+
+ /// <summary>
+ /// Materials for the meshes.
+ /// </summary>
+ public ILabeledArray<BasicMaterial> Materials { get; }
+
+ /// <inheritdoc/>
+ public override AttachFormat Format
+ => AttachFormat.BASIC;
+
+
+ private BasicAttach(ILabeledArray<Vector3> positions, ILabeledArray<Vector3> normals, ILabeledArray<BasicMesh> meshes, ILabeledArray<BasicMaterial> materials, Bounds meshBounds) : base()
+ {
+ Label = "attach_" + GenerateIdentifier();
+
+ Positions = positions;
+ Normals = normals;
+ Meshes = meshes;
+ Materials = materials;
+ MeshBounds = meshBounds;
+ }
+
+ /// <summary>
+ /// Creates a new BASIC attach using existing data.
+ /// <br/> Array labels are automatically generated.
+ /// </summary>
+ /// <param name="positions">Vertex positions.</param>
+ /// <param name="normals">Vertex normals.</param>
+ /// <param name="meshes">Polygons structures.</param>
+ /// <param name="materials">Materials for the meshes.</param>
+ public BasicAttach(Vector3[] positions, Vector3[] normals, BasicMesh[] meshes, BasicMaterial[] materials) : base()
+ {
+ string identifier = GenerateIdentifier();
+ Label = "attach_" + identifier;
+
+ Positions = new LabeledArray<Vector3>("vertex_" + identifier, positions);
+ Normals = new LabeledArray<Vector3>("normal_" + identifier, normals);
+ Meshes = new LabeledArray<BasicMesh>("meshlist_" + identifier, meshes);
+ Materials = new LabeledArray<BasicMaterial>("matlist_" + identifier, materials);
+ }
+
+ /// <summary>
+ /// Creates a new BASIC attach using existing data.
+ /// </summary>
+ /// <param name="positions">Vertex positions.</param>
+ /// <param name="normals">Vertex normals.</param>
+ /// <param name="meshes">Polygons structures.</param>
+ /// <param name="materials">Materials for the meshes.</param>
+ public BasicAttach(ILabeledArray<Vector3> positions, ILabeledArray<Vector3> normals, ILabeledArray<BasicMesh> meshes, ILabeledArray<BasicMaterial> materials) : base()
+ {
+ // Validate before assigning, so a failed construction leaves no state behind.
+ if(normals != null && positions.Length != normals.Length)
+ {
+ throw new ArgumentException("Position and Normal count doesnt match!");
+ }
+
+ Label = "attach_" + GenerateIdentifier();
+
+ Positions = positions;
+ Normals = normals;
+ Meshes = meshes;
+ Materials = materials;
+ }
+
+ /// <inheritdoc/>
+ public override bool CheckHasWeights()
+ {
+ // BASIC geometry carries no vertex weights.
+ return false;
+ }
+
+ /// <inheritdoc/>
+ public override void RecalculateBounds()
+ {
+ MeshBounds = Bounds.FromPoints(Positions);
+ }
+
+ /// <inheritdoc/>
+ public override bool CanWrite(ModelFormat format)
+ {
+ return base.CanWrite(format) || format is ModelFormat.SA1 or ModelFormat.SADX;
+ }
+
+ /// <summary>
+ /// Converts the basic attach to a set of buffer meshes.
+ /// </summary>
+ /// <param name="optimize">Whether to optimize the buffer mesh data.</param>
+ /// <returns>The converted buffer meshes.</returns>
+ public BufferMesh[] ConvertToBufferMeshData(bool optimize)
+ {
+ return BasicConverter.ConvertBasicToBuffer(this, optimize);
+ }
+
+
+ /// <inheritdoc/>
+ protected override uint WriteInternal(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
+ {
+ uint posAddress = writer.WriteCollection(Positions, (w, v) => w.WriteVector3(v));
+ uint nrmAddress = writer.WriteCollection(Normals, (w, v) => w.WriteVector3(v));
+
+ uint meshAddress = writer.WriteCollection(Meshes,
+ (w, m) => m.WriteMeshset(w, format == ModelFormat.SADX, lut),
+ (w, m) => m.WriteData(w, lut));
+
+ uint materialAddress = writer.WriteCollection(Materials, (w, m) => m.Write(w));
+
+ uint outAddress = writer.PointerPosition;
+
+ writer.WriteUInt(posAddress);
+ writer.WriteUInt(nrmAddress);
+ writer.WriteUInt((uint)Positions.Length);
+ writer.WriteUInt(meshAddress);
+ writer.WriteUInt(materialAddress);
+ writer.WriteUShort((ushort)Meshes.Length);
+ writer.WriteUShort((ushort)Materials.Length);
+ MeshBounds.Write(writer);
+
+ if(format == ModelFormat.SADX)
+ {
+ // SADX attach structs carry 4 additional (null) bytes.
+ writer.WriteUInt(0);
+ }
+
+ return outAddress;
+ }
+
+ /// <summary>
+ /// Reads a BASIC attach from an endian stack reader.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which the attach is located.</param>
+ /// <param name="DX">Whether the attach is from SADX.</param>
+ /// <param name="lut">Pointer references to use.</param>
+ /// <returns>The BASIC attach that was read.</returns>
+ public static BasicAttach Read(EndianStackReader reader, uint address, bool DX, PointerLUT lut)
+ {
+ BasicAttach onRead()
+ {
+ ILabeledArray<T> readArray<T>(uint arrayOffset, uint countOffset, string genPrefix, bool shortCount, uint elementSize, EndianIOExtensions.ReadValueDelegate<T> read)
+ {
+ uint itemCount = shortCount
+ ? reader.ReadUShort(address + countOffset)
+ : reader.ReadUInt(address + countOffset);
+
+ /* === Note regarding Empty arrays here ===
+ * Some modded models in the past appear to have used empty arrays.
+ * In an effort to support them, we just create a new array for them.
+ * Seeing how this is only for old mod models, its not tragic if
+ * we have to remove it in case they actually break something else.
+ */
+
+ if(itemCount == 0)
+ {
+ return new LabeledArray<T>(0);
+ }
+
+ uint itemAddr = reader.ReadPointer(address + arrayOffset);
+ return reader.ReadLabeledArray(itemAddr, itemCount, elementSize, read, genPrefix, lut);
+ }
+
+ uint meshSize = DX ? BasicMesh.StructSizeDX : BasicMesh.StructSize;
+
+ ILabeledArray<Vector3> positions = readArray(0x00, 0x08, "vertex_", false, 12, (r, p) => r.ReadVector3(p));
+ ILabeledArray<Vector3> normals = readArray(0x04, 0x08, "normal_", false, 12, (r, p) => r.ReadVector3(p));
+ ILabeledArray<BasicMesh> meshes = readArray(0x0C, 0x14, "meshlist_", true, meshSize, (r, p) => BasicMesh.Read(r, p, lut));
+ ILabeledArray<BasicMaterial> materials = readArray(0x10, 0x16, "matlist_", true, BasicMaterial.StructSize, (r, p) => BasicMaterial.Read(r, p));
+
+ Bounds bounds = Bounds.Read(reader, address + 24);
+
+ return new BasicAttach(positions, normals, meshes, materials, bounds);
+ }
+
+ return lut.GetAddLabeledValue(address, "attach_", onRead);
+ }
+
+
+ /// <inheritdoc/>
+ public override Attach Clone()
+ {
+ return new BasicAttach(Positions.Clone(), Normals.Clone(), Meshes.ContentClone(), Materials.Clone(), MeshBounds)
+ {
+ Label = Label
+ };
+ }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"{Label} - BASIC";
+ }
+ }
+}
+
diff --git a/src/SA3D.Modeling/Mesh/Basic/BasicMaterial.cs b/src/SA3D.Modeling/Mesh/Basic/BasicMaterial.cs
new file mode 100644
index 0000000..c4bb320
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/BasicMaterial.cs
@@ -0,0 +1,362 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Structs;
+using System;
+
+namespace SA3D.Modeling.Mesh.Basic
+{
+ /// <summary>
+ /// BASIC format material
+ /// </summary>
+ public struct BasicMaterial
+ {
+ /// <summary>
+ /// Number of bytes the structure occupies.
+ /// </summary>
+ public const uint StructSize = 20;
+
+ /// <summary>
+ /// Material with default values.
+ /// </summary>
+ public static readonly BasicMaterial DefaultValues = new()
+ {
+ DiffuseColor = Color.ColorWhite,
+ SpecularColor = new Color(0xFF, 0xFF, 0xFF, 0),
+ UseAlpha = true,
+ UseTexture = true,
+ DoubleSided = true,
+ FlatShading = false,
+ IgnoreLighting = false,
+ ClampU = false,
+ ClampV = false,
+ MirrorU = false,
+ MirrorV = false,
+ EnvironmentMap = false,
+ DestinationAlpha = BlendMode.SrcAlphaInverted,
+ SourceAlpha = BlendMode.SrcAlpha,
+ };
+
+ /// <summary>
+ /// Diffuse color.
+ /// </summary>
+ public Color DiffuseColor { get; set; }
+
+ /// <summary>
+ /// Specular color.
+ /// </summary>
+ public Color SpecularColor { get; set; }
+
+ /// <summary>
+ /// Specular exponent.
+ /// </summary>
+ public float SpecularExponent { get; set; }
+
+ /// <summary>
+ /// Texture ID.
+ /// </summary>
+ public uint TextureID { get; set; }
+
+ /// <summary>
+ /// Attributes containing various information.
+ /// </summary>
+ public uint Attributes { get; set; }
+
+ #region Attribute Properties
+
+ /// <summary>
+ /// User defined attributes.
+ /// <br/> Bitmask: 0x0000007F
+ /// </summary>
+ public byte UserAttributes
+ {
+ readonly get => (byte)(Attributes & 0x7Fu);
+ set => Attributes = (Attributes & ~0x7Fu) | (value & 0x7Fu);
+ }
+
+ /// <summary>
+ /// Editor property (?).
+ /// <br/> Bitmask: 0x00000080
+ /// </summary>
+ public bool PickStatus
+ {
+ readonly get => (Attributes & 0x80u) != 0;
+ set => SetAttributeBit(0x80u, value);
+ }
+
+ /// <summary>
+ /// Mipmap distance multiplier.
+ /// <br/> Bitmask: 0x00000F00
+ /// </summary>
+ public float MipmapDistanceMultiplier
+ {
+ readonly get => ((Attributes & 0xF00u) >> 8) * 0.25f;
+ set => Attributes = (Attributes & ~0xF00u) | ((uint)Math.Max(0, Math.Min(0xF, Math.Round(value / 0.25, MidpointRounding.AwayFromZero))) << 8);
+ }
+
+ /// <summary>
+ /// Super sampling (Anisotropic filtering?).
+ /// <br/> Bitmask: 0x00001000
+ /// </summary>
+ public bool SuperSample
+ {
+ readonly get => GetAttributeBit(0x1000u);
+ set => SetAttributeBit(0x1000u, value);
+ }
+
+ /// <summary>
+ /// Texture filter mode.
+ /// <br/> Bitmask: 0x00006000
+ /// </summary>
+ public FilterMode FilterMode
+ {
+ readonly get => (FilterMode)((Attributes >> 13) & 3);
+ set => Attributes = (Attributes & ~0x6000u) | ((uint)value << 13);
+ }
+
+ /// <summary>
+ /// Texture clamp along the V axis.
+ /// <br/> Bitmask: 0x00008000
+ /// </summary>
+ public bool ClampV
+ {
+ readonly get => GetAttributeBit(0x8000u);
+ set => SetAttributeBit(0x8000u, value);
+ }
+
+ /// <summary>
+ /// Texture clamp along the U axis.
+ /// <br/> Bitmask: 0x00010000
+ /// </summary>
+ public bool ClampU
+ {
+ readonly get => GetAttributeBit(0x10000u);
+ set => SetAttributeBit(0x10000u, value);
+ }
+
+ /// <summary>
+ /// Texture mirror along the V axis.
+ /// <br/> Bitmask: 0x00020000
+ /// </summary>
+ public bool MirrorV
+ {
+ readonly get => GetAttributeBit(0x20000u);
+ set => SetAttributeBit(0x20000u, value);
+ }
+
+ /// <summary>
+ /// Texture mirror along the U axis.
+ /// <br/> Bitmask: 0x00040000
+ /// </summary>
+ public bool MirrorU
+ {
+ readonly get => GetAttributeBit(0x40000u);
+ set => SetAttributeBit(0x40000u, value);
+ }
+
+ /// <summary>
+ /// Disables specular shading.
+ /// <br/> Bitmask: 0x00080000
+ /// </summary>
+ public bool IgnoreSpecular
+ {
+ readonly get => GetAttributeBit(0x80000u);
+ set => SetAttributeBit(0x80000u, value);
+ }
+
+ /// <summary>
+ /// Enables alpha blending.
+ /// <br/> Bitmask: 0x00100000
+ /// </summary>
+ public bool UseAlpha
+ {
+ readonly get => GetAttributeBit(0x100000u);
+ set => SetAttributeBit(0x100000u, value);
+ }
+
+ /// <summary>
+ /// Enables texture rendering.
+ /// <br/> Bitmask: 0x00200000
+ /// </summary>
+ public bool UseTexture
+ {
+ readonly get => GetAttributeBit(0x200000u);
+ set => SetAttributeBit(0x200000u, value);
+ }
+
+ /// <summary>
+ /// Applies the texture based on angle between camera and mesh normals (matcap method).
+ /// <br/> Bitmask: 0x00400000
+ /// </summary>
+ public bool EnvironmentMap
+ {
+ readonly get => GetAttributeBit(0x400000);
+ set => SetAttributeBit(0x400000u, value);
+ }
+
+ /// <summary>
+ /// Disables backface culling.
+ /// <br/> Bitmask: 0x00800000
+ /// </summary>
+ public bool DoubleSided
+ {
+ readonly get => GetAttributeBit(0x800000);
+ set => SetAttributeBit(0x800000u, value);
+ }
+
+ /// <summary>
+ /// Ignores interpolated normals and instead uses polygon-wide normals.
+ /// <br/> Bitmask: 0x01000000
+ /// </summary>
+ public bool FlatShading
+ {
+ readonly get => GetAttributeBit(0x1000000);
+ set => SetAttributeBit(0x1000000u, value);
+ }
+
+ /// <summary>
+ /// Disables shading altogether.
+ /// <br/> Bitmask: 0x02000000
+ /// </summary>
+ public bool IgnoreLighting
+ {
+ readonly get => GetAttributeBit(0x2000000);
+ set => SetAttributeBit(0x2000000u, value);
+ }
+
+ /// <summary>
+ /// Destination blend mode.
+ /// <br/> Bitmask: 0x1C000000
+ /// </summary>
+ public BlendMode DestinationAlpha
+ {
+ readonly get => (BlendMode)((Attributes >> 26) & 7);
+ set => Attributes = (uint)((Attributes & ~0x1C000000) | ((uint)value << 26));
+ }
+
+ /// <summary>
+ /// Source blend mode.
+ /// <br/> Bitmask: 0xE0000000
+ /// </summary>
+ public BlendMode SourceAlpha
+ {
+ readonly get => (BlendMode)((Attributes >> 29) & 7);
+ set => Attributes = (Attributes & ~0xE0000000) | ((uint)value << 29);
+ }
+
+ // Returns whether the given attribute bit(s) are set.
+ private readonly bool GetAttributeBit(uint mask)
+ {
+ return (Attributes & mask) != 0;
+ }
+
+ // Sets or clears the given attribute bit(s).
+ private void SetAttributeBit(uint mask, bool value)
+ {
+ if(value)
+ {
+ Attributes |= mask;
+ }
+ else
+ {
+ Attributes &= ~mask;
+ }
+ }
+
+ #endregion
+
+
+ /// <summary>
+ /// Creates a new basic material from a template.
+ /// </summary>
+ /// <param name="template">The template.</param>
+ public BasicMaterial(BasicMaterial template)
+ {
+ DiffuseColor = template.DiffuseColor;
+ SpecularColor = template.SpecularColor;
+ SpecularExponent = template.SpecularExponent;
+ TextureID = template.TextureID;
+ Attributes = template.Attributes;
+ }
+
+
+ /// <summary>
+ /// Reads a material from an endian stack reader.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which the material is located.</param>
+ /// <returns>The read material.</returns>
+ public static BasicMaterial Read(EndianStackReader reader, uint address)
+ {
+ // ReadColor takes "address" by ref and advances it past each color,
+ // so the remaining fields are read at relative offsets 0, 4 and 8.
+ Color dif = reader.ReadColor(ref address, ColorIOType.ARGB8_32);
+ Color spec = reader.ReadColor(ref address, ColorIOType.ARGB8_32);
+ float exp = reader.ReadFloat(address);
+ uint texID = reader.ReadUInt(address + 4);
+ uint attribs = reader.ReadUInt(address + 8);
+
+ return new BasicMaterial()
+ {
+ DiffuseColor = dif,
+ SpecularColor = spec,
+ SpecularExponent = exp,
+ TextureID = texID,
+ Attributes = attribs
+ };
+ }
+
+ /// <summary>
+ /// Writes the materials structure to an endian stack writer.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ public readonly void Write(EndianStackWriter writer)
+ {
+ writer.WriteColor(DiffuseColor, ColorIOType.ARGB8_32);
+ writer.WriteColor(SpecularColor, ColorIOType.ARGB8_32);
+ writer.WriteFloat(SpecularExponent);
+ writer.WriteUInt(TextureID);
+ writer.WriteUInt(Attributes);
+ }
+
+
+ /// <inheritdoc/>
+ public override readonly bool Equals(object? obj)
+ {
+ return obj is BasicMaterial material &&
+ DiffuseColor == material.DiffuseColor &&
+ SpecularColor == material.SpecularColor &&
+ SpecularExponent == material.SpecularExponent &&
+ TextureID == material.TextureID &&
+ Attributes == material.Attributes;
+ }
+
+ /// <inheritdoc/>
+ public override readonly int GetHashCode()
+ {
+ return HashCode.Combine(DiffuseColor, SpecularColor, SpecularExponent, TextureID, Attributes);
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"Texture: {TextureID} / Use Alpha: {UseAlpha}";
+ }
+
+ /// <summary>
+ /// Compares two materials for equality.
+ /// </summary>
+ /// <param name="left">Lefthand material</param>
+ /// <param name="right">Righthand material</param>
+ /// <returns>Whether the materials are equal.</returns>
+ public static bool operator ==(BasicMaterial left, BasicMaterial right)
+ {
+ return left.Equals(right);
+ }
+
+ /// <summary>
+ /// Compares two materials for inequality.
+ /// </summary>
+ /// <param name="left">Lefthand material</param>
+ /// <param name="right">Righthand material</param>
+ /// <returns>Whether the materials are inequal.</returns>
+ public static bool operator !=(BasicMaterial left, BasicMaterial right)
+ {
+ return !(left == right);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/BasicMesh.cs b/src/SA3D.Modeling/Mesh/Basic/BasicMesh.cs
new file mode 100644
index 0000000..a10b753
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/BasicMesh.cs
@@ -0,0 +1,364 @@
+using SA3D.Common.IO;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Mesh.Basic.Polygon;
+using SA3D.Modeling.Structs;
+using System;
+using System.Linq;
+using System.Numerics;
+using static SA3D.Common.StringExtensions;
+
+namespace SA3D.Modeling.Mesh.Basic
+{
+ /// <summary>
+ /// BASIC format mesh structure for holding polygon information.
+ /// </summary>
+ public class BasicMesh : ICloneable
+ {
+ /// <summary>
+ /// Number of bytes the structure occupies.
+ /// </summary>
+ public const uint StructSize = 24;
+
+ /// <summary>
+ /// Number of bytes the structure occupies. (SADX)
+ /// </summary>
+ public const uint StructSizeDX = 28;
+
+ private ILabeledArray<Vector3>? _normals;
+ private ILabeledArray<Color>? _colors;
+ private ILabeledArray<Vector2>? _texcoords;
+
+ /// <summary>
+ /// Index indicating which material to use from the attach's material array.
+ /// </summary>
+ public ushort MaterialIndex { get; set; }
+
+ /// <summary>
+ /// Indicating how polygons are stored.
+ /// </summary>
+ public BasicPolygonType PolygonType { get; }
+
+ /// <summary>
+ /// Polygons of the mesh.
+ /// </summary>
+ public LabeledReadOnlyArray<IBasicPolygon> Polygons { get; }
+
+ /// <summary>
+ /// The amount of corners/loops in the polygons. Determines the lengths of the other arrays.
+ /// </summary>
+ public int PolygonCornerCount { get; }
+
+ /// <summary>
+ /// Polygon attributes. (Unused)
+ /// </summary>
+ public uint PolyAttributes { get; set; }
+
+ /// <summary>
+ /// Per corner custom polygon normals.
+ /// </summary>
+ public ILabeledArray<Vector3>? Normals
+ {
+ get => _normals;
+ set
+ {
+ if(value != null && value.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"New array has a length of {value.Length}, while {PolygonCornerCount} is expected");
+ }
+
+ _normals = value;
+ }
+ }
+
+ /// <summary>
+ /// Per corner polygon colors.
+ /// </summary>
+ public ILabeledArray<Color>? Colors
+ {
+ get => _colors;
+ set
+ {
+ if(value != null && value.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"New array has a length of {value.Length}, while {PolygonCornerCount} is expected");
+ }
+
+ _colors = value;
+ }
+ }
+
+ /// <summary>
+ /// Per corner polygon texture coordinates.
+ /// </summary>
+ public ILabeledArray<Vector2>? Texcoords
+ {
+ get => _texcoords;
+ set
+ {
+ if(value != null && value.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"New array has a length of {value.Length}, while {PolygonCornerCount} is expected");
+ }
+
+ _texcoords = value;
+ }
+ }
+
+ private BasicMesh(
+ ILabeledArray<Vector3>? normals,
+ ILabeledArray<Color>? colors,
+ ILabeledArray<Vector2>? texcoords,
+ ushort materialIndex,
+ BasicPolygonType polygonType,
+ LabeledReadOnlyArray<IBasicPolygon> polygons,
+ int polygonCornerCount,
+ uint polyAttributes)
+ {
+ _normals = normals;
+ _colors = colors;
+ _texcoords = texcoords;
+ MaterialIndex = materialIndex;
+ PolygonType = polygonType;
+ Polygons = polygons;
+ PolygonCornerCount = polygonCornerCount;
+ PolyAttributes = polyAttributes;
+ }
+
+ /// <summary>
+ /// Creates a new basic mesh from preexisting data.
+ /// </summary>
+ /// <param name="materialID">Index indicating which material to use.</param>
+ /// <param name="polyType">Indicating how polygons are stored.</param>
+ /// <param name="polys">Polygons of the mesh.</param>
+ /// <param name="normals">Per corner custom polygon normals.</param>
+ /// <param name="colors">Per corner polygon colors.</param>
+ /// <param name="texcoords">Per corner polygon texture coordinates.</param>
+ public BasicMesh(
+ ushort materialID,
+ BasicPolygonType polyType,
+ LabeledReadOnlyArray<IBasicPolygon> polys,
+ ILabeledArray<Vector3>? normals,
+ ILabeledArray<Color>? colors,
+ ILabeledArray<Vector2>? texcoords)
+ {
+ MaterialIndex = materialID;
+ PolygonType = polyType;
+ Polygons = polys;
+
+ foreach(IBasicPolygon p in Polygons)
+ {
+ PolygonCornerCount += p.NumIndices;
+ }
+
+ if(normals != null && normals.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"Polygon corner count ({PolygonCornerCount}) and normal count ({normals.Length}) dont match up!", nameof(normals));
+ }
+
+ if(colors != null && colors.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"Polygon corner count ({PolygonCornerCount}) and colors count ({colors.Length}) dont match up!", nameof(colors));
+ }
+
+ if(texcoords != null && texcoords.Length != PolygonCornerCount)
+ {
+ throw new ArgumentException($"Polygon corner count ({PolygonCornerCount}) and texcoord count ({texcoords.Length}) dont match up!", nameof(texcoords));
+ }
+
+ Normals = normals;
+ Colors = colors;
+ Texcoords = texcoords;
+ }
+
+ /// <summary>
+ /// Creates a new (empty) mesh based on polygon data.
+ /// </summary>
+ /// <param name="polygonType">Indicating how polygons are stored.</param>
+ /// <param name="polygons">Polygons to use.</param>
+ /// <param name="materialIndex">Index indicating which material to use.</param>
+ /// <param name="hasNormal">Whether the mesh contains custom normals</param>
+ /// <param name="hasColor">Whether the model contains colors.</param>
+ /// <param name="hasTexcoords">Whether the model contains texture coordinate.</param>
+ public BasicMesh(
+ BasicPolygonType polygonType,
+ IBasicPolygon[] polygons,
+ ushort materialIndex,
+ bool hasNormal,
+ bool hasColor,
+ bool hasTexcoords)
+ {
+ PolygonType = polygonType;
+ MaterialIndex = materialIndex;
+
+ string identifier = GenerateIdentifier();
+ Polygons = new LabeledReadOnlyArray<IBasicPolygon>("poly_" + identifier, polygons);
+
+ foreach(IBasicPolygon p in polygons)
+ {
+ PolygonCornerCount += p.NumIndices;
+ }
+
+ if(hasNormal)
+ {
+ Normals = new LabeledArray<Vector3>("polynormal_" + identifier, PolygonCornerCount);
+ }
+
+ if(hasColor)
+ {
+ Colors = new LabeledArray<Color>("vcolor_" + identifier, PolygonCornerCount);
+ }
+
+ if(hasTexcoords)
+ {
+ Texcoords = new LabeledArray<Vector2>("uv_" + identifier, PolygonCornerCount);
+ }
+ }
+
+
+ /// <summary>
+ /// Reads a basic mesh off an endian stack reader.
+ /// </summary>
+ /// <param name="reader">Reader to read from.</param>
+ /// <param name="address">Address at which the mesh is located.</param>
+ /// <param name="lut">Pointer references to use.</param>
+ /// <returns>The read mesh.</returns>
+ public static BasicMesh Read(EndianStackReader reader, uint address, PointerLUT lut)
+ {
+ // Header: bits 0-13 = material index, bits 14-15 = polygon type.
+ ushort header = reader.ReadUShort(address);
+ ushort materialID = (ushort)(header & 0x3FFFu);
+ BasicPolygonType polyType = (BasicPolygonType)(header >> 14);
+ uint polyAttributes = reader.ReadUInt(address + 8);
+
+ //==================================================================
+
+ ushort polyCount = reader.ReadUShort(address + 2);
+ uint polyAddr = reader.ReadPointer(address + 4);
+
+ IBasicPolygon onReadPolys(EndianStackReader reader, ref uint address)
+ {
+ return IBasicPolygon.Read(reader, ref address, polyType);
+ }
+
+ LabeledReadOnlyArray<IBasicPolygon> polys = reader.ReadLabeledReadOnlyArray(polyAddr, polyCount, onReadPolys, "poly_", lut);
+
+ int cornerCount = polys.Sum(x => x.NumIndices);
+
+ //==================================================================
+
+ LabeledArray<T>? ReadArray<T>(uint offset, string prefix, uint valueSize, EndianIOExtensions.ReadValueDelegate<T> readValue)
+ {
+ LabeledArray<T>? result = null;
+
+ if(reader.TryReadPointer(address + offset, out uint pointer))
+ {
+ result = reader.ReadLabeledArray(pointer, (uint)cornerCount, valueSize, readValue, prefix, lut);
+ }
+
+ return result;
+ }
+
+ LabeledArray<Vector3>? normals = ReadArray(0x0C, "polynormal_", 12, (r, p) => r.ReadVector3(p));
+ LabeledArray<Color>? colors = ReadArray(0x10, "vcolor_", 4, (r, p) => r.ReadColor(p, ColorIOType.ARGB8_32));
+ // Label prefix fixed from "polynormal_" (copy-paste error) to "uv_",
+ // matching the texcoord label prefix used by the constructors above.
+ LabeledArray<Vector2>? texcoords = ReadArray(0x14, "uv_", 8, (r, p) => r.ReadVector2(p, FloatIOType.Short) / 255f);
+
+ //==================================================================
+
+ return new BasicMesh(
+ normals,
+ colors,
+ texcoords,
+ materialID,
+ polyType,
+ polys,
+ cornerCount,
+ polyAttributes);
+ }
+
+ /// <summary>
+ /// Writes the different data arrays to a stream
+ /// </summary>
+ /// <param name="writer">Output stream</param>
+ /// <param name="lut">Pointer references to use.</param>
+ public void WriteData(EndianStackWriter writer, PointerLUT lut)
+ {
+ _ = lut.GetAddAddress(Polygons, (array) =>
+ {
+ uint result = writer.PointerPosition;
+
+ foreach(IBasicPolygon p in array)
+ {
+ p.Write(writer);
+ }
+
+ writer.Align(4);
+
+ return result;
+ });
+
+ _ = writer.WriteCollectionWithLUT(Normals, (w, v) => w.WriteVector3(v), lut);
+ _ = writer.WriteCollectionWithLUT(Colors, (w, c) => w.WriteColor(c, ColorIOType.ARGB8_32), lut);
+ _ = writer.WriteCollectionWithLUT(Texcoords, (w, v) => w.WriteVector2(v * 255f, FloatIOType.Short), lut);
+ }
+
+ /// <summary>
+ /// Writes the meshset to a stream
+ /// </summary>
+ /// <param name="writer">Output stream</param>
+ /// <param name="DX">Whether the mesh should be written for SADX</param>
+ /// <param name="lut">Pointer references to use.</param>
+ /// <exception cref="NullReferenceException">Thrown when the mesh data has not been written via <see cref="WriteData"/> yet.</exception>
+ public void WriteMeshset(EndianStackWriter writer, bool DX, PointerLUT lut)
+ {
+ uint normalsAddress = 0;
+ uint colorsAddress = 0;
+ uint texcoordAddress = 0;
+
+ if(!lut.All.TryGetAddress(Polygons, out uint polyAddress)
+ || (Normals != null && !lut.All.TryGetAddress(Normals, out normalsAddress))
+ || (Colors != null && !lut.All.TryGetAddress(Colors, out colorsAddress))
+ || (Texcoords != null && !lut.All.TryGetAddress(Texcoords, out texcoordAddress)))
+ {
+ throw new NullReferenceException("Data has not been written yet");
+ }
+
+ ushort header = MaterialIndex;
+ header |= (ushort)((uint)PolygonType << 14);
+
+ writer.WriteUShort(header);
+ writer.WriteUShort((ushort)Polygons.Length);
+ writer.WriteUInt(polyAddress);
+ writer.WriteUInt(PolyAttributes);
+ writer.WriteUInt(normalsAddress);
+ writer.WriteUInt(colorsAddress);
+ writer.WriteUInt(texcoordAddress);
+
+ if(DX)
+ {
+ writer.WriteEmpty(4);
+ }
+ }
+
+
+ object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ /// <summary>
+ /// Creates a deep clone of the mesh.
+ /// </summary>
+ /// <returns>The clone.</returns>
+ public BasicMesh Clone()
+ {
+ // The polygon array is read-only, so it is shared with the clone.
+ return new BasicMesh(
+ Normals?.Clone(),
+ Colors?.Clone(),
+ Texcoords?.Clone(),
+ MaterialIndex,
+ PolygonType,
+ Polygons,
+ PolygonCornerCount,
+ PolyAttributes);
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicMultiPolygon.cs b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicMultiPolygon.cs
new file mode 100644
index 0000000..6ab00c9
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicMultiPolygon.cs
@@ -0,0 +1,116 @@
+using SA3D.Common.IO;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace SA3D.Modeling.Mesh.Basic.Polygon
+{
+ /// <summary>
+ /// A BASIC polygon containing a variable number of corners.
+ /// </summary>
+ public struct BasicMultiPolygon : IBasicPolygon
+ {
+ /// <summary>
+ /// Indices of the polygon.
+ /// </summary>
+ public ushort[] Indices { get; set; }
+
+ /// <summary>
+ /// Whether the backface culling direction is flipped.
+ /// </summary>
+ public bool Reversed { get; set; }
+
+ /// <inheritdoc/>
+ public readonly uint Size => (uint)(2 + (Indices.Length * 2));
+
+ /// <inheritdoc/>
+ public readonly int NumIndices => Indices.Length;
+
+
+ /// <inheritdoc/>
+ public readonly ushort this[int index]
+ {
+ get => Indices[index];
+ set => Indices[index] = value;
+ }
+
+ /// <summary>
+ /// Creates a new multi polygon.
+ /// </summary>
+ /// <param name="indices">Indices of the polygon.</param>
+ /// <param name="reversed">Whether the polygons backface culling direction is flipped.</param>
+ public BasicMultiPolygon(ushort[] indices, bool reversed)
+ {
+ Indices = indices;
+ Reversed = reversed;
+ }
+
+ /// <summary>
+ /// Creates a new empty multi polygon.
+ /// </summary>
+ /// <param name="size">Number of indices the polygon holds.</param>
+ /// <param name="reversed">Whether the polygons backface culling direction is flipped.</param>
+ public BasicMultiPolygon(uint size, bool reversed)
+ : this(new ushort[size], reversed) { }
+
+
+ /// <summary>
+ /// Reads a basic multi polygon off of an endian stack reader. Advances the address by the number of bytes read.
+ /// </summary>
+ /// <param name="data">The reader to read from.</param>
+ /// <param name="address">Address at which the polygon is located.</param>
+ /// <returns>The polygon that was read.</returns>
+ public static BasicMultiPolygon Read(EndianStackReader data, ref uint address)
+ {
+ // Header: bit 15 = reversed flag, bits 0-14 = index count.
+ ushort header = data.ReadUShort(address);
+ ushort[] indices = new ushort[header & 0x7FFF];
+ bool reversed = (header & 0x8000) != 0;
+ address += 2;
+ for(int i = 0; i < indices.Length; i++)
+ {
+ indices[i] = data.ReadUShort(address);
+ address += 2;
+ }
+
+ return new BasicMultiPolygon(indices, reversed);
+ }
+
+ /// <inheritdoc/>
+ public readonly void Write(EndianStackWriter writer)
+ {
+ writer.WriteUShort((ushort)((Indices.Length & 0x7FFF) | (Reversed ? 0x8000 : 0)));
+ for(int i = 0; i < Indices.Length; i++)
+ {
+ writer.WriteUShort(Indices[i]);
+ }
+ }
+
+
+ /// <inheritdoc/>
+ public readonly IEnumerator<ushort> GetEnumerator()
+ {
+ return ((IEnumerable<ushort>)Indices).GetEnumerator();
+ }
+
+ readonly IEnumerator IEnumerable.GetEnumerator()
+ {
+ return GetEnumerator();
+ }
+
+
+ /// <inheritdoc/>
+ public readonly object Clone()
+ {
+ // Deep clone: copies the index array.
+ return new BasicMultiPolygon(Indices.ToArray(), Reversed);
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"Multi: {Reversed} - {Indices.Length}";
+ }
+
+
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicPolygonType.cs b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicPolygonType.cs
new file mode 100644
index 0000000..509ceb9
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicPolygonType.cs
@@ -0,0 +1,29 @@
+namespace SA3D.Modeling.Mesh.Basic.Polygon
+{
+ /// <summary>
+ /// The different primitive types for BASIC meshes
+ /// </summary>
+ public enum BasicPolygonType
+ {
+ /// <summary>
+ /// Arranges polygons in a triangle list.
+ /// </summary>
+ Triangles,
+
+ /// <summary>
+ /// Arranges polygons in a quad list.
+ /// </summary>
+ Quads,
+
+ /// <summary>
+ /// Arranges polygons with an arbitrary number of corners in a list.
+ /// </summary>
+ NPoly,
+
+ /// <summary>
+ /// Arranges triangles in strips.
+ /// </summary>
+ TriangleStrips
+ }
+
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicQuad.cs b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicQuad.cs
new file mode 100644
index 0000000..9349f3b
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicQuad.cs
@@ -0,0 +1,146 @@
+using SA3D.Common.IO;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
+namespace SA3D.Modeling.Mesh.Basic.Polygon
+{
+ /// <summary>
+ /// A polygon with four corners.
+ /// </summary>
+ public struct BasicQuad : IBasicPolygon
+ {
+ /// <inheritdoc/>
+ public readonly uint Size => 8;
+
+ /// <inheritdoc/>
+ public readonly int NumIndices => 4;
+
+
+ /// <summary>
+ /// First vertex index.
+ /// </summary>
+ public ushort Index1 { get; set; }
+
+ /// <summary>
+ /// Second vertex index.
+ /// </summary>
+ public ushort Index2 { get; set; }
+
+ /// <summary>
+ /// Third vertex index.
+ /// </summary>
+ public ushort Index3 { get; set; }
+
+ /// <summary>
+ /// Fourth vertex index.
+ /// </summary>
+ public ushort Index4 { get; set; }
+
+
+ /// <inheritdoc/>
+ public ushort this[int index]
+ {
+ readonly get => index switch
+ {
+ 0 => Index1,
+ 1 => Index2,
+ 2 => Index3,
+ 3 => Index4,
+ _ => throw new IndexOutOfRangeException(),
+ };
+ set
+ {
+ switch(index)
+ {
+ case 0:
+ Index1 = value;
+ break;
+ case 1:
+ Index2 = value;
+ break;
+ case 2:
+ Index3 = value;
+ break;
+ case 3:
+ Index4 = value;
+ break;
+ default:
+ throw new IndexOutOfRangeException();
+ }
+ }
+ }
+
+
+ /// <summary>
+ /// Creates a new populated basic quad.
+ /// </summary>
+ /// <param name="index1">First vertex index.</param>
+ /// <param name="index2">Second vertex index.</param>
+ /// <param name="index3">Third vertex index.</param>
+ /// <param name="index4">Fourth vertex index.</param>
+ public BasicQuad(ushort index1, ushort index2, ushort index3, ushort index4)
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ Index4 = index4;
+ }
+
+
+ /// <inheritdoc/>
+ public readonly void Write(EndianStackWriter writer)
+ {
+ writer.WriteUShort(Index1);
+ writer.WriteUShort(Index2);
+ writer.WriteUShort(Index3);
+ writer.WriteUShort(Index4);
+ }
+
+ /// <summary>
+ /// Reads a quad off an endian stack reader. Advances the address by the number of bytes read.
+ /// </summary>
+ /// <param name="reader">The Reader to read from.</param>
+ /// <param name="address">Address at which the quad is located.</param>
+ /// <returns>The quad that was read.</returns>
+ public static BasicQuad Read(EndianStackReader reader, ref uint address)
+ {
+ BasicQuad t = new(
+ reader.ReadUShort(address),
+ reader.ReadUShort(address + 2),
+ reader.ReadUShort(address + 4),
+ reader.ReadUShort(address + 6));
+
+ address += 8;
+ return t;
+ }
+
+
+ /// <inheritdoc/>
+ public readonly IEnumerator<ushort> GetEnumerator()
+ {
+ yield return Index1;
+ yield return Index2;
+ yield return Index3;
+ yield return Index4;
+ }
+
+ readonly IEnumerator IEnumerable.GetEnumerator()
+ {
+ return GetEnumerator();
+ }
+
+
+ /// <inheritdoc/>
+ public readonly object Clone()
+ {
+ // Value type without reference fields - a copy of "this" is a full clone.
+ return this;
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"Quad: [{Index1}, {Index2}, {Index3}, {Index4}]";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicTriangle.cs b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicTriangle.cs
new file mode 100644
index 0000000..214d177
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/Polygon/BasicTriangle.cs
@@ -0,0 +1,131 @@
+using SA3D.Common.IO;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
+namespace SA3D.Modeling.Mesh.Basic.Polygon
+{
+ /// <summary>
+ /// A polygon with three corners.
+ /// </summary>
+ public struct BasicTriangle : IBasicPolygon
+ {
+ /// <inheritdoc/>
+ public readonly uint Size => 6;
+
+ /// <inheritdoc/>
+ public readonly int NumIndices => 3;
+
+
+ /// <summary>
+ /// First vertex index.
+ /// </summary>
+ public ushort Index1 { get; set; }
+
+ /// <summary>
+ /// Second vertex index.
+ /// </summary>
+ public ushort Index2 { get; set; }
+
+ /// <summary>
+ /// Third vertex index.
+ /// </summary>
+ public ushort Index3 { get; set; }
+
+
+ /// <inheritdoc/>
+ public ushort this[int index]
+ {
+ readonly get => index switch
+ {
+ 0 => Index1,
+ 1 => Index2,
+ 2 => Index3,
+ _ => throw new IndexOutOfRangeException(),
+ };
+ set
+ {
+ switch(index)
+ {
+ case 0:
+ Index1 = value;
+ break;
+ case 1:
+ Index2 = value;
+ break;
+ case 2:
+ Index3 = value;
+ break;
+ default:
+ throw new IndexOutOfRangeException();
+ }
+ }
+ }
+
+ /// <summary>
+ /// Creates a new populated basic triangle.
+ /// </summary>
+ /// <param name="index1">First vertex index.</param>
+ /// <param name="index2">Second vertex index.</param>
+ /// <param name="index3">Third vertex index.</param>
+ public BasicTriangle(ushort index1, ushort index2, ushort index3)
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ }
+
+
+ /// <inheritdoc/>
+ public readonly void Write(EndianStackWriter writer)
+ {
+ writer.WriteUShort(Index1);
+ writer.WriteUShort(Index2);
+ writer.WriteUShort(Index3);
+ }
+
+ /// <summary>
+ /// Reads a triangle off an endian stack reader. Advances the address by the number of bytes read.
+ /// </summary>
+ /// <param name="reader">The Reader to read from.</param>
+ /// <param name="address">Address at which the triangle is located.</param>
+ /// <returns>The triangle that was read.</returns>
+ public static BasicTriangle Read(EndianStackReader reader, ref uint address)
+ {
+ BasicTriangle t = new(
+ reader.ReadUShort(address),
+ reader.ReadUShort(address + 2),
+ reader.ReadUShort(address + 4));
+
+ address += 6;
+ return t;
+ }
+
+
+ /// <inheritdoc/>
+ public readonly IEnumerator<ushort> GetEnumerator()
+ {
+ yield return Index1;
+ yield return Index2;
+ yield return Index3;
+ }
+
+ readonly IEnumerator IEnumerable.GetEnumerator()
+ {
+ return GetEnumerator();
+ }
+
+ /// <inheritdoc/>
+ public readonly object Clone()
+ {
+ // Value type without reference fields - a copy of "this" is a full clone.
+ return this;
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"Triangle: [{Index1}, {Index2}, {Index3}]";
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Basic/Polygon/IBasicPolygon.cs b/src/SA3D.Modeling/Mesh/Basic/Polygon/IBasicPolygon.cs
new file mode 100644
index 0000000..d8c858f
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Basic/Polygon/IBasicPolygon.cs
@@ -0,0 +1,55 @@
+using SA3D.Common.IO;
+using System;
+using System.Collections.Generic;
+
+namespace SA3D.Modeling.Mesh.Basic.Polygon
+{
+ /// <summary>
+ /// BASIC polygon interface.
+ /// </summary>
+ public interface IBasicPolygon : ICloneable, IEnumerable<ushort>
+ {
+ /// <summary>
+ /// Size of the primitive in bytes.
+ /// </summary>
+ public uint Size { get; }
+
+ /// <summary>
+ /// Number of indices in the polygon.
+ /// </summary>
+ public int NumIndices { get; }
+
+ /// <summary>
+ /// Access and set vertex indices of the polygon.
+ /// </summary>
+ /// <param name="index">The index of the corner.</param>
+ /// <returns>The vertex index.</returns>
+ public ushort this[int index] { get; set; }
+
+
+ /// <summary>
+ /// Reads a primitive off an endian stack reader.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which the primitive is located.</param>
+ /// <param name="type">Type of primitive to read.</param>
+ /// <returns>The read primitive</returns>
+ /// <exception cref="ArgumentException">Thrown when the polygon type is unknown.</exception>
+ public static IBasicPolygon Read(EndianStackReader reader, ref uint address, BasicPolygonType type)
+ {
+ return type switch
+ {
+ BasicPolygonType.Triangles => BasicTriangle.Read(reader, ref address),
+ BasicPolygonType.Quads => BasicQuad.Read(reader, ref address),
+ BasicPolygonType.NPoly or BasicPolygonType.TriangleStrips => BasicMultiPolygon.Read(reader, ref address),
+ _ => throw new ArgumentException("Unknown poly type!", nameof(type)),
+ };
+ }
+
+ /// <summary>
+ /// Writes the polygon to an endian stack writer.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ public void Write(EndianStackWriter writer);
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Buffer/BufferCorner.cs b/src/SA3D.Modeling/Mesh/Buffer/BufferCorner.cs
new file mode 100644
index 0000000..6e9b153
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Buffer/BufferCorner.cs
@@ -0,0 +1,147 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Structs;
+using System;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Buffer
+{
+ /// <summary>
+ /// A single corner in a triangle.
+ /// </summary>
+ public struct BufferCorner : IEquatable<BufferCorner>
+ {
+ /// <summary>
+ /// Size of a buffer corner in bytes.
+ /// </summary>
+ public const uint StructSize = 14;
+
+ /// <summary>
+ /// Size of a buffer corner without color in bytes.
+ /// </summary>
+ public const uint StructSizeNoColor = 10;
+
+ /// <summary>
+ /// Vertex cache index to use.
+ /// </summary>
+ public ushort VertexIndex { get; set; }
+
+ /// <summary>
+ /// Color.
+ /// </summary>
+ public Color Color { get; set; }
+
+ /// <summary>
+ /// Coordinates for texture rendering.
+ /// </summary>
+ public Vector2 Texcoord { get; set; }
+
+
+ /// <summary>
+ /// Creates a new buffer corner.
+ /// </summary>
+ /// <param name="vertexIndex">Buffer array index of the vertex</param>
+ /// <param name="color">Color.</param>
+ /// <param name="texcoord">Coordinates for texture rendering.</param>
+ public BufferCorner(ushort vertexIndex, Color color, Vector2 texcoord)
+ {
+ VertexIndex = vertexIndex;
+ Color = color;
+ Texcoord = texcoord;
+ }
+
+ /// <summary>
+ /// Creates a new white buffer corner with no texture coordinates.
+ /// </summary>
+ /// <param name="vertexIndex">Buffer array index of the vertex</param>
+ public BufferCorner(ushort vertexIndex)
+ {
+ VertexIndex = vertexIndex;
+ Color = BufferMesh.DefaultColor;
+ Texcoord = Vector2.Zero;
+ }
+
+
+ /// <summary>
+ /// Writes the buffer corner to an endian stack writer.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ /// <param name="writeColor">Whether to write the corners color too.</param>
+ public readonly void Write(EndianStackWriter writer, bool writeColor)
+ {
+ writer.WriteUShort(VertexIndex);
+ writer.WriteVector2(Texcoord);
+ if(writeColor)
+ {
+ writer.WriteColor(Color, ColorIOType.RGBA8);
+ }
+ }
+
+ /// <summary>
+ /// Reads a buffer corner off an endian stack reader. Advances the address by the number of bytes read.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which to start reading.</param>
+ /// <param name="hasColor">Whether the corner contains a color.</param>
+ /// <returns>The corner that was read.</returns>
+ public static BufferCorner Read(EndianStackReader reader, ref uint address, bool hasColor)
+ {
+ ushort index = reader.ReadUShort(address);
+ address += 2;
+ Vector2 texcoord = reader.ReadVector2(ref address);
+ // Corners without a stored color default to white.
+ Color col = hasColor ? reader.ReadColor(ref address, ColorIOType.RGBA8) : BufferMesh.DefaultColor;
+
+ return new BufferCorner(index, col, texcoord);
+ }
+
+
+ /// <inheritdoc/>
+ public override readonly bool Equals(object? obj)
+ {
+ return obj is BufferCorner corner &&
+ VertexIndex == corner.VertexIndex &&
+ Color == corner.Color &&
+ Texcoord == corner.Texcoord;
+ }
+
+ /// <inheritdoc/>
+ public override readonly int GetHashCode()
+ {
+ return System.HashCode.Combine(VertexIndex, Color, Texcoord);
+ }
+
+ readonly bool IEquatable<BufferCorner>.Equals(BufferCorner other)
+ {
+ return Equals(other);
+ }
+
+ /// <summary>
+ /// Compares two corners for equality.
+ /// </summary>
+ /// <param name="l">Lefthand corners.</param>
+ /// <param name="r">Righthand corners.</param>
+ /// <returns>Whether the two corners are equal.</returns>
+ public static bool operator ==(BufferCorner l, BufferCorner r)
+ {
+ return l.Equals(r);
+ }
+
+ /// <summary>
+ /// Compares two corners for inequality.
+ /// </summary>
+ /// <param name="l">Lefthand corners.</param>
+ /// <param name="r">Righthand corners.</param>
+ /// <returns>Whether the two corners are inequal.</returns>
+ public static bool operator !=(BufferCorner l, BufferCorner r)
+ {
+ return !l.Equals(r);
+ }
+
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"{VertexIndex}: \t{Color}; \t{Texcoord.DebugString()}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Buffer/BufferMaterial.cs b/src/SA3D.Modeling/Mesh/Buffer/BufferMaterial.cs
new file mode 100644
index 0000000..4facc46
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Buffer/BufferMaterial.cs
@@ -0,0 +1,394 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Mesh.Gamecube.Enums;
+using SA3D.Modeling.Structs;
+
+namespace SA3D.Modeling.Mesh.Buffer
+{
+ ///
+ /// Rendering properties for a buffer mesh.
+ ///
+ public struct BufferMaterial
+ {
+ ///
+ /// Size of the structure in bytes.
+ ///
+ public const uint StructSize = 0x20;
+
+ ///
+ /// Default material values.
+ ///
+ public static readonly BufferMaterial DefaultValues = new()
+ {
+ Diffuse = Color.ColorWhite,
+ Specular = Color.ColorWhite,
+ SpecularExponent = 11,
+ Ambient = Color.ColorBlack,
+ SourceBlendMode = BlendMode.SrcAlpha,
+ DestinationBlendmode = BlendMode.SrcAlphaInverted,
+ TextureFiltering = FilterMode.Bilinear,
+ GCShadowStencil = 1,
+ GCTexCoordID = GCTexCoordID.TexCoord0,
+ GCTexCoordType = GCTexCoordType.Matrix2x4,
+ GCTexCoordSource = GCTexCoordSource.TexCoord0,
+ GCMatrixID = GCTexcoordMatrix.Identity,
+ };
+
+ #region Storage properties
+
+ ///
+ /// The diffuse color.
+ ///
+ public Color Diffuse { readonly get; set; }
+
+ ///
+ /// The specular color.
+ ///
+ public Color Specular { readonly get; set; }
+
+ ///
+ /// The specular exponent.
+ ///
+ public float SpecularExponent { readonly get; set; }
+
+ ///
+ /// The Ambient color.
+ ///
+ public Color Ambient { readonly get; set; }
+
+ ///
+ /// Texture Index.
+ ///
+ public uint TextureIndex { readonly get; set; }
+
+ ///
+ /// Texture filtering mode.
+ ///
+ public FilterMode TextureFiltering { readonly get; set; }
+
+ ///
+ /// Mipmap distance multiplier.
+ ///
+ public float MipmapDistanceMultiplier { readonly get; set; }
+
+ ///
+ /// Source blend mode.
+ ///
+ public BlendMode SourceBlendMode { readonly get; set; }
+
+ ///
+ /// Destination blend mode.
+ ///
+ public BlendMode DestinationBlendmode { readonly get; set; }
+
+ ///
+ /// Additional Material attributes.
+ ///
+ public MaterialAttributes Attributes { readonly get; set; }
+
+ ///
+ /// Data container for all gamecube related info.
+ ///
+ public uint GamecubeData { readonly get; set; }
+
+ #endregion
+
+ #region Attribute Properties
+
+ ///
+ /// Whether textures should be rendered.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.UseTexture"/>.
+ ///
+ public bool UseTexture
+ {
+ readonly get => HasAttributes(MaterialAttributes.UseTexture);
+ set => SetAttributes(MaterialAttributes.UseTexture, value);
+ }
+
+ ///
+ /// Enables anisotropic filtering.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.AnisotropicFiltering"/>.
+ ///
+ public bool AnisotropicFiltering
+ {
+ readonly get => HasAttributes(MaterialAttributes.AnisotropicFiltering);
+ set => SetAttributes(MaterialAttributes.AnisotropicFiltering, value);
+ }
+
+ ///
+ /// Clamps texture corrdinates along the horizontal axis between -1 and 1.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.ClampU"/>.
+ ///
+ public bool ClampU
+ {
+ readonly get => HasAttributes(MaterialAttributes.ClampU);
+ set => SetAttributes(MaterialAttributes.ClampU, value);
+ }
+
+ ///
+ /// Clamps texture corrdinates along the vertical axis between -1 and 1.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.ClampV"/>.
+ ///
+ public bool ClampV
+ {
+ readonly get => HasAttributes(MaterialAttributes.ClampV);
+ set => SetAttributes(MaterialAttributes.ClampV, value);
+ }
+
+ ///
+ /// Mirrors texture coordinates along the horizontal axis every other full unit.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.MirrorU"/>.
+ ///
+ public bool MirrorU
+ {
+ readonly get => HasAttributes(MaterialAttributes.MirrorU);
+ set => SetAttributes(MaterialAttributes.MirrorU, value);
+ }
+
+ ///
+ /// Mirrors texture coordinates along the vertical axis every other full unit.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.MirrorV"/>.
+ ///
+ public bool MirrorV
+ {
+ readonly get => HasAttributes(MaterialAttributes.MirrorV);
+ set => SetAttributes(MaterialAttributes.MirrorV, value);
+ }
+
+ ///
+ /// Whether to use normal mapping for textures.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.NormalMapping"/>.
+ ///
+ public bool NormalMapping
+ {
+ readonly get => HasAttributes(MaterialAttributes.NormalMapping);
+ set => SetAttributes(MaterialAttributes.NormalMapping, value);
+ }
+
+ ///
+ /// Ignores lighting as a whole.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.NoLighting"/>.
+ ///
+ public bool NoLighting
+ {
+ readonly get => HasAttributes(MaterialAttributes.NoLighting);
+ set => SetAttributes(MaterialAttributes.NoLighting, value);
+ }
+
+ ///
+ /// Ignores ambient lighting.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.NoAmbient"/>.
+ ///
+ public bool NoAmbient
+ {
+ readonly get => HasAttributes(MaterialAttributes.NoAmbient);
+ set => SetAttributes(MaterialAttributes.NoAmbient, value);
+ }
+
+ ///
+ /// Ignores specular lighting.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.NoSpecular"/>.
+ ///
+ public bool NoSpecular
+ {
+ readonly get => HasAttributes(MaterialAttributes.NoSpecular);
+ set => SetAttributes(MaterialAttributes.NoSpecular, value);
+ }
+
+ ///
+ /// Ignores interpolated normals and instead renders every polygon flat.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.Flat"/>.
+ ///
+ public bool Flat
+ {
+ readonly get => HasAttributes(MaterialAttributes.Flat);
+ set => SetAttributes(MaterialAttributes.Flat, value);
+ }
+
+ ///
+ /// Enables transparent rendering.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.UseAlpha"/>.
+ ///
+ public bool UseAlpha
+ {
+ readonly get => HasAttributes(MaterialAttributes.UseAlpha);
+ set => SetAttributes(MaterialAttributes.UseAlpha, value);
+ }
+
+ ///
+ /// Enables backface culling.
+ ///
+ /// Wrapper around flag in <see cref="MaterialAttributes.BackfaceCulling"/>.
+ ///
+ public bool BackfaceCulling
+ {
+ readonly get => HasAttributes(MaterialAttributes.BackfaceCulling);
+ set => SetAttributes(MaterialAttributes.BackfaceCulling, value);
+ }
+
+ #endregion
+
+ #region Gamecube Properties
+
+ ///
+ /// GC Specific: Shadow stencil.
+ ///
+ public byte GCShadowStencil
+ {
+ readonly get => (byte)((GamecubeData >> 24) & 0xFF);
+ set
+ {
+ GamecubeData &= 0xFFFFFF;
+ GamecubeData |= (uint)value << 24;
+ }
+ }
+
+ ///
+ /// GC Specific: Output location to use for generated texture coordinates.
+ ///
+ public GCTexCoordID GCTexCoordID
+ {
+ readonly get => (GCTexCoordID)((GamecubeData >> 16) & 0xFF);
+ set
+ {
+ GamecubeData &= 0xFF00FFFF;
+ GamecubeData |= (uint)value << 16;
+ }
+ }
+
+ ///
+ /// GC Specific: The function to use for generating the texture coordinates
+ ///
+ public GCTexCoordType GCTexCoordType
+ {
+ readonly get => (GCTexCoordType)((GamecubeData >> 12) & 0xF);
+ set
+ {
+ GamecubeData &= 0xFFFF0FFF;
+ GamecubeData |= (uint)value << 12;
+ }
+ }
+
+ ///
+ /// GC Specific: The source which should be used to generate the texture coordinates
+ ///
+ public GCTexCoordSource GCTexCoordSource
+ {
+ readonly get => (GCTexCoordSource)((GamecubeData >> 4) & 0xFF);
+ set
+ {
+ GamecubeData &= 0xFFFFF00F;
+ GamecubeData |= (uint)value << 4;
+ }
+ }
+
+ ///
+ /// GC Specific: The ID of the matrix to use for generating the texture coordinates
+ ///
+ public GCTexcoordMatrix GCMatrixID
+ {
+ readonly get => (GCTexcoordMatrix)(GamecubeData & 0xF);
+ set
+ {
+ GamecubeData &= 0xFFFFFFF0;
+ GamecubeData |= (uint)value;
+ }
+ }
+
+ #endregion
+
+ ///
+ /// Creates a new buffer material from a template.
+ ///
+ /// The template to use.
+ public BufferMaterial(BufferMaterial template)
+ {
+ Diffuse = template.Diffuse;
+ Specular = template.Specular;
+ SpecularExponent = template.SpecularExponent;
+ Ambient = template.Ambient;
+ TextureIndex = template.TextureIndex;
+ TextureFiltering = template.TextureFiltering;
+ MipmapDistanceMultiplier = template.MipmapDistanceMultiplier;
+ SourceBlendMode = template.SourceBlendMode;
+ DestinationBlendmode = template.DestinationBlendmode;
+ Attributes = template.Attributes;
+ GamecubeData = template.GamecubeData;
+ }
+
+
+ ///
+ /// Set material attributes.
+ ///
+ /// The attributes to set.
+ /// New state for the attributes.
+ public void SetAttributes(MaterialAttributes attrib, bool state)
+ {
+ if(state)
+ {
+ Attributes |= attrib;
+ }
+ else
+ {
+ Attributes &= ~attrib;
+ }
+ }
+
+ ///
+ /// Checks if materials attributes are set.
+ ///
+ /// The attributes to check.
+ /// Whether all specified attributes are set.
+ public readonly bool HasAttributes(MaterialAttributes attrib)
+ {
+ return Attributes.HasFlag(attrib);
+ }
+
+
+ ///
+ /// Writes the material to an endian stack writer.
+ ///
+ /// The writer to write to.
+ public readonly void Write(EndianStackWriter writer)
+ {
+ uint attributes = (uint)Attributes;
+ attributes |= (uint)SourceBlendMode << 16;
+ attributes |= (uint)DestinationBlendmode << 19;
+ attributes |= (uint)TextureFiltering << 22;
+
+ writer.WriteColor(Diffuse, ColorIOType.RGBA8);
+ writer.WriteColor(Specular, ColorIOType.RGBA8);
+ writer.WriteFloat(SpecularExponent);
+ writer.WriteColor(Ambient, ColorIOType.RGBA8);
+ writer.WriteUInt(TextureIndex);
+ writer.WriteFloat(MipmapDistanceMultiplier);
+ writer.WriteUInt(attributes);
+ writer.WriteUInt(GamecubeData);
+ }
+
+ ///
+ /// Reads a buffer material off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The buffer material that was read.
+ public static BufferMaterial Read(EndianStackReader reader, uint address)
+ {
+ BufferMaterial result = default;
+
+ result.Diffuse = reader.ReadColor(address, ColorIOType.RGBA8);
+ result.Specular = reader.ReadColor(address + 4, ColorIOType.RGBA8);
+ result.SpecularExponent = reader.ReadFloat(address + 8);
+ result.Ambient = reader.ReadColor(address + 0xC, ColorIOType.RGBA8);
+ result.TextureIndex = reader.ReadUInt(address + 0x10);
+ result.MipmapDistanceMultiplier = reader.ReadFloat(address + 0x14);
+ uint attributes = reader.ReadUInt(address + 0x18);
+ result.GamecubeData = reader.ReadUInt(address + 0x1C);
+
+ result.Attributes = (MaterialAttributes)(attributes & 0xFFFF);
+ result.SourceBlendMode = (BlendMode)((attributes >> 16) & 7);
+ result.DestinationBlendmode = (BlendMode)((attributes >> 19) & 7);
+ result.TextureFiltering = (FilterMode)(attributes >> 22);
+
+ return result;
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Buffer/BufferMesh.cs b/src/SA3D.Modeling/Mesh/Buffer/BufferMesh.cs
new file mode 100644
index 0000000..da19b6f
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Buffer/BufferMesh.cs
@@ -0,0 +1,702 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Modeling.Strippify;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Buffer
+{
+ ///
+ /// Data set for a renderable mesh
+ /// Can also consist of only vertices.
+ ///
+ public class BufferMesh : ICloneable
+ {
+ /// <summary>
+ /// Default normal direction for buffer mesh vertices.
+ /// </summary>
+ public static readonly Vector3 DefaultNormal = Vector3.UnitY;
+
+ /// <summary>
+ /// Default color for buffer mesh corners.
+ /// </summary>
+ public static readonly Color DefaultColor = Color.ColorWhite;
+
+
+ /// <summary>
+ /// Mesh vertices.
+ /// </summary>
+ public BufferVertex[]? Vertices { get; }
+
+ /// <summary>
+ /// Polygon rendering information.
+ /// </summary>
+ public BufferMaterial Material { get; private set; }
+
+ /// <summary>
+ /// Polygon corners.
+ /// </summary>
+ public BufferCorner[]? Corners { get; private set; }
+
+ /// <summary>
+ /// Index list combining polygon corners into triangles.
+ /// <br/> If null, use the corners in order.
+ /// </summary>
+ public uint[]? IndexList { get; private set; }
+
+ /// <summary>
+ /// When set, <see cref="Corners"/> / <see cref="IndexList"/> is made up of one big triangle strip, instead of individual triangles.
+ /// </summary>
+ public bool Strippified { get; private set; }
+
+ /// <summary>
+ /// If true, the vertices will be added onto the existing buffered vertices.
+ /// </summary>
+ public bool ContinueWeight { get; }
+
+ /// <summary>
+ /// Whether the model uses vertex normals.
+ /// </summary>
+ public bool HasNormals { get; }
+
+ /// <summary>
+ /// Whether the model uses polygon colors.
+ /// </summary>
+ public bool HasColors { get; private set; }
+
+ /// <summary>
+ /// Index offset for when writing vertices into the buffer array.
+ /// </summary>
+ public ushort VertexWriteOffset { get; internal set; }
+
+ /// <summary>
+ /// Index offset for when reading vertices from the buffer array for rendering.
+ /// </summary>
+ public ushort VertexReadOffset { get; internal set; }
+
+
+ /// <summary>
+ /// Creates a new buffer mesh.
+ /// </summary>
+ /// <param name="vertices">Buffer vertices.</param>
+ /// <param name="material">Polygon rendering information.</param>
+ /// <param name="corners">Polygon corners.</param>
+ /// <param name="indexList">Index list combining polygon corners into triangles</param>
+ /// <param name="strippified">When set, <paramref name="corners"/> is made up of one big triangle strip, instead of individual triangles.</param>
+ /// <param name="continueWeight">If true, the vertices will be added onto the existing buffered vertices.</param>
+ /// <param name="hasNormals">Whether the model uses vertex normals.</param>
+ /// <param name="hasColors">Whether the model uses polygon colors.</param>
+ /// <param name="vertexWriteOffset">Index offset for when writing vertices into the buffer array.</param>
+ /// <param name="vertexReadOffset">Index offset for when reading vertices from the buffer array for rendering.</param>
+ /// <exception cref="ArgumentException">Thrown when vertex or polygon data is empty.</exception>
+ public BufferMesh(
+ BufferVertex[]? vertices,
+ BufferMaterial material,
+ BufferCorner[]? corners,
+ uint[]? indexList,
+ bool strippified,
+ bool continueWeight,
+ bool hasNormals,
+ bool hasColors,
+ ushort vertexWriteOffset,
+ ushort vertexReadOffset)
+ {
+ Vertices = vertices;
+ Material = material;
+ Corners = corners;
+ IndexList = indexList;
+ Strippified = strippified;
+ ContinueWeight = continueWeight;
+ HasNormals = hasNormals;
+ HasColors = hasColors;
+ VertexWriteOffset = vertexWriteOffset;
+ VertexReadOffset = vertexReadOffset;
+
+ VerifyVertexData();
+ VerifyPolygonData();
+ }
+
+ /// <summary>
+ /// Creates a new buffer mesh from only vertex data.
+ /// </summary>
+ /// <param name="vertices">Buffer vertices.</param>
+ /// <param name="continueWeight">If true, the vertices will be added onto the existing buffered vertices.</param>
+ /// <param name="hasNormals">Whether the model uses vertex normals.</param>
+ /// <param name="vertexWriteOffset">Index offset for when writing vertices into the buffer array.</param>
+ /// <exception cref="ArgumentException">Thrown when the vertex data is empty.</exception>
+ public BufferMesh(BufferVertex[] vertices, bool continueWeight, bool hasNormals, ushort vertexWriteOffset)
+ {
+ Vertices = vertices;
+ ContinueWeight = continueWeight;
+ VertexWriteOffset = vertexWriteOffset;
+ HasNormals = hasNormals;
+
+ VerifyVertexData();
+ }
+
+ /// <summary>
+ /// Creates a new buffer mesh with only polygon data.
+ /// </summary>
+ /// <param name="material">Polygon rendering information.</param>
+ /// <param name="corners">Polygon corners.</param>
+ /// <param name="indexList">Index list combining polygon corners into triangles</param>
+ /// <param name="strippified">When set, <paramref name="corners"/> is made up of one big triangle strip, instead of individual triangles.</param>
+ /// <param name="hasColors">Whether the model uses polygon colors.</param>
+ /// <param name="vertexReadOffset">Index offset for when reading vertices from the buffer array for rendering.</param>
+ /// <exception cref="ArgumentException">Thrown when the polygon data is empty.</exception>
+ public BufferMesh(BufferMaterial material, BufferCorner[] corners, uint[]? indexList, bool strippified, bool hasColors, ushort vertexReadOffset)
+ {
+ Material = material;
+ Corners = corners;
+ IndexList = indexList;
+ Strippified = strippified;
+ HasColors = hasColors;
+ VertexReadOffset = vertexReadOffset;
+
+ VerifyPolygonData();
+ }
+
+
+ // Guards the vertex-carrying constructors against empty vertex data.
+ // NOTE(review): parameter-name strings are hard-coded to the constructor
+ // parameter names rather than using nameof; kept byte-identical here.
+ private void VerifyVertexData()
+ {
+ if(Vertices == null || Vertices.Length == 0)
+ {
+ throw new ArgumentException("Vertices can't be empty", "vertices");
+ }
+ }
+
+ // Guards the polygon-carrying constructors against empty corner/index data.
+ private void VerifyPolygonData()
+ {
+ if(Corners == null || Corners.Length == 0)
+ {
+ throw new ArgumentException("Corners can't be empty", "corners");
+ }
+
+ if(IndexList != null && IndexList.Length == 0)
+ {
+ throw new ArgumentException("Triangle list cant be empty", "triangleList");
+ }
+ }
+
+
+ /// <summary>
+ /// Compiles the triangle list of indices to <see cref="Corners"/>.
+ /// </summary>
+ /// <returns>The index triangle list.</returns>
+ /// <exception cref="InvalidOperationException">Thrown when the mesh contains no polygon information.</exception>
+ public uint[] GetIndexTriangleList()
+ {
+ if(Corners == null)
+ {
+ throw new InvalidOperationException("The mesh contains no polygon information.");
+ }
+
+ uint[] result;
+
+ if(IndexList == null)
+ {
+ if(Strippified)
+ {
+ // Corners form one triangle strip: unravel into individual triangles,
+ // flipping winding on every other triangle and skipping degenerates.
+ List<uint> triangles = new();
+
+ bool rev = false;
+
+ for(uint i = 2; i < Corners.Length; i++, rev = !rev)
+ {
+ uint i1 = i - 2;
+ uint i2 = i - 1;
+ uint i3 = i;
+
+ BufferCorner c1 = Corners[i1];
+ BufferCorner c2 = Corners[i2];
+ BufferCorner c3 = Corners[i3];
+
+ if(c1.VertexIndex == c2.VertexIndex
+ || c2.VertexIndex == c3.VertexIndex
+ || c3.VertexIndex == c1.VertexIndex)
+ {
+ continue;
+ }
+
+ if(rev)
+ {
+ triangles.Add(i2);
+ triangles.Add(i1);
+ triangles.Add(i3);
+ }
+ else
+ {
+ triangles.Add(i1);
+ triangles.Add(i2);
+ triangles.Add(i3);
+ }
+ }
+
+ result = triangles.ToArray();
+ }
+ else
+ {
+ // Corners are already a plain triangle list; indices are simply 0..N-1.
+ result = new uint[Corners.Length];
+ for(uint i = 0; i < result.Length; i++)
+ {
+ result[i] = i;
+ }
+ }
+ }
+ else
+ {
+ if(Strippified)
+ {
+ // Index list forms one triangle strip: unravel like above.
+ List<uint> triangles = new();
+ bool rev = false;
+
+ for(int i = 2; i < IndexList.Length; i++, rev = !rev)
+ {
+ uint i1 = IndexList[i - 2];
+ uint i2 = IndexList[i - 1];
+ uint i3 = IndexList[i];
+
+ if(i1 == i2 || i2 == i3 || i3 == i1)
+ {
+ continue;
+ }
+
+ if(rev)
+ {
+ triangles.Add(i2);
+ triangles.Add(i1);
+ triangles.Add(i3);
+ }
+ else
+ {
+ triangles.Add(i1);
+ triangles.Add(i2);
+ triangles.Add(i3);
+ }
+ }
+
+ result = triangles.ToArray();
+
+ }
+ else
+ {
+ // fast copy
+ result = IndexList.ToArray();
+ }
+
+ }
+
+ return result;
+ }
+
+ /// <summary>
+ /// Compiles the triangle list of <see cref="Corners"/>.
+ /// </summary>
+ /// <returns>The corner triangle list.</returns>
+ /// <exception cref="InvalidOperationException">Thrown when the mesh contains no polygon information.</exception>
+ public BufferCorner[] GetCornerTriangleList()
+ {
+ if(Corners == null)
+ {
+ throw new InvalidOperationException("The mesh contains no polygon information.");
+ }
+
+ BufferCorner[] result;
+
+ if(IndexList == null)
+ {
+ if(Strippified)
+ {
+ // Corners form one triangle strip: unravel into individual triangles,
+ // flipping winding on every other triangle and skipping degenerates.
+ List<BufferCorner> triangles = new();
+ bool rev = false;
+
+ for(uint i = 2; i < Corners.Length; i++, rev = !rev)
+ {
+ BufferCorner c1 = Corners[i - 2];
+ BufferCorner c2 = Corners[i - 1];
+ BufferCorner c3 = Corners[i];
+
+ if(c1.VertexIndex == c2.VertexIndex
+ || c2.VertexIndex == c3.VertexIndex
+ || c3.VertexIndex == c1.VertexIndex)
+ {
+ continue;
+ }
+
+ if(rev)
+ {
+ triangles.Add(c2);
+ triangles.Add(c1);
+ triangles.Add(c3);
+ }
+ else
+ {
+ triangles.Add(c1);
+ triangles.Add(c2);
+ triangles.Add(c3);
+ }
+ }
+
+ result = triangles.ToArray();
+ }
+ else
+ {
+ // fast copy
+ result = Corners.ToArray();
+ }
+ }
+ else
+ {
+ if(Strippified)
+ {
+ // Index list forms one triangle strip: unravel and resolve to corners.
+ List<BufferCorner> triangles = new();
+ bool rev = false;
+
+ for(uint i = 2; i < IndexList.Length; i++, rev = !rev)
+ {
+ uint i1 = IndexList[i - 2];
+ uint i2 = IndexList[i - 1];
+ uint i3 = IndexList[i];
+
+ if(i1 == i2 || i2 == i3 || i3 == i1)
+ {
+ continue;
+ }
+
+ if(rev)
+ {
+ triangles.Add(Corners[i2]);
+ triangles.Add(Corners[i1]);
+ triangles.Add(Corners[i3]);
+ }
+ else
+ {
+ triangles.Add(Corners[i1]);
+ triangles.Add(Corners[i2]);
+ triangles.Add(Corners[i3]);
+ }
+ }
+
+ result = triangles.ToArray();
+ }
+ else
+ {
+ // Resolve each index to its corner.
+ result = new BufferCorner[IndexList.Length];
+ for(int i = 0; i < result.Length; i++)
+ {
+ result[i] = Corners[IndexList[i]];
+ }
+ }
+ }
+
+ return result;
+ }
+
+ /// <summary>
+ /// Attempts to optimize the polygons by strippifying and/or generating index lists.
+ /// </summary>
+ public void OptimizePolygons()
+ {
+ if(Corners == null)
+ {
+ return;
+ }
+
+ BufferCorner[] corners = GetCornerTriangleList();
+
+ // filter degenerate triangles
+ int newArraySize = corners.Length;
+ for(int i = 0; i < newArraySize; i += 3)
+ {
+ ushort index1 = corners[i].VertexIndex;
+ ushort index2 = corners[i + 1].VertexIndex;
+ ushort index3 = corners[i + 2].VertexIndex;
+
+ if(index1 == index2 || index2 == index3 || index3 == index1)
+ {
+ // swap-remove: overwrite the degenerate triangle with the last one,
+ // shrink the logical array and re-check the current slot
+ corners[i] = corners[newArraySize - 3];
+ corners[i + 1] = corners[newArraySize - 2];
+ corners[i + 2] = corners[newArraySize - 1];
+ i -= 3;
+ newArraySize -= 3;
+ }
+ }
+
+ if(newArraySize == 0)
+ {
+ // every triangle was degenerate; drop all polygon data
+ Corners = null;
+ IndexList = null;
+ Material = default;
+ HasColors = false;
+ VertexReadOffset = 0;
+ return;
+ }
+
+ Array.Resize(ref corners, newArraySize);
+
+ if(!corners.TryCreateDistinctMap(out DistinctMap<BufferCorner> distinctMap))
+ {
+ Corners = corners;
+ return;
+ }
+
+ int[][] strips = TriangleStrippifier.Global.Strippify(distinctMap.Map!);
+
+ // estimate the byte size of each of the four representations and keep the smallest
+ uint stripLength = (uint)TriangleStrippifier.JoinedStripEnumerator(strips, null).Count();
+ uint structSize = HasColors ? BufferCorner.StructSize : BufferCorner.StructSizeNoColor;
+
+ uint sizeTriList = structSize * (uint)corners.Length;
+ uint sizeTriListIndexed = (uint)((structSize * distinctMap.Values.Count) + (corners.Length * 2));
+ uint sizeStrips = structSize * stripLength;
+ uint sizeStripsIndexed = (uint)((structSize * distinctMap.Values.Count) + (stripLength * 2));
+
+ uint smallestSize = uint.Min(
+ uint.Min(sizeTriList, sizeTriListIndexed),
+ uint.Min(sizeStrips, sizeStripsIndexed));
+
+ if(sizeStripsIndexed == smallestSize)
+ {
+ Corners = distinctMap.ValueArray;
+ IndexList = (uint[])(object)TriangleStrippifier.JoinStrips(strips, null);
+ Strippified = true;
+ }
+ else if(sizeStrips == smallestSize)
+ {
+ Corners = TriangleStrippifier.JoinedStripEnumerator(strips, null)
+ .Select(x => distinctMap.Values[x]).ToArray();
+ IndexList = null;
+ Strippified = true;
+ }
+ else if(sizeTriListIndexed == smallestSize)
+ {
+ Corners = distinctMap.ValueArray;
+ IndexList = (uint[])(object)distinctMap.Map!;
+ Strippified = false;
+ }
+ else
+ {
+ Corners = corners;
+ IndexList = null;
+ Strippified = false;
+ }
+
+
+ }
+
+ /// <summary>
+ /// Optimizes a collection of buffer meshes by optimizing the polygons and combining vertex and poly data between meshes.
+ /// <br/> Reuses arrays and buffermeshes.
+ /// </summary>
+ /// <param name="input">The collection to optimize</param>
+ /// <returns>The optimized buffermeshes.</returns>
+ public static BufferMesh[] Optimize(IList<BufferMesh> input)
+ {
+ List<BufferMesh> result = new();
+
+ foreach(BufferMesh mesh in input)
+ {
+ mesh.OptimizePolygons();
+
+ if(mesh.Vertices != null)
+ {
+ result.Add(mesh);
+ continue;
+ }
+
+ // polygon-only mesh: if the previous result mesh carries no polygons,
+ // merge this mesh's polygon data into it instead of keeping both
+ if(result.Count > 0 && result[^1].Corners == null)
+ {
+ BufferMesh prev = result[^1];
+ prev.Corners = mesh.Corners!;
+ prev.IndexList = mesh.IndexList;
+ prev.Material = mesh.Material;
+ prev.HasColors = mesh.HasColors;
+ prev.VertexReadOffset = mesh.VertexReadOffset;
+ }
+ else
+ {
+ result.Add(mesh);
+ }
+ }
+
+ return result.ToArray();
+ }
+
+
+ /// <summary>
+ /// Writes the buffer mesh to an endian stack writer.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ /// <returns>The address at which the mesh header was written.</returns>
+ public uint Write(EndianStackWriter writer)
+ {
+ // vertex, corner and index data are written first; the header written
+ // afterwards points back at them
+ uint vtxAddr = 0;
+ if(Vertices != null)
+ {
+ vtxAddr = writer.PointerPosition;
+ foreach(BufferVertex vtx in Vertices)
+ {
+ vtx.Write(writer, HasNormals);
+ }
+ }
+
+ uint cornerAddr = 0;
+ if(Corners != null)
+ {
+ cornerAddr = writer.PointerPosition;
+ foreach(BufferCorner c in Corners)
+ {
+ c.Write(writer, HasColors);
+ }
+ }
+
+ uint triangleAddr = 0;
+ if(IndexList != null)
+ {
+ triangleAddr = writer.PointerPosition;
+ foreach(uint t in IndexList)
+ {
+ writer.WriteUInt(t);
+ }
+ }
+
+ uint address = writer.PointerPosition;
+
+ // flag bits: 1 = continue weight, 2 = strippified, 4 = normals, 8 = colors
+ // (mirrored in Read)
+ ushort flags = 0;
+ if(ContinueWeight)
+ {
+ flags |= 1;
+ }
+
+ if(Strippified)
+ {
+ flags |= 2;
+ }
+
+ if(HasNormals)
+ {
+ flags |= 4;
+ }
+
+ if(HasColors)
+ {
+ flags |= 8;
+ }
+
+
+ writer.WriteUShort(VertexReadOffset);
+ writer.WriteUShort(VertexWriteOffset);
+ writer.WriteUShort(flags);
+
+ writer.WriteUShort((ushort)(Vertices?.Length ?? 0));
+ writer.WriteUInt(vtxAddr);
+
+ writer.WriteUInt((uint)(Corners?.Length ?? 0));
+ writer.WriteUInt(cornerAddr);
+
+ writer.WriteUInt((uint)(IndexList?.Length ?? 0));
+ writer.WriteUInt(triangleAddr);
+
+ Material.Write(writer);
+
+ return address;
+ }
+
+ /// <summary>
+ /// Reads a buffer mesh off an endian stack reader.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which the buffermesh is located.</param>
+ /// <returns>The buffer mesh that was read.</returns>
+ public static BufferMesh Read(EndianStackReader reader, uint address)
+ {
+ ushort vertexReadOffset = reader.ReadUShort(address + 0);
+ ushort vertexWriteOffset = reader.ReadUShort(address + 2);
+
+ // flag bits: 1 = continue weight, 2 = strippified, 4 = normals, 8 = colors
+ // (mirrored in Write)
+ ushort flags = reader.ReadUShort(address + 4);
+ bool continueWeight = (flags & 1) != 0;
+ bool strippified = (flags & 2) != 0;
+ bool hasNormals = (flags & 4) != 0;
+ bool hasColors = (flags & 8) != 0;
+
+
+ BufferVertex[]? vertices = null;
+ if(reader.TryReadPointer(address + 8, out uint vtxAddr))
+ {
+ vertices = new BufferVertex[reader.ReadUShort(address + 6)];
+ for(int i = 0; i < vertices.Length; i++)
+ {
+ vertices[i] = BufferVertex.Read(reader, ref vtxAddr, hasNormals);
+ }
+ }
+
+ BufferCorner[]? corners = null;
+ if(reader.TryReadPointer(address + 0x10, out uint cornerAddr))
+ {
+ corners = new BufferCorner[reader.ReadUInt(address + 0xC)];
+ for(int i = 0; i < corners.Length; i++)
+ {
+ corners[i] = BufferCorner.Read(reader, ref cornerAddr, hasColors);
+ }
+ }
+
+ uint[]? triangles = null;
+ if(reader.TryReadPointer(address + 0x18, out uint triangleAddr))
+ {
+ triangles = new uint[reader.ReadUInt(address + 0x14)];
+ for(int i = 0; i < triangles.Length; i++)
+ {
+ triangles[i] = reader.ReadUInt(triangleAddr);
+ triangleAddr += 4;
+ }
+ }
+
+ BufferMaterial material = BufferMaterial.Read(reader, address + 0x1C);
+
+ return new(
+ vertices,
+ material,
+ corners,
+ triangles,
+ strippified,
+ continueWeight,
+ hasNormals,
+ hasColors,
+ vertexWriteOffset,
+ vertexReadOffset
+ );
+ }
+
+
+ object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ /// <summary>
+ /// Creates a deep clone of the mesh.
+ /// </summary>
+ /// <returns>The clone of the mesh.</returns>
+ public BufferMesh Clone()
+ {
+ // arrays are copied; BufferMaterial and the element structs are value types
+ return new(
+ Vertices?.ToArray(),
+ Material,
+ Corners?.ToArray(),
+ IndexList?.ToArray(),
+ Strippified,
+ ContinueWeight,
+ HasNormals,
+ HasColors,
+ VertexWriteOffset,
+ VertexReadOffset
+ );
+ }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"{Vertices?.Length} - {Corners?.Length} - {IndexList?.Length}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Buffer/BufferVertex.cs b/src/SA3D.Modeling/Mesh/Buffer/BufferVertex.cs
new file mode 100644
index 0000000..f04ba9c
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Buffer/BufferVertex.cs
@@ -0,0 +1,218 @@
using SA3D.Common.IO;
using SA3D.Modeling.Structs;
using System;
using System.Numerics;

namespace SA3D.Modeling.Mesh.Buffer
{
	/// <summary>
	/// A single point in space with a direction and weight.
	/// </summary>
	public struct BufferVertex : IEquatable<BufferVertex>
	{
		/// <summary>
		/// Position in 3D space.
		/// </summary>
		public Vector3 Position { get; set; }

		/// <summary>
		/// Normalized direction that the vertex is facing in.
		/// </summary>
		public Vector3 Normal { get; set; }

		/// <summary>
		/// Index in the vertex cache that this vertex occupies.
		/// </summary>
		public ushort Index { get; set; }

		/// <summary>
		/// Influence of the assigned node on the vertices position and direction.
		/// </summary>
		public float Weight { get; set; }


		/// <summary>
		/// Creates a new buffer vertex with default normal and full weight.
		/// </summary>
		/// <param name="position">Position in 3D space.</param>
		/// <param name="index">Index in the buffer array that this vertex occupies.</param>
		public BufferVertex(Vector3 position, ushort index)
		{
			Position = position;
			Normal = BufferMesh.DefaultNormal;
			Index = index;
			Weight = 1;
		}

		/// <summary>
		/// Creates a new buffer vertex with full weight.
		/// </summary>
		/// <param name="position">Position in 3D space.</param>
		/// <param name="normal">Normalized direction that the vertex is facing in.</param>
		/// <param name="index">Index in the buffer array that this vertex occupies.</param>
		public BufferVertex(Vector3 position, Vector3 normal, ushort index)
		{
			Position = position;
			Normal = normal;
			Index = index;
			Weight = 1;
		}

		/// <summary>
		/// Creates a new buffer vertex.
		/// </summary>
		/// <param name="position">Position in 3D space.</param>
		/// <param name="normal">Normalized direction that the vertex is facing in.</param>
		/// <param name="index">Index in the buffer array that this vertex occupies.</param>
		/// <param name="weight">Influence of the assigned node on the vertices position and direction.</param>
		public BufferVertex(Vector3 position, Vector3 normal, ushort index, float weight)
		{
			Position = position;
			Normal = normal;
			Index = index;
			Weight = weight;
		}


		/// <summary>
		/// Writes the vertex to an endian stack writer.
		/// </summary>
		/// <param name="writer">The writer to write to.</param>
		/// <param name="writeNormal">Whether the normal should be written too.</param>
		public readonly void Write(EndianStackWriter writer, bool writeNormal)
		{
			writer.WriteUShort(Index);
			// weight is stored as a 16 bit fixed point fraction of ushort.MaxValue
			writer.WriteUShort((ushort)(Weight * ushort.MaxValue));

			writer.WriteVector3(Position);
			if(writeNormal)
			{
				writer.WriteVector3(Normal);
			}
		}

		/// <summary>
		/// Reads a buffer vertex off an endian stack reader. Advances the address by the number of bytes read.
		/// </summary>
		/// <param name="reader">Byte source.</param>
		/// <param name="address">Address at which to start reading.</param>
		/// <param name="hasNormal">Whether the vertex contains a normal.</param>
		/// <returns>The vertex that was read.</returns>
		public static BufferVertex Read(EndianStackReader reader, ref uint address, bool hasNormal)
		{
			const float WeightFactor = 1f / ushort.MaxValue;

			BufferVertex result = default;

			result.Index = reader.ReadUShort(address);
			result.Weight = reader.ReadUShort(address + 2) * WeightFactor;
			address += 4;
			result.Position = reader.ReadVector3(ref address);
			result.Normal = hasNormal ? reader.ReadVector3(ref address) : BufferMesh.DefaultNormal;

			return result;
		}


		/// <inheritdoc/>
		public override readonly bool Equals(object? obj)
		{
			return obj is BufferVertex vertex && Equals(vertex);
		}

		/// <inheritdoc/>
		public override readonly int GetHashCode()
		{
			return HashCode.Combine(Position, Normal, Index, Weight);
		}

		/// <inheritdoc/>
		public readonly bool Equals(BufferVertex other)
		{
			// Compare field by field. The previous implementation returned Equals(other),
			// which resolved back to this very overload and recursed until stack overflow.
			return Position.Equals(other.Position)
				&& Normal.Equals(other.Normal)
				&& Index == other.Index
				&& Weight == other.Weight;
		}

		/// <summary>
		/// Compares two vertices for equality.
		/// </summary>
		/// <param name="left">Lefthand vertex.</param>
		/// <param name="right">Righthand vertex.</param>
		/// <returns>Whether the two vertices are equal.</returns>
		public static bool operator ==(BufferVertex left, BufferVertex right)
		{
			return left.Equals(right);
		}

		/// <summary>
		/// Compares two vertices for inequality.
		/// </summary>
		/// <param name="left">Lefthand vertex.</param>
		/// <param name="right">Righthand vertex.</param>
		/// <returns>Whether the two vertices are inequal.</returns>
		public static bool operator !=(BufferVertex left, BufferVertex right)
		{
			return !(left == right);
		}


		/// <summary>
		/// Adds the position and normal of two vertices together.
		/// <br/> Index is taken from the lefthand vertex.
		/// <br/> Weight is always 1.
		/// </summary>
		/// <param name="l">Lefthand vertex.</param>
		/// <param name="r">Righthand vertex.</param>
		/// <returns>The combined vertex.</returns>
		public static BufferVertex operator +(BufferVertex l, BufferVertex r)
		{
			return new BufferVertex()
			{
				Position = l.Position + r.Position,
				Normal = l.Normal + r.Normal,
				Index = l.Index,
				Weight = 1
			};
		}

		/// <summary>
		/// Multiplies position and normal of a vertex by a value.
		/// </summary>
		/// <param name="l">Vertex to multiply.</param>
		/// <param name="r">Value to multiply by.</param>
		/// <returns>The scaled vertex.</returns>
		public static BufferVertex operator *(BufferVertex l, float r)
		{
			return new BufferVertex()
			{
				Position = l.Position * r,
				Normal = l.Normal * r,
				Index = l.Index,
				Weight = l.Weight
			};
		}

		/// <summary>
		/// Multiplies position and normal of a vertex by a value.
		/// </summary>
		/// <param name="l">Value to multiply by.</param>
		/// <param name="r">Vertex to multiply.</param>
		/// <returns>The scaled vertex.</returns>
		public static BufferVertex operator *(float l, BufferVertex r)
		{
			return r * l;
		}


		/// <inheritdoc/>
		public override readonly string ToString()
		{
			return Weight == 1.0f
				? $"{Index}: {Position.DebugString()}; {Normal.DebugString()}"
				: $"{Index}: {Position.DebugString()}; {Normal.DebugString()}; {Weight}";
		}
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Buffer/MaterialAttributes.cs b/src/SA3D.Modeling/Mesh/Buffer/MaterialAttributes.cs
new file mode 100644
index 0000000..4c7c9d9
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Buffer/MaterialAttributes.cs
@@ -0,0 +1,77 @@
using SA3D.Common;
using System;

namespace SA3D.Modeling.Mesh.Buffer
{
	/// <summary>
	/// Rendering attributes of materials.
	/// </summary>
	[Flags]
	public enum MaterialAttributes : ushort
	{
		/// <summary>Whether textures should be rendered.</summary>
		UseTexture = Flag16.B0,

		/// <summary>Enables anisotropic filtering.</summary>
		AnisotropicFiltering = Flag16.B1,

		/// <summary>Clamps texture coordinates along the horizontal axis between -1 and 1.</summary>
		ClampU = Flag16.B2,

		/// <summary>Clamps texture coordinates along the vertical axis between -1 and 1.</summary>
		ClampV = Flag16.B3,

		/// <summary>Mirrors texture coordinates along the horizontal axis every other full unit.</summary>
		MirrorU = Flag16.B4,

		/// <summary>Mirrors texture coordinates along the vertical axis every other full unit.</summary>
		MirrorV = Flag16.B5,

		/// <summary>Whether to use normal mapping for textures.</summary>
		NormalMapping = Flag16.B6,

		/// <summary>Ignores lighting as a whole.</summary>
		NoLighting = Flag16.B7,

		/// <summary>Ignores ambient lighting.</summary>
		NoAmbient = Flag16.B8,

		/// <summary>Ignores specular lighting.</summary>
		NoSpecular = Flag16.B9,

		/// <summary>Ignores interpolated normals and instead renders every polygon flat.</summary>
		Flat = Flag16.B10,

		/// <summary>Enables transparent rendering.</summary>
		UseAlpha = Flag16.B11,

		/// <summary>Enables backface culling.</summary>
		BackfaceCulling = Flag16.B12,
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/ChunkAttach.cs b/src/SA3D.Modeling/Mesh/Chunk/ChunkAttach.cs
new file mode 100644
index 0000000..c818663
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/ChunkAttach.cs
@@ -0,0 +1,235 @@
using SA3D.Common.IO;
using SA3D.Common.Lookup;
using SA3D.Modeling.Mesh.Chunk.PolyChunks;
using SA3D.Modeling.Mesh.Chunk.Structs;
using SA3D.Modeling.Mesh.Converters;
using SA3D.Modeling.ObjectData;
using SA3D.Modeling.ObjectData.Enums;
using SA3D.Modeling.Structs;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using static SA3D.Common.StringExtensions;

namespace SA3D.Modeling.Mesh.Chunk
{
	/// <summary>
	/// Chunk format mesh data.
	/// </summary>
	public sealed class ChunkAttach : Attach
	{
		/// <summary>
		/// Vertex data blocks.
		/// </summary>
		public ILabeledArray<VertexChunk?>? VertexChunks { get; set; }

		/// <summary>
		/// Polygon data blocks.
		/// </summary>
		public ILabeledArray<PolyChunk?>? PolyChunks { get; set; }

		/// <inheritdoc/>
		public override AttachFormat Format
			=> AttachFormat.CHUNK;


		/// <summary>
		/// Creates a new chunk attach.
		/// </summary>
		/// <param name="vertexChunks">Vertex data blocks.</param>
		/// <param name="polyChunks">Polygon data blocks.</param>
		public ChunkAttach(VertexChunk?[]? vertexChunks, PolyChunk?[]? polyChunks) : base()
		{
			string identifier = GenerateIdentifier();
			Label = "attach_" + identifier;
			VertexChunks = vertexChunks == null ? null : new LabeledArray<VertexChunk?>("vertex_" + identifier, vertexChunks);
			PolyChunks = polyChunks == null ? null : new LabeledArray<PolyChunk?>("poly_" + identifier, polyChunks);
		}

		/// <summary>
		/// Creates a new chunk attach.
		/// </summary>
		/// <param name="vertexChunks">Vertex data blocks.</param>
		/// <param name="polyChunks">Polygon data blocks.</param>
		public ChunkAttach(ILabeledArray<VertexChunk?>? vertexChunks, ILabeledArray<PolyChunk?>? polyChunks) : base()
		{
			Label = "attach_" + GenerateIdentifier();
			VertexChunks = vertexChunks;
			PolyChunks = polyChunks;
		}

		private ChunkAttach(string label, ILabeledArray<VertexChunk?>? vertexChunks, ILabeledArray<PolyChunk?>? polyChunks, Bounds meshBounds) : base()
		{
			Label = label;
			VertexChunks = vertexChunks;
			PolyChunks = polyChunks;
			MeshBounds = meshBounds;
		}


		/// <inheritdoc/>
		public override bool CheckHasWeights()
		{
			// Without strip chunks, weights can only come from the vertex chunks themselves.
			if(PolyChunks == null || !PolyChunks.Any(a => a is StripChunk))
			{
				return VertexChunks != null && VertexChunks.Any(a => a?.HasWeight == true);
			}

			// Collect every vertex index provided by this attach's vertex chunks. If any
			// strip corner references an index outside that set, the polygons depend on
			// vertices written by another attach, i.e. the model is weighted.
			HashSet<int> ids = new();
			if(VertexChunks != null)
			{
				foreach(VertexChunk? vc in VertexChunks)
				{
					if(vc == null)
					{
						continue;
					}

					if(vc.HasWeight)
					{
						return true;
					}

					ids.UnionWith(Enumerable.Range(vc.IndexOffset, vc.Vertices.Length));
				}
			}

			return PolyChunks
				.OfType<StripChunk>()
				.SelectMany(a => a.Strips)
				.SelectMany(a => a.Corners)
				.Any(a => !ids.Contains(a.Index));
		}

		/// <inheritdoc/>
		public override void RecalculateBounds()
		{
			if(PolyChunks == null || VertexChunks == null)
			{
				MeshBounds = default;
				return;
			}

			IEnumerable<Vector3> vertexEnumerator()
			{
				foreach(VertexChunk? cnk in VertexChunks!)
				{
					if(cnk == null)
					{
						continue;
					}

					foreach(ChunkVertex vtx in cnk.Vertices)
					{
						yield return vtx.Position;
					}
				}
			}

			MeshBounds = Bounds.FromPoints(vertexEnumerator());

			// Weighted vertices are positioned at runtime; zero the radius instead of
			// keeping bounds computed from the unposed positions.
			if(CheckHasWeights())
			{
				MeshBounds = new(MeshBounds.Position, 0);
			}
		}

		/// <inheritdoc/>
		public override bool CanWrite(ModelFormat format)
		{
			return base.CanWrite(format) || format is ModelFormat.SA2;
		}

		/// <summary>
		/// Calculates the active polygon chunks per chunk attach in a model tree.
		/// </summary>
		/// <param name="model">The model to get the active polygon chunks for.</param>
		/// <returns>The active polygon chunks.</returns>
		/// <exception cref="FormatException">Thrown when the model is not a chunk model.</exception>
		public static Dictionary<ChunkAttach, PolyChunk[]> GetActivePolyChunks(Node model)
		{
			// NOTE(review): dictionary type arguments reconstructed from usage — confirm
			// against ChunkConverter.GetActivePolyChunks.
			if(model.GetAttachFormat() != AttachFormat.CHUNK)
			{
				throw new FormatException($"Model {model.Label} is not a chunk model.");
			}

			return ChunkConverter.GetActivePolyChunks(model);
		}

		/// <summary>
		/// Reads a chunk attach off an endian byte reader.
		/// </summary>
		/// <param name="reader">Reader to read from.</param>
		/// <param name="address">Address at which to start reading.</param>
		/// <param name="lut">Pointer references to utilize.</param>
		/// <returns>The chunk attach that was read.</returns>
		public static ChunkAttach Read(EndianStackReader reader, uint address, PointerLUT lut)
		{
			ChunkAttach onRead()
			{
				ILabeledArray<VertexChunk?>? vertexChunks = null;
				if(reader.TryReadPointer(address, out uint vertexAddress))
				{
					vertexChunks = lut.GetAddLabeledValue<LabeledArray<VertexChunk?>>(vertexAddress, "vertex_",
						() => new(VertexChunk.ReadArray(reader, vertexAddress)));
				}

				ILabeledArray<PolyChunk?>? polyChunks = null;
				if(reader.TryReadPointer(address + 4, out uint polyAddress))
				{
					polyChunks = lut.GetAddLabeledValue<LabeledArray<PolyChunk?>>(polyAddress, "poly_",
						() => new(PolyChunk.ReadArray(reader, polyAddress, lut)));
				}

				return new ChunkAttach(vertexChunks, polyChunks)
				{
					MeshBounds = Bounds.Read(reader, address + 8)
				};
			}

			return lut.GetAddLabeledValue(address, "attach_", onRead);
		}

		/// <inheritdoc/>
		protected override uint WriteInternal(EndianStackWriter writer, ModelFormat format, PointerLUT lut)
		{
			// Write the chunk arrays first (if not already written), then the attach
			// structure referencing them.
			uint vertexAddress = lut.GetAddAddress(VertexChunks, () => VertexChunk.WriteArray(writer, VertexChunks!));
			uint polyAddress = lut.GetAddAddress(PolyChunks, () => PolyChunk.WriteArray(writer, PolyChunks!, lut));
			uint address = writer.PointerPosition;

			writer.WriteUInt(vertexAddress);
			writer.WriteUInt(polyAddress);
			MeshBounds.Write(writer);

			return address;
		}


		/// <inheritdoc/>
		public override ChunkAttach Clone()
		{
			LabeledArray<VertexChunk?>? vertexChunks = null;
			LabeledArray<PolyChunk?>? polyChunks = null;

			if(VertexChunks != null)
			{
				VertexChunk?[] chunks = VertexChunks.Select(x => x?.Clone()).ToArray();
				vertexChunks = new(VertexChunks.Label, chunks);
			}

			if(PolyChunks != null)
			{
				PolyChunk?[] chunks = PolyChunks.Select(x => x?.Clone()).ToArray();
				polyChunks = new(PolyChunks.Label, chunks);
			}

			return new ChunkAttach(Label, vertexChunks, polyChunks, MeshBounds);
		}

		/// <inheritdoc/>
		public override string ToString()
		{
			return $"CHUNK {Label} - V[{VertexChunks?.Length}], P[{PolyChunks?.Length}]";
		}
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/ChunkTypeExtensions.cs b/src/SA3D.Modeling/Mesh/Chunk/ChunkTypeExtensions.cs
new file mode 100644
index 0000000..2427a1f
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/ChunkTypeExtensions.cs
@@ -0,0 +1,171 @@
using System;

namespace SA3D.Modeling.Mesh.Chunk
{
	/// <summary>
	/// Extension methods for the chunk type enums.
	/// </summary>
	public static class ChunkTypeExtensions
	{
		// Base values of the poly chunk type groups (see PolyChunkType).
		internal const byte _bits = 1;
		internal const byte _tiny = 8;
		internal const byte _material = 16;
		internal const byte _vertex = 32;
		internal const byte _volume = 56;
		internal const byte _strip = 64;

		/// <summary>
		/// Checks whether a vertex chunktype uses 4 component vectors for positions/normals.
		/// </summary>
		/// <param name="type">The type to check.</param>
		public static bool CheckIsVec4(this VertexChunkType type)
		{
			return type is VertexChunkType.BlankVec4 or VertexChunkType.NormalVec4;
		}

		/// <summary>
		/// Checks whether a vertex chunktype uses 32 bit compressed vectors for normals.
		/// </summary>
		/// <param name="type">The type to check.</param>
		public static bool CheckIsNormal32(this VertexChunkType type)
		{
			return type is VertexChunkType.Normal32
				or VertexChunkType.Normal32Diffuse
				or VertexChunkType.Normal32UserAttributes;
		}

		/// <summary>
		/// Checks whether a vertex chunktype has normals.
		/// </summary>
		/// <param name="type">The type to check.</param>
		public static bool CheckHasNormal(this VertexChunkType type)
		{
			return type
				is VertexChunkType.NormalVec4
				or VertexChunkType.Normal
				or VertexChunkType.NormalDiffuse
				or VertexChunkType.NormalUserAttributes
				or VertexChunkType.NormalAttributes
				or VertexChunkType.NormalDiffuseSpecular5
				or VertexChunkType.NormalDiffuseSpecular4
				or VertexChunkType.NormalIntensity
				or VertexChunkType.Normal32
				or VertexChunkType.Normal32Diffuse
				or VertexChunkType.Normal32UserAttributes;
		}

		/// <summary>
		/// Checks whether a vertex chunktype has diffuse colors.
		/// </summary>
		/// <param name="type">The type to check.</param>
		/// <returns>Whether the type has diffuse colors.</returns>
		public static bool CheckHasDiffuseColor(this VertexChunkType type)
		{
			return type
				is VertexChunkType.Diffuse
				or VertexChunkType.DiffuseSpecular5
				or VertexChunkType.DiffuseSpecular4
				or VertexChunkType.Intensity
				or VertexChunkType.NormalDiffuse
				or VertexChunkType.NormalDiffuseSpecular5
				or VertexChunkType.NormalDiffuseSpecular4
				or VertexChunkType.NormalIntensity
				or VertexChunkType.Normal32Diffuse;

		}

		/// <summary>
		/// Checks whether a vertex chunktype has specular colors.
		/// </summary>
		/// <param name="type">The type to check.</param>
		/// <returns>Whether the type has specular colors.</returns>
		public static bool CheckHasSpecularColor(this VertexChunkType type)
		{
			return type
				is VertexChunkType.DiffuseSpecular5
				or VertexChunkType.DiffuseSpecular4
				or VertexChunkType.Intensity
				or VertexChunkType.NormalDiffuseSpecular5
				or VertexChunkType.NormalDiffuseSpecular4
				or VertexChunkType.NormalIntensity;
		}

		/// <summary>
		/// Checks whether a vertex chunktype has attributes (user attributes included too).
		/// </summary>
		/// <param name="type">The type to check.</param>
		/// <returns>Whether the type has attributes.</returns>
		public static bool CheckHasAttributes(this VertexChunkType type)
		{
			return type is VertexChunkType.Attributes
				or VertexChunkType.UserAttributes
				or VertexChunkType.NormalAttributes
				or VertexChunkType.NormalUserAttributes
				or VertexChunkType.Normal32UserAttributes;
		}

		/// <summary>
		/// Checks whether a vertex chunktype has weights.
		/// </summary>
		/// <param name="type">The type to check.</param>
		/// <returns>Whether the type has weights.</returns>
		public static bool CheckHasWeights(this VertexChunkType type)
		{
			return type is VertexChunkType.Attributes
				or VertexChunkType.NormalAttributes;
		}

		/// <summary>
		/// Checks whether a strip poly chunktype has diffuse colors.
		/// </summary>
		/// <param name="type">The type to check.</param>
		/// <returns>Whether the strip type has diffuse colors.</returns>
		public static bool CheckStripHasColor(this PolyChunkType type)
		{
			return type is PolyChunkType.Strip_Color
				or PolyChunkType.Strip_TexColor
				or PolyChunkType.Strip_HDTexColor;
		}

		/// <summary>
		/// Returns the number of 4-byte values a chunk vertex has.
		/// </summary>
		/// <param name="type">Type to get the size of.</param>
		/// <returns>The number of 4-byte values per vertex.</returns>
		/// <exception cref="ArgumentException">Thrown for types with no vertex data (Null/End).</exception>
		public static ushort GetIntegerSize(this VertexChunkType type)
		{
			switch(type)
			{
				case VertexChunkType.Blank:
					return 3;
				case VertexChunkType.BlankVec4:
				case VertexChunkType.Diffuse:
				case VertexChunkType.UserAttributes:
				case VertexChunkType.Attributes:
				case VertexChunkType.DiffuseSpecular5:
				case VertexChunkType.DiffuseSpecular4:
				case VertexChunkType.Intensity:
				case VertexChunkType.Normal32:
					return 4;
				case VertexChunkType.Normal32Diffuse:
				case VertexChunkType.Normal32UserAttributes:
					return 5;
				case VertexChunkType.Normal:
					return 6;
				case VertexChunkType.NormalDiffuse:
				case VertexChunkType.NormalUserAttributes:
				case VertexChunkType.NormalAttributes:
				case VertexChunkType.NormalDiffuseSpecular5:
				case VertexChunkType.NormalDiffuseSpecular4:
				case VertexChunkType.NormalIntensity:
					return 7;
				case VertexChunkType.NormalVec4:
					return 8;
				case VertexChunkType.Null:
				case VertexChunkType.End:
				default:
					throw new ArgumentException($"Invalid vertex chunk type: {type}", nameof(type));
			}
		}
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunk.cs
new file mode 100644
index 0000000..6b80eda
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunk.cs
@@ -0,0 +1,234 @@
using SA3D.Common.IO;
using SA3D.Modeling.Mesh.Chunk.PolyChunks;
using SA3D.Modeling.Structs;
using System;
using System.Collections.Generic;

namespace SA3D.Modeling.Mesh.Chunk
{
	/// <summary>
	/// Polychunk base class.
	/// </summary>
	public abstract class PolyChunk : ICloneable
	{
		/// <summary>
		/// Type.
		/// </summary>
		public PolyChunkType Type { get; protected set; }

		/// <summary>
		/// Additional attributes.
		/// </summary>
		public byte Attributes { get; set; }

		/// <summary>
		/// Size of the chunk in bytes.
		/// </summary>
		public abstract uint ByteSize { get; }

		/// <summary>
		/// Base constructor for every poly chunk.
		/// </summary>
		/// <param name="type">Type of the chunk.</param>
		protected PolyChunk(PolyChunkType type)
		{
			Type = type;
		}


		/// <summary>
		/// Writes the poly chunk to an endian stack writer.
		/// </summary>
		/// <param name="writer">The writer to write to.</param>
		/// <param name="lut">Pointer references to utilize.</param>
		public void Write(EndianStackWriter writer, PointerLUT lut)
		{
			lut.PolyChunks.Add(writer.PointerPosition, this);
			// header: low byte = type, high byte = attributes
			writer.WriteUShort((ushort)((byte)Type | (Attributes << 8)));
			InternalWrite(writer);
		}

		/// <summary>
		/// Writes an array of poly chunks to an endian stack writer. Includes NULL and END chunks.
		/// </summary>
		/// <param name="writer">The writer to write to.</param>
		/// <param name="chunks">Chunks to write.</param>
		/// <param name="lut">Pointer references to utilize.</param>
		/// <returns>The address at which the chunks were written.</returns>
		public static uint WriteArray(EndianStackWriter writer, IEnumerable<PolyChunk?> chunks, PointerLUT lut)
		{
			uint result = writer.PointerPosition;

			foreach(PolyChunk? chunk in chunks)
			{
				if(chunk == null)
				{
					// null chunk: type 0, attributes 0
					writer.WriteEmpty(2);
					continue;
				}

				chunk.Write(writer, lut);
			}

			// end chunk
			writer.WriteUShort(0xFF);

			return result;
		}

		/// <summary>
		/// Writes the poly chunks body to an endian stack writer.
		/// </summary>
		/// <param name="writer">The writer to write to.</param>
		protected abstract void InternalWrite(EndianStackWriter writer);

		/// <summary>
		/// Reads a poly chunk off an endian stack reader. Advances the address by the number of bytes read.
		/// </summary>
		/// <param name="reader">Reader to read from.</param>
		/// <param name="address">Address at which to start reading.</param>
		/// <param name="lut">Pointer references to utilize.</param>
		/// <returns>The poly chunk that was read.</returns>
		/// <exception cref="FormatException">Thrown when the chunk type is undefined, Null or End.</exception>
		public static PolyChunk Read(EndianStackReader reader, ref uint address, PointerLUT lut)
		{
			// header: low byte = type, high byte = attributes
			ushort header = reader.ReadUShort(address);
			PolyChunkType type = (PolyChunkType)(header & 0xFF);
			byte attribs = (byte)(header >> 8);

			if(!Enum.IsDefined(type) || type is PolyChunkType.End or PolyChunkType.Null)
			{
				throw new FormatException($"Poly chunk type is invalid: {type}");
			}

			PolyChunk chunk;
			switch(type)
			{
				case PolyChunkType.BlendAlpha:
					chunk = new BlendAlphaChunk();
					address += 2;
					break;
				case PolyChunkType.MipmapDistanceMultiplier:
					chunk = new MipmapDistanceMultiplierChunk();
					address += 2;
					break;
				case PolyChunkType.SpecularExponent:
					chunk = new SpecularExponentChunk();
					address += 2;
					break;
				case PolyChunkType.CacheList:
					chunk = new CacheListChunk();
					address += 2;
					break;
				case PolyChunkType.DrawList:
					chunk = new DrawListChunk();
					address += 2;
					break;
				case PolyChunkType.TextureID:
				case PolyChunkType.TextureID2:
					chunk = TextureChunk.Read(reader, address);
					address += chunk.ByteSize;
					break;
				case PolyChunkType.Material_Diffuse:
				case PolyChunkType.Material_Ambient:
				case PolyChunkType.Material_DiffuseAmbient:
				case PolyChunkType.Material_Specular:
				case PolyChunkType.Material_DiffuseSpecular:
				case PolyChunkType.Material_AmbientSpecular:
				case PolyChunkType.Material_DiffuseAmbientSpecular:
				case PolyChunkType.Material_Diffuse2:
				case PolyChunkType.Material_Ambient2:
				case PolyChunkType.Material_DiffuseAmbient2:
				case PolyChunkType.Material_Specular2:
				case PolyChunkType.Material_DiffuseSpecular2:
				case PolyChunkType.Material_AmbientSpecular2:
				case PolyChunkType.Material_DiffuseAmbientSpecular2:
					chunk = MaterialChunk.Read(reader, ref address);
					break;
				case PolyChunkType.Material_Bump:
					chunk = MaterialBumpChunk.Read(reader, address);
					address += 16;
					break;
				case PolyChunkType.Volume_Polygon3:
				case PolyChunkType.Volume_Polygon4:
				case PolyChunkType.Volume_Strip:
					chunk = VolumeChunk.Read(reader, ref address);
					break;
				case PolyChunkType.Strip_Blank:
				case PolyChunkType.Strip_Tex:
				case PolyChunkType.Strip_HDTex:
				case PolyChunkType.Strip_Normal:
				case PolyChunkType.Strip_TexNormal:
				case PolyChunkType.Strip_HDTexNormal:
				case PolyChunkType.Strip_Color:
				case PolyChunkType.Strip_TexColor:
				case PolyChunkType.Strip_HDTexColor:
				case PolyChunkType.Strip_BlankDouble:
				case PolyChunkType.Strip_TexDouble:
				case PolyChunkType.Strip_HDTexDouble:
					chunk = StripChunk.Read(reader, ref address);
					break;
				case PolyChunkType.Null:
				case PolyChunkType.End:
				default:
					throw new InvalidOperationException(); // cant be reached
			}

			chunk.Attributes = attribs;
			// NOTE(review): this registers the chunk under the already-advanced address,
			// while Write registers it at the chunk's start position — confirm intent.
			lut.PolyChunks.Add(address, chunk);
			return chunk;
		}

		/// <summary>
		/// Reads an array of poly chunks off an endian stack reader. Respects NULL and END chunks.
		/// </summary>
		/// <param name="reader">The reader to read from.</param>
		/// <param name="address">Address at which to start reading.</param>
		/// <param name="lut">Pointer references to utilize.</param>
		/// <returns>The poly chunks that were read.</returns>
		public static PolyChunk?[] ReadArray(EndianStackReader reader, uint address, PointerLUT lut)
		{
			List<PolyChunk?> result = new();

			PolyChunkType readType()
			{
				return (PolyChunkType)(reader.ReadUShort(address) & 0xFF);
			}

			for(PolyChunkType type = readType(); type != PolyChunkType.End; type = readType())
			{
				if(type == PolyChunkType.Null)
				{
					result.Add(null);
					address += 2;
					continue;
				}

				result.Add(Read(reader, ref address, lut));
			}

			return result.ToArray();
		}



		object ICloneable.Clone()
		{
			return Clone();
		}

		/// <summary>
		/// Creates a clone of the poly chunk.
		/// <br/> The base implementation is a shallow memberwise clone; subclasses
		/// override it to deep-copy their owned data.
		/// </summary>
		/// <returns>The cloned poly chunk.</returns>
		public virtual PolyChunk Clone()
		{
			return (PolyChunk)MemberwiseClone();
		}

		/// <inheritdoc/>
		public override string ToString()
		{
			return Type.ToString();
		}
	}
}
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunkType.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunkType.cs
new file mode 100644
index 0000000..9f15fc5
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunkType.cs
@@ -0,0 +1,210 @@
namespace SA3D.Modeling.Mesh.Chunk
{
	/// <summary>
	/// Chunk type.
	/// </summary>
	public enum PolyChunkType : byte
	{
		/// <summary>Null chunk.</summary>
		Null = 0,

		/// <summary>Contains transparency blendmodes.</summary>
		BlendAlpha = ChunkTypeExtensions._bits + 0,

		/// <summary>Contains mipmap distance multiplier.</summary>
		MipmapDistanceMultiplier = ChunkTypeExtensions._bits + 1,

		/// <summary>Contains specularity exponent.</summary>
		SpecularExponent = ChunkTypeExtensions._bits + 2,

		/// <summary>Contains index for caching poly chunks.</summary>
		CacheList = ChunkTypeExtensions._bits + 3,

		/// <summary>Contains index for drawing poly chunks.</summary>
		DrawList = ChunkTypeExtensions._bits + 4,

		/// <summary>Contains texture information.</summary>
		TextureID = ChunkTypeExtensions._tiny + 0,

		/// <summary>Contains texture information. Same as <see cref="TextureID"/>.</summary>
		TextureID2 = ChunkTypeExtensions._tiny + 1,

		/// <summary>Material; Contains diffuse color (ARGB32).</summary>
		Material_Diffuse = ChunkTypeExtensions._material + 1,

		/// <summary>Material; Contains ambient color (RGB24).</summary>
		Material_Ambient = ChunkTypeExtensions._material + 2,

		/// <summary>Material; Contains diffuse color (ARGB32) and ambient color (RGB24).</summary>
		Material_DiffuseAmbient = ChunkTypeExtensions._material + 3,

		/// <summary>Material; Contains specular exponent and color (RGB24).</summary>
		Material_Specular = ChunkTypeExtensions._material + 4,

		/// <summary>Material; Contains diffuse color (ARGB32), specular exponent and color (RGB24).</summary>
		Material_DiffuseSpecular = ChunkTypeExtensions._material + 5,

		/// <summary>Material; Contains ambient color (RGB24), specular exponent and color (RGB24).</summary>
		Material_AmbientSpecular = ChunkTypeExtensions._material + 6,

		/// <summary>Material; Contains diffuse color (ARGB32), ambient color (RGB24), specular exponent and color (RGB24).</summary>
		Material_DiffuseAmbientSpecular = ChunkTypeExtensions._material + 7,

		/// <summary>Material; Contains "bump" information (?).</summary>
		Material_Bump = ChunkTypeExtensions._material + 8,

		/// <summary>Material; Contains diffuse color (ARGB32). <br/> Same as <see cref="Material_Diffuse"/>.</summary>
		Material_Diffuse2 = ChunkTypeExtensions._material + 9,

		/// <summary>Material; Contains ambient color (RGB24). <br/> Same as <see cref="Material_Ambient"/>.</summary>
		Material_Ambient2 = ChunkTypeExtensions._material + 10,

		/// <summary>Material; Contains diffuse color (ARGB32) and ambient color (RGB24). <br/> Same as <see cref="Material_DiffuseAmbient"/>.</summary>
		Material_DiffuseAmbient2 = ChunkTypeExtensions._material + 11,

		/// <summary>Material; Contains specular exponent and color (RGB24). <br/> Same as <see cref="Material_Specular"/>.</summary>
		Material_Specular2 = ChunkTypeExtensions._material + 12,

		/// <summary>Material; Contains diffuse color (ARGB32), specular exponent and color (RGB24). <br/> Same as <see cref="Material_DiffuseSpecular"/>.</summary>
		Material_DiffuseSpecular2 = ChunkTypeExtensions._material + 13,

		/// <summary>Material; Contains ambient color (RGB24), specular exponent and color (RGB24). <br/> Same as <see cref="Material_AmbientSpecular"/>.</summary>
		Material_AmbientSpecular2 = ChunkTypeExtensions._material + 14,

		/// <summary>Material; Contains diffuse color (ARGB32), ambient color (RGB24), specular exponent and color (RGB24). <br/> Same as <see cref="Material_DiffuseAmbientSpecular"/>.</summary>
		Material_DiffuseAmbientSpecular2 = ChunkTypeExtensions._material + 15,

		/// <summary>Volume defined from triangles.</summary>
		Volume_Polygon3 = ChunkTypeExtensions._volume + 0,

		/// <summary>Volume defined from quads.</summary>
		Volume_Polygon4 = ChunkTypeExtensions._volume + 1,

		/// <summary>Volume defined from triangle strips.</summary>
		Volume_Strip = ChunkTypeExtensions._volume + 2,

		/// <summary>Triangle strips for rendering; No additional info.</summary>
		Strip_Blank = ChunkTypeExtensions._strip + 0,

		/// <summary>Triangle strips for rendering; Contains texture coordinates (0-255 range).</summary>
		Strip_Tex = ChunkTypeExtensions._strip + 1,

		/// <summary>Triangle strips for rendering; Contains texture coordinates (0-1023 range).</summary>
		Strip_HDTex = ChunkTypeExtensions._strip + 2,

		/// <summary>Triangle strips for rendering; Contains normals.</summary>
		Strip_Normal = ChunkTypeExtensions._strip + 3,

		/// <summary>Triangle strips for rendering; Contains normals, texture coordinates (0-255 range).</summary>
		Strip_TexNormal = ChunkTypeExtensions._strip + 4,

		/// <summary>Triangle strips for rendering; Contains normals, texture coordinates (0-1023 range).</summary>
		Strip_HDTexNormal = ChunkTypeExtensions._strip + 5,

		/// <summary>Triangle strips for rendering; Contains colors (ARGB32).</summary>
		Strip_Color = ChunkTypeExtensions._strip + 6,

		/// <summary>Triangle strips for rendering; Contains colors (ARGB32), texture coordinates (0-255 range).</summary>
		Strip_TexColor = ChunkTypeExtensions._strip + 7,

		/// <summary>Triangle strips for rendering; Contains colors (ARGB32), texture coordinates (0-1023 range).</summary>
		Strip_HDTexColor = ChunkTypeExtensions._strip + 8,

		/// <summary>Triangle strips for rendering; Same as <see cref="Strip_Blank"/> (cref reconstructed — confirm).</summary>
		Strip_BlankDouble = ChunkTypeExtensions._strip + 9,

		/// <summary>Triangle strips for rendering; Contains 2 sets of texture coordinates (0-255 range).</summary>
		Strip_TexDouble = ChunkTypeExtensions._strip + 10,

		/// <summary>Triangle strips for rendering; Contains 2 sets of texture coordinates (0-1023 range).</summary>
		Strip_HDTexDouble = ChunkTypeExtensions._strip + 11,

		/// <summary>End marker chunk.</summary>
		End = 255
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BitsChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BitsChunk.cs
new file mode 100644
index 0000000..d369dbd
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BitsChunk.cs
@@ -0,0 +1,22 @@
using SA3D.Common.IO;

namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
{
	/// <summary>
	/// Base class for poly chunks with no body.
	/// </summary>
	public abstract class BitsChunk : PolyChunk
	{
		/// <inheritdoc/>
		public override uint ByteSize => 2;

		/// <summary>
		/// Base constructor for bits chunks.
		/// </summary>
		/// <param name="type">Type of the chunk.</param>
		protected BitsChunk(PolyChunkType type) : base(type) { }

		/// <summary>
		/// Bits chunks consist only of the 2-byte header, so there is no body to write.
		/// </summary>
		/// <param name="writer">The writer to write to.</param>
		protected override void InternalWrite(EndianStackWriter writer) { }
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BlendAlphaChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BlendAlphaChunk.cs
new file mode 100644
index 0000000..99c570a
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/BlendAlphaChunk.cs
@@ -0,0 +1,37 @@
namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
{
	/// <summary>
	/// Sets the blendmode of the following strip chunks.
	/// </summary>
	public class BlendAlphaChunk : BitsChunk
	{
		// Attribute bit layout: bits 0-2 = destination blendmode, bits 3-5 = source blendmode.
		private const int _destinationMask = 7;
		private const int _sourceShift = 3;
		private const int _sourceMask = _destinationMask << _sourceShift;

		/// <summary>
		/// Source blendmode.
		/// </summary>
		public BlendMode SourceAlpha
		{
			get
			{
				int bits = (Attributes >> _sourceShift) & _destinationMask;
				return (BlendMode)bits;
			}
			set
			{
				int cleared = Attributes & ~_sourceMask;
				Attributes = (byte)(cleared | ((byte)value << _sourceShift));
			}
		}

		/// <summary>
		/// Destination blendmode.
		/// </summary>
		public BlendMode DestinationAlpha
		{
			get
			{
				return (BlendMode)(Attributes & _destinationMask);
			}
			set
			{
				int cleared = Attributes & ~_destinationMask;
				Attributes = (byte)(cleared | (byte)value);
			}
		}

		/// <summary>
		/// Creates a new blendalpha chunk.
		/// </summary>
		public BlendAlphaChunk() : base(PolyChunkType.BlendAlpha) { }

		/// <inheritdoc/>
		public override string ToString()
		{
			return $"BlendAlpha - {SourceAlpha} -> {DestinationAlpha}";
		}
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/CacheListChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/CacheListChunk.cs
new file mode 100644
index 0000000..1b5110e
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/CacheListChunk.cs
@@ -0,0 +1,28 @@
namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
{
	/// <summary>
	/// Caches the succeeding polygon chunks of the same attach into a specified index.
	/// </summary>
	public class CacheListChunk : BitsChunk
	{
		/// <summary>
		/// Cache ID.
		/// </summary>
		public byte List
		{
			// The cache ID is stored directly in the chunk header's attribute byte.
			get
			{
				return Attributes;
			}
			set
			{
				Attributes = value;
			}
		}

		/// <summary>
		/// Creates a new cache list chunk.
		/// </summary>
		public CacheListChunk() : base(PolyChunkType.CacheList) { }

		/// <inheritdoc/>
		public override string ToString()
		{
			return "Cache list - " + List;
		}
	}
}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/DrawListChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/DrawListChunk.cs
new file mode 100644
index 0000000..b303b5b
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/DrawListChunk.cs
@@ -0,0 +1,28 @@
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Draws the polygon chunks cached by a specific index.
+ /// </summary>
+ public class DrawListChunk : BitsChunk
+ {
+ /// <summary>
+ /// Cache ID. Stored directly in the attribute byte.
+ /// </summary>
+ public byte List
+ {
+ get => Attributes;
+ set => Attributes = value;
+ }
+
+ /// <summary>
+ /// Creates a new draw list chunk.
+ /// </summary>
+ public DrawListChunk() : base(PolyChunkType.DrawList) { }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"Draw List - {List}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialBumpChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialBumpChunk.cs
new file mode 100644
index 0000000..7885863
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialBumpChunk.cs
@@ -0,0 +1,79 @@
+using SA3D.Common.IO;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Polychunk with unknown usage.
+ /// </summary>
+ public class MaterialBumpChunk : SizedChunk
+ {
+ /// <inheritdoc/>
+ public override ushort Size => 6;
+
+ /// <summary>
+ /// DX.
+ /// </summary>
+ public ushort DX { get; set; }
+
+ /// <summary>
+ /// DY.
+ /// </summary>
+ public ushort DY { get; set; }
+
+ /// <summary>
+ /// DZ.
+ /// </summary>
+ public ushort DZ { get; set; }
+
+ /// <summary>
+ /// UX.
+ /// </summary>
+ public ushort UX { get; set; }
+
+ /// <summary>
+ /// UY.
+ /// </summary>
+ public ushort UY { get; set; }
+
+ /// <summary>
+ /// UZ.
+ /// </summary>
+ public ushort UZ { get; set; }
+
+ /// <summary>
+ /// Creates a new material bump chunk.
+ /// </summary>
+ public MaterialBumpChunk() : base(PolyChunkType.Material_Bump) { }
+
+ // Reads a material bump chunk off the reader. Note: address is taken by value
+ // (unlike other chunk readers) - presumably the caller advances by ByteSize; TODO confirm.
+ internal static MaterialBumpChunk Read(EndianStackReader reader, uint address)
+ {
+ ushort header = reader.ReadUShort(address);
+ byte attrib = (byte)(header >> 8); // attribute byte is the high byte of the header
+ // skipping size
+ address += 4;
+
+ // C# object initializers evaluate left to right, so the chained
+ // "address += 2" reads advance through the six shorts in order.
+ return new MaterialBumpChunk()
+ {
+ Attributes = attrib,
+ DX = reader.ReadUShort(address),
+ DY = reader.ReadUShort(address += 2),
+ DZ = reader.ReadUShort(address += 2),
+ UX = reader.ReadUShort(address += 2),
+ UY = reader.ReadUShort(address += 2),
+ UZ = reader.ReadUShort(address += 2),
+ };
+ }
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ base.InternalWrite(writer);
+ writer.WriteUShort(DX);
+ writer.WriteUShort(DY);
+ writer.WriteUShort(DZ);
+ writer.WriteUShort(UX);
+ writer.WriteUShort(UY);
+ writer.WriteUShort(UZ);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialChunk.cs
new file mode 100644
index 0000000..46b083c
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MaterialChunk.cs
@@ -0,0 +1,181 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Structs;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Material information for the following strip chunks
+ /// </summary>
+ public class MaterialChunk : SizedChunk
+ {
+ private Color? _diffuse;
+ private Color? _ambient;
+ private Color? _specular;
+
+ /// <summary>
+ /// Whether the material type is a second type. Flag 0x08 of the chunk type.
+ /// </summary>
+ public bool Second
+ {
+ get => ((byte)Type & 0x08) != 0;
+ set => TypeAttribute(0x08, value);
+ }
+
+ /// <inheritdoc/>
+ public override ushort Size
+ {
+ get
+ {
+ byte type = (byte)Type;
+
+ // Two shorts (one 4-byte color) per enabled color flag:
+ // 0x01 diffuse, 0x02 ambient, 0x04 specular.
+ return (ushort)(2 *
+ ((type & 1)
+ + ((type >> 1) & 1)
+ + ((type >> 2) & 1)));
+ }
+ }
+
+ /// <summary>
+ /// Source blendmode. Stored in bits 3-5 of the attribute byte.
+ /// </summary>
+ public BlendMode SourceAlpha
+ {
+ get => (BlendMode)((Attributes >> 3) & 7);
+ set => Attributes = (byte)((Attributes & ~0x38) | ((byte)value << 3));
+ }
+
+ /// <summary>
+ /// Destination blendmode. Stored in bits 0-2 of the attribute byte.
+ /// </summary>
+ public BlendMode DestinationAlpha
+ {
+ get => (BlendMode)(Attributes & 7);
+ set => Attributes = (byte)((Attributes & ~7) | (byte)value);
+ }
+
+ /// <summary>
+ /// Diffuse color. Assigning a value toggles type flag 0x01.
+ /// </summary>
+ public Color? Diffuse
+ {
+ get => _diffuse;
+ set
+ {
+ TypeAttribute(0x01, value.HasValue);
+ _diffuse = value;
+ }
+ }
+
+ /// <summary>
+ /// Ambient color. Assigning a value toggles type flag 0x02.
+ /// </summary>
+ public Color? Ambient
+ {
+ get => _ambient;
+ set
+ {
+ TypeAttribute(0x02, value.HasValue);
+ _ambient = value;
+ }
+ }
+
+ /// <summary>
+ /// Specular color. Assigning a value toggles type flag 0x04.
+ /// </summary>
+ public Color? Specular
+ {
+ get => _specular;
+ set
+ {
+ TypeAttribute(0x04, value.HasValue);
+ _specular = value;
+ }
+ }
+
+ /// <summary>
+ /// Specular exponent.
+ /// Requires <see cref="Specular"/> to be set.
+ /// </summary>
+ public byte SpecularExponent { get; set; }
+
+ /// <summary>
+ /// Creates a new material chunk. Defaults to <see cref="PolyChunkType.Material_Diffuse"/> with a white diffuse color.
+ /// </summary>
+ public MaterialChunk() : base(PolyChunkType.Material_Diffuse)
+ {
+ _diffuse = Color.ColorWhite;
+ }
+
+ // Sets or clears a flag bit in the chunk type, which encodes
+ // which color components are present (and the "second" flag).
+ private void TypeAttribute(byte val, bool state)
+ {
+ byte type = (byte)Type;
+ Type = (PolyChunkType)(byte)(state
+ ? type | val
+ : type & ~val);
+ }
+
+ // Reads a material chunk, advancing address past the consumed bytes.
+ internal static MaterialChunk Read(EndianStackReader reader, ref uint address)
+ {
+ ushort header = reader.ReadUShort(address);
+ PolyChunkType type = (PolyChunkType)(header & 0xFF);
+ // skipping size
+ address += 4;
+
+ MaterialChunk mat = new()
+ {
+ Attributes = (byte)(header >> 8)
+ };
+
+ // NOTE(review): if the read type lacks the diffuse flag (0x01), the
+ // constructor-default white diffuse (and its type flag) remains set
+ // on the result - confirm this is intended.
+ if(((byte)type & 0x01) != 0)
+ {
+ mat.Diffuse = reader.ReadColor(ref address, ColorIOType.ARGB8_16);
+ }
+
+ if(((byte)type & 0x02) != 0)
+ {
+ mat.Ambient = reader.ReadColor(ref address, ColorIOType.ARGB8_16);
+ }
+
+ if(((byte)type & 0x04) != 0)
+ {
+ // The specular alpha channel stores the specular exponent.
+ Color spec = reader.ReadColor(ref address, ColorIOType.ARGB8_16);
+ mat.SpecularExponent = spec.Alpha;
+ spec.Alpha = 255;
+ mat.Specular = spec;
+ }
+
+ mat.Second = ((byte)type & 0x08) != 0;
+
+ return mat;
+ }
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ if(_diffuse == null && _specular == null && _ambient == null)
+ {
+ throw new InvalidOperationException("Material has no colors and thus no valid type!");
+ }
+
+ base.InternalWrite(writer);
+
+ if(_diffuse.HasValue)
+ {
+ writer.WriteColor(_diffuse.Value, ColorIOType.ARGB8_16);
+ }
+
+ if(_ambient.HasValue)
+ {
+ writer.WriteColor(_ambient.Value, ColorIOType.ARGB8_16);
+ }
+
+ if(_specular.HasValue)
+ {
+ // Mirror of Read: the exponent is written into the specular alpha channel.
+ Color wSpecular = _specular.Value;
+ wSpecular.Alpha = SpecularExponent;
+ writer.WriteColor(wSpecular, ColorIOType.ARGB8_16);
+ }
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MipmapDistanceMultiplierChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MipmapDistanceMultiplierChunk.cs
new file mode 100644
index 0000000..7ffcdf1
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/MipmapDistanceMultiplierChunk.cs
@@ -0,0 +1,31 @@
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Adjusts the mipmap distance of the following strip chunks
+ /// </summary>
+ public class MipmapDistanceMultiplierChunk : BitsChunk
+ {
+ /// <summary>
+ /// The mipmap distance multiplier.
+ /// Ranges from 0 to 3.75f in increments of 0.25.
+ /// </summary>
+ public float MipmapDistanceMultiplier
+ {
+ get => (Attributes & 0xF) * 0.25f;
+ // Rounds to the nearest 0.25 step and clamps the result to the 4-bit field (0-15).
+ set => Attributes = (byte)((Attributes & 0xF0) | (byte)Math.Max(0, Math.Min(0xF, Math.Round(value / 0.25, MidpointRounding.AwayFromZero))));
+ }
+
+ /// <summary>
+ /// Creates a new mipmap distance multiplier chunk.
+ /// </summary>
+ public MipmapDistanceMultiplierChunk() : base(PolyChunkType.MipmapDistanceMultiplier) { }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"MMDM - {MipmapDistanceMultiplier}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SizedChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SizedChunk.cs
new file mode 100644
index 0000000..d2706cc
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SizedChunk.cs
@@ -0,0 +1,30 @@
+using SA3D.Common.IO;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Base class for polygon chunks with a size header.
+ /// </summary>
+ public abstract class SizedChunk : PolyChunk
+ {
+ /// <summary>
+ /// Amount of shorts in the chunk (payload size in 16-bit units, excluding the 4 header bytes).
+ /// </summary>
+ public abstract ushort Size { get; }
+
+ /// <inheritdoc/>
+ public sealed override uint ByteSize => (Size * 2u) + 4u; // payload bytes + 4-byte header
+
+ /// <summary>
+ /// Base constructor for sized chunks.
+ /// </summary>
+ /// <param name="type">Poly chunk type.</param>
+ public SizedChunk(PolyChunkType type) : base(type) { }
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ writer.WriteUShort(Size);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SpecularExponentChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SpecularExponentChunk.cs
new file mode 100644
index 0000000..9491457
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/SpecularExponentChunk.cs
@@ -0,0 +1,31 @@
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Sets the specular exponent of the following strip chunks
+ /// </summary>
+ public class SpecularExponentChunk : BitsChunk
+ {
+ /// <summary>
+ /// Specular exponent.
+ /// Ranges from 0 to 16.
+ /// </summary>
+ public byte SpecularExponent
+ {
+ get => (byte)(Attributes & 0x1F),
+ set => Attributes = (byte)((Attributes & ~0x1F) | Math.Min(value, (byte)16));
+ }
+
+ /// <summary>
+ /// Creates a new Specular exponent chunk.
+ /// </summary>
+ public SpecularExponentChunk() : base(PolyChunkType.SpecularExponent) { }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"Specular Exponent - {SpecularExponent}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/StripChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/StripChunk.cs
new file mode 100644
index 0000000..3502a00
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/StripChunk.cs
@@ -0,0 +1,346 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Modeling.Mesh.Chunk.Structs;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Chunk holding polygon data for rendering.
+ /// </summary>
+ public class StripChunk : SizedChunk
+ {
+ private int _triangleAttributeCount;
+
+ #region Type dependent properties
+
+ /// <summary>
+ /// The number of texture coordinate sets the polygons utilize.
+ /// </summary>
+ public int TexcoordCount
+ {
+ get
+ {
+ if(Type is PolyChunkType.Strip_Tex
+ or PolyChunkType.Strip_HDTex
+ or PolyChunkType.Strip_TexNormal
+ or PolyChunkType.Strip_HDTexNormal
+ or PolyChunkType.Strip_TexColor
+ or PolyChunkType.Strip_HDTexColor)
+ {
+ return 1;
+ }
+ else if(Type is PolyChunkType.Strip_TexDouble
+ or PolyChunkType.Strip_HDTexDouble)
+ {
+ return 2;
+ }
+ else
+ {
+ return 0;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Whether texture coordinates are in the 0-1023 range, instead of 0-255.
+ /// </summary>
+ public bool HasHDTexcoords =>
+ Type is PolyChunkType.Strip_HDTex
+ or PolyChunkType.Strip_HDTexColor
+ or PolyChunkType.Strip_HDTexNormal
+ or PolyChunkType.Strip_HDTexDouble;
+
+ /// <summary>
+ /// Whether polygons utilize normals.
+ /// </summary>
+ public bool HasNormals =>
+ Type is PolyChunkType.Strip_Normal
+ or PolyChunkType.Strip_TexNormal
+ or PolyChunkType.Strip_HDTexNormal;
+
+ /// <summary>
+ /// Whether polygons utilize colors.
+ /// </summary>
+ public bool HasColors =>
+ Type is PolyChunkType.Strip_Color
+ or PolyChunkType.Strip_TexColor
+ or PolyChunkType.Strip_HDTexColor;
+
+ #endregion
+
+ #region Attribute Properties
+
+ /// <summary>
+ /// Ignores lighting as a whole.
+ /// <br/>
+ /// 0x01 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool IgnoreLight
+ {
+ get => GetAttributeBit(1);
+ set => SetAttributeBit(0x01, value);
+ }
+
+ /// <summary>
+ /// Ignores specular lighting.
+ /// <br/>
+ /// 0x02 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool IgnoreSpecular
+ {
+ get => GetAttributeBit(2);
+ set => SetAttributeBit(0x02, value);
+ }
+
+ /// <summary>
+ /// Ignores ambient lighting.
+ /// <br/>
+ /// 0x04 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool IgnoreAmbient
+ {
+ get => GetAttributeBit(4);
+ set => SetAttributeBit(0x04, value);
+ }
+
+ /// <summary>
+ /// Renders polygons with transparency enabled.
+ /// <br/>
+ /// 0x08 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool UseAlpha
+ {
+ get => GetAttributeBit(8);
+ set => SetAttributeBit(0x08, value);
+ }
+
+ /// <summary>
+ /// Disables backface culling.
+ /// <br/>
+ /// 0x10 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool DoubleSide
+ {
+ get => GetAttributeBit(0x10);
+ set => SetAttributeBit(0x10, value);
+ }
+
+ /// <summary>
+ /// Ignore normals and render every polygon flat.
+ /// <br/>
+ /// 0x20 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool FlatShading
+ {
+ get => GetAttributeBit(0x20);
+ set => SetAttributeBit(0x20, value);
+ }
+
+ /// <summary>
+ /// Ignore texture coordinates and use normals for environment (matcap/normal) mapping.
+ /// <br/>
+ /// 0x40 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool EnvironmentMapping
+ {
+ get => GetAttributeBit(0x40);
+ set => SetAttributeBit(0x40, value);
+ }
+
+ /// <summary>
+ /// Unknown effect.
+ /// <br/>
+ /// 0x80 in <see cref="Attributes"/>.
+ /// </summary>
+ public bool UnknownAttribute
+ {
+ get => GetAttributeBit(0x80);
+ set => SetAttributeBit(0x80, value);
+ }
+
+ // Returns whether any of the given bits are set on the attribute byte.
+ private bool GetAttributeBit(byte bits)
+ {
+ return (Attributes & bits) != 0;
+ }
+
+ // Sets or clears the given bit mask on the attribute byte.
+ private void SetAttributeBit(byte bits, bool value)
+ {
+ if(value)
+ {
+ Attributes |= bits;
+ }
+ else
+ {
+ Attributes &= (byte)~bits;
+ }
+ }
+
+ #endregion
+
+
+ /// <summary>
+ /// Triangle strips making up the polygons.
+ /// </summary>
+ public ChunkStrip[] Strips { get; private set; }
+
+ /// <summary>
+ /// Number of custom attributes for each triangle. Ranges from 0 to 3.
+ /// </summary>
+ public int TriangleAttributeCount
+ {
+ get => _triangleAttributeCount;
+ set
+ {
+ if(value is < 0 or > 3)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), "Value out of range. Must be between 0 and 3.");
+ }
+
+ _triangleAttributeCount = value;
+ }
+ }
+
+ /// <summary>
+ /// Raw size not constrained to 16 bits. Measured in 16-bit units, like <see cref="Size"/>.
+ /// </summary>
+ public uint RawSize
+ {
+ get
+ {
+ uint result = 2;
+
+ int texcoordCount = TexcoordCount;
+ bool hasNormals = HasNormals;
+ bool hasColors = HasColors;
+
+ foreach(ChunkStrip str in Strips)
+ {
+ result += str.Size(texcoordCount, hasNormals, hasColors, _triangleAttributeCount);
+ }
+
+ // Byte count converted to 16-bit units.
+ return result / 2;
+ }
+ }
+
+ /// <inheritdoc/>
+ public override ushort Size
+ {
+ get
+ {
+ uint result = RawSize;
+
+ if(result > ushort.MaxValue)
+ {
+ throw new InvalidOperationException($"Strip chunk size ({result}) exceeds maximum size ({ushort.MaxValue}).");
+ }
+
+ return (ushort)result;
+ }
+ }
+
+ /// <summary>
+ /// Creates a new strip chunk.
+ /// </summary>
+ /// <param name="type">Type of strip chunk.</param>
+ /// <param name="strips">Triangle strips.</param>
+ /// <param name="triangleAttributeCount">Number of custom attributes for each triangle.</param>
+ /// <exception cref="ArgumentException"></exception>
+ public StripChunk(PolyChunkType type, ChunkStrip[] strips, int triangleAttributeCount) : base(type)
+ {
+ if(type is < PolyChunkType.Strip_Blank or > PolyChunkType.Strip_HDTexDouble)
+ {
+ throw new ArgumentException($"Type \"{type}\" is not a valid strip chunk type!");
+ }
+
+ Strips = strips;
+ TriangleAttributeCount = triangleAttributeCount;
+ }
+
+ /// <summary>
+ /// Creates a new strip chunk.
+ /// </summary>
+ /// <param name="type">Type of strip chunk.</param>
+ /// <param name="stripCount">Number of strips to create the stripchunk with.</param>
+ /// <param name="triangleAttributeCount">Number of custom attributes for each triangle.</param>
+ /// <exception cref="ArgumentException"></exception>
+ public StripChunk(PolyChunkType type, ushort stripCount, int triangleAttributeCount)
+ : this(type, new ChunkStrip[stripCount], triangleAttributeCount) { }
+
+
+ /// <summary>
+ /// Changes the type of the strip chunk.
+ /// </summary>
+ public void ChangeType(PolyChunkType type)
+ {
+ if(type is < PolyChunkType.Strip_Blank or > PolyChunkType.Strip_HDTexDouble)
+ {
+ throw new ArgumentException($"Type \"{type}\" is not a valid strip chunk type!");
+ }
+
+ Type = type;
+ }
+
+
+ // Reads a strip chunk, advancing address past the consumed bytes.
+ internal static StripChunk Read(EndianStackReader reader, ref uint address)
+ {
+ ushort header = reader.ReadUShort(address);
+ // header2 sits past the 16-bit size field at address + 2, which is skipped here.
+ ushort header2 = reader.ReadUShort(address + 4);
+
+ PolyChunkType type = (PolyChunkType)(header & 0xFF);
+ byte attribs = (byte)(header >> 8);
+ ushort polyCount = (ushort)(header2 & 0x3FFFu); // strip count in the low 14 bits
+ byte triangleAttributeCount = (byte)(header2 >> 14); // attribute count in the top 2 bits
+
+ StripChunk result = new(type, polyCount, triangleAttributeCount)
+ {
+ Attributes = attribs
+ };
+
+ address += 6;
+
+ int texcoordCount = result.TexcoordCount;
+ bool hasNormals = result.HasNormals;
+ bool hasColors = result.HasColors;
+ bool hdTexcoord = result.HasHDTexcoords;
+
+ for(int i = 0; i < polyCount; i++)
+ {
+ result.Strips[i] = ChunkStrip.Read(reader, ref address, texcoordCount, hdTexcoord, hasNormals, hasColors, triangleAttributeCount);
+ }
+
+ return result;
+ }
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ if(Strips.Length > 0x3FFF)
+ {
+ throw new InvalidOperationException($"Strip count ({Strips.Length}) exceeds maximum ({0x3FFF})");
+ }
+
+ base.InternalWrite(writer);
+
+ // Low 14 bits: strip count; top 2 bits: per-triangle attribute count.
+ writer.WriteUShort((ushort)(Strips.Length | (TriangleAttributeCount << 14)));
+
+ int texcoordCount = TexcoordCount;
+ bool hasNormals = HasNormals;
+ bool hasColors = HasColors;
+ bool hdTexcoord = HasHDTexcoords;
+
+ foreach(ChunkStrip s in Strips)
+ {
+ s.Write(writer, texcoordCount, hdTexcoord, hasNormals, hasColors, _triangleAttributeCount);
+ }
+ }
+
+
+ /// <inheritdoc/>
+ public override StripChunk Clone()
+ {
+ return new(
+ Type,
+ Strips.ContentClone(),
+ TriangleAttributeCount);
+ }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"{Type} - 0x{Attributes:X2}, {TriangleAttributeCount} : {Strips.Length}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/TextureChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/TextureChunk.cs
new file mode 100644
index 0000000..dded1ea
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/TextureChunk.cs
@@ -0,0 +1,136 @@
+using SA3D.Common.IO;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Contains texture information.
+ /// </summary>
+ public class TextureChunk : PolyChunk
+ {
+ /// <inheritdoc/>
+ public override uint ByteSize => 4;
+
+ /// <summary>
+ /// Whether the chunktype is <see cref="PolyChunkType.TextureID2"/>.
+ /// </summary>
+ public bool Second
+ {
+ get => Type == PolyChunkType.TextureID2;
+ set => Type = value ? PolyChunkType.TextureID2 : PolyChunkType.TextureID;
+ }
+
+ /// <summary>
+ /// The mipmap distance multiplier.
+ /// <br/>
+ /// Ranges from 0 to 3.75f in increments of 0.25.
+ /// </summary>
+ public float MipmapDistanceMultiplier
+ {
+ get => (Attributes & 0xF) * 0.25f;
+ // Rounds to the nearest 0.25 step and clamps the result to the 4-bit field (0-15).
+ set => Attributes = (byte)((Attributes & 0xF0) | (byte)Math.Max(0, Math.Min(0xF, Math.Round(value / 0.25, MidpointRounding.AwayFromZero))));
+ }
+
+ /// <summary>
+ /// Clamps texture corrdinates on the vertical axis between -1 and 1.
+ /// </summary>
+ public bool ClampV
+ {
+ get => (Attributes & 0x10) != 0;
+ set => _ = value ? Attributes |= 0x10 : Attributes &= 0xEF;
+ }
+
+ /// <summary>
+ /// Clamps texture corrdinates on the horizontal axis between -1 and 1.
+ /// </summary>
+ public bool ClampU
+ {
+ get => (Attributes & 0x20) != 0;
+ set => _ = value ? Attributes |= 0x20 : Attributes &= 0xDF;
+ }
+
+ /// <summary>
+ /// Mirrors the texture every second time the texture is repeated along the vertical axis.
+ /// </summary>
+ public bool MirrorV
+ {
+ get => (Attributes & 0x40) != 0;
+ set => _ = value ? Attributes |= 0x40 : Attributes &= 0xBF;
+ }
+
+ /// <summary>
+ /// Mirrors the texture every second time the texture is repeated along the horizontal axis.
+ /// </summary>
+ public bool MirrorU
+ {
+ get => (Attributes & 0x80) != 0;
+ set => _ = value ? Attributes |= 0x80 : Attributes &= 0x7F;
+ }
+
+
+ /// <summary>
+ /// Second set of data bytes.
+ /// </summary>
+ public ushort Data { get; private set; }
+
+ /// <summary>
+ /// Texture ID to use. Stored in the low 13 bits of <see cref="Data"/>; clamped to 0x1FFF.
+ /// </summary>
+ public ushort TextureID
+ {
+ get => (ushort)(Data & 0x1FFFu);
+ set => Data = (ushort)((Data & ~0x1FFF) | Math.Min(value, (ushort)0x1FFF));
+ }
+
+ /// <summary>
+ /// Whether to use super sampling (anisotropic filtering). Bit 0x2000 of <see cref="Data"/>.
+ /// </summary>
+ public bool SuperSample
+ {
+ get => (Data & 0x2000) != 0;
+ set => _ = value ? Data |= 0x2000 : Data &= 0xDFFF;
+ }
+
+ /// <summary>
+ /// Texture pixel filtering mode. Top 2 bits of <see cref="Data"/>.
+ /// </summary>
+ public FilterMode FilterMode
+ {
+ get => (FilterMode)(Data >> 14);
+ set => Data = (ushort)((Data & ~0xC000) | ((ushort)value << 14));
+ }
+
+
+ /// <summary>
+ /// Creates a new texture chunk.
+ /// </summary>
+ /// <param name="second">Whether it is <see cref="PolyChunkType.TextureID2"/>.</param>
+ public TextureChunk(bool second = false) : base(second ? PolyChunkType.TextureID2 : PolyChunkType.TextureID) { }
+
+
+ // Reads a texture chunk. Address is taken by value; the chunk occupies a fixed 4 bytes.
+ internal static TextureChunk Read(EndianStackReader data, uint address)
+ {
+ ushort header = data.ReadUShort(address);
+ PolyChunkType type = (PolyChunkType)(header & 0xFF);
+ byte attribs = (byte)(header >> 8);
+ ushort cnkData = data.ReadUShort(address + 2);
+
+ return new TextureChunk(type == PolyChunkType.TextureID2)
+ {
+ Attributes = attribs,
+ Data = cnkData
+ };
+ }
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ writer.WriteUShort(Data);
+ }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"{Type} - {TextureID}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/VolumeChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/VolumeChunk.cs
new file mode 100644
index 0000000..5e08ac0
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/PolyChunks/VolumeChunk.cs
@@ -0,0 +1,171 @@
+using SA3D.Common;
+using SA3D.Common.IO;
+using SA3D.Modeling.Mesh.Chunk.Structs;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.PolyChunks
+{
+ /// <summary>
+ /// Chunk containing a volume build from polygons.
+ /// </summary>
+ public class VolumeChunk : SizedChunk
+ {
+ private int _polygonAttributeCount;
+
+ /// <summary>
+ /// Polygons of the volume
+ /// </summary>
+ public IChunkVolumePolygon[] Polygons { get; }
+
+ /// <summary>
+ /// User attribute count (ranges from 0 to 3)
+ /// </summary>
+ public int PolygonAttributeCount
+ {
+ get => _polygonAttributeCount;
+ set
+ {
+ if(value is < 0 or > 3)
+ {
+ throw new ArgumentOutOfRangeException(nameof(value), "Value out of range. Must be between 0 and 3.");
+ }
+
+ _polygonAttributeCount = value;
+ }
+ }
+
+ /// <summary>
+ /// Raw size not constrained to 16 bits. Measured in 16-bit units, like <see cref="Size"/>.
+ /// </summary>
+ public uint RawSize
+ {
+ get
+ {
+ uint size = 2;
+ foreach(IChunkVolumePolygon p in Polygons)
+ {
+ size += p.Size(PolygonAttributeCount);
+ }
+
+ // Byte count converted to 16-bit units.
+ return size / 2;
+ }
+ }
+
+ /// <inheritdoc/>
+ public override ushort Size
+ {
+ get
+ {
+ uint result = RawSize;
+
+ if(result > ushort.MaxValue)
+ {
+ // Message previously said "Strip chunk size" (copy-paste from StripChunk).
+ throw new InvalidOperationException($"Volume chunk size ({result}) exceeds maximum size ({ushort.MaxValue}).");
+ }
+
+ return (ushort)result;
+ }
+ }
+
+
+ /// <summary>
+ /// Creates a new volume chunk.
+ /// </summary>
+ /// <param name="type">Type of volume chunk.</param>
+ /// <param name="polygons">Polygons to use.</param>
+ /// <param name="polygonAttributeCount">Number of attributes for each polygon.</param>
+ /// <exception cref="ArgumentException"></exception>
+ public VolumeChunk(PolyChunkType type, IChunkVolumePolygon[] polygons, int polygonAttributeCount) : base(type)
+ {
+ if(type is < PolyChunkType.Volume_Polygon3 or > PolyChunkType.Volume_Strip)
+ {
+ throw new ArgumentException($"Type \"{type}\" is not a valid volume chunk type!");
+ }
+
+ Polygons = polygons;
+ PolygonAttributeCount = polygonAttributeCount;
+ }
+
+ /// <summary>
+ /// Creates a new, empty volume chunk.
+ /// </summary>
+ /// <param name="type">Type of volume chunk.</param>
+ /// <param name="polygonCount">Number of polygons in the chunk.</param>
+ /// <param name="polygonAttributeCount">Number of attributes for each polygon.</param>
+ public VolumeChunk(PolyChunkType type, ushort polygonCount, int polygonAttributeCount)
+ : this(type, new IChunkVolumePolygon[polygonCount], polygonAttributeCount) { }
+
+
+ /// <inheritdoc/>
+ protected override void InternalWrite(EndianStackWriter writer)
+ {
+ if(Polygons.Length > 0x3FFF)
+ {
+ throw new InvalidOperationException($"Poly count ({Polygons.Length}) exceeds maximum ({0x3FFF})");
+ }
+
+ base.InternalWrite(writer);
+
+ // Low 14 bits: polygon count; top 2 bits: per-polygon attribute count.
+ writer.WriteUShort((ushort)(Polygons.Length | (PolygonAttributeCount << 14)));
+
+ foreach(IChunkVolumePolygon p in Polygons)
+ {
+ p.Write(writer, PolygonAttributeCount);
+ }
+ }
+
+ // Reads a volume chunk, advancing address past the consumed bytes.
+ internal static VolumeChunk Read(EndianStackReader reader, ref uint address)
+ {
+ ushort header = reader.ReadUShort(address);
+ // header2 sits past the 16-bit size field at address + 2, which is skipped here.
+ ushort header2 = reader.ReadUShort(address + 4);
+
+ PolyChunkType type = (PolyChunkType)(header & 0xFF);
+ byte attrib = (byte)(header >> 8);
+ ushort polyCount = (ushort)(header2 & 0x3FFFu);
+ byte userAttribs = (byte)(header2 >> 14);
+
+ VolumeChunk result = new(type, polyCount, userAttribs)
+ {
+ Attributes = attrib,
+ };
+
+ address += 6;
+
+ if(type == PolyChunkType.Volume_Polygon3)
+ {
+ for(int i = 0; i < polyCount; i++)
+ {
+ result.Polygons[i] = ChunkVolumeTriangle.Read(reader, ref address, userAttribs);
+ }
+ }
+ else if(type == PolyChunkType.Volume_Polygon4)
+ {
+ for(int i = 0; i < polyCount; i++)
+ {
+ result.Polygons[i] = ChunkVolumeQuad.Read(reader, ref address, userAttribs);
+ }
+ }
+ else // Volume_Strip
+ {
+ for(int i = 0; i < polyCount; i++)
+ {
+ result.Polygons[i] = ChunkVolumeStrip.Read(reader, ref address, userAttribs);
+ }
+ }
+
+ return result;
+ }
+
+
+ /// <inheritdoc/>
+ public override VolumeChunk Clone()
+ {
+ return new(Type, Polygons.ContentClone(), PolygonAttributeCount);
+ }
+
+ /// <inheritdoc/>
+ public override string ToString()
+ {
+ return $"{Type} - {PolygonAttributeCount} : {Polygons.Length}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkCorner.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkCorner.cs
new file mode 100644
index 0000000..c0d27da
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkCorner.cs
@@ -0,0 +1,105 @@
+using SA3D.Modeling.Structs;
+using System;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ /// <summary>
+ /// A single polygon corner for chunk models.
+ /// </summary>
+ public struct ChunkCorner : IEquatable<ChunkCorner>
+ {
+ /// <summary>
+ /// Vertex Cache index.
+ /// </summary>
+ public ushort Index { get; set; }
+
+ /// <summary>
+ /// Texture coordinates.
+ /// </summary>
+ public Vector2 Texcoord { get; set; }
+
+ /// <summary>
+ /// Second set of texture coordinates.
+ /// </summary>
+ public Vector2 Texcoord2 { get; set; }
+
+ /// <summary>
+ /// Normalized direction.
+ /// </summary>
+ public Vector3 Normal { get; set; }
+
+ /// <summary>
+ /// Color.
+ /// </summary>
+ public Color Color { get; set; }
+
+ /// <summary>
+ /// First set of attributes for the triangle that this corner closes.
+ /// </summary>
+ public ushort Attributes1 { get; set; }
+
+ /// <summary>
+ /// Second set of attributes for the triangle that this corner closes.
+ /// </summary>
+ public ushort Attributes2 { get; set; }
+
+ /// <summary>
+ /// Third set of attributes for the triangle that this corner closes.
+ /// </summary>
+ public ushort Attributes3 { get; set; }
+
+
+ /// <inheritdoc/>
+ public override readonly bool Equals(object? obj)
+ {
+ // NOTE(review): Texcoord2 is excluded from equality and hashing - confirm intended.
+ return obj is ChunkCorner corner &&
+ Index == corner.Index &&
+ Texcoord.Equals(corner.Texcoord) &&
+ Normal.Equals(corner.Normal) &&
+ Color.Equals(corner.Color) &&
+ Attributes1 == corner.Attributes1 &&
+ Attributes2 == corner.Attributes2 &&
+ Attributes3 == corner.Attributes3;
+ }
+
+ /// <inheritdoc/>
+ public override readonly int GetHashCode()
+ {
+ return HashCode.Combine(Index, Texcoord, Normal, Color, Attributes1, Attributes2, Attributes3);
+ }
+
+ // Explicit interface implementation; the generic type argument was
+ // missing ("IEquatable"), which does not compile - restored here.
+ readonly bool IEquatable<ChunkCorner>.Equals(ChunkCorner other)
+ {
+ return Equals(other);
+ }
+
+ /// <summary>
+ /// Compares two chunk corners for equality.
+ /// </summary>
+ /// <param name="left">Lefthand corner.</param>
+ /// <param name="right">Righthand corner.</param>
+ /// <returns>Whether the corners are equal.</returns>
+ public static bool operator ==(ChunkCorner left, ChunkCorner right)
+ {
+ return left.Equals(right);
+ }
+
+ /// <summary>
+ /// Compares two chunk corners for inequality.
+ /// </summary>
+ /// <param name="left">Lefthand corner.</param>
+ /// <param name="right">Righthand corner.</param>
+ /// <returns>Whether the corners are inequal.</returns>
+ public static bool operator !=(ChunkCorner left, ChunkCorner right)
+ {
+ return !(left == right);
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"{Index} : {Texcoord.DebugString()}, {Color}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkStrip.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkStrip.cs
new file mode 100644
index 0000000..9534a9a
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkStrip.cs
@@ -0,0 +1,223 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Structs;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ /// <summary>
+ /// Triangle string structure for strip chunks.
+ /// </summary>
+ public struct ChunkStrip : ICloneable
+ {
+ /// <summary>
+ /// Triangle corners.
+ /// <br/>
+ /// The first two corners are only used for their index.
+ /// </summary>
+ public ChunkCorner[] Corners { get; private set; }
+
+ /// <summary>
+ /// Whether to inverse the culling direction of the triangles.
+ /// </summary>
+ public bool Reversed { get; private set; }
+
+
+ /// <summary>
+ /// Creates a new strip.
+ /// </summary>
+ /// <param name="corners">Triangle corners.</param>
+ /// <param name="reverse">Whether to inverse the culling direction of the triangles</param>
+ public ChunkStrip(ChunkCorner[] corners, bool reverse)
+ {
+ Reversed = reverse;
+ Corners = corners;
+ }
+
+
+ /// <summary>
+ /// Calculates the size of the strip in bytes.
+ /// </summary>
+ /// <param name="texcoordCount">Number of texture coordinate sets in the strip.</param>
+ /// <param name="hasNormal">Whether the strip has normals.</param>
+ /// <param name="hasColor">Whether the strip has colors.</param>
+ /// <param name="triangleAttributeCount">Number of attribute sets for every triangle.</param>
+ /// <returns>The size of the strip in bytes.</returns>
+ public readonly uint Size(int texcoordCount, bool hasNormal, bool hasColor, int triangleAttributeCount)
+ {
+ // Per corner: 2 index bytes + 4 bytes per texcoord set + 12 normal bytes + 4 color bytes.
+ uint structSize = (uint)(2u
+ + (texcoordCount * 4u)
+ + (hasNormal ? 12u : 0u)
+ + (hasColor ? 4u : 0u));
+
+ return (uint)(
+ 2u // strip header
+ + (Corners.Length * structSize) // individual corners
+ + ((Corners.Length - 2) * triangleAttributeCount * 2)); // triangle attributes
+ }
+
+ /// <summary>
+ /// Reads a strip off an endian stack reader. Advances the address by the number of bytes read.
+ /// </summary>
+ /// <param name="reader">The reader to read from.</param>
+ /// <param name="address">Address at which to start reading.</param>
+ /// <param name="texcoordCount">Number of texture coordinate sets in the strip.</param>
+ /// <param name="hdTexcoord">Whether the texture coordinate data ranges from 0-1023, instead of 0-255</param>
+ /// <param name="hasNormal">Whether the strip has normals.</param>
+ /// <param name="hasColor">Whether the strip has colors.</param>
+ /// <param name="triangleAttributeCount">Number of attribute sets for every triangle.</param>
+ /// <returns>The strip that was read.</returns>
+ public static ChunkStrip Read(EndianStackReader reader, ref uint address, int texcoordCount, bool hdTexcoord, bool hasNormal, bool hasColor, int triangleAttributeCount)
+ {
+ const float NormalFactor = 1f / short.MaxValue;
+
+ // A negative corner count marks a reversed strip.
+ short header = reader.ReadShort(address);
+ bool reverse = header < 0;
+ ChunkCorner[] corners = new ChunkCorner[Math.Abs(header)];
+
+ bool hasUV = texcoordCount > 0;
+ bool hasUV2 = texcoordCount > 1;
+ float uvMultiplier = hdTexcoord ? 1f / 1023f : 1f / 255f;
+
+ bool flag1 = triangleAttributeCount > 0;
+ bool flag2 = triangleAttributeCount > 1;
+ bool flag3 = triangleAttributeCount > 2;
+
+ address += 2;
+
+ for(int i = 0; i < corners.Length; i++)
+ {
+ ChunkCorner c = new()
+ {
+ Index = reader.ReadUShort(address),
+ Color = Color.ColorWhite
+ };
+ address += 2;
+
+ if(hasUV)
+ {
+ c.Texcoord = reader.ReadVector2(ref address, FloatIOType.Short) * uvMultiplier;
+
+ if(hasUV2)
+ {
+ c.Texcoord2 = reader.ReadVector2(ref address, FloatIOType.Short) * uvMultiplier;
+ }
+ }
+
+ if(hasNormal)
+ {
+ c.Normal = reader.ReadVector3(ref address, FloatIOType.Short) * NormalFactor;
+ }
+ else if(hasColor)
+ {
+ c.Color = reader.ReadColor(ref address, ColorIOType.ARGB8_16);
+ }
+
+ // Triangle attributes only exist from the third corner onward,
+ // since each corner past the second closes one triangle.
+ if(flag1 && i > 1)
+ {
+ c.Attributes1 = reader.ReadUShort(address);
+ address += 2;
+ if(flag2)
+ {
+ c.Attributes2 = reader.ReadUShort(address);
+ address += 2;
+ if(flag3)
+ {
+ c.Attributes3 = reader.ReadUShort(address);
+ address += 2;
+ }
+ }
+ }
+
+ corners[i] = c;
+ }
+
+ return new ChunkStrip(corners, reverse);
+ }
+
+ /// <summary>
+ /// Writes the strip to an endian stack writer.
+ /// </summary>
+ /// <param name="writer">The writer to write to.</param>
+ /// <param name="texcoordCount">Number of texture coordinate sets in the strip.</param>
+ /// <param name="hdTexcoord">Whether the texture coordinate data ranges from 0-1023, instead of 0-255</param>
+ /// <param name="hasNormal">Whether the strip has normals.</param>
+ /// <param name="hasColor">Whether the strip has colors.</param>
+ /// <param name="triangleAttributeCount">Number of attribute sets for every triangle.</param>
+ public readonly void Write(EndianStackWriter writer, int texcoordCount, bool hdTexcoord, bool hasNormal, bool hasColor, int triangleAttributeCount)
+ {
+ if(Corners.Length > short.MaxValue)
+ {
+ throw new InvalidOperationException("Strip has too many corners!");
+ }
+
+ // Mirror of Read: negative corner count marks a reversed strip.
+ writer.WriteShort(Reversed
+ ? (short)-Corners.Length
+ : (short)Corners.Length);
+
+ bool hasUV = texcoordCount > 0;
+ bool hasUV2 = texcoordCount > 1;
+ // Inverse of the Read multiplier: scales normalized coords back to the fixed range.
+ float uvMultiplier = hdTexcoord ? 1023f : 255f;
+
+ bool flag1 = triangleAttributeCount > 0;
+ bool flag2 = triangleAttributeCount > 1;
+ bool flag3 = triangleAttributeCount > 2;
+
+ for(int i = 0; i < Corners.Length; i++)
+ {
+ ChunkCorner c = Corners[i];
+ writer.WriteUShort(c.Index);
+ if(hasUV)
+ {
+ writer.WriteVector2(c.Texcoord * uvMultiplier, FloatIOType.Short);
+
+ if(hasUV2)
+ {
+ writer.WriteVector2(c.Texcoord2 * uvMultiplier, FloatIOType.Short);
+ }
+ }
+
+ if(hasNormal)
+ {
+ writer.WriteVector3(c.Normal * short.MaxValue, FloatIOType.Short);
+ }
+ else if(hasColor)
+ {
+ writer.WriteColor(c.Color, ColorIOType.ARGB8_16);
+ }
+
+ if(flag1 && i > 1)
+ {
+ writer.WriteUShort(c.Attributes1);
+ if(flag2)
+ {
+ writer.WriteUShort(c.Attributes2);
+ if(flag3)
+ {
+ writer.WriteUShort(c.Attributes3);
+ }
+ }
+ }
+ }
+ }
+
+
+ readonly object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ /// <summary>
+ /// Creates a deep clone of the strip.
+ /// </summary>
+ /// <returns>The cloned strip.</returns>
+ public readonly ChunkStrip Clone()
+ {
+ return new((ChunkCorner[])Corners.Clone(), Reversed);
+ }
+
+ /// <inheritdoc/>
+ public override readonly string ToString()
+ {
+ return $"{Reversed} : {Corners.Length}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVertex.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVertex.cs
new file mode 100644
index 0000000..ca23742
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVertex.cs
@@ -0,0 +1,164 @@
+using SA3D.Modeling.Structs;
+using System;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ ///
+ /// Single vertex of a vertex chunk
+ ///
+ public struct ChunkVertex : IEquatable
+ {
+ ///
+ /// Position in 3D space.
+ ///
+ public Vector3 Position { get; set; }
+
+ ///
+ /// Normalized direction.
+ ///
+ public Vector3 Normal { get; set; }
+
+ ///
+ /// Diffuse Color.
+ ///
+ public Color Diffuse { get; set; }
+
+ ///
+ /// Specular color.
+ ///
+ public Color Specular { get; set; }
+
+ ///
+ /// Additional Attributes.
+ ///
+ public uint Attributes { get; set; }
+
+ ///
+ /// Vertex cache index.
+ ///
+ public ushort Index
+ {
+ readonly get => (ushort)(Attributes & 0xFFFF);
+ set => Attributes = (Attributes & ~0xFFFFu) | value;
+ }
+
+ ///
+ /// Node influence. Stored as a 0-255 value in the upper 16 bits of Attributes.
+ ///
+ public float Weight
+ {
+ readonly get => (Attributes >> 16) / 255f;
+ set => Attributes = (Attributes & 0xFFFFu) | ((uint)Math.Round(value * 255f) << 16); // clear all upper 16 bits; 0xFFFFF left stale weight bits 16-19 behind
+ }
+
+
+ ///
+ /// Creates a chunk vertex with a normal.
+ ///
+ /// Position in 3D space.
+ /// Normalized direction.
+ public ChunkVertex(Vector3 position, Vector3 normal) : this()
+ {
+ Position = position;
+ Normal = normal;
+ Attributes = 0;
+ Weight = 1;
+ }
+
+ ///
+ /// Creates a chunk vertex with a normal and attributes.
+ ///
+ /// Position in 3D space.
+ /// Normalized direction.
+ /// Additional attributes.
+ public ChunkVertex(Vector3 position, Vector3 normal, uint attribs) : this()
+ {
+ Position = position;
+ Normal = normal;
+ Attributes = attribs;
+ Weight = 1;
+ }
+
+ ///
+ /// Creates a chunk vertex with a normal and weight info.
+ ///
+ /// Position in 3D space.
+ /// Normalized direction.
+ /// Vertex cache index.
+ /// Node influence.
+ public ChunkVertex(Vector3 position, Vector3 normal, ushort index, float weight) : this()
+ {
+ Position = position;
+ Normal = normal;
+ Index = index;
+ Weight = weight;
+ }
+
+ ///
+ /// Creates a chunk with colors.
+ ///
+ /// Position in 3D space.
+ /// Diffuse color.
+ /// Specular color.
+ public ChunkVertex(Vector3 position, Color diffuse, Color specular) : this()
+ {
+ Position = position;
+ Diffuse = diffuse;
+ Specular = specular;
+ }
+
+
+ ///
+ public override readonly bool Equals(object? obj)
+ {
+ return obj is ChunkVertex vertex &&
+ Position.Equals(vertex.Position) &&
+ Normal.Equals(vertex.Normal) &&
+ Diffuse.Equals(vertex.Diffuse) &&
+ Specular.Equals(vertex.Specular) &&
+ Attributes == vertex.Attributes;
+ }
+
+ ///
+ public override readonly int GetHashCode()
+ {
+ return HashCode.Combine(Position, Normal, Diffuse, Specular, Attributes);
+ }
+
+ ///
+ readonly bool IEquatable.Equals(ChunkVertex other)
+ {
+ return Equals(other);
+ }
+
+ ///
+ /// Compares two chunk vertices for equality.
+ ///
+ /// Lefthand vertex.
+ /// Righthand vertex.
+ /// Whether the vertices are equal.
+ public static bool operator ==(ChunkVertex left, ChunkVertex right)
+ {
+ return left.Equals(right);
+ }
+
+ ///
+ /// Compares two chunk vertices for inequality.
+ ///
+ /// Lefthand vertex.
+ /// Righthand vertex.
+ /// Whether the vertices are unequal.
+ public static bool operator !=(ChunkVertex left, ChunkVertex right)
+ {
+ return !(left == right);
+ }
+
+ ///
+ public override readonly string ToString()
+ {
+ return $"{Position.DebugString()}, {Normal.DebugString()} : {Index}, {Weight:F3}";
+ }
+
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeQuad.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeQuad.cs
new file mode 100644
index 0000000..3a17ad5
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeQuad.cs
@@ -0,0 +1,210 @@
+using SA3D.Common.IO;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ ///
+ /// Quad polygon for volume chunks.
+ ///
+ public struct ChunkVolumeQuad : IChunkVolumePolygon
+ {
+ ///
+ public readonly int NumIndices => 4;
+
+
+ ///
+ /// First vertex index.
+ ///
+ public ushort Index1 { get; set; }
+
+ ///
+ /// Second vertex index.
+ ///
+ public ushort Index2 { get; set; }
+
+ ///
+ /// Third vertex index.
+ ///
+ public ushort Index3 { get; set; }
+
+ ///
+ /// Fourth vertex index.
+ ///
+ public ushort Index4 { get; set; }
+
+
+ ///
+ /// First polygon attribute.
+ ///
+ public ushort Attribute1 { get; set; }
+
+ ///
+ /// Second polygon attribute.
+ ///
+ public ushort Attribute2 { get; set; }
+
+ ///
+ /// Third polygon attribute.
+ ///
+ public ushort Attribute3 { get; set; }
+
+
+ ///
+ public ushort this[int index]
+ {
+ readonly get => index switch
+ {
+ 0 => Index1,
+ 1 => Index2,
+ 2 => Index3,
+ 3 => Index4, // was "4 => Index4": unreachable, and index 3 threw instead of returning Index4 (setter uses case 3)
+ _ => throw new IndexOutOfRangeException(),
+ };
+ set
+ {
+ switch(index)
+ {
+ case 0:
+ Index1 = value;
+ break;
+ case 1:
+ Index2 = value;
+ break;
+ case 2:
+ Index3 = value;
+ break;
+ case 3:
+ Index4 = value;
+ break;
+ default:
+ throw new IndexOutOfRangeException();
+ }
+ }
+ }
+
+
+ ///
+ /// Creates a new chunk volume quad.
+ ///
+ /// First vertex index.
+ /// Second vertex index.
+ /// Third vertex index.
+ /// Fourth vertex index.
+ /// First polygon attribute.
+ /// Second polygon attribute.
+ /// Third polygon attribute.
+ public ChunkVolumeQuad(ushort index1, ushort index2, ushort index3, ushort index4, ushort attribute1, ushort attribute2, ushort attribute3)
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ Index4 = index4;
+ Attribute1 = attribute1;
+ Attribute2 = attribute2;
+ Attribute3 = attribute3;
+ }
+
+ ///
+ /// Creates a new chunk volume quad.
+ ///
+ /// First vertex index.
+ /// Second vertex index.
+ /// Third vertex index.
+ /// Fourth vertex index.
+ public ChunkVolumeQuad(ushort index1, ushort index2, ushort index3, ushort index4) : this()
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ Index4 = index4;
+ }
+
+
+ ///
+ public readonly ushort Size(int polygonAttributeCount)
+ {
+ return (ushort)(8u + (polygonAttributeCount * 2u));
+ }
+
+ ///
+ public readonly void Write(EndianStackWriter writer, int polygonAttributeCount)
+ {
+ writer.WriteUShort(Index1);
+ writer.WriteUShort(Index2);
+ writer.WriteUShort(Index3);
+ writer.WriteUShort(Index4);
+
+ if(polygonAttributeCount > 0)
+ {
+ writer.WriteUShort(Attribute1);
+ if(polygonAttributeCount > 1)
+ {
+ writer.WriteUShort(Attribute2);
+ if(polygonAttributeCount > 2)
+ {
+ writer.WriteUShort(Attribute3);
+ }
+ }
+ }
+ }
+
+ ///
+ /// Reads a chunk volume quad off an endian stack reader. Advances the address by the number of bytes read.
+ ///
+ /// Reader to read from.
+ /// Address at which to start reading.
+ /// Number of attributes to read for the quad.
+ /// The quad that was read.
+ public static ChunkVolumeQuad Read(EndianStackReader reader, ref uint address, int polygonAttributeCount)
+ {
+ ChunkVolumeQuad result = new(
+ reader.ReadUShort(address),
+ reader.ReadUShort(address + 2),
+ reader.ReadUShort(address + 4),
+ reader.ReadUShort(address + 6));
+
+ address += 8;
+
+ if(polygonAttributeCount > 0)
+ {
+ result.Attribute1 = reader.ReadUShort(address);
+ address += 2;
+
+ if(polygonAttributeCount > 1)
+ {
+ result.Attribute2 = reader.ReadUShort(address);
+ address += 2;
+
+ if(polygonAttributeCount > 2)
+ {
+ result.Attribute3 = reader.ReadUShort(address);
+ address += 2;
+ }
+ }
+ }
+
+ return result;
+ }
+
+
+ readonly object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ ///
+ /// Creates a clone of the quad.
+ ///
+ /// The cloned quad.
+ public readonly ChunkVolumeQuad Clone()
+ {
+ return this;
+ }
+
+ ///
+ public override readonly string ToString()
+ {
+ return $"Quad - {{ {Index1}, {Index2}, {Index3}, {Index4} }}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeStrip.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeStrip.cs
new file mode 100644
index 0000000..0132067
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeStrip.cs
@@ -0,0 +1,138 @@
+using SA3D.Common.IO;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ ///
+ /// Triangle strip polygon for volume chunks.
+ ///
+ public struct ChunkVolumeStrip : IChunkVolumePolygon
+ {
+ ///
+ /// Vertex indices.
+ ///
+ public ushort[] Indices { get; }
+
+ ///
+ public readonly int NumIndices => Indices.Length;
+
+ ///
+ /// Triangle attributes for each triangle. [triangle index, attribute index]
+ ///
+ public ushort[,] TriangleAttributes { get; }
+
+ ///
+ /// Whether the triangles use reversed culling direction.
+ ///
+ public bool Reversed { get; set; }
+
+ ///
+ public readonly ushort this[int index]
+ {
+ get => Indices[index];
+ set => Indices[index] = value;
+ }
+
+ private ChunkVolumeStrip(ushort[] indices, ushort[,] triangleAttributes, bool reversed)
+ {
+ Indices = indices;
+ TriangleAttributes = triangleAttributes;
+ Reversed = reversed;
+ }
+
+ ///
+ /// Creates a new empty chunk volume strip.
+ ///
+ /// Number of vertex indices.
+ /// Whether the triangles use reversed culling direction.
+ public ChunkVolumeStrip(int size, bool reversed)
+ {
+ Indices = new ushort[size];
+ TriangleAttributes = new ushort[size - 2, 3];
+ Reversed = reversed;
+ }
+
+ ///
+ /// Creates a new empty chunk volume strip.
+ ///
+ /// Vertex indices to use.
+ /// Whether the triangles use reversed culling direction.
+ public ChunkVolumeStrip(ushort[] indices, bool reversed)
+ {
+ Indices = indices;
+ TriangleAttributes = new ushort[Indices.Length - 2, 3];
+ Reversed = reversed;
+ }
+
+ ///
+ public readonly ushort Size(int polygonAttributeCount)
+ {
+ return (ushort)(2u + (2 * (Indices.Length + (TriangleAttributes.GetLength(0) * polygonAttributeCount)))); // GetLength(0) = triangle count; .Length on the [n,3] array is n*3 and overstated the size
+ }
+
+ ///
+ public readonly void Write(EndianStackWriter writer, int polygonAttributeCount)
+ {
+ short count = (short)Math.Min(Indices.Length, short.MaxValue);
+ writer.WriteShort(Reversed ? (short)-count : count);
+
+ writer.WriteUShort(Indices[0]);
+ writer.WriteUShort(Indices[1]);
+ for(int i = 2; i < count; i++)
+ {
+ writer.WriteUShort(Indices[i]);
+
+ for(int j = 0; j < polygonAttributeCount; j++)
+ {
+ writer.WriteUShort(TriangleAttributes[i - 2, j]);
+ }
+ }
+ }
+
+ ///
+ /// Reads a chunk volume strip off an endian stack reader. Advances the address by the number of bytes read.
+ ///
+ /// Reader to read from.
+ /// Address at which to start reading.
+ /// Number of attributes to read for each triangle in the strip.
+ /// The strip that was read.
+ public static ChunkVolumeStrip Read(EndianStackReader reader, ref uint address, int polygonAttributeCount)
+ {
+ short header = reader.ReadShort(address);
+ ChunkVolumeStrip result = new(Math.Abs(header), header < 0);
+ address += 2;
+
+ result.Indices[0] = reader.ReadUShort(address);
+ result.Indices[1] = reader.ReadUShort(address += 2);
+
+ for(int i = 2; i < result.Indices.Length; i++)
+ {
+ result.Indices[i] = reader.ReadUShort(address += 2);
+
+ for(int j = 0; j < polygonAttributeCount; j++)
+ {
+ result.TriangleAttributes[i - 2, j] = reader.ReadUShort(address += 2);
+ }
+ }
+
+ return result;
+ }
+
+ readonly object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ ///
+ /// Returns a deep clone of the chunk volume strip.
+ ///
+ /// The cloned strip.
+ public readonly ChunkVolumeStrip Clone()
+ {
+ return new(
+ (ushort[])Indices.Clone(),
+ (ushort[,])TriangleAttributes.Clone(),
+ Reversed);
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeTriangle.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeTriangle.cs
new file mode 100644
index 0000000..d357318
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/ChunkVolumeTriangle.cs
@@ -0,0 +1,195 @@
+using SA3D.Common.IO;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ ///
+ /// Triangle polygon for volume chunks.
+ ///
+ public struct ChunkVolumeTriangle : IChunkVolumePolygon
+ {
+ ///
+ public readonly int NumIndices => 3;
+
+
+ ///
+ /// First vertex index.
+ ///
+ public ushort Index1 { get; set; }
+
+ ///
+ /// Second vertex index.
+ ///
+ public ushort Index2 { get; set; }
+
+ ///
+ /// Third vertex index.
+ ///
+ public ushort Index3 { get; set; }
+
+
+ ///
+ /// First polygon attribute.
+ ///
+ public ushort Attribute1 { get; set; }
+
+ ///
+ /// Second polygon attribute.
+ ///
+ public ushort Attribute2 { get; set; }
+
+ ///
+ /// Third polygon attribute.
+ ///
+ public ushort Attribute3 { get; set; }
+
+
+ ///
+ public ushort this[int index]
+ {
+ readonly get => index switch
+ {
+ 0 => Index1,
+ 1 => Index2,
+ 2 => Index3,
+ _ => throw new IndexOutOfRangeException(),
+ };
+ set
+ {
+ switch(index)
+ {
+ case 0:
+ Index1 = value;
+ break;
+ case 1:
+ Index2 = value;
+ break;
+ case 2:
+ Index3 = value;
+ break;
+ default:
+ throw new IndexOutOfRangeException();
+ }
+ }
+ }
+
+
+ ///
+ /// Creates a new chunk volume triangle.
+ ///
+ /// First vertex index.
+ /// Second vertex index.
+ /// Third vertex index.
+ /// First polygon attribute.
+ /// Second polygon attribute.
+ /// Third polygon attribute.
+ public ChunkVolumeTriangle(ushort index1, ushort index2, ushort index3, ushort attribute1, ushort attribute2, ushort attribute3)
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ Attribute1 = attribute1;
+ Attribute2 = attribute2;
+ Attribute3 = attribute3;
+ }
+
+ ///
+ /// Creates a new chunk volume triangle.
+ ///
+ /// First vertex index.
+ /// Second vertex index.
+ /// Third vertex index.
+ public ChunkVolumeTriangle(ushort index1, ushort index2, ushort index3) : this()
+ {
+ Index1 = index1;
+ Index2 = index2;
+ Index3 = index3;
+ }
+
+
+ ///
+ public readonly ushort Size(int polygonAttributeCount)
+ {
+ return (ushort)(6u + (polygonAttributeCount * 2u));
+ }
+
+ ///
+ public readonly void Write(EndianStackWriter writer, int polygonAttributeCount)
+ {
+ writer.WriteUShort(Index1);
+ writer.WriteUShort(Index2);
+ writer.WriteUShort(Index3);
+
+ if(polygonAttributeCount > 0)
+ {
+ writer.WriteUShort(Attribute1);
+ if(polygonAttributeCount > 1)
+ {
+ writer.WriteUShort(Attribute2);
+ if(polygonAttributeCount > 2)
+ {
+ writer.WriteUShort(Attribute3);
+ }
+ }
+ }
+ }
+
+ ///
+ /// Reads a chunk volume triangle off an endian stack reader. Advances the address by the number of bytes read.
+ ///
+ /// Reader to read from.
+ /// Address at which to start reading.
+ /// Number of attributes to read for the triangle.
+ /// The triangle that was read.
+ public static ChunkVolumeTriangle Read(EndianStackReader reader, ref uint address, int polygonAttributeCount)
+ {
+ ChunkVolumeTriangle result = new(
+ reader.ReadUShort(address),
+ reader.ReadUShort(address + 2),
+ reader.ReadUShort(address + 4));
+
+ address += 6;
+
+ if(polygonAttributeCount > 0)
+ {
+ result.Attribute1 = reader.ReadUShort(address);
+ address += 2;
+
+ if(polygonAttributeCount > 1)
+ {
+ result.Attribute2 = reader.ReadUShort(address);
+ address += 2;
+
+ if(polygonAttributeCount > 2)
+ {
+ result.Attribute3 = reader.ReadUShort(address);
+ address += 2;
+ }
+ }
+ }
+
+ return result;
+ }
+
+
+ readonly object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ ///
+ /// Creates a clone of the triangle.
+ ///
+ /// The cloned triangle.
+ public readonly ChunkVolumeTriangle Clone()
+ {
+ return this;
+ }
+
+ ///
+ public override readonly string ToString()
+ {
+ return $"Triangle - {{ {Index1}, {Index2}, {Index3} }}";
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/Structs/IChunkVolumePolygon.cs b/src/SA3D.Modeling/Mesh/Chunk/Structs/IChunkVolumePolygon.cs
new file mode 100644
index 0000000..2d3d08b
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/Structs/IChunkVolumePolygon.cs
@@ -0,0 +1,37 @@
+using SA3D.Common.IO;
+using System;
+
+namespace SA3D.Modeling.Mesh.Chunk.Structs
+{
+ ///
+ /// Chunk volume polygon interface.
+ ///
+ public interface IChunkVolumePolygon : ICloneable
+ {
+ ///
+ /// Number of indices in the polygon.
+ ///
+ public int NumIndices { get; }
+
+ ///
+ /// Access and set vertex indices of the polygon.
+ ///
+ /// The index of the corner.
+ /// The vertex index.
+ public ushort this[int index] { get; set; }
+
+ ///
+ /// Calculates the size of the polygon in bytes.
+ ///
+ /// Number of attributes for every polygon.
+ /// The size of the polygon in bytes
+ public ushort Size(int polygonAttributeCount);
+
+ ///
+ /// Write the polygon to an endian stack writer.
+ ///
+ /// The writer to write to.
+ /// Number of attributes for every polygon to write.
+ public abstract void Write(EndianStackWriter writer, int polygonAttributeCount);
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/VertexChunk.cs b/src/SA3D.Modeling/Mesh/Chunk/VertexChunk.cs
new file mode 100644
index 0000000..482f291
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/VertexChunk.cs
@@ -0,0 +1,393 @@
+using SA3D.Common.IO;
+using SA3D.Modeling.Mesh.Chunk.Structs;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Chunk
+{
+ ///
+ /// Set of vertex data of a chunk model
+ ///
+ public class VertexChunk : ICloneable
+ {
+ ///
+ /// Type of vertex chunk.
+ ///
+ public VertexChunkType Type { get; }
+
+ ///
+ /// Various attributes.
+ ///
+ public byte Attributes { get; }
+
+ ///
+ /// Determines how vertices are applied to the vertex cache.
+ ///
+ public WeightStatus WeightStatus => (WeightStatus)(Attributes & 3);
+
+ ///
+ /// Offset that gets added to every index in the vertices.
+ ///
+ public ushort IndexOffset { get; set; }
+
+ ///
+ /// Whether the chunk has weighted vertex data.
+ ///
+ public bool HasWeight => Type.CheckHasWeights();
+
+ ///
+ /// Whether the vertices contain normals.
+ ///
+ public bool HasNormals => Type.CheckHasNormal();
+
+ ///
+ /// Whether the vertices contain diffuse colors.
+ ///
+ public bool HasDiffuseColors => Type.CheckHasDiffuseColor();
+
+ ///
+ /// Whether the vertices contain specular colors.
+ ///
+ public bool HasSpecularColors => Type.CheckHasSpecularColor();
+
+ ///
+ /// Vertices of the chunk
+ ///
+ public ChunkVertex[] Vertices { get; }
+
+
+ ///
+ /// Creates a new Vertex chunk.
+ ///
+ /// Vertex chunk type.
+ /// Attributes of the chunk.
+ /// Index offset for all vertices.
+ /// Vertex data.
+ public VertexChunk(VertexChunkType type, byte attributes, ushort indexOffset, ChunkVertex[] vertices)
+ {
+ if(!Enum.IsDefined(type) || type is VertexChunkType.End or VertexChunkType.Null)
+ {
+ throw new ArgumentException($"Vertex chunk type is invalid: {type}", nameof(type));
+ }
+
+ Type = type;
+ Attributes = attributes;
+ IndexOffset = indexOffset;
+ Vertices = vertices;
+ }
+
+ ///
+ /// Creates a new Vertex chunk with all relevant data
+ ///
+ /// Vertex chunk type.
+ /// Determines how vertices are applied to the vertex cache.
+ /// Index offset for all vertices.
+ /// Vertex data.
+ public VertexChunk(VertexChunkType type, WeightStatus weightstatus, ushort indexOffset, ChunkVertex[] vertices)
+ : this(type, (byte)weightstatus, indexOffset, vertices) { }
+
+
+ ///
+ /// Writes a vertex chunk to an endian stack writer. Splits it up into multiple chunks if necessary.
+ ///
+ /// The writer to write to.
+ public void Write(EndianStackWriter writer)
+ {
+ if(Vertices.Length > short.MaxValue)
+ {
+ throw new InvalidOperationException($"Vertex count ({Vertices.Length}) exceeds maximum vertex count (32767)");
+ }
+
+ ushort vertSize = Type.GetIntegerSize();
+ ushort vertexLimitPerChunk = (ushort)((ushort.MaxValue - 1) / vertSize); // -1 because header2 also counts as part of the size, which is always there
+ ChunkVertex[] remainingVerts = (ChunkVertex[])Vertices.Clone();
+ uint header1Base = (uint)Type | (uint)(Attributes << 8);
+ ushort offset = IndexOffset;
+
+ bool hasNormal = Type.CheckHasNormal();
+ bool vec4 = Type.CheckIsVec4();
+ bool normal32 = Type.CheckIsNormal32();
+
+ while(remainingVerts.Length > 0)
+ {
+ ushort vertCount = ushort.Min((ushort)remainingVerts.Length, vertexLimitPerChunk);
+ ushort size = (ushort)((vertCount * vertSize) + 1);
+
+ writer.WriteUInt(header1Base | (uint)(size << 16));
+ writer.WriteUInt(offset | (uint)(vertCount << 16));
+
+ for(int i = 0; i < vertCount; i++)
+ {
+ ChunkVertex vtx = remainingVerts[i];
+ writer.WriteVector3(vtx.Position);
+ if(vec4)
+ {
+ writer.WriteFloat(1.0f);
+ }
+
+ if(hasNormal)
+ {
+ if(normal32)
+ {
+ ushort x = (ushort)Math.Round((vtx.Normal.X + 1) * 0x1FF); // map [-1,1] into 10 bits (0-1022); * 0x3FF reached 2046 and overflowed into the neighboring component
+ ushort y = (ushort)Math.Round((vtx.Normal.Y + 1) * 0x1FF);
+ ushort z = (ushort)Math.Round((vtx.Normal.Z + 1) * 0x1FF);
+
+ uint composed = (uint)((x << 20) | (y << 10) | z);
+ writer.WriteUInt(composed);
+ }
+ else
+ {
+ writer.WriteVector3(vtx.Normal);
+ if(vec4)
+ {
+ writer.WriteFloat(0.0f);
+ }
+ }
+ }
+
+ switch(Type)
+ {
+ case VertexChunkType.Diffuse:
+ case VertexChunkType.NormalDiffuse:
+ case VertexChunkType.Normal32Diffuse:
+ writer.WriteColor(vtx.Diffuse, ColorIOType.ARGB8_32);
+ break;
+ case VertexChunkType.DiffuseSpecular5:
+ case VertexChunkType.NormalDiffuseSpecular5:
+ writer.WriteColor(vtx.Diffuse, ColorIOType.RGB565);
+ writer.WriteColor(vtx.Specular, ColorIOType.RGB565);
+ break;
+ case VertexChunkType.DiffuseSpecular4:
+ case VertexChunkType.NormalDiffuseSpecular4:
+ writer.WriteColor(vtx.Diffuse, ColorIOType.ARGB4);
+ writer.WriteColor(vtx.Specular, ColorIOType.RGB565);
+ break;
+ case VertexChunkType.Intensity:
+ case VertexChunkType.NormalIntensity:
+ writer.WriteUShort((ushort)Math.Round(vtx.Diffuse.GetLuminance() * ushort.MaxValue));
+ writer.WriteUShort((ushort)Math.Round(vtx.Specular.GetLuminance() * ushort.MaxValue));
+ break;
+ case VertexChunkType.Attributes:
+ case VertexChunkType.UserAttributes:
+ case VertexChunkType.NormalAttributes:
+ case VertexChunkType.NormalUserAttributes:
+ case VertexChunkType.Normal32UserAttributes:
+ writer.WriteUInt(vtx.Attributes);
+ break;
+ case VertexChunkType.Blank:
+ case VertexChunkType.BlankVec4:
+ case VertexChunkType.Normal:
+ case VertexChunkType.NormalVec4:
+ case VertexChunkType.Normal32:
+ break;
+ case VertexChunkType.Null:
+ case VertexChunkType.End:
+ default:
+ throw new InvalidOperationException(); // cant be reached
+ }
+ }
+
+ remainingVerts = remainingVerts.Skip(vertCount).ToArray();
+ if(!Type.CheckHasWeights())
+ {
+ offset += vertCount;
+ }
+ }
+ }
+
+ ///
+ /// Writes an array of vertex chunks to an endian stack writer. Includes NULL and END chunks.
+ ///
+ /// The writer to write to.
+ /// Chunks to write.
+ /// The address at which the chunks were written
+ public static uint WriteArray(EndianStackWriter writer, IEnumerable chunks)
+ {
+ uint result = writer.PointerPosition;
+
+ foreach(VertexChunk? cnk in chunks)
+ {
+ if(cnk == null)
+ {
+ writer.WriteEmpty(8);
+ }
+ else
+ {
+ cnk.Write(writer);
+ }
+ }
+
+ // end chunk
+ writer.WriteUInt(0xFF);
+ writer.WriteEmpty(4);
+
+ return result;
+ }
+
+ ///
+ /// Reads a vertex chunk off an endian stack reader.
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The vertex chunk that was read.
+ public static VertexChunk Read(EndianStackReader reader, ref uint address)
+ {
+ uint header1 = reader.ReadUInt(address);
+ byte attribs = (byte)((header1 >> 8) & 0xFF);
+ VertexChunkType type = (VertexChunkType)(header1 & 0xFF);
+
+ if(!Enum.IsDefined(type) || type is VertexChunkType.End or VertexChunkType.Null)
+ {
+ throw new FormatException($"Vertex chunk type is invalid: {type}");
+ }
+
+ uint header2 = reader.ReadUInt(address + 4);
+ ushort indexOffset = (ushort)(header2 & 0xFFFF);
+ ChunkVertex[] vertices = new ChunkVertex[(ushort)(header2 >> 16)];
+
+ address += 8;
+
+ uint vec4 = type.CheckIsVec4() ? 4u : 0u;
+ bool hasNormal = type.CheckHasNormal();
+ bool normal32 = type.CheckIsNormal32();
+
+ for(int i = 0; i < vertices.Length; i++)
+ {
+ ChunkVertex vtx = new(reader.ReadVector3(ref address), Color.ColorWhite, Color.ColorWhite);
+ address += vec4;
+
+ if(!hasNormal)
+ {
+ vtx.Normal = Vector3.UnitY;
+ }
+ else if(normal32)
+ {
+ const float componentFactor = 1f / 0x1FF; // components are 10 bits; 1/ushort.MaxValue mapped every value to roughly -1
+
+ uint composed = reader.ReadUInt(address);
+ ushort x = (ushort)((composed >> 20) & 0x3FF);
+ ushort y = (ushort)((composed >> 10) & 0x3FF);
+ ushort z = (ushort)(composed & 0x3FF);
+
+ vtx.Normal = new Vector3(
+ (x * componentFactor) - 1f,
+ (y * componentFactor) - 1f,
+ (z * componentFactor) - 1f);
+
+ address += 4;
+ }
+ else
+ {
+ vtx.Normal = reader.ReadVector3(ref address);
+ address += vec4;
+ }
+
+ switch(type)
+ {
+ case VertexChunkType.Diffuse:
+ case VertexChunkType.NormalDiffuse:
+ case VertexChunkType.Normal32Diffuse:
+ vtx.Diffuse = reader.ReadColor(ref address, ColorIOType.ARGB8_32);
+ break;
+ case VertexChunkType.DiffuseSpecular5:
+ case VertexChunkType.NormalDiffuseSpecular5:
+ vtx.Diffuse = reader.ReadColor(ref address, ColorIOType.RGB565);
+ vtx.Specular = reader.ReadColor(ref address, ColorIOType.RGB565);
+ break;
+ case VertexChunkType.DiffuseSpecular4:
+ case VertexChunkType.NormalDiffuseSpecular4:
+ vtx.Diffuse = reader.ReadColor(ref address, ColorIOType.ARGB4);
+ vtx.Specular = reader.ReadColor(ref address, ColorIOType.RGB565);
+ break;
+ case VertexChunkType.Intensity:
+ case VertexChunkType.NormalIntensity:
+ byte diffuseIntensity = (byte)(reader.ReadUShort(address) >> 8); // high byte of the 16-bit intensity; >> 16 always produced 0
+ byte specularIntensity = (byte)(reader.ReadUShort(address + 2) >> 8);
+
+ vtx.Diffuse = new(diffuseIntensity, diffuseIntensity, diffuseIntensity);
+ vtx.Specular = new(specularIntensity, specularIntensity, specularIntensity);
+ address += 4;
+ break;
+ case VertexChunkType.Attributes:
+ case VertexChunkType.UserAttributes:
+ case VertexChunkType.NormalAttributes:
+ case VertexChunkType.NormalUserAttributes:
+ case VertexChunkType.Normal32UserAttributes:
+ vtx.Attributes = reader.ReadUInt(address);
+ address += 4;
+ break;
+ case VertexChunkType.Blank:
+ case VertexChunkType.BlankVec4:
+ case VertexChunkType.Normal:
+ case VertexChunkType.NormalVec4:
+ case VertexChunkType.Normal32:
+ break;
+ case VertexChunkType.Null:
+ case VertexChunkType.End:
+ default:
+ throw new InvalidOperationException(); // cant be reached
+ }
+
+ vertices[i] = vtx;
+ }
+
+ return new VertexChunk(type, attribs, indexOffset, vertices);
+ }
+
+ ///
+ /// Reads an array of chunks (respects NULL and END chunks).
+ ///
+ /// The reader to read from.
+ /// Address at which to start reading.
+ /// The read vertex chunks.
+ public static VertexChunk?[] ReadArray(EndianStackReader reader, uint address)
+ {
+ List result = new();
+
+ VertexChunkType readType()
+ {
+ return (VertexChunkType)(reader.ReadUInt(address) & 0xFF);
+ }
+
+ for(VertexChunkType type = readType(); type != VertexChunkType.End; type = readType())
+ {
+ if(type == VertexChunkType.Null)
+ {
+ result.Add(null);
+ address += 8;
+ continue;
+ }
+
+ result.Add(Read(reader, ref address));
+ }
+
+ return result.ToArray();
+ }
+
+
+ object ICloneable.Clone()
+ {
+ return Clone();
+ }
+
+ ///
+ /// Creates a deep clone of the vertex chunk.
+ ///
+ ///
+ public VertexChunk Clone()
+ {
+ return new VertexChunk(Type, Attributes, IndexOffset, (ChunkVertex[])Vertices.Clone());
+ }
+
+ ///
+ public override string ToString()
+ {
+ return $"{Type}, {WeightStatus}, {IndexOffset} : [{Vertices.Length}]";
+ }
+ }
+}
+
diff --git a/src/SA3D.Modeling/Mesh/Chunk/VertexChunkType.cs b/src/SA3D.Modeling/Mesh/Chunk/VertexChunkType.cs
new file mode 100644
index 0000000..e1a9f61
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/VertexChunkType.cs
@@ -0,0 +1,113 @@
+namespace SA3D.Modeling.Mesh.Chunk
+{
+ ///
+ /// Types of vertex chunks.
+ ///
+ public enum VertexChunkType : byte
+ {
+ ///
+ /// Null chunk.
+ ///
+ Null = 0,
+
+ ///
+ /// Position only; Uses GPU ready 4 component vector.
+ ///
+ BlankVec4 = ChunkTypeExtensions._vertex + 0,
+
+ ///
+ /// Contains: Normals; Uses GPU ready 4 component vector.
+ ///
+ NormalVec4 = ChunkTypeExtensions._vertex + 1,
+
+ ///
+ /// Position only.
+ ///
+ Blank = ChunkTypeExtensions._vertex + 2,
+
+ ///
+ /// Contains: Diffuse colors (BGRA8).
+ ///
+ Diffuse = ChunkTypeExtensions._vertex + 3,
+
+ ///
+ /// Contains: User defined attributes.
+ ///
+ UserAttributes = ChunkTypeExtensions._vertex + 4,
+
+ ///
+ /// Contains: System defined attributes.
+ ///
+ Attributes = ChunkTypeExtensions._vertex + 5,
+
+ ///
+ /// Contains: Diffuse colors (RGB565), Specular colors (RGB565).
+ ///
+ DiffuseSpecular5 = ChunkTypeExtensions._vertex + 6,
+
+ ///
+ /// Contains: Diffuse colors (RGB4444), Specular colors (RGB565).
+ ///
+ DiffuseSpecular4 = ChunkTypeExtensions._vertex + 7,
+
+ ///
+ /// Contains: Diffuse intensity (16-bit), Specular intensity (16-bit).
+ ///
+ Intensity = ChunkTypeExtensions._vertex + 8,
+
+ ///
+ /// Contains: Normals.
+ ///
+ Normal = ChunkTypeExtensions._vertex + 9,
+
+ ///
+ /// Contains: Normals, Normal, Diffuse colors (BGRA32).
+ ///
+ NormalDiffuse = ChunkTypeExtensions._vertex + 10,
+
+ ///
+ /// Contains: Normals, User defined attributes.
+ ///
+ NormalUserAttributes = ChunkTypeExtensions._vertex + 11,
+
+ ///
+ /// Contains: Normals, System defined attributes.
+ ///
+ NormalAttributes = ChunkTypeExtensions._vertex + 12,
+
+ ///
+ /// Contains: Normals, Diffuse colors (RGB565), Specular colors (RGB565).
+ ///
+ NormalDiffuseSpecular5 = ChunkTypeExtensions._vertex + 13,
+
+ ///
+ /// Contains: Normals, Diffuse colors (RGB4444), Specular colors (RGB565).
+ ///
+ NormalDiffuseSpecular4 = ChunkTypeExtensions._vertex + 14,
+
+ ///
+ /// Contains: Normals, Diffuse intensity (16-bit), Specular intensity (16-bit).
+ ///
+ NormalIntensity = ChunkTypeExtensions._vertex + 15,
+
+ ///
+ /// Contains: 32 Bit vertex normals (first 2 bits unused, each channel takes 10).
+ ///
+ Normal32 = ChunkTypeExtensions._vertex + 16,
+
+ ///
+ /// Contains: 32 Bit vertex normals (first 2 bits unused, each channel takes 10), Diffuse color (BGRA32).
+ ///
+ Normal32Diffuse = ChunkTypeExtensions._vertex + 17,
+
+ ///
+ /// Contains: 32 Bit vertex normals (first 2 bits unused, each channel takes 10), user attributes.
+ ///
+ Normal32UserAttributes = ChunkTypeExtensions._vertex + 18,
+
+ ///
+ /// End marker chunk.
+ ///
+ End = 255
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Chunk/WeightStatus.cs b/src/SA3D.Modeling/Mesh/Chunk/WeightStatus.cs
new file mode 100644
index 0000000..3127210
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Chunk/WeightStatus.cs
@@ -0,0 +1,23 @@
+namespace SA3D.Modeling.Mesh.Chunk
+{
+ ///
+ /// Vertex chunk weight status.
+ ///
+ public enum WeightStatus
+ {
+ ///
+ /// Start of a weighted model (replaces cached vertices).
+ ///
+ Start,
+
+ ///
+ /// Middle of a weighted model (adds onto cached vertices).
+ ///
+ Middle,
+
+ ///
+ /// End of a weighted model (adds onto cached vertices and normalizes them).
+ ///
+ End
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Converters/BasicConverter.cs b/src/SA3D.Modeling/Mesh/Converters/BasicConverter.cs
new file mode 100644
index 0000000..6202acd
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Converters/BasicConverter.cs
@@ -0,0 +1,417 @@
+using SA3D.Common;
+using SA3D.Common.Lookup;
+using SA3D.Modeling.Mesh.Basic;
+using SA3D.Modeling.Mesh.Basic.Polygon;
+using SA3D.Modeling.Mesh.Buffer;
+using SA3D.Modeling.Mesh.Weighted;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.Strippify;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Converters
+{
+ ///
+ /// Provides buffer conversion methods for BASIC
+ ///
+ internal static class BasicConverter
+ {
+ #region Convert To Basic
+
+ /// <summary>
+ /// Maps a buffer material field-by-field to an equivalent BASIC material.
+ /// Note the inversions: BASIC stores <c>DoubleSided</c> (inverse of backface culling)
+ /// and buffer "NormalMapping" maps to BASIC "EnvironmentMap".
+ /// </summary>
+ /// <param name="mat">Buffer material to convert.</param>
+ /// <returns>New BASIC material with equivalent surface attributes.</returns>
+ private static BasicMaterial ConvertToBasicMaterial(BufferMaterial mat)
+ {
+ return new(BasicMaterial.DefaultValues)
+ {
+ DiffuseColor = mat.Diffuse,
+ SpecularColor = mat.Specular,
+ SpecularExponent = mat.SpecularExponent,
+ TextureID = mat.TextureIndex,
+ FilterMode = mat.TextureFiltering,
+ MipmapDistanceMultiplier = mat.MipmapDistanceMultiplier,
+ SuperSample = mat.AnisotropicFiltering,
+ ClampU = mat.ClampU,
+ ClampV = mat.ClampV,
+ MirrorU = mat.MirrorU,
+ MirrorV = mat.MirrorV,
+ UseAlpha = mat.UseAlpha,
+ SourceAlpha = mat.SourceBlendMode,
+ DestinationAlpha = mat.DestinationBlendmode,
+ DoubleSided = !mat.BackfaceCulling,
+
+ IgnoreLighting = mat.NoLighting,
+ IgnoreSpecular = mat.NoSpecular,
+ UseTexture = mat.UseTexture,
+ EnvironmentMap = mat.NormalMapping
+ };
+ }
+
+ /// <summary>
+ /// Converts strippified buffer corners to BASIC multi-polygons, flattening
+ /// per-corner texcoords and colors into parallel arrays (one entry per corner,
+ /// in strip order).
+ /// </summary>
+ /// <param name="strips">Corner strips to convert.</param>
+ /// <param name="reversedStrips">Per-strip winding-reversal flags (parallel to <paramref name="strips"/>).</param>
+ /// <param name="polygons">Resulting BASIC polygons (one per strip).</param>
+ /// <param name="texcoords">Flattened per-corner texture coordinates.</param>
+ /// <param name="colors">Flattened per-corner colors.</param>
+ private static void ConvertToBasicStrips(
+ BufferCorner[][] strips,
+ bool[] reversedStrips,
+ out IBasicPolygon[] polygons,
+ out Vector2[] texcoords,
+ out Color[] colors)
+ {
+ polygons = new IBasicPolygon[strips.Length];
+
+ int cornerCount = strips.Sum(x => x.Length);
+ texcoords = new Vector2[cornerCount];
+ colors = new Color[cornerCount];
+
+ // absoluteIndex runs over all corners of all strips, indexing the flat arrays.
+ int absoluteIndex = 0;
+
+ for(int i = 0; i < strips.Length; i++)
+ {
+ BufferCorner[] strip = strips[i];
+
+ BasicMultiPolygon polygon = new((ushort)strip.Length, reversedStrips[i]);
+
+ for(int j = 0; j < strip.Length; j++, absoluteIndex++)
+ {
+ BufferCorner corner = strip[j];
+
+ polygon.Indices[j] = corner.VertexIndex;
+ colors[absoluteIndex] = corner.Color;
+ texcoords[absoluteIndex] = corner.Texcoord;
+ }
+
+ polygons[i] = polygon;
+ }
+ }
+
+ /// <summary>
+ /// Converts a flat corner list (3 corners per triangle) to BASIC triangle
+ /// polygons, flattening texcoords and colors into parallel per-corner arrays.
+ /// Corners beyond the last full triple are ignored (integer division).
+ /// </summary>
+ /// <param name="corners">Triangle corners; length expected to be a multiple of 3.</param>
+ /// <param name="polygons">Resulting triangles (one per corner triple).</param>
+ /// <param name="texcoords">Flattened per-corner texture coordinates.</param>
+ /// <param name="colors">Flattened per-corner colors.</param>
+ private static void ConvertToBasicTriangles(
+ BufferCorner[] corners,
+ out IBasicPolygon[] polygons,
+ out Vector2[] texcoords,
+ out Color[] colors)
+ {
+ polygons = new IBasicPolygon[corners.Length / 3];
+ texcoords = new Vector2[corners.Length];
+ colors = new Color[corners.Length];
+
+ int absoluteIndex = 0;
+
+ for(int i = 0; i < polygons.Length; i++)
+ {
+ BufferCorner corner1 = corners[absoluteIndex];
+ colors[absoluteIndex] = corner1.Color;
+ texcoords[absoluteIndex] = corner1.Texcoord;
+ absoluteIndex++;
+
+ BufferCorner corner2 = corners[absoluteIndex];
+ colors[absoluteIndex] = corner2.Color;
+ texcoords[absoluteIndex] = corner2.Texcoord;
+ absoluteIndex++;
+
+ BufferCorner corner3 = corners[absoluteIndex];
+ colors[absoluteIndex] = corner3.Color;
+ texcoords[absoluteIndex] = corner3.Texcoord;
+ absoluteIndex++;
+
+ polygons[i] = new BasicTriangle(corner1.VertexIndex, corner2.VertexIndex, corner3.VertexIndex);
+ }
+ }
+
+ /// <summary>
+ /// Builds a BASIC mesh from buffer corners, picking triangle strips or plain
+ /// triangles depending on which encoding is smaller (2 bytes per index; strips
+ /// additionally pay one length entry per strip).
+ /// </summary>
+ /// <param name="bCorners">Triangle corners (3 per triangle).</param>
+ /// <param name="hasColors">Whether vertex colors should be stored on the mesh.</param>
+ /// <param name="index">Material index of the mesh; also used in generated labels.</param>
+ <param name="identifier">Unique identifier suffix for generated data labels.</param>
+ /// <returns>The converted BASIC mesh.</returns>
+ private static BasicMesh ConvertToBasicMesh(BufferCorner[] bCorners, bool hasColors, int index, string identifier)
+ {
+ BufferCorner[][] strips = TriangleStrippifier.Global.StrippifyNoDegen(bCorners, out bool[] reversed);
+
+ // Compare encoded sizes: raw triangle list vs. strip list (indices are 2 bytes each;
+ // each strip carries one extra length/header entry).
+ int triangleByteLength = bCorners.Length * 2;
+ int stripByteLength = (strips.Length + strips.Sum(x => x.Length)) * 2;
+
+ BasicPolygonType type;
+ IBasicPolygon[] polygons;
+ Vector2[] texcoords;
+ Color[] colors;
+
+ if(stripByteLength < triangleByteLength)
+ {
+ type = BasicPolygonType.TriangleStrips;
+ ConvertToBasicStrips(strips, reversed, out polygons, out texcoords, out colors);
+ }
+ else
+ {
+ type = BasicPolygonType.Triangles;
+ ConvertToBasicTriangles(bCorners, out polygons, out texcoords, out colors);
+ }
+
+ // Texcoords are only worth storing if any corner has a nonzero UV.
+ bool hasTexcoords = texcoords.Any(x => x != default);
+
+ BasicMesh basicmesh = new(type, polygons, (ushort)index, false, hasColors, hasTexcoords);
+
+ if(hasColors)
+ {
+ basicmesh.Colors = new LabeledArray<Color>("vcolor_" + index + "_" + identifier, colors);
+ }
+
+ if(hasTexcoords)
+ {
+ basicmesh.Texcoords = new LabeledArray<Vector2>("uv_" + index + "_" + identifier, texcoords);
+ }
+
+ return basicmesh;
+ }
+
+ /// <summary>
+ /// Removes duplicate position/normal pairs from a BASIC attach and remaps all
+ /// polygon indices accordingly. Returns the input attach unchanged when no
+ /// duplicates exist (distinct-map creation fails).
+ /// NOTE(review): polygon indices are remapped in place on the existing meshes,
+ /// which are then reused in the returned attach.
+ /// </summary>
+ /// <param name="attach">Attach to optimize.</param>
+ /// <returns>Attach with deduplicated vertex data.</returns>
+ private static BasicAttach OptimizeBasicVertices(BasicAttach attach)
+ {
+ PositionNormal[] vertices = new PositionNormal[attach.Positions.Length];
+
+ for(int i = 0; i < vertices.Length; i++)
+ {
+ vertices[i] = new(attach.Positions[i], attach.Normals[i]);
+ }
+
+ if(!DistinctMap<PositionNormal>.TryCreateDistinctMap(vertices, out DistinctMap<PositionNormal> distinctMap))
+ {
+ return attach;
+ }
+
+ LabeledArray<Vector3> positions = new(attach.Positions.Label, distinctMap.Map!.Length);
+ LabeledArray<Vector3> normals = new(attach.Normals.Label, positions.Length);
+
+ for(int i = 0; i < positions.Length; i++)
+ {
+ PositionNormal pn = distinctMap.Values[i];
+ positions[i] = pn.position;
+ normals[i] = pn.normal;
+ }
+
+ // Remap every polygon corner index to the deduplicated vertex list.
+ foreach(BasicMesh mesh in attach.Meshes)
+ {
+ foreach(IBasicPolygon polygon in mesh.Polygons)
+ {
+ for(int i = 0; i < polygon.NumIndices; i++)
+ {
+ polygon[i] = distinctMap[polygon[i]];
+ }
+ }
+ }
+
+ return new BasicAttach(positions, normals, attach.Meshes, attach.Materials);
+ }
+
+ /// <summary>
+ /// Converts weighted mesh data to BASIC attaches on the given node tree.
+ /// BASIC cannot represent vertex weights, so weighted input is rejected unless
+ /// <paramref name="ignoreWeights"/> is set.
+ /// </summary>
+ /// <param name="model">Root of the node tree to attach the converted meshes to.</param>
+ /// <param name="meshData">Weighted meshes to convert.</param>
+ /// <param name="optimize">Whether to deduplicate vertex data after conversion.</param>
+ /// <param name="ignoreWeights">Suppress the weighted-model check.</param>
+ public static void ConvertWeightedToBasic(
+ Node model,
+ WeightedMesh[] meshData,
+ bool optimize,
+ bool ignoreWeights)
+ {
+ if(meshData.Any(x => x.IsWeighted) && !ignoreWeights)
+ {
+ throw new FormatException("Model is weighted, cannot convert to BASIC format!");
+ }
+
+ Node[] nodes = model.GetTreeNodes();
+ BasicAttach?[] attaches = new BasicAttach[nodes.Length];
+
+ meshData = WeightedMesh.MergeAtRoots(meshData);
+
+ foreach(WeightedMesh weightedAttach in meshData)
+ {
+ Vector3[] positions = new Vector3[weightedAttach.Vertices.Length];
+ Vector3[] normals = new Vector3[positions.Length];
+ string identifier = StringExtensions.GenerateIdentifier();
+
+ for(int i = 0; i < positions.Length; i++)
+ {
+ WeightedVertex vtx = weightedAttach.Vertices[i];
+
+ positions[i] = vtx.Position;
+ normals[i] = vtx.Normal;
+ }
+
+ // putting together polygons
+ BasicMesh[] meshes = new BasicMesh[weightedAttach.TriangleSets.Length];
+ BasicMaterial[] materials = new BasicMaterial[meshes.Length];
+
+ for(int i = 0; i < meshes.Length; i++)
+ {
+ meshes[i] = ConvertToBasicMesh(weightedAttach.TriangleSets[i], weightedAttach.HasColors, i, identifier);
+ materials[i] = ConvertToBasicMaterial(weightedAttach.Materials[i]);
+ }
+
+ BasicAttach result = new(positions, normals, meshes, materials);
+
+ if(optimize)
+ {
+ result = OptimizeBasicVertices(result);
+ }
+
+ result.Label = weightedAttach.Label ?? "BASIC_" + StringExtensions.GenerateIdentifier();
+
+ // The same attach may be shared by several root nodes.
+ foreach(int index in weightedAttach.RootIndices)
+ {
+ attaches[index] = result;
+ }
+ }
+
+ model.ClearAttachesFromTree();
+
+ // Linking the attaches to the nodes
+ for(int i = 0; i < nodes.Length; i++)
+ {
+ nodes[i].Attach = attaches[i];
+ }
+ }
+
+ #endregion
+
+ #region Convert to Buffer
+
+ /// <summary>
+ /// Maps a BASIC material back to an equivalent buffer material
+ /// (inverse of <c>ConvertToBasicMaterial</c>).
+ /// </summary>
+ /// <param name="mat">BASIC material to convert.</param>
+ /// <returns>New buffer material with equivalent surface attributes.</returns>
+ private static BufferMaterial ConvertToBufferMaterial(BasicMaterial mat)
+ {
+ return new(BufferMaterial.DefaultValues)
+ {
+ Diffuse = mat.DiffuseColor,
+ Specular = mat.SpecularColor,
+ SpecularExponent = mat.SpecularExponent,
+ TextureIndex = mat.TextureID,
+ TextureFiltering = mat.FilterMode,
+ MipmapDistanceMultiplier = mat.MipmapDistanceMultiplier,
+ AnisotropicFiltering = mat.SuperSample,
+ ClampU = mat.ClampU,
+ ClampV = mat.ClampV,
+ MirrorU = mat.MirrorU,
+ MirrorV = mat.MirrorV,
+ UseAlpha = mat.UseAlpha,
+ SourceBlendMode = mat.SourceAlpha,
+ DestinationBlendmode = mat.DestinationAlpha,
+ BackfaceCulling = !mat.DoubleSided,
+
+ NoLighting = mat.IgnoreLighting,
+ NoSpecular = mat.IgnoreSpecular,
+ UseTexture = mat.UseTexture,
+ NormalMapping = mat.EnvironmentMap
+ };
+ }
+
+ /// <summary>
+ /// Converts a BASIC mesh's polygons to buffer corners. Strip/N-poly meshes are
+ /// joined into a single strip sequence; triangle/quad meshes become a flat
+ /// corner list, with quads additionally emitting a 6-index-per-quad triangle
+ /// index list.
+ /// </summary>
+ /// <param name="mesh">BASIC mesh to convert.</param>
+ /// <param name="corners">Resulting buffer corners.</param>
+ /// <param name="indexList">Triangulation index list (quads only); otherwise null.</param>
+ private static void ConvertPolygons(BasicMesh mesh, out BufferCorner[] corners, out uint[]? indexList)
+ {
+ if(mesh.PolygonType is BasicPolygonType.NPoly or BasicPolygonType.TriangleStrips)
+ {
+ indexList = null;
+
+ IEnumerable<BasicMultiPolygon> polys = mesh.Polygons.Cast<BasicMultiPolygon>();
+
+ BufferCorner[][] strips = new BufferCorner[polys.Count()][];
+ bool[] reversed = new bool[strips.Length];
+
+ int stripNum = 0;
+ int absoluteIndex = 0;
+
+ foreach(BasicMultiPolygon poly in polys)
+ {
+ BufferCorner[] strip = new BufferCorner[poly.Indices.Length];
+
+ for(int i = 0; i < strip.Length; i++, absoluteIndex++)
+ {
+ strip[i] = new BufferCorner(
+ poly.Indices[i],
+ mesh.Colors?[absoluteIndex] ?? BufferMesh.DefaultColor,
+ mesh.Texcoords?[absoluteIndex] ?? Vector2.Zero);
+ }
+
+ strips[stripNum] = strip;
+ reversed[stripNum] = poly.Reversed;
+ stripNum++;
+ }
+
+ corners = TriangleStrippifier.JoinStrips(strips, reversed);
+ }
+ else
+ {
+ int absoluteIndex = 0;
+ corners = new BufferCorner[mesh.PolygonCornerCount];
+
+ foreach(IBasicPolygon triangle in mesh.Polygons)
+ {
+ foreach(ushort index in triangle)
+ {
+ corners[absoluteIndex] = new BufferCorner(
+ index,
+ mesh.Colors?[absoluteIndex] ?? BufferMesh.DefaultColor,
+ mesh.Texcoords?[absoluteIndex] ?? Vector2.Zero);
+ absoluteIndex++;
+ }
+ }
+
+ if(mesh.PolygonType == BasicPolygonType.Quads)
+ {
+ // Each quad (4 corners) is split into two triangles: 0-1-2 and 2-1-3.
+ indexList = new uint[mesh.Polygons.Length * 6];
+
+ for(uint i = 0, q = 0; i < corners.Length; i += 4, q += 6)
+ {
+ indexList[q] = i;
+ indexList[q + 1] = i + 1;
+ indexList[q + 2] = i + 2;
+
+ indexList[q + 3] = i + 2;
+ indexList[q + 4] = i + 1;
+ indexList[q + 5] = i + 3;
+ }
+ }
+ else
+ {
+ indexList = null;
+ }
+ }
+
+ }
+
+ /// <summary>
+ /// Converts a BASIC attach to buffer meshes. The first buffer mesh carries the
+ /// shared vertex data; subsequent meshes only reference it.
+ /// </summary>
+ /// <param name="attach">BASIC attach to convert.</param>
+ /// <param name="optimize">Whether to optimize polygons and the resulting mesh set.</param>
+ /// <returns>The converted buffer meshes.</returns>
+ public static BufferMesh[] ConvertBasicToBuffer(BasicAttach attach, bool optimize)
+ {
+ BufferVertex[] verts = new BufferVertex[attach.Positions.Length];
+ for(ushort i = 0; i < verts.Length; i++)
+ {
+ verts[i] = new BufferVertex(attach.Positions[i], attach.Normals[i], i);
+ }
+
+ bool hasNormals = attach.Normals.Any(x => !x.Equals(BufferMesh.DefaultNormal));
+
+ List<BufferMesh> meshes = new();
+ foreach(BasicMesh mesh in attach.Meshes)
+ {
+ // creating the material; out-of-range material indices fall back to defaults
+ BufferMaterial bMat = ConvertToBufferMaterial(
+ mesh.MaterialIndex < attach.Materials.Length
+ ? attach.Materials[mesh.MaterialIndex]
+ : BasicMaterial.DefaultValues);
+
+ ConvertPolygons(mesh, out BufferCorner[] corners, out uint[]? indexList);
+ bool strippified = mesh.PolygonType is BasicPolygonType.TriangleStrips or BasicPolygonType.NPoly;
+
+ // first mesh includes vertex data
+ BufferMesh bmesh = meshes.Count == 0
+ ? new(verts, bMat, corners, indexList, strippified, false, hasNormals, mesh.Colors != null, 0, 0)
+ : new(bMat, corners, indexList, strippified, mesh.Colors != null, 0);
+
+ if(optimize)
+ {
+ bmesh.OptimizePolygons();
+ }
+
+ meshes.Add(bmesh);
+ }
+
+ return optimize ? BufferMesh.Optimize(meshes) : meshes.ToArray();
+ }
+
+ ///
+ /// Generates Buffer meshes for all attaches in the model
+ ///
+ /// The tip of the model hierarchy to convert
+ /// Whether the buffer model should be optimized
+ public static void BufferBasicModel(Node model, bool optimize = true)
+ {
+ foreach(Attach atc in model.GetTreeAttaches())
+ {
+ atc.MeshData = ConvertBasicToBuffer((BasicAttach)atc, optimize);
+ }
+ }
+
+ #endregion
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Converters/ChunkConverter.cs b/src/SA3D.Modeling/Mesh/Converters/ChunkConverter.cs
new file mode 100644
index 0000000..b9ad25c
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Converters/ChunkConverter.cs
@@ -0,0 +1,902 @@
+using SA3D.Common;
+using SA3D.Modeling.Mesh.Buffer;
+using SA3D.Modeling.Mesh.Chunk;
+using SA3D.Modeling.Mesh.Chunk.PolyChunks;
+using SA3D.Modeling.Mesh.Chunk.Structs;
+using SA3D.Modeling.Mesh.Weighted;
+using SA3D.Modeling.ObjectData;
+using SA3D.Modeling.Strippify;
+using SA3D.Modeling.Structs;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Converters
+{
+ internal static class ChunkConverter
+ {
+ /// <summary>
+ /// Conversion result for chunk attaches, supporting vertex-cache offsetting
+ /// so several results can share one vertex cache region.
+ /// </summary>
+ private readonly struct ChunkResult : IOffsetableAttachResult
+ {
+ public string Label { get; }
+ public int VertexCount { get; }
+ public bool Weighted { get; }
+ public int[] AttachIndices { get; }
+ public ChunkAttach[] Attaches { get; }
+
+ Attach[] IOffsetableAttachResult.Attaches => Attaches;
+
+ public ChunkResult(string label, int vertexCount, bool weighted, int[] attachIndices, ChunkAttach[] attaches)
+ {
+ Label = label;
+ VertexCount = vertexCount;
+ Weighted = weighted;
+ AttachIndices = attachIndices;
+ Attaches = attaches;
+ }
+
+ /// <summary>
+ /// Shifts every vertex-cache index (vertex chunk write offsets and strip
+ /// corner read indices) by <paramref name="offset"/>.
+ /// </summary>
+ public void ModifyVertexOffset(int offset)
+ {
+ foreach(ChunkAttach attach in Attaches)
+ {
+ if(attach.VertexChunks != null)
+ {
+ foreach(VertexChunk vtx in attach.VertexChunks.OfType<VertexChunk>())
+ {
+ vtx.IndexOffset += (ushort)offset;
+ }
+ }
+
+ if(attach.PolyChunks != null)
+ {
+ foreach(StripChunk stripChunk in attach.PolyChunks.OfType<StripChunk>())
+ {
+ foreach(ChunkStrip strip in stripChunk.Strips)
+ {
+ for(int i = 0; i < strip.Corners.Length; i++)
+ {
+ strip.Corners[i].Index += (ushort)offset;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private class OffsettableChunkConverter : OffsetableAttachConverter
+ {
+ /// <summary>
+ /// Pairs a weighted vertex with its original index, so vertices can be
+ /// reordered while polygon indices remain mappable.
+ /// </summary>
+ private readonly struct IndexedWeightVertex
+ {
+ // Original index of the vertex in the source vertex array.
+ public readonly int index;
+ public readonly WeightedVertex vertex;
+
+ public IndexedWeightVertex(int index, WeightedVertex vertex)
+ {
+ this.index = index;
+ this.vertex = vertex;
+ }
+
+ public override string ToString()
+ {
+ return $"{index} {vertex}";
+ }
+ }
+
+ /// <summary>
+ /// Colored vertex bound rigidly (binary weight) to a single node index.
+ /// Equality covers node index, position and color, enabling deduplication
+ /// via distinct maps.
+ /// </summary>
+ private readonly struct BinaryWeightColorVertex : IEquatable<BinaryWeightColorVertex>
+ {
+ public readonly int nodeIndex;
+ public readonly Vector3 position;
+ public readonly Color color;
+
+ public BinaryWeightColorVertex(int nodeIndex, Vector3 position, Color color)
+ {
+ this.nodeIndex = nodeIndex;
+ this.position = position;
+ this.color = color;
+ }
+
+ public override bool Equals(object? obj)
+ {
+ return obj is BinaryWeightColorVertex vertex &&
+ nodeIndex == vertex.nodeIndex &&
+ position.Equals(vertex.position) &&
+ color.Equals(vertex.color);
+ }
+
+ bool IEquatable<BinaryWeightColorVertex>.Equals(BinaryWeightColorVertex other)
+ {
+ return Equals(other);
+ }
+
+ public override int GetHashCode()
+ {
+ return HashCode.Combine(nodeIndex, position, color);
+ }
+ }
+
+
+ /// <summary>
+ /// Converts a colored weighted mesh where every vertex is rigidly bound to one
+ /// node (binary weights). Vertices are deduplicated, sorted by node index, and
+ /// emitted as one diffuse vertex chunk per node; all poly chunks go on the
+ /// last node's attach.
+ /// </summary>
+ /// <param name="wba">Weighted mesh to convert.</param>
+ /// <returns>The conversion result.</returns>
+ private static ChunkResult ConvertWeightedBinaryColored(WeightedMesh wba)
+ {
+ List<BinaryWeightColorVertex> vertices = new();
+ ChunkCorner[][] cornerSets = new ChunkCorner[wba.TriangleSets.Length][];
+
+ // Get every vertex per corner
+ for(int i = 0; i < wba.TriangleSets.Length; i++)
+ {
+ BufferCorner[] bufferCorners = wba.TriangleSets[i];
+ ChunkCorner[] corners = new ChunkCorner[bufferCorners.Length];
+ for(int j = 0; j < bufferCorners.Length; j++)
+ {
+ BufferCorner bc = bufferCorners[j];
+ corners[j] = new()
+ {
+ Index = (ushort)vertices.Count,
+ Texcoord = bc.Texcoord
+ };
+
+ WeightedVertex vertex = wba.Vertices[bc.VertexIndex];
+ vertices.Add(new(vertex.GetMaxWeightIndex(), vertex.Position, bc.Color));
+ }
+
+ cornerSets[i] = corners;
+ }
+
+ // first, get rid of all duplicate vertices
+ DistinctMap<BinaryWeightColorVertex> distinctVerts = vertices.CreateDistinctMap();
+ (int index, BinaryWeightColorVertex vert)[] sortedVertices = new (int index, BinaryWeightColorVertex)[distinctVerts.Values.Count];
+
+ for(int i = 0; i < sortedVertices.Length; i++)
+ {
+ sortedVertices[i] = (i, distinctVerts.Values[i]);
+ }
+
+ // now sort the vertices by node index
+ sortedVertices = sortedVertices.OrderBy(x => x.vert.nodeIndex).ToArray();
+
+ // Create a vertex chunk per node index
+ List<(int nodeIndex, VertexChunk chunk)> vertexChunks = new();
+
+ int currentNodeIndex = -1;
+ List<ChunkVertex> chunkVertices = new();
+ ushort currentVertexOffset = 0;
+ // Maps deduplicated vertex index -> position in the node-sorted order.
+ int[] sortedVertMap = new int[sortedVertices.Length];
+ for(int i = 0; i < sortedVertices.Length; i++)
+ {
+ (int index, BinaryWeightColorVertex vert) vert = sortedVertices[i];
+ if(vert.vert.nodeIndex != currentNodeIndex)
+ {
+ // Node boundary: flush the collected vertices as a chunk for the previous node.
+ if(chunkVertices.Count > 0)
+ {
+ vertexChunks.Add((currentNodeIndex, new(
+ VertexChunkType.Diffuse,
+ WeightStatus.Start,
+ currentVertexOffset,
+ chunkVertices.ToArray())));
+ }
+
+ currentVertexOffset = (ushort)i;
+ chunkVertices.Clear();
+ currentNodeIndex = vert.vert.nodeIndex;
+ }
+
+ chunkVertices.Add(new(vert.vert.position, vert.vert.color, Color.ColorWhite));
+ sortedVertMap[vert.index] = i;
+ }
+
+ // Flush the final node's vertices.
+ vertexChunks.Add((currentNodeIndex, new(
+ VertexChunkType.Diffuse,
+ WeightStatus.Start,
+ currentVertexOffset,
+ chunkVertices.ToArray())));
+
+ // get the poly chunks
+ List<PolyChunk> polyChunks = new();
+ for(int i = 0; i < cornerSets.Length; i++)
+ {
+ ChunkCorner[] corners = cornerSets[i];
+ for(int j = 0; j < corners.Length; j++)
+ {
+ int index = distinctVerts[corners[j].Index];
+ corners[j].Index = (ushort)sortedVertMap[index];
+ }
+
+ polyChunks.AddRange(CreateStripChunk(corners, wba.Materials[i], wba.WriteSpecular));
+ }
+
+ // assemble the attaches
+ List<int> nodeAttachIndices = new();
+ List<ChunkAttach> attaches = new();
+
+ for(int i = 0; i < vertexChunks.Count - 1; i++)
+ {
+ (int nodeIndex, VertexChunk chunks) = vertexChunks[i];
+ nodeAttachIndices.Add(nodeIndex);
+ attaches.Add(new(new[] { chunks }, null));
+ }
+
+ // The last attach carries all poly chunks.
+ (int lastNodeindex, VertexChunk lastVertexChunk) = vertexChunks[^1];
+ nodeAttachIndices.Add(lastNodeindex);
+ attaches.Add(new(new[] { lastVertexChunk }, polyChunks.ToArray()));
+
+ return new(
+ wba.Label ?? "CHUNK_" + StringExtensions.GenerateIdentifier(),
+ sortedVertices.Length,
+ true,
+ nodeAttachIndices.ToArray(),
+ attaches.ToArray());
+ }
+
+ /// <summary>
+ /// Converts a genuinely weighted mesh to chunk attaches. Single-weight
+ /// vertices come first (sorted by node), multi-weight vertices follow; per
+ /// node, vertex chunks are emitted with Start/Middle/End weight status
+ /// depending on whether the node is the first, an intermediate, or the last
+ /// influence of each vertex.
+ /// </summary>
+ /// <param name="wba">Weighted mesh to convert.</param>
+ /// <returns>The conversion result.</returns>
+ private static ChunkResult ConvertWeighted(WeightedMesh wba)
+ {
+
+ List<IndexedWeightVertex> singleWeights = new();
+ List<IndexedWeightVertex> multiWeights = new();
+
+ for(int i = 0; i < wba.Vertices.Length; i++)
+ {
+ WeightedVertex vtx = wba.Vertices[i];
+ int weightCount = vtx.GetWeightCount();
+ if(weightCount == 0)
+ {
+ throw new InvalidDataException("Vertex has no specified weights");
+ }
+ else if(weightCount == 1)
+ {
+ singleWeights.Add(new(i, vtx));
+ }
+ else
+ {
+ multiWeights.Add(new(i, vtx));
+ }
+ }
+
+ singleWeights = singleWeights.OrderBy(x => x.vertex.GetFirstWeightIndex()).ThenBy(x => x.index).ToList();
+ int multiWeightOffset = singleWeights.Count;
+
+ int[] firstWeightIndices = multiWeights.Select(x => x.vertex.GetFirstWeightIndex()).ToArray();
+ int[] lastWeightIndices = multiWeights.Select(x => x.vertex.GetLastWeightIndex()).ToArray();
+
+ // grouping the vertices together by node
+ List<(int nodeIndex, VertexChunk[] chunks)> vertexChunks = new();
+
+ foreach(int nodeIndex in wba.DependingNodeIndices.Order())
+ {
+ List<VertexChunk> chunks = new();
+
+ // find out if any singleWeights belong to the node index
+ // (singleWeights is sorted by node, so they form one contiguous run)
+ int singleWeightIndexOffset = 0;
+ List<ChunkVertex> singleWeightVerts = new();
+ for(int i = 0; i < singleWeights.Count; i++)
+ {
+ WeightedVertex vert = singleWeights[i].vertex;
+ bool contains = vert.Weights![nodeIndex] > 0f;
+ if(contains)
+ {
+ if(singleWeightVerts.Count == 0)
+ {
+ singleWeightIndexOffset = i;
+ }
+
+ Vector3 pos = new(vert.Position.X, vert.Position.Y, vert.Position.Z);
+ singleWeightVerts.Add(new(pos, vert.Normal, (ushort)i, 1f));
+ }
+
+ if(!contains && singleWeightVerts.Count > 0)
+ {
+ break;
+ }
+ }
+
+ if(singleWeightVerts.Count > 0)
+ {
+ chunks.Add(
+ new VertexChunk(
+ VertexChunkType.Normal,
+ WeightStatus.Start,
+ (ushort)singleWeightIndexOffset,
+ singleWeightVerts.ToArray()));
+ }
+
+ // now the ones with weights. we differentiate between
+ // those that initiate and those that continue
+ List<ChunkVertex> initWeightsVerts = new();
+ List<ChunkVertex> continueWeightsVerts = new();
+ List<ChunkVertex> endWeightsVerts = new();
+
+ for(int i = 0; i < multiWeights.Count; i++)
+ {
+ WeightedVertex vert = multiWeights[i].vertex;
+
+ float weight = vert.Weights![nodeIndex];
+ if(weight == 0f)
+ {
+ continue;
+ }
+
+ ChunkVertex chunkVert = new(
+ vert.Position,
+ vert.Normal,
+ (ushort)(i + multiWeightOffset),
+ weight);
+
+ if(firstWeightIndices[i] == nodeIndex)
+ {
+ initWeightsVerts.Add(chunkVert);
+ }
+ else if(lastWeightIndices[i] == nodeIndex)
+ {
+ endWeightsVerts.Add(chunkVert);
+ }
+ else
+ {
+ continueWeightsVerts.Add(chunkVert);
+ }
+ }
+
+ if(initWeightsVerts.Count > 0)
+ {
+ chunks.Add(
+ new VertexChunk(
+ VertexChunkType.NormalAttributes,
+ WeightStatus.Start, 0,
+ initWeightsVerts.ToArray()));
+ }
+
+ if(continueWeightsVerts.Count > 0)
+ {
+ chunks.Add(
+ new VertexChunk(
+ VertexChunkType.NormalAttributes,
+ WeightStatus.Middle, 0,
+ continueWeightsVerts.ToArray()));
+ }
+
+ if(endWeightsVerts.Count > 0)
+ {
+ chunks.Add(
+ new VertexChunk(
+ VertexChunkType.NormalAttributes,
+ WeightStatus.End, 0,
+ endWeightsVerts.ToArray()));
+ }
+
+ vertexChunks.Add((nodeIndex, chunks.ToArray()));
+ }
+
+ // mapping the indices for the polygons
+ ushort[] indexMap = new ushort[wba.Vertices.Length];
+ for(int i = 0; i < singleWeights.Count; i++)
+ {
+ indexMap[singleWeights[i].index] = (ushort)i;
+ }
+
+ for(int i = 0; i < multiWeights.Count; i++)
+ {
+ indexMap[multiWeights[i].index] = (ushort)(i + multiWeightOffset);
+ }
+
+ // assemble the polygon chunks
+ List<PolyChunk> polyChunks = new();
+ for(int i = 0; i < wba.TriangleSets.Length; i++)
+ {
+ // mapping the triangles to the chunk format
+ BufferCorner[] bufferCorners = wba.TriangleSets[i];
+ ChunkCorner[] corners = new ChunkCorner[bufferCorners.Length];
+ for(int j = 0; j < bufferCorners.Length; j++)
+ {
+ BufferCorner bc = bufferCorners[j];
+ corners[j] = new()
+ {
+ Index = indexMap[bc.VertexIndex],
+ Texcoord = bc.Texcoord
+ };
+ }
+
+ polyChunks.AddRange(CreateStripChunk(corners, wba.Materials[i], wba.WriteSpecular));
+ }
+
+ // assemble the attaches
+ List<int> nodeAttachIndices = new();
+ List<ChunkAttach> attaches = new();
+
+ for(int i = 0; i < vertexChunks.Count - 1; i++)
+ {
+ (int nodeIndex, VertexChunk[] chunks) = vertexChunks[i];
+ nodeAttachIndices.Add(nodeIndex);
+ attaches.Add(new(chunks, null));
+ }
+
+ // The last attach carries all poly chunks.
+ (int lastNodeindex, VertexChunk[] lastVertexChunk) = vertexChunks[^1];
+ nodeAttachIndices.Add(lastNodeindex);
+ attaches.Add(new(lastVertexChunk, polyChunks.ToArray()));
+
+ return new(
+ wba.Label ?? "CHUNK_" + StringExtensions.GenerateIdentifier(),
+ wba.Vertices.Length,
+ true,
+ nodeAttachIndices.ToArray(),
+ attaches.ToArray());
+ }
+
+ /// <summary>
+ /// Dispatches weighted conversion: colored meshes whose vertices are all
+ /// rigidly bound (or with <c>ForceVertexColors</c> set) use the binary-colored
+ /// path, everything else uses the general weighted path.
+ /// </summary>
+ protected override ChunkResult ConvertWeighted(WeightedMesh wba, bool optimize)
+ {
+ bool binaryWeighted = wba.HasColors;
+ if(binaryWeighted && !wba.ForceVertexColors)
+ {
+ // Any genuinely weighted vertex disqualifies the binary-colored path.
+ foreach(WeightedVertex vertex in wba.Vertices)
+ {
+ if(vertex.IsWeighted())
+ {
+ binaryWeighted = false;
+ break;
+ }
+ }
+ }
+
+ if(binaryWeighted)
+ {
+ return ConvertWeightedBinaryColored(wba);
+ }
+ else
+ {
+ return ConvertWeighted(wba);
+ }
+ }
+
+ /// <summary>
+ /// Converts an unweighted mesh to a single chunk attach. Colored meshes emit
+ /// per-corner diffuse vertices (deduplicated); uncolored meshes emit the
+ /// vertices 1:1 with normals.
+ /// </summary>
+ /// <param name="wba">Weighted mesh (without actual weights) to convert.</param>
+ /// <returns>The conversion result.</returns>
+ protected override ChunkResult ConvertWeightless(WeightedMesh wba, bool optimize)
+ {
+ ChunkVertex[] vertices;
+ ChunkCorner[][] cornerSets = new ChunkCorner[wba.TriangleSets.Length][];
+
+ VertexChunkType type;
+ if(wba.HasColors)
+ {
+ type = VertexChunkType.Diffuse;
+ List<ChunkVertex> colorVertices = new();
+ for(int i = 0; i < wba.TriangleSets.Length; i++)
+ {
+ BufferCorner[] bufferCorners = wba.TriangleSets[i];
+ ChunkCorner[] corners = new ChunkCorner[bufferCorners.Length];
+ for(int j = 0; j < bufferCorners.Length; j++)
+ {
+ BufferCorner bc = bufferCorners[j];
+ corners[j] = new()
+ {
+ Index = (ushort)colorVertices.Count,
+ Texcoord = bc.Texcoord
+ };
+
+ WeightedVertex vertex = wba.Vertices[bc.VertexIndex];
+ colorVertices.Add(new(vertex.Position, bc.Color, Color.ColorWhite));
+ }
+
+ cornerSets[i] = corners;
+ }
+
+ // first, get rid of all duplicate vertices
+ if(colorVertices.TryCreateDistinctMap(out DistinctMap<ChunkVertex> distinctVerts))
+ {
+ for(int i = 0; i < cornerSets.Length; i++)
+ {
+ ChunkCorner[] corners = cornerSets[i];
+ for(int j = 0; j < corners.Length; j++)
+ {
+ corners[j].Index = distinctVerts[corners[j].Index];
+ }
+ }
+ }
+
+ vertices = distinctVerts.ValueArray;
+ }
+ else
+ {
+ type = VertexChunkType.Normal;
+ vertices = new ChunkVertex[wba.Vertices.Length];
+ // converting the vertices 1:1, with normal information
+ for(int i = 0; i < wba.Vertices.Length; i++)
+ {
+ WeightedVertex vert = wba.Vertices[i];
+ Vector3 position = new(vert.Position.X, vert.Position.Y, vert.Position.Z);
+ vertices[i] = new(position, vert.Normal);
+ }
+
+ for(int i = 0; i < wba.TriangleSets.Length; i++)
+ {
+ BufferCorner[] bufferCorners = wba.TriangleSets[i];
+ ChunkCorner[] corners = new ChunkCorner[bufferCorners.Length];
+ for(int j = 0; j < bufferCorners.Length; j++)
+ {
+ BufferCorner bc = bufferCorners[j];
+ corners[j] = new()
+ {
+ Index = bc.VertexIndex,
+ Texcoord = bc.Texcoord
+ };
+ }
+
+ cornerSets[i] = corners;
+ }
+ }
+
+ VertexChunk vtxChunk = new(type, WeightStatus.Start, 0, vertices);
+ List<PolyChunk> polyChunks = new();
+ for(int i = 0; i < cornerSets.Length; i++)
+ {
+ polyChunks.AddRange(CreateStripChunk(cornerSets[i], wba.Materials[i], wba.WriteSpecular));
+ }
+
+ return new(
+ wba.Label ?? "CHUNK_" + StringExtensions.GenerateIdentifier(),
+ vertices.Length,
+ false,
+ wba.RootIndices.ToArray(),
+ new[] {
+ new ChunkAttach(
+ new[] { vtxChunk },
+ polyChunks.ToArray())
+ });
+ }
+
+ /// <summary>
+ /// Strippifies corners and wraps them in the three poly chunks needed to draw
+ /// them: material, texture, and strip chunk (in that order).
+ /// </summary>
+ /// <param name="corners">Triangle corners to strippify.</param>
+ /// <param name="material">Source material for the material/texture/strip flags.</param>
+ /// <param name="writeSpecular">Whether to include specular data in the material chunk.</param>
+ /// <returns>Material chunk, texture chunk and strip chunk.</returns>
+ private static PolyChunk[] CreateStripChunk(ChunkCorner[] corners, BufferMaterial material, bool writeSpecular)
+ {
+ ChunkCorner[][] stripCorners = TriangleStrippifier.Global.StrippifyNoDegen(corners, out bool[] reversed);
+ ChunkStrip[] strips = new ChunkStrip[stripCorners.Length];
+
+ for(int i = 0; i < strips.Length; i++)
+ {
+ strips[i] = new(stripCorners[i], reversed[i]);
+ }
+
+ // Environment-mapped strips generate UVs at runtime, so stored UVs are omitted.
+ bool hasUV = material.UseTexture && !material.NormalMapping;
+ PolyChunkType stripType = hasUV ? PolyChunkType.Strip_Tex : PolyChunkType.Strip_Blank;
+
+ StripChunk stripchunk = new(stripType, strips, 0)
+ {
+ FlatShading = material.Flat,
+ IgnoreAmbient = material.NoAmbient,
+ IgnoreLight = material.NoLighting,
+ IgnoreSpecular = material.NoSpecular,
+ EnvironmentMapping = material.NormalMapping,
+ UseAlpha = material.UseAlpha,
+ DoubleSide = !material.BackfaceCulling
+ };
+
+ TextureChunk textureChunk = new()
+ {
+ ClampU = material.ClampU,
+ ClampV = material.ClampV,
+ MirrorU = material.MirrorU,
+ MirrorV = material.MirrorV,
+ FilterMode = material.TextureFiltering,
+ SuperSample = material.AnisotropicFiltering,
+ TextureID = (ushort)material.TextureIndex
+ };
+
+ MaterialChunk materialChunk = new()
+ {
+ SourceAlpha = material.SourceBlendMode,
+ DestinationAlpha = material.DestinationBlendmode,
+ Diffuse = material.Diffuse,
+ Ambient = material.Ambient,
+ };
+
+ if(writeSpecular)
+ {
+ materialChunk.Specular = material.Specular;
+ materialChunk.SpecularExponent = (byte)material.SpecularExponent;
+ }
+
+ return new PolyChunk[] { materialChunk, textureChunk, stripchunk };
+ }
+
+ /// <summary>
+ /// Transforms all vertex chunk positions and normals of a chunk attach by the
+ /// given matrix (positions as points, normals via TransformNormal).
+ /// </summary>
+ protected override void CorrectSpace(Attach attach, Matrix4x4 vertexMatrix)
+ {
+ ChunkAttach chunkAttach = (ChunkAttach)attach;
+ if(chunkAttach.VertexChunks == null)
+ {
+ return;
+ }
+
+ foreach(VertexChunk? vtxChunk in chunkAttach.VertexChunks)
+ {
+ if(vtxChunk == null)
+ {
+ continue;
+ }
+
+ // ChunkVertex is a value type: read, transform, write back.
+ for(int j = 0; j < vtxChunk.Vertices.Length; j++)
+ {
+ ChunkVertex vertex = vtxChunk.Vertices[j];
+
+ vertex.Position = Vector3.Transform(vertex.Position, vertexMatrix);
+ vertex.Normal = Vector3.TransformNormal(vertex.Normal, vertexMatrix);
+
+ vtxChunk.Vertices[j] = vertex;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Wraps already-converted attaches in a weighted <see cref="ChunkResult"/>.
+ /// </summary>
+ protected override ChunkResult WeightedClone(string label, int vertexCount, int[] attachIndices, Attach[] attaches)
+ {
+ return new(
+ label,
+ vertexCount,
+ true,
+ attachIndices,
+ attaches.Cast<ChunkAttach>().ToArray());
+ }
+
+ /// <summary>
+ /// Merges several chunk attaches into one by concatenating their vertex and
+ /// poly chunk lists in order.
+ /// </summary>
+ protected override Attach CombineAttaches(List<Attach> attaches, string label)
+ {
+ List<VertexChunk?> vertexChunks = new();
+ List<PolyChunk?> polyChunks = new();
+
+ foreach(ChunkAttach atc in attaches.Cast<ChunkAttach>())
+ {
+ if(atc.VertexChunks != null)
+ {
+ vertexChunks.AddRange(atc.VertexChunks);
+ }
+
+ if(atc.PolyChunks != null)
+ {
+ polyChunks.AddRange(atc.PolyChunks);
+ }
+ }
+
+ return new ChunkAttach(vertexChunks.ToArray(), polyChunks.ToArray()) { Label = label };
+ }
+ }
+
+ /// <summary>
+ /// Converts weighted mesh data to chunk attaches on the given node tree.
+ /// </summary>
+ /// <param name="model">Root of the node tree to attach the converted meshes to.</param>
+ /// <param name="meshData">Weighted meshes to convert.</param>
+ /// <param name="optimize">Whether to optimize the converted data.</param>
+ public static void ConvertWeightedToChunk(Node model, WeightedMesh[] meshData, bool optimize)
+ {
+ new OffsettableChunkConverter().Convert(model, meshData, optimize);
+ }
+
+ #region Convert to Buffer
+
+ /// <summary>
+ /// Converts a strip chunk's strips to a joined buffer corner sequence. Corner
+ /// colors come from the strip itself when it stores colors, otherwise from the
+ /// diffuse color of the referenced vertex in the vertex cache.
+ /// </summary>
+ /// <param name="chunk">Strip chunk to convert.</param>
+ /// <param name="vertexCache">Vertex cache for diffuse-color lookup.</param>
+ /// <returns>Joined buffer corners of all strips.</returns>
+ private static BufferCorner[] ConvertStripChunk(StripChunk chunk, ChunkVertex[] vertexCache)
+ {
+ bool hasColor = chunk.HasColors;
+
+ BufferCorner[][] bufferStrips = new BufferCorner[chunk.Strips.Length][];
+ bool[] reversed = new bool[bufferStrips.Length];
+
+ for(int i = 0; i < chunk.Strips.Length; i++)
+ {
+ ChunkStrip strip = chunk.Strips[i];
+ BufferCorner[] bufferStrip = new BufferCorner[strip.Corners.Length];
+
+ for(int j = 0; j < strip.Corners.Length; j++)
+ {
+ ChunkCorner corner = strip.Corners[j];
+
+ Color color = hasColor
+ ? corner.Color
+ : vertexCache[corner.Index].Diffuse;
+
+ bufferStrip[j] = new(corner.Index, color, corner.Texcoord);
+ }
+
+ bufferStrips[i] = bufferStrip;
+ reversed[i] = strip.Reversed;
+ }
+
+ return TriangleStrippifier.JoinStrips(bufferStrips, reversed);
+ }
+
+ /// <summary>
+ /// Generates buffer mesh data for every chunk attach in the model. Emulates
+ /// the chunk pipeline: vertex chunks write into a shared 64k vertex cache and
+ /// poly chunks update a running material state that is captured per strip chunk.
+ /// NOTE(review): the material state deliberately persists across attaches,
+ /// mirroring how chunk models carry state between draws.
+ /// </summary>
+ /// <param name="model">Root of the model hierarchy to buffer.</param>
+ /// <param name="optimize">Whether to optimize the resulting buffer meshes.</param>
+ public static void BufferChunkModel(Node model, bool optimize)
+ {
+ BufferMaterial material = BufferMaterial.DefaultValues;
+
+ ChunkVertex[] vertexCache = new ChunkVertex[0x10000];
+ Dictionary<ChunkAttach, PolyChunk?[]> activeChunks = GetActivePolyChunks(model);
+
+ foreach(ChunkAttach atc in model.GetTreeAttachEnumerable().OfType<ChunkAttach>())
+ {
+ List<BufferMesh> meshes = new();
+
+ BufferVertex[]? vertices = null;
+ bool continueWeight = false;
+ bool hasVertexNormals = false;
+ bool hasVertexColors = false;
+ ushort vertexWriteOffset = 0;
+
+ if(atc.VertexChunks != null)
+ {
+ for(int i = 0; i < atc.VertexChunks.Length; i++)
+ {
+ VertexChunk? cnk = atc.VertexChunks[i];
+
+ if(cnk == null)
+ {
+ continue;
+ }
+
+ List<BufferVertex> vertexList = new();
+ if(!cnk.HasWeight)
+ {
+ // Unweighted: vertices are cached sequentially from the chunk's offset.
+ for(int j = 0; j < cnk.Vertices.Length; j++)
+ {
+ ChunkVertex vtx = cnk.Vertices[j];
+ vertexCache[j + cnk.IndexOffset] = vtx;
+ vertexList.Add(new BufferVertex(vtx.Position, vtx.Normal, (ushort)j));
+ }
+ }
+ else
+ {
+ // Weighted: each vertex carries its own cache index and weight.
+ for(int j = 0; j < cnk.Vertices.Length; j++)
+ {
+ ChunkVertex vtx = cnk.Vertices[j];
+ vertexCache[vtx.Index + cnk.IndexOffset] = vtx;
+ vertexList.Add(new BufferVertex(vtx.Position, vtx.Normal, vtx.Index, vtx.Weight));
+ }
+ }
+
+ vertices = vertexList.ToArray();
+ continueWeight = cnk.WeightStatus != WeightStatus.Start;
+ hasVertexNormals = cnk.HasNormals;
+ hasVertexColors |= cnk.HasDiffuseColors;
+ vertexWriteOffset = cnk.IndexOffset;
+
+ // if not last
+ if(i < atc.VertexChunks.Length - 1)
+ {
+ meshes.Add(new BufferMesh(vertices, continueWeight, hasVertexNormals, vertexWriteOffset));
+ }
+ }
+ }
+
+ if(activeChunks.TryGetValue(atc, out PolyChunk?[]? polyChunks))
+ {
+ foreach(PolyChunk? chunk in polyChunks)
+ {
+ switch(chunk)
+ {
+ case BlendAlphaChunk blendAlphaChunk:
+ material.SourceBlendMode = blendAlphaChunk.SourceAlpha;
+ material.DestinationBlendmode = blendAlphaChunk.DestinationAlpha;
+ break;
+ case MipmapDistanceMultiplierChunk mmdmChunk:
+ material.MipmapDistanceMultiplier = mmdmChunk.MipmapDistanceMultiplier;
+ break;
+ case SpecularExponentChunk specularExponentChunk:
+ material.SpecularExponent = specularExponentChunk.SpecularExponent;
+ break;
+ case TextureChunk textureChunk:
+ material.TextureIndex = textureChunk.TextureID;
+ material.MirrorU = textureChunk.MirrorU;
+ material.MirrorV = textureChunk.MirrorV;
+ material.ClampU = textureChunk.ClampU;
+ material.ClampV = textureChunk.ClampV;
+ material.AnisotropicFiltering = textureChunk.SuperSample;
+ material.TextureFiltering = textureChunk.FilterMode;
+ break;
+ case MaterialChunk materialChunk:
+ material.SourceBlendMode = materialChunk.SourceAlpha;
+ material.DestinationBlendmode = materialChunk.DestinationAlpha;
+
+ if(materialChunk.Diffuse.HasValue)
+ {
+ material.Diffuse = materialChunk.Diffuse.Value;
+ }
+
+ if(materialChunk.Ambient.HasValue)
+ {
+ material.Ambient = materialChunk.Ambient.Value;
+ }
+
+ if(materialChunk.Specular.HasValue)
+ {
+ material.Specular = materialChunk.Specular.Value;
+ material.SpecularExponent = materialChunk.SpecularExponent;
+ }
+
+ break;
+ case StripChunk stripChunk:
+ material.Flat = stripChunk.FlatShading;
+ material.NoAmbient = stripChunk.IgnoreAmbient;
+ material.NoLighting = stripChunk.IgnoreLight;
+ material.NoSpecular = stripChunk.IgnoreSpecular;
+ material.NormalMapping = stripChunk.EnvironmentMapping;
+ material.UseTexture = stripChunk.TexcoordCount > 0 || stripChunk.EnvironmentMapping;
+ material.UseAlpha = stripChunk.UseAlpha;
+ material.BackfaceCulling = !stripChunk.DoubleSide;
+
+ BufferCorner[] corners = ConvertStripChunk(stripChunk, vertexCache);
+
+ bool hasColor = stripChunk.HasColors || hasVertexColors;
+
+ if(corners.Length > 0)
+ {
+ // The first strip chunk after a vertex chunk carries the vertex data.
+ if(vertices != null)
+ {
+ meshes.Add(new BufferMesh(vertices, material, corners, null, true, continueWeight, hasVertexNormals, hasColor, vertexWriteOffset, 0));
+ vertices = null;
+ }
+ else
+ {
+ meshes.Add(new BufferMesh(material, corners, null, true, hasColor, 0));
+ }
+ }
+
+ break;
+ default:
+ break;
+ }
+ }
+
+ }
+
+ // Vertex data that no strip chunk consumed still needs to be emitted.
+ if(vertices != null)
+ {
+ meshes.Add(new BufferMesh(vertices, continueWeight, hasVertexNormals, vertexWriteOffset));
+ }
+
+ atc.MeshData = optimize ? BufferMesh.Optimize(meshes) : meshes.ToArray();
+ }
+ }
+
+ #endregion
+
+ /// <summary>
+ /// Resolves cache-list/draw-list indirection: poly chunks following a cache
+ /// chunk are stored in the referenced cache slot instead of being drawn, and
+ /// draw chunks splice the cached chunks back into the active sequence.
+ /// </summary>
+ /// <param name="model">Model whose chunk attaches should be evaluated (in tree order).</param>
+ /// <returns>Per-attach array of the poly chunks that actually get drawn.</returns>
+ public static Dictionary<ChunkAttach, PolyChunk?[]> GetActivePolyChunks(Node model)
+ {
+ Dictionary<ChunkAttach, PolyChunk?[]> result = new();
+ List<PolyChunk?>[] polyChunkCache = Array.Empty<List<PolyChunk?>>();
+
+ foreach(ChunkAttach attach in model.GetTreeAttachEnumerable().OfType<ChunkAttach>())
+ {
+ if(attach.PolyChunks == null)
+ {
+ continue;
+ }
+
+ List<PolyChunk?> active = new();
+
+ // -1 = not caching; otherwise index of the cache slot being filled.
+ int cacheID = -1;
+ foreach(PolyChunk? polyChunk in attach.PolyChunks)
+ {
+ switch(polyChunk)
+ {
+ case CacheListChunk cache:
+ cacheID = cache.List;
+
+ if(polyChunkCache.Length <= cacheID)
+ {
+ Array.Resize(ref polyChunkCache, cacheID + 1);
+ }
+
+ polyChunkCache[cacheID] = new List<PolyChunk?>();
+ break;
+ case DrawListChunk draw:
+ active.AddRange(polyChunkCache[draw.List]);
+ break;
+ default:
+ if(cacheID > -1)
+ {
+ polyChunkCache[cacheID].Add(polyChunk);
+ }
+ else
+ {
+ active.Add(polyChunk);
+ }
+
+ break;
+ }
+
+ }
+
+
+ if(active.Count > 0)
+ {
+ result.Add(attach, active.ToArray());
+ }
+ }
+
+ return result;
+ }
+ }
+}
diff --git a/src/SA3D.Modeling/Mesh/Converters/FromWeightedConverter.cs b/src/SA3D.Modeling/Mesh/Converters/FromWeightedConverter.cs
new file mode 100644
index 0000000..514d37e
--- /dev/null
+++ b/src/SA3D.Modeling/Mesh/Converters/FromWeightedConverter.cs
@@ -0,0 +1,227 @@
+using SA3D.Common;
+using SA3D.Modeling.Mesh.Buffer;
+using SA3D.Modeling.Mesh.Weighted;
+using SA3D.Modeling.ObjectData;
+using System.Collections.Generic;
+using System.Linq;
+using System.Numerics;
+
+namespace SA3D.Modeling.Mesh.Converters
+{
+ internal static class FromWeightedConverter
+ {
+		/// <summary>
+		/// Conversion output bundle: converted buffer attaches paired with the node
+		/// indices they are assigned to.
+		/// </summary>
+		private readonly struct BufferResult : IOffsetableAttachResult
+		{
+			/// <summary>Label of the converted mesh.</summary>
+			public string Label { get; }
+
+			/// <summary>Number of vertices used by the result.</summary>
+			public int VertexCount { get; }
+
+			/// <summary>Whether the result uses vertex weights.</summary>
+			public bool Weighted { get; }
+
+			/// <summary>Node indices that the attaches belong to.</summary>
+			public int[] AttachIndices { get; }
+
+			/// <summary>The converted attaches.</summary>
+			public Attach[] Attaches { get; }
+
+			public BufferResult(string label, int vertexCount, bool weighted, int[] attachIndices, Attach[] attaches)
+			{
+				(Label, VertexCount, Weighted, AttachIndices, Attaches)
+					= (label, vertexCount, weighted, attachIndices, attaches);
+			}
+
+			/// <summary>
+			/// Shifts the vertex read and write offsets of every contained buffer mesh
+			/// by the given amount.
+			/// </summary>
+			public void ModifyVertexOffset(int offset)
+			{
+				foreach(Attach attach in Attaches)
+				{
+					foreach(BufferMesh mesh in attach.MeshData)
+					{
+						mesh.VertexWriteOffset = (ushort)(mesh.VertexWriteOffset + offset);
+						mesh.VertexReadOffset = (ushort)(mesh.VertexReadOffset + offset);
+					}
+				}
+			}
+		}
+
+ private class OffsettableBufferConverter : OffsetableAttachConverter
+ {
+			/// <summary>
+			/// Converts a weighted mesh whose vertices are influenced by multiple nodes.
+			/// Produces one attach per depending node; the last attach additionally
+			/// carries the polygon meshes.
+			/// </summary>
+			/// <param name="wba">Weighted mesh to convert.</param>
+			/// <param name="optimize">Whether to optimize the generated polygon data.</param>
+			protected override BufferResult ConvertWeighted(WeightedMesh wba, bool optimize)
+			{
+				List<(int nodeIndex, BufferMesh[])> meshSets = new();
+
+				// For every vertex: index of the first node influencing it. That node
+				// initializes the vertex in the buffer; later nodes blend into it.
+				int[] weightInits = wba.Vertices.Select(x => x.GetFirstWeightIndex()).ToArray();
+
+				foreach(int nodeIndex in wba.DependingNodeIndices)
+				{
+					List<BufferVertex> initVerts = new();
+					List<BufferVertex> continueVerts = new();
+
+					for(int i = 0; i < wba.Vertices.Length; i++)
+					{
+						WeightedVertex wVert = wba.Vertices[i];
+
+						float weight = wVert.Weights![nodeIndex];
+						if(weight == 0)
+						{
+							continue;
+						}
+
+						BufferVertex vert = new(wVert.Position, wVert.Normal, (ushort)i, weight);
+
+						if(weightInits[i] == nodeIndex)
+						{
+							initVerts.Add(vert);
+						}
+						else
+						{
+							continueVerts.Add(vert);
+						}
+					}
+
+					List<BufferMesh> vertexMeshes = new();
+
+					if(initVerts.Count > 0)
+					{
+						vertexMeshes.Add(new(initVerts.ToArray(), false, true, 0));
+					}
+
+					if(continueVerts.Count > 0)
+					{
+						// "Continue" meshes blend onto vertices already written by earlier nodes.
+						vertexMeshes.Add(new(continueVerts.ToArray(), true, true, 0));
+					}
+
+					meshSets.Add((nodeIndex, vertexMeshes.ToArray()));
+				}
+
+				BufferMesh[] polyMeshes = GetPolygonMeshes(wba);
+
+				if(optimize)
+				{
+					foreach(BufferMesh polyMesh in polyMeshes)
+					{
+						polyMesh.OptimizePolygons();
+					}
+				}
+
+				int[] nodeIndices = new int[meshSets.Count];
+				Attach[] attaches = new Attach[meshSets.Count];
+
+				// All nodes except the last only receive their vertex meshes.
+				for(int i = 0; i < meshSets.Count - 1; i++)
+				{
+					(int nodeIndex, BufferMesh[] vertexMeshes) = meshSets[i];
+					nodeIndices[i] = nodeIndex;
+					attaches[i] = new(vertexMeshes);
+				}
+
+				// The last node carries its vertex meshes plus all polygon meshes, so the
+				// polygons are rendered only after every vertex has been written.
+				int lastIndex = meshSets.Count - 1;
+				(int lastNodeIndex, BufferMesh[] lastMeshes) = meshSets[lastIndex];
+				nodeIndices[lastIndex] = lastNodeIndex;
+
+				List<BufferMesh> meshes = new();
+				meshes.AddRange(lastMeshes);
+				meshes.AddRange(polyMeshes);
+				attaches[lastIndex] = new(meshes.ToArray());
+
+				return new(
+					wba.Label ?? "BUFFER_" + StringExtensions.GenerateIdentifier(),
+					wba.Vertices.Length,
+					true,
+					nodeIndices,
+					attaches);
+			}
+
+			/// <summary>
+			/// Converts a weighted mesh without actual weights: all vertices belong to a
+			/// single node, so one attach holds both vertex and polygon data.
+			/// </summary>
+			/// <param name="wba">Weighted mesh to convert.</param>
+			/// <param name="optimize">Whether to optimize the generated mesh data.
+			/// NOTE(review): this parameter is currently unused here — the mesh is always
+			/// optimized below; confirm whether conditional optimization was intended.</param>
+			protected override BufferResult ConvertWeightless(WeightedMesh wba, bool optimize)
+			{
+				List<BufferMesh> meshes = new();
+
+				BufferVertex[] vertices = new BufferVertex[wba.Vertices.Length];
+
+				for(int i = 0; i < vertices.Length; i++)
+				{
+					WeightedVertex wVert = wba.Vertices[i];
+					vertices[i] = new(wVert.Position, wVert.Normal, (ushort)i);
+				}
+
+				BufferMesh[] polygonMeshes = GetPolygonMeshes(wba);
+
+				// One vertex mesh first, followed by all polygon meshes.
+				meshes.Add(new(vertices, false, true, 0));
+
+				meshes.AddRange(polygonMeshes);
+
+				BufferMesh[] result = BufferMesh.Optimize(meshes);
+
+				return new(
+					wba.Label ?? "BUFFER_" + StringExtensions.GenerateIdentifier(),
+					vertices.Length,
+					false,
+					wba.RootIndices.ToArray(),
+					new Attach[] { new(result) });
+			}
+
+ private static BufferMesh[] GetPolygonMeshes(WeightedMesh wba)
+ {
+ List